diff --git a/.cursor/rules/ai-integration.mdc b/.cursor/rules/ai-integration.mdc new file mode 100644 index 0000000..3d00054 --- /dev/null +++ b/.cursor/rules/ai-integration.mdc @@ -0,0 +1,34 @@ +--- +description: +globs: +alwaysApply: false +--- +# KI-Integration + +Die Anwendung integriert OpenAI für KI-Funktionalitäten: + +## Konfiguration +- [app.py](mdc:app.py): OpenAI-Client-Initialisierung +- [requirements.txt](mdc:requirements.txt): OpenAI SDK als Abhängigkeit + +## Endpunkte +- `/api/assistant`: Hauptendpunkt für KI-Anfragen + +## Funktionalitäten +- Chatbot-Integration: Benutzer können mit einem KI-Assistenten kommunizieren +- Inhaltsanalyse: KI kann Gedanken und Konzepte analysieren +- Vorschläge: Kontextbezogene Vorschläge basierend auf dem Benutzerkontext + +## Implementation +- Verwendet das OpenAI SDK für API-Aufrufe +- Kontextübergabe für konsistente Konversationen +- Streaming-Antworten für bessere Benutzererfahrung + +## Konfigurationsparameter +- `OPENAI_API_KEY`: API-Schlüssel (in .env-Datei) +- Das System verwendet vorwiegend die Chat-Completions-API + +## Sicherheitsmaßnahmen +- API-Schlüssel werden sicher über Umgebungsvariablen geladen +- Ratenbegrenzung und Fehlerbehandlung für API-Aufrufe +- Eingabevalidierung vor API-Anfragen diff --git a/.cursor/rules/authentication.mdc b/.cursor/rules/authentication.mdc new file mode 100644 index 0000000..0ed6641 --- /dev/null +++ b/.cursor/rules/authentication.mdc @@ -0,0 +1,36 @@ +--- +description: +globs: +alwaysApply: false +--- +# Authentifizierung und Benutzerrollen + +Die Anwendung nutzt Flask-Login für das Authentifizierungssystem: + +## Hauptkomponenten +- [LoginManager](mdc:app.py): Konfiguration in app.py +- [User Model](mdc:models.py): Die User-Klasse implementiert UserMixin für Flask-Login +- Passwort-Hashing: Verwendet Werkzeug Security für sichere Passwort-Speicherung + +## Authentifizierungsrouten +- `/login`: Benutzeranmeldung (GET/POST) +- `/register`: Benutzerregistrierung (GET/POST) +- `/logout`: Benutzerabmeldung + +## Benutzerrollen +- Reguläre Benutzer: Grundlegende Funktionen +- Administratoren (`is_admin=True`): Erweiterte Privilegien + +## Zugriffskontrollen +- `@login_required`: Decorator für routenspezifischen Authentifizierungsschutz +- `@admin_required`: Benutzerdefinierter Decorator für Admin-Zugriffskontrolle (siehe Skizze am Ende) + +## Sitzungsverwaltung +- Tracking der Anmeldezeit (`last_login`) +- Langlebige Sitzungen für Präferenzen (z.B. Dark Mode) +- Angepasste Flash-Nachrichten + +## Profilmanagement +- `/settings`: Benutzereinstellungen aktualisieren +- Passwortänderung +- Profildetails (Biografie, Avatar, etc.)
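+
+## Beispiel: admin_required-Decorator
+
+Der benutzerdefinierte Decorator kombiniert `@login_required` mit einer Admin-Prüfung; die folgende Skizze entspricht der Implementierung in [app.py](mdc:app.py):
+
+```python
+from functools import wraps
+from flask import flash, redirect, url_for
+from flask_login import login_required, current_user
+
+def admin_required(f):
+    @wraps(f)
+    @login_required
+    def decorated_function(*args, **kwargs):
+        # Zugriff nur für Administratoren (is_admin=True)
+        if not current_user.is_admin:
+            flash('Zugriff verweigert. Nur Administratoren dürfen diese Seite aufrufen.', 'error')
+            return redirect(url_for('index'))
+        return f(*args, **kwargs)
+    return decorated_function
+```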
diff --git a/.cursor/rules/configuration.mdc b/.cursor/rules/configuration.mdc new file mode 100644 index 0000000..80fd245 --- /dev/null +++ b/.cursor/rules/configuration.mdc @@ -0,0 +1,31 @@ +--- +description: +globs: +alwaysApply: false +--- +# Konfiguration und Umgebungsvariablen + +Die Anwendung verwendet Umgebungsvariablen für die Konfiguration: + +## Konfigurationsdateien +- [.env](mdc:.env): Haupt-Umgebungsvariablen (nicht in Git) +- [example.env](mdc:example.env): Beispiel-Konfiguration als Vorlage + +## Wichtige Konfigurationsparameter +- `SECRET_KEY`: Geheimer Schlüssel für Flask-Sitzungen +- `SQLALCHEMY_DATABASE_URI`: Datenbankverbindung +- `OPENAI_API_KEY`: API-Schlüssel für OpenAI-Integration + +## Anwendungsinitialisierung +- [run.py](mdc:run.py): Lädt Umgebungsvariablen und startet die Anwendung +- [app.py](mdc:app.py): Konfiguriert Flask mit den geladenen Umgebungsvariablen +- [init_db.py](mdc:init_db.py): Initialisiert die Datenbank mit Beispieldaten + +## Datenbank-Konfiguration +- SQLite-Datenbank im `/database`-Verzeichnis +- Automatische Erstellung der Datenbankstruktur bei Anwendungsstart +- Beispieldaten werden mit `init_database()` erstellt + +## Ausführung der Anwendung +- Entwicklungsserver: `python run.py` +- In Produktion: Nutzung von Gunicorn (siehe requirements.txt) diff --git a/.cursor/rules/data-models.mdc b/.cursor/rules/data-models.mdc new file mode 100644 index 0000000..0a008d1 --- /dev/null +++ b/.cursor/rules/data-models.mdc @@ -0,0 +1,31 @@ +--- +description: +globs: +alwaysApply: false +--- +# Datenmodelle + +Die Anwendung verwendet SQLAlchemy als ORM mit folgenden Hauptmodellen: + +## Benutzer und Authentifizierung +- [User](mdc:models.py): Benutzermodell mit Authentifizierung und Profildaten + +## Mind-Mapping und Wissensorganisation +- [Category](mdc:models.py): Wissenschaftliche Kategorien zur Organisation der Mindmap +- [MindMapNode](mdc:models.py): Knoten in der öffentlichen Mindmap +- [UserMindmap](mdc:models.py): Benutzerspezifische Mindmaps +- [UserMindmapNode](mdc:models.py): Speichert Positionen von Knoten in Benutzer-Mindmaps +- [MindmapNote](mdc:models.py): Private Notizen zu Mindmap-Elementen + +## Gedanken und Inhalte +- [Thought](mdc:models.py): Gedanken und Konzepte, die in Mindmaps verknüpft werden +- [ThoughtRelation](mdc:models.py): Verknüpfungen zwischen verschiedenen Gedanken +- [ThoughtRating](mdc:models.py): Bewertungen von Gedanken durch Benutzer +- [Comment](mdc:models.py): Kommentare zu Gedanken + +## Hauptbeziehungen +- Benutzer → Gedanken: 1-zu-n (Autor) +- Benutzer → MindMaps: 1-zu-n +- Gedanken ↔ MindMapNodes: n-zu-m +- Kategorien → MindMapNodes: 1-zu-n +- Gedanken ↔ Gedanken: über ThoughtRelation (gerichtete Beziehungen) diff --git a/.cursor/rules/development-workflow.mdc b/.cursor/rules/development-workflow.mdc new file mode 100644 index 0000000..c57bc98 --- /dev/null +++ b/.cursor/rules/development-workflow.mdc @@ -0,0 +1,32 @@ +--- +description: +globs: +alwaysApply: false +--- +# Entwicklungs-Workflow + +## Grundlegende Entwicklungsschritte +1. Umgebung einrichten: Python 3.11 und Abhängigkeiten installieren +2. `.env`-Datei basierend auf `example.env` erstellen +3. Datenbank initialisieren: `python init_db.py` +4. 
Entwicklungsserver starten: `python run.py` + +## Datenbankentwicklung +- Models in [models.py](mdc:models.py) definieren +- Migrationen bei Schemaänderungen durchführen +- Testdaten über [init_db.py](mdc:init_db.py) bereitstellen + +## Anwendungsentwicklung +- Neue Routen in [app.py](mdc:app.py) hinzufügen +- Frontend-Templates in `/templates` erstellen/anpassen +- API-Endpoints für AJAX/Frontend-Integration implementieren + +## Testing +- Tests mit pytest schreiben (siehe requirements.txt) +- Flask-Testumgebung für Integrationstests verwenden + +## Best Practices +- Immer auf Datenbankmodelle zurückgreifen (kein Raw-SQL) +- API-Endpunkte mit Authentifizierung schützen +- Flash-Nachrichten für Benutzerrückmeldungen verwenden +- Code-Dokumentation in deutscher Sprache halten diff --git a/.cursor/rules/frontend-structure.mdc b/.cursor/rules/frontend-structure.mdc new file mode 100644 index 0000000..8caaa3e --- /dev/null +++ b/.cursor/rules/frontend-structure.mdc @@ -0,0 +1,41 @@ +--- +description: +globs: +alwaysApply: false +--- +# Frontend-Struktur + +Die Anwendung verwendet ein Flask-Jinja2-Template-System mit JavaScript-Erweiterungen: + +## Template-Struktur +- `/templates`: Hauptverzeichnis für Jinja2-Templates +- `/templates/errors`: Fehlerseiten (404, 500, etc.) +- Layout-Templates für einheitliches Design + +## Frontend-Assets +- `/static/css`: CSS-Dateien (mit Tailwind) + - `/static/css/src`: Quell-CSS-Dateien +- `/static/js`: JavaScript-Dateien + - `/static/js/modules`: Modulare JS-Komponenten +- `/static/img`: Bilder und grafische Elemente + +## JavaScript-Funktionalität +- API-Integration: Asynchrone Kommunikation mit dem Backend +- Mindmap-Visualisierung: Interaktive Darstellung von Konzepten +- Benutzeroberflächen-Interaktivität: Drag & Drop, Tooltips, Modals + +## CSS-Framework +- Tailwind CSS für responsive Design-Elemente +- TAILWIND CDN verwenden, nicht manuell builden (Beispiel siehe unten)!
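+
+Minimalbeispiel für die Einbindung im Basis-Template (der konkrete Template-Name ist hier nur angenommen):
+
+```html
+<!-- base.html: Tailwind CSS direkt über das offizielle CDN laden, kein Build-Schritt nötig -->
+<script src="https://cdn.tailwindcss.com"></script>
+```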
+ +## Responsive Design +- Mobile-first Ansatz für verschiedene Gerätetypen +- Anpassungsfähiges Layout für verschiedene Bildschirmgrößen + +## Zugänglichkeit +- Semantisches HTML für bessere Zugänglichkeit +- ARIA-Attribute für Screenreader-Unterstützung + +## Internationalisierung +- Deutsche Benutzeroberfläche als Standard +- Vorbereitet für mehrsprachige Unterstützung diff --git a/.cursor/rules/project-structure.mdc b/.cursor/rules/project-structure.mdc new file mode 100644 index 0000000..086aae7 --- /dev/null +++ b/.cursor/rules/project-structure.mdc @@ -0,0 +1,27 @@ +--- +description: +globs: +alwaysApply: false +--- +# Projekt-Struktur (Systades) + +## Hauptkomponenten +Diese Python-Flask-Webanwendung implementiert ein Mind-Mapping- und Gedanken-Management-System: + +- [app.py](mdc:app.py): Hauptanwendungsdatei mit allen Routen und Endpunkten +- [models.py](mdc:models.py): Datenbankmodelle und Beziehungen +- [run.py](mdc:run.py): Startpunkt der Anwendung +- [init_db.py](mdc:init_db.py): Initialisiert die Datenbank mit Beispieldaten + +## Projektstruktur +- `/database`: Enthält die SQLite-Datenbank +- `/docs`: Dokumentation +- `/static`: Frontend-Ressourcen (CSS, JS, Bilder) +- `/templates`: Jinja2-Templates für die Webseiten +- `/utils`: Hilfsfunktionen und -klassen + +## Hauptfunktionalität +- Mind-Mapping: Visualisierung von Wissen und Beziehungen +- Gedanken-Management: Erfassung und Organisation von Ideen und Konzepten +- Benutzer-Management: Registrierung, Login, Profile +- API-Endpunkte: RESTful-Schnittstellen für Frontend-Integration diff --git a/.cursor/rules/routing.mdc b/.cursor/rules/routing.mdc new file mode 100644 index 0000000..736b152 --- /dev/null +++ b/.cursor/rules/routing.mdc @@ -0,0 +1,43 @@ +--- +description: +globs: +alwaysApply: false +--- +# Routing und API-Endpunkte + +## Hauptrouten (Webseiten) +- `/`: Startseite +- `/login`, `/register`, `/logout`: Authentifizierung +- `/mindmap`: Öffentliche Mindmap-Ansicht +- `/profile`: Benutzerprofil +- `/settings`: Benutzereinstellungen +- `/search`: Suchfunktion +- `/my_account`: Kontoübersicht + +## API-Endpunkte +### Mindmap-Verwaltung +- `/api/mindmap`: Öffentliche Mindmap-Daten abrufen +- `/api/mindmap/public`: Öffentliche Mindmap abrufen +- `/api/mindmap/user/<mindmap_id>`: Benutzer-Mindmap abrufen +- `/api/mindmap/<mindmap_id>/add_node`: Knoten hinzufügen +- `/api/mindmap/<mindmap_id>/remove_node/<node_id>`: Knoten entfernen +- `/api/mindmap/<mindmap_id>/update_node_position`: Knotenposition aktualisieren +- `/api/mindmap/<mindmap_id>/notes`: Notizen verwalten + +### Gedanken und Inhalte +- `/api/thoughts`: Gedanken erstellen +- `/api/thoughts/<thought_id>`: Gedanken abrufen, aktualisieren, löschen +- `/api/thoughts/<thought_id>/bookmark`: Lesezeichen setzen/entfernen +- `/api/nodes/<node_id>/thoughts`: Gedanken zu einem Knoten abrufen/hinzufügen + +### System und Benutzereinstellungen +- `/api/set_dark_mode`, `/api/get_dark_mode`: Erscheinungsbild-Einstellungen +- `/api/assistant`: KI-Assistent-Kommunikation +- `/api/categories`: Kategorien abrufen +- `/api/get_flash_messages`: Flash-Nachrichten für AJAX-Anfragen + +## Fehlerbehandlung +- 404: Page Not Found +- 403: Forbidden +- 500: Internal Server Error +- 429: Too Many Requests diff --git a/.env b/.env new file mode 100644 index 0000000..07072cb --- /dev/null +++ b/.env @@ -0,0 +1,2 @@ +SECRET_KEY=eed9298856dc9363cd32778265780d6904ba24e6a6b815a2cc382bcdd767ea7b +OPENAI_API_KEY=sk-dein-openai-api-schluessel-hier diff --git a/.vscode/jsconfig.json b/.vscode/jsconfig.json new file mode 100644 index 0000000..e9441a4 --- /dev/null +++ b/.vscode/jsconfig.json @@ -0,0
+1,8 @@ +{ + "compilerOptions": { + "target": "esnext", + "lib": [ + "esnext" + ] + } +} \ No newline at end of file diff --git a/.vscode/main.js b/.vscode/main.js new file mode 100644 index 0000000..ead02a8 --- /dev/null +++ b/.vscode/main.js @@ -0,0 +1,68 @@ +/// +// @ts-check +// API: https://code.visualstudio.com/api/references/vscode-api +// @ts-ignore +const vscode = require('vscode'); +/** + * @typedef {import('vscode').ExtensionContext} ExtensionContext + * @typedef {import('vscode').commands} commands + * @typedef {import('vscode').window} window + * @typedef {import('vscode').TextEditor} TextEditor + * @typedef {import('vscode').TextDocument} TextDocument + */ + +/** + * Aktiviert die Erweiterung und registriert den Auto-Resume-Befehl + * @param {vscode.ExtensionContext} context - Der Erweiterungskontext + */ +function activate(context) { + const disposable = vscode.commands.registerCommand('extension.autoResume', () => { + const editor = vscode.window.activeTextEditor; + if (!editor) return; + + // Track last click time to avoid multiple clicks + let lastClickTime = 0; + + // Main function that looks for and clicks the resume link + function clickResumeLink() { + // Prevent clicking too frequently (3 second cooldown) + const now = Date.now(); + if (now - lastClickTime < 3000) return; + + // Dokumenttext bei jedem Aufruf neu lesen, damit neue Meldungen erkannt werden + const text = editor.document.getText(); + + // Check if text contains rate limit text + if (text.includes('stop the agent after 25 tool calls') || + text.includes('Note: we default stop')) { + + // Find the resume link position + const resumePos = text.indexOf('resume the conversation'); + if (resumePos !== -1) { + vscode.window.showInformationMessage('Auto-resuming conversation...'); + lastClickTime = now; + } + } + } + + // Führe periodisch aus + const interval = global.setInterval(clickResumeLink, 1000); + + // Speichere das Intervall in den Subscriptions + context.subscriptions.push({ + dispose: () => global.clearInterval(interval) + }); + // Führe die Funktion sofort aus + clickResumeLink(); + }); + + context.subscriptions.push(disposable); +} + +function deactivate() { + // Cleanup if needed +} + +module.exports = { + activate, + deactivate +} diff --git a/COMMON_ERRORS.md b/COMMON_ERRORS.md new file mode 100644 index 0000000..3b51eb2 --- /dev/null +++ b/COMMON_ERRORS.md @@ -0,0 +1,239 @@ +# ABSOLUTE DON'TS: +- Verwendung von npm anstelle der Tailwind CDN +- Implementierung von Content Security Policy (CSP) - UNTER KEINEN UMSTÄNDEN!
+- Implementierung von Cross-Site Request Forgery (CSRF) Schutz +- Implementierung von Security Headers +- Verwendung von TypeScript anstelle von JavaScript +- Implementierung von OAuth oder anderen externen Authentifizierungsmethoden + +# HÄUFIGE FEHLER: +- Verwendung der falschen Datenbank (die korrekte ist: database/systades.db) +- Falsche Pfadangaben bei statischen Dateien +- Vergessen der deutschen Spracheinstellungen in Templates +- Nichtbeachten der vorhandenen Projektstruktur +- Falsche Einbindung der Neural Network Background Animation +- Verwendung von englischen Variablennamen in deutschen Funktionen +- Vergessen der Mindmap-Datenstruktur gemäß der Roadmap + +# Häufige Fehler und Lösungen + +## Datenbankfehler + +### Fehler: "no such column: user.password" + +**Fehlerbeschreibung:** +``` +sqlalchemy.exc.OperationalError: (sqlite3.OperationalError) no such column: user.password +[SQL: SELECT user.id AS user_id, user.username AS user_username, user.email AS user_email, user.password AS user_password, user.created_at AS user_created_at, user.is_active AS user_is_active, user.role AS user_role +FROM user +WHERE user.id = ?] +``` + +**Ursache:** +Die Spalte `password` fehlt in der Tabelle `user` der SQLite-Datenbank. Dies kann durch eine unvollständige Datenbankinitialisierung oder ein fehlerhaftes Schema-Update verursacht worden sein. + +**Lösung:** + +1. **Datenbank reparieren mit dem Fix-Skript** + + ```bash + python fix_user_table.py + ``` + + Dieses Skript: + - Prüft, ob die Tabelle `user` existiert und erstellt sie, falls nicht + - Prüft, ob die Spalte `password` existiert und fügt sie hinzu, falls nicht + +2. **Standardbenutzer erstellen** + + ```bash + python create_default_users.py + ``` + + Dieses Skript: + - Erstellt Standardbenutzer (admin, user), falls keine vorhanden sind + - Setzt Passwörter mit korrektem Hashing + +3. **Datenbank testen** + + ```bash + python test_app.py + ``` + + Dieses Skript überprüft: + - Ob die Datenbank existiert + - Ob die Tabelle `user` korrekt konfiguriert ist + - Ob Benutzer vorhanden sind + +## Häufige Ursachen für Datenbankfehler + +1. **Inkonsistente Datenbankschemas** + - Unterschiede zwischen dem SQLAlchemy-Modell und der tatsächlichen Datenbankstruktur + - Fehlende Spalten, die in den Modellen definiert sind + +2. **Falsche Datenbankinitialisierung** + - Die Datenbank wurde nicht korrekt initialisiert + - Fehler bei der Migration oder dem Schema-Update + +3. **Beschädigte Datenbankdatei** + - Die SQLite-Datenbankdatei wurde beschädigt + - Lösung: Sicherung wiederherstellen oder Datenbank neu erstellen + +## Vorbeugende Maßnahmen + +1. **Regelmäßige Backups** + - Tägliche Sicherung der Datenbankdatei + +2. **Schema-Validierung** + - Regelmäßige Überprüfung der Datenbankstruktur + - Automatisierte Tests für das Datenbankschema + +3. **Datenbankmigration** + - Verwenden Sie Flask-Migrate für strukturierte Datenbank-Updates + - Dokumentieren Sie alle Schemaänderungen + +## Content Security Policy (CSP) + +### Problem: Externe Ressourcen werden nicht geladen +**Fehler:** Externe Ressourcen wie CDNs werden nicht korrekt geladen. + +**Lösung:** +1. Stellen Sie sicher, dass die URLs in den Templates korrekt sind: + ```html + <!-- Beispiel: Tailwind über das offizielle CDN --> + <script src="https://cdn.tailwindcss.com"></script> + ``` + +2. Überprüfen Sie die Netzwerkverbindung und ob die CDN-Domains erreichbar sind. + +3. Verwenden Sie lokale Ressourcen als Alternative: + ```html + <!-- Lokale Kopie statt CDN --> + <link rel="stylesheet" href="/static/css/tailwind.min.css"> + ``` + +### Problem: CSP-Fehler (script-src-elem 'none', font-src 'none', etc.) +**Fehler:** Skripte werden aufgrund von CSP-Einschränkungen blockiert.
+ +**Lösung:** +1. WICHTIG: CSP-Einschränkungen NICHT implementieren! Diese sind im Projekt unerwünscht. + +2. Falls CSP-Header bereits vorhanden sind, entfernen Sie diese aus der Anwendung: + ```python + # Entfernen Sie diesen Code, falls vorhanden: + # @app.after_request + # def add_security_headers(response): + #     response.headers['Content-Security-Policy'] = '...' + #     return response + ``` + +3. Stellen Sie sicher, dass externe Ressourcen direkt geladen werden: + ```html + <!-- Beispiele; die D3-Versionsnummer ist hier nur angenommen --> + <script src="https://cdn.tailwindcss.com"></script> + <script src="https://d3js.org/d3.v7.min.js"></script> + ``` + +4. Für Inline-Skripte keine Nonce verwenden: + ```html + <script> + // Inline-Skript bewusst ohne nonce-Attribut + console.log('Skript ohne Nonce geladen'); + </script> + ``` + +### Problem: Tailwind CSS CDN wird blockiert +**Fehler:** Tailwind CSS kann nicht vom CDN geladen werden. + +**Lösung:** +1. Verwenden Sie die lokale Version von Tailwind CSS: + ```html + <link rel="stylesheet" href="/static/css/tailwind.min.css"> + ``` + +2. Alternativ können Sie die CDN-Version direkt im Template einbinden: + ```html + <script src="https://cdn.tailwindcss.com"></script> + ``` + +3. Stellen Sie sicher, dass die Datei `static/css/tailwind.min.css` existiert und aktuell ist. + +## Authentifizierung + +### Problem: Login funktioniert nicht +**Fehler:** Benutzer kann sich nicht einloggen. + +**Lösung:** +1. Standard-Admin-Benutzer erstellen: `python TOOLS.py user:admin` +2. Passwort zurücksetzen: `python TOOLS.py user:reset-pw -u USERNAME -p NEWPASSWORD` + +## Neural Network Background + +### Problem: Hintergrund-Animation wird nicht angezeigt +**Fehler:** Die Neural Network Animation im Hintergrund erscheint nicht. + +**Lösung:** +1. Überprüfen Sie, ob die Datei `static/neural-network-background.js` korrekt eingebunden ist: + ```html + <script src="{{ url_for('static', filename='neural-network-background.js') }}"></script> + ``` + +2. Initialisieren Sie die Animation im Template: + ```html + <script> + // Initialisierungsaufruf; der konkrete Funktionsname ist projektspezifisch (hier nur angenommen) + initNeuralNetworkBackground(); + </script> + ``` + +3. Stellen Sie sicher, dass keine CSS-Regeln die Animation überdecken: + ```css + #neural-network-background { + z-index: -10; + opacity: 1; + } + ``` + +## Mindmap-Funktionalität + +### Problem: Mindmap-Daten werden nicht geladen +**Fehler:** Die dynamische Mindmap zeigt keine Daten an. + +**Lösung:** +1. Überprüfen Sie die API-Endpunkte für die Mindmap-Daten: + ```python + @app.route('/api/mindmap/nodes', methods=['GET']) + def get_mindmap_nodes(): + # Implementierung... + ``` + +2. Stellen Sie sicher, dass die AJAX-Anfragen korrekt implementiert sind: + ```javascript + fetch('/api/mindmap/nodes') + .then(response => response.json()) + .then(data => { + // Verarbeitung der Mindmap-Daten + }); + ``` + +3. Überprüfen Sie die Datenbankeinträge für Mindmap-Knoten und -Verbindungen. + +## ChatGPT-Assistent + +### Problem: Assistent reagiert nicht auf Eingaben +**Fehler:** Der ChatGPT-Assistent verarbeitet keine Benutzereingaben. + +**Lösung:** +1. Überprüfen Sie die Einbindung der JavaScript-Datei: + ```html + <!-- Pfad und Dateiname beispielhaft --> + <script src="{{ url_for('static', filename='js/modules/chatgpt-assistant.js') }}"></script> + ``` + +2. Stellen Sie sicher, dass der Assistent korrekt initialisiert wird: + ```javascript + document.addEventListener('DOMContentLoaded', () => { + const assistant = new ChatGPTAssistant(); + assistant.initialize(); + }); + ``` + +3. Überprüfen Sie die API-Endpunkte für die Kommunikation mit dem Assistenten. \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..f1f7ca2 --- /dev/null +++ b/README.md @@ -0,0 +1,196 @@ +# MindMapProjekt - Roadmap + +## Projektübersicht +Das MindMapProjekt ist eine interaktive Plattform zum Visualisieren, Erforschen und Teilen von Wissen. Das Projekt wird umfassend überarbeitet, um ein modernes, benutzerfreundliches Design und erweiterte Funktionalitäten zu bieten.
## Technischer Stack +- **Backend**: Python/Flask +- **Frontend**: + - Tailwind CSS (via CDN) für moderne UI + - SVG-Bibliotheken für Visualisierungen (D3.js) + - JavaScript/Alpine.js für interaktive Komponenten + - WebGL für animierte Hintergrundeffekte +- **Datenbank**: SQLite mit SQLAlchemy +- **KI-Integration**: OpenAI API für intelligente Assistenz + +## Installation und Verwendung + +### Installation +1. Repository klonen +2. Virtuelle Umgebung erstellen: `python -m venv venv` +3. Virtuelle Umgebung aktivieren: + - Windows: `venv\Scripts\activate` + - Unix/macOS: `source venv/bin/activate` +4. Abhängigkeiten installieren: `pip install -r requirements.txt` +5. Datenbank initialisieren: `python TOOLS.py db:rebuild` +6. Admin-Benutzer erstellen: `python TOOLS.py user:admin` +7. Server starten: `python TOOLS.py server:run` + +### Standardbenutzer +- **Admin-Benutzer**: Username: `admin` / Passwort: `admin` +- **Testbenutzer**: Username: `user` / Passwort: `user` + +### Verwaltungswerkzeuge mit TOOLS.py +Das Projekt enthält ein zentrales Verwaltungsskript `TOOLS.py`, das verschiedene Hilfsfunktionen bietet: + +#### Datenbankverwaltung +- `python TOOLS.py db:fix` - Reparieren der Datenbankstruktur +- `python TOOLS.py db:rebuild` - Datenbank neu aufbauen (löscht alle Daten!) +- `python TOOLS.py db:test` - Datenbankverbindung und Modelle testen +- `python TOOLS.py db:stats` - Datenbankstatistiken anzeigen + +#### Benutzerverwaltung +- `python TOOLS.py user:list` - Alle Benutzer anzeigen +- `python TOOLS.py user:create -u USERNAME -e EMAIL -p PASSWORD [-a]` - Neuen Benutzer erstellen +- `python TOOLS.py user:admin` - Admin-Benutzer erstellen (admin/admin) +- `python TOOLS.py user:reset-pw -u USERNAME -p NEWPASSWORD` - Benutzerpasswort zurücksetzen +- `python TOOLS.py user:delete -u USERNAME` - Benutzer löschen + +#### Serververwaltung +- `python TOOLS.py server:run [--host HOST] [--port PORT] [--no-debug]` - Entwicklungsserver starten + +Für detaillierte Hilfe: `python TOOLS.py -h` + +## Roadmap der Überarbeitung + +### Phase 1: Grundlegende Infrastruktur ✅ +- [x] Bestandsaufnahme des aktuellen Projekts +- [x] Erstellung der Roadmap +- [x] Aktualisierung der Abhängigkeiten +- [x] Integration von Tailwind CSS +- [x] Einrichtung der SVG-Bibliotheken (D3.js) +- [x] Favicon erstellen +- [x] Setup-Skript für einfache Installation + +### Phase 2: Design-Überarbeitung ✅ +- [x] Implementierung des Dark Mode +- [x] Erstellung eines modernen, minimalistischen UI mit Tech-Ästhetik +- [x] Responsive Design für alle Geräte +- [x] Gestaltung der Landing Page mit großer Typografie +- [x] Animierter neuronaler Netzwerk-Hintergrund mit WebGL + +### Phase 3: Mindmap-Funktionalitäten 🔄 +- [x] Verbesserte Visualisierung mit SVG und D3.js +- [x] Implementierung der Mouseover-Funktion +- [x] Entwicklung der Suchfunktion für Knoten +- [x] Clustertopologie für neuronale Netzwerkdarstellung +- [x] Fehlerbehandlung für robuste Datenverarbeitung und Knotenverweise +- [x] Verbesserte Verbindungserkennung zwischen Knoten +- [ ] Tagging-System für Inhalte +- [ ] Quellenmanagement und -verlinkung +- [ ] Upload-Funktionalität an Knotenpunkten + +### Phase 4: Kernseitenentwicklung +- [ ] Überarbeitung der Startseite mit neuen Features +- [ ] Entwicklung der "Wer sind wir?"-Seite +- [ ] Implementierung von Impressum und Datenschutzerklärung +- [ ] Erstellung der Kontaktseite mit FAQs +- [ ] Überarbeitung des Benutzerprofilbereichs + +### Phase 5: Community-Features +- [ ] Entwicklung des Autorenbereichs +- [ ]
Implementierung von Community-Bereichen für Themenbereiche +- [ ] Verbesserter Kommentarbereich +- [ ] Benutzerrechtemanagement + +### Phase 6: KI-Integration +- [ ] Implementierung des Frage-Antwort-Systems +- [ ] KI-generierte Themeneinleitungen +- [ ] Intelligente Suchunterstützung +- [ ] Geführte Pfade durch Themenbereiche +- [ ] Vorgeschlagene Chat-Möglichkeiten + +### Phase 7: Benutzerprofilfunktionen +- [ ] Speichern von Thematiken +- [ ] Persönliche Mindmap/Pinboard +- [ ] Beitragsmanagement +- [ ] Benutzerstatistiken und -aktivitäten + +### Phase 8: Testing und Optimierung +- [ ] Umfassende Tests aller Funktionen +- [ ] Performance-Optimierung +- [ ] SEO-Implementierung +- [ ] Barrierefreiheit prüfen und verbessern + +### Phase 9: Dokumentation und Einführung +- [ ] Erstellung von Benutzeranleitungen +- [ ] Entwicklerdokumentation +- [ ] Administratorenhandbuch +- [ ] Guided Tour für neue Benutzer + +## Aktueller Status +- **Phase 1**: ✅ Abgeschlossen +- **Phase 2**: ✅ Abgeschlossen +- **Phase 3**: 🔄 In Bearbeitung (75% abgeschlossen) + +## Aktuelle Fortschritte +- Grundlegende UI modernisiert mit Tailwind CSS und Dark Mode +- Neues Favicon für bessere visuelle Identität erstellt +- Setup-Prozess vereinfacht mit einem Shell-Skript +- Mindmap-Visualisierung komplett überarbeitet mit D3.js für eine interaktivere Erfahrung +- Responsive Design für optimale Darstellung auf allen Geräten +- Animierter neuronaler Netzwerk-Hintergrund mit WebGL implementiert +- Verbesserte neuronale Cluster-Darstellung in der MindMap-Ansicht +- Behebung von kritischen Fehlern in der Knotenvisualisierung und Verbindungserkennung +- Robustere Datenverarbeitung für Mindmap-Knoten implementiert +- Fehlerbehandlung für verschiedene API-Datenformate verbessert + +## Neuronaler Netzwerk-Hintergrund + +Ein wesentliches neues Feature ist der animierte Hintergrund, der ein neuronales Netzwerk simuliert: + +- **WebGL-basierte Rendering-Engine** für hohe Performance +- **Dynamische Knoten und Verbindungen** mit realistischem Bewegungsverhalten +- **Neuronenfeuer-Simulation** mit Signalweiterleitung zwischen Knoten +- **Clustertopologie** für realistisches Erscheinungsbild +- **Anpassbare Farbgebung und Animationsparameter** +- **Flüssige Animationen** mit über 100 Knotenpunkten + +Die Animation ist vollständig responsiv und passt sich automatisch an verschiedene Bildschirmgrößen an, ohne die Browser-Performance zu beeinträchtigen. + +## Mindmap-Verbesserungen + +Die Mindmap-Darstellung wurde grundlegend überarbeitet: + +- **D3.js Force-Directed Graph** für intuitive Knotenpositionierung +- **Verbesserte Fehlerbehandlung** für robustere Datenverarbeitung +- **Neuronale Cluster-Gruppierung** von thematisch zusammengehörigen Inhalten +- **Glasmorphismus-Effekte** für moderne visuelle Darstellung +- **Verbesserte Hover- und Selektionseffekte** +- **Flüssige Animationen** bei Knotenauswahl und -fokussierung + +## Nächste Schritte +- Fertigstellung des Tagging-Systems für Gedanken +- Verbesserung der Gedankenansicht im Mindmap-Bereich +- Implementierung von Quellenmanagement +- Überarbeitung der Startseite mit neuen Features + +## Content Security Policy (CSP) + +Die Anwendung implementiert eine Content Security Policy, um die Sicherheit zu erhöhen und unerwünschte externe Ressourcen zu blockieren. CSP wird in `app.py` konfiguriert und schränkt ein, welche Ressourcen geladen werden dürfen. + +### Aktualisierung (06.06.2024) +Die Anwendung verwendet nun die Tailwind CSS CDN für vereinfachte Entwicklung. 
Die CSP wurde entsprechend angepasst, um die Domain `cdn.tailwindcss.com` zu erlauben. + +### Lokale und CDN-Ressourcen + +Die Anwendung nutzt eine Mischung aus lokalen Ressourcen und CDNs: +- **CDN-Ressourcen**: + - Tailwind CSS (cdn.tailwindcss.com) +- **Lokale Ressourcen**: + - Alpine.js + - Font Awesome + - Google Fonts (Inter und JetBrains Mono) + - WebGL-Animation (neural-network-background.js) + +### CSP-Nonces + +Die Anwendung verwendet Nonces für Inline-Skripte. In den Templates wird `{{ csp_nonce }}` verwendet, um den Nonce-Wert einzufügen: + +```html +<script nonce="{{ csp_nonce }}"> + // Inline-Skript, dem der vom Server erzeugte Nonce zugewiesen wird +</script> +``` + +*Zuletzt aktualisiert: 15.06.2024* \ No newline at end of file diff --git a/ROADMAP.md b/ROADMAP.md new file mode 100644 index 0000000..bd55bc9 --- /dev/null +++ b/ROADMAP.md @@ -0,0 +1,172 @@ +# Systades Mindmap - Entwicklungs-Roadmap + +Diese Roadmap beschreibt die geplante Entwicklung der dynamischen, benutzerorientierten Mindmap-Funktionalität für das Systades-Projekt. + +## Phase 1: Grundlegendes Datenmodell und Backend (Abgeschlossen ✅) + +- [x] Entwurf des Datenbankschemas für benutzerorientierte Mindmaps +- [x] Implementierung der Modelle in models.py +- [x] Erstellung der API-Endpunkte für CRUD-Operationen +- [x] Integration mit der bestehenden Benutzerauthentifizierung +- [x] Seed-Daten für die Entwicklung und Tests + +## Phase 2: Dynamische Mindmap-Visualisierung (Abgeschlossen ✅) + +- [x] Anpassung des Frontend-Codes zur Verwendung der DB-Daten anstelle des SVG +- [x] Implementierung von AJAX-Anfragen zum Laden der Mindmap-Daten +- [x] Dynamisches Rendering der Knoten, Verbindungen und Labels +- [x] Drag-and-Drop-Funktionalität für die Bewegung von Knoten +- [x] Zoom- und Pan-Funktionalität mit Persistenz der Ansicht +- [x] Verbesserte Fehlerbehandlung in der Knotenvisualisierung +- [x] Robustere Verbindungserkennung zwischen Knoten +- [x] Implementierung von Glasmorphismus-Effekten für moderneres UI + +## Phase 3: Visuelles Design und UX (Abgeschlossen ✅) + +- [x] Implementierung des Dark Mode +- [x] Entwicklung eines modernen, minimalistischen UI +- [x] Animierter neuronaler Netzwerk-Hintergrund mit WebGL +- [x] Responsive Design für alle Geräte +- [x] Verbesserte Hover- und Selektionseffekte +- [x] Clustertopologie für neuronale Netzwerkdarstellung +- [x] Animierte Neuronenfeuer-Simulation mit Signalweiterleitung + +## Phase 4: Benutzerdefinierte Mindmaps (Aktuell 🔄) + +- [x] UI für das Betrachten bestehender Mindmaps +- [ ] UI für das Erstellen und Bearbeiten eigener Mindmaps +- [ ] Funktion zum Hinzufügen/Entfernen von Knoten aus der öffentlichen Mindmap +- [ ] Speichern der Knotenpositionen und Ansichtseinstellungen +- [ ] Benutzerspezifische Visualisierungseinstellungen +- [ ] Dashboard mit Übersicht aller Mindmaps des Benutzers + +## Phase 5: Notizen und Annotationen + +- [x] Anzeige von Gedanken zu Mindmap-Knoten +- [ ] UI für das Hinzufügen privater Notizen zu Knoten +- [ ] Visuelle Anzeige von Notizen in der Mindmap +- [ ] Texteditor mit Markdown-Unterstützung für Notizen +- [ ] Kategorisierung und Farbkodierung von Notizen +- [ ] Suchfunktion für Notizen + +## Phase 6: Tagging und Quellenmanagement + +- [ ] Tagging-System für Inhalte implementieren +- [ ] Verknüpfen von Quellen mit Mindmap-Knoten +- [ ] Upload-Funktionalität für Dateien und Medien +- [ ] Verwaltung von Zitaten und Referenzen +- [ ] Visuelles Feedback für Tags und Quellen in der Mindmap + +## Phase 7: Integrationen und Erweiterungen + +- [ ] Import/Export-Funktionalität für Mindmaps (JSON, PNG) +- [ ] Teilen von Mindmaps
(öffentlich/privat/mit bestimmten Benutzern) +- [ ] Kollaborative Bearbeitung von Mindmaps +- [ ] Verknüpfung mit externen Ressourcen (Links, Dateien) +- [ ] Versionierung von Mindmaps + +## Phase 8: KI-Integration und Analyse + +- [ ] KI-gestützte Vorschläge für Verbindungen zwischen Knoten +- [ ] Automatische Kategorisierung von Inhalten +- [ ] Visualisierung von Beziehungsstärken und -typen +- [ ] Mindmap-Statistiken und Analysen +- [ ] KI-basierte Zusammenfassung von Teilbereichen der Mindmap + +## Phase 9: Optimierung und Skalierung + +- [ ] Performance-Optimierung für große Mindmaps +- [ ] Verbesserung der Benutzerfreundlichkeit basierend auf Feedback +- [ ] Erweiterte Such- und Filterfunktionen +- [ ] Mobile Optimierung +- [ ] Offline-Funktionalität mit Synchronisierung + +## Technische Schulden und Refactoring + +- [ ] Trennung der Datenbank-Logik vom Flask-App-Code +- [ ] Einführung von Unit-Tests und Integration-Tests +- [ ] Überarbeitung der API-Dokumentation +- [ ] Caching-Strategien für bessere Performance +- [ ] Verbesserte Fehlerbehandlung und Logging + +## KI-Integration + +### Aktuelle Implementation +- Integration von OpenAI mit dem gpt-4o-mini-Modell für den KI-Assistenten +- Datenbankzugriff für den KI-Assistenten, um direkt Informationen aus der Datenbank abzufragen +- Verbesserte Benutzeroberfläche für den KI-Assistenten mit kontextbezogenen Vorschlägen + +### Zukünftige Verbesserungen +- Implementierung von Vektorsuche für präzisere Datenbank-Abfragen durch die KI +- Erweiterung der KI-Funktionalität für tiefere Analyse von Zusammenhängen zwischen Gedanken +- KI-gestützte Vorschläge für neue Verbindungen zwischen Gedanken basierend auf Inhaltsanalyse +- Finetuning des KI-Modells auf die spezifischen Anforderungen der Anwendung +- Erweiterung auf multimodale Fähigkeiten (Bild- und Textanalyse) + +--- + +## Implementierungsdetails + +### Datenbankschema + +Das Datenbankschema umfasst folgende Hauptentitäten: + +1. **Category** - Wissenschaftliche Kategorien für die öffentliche Mindmap +2. **MindMapNode** - Öffentliche Mindmap-Knoten mit Metadaten +3. **UserMindmap** - Benutzerdefinierte Mindmaps +4. **UserMindmapNode** - Verknüpfung zwischen Benutzermindmaps und öffentlichen Knoten +5. **MindmapNote** - Benutzerspezifische Notizen +6. **Thought** - Gedanken und Inhalte, die Knoten zugeordnet sind +7. 
**ThoughtRelation** - Beziehungen zwischen Gedanken + +### Frontend-Technologien + +- D3.js für die Visualisierung der Mindmap +- WebGL für den neuronalen Netzwerk-Hintergrund +- AJAX für dynamisches Laden von Daten +- Interaktive Bedienelemente mit JavaScript +- Responsive Design mit Tailwind CSS + +### Backend-APIs + +Die implementierten API-Endpunkte umfassen: + +- `/api/mindmap/public` - Abrufen der öffentlichen Mindmap-Struktur +- `/api/mindmap/user/<mindmap_id>` - Abrufen benutzerdefinierter Mindmaps +- `/api/mindmap/<mindmap_id>/add_node` - Hinzufügen eines Knotens zur Benutzer-Mindmap +- `/api/mindmap/<mindmap_id>/remove_node/<node_id>` - Entfernen eines Knotens +- `/api/mindmap/<mindmap_id>/update_node_position` - Aktualisierung von Knotenpositionen +- `/api/mindmap/<mindmap_id>/notes` - Verwaltung von Notizen +- `/api/nodes/<node_id>/thoughts` - Abrufen und Hinzufügen von Gedanken zu Knoten +- `/api/get_dark_mode` - Abrufen der Dark-Mode-Einstellung + +## Neuronaler Netzwerk-Hintergrund + +Der neue WebGL-basierte Hintergrund bietet: + +- WebGL-basierte Rendering-Engine für optimale Performance +- Dynamische Knoten und Verbindungen mit realistischem Verhalten +- Clustering von neuronalen Knoten für natürlicheres Erscheinungsbild +- Simulation von neuronaler Aktivität und Signalweiterleitung +- Anpassbare visuelle Parameter (Helligkeit, Dichte, Geschwindigkeit) +- Vollständig responsives Design für alle Bildschirmgrößen + +## Aktuelle Verbesserungen +- Tailwind CSS wurde auf CDN-Version aktualisiert (06.06.2024) +- Content Security Policy (CSP) für Tailwind CSS CDN und WebGL konfiguriert +- Behebung kritischer Fehler in der Mindmap-Knotenvisualisierung (15.06.2024) +- Verbesserte Verbindungserkennung zwischen Knoten implementiert +- Robuste Fehlerbehandlung für verschiedene API-Datenformate + +## Zukünftige Aufgaben (Q3 2024) +- Implementierung des Tagging-Systems für Gedanken +- Quellenmanagement für Mindmap-Knoten +- Erweiterte Benutzerprofilfunktionen +- Verbesserung der mobilen Benutzererfahrung +- Integration von Exportfunktionen für Mindmaps + +*Zuletzt aktualisiert: 15.06.2024* + +## [Entfernt] CORS-Unterstützung (flask-cors) +- Die flask-cors-Bibliothek und alle zugehörigen Initialisierungen wurden entfernt. +- CORS wird nicht mehr unterstützt oder benötigt. \ No newline at end of file diff --git a/TOOLS.py b/TOOLS.py new file mode 100644 index 0000000..ebea075 --- /dev/null +++ b/TOOLS.py @@ -0,0 +1,125 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +""" +TOOLS.py - Main utility script for the website application. + +This script provides a command-line interface to all utilities +for database management, user management, and server administration.
+ +Usage: + python3 TOOLS.py [command] [options] + +Available commands: + - db:fix Fix database schema + - db:rebuild Completely rebuild the database + - db:test Test database connection and models + - db:stats Show database statistics + + - user:list List all users + - user:create Create a new user + - user:admin Create admin user (username: admin, password: admin) + - user:reset-pw Reset user password + - user:delete Delete a user + + - server:run Run the development server + +Examples: + python3 TOOLS.py db:rebuild + python3 TOOLS.py user:admin + python3 TOOLS.py server:run --port 8080 +""" + +import os +import sys +import argparse +from utils import ( + fix_database_schema, rebuild_database, run_all_tests, print_database_stats, + list_users, create_user, reset_password, delete_user, create_admin_user, + run_development_server +) + +def parse_args(): + parser = argparse.ArgumentParser( + description='Website Administration Tools', + formatter_class=argparse.RawDescriptionHelpFormatter, + epilog=__doc__ + ) + + # Main command argument + parser.add_argument('command', help='Command to execute') + + # Additional arguments + parser.add_argument('--username', '-u', help='Username for user commands') + parser.add_argument('--email', '-e', help='Email for user creation') + parser.add_argument('--password', '-p', help='Password for user creation/reset') + parser.add_argument('--admin', '-a', action='store_true', help='Make user an admin') + parser.add_argument('--host', help='Host for server (default: 127.0.0.1)') + parser.add_argument('--port', type=int, help='Port for server (default: 5000)') + parser.add_argument('--no-debug', action='store_true', help='Disable debug mode for server') + + return parser.parse_args() + +def main(): + args = parse_args() + + # Database commands + if args.command == 'db:fix': + fix_database_schema() + + elif args.command == 'db:rebuild': + print("WARNING: This will delete all data in the database!") + confirm = input("Are you sure you want to continue? 
(y/n): ").lower() + if confirm == 'y': + rebuild_database() + else: + print("Aborted.") + + elif args.command == 'db:test': + run_all_tests() + + elif args.command == 'db:stats': + print_database_stats() + + # User commands + elif args.command == 'user:list': + list_users() + + elif args.command == 'user:create': + if not args.username or not args.email or not args.password: + print("Error: Username, email, and password are required.") + print("Example: python3 TOOLS.py user:create -u username -e email -p password [-a]") + sys.exit(1) + create_user(args.username, args.email, args.password, args.admin) + + elif args.command == 'user:admin': + create_admin_user() + + elif args.command == 'user:reset-pw': + if not args.username or not args.password: + print("Error: Username and password are required.") + print("Example: python3 TOOLS.py user:reset-pw -u username -p new_password") + sys.exit(1) + reset_password(args.username, args.password) + + elif args.command == 'user:delete': + if not args.username: + print("Error: Username is required.") + print("Example: python3 TOOLS.py user:delete -u username") + sys.exit(1) + delete_user(args.username) + + # Server commands + elif args.command == 'server:run': + host = args.host or '127.0.0.1' + port = args.port or 5000 + debug = not args.no_debug + run_development_server(host=host, port=port, debug=debug) + + else: + print(f"Unknown command: {args.command}") + print("Run 'python3 TOOLS.py -h' for usage information") + sys.exit(1) + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/__pycache__/app.cpython-311.pyc b/__pycache__/app.cpython-311.pyc new file mode 100644 index 0000000..491a265 Binary files /dev/null and b/__pycache__/app.cpython-311.pyc differ diff --git a/__pycache__/app.cpython-313.pyc b/__pycache__/app.cpython-313.pyc new file mode 100644 index 0000000..584ed75 Binary files /dev/null and b/__pycache__/app.cpython-313.pyc differ diff --git a/__pycache__/app.cpython-36.pyc b/__pycache__/app.cpython-36.pyc new file mode 100644 index 0000000..bc18657 Binary files /dev/null and b/__pycache__/app.cpython-36.pyc differ diff --git a/__pycache__/init_db.cpython-311.pyc b/__pycache__/init_db.cpython-311.pyc new file mode 100644 index 0000000..212cb48 Binary files /dev/null and b/__pycache__/init_db.cpython-311.pyc differ diff --git a/__pycache__/init_db.cpython-313.pyc b/__pycache__/init_db.cpython-313.pyc new file mode 100644 index 0000000..d8fef85 Binary files /dev/null and b/__pycache__/init_db.cpython-313.pyc differ diff --git a/__pycache__/models.cpython-311.pyc b/__pycache__/models.cpython-311.pyc new file mode 100644 index 0000000..f4e031e Binary files /dev/null and b/__pycache__/models.cpython-311.pyc differ diff --git a/__pycache__/models.cpython-313.pyc b/__pycache__/models.cpython-313.pyc new file mode 100644 index 0000000..3844fb7 Binary files /dev/null and b/__pycache__/models.cpython-313.pyc differ diff --git a/app.py b/app.py new file mode 100644 index 0000000..ec7e857 --- /dev/null +++ b/app.py @@ -0,0 +1,1535 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import os +from datetime import datetime, timedelta +from flask import Flask, render_template, request, redirect, url_for, flash, jsonify, session, g +from flask_login import LoginManager, UserMixin, login_user, logout_user, login_required, current_user +from flask_sqlalchemy import SQLAlchemy +from werkzeug.security import generate_password_hash, check_password_hash +import json +from enum import Enum +from flask_wtf import FlaskForm +from 
wtforms import StringField, PasswordField, BooleanField, TextAreaField, SelectField, HiddenField +from wtforms.validators import DataRequired, Email, Length, EqualTo, ValidationError +from functools import wraps +import secrets +from sqlalchemy.sql import func +from openai import OpenAI +from dotenv import load_dotenv +from flask_socketio import SocketIO, emit +from flask_migrate import Migrate + +# Modelle importieren +from models import ( + db, User, Thought, Comment, MindMapNode, ThoughtRelation, ThoughtRating, + RelationType, Category, UserMindmap, UserMindmapNode, MindmapNote, + node_thought_association, user_thought_bookmark, node_relationship +) + +# Lade .env-Datei +load_dotenv() # Lädt die Umgebungsvariablen aus der .env-Datei + +# Bestimme den absoluten Pfad zur Datenbank +basedir = os.path.abspath(os.path.dirname(__file__)) +db_path = os.path.join(basedir, 'database', 'systades.db') +# Stellen Sie sicher, dass das Verzeichnis existiert +os.makedirs(os.path.dirname(db_path), exist_ok=True) + +app = Flask(__name__) +app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY', 'default-dev-key') +app.config['SQLALCHEMY_DATABASE_URI'] = f'sqlite:///{db_path}' +app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False +app.config['PERMANENT_SESSION_LIFETIME'] = timedelta(days=365) # Langlebige Session für Dark Mode-Einstellung +app.config['UPLOAD_FOLDER'] = os.getenv('UPLOAD_FOLDER', os.path.join(os.getcwd(), 'uploads')) +app.config['WTF_CSRF_ENABLED'] = False + +# OpenAI API-Konfiguration +api_key = os.environ.get("OPENAI_API_KEY") +if not api_key: + print("WARNUNG: Kein OPENAI_API_KEY in Umgebungsvariablen gefunden. KI-Funktionalität wird nicht verfügbar sein.") + +# Kein hartkodierter Fallback-Schlüssel: API-Schlüssel gehören ausschließlich in die .env-Datei +client = OpenAI(api_key=api_key) if api_key else None + +# Dark Mode Einstellung in Session speichern +@app.before_request +def handle_dark_mode(): + if 'dark_mode' not in session: + session['dark_mode'] = False # Standardmäßig Light Mode + +# Context processor für Dark Mode +@app.context_processor +def inject_dark_mode(): + return {'dark_mode': session.get('dark_mode', False)} + +# Route zum Umschalten des Dark Mode +@app.route('/toggle-dark-mode', methods=['POST']) +def toggle_dark_mode(): + session['dark_mode'] = not session.get('dark_mode', False) + return jsonify({'success': True, 'dark_mode': session['dark_mode']}) + +# Context processor für globale Template-Variablen +@app.context_processor +def inject_globals(): + """Inject global variables into all templates.""" + return { + 'current_year': datetime.now().year + } + +# Initialisiere die Datenbank +db.init_app(app) + +# Initialisiere den Login-Manager +login_manager = LoginManager(app) +login_manager.login_view = 'login' + +# Erst nach der App-Initialisierung die DB-Check-Funktionen importieren +from utils.db_check import check_db_connection, initialize_db_if_needed + +# SocketIO initialisieren +socketio = SocketIO(app) + +migrate = Migrate(app, db) + +def create_default_categories(): + """Erstellt die Standardkategorien für die Mindmap""" + # Hauptkategorien + main_categories = [ + { + "name": "Philosophie", + "description": "Philosophisches Denken und Konzepte", + "color_code": "#9F7AEA", + "icon": "fa-brain", + "subcategories": [ + {"name": "Ethik", "description": "Moralische
Grundsätze", "icon": "fa-balance-scale"}, + {"name": "Logik", "description": "Gesetze des Denkens", "icon": "fa-project-diagram"}, + {"name": "Erkenntnistheorie", "description": "Natur des Wissens", "icon": "fa-lightbulb"} + ] + }, + { + "name": "Wissenschaft", + "description": "Wissenschaftliche Disziplinen und Forschung", + "color_code": "#48BB78", + "icon": "fa-flask", + "subcategories": [ + {"name": "Physik", "description": "Gesetze der Materie und Energie", "icon": "fa-atom"}, + {"name": "Biologie", "description": "Wissenschaft des Lebens", "icon": "fa-dna"}, + {"name": "Mathematik", "description": "Abstrakte Strukturen", "icon": "fa-calculator"}, + {"name": "Informatik", "description": "Wissenschaft der Datenverarbeitung", "icon": "fa-laptop-code"} + ] + }, + { + "name": "Technologie", + "description": "Technologische Entwicklungen und Anwendungen", + "color_code": "#ED8936", + "icon": "fa-microchip", + "subcategories": [ + {"name": "Künstliche Intelligenz", "description": "Intelligente Maschinen", "icon": "fa-robot"}, + {"name": "Programmierung", "description": "Softwareentwicklung", "icon": "fa-code"}, + {"name": "Elektronik", "description": "Elektronische Systeme", "icon": "fa-memory"} + ] + }, + { + "name": "Künste", + "description": "Kunstformen und kulturelle Ausdrucksweisen", + "color_code": "#ED64A6", + "icon": "fa-palette", + "subcategories": [ + {"name": "Literatur", "description": "Schriftliche Werke", "icon": "fa-book"}, + {"name": "Musik", "description": "Klangkunst", "icon": "fa-music"}, + {"name": "Bildende Kunst", "description": "Visuelle Kunstformen", "icon": "fa-paint-brush"} + ] + }, + { + "name": "Psychologie", + "description": "Menschliches Verhalten und Geist", + "color_code": "#4299E1", + "icon": "fa-comments", + "subcategories": [ + {"name": "Kognition", "description": "Denken und Wahrnehmen", "icon": "fa-brain"}, + {"name": "Emotionen", "description": "Gefühlswelt", "icon": "fa-heart"}, + {"name": "Persönlichkeit", "description": "Charaktereigenschaften", "icon": "fa-user"} + ] + } + ] + + # Kategorien erstellen + for main_cat_data in main_categories: + # Prüfen, ob die Kategorie bereits existiert + existing_cat = Category.query.filter_by(name=main_cat_data["name"]).first() + if existing_cat: + continue + + # Hauptkategorie erstellen + main_category = Category( + name=main_cat_data["name"], + description=main_cat_data["description"], + color_code=main_cat_data["color_code"], + icon=main_cat_data["icon"] + ) + db.session.add(main_category) + db.session.flush() # Um die ID zu generieren + + # Unterkategorien erstellen + for sub_cat_data in main_cat_data.get("subcategories", []): + sub_category = Category( + name=sub_cat_data["name"], + description=sub_cat_data["description"], + color_code=main_cat_data["color_code"], + icon=sub_cat_data.get("icon", main_cat_data["icon"]), + parent_id=main_category.id + ) + db.session.add(sub_category) + + db.session.commit() + print("Standard-Kategorien wurden erstellt!") + +def initialize_database(): + """Initialisiert die Datenbank mit Grunddaten, falls diese leer ist""" + try: + print("Initialisiere die Datenbank...") + + # Erstelle alle Tabellen + db.create_all() + + # Prüfe, ob bereits Benutzer existieren + if User.query.count() == 0: + print("Erstelle Admin-Benutzer...") + admin = User( + username="admin", + email="admin@example.com", + is_admin=True + ) + admin.set_password("admin123") # In echter Umgebung ein sicheres Passwort verwenden! 
+ db.session.add(admin) + + # Prüfe, ob bereits Kategorien existieren + if Category.query.count() == 0: + print("Erstelle Standard-Kategorien...") + create_default_categories() + + # Stelle sicher, dass die Standard-Knoten für die öffentliche Mindmap existieren + if MindMapNode.query.count() == 0: + print("Erstelle Standard-Knoten für die Mindmap...") + + # Hauptknoten: Wissen + root_node = MindMapNode( + name="Wissen", + description="Zentrale Wissensbasis", + color_code="#4299E1", + is_public=True + ) + db.session.add(root_node) + db.session.flush() # Um die ID zu generieren + + # Verwandte Kategorien finden + philosophy = Category.query.filter_by(name="Philosophie").first() + science = Category.query.filter_by(name="Wissenschaft").first() + technology = Category.query.filter_by(name="Technologie").first() + arts = Category.query.filter_by(name="Künste").first() + + # Erstelle Hauptthemenknoten + nodes = [ + MindMapNode( + name="Philosophie", + description="Philosophisches Denken", + color_code="#9F7AEA", + category=philosophy, + is_public=True + ), + MindMapNode( + name="Wissenschaft", + description="Wissenschaftliche Erkenntnisse", + color_code="#48BB78", + category=science, + is_public=True + ), + MindMapNode( + name="Technologie", + description="Technologische Entwicklungen", + color_code="#ED8936", + category=technology, + is_public=True + ), + MindMapNode( + name="Künste", + description="Künstlerische Ausdrucksformen", + color_code="#ED64A6", + category=arts, + is_public=True + ) + ] + + # Füge Knoten zur Datenbank hinzu + for node in nodes: + db.session.add(node) + + db.session.commit() + + # Nachdem wir die IDs haben, füge die Verbindungen hinzu + all_nodes = MindMapNode.query.all() + root = MindMapNode.query.filter_by(name="Wissen").first() + + if root: + for node in all_nodes: + if node.id != root.id: + root.children.append(node) + + # Speichere die Änderungen + db.session.commit() + + print("Datenbankinitialisierung abgeschlossen.") + except Exception as e: + print(f"Fehler bei der Datenbankinitialisierung: {str(e)}") + db.session.rollback() + raise + +# Instead of before_first_request, which is deprecated in newer Flask versions +# Use a function to initialize the database that will be called during app creation +def init_app_database(app_instance): + """Initialisiert die Datenbank für die Flask-App""" + with app_instance.app_context(): + # Überprüfe und initialisiere die Datenbank bei Bedarf + if not initialize_db_if_needed(db, initialize_database): + print("WARNUNG: Datenbankinitialisierung fehlgeschlagen. Einige Funktionen könnten eingeschränkt sein.") + +# Call the function to initialize the database +init_app_database(app) + +# Benutzerdefinierter Decorator für Admin-Zugriff +def admin_required(f): + @wraps(f) + @login_required + def decorated_function(*args, **kwargs): + if not current_user.is_admin: + flash('Zugriff verweigert. 
Nur Administratoren dürfen diese Seite aufrufen.', 'error') + return redirect(url_for('index')) + return f(*args, **kwargs) + return decorated_function + +@login_manager.user_loader +def load_user(id): + return User.query.get(int(id)) + +# Routes for authentication +@app.route('/login', methods=['GET', 'POST']) +def login(): + if request.method == 'POST': + username = request.form.get('username') + password = request.form.get('password') + + user = User.query.filter_by(username=username).first() + if user and user.check_password(password): + login_user(user) + # Aktualisiere letzten Login-Zeitpunkt + user.last_login = datetime.utcnow() + db.session.commit() + + next_page = request.args.get('next') + return redirect(next_page or url_for('index')) + flash('Ungültiger Benutzername oder Passwort') + return render_template('login.html') + +@app.route('/register', methods=['GET', 'POST']) +def register(): + if request.method == 'POST': + username = request.form.get('username') + email = request.form.get('email') + password = request.form.get('password') + + if User.query.filter_by(username=username).first(): + flash('Benutzername existiert bereits') + return redirect(url_for('register')) + + if User.query.filter_by(email=email).first(): + flash('E-Mail ist bereits registriert') + return redirect(url_for('register')) + + user = User(username=username, email=email) + user.set_password(password) + db.session.add(user) + + # Erstelle eine Standard-Mindmap für den neuen Benutzer + default_mindmap = UserMindmap( + name='Meine Mindmap', + description='Meine persönliche Wissenslandschaft', + user=user + ) + db.session.add(default_mindmap) + db.session.commit() + + login_user(user) + flash('Dein Konto wurde erfolgreich erstellt!', 'success') + return redirect(url_for('index')) + return render_template('register.html') + +@app.route('/logout') +@login_required +def logout(): + logout_user() + return redirect(url_for('index')) + +# Route for the homepage +@app.route('/') +def index(): + return render_template('index.html') + +# Route for the mindmap page +@app.route('/mindmap') +def mindmap(): + """Zeigt die öffentliche Mindmap an.""" + try: + # Sicherstellen, dass wir Kategorien haben + if Category.query.count() == 0: + create_default_categories() + + # Hole alle Kategorien der obersten Ebene + categories = Category.query.filter_by(parent_id=None).all() + + # Transformiere Kategorien in ein anzeigbares Format für die Vorlage + category_tree = [build_category_tree(cat) for cat in categories] + + return render_template('mindmap.html', categories=category_tree) + except Exception as e: + # Bei Fehler leere Kategorienliste übergeben und Fehler protokollieren + print(f"Fehler beim Laden der Mindmap-Kategorien: {str(e)}") + return render_template('mindmap.html', categories=[], error=str(e)) + +# Route for user profile +@app.route('/profile') +@login_required +def profile(): + # Lade Benutzer-Mindmaps + user_mindmaps = UserMindmap.query.filter_by(user_id=current_user.id).all() + + # Lade Statistiken + thought_count = Thought.query.filter_by(user_id=current_user.id).count() + bookmark_count = db.session.query(user_thought_bookmark).filter( + user_thought_bookmark.c.user_id == current_user.id).count() + + # Berechne tatsächliche Werte für Benutzerstatistiken + contributions_count = Comment.query.filter_by(user_id=current_user.id).count() + + # Berechne Verbindungen (Anzahl der Gedankenverknüpfungen) + connections_count = ThoughtRelation.query.filter( + (ThoughtRelation.source_id.in_( + 
db.session.query(Thought.id).filter_by(user_id=current_user.id) + )) | + (ThoughtRelation.target_id.in_( + db.session.query(Thought.id).filter_by(user_id=current_user.id) + )) + ).count() + + # Berechne durchschnittliche Bewertung der Gedanken des Benutzers + avg_rating = db.session.query(func.avg(ThoughtRating.relevance_score)).join( + Thought, Thought.id == ThoughtRating.thought_id + ).filter(Thought.user_id == current_user.id).scalar() or 0 + + # Hole die Anzahl der Follower (falls implementiert) + # In diesem Beispiel nehmen wir an, dass es keine Follower-Funktionalität gibt + followers_count = 0 + + # Hole den Standort des Benutzers aus der Datenbank, falls vorhanden + location = "Deutschland" # Standardwert + + return render_template('profile.html', + user=current_user, + user_mindmaps=user_mindmaps, + thought_count=thought_count, + bookmark_count=bookmark_count, + connections_count=connections_count, + contributions_count=contributions_count, + followers_count=followers_count, + rating=round(avg_rating, 1), + location=location) + +# Route für Benutzereinstellungen +@app.route('/settings', methods=['GET', 'POST']) +@login_required +def settings(): + if request.method == 'POST': + action = request.form.get('action') + + if action == 'update_profile': + current_user.bio = request.form.get('bio') + + # Update avatar if provided + avatar_url = request.form.get('avatar_url') + if avatar_url: + current_user.avatar = avatar_url + + db.session.commit() + flash('Profil erfolgreich aktualisiert!', 'success') + + elif action == 'update_password': + current_password = request.form.get('current_password') + new_password = request.form.get('new_password') + confirm_password = request.form.get('confirm_password') + + if not current_user.check_password(current_password): + flash('Aktuelles Passwort ist nicht korrekt', 'error') + elif new_password != confirm_password: + flash('Neue Passwörter stimmen nicht überein', 'error') + else: + current_user.set_password(new_password) + db.session.commit() + flash('Passwort erfolgreich aktualisiert!', 'success') + + return redirect(url_for('settings')) + + return render_template('settings.html') + +# API-Endpunkt für Flash-Nachrichten +@app.route('/api/get_flash_messages') +def get_flash_messages(): + """Liefert aktuelle Flash-Nachrichten für API/JS-Clients.""" + # Hole alle gespeicherten Flash-Nachrichten + messages = [] + flashed_messages = session.get('_flashes', []) + + # Formatierung der Nachrichten für die API-Antwort + for category, message in flashed_messages: + messages.append({ + 'category': category, + 'message': message + }) + + # Lösche die Nachrichten aus der Session, nachdem sie abgerufen wurden + session.pop('_flashes', None) + + return jsonify(messages) + +# Routes für rechtliche Seiten +@app.route('/impressum/') +def impressum(): + return render_template('impressum.html') + +@app.route('/datenschutz/') +def datenschutz(): + return render_template('datenschutz.html') + +@app.route('/agb/') +def agb(): + return render_template('agb.html') + +@app.route('/ueber-uns/') +def ueber_uns(): + return render_template('ueber_uns.html') + +# Benutzer-Mindmap-Funktionalität +@app.route('/my-mindmap/<int:mindmap_id>') +@login_required +def user_mindmap(mindmap_id): + mindmap = UserMindmap.query.get_or_404(mindmap_id) + + # Sicherheitscheck: Nur eigene Mindmaps oder öffentliche Mindmaps + if mindmap.user_id != current_user.id and mindmap.is_private: + flash("Du hast keinen Zugriff auf diese Mindmap.", "error") + return redirect(url_for('profile')) + + return
+# Routes for the legal pages
+@app.route('/impressum/')
+def impressum():
+    return render_template('impressum.html')
+
+@app.route('/datenschutz/')
+def datenschutz():
+    return render_template('datenschutz.html')
+
+@app.route('/agb/')
+def agb():
+    return render_template('agb.html')
+
+@app.route('/ueber-uns/')
+def ueber_uns():
+    return render_template('ueber_uns.html')
+
+# User-mindmap functionality
+@app.route('/my-mindmap/<int:mindmap_id>')
+@login_required
+def user_mindmap(mindmap_id):
+    mindmap = UserMindmap.query.get_or_404(mindmap_id)
+
+    # Security check: only own mindmaps or public mindmaps
+    if mindmap.user_id != current_user.id and mindmap.is_private:
+        flash("Du hast keinen Zugriff auf diese Mindmap.", "error")
+        return redirect(url_for('profile'))
+
+    return render_template('user_mindmap.html', mindmap=mindmap)
+
+@app.route('/mindmap/create', methods=['GET', 'POST'])
+@login_required
+def create_mindmap():
+    if request.method == 'POST':
+        name = request.form.get('name')
+        description = request.form.get('description')
+        is_private = request.form.get('is_private') == 'on'
+
+        new_mindmap = UserMindmap(
+            name=name,
+            description=description,
+            user_id=current_user.id,
+            is_private=is_private
+        )
+
+        db.session.add(new_mindmap)
+        db.session.commit()
+
+        flash('Neue Mindmap erfolgreich erstellt!', 'success')
+        return redirect(url_for('user_mindmap', mindmap_id=new_mindmap.id))
+
+    return render_template('create_mindmap.html')
+
+@app.route('/mindmap/<int:mindmap_id>/edit', methods=['GET', 'POST'])
+@login_required
+def edit_mindmap(mindmap_id):
+    mindmap = UserMindmap.query.get_or_404(mindmap_id)
+
+    # Security check
+    if mindmap.user_id != current_user.id:
+        flash("Du kannst nur deine eigenen Mindmaps bearbeiten.", "error")
+        return redirect(url_for('profile'))
+
+    if request.method == 'POST':
+        mindmap.name = request.form.get('name')
+        mindmap.description = request.form.get('description')
+        mindmap.is_private = request.form.get('is_private') == 'on'
+
+        db.session.commit()
+        flash('Mindmap erfolgreich aktualisiert!', 'success')
+        return redirect(url_for('user_mindmap', mindmap_id=mindmap.id))
+
+    return render_template('edit_mindmap.html', mindmap=mindmap)
+
+@app.route('/mindmap/<int:mindmap_id>/delete', methods=['POST'])
+@login_required
+def delete_mindmap(mindmap_id):
+    mindmap = UserMindmap.query.get_or_404(mindmap_id)
+
+    # Security check
+    if mindmap.user_id != current_user.id:
+        flash("Du kannst nur deine eigenen Mindmaps löschen.", "error")
+        return redirect(url_for('profile'))
+
+    db.session.delete(mindmap)
+    db.session.commit()
+
+    flash('Mindmap erfolgreich gelöscht!', 'success')
+    return redirect(url_for('profile'))
+
+# API endpoints for mindmap data
+@app.route('/api/mindmap/public')
+def get_public_mindmap():
+    """Return the public mindmap structure."""
+    # Fetch all top-level categories
+    root_categories = Category.query.filter_by(parent_id=None).all()
+
+    # Build the tree structure
+    result = []
+    for category in root_categories:
+        result.append(build_category_tree(category))
+
+    return jsonify(result)
+
+def build_category_tree(category):
+    """
+    Build a tree structure for a category with all of its subcategories
+    and the nodes attached to them
+
+    Args:
+        category: a Category object
+
+    Returns:
+        dict: a JSON-serializable representation of the category structure
+    """
+    # Basic category information
+    category_dict = {
+        'id': category.id,
+        'name': category.name,
+        'description': category.description,
+        'color_code': category.color_code,
+        'icon': category.icon,
+        'nodes': [],
+        'children': []
+    }
+
+    # Attach the category's nodes
+    if category.nodes:
+        for node in category.nodes:
+            category_dict['nodes'].append({
+                'id': node.id,
+                'name': node.name,
+                'description': node.description or '',
+                'color_code': node.color_code or '#9F7AEA',
+                'thought_count': len(node.thoughts) if hasattr(node, 'thoughts') else 0
+            })
+
+    # Recurse into the subcategories
+    if category.children:
+        for child in category.children:
+            category_dict['children'].append(build_category_tree(child))
+
+    return category_dict
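For reference, a hedged sketch of the shape `build_category_tree()` produces, using the seed data from init_db.py (the ids are made up):

```python
# Illustrative output of build_category_tree(); ids are hypothetical.
example_tree = {
    'id': 2,
    'name': 'Technologie',
    'description': 'Hardware, Software, Tools und Plattformen',
    'color_code': '#10b981',
    'icon': 'cpu',
    'nodes': [
        {'id': 7, 'name': 'Flask',
         'description': 'Leichtgewichtiges Python-Webframework für die Entwicklung von Webanwendungen.',
         'color_code': '#3b82f6', 'thought_count': 0},
    ],
    'children': [],  # nested dicts of the same shape, built recursively
}
```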
+@app.route('/api/mindmap/user/<int:mindmap_id>')
+@login_required
+def get_user_mindmap(mindmap_id):
+    """Return the user-specific mindmap structure."""
+    mindmap = UserMindmap.query.get_or_404(mindmap_id)
+
+    # Security check
+    if mindmap.user_id != current_user.id and mindmap.is_private:
+        return jsonify({'error': 'Nicht autorisiert'}), 403
+
+    # Fetch all linked nodes together with their position
+    nodes_data = db.session.query(
+        MindMapNode, UserMindmapNode
+    ).join(
+        UserMindmapNode, UserMindmapNode.node_id == MindMapNode.id
+    ).filter(
+        UserMindmapNode.user_mindmap_id == mindmap_id
+    ).all()
+
+    # Format the nodes
+    nodes = []
+    for node, user_node in nodes_data:
+        nodes.append({
+            'id': node.id,
+            'name': node.name,
+            'description': node.description,
+            'color_code': node.color_code,
+            'x': user_node.x_position,
+            'y': user_node.y_position,
+            'scale': user_node.scale,
+            'thought_count': len(node.thoughts)
+        })
+
+    # Fetch the notes attached to this mindmap
+    notes = MindmapNote.query.filter_by(
+        mindmap_id=mindmap_id,
+        user_id=current_user.id
+    ).all()
+
+    notes_data = [{
+        'id': note.id,
+        'content': note.content,
+        'node_id': note.node_id,
+        'thought_id': note.thought_id,
+        'color_code': note.color_code,
+        'created_at': note.created_at.isoformat()
+    } for note in notes]
+
+    return jsonify({
+        'id': mindmap.id,
+        'name': mindmap.name,
+        'description': mindmap.description,
+        'is_private': mindmap.is_private,
+        'nodes': nodes,
+        'notes': notes_data
+    })
+
+@app.route('/api/mindmap/<int:mindmap_id>/add_node', methods=['POST'])
+@login_required
+def add_node_to_mindmap(mindmap_id):
+    """Add a public node to the user's mindmap."""
+    data = request.json
+    mindmap = UserMindmap.query.get_or_404(mindmap_id)
+
+    # Security check
+    if mindmap.user_id != current_user.id:
+        return jsonify({'error': 'Nicht autorisiert'}), 403
+
+    node_id = data.get('node_id')
+    x_pos = data.get('x', 0)
+    y_pos = data.get('y', 0)
+
+    node = MindMapNode.query.get_or_404(node_id)
+
+    # Check whether the node already exists in the mindmap
+    existing = UserMindmapNode.query.filter_by(
+        user_mindmap_id=mindmap_id,
+        node_id=node_id
+    ).first()
+
+    if existing:
+        # Update the position
+        existing.x_position = x_pos
+        existing.y_position = y_pos
+    else:
+        # Add a new node
+        user_node = UserMindmapNode(
+            user_mindmap_id=mindmap_id,
+            node_id=node_id,
+            x_position=x_pos,
+            y_position=y_pos
+        )
+        db.session.add(user_node)
+
+    db.session.commit()
+
+    return jsonify({
+        'success': True,
+        'node_id': node_id,
+        'x': x_pos,
+        'y': y_pos
+    })
+
+@app.route('/api/mindmap/<int:mindmap_id>/remove_node/<int:node_id>', methods=['DELETE'])
+@login_required
+def remove_node_from_mindmap(mindmap_id, node_id):
+    """Remove a node from the user's mindmap."""
+    mindmap = UserMindmap.query.get_or_404(mindmap_id)
+
+    # Security check
+    if mindmap.user_id != current_user.id:
+        return jsonify({'error': 'Nicht autorisiert'}), 403
+
+    user_node = UserMindmapNode.query.filter_by(
+        user_mindmap_id=mindmap_id,
+        node_id=node_id
+    ).first_or_404()
+
+    db.session.delete(user_node)
+    db.session.commit()
+
+    return jsonify({'success': True})
+
+@app.route('/api/mindmap/<int:mindmap_id>/update_node_position', methods=['POST'])
+@login_required
+def update_node_position(mindmap_id):
+    """Update the position of a node in the user's mindmap."""
+    data = request.json
+    mindmap = UserMindmap.query.get_or_404(mindmap_id)
+
+    # Security check
+    if mindmap.user_id != current_user.id:
+        return jsonify({'error': 'Nicht autorisiert'}), 403
+
+    node_id = data.get('node_id')
+    x_pos = data.get('x')
+    y_pos = data.get('y')
+    scale = data.get('scale')
+
+    user_node = UserMindmapNode.query.filter_by(
+        user_mindmap_id=mindmap_id,
+        node_id=node_id
+    ).first_or_404()
+
+    if x_pos is not None:
+        user_node.x_position = x_pos
+    if y_pos is not None:
+        user_node.y_position = y_pos
+    if scale is not None:
+        user_node.scale = scale
+
+    db.session.commit()
+
+    return jsonify({'success': True})
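A hedged client-side sketch of the placement workflow: log in, drop a public node onto a user mindmap, then nudge it. The ids, credentials and dev-server URL are assumptions:

```python
# Hedged sketch: place node 4 on mindmap 1, then move and scale it.
import requests

BASE = 'http://127.0.0.1:5000'
s = requests.Session()
s.post(f'{BASE}/login', data={'username': 'user', 'password': 'user'})

s.post(f'{BASE}/api/mindmap/1/add_node', json={'node_id': 4, 'x': 120, 'y': -40})
s.post(f'{BASE}/api/mindmap/1/update_node_position',
       json={'node_id': 4, 'x': 150, 'y': -40, 'scale': 1.2})
```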
+# Notes functionality
+@app.route('/api/mindmap/<int:mindmap_id>/notes', methods=['GET'])
+@login_required
+def get_mindmap_notes(mindmap_id):
+    """Return all notes attached to a mindmap."""
+    mindmap = UserMindmap.query.get_or_404(mindmap_id)
+
+    # Security check
+    if mindmap.user_id != current_user.id:
+        return jsonify({'error': 'Nicht autorisiert'}), 403
+
+    notes = MindmapNote.query.filter_by(
+        mindmap_id=mindmap_id,
+        user_id=current_user.id
+    ).all()
+
+    return jsonify([{
+        'id': note.id,
+        'content': note.content,
+        'node_id': note.node_id,
+        'thought_id': note.thought_id,
+        'color_code': note.color_code,
+        'created_at': note.created_at.isoformat(),
+        'last_modified': note.last_modified.isoformat()
+    } for note in notes])
+
+@app.route('/api/mindmap/<int:mindmap_id>/notes', methods=['POST'])
+@login_required
+def add_mindmap_note(mindmap_id):
+    """Add a new note to the mindmap."""
+    data = request.json
+    mindmap = UserMindmap.query.get_or_404(mindmap_id)
+
+    # Security check
+    if mindmap.user_id != current_user.id:
+        return jsonify({'error': 'Nicht autorisiert'}), 403
+
+    content = data.get('content')
+    node_id = data.get('node_id')
+    thought_id = data.get('thought_id')
+    color_code = data.get('color_code', "#FFF59D")  # yellow default
+
+    note = MindmapNote(
+        user_id=current_user.id,
+        mindmap_id=mindmap_id,
+        node_id=node_id,
+        thought_id=thought_id,
+        content=content,
+        color_code=color_code
+    )
+
+    db.session.add(note)
+    db.session.commit()
+
+    return jsonify({
+        'id': note.id,
+        'content': note.content,
+        'node_id': note.node_id,
+        'thought_id': note.thought_id,
+        'color_code': note.color_code,
+        'created_at': note.created_at.isoformat(),
+        'last_modified': note.last_modified.isoformat()
+    })
+
+@app.route('/api/notes/<int:note_id>', methods=['PUT'])
+@login_required
+def update_note(note_id):
+    """Update an existing note."""
+    data = request.json
+    note = MindmapNote.query.get_or_404(note_id)
+
+    # Security check
+    if note.user_id != current_user.id:
+        return jsonify({'error': 'Nicht autorisiert'}), 403
+
+    content = data.get('content')
+    color_code = data.get('color_code')
+
+    if content:
+        note.content = content
+    if color_code:
+        note.color_code = color_code
+
+    note.last_modified = datetime.utcnow()
+    db.session.commit()
+
+    return jsonify({
+        'id': note.id,
+        'content': note.content,
+        'color_code': note.color_code,
+        'last_modified': note.last_modified.isoformat()
+    })
+
+@app.route('/api/notes/<int:note_id>', methods=['DELETE'])
+@login_required
+def delete_note(note_id):
+    """Delete a note."""
+    note = MindmapNote.query.get_or_404(note_id)
+
+    # Security check
+    if note.user_id != current_user.id:
+        return jsonify({'error': 'Nicht autorisiert'}), 403
+
+    db.session.delete(note)
+    db.session.commit()
+
+    return jsonify({'success': True})
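The notes API follows the same pattern; a hedged round trip that creates a note and then recolors it (ids and credentials assumed):

```python
# Hedged sketch: create a note on mindmap 1, then change its color.
import requests

BASE = 'http://127.0.0.1:5000'
s = requests.Session()
s.post(f'{BASE}/login', data={'username': 'user', 'password': 'user'})

note = s.post(f'{BASE}/api/mindmap/1/notes',
              json={'content': 'Später vertiefen', 'node_id': 4}).json()
s.put(f"{BASE}/api/notes/{note['id']}", json={'color_code': '#A5D6A7'})
```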
+# API routes for mindmap and thoughts
+@app.route('/api/mindmap')
+def get_mindmap():
+    """API endpoint that serves the mindmap data in hierarchical form."""
+    # Root nodes: nodes without a parent
+    root_nodes = MindMapNode.query.\
+        outerjoin(node_relationship, MindMapNode.id == node_relationship.c.child_id).\
+        filter(node_relationship.c.parent_id == None).all()
+
+    result = []
+    for node in root_nodes:
+        node_data = build_node_tree(node)
+        result.append(node_data)
+    return jsonify({"nodes": result})
+
+def build_node_tree(node):
+    """Build a hierarchical representation of a node including its child nodes."""
+    thought_count = len(node.thoughts)
+    node_data = {
+        "id": node.id,
+        "name": node.name,
+        "description": node.description or "",
+        "thought_count": thought_count,
+        "children": []
+    }
+    for child in node.children:
+        child_data = build_node_tree(child)
+        node_data["children"].append(child_data)
+    return node_data
+
+@app.route('/api/nodes/<int:node_id>/thoughts')
+def get_node_thoughts(node_id):
+    """Return all thoughts linked to a node."""
+    node = MindMapNode.query.get_or_404(node_id)
+
+    thoughts = []
+    for thought in node.thoughts:
+        author = thought.author
+        thoughts.append({
+            'id': thought.id,
+            'title': thought.title,
+            'abstract': thought.abstract,
+            'content': thought.content[:200] + '...' if len(thought.content) > 200 else thought.content,
+            'keywords': thought.keywords,
+            'author': {
+                'id': author.id,
+                'username': author.username
+            },
+            'created_at': thought.created_at.isoformat(),
+            'color_code': thought.color_code,
+            'avg_rating': thought.average_rating,
+            'bookmarked': current_user.is_authenticated and thought in current_user.bookmarked_thoughts
+        })
+
+    return jsonify(thoughts)
+
+@app.route('/api/nodes/<int:node_id>/thoughts', methods=['POST'])
+@login_required
+def add_node_thought(node_id):
+    """Attach a new thought to a node."""
+    data = request.json
+    node = MindMapNode.query.get_or_404(node_id)
+
+    title = data.get('title')
+    content = data.get('content')
+    abstract = data.get('abstract', '')
+    keywords = data.get('keywords', '')
+    color_code = data.get('color_code', '#B39DDB')  # default purple
+
+    # Determine the node's category
+    category_name = node.category.name if node.category else "Allgemein"
+
+    thought = Thought(
+        title=title,
+        content=content,
+        abstract=abstract,
+        keywords=keywords,
+        color_code=color_code,
+        branch=category_name,
+        user_id=current_user.id,
+        source_type='User Input'
+    )
+
+    node.thoughts.append(thought)
+    db.session.add(thought)
+    db.session.commit()
+
+    return jsonify({
+        'id': thought.id,
+        'title': thought.title,
+        'success': True
+    })
+
+@app.route('/api/thoughts/<int:thought_id>', methods=['GET'])
+def get_thought(thought_id):
+    """Return the details of a thought."""
+    thought = Thought.query.get_or_404(thought_id)
+
+    author = thought.author
+    is_bookmarked = False
+    if current_user.is_authenticated:
+        is_bookmarked = thought in current_user.bookmarked_thoughts
+
+    # Fetch the linked nodes
+    nodes = [{
+        'id': node.id,
+        'name': node.name,
+        'category': node.category.name if node.category else None
+    } for node in thought.nodes]
+
+    return jsonify({
+        'id': thought.id,
+        'title': thought.title,
+        'content': thought.content,
+        'abstract': thought.abstract,
+        'keywords': thought.keywords,
+        'branch': thought.branch,
+        'color_code': thought.color_code,
+        'created_at': thought.created_at.isoformat(),
+        'author': {
+            'id': author.id,
+            'username': author.username,
+            'avatar': author.avatar
+        },
+        'avg_rating': thought.average_rating,
+        'bookmarked': is_bookmarked,
+        'nodes': nodes,
+        'source_type': thought.source_type
+    })
+
+@app.route('/api/thoughts', methods=['POST'])
+@login_required
+def add_thought():
+    """Create a new thought."""
+    data = request.json
+
+    title = data.get('title')
+    content = data.get('content')
+    abstract = data.get('abstract', '')
+    keywords = data.get('keywords', '')
+    branch = data.get('branch', 'Allgemein')
+    color_code = data.get('color_code', '#B39DDB')
+
+    thought = Thought(
+        title=title,
+        content=content,
+        abstract=abstract,
+        keywords=keywords,
+        branch=branch,
+        color_code=color_code,
+        user_id=current_user.id,
+        source_type='User Input'
+    )
+
+    # Attach nodes if ids were supplied
+    node_ids = data.get('node_ids', [])
+    for node_id in node_ids:
+        node = MindMapNode.query.get(node_id)
+        if node:
+            thought.nodes.append(node)
+
+    db.session.add(thought)
+    db.session.commit()
+
+    return jsonify({
+        'id': thought.id,
+        'title': thought.title,
+        'success': True
+    })
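A hedged end-to-end sketch for the thought endpoints: create a thought attached to a node, then read it back. Ids and credentials are assumptions:

```python
# Hedged sketch: create a thought via POST /api/thoughts and fetch it again.
import requests

BASE = 'http://127.0.0.1:5000'
s = requests.Session()
s.post(f'{BASE}/login', data={'username': 'user', 'password': 'user'})

created = s.post(f'{BASE}/api/thoughts', json={
    'title': 'REST-Schnittstellen',
    'content': 'Gedanken zu REST und Ressourcenmodellierung.',
    'keywords': 'api, rest',
    'node_ids': [4],
}).json()
detail = s.get(f"{BASE}/api/thoughts/{created['id']}").json()
print(detail['title'], [n['name'] for n in detail['nodes']])
```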
+@app.route('/api/thoughts/<int:thought_id>', methods=['PUT'])
+@login_required
+def update_thought(thought_id):
+    """Update an existing thought."""
+    thought = Thought.query.get_or_404(thought_id)
+
+    # Security check
+    if thought.user_id != current_user.id and not current_user.is_admin:
+        return jsonify({'error': 'Nicht autorisiert'}), 403
+
+    data = request.json
+
+    # Update only the fields that were sent
+    if 'title' in data:
+        thought.title = data['title']
+    if 'content' in data:
+        thought.content = data['content']
+    if 'abstract' in data:
+        thought.abstract = data['abstract']
+    if 'keywords' in data:
+        thought.keywords = data['keywords']
+    if 'color_code' in data:
+        thought.color_code = data['color_code']
+
+    thought.last_modified = datetime.utcnow()
+    db.session.commit()
+
+    return jsonify({
+        'id': thought.id,
+        'success': True
+    })
+
+@app.route('/api/thoughts/<int:thought_id>', methods=['DELETE'])
+@login_required
+def delete_thought(thought_id):
+    """Delete a thought."""
+    thought = Thought.query.get_or_404(thought_id)
+
+    # Security check
+    if thought.user_id != current_user.id and not current_user.is_admin:
+        return jsonify({'error': 'Nicht autorisiert'}), 403
+
+    db.session.delete(thought)
+    db.session.commit()
+
+    return jsonify({'success': True})
+
+@app.route('/api/thoughts/<int:thought_id>/bookmark', methods=['POST'])
+@login_required
+def bookmark_thought(thought_id):
+    """Add a thought to the user's bookmarks or remove it again."""
+    thought = Thought.query.get_or_404(thought_id)
+
+    # Toggle the bookmark state
+    if thought in current_user.bookmarked_thoughts:
+        current_user.bookmarked_thoughts.remove(thought)
+        action = 'removed'
+    else:
+        current_user.bookmarked_thoughts.append(thought)
+        action = 'added'
+
+    db.session.commit()
+
+    return jsonify({
+        'success': True,
+        'action': action
+    })
+
+@app.route('/api/categories')
+def get_categories():
+    """API endpoint that returns all categories as a hierarchical structure"""
+    try:
+        # Fetch all top-level categories
+        categories = Category.query.filter_by(parent_id=None).all()
+
+        # Transform them into a tree structure
+        category_tree = [build_category_tree(cat) for cat in categories]
+
+        return jsonify(category_tree)
+    except Exception as e:
+        print(f"Error fetching the categories: {str(e)}")
+        return jsonify({
+            'success': False,
+            'error': 'Kategorien konnten nicht geladen werden'
+        }), 500
+
+@app.route('/api/set_dark_mode', methods=['POST'])
+def set_dark_mode():
+    """Store the dark-mode setting in the session."""
+    data = request.json
+    dark_mode = data.get('darkMode', False)
+
+    session['dark_mode'] = 'true' if dark_mode else 'false'
+    session.permanent = True
+
+    return jsonify({
+        'success': True,
+        'darkMode': dark_mode
+    })
+
+@app.route('/api/get_dark_mode', methods=['GET'])
+def get_dark_mode():
+    """Return the current dark-mode setting."""
+    # Default: dark mode enabled; convert the stored string back into a
+    # boolean so that GET and POST report the same type
+    dark_mode = session.get('dark_mode', 'true') == 'true'
+
+    return jsonify({
+        'success': True,
+        'darkMode': dark_mode
+    })
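Because both endpoints work on the session, a hedged test-client round trip can check that the flag persists and that GET reports the same boolean type that POST accepted (assumes `app` is importable):

```python
# Hedged sketch: dark-mode setting round trip through the session.
from app import app

client = app.test_client()
client.post('/api/set_dark_mode', json={'darkMode': False})
assert client.get('/api/get_dark_mode').get_json()['darkMode'] is False
```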
+# Error handlers
+@app.errorhandler(404)
+def page_not_found(e):
+    return render_template('errors/404.html'), 404
+
+@app.errorhandler(403)
+def forbidden(e):
+    return render_template('errors/403.html'), 403
+
+@app.errorhandler(500)
+def internal_server_error(e):
+    return render_template('errors/500.html'), 500
+
+@app.errorhandler(429)
+def too_many_requests(e):
+    return render_template('errors/429.html'), 429
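If API clients should receive JSON errors while the website keeps its HTML error pages, one hedged alternative is content negotiation inside a single handler, rather than registering a second handler for the same status code (Flask lets the most recent registration win silently):

```python
# Hedged variant: one 404 handler that serves JSON to API clients
# and the HTML error page to everyone else.
from flask import request, jsonify, render_template

@app.errorhandler(404)
def page_not_found(e):
    if request.path.startswith('/api/'):
        return jsonify({'error': 'Nicht gefunden'}), 404
    return render_template('errors/404.html'), 404
```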
+# OpenAI integration for the AI assistant
+@app.route('/api/assistant', methods=['POST'])
+def chat_with_assistant():
+    """Chatbot API with OpenAI integration and database access."""
+    data = request.json
+
+    # Check whether we received a single prompt or a messages array
+    if 'messages' in data:
+        messages = data.get('messages', [])
+        if not messages:
+            return jsonify({
+                'error': 'Keine Nachrichten vorhanden.'
+            }), 400
+
+        # Use the supplied system message if present, otherwise the default one
+        system_message = next((msg['content'] for msg in messages if msg['role'] == 'system'),
+            "Du bist ein spezialisierter Assistent für Systades, eine innovative Wissensmanagement-Plattform. "
+            "Systades ist ein intelligentes System zur Verwaltung, Verknüpfung und Visualisierung von Wissen. "
+            "Die Plattform ermöglicht es Nutzern, Gedanken zu erfassen, in Kategorien zu organisieren und durch Mindmaps zu visualisieren. "
+            "Wichtige Funktionen sind:\n"
+            "- Gedankenverwaltung mit Titeln, Zusammenfassungen und Keywords\n"
+            "- Kategorisierung und thematische Organisation\n"
+            "- Interaktive Mindmaps zur Wissensvisualisierung\n"
+            "- KI-gestützte Analyse und Zusammenfassung von Inhalten\n"
+            "- Kollaborative Wissensarbeit und Teilen von Inhalten\n\n"
+            "Du antwortest AUSSCHLIESSLICH auf Fragen bezüglich der Systades-Wissensdatenbank und Website. "
+            "Du kannst Informationen zu Gedanken, Kategorien und Mindmaps liefern und durch Themen führen. "
+            "Antworte informativ, sachlich und gut strukturiert auf Deutsch.")
+
+        # Format the messages for the OpenAI API
+        api_messages = [{"role": "system", "content": system_message}]
+
+        # Append the user and assistant messages
+        for msg in messages:
+            if msg['role'] in ['user', 'assistant']:
+                api_messages.append({"role": msg['role'], "content": msg['content']})
+    else:
+        # Legacy path for a single, direct prompt
+        prompt = data.get('prompt', '')
+        context = data.get('context', '')
+        selected_items = data.get('selected_items', [])  # items selected from the database
+
+        if not prompt:
+            return jsonify({
+                'error': 'Prompt darf nicht leer sein.'
+            }), 400
+
+        # Request a summary of several thoughts or an analysis
+        system_message = (
+            "Du bist ein spezialisierter Assistent für Systades, eine innovative Wissensmanagement-Plattform. "
+            "Systades ist ein intelligentes System zur Verwaltung, Verknüpfung und Visualisierung von Wissen. "
+            "Die Plattform ermöglicht es Nutzern, Gedanken zu erfassen, in Kategorien zu organisieren und durch Mindmaps zu visualisieren. "
+            "Wichtige Funktionen sind:\n"
+            "- Gedankenverwaltung mit Titeln, Zusammenfassungen und Keywords\n"
+            "- Kategorisierung und thematische Organisation\n"
+            "- Interaktive Mindmaps zur Wissensvisualisierung\n"
+            "- KI-gestützte Analyse und Zusammenfassung von Inhalten\n"
+            "- Kollaborative Wissensarbeit und Teilen von Inhalten\n\n"
+            "Du antwortest AUSSCHLIESSLICH auf Fragen bezüglich der Systades-Wissensdatenbank und Website. "
+            "Du kannst Informationen zu Gedanken, Kategorien und Mindmaps liefern und durch Themen führen. "
+            "Antworte informativ, sachlich und gut strukturiert auf Deutsch."
+        )
+
+        if context:
+            system_message += f"\n\nKontext: {context}"
+
+        if selected_items:
+            system_message += "\n\nAusgewählte Elemente aus der Datenbank:\n"
+            for item in selected_items:
+                if 'type' in item and 'data' in item:
+                    if item['type'] == 'thought':
+                        system_message += f"- Gedanke: {item['data'].get('title', 'Unbekannter Titel')}\n"
+                        system_message += f"  Zusammenfassung: {item['data'].get('abstract', 'Keine Zusammenfassung')}\n"
+                        system_message += f"  Keywords: {item['data'].get('keywords', 'Keine Keywords')}\n"
+                    elif item['type'] == 'category':
+                        system_message += f"- Kategorie: {item['data'].get('name', 'Unbekannte Kategorie')}\n"
+                        system_message += f"  Beschreibung: {item['data'].get('description', 'Keine Beschreibung')}\n"
+                        system_message += f"  Unterkategorien: {item['data'].get('subcategories', 'Keine Unterkategorien')}\n"
+                    elif item['type'] == 'mindmap':
+                        system_message += f"- Mindmap: {item['data'].get('name', 'Unbekannte Mindmap')}\n"
+                        system_message += f"  Beschreibung: {item['data'].get('description', 'Keine Beschreibung')}\n"
+                        system_message += f"  Knoten: {item['data'].get('nodes', 'Keine Knoten')}\n"
+
+        api_messages = [
+            {"role": "system", "content": system_message},
+            {"role": "user", "content": prompt}
+        ]
+
+    # Extract the last user message for the database lookup
+    user_message = next((msg['content'] for msg in reversed(api_messages) if msg['role'] == 'user'), '')
+
+    # Check whether the request asks for information from the database
+    db_context = check_database_query(user_message)
+
+    if db_context:
+        # Extend the context with the database information
+        api_messages.append({
+            "role": "system",
+            "content": f"Hier sind relevante Informationen aus der Datenbank:\n\n{db_context}"
+        })
+
+    try:
+        # Verify that an OpenAI API key is configured
+        if not client.api_key or client.api_key.startswith("sk-dummy"):
+            print("Warning: the OpenAI API key is missing or only a dummy value!")
+            return jsonify({
+                'error': 'Der OpenAI API-Key ist nicht korrekt konfiguriert. Bitte konfigurieren Sie die Umgebungsvariable OPENAI_API_KEY.'
+            }), 500
+
+        # API call with a timeout
+        import time
+        start_time = time.time()
+
+        response = client.chat.completions.create(
+            model="gpt-4o-mini",
+            messages=api_messages,
+            max_tokens=1000,  # generous limit for detailed answers and guided tours
+            temperature=0.7,
+            timeout=20  # 20-second timeout
+        )
+
+        print(f"OpenAI API response time: {time.time() - start_time:.2f} seconds")
+
+        answer = response.choices[0].message.content
+
+        # The new message format expects `response` instead of `answer`
+        return jsonify({
+            'response': answer
+        })
+
+    except Exception as e:
+        import traceback
+        print(f"Error during the OpenAI request: {str(e)}")
+        print(traceback.format_exc())
+
+        return jsonify({
+            'error': f'Fehler bei der OpenAI-Anfrage: {str(e)}'
+        }), 500
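Once the endpoint is up, a hedged client call using the messages format might look like this (dev-server URL assumed):

```python
# Hedged sketch of a client request against /api/assistant.
import requests

resp = requests.post('http://127.0.0.1:5000/api/assistant', json={
    'messages': [
        {'role': 'user', 'content': 'Welche Kategorien gibt es in Systades?'},
    ],
})
body = resp.json()
print(body.get('response') or body.get('error'))
```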
+def check_database_query(user_message):
+    """
+    Check whether the user request asks for database information and, if so,
+    pull the relevant records out of the database.
+    """
+    context = []
+
+    # Only words longer than three characters are useful search terms;
+    # or_() needs its clauses unpacked and must not be called without any
+    search_words = [word for word in user_message.split() if len(word) > 3]
+
+    # Check the request against the different query patterns
+    if search_words and any(keyword in user_message.lower() for keyword in ['gedanken', 'thought', 'beitrag', 'inhalt']):
+        # Look for matching thoughts
+        thoughts = Thought.query.filter(
+            db.or_(*[Thought.title.ilike(f'%{word}%') for word in search_words])
+        ).limit(5).all()
+
+        if thoughts:
+            context.append("Relevante Gedanken:")
+            for thought in thoughts:
+                context.append(f"- Titel: {thought.title}")
+                context.append(f"  Zusammenfassung: {thought.abstract if thought.abstract else 'Keine Zusammenfassung verfügbar'}")
+                context.append(f"  Keywords: {thought.keywords if thought.keywords else 'Keine Keywords verfügbar'}")
+                context.append("")
+
+    if search_words and any(keyword in user_message.lower() for keyword in ['kategorie', 'category', 'themengebiet', 'bereich']):
+        # Look for matching categories
+        categories = Category.query.filter(
+            db.or_(*[Category.name.ilike(f'%{word}%') for word in search_words])
+        ).limit(5).all()
+
+        if categories:
+            context.append("Relevante Kategorien:")
+            for category in categories:
+                context.append(f"- Name: {category.name}")
+                context.append(f"  Beschreibung: {category.description}")
+                context.append("")
+
+    if search_words and any(keyword in user_message.lower() for keyword in ['mindmap', 'karte', 'visualisierung']):
+        # Search the public mindmap nodes
+        mindmap_nodes = MindMapNode.query.filter(
+            MindMapNode.is_public == True,
+            db.or_(*[MindMapNode.name.ilike(f'%{word}%') for word in search_words])
+        ).limit(5).all()
+
+        if mindmap_nodes:
+            context.append("Relevante Mindmap-Knoten:")
+            for node in mindmap_nodes:
+                context.append(f"- Name: {node.name}")
+                context.append(f"  Beschreibung: {node.description if node.description else 'Keine Beschreibung verfügbar'}")
+                if node.category:
+                    context.append(f"  Kategorie: {node.category.name}")
+                context.append("")
+
+    return "\n".join(context) if context else ""
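The queries above unpack one `ilike` clause per search word into `or_()`; the same pattern in isolation, with the guard against an empty clause list that `or_()` does not accept:

```python
# Standalone sketch of the keyword-OR pattern used above.
from sqlalchemy import or_

def keyword_filter(column, message, min_len=4):
    """Build an OR of ILIKE clauses for all sufficiently long words."""
    clauses = [column.ilike(f'%{word}%') for word in message.split() if len(word) >= min_len]
    return or_(*clauses) if clauses else None

# Hypothetical usage: f = keyword_filter(Thought.title, 'Mindmap Wissensmanagement')
# if f is not None: Thought.query.filter(f).limit(5).all()
```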
+@app.route('/search')
+def search_thoughts_page():
+    """Render the thought-search page."""
+    return render_template('search.html')
+
+@app.route('/my_account')
+def my_account():
+    """Show the personal bookmark list."""
+    if not current_user.is_authenticated:
+        flash('Bitte melde dich an, um auf deine Merkliste zuzugreifen.', 'warning')
+        return redirect(url_for('login'))
+
+    # Fetch the user's bookmarks
+    bookmarked_thoughts = current_user.bookmarked_thoughts
+
+    return render_template('my_account.html', bookmarked_thoughts=bookmarked_thoughts)
+
+# Dummy route that avoids 404 errors for the no-longer-used network background images
+@app.route('/static/network-bg.jpg')
+@app.route('/static/network-bg.svg')
+def dummy_network_bg():
+    """Empty response for the retired network background images."""
+    return '', 200
+
+# Route for explicitly reloading the environment variables
+@app.route('/admin/reload-env', methods=['POST'])
+@admin_required
+def reload_env():
+    """Reload the environment variables from the .env file."""
+    try:
+        # Re-read the .env file; override replaces values that are already set
+        # (load_dotenv() has no `force` parameter)
+        load_dotenv(override=True)
+
+        # The OpenAI API key is hard-coded; the client was already
+        # initialized with it
+
+        # Update further environment variables here if needed
+        app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY', app.config['SECRET_KEY'])
+
+        return jsonify({
+            'success': True,
+            'message': 'Umgebungsvariablen wurden erfolgreich neu geladen.'
+        })
+    except Exception as e:
+        return jsonify({
+            'success': False,
+            'message': f'Fehler beim Neuladen der Umgebungsvariablen: {str(e)}'
+        }), 500
+
+@app.route('/api/refresh-mindmap')
+def refresh_mindmap():
+    """
+    API endpoint for reloading the mindmap data after the database
+    connection failed temporarily
+    """
+    try:
+        # Make sure categories exist
+        if Category.query.count() == 0:
+            create_default_categories()
+
+        # Fetch all top-level categories and nodes
+        categories = Category.query.filter_by(parent_id=None).all()
+        category_tree = [build_category_tree(cat) for cat in categories]
+
+        # Fetch all mindmap nodes
+        nodes = MindMapNode.query.all()
+        node_data = []
+
+        for node in nodes:
+            node_obj = {
+                'id': node.id,
+                'name': node.name,
+                'description': node.description or '',
+                'color_code': node.color_code or '#9F7AEA',
+                'thought_count': len(node.thoughts),
+                'category_id': node.category_id
+            }
+
+            # Add the connections
+            node_obj['connections'] = [{'target': child.id} for child in node.children]
+
+            node_data.append(node_obj)
+
+        return jsonify({
+            'success': True,
+            'categories': category_tree,
+            'nodes': node_data
+        })
+
+    except Exception as e:
+        print(f"Error reloading the mindmap: {str(e)}")
+        return jsonify({
+            'success': False,
+            'error': 'Datenbankverbindung konnte nicht hergestellt werden'
+        }), 500
+
+# Error handling for malformed API requests; the 404/500 cases are already
+# covered by the template-based handlers above
+@app.errorhandler(400)
+def bad_request(e):
+    return jsonify({'error': 'Fehlerhafte Anfrage'}), 400
+
+# Start Flask; this block stays at the end of the module so that every route
+# above is registered before the development server starts serving
+if __name__ == '__main__':
+    with app.app_context():
+        # Make sure tables exist
+        db.create_all()
+        socketio.run(app, debug=True, host='0.0.0.0')
\ No newline at end of file
diff --git a/check_schema.py b/check_schema.py
new file mode 100644
index 0000000..2714d89
--- /dev/null
+++ b/check_schema.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import sqlite3
+
+# Verbindung zur Datenbank herstellen
+conn = sqlite3.connect('systades.db')
+cursor = conn.cursor()
+
+# Liste aller Tabellen abrufen
+print("Alle Tabellen in der Datenbank:")
+cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
+tables = cursor.fetchall()
+for table in tables:
+    print(f"- {table[0]}")
+
+# Schema der Datenbank abrufen
+cursor.execute("SELECT sql FROM sqlite_master WHERE type='table';")
+schemas = cursor.fetchall()
+
+# Schematische Informationen ausgeben
+print("\nDatenbankschema:")
+for schema in schemas:
+    print("\n" + str(schema[0]))
+
+# Schema der User-Tabelle genauer untersuchen, falls vorhanden
+if ('user',) in tables:
+    print("\n\nBenutzer-Tabellenschema:")
+    cursor.execute("PRAGMA table_info(user);")
+    user_columns = cursor.fetchall()
+    for column in user_columns:
+        print(f"Column: {column[1]}, Type: {column[2]}, NOT NULL: {column[3]}, Default: {column[4]}, Primary Key: {column[5]}")
+
+conn.close()
\ No newline at end of file
diff --git a/create_default_users.py b/create_default_users.py
new file mode 100644
index 0000000..3136d44
--- /dev/null
+++ b/create_default_users.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import sqlite3
+import os
+from werkzeug.security import generate_password_hash
+from datetime import datetime
+
+# Prüfen, ob die Datenbank existiert
+db_path = 'systades.db'
+if not os.path.exists(db_path):
+    print(f"Datenbank {db_path} existiert nicht.")
+    exit(1)
+
+# Verbindung zur Datenbank herstellen
+conn = sqlite3.connect(db_path)
+cursor = conn.cursor()
+
+# Überprüfen, ob bereits Benutzer vorhanden sind
+cursor.execute("SELECT COUNT(*) FROM user;")
+user_count = cursor.fetchone()[0]
+
+if user_count == 0:
+    print("Keine Benutzer in der Datenbank gefunden. Erstelle Standardbenutzer...")
+
+    # Standardbenutzer definieren
+    default_users = [
+        {
+            'username': 'admin',
+            'email': 'admin@example.com',
+            'password': generate_password_hash('admin'),
+            'created_at': datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S'),
+            'is_active': 1,
+            'role': 'admin'
+        },
+        {
+            'username': 'user',
+            'email': 'user@example.com',
+            'password': generate_password_hash('user'),
+            'created_at': datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S'),
+            'is_active': 1,
+            'role': 'user'
+        }
+    ]
+
+    # Benutzer einfügen
+    for user in default_users:
+        cursor.execute("""
+            INSERT INTO user (username, email, password, created_at, is_active, role)
+            VALUES (?, ?, ?, ?, ?, ?);
+        """, (
+            user['username'],
+            user['email'],
+            user['password'],
+            user['created_at'],
+            user['is_active'],
+            user['role']
+        ))
+    conn.commit()
+    print(f"{len(default_users)} Standardbenutzer wurden erstellt.")
+else:
+    print(f"Es sind bereits {user_count} Benutzer in der Datenbank vorhanden.")
+
+# Überprüfen der eingefügten Benutzer
+print("\nBenutzer in der Datenbank:")
+cursor.execute("SELECT id, username, email, role FROM user;")
+users = cursor.fetchall()
+for user in users:
+    print(f"ID: {user[0]}, Benutzername: {user[1]}, E-Mail: {user[2]}, Rolle: {user[3]}")
+
+conn.close()
+print("\nBenutzeraktualisierung abgeschlossen.")
\ No newline at end of file
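The scripts hash passwords with Werkzeug before inserting them; a quick hedged sanity check of that round trip:

```python
# Hedged sketch: verify that Werkzeug password hashes round-trip.
from werkzeug.security import generate_password_hash, check_password_hash

hashed = generate_password_hash('admin')
assert check_password_hash(hashed, 'admin')
assert not check_password_hash(hashed, 'falsch')
```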
diff --git a/database/__pycache__/models.cpython-313.pyc b/database/__pycache__/models.cpython-313.pyc
new file mode 100644
index 0000000..78b3d6a
Binary files /dev/null and b/database/__pycache__/models.cpython-313.pyc differ
diff --git a/database/systades.V2.db.backup b/database/systades.V2.db.backup
new file mode 100644
index 0000000..063f9e9
Binary files /dev/null and b/database/systades.V2.db.backup differ
diff --git a/database/systades.db b/database/systades.db
new file mode 100644
index 0000000..c8067b8
Binary files /dev/null and b/database/systades.db differ
diff --git a/database/systades.db.backup b/database/systades.db.backup
new file mode 100644
index 0000000..1223cb7
Binary files /dev/null and b/database/systades.db.backup differ
diff --git a/docs/ANLEITUNG.md b/docs/ANLEITUNG.md
new file mode 100644
index 0000000..7be84ae
--- /dev/null
+++ b/docs/ANLEITUNG.md
@@ -0,0 +1,69 @@
+# Guide: Animated Network Background
+
+This guide explains how to set up a network image as an animated background for the entire website.
+
+## Option 1: Manual installation
+
+1. Copy the desired network image (e.g. `d2efd014-1325-471f-b9a7-90d025eb81d6.png`) to the file `website/static/network-bg.jpg`.
+
+   You can use the bundled batch script for this:
+   ```
+   copy-network-image.bat d2efd014-1325-471f-b9a7-90d025eb81d6.png
+   ```
+
+2. Start the Flask server with the following command:
+   ```
+   python start-flask-server.py
+   ```
+
+3. Open the website at http://127.0.0.1:5000/
+
+## Adjusting the animation
+
+You can adjust the animation of the network background by editing the file `website/static/network-background.js`. These are the most important parameters:
+
+```javascript
+let animationSpeed = 0.0005; // rotation speed
+let scaleSpeed = 0.0002;     // scaling speed
+let opacitySpeed = 0.0003;   // speed of the opacity change
+```
+
+## Animated mindmap connections
+
+The connections between the nodes in the mindmap are now drawn with a flowing animation. These animations make the relationships more visible and the interaction with the map more intuitive.
+
+### Features:
+
+1. **Animated lines**: The connecting lines between the nodes move in a flowing pattern.
+2. **Highlight on hover**: Hovering over a node or a connection highlights it.
+3. **Category relationships**: The visual connections between the categories are now easier to recognize.
+
+## Position of the selection box
+
+The selection box on the map was moved further to the left so that it is fully visible when no selection has been made. Its size was also adjusted to improve readability.
+
+## Restoring the original background (optional)
+
+If you want to return to the original starry background, make the following changes:
+
+1. Edit the file `website/templates/base.html` and replace:
+   ```html
+
+   ```
+
+   with:
+   ```html
+
+
+   ```
+
+2. Edit the file `website/templates/mindmap.html` and remove the line:
+   ```html
+
+   ```
+
+3. Remove the CSS code for `#mindmap-container::before` and the other network-specific styles from the file `website/templates/mindmap.html`.
+
+4. Restart the Flask server to apply the changes.
\ No newline at end of file
diff --git a/docs/Grundstruktur (funktionales Modell).pdf b/docs/Grundstruktur (funktionales Modell).pdf
new file mode 100644
index 0000000..746a29f
Binary files /dev/null and b/docs/Grundstruktur (funktionales Modell).pdf differ
diff --git a/example.env b/example.env
new file mode 100644
index 0000000..831801d
--- /dev/null
+++ b/example.env
@@ -0,0 +1,13 @@
+# MindMap environment variables
+# Copy this file to .env and adjust the values
+
+# Flask
+SECRET_KEY=dein-geheimer-schluessel-hier
+
+# OpenAI API
+OPENAI_API_KEY=sk-dein-openai-api-schluessel-hier
+
+# Database
+# A different database URL can be configured here if needed
+# The path is given relative to the project directory
+# SQLALCHEMY_DATABASE_URI=sqlite:////absoluter/pfad/zu/database/systades.db
\ No newline at end of file
diff --git a/fix_user_table.py b/fix_user_table.py
new file mode 100644
index 0000000..54f3b0f
--- /dev/null
+++ b/fix_user_table.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import sqlite3
+import os
+
+# Prüfen, ob die Datenbank existiert
+db_path = 'systades.db'
+if not os.path.exists(db_path):
+    print(f"Datenbank {db_path} existiert nicht.")
+    exit(1)
+
+# Verbindung zur Datenbank herstellen
+conn = sqlite3.connect(db_path)
+cursor = conn.cursor()
+
+# Prüfen, ob die User-Tabelle existiert
+cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='user';")
+if not cursor.fetchone():
+    print("Die Tabelle 'user' existiert nicht. Erstelle neue Tabelle...")
+    cursor.execute('''
+        CREATE TABLE user (
+            id INTEGER PRIMARY KEY AUTOINCREMENT,
+            username VARCHAR(80) NOT NULL UNIQUE,
+            email VARCHAR(120) NOT NULL UNIQUE,
+            password VARCHAR(512) NOT NULL,
+            created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
+            is_active BOOLEAN DEFAULT 1,
+            role VARCHAR(20) DEFAULT 'user'
+        )
+    ''')
+    conn.commit()
+    print("Die Tabelle 'user' wurde erfolgreich erstellt.")
+else:
+    # Überprüfen, ob die Spalte 'password' existiert
+    cursor.execute("PRAGMA table_info(user);")
+    columns = cursor.fetchall()
+    column_names = [column[1] for column in columns]
+
+    if 'password' not in column_names:
+        print("Die Spalte 'password' fehlt in der Tabelle 'user'. Füge Spalte hinzu...")
+        cursor.execute("ALTER TABLE user ADD COLUMN password VARCHAR(512);")
+        conn.commit()
+        print("Die Spalte 'password' wurde erfolgreich hinzugefügt.")
+    else:
+        print("Die Spalte 'password' existiert bereits in der Tabelle 'user'.")
+
+# Überprüfen der aktualisierten Spaltenstruktur
+print("\nAktualisierte Tabellenspalten der 'user'-Tabelle:")
+cursor.execute("PRAGMA table_info(user);")
+updated_columns = cursor.fetchall()
+for column in updated_columns:
+    print(f"Column: {column[1]}, Type: {column[2]}, NOT NULL: {column[3]}, Default: {column[4]}, Primary Key: {column[5]}")
+
+# Datenbanktabellen anzeigen
+print("\nAlle Tabellen in der Datenbank:")
+cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
+tables = cursor.fetchall()
+for table in tables:
+    print(f"- {table[0]}")
+
+# Schemaüberprüfung der user-Tabelle
+print("\nSchema der 'user'-Tabelle:")
+cursor.execute("SELECT sql FROM sqlite_master WHERE type='table' AND name='user';")
+schema = cursor.fetchone()
+if schema:
+    print(schema[0])
+
+conn.close()
+print("\nDatenbankaktualisierung abgeschlossen.")
\ No newline at end of file
diff --git a/init_db.py b/init_db.py
new file mode 100644
index 0000000..59bebab
--- /dev/null
+++ b/init_db.py
@@ -0,0 +1,241 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from flask import Flask
+from models import db, User, Thought, Comment, MindMapNode, ThoughtRelation, ThoughtRating, RelationType
+from models import Category, UserMindmap, UserMindmapNode, MindmapNote
+
+# Erstelle eine temporäre Flask-App, um die Datenbank zu initialisieren
+app = Flask(__name__)
+app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///systades.db'
+app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
+db.init_app(app)
+
+def init_db():
+    with app.app_context():
+        print("Initialisiere Datenbank...")
+
+        # Tabellen erstellen
+        db.create_all()
+        print("Tabellen wurden erstellt.")
+
+        # Standardbenutzer erstellen, falls keine vorhanden sind
+        if User.query.count() == 0:
+            print("Erstelle Standardbenutzer...")
+            create_default_users()
+
+        # Standardkategorien erstellen, falls keine vorhanden sind
+        if Category.query.count() == 0:
+            print("Erstelle Standardkategorien...")
+            create_default_categories()
+
+        # Beispiel-Mindmap erstellen, falls keine Knoten vorhanden sind
+        if MindMapNode.query.count() == 0:
+            print("Erstelle Beispiel-Mindmap...")
+            create_sample_mindmap()
+
+        print("Datenbankinitialisierung abgeschlossen.")
+
+def create_default_users():
+    """Erstellt Standardbenutzer für die Anwendung"""
+    users = [
+        {
+            'username': 'admin',
+            'email': 'admin@example.com',
+            'password': 'admin',
+            'role': 'admin'
+        },
+        {
+            'username': 'user',
'email': 'user@example.com', + 'password': 'user', + 'role': 'user' + } + ] + + for user_data in users: + password = user_data.pop('password') + user = User(**user_data) + user.set_password(password) + db.session.add(user) + + db.session.commit() + print(f"{len(users)} Benutzer wurden erstellt.") + +def create_default_categories(): + """Erstellt die Standardkategorien für die Mindmap""" + categories = [ + { + 'name': 'Konzept', + 'description': 'Abstrakte Ideen und theoretische Konzepte', + 'color_code': '#6366f1', + 'icon': 'lightbulb' + }, + { + 'name': 'Technologie', + 'description': 'Hardware, Software, Tools und Plattformen', + 'color_code': '#10b981', + 'icon': 'cpu' + }, + { + 'name': 'Prozess', + 'description': 'Workflows, Methodologien und Vorgehensweisen', + 'color_code': '#f59e0b', + 'icon': 'git-branch' + }, + { + 'name': 'Person', + 'description': 'Personen, Teams und Organisationen', + 'color_code': '#ec4899', + 'icon': 'user' + }, + { + 'name': 'Dokument', + 'description': 'Dokumentationen, Referenzen und Ressourcen', + 'color_code': '#3b82f6', + 'icon': 'file-text' + } + ] + + for cat_data in categories: + category = Category(**cat_data) + db.session.add(category) + + db.session.commit() + print(f"{len(categories)} Kategorien wurden erstellt.") + +def create_sample_mindmap(): + """Erstellt eine Beispiel-Mindmap mit Knoten und Beziehungen""" + + # Kategorien für die Zuordnung + categories = Category.query.all() + category_map = {cat.name: cat for cat in categories} + + # Beispielknoten erstellen + nodes = [ + { + 'name': 'Wissensmanagement', + 'description': 'Systematische Erfassung, Speicherung und Nutzung von Wissen in Organisationen.', + 'color_code': '#6366f1', + 'icon': 'database', + 'category': category_map.get('Konzept'), + 'x': 0, + 'y': 0 + }, + { + 'name': 'Mind-Mapping', + 'description': 'Technik zur visuellen Darstellung von Informationen und Zusammenhängen.', + 'color_code': '#10b981', + 'icon': 'git-branch', + 'category': category_map.get('Prozess'), + 'x': 200, + 'y': -150 + }, + { + 'name': 'Cytoscape.js', + 'description': 'JavaScript-Bibliothek für die Visualisierung und Manipulation von Graphen.', + 'color_code': '#3b82f6', + 'icon': 'code', + 'category': category_map.get('Technologie'), + 'x': 350, + 'y': -50 + }, + { + 'name': 'Socket.IO', + 'description': 'Bibliothek für Echtzeit-Kommunikation zwischen Client und Server.', + 'color_code': '#3b82f6', + 'icon': 'zap', + 'category': category_map.get('Technologie'), + 'x': 350, + 'y': 100 + }, + { + 'name': 'Kollaboration', + 'description': 'Zusammenarbeit mehrerer Benutzer an gemeinsamen Inhalten.', + 'color_code': '#f59e0b', + 'icon': 'users', + 'category': category_map.get('Prozess'), + 'x': 200, + 'y': 150 + }, + { + 'name': 'SQLite', + 'description': 'Leichtgewichtige relationale Datenbank, die ohne Server-Prozess auskommt.', + 'color_code': '#3b82f6', + 'icon': 'database', + 'category': category_map.get('Technologie'), + 'x': 0, + 'y': 200 + }, + { + 'name': 'Flask', + 'description': 'Leichtgewichtiges Python-Webframework für die Entwicklung von Webanwendungen.', + 'color_code': '#3b82f6', + 'icon': 'server', + 'category': category_map.get('Technologie'), + 'x': -200, + 'y': 150 + }, + { + 'name': 'REST API', + 'description': 'Architekturstil für verteilte Systeme, insbesondere Webanwendungen.', + 'color_code': '#10b981', + 'icon': 'link', + 'category': category_map.get('Konzept'), + 'x': -200, + 'y': -150 + }, + { + 'name': 'Dokumentation', + 'description': 'Strukturierte Erfassung und Beschreibung 
von Informationen und Prozessen.', + 'color_code': '#ec4899', + 'icon': 'file-text', + 'category': category_map.get('Dokument'), + 'x': -350, + 'y': 0 + } + ] + + # Knoten in die Datenbank einfügen + node_objects = {} + for node_data in nodes: + category = node_data.pop('category', None) + x = node_data.pop('x', 0) + y = node_data.pop('y', 0) + node = MindMapNode(**node_data) + if category: + node.category_id = category.id + db.session.add(node) + db.session.flush() # Generiert IDs für neue Objekte + node_objects[node.name] = node + + # Beziehungen erstellen + relationships = [ + ('Wissensmanagement', 'Mind-Mapping'), + ('Wissensmanagement', 'Kollaboration'), + ('Wissensmanagement', 'Dokumentation'), + ('Mind-Mapping', 'Cytoscape.js'), + ('Kollaboration', 'Socket.IO'), + ('Wissensmanagement', 'SQLite'), + ('SQLite', 'Flask'), + ('Flask', 'REST API'), + ('REST API', 'Socket.IO'), + ('REST API', 'Dokumentation') + ] + + for parent_name, child_name in relationships: + parent = node_objects.get(parent_name) + child = node_objects.get(child_name) + if parent and child: + parent.children.append(child) + + db.session.commit() + print(f"{len(nodes)} Knoten und {len(relationships)} Beziehungen wurden erstellt.") + +if __name__ == '__main__': + init_db() + print("Datenbank wurde erfolgreich initialisiert!") + print("Sie können die Anwendung jetzt mit 'python app.py' starten") + print("Anmelden mit:") + print(" Admin: username=admin, password=admin") + print(" User: username=user, password=user") \ No newline at end of file diff --git a/migrations/README b/migrations/README new file mode 100644 index 0000000..0e04844 --- /dev/null +++ b/migrations/README @@ -0,0 +1 @@ +Single-database configuration for Flask. diff --git a/migrations/__pycache__/env.cpython-311.pyc b/migrations/__pycache__/env.cpython-311.pyc new file mode 100644 index 0000000..ff9a50d Binary files /dev/null and b/migrations/__pycache__/env.cpython-311.pyc differ diff --git a/migrations/alembic.ini b/migrations/alembic.ini new file mode 100644 index 0000000..ec9d45c --- /dev/null +++ b/migrations/alembic.ini @@ -0,0 +1,50 @@ +# A generic, single database configuration. + +[alembic] +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic,flask_migrate + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[logger_flask_migrate] +level = INFO +handlers = +qualname = flask_migrate + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/migrations/env.py b/migrations/env.py new file mode 100644 index 0000000..4c97092 --- /dev/null +++ b/migrations/env.py @@ -0,0 +1,113 @@ +import logging +from logging.config import fileConfig + +from flask import current_app + +from alembic import context + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. 
+fileConfig(config.config_file_name) +logger = logging.getLogger('alembic.env') + + +def get_engine(): + try: + # this works with Flask-SQLAlchemy<3 and Alchemical + return current_app.extensions['migrate'].db.get_engine() + except (TypeError, AttributeError): + # this works with Flask-SQLAlchemy>=3 + return current_app.extensions['migrate'].db.engine + + +def get_engine_url(): + try: + return get_engine().url.render_as_string(hide_password=False).replace( + '%', '%%') + except AttributeError: + return str(get_engine().url).replace('%', '%%') + + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +config.set_main_option('sqlalchemy.url', get_engine_url()) +target_db = current_app.extensions['migrate'].db + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def get_metadata(): + if hasattr(target_db, 'metadatas'): + return target_db.metadatas[None] + return target_db.metadata + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, target_metadata=get_metadata(), literal_binds=True + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + + # this callback is used to prevent an auto-migration from being generated + # when there are no changes to the schema + # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html + def process_revision_directives(context, revision, directives): + if getattr(config.cmd_opts, 'autogenerate', False): + script = directives[0] + if script.upgrade_ops.is_empty(): + directives[:] = [] + logger.info('No changes in schema detected.') + + conf_args = current_app.extensions['migrate'].configure_args + if conf_args.get("process_revision_directives") is None: + conf_args["process_revision_directives"] = process_revision_directives + + connectable = get_engine() + + with connectable.connect() as connection: + context.configure( + connection=connection, + target_metadata=get_metadata(), + **conf_args + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/migrations/script.py.mako b/migrations/script.py.mako new file mode 100644 index 0000000..2c01563 --- /dev/null +++ b/migrations/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/migrations/versions/__pycache__/d4406f5b12f7_add_password_column_to_user.cpython-311.pyc b/migrations/versions/__pycache__/d4406f5b12f7_add_password_column_to_user.cpython-311.pyc new file mode 100644 index 0000000..bb1e7e0 Binary files /dev/null and b/migrations/versions/__pycache__/d4406f5b12f7_add_password_column_to_user.cpython-311.pyc differ diff --git a/migrations/versions/d4406f5b12f7_add_password_column_to_user.py b/migrations/versions/d4406f5b12f7_add_password_column_to_user.py new file mode 100644 index 0000000..df6a69a --- /dev/null +++ b/migrations/versions/d4406f5b12f7_add_password_column_to_user.py @@ -0,0 +1,46 @@ +"""Add password column to user + +Revision ID: d4406f5b12f7 +Revises: +Create Date: 2025-04-28 21:26:37.430823 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'd4406f5b12f7' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('user', schema=None) as batch_op: + batch_op.add_column(sa.Column('password', sa.String(length=512), nullable=False, server_default="changeme")) + batch_op.add_column(sa.Column('is_active', sa.Boolean(), nullable=True)) + batch_op.add_column(sa.Column('role', sa.String(length=20), nullable=True)) + batch_op.drop_column('last_login') + batch_op.drop_column('bio') + batch_op.drop_column('password_hash') + batch_op.drop_column('is_admin') + batch_op.drop_column('avatar') + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('user', schema=None) as batch_op: + batch_op.add_column(sa.Column('avatar', sa.VARCHAR(length=200), nullable=True)) + batch_op.add_column(sa.Column('is_admin', sa.BOOLEAN(), nullable=True)) + batch_op.add_column(sa.Column('password_hash', sa.VARCHAR(length=128), nullable=True)) + batch_op.add_column(sa.Column('bio', sa.TEXT(), nullable=True)) + batch_op.add_column(sa.Column('last_login', sa.DATETIME(), nullable=True)) + batch_op.drop_column('role') + batch_op.drop_column('is_active') + batch_op.drop_column('password') + + # ### end Alembic commands ### diff --git a/models.py b/models.py new file mode 100644 index 0000000..4f3b78e --- /dev/null +++ b/models.py @@ -0,0 +1,308 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from flask_sqlalchemy import SQLAlchemy +from flask_login import UserMixin +from datetime import datetime +from werkzeug.security import generate_password_hash, check_password_hash +from enum import Enum +import uuid as uuid_pkg +import os + +db = SQLAlchemy() + +# Beziehungstypen für Gedankenverknüpfungen +class RelationType(Enum): + SUPPORTS = "stützt" + CONTRADICTS = "widerspricht" + BUILDS_UPON = "baut auf auf" + GENERALIZES = "verallgemeinert" + SPECIFIES = "spezifiziert" + INSPIRES = "inspiriert" + +# Beziehungstabelle für viele-zu-viele Beziehung zwischen MindMapNodes +node_relationship = db.Table('node_relationship', + db.Column('parent_id', db.Integer, db.ForeignKey('mind_map_node.id'), primary_key=True), + db.Column('child_id', db.Integer, db.ForeignKey('mind_map_node.id'), primary_key=True) +) + +# Beziehungstabelle für öffentliche Knoten und Gedanken +node_thought_association = db.Table('node_thought_association', + db.Column('node_id', db.Integer, db.ForeignKey('mind_map_node.id'), primary_key=True), + db.Column('thought_id', db.Integer, db.ForeignKey('thought.id'), primary_key=True) +) + +# Beziehungstabelle für Benutzer-spezifische Mindmap-Knoten und Gedanken +user_mindmap_thought_association = db.Table('user_mindmap_thought_association', + db.Column('user_mindmap_id', db.Integer, db.ForeignKey('user_mindmap.id'), primary_key=True), + db.Column('thought_id', db.Integer, db.ForeignKey('thought.id'), primary_key=True) +) + +# Beziehungstabelle für Benutzer-Bookmarks von Gedanken +user_thought_bookmark = db.Table('user_thought_bookmark', + db.Column('user_id', db.Integer, db.ForeignKey('user.id'), primary_key=True), + db.Column('thought_id', db.Integer, db.ForeignKey('thought.id'), primary_key=True), + db.Column('created_at', db.DateTime, default=datetime.utcnow) +) + +class User(db.Model, UserMixin): + id = db.Column(db.Integer, primary_key=True) + username = db.Column(db.String(80), unique=True, nullable=False) + email = db.Column(db.String(120), unique=True, nullable=False) + password = db.Column(db.String(512), nullable=False) + created_at = db.Column(db.DateTime, default=datetime.utcnow) + is_active = db.Column(db.Boolean, default=True) + role = db.Column(db.String(20), default="user") # 'user', 'admin', 'moderator' + + # Relationships + threads = db.relationship('Thread', backref='creator', lazy=True) + messages = db.relationship('Message', backref='author', lazy=True) + projects = db.relationship('Project', backref='owner', lazy=True) + + def __repr__(self): + return f'' + + def set_password(self, password): + self.password = generate_password_hash(password) + + def check_password(self, password): + return check_password_hash(self.password, password) + +class Category(db.Model): + """Wissenschaftliche Kategorien für 
+class Category(db.Model):
+    """Scientific categories used to structure the public mindmap"""
+    id = db.Column(db.Integer, primary_key=True)
+    name = db.Column(db.String(100), unique=True, nullable=False)
+    description = db.Column(db.Text)
+    color_code = db.Column(db.String(7))  # Hex color
+    icon = db.Column(db.String(50))
+    parent_id = db.Column(db.Integer, db.ForeignKey('category.id'), nullable=True)
+
+    # Relationships
+    children = db.relationship('Category', backref=db.backref('parent', remote_side=[id]))
+    nodes = db.relationship('MindMapNode', backref='category', lazy=True)
+
+    def __repr__(self):
+        return f'<Category {self.name}>'
+
+class MindMapNode(db.Model):
+    """Public mindmap nodes that are visible to all users"""
+    id = db.Column(db.Integer, primary_key=True)
+    name = db.Column(db.String(100), nullable=False)
+    description = db.Column(db.Text)
+    color_code = db.Column(db.String(7))
+    icon = db.Column(db.String(50))
+    is_public = db.Column(db.Boolean, default=True)
+    created_at = db.Column(db.DateTime, default=datetime.utcnow)
+    created_by_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=True)
+    category_id = db.Column(db.Integer, db.ForeignKey('category.id'), nullable=True)
+
+    __table_args__ = {'extend_existing': True}
+
+    # Tree-structure relationships (a node may have several parents)
+    parents = db.relationship(
+        'MindMapNode',
+        secondary=node_relationship,
+        primaryjoin=(node_relationship.c.child_id == id),
+        secondaryjoin=(node_relationship.c.parent_id == id),
+        backref=db.backref('children', lazy='dynamic'),
+        lazy='dynamic'
+    )
+
+    # Relationships to thoughts
+    thoughts = db.relationship('Thought',
+        secondary=node_thought_association,
+        backref=db.backref('nodes', lazy='dynamic'))
+
+    # Relationship to the creator
+    created_by = db.relationship('User', backref='created_nodes')
+
+    def __repr__(self):
+        return f'<MindMapNode {self.name}>'
+
+    def to_dict(self):
+        return {
+            'id': self.id,
+            'name': self.name,
+            'description': self.description,
+            'color_code': self.color_code,
+            'icon': self.icon,
+            'category_id': self.category_id,
+            'created_at': self.created_at.isoformat() if self.created_at else None
+        }
+
+class UserMindmap(db.Model):
+    """User-specific mindmap that its owner can personalize"""
+    id = db.Column(db.Integer, primary_key=True)
+    name = db.Column(db.String(100), nullable=False)
+    description = db.Column(db.Text)
+    user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
+    created_at = db.Column(db.DateTime, default=datetime.utcnow)
+    last_modified = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
+    is_private = db.Column(db.Boolean, default=True)
+
+    # Relationships to public nodes
+    public_nodes = db.relationship('MindMapNode',
+        secondary='user_mindmap_node',
+        backref=db.backref('in_user_mindmaps', lazy='dynamic'))
+
+    # Relationships to thoughts
+    thoughts = db.relationship('Thought',
+        secondary=user_mindmap_thought_association,
+        backref=db.backref('in_user_mindmaps', lazy='dynamic'))
+
+    # Notes attached to this mindmap
+    notes = db.relationship('MindmapNote', backref='mindmap', lazy=True)
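Because `parents` is a self-referential many-to-many over `node_relationship`, a public node can hang under several parents at once, a DAG rather than a strict tree. A short sketch of that wiring (node names invented, app context assumed):

    from models import MindMapNode, db

    physics = MindMapNode(name="Physics")
    math = MindMapNode(name="Mathematics")
    qm = MindMapNode(name="Quantum Mechanics")

    # The same child can be attached to more than one parent.
    qm.parents.append(physics)
    qm.parents.append(math)
    db.session.add_all([physics, math, qm])
    db.session.commit()

    [child.name for child in physics.children]  # ['Quantum Mechanics']
    qm.to_dict()['name']                        # 'Quantum Mechanics'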
Position""" + user_mindmap_id = db.Column(db.Integer, db.ForeignKey('user_mindmap.id'), primary_key=True) + node_id = db.Column(db.Integer, db.ForeignKey('mind_map_node.id'), primary_key=True) + x_position = db.Column(db.Float, default=0) # Position X auf der Mindmap + y_position = db.Column(db.Float, default=0) # Position Y auf der Mindmap + scale = db.Column(db.Float, default=1.0) # Größe des Knotens + added_at = db.Column(db.DateTime, default=datetime.utcnow) + +class MindmapNote(db.Model): + """Private Notizen der Benutzer zu ihrer Mindmap""" + id = db.Column(db.Integer, primary_key=True) + user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) + mindmap_id = db.Column(db.Integer, db.ForeignKey('user_mindmap.id'), nullable=False) + node_id = db.Column(db.Integer, db.ForeignKey('mind_map_node.id'), nullable=True) + thought_id = db.Column(db.Integer, db.ForeignKey('thought.id'), nullable=True) + content = db.Column(db.Text, nullable=False) + created_at = db.Column(db.DateTime, default=datetime.utcnow) + last_modified = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow) + color_code = db.Column(db.String(7), default="#FFF59D") # Farbe der Notiz + +class Thought(db.Model): + """Gedanken und Inhalte, die in der Mindmap verknüpft werden können""" + id = db.Column(db.Integer, primary_key=True) + title = db.Column(db.String(200), nullable=False) + content = db.Column(db.Text, nullable=False) + abstract = db.Column(db.Text) + keywords = db.Column(db.String(500)) + color_code = db.Column(db.String(7)) # Hex color code + source_type = db.Column(db.String(50)) # PDF, Markdown, Text etc. + branch = db.Column(db.String(100), nullable=False) + user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) + created_at = db.Column(db.DateTime, default=datetime.utcnow) + last_modified = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow) + + # Beziehungen + comments = db.relationship('Comment', backref='thought', lazy=True, cascade="all, delete-orphan") + ratings = db.relationship('ThoughtRating', backref='thought', lazy=True) + + outgoing_relations = db.relationship( + 'ThoughtRelation', + foreign_keys='ThoughtRelation.source_id', + backref='source_thought', + lazy=True + ) + incoming_relations = db.relationship( + 'ThoughtRelation', + foreign_keys='ThoughtRelation.target_id', + backref='target_thought', + lazy=True + ) + + @property + def average_rating(self): + if not self.ratings: + return 0 + return sum(r.relevance_score for r in self.ratings) / len(self.ratings) + +class ThoughtRelation(db.Model): + """Beziehungen zwischen Gedanken""" + id = db.Column(db.Integer, primary_key=True) + source_id = db.Column(db.Integer, db.ForeignKey('thought.id'), nullable=False) + target_id = db.Column(db.Integer, db.ForeignKey('thought.id'), nullable=False) + relation_type = db.Column(db.Enum(RelationType), nullable=False) + created_at = db.Column(db.DateTime, default=datetime.utcnow) + created_by_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) + + # Beziehung zum Ersteller + created_by = db.relationship('User', backref='created_relations') + +class ThoughtRating(db.Model): + """Bewertungen von Gedanken durch Benutzer""" + id = db.Column(db.Integer, primary_key=True) + thought_id = db.Column(db.Integer, db.ForeignKey('thought.id'), nullable=False) + user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False) + relevance_score = db.Column(db.Integer, nullable=False) # 1-5 + created_at = 
+class Comment(db.Model):
+    """Comments on thoughts"""
+    id = db.Column(db.Integer, primary_key=True)
+    content = db.Column(db.Text, nullable=False)
+    thought_id = db.Column(db.Integer, db.ForeignKey('thought.id'), nullable=False)
+    user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
+    created_at = db.Column(db.DateTime, default=datetime.utcnow)
+    last_modified = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
+
+# Thread model
+class Thread(db.Model):
+    id = db.Column(db.Integer, primary_key=True)
+    title = db.Column(db.String(200), nullable=False)
+    description = db.Column(db.Text)
+    created_at = db.Column(db.DateTime, default=datetime.utcnow)
+    updated_at = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
+    user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
+
+    # Relationships
+    messages = db.relationship('Message', backref='thread', lazy=True, cascade="all, delete-orphan")
+
+    def __repr__(self):
+        return f'<Thread {self.title}>'
+
+# Message model
+class Message(db.Model):
+    id = db.Column(db.Integer, primary_key=True)
+    content = db.Column(db.Text, nullable=False)
+    created_at = db.Column(db.DateTime, default=datetime.utcnow)
+    user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
+    thread_id = db.Column(db.Integer, db.ForeignKey('thread.id'), nullable=False)
+    role = db.Column(db.String(20), default="user")  # 'user', 'assistant', 'system'
+
+    def __repr__(self):
+        return f'<Message {self.id}>'
+
+# Project model
+class Project(db.Model):
+    id = db.Column(db.Integer, primary_key=True)
+    title = db.Column(db.String(150), nullable=False)
+    description = db.Column(db.Text)
+    created_at = db.Column(db.DateTime, default=datetime.utcnow)
+    updated_at = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
+    user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
+    category_id = db.Column(db.Integer, db.ForeignKey('category.id'))
+
+    # Relationships
+    documents = db.relationship('Document', backref='project', lazy=True, cascade="all, delete-orphan")
+
+    def __repr__(self):
+        return f'<Project {self.title}>'
+
+# Document model
+class Document(db.Model):
+    id = db.Column(db.Integer, primary_key=True)
+    title = db.Column(db.String(150), nullable=False)
+    content = db.Column(db.Text)
+    created_at = db.Column(db.DateTime, default=datetime.utcnow)
+    updated_at = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
+    project_id = db.Column(db.Integer, db.ForeignKey('project.id'), nullable=False)
+    filename = db.Column(db.String(150), nullable=True)
+    file_path = db.Column(db.String(300), nullable=True)
+    file_type = db.Column(db.String(50), nullable=True)
+    file_size = db.Column(db.Integer, nullable=True)
+
+    def __repr__(self):
+        return f'<Document {self.title}>'
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..f31db59
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,17 @@
+flask==2.2.5
+flask-login==0.6.2
+flask-wtf
+email-validator
+python-dotenv
+werkzeug==2.2.3
+flask-sqlalchemy==3.0.5
+openai
+requests==2.31.0
+gunicorn==21.2.0
+#pillow==10.0.1
+pytest==7.4.0
+pytest-flask==1.2.0
+Flask-Migrate
+flask-socketio==5.3.6
+python-engineio==4.8.2
+python-socketio==5.11.1
\ No newline at end of file
diff --git a/static/css/all.min.css b/static/css/all.min.css
new file mode 100644
index 0000000..3757245
--- /dev/null
+++ b/static/css/all.min.css
@@ -0,0 +1,27 @@
+/*
+ * Font Awesome 6.4.0
+ *
+ * This is a placeholder file. For production, you should:
+ * 1. Download Font Awesome from https://fontawesome.com/download
+ * 2. Extract the downloaded package
+ * 3. Copy the 'css/all.min.css' file to this location
+ * 4. Copy the 'webfonts' folder to '/static/webfonts/'
+ *
+ * Alternatively, you can install via npm and copy the files:
+ * npm install @fortawesome/fontawesome-free
+ * cp -r node_modules/@fortawesome/fontawesome-free/css/all.min.css static/css/
+ * cp -r node_modules/@fortawesome/fontawesome-free/webfonts/ static/
+ */
+
+/* Placeholder styles for common Font Awesome icons */
+.fa, .fas, .far, .fab {
+    display: inline-block;
+    width: 1em;
+    text-align: center;
+}
+
+/* Warning message */
+body::before {
+    content: "Font Awesome CSS placeholder. Please replace with the actual file.";
+    display: none;
+}
\ No newline at end of file
diff --git a/static/css/assistant.css b/static/css/assistant.css
new file mode 100644
index 0000000..22e9eea
--- /dev/null
+++ b/static/css/assistant.css
@@ -0,0 +1,203 @@
+/* ChatGPT Assistant Styles - Improved Version */
+#chatgpt-assistant {
+    font-family: 'Inter', sans-serif;
+}
+
+#assistant-chat {
+    transition: max-height 0.4s cubic-bezier(0.25, 0.1, 0.25, 1),
+                opacity 0.3s cubic-bezier(0.25, 0.1, 0.25, 1);
+    box-shadow: 0 10px 25px -5px rgba(0, 0, 0, 0.25);
+    border-radius: 0.75rem;
+    overflow: hidden;
+    max-width: calc(100vw - 2rem);
+}
+
+#assistant-toggle {
+    transition: transform 0.3s ease, background-color 0.2s ease;
+    box-shadow: 0 4px 12px rgba(0, 0, 0, 0.15);
+    z-index: 60;
+}
+
+#assistant-toggle:hover {
+    transform: scale(1.1) rotate(10deg);
+}
+
+#assistant-history {
+    scrollbar-width: thin;
+    scrollbar-color: rgba(156, 163, 175, 0.5) transparent;
+}
+
+#assistant-history::-webkit-scrollbar {
+    width: 6px;
+}
+
+#assistant-history::-webkit-scrollbar-track {
+    background: transparent;
+}
+
+#assistant-history::-webkit-scrollbar-thumb {
+    background-color: rgba(156, 163, 175, 0.5);
+    border-radius: 3px;
+}
+
+.dark #assistant-history::-webkit-scrollbar-thumb {
+    background-color: rgba(156, 163, 175, 0.3);
+}
+
+/* Improved message bubbles with shadow and animation */
+#assistant-history .flex > div {
+    box-shadow: 0 2px 5px rgba(0, 0, 0, 0.05);
+    animation: messageAppear 0.3s ease-out forwards;
+    opacity: 0;
+    transform: translateY(10px);
+}
+
+@keyframes messageAppear {
+    to {
+        opacity: 1;
+        transform: translateY(0);
+    }
+}
+
+/* Staggered animation delays for messages */
+#assistant-history .flex:nth-child(1) > div { animation-delay: 0.05s; }
+#assistant-history .flex:nth-child(2) > div { animation-delay: 0.1s; }
+#assistant-history .flex:nth-child(3) > div { animation-delay: 0.15s; }
+#assistant-history .flex:nth-child(4) > div { animation-delay: 0.2s; }
+#assistant-history .flex:nth-child(5) > div { animation-delay: 0.25s; }
+
+/* Suggestion styling */
+#assistant-suggestions {
+    padding: 0.5rem 0.75rem;
+    transition: all 0.3s ease;
+}
+
+.suggestion-pill {
+    animation: pillAppear 0.4s ease forwards;
+    opacity: 0;
+    transform: scale(0.9);
+    box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
+}
+
+@keyframes pillAppear {
+    to {
+        opacity: 1;
+        transform: scale(1);
+    }
+}
+
+/* Staggered delays for the individual suggestions */
+#assistant-suggestions button:nth-child(1) { animation-delay: 0.1s; }
+#assistant-suggestions button:nth-child(2) { animation-delay: 0.2s; }
+#assistant-suggestions button:nth-child(3) { animation-delay: 0.3s; }
+
+/* Make room for notifications so they do not overlap the assistant */
+.notification-area {
+    bottom: 5rem;
+}
+
+/* Improved glassmorphism effect */
+.glass-morphism {
+    background: rgba(255, 255, 255, 0.25);
+    backdrop-filter: blur(12px);
+    -webkit-backdrop-filter: blur(12px);
+    box-shadow: 0 8px 32px 0 rgba(31, 38, 135, 0.15);
+    border: 1px solid rgba(255, 255, 255, 0.2);
+}
+
+.dark .glass-morphism {
+    background: rgba(15, 23, 42, 0.35);
+    border: 1px solid rgba(255, 255, 255, 0.06);
+    box-shadow: 0 8px 32px 0 rgba(0, 0, 0, 0.4);
+}
+
+/* Improved color palette for the dark theme */
+.dark {
+    --tw-bg-opacity: 1;
+    background-color: rgba(10, 15, 25, var(--tw-bg-opacity)) !important;
+    min-height: 100vh;
+}
+
+.dark .bg-dark-900 {
+    --tw-bg-opacity: 1;
+    background-color: rgba(10, 15, 25, var(--tw-bg-opacity)) !important;
+}
+
+.dark .bg-dark-800 {
+    --tw-bg-opacity: 1;
+    background-color: rgba(15, 23, 42, var(--tw-bg-opacity)) !important;
+}
+
+.dark .bg-dark-700 {
+    --tw-bg-opacity: 1;
+    background-color: rgba(23, 33, 64, var(--tw-bg-opacity)) !important;
+}
+
+/* Typing indicator animation styles */
+.typing-indicator {
+    display: flex;
+    align-items: center;
+}
+
+.typing-indicator span {
+    height: 8px;
+    width: 8px;
+    background-color: #888;
+    border-radius: 50%;
+    display: inline-block;
+    margin: 0 2px;
+    opacity: 0.4;
+    animation: bounce 1.4s infinite ease-in-out;
+}
+
+.typing-indicator span:nth-child(1) { animation-delay: 0s; }
+.typing-indicator span:nth-child(2) { animation-delay: 0.2s; }
+.typing-indicator span:nth-child(3) { animation-delay: 0.4s; }
+
+@keyframes bounce {
+    0%, 80%, 100% { transform: translateY(0); }
+    40% { transform: translateY(-8px); }
+}
+
+/* Chat input focus effect */
+#assistant-chat input:focus {
+    border-color: var(--primary-500, #3B82F6);
+    box-shadow: 0 0 0 2px rgba(59, 130, 246, 0.25);
+}
+
+.dark #assistant-chat input:focus {
+    box-shadow: 0 0 0 2px rgba(59, 130, 246, 0.4);
+}
+
+/* Improved responsive layouts */
+@media (max-width: 640px) {
+    #assistant-chat {
+        width: calc(100vw - 2rem) !important;
+    }
+
+    #chatgpt-assistant {
+        right: 1rem;
+        bottom: 1rem;
+    }
+}
+
+/* Keep the footer at the bottom */
+html, body {
+    height: 100%;
+    margin: 0;
+    min-height: 100vh;
+}
+
+body {
+    display: flex;
+    flex-direction: column;
+    min-height: 100vh;
+}
+
+main {
+    flex: 1 0 auto;
+}
+
+footer {
+    flex-shrink: 0;
+}
\ No newline at end of file
diff --git a/static/css/base-styles.css b/static/css/base-styles.css
new file mode 100644
index 0000000..989c7fd
--- /dev/null
+++ b/static/css/base-styles.css
@@ -0,0 +1,526 @@
+/* Base Styles - Dark, Mystical Theme */
+
+/* Global Variables */
+:root {
+    --font-sans: 'Inter', system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif;
+    --font-mono: 'JetBrains Mono', SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace;
+
+    /* Light Theme */
+    --bg-primary-light: #f8fafc;
+    --bg-secondary-light: #f1f5f9;
+    --bg-tertiary-light: #e2e8f0;
+    --text-primary-light: #1e293b;
+    --text-secondary-light: #475569;
+    --accent-primary-light: #7c3aed;
+    --accent-secondary-light: #8b5cf6;
+    --accent-tertiary-light: #a78bfa;
+    --border-light: #e2e8f0;
+    --shadow-light: 0 4px 6px -1px rgba(0, 0, 0, 0.1), 0 2px 4px -1px rgba(0, 0, 0, 0.06);
+    --glow-light: 0 0 15px rgba(139, 92, 246, 0.3);
+
+    /* Dark Theme */
+    --bg-primary-dark: #0a0e19;
+    --bg-secondary-dark: #111827;
+    --bg-tertiary-dark: #1f2937;
+    --text-primary-dark: #f9fafb;
+    --text-secondary-dark: #e5e7eb;
+    --accent-primary-dark: #6d28d9;
+    --accent-secondary-dark: #8b5cf6;
+    --accent-tertiary-dark: #a78bfa;
+    --border-dark: #1f2937;
+    --shadow-dark: 0 4px 6px -1px rgba(0, 0, 0, 0.5), 0 2px 4px -1px rgba(0, 0, 0, 0.3);
+    --glow-dark: 0 0 15px rgba(124, 58, 237, 0.5);
+
+    /* Transitions */
+    --transition-fast: 150ms ease-in-out;
+    --transition-normal: 300ms ease-in-out;
+    --transition-slow: 500ms ease-in-out;
+
+    /* Colors optimized for light mode */
+    --light-bg: #f9fafb;
+    --light-text: #1e293b;
+    --light-heading: #0f172a;
+    --light-primary: #3b82f6;
+    --light-primary-hover: #4f46e5;
+    --light-secondary: #6b7280;
+    --light-border: #e5e7eb;
+    --light-card-bg: rgba(255, 255, 255, 0.92);
+    --light-navbar-bg: rgba(255, 255, 255, 0.92);
+    --light-input-bg: #ffffff;
+    --light-input-border: #d1d5db;
+    --light-input-focus: #3b82f6;
+    --light-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.1), 0 2px 4px -1px rgba(0, 0, 0, 0.06);
+}
+
+/* Base Styles */
+html {
+    font-family: var(--font-sans);
+    -webkit-font-smoothing: antialiased;
+    -moz-osx-font-smoothing: grayscale;
+    scroll-behavior: smooth;
+}
+
+body {
+    transition: background-color var(--transition-normal), color var(--transition-normal);
+    overflow-x: hidden;
+    min-height: 100vh;
+    font-family: var(--font-sans);
+    line-height: 1.5;
+}
+
+/* Dark Mode */
+html.dark body {
+    background-color: var(--bg-primary-dark);
+    color: var(--text-primary-dark);
+}
+
+/* Light Mode */
+body:not(.dark) {
+    background-color: var(--light-bg);
+    color: var(--light-text);
+}
+
+/* Typography */
+h1, h2, h3, h4, h5, h6 {
+    font-weight: 600;
+    line-height: 1.2;
+}
+
+.hero-heading {
+    font-size: 2.75rem;
+    font-weight: 700;
+    letter-spacing: -0.02em;
+    line-height: 1.1;
+}
+
+.section-heading {
+    font-size: 2rem;
+    font-weight: 700;
+    letter-spacing: -0.01em;
+}
+
+.gradient-text {
+    background-clip: text;
+    -webkit-background-clip: text;
+    color: transparent;
+    display: inline-block;
+}
+
+html.dark .gradient-text {
+    background-image: linear-gradient(135deg, var(--accent-primary-dark), var(--accent-secondary-dark));
+    text-shadow: var(--glow-dark);
+}
+
+.gradient-text {
+    background-image: linear-gradient(135deg, var(--accent-primary-light), var(--accent-secondary-light));
+    text-shadow: var(--glow-light);
+}
+
+/* Mystical elements */
+.mystical-border {
+    position: relative;
+}
+
+.mystical-border::before {
+    content: '';
+    position: absolute;
+    top: 0;
+    left: 0;
+    right: 0;
+    bottom: 0;
+    border: 1px solid;
+    border-radius: inherit;
+    pointer-events: none;
+    opacity: 0.3;
+}
+
+html.dark .mystical-border::before {
+    border-color: var(--accent-primary-dark);
+    box-shadow: var(--glow-dark);
+}
+
+.mystical-border::before {
+    border-color: var(--accent-primary-light);
+    box-shadow: var(--glow-light);
+}
+
+/* Navigation Links */
+.nav-link {
+    position: relative;
+    padding: 0.5rem 0.75rem;
+    border-radius: 0.5rem;
+    transition: var(--transition-normal);
+}
+
+html.dark .nav-link {
+    color: var(--text-secondary-dark);
+}
+
+.nav-link {
+    color: var(--text-secondary-light);
+}
+
+html.dark .nav-link:hover {
+    color: var(--text-primary-dark);
+    background-color: rgba(31, 41, 55, 0.5);
+}
+
+.nav-link:hover {
+    color: var(--text-primary-light);
+    background-color: rgba(241, 245, 249, 0.5);
+}
+
+html.dark .nav-link-active {
+    color: var(--accent-tertiary-dark);
+    background-color: rgba(109, 40, 217, 0.15);
+}
+
+.nav-link-light-active {
+    color: var(--accent-primary-light);
+    background-color: rgba(139, 92, 246, 0.1);
+}
+
+/* Glass Morphism Effects */
+.glass-navbar-dark {
+    background-color: rgba(10, 14, 25, 0.8);
+    backdrop-filter: blur(10px);
+    -webkit-backdrop-filter: blur(10px);
+    border-color: rgba(255, 255, 255, 0.1);
+    box-shadow: 0 4px 30px rgba(0, 0, 0, 0.3);
+}
+
+.glass-navbar-light {
+    background-color: rgba(255, 255, 255, 0.8);
+    backdrop-filter: blur(10px);
+    -webkit-backdrop-filter: blur(10px);
+    border-color: rgba(226, 232, 240, 0.5);
+    box-shadow: 0 4px 30px rgba(0, 0, 0, 0.1);
+}
+
+.glass-morphism {
+    transition: background-color var(--transition-normal), backdrop-filter var(--transition-normal);
+}
+
+/* Cards */
+.mystical-card {
+    border-radius: 0.75rem;
+    overflow: hidden;
+    transition: var(--transition-normal);
+}
+
+html.dark .mystical-card {
+    background-color: var(--bg-secondary-dark);
+    border: 1px solid var(--border-dark);
+    box-shadow: var(--shadow-dark);
+}
+
+.mystical-card {
+    background-color: var(--bg-secondary-light);
+    border: 1px solid var(--border-light);
+    box-shadow: var(--shadow-light);
+}
+
+html.dark .mystical-card:hover {
+    box-shadow: var(--glow-dark), var(--shadow-dark);
+    border-color: var(--accent-primary-dark);
+}
+
+.mystical-card:hover {
+    box-shadow: var(--glow-light), var(--shadow-light);
+    border-color: var(--accent-primary-light);
+}
+
+/* Buttons */
+.mystical-button {
+    padding: 0.625rem 1.25rem;
+    border-radius: 0.5rem;
+    font-weight: 500;
+    transition: var(--transition-normal);
+    position: relative;
+    overflow: hidden;
+}
+
+.mystical-button::before {
+    content: '';
+    position: absolute;
+    top: 0;
+    left: 0;
+    width: 100%;
+    height: 100%;
+    background: linear-gradient(135deg, transparent, rgba(255, 255, 255, 0.05), transparent);
+    transform: translateX(-100%);
+    transition: transform 0.8s ease-in-out;
+}
+
+.mystical-button:hover::before {
+    transform: translateX(100%);
+}
+
+html.dark .mystical-button-primary {
+    background-color: var(--accent-primary-dark);
+    color: white;
+}
+
+.mystical-button-primary {
+    background-color: var(--accent-primary-light);
+    color: white;
+}
+
+html.dark .mystical-button-primary:hover {
+    background-color: var(--accent-secondary-dark);
+    box-shadow: var(--glow-dark);
+}
+
+.mystical-button-primary:hover {
+    background-color: var(--accent-secondary-light);
+    box-shadow: var(--glow-light);
+}
+
+html.dark .mystical-button-secondary {
+    background-color: var(--bg-tertiary-dark);
+    color: var(--text-primary-dark);
+    border: 1px solid var(--border-dark);
+}
+
+.mystical-button-secondary {
+    background-color: var(--bg-tertiary-light);
+    color: var(--text-primary-light);
+    border: 1px solid var(--border-light);
+}
+
+html.dark .mystical-button-secondary:hover {
+    background-color: var(--bg-secondary-dark);
+    border-color: var(--accent-tertiary-dark);
+}
+
+.mystical-button-secondary:hover {
+    background-color: var(--bg-secondary-light);
+    border-color: var(--accent-tertiary-light);
+}
+
+/* Inputs */
+.mystical-input {
+    padding: 0.5rem 0.75rem;
+    border-radius: 0.5rem;
+    transition: var(--transition-normal);
+    width: 100%;
+    outline: none;
+}
+
+html.dark .mystical-input {
+    background-color: var(--bg-tertiary-dark);
+    border: 1px solid var(--border-dark);
+    color: var(--text-primary-dark);
+}
+
+.mystical-input {
+    background-color: var(--bg-tertiary-light);
+    border: 1px solid var(--border-light);
+    color: var(--text-primary-light);
+}
+
+html.dark .mystical-input:focus {
+    border-color: var(--accent-tertiary-dark);
+    box-shadow: 0 0 0 3px rgba(139, 92, 246, 0.25);
+}
+
+.mystical-input:focus {
+    border-color: var(--accent-tertiary-light);
+    box-shadow: 0 0 0 3px rgba(139, 92, 246, 0.15);
+}
+
+/* Animations */
+@keyframes floatAnimation {
+    0% { transform: translateY(0); }
+    50% { transform: translateY(-5px); }
+    100% { transform: translateY(0); }
+}
+
+.animate-float {
+    animation: floatAnimation 3s ease-in-out infinite;
+}
+
+@keyframes fadeIn {
+    from { opacity: 0; }
+    to { opacity: 1; }
+}
+
+.animate-fade-in {
+    animation: fadeIn 0.5s ease-out forwards;
+}
+
+/* Scroll Bars */
+::-webkit-scrollbar {
+    width: 0.5rem;
+    height: 0.5rem;
+}
+
+html.dark ::-webkit-scrollbar-track {
+    background: var(--bg-secondary-dark);
+}
+
+::-webkit-scrollbar-track {
+    background: var(--bg-secondary-light);
+}
+
+html.dark ::-webkit-scrollbar-thumb {
+    background: var(--accent-primary-dark);
+    border-radius: 0.25rem;
+}
+
+::-webkit-scrollbar-thumb {
+    background: var(--accent-primary-light);
+    border-radius: 0.25rem;
+}
+
+html.dark ::-webkit-scrollbar-thumb:hover {
+    background: var(--accent-secondary-dark);
+}
+
+::-webkit-scrollbar-thumb:hover {
+    background: var(--accent-secondary-light);
+}
+
+/* Responsive Utilities */
+@media (max-width: 640px) {
+    .hero-heading {
+        font-size: 2rem;
+    }
+
+    .section-heading {
+        font-size: 1.5rem;
+    }
+}
+
+/* Additional background elements */
+.mystical-dot {
+    position: absolute;
+    border-radius: 50%;
+    opacity: 0.15;
+    filter: blur(3px);
+    z-index: -1;
+    transition: opacity var(--transition-normal);
+}
+
+html.dark .mystical-dot {
+    background-color: var(--accent-primary-dark);
+    box-shadow: 0 0 15px var(--accent-primary-dark);
+}
+
+.mystical-dot {
+    background-color: var(--accent-primary-light);
+    box-shadow: 0 0 15px var(--accent-primary-light);
+}
+
+/* Accessibility */
+.sr-only {
+    position: absolute;
+    width: 1px;
+    height: 1px;
+    padding: 0;
+    margin: -1px;
+    overflow: hidden;
+    clip: rect(0, 0, 0, 0);
+    white-space: nowrap;
+    border-width: 0;
+}
+
+/* Focus styles for keyboard navigation */
+:focus-visible {
+    outline: 2px solid var(--accent-primary-light);
+    outline-offset: 2px;
+}
+
+html.dark :focus-visible {
+    outline-color: var(--accent-primary-dark);
+}
+
+/* Light mode headings */
+body:not(.dark) h1,
+body:not(.dark) h2,
+body:not(.dark) h3,
+body:not(.dark) h4,
+body:not(.dark) h5,
+body:not(.dark) h6 {
+    color: var(--light-heading);
+}
+
+/* Light mode links */
+body:not(.dark) a {
+    color: var(--light-primary);
+}
+
+body:not(.dark) a:hover {
+    color: var(--light-primary-hover);
+}
+
+/* Light mode buttons */
+body:not(.dark) .btn,
+body:not(.dark) button:not(.toggle) {
+    background-color: var(--light-primary);
+    color: white;
+    border: none;
+    box-shadow: var(--light-shadow);
+    border-radius: 0.375rem;
+    padding: 0.5rem 1rem;
+    transition: all 0.3s ease;
+}
+
+body:not(.dark) .btn:hover,
+body:not(.dark) button:not(.toggle):hover {
+    background-color: var(--light-primary-hover);
+    transform: translateY(-2px);
+    box-shadow: 0 6px 10px rgba(0, 0, 0, 0.1);
+}
+
+/* Light mode cards and panels */
+body:not(.dark) .card,
+body:not(.dark) .panel {
+    background-color: var(--light-card-bg);
+    border: 1px solid var(--light-border);
+    border-radius: 0.5rem;
+    box-shadow: var(--light-shadow);
+}
+
+/* Light mode tables */
+body:not(.dark) table {
+    background-color: var(--light-card-bg);
+    border-collapse: collapse;
+}
+
+body:not(.dark) th {
+    background-color: var(--light-bg);
+    color: var(--light-heading);
+    border-bottom: 1px solid var(--light-border);
+}
+
+body:not(.dark) td {
+    border-bottom: 1px solid var(--light-border);
+}
+
+/* Light mode inputs */
+body:not(.dark) input,
+body:not(.dark) textarea,
+body:not(.dark) select {
+    background-color: var(--light-input-bg);
+    border: 1px solid var(--light-input-border);
+    color: var(--light-text);
+    border-radius: 0.375rem;
+    padding: 0.5rem;
+}
+
+body:not(.dark) input:focus,
+body:not(.dark) textarea:focus,
+body:not(.dark) select:focus {
+    border-color: var(--light-input-focus);
+    box-shadow: 0 0 0 3px rgba(59, 130, 246, 0.1);
+    outline: none;
+}
+
+/* Improve the navbar in light mode */
+body:not(.dark) nav,
+body:not(.dark) .navbar {
+    background-color: var(--light-navbar-bg);
+    box-shadow: var(--light-shadow);
+    border-bottom: 1px solid var(--light-border);
+}
\ No newline at end of file
diff --git a/static/css/main.css b/static/css/main.css
new file mode 100644
index 0000000..94cea3b
--- /dev/null
+++ b/static/css/main.css
@@ -0,0 +1,3884 @@
+*, ::before, ::after {
+  --tw-border-spacing-x: 0;
+  --tw-border-spacing-y: 0;
+  --tw-translate-x: 0;
+  --tw-translate-y: 0;
+  --tw-rotate: 0;
+  --tw-skew-x: 0;
+  --tw-skew-y: 0;
+  --tw-scale-x: 1;
+  --tw-scale-y: 1;
+  --tw-pan-x: ;
+  --tw-pan-y: ;
+  --tw-pinch-zoom: ;
+  --tw-scroll-snap-strictness: proximity;
+  --tw-gradient-from-position: ;
+  --tw-gradient-via-position: ;
+  --tw-gradient-to-position: ;
+  --tw-ordinal: ;
+  --tw-slashed-zero: ;
+  --tw-numeric-figure: ;
+  --tw-numeric-spacing: ;
+  --tw-numeric-fraction: ;
+  --tw-ring-inset: ;
+  --tw-ring-offset-width: 0px;
+  --tw-ring-offset-color: #fff;
+  --tw-ring-color: rgb(59 130 246 / 0.5);
+  --tw-ring-offset-shadow: 0 0 #0000;
+  --tw-ring-shadow: 0 0 #0000;
+  --tw-shadow: 0 0 #0000;
+  --tw-shadow-colored: 0 0 #0000;
+  --tw-blur: ;
+  --tw-brightness: ;
+  --tw-contrast: ;
+  --tw-grayscale: ;
+  --tw-hue-rotate: ;
+  --tw-invert: ;
+  --tw-saturate: ;
+  --tw-sepia: ;
+  --tw-drop-shadow: ;
+  --tw-backdrop-blur: ;
+  --tw-backdrop-brightness: ;
+  --tw-backdrop-contrast: ;
+  --tw-backdrop-grayscale: ;
+  --tw-backdrop-hue-rotate: ;
+  --tw-backdrop-invert: ;
+  --tw-backdrop-opacity: ;
+  --tw-backdrop-saturate: ;
+  --tw-backdrop-sepia: ;
+  --tw-contain-size: ;
+  --tw-contain-layout: ;
+  --tw-contain-paint: ;
+  --tw-contain-style: ;
+}
+
+::backdrop {
+  --tw-border-spacing-x: 0;
+  --tw-border-spacing-y: 0;
+  --tw-translate-x: 0;
+  --tw-translate-y: 0;
+  --tw-rotate: 0;
+  --tw-skew-x: 0;
+  --tw-skew-y: 0;
+  --tw-scale-x: 1;
+  --tw-scale-y: 1;
+  --tw-pan-x: ;
+  --tw-pan-y: ;
+  --tw-pinch-zoom: ;
+  --tw-scroll-snap-strictness: proximity;
+  --tw-gradient-from-position: ;
+  --tw-gradient-via-position: ;
+  --tw-gradient-to-position: ;
+  --tw-ordinal: ;
+  --tw-slashed-zero: ;
+  --tw-numeric-figure: ;
+  --tw-numeric-spacing: ;
+  --tw-numeric-fraction: ;
+  --tw-ring-inset: ;
+  --tw-ring-offset-width: 0px;
+  --tw-ring-offset-color: #fff;
+  --tw-ring-color: rgb(59 130 246 / 0.5);
+  --tw-ring-offset-shadow: 0 0 #0000;
+  --tw-ring-shadow: 0 0 #0000;
+  --tw-shadow: 0 0 #0000;
+  --tw-shadow-colored: 0 0 #0000;
+  --tw-blur: ;
+  --tw-brightness: ;
+  --tw-contrast: ;
+  --tw-grayscale: ;
+  --tw-hue-rotate: ;
+  --tw-invert: ;
+  --tw-saturate: ;
+  --tw-sepia: ;
+  --tw-drop-shadow: ;
+  --tw-backdrop-blur: ;
+  --tw-backdrop-brightness: ;
+  --tw-backdrop-contrast: ;
+  --tw-backdrop-grayscale: ;
+  --tw-backdrop-hue-rotate: ;
+  --tw-backdrop-invert: ;
+  --tw-backdrop-opacity: ;
+  --tw-backdrop-saturate: ;
+  --tw-backdrop-sepia: ;
+  --tw-contain-size: ;
+  --tw-contain-layout: ;
+  --tw-contain-paint: ;
+  --tw-contain-style: ;
+}
+
+/*
+!
tailwindcss v3.4.17 | MIT License | https://tailwindcss.com +*/ + +/* +1. Prevent padding and border from affecting element width. (https://github.com/mozdevs/cssremedy/issues/4) +2. Allow adding a border to an element by just adding a border-width. (https://github.com/tailwindcss/tailwindcss/pull/116) +*/ + +*, +::before, +::after { + box-sizing: border-box; + /* 1 */ + border-width: 0; + /* 2 */ + border-style: solid; + /* 2 */ + border-color: #e5e7eb; + /* 2 */ +} + +::before, +::after { + --tw-content: ''; +} + +/* +1. Use a consistent sensible line-height in all browsers. +2. Prevent adjustments of font size after orientation changes in iOS. +3. Use a more readable tab size. +4. Use the user's configured `sans` font-family by default. +5. Use the user's configured `sans` font-feature-settings by default. +6. Use the user's configured `sans` font-variation-settings by default. +7. Disable tap highlights on iOS +*/ + +html, +:host { + line-height: 1.5; + /* 1 */ + -webkit-text-size-adjust: 100%; + /* 2 */ + -moz-tab-size: 4; + /* 3 */ + -o-tab-size: 4; + tab-size: 4; + /* 3 */ + font-family: Inter, ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont, sans-serif; + /* 4 */ + font-feature-settings: normal; + /* 5 */ + font-variation-settings: normal; + /* 6 */ + -webkit-tap-highlight-color: transparent; + /* 7 */ +} + +/* +1. Remove the margin in all browsers. +2. Inherit line-height from `html` so users can set them as a class directly on the `html` element. +*/ + +body { + margin: 0; + /* 1 */ + line-height: inherit; + /* 2 */ +} + +/* +1. Add the correct height in Firefox. +2. Correct the inheritance of border color in Firefox. (https://bugzilla.mozilla.org/show_bug.cgi?id=190655) +3. Ensure horizontal rules are visible by default. +*/ + +hr { + height: 0; + /* 1 */ + color: inherit; + /* 2 */ + border-top-width: 1px; + /* 3 */ +} + +/* +Add the correct text decoration in Chrome, Edge, and Safari. +*/ + +abbr:where([title]) { + -webkit-text-decoration: underline dotted; + text-decoration: underline dotted; +} + +/* +Remove the default font size and weight for headings. +*/ + +h1, +h2, +h3, +h4, +h5, +h6 { + font-size: inherit; + font-weight: inherit; +} + +/* +Reset links to optimize for opt-in styling instead of opt-out. +*/ + +a { + color: inherit; + text-decoration: inherit; +} + +/* +Add the correct font weight in Edge and Safari. +*/ + +b, +strong { + font-weight: bolder; +} + +/* +1. Use the user's configured `mono` font-family by default. +2. Use the user's configured `mono` font-feature-settings by default. +3. Use the user's configured `mono` font-variation-settings by default. +4. Correct the odd `em` font sizing in all browsers. +*/ + +code, +kbd, +samp, +pre { + font-family: JetBrains Mono, ui-monospace, SFMono-Regular, monospace; + /* 1 */ + font-feature-settings: normal; + /* 2 */ + font-variation-settings: normal; + /* 3 */ + font-size: 1em; + /* 4 */ +} + +/* +Add the correct font size in all browsers. +*/ + +small { + font-size: 80%; +} + +/* +Prevent `sub` and `sup` elements from affecting the line height in all browsers. +*/ + +sub, +sup { + font-size: 75%; + line-height: 0; + position: relative; + vertical-align: baseline; +} + +sub { + bottom: -0.25em; +} + +sup { + top: -0.5em; +} + +/* +1. Remove text indentation from table contents in Chrome and Safari. (https://bugs.chromium.org/p/chromium/issues/detail?id=999088, https://bugs.webkit.org/show_bug.cgi?id=201297) +2. Correct table border color inheritance in all Chrome and Safari. 
(https://bugs.chromium.org/p/chromium/issues/detail?id=935729, https://bugs.webkit.org/show_bug.cgi?id=195016) +3. Remove gaps between table borders by default. +*/ + +table { + text-indent: 0; + /* 1 */ + border-color: inherit; + /* 2 */ + border-collapse: collapse; + /* 3 */ +} + +/* +1. Change the font styles in all browsers. +2. Remove the margin in Firefox and Safari. +3. Remove default padding in all browsers. +*/ + +button, +input, +optgroup, +select, +textarea { + font-family: inherit; + /* 1 */ + font-feature-settings: inherit; + /* 1 */ + font-variation-settings: inherit; + /* 1 */ + font-size: 100%; + /* 1 */ + font-weight: inherit; + /* 1 */ + line-height: inherit; + /* 1 */ + letter-spacing: inherit; + /* 1 */ + color: inherit; + /* 1 */ + margin: 0; + /* 2 */ + padding: 0; + /* 3 */ +} + +/* +Remove the inheritance of text transform in Edge and Firefox. +*/ + +button, +select { + text-transform: none; +} + +/* +1. Correct the inability to style clickable types in iOS and Safari. +2. Remove default button styles. +*/ + +button, +input:where([type='button']), +input:where([type='reset']), +input:where([type='submit']) { + -webkit-appearance: button; + /* 1 */ + background-color: transparent; + /* 2 */ + background-image: none; + /* 2 */ +} + +/* +Use the modern Firefox focus style for all focusable elements. +*/ + +:-moz-focusring { + outline: auto; +} + +/* +Remove the additional `:invalid` styles in Firefox. (https://github.com/mozilla/gecko-dev/blob/2f9eacd9d3d995c937b4251a5557d95d494c9be1/layout/style/res/forms.css#L728-L737) +*/ + +:-moz-ui-invalid { + box-shadow: none; +} + +/* +Add the correct vertical alignment in Chrome and Firefox. +*/ + +progress { + vertical-align: baseline; +} + +/* +Correct the cursor style of increment and decrement buttons in Safari. +*/ + +::-webkit-inner-spin-button, +::-webkit-outer-spin-button { + height: auto; +} + +/* +1. Correct the odd appearance in Chrome and Safari. +2. Correct the outline style in Safari. +*/ + +[type='search'] { + -webkit-appearance: textfield; + /* 1 */ + outline-offset: -2px; + /* 2 */ +} + +/* +Remove the inner padding in Chrome and Safari on macOS. +*/ + +::-webkit-search-decoration { + -webkit-appearance: none; +} + +/* +1. Correct the inability to style clickable types in iOS and Safari. +2. Change font properties to `inherit` in Safari. +*/ + +::-webkit-file-upload-button { + -webkit-appearance: button; + /* 1 */ + font: inherit; + /* 2 */ +} + +/* +Add the correct display in Chrome and Safari. +*/ + +summary { + display: list-item; +} + +/* +Removes the default spacing and border for appropriate elements. +*/ + +blockquote, +dl, +dd, +h1, +h2, +h3, +h4, +h5, +h6, +hr, +figure, +p, +pre { + margin: 0; +} + +fieldset { + margin: 0; + padding: 0; +} + +legend { + padding: 0; +} + +ol, +ul, +menu { + list-style: none; + margin: 0; + padding: 0; +} + +/* +Reset default styling for dialogs. +*/ + +dialog { + padding: 0; +} + +/* +Prevent resizing textareas horizontally by default. +*/ + +textarea { + resize: vertical; +} + +/* +1. Reset the default placeholder opacity in Firefox. (https://github.com/tailwindlabs/tailwindcss/issues/3300) +2. Set the default placeholder color to the user's configured gray 400 color. +*/ + +input::-moz-placeholder, textarea::-moz-placeholder { + opacity: 1; + /* 1 */ + color: #9ca3af; + /* 2 */ +} + +input::placeholder, +textarea::placeholder { + opacity: 1; + /* 1 */ + color: #9ca3af; + /* 2 */ +} + +/* +Set the default cursor for buttons. 
+*/ + +button, +[role="button"] { + cursor: pointer; +} + +/* +Make sure disabled buttons don't get the pointer cursor. +*/ + +:disabled { + cursor: default; +} + +/* +1. Make replaced elements `display: block` by default. (https://github.com/mozdevs/cssremedy/issues/14) +2. Add `vertical-align: middle` to align replaced elements more sensibly by default. (https://github.com/jensimmons/cssremedy/issues/14#issuecomment-634934210) + This can trigger a poorly considered lint error in some tools but is included by design. +*/ + +img, +svg, +video, +canvas, +audio, +iframe, +embed, +object { + display: block; + /* 1 */ + vertical-align: middle; + /* 2 */ +} + +/* +Constrain images and videos to the parent width and preserve their intrinsic aspect ratio. (https://github.com/mozdevs/cssremedy/issues/14) +*/ + +img, +video { + max-width: 100%; + height: auto; +} + +/* Make elements with the HTML hidden attribute stay hidden by default */ + +[hidden]:where(:not([hidden="until-found"])) { + display: none; +} + +[type='text'],input:where(:not([type])),[type='email'],[type='url'],[type='password'],[type='number'],[type='date'],[type='datetime-local'],[type='month'],[type='search'],[type='tel'],[type='time'],[type='week'],[multiple],textarea,select { + -webkit-appearance: none; + -moz-appearance: none; + appearance: none; + background-color: #fff; + border-color: #6b7280; + border-width: 1px; + border-radius: 0px; + padding-top: 0.5rem; + padding-right: 0.75rem; + padding-bottom: 0.5rem; + padding-left: 0.75rem; + font-size: 1rem; + line-height: 1.5rem; + --tw-shadow: 0 0 #0000; +} + +[type='text']:focus, input:where(:not([type])):focus, [type='email']:focus, [type='url']:focus, [type='password']:focus, [type='number']:focus, [type='date']:focus, [type='datetime-local']:focus, [type='month']:focus, [type='search']:focus, [type='tel']:focus, [type='time']:focus, [type='week']:focus, [multiple]:focus, textarea:focus, select:focus { + outline: 2px solid transparent; + outline-offset: 2px; + --tw-ring-inset: var(--tw-empty,/*!*/ /*!*/); + --tw-ring-offset-width: 0px; + --tw-ring-offset-color: #fff; + --tw-ring-color: #2563eb; + --tw-ring-offset-shadow: var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color); + --tw-ring-shadow: var(--tw-ring-inset) 0 0 0 calc(1px + var(--tw-ring-offset-width)) var(--tw-ring-color); + box-shadow: var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow); + border-color: #2563eb; +} + +input::-moz-placeholder, textarea::-moz-placeholder { + color: #6b7280; + opacity: 1; +} + +input::placeholder,textarea::placeholder { + color: #6b7280; + opacity: 1; +} + +::-webkit-datetime-edit-fields-wrapper { + padding: 0; +} + +::-webkit-date-and-time-value { + min-height: 1.5em; + text-align: inherit; +} + +::-webkit-datetime-edit { + display: inline-flex; +} + +::-webkit-datetime-edit,::-webkit-datetime-edit-year-field,::-webkit-datetime-edit-month-field,::-webkit-datetime-edit-day-field,::-webkit-datetime-edit-hour-field,::-webkit-datetime-edit-minute-field,::-webkit-datetime-edit-second-field,::-webkit-datetime-edit-millisecond-field,::-webkit-datetime-edit-meridiem-field { + padding-top: 0; + padding-bottom: 0; +} + +select { + background-image: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' fill='none' viewBox='0 0 20 20'%3e%3cpath stroke='%236b7280' stroke-linecap='round' stroke-linejoin='round' stroke-width='1.5' d='M6 8l4 4 4-4'/%3e%3c/svg%3e"); + background-position: right 0.5rem center; + background-repeat: no-repeat; + 
background-size: 1.5em 1.5em; + padding-right: 2.5rem; + -webkit-print-color-adjust: exact; + print-color-adjust: exact; +} + +[multiple],[size]:where(select:not([size="1"])) { + background-image: initial; + background-position: initial; + background-repeat: unset; + background-size: initial; + padding-right: 0.75rem; + -webkit-print-color-adjust: unset; + print-color-adjust: unset; +} + +[type='checkbox'],[type='radio'] { + -webkit-appearance: none; + -moz-appearance: none; + appearance: none; + padding: 0; + -webkit-print-color-adjust: exact; + print-color-adjust: exact; + display: inline-block; + vertical-align: middle; + background-origin: border-box; + -webkit-user-select: none; + -moz-user-select: none; + user-select: none; + flex-shrink: 0; + height: 1rem; + width: 1rem; + color: #2563eb; + background-color: #fff; + border-color: #6b7280; + border-width: 1px; + --tw-shadow: 0 0 #0000; +} + +[type='checkbox'] { + border-radius: 0px; +} + +[type='radio'] { + border-radius: 100%; +} + +[type='checkbox']:focus,[type='radio']:focus { + outline: 2px solid transparent; + outline-offset: 2px; + --tw-ring-inset: var(--tw-empty,/*!*/ /*!*/); + --tw-ring-offset-width: 2px; + --tw-ring-offset-color: #fff; + --tw-ring-color: #2563eb; + --tw-ring-offset-shadow: var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color); + --tw-ring-shadow: var(--tw-ring-inset) 0 0 0 calc(2px + var(--tw-ring-offset-width)) var(--tw-ring-color); + box-shadow: var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow); +} + +[type='checkbox']:checked,[type='radio']:checked { + border-color: transparent; + background-color: currentColor; + background-size: 100% 100%; + background-position: center; + background-repeat: no-repeat; +} + +[type='checkbox']:checked { + background-image: url("data:image/svg+xml,%3csvg viewBox='0 0 16 16' fill='white' xmlns='http://www.w3.org/2000/svg'%3e%3cpath d='M12.207 4.793a1 1 0 010 1.414l-5 5a1 1 0 01-1.414 0l-2-2a1 1 0 011.414-1.414L6.5 9.086l4.293-4.293a1 1 0 011.414 0z'/%3e%3c/svg%3e"); +} + +@media (forced-colors: active) { + [type='checkbox']:checked { + -webkit-appearance: auto; + -moz-appearance: auto; + appearance: auto; + } +} + +[type='radio']:checked { + background-image: url("data:image/svg+xml,%3csvg viewBox='0 0 16 16' fill='white' xmlns='http://www.w3.org/2000/svg'%3e%3ccircle cx='8' cy='8' r='3'/%3e%3c/svg%3e"); +} + +@media (forced-colors: active) { + [type='radio']:checked { + -webkit-appearance: auto; + -moz-appearance: auto; + appearance: auto; + } +} + +[type='checkbox']:checked:hover,[type='checkbox']:checked:focus,[type='radio']:checked:hover,[type='radio']:checked:focus { + border-color: transparent; + background-color: currentColor; +} + +[type='checkbox']:indeterminate { + background-image: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' fill='none' viewBox='0 0 16 16'%3e%3cpath stroke='white' stroke-linecap='round' stroke-linejoin='round' stroke-width='2' d='M4 8h8'/%3e%3c/svg%3e"); + border-color: transparent; + background-color: currentColor; + background-size: 100% 100%; + background-position: center; + background-repeat: no-repeat; +} + +@media (forced-colors: active) { + [type='checkbox']:indeterminate { + -webkit-appearance: auto; + -moz-appearance: auto; + appearance: auto; + } +} + +[type='checkbox']:indeterminate:hover,[type='checkbox']:indeterminate:focus { + border-color: transparent; + background-color: currentColor; +} + +[type='file'] { + background: unset; + border-color: inherit; + 
border-width: 0; + border-radius: 0; + padding: 0; + font-size: unset; + line-height: inherit; +} + +[type='file']:focus { + outline: 1px solid ButtonText; + outline: 1px auto -webkit-focus-ring-color; +} + +html { + scroll-behavior: smooth; +} + +body { + --tw-bg-opacity: 1; + background-color: rgb(249 250 251 / var(--tw-bg-opacity, 1)); + font-family: Inter, ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont, sans-serif; + --tw-text-opacity: 1; + color: rgb(31 41 55 / var(--tw-text-opacity, 1)); +} + +body:is(.dark *) { + --tw-bg-opacity: 1; + background-color: rgb(44 44 51 / var(--tw-bg-opacity, 1)); + --tw-text-opacity: 1; + color: rgb(243 244 246 / var(--tw-text-opacity, 1)); +} + +h1, h2, h3, h4, h5, h6 { + font-weight: 700; + --tw-text-opacity: 1; + color: rgb(17 24 39 / var(--tw-text-opacity, 1)); +} + +h1:is(.dark *), h2:is(.dark *), h3:is(.dark *), h4:is(.dark *), h5:is(.dark *), h6:is(.dark *) { + --tw-text-opacity: 1; + color: rgb(255 255 255 / var(--tw-text-opacity, 1)); +} + +h1 { + font-size: 2.25rem; + line-height: 2.5rem; +} + +@media (min-width: 768px) { + h1 { + font-size: 3rem; + line-height: 1; + } +} + +@media (min-width: 1024px) { + h1 { + font-size: 3.75rem; + line-height: 1; + } +} + +h2 { + font-size: 1.875rem; + line-height: 2.25rem; +} + +@media (min-width: 768px) { + h2 { + font-size: 2.25rem; + line-height: 2.5rem; + } +} + +h3 { + font-size: 1.5rem; + line-height: 2rem; +} + +@media (min-width: 768px) { + h3 { + font-size: 1.875rem; + line-height: 2.25rem; + } +} + +h4 { + font-size: 1.25rem; + line-height: 1.75rem; +} + +@media (min-width: 768px) { + h4 { + font-size: 1.5rem; + line-height: 2rem; + } +} + +a { + --tw-text-opacity: 1; + color: rgb(22 84 246 / var(--tw-text-opacity, 1)); + transition-property: color, background-color, border-color, text-decoration-color, fill, stroke; + transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1); + transition-duration: 150ms; +} + +a:hover { + --tw-text-opacity: 1; + color: rgb(17 66 226 / var(--tw-text-opacity, 1)); +} + +a:is(.dark *) { + --tw-text-opacity: 1; + color: rgb(90 147 255 / var(--tw-text-opacity, 1)); +} + +a:hover:is(.dark *) { + --tw-text-opacity: 1; + color: rgb(142 184 255 / var(--tw-text-opacity, 1)); +} + +input, textarea, select { + border-radius: 0.375rem; + border-width: 1px; + --tw-border-opacity: 1; + border-color: rgb(209 213 219 / var(--tw-border-opacity, 1)); + --tw-bg-opacity: 1; + background-color: rgb(255 255 255 / var(--tw-bg-opacity, 1)); + padding-left: 0.75rem; + padding-right: 0.75rem; + padding-top: 0.5rem; + padding-bottom: 0.5rem; +} + +input:focus, textarea:focus, select:focus { + --tw-border-opacity: 1; + border-color: rgb(41 112 255 / var(--tw-border-opacity, 1)); + outline: 2px solid transparent; + outline-offset: 2px; + --tw-ring-offset-shadow: var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color); + --tw-ring-shadow: var(--tw-ring-inset) 0 0 0 calc(2px + var(--tw-ring-offset-width)) var(--tw-ring-color); + box-shadow: var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow, 0 0 #0000); + --tw-ring-opacity: 1; + --tw-ring-color: rgb(41 112 255 / var(--tw-ring-opacity, 1)); +} + +input:is(.dark *), textarea:is(.dark *), select:is(.dark *) { + --tw-border-opacity: 1; + border-color: rgb(111 111 126 / var(--tw-border-opacity, 1)); + --tw-bg-opacity: 1; + background-color: rgb(73 73 79 / var(--tw-bg-opacity, 1)); +} + +input:focus:is(.dark *), textarea:focus:is(.dark *), select:focus:is(.dark *) { + --tw-border-opacity: 1; 
+ border-color: rgb(90 147 255 / var(--tw-border-opacity, 1)); + --tw-ring-opacity: 1; + --tw-ring-color: rgb(90 147 255 / var(--tw-ring-opacity, 1)); +} + +label { + margin-bottom: 0.25rem; + display: block; + font-size: 0.875rem; + line-height: 1.25rem; + font-weight: 500; + --tw-text-opacity: 1; + color: rgb(55 65 81 / var(--tw-text-opacity, 1)); +} + +label:is(.dark *) { + --tw-text-opacity: 1; + color: rgb(209 213 219 / var(--tw-text-opacity, 1)); +} + +::-moz-placeholder { + --tw-text-opacity: 1; + color: rgb(156 163 175 / var(--tw-text-opacity, 1)); +} + +::placeholder { + --tw-text-opacity: 1; + color: rgb(156 163 175 / var(--tw-text-opacity, 1)); +} + +:is(.dark *)::-moz-placeholder { + --tw-text-opacity: 1; + color: rgb(107 114 128 / var(--tw-text-opacity, 1)); +} + +:is(.dark *)::placeholder { + --tw-text-opacity: 1; + color: rgb(107 114 128 / var(--tw-text-opacity, 1)); +} + +.container { + width: 100%; +} + +@media (min-width: 640px) { + .container { + max-width: 640px; + } +} + +@media (min-width: 768px) { + .container { + max-width: 768px; + } +} + +@media (min-width: 1024px) { + .container { + max-width: 1024px; + } +} + +@media (min-width: 1280px) { + .container { + max-width: 1280px; + } +} + +@media (min-width: 1536px) { + .container { + max-width: 1536px; + } +} + +.prose { + color: #1f2937; + max-width: 65ch; +} + +.prose :where(p):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + margin-top: 1.25em; + margin-bottom: 1.25em; +} + +.prose :where([class~="lead"]):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + color: var(--tw-prose-lead); + font-size: 1.25em; + line-height: 1.6; + margin-top: 1.2em; + margin-bottom: 1.2em; +} + +.prose :where(a):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + color: #2970ff; + text-decoration: underline; + font-weight: 500; +} + +.prose :where(a):not(:where([class~="not-prose"],[class~="not-prose"] *)):hover { + color: #1142e2; +} + +.prose :where(strong):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + color: var(--tw-prose-bold); + font-weight: 600; +} + +.prose :where(a strong):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + color: inherit; +} + +.prose :where(blockquote strong):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + color: inherit; +} + +.prose :where(thead th strong):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + color: inherit; +} + +.prose :where(ol):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + list-style-type: decimal; + margin-top: 1.25em; + margin-bottom: 1.25em; + padding-inline-start: 1.625em; +} + +.prose :where(ol[type="A"]):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + list-style-type: upper-alpha; +} + +.prose :where(ol[type="a"]):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + list-style-type: lower-alpha; +} + +.prose :where(ol[type="A" s]):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + list-style-type: upper-alpha; +} + +.prose :where(ol[type="a" s]):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + list-style-type: lower-alpha; +} + +.prose :where(ol[type="I"]):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + list-style-type: upper-roman; +} + +.prose :where(ol[type="i"]):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + list-style-type: lower-roman; +} + +.prose :where(ol[type="I" s]):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + list-style-type: upper-roman; +} + +.prose :where(ol[type="i" 
s]):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + list-style-type: lower-roman; +} + +.prose :where(ol[type="1"]):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + list-style-type: decimal; +} + +.prose :where(ul):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + list-style-type: disc; + margin-top: 1.25em; + margin-bottom: 1.25em; + padding-inline-start: 1.625em; +} + +.prose :where(ol > li):not(:where([class~="not-prose"],[class~="not-prose"] *))::marker { + font-weight: 400; + color: var(--tw-prose-counters); +} + +.prose :where(ul > li):not(:where([class~="not-prose"],[class~="not-prose"] *))::marker { + color: var(--tw-prose-bullets); +} + +.prose :where(dt):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + color: var(--tw-prose-headings); + font-weight: 600; + margin-top: 1.25em; +} + +.prose :where(hr):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + border-color: var(--tw-prose-hr); + border-top-width: 1px; + margin-top: 3em; + margin-bottom: 3em; +} + +.prose :where(blockquote):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + font-weight: 500; + font-style: italic; + color: var(--tw-prose-quotes); + border-inline-start-width: 0.25rem; + border-inline-start-color: var(--tw-prose-quote-borders); + quotes: "\201C""\201D""\2018""\2019"; + margin-top: 1.6em; + margin-bottom: 1.6em; + padding-inline-start: 1em; +} + +.prose :where(blockquote p:first-of-type):not(:where([class~="not-prose"],[class~="not-prose"] *))::before { + content: open-quote; +} + +.prose :where(blockquote p:last-of-type):not(:where([class~="not-prose"],[class~="not-prose"] *))::after { + content: close-quote; +} + +.prose :where(h1):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + color: var(--tw-prose-headings); + font-weight: 800; + font-size: 2.25em; + margin-top: 0; + margin-bottom: 0.8888889em; + line-height: 1.1111111; +} + +.prose :where(h1 strong):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + font-weight: 900; + color: inherit; +} + +.prose :where(h2):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + color: var(--tw-prose-headings); + font-weight: 700; + font-size: 1.5em; + margin-top: 2em; + margin-bottom: 1em; + line-height: 1.3333333; +} + +.prose :where(h2 strong):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + font-weight: 800; + color: inherit; +} + +.prose :where(h3):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + color: var(--tw-prose-headings); + font-weight: 600; + font-size: 1.25em; + margin-top: 1.6em; + margin-bottom: 0.6em; + line-height: 1.6; +} + +.prose :where(h3 strong):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + font-weight: 700; + color: inherit; +} + +.prose :where(h4):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + color: var(--tw-prose-headings); + font-weight: 600; + margin-top: 1.5em; + margin-bottom: 0.5em; + line-height: 1.5; +} + +.prose :where(h4 strong):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + font-weight: 700; + color: inherit; +} + +.prose :where(img):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + margin-top: 2em; + margin-bottom: 2em; +} + +.prose :where(picture):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + display: block; + margin-top: 2em; + margin-bottom: 2em; +} + +.prose :where(video):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + margin-top: 2em; + margin-bottom: 2em; +} + +.prose 
:where(kbd):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + font-weight: 500; + font-family: inherit; + color: var(--tw-prose-kbd); + box-shadow: 0 0 0 1px rgb(var(--tw-prose-kbd-shadows) / 10%), 0 3px 0 rgb(var(--tw-prose-kbd-shadows) / 10%); + font-size: 0.875em; + border-radius: 0.3125rem; + padding-top: 0.1875em; + padding-inline-end: 0.375em; + padding-bottom: 0.1875em; + padding-inline-start: 0.375em; +} + +.prose :where(code):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + color: var(--tw-prose-code); + font-weight: 600; + font-size: 0.875em; +} + +.prose :where(code):not(:where([class~="not-prose"],[class~="not-prose"] *))::before { + content: "`"; +} + +.prose :where(code):not(:where([class~="not-prose"],[class~="not-prose"] *))::after { + content: "`"; +} + +.prose :where(a code):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + color: inherit; +} + +.prose :where(h1 code):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + color: inherit; +} + +.prose :where(h2 code):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + color: inherit; + font-size: 0.875em; +} + +.prose :where(h3 code):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + color: inherit; + font-size: 0.9em; +} + +.prose :where(h4 code):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + color: inherit; +} + +.prose :where(blockquote code):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + color: inherit; +} + +.prose :where(thead th code):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + color: inherit; +} + +.prose :where(pre):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + color: var(--tw-prose-pre-code); + background-color: var(--tw-prose-pre-bg); + overflow-x: auto; + font-weight: 400; + font-size: 0.875em; + line-height: 1.7142857; + margin-top: 1.7142857em; + margin-bottom: 1.7142857em; + border-radius: 0.375rem; + padding-top: 0.8571429em; + padding-inline-end: 1.1428571em; + padding-bottom: 0.8571429em; + padding-inline-start: 1.1428571em; +} + +.prose :where(pre code):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + background-color: transparent; + border-width: 0; + border-radius: 0; + padding: 0; + font-weight: inherit; + color: inherit; + font-size: inherit; + font-family: inherit; + line-height: inherit; +} + +.prose :where(pre code):not(:where([class~="not-prose"],[class~="not-prose"] *))::before { + content: none; +} + +.prose :where(pre code):not(:where([class~="not-prose"],[class~="not-prose"] *))::after { + content: none; +} + +.prose :where(table):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + width: 100%; + table-layout: auto; + margin-top: 2em; + margin-bottom: 2em; + font-size: 0.875em; + line-height: 1.7142857; +} + +.prose :where(thead):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + border-bottom-width: 1px; + border-bottom-color: var(--tw-prose-th-borders); +} + +.prose :where(thead th):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + color: var(--tw-prose-headings); + font-weight: 600; + vertical-align: bottom; + padding-inline-end: 0.5714286em; + padding-bottom: 0.5714286em; + padding-inline-start: 0.5714286em; +} + +.prose :where(tbody tr):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + border-bottom-width: 1px; + border-bottom-color: var(--tw-prose-td-borders); +} + +.prose :where(tbody tr:last-child):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + border-bottom-width: 0; +} + +.prose :where(tbody 
td):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + vertical-align: baseline; +} + +.prose :where(tfoot):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + border-top-width: 1px; + border-top-color: var(--tw-prose-th-borders); +} + +.prose :where(tfoot td):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + vertical-align: top; +} + +.prose :where(th, td):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + text-align: start; +} + +.prose :where(figure > *):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + margin-top: 0; + margin-bottom: 0; +} + +.prose :where(figcaption):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + color: var(--tw-prose-captions); + font-size: 0.875em; + line-height: 1.4285714; + margin-top: 0.8571429em; +} + +.prose { + --tw-prose-body: #374151; + --tw-prose-headings: #111827; + --tw-prose-lead: #4b5563; + --tw-prose-links: #111827; + --tw-prose-bold: #111827; + --tw-prose-counters: #6b7280; + --tw-prose-bullets: #d1d5db; + --tw-prose-hr: #e5e7eb; + --tw-prose-quotes: #111827; + --tw-prose-quote-borders: #e5e7eb; + --tw-prose-captions: #6b7280; + --tw-prose-kbd: #111827; + --tw-prose-kbd-shadows: 17 24 39; + --tw-prose-code: #111827; + --tw-prose-pre-code: #e5e7eb; + --tw-prose-pre-bg: #1f2937; + --tw-prose-th-borders: #d1d5db; + --tw-prose-td-borders: #e5e7eb; + --tw-prose-invert-body: #d1d5db; + --tw-prose-invert-headings: #fff; + --tw-prose-invert-lead: #9ca3af; + --tw-prose-invert-links: #fff; + --tw-prose-invert-bold: #fff; + --tw-prose-invert-counters: #9ca3af; + --tw-prose-invert-bullets: #4b5563; + --tw-prose-invert-hr: #374151; + --tw-prose-invert-quotes: #f3f4f6; + --tw-prose-invert-quote-borders: #374151; + --tw-prose-invert-captions: #9ca3af; + --tw-prose-invert-kbd: #fff; + --tw-prose-invert-kbd-shadows: 255 255 255; + --tw-prose-invert-code: #fff; + --tw-prose-invert-pre-code: #d1d5db; + --tw-prose-invert-pre-bg: rgb(0 0 0 / 50%); + --tw-prose-invert-th-borders: #4b5563; + --tw-prose-invert-td-borders: #374151; + font-size: 1rem; + line-height: 1.75; +} + +.prose :where(picture > img):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + margin-top: 0; + margin-bottom: 0; +} + +.prose :where(li):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + margin-top: 0.5em; + margin-bottom: 0.5em; +} + +.prose :where(ol > li):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + padding-inline-start: 0.375em; +} + +.prose :where(ul > li):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + padding-inline-start: 0.375em; +} + +.prose :where(.prose > ul > li p):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + margin-top: 0.75em; + margin-bottom: 0.75em; +} + +.prose :where(.prose > ul > li > p:first-child):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + margin-top: 1.25em; +} + +.prose :where(.prose > ul > li > p:last-child):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + margin-bottom: 1.25em; +} + +.prose :where(.prose > ol > li > p:first-child):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + margin-top: 1.25em; +} + +.prose :where(.prose > ol > li > p:last-child):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + margin-bottom: 1.25em; +} + +.prose :where(ul ul, ul ol, ol ul, ol ol):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + margin-top: 0.75em; + margin-bottom: 0.75em; +} + +.prose :where(dl):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + margin-top: 1.25em; + margin-bottom: 
1.25em; +} + +.prose :where(dd):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + margin-top: 0.5em; + padding-inline-start: 1.625em; +} + +.prose :where(hr + *):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + margin-top: 0; +} + +.prose :where(h2 + *):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + margin-top: 0; +} + +.prose :where(h3 + *):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + margin-top: 0; +} + +.prose :where(h4 + *):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + margin-top: 0; +} + +.prose :where(thead th:first-child):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + padding-inline-start: 0; +} + +.prose :where(thead th:last-child):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + padding-inline-end: 0; +} + +.prose :where(tbody td, tfoot td):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + padding-top: 0.5714286em; + padding-inline-end: 0.5714286em; + padding-bottom: 0.5714286em; + padding-inline-start: 0.5714286em; +} + +.prose :where(tbody td:first-child, tfoot td:first-child):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + padding-inline-start: 0; +} + +.prose :where(tbody td:last-child, tfoot td:last-child):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + padding-inline-end: 0; +} + +.prose :where(figure):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + margin-top: 2em; + margin-bottom: 2em; +} + +.prose :where(.prose > :first-child):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + margin-top: 0; +} + +.prose :where(.prose > :last-child):not(:where([class~="not-prose"],[class~="not-prose"] *)) { + margin-bottom: 0; +} + +.form-input,.form-textarea,.form-select,.form-multiselect { + -webkit-appearance: none; + -moz-appearance: none; + appearance: none; + background-color: #fff; + border-color: #6b7280; + border-width: 1px; + border-radius: 0px; + padding-top: 0.5rem; + padding-right: 0.75rem; + padding-bottom: 0.5rem; + padding-left: 0.75rem; + font-size: 1rem; + line-height: 1.5rem; + --tw-shadow: 0 0 #0000; +} + +.form-input:focus, .form-textarea:focus, .form-select:focus, .form-multiselect:focus { + outline: 2px solid transparent; + outline-offset: 2px; + --tw-ring-inset: var(--tw-empty,/*!*/ /*!*/); + --tw-ring-offset-width: 0px; + --tw-ring-offset-color: #fff; + --tw-ring-color: #2563eb; + --tw-ring-offset-shadow: var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color); + --tw-ring-shadow: var(--tw-ring-inset) 0 0 0 calc(1px + var(--tw-ring-offset-width)) var(--tw-ring-color); + box-shadow: var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow); + border-color: #2563eb; +} + +.form-input::-moz-placeholder, .form-textarea::-moz-placeholder { + color: #6b7280; + opacity: 1; +} + +.form-input::placeholder,.form-textarea::placeholder { + color: #6b7280; + opacity: 1; +} + +.form-input::-webkit-datetime-edit-fields-wrapper { + padding: 0; +} + +.form-input::-webkit-date-and-time-value { + min-height: 1.5em; + text-align: inherit; +} + +.form-input::-webkit-datetime-edit { + display: inline-flex; +} + 
+.form-input::-webkit-datetime-edit,.form-input::-webkit-datetime-edit-year-field,.form-input::-webkit-datetime-edit-month-field,.form-input::-webkit-datetime-edit-day-field,.form-input::-webkit-datetime-edit-hour-field,.form-input::-webkit-datetime-edit-minute-field,.form-input::-webkit-datetime-edit-second-field,.form-input::-webkit-datetime-edit-millisecond-field,.form-input::-webkit-datetime-edit-meridiem-field { + padding-top: 0; + padding-bottom: 0; +} + +.form-select { + background-image: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' fill='none' viewBox='0 0 20 20'%3e%3cpath stroke='%236b7280' stroke-linecap='round' stroke-linejoin='round' stroke-width='1.5' d='M6 8l4 4 4-4'/%3e%3c/svg%3e"); + background-position: right 0.5rem center; + background-repeat: no-repeat; + background-size: 1.5em 1.5em; + padding-right: 2.5rem; + -webkit-print-color-adjust: exact; + print-color-adjust: exact; +} + +.form-select:where([size]:not([size="1"])) { + background-image: initial; + background-position: initial; + background-repeat: unset; + background-size: initial; + padding-right: 0.75rem; + -webkit-print-color-adjust: unset; + print-color-adjust: unset; +} + +.btn { + display: inline-flex; + align-items: center; + justify-content: center; + border-radius: 0.375rem; + padding-left: 1rem; + padding-right: 1rem; + padding-top: 0.5rem; + padding-bottom: 0.5rem; + font-size: 1rem; + line-height: 1.5rem; + font-weight: 500; + --tw-shadow: 0 1px 2px 0 rgb(0 0 0 / 0.05); + --tw-shadow-colored: 0 1px 2px 0 var(--tw-shadow-color); + box-shadow: var(--tw-ring-offset-shadow, 0 0 #0000), var(--tw-ring-shadow, 0 0 #0000), var(--tw-shadow); + transition-property: all; + transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1); + transition-duration: 150ms; +} + +.btn:focus { + outline: 2px solid transparent; + outline-offset: 2px; + --tw-ring-offset-shadow: var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color); + --tw-ring-shadow: var(--tw-ring-inset) 0 0 0 calc(2px + var(--tw-ring-offset-width)) var(--tw-ring-color); + box-shadow: var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow, 0 0 #0000); + --tw-ring-offset-width: 2px; +} + +.btn-primary { + display: inline-flex; + align-items: center; + justify-content: center; + border-radius: 0.375rem; + padding-left: 1rem; + padding-right: 1rem; + padding-top: 0.5rem; + padding-bottom: 0.5rem; + font-size: 1rem; + line-height: 1.5rem; + font-weight: 500; + --tw-shadow: 0 1px 2px 0 rgb(0 0 0 / 0.05); + --tw-shadow-colored: 0 1px 2px 0 var(--tw-shadow-color); + box-shadow: var(--tw-ring-offset-shadow, 0 0 #0000), var(--tw-ring-shadow, 0 0 #0000), var(--tw-shadow); + transition-property: all; + transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1); + transition-duration: 150ms; +} + +.btn-primary:focus { + outline: 2px solid transparent; + outline-offset: 2px; + --tw-ring-offset-shadow: var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color); + --tw-ring-shadow: var(--tw-ring-inset) 0 0 0 calc(2px + var(--tw-ring-offset-width)) var(--tw-ring-color); + box-shadow: var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow, 0 0 #0000); + --tw-ring-offset-width: 2px; +} + +.btn-primary { + --tw-bg-opacity: 1; + background-color: rgb(22 84 246 / var(--tw-bg-opacity, 1)); + --tw-text-opacity: 1; + color: rgb(255 255 255 / var(--tw-text-opacity, 1)); +} + +.btn-primary:hover { + --tw-bg-opacity: 1; + background-color: rgb(17 66 226 / var(--tw-bg-opacity, 1)); + 
--tw-shadow: 0 4px 6px -1px rgb(0 0 0 / 0.1), 0 2px 4px -2px rgb(0 0 0 / 0.1); + --tw-shadow-colored: 0 4px 6px -1px var(--tw-shadow-color), 0 2px 4px -2px var(--tw-shadow-color); + box-shadow: var(--tw-ring-offset-shadow, 0 0 #0000), var(--tw-ring-shadow, 0 0 #0000), var(--tw-shadow); +} + +.btn-primary:focus { + --tw-ring-opacity: 1; + --tw-ring-color: rgb(41 112 255 / var(--tw-ring-opacity, 1)); +} + +.btn-primary:active { + --tw-translate-y: 0.125rem; + transform: translate(var(--tw-translate-x), var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y)); +} + +.btn-primary:is(.dark *) { + --tw-bg-opacity: 1; + background-color: rgb(17 66 226 / var(--tw-bg-opacity, 1)); +} + +.btn-primary:hover:is(.dark *) { + --tw-bg-opacity: 1; + background-color: rgb(22 84 246 / var(--tw-bg-opacity, 1)); +} + +.btn-secondary { + display: inline-flex; + align-items: center; + justify-content: center; + border-radius: 0.375rem; + padding-left: 1rem; + padding-right: 1rem; + padding-top: 0.5rem; + padding-bottom: 0.5rem; + font-size: 1rem; + line-height: 1.5rem; + font-weight: 500; + --tw-shadow: 0 1px 2px 0 rgb(0 0 0 / 0.05); + --tw-shadow-colored: 0 1px 2px 0 var(--tw-shadow-color); + box-shadow: var(--tw-ring-offset-shadow, 0 0 #0000), var(--tw-ring-shadow, 0 0 #0000), var(--tw-shadow); + transition-property: all; + transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1); + transition-duration: 150ms; +} + +.btn-secondary:focus { + outline: 2px solid transparent; + outline-offset: 2px; + --tw-ring-offset-shadow: var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color); + --tw-ring-shadow: var(--tw-ring-inset) 0 0 0 calc(2px + var(--tw-ring-offset-width)) var(--tw-ring-color); + box-shadow: var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow, 0 0 #0000); + --tw-ring-offset-width: 2px; +} + +.btn-secondary { + --tw-bg-opacity: 1; + background-color: rgb(120 51 248 / var(--tw-bg-opacity, 1)); + --tw-text-opacity: 1; + color: rgb(255 255 255 / var(--tw-text-opacity, 1)); +} + +.btn-secondary:hover { + --tw-bg-opacity: 1; + background-color: rgb(105 36 226 / var(--tw-bg-opacity, 1)); + --tw-shadow: 0 4px 6px -1px rgb(0 0 0 / 0.1), 0 2px 4px -2px rgb(0 0 0 / 0.1); + --tw-shadow-colored: 0 4px 6px -1px var(--tw-shadow-color), 0 2px 4px -2px var(--tw-shadow-color); + box-shadow: var(--tw-ring-offset-shadow, 0 0 #0000), var(--tw-ring-shadow, 0 0 #0000), var(--tw-shadow); +} + +.btn-secondary:focus { + --tw-ring-opacity: 1; + --tw-ring-color: rgb(139 85 255 / var(--tw-ring-opacity, 1)); +} + +.btn-secondary:active { + --tw-translate-y: 0.125rem; + transform: translate(var(--tw-translate-x), var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y)); +} + +.btn-secondary:is(.dark *) { + --tw-bg-opacity: 1; + background-color: rgb(105 36 226 / var(--tw-bg-opacity, 1)); +} + +.btn-secondary:hover:is(.dark *) { + --tw-bg-opacity: 1; + background-color: rgb(120 51 248 / var(--tw-bg-opacity, 1)); +} + +.btn-outline { + display: inline-flex; + align-items: center; + justify-content: center; + border-radius: 0.375rem; + padding-left: 1rem; + padding-right: 1rem; + padding-top: 0.5rem; + padding-bottom: 0.5rem; + font-size: 1rem; + line-height: 1.5rem; + font-weight: 500; + --tw-shadow: 0 1px 2px 0 rgb(0 0 0 / 0.05); + --tw-shadow-colored: 0 1px 2px 0 var(--tw-shadow-color); + box-shadow: 
var(--tw-ring-offset-shadow, 0 0 #0000), var(--tw-ring-shadow, 0 0 #0000), var(--tw-shadow); + transition-property: all; + transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1); + transition-duration: 150ms; +} + +.btn-outline:focus { + outline: 2px solid transparent; + outline-offset: 2px; + --tw-ring-offset-shadow: var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color); + --tw-ring-shadow: var(--tw-ring-inset) 0 0 0 calc(2px + var(--tw-ring-offset-width)) var(--tw-ring-color); + box-shadow: var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow, 0 0 #0000); + --tw-ring-offset-width: 2px; +} + +.btn-outline { + border-width: 2px; + --tw-border-opacity: 1; + border-color: rgb(209 213 219 / var(--tw-border-opacity, 1)); + --tw-bg-opacity: 1; + background-color: rgb(255 255 255 / var(--tw-bg-opacity, 1)); + --tw-text-opacity: 1; + color: rgb(55 65 81 / var(--tw-text-opacity, 1)); +} + +.btn-outline:hover { + --tw-border-opacity: 1; + border-color: rgb(41 112 255 / var(--tw-border-opacity, 1)); + --tw-bg-opacity: 1; + background-color: rgb(249 250 251 / var(--tw-bg-opacity, 1)); + --tw-text-opacity: 1; + color: rgb(22 84 246 / var(--tw-text-opacity, 1)); +} + +.btn-outline:focus { + --tw-ring-opacity: 1; + --tw-ring-color: rgb(107 114 128 / var(--tw-ring-opacity, 1)); +} + +.btn-outline:active { + --tw-translate-y: 0.125rem; + transform: translate(var(--tw-translate-x), var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y)); +} + +.btn-outline:is(.dark *) { + --tw-border-opacity: 1; + border-color: rgb(111 111 126 / var(--tw-border-opacity, 1)); + background-color: transparent; + --tw-text-opacity: 1; + color: rgb(229 231 235 / var(--tw-text-opacity, 1)); +} + +.btn-outline:hover:is(.dark *) { + --tw-border-opacity: 1; + border-color: rgb(90 147 255 / var(--tw-border-opacity, 1)); + --tw-bg-opacity: 1; + background-color: rgb(73 73 79 / var(--tw-bg-opacity, 1)); + --tw-text-opacity: 1; + color: rgb(90 147 255 / var(--tw-text-opacity, 1)); +} + +.card { + overflow: hidden; + border-radius: 0.5rem; + border-width: 1px; + --tw-border-opacity: 1; + border-color: rgb(229 231 235 / var(--tw-border-opacity, 1)); + --tw-bg-opacity: 1; + background-color: rgb(255 255 255 / var(--tw-bg-opacity, 1)); + --tw-shadow: 0 4px 6px -1px rgb(0 0 0 / 0.1), 0 2px 4px -2px rgb(0 0 0 / 0.1); + --tw-shadow-colored: 0 4px 6px -1px var(--tw-shadow-color), 0 2px 4px -2px var(--tw-shadow-color); + box-shadow: var(--tw-ring-offset-shadow, 0 0 #0000), var(--tw-ring-shadow, 0 0 #0000), var(--tw-shadow); + transition-property: box-shadow; + transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1); + transition-duration: 300ms; +} + +.card:hover { + --tw-shadow: 0 10px 15px -3px rgb(0 0 0 / 0.1), 0 4px 6px -4px rgb(0 0 0 / 0.1); + --tw-shadow-colored: 0 10px 15px -3px var(--tw-shadow-color), 0 4px 6px -4px var(--tw-shadow-color); + box-shadow: var(--tw-ring-offset-shadow, 0 0 #0000), var(--tw-ring-shadow, 0 0 #0000), var(--tw-shadow); +} + +.card:is(.dark *) { + --tw-border-opacity: 1; + border-color: rgb(91 91 105 / var(--tw-border-opacity, 1)); + --tw-bg-opacity: 1; + background-color: rgb(73 73 79 / var(--tw-bg-opacity, 1)); +} + +.gradient-text { + background-image: linear-gradient(to right, var(--tw-gradient-stops)); + --tw-gradient-from: #1654f6 var(--tw-gradient-from-position); + --tw-gradient-to: rgb(22 84 246 / 0) var(--tw-gradient-to-position); + --tw-gradient-stops: 
var(--tw-gradient-from), var(--tw-gradient-to); + --tw-gradient-to: #7833f8 var(--tw-gradient-to-position); + -webkit-background-clip: text; + background-clip: text; + font-weight: 700; + color: transparent; +} + +.gradient-text:is(.dark *) { + --tw-gradient-from: #2970ff var(--tw-gradient-from-position); + --tw-gradient-to: rgb(41 112 255 / 0) var(--tw-gradient-to-position); + --tw-gradient-stops: var(--tw-gradient-from), var(--tw-gradient-to); + --tw-gradient-to: #8b55ff var(--tw-gradient-to-position); +} + +.node-tooltip { + z-index: 50; + max-width: 20rem; + border-radius: 0.5rem; + border-width: 1px; + --tw-border-opacity: 1; + border-color: rgb(229 231 235 / var(--tw-border-opacity, 1)); + --tw-bg-opacity: 1; + background-color: rgb(255 255 255 / var(--tw-bg-opacity, 1)); + padding: 0.75rem; + font-size: 0.875rem; + line-height: 1.25rem; + --tw-text-opacity: 1; + color: rgb(31 41 55 / var(--tw-text-opacity, 1)); + --tw-shadow: 0 10px 15px -3px rgb(0 0 0 / 0.1), 0 4px 6px -4px rgb(0 0 0 / 0.1); + --tw-shadow-colored: 0 10px 15px -3px var(--tw-shadow-color), 0 4px 6px -4px var(--tw-shadow-color); + box-shadow: var(--tw-ring-offset-shadow, 0 0 #0000), var(--tw-ring-shadow, 0 0 #0000), var(--tw-shadow); +} + +.node-tooltip:is(.dark *) { + --tw-border-opacity: 1; + border-color: rgb(91 91 105 / var(--tw-border-opacity, 1)); + --tw-bg-opacity: 1; + background-color: rgb(44 44 51 / var(--tw-bg-opacity, 1)); + --tw-text-opacity: 1; + color: rgb(255 255 255 / var(--tw-text-opacity, 1)); +} + +/* Mindmap-specific styles */ + +.mindmap-container { + border-radius: 0.5rem; + background-color: rgb(249 250 251 / 0.8); + padding: 1rem; + --tw-shadow: inset 0 2px 4px 0 rgb(0 0 0 / 0.05); + --tw-shadow-colored: inset 0 2px 4px 0 var(--tw-shadow-color); + box-shadow: var(--tw-ring-offset-shadow, 0 0 #0000), var(--tw-ring-shadow, 0 0 #0000), var(--tw-shadow); +} + +.mindmap-container:is(.dark *) { + background-color: rgb(44 44 51 / 0.8); +} + +.form-control { + width: 100%; +} + +/* Improved form input fields */ + +.form-input, + .form-textarea, + .form-select { + width: 100%; + border-radius: 0.375rem; + border-width: 1px; + --tw-border-opacity: 1; + border-color: rgb(209 213 219 / var(--tw-border-opacity, 1)); + --tw-bg-opacity: 1; + background-color: rgb(255 255 255 / var(--tw-bg-opacity, 1)); + padding-left: 1rem; + padding-right: 1rem; + padding-top: 0.5rem; + padding-bottom: 0.5rem; + --tw-text-opacity: 1; + color: rgb(31 41 55 / var(--tw-text-opacity, 1)); + --tw-shadow: 0 1px 2px 0 rgb(0 0 0 / 0.05); + --tw-shadow-colored: 0 1px 2px 0 var(--tw-shadow-color); + box-shadow: var(--tw-ring-offset-shadow, 0 0 #0000), var(--tw-ring-shadow, 0 0 #0000), var(--tw-shadow); +} + +.form-input:focus, + .form-textarea:focus, + .form-select:focus { + --tw-border-opacity: 1; + border-color: rgb(41 112 255 / var(--tw-border-opacity, 1)); + --tw-ring-offset-shadow: var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color); + --tw-ring-shadow: var(--tw-ring-inset) 0 0 0 calc(3px + var(--tw-ring-offset-width)) var(--tw-ring-color); + box-shadow: var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow, 0 0 #0000); + --tw-ring-color: rgb(41 112 255 / var(--tw-ring-opacity, 1)); + --tw-ring-opacity: 0.5; +} + +.form-input:is(.dark *), + .form-textarea:is(.dark *), + .form-select:is(.dark *) { + --tw-border-opacity: 1; + border-color: rgb(111 111 126 / var(--tw-border-opacity, 1)); + --tw-bg-opacity: 1; + background-color: rgb(73 73 79 / var(--tw-bg-opacity, 1)); +
--tw-text-opacity: 1; + color: rgb(243 244 246 / var(--tw-text-opacity, 1)); +} + +.form-input:focus:is(.dark *), + .form-textarea:focus:is(.dark *), + .form-select:focus:is(.dark *) { + --tw-border-opacity: 1; + border-color: rgb(90 147 255 / var(--tw-border-opacity, 1)); + --tw-ring-opacity: 1; + --tw-ring-color: rgb(90 147 255 / var(--tw-ring-opacity, 1)); +} + +.visible { + visibility: visible; +} + +.static { + position: static; +} + +.fixed { + position: fixed; +} + +.absolute { + position: absolute; +} + +.relative { + position: relative; +} + +.sticky { + position: sticky; +} + +.inset-0 { + inset: 0px; +} + +.bottom-4 { + bottom: 1rem; +} + +.left-0 { + left: 0px; +} + +.left-1\/2 { + left: 50%; +} + +.left-3 { + left: 0.75rem; +} + +.right-3 { + right: 0.75rem; +} + +.right-4 { + right: 1rem; +} + +.top-0 { + top: 0px; +} + +.top-1\/2 { + top: 50%; +} + +.top-6 { + top: 1.5rem; +} + +.z-10 { + z-index: 10; +} + +.z-50 { + z-index: 50; +} + +.order-1 { + order: 1; +} + +.order-2 { + order: 2; +} + +.mx-auto { + margin-left: auto; + margin-right: auto; +} + +.mb-0 { + margin-bottom: 0px; +} + +.mb-1 { + margin-bottom: 0.25rem; +} + +.mb-1\.5 { + margin-bottom: 0.375rem; +} + +.mb-12 { + margin-bottom: 3rem; +} + +.mb-16 { + margin-bottom: 4rem; +} + +.mb-2 { + margin-bottom: 0.5rem; +} + +.mb-3 { + margin-bottom: 0.75rem; +} + +.mb-4 { + margin-bottom: 1rem; +} + +.mb-6 { + margin-bottom: 1.5rem; +} + +.mb-8 { + margin-bottom: 2rem; +} + +.me-2 { + margin-inline-end: 0.5rem; +} + +.ml-1 { + margin-left: 0.25rem; +} + +.ml-3 { + margin-left: 0.75rem; +} + +.ml-4 { + margin-left: 1rem; +} + +.ml-auto { + margin-left: auto; +} + +.mr-1 { + margin-right: 0.25rem; +} + +.mr-2 { + margin-right: 0.5rem; +} + +.mr-3 { + margin-right: 0.75rem; +} + +.mt-1 { + margin-top: 0.25rem; +} + +.mt-10 { + margin-top: 2.5rem; +} + +.mt-12 { + margin-top: 3rem; +} + +.mt-16 { + margin-top: 4rem; +} + +.mt-2 { + margin-top: 0.5rem; +} + +.mt-3 { + margin-top: 0.75rem; +} + +.mt-4 { + margin-top: 1rem; +} + +.mt-5 { + margin-top: 1.25rem; +} + +.mt-8 { + margin-top: 2rem; +} + +.block { + display: block; +} + +.inline-block { + display: inline-block; +} + +.flex { + display: flex; +} + +.inline-flex { + display: inline-flex; +} + +.table { + display: table; +} + +.grid { + display: grid; +} + +.hidden { + display: none; +} + +.h-10 { + height: 2.5rem; +} + +.h-12 { + height: 3rem; +} + +.h-16 { + height: 4rem; +} + +.h-2 { + height: 0.5rem; +} + +.h-3 { + height: 0.75rem; +} + +.h-32 { + height: 8rem; +} + +.h-5 { + height: 1.25rem; +} + +.h-64 { + height: 16rem; +} + +.h-7 { + height: 1.75rem; +} + +.h-8 { + height: 2rem; +} + +.h-80 { + height: 20rem; +} + +.h-full { + height: 100%; +} + +.max-h-0 { + max-height: 0px; +} + +.max-h-48 { + max-height: 12rem; +} + +.max-h-80 { + max-height: 20rem; +} + +.max-h-96 { + max-height: 24rem; +} + +.max-h-\[90vh\] { + max-height: 90vh; +} + +.min-h-\[65vh\] { + min-height: 65vh; +} + +.min-h-screen { + min-height: 100vh; +} + +.w-1\/3 { + width: 33.333333%; +} + +.w-10 { + width: 2.5rem; +} + +.w-12 { + width: 3rem; +} + +.w-14 { + width: 3.5rem; +} + +.w-16 { + width: 4rem; +} + +.w-2 { + width: 0.5rem; +} + +.w-3 { + width: 0.75rem; +} + +.w-5 { + width: 1.25rem; +} + +.w-60 { + width: 15rem; +} + +.w-64 { + width: 16rem; +} + +.w-8 { + width: 2rem; +} + +.w-80 { + width: 20rem; +} + +.w-full { + width: 100%; +} + +.max-w-2xl { + max-width: 42rem; +} + +.max-w-3xl { + max-width: 48rem; +} + +.max-w-4xl { + max-width: 56rem; +} + +.max-w-\[85\%\] { + 
max-width: 85%; +} + +.max-w-screen-xl { + max-width: 1280px; +} + +.max-w-sm { + max-width: 24rem; +} + +.flex-1 { + flex: 1 1 0%; +} + +.flex-shrink { + flex-shrink: 1; +} + +.flex-shrink-0 { + flex-shrink: 0; +} + +.flex-grow { + flex-grow: 1; +} + +.-translate-x-1\/2 { + --tw-translate-x: -50%; + transform: translate(var(--tw-translate-x), var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y)); +} + +.-translate-y-1\/2 { + --tw-translate-y: -50%; + transform: translate(var(--tw-translate-x), var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y)); +} + +.-translate-y-10 { + --tw-translate-y: -2.5rem; + transform: translate(var(--tw-translate-x), var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y)); +} + +.translate-x-0 { + --tw-translate-x: 0px; + transform: translate(var(--tw-translate-x), var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y)); +} + +.translate-x-7 { + --tw-translate-x: 1.75rem; + transform: translate(var(--tw-translate-x), var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y)); +} + +.translate-y-0 { + --tw-translate-y: 0px; + transform: translate(var(--tw-translate-x), var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y)); +} + +.translate-y-4 { + --tw-translate-y: 1rem; + transform: translate(var(--tw-translate-x), var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y)); +} + +.scale-100 { + --tw-scale-x: 1; + --tw-scale-y: 1; + transform: translate(var(--tw-translate-x), var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y)); +} + +.scale-95 { + --tw-scale-x: .95; + --tw-scale-y: .95; + transform: translate(var(--tw-translate-x), var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y)); +} + +.transform { + transform: translate(var(--tw-translate-x), var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y)); +} + +@keyframes float { + 0%, 100% { + transform: translateY(0); + } + + 50% { + transform: translateY(-10px); + } +} + +.animate-float { + animation: float 6s ease-in-out infinite; +} + +@keyframes pulse { + 50% { + opacity: .5; + } +} + +.animate-pulse { + animation: pulse 2s cubic-bezier(0.4, 0, 0.6, 1) infinite; +} + +@keyframes spin { + to { + transform: rotate(360deg); + } +} + +.animate-spin { + animation: spin 1s linear infinite; +} + +.resize { + resize: both; +} + +.list-disc { + list-style-type: disc; +} + +.grid-cols-1 { + grid-template-columns: repeat(1, minmax(0, 1fr)); +} + +.flex-col { + flex-direction: column; +} + +.flex-wrap { + flex-wrap: wrap; +} + +.items-start { + align-items: flex-start; +} + +.items-center { + align-items: center; +} + +.justify-start { + justify-content: flex-start; +} + +.justify-end { + justify-content: 
flex-end; +} + +.justify-center { + justify-content: center; +} + +.justify-between { + justify-content: space-between; +} + +.gap-1 { + gap: 0.25rem; +} + +.gap-2 { + gap: 0.5rem; +} + +.gap-3 { + gap: 0.75rem; +} + +.gap-4 { + gap: 1rem; +} + +.gap-6 { + gap: 1.5rem; +} + +.gap-8 { + gap: 2rem; +} + +.space-x-2 > :not([hidden]) ~ :not([hidden]) { + --tw-space-x-reverse: 0; + margin-right: calc(0.5rem * var(--tw-space-x-reverse)); + margin-left: calc(0.5rem * calc(1 - var(--tw-space-x-reverse))); +} + +.space-x-3 > :not([hidden]) ~ :not([hidden]) { + --tw-space-x-reverse: 0; + margin-right: calc(0.75rem * var(--tw-space-x-reverse)); + margin-left: calc(0.75rem * calc(1 - var(--tw-space-x-reverse))); +} + +.space-x-4 > :not([hidden]) ~ :not([hidden]) { + --tw-space-x-reverse: 0; + margin-right: calc(1rem * var(--tw-space-x-reverse)); + margin-left: calc(1rem * calc(1 - var(--tw-space-x-reverse))); +} + +.space-x-6 > :not([hidden]) ~ :not([hidden]) { + --tw-space-x-reverse: 0; + margin-right: calc(1.5rem * var(--tw-space-x-reverse)); + margin-left: calc(1.5rem * calc(1 - var(--tw-space-x-reverse))); +} + +.space-y-1 > :not([hidden]) ~ :not([hidden]) { + --tw-space-y-reverse: 0; + margin-top: calc(0.25rem * calc(1 - var(--tw-space-y-reverse))); + margin-bottom: calc(0.25rem * var(--tw-space-y-reverse)); +} + +.space-y-2 > :not([hidden]) ~ :not([hidden]) { + --tw-space-y-reverse: 0; + margin-top: calc(0.5rem * calc(1 - var(--tw-space-y-reverse))); + margin-bottom: calc(0.5rem * var(--tw-space-y-reverse)); +} + +.space-y-3 > :not([hidden]) ~ :not([hidden]) { + --tw-space-y-reverse: 0; + margin-top: calc(0.75rem * calc(1 - var(--tw-space-y-reverse))); + margin-bottom: calc(0.75rem * var(--tw-space-y-reverse)); +} + +.space-y-4 > :not([hidden]) ~ :not([hidden]) { + --tw-space-y-reverse: 0; + margin-top: calc(1rem * calc(1 - var(--tw-space-y-reverse))); + margin-bottom: calc(1rem * var(--tw-space-y-reverse)); +} + +.space-y-6 > :not([hidden]) ~ :not([hidden]) { + --tw-space-y-reverse: 0; + margin-top: calc(1.5rem * calc(1 - var(--tw-space-y-reverse))); + margin-bottom: calc(1.5rem * var(--tw-space-y-reverse)); +} + +.overflow-hidden { + overflow: hidden; +} + +.overflow-x-auto { + overflow-x: auto; +} + +.overflow-y-auto { + overflow-y: auto; +} + +.overflow-x-hidden { + overflow-x: hidden; +} + +.rounded { + border-radius: 0.25rem; +} + +.rounded-full { + border-radius: 9999px; +} + +.rounded-lg { + border-radius: 0.5rem; +} + +.rounded-md { + border-radius: 0.375rem; +} + +.rounded-l-lg { + border-top-left-radius: 0.5rem; + border-bottom-left-radius: 0.5rem; +} + +.rounded-r-lg { + border-top-right-radius: 0.5rem; + border-bottom-right-radius: 0.5rem; +} + +.border { + border-width: 1px; +} + +.border-b { + border-bottom-width: 1px; +} + +.border-b-2 { + border-bottom-width: 2px; +} + +.border-l-2 { + border-left-width: 2px; +} + +.border-l-4 { + border-left-width: 4px; +} + +.border-t { + border-top-width: 1px; +} + +.border-t-2 { + border-top-width: 2px; +} + +.border-dark-500 { + --tw-border-opacity: 1; + border-color: rgb(111 111 126 / var(--tw-border-opacity, 1)); +} + +.border-dark-600 { + --tw-border-opacity: 1; + border-color: rgb(91 91 105 / var(--tw-border-opacity, 1)); +} + +.border-gray-100 { + --tw-border-opacity: 1; + border-color: rgb(243 244 246 / var(--tw-border-opacity, 1)); +} + +.border-gray-200 { + --tw-border-opacity: 1; + border-color: rgb(229 231 235 / var(--tw-border-opacity, 1)); +} + +.border-gray-200\/20 { + border-color: rgb(229 231 235 / 0.2); +} + 
+.border-gray-300 { + --tw-border-opacity: 1; + border-color: rgb(209 213 219 / var(--tw-border-opacity, 1)); +} + +.border-gray-300\/20 { + border-color: rgb(209 213 219 / 0.2); +} + +.border-gray-300\/50 { + border-color: rgb(209 213 219 / 0.5); +} + +.border-gray-700 { + --tw-border-opacity: 1; + border-color: rgb(55 65 81 / var(--tw-border-opacity, 1)); +} + +.border-gray-800\/50 { + border-color: rgb(31 41 55 / 0.5); +} + +.border-primary-400 { + --tw-border-opacity: 1; + border-color: rgb(90 147 255 / var(--tw-border-opacity, 1)); +} + +.border-purple-500 { + --tw-border-opacity: 1; + border-color: rgb(168 85 247 / var(--tw-border-opacity, 1)); +} + +.border-slate-700 { + --tw-border-opacity: 1; + border-color: rgb(51 65 85 / var(--tw-border-opacity, 1)); +} + +.border-slate-700\/20 { + border-color: rgb(51 65 85 / 0.2); +} + +.bg-black\/50 { + background-color: rgb(0 0 0 / 0.5); +} + +.bg-blue-100 { + --tw-bg-opacity: 1; + background-color: rgb(219 234 254 / var(--tw-bg-opacity, 1)); +} + +.bg-blue-500\/20 { + background-color: rgb(59 130 246 / 0.2); +} + +.bg-dark-700 { + --tw-bg-opacity: 1; + background-color: rgb(73 73 79 / var(--tw-bg-opacity, 1)); +} + +.bg-dark-800 { + --tw-bg-opacity: 1; + background-color: rgb(44 44 51 / var(--tw-bg-opacity, 1)); +} + +.bg-dark-900 { + --tw-bg-opacity: 1; + background-color: rgb(24 24 28 / var(--tw-bg-opacity, 1)); +} + +.bg-gray-100 { + --tw-bg-opacity: 1; + background-color: rgb(243 244 246 / var(--tw-bg-opacity, 1)); +} + +.bg-gray-100\/20 { + background-color: rgb(243 244 246 / 0.2); +} + +.bg-gray-700 { + --tw-bg-opacity: 1; + background-color: rgb(55 65 81 / var(--tw-bg-opacity, 1)); +} + +.bg-green-100 { + --tw-bg-opacity: 1; + background-color: rgb(220 252 231 / var(--tw-bg-opacity, 1)); +} + +.bg-green-400 { + --tw-bg-opacity: 1; + background-color: rgb(74 222 128 / var(--tw-bg-opacity, 1)); +} + +.bg-green-500\/20 { + background-color: rgb(34 197 94 / 0.2); +} + +.bg-primary-100 { + --tw-bg-opacity: 1; + background-color: rgb(217 231 255 / var(--tw-bg-opacity, 1)); +} + +.bg-primary-400 { + --tw-bg-opacity: 1; + background-color: rgb(90 147 255 / var(--tw-bg-opacity, 1)); +} + +.bg-primary-500 { + --tw-bg-opacity: 1; + background-color: rgb(41 112 255 / var(--tw-bg-opacity, 1)); +} + +.bg-primary-600 { + --tw-bg-opacity: 1; + background-color: rgb(22 84 246 / var(--tw-bg-opacity, 1)); +} + +.bg-purple-500 { + --tw-bg-opacity: 1; + background-color: rgb(168 85 247 / var(--tw-bg-opacity, 1)); +} + +.bg-purple-500\/20 { + background-color: rgb(168 85 247 / 0.2); +} + +.bg-purple-600 { + --tw-bg-opacity: 1; + background-color: rgb(147 51 234 / var(--tw-bg-opacity, 1)); +} + +.bg-purple-900\/20 { + background-color: rgb(88 28 135 / 0.2); +} + +.bg-purple-900\/30 { + background-color: rgb(88 28 135 / 0.3); +} + +.bg-red-500\/20 { + background-color: rgb(239 68 68 / 0.2); +} + +.bg-secondary-400 { + --tw-bg-opacity: 1; + background-color: rgb(166 133 255 / var(--tw-bg-opacity, 1)); +} + +.bg-secondary-700 { + --tw-bg-opacity: 1; + background-color: rgb(105 36 226 / var(--tw-bg-opacity, 1)); +} + +.bg-slate-700 { + --tw-bg-opacity: 1; + background-color: rgb(51 65 85 / var(--tw-bg-opacity, 1)); +} + +.bg-slate-800\/80 { + background-color: rgb(30 41 59 / 0.8); +} + +.bg-slate-900\/50 { + background-color: rgb(15 23 42 / 0.5); +} + +.bg-white { + --tw-bg-opacity: 1; + background-color: rgb(255 255 255 / var(--tw-bg-opacity, 1)); +} + +.bg-white\/10 { + background-color: rgb(255 255 255 / 0.1); +} + +.bg-opacity-30 { + --tw-bg-opacity: 0.3; 
+} + +.bg-gradient-to-br { + background-image: linear-gradient(to bottom right, var(--tw-gradient-stops)); +} + +.bg-gradient-to-r { + background-image: linear-gradient(to right, var(--tw-gradient-stops)); +} + +.from-purple-500 { + --tw-gradient-from: #a855f7 var(--tw-gradient-from-position); + --tw-gradient-to: rgb(168 85 247 / 0) var(--tw-gradient-to-position); + --tw-gradient-stops: var(--tw-gradient-from), var(--tw-gradient-to); +} + +.from-purple-600 { + --tw-gradient-from: #9333ea var(--tw-gradient-from-position); + --tw-gradient-to: rgb(147 51 234 / 0) var(--tw-gradient-to-position); + --tw-gradient-stops: var(--tw-gradient-from), var(--tw-gradient-to); +} + +.from-slate-900\/80 { + --tw-gradient-from: rgb(15 23 42 / 0.8) var(--tw-gradient-from-position); + --tw-gradient-to: rgb(15 23 42 / 0) var(--tw-gradient-to-position); + --tw-gradient-stops: var(--tw-gradient-from), var(--tw-gradient-to); +} + +.to-blue-500 { + --tw-gradient-to: #3b82f6 var(--tw-gradient-to-position); +} + +.to-blue-600 { + --tw-gradient-to: #2563eb var(--tw-gradient-to-position); +} + +.to-slate-800\/60 { + --tw-gradient-to: rgb(30 41 59 / 0.6) var(--tw-gradient-to-position); +} + +.p-12 { + padding: 3rem; +} + +.p-2 { + padding: 0.5rem; +} + +.p-2\.5 { + padding: 0.625rem; +} + +.p-3 { + padding: 0.75rem; +} + +.p-4 { + padding: 1rem; +} + +.p-6 { + padding: 1.5rem; +} + +.p-8 { + padding: 2rem; +} + +.px-1 { + padding-left: 0.25rem; + padding-right: 0.25rem; +} + +.px-2 { + padding-left: 0.5rem; + padding-right: 0.5rem; +} + +.px-3 { + padding-left: 0.75rem; + padding-right: 0.75rem; +} + +.px-4 { + padding-left: 1rem; + padding-right: 1rem; +} + +.px-8 { + padding-left: 2rem; + padding-right: 2rem; +} + +.py-1 { + padding-top: 0.25rem; + padding-bottom: 0.25rem; +} + +.py-10 { + padding-top: 2.5rem; + padding-bottom: 2.5rem; +} + +.py-12 { + padding-top: 3rem; + padding-bottom: 3rem; +} + +.py-16 { + padding-top: 4rem; + padding-bottom: 4rem; +} + +.py-2 { + padding-top: 0.5rem; + padding-bottom: 0.5rem; +} + +.py-20 { + padding-top: 5rem; + padding-bottom: 5rem; +} + +.py-3 { + padding-top: 0.75rem; + padding-bottom: 0.75rem; +} + +.py-4 { + padding-top: 1rem; + padding-bottom: 1rem; +} + +.py-5 { + padding-top: 1.25rem; + padding-bottom: 1.25rem; +} + +.py-8 { + padding-top: 2rem; + padding-bottom: 2rem; +} + +.pb-10 { + padding-bottom: 2.5rem; +} + +.pb-32 { + padding-bottom: 8rem; +} + +.pl-10 { + padding-left: 2.5rem; +} + +.pl-3 { + padding-left: 0.75rem; +} + +.pl-6 { + padding-left: 1.5rem; +} + +.pr-4 { + padding-right: 1rem; +} + +.pr-9 { + padding-right: 2.25rem; +} + +.pt-16 { + padding-top: 4rem; +} + +.pt-2 { + padding-top: 0.5rem; +} + +.pt-20 { + padding-top: 5rem; +} + +.pt-4 { + padding-top: 1rem; +} + +.pt-6 { + padding-top: 1.5rem; +} + +.text-left { + text-align: left; +} + +.text-center { + text-align: center; +} + +.font-mono { + font-family: JetBrains Mono, ui-monospace, SFMono-Regular, monospace; +} + +.text-2xl { + font-size: 1.5rem; + line-height: 2rem; +} + +.text-3xl { + font-size: 1.875rem; + line-height: 2.25rem; +} + +.text-4xl { + font-size: 2.25rem; + line-height: 2.5rem; +} + +.text-5xl { + font-size: 3rem; + line-height: 1; +} + +.text-6xl { + font-size: 3.75rem; + line-height: 1; +} + +.text-lg { + font-size: 1.125rem; + line-height: 1.75rem; +} + +.text-sm { + font-size: 0.875rem; + line-height: 1.25rem; +} + +.text-xl { + font-size: 1.25rem; + line-height: 1.75rem; +} + +.text-xs { + font-size: 0.75rem; + line-height: 1rem; +} + +.font-bold { + font-weight: 700; +} + 
+.font-medium { + font-weight: 500; +} + +.font-semibold { + font-weight: 600; +} + +.uppercase { + text-transform: uppercase; +} + +.tracking-tight { + letter-spacing: -0.025em; +} + +.tracking-wider { + letter-spacing: 0.05em; +} + +.text-amber-400 { + --tw-text-opacity: 1; + color: rgb(251 191 36 / var(--tw-text-opacity, 1)); +} + +.text-blue-400 { + --tw-text-opacity: 1; + color: rgb(96 165 250 / var(--tw-text-opacity, 1)); +} + +.text-blue-500 { + --tw-text-opacity: 1; + color: rgb(59 130 246 / var(--tw-text-opacity, 1)); +} + +.text-blue-600 { + --tw-text-opacity: 1; + color: rgb(37 99 235 / var(--tw-text-opacity, 1)); +} + +.text-blue-800 { + --tw-text-opacity: 1; + color: rgb(30 64 175 / var(--tw-text-opacity, 1)); +} + +.text-gray-200 { + --tw-text-opacity: 1; + color: rgb(229 231 235 / var(--tw-text-opacity, 1)); +} + +.text-gray-300 { + --tw-text-opacity: 1; + color: rgb(209 213 219 / var(--tw-text-opacity, 1)); +} + +.text-gray-400 { + --tw-text-opacity: 1; + color: rgb(156 163 175 / var(--tw-text-opacity, 1)); +} + +.text-gray-500 { + --tw-text-opacity: 1; + color: rgb(107 114 128 / var(--tw-text-opacity, 1)); +} + +.text-gray-600 { + --tw-text-opacity: 1; + color: rgb(75 85 99 / var(--tw-text-opacity, 1)); +} + +.text-gray-700 { + --tw-text-opacity: 1; + color: rgb(55 65 81 / var(--tw-text-opacity, 1)); +} + +.text-gray-800 { + --tw-text-opacity: 1; + color: rgb(31 41 55 / var(--tw-text-opacity, 1)); +} + +.text-gray-900 { + --tw-text-opacity: 1; + color: rgb(17 24 39 / var(--tw-text-opacity, 1)); +} + +.text-green-400 { + --tw-text-opacity: 1; + color: rgb(74 222 128 / var(--tw-text-opacity, 1)); +} + +.text-green-500 { + --tw-text-opacity: 1; + color: rgb(34 197 94 / var(--tw-text-opacity, 1)); +} + +.text-green-800 { + --tw-text-opacity: 1; + color: rgb(22 101 52 / var(--tw-text-opacity, 1)); +} + +.text-primary-400 { + --tw-text-opacity: 1; + color: rgb(90 147 255 / var(--tw-text-opacity, 1)); +} + +.text-primary-600 { + --tw-text-opacity: 1; + color: rgb(22 84 246 / var(--tw-text-opacity, 1)); +} + +.text-purple-300 { + --tw-text-opacity: 1; + color: rgb(216 180 254 / var(--tw-text-opacity, 1)); +} + +.text-purple-400 { + --tw-text-opacity: 1; + color: rgb(192 132 252 / var(--tw-text-opacity, 1)); +} + +.text-purple-500 { + --tw-text-opacity: 1; + color: rgb(168 85 247 / var(--tw-text-opacity, 1)); +} + +.text-red-400 { + --tw-text-opacity: 1; + color: rgb(248 113 113 / var(--tw-text-opacity, 1)); +} + +.text-red-500 { + --tw-text-opacity: 1; + color: rgb(239 68 68 / var(--tw-text-opacity, 1)); +} + +.text-red-600 { + --tw-text-opacity: 1; + color: rgb(220 38 38 / var(--tw-text-opacity, 1)); +} + +.text-secondary-400 { + --tw-text-opacity: 1; + color: rgb(166 133 255 / var(--tw-text-opacity, 1)); +} + +.text-white { + --tw-text-opacity: 1; + color: rgb(255 255 255 / var(--tw-text-opacity, 1)); +} + +.text-yellow-400 { + --tw-text-opacity: 1; + color: rgb(250 204 21 / var(--tw-text-opacity, 1)); +} + +.text-yellow-500 { + --tw-text-opacity: 1; + color: rgb(234 179 8 / var(--tw-text-opacity, 1)); +} + +.opacity-0 { + opacity: 0; +} + +.opacity-100 { + opacity: 1; +} + +.opacity-75 { + opacity: 0.75; +} + +.shadow-lg { + --tw-shadow: 0 10px 15px -3px rgb(0 0 0 / 0.1), 0 4px 6px -4px rgb(0 0 0 / 0.1); + --tw-shadow-colored: 0 10px 15px -3px var(--tw-shadow-color), 0 4px 6px -4px var(--tw-shadow-color); + box-shadow: var(--tw-ring-offset-shadow, 0 0 #0000), var(--tw-ring-shadow, 0 0 #0000), var(--tw-shadow); +} + +.shadow-md { + --tw-shadow: 0 4px 6px -1px rgb(0 0 0 / 0.1), 0 
2px 4px -2px rgb(0 0 0 / 0.1); + --tw-shadow-colored: 0 4px 6px -1px var(--tw-shadow-color), 0 2px 4px -2px var(--tw-shadow-color); + box-shadow: var(--tw-ring-offset-shadow, 0 0 #0000), var(--tw-ring-shadow, 0 0 #0000), var(--tw-shadow); +} + +.shadow-xl { + --tw-shadow: 0 20px 25px -5px rgb(0 0 0 / 0.1), 0 8px 10px -6px rgb(0 0 0 / 0.1); + --tw-shadow-colored: 0 20px 25px -5px var(--tw-shadow-color), 0 8px 10px -6px var(--tw-shadow-color); + box-shadow: var(--tw-ring-offset-shadow, 0 0 #0000), var(--tw-ring-shadow, 0 0 #0000), var(--tw-shadow); +} + +.outline { + outline-style: solid; +} + +.ring-1 { + --tw-ring-offset-shadow: var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color); + --tw-ring-shadow: var(--tw-ring-inset) 0 0 0 calc(1px + var(--tw-ring-offset-width)) var(--tw-ring-color); + box-shadow: var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow, 0 0 #0000); +} + +.ring-black { + --tw-ring-opacity: 1; + --tw-ring-color: rgb(0 0 0 / var(--tw-ring-opacity, 1)); +} + +.ring-opacity-5 { + --tw-ring-opacity: 0.05; +} + +.blur { + --tw-blur: blur(8px); + filter: var(--tw-blur) var(--tw-brightness) var(--tw-contrast) var(--tw-grayscale) var(--tw-hue-rotate) var(--tw-invert) var(--tw-saturate) var(--tw-sepia) var(--tw-drop-shadow); +} + +.drop-shadow { + --tw-drop-shadow: drop-shadow(0 1px 2px rgb(0 0 0 / 0.1)) drop-shadow(0 1px 1px rgb(0 0 0 / 0.06)); + filter: var(--tw-blur) var(--tw-brightness) var(--tw-contrast) var(--tw-grayscale) var(--tw-hue-rotate) var(--tw-invert) var(--tw-saturate) var(--tw-sepia) var(--tw-drop-shadow); +} + +.filter { + filter: var(--tw-blur) var(--tw-brightness) var(--tw-contrast) var(--tw-grayscale) var(--tw-hue-rotate) var(--tw-invert) var(--tw-saturate) var(--tw-sepia) var(--tw-drop-shadow); +} + +.backdrop-blur-sm { + --tw-backdrop-blur: blur(4px); + -webkit-backdrop-filter: var(--tw-backdrop-blur) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) var(--tw-backdrop-sepia); + backdrop-filter: var(--tw-backdrop-blur) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) var(--tw-backdrop-sepia); +} + +.backdrop-filter { + -webkit-backdrop-filter: var(--tw-backdrop-blur) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) var(--tw-backdrop-sepia); + backdrop-filter: var(--tw-backdrop-blur) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) var(--tw-backdrop-sepia); +} + +.transition { + transition-property: color, background-color, border-color, text-decoration-color, fill, stroke, opacity, box-shadow, transform, filter, -webkit-backdrop-filter; + transition-property: color, background-color, border-color, text-decoration-color, fill, stroke, opacity, box-shadow, transform, filter, backdrop-filter; + transition-property: color, background-color, border-color, text-decoration-color, fill, stroke, opacity, box-shadow, transform, filter, backdrop-filter, -webkit-backdrop-filter; + transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1); + 
transition-duration: 150ms; +} + +.transition-all { + transition-property: all; + transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1); + transition-duration: 150ms; +} + +.transition-colors { + transition-property: color, background-color, border-color, text-decoration-color, fill, stroke; + transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1); + transition-duration: 150ms; +} + +.transition-transform { + transition-property: transform; + transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1); + transition-duration: 150ms; +} + +.duration-100 { + transition-duration: 100ms; +} + +.duration-150 { + transition-duration: 150ms; +} + +.duration-200 { + transition-duration: 200ms; +} + +.duration-300 { + transition-duration: 300ms; +} + +.duration-75 { + transition-duration: 75ms; +} + +.ease-in { + transition-timing-function: cubic-bezier(0.4, 0, 1, 1); +} + +.ease-in-out { + transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1); +} + +.ease-out { + transition-timing-function: cubic-bezier(0, 0, 0.2, 1); +} + +.glass-effect { + border-width: 1px; + --tw-border-opacity: 1; + border-color: rgb(229 231 235 / var(--tw-border-opacity, 1)); + background-color: rgb(255 255 255 / 0.95); + --tw-shadow: 0 4px 6px -1px rgb(0 0 0 / 0.1), 0 2px 4px -2px rgb(0 0 0 / 0.1); + --tw-shadow-colored: 0 4px 6px -1px var(--tw-shadow-color), 0 2px 4px -2px var(--tw-shadow-color); + box-shadow: var(--tw-ring-offset-shadow, 0 0 #0000), var(--tw-ring-shadow, 0 0 #0000), var(--tw-shadow); + --tw-backdrop-blur: blur(12px); + -webkit-backdrop-filter: var(--tw-backdrop-blur) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) var(--tw-backdrop-sepia); + backdrop-filter: var(--tw-backdrop-blur) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) var(--tw-backdrop-sepia); +} + +.glass-effect:is(.dark *) { + border-color: rgb(73 73 79 / 0.5); + background-color: rgb(44 44 51 / 0.9); + --tw-shadow: 0 20px 25px -5px rgb(0 0 0 / 0.1), 0 8px 10px -6px rgb(0 0 0 / 0.1); + --tw-shadow-colored: 0 20px 25px -5px var(--tw-shadow-color), 0 8px 10px -6px var(--tw-shadow-color); + box-shadow: var(--tw-ring-offset-shadow, 0 0 #0000), var(--tw-ring-shadow, 0 0 #0000), var(--tw-shadow); +} + +/* Form validation styles */ + +.is-valid { + --tw-border-opacity: 1; + border-color: rgb(34 197 94 / var(--tw-border-opacity, 1)); +} + +.is-valid:is(.dark *) { + --tw-border-opacity: 1; + border-color: rgb(74 222 128 / var(--tw-border-opacity, 1)); +} + +.is-invalid { + --tw-border-opacity: 1; + border-color: rgb(239 68 68 / var(--tw-border-opacity, 1)); +} + +.is-invalid:is(.dark *) { + --tw-border-opacity: 1; + border-color: rgb(248 113 113 / var(--tw-border-opacity, 1)); +} + +.is-valid:focus { + --tw-border-opacity: 1; + border-color: rgb(34 197 94 / var(--tw-border-opacity, 1)); + --tw-ring-color: rgb(34 197 94 / 0.3); +} + +.is-valid:focus:is(.dark *) { + --tw-border-opacity: 1; + border-color: rgb(74 222 128 / var(--tw-border-opacity, 1)); + --tw-ring-color: rgb(74 222 128 / 0.3); +} + +.is-invalid:focus { + --tw-border-opacity: 1; + border-color: rgb(239 68 68 / var(--tw-border-opacity, 1)); + --tw-ring-color: rgb(239 68 68 / 0.3); +} + +.is-invalid:focus:is(.dark *) { + --tw-border-opacity: 1; + border-color: rgb(248 113 113 / 
var(--tw-border-opacity, 1)); + --tw-ring-color: rgb(248 113 113 / 0.3); +} + +.form-hint { + margin-top: 0.25rem; + font-size: 0.75rem; + line-height: 1rem; + --tw-text-opacity: 1; + color: rgb(107 114 128 / var(--tw-text-opacity, 1)); +} + +.form-hint:is(.dark *) { + --tw-text-opacity: 1; + color: rgb(156 163 175 / var(--tw-text-opacity, 1)); +} + +.form-label { + margin-bottom: 0.25rem; + display: block; + font-size: 0.875rem; + line-height: 1.25rem; + font-weight: 500; + --tw-text-opacity: 1; + color: rgb(55 65 81 / var(--tw-text-opacity, 1)); +} + +.form-label:is(.dark *) { + --tw-text-opacity: 1; + color: rgb(209 213 219 / var(--tw-text-opacity, 1)); +} + +.input-icon { + pointer-events: none; + position: absolute; + top: 0px; + bottom: 0px; + left: 0px; + display: flex; + align-items: center; + padding-left: 0.75rem; + --tw-text-opacity: 1; + color: rgb(107 114 128 / var(--tw-text-opacity, 1)); +} + +.input-icon:is(.dark *) { + --tw-text-opacity: 1; + color: rgb(156 163 175 / var(--tw-text-opacity, 1)); +} + +.input-with-icon { + padding-left: 2.5rem; +} + +.dark\:prose-invert:is(.dark *) { + --tw-prose-body: var(--tw-prose-invert-body); + --tw-prose-headings: var(--tw-prose-invert-headings); + --tw-prose-lead: var(--tw-prose-invert-lead); + --tw-prose-links: var(--tw-prose-invert-links); + --tw-prose-bold: var(--tw-prose-invert-bold); + --tw-prose-counters: var(--tw-prose-invert-counters); + --tw-prose-bullets: var(--tw-prose-invert-bullets); + --tw-prose-hr: var(--tw-prose-invert-hr); + --tw-prose-quotes: var(--tw-prose-invert-quotes); + --tw-prose-quote-borders: var(--tw-prose-invert-quote-borders); + --tw-prose-captions: var(--tw-prose-invert-captions); + --tw-prose-kbd: var(--tw-prose-invert-kbd); + --tw-prose-kbd-shadows: var(--tw-prose-invert-kbd-shadows); + --tw-prose-code: var(--tw-prose-invert-code); + --tw-prose-pre-code: var(--tw-prose-invert-pre-code); + --tw-prose-pre-bg: var(--tw-prose-invert-pre-bg); + --tw-prose-th-borders: var(--tw-prose-invert-th-borders); + --tw-prose-td-borders: var(--tw-prose-invert-td-borders); +} + +.hover\:-translate-y-0\.5:hover { + --tw-translate-y: -0.125rem; + transform: translate(var(--tw-translate-x), var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y)); +} + +.hover\:-translate-y-1:hover { + --tw-translate-y: -0.25rem; + transform: translate(var(--tw-translate-x), var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y)); +} + +.hover\:bg-dark-600:hover { + --tw-bg-opacity: 1; + background-color: rgb(91 91 105 / var(--tw-bg-opacity, 1)); +} + +.hover\:bg-gray-100:hover { + --tw-bg-opacity: 1; + background-color: rgb(243 244 246 / var(--tw-bg-opacity, 1)); +} + +.hover\:bg-gray-200:hover { + --tw-bg-opacity: 1; + background-color: rgb(229 231 235 / var(--tw-bg-opacity, 1)); +} + +.hover\:bg-gray-200\/50:hover { + background-color: rgb(229 231 235 / 0.5); +} + +.hover\:bg-gray-50:hover { + --tw-bg-opacity: 1; + background-color: rgb(249 250 251 / var(--tw-bg-opacity, 1)); +} + +.hover\:bg-gray-800\/50:hover { + background-color: rgb(31 41 55 / 0.5); +} + +.hover\:bg-primary-700:hover { + --tw-bg-opacity: 1; + background-color: rgb(17 66 226 / var(--tw-bg-opacity, 1)); +} + +.hover\:bg-white\/10:hover { + background-color: rgb(255 255 255 / 0.1); +} + +.hover\:bg-white\/5:hover { + background-color: rgb(255 255 255 / 0.05); +} + 
+.hover\:from-purple-700:hover { + --tw-gradient-from: #7e22ce var(--tw-gradient-from-position); + --tw-gradient-to: rgb(126 34 206 / 0) var(--tw-gradient-to-position); + --tw-gradient-stops: var(--tw-gradient-from), var(--tw-gradient-to); +} + +.hover\:to-blue-700:hover { + --tw-gradient-to: #1d4ed8 var(--tw-gradient-to-position); +} + +.hover\:text-blue-800:hover { + --tw-text-opacity: 1; + color: rgb(30 64 175 / var(--tw-text-opacity, 1)); +} + +.hover\:text-gray-200:hover { + --tw-text-opacity: 1; + color: rgb(229 231 235 / var(--tw-text-opacity, 1)); +} + +.hover\:text-gray-900:hover { + --tw-text-opacity: 1; + color: rgb(17 24 39 / var(--tw-text-opacity, 1)); +} + +.hover\:text-purple-300:hover { + --tw-text-opacity: 1; + color: rgb(216 180 254 / var(--tw-text-opacity, 1)); +} + +.hover\:text-red-800:hover { + --tw-text-opacity: 1; + color: rgb(153 27 27 / var(--tw-text-opacity, 1)); +} + +.hover\:text-white:hover { + --tw-text-opacity: 1; + color: rgb(255 255 255 / var(--tw-text-opacity, 1)); +} + +.hover\:shadow-lg:hover { + --tw-shadow: 0 10px 15px -3px rgb(0 0 0 / 0.1), 0 4px 6px -4px rgb(0 0 0 / 0.1); + --tw-shadow-colored: 0 10px 15px -3px var(--tw-shadow-color), 0 4px 6px -4px var(--tw-shadow-color); + box-shadow: var(--tw-ring-offset-shadow, 0 0 #0000), var(--tw-ring-shadow, 0 0 #0000), var(--tw-shadow); +} + +.focus\:border-transparent:focus { + border-color: transparent; +} + +.focus\:outline-none:focus { + outline: 2px solid transparent; + outline-offset: 2px; +} + +.focus\:ring-1:focus { + --tw-ring-offset-shadow: var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color); + --tw-ring-shadow: var(--tw-ring-inset) 0 0 0 calc(1px + var(--tw-ring-offset-width)) var(--tw-ring-color); + box-shadow: var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow, 0 0 #0000); +} + +.focus\:ring-2:focus { + --tw-ring-offset-shadow: var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color); + --tw-ring-shadow: var(--tw-ring-inset) 0 0 0 calc(2px + var(--tw-ring-offset-width)) var(--tw-ring-color); + box-shadow: var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow, 0 0 #0000); +} + +.focus\:ring-primary-500:focus { + --tw-ring-opacity: 1; + --tw-ring-color: rgb(41 112 255 / var(--tw-ring-opacity, 1)); +} + +.focus\:ring-purple-500:focus { + --tw-ring-opacity: 1; + --tw-ring-color: rgb(168 85 247 / var(--tw-ring-opacity, 1)); +} + +.dark\:inline:is(.dark *) { + display: inline; +} + +.dark\:hidden:is(.dark *) { + display: none; +} + +.dark\:border-dark-600:is(.dark *) { + --tw-border-opacity: 1; + border-color: rgb(91 91 105 / var(--tw-border-opacity, 1)); +} + +.dark\:border-gray-700:is(.dark *) { + --tw-border-opacity: 1; + border-color: rgb(55 65 81 / var(--tw-border-opacity, 1)); +} + +.dark\:border-gray-700\/20:is(.dark *) { + border-color: rgb(55 65 81 / 0.2); +} + +.dark\:border-gray-700\/30:is(.dark *) { + border-color: rgb(55 65 81 / 0.3); +} + +.dark\:border-gray-800:is(.dark *) { + --tw-border-opacity: 1; + border-color: rgb(31 41 55 / var(--tw-border-opacity, 1)); +} + +.dark\:bg-blue-900\/30:is(.dark *) { + background-color: rgb(30 58 138 / 0.3); +} + +.dark\:bg-dark-700:is(.dark *) { + --tw-bg-opacity: 1; + background-color: rgb(73 73 79 / var(--tw-bg-opacity, 1)); +} + +.dark\:bg-dark-700\/20:is(.dark *) { + background-color: rgb(73 73 79 / 0.2); +} + +.dark\:bg-dark-800:is(.dark *) { + --tw-bg-opacity: 1; + background-color: rgb(44 44 51 / var(--tw-bg-opacity, 1)); +} + 
+.dark\:bg-green-900\/30:is(.dark *) { + background-color: rgb(20 83 45 / 0.3); +} + +.dark\:bg-primary-900:is(.dark *) { + --tw-bg-opacity: 1; + background-color: rgb(21 51 144 / var(--tw-bg-opacity, 1)); +} + +.dark\:text-blue-300:is(.dark *) { + --tw-text-opacity: 1; + color: rgb(147 197 253 / var(--tw-text-opacity, 1)); +} + +.dark\:text-blue-400:is(.dark *) { + --tw-text-opacity: 1; + color: rgb(96 165 250 / var(--tw-text-opacity, 1)); +} + +.dark\:text-gray-200:is(.dark *) { + --tw-text-opacity: 1; + color: rgb(229 231 235 / var(--tw-text-opacity, 1)); +} + +.dark\:text-gray-300:is(.dark *) { + --tw-text-opacity: 1; + color: rgb(209 213 219 / var(--tw-text-opacity, 1)); +} + +.dark\:text-gray-400:is(.dark *) { + --tw-text-opacity: 1; + color: rgb(156 163 175 / var(--tw-text-opacity, 1)); +} + +.dark\:text-gray-500:is(.dark *) { + --tw-text-opacity: 1; + color: rgb(107 114 128 / var(--tw-text-opacity, 1)); +} + +.dark\:text-green-300:is(.dark *) { + --tw-text-opacity: 1; + color: rgb(134 239 172 / var(--tw-text-opacity, 1)); +} + +.dark\:text-primary-400:is(.dark *) { + --tw-text-opacity: 1; + color: rgb(90 147 255 / var(--tw-text-opacity, 1)); +} + +.dark\:text-red-400:is(.dark *) { + --tw-text-opacity: 1; + color: rgb(248 113 113 / var(--tw-text-opacity, 1)); +} + +.dark\:text-white:is(.dark *) { + --tw-text-opacity: 1; + color: rgb(255 255 255 / var(--tw-text-opacity, 1)); +} + +.dark\:hover\:bg-dark-600:hover:is(.dark *) { + --tw-bg-opacity: 1; + background-color: rgb(91 91 105 / var(--tw-bg-opacity, 1)); +} + +.dark\:hover\:bg-dark-700\/30:hover:is(.dark *) { + background-color: rgb(73 73 79 / 0.3); +} + +.dark\:hover\:bg-white\/5:hover:is(.dark *) { + background-color: rgb(255 255 255 / 0.05); +} + +.dark\:hover\:text-blue-300:hover:is(.dark *) { + --tw-text-opacity: 1; + color: rgb(147 197 253 / var(--tw-text-opacity, 1)); +} + +.dark\:hover\:text-red-300:hover:is(.dark *) { + --tw-text-opacity: 1; + color: rgb(252 165 165 / var(--tw-text-opacity, 1)); +} + +@media (min-width: 640px) { + .sm\:h-96 { + height: 24rem; + } + + .sm\:w-96 { + width: 24rem; + } + + .sm\:flex-row { + flex-direction: row; + } + + .sm\:px-6 { + padding-left: 1.5rem; + padding-right: 1.5rem; + } +} + +@media (min-width: 768px) { + .md\:mb-0 { + margin-bottom: 0px; + } + + .md\:block { + display: block; + } + + .md\:flex { + display: flex; + } + + .md\:w-1\/3 { + width: 33.333333%; + } + + .md\:w-2\/3 { + width: 66.666667%; + } + + .md\:grid-cols-2 { + grid-template-columns: repeat(2, minmax(0, 1fr)); + } + + .md\:grid-cols-3 { + grid-template-columns: repeat(3, minmax(0, 1fr)); + } + + .md\:flex-row { + flex-direction: row; + } + + .md\:items-start { + align-items: flex-start; + } + + .md\:items-center { + align-items: center; + } + + .md\:justify-start { + justify-content: flex-start; + } + + .md\:justify-between { + justify-content: space-between; + } + + .md\:p-12 { + padding: 3rem; + } + + .md\:p-8 { + padding: 2rem; + } + + .md\:text-left { + text-align: left; + } + + .md\:text-right { + text-align: right; + } + + .md\:text-2xl { + font-size: 1.5rem; + line-height: 2rem; + } + + .md\:text-4xl { + font-size: 2.25rem; + line-height: 2.5rem; + } + + .md\:text-5xl { + font-size: 3rem; + line-height: 1; + } + + .md\:text-7xl { + font-size: 4.5rem; + line-height: 1; + } +} + +@media (min-width: 1024px) { + .lg\:order-1 { + order: 1; + } + + .lg\:order-2 { + order: 2; + } + + .lg\:col-span-1 { + grid-column: span 1 / span 1; + } + + .lg\:col-span-2 { + grid-column: span 2 / span 2; + } + + .lg\:grid-cols-3 
{ + grid-template-columns: repeat(3, minmax(0, 1fr)); + } + + .lg\:grid-cols-4 { + grid-template-columns: repeat(4, minmax(0, 1fr)); + } + + .lg\:px-8 { + padding-left: 2rem; + padding-right: 2rem; + } + + .lg\:text-8xl { + font-size: 6rem; + line-height: 1; + } +} \ No newline at end of file diff --git a/static/css/neural-network-background.css b/static/css/neural-network-background.css new file mode 100644 index 0000000..f5c6b76 --- /dev/null +++ b/static/css/neural-network-background.css @@ -0,0 +1,106 @@ +/* Neural Network Background CSS */ + +/* Make sure the neural network background is always visible */ +#neural-network-background { + position: fixed !important; + top: 0 !important; + left: 0 !important; + width: 100% !important; + height: 100% !important; + z-index: -10 !important; /* Below content but above regular background */ + pointer-events: none !important; + opacity: 1 !important; +} + +/* Override any solid background colors for the body */ +body, body.dark { + background-color: transparent !important; +} + +/* Make sure any background color is removed */ +html.dark, html { + background-color: transparent !important; +} + +/* Make sure any fixed backgrounds are removed */ +#app-container { + background-color: transparent !important; +} + +/* Ensure content is properly visible over the background */ +.glass-morphism { + background-color: rgba(17, 24, 39, 0.6) !important; + backdrop-filter: blur(5px) !important; +} + +/* Dark Mode - Navbar */ +body.dark .glass-navbar-dark { + background-color: rgba(10, 14, 25, 0.7) !important; +} + +/* Light Mode - Verbesserter Navbar */ +body .glass-navbar-light { + background-color: rgba(255, 255, 255, 0.92) !important; + backdrop-filter: blur(10px) !important; + box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.1), 0 2px 4px -1px rgba(0, 0, 0, 0.06) !important; + border-bottom: 1px solid rgba(220, 220, 220, 0.5) !important; +} + +/* Light Mode - Verbesserte Lesbarkeit für Navbar-Elemente */ +body:not(.dark) .navbar-link, +body:not(.dark) .navbar-item { + color: #1e3a8a !important; /* Dunkles Blau für bessere Lesbarkeit */ +} + +body:not(.dark) .navbar-link:hover, +body:not(.dark) .navbar-item:hover { + color: #4f46e5 !important; /* Helles Lila beim Hover */ + background-color: rgba(240, 245, 255, 0.9) !important; +} + +/* Light Mode - Buttons verbessert */ +body:not(.dark) .btn, +body:not(.dark) button { + background-color: #3b82f6 !important; /* Klares Blau statt Grau */ + color: white !important; + border: none !important; + box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1) !important; +} + +body:not(.dark) .btn:hover, +body:not(.dark) button:hover { + background-color: #4f46e5 !important; /* Lila beim Hover */ + box-shadow: 0 4px 6px rgba(0, 0, 0, 0.12) !important; +} + +/* Verbesserte Karten im Light Mode */ +body:not(.dark) .card, +body:not(.dark) .panel { + background-color: rgba(255, 255, 255, 0.92) !important; + border: 1px solid rgba(220, 220, 220, 0.8) !important; + box-shadow: 0 4px 6px rgba(0, 0, 0, 0.05) !important; +} + +/* Verbesserte Lesbarkeit für Text im Light Mode */ +body:not(.dark) { + color: #1e293b !important; /* Dunkles Blau-Grau statt Schwarz */ +} + +body:not(.dark) h1, +body:not(.dark) h2, +body:not(.dark) h3, +body:not(.dark) h4, +body:not(.dark) h5, +body:not(.dark) h6 { + color: #0f172a !important; /* Fast schwarz für Überschriften */ +} + +/* Make sure footer has proper transparency and styling */ +body.dark footer { + background-color: rgba(10, 14, 25, 0.7) !important; +} + +body:not(.dark) footer { + background-color: 
rgba(249, 250, 251, 0.92) !important; + border-top: 1px solid rgba(220, 220, 220, 0.8) !important; +} \ No newline at end of file diff --git a/static/css/src/input.css b/static/css/src/input.css new file mode 100644 index 0000000..f150dec --- /dev/null +++ b/static/css/src/input.css @@ -0,0 +1,207 @@ +@tailwind base; +@tailwind components; +@tailwind utilities; + +@layer base { + html { + @apply scroll-smooth; + } + + body { + @apply bg-gray-50 text-gray-800 dark:bg-dark-800 dark:text-gray-100 font-sans; + } + + h1, h2, h3, h4, h5, h6 { + @apply font-bold text-gray-900 dark:text-white; + } + + h1 { + @apply text-4xl md:text-5xl lg:text-6xl; + } + + h2 { + @apply text-3xl md:text-4xl; + } + + h3 { + @apply text-2xl md:text-3xl; + } + + h4 { + @apply text-xl md:text-2xl; + } + + a { + @apply text-primary-600 hover:text-primary-700 dark:text-primary-400 dark:hover:text-primary-300 transition-colors; + } + + input, textarea, select { + @apply bg-white border border-gray-300 rounded-md px-3 py-2 focus:outline-none focus:ring-2 focus:ring-primary-500 focus:border-primary-500 dark:bg-dark-700 dark:border-dark-500 dark:focus:ring-primary-400 dark:focus:border-primary-400; + } + + label { + @apply block text-sm font-medium text-gray-700 dark:text-gray-300 mb-1; + } + + ::placeholder { + @apply text-gray-400 dark:text-gray-500; + } +} + +@layer components { + .btn { + @apply inline-flex items-center justify-center px-4 py-2 font-medium rounded-md transition-all focus:outline-none focus:ring-2 focus:ring-offset-2 shadow-sm text-base; + } + + .btn-primary { + @apply btn bg-primary-600 hover:bg-primary-700 text-white focus:ring-primary-500 hover:shadow-md active:translate-y-0.5 dark:bg-primary-700 dark:hover:bg-primary-600; + } + + .btn-secondary { + @apply btn bg-secondary-600 hover:bg-secondary-700 text-white focus:ring-secondary-500 hover:shadow-md active:translate-y-0.5 dark:bg-secondary-700 dark:hover:bg-secondary-600; + } + + .btn-outline { + @apply btn border-2 border-gray-300 dark:border-dark-500 bg-white dark:bg-transparent text-gray-700 dark:text-gray-200 hover:bg-gray-50 hover:border-primary-500 hover:text-primary-600 dark:hover:bg-dark-700 dark:hover:border-primary-400 dark:hover:text-primary-400 focus:ring-gray-500 active:translate-y-0.5; + } + + .card { + @apply bg-white shadow-md dark:bg-dark-700 rounded-lg overflow-hidden border border-gray-200 dark:border-dark-600 hover:shadow-lg transition-shadow duration-300; + } + + .gradient-text { + @apply text-transparent bg-clip-text bg-gradient-to-r from-primary-600 to-secondary-600 dark:from-primary-500 dark:to-secondary-500 font-bold; + } + + .node-tooltip { + @apply max-w-xs p-3 bg-white text-gray-800 dark:bg-dark-800 dark:text-white rounded-lg shadow-lg text-sm z-50 border border-gray-200 dark:border-dark-600; + } + + .mindmap-node { + @apply cursor-pointer transition-all duration-200 hover:shadow-lg border-2 border-gray-200 dark:border-dark-600; + } + + /* Mindmap-spezifische Stile */ + .mindmap-container { + @apply bg-gray-50/80 dark:bg-dark-800/80 rounded-lg p-4 shadow-inner; + } + + .mindmap-node-root { + @apply bg-primary-100 dark:bg-primary-900 text-primary-900 dark:text-primary-100 border-primary-300 dark:border-primary-700; + } + + .mindmap-node-branch { + @apply bg-secondary-100 dark:bg-secondary-900 text-secondary-900 dark:text-secondary-100 border-secondary-300 dark:border-secondary-700; + } + + .mindmap-node-leaf { + @apply bg-gray-100 dark:bg-dark-700 text-gray-800 dark:text-gray-200 border-gray-300 dark:border-dark-500; + } + 
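+  /* A minimal usage sketch (assumed markup, not taken from the project's templates):
+     <div class="mindmap-container">
+       <div class="mindmap-node mindmap-node-root">Root topic</div>
+       <div class="mindmap-node mindmap-node-branch">Branch</div>
+       <div class="mindmap-node mindmap-node-leaf">Leaf</div>
+     </div>
+     Each node combines the shared .mindmap-node base with exactly one role variant. */
+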
+ .mindmap-link { + @apply stroke-gray-400 dark:stroke-gray-500 stroke-[2]; + } + + .mindmap-link-active { + @apply stroke-primary-500 dark:stroke-primary-400 stroke-[3]; + } + + .form-group { + @apply mb-4; + } + + .form-control { + @apply w-full; + } + + .ascii-art { + @apply font-mono text-xs leading-none whitespace-pre tracking-tight select-none text-primary-700 dark:text-primary-400 opacity-80 dark:opacity-60 font-bold; + } + + /* Verbesserte Formulareingabefelder */ + .form-input, + .form-textarea, + .form-select { + @apply w-full rounded-md border border-gray-300 bg-white px-4 py-2 text-gray-800 shadow-sm + focus:border-primary-500 focus:ring focus:ring-primary-500 focus:ring-opacity-50 + dark:border-dark-500 dark:bg-dark-700 dark:text-gray-100 dark:focus:border-primary-400 dark:focus:ring-primary-400; + } + + .form-input-lg { + @apply py-3 text-lg; + } + + .form-input-sm { + @apply py-1 text-sm; + } + + .form-checkbox, + .form-radio { + @apply h-5 w-5 rounded border-gray-300 text-primary-600 shadow-sm + focus:border-primary-500 focus:ring focus:ring-primary-500 focus:ring-opacity-50 + dark:border-dark-500 dark:bg-dark-700 dark:focus:border-primary-400 dark:focus:ring-primary-400; + } + + .form-checkbox { + @apply rounded; + } + + .form-radio { + @apply rounded-full; + } + + .form-error { + @apply mt-1 text-sm text-red-600 dark:text-red-400; + } +} + +@layer utilities { + .tech-gradient { + @apply bg-gradient-to-r from-primary-600 to-secondary-500; + } + + .glass-effect { + @apply bg-white/95 backdrop-blur-md border border-gray-200 shadow-md dark:bg-dark-800/90 dark:border-dark-700/50 dark:shadow-xl; + } + + .focus-ring { + @apply focus:outline-none focus:ring-2 focus:ring-primary-500 focus:ring-offset-2 dark:focus:ring-primary-400 dark:focus:ring-offset-dark-800; + } + + .input-focus { + @apply focus:outline-none focus:ring-2 focus:ring-primary-500 focus:border-primary-500 dark:focus:ring-primary-400 dark:focus:border-primary-400; + } +} + +/* Form validation styles */ +.is-valid { + @apply border-green-500 dark:border-green-400; +} + +.is-invalid { + @apply border-red-500 dark:border-red-400; +} + +.is-valid:focus { + @apply ring-green-500/30 border-green-500 dark:ring-green-400/30 dark:border-green-400; +} + +.is-invalid:focus { + @apply ring-red-500/30 border-red-500 dark:ring-red-400/30 dark:border-red-400; +} + +.form-hint { + @apply text-xs text-gray-500 dark:text-gray-400 mt-1; +} + +.form-label { + @apply block text-sm font-medium text-gray-700 dark:text-gray-300 mb-1; +} + +.input-icon { + @apply absolute inset-y-0 left-0 pl-3 flex items-center pointer-events-none text-gray-500 dark:text-gray-400; +} + +.input-with-icon { + @apply pl-10; +} \ No newline at end of file diff --git a/static/css/style.css b/static/css/style.css new file mode 100644 index 0000000..51ecb7e --- /dev/null +++ b/static/css/style.css @@ -0,0 +1,1644 @@ +/* Grundlegende Stile und Farbvariablen */ +:root { + /* Hauptfarbpalette - Dunkle Blau-, Violett- und Grautöne */ + --primary-color: #6c5dd3; + --primary-hover: #5a4db8; + --secondary-color: #7645d9; + --accent-color: #4cdfff; + --success-color: #35c9be; + --info-color: #3e7fff; + --warning-color: #ffb648; + --danger-color: #fe536e; + --light-color: #e9e9f0; + --dark-color: #14142b; + --gray-color: #6f6e84; + + /* Hintergrund und Neumorphismus */ + --background-start: #14142b; + --background-end: #1e1e2e; + --card-bg: #1a1a2e; + --glass-bg: rgba(30, 30, 46, 0.7); + --glass-border: rgba(72, 71, 138, 0.2); + + /* Neumorphische Schatten */ + 
--shadow-dark: 5px 5px 15px rgba(0, 0, 0, 0.5); + --shadow-light: -5px -5px 15px rgba(90, 90, 150, 0.05); + --inner-shadow: inset 2px 2px 5px rgba(0, 0, 0, 0.5), inset -2px -2px 5px rgba(90, 90, 150, 0.1); + + /* Glasmorphismus */ + --glass-card-shadow: 0 8px 32px rgba(0, 0, 0, 0.3); + --glass-blur: blur(12px); + + /* Fonts */ + --serif-font: 'Playfair Display', Georgia, serif; + --body-font: 'Poppins', 'Source Sans Pro', sans-serif; + --monospace-font: 'Fira Code', monospace; + + /* Abstände */ + --standard-spacing: 2rem; + --small-spacing: 1rem; + + /* High contrast colors for elements - Intensivere Farben für besseren Kontrast */ + --primary-bright: #9a7dff; + --secondary-bright: #bb82ff; + --accent-bright: #50eaff; + --success-bright: #50ffe4; + --warning-bright: #ffe050; + --danger-bright: #ff5050; + + /* Neue verbesserte Glasmorphismus Variablen */ + --glass-navbar-dark: rgba(14, 18, 32, 0.85); + --glass-navbar-light: rgba(255, 255, 255, 0.85); + --glass-card-dark: rgba(24, 28, 45, 0.8); + --glass-card-light: rgba(255, 255, 255, 0.8); +} + +/* Basiselemente */ +body { + min-height: 100vh; + display: flex; + flex-direction: column; + background: rgb(24 24 28 / var(--tw-bg-opacity, 1)); + color: var(--light-color); + font-family: var(--body-font); + line-height: 1.6; + letter-spacing: 0.01em; + overflow-x: hidden; +} + +main { + flex: 1; + z-index: 1; + position: relative; + padding: var(--standard-spacing) 0; +} + +h1, h2, h3, h4, h5, h6 { + font-family: var(--serif-font); + font-weight: 700; + line-height: 1.3; + margin-bottom: var(--small-spacing); + letter-spacing: 0.02em; +} + +p { + margin-bottom: var(--small-spacing); +} + +a { + text-decoration: none; + color: var(--accent-bright); + transition: all 0.3s ease; +} + +a:hover { + color: var(--primary-bright); + text-shadow: 0 0 8px rgba(154, 125, 255, 0.5); +} + +/* Verbesserte Glasmorphismus und Neumorphismus Elemente */ +.glass { + background: var(--glass-bg); + backdrop-filter: var(--glass-blur); + -webkit-backdrop-filter: var(--glass-blur); + border: 1px solid var(--glass-border); + border-radius: 16px; + box-shadow: var(--glass-card-shadow), 0 0 15px rgba(108, 93, 211, 0.2); + margin-bottom: var(--standard-spacing); + padding: var(--standard-spacing); + transition: transform 0.3s ease, box-shadow 0.3s ease; +} + +.glass:hover { + transform: translateY(-5px); + box-shadow: 0 15px 30px rgba(0, 0, 0, 0.4), 0 0 20px rgba(154, 125, 255, 0.3); + border: 1px solid rgba(154, 125, 255, 0.3); +} + +/* Verbesserte Navbar Styles */ +.glass-navbar-dark { + background: var(--glass-navbar-dark); + backdrop-filter: blur(15px); + -webkit-backdrop-filter: blur(15px); + border-color: rgba(255, 255, 255, 0.1); + box-shadow: 0 4px 20px rgba(0, 0, 0, 0.25); +} + +.glass-navbar-light { + background: var(--glass-navbar-light); + backdrop-filter: blur(15px); + -webkit-backdrop-filter: blur(15px); + border-color: rgba(0, 0, 0, 0.05); + box-shadow: 0 4px 15px rgba(0, 0, 0, 0.08); +} + +.nav-link { + padding: 0.6rem 1rem; + border-radius: 0.75rem; + font-weight: 500; + transition: all 0.25s ease; + position: relative; + color: rgba(255, 255, 255, 0.85); +} + +.nav-link:hover { + background: rgba(179, 143, 255, 0.2); + color: white; + transform: translateY(-2px); + box-shadow: 0 4px 12px rgba(0, 0, 0, 0.1); +} + +.nav-link-active { + background: rgba(179, 143, 255, 0.3); + color: white; + font-weight: 600; + box-shadow: 0 4px 12px rgba(0, 0, 0, 0.15); +} + +.nav-link-active::after { + content: ''; + position: absolute; + bottom: 0; + left: 10%; + width: 
80%;
+    height: 2px;
+    background: linear-gradient(90deg, transparent, rgba(255, 255, 255, 0.7), transparent);
+}
+
+.nav-link-light {
+    color: rgba(26, 32, 44, 0.85);
+}
+
+.nav-link-light:hover {
+    background: rgba(179, 143, 255, 0.15);
+    color: rgba(26, 32, 44, 1);
+    box-shadow: 0 4px 12px rgba(0, 0, 0, 0.05);
+}
+
+.nav-link-light-active {
+    background: rgba(179, 143, 255, 0.2);
+    color: rgba(26, 32, 44, 1);
+    font-weight: 600;
+    box-shadow: 0 4px 12px rgba(0, 0, 0, 0.1);
+}
+
+.nav-link-light-active::after {
+    background: linear-gradient(90deg, transparent, rgba(26, 32, 44, 0.5), transparent);
+}
+
+/* Restyled cards with improved effects */
+.card {
+    background: var(--glass-card-dark);
+    /* --glass-blur already contains blur(12px), so the variable is applied directly;
+       wrapping it in another blur() would make the declaration invalid */
+    backdrop-filter: var(--glass-blur);
+    -webkit-backdrop-filter: var(--glass-blur);
+    border-radius: 1.25rem;
+    border: 1px solid rgba(255, 255, 255, 0.12);
+    padding: 1.75rem;
+    transition: all 0.3s ease;
+    box-shadow: 0 10px 30px rgba(0, 0, 0, 0.25);
+    height: 100%;
+    display: flex;
+    flex-direction: column;
+    overflow: hidden;
+    position: relative;
+    z-index: 1;
+}
+
+.card::before {
+    content: '';
+    position: absolute;
+    top: 0;
+    left: 0;
+    right: 0;
+    height: 60%;
+    background: linear-gradient(to bottom,
+        rgba(179, 143, 255, 0.05) 0%,
+        rgba(179, 143, 255, 0) 100%);
+    z-index: -1;
+    transition: all 0.3s ease;
+    opacity: 0;
+    border-radius: 1.25rem 1.25rem 0 0;
+}
+
+.card:hover {
+    transform: translateY(-5px);
+    box-shadow: 0 15px 40px rgba(0, 0, 0, 0.35);
+    border: 1px solid rgba(255, 255, 255, 0.2);
+}
+
+.card:hover::before {
+    opacity: 1;
+}
+
+.card .icon {
+    font-size: 2.5rem;
+    margin-bottom: 1.5rem;
+    display: inline-block;
+    background: linear-gradient(135deg, #b38fff, #58a9ff);
+    -webkit-background-clip: text;
+    background-clip: text;
+    color: transparent;
+    filter: drop-shadow(0 0 8px rgba(179, 143, 255, 0.4));
+    transition: all 0.3s ease;
+}
+
+.card:hover .icon {
+    transform: scale(1.1);
+    filter: drop-shadow(0 0 12px rgba(179, 143, 255, 0.6));
+}
+
+.card h3 {
+    font-size: 1.5rem;
+    font-weight: 700;
+    margin-bottom: 1rem;
+    color: #ffffff;
+    text-shadow: 0 2px 3px rgba(0, 0, 0, 0.2);
+    transition: all 0.3s ease;
+}
+
+.card:hover h3 {
+    transform: translateY(-2px);
+    text-shadow: 0 3px 5px rgba(0, 0, 0, 0.3);
+}
+
+.card p {
+    color: rgba(255, 255, 255, 0.9);
+    font-size: 1rem;
+    line-height: 1.6;
+    margin-bottom: 0;
+}
+
+/* Light Mode Card Styles */
+.light .card {
+    background: var(--glass-card-light);
+    border: 1px solid rgba(0, 0, 0, 0.07);
+    box-shadow: 0 8px 20px rgba(0, 0, 0, 0.1);
+}
+
+.light .card::before {
+    background: linear-gradient(to bottom,
+        rgba(179, 143, 255, 0.03) 0%,
+        rgba(179, 143, 255, 0) 100%);
+}
+
+.light .card:hover {
+    border: 1px solid rgba(0, 0, 0, 0.15);
+    box-shadow: 0 15px 35px rgba(0, 0, 0, 0.15);
+}
+
+.light .card h3 {
+    color: #1a202c;
+    text-shadow: none;
+}
+
+.light .card p {
+    color: rgba(26, 32, 44, 0.9);
+}
+
+/* Improved feature-card variant */
+.feature-card {
+    padding: 2rem;
+    border-radius: 1.5rem;
+    background: var(--glass-card-dark);
+    backdrop-filter: blur(15px);
+    -webkit-backdrop-filter: blur(15px);
+    border: 1px solid rgba(255, 255, 255, 0.15);
+    box-shadow: 0 12px 36px rgba(0, 0, 0, 0.3);
+    transition: all 0.4s cubic-bezier(0.175, 0.885, 0.32, 1.275);
+    height: 100%;
+    display: flex;
+    flex-direction: column;
+    position: relative;
+    overflow: hidden;
+    z-index: 1;
+}
+
+.feature-card::before {
+    content: '';
+    position: absolute;
+    width: 150%;
+    height: 150%;
+    background: radial-gradient(
+        circle
at top right, + rgba(179, 143, 255, 0.15), + transparent 70% + ); + top: -25%; + right: -25%; + z-index: -1; + opacity: 0; + transition: opacity 0.4s ease; +} + +.feature-card:hover { + transform: translateY(-7px) scale(1.02); + box-shadow: 0 20px 48px rgba(0, 0, 0, 0.4); + border: 1px solid rgba(255, 255, 255, 0.25); +} + +.feature-card:hover::before { + opacity: 1; +} + +.feature-card .icon { + font-size: 2.75rem; + margin-bottom: 1.25rem; + background: linear-gradient(135deg, #b38fff, #58a9ff); + -webkit-background-clip: text; + background-clip: text; + color: transparent; + filter: drop-shadow(0 0 10px rgba(179, 143, 255, 0.6)); + transition: all 0.3s ease; +} + +.feature-card:hover .icon { + transform: scale(1.1) translateY(-3px); + filter: drop-shadow(0 0 15px rgba(179, 143, 255, 0.8)); +} + +.feature-card h3 { + font-size: 1.75rem; + font-weight: 700; + margin-bottom: 0.9rem; + color: #ffffff; + text-shadow: 0 2px 3px rgba(0, 0, 0, 0.3); + letter-spacing: 0.2px; + transition: all 0.3s ease; +} + +.feature-card:hover h3 { + transform: translateY(-2px); +} + +.feature-card p { + color: rgba(255, 255, 255, 0.95); + font-size: 1.1rem; + line-height: 1.6; + font-weight: 400; + text-shadow: 0 1px 2px rgba(0, 0, 0, 0.25); + letter-spacing: 0.3px; +} + +/* Light Mode Anpassungen für Feature-Cards */ +.light .feature-card { + background: var(--glass-card-light); + border: 1px solid rgba(0, 0, 0, 0.08); + color: #1a202c; + box-shadow: 0 10px 30px rgba(0, 0, 0, 0.12); +} + +.light .feature-card:hover { + box-shadow: 0 16px 40px rgba(0, 0, 0, 0.15); +} + +.light .feature-card h3 { + color: #1a202c; + text-shadow: none; +} + +.light .feature-card p { + color: rgba(26, 32, 44, 0.9); + text-shadow: none; +} + +/* Stilvolle Farbverläufe für Akzente */ +.gradient-text { + background: linear-gradient(135deg, rgba(200, 170, 255, 1), rgba(100, 180, 255, 1)); + -webkit-background-clip: text; + background-clip: text; + color: transparent; + font-weight: 700; + letter-spacing: -0.02em; + text-shadow: 0 2px 10px rgba(179, 143, 255, 0.3); + filter: drop-shadow(0 2px 6px rgba(179, 143, 255, 0.3)); +} + +.gradient-bg { + background: linear-gradient(135deg, var(--primary-bright), var(--secondary-bright)); +} + +/* Navbar Design - Überarbeitet mit verbessertem Glassmorphismus und Responsivität */ +.navbar { + background: rgba(20, 20, 43, 0.85); /* Etwas weniger transparent */ + backdrop-filter: blur(15px); /* Stärkerer Blur */ + -webkit-backdrop-filter: blur(15px); + box-shadow: 0 6px 20px rgba(0, 0, 0, 0.4); /* Stärkerer Schatten */ + border-bottom: 1px solid rgba(255, 255, 255, 0.15); /* Hellerer Border */ + padding: 0.8rem 0; /* Etwas weniger Padding */ + transition: all 0.3s ease; +} + +.navbar-brand { + font-family: 'Inter', sans-serif; /* Konsistente Schriftart */ + font-weight: 800; /* Stärkerer Font */ + font-size: 1.8rem; /* Größerer Font */ + color: white; /* Standardfarbe Weiß */ + letter-spacing: -0.03em; /* Engerer Buchstabenabstand */ + text-shadow: 0 2px 4px rgba(0, 0, 0, 0.3); /* Textschatten */ +} + +.navbar-brand i { + margin-right: 0.5rem; + background: linear-gradient(135deg, var(--primary-bright), var(--accent-bright)); /* Hellerer Gradient */ + -webkit-background-clip: text; + background-clip: text; + color: transparent; + filter: drop-shadow(0 0 8px rgba(154, 125, 255, 0.6)); /* Stärkerer Schatten */ +} + +.navbar-nav .nav-link { + color: rgba(255, 255, 255, 1); /* Vollständig weißer Text für bessere Lesbarkeit */ + font-weight: 600; /* Fetterer Text */ + padding: 0.6rem 1.2rem; /* 
Angepasstes Padding */ + border-radius: 12px; /* Größerer Border-Radius */ + transition: all 0.3s ease; + margin: 0 0.3rem; /* Angepasster Margin */ + backdrop-filter: blur(10px); /* Leichter Blur für Links */ + -webkit-backdrop-filter: blur(10px); + text-shadow: 0 1px 2px rgba(0, 0, 0, 0.5); /* Textschatten für bessere Lesbarkeit */ + font-size: 1.05rem; /* Etwas größere Schrift */ +} + +.navbar-nav .nav-link:hover, +.navbar-nav .nav-link.active { + color: var(--accent-bright); /* Hellerer Akzent */ + background: rgba(154, 125, 255, 0.15); /* Hellerer Hintergrund bei Hover/Active */ + box-shadow: 0 0 12px rgba(154, 125, 255, 0.2); /* Leichter Schatten */ + transform: translateY(-2px); /* Leichter Hover-Effekt */ +} + +.navbar-nav .nav-link i { + margin-right: 0.4rem; /* Angepasster Margin */ + transition: transform 0.3s ease; +} + +.navbar-nav .nav-link:hover i { + transform: translateY(-2px); + color: var(--accent-bright); +} + +/* Dark Mode Anpassungen für Navbar */ +.dark .navbar { + background: rgba(14, 18, 32, 0.9); /* Dunklerer Hintergrund */ + border-bottom-color: rgba(255, 255, 255, 0.08); /* Weniger sichtbarer Border */ + box-shadow: 0 6px 20px rgba(0, 0, 0, 0.5); +} + +.dark .navbar-brand { + color: white; +} + +.dark .navbar-nav .nav-link { + color: rgba(255, 255, 255, 0.8); +} + +.dark .navbar-nav .nav-link:hover, +.dark .navbar-nav .nav-link.active { + color: var(--accent-bright); + background: rgba(154, 125, 255, 0.1); + box-shadow: 0 0 10px rgba(154, 125, 255, 0.15); +} + +/* Light Mode Anpassungen für Navbar */ +.light .navbar { + background: rgba(240, 244, 248, 0.9); /* Heller Hintergrund */ + border-bottom-color: rgba(0, 0, 0, 0.1); /* Dunklerer Border */ + box-shadow: 0 4px 15px rgba(0, 0, 0, 0.1); +} + +.light .navbar-brand { + color: #1a202c; /* Dunkler Text */ +} + +.light .navbar-brand i { + background: linear-gradient(135deg, var(--primary-color), var(--accent-color)); /* Original Gradient */ + -webkit-background-clip: text; + background-clip: text; + color: transparent; + filter: none; /* Kein Schatten */ +} + +.light .navbar-nav .nav-link { + color: #4a5568; /* Dunklerer Text */ + backdrop-filter: none; /* Kein Blur */ + -webkit-backdrop-filter: none; +} + +.light .navbar-nav .nav-link:hover, +.light .navbar-nav .nav-link.active { + color: var(--primary-color); /* Primärfarbe */ + background: rgba(108, 93, 211, 0.08); /* Leichter Hintergrund */ + box-shadow: none; +} + +.light .navbar-nav .nav-link:hover i { + color: var(--primary-color); +} + +/* Responsivität für Navbar */ +@media (max-width: 768px) { + .navbar { + padding: 0.6rem 0; + } + + .navbar-brand { + font-size: 1.4rem; + } + + .navbar-nav .nav-link { + padding: 0.5rem 1rem; + margin: 0.2rem 0; + text-align: center; + justify-content: center; + } +} + +/* Buttons */ +.btn { + padding: 0.7rem 1.6rem; + border-radius: 12px; + font-weight: 700; /* Fetterer Text */ + transition: all 0.3s ease; + border: none; + letter-spacing: 0.05em; + text-transform: uppercase; + font-size: 0.9rem; /* Größere Schrift */ + position: relative; + overflow: hidden; + z-index: 1; + text-shadow: 0 1px 3px rgba(0, 0, 0, 0.6); /* Stärkerer Textschatten für bessere Lesbarkeit */ + box-shadow: 0 4px 12px rgba(0, 0, 0, 0.3); /* Stärkerer Schatten für bessere Sichtbarkeit */ +} + +.btn::before { + content: ''; + position: absolute; + top: 0; + left: 0; + width: 0; + height: 100%; + background: rgba(255, 255, 255, 0.1); + transition: width 0.3s ease; + z-index: -1; +} + +.btn:hover::before { + width: 100%; +} + +.btn-primary { + 
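/* A light, near-white surface with forced black text; the purple glow shadow ties the primary action to the accent palette. */
+   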
background: var(--light-color); + color: #000 !important; + box-shadow: 0 4px 15px rgba(108, 93, 211, 0.6); + font-weight: 700; + border: 2px solid rgba(255, 255, 255, 0.2); /* Sichtbarer Rand */ +} + +.btn-primary:hover { + transform: translateY(-2px); + box-shadow: 0 8px 20px rgba(108, 93, 211, 0.6); +} + +.btn-outline-light { + background: transparent; + border: 1px solid var(--glass-border); + color: var(--light-color); + box-shadow: var(--shadow-dark), var(--shadow-light); +} + +.btn-outline-light:hover { + background: rgba(255, 255, 255, 0.1); + color: var(--accent-color); + border-color: var(--accent-color); +} + +/* Glass-effect für UI-Komponenten */ +.glass-effect { + background: rgba(30, 30, 46, 0.5); + backdrop-filter: blur(10px); + -webkit-backdrop-filter: blur(10px); + border-radius: 16px; + border: 1px solid rgba(72, 71, 138, 0.2); + box-shadow: 0 8px 32px rgba(0, 0, 0, 0.2); +} + +/* Gradient-Hintergrund, der über gesamte Seite geht */ +.full-page-bg { + position: fixed; + top: 0; + left: 0; + width: 100%; + height: 100%; + background: var(--dark-color); + z-index: -10; +} + +.full-page-bg::before { + content: ''; + position: absolute; + top: 0; + left: 0; + width: 100%; + height: 100%; + background: radial-gradient( + circle at top right, + rgba(118, 69, 217, 0.1), + transparent 40% + ), + radial-gradient( + circle at bottom left, + rgba(76, 223, 255, 0.05), + transparent 40% + ); + z-index: -5; +} + +/* Überarbeitungen für benutzerdefinierte Stile */ +.btn-primary { + border-radius: 12px !important; + font-weight: 600 !important; +} + +.btn-outline { + border-radius: 12px !important; + border: 1px solid rgba(76, 223, 255, 0.5) !important; + background: transparent !important; + color: var(--accent-bright) !important; + transition: all 0.3s ease !important; +} + +.btn-outline:hover { + background: rgba(76, 223, 255, 0.1) !important; + border-color: var(--accent-bright) !important; + color: white !important; + transform: translateY(-2px) !important; +} + +/* Bessere Lesbarkeit für Buttons */ +button, .btn, a.btn { + font-weight: 700 !important; + letter-spacing: 0.03em !important; + text-shadow: 0 1px 3px rgba(0, 0, 0, 0.8) !important; + font-size: 1rem !important; +} + +/* Farbschema für Dark/Light Mode strikt trennen */ +.dark body { + background: var(--dark-color); + color: white; +} + +.light body { + background: #f8f9fa; + color: #333; +} + +.light .card { + background: rgba(255, 255, 255, 0.7); + border: 1px solid rgba(230, 230, 250, 0.3); + color: #333; +} + +.light .glass-effect { + background: rgba(255, 255, 255, 0.7); + border: 1px solid rgba(230, 230, 250, 0.3); +} + +.light .feature-card { + background: rgba(255, 255, 255, 0.7); + border: 1px solid rgba(230, 230, 250, 0.3); +} + +.light .feature-card h3 { + color: #333; +} + +.light .feature-card p { + color: #555; +} + +.light .card-header { + background: rgba(245, 245, 255, 0.7); + border-bottom: 1px solid rgba(230, 230, 250, 0.5); +} + +.light .card-footer { + background: rgba(245, 245, 255, 0.7); + border-top: 1px solid rgba(230, 230, 250, 0.5); +} + +.light .full-page-bg { + background: #f0f2f5; +} + +.light .full-page-bg::before { + background: radial-gradient( + circle at top right, + rgba(118, 69, 217, 0.05), + transparent 40% + ), + radial-gradient( + circle at bottom left, + rgba(76, 223, 255, 0.03), + transparent 40% + ); +} + +/* Verbesserter Kontrast für Text in Light-Mode */ +.light h1, .light h2, .light h3, .light h4, .light h5, .light h6 { + color: #222; +} + +.light p, .light span, .light div { + 
color: #444; +} + +.light a { + color: var(--primary-color); +} + +.light a:hover { + color: var(--primary-hover); +} + +/* Spezielle Anpassungen für kleinere Bildschirme */ +@media (max-width: 768px) { + .card, .feature-card, .glass-effect { + border-radius: 14px; + } + + .card-header { + border-radius: 14px 14px 0 0; + } + + .card-footer { + border-radius: 0 0 14px 14px; + } +} + +@media (max-width: 576px) { + .card, .feature-card, .glass-effect { + border-radius: 12px; + } + + .card-header { + border-radius: 12px 12px 0 0; + } + + .card-footer { + border-radius: 0 0 12px 12px; + } +} + +/* Gemeinsame Stile für alle Modi */ +.thought-card { + background: rgba(26, 26, 46, 0.7); + border-radius: 16px; + backdrop-filter: blur(12px); + -webkit-backdrop-filter: blur(12px); + border: 1px solid rgba(80, 80, 160, 0.15); + box-shadow: 0 10px 25px rgba(0, 0, 0, 0.3); + margin-bottom: 1.5rem; + overflow: hidden; + transition: all 0.3s ease; +} + +.thought-card:hover { + transform: translateY(-5px); + border-color: rgba(154, 125, 255, 0.3); + box-shadow: 0 15px 35px rgba(0, 0, 0, 0.4), 0 0 20px rgba(154, 125, 255, 0.15); +} + +.thought-card .card-header { + background: rgba(20, 20, 40, 0.7); + padding: 1rem 1.25rem; +} + +.thought-card .card-body { + padding: 1.25rem; +} + +.thought-card .metadata { + display: flex; + flex-wrap: wrap; + align-items: center; + font-size: 0.85rem; + color: var(--gray-color); + margin-top: 0.5rem; + gap: 1rem; +} + +.thought-card .keywords { + display: flex; + flex-wrap: wrap; + gap: 0.5rem; + margin-top: 1rem; +} + +/* Keywords & Tags */ +.keyword-tag { + display: inline-flex; + align-items: center; + padding: 0.25rem 0.75rem; + border-radius: 2rem; + font-size: 0.75rem; + font-weight: 600; + background: linear-gradient(120deg, rgba(154, 125, 255, 0.2), rgba(80, 234, 255, 0.2)); + border: 1px solid rgba(154, 125, 255, 0.2); + color: var(--primary-bright); + transition: all 0.2s ease; + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; + max-width: 150px; +} + +.keyword-tag:hover { + background: linear-gradient(120deg, rgba(154, 125, 255, 0.3), rgba(80, 234, 255, 0.3)); + border-color: rgba(154, 125, 255, 0.4); + color: var(--accent-bright); +} + +/* Verbesserte Relation-Badges */ +.relation-badge { + display: inline-flex; + align-items: center; + padding: 0.3rem 0.75rem; + border-radius: 2rem; + font-size: 0.75rem; + font-weight: 600; + margin-right: 0.5rem; + margin-bottom: 0.5rem; + transition: all 0.2s ease; +} + +.relation-badge:hover { + transform: translateY(-2px); +} + +.relation-supports { + background: rgba(53, 201, 190, 0.15); + color: var(--success-bright); + border: 1px solid rgba(53, 201, 190, 0.3); +} + +.relation-contradicts { + background: rgba(254, 83, 110, 0.15); + color: var(--danger-bright); + border: 1px solid rgba(254, 83, 110, 0.3); +} + +.relation-builds-upon { + background: rgba(62, 127, 255, 0.15); + color: var(--info-color); + border: 1px solid rgba(62, 127, 255, 0.3); +} + +.relation-generalizes { + background: rgba(255, 182, 72, 0.15); + color: var(--warning-bright); + border: 1px solid rgba(255, 182, 72, 0.3); +} + +.relation-specifies { + background: rgba(154, 125, 255, 0.15); + color: var(--primary-bright); + border: 1px solid rgba(154, 125, 255, 0.3); +} + +.relation-inspires { + background: rgba(118, 69, 217, 0.15); + color: var(--secondary-bright); + border: 1px solid rgba(118, 69, 217, 0.3); +} + +/* Formulare */ +.form-control, .form-select { + background: rgba(20, 20, 43, 0.5); + border: 1px solid 
var(--glass-border); + color: var(--light-color); + border-radius: 12px; + padding: 0.75rem 1rem; + margin-bottom: var(--small-spacing); + font-family: var(--body-font); + transition: all 0.3s ease; +} + +.form-control:focus, .form-select:focus { + background: rgba(30, 30, 60, 0.7); + color: var(--light-color); + border-color: var(--primary-color); + box-shadow: 0 0 0 0.25rem rgba(108, 93, 211, 0.25), var(--inner-shadow); +} + +.form-control::placeholder { + color: rgba(255, 255, 255, 0.4); +} + +.form-label { + color: var(--light-color); + font-weight: 500; + margin-bottom: 0.5rem; + letter-spacing: 0.03em; +} + +.input-group-text { + background: rgba(108, 93, 211, 0.2); + color: var(--accent-color); + border: 1px solid var(--glass-border); + border-radius: 12px 0 0 12px; +} + +/* Bewertungs-Sterne */ +.rating { + color: var(--warning-color); + font-size: 1.2rem; + display: flex; + margin-bottom: var(--small-spacing); +} + +.rating i { + cursor: pointer; + transition: transform 0.2s ease, color 0.2s ease; + margin-right: 0.4rem; +} + +.rating i:hover { + transform: scale(1.2) rotate(5deg); + color: var(--accent-color); +} + +/* Suchformular */ +.search-form { + background: var(--glass-bg); + backdrop-filter: var(--glass-blur); + -webkit-backdrop-filter: var(--glass-blur); + padding: var(--standard-spacing); + border-radius: 16px; + border: 1px solid var(--glass-border); + box-shadow: var(--glass-card-shadow); + margin-bottom: var(--standard-spacing); +} + +.search-form .input-group { + background: rgba(20, 20, 43, 0.5); + border-radius: 12px; + overflow: hidden; + margin-bottom: var(--small-spacing); + box-shadow: var(--inner-shadow); +} + +.search-form .form-control { + border: none; + background: transparent; + margin-bottom: 0; + box-shadow: none; +} + +.search-form .btn { + border-radius: 0 12px 12px 0; +} + +/* Mindmap-Visualisierung */ +.mindmap-container { + height: 600px; + width: 100%; + border-radius: 16px; + background: rgba(20, 20, 43, 0.7) !important; + backdrop-filter: blur(10px); + -webkit-backdrop-filter: blur(10px); + border: 1px solid rgba(72, 71, 138, 0.2); + box-shadow: 0 8px 32px rgba(0, 0, 0, 0.3); + overflow: hidden; + margin-bottom: 2rem; +} + +/* Filter-Sidebar */ +.filter-sidebar { + background: var(--glass-bg); + backdrop-filter: var(--glass-blur); + -webkit-backdrop-filter: var(--glass-blur); + padding: 1.5rem; + border-radius: 16px; + border: 1px solid var(--glass-border); + box-shadow: var(--glass-card-shadow); + margin-bottom: var(--standard-spacing); +} + +.filter-sidebar .form-check-label { + color: var(--light-color); +} + +/* Tabellen */ +.table { + color: var(--light-color); + margin-bottom: var(--standard-spacing); +} + +.table thead th { + background: rgba(20, 20, 43, 0.7); + border-color: var(--glass-border); + font-weight: 600; + text-transform: uppercase; + font-size: 0.8rem; + letter-spacing: 0.05em; + padding: 1.2rem 1rem; +} + +.table tbody td { + border-color: var(--glass-border); + vertical-align: middle; + padding: 1.2rem 1rem; + background: rgba(30, 30, 60, 0.3); +} + +/* Modals */ +.modal-content { + background: var(--glass-bg); + backdrop-filter: var(--glass-blur); + -webkit-backdrop-filter: var(--glass-blur); + border: 1px solid var(--glass-border); + box-shadow: var(--glass-card-shadow); + color: var(--light-color); + border-radius: 16px; +} + +.modal-header { + border-bottom: 1px solid var(--glass-border); + background: rgba(20, 20, 43, 0.7); + padding: 1.2rem 1.5rem; + border-radius: 16px 16px 0 0; +} + +.modal-header .btn-close { + 
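/* The (Bootstrap-style) close icon is dark by default; inverting it keeps it visible on the dark glass header. */
+   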
filter: invert(1); +} + +.modal-body { + padding: 1.5rem; +} + +.modal-footer { + border-top: 1px solid var(--glass-border); + background: rgba(20, 20, 43, 0.5); + padding: 1.2rem 1.5rem; + border-radius: 0 0 16px 16px; +} + +/* Alerts */ +.alert { + border-radius: 12px; + border: none; + color: #000; + margin-bottom: var(--standard-spacing); + padding: 1rem 1.5rem; + backdrop-filter: var(--glass-blur); +} + +.alert-success { + background: rgba(53, 201, 190, 0.8); +} + +.alert-danger { + background: rgba(254, 83, 110, 0.8); + color: #fff; +} + +.alert-warning { + background: rgba(255, 182, 72, 0.8); +} + +.alert-info { + background: rgba(62, 127, 255, 0.8); + color: #fff; +} + +/* Kommentare */ +.comment-item { + padding: 1rem; + border-bottom: 1px solid var(--glass-border); + margin-bottom: 1rem; + background: rgba(30, 30, 60, 0.3); + border-radius: 12px; +} + +.comment-item:last-child { + border-bottom: none; +} + +/* Animationen */ +@keyframes fadeIn { + from { + opacity: 0; + transform: translateY(20px); + } + to { + opacity: 1; + transform: translateY(0); + } +} + +.fade-in { + animation: fadeIn 0.4s ease-out forwards; +} + +@keyframes pulseGlow { + 0% { + box-shadow: 0 0 5px rgba(108, 93, 211, 0.5); + } + 50% { + box-shadow: 0 0 20px rgba(108, 93, 211, 0.8); + } + 100% { + box-shadow: 0 0 5px rgba(108, 93, 211, 0.5); + } +} + +.pulse-glow { + animation: pulseGlow 3s infinite; +} + +@keyframes float { + 0% { + transform: translateY(0px); + } + 50% { + transform: translateY(-10px); + } + 100% { + transform: translateY(0px); + } +} + +.float { + animation: float 6s ease-in-out infinite; +} + +/* Organische Formen */ +.shape { + position: absolute; + border-radius: 50%; + filter: blur(60px); + z-index: -1; + opacity: 0.5; +} + +.shape-1 { + background: var(--primary-color); + width: 300px; + height: 300px; + top: -100px; + right: -100px; +} + +.shape-2 { + background: var(--secondary-color); + width: 400px; + height: 400px; + bottom: -200px; + left: -200px; +} + +.shape-3 { + background: var(--accent-color); + width: 200px; + height: 200px; + bottom: 20%; + right: 10%; +} + +/* Hintergrund-Effekte */ +.bg-animation { + position: fixed; + top: 0; + left: 0; + width: 100%; + height: 100%; + z-index: -1; + overflow: hidden; +} + +.bg-gradient { + background: linear-gradient(135deg, var(--background-start), var(--background-end)); + width: 100%; + height: 100%; +} + +.stars { + position: absolute; + top: 0; + left: 0; + width: 100%; + height: 100%; + pointer-events: none; +} + +.star { + position: absolute; + background: white; + border-radius: 50%; + opacity: 0.3; + animation: twinkle 5s infinite; +} + +@keyframes twinkle { + 0% { opacity: 0.2; } + 50% { opacity: 0.5; } + 100% { opacity: 0.2; } +} + +/* Abstrakte Datenvisualisierungen */ +.data-viz { + position: relative; + overflow: hidden; + min-height: 200px; + border-radius: 16px; +} + +.data-point { + position: absolute; + width: 8px; + height: 8px; + border-radius: 50%; + background: var(--accent-color); + opacity: 0.7; +} + +.data-line { + position: absolute; + height: 1px; + background: linear-gradient(90deg, transparent, var(--primary-color), transparent); + opacity: 0.3; +} + +/* Footer - Überarbeitet mit verbessertem Glassmorphismus und Responsivität */ +footer { + background: rgba(20, 20, 43, 0.85); /* Etwas weniger transparent */ + backdrop-filter: blur(15px); /* Stärkerer Blur */ + -webkit-backdrop-filter: blur(15px); + box-shadow: 0 -6px 20px rgba(0, 0, 0, 0.4); /* Schatten nach oben */ + border-top: 1px solid rgba(255, 255, 
255, 0.15); /* Hellerer Border */ + padding: 1.5rem 0; + color: rgba(255, 255, 255, 0.9); /* Hellerer Text */ + margin-top: 4rem; /* Konsistenter Abstand */ + transition: all 0.3s ease; +} + +.dark footer { + background: rgba(14, 18, 32, 0.9); /* Dunklerer Hintergrund */ + border-top-color: rgba(255, 255, 255, 0.08); /* Weniger sichtbarer Border */ + box-shadow: 0 -6px 20px rgba(0, 0, 0, 0.5); +} + +.light footer { + background: rgba(240, 244, 248, 0.9); /* Heller Hintergrund */ + border-top-color: rgba(0, 0, 0, 0.1); /* Dunklerer Border */ + box-shadow: 0 -4px 15px rgba(0, 0, 0, 0.1); + color: #4a5568; /* Dunklerer Text */ +} + +/* Responsivität für Footer */ +@media (max-width: 768px) { + footer { + padding: 1rem 0; + } +} + +/* Icon-Stilisierung */ +.icon-3d { + filter: drop-shadow(2px 2px 2px rgba(0, 0, 0, 0.5)); + transition: all 0.3s ease; +} + +.icon-3d:hover { + filter: drop-shadow(4px 4px 4px rgba(108, 93, 211, 0.8)); + transform: translateY(-2px) scale(1.1); + color: var(--accent-color); +} + +/* Responsive Design */ +@media (max-width: 992px) { + :root { + --standard-spacing: 1.5rem; + --small-spacing: 0.75rem; + } + + .mindmap-container { + height: 500px; + } + + .shape { + display: none; + } +} + +@media (max-width: 768px) { + :root { + --standard-spacing: 1.25rem; + --small-spacing: 0.6rem; + } + + .mindmap-container { + height: 400px; + } + + .navbar-brand { + font-size: 1.2rem; + } + + h1 { + font-size: 1.8rem; + } + + h2 { + font-size: 1.5rem; + } +} + +@media (max-width: 576px) { + :root { + --standard-spacing: 1rem; + --small-spacing: 0.5rem; + } + + .glass, .card, .neumorph { + padding: 1rem; + } + + .card-header, .card-body, .card-footer { + padding: 1rem; + } +} + +/* Barrierefreiheit */ +.sr-only { + position: absolute; + width: 1px; + height: 1px; + padding: 0; + margin: -1px; + overflow: hidden; + clip: rect(0,0,0,0); + border: 0; +} + +/* Druckanpassungen */ +@media print { + body { + background: white; + color: black; + } + + .navbar, + .filter-sidebar, + .rating i, + .bg-animation, + .shape { + display: none; + } + + .glass, .card, .mindmap-container, .search-form, .filter-sidebar, .neumorph { + background: white !important; + box-shadow: none; + border: 1px solid #ddd; + color: black; + } + + .card-header, .btn-primary { + background: #eee !important; + color: black; + } + + .table { + color: black; + } + + .form-control, .form-select { + background: white; + color: black; + border-color: #ddd; + } + + .thought-card { + break-inside: avoid; + page-break-inside: avoid; + } +} + +/* Fix for dark background not extending over the entire page */ +html, body { + min-height: 100vh; + width: 100%; + margin: 0; + padding: 0; + overflow-x: hidden; + background: linear-gradient(135deg, var(--background-start), var(--background-end)); + background-attachment: fixed; +} + +/* Sticky navbar */ +.navbar.sticky-top { + position: sticky; + top: 0; + z-index: 1000; +} + +/* Light Mode Optimierungen für wichtige UI-Komponenten */ + +/* Buttons im Light Mode */ +.btn-primary:not(.dark-mode .btn-primary) { + background-color: var(--light-primary, #3b82f6); + color: white; + border: none; + font-weight: 500; +} + +.btn-primary:not(.dark-mode .btn-primary):hover { + background-color: var(--light-primary-hover, #4f46e5); + transform: translateY(-1px); + box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1); +} + +.btn-secondary:not(.dark-mode .btn-secondary) { + background-color: #f3f4f6; + color: #374151; + border: 1px solid #d1d5db; + font-weight: 500; +} + +.btn-secondary:not(.dark-mode 
.btn-secondary):hover { + background-color: #e5e7eb; +} + +/* Navbar im Light Mode */ +.navbar:not(.dark-mode .navbar), +.nav:not(.dark-mode .nav) { + background-color: rgba(255, 255, 255, 0.95); + border-bottom: 1px solid #e5e7eb; + box-shadow: 0 2px 4px rgba(0, 0, 0, 0.05); +} + +.navbar:not(.dark-mode .navbar) .nav-link, +.nav:not(.dark-mode .nav) .nav-link { + color: #1e3a8a; + font-weight: 500; +} + +.navbar:not(.dark-mode .navbar) .nav-link:hover, +.nav:not(.dark-mode .nav) .nav-link:hover { + color: #4f46e5; +} + +.navbar:not(.dark-mode .navbar) .navbar-brand, +.nav:not(.dark-mode .nav) .navbar-brand { + color: #0f172a; + font-weight: 700; +} + +/* Dropdown Menüs im Light Mode */ +.dropdown-menu:not(.dark-mode .dropdown-menu) { + background-color: white; + border: 1px solid #e5e7eb; + box-shadow: 0 4px 6px rgba(0, 0, 0, 0.05), 0 10px 15px rgba(0, 0, 0, 0.1); + border-radius: 0.5rem; + padding: 0.5rem 0; +} + +.dropdown-item:not(.dark-mode .dropdown-item) { + color: #1e293b; + padding: 0.5rem 1rem; +} + +.dropdown-item:not(.dark-mode .dropdown-item):hover { + background-color: #f1f5f9; + color: #4f46e5; +} + +/* Karten im Light Mode */ +.card:not(.dark-mode .card) { + background-color: white; + border: 1px solid #e5e7eb; + box-shadow: 0 4px 6px rgba(0, 0, 0, 0.03), 0 1px 3px rgba(0, 0, 0, 0.05); + border-radius: 0.5rem; + overflow: hidden; +} + +.card-header:not(.dark-mode .card-header) { + background-color: #f8fafc; + border-bottom: 1px solid #e5e7eb; + padding: 1rem 1.5rem; +} + +.card-footer:not(.dark-mode .card-footer) { + background-color: #f8fafc; + border-top: 1px solid #e5e7eb; +} + +/* Formulare im Light Mode */ +.form-control:not(.dark-mode .form-control) { + background-color: white; + border: 1px solid #d1d5db; + border-radius: 0.375rem; + padding: 0.5rem 0.75rem; + color: #1e293b; +} + +.form-control:not(.dark-mode .form-control):focus { + border-color: #3b82f6; + box-shadow: 0 0 0 3px rgba(59, 130, 246, 0.25); +} + +/* Tabs im Light Mode */ +.nav-tabs:not(.dark-mode .nav-tabs) { + border-bottom-color: #e5e7eb; +} + +.nav-tabs:not(.dark-mode .nav-tabs) .nav-link { + color: #64748b; + border: 1px solid transparent; +} + +.nav-tabs:not(.dark-mode .nav-tabs) .nav-link:hover { + border-color: #e5e7eb #e5e7eb #e5e7eb; + color: #3b82f6; +} + +.nav-tabs:not(.dark-mode .nav-tabs) .nav-link.active { + color: #0f172a; + background-color: white; + border-color: #e5e7eb #e5e7eb white; + font-weight: 500; +} + +/* Alerts im Light Mode */ +.alert:not(.dark-mode .alert) { + border-radius: 0.5rem; + border: 1px solid transparent; +} + +.alert-primary:not(.dark-mode .alert-primary) { + background-color: #eff6ff; + border-color: #bfdbfe; + color: #1e40af; +} + +.alert-success:not(.dark-mode .alert-success) { + background-color: #f0fdf4; + border-color: #bbf7d0; + color: #166534; +} + +.alert-warning:not(.dark-mode .alert-warning) { + background-color: #fffbeb; + border-color: #fef3c7; + color: #92400e; +} + +.alert-danger:not(.dark-mode .alert-danger) { + background-color: #fef2f2; + border-color: #fecaca; + color: #b91c1c; +} + +/* Badges im Light Mode */ +.badge:not(.dark-mode .badge) { + font-weight: 500; + padding: 0.25em 0.6em; + border-radius: 0.375rem; +} + +.badge-primary:not(.dark-mode .badge-primary) { + background-color: #3b82f6; + color: white; +} + +.badge-secondary:not(.dark-mode .badge-secondary) { + background-color: #f3f4f6; + color: #1f2937; +} + +/* Tabellen im Light Mode */ +table:not(.dark-mode table) { + background-color: white; + border-collapse: collapse; + width: 
100%;
+}
+
+table:not(.dark-mode table) th {
+    background-color: #f8fafc;
+    border-bottom: 1px solid #e5e7eb;
+    color: #0f172a;
+    font-weight: 600;
+    padding: 0.75rem;
+    text-align: left;
+}
+
+table:not(.dark-mode table) td {
+    border-bottom: 1px solid #e5e7eb;
+    padding: 0.75rem;
+    color: #1e293b;
+}
+
+table:not(.dark-mode table) tr:hover {
+    background-color: #f8fafc;
+}
\ No newline at end of file
diff --git a/static/css/tailwind.min.css b/static/css/tailwind.min.css
new file mode 100644
index 0000000..bcc5184
--- /dev/null
+++ b/static/css/tailwind.min.css
@@ -0,0 +1,6 @@
+/**
+ * Failed to bundle using Rollup v2.79.2: the file imports a not supported node.js built-in module "fs".
+ * If you believe this to be an issue with jsDelivr, and not with the package itself, please open an issue at https://github.com/jsdelivr/jsdelivr
+ */
+
+ throw new Error('Failed to bundle using Rollup v2.79.2: the file imports a not supported node.js built-in module "fs". If you believe this to be an issue with jsDelivr, and not with the package itself, please open an issue at https://github.com/jsdelivr/jsdelivr');
\ No newline at end of file
diff --git a/static/d3-extensions.js b/static/d3-extensions.js
new file mode 100644
index 0000000..4b88dd7
--- /dev/null
+++ b/static/d3-extensions.js
@@ -0,0 +1,526 @@
+/**
+ * D3.js extensions for improved mindmap functionality
+ * This file contains additional helper functions and extensions for D3.js
+ */
+
+class D3Extensions {
+  /**
+   * Creates an enhanced radial gradient
+   * @param {Object} defs - The D3 defs element
+   * @param {string} id - ID for the gradient
+   * @param {string} baseColor - Base color as hex or RGB
+   * @returns {Object} - The created gradient element
+   */
+  static createEnhancedRadialGradient(defs, id, baseColor) {
+    // Compute the colors
+    const d3Color = d3.color(baseColor);
+    const lightColor = d3Color.brighter(0.7);
+    const darkColor = d3Color.darker(0.3);
+    const midColor = d3Color;
+
+    // Create the gradient
+    const gradient = defs.append('radialGradient')
+      .attr('id', id)
+      .attr('cx', '30%')
+      .attr('cy', '30%')
+      .attr('r', '70%');
+
+    // Add color stops for a more realistic gradient
+    gradient.append('stop')
+      .attr('offset', '0%')
+      .attr('stop-color', lightColor.formatHex());
+
+    gradient.append('stop')
+      .attr('offset', '50%')
+      .attr('stop-color', midColor.formatHex());
+
+    gradient.append('stop')
+      .attr('offset', '100%')
+      .attr('stop-color', darkColor.formatHex());
+
+    return gradient;
+  }
+
+  /**
+   * Creates a glow-effect filter
+   * @param {Object} defs - D3 reference to the defs section
+   * @param {String} id - Filter ID
+   * @param {String} color - Glow color (hex code)
+   * @param {Number} strength - Glow strength
+   * @returns {Object} D3 reference to the created filter
+   */
+  static createGlowFilter(defs, id, color = '#b38fff', strength = 5) {
+    const filter = defs.append('filter')
+      .attr('id', id)
+      .attr('x', '-50%')
+      .attr('y', '-50%')
+      .attr('width', '200%')
+      .attr('height', '200%');
+
+    // Blur effect
+    filter.append('feGaussianBlur')
+      .attr('in', 'SourceGraphic')
+      .attr('stdDeviation', strength)
+      .attr('result', 'blur');
+
+    // Amplify the alpha channel to shape the glow (alpha' = 18*a - 7)
+    filter.append('feColorMatrix')
+      .attr('in', 'blur')
+      .attr('type', 'matrix')
+      .attr('values', '0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 -7')
+      .attr('result', 'glow');
+
+    // Flood with the requested color
+    filter.append('feFlood')
+      .attr('flood-color', color)
+      .attr('flood-opacity', '0.7')
+      .attr('result', 'color');
+
+    // Composite the glow with the color
+    filter.append('feComposite')
+      .attr('in', 'color')
+      .attr('in2', 'glow')
+      .attr('operator', 'in')
+      .attr('result', 'glow-color');
+
+    // Merge all layers
+    const feMerge = filter.append('feMerge');
+    feMerge.append('feMergeNode')
+      .attr('in', 'glow-color');
+    feMerge.append('feMergeNode')
+      .attr('in', 'SourceGraphic');
+
+    return filter;
+  }
+
+  /**
+   * Computes a consistent color from a string
+   * @param {string} str - Input string
+   * @returns {string} - Generated color as a hex string
+   */
+  static stringToColor(str) {
+    let hash = 0;
+    for (let i = 0; i < str.length; i++) {
+      hash = str.charCodeAt(i) + ((hash << 5) - hash);
+    }
+
+    // Base palette for consistent colors
+    const colorPalette = [
+      "#4299E1", // Blue
+      "#9F7AEA", // Purple
+      "#ED64A6", // Pink
+      "#48BB78", // Green
+      "#ECC94B", // Yellow
+      "#F56565", // Red
+      "#38B2AC", // Teal
+      "#ED8936", // Orange
+      "#667EEA", // Indigo
+    ];
+
+    // Pick a palette color based on the hash
+    const colorIndex = Math.abs(hash) % colorPalette.length;
+    return colorPalette[colorIndex];
+  }
+
+  /**
+   * Creates a drop-shadow filter
+   * @param {Object} defs - D3 reference to the defs section
+   * @param {String} id - Filter ID
+   * @returns {Object} D3 reference to the created filter
+   */
+  static createShadowFilter(defs, id) {
+    const filter = defs.append('filter')
+      .attr('id', id)
+      .attr('x', '-50%')
+      .attr('y', '-50%')
+      .attr('width', '200%')
+      .attr('height', '200%');
+
+    // Simple drop shadow
+    filter.append('feDropShadow')
+      .attr('dx', 0)
+      .attr('dy', 4)
+      .attr('stdDeviation', 4)
+      .attr('flood-color', 'rgba(0, 0, 0, 0.3)');
+
+    return filter;
+  }
+
+  /**
+   * Creates a glassmorphism-effect filter
+   * @param {Object} defs - D3 reference to the defs section
+   * @param {String} id - Filter ID
+   * @returns {Object} D3 reference to the created filter
+   */
+  static createGlassMorphismFilter(defs, id) {
+    const filter = defs.append('filter')
+      .attr('id', id)
+      .attr('x', '-50%')
+      .attr('y', '-50%')
+      .attr('width', '200%')
+      .attr('height', '200%');
+
+    // Background blur for the glass effect
+    filter.append('feGaussianBlur')
+      .attr('in', 'SourceGraphic')
+      .attr('stdDeviation', 8)
+      .attr('result', 'blur');
+
+    // Lighten the color for the glass effect
+    filter.append('feColorMatrix')
+      .attr('in', 'blur')
+      .attr('type', 'matrix')
+      .attr('values', '1 0 0 0 0.1 0 1 0 0 0.1 0 0 1 0 0.1 0 0 0 0.6 0')
+      .attr('result', 'glass');
+
+    // Overlay on the original
+    const feMerge = filter.append('feMerge');
+    feMerge.append('feMergeNode')
+      .attr('in', 'glass');
+    feMerge.append('feMergeNode')
+      .attr('in', 'SourceGraphic');
+
+    return filter;
+  }
+
+  /**
+   * Creates an enhanced glassmorphism effect with a color gradient
+   * @param {Object} defs - D3 reference to the defs section
+   * @param {String} id - Filter ID
+   * @param {String} color1 - First gradient color (hex code)
+   * @param {String} color2 - Second gradient color (hex code)
+   * @returns {Object} D3 reference to the created filter
+   */
+  static createEnhancedGlassMorphismFilter(defs, id, color1 = '#b38fff', color2 = '#58a9ff') {
+    // Define the gradient for the glass effect
+    const gradientId = `gradient-${id}`;
+    const gradient = defs.append('linearGradient')
+      .attr('id', gradientId)
+      .attr('x1', '0%')
+      .attr('y1', '0%')
+      .attr('x2', '100%')
+      .attr('y2', '100%');
+
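+    // Two semi-transparent stops (stop-opacity 0.3) give the glass layer its subtle diagonal tint.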
+    gradient.append('stop')
+      .attr('offset', '0%')
+      .attr('stop-color', color1)
+      .attr('stop-opacity', '0.3');
+
+    gradient.append('stop')
+      .attr('offset', '100%')
+      .attr('stop-color', color2)
+      .attr('stop-opacity', '0.3');
+
+    // Create the filter
+    const filter = defs.append('filter')
+      .attr('id', id)
+      .attr('x', '-50%')
+      .attr('y', '-50%')
+      .attr('width', '200%')
+      .attr('height', '200%');
+
+    // Background blur
+    filter.append('feGaussianBlur')
+      .attr('in', 'SourceGraphic')
+      .attr('stdDeviation', 6)
+      .attr('result', 'blur');
+
+    // Inject the gradient layer. Note: feImage referencing an in-document
+    // element is not reliably supported across browsers; if it fails to
+    // render, the merge below still shows blur plus source graphic.
+    filter.append('feImage')
+      .attr('xlink:href', `#${gradientId}`)
+      .attr('result', 'gradient')
+      .attr('x', '0%')
+      .attr('y', '0%')
+      .attr('width', '100%')
+      .attr('height', '100%')
+      .attr('preserveAspectRatio', 'none');
+
+    // Merge all layers
+    const feMerge = filter.append('feMerge');
+    feMerge.append('feMergeNode')
+      .attr('in', 'blur');
+    feMerge.append('feMergeNode')
+      .attr('in', 'gradient');
+    feMerge.append('feMergeNode')
+      .attr('in', 'SourceGraphic');
+
+    return filter;
+  }
+
+  /**
+   * Creates a 3D glass effect with enhanced depth and reflections
+   * @param {Object} defs - D3 reference to the defs section
+   * @param {String} id - Filter ID
+   * @returns {Object} D3 reference to the created filter
+   */
+  static create3DGlassEffect(defs, id) {
+    const filter = defs.append('filter')
+      .attr('id', id)
+      .attr('x', '-50%')
+      .attr('y', '-50%')
+      .attr('width', '200%')
+      .attr('height', '200%');
+
+    // Color matrix for transparency
+    filter.append('feColorMatrix')
+      .attr('type', 'matrix')
+      .attr('in', 'SourceGraphic')
+      .attr('values', '1 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0.7 0')
+      .attr('result', 'transparent');
+
+    // Background blur for depth
+    filter.append('feGaussianBlur')
+      .attr('in', 'transparent')
+      .attr('stdDeviation', '4')
+      .attr('result', 'blurred');
+
+    // Add a light source and shading (fePointLight is a valid child of feSpecularLighting)
+    const lightSource = filter.append('feSpecularLighting')
+      .attr('in', 'blurred')
+      .attr('surfaceScale', '6')
+      .attr('specularConstant', '1')
+      .attr('specularExponent', '30')
+      .attr('lighting-color', '#ffffff')
+      .attr('result', 'specular');
+
+    lightSource.append('fePointLight')
+      .attr('x', '100')
+      .attr('y', '100')
+      .attr('z', '200');
+
+    // Intensify the specular highlight
+    filter.append('feComposite')
+      .attr('in', 'specular')
+      .attr('in2', 'SourceGraphic')
+      .attr('operator', 'in')
+      .attr('result', 'specularHighlight');
+
+    // Create the inner shadow
+    filter.append('feOffset')
+      .attr('in', 'SourceAlpha')
+      .attr('dx', '0')
+      .attr('dy', '1')
+      .attr('result', 'offsetblur');
+
+    // Filter primitives must be direct children of <filter>; nesting this
+    // feGaussianBlur inside the feOffset would be invalid SVG and the inner
+    // shadow would never render.
+    filter.append('feGaussianBlur')
+      .attr('in', 'offsetblur')
+      .attr('stdDeviation', '2')
+      .attr('result', 'innerShadow');
+
+    filter.append('feComposite')
+      .attr('in', 'innerShadow')
+      .attr('in2', 'SourceGraphic')
+      .attr('operator', 'out')
+      .attr('result', 'innerShadowEffect');
+
+    // Combine the layers
+    const feMerge = filter.append('feMerge');
+    feMerge.append('feMergeNode')
+      .attr('in', 'blurred');
+    feMerge.append('feMergeNode')
+      .attr('in', 'innerShadowEffect');
+    feMerge.append('feMergeNode')
+      .attr('in', 'specularHighlight');
+    feMerge.append('feMergeNode')
+      .attr('in', 'SourceGraphic');
+
+    return filter;
+  }
+
+  /**
+   * Adds a particle-system effect for interactive nodes
+   * @param {Object} parent - The parent SVG element
+   * @param {number} x - X coordinate of the center
+   * @param {number} y - Y coordinate of the center
+   * @param {string} color - Particle color (hex code)
+   * @param {number} count - Number of particles
+   */
+  static createParticleEffect(parent, x, y, color = '#b38fff', count = 5) {
+    const particles = [];
+
+    for (let i = 0; i < count; i++) {
+      const particle = parent.append('circle')
+        .attr('cx', x)
+        .attr('cy', y)
+        .attr('r', 0)
+        .attr('fill', color)
+        .style('opacity', 0.8);
+
+      particles.push(particle);
+
+      // Animate the particle
+      animateParticle(particle);
+    }
+
+    function animateParticle(particle) {
+      // Random direction and speed
+      const angle = Math.random() * Math.PI * 2;
+      const speed = 1 + Math.random() * 2;
+      const distance = 20 + Math.random() * 30;
+
+      // Compute the target position
+      const targetX = x + Math.cos(angle) * distance;
+      const targetY = y + Math.sin(angle) * distance;
+
+      // Animate with a randomized duration
+      const duration = 1000 + Math.random() * 500;
+
+      particle
+        .attr('r', 0)
+        .style('opacity', 0.8)
+        .transition()
+        .duration(duration)
+        .attr('cx', targetX)
+        .attr('cy', targetY)
+        .attr('r', 2 + Math.random() * 3)
+        .style('opacity', 0)
+        .on('end', function() {
+          // Remove the particle
+          particle.remove();
+        });
+    }
+  }
+
+  /**
+   * Runs a pulse animation on a node
+   * @param {Object} node - D3 node selection
+   * @returns {void}
+   */
+  static pulseAnimation(node) {
+    if (!node) return;
+
+    const circle = node.select('circle');
+    const originalRadius = parseFloat(circle.attr('r'));
+    const originalFill = circle.attr('fill');
+
+    // Pulse animation
+    circle
+      .transition()
+      .duration(400)
+      .attr('r', originalRadius * 1.3)
+      .attr('fill', '#b38fff')
+      .transition()
+      .duration(400)
+      .attr('r', originalRadius)
+      .attr('fill', originalFill);
+  }
+
+  /**
+   * Computes an adaptive font size based on text length
+   * @param {string} text - The text to display
+   * @param {number} maxSize - Maximum font size in pixels
+   * @param {number} minSize - Minimum font size in pixels
+   * @returns {number} - The computed font size
+   */
+  static getAdaptiveFontSize(text, maxSize = 14, minSize = 10) {
+    if (!text) return maxSize;
+
+    // Scale the font size linearly with the text length
+    const length = text.length;
+    if (length <= 6) return maxSize;
+    if (length >= 20) return minSize;
+
+    // Linear interpolation
+    const factor = (length - 6) / (20 - 6);
+    return maxSize - factor * (maxSize - minSize);
+  }
+
+  /**
+   * Adds a pulsing effect to a selection
+   * @param {Object} selection - D3 selection
+   * @param {number} duration - Duration of one pulse cycle in ms
+   * @param {number} minOpacity - Minimum opacity
+   * @param {number} maxOpacity - Maximum opacity
+   */
+  static addPulseEffect(selection, duration = 1500, minOpacity = 0.4, maxOpacity = 0.9) {
+    function pulse() {
+      selection
+        .transition()
+        .duration(duration / 2)
+        .style('opacity', minOpacity)
+        .transition()
+        .duration(duration / 2)
+        .style('opacity', maxOpacity)
+        .on('end', pulse);
+    }
+
+    // Set the initial style
+    selection.style('opacity', maxOpacity);
+
+    // Start the pulse animation
+    pulse();
+  }
+
+  /**
+   * Processes database records for the mindmap visualization
+   * @param {Array} databaseNodes - Node data from the database
+   * @param {Array} links - Link data, or null to extract links automatically
+   * @returns {Object} Prepared data for D3.js
+   */
+  static processDbNodesForVisualization(databaseNodes, links = null) {
+    // Check whether any data is present
+    if (!databaseNodes || databaseNodes.length === 0) {
databaseNodes.length === 0) { + console.warn('Keine Knotendaten zum Verarbeiten vorhanden'); + return { nodes: [], links: [] }; + } + + // Knoten mit D3-Kompatiblem Format erstellen + const nodes = databaseNodes.map(node => { + // Farbgenerierung, falls keine vorhanden + const nodeColor = node.color_code || + node.color || + D3Extensions.stringToColor(node.name || 'default'); + + return { + id: node.id, + name: node.name, + description: node.description || '', + thought_count: node.thought_count || 0, + color: nodeColor, + // Zusätzliche Attribute + category_id: node.category_id, + is_public: node.is_public !== undefined ? node.is_public : true, + // Position, falls vorhanden + x: node.x_position, + y: node.y_position, + // Größe, falls vorhanden + scale: node.scale || 1.0 + }; + }); + + // Verbindungen verarbeiten + let processedLinks = []; + + if (links && Array.isArray(links)) { + // Verwende übergebene Verbindungen + processedLinks = links.map(link => { + return { + source: link.source, + target: link.target, + // Zusätzliche Attribute + type: link.type || 'default', + strength: link.strength || 1 + }; + }); + } else { + // Extrahiere Verbindungen aus den Knoten + databaseNodes.forEach(node => { + if (node.connections && Array.isArray(node.connections)) { + node.connections.forEach(conn => { + processedLinks.push({ + source: node.id, + target: conn.target, + type: conn.type || 'default', + strength: conn.strength || 1 + }); + }); + } + }); + } + + return { nodes, links: processedLinks }; + } +} + +// Globale Verfügbarkeit sicherstellen +window.D3Extensions = D3Extensions; diff --git a/static/example.png b/static/example.png new file mode 100644 index 0000000..c87515b Binary files /dev/null and b/static/example.png differ diff --git a/static/fonts/inter.css b/static/fonts/inter.css new file mode 100644 index 0000000..8da757d --- /dev/null +++ b/static/fonts/inter.css @@ -0,0 +1,35 @@ +/* Inter font - Local version */ +@font-face { + font-family: 'Inter'; + font-style: normal; + font-weight: 300; + src: url('../fonts/inter-light.woff2') format('woff2'); +} + +@font-face { + font-family: 'Inter'; + font-style: normal; + font-weight: 400; + src: url('../fonts/inter-regular.woff2') format('woff2'); +} + +@font-face { + font-family: 'Inter'; + font-style: normal; + font-weight: 500; + src: url('../fonts/inter-medium.woff2') format('woff2'); +} + +@font-face { + font-family: 'Inter'; + font-style: normal; + font-weight: 600; + src: url('../fonts/inter-semibold.woff2') format('woff2'); +} + +@font-face { + font-family: 'Inter'; + font-style: normal; + font-weight: 700; + src: url('../fonts/inter-bold.woff2') format('woff2'); +} \ No newline at end of file diff --git a/static/fonts/jetbrains-mono.css b/static/fonts/jetbrains-mono.css new file mode 100644 index 0000000..d6b101c --- /dev/null +++ b/static/fonts/jetbrains-mono.css @@ -0,0 +1,21 @@ +/* JetBrains Mono font - Local version */ +@font-face { + font-family: 'JetBrains Mono'; + font-style: normal; + font-weight: 400; + src: url('../fonts/jetbrainsmono-regular.woff2') format('woff2'); +} + +@font-face { + font-family: 'JetBrains Mono'; + font-style: normal; + font-weight: 500; + src: url('../fonts/jetbrainsmono-medium.woff2') format('woff2'); +} + +@font-face { + font-family: 'JetBrains Mono'; + font-style: normal; + font-weight: 700; + src: url('../fonts/jetbrainsmono-bold.woff2') format('woff2'); +} \ No newline at end of file diff --git a/static/img/favicon-gen.py b/static/img/favicon-gen.py new file mode 100644 index 0000000..ba6faeb 
--- /dev/null +++ b/static/img/favicon-gen.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import os +from PIL import Image +import cairosvg + +# Pfad zum SVG-Favicon +svg_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'favicon.svg') +# Ausgabepfad für das PNG +png_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'favicon.png') +# Ausgabepfad für das ICO +ico_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'favicon.ico') + +# SVG zu PNG konvertieren +cairosvg.svg2png(url=svg_path, write_to=png_path, output_width=512, output_height=512) + +# PNG zu ICO konvertieren +img = Image.open(png_path) +img.save(ico_path, sizes=[(16, 16), (32, 32), (48, 48), (64, 64), (128, 128)]) + +print(f"Favicon erfolgreich erstellt: {ico_path}") + +# Optional: PNG-Datei löschen, wenn nur ICO benötigt wird +# os.remove(png_path) \ No newline at end of file diff --git a/static/img/favicon.svg b/static/img/favicon.svg new file mode 100644 index 0000000..6cc8d31 --- /dev/null +++ b/static/img/favicon.svg @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/static/js/alpine.min.js b/static/js/alpine.min.js new file mode 100644 index 0000000..a69fd22 --- /dev/null +++ b/static/js/alpine.min.js @@ -0,0 +1,5 @@ +(()=>{var Xe=!1,Ze=!1,V=[],Qe=-1;function Kt(e){En(e)}function En(e){V.includes(e)||V.push(e),Sn()}function ye(e){let t=V.indexOf(e);t!==-1&&t>Qe&&V.splice(t,1)}function Sn(){!Ze&&!Xe&&(Xe=!0,queueMicrotask(An))}function An(){Xe=!1,Ze=!0;for(let e=0;ee.effect(t,{scheduler:r=>{tt?Kt(r):r()}}),et=e.raw}function rt(e){D=e}function Ht(e){let t=()=>{};return[n=>{let i=D(n);return e._x_effects||(e._x_effects=new Set,e._x_runEffects=()=>{e._x_effects.forEach(o=>o())}),e._x_effects.add(i),t=()=>{i!==void 0&&(e._x_effects.delete(i),$(i))},i},()=>{t()}]}var qt=[],Ut=[],Wt=[];function Gt(e){Wt.push(e)}function be(e,t){typeof t=="function"?(e._x_cleanups||(e._x_cleanups=[]),e._x_cleanups.push(t)):(t=e,Ut.push(t))}function Jt(e){qt.push(e)}function Yt(e,t,r){e._x_attributeCleanups||(e._x_attributeCleanups={}),e._x_attributeCleanups[t]||(e._x_attributeCleanups[t]=[]),e._x_attributeCleanups[t].push(r)}function nt(e,t){!e._x_attributeCleanups||Object.entries(e._x_attributeCleanups).forEach(([r,n])=>{(t===void 0||t.includes(r))&&(n.forEach(i=>i()),delete e._x_attributeCleanups[r])})}var ot=new MutationObserver(it),st=!1;function se(){ot.observe(document,{subtree:!0,childList:!0,attributes:!0,attributeOldValue:!0}),st=!0}function at(){On(),ot.disconnect(),st=!1}var ae=[],ct=!1;function On(){ae=ae.concat(ot.takeRecords()),ae.length&&!ct&&(ct=!0,queueMicrotask(()=>{Tn(),ct=!1}))}function Tn(){it(ae),ae.length=0}function h(e){if(!st)return e();at();let t=e();return se(),t}var lt=!1,ve=[];function Xt(){lt=!0}function Zt(){lt=!1,it(ve),ve=[]}function it(e){if(lt){ve=ve.concat(e);return}let t=[],r=[],n=new Map,i=new Map;for(let o=0;os.nodeType===1&&t.push(s)),e[o].removedNodes.forEach(s=>s.nodeType===1&&r.push(s))),e[o].type==="attributes")){let s=e[o].target,a=e[o].attributeName,c=e[o].oldValue,l=()=>{n.has(s)||n.set(s,[]),n.get(s).push({name:a,value:s.getAttribute(a)})},u=()=>{i.has(s)||i.set(s,[]),i.get(s).push(a)};s.hasAttribute(a)&&c===null?l():s.hasAttribute(a)?(u(),l()):u()}i.forEach((o,s)=>{nt(s,o)}),n.forEach((o,s)=>{qt.forEach(a=>a(s,o))});for(let o of 
r)if(!t.includes(o)&&(Ut.forEach(s=>s(o)),o._x_cleanups))for(;o._x_cleanups.length;)o._x_cleanups.pop()();t.forEach(o=>{o._x_ignoreSelf=!0,o._x_ignore=!0});for(let o of t)r.includes(o)||!o.isConnected||(delete o._x_ignoreSelf,delete o._x_ignore,Wt.forEach(s=>s(o)),o._x_ignore=!0,o._x_ignoreSelf=!0);t.forEach(o=>{delete o._x_ignoreSelf,delete o._x_ignore}),t=null,r=null,n=null,i=null}function we(e){return F(L(e))}function N(e,t,r){return e._x_dataStack=[t,...L(r||e)],()=>{e._x_dataStack=e._x_dataStack.filter(n=>n!==t)}}function L(e){return e._x_dataStack?e._x_dataStack:typeof ShadowRoot=="function"&&e instanceof ShadowRoot?L(e.host):e.parentNode?L(e.parentNode):[]}function F(e){let t=new Proxy({},{ownKeys:()=>Array.from(new Set(e.flatMap(r=>Object.keys(r)))),has:(r,n)=>e.some(i=>i.hasOwnProperty(n)),get:(r,n)=>(e.find(i=>{if(i.hasOwnProperty(n)){let o=Object.getOwnPropertyDescriptor(i,n);if(o.get&&o.get._x_alreadyBound||o.set&&o.set._x_alreadyBound)return!0;if((o.get||o.set)&&o.enumerable){let s=o.get,a=o.set,c=o;s=s&&s.bind(t),a=a&&a.bind(t),s&&(s._x_alreadyBound=!0),a&&(a._x_alreadyBound=!0),Object.defineProperty(i,n,{...c,get:s,set:a})}return!0}return!1})||{})[n],set:(r,n,i)=>{let o=e.find(s=>s.hasOwnProperty(n));return o?o[n]=i:e[e.length-1][n]=i,!0}});return t}function Ee(e){let t=n=>typeof n=="object"&&!Array.isArray(n)&&n!==null,r=(n,i="")=>{Object.entries(Object.getOwnPropertyDescriptors(n)).forEach(([o,{value:s,enumerable:a}])=>{if(a===!1||s===void 0)return;let c=i===""?o:`${i}.${o}`;typeof s=="object"&&s!==null&&s._x_interceptor?n[o]=s.initialize(e,c,o):t(s)&&s!==n&&!(s instanceof Element)&&r(s,c)})};return r(e)}function Se(e,t=()=>{}){let r={initialValue:void 0,_x_interceptor:!0,initialize(n,i,o){return e(this.initialValue,()=>Cn(n,i),s=>ut(n,i,s),i,o)}};return t(r),n=>{if(typeof n=="object"&&n!==null&&n._x_interceptor){let i=r.initialize.bind(r);r.initialize=(o,s,a)=>{let c=n.initialize(o,s,a);return r.initialValue=c,i(o,s,a)}}else r.initialValue=n;return r}}function Cn(e,t){return t.split(".").reduce((r,n)=>r[n],e)}function ut(e,t,r){if(typeof t=="string"&&(t=t.split(".")),t.length===1)e[t[0]]=r;else{if(t.length===0)throw error;return e[t[0]]||(e[t[0]]={}),ut(e[t[0]],t.slice(1),r)}}var Qt={};function y(e,t){Qt[e]=t}function ce(e,t){return Object.entries(Qt).forEach(([r,n])=>{let i=null;function o(){if(i)return i;{let[s,a]=ft(t);return i={interceptor:Se,...s},be(t,a),i}}Object.defineProperty(e,`$${r}`,{get(){return n(t,o())},enumerable:!1})}),e}function er(e,t,r,...n){try{return r(...n)}catch(i){X(i,e,t)}}function X(e,t,r=void 0){Object.assign(e,{el:t,expression:r}),console.warn(`Alpine Expression Error: ${e.message} + +${r?'Expression: "'+r+`" + +`:""}`,t),setTimeout(()=>{throw e},0)}var Ae=!0;function Oe(e){let t=Ae;Ae=!1;let r=e();return Ae=t,r}function R(e,t,r={}){let n;return x(e,t)(i=>n=i,r),n}function x(...e){return tr(...e)}var tr=dt;function rr(e){tr=e}function dt(e,t){let r={};ce(r,e);let n=[r,...L(e)],i=typeof t=="function"?Rn(n,t):Mn(n,t,e);return er.bind(null,e,t,i)}function Rn(e,t){return(r=()=>{},{scope:n={},params:i=[]}={})=>{let o=t.apply(F([n,...e]),i);Te(r,o)}}var pt={};function Nn(e,t){if(pt[e])return pt[e];let r=Object.getPrototypeOf(async function(){}).constructor,n=/^[\n\s]*if.*\(.*\)/.test(e)||/^(let|const)\s/.test(e)?`(async()=>{ ${e} })()`:e,o=(()=>{try{return new r(["__self","scope"],`with (scope) { __self.result = ${n} }; __self.finished = true; return __self.result;`)}catch(s){return X(s,t,e),Promise.resolve()}})();return pt[e]=o,o}function 
Mn(e,t,r){let n=Nn(t,r);return(i=()=>{},{scope:o={},params:s=[]}={})=>{n.result=void 0,n.finished=!1;let a=F([o,...e]);if(typeof n=="function"){let c=n(n,a).catch(l=>X(l,r,t));n.finished?(Te(i,n.result,a,s,r),n.result=void 0):c.then(l=>{Te(i,l,a,s,r)}).catch(l=>X(l,r,t)).finally(()=>n.result=void 0)}}}function Te(e,t,r,n,i){if(Ae&&typeof t=="function"){let o=t.apply(r,n);o instanceof Promise?o.then(s=>Te(e,s,r,n)).catch(s=>X(s,i,t)):e(o)}else typeof t=="object"&&t instanceof Promise?t.then(o=>e(o)):e(t)}var mt="x-";function O(e=""){return mt+e}function nr(e){mt=e}var ht={};function p(e,t){return ht[e]=t,{before(r){if(!ht[r]){console.warn("Cannot find directive `${directive}`. `${name}` will use the default order of execution");return}let n=H.indexOf(r);H.splice(n>=0?n:H.indexOf("DEFAULT"),0,e)}}}function le(e,t,r){if(t=Array.from(t),e._x_virtualDirectives){let o=Object.entries(e._x_virtualDirectives).map(([a,c])=>({name:a,value:c})),s=_t(o);o=o.map(a=>s.find(c=>c.name===a.name)?{name:`x-bind:${a.name}`,value:`"${a.value}"`}:a),t=t.concat(o)}let n={};return t.map(ir((o,s)=>n[o]=s)).filter(or).map(In(n,r)).sort(Dn).map(o=>Pn(e,o))}function _t(e){return Array.from(e).map(ir()).filter(t=>!or(t))}var gt=!1,ue=new Map,sr=Symbol();function ar(e){gt=!0;let t=Symbol();sr=t,ue.set(t,[]);let r=()=>{for(;ue.get(t).length;)ue.get(t).shift()();ue.delete(t)},n=()=>{gt=!1,r()};e(r),n()}function ft(e){let t=[],r=a=>t.push(a),[n,i]=Ht(e);return t.push(i),[{Alpine:j,effect:n,cleanup:r,evaluateLater:x.bind(x,e),evaluate:R.bind(R,e)},()=>t.forEach(a=>a())]}function Pn(e,t){let r=()=>{},n=ht[t.type]||r,[i,o]=ft(e);Yt(e,t.original,o);let s=()=>{e._x_ignore||e._x_ignoreSelf||(n.inline&&n.inline(e,t,i),n=n.bind(n,e,t,i),gt?ue.get(sr).push(n):n())};return s.runCleanups=o,s}var Ce=(e,t)=>({name:r,value:n})=>(r.startsWith(e)&&(r=r.replace(e,t)),{name:r,value:n}),Re=e=>e;function ir(e=()=>{}){return({name:t,value:r})=>{let{name:n,value:i}=cr.reduce((o,s)=>s(o),{name:t,value:r});return n!==t&&e(n,t),{name:n,value:i}}}var cr=[];function Z(e){cr.push(e)}function or({name:e}){return lr().test(e)}var lr=()=>new RegExp(`^${mt}([^:^.]+)\\b`);function In(e,t){return({name:r,value:n})=>{let i=r.match(lr()),o=r.match(/:([a-zA-Z0-9\-:]+)/),s=r.match(/\.[^.\]]+(?=[^\]]*$)/g)||[],a=t||e[r]||r;return{type:i?i[1]:null,value:o?o[1]:null,modifiers:s.map(c=>c.replace(".","")),expression:n,original:a}}}var xt="DEFAULT",H=["ignore","ref","data","id","bind","init","for","model","modelable","transition","show","if",xt,"teleport"];function Dn(e,t){let r=H.indexOf(e.type)===-1?xt:e.type,n=H.indexOf(t.type)===-1?xt:t.type;return H.indexOf(r)-H.indexOf(n)}function q(e,t,r={}){e.dispatchEvent(new CustomEvent(t,{detail:r,bubbles:!0,composed:!0,cancelable:!0}))}function T(e,t){if(typeof ShadowRoot=="function"&&e instanceof ShadowRoot){Array.from(e.children).forEach(i=>T(i,t));return}let r=!1;if(t(e,()=>r=!0),r)return;let n=e.firstElementChild;for(;n;)T(n,t,!1),n=n.nextElementSibling}function S(e,...t){console.warn(`Alpine Warning: ${e}`,...t)}var ur=!1;function dr(){ur&&S("Alpine has already been initialized on this page. Calling Alpine.start() more than once can cause problems."),ur=!0,document.body||S("Unable to initialize. Trying to load Alpine before `` is available. Did you forget to add `defer` in Alpine's ` + + + + + + + + + + +
+ [Demo-Seite „Interaktive Mindmap“: HTML-Markup bei der Extraktion verloren; erhalten sind nur Titel „Interaktive Mindmap“ und Fußzeile „Mindmap-Anwendung © 2023“]
+ + + + + + + + \ No newline at end of file diff --git a/static/js/mindmap.js b/static/js/mindmap.js new file mode 100644 index 0000000..110ac4b --- /dev/null +++ b/static/js/mindmap.js @@ -0,0 +1,749 @@ +/** + * Mindmap.js - Interaktive Mind-Map Implementierung + * - Cytoscape.js für Graph-Rendering + * - Fetch API für REST-Zugriffe + * - Socket.IO für Echtzeit-Synchronisation + */ + +(async () => { + /* 1. Initialisierung und Grundkonfiguration */ + const cy = cytoscape({ + container: document.getElementById('cy'), + style: [ + { + selector: 'node', + style: { + 'label': 'data(name)', + 'text-valign': 'center', + 'color': '#fff', + 'background-color': 'data(color)', + 'width': 45, + 'height': 45, + 'font-size': 11, + 'text-outline-width': 1, + 'text-outline-color': '#000', + 'text-outline-opacity': 0.5, + 'text-wrap': 'wrap', + 'text-max-width': 80 + } + }, + { + selector: 'node[icon]', + style: { + 'background-image': function(ele) { + return `static/img/icons/${ele.data('icon')}.svg`; + }, + 'background-width': '60%', + 'background-height': '60%', + 'background-position-x': '50%', + 'background-position-y': '40%', + 'text-margin-y': 10 + } + }, + { + selector: 'edge', + style: { + 'width': 2, + 'line-color': '#888', + 'target-arrow-shape': 'triangle', + 'curve-style': 'bezier', + 'target-arrow-color': '#888' + } + }, + { + selector: ':selected', + style: { + 'border-width': 3, + 'border-color': '#f8f32b' + } + } + ], + layout: { + name: 'breadthfirst', + directed: true, + padding: 30, + spacingFactor: 1.2 + } + }); + + /* 2. Hilfs-Funktionen für API-Zugriffe */ + const get = async endpoint => { + try { + const response = await fetch(endpoint); + if (!response.ok) { + console.error(`API-Fehler: ${endpoint} antwortet mit Status ${response.status}`); + return []; // Leeres Array zurückgeben bei Fehlern + } + return await response.json(); + } catch (error) { + console.error(`Fehler beim Abrufen von ${endpoint}:`, error); + return []; // Leeres Array zurückgeben bei Netzwerkfehlern + } + }; + + const post = async (endpoint, body) => { + try { + const response = await fetch(endpoint, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(body) + }); + if (!response.ok) { + console.error(`API-Fehler: ${endpoint} antwortet mit Status ${response.status}`); + return {}; // Leeres Objekt zurückgeben bei Fehlern + } + return await response.json(); + } catch (error) { + console.error(`Fehler beim POST zu ${endpoint}:`, error); + return {}; // Leeres Objekt zurückgeben bei Netzwerkfehlern + } + }; + + const del = async endpoint => { + try { + const response = await fetch(endpoint, { method: 'DELETE' }); + if (!response.ok) { + console.error(`API-Fehler: ${endpoint} antwortet mit Status ${response.status}`); + return {}; // Leeres Objekt zurückgeben bei Fehlern + } + return await response.json(); + } catch (error) { + console.error(`Fehler beim DELETE zu ${endpoint}:`, error); + return {}; // Leeres Objekt zurückgeben bei Netzwerkfehlern + } + }; + + /* 3. Kategorien laden für Style-Informationen */ + let categories = await get('/api/categories'); + + /* 4. 
Daten laden und Rendering */ + const loadMindmap = async () => { + try { + // Nodes und Beziehungen parallel laden + const [nodes, relationships] = await Promise.all([ + get('/api/mind_map_nodes'), + get('/api/node_relationships') + ]); + + // Graph leeren (für Reload-Fälle) + cy.elements().remove(); + + // Überprüfen, ob nodes ein Array ist, wenn nicht, setze es auf ein leeres Array + const nodesArray = Array.isArray(nodes) ? nodes : []; + + // Knoten zum Graph hinzufügen + cy.add( + nodesArray.map(node => { + // Kategorie-Informationen für Styling abrufen + const category = categories.find(c => c.id === node.category_id) || {}; + + return { + data: { + id: node.id.toString(), + name: node.name, + description: node.description, + color: node.color_code || category.color_code || '#6b7280', + icon: node.icon || category.icon, + category_id: node.category_id + }, + position: node.x && node.y ? { x: node.x, y: node.y } : undefined + }; + }) + ); + + // Überprüfen, ob relationships ein Array ist, wenn nicht, setze es auf ein leeres Array + const relationshipsArray = Array.isArray(relationships) ? relationships : []; + + // Kanten zum Graph hinzufügen + cy.add( + relationshipsArray.map(rel => ({ + data: { + id: `${rel.parent_id}_${rel.child_id}`, + source: rel.parent_id.toString(), + target: rel.child_id.toString() + } + })) + ); + + // Wenn keine Knoten geladen wurden, Fallback-Knoten erstellen + if (nodesArray.length === 0) { + // Mindestens einen Standardknoten hinzufügen + cy.add({ + data: { + id: 'fallback-1', + name: 'Mindmap', + description: 'Erstellen Sie hier Ihre eigene Mindmap', + color: '#3b82f6', + icon: 'help-circle' + }, + position: { x: 300, y: 200 } + }); + + // Erfolgsmeldung anzeigen + console.log('Mindmap erfolgreich initialisiert mit Fallback-Knoten'); + + // Info-Meldung für Benutzer anzeigen + const infoBox = document.createElement('div'); + infoBox.classList.add('info-message'); + infoBox.style.position = 'absolute'; + infoBox.style.top = '50%'; + infoBox.style.left = '50%'; + infoBox.style.transform = 'translate(-50%, -50%)'; + infoBox.style.padding = '15px 20px'; + infoBox.style.backgroundColor = 'rgba(59, 130, 246, 0.9)'; + infoBox.style.color = 'white'; + infoBox.style.borderRadius = '8px'; + infoBox.style.zIndex = '5'; + infoBox.style.maxWidth = '80%'; + infoBox.style.textAlign = 'center'; + infoBox.style.boxShadow = '0 4px 12px rgba(0,0,0,0.15)'; + infoBox.innerHTML = 'Mindmap erfolgreich initialisiert.
Verwenden Sie die Werkzeugleiste, um Knoten hinzuzufügen.'; + + document.getElementById('cy').appendChild(infoBox); + + // Meldung nach 5 Sekunden ausblenden + setTimeout(() => { + infoBox.style.opacity = '0'; + infoBox.style.transition = 'opacity 0.5s ease'; + setTimeout(() => { + if (infoBox.parentNode) { + infoBox.parentNode.removeChild(infoBox); + } + }, 500); + }, 5000); + } + + // Layout anwenden wenn keine Positionsdaten vorhanden + const nodesWithoutPosition = cy.nodes().filter(node => + !node.position() || (node.position().x === 0 && node.position().y === 0) + ); + + if (nodesWithoutPosition.length > 0) { + cy.layout({ + name: 'breadthfirst', + directed: true, + padding: 30, + spacingFactor: 1.2 + }).run(); + } + + // Tooltip-Funktionalität + cy.nodes().unbind('mouseover').bind('mouseover', (event) => { + const node = event.target; + const description = node.data('description'); + + if (description) { + const tooltip = document.getElementById('node-tooltip') || + document.createElement('div'); + + if (!tooltip.id) { + tooltip.id = 'node-tooltip'; + tooltip.style.position = 'absolute'; + tooltip.style.backgroundColor = '#333'; + tooltip.style.color = '#fff'; + tooltip.style.padding = '8px'; + tooltip.style.borderRadius = '4px'; + tooltip.style.maxWidth = '250px'; + tooltip.style.zIndex = 10; + tooltip.style.pointerEvents = 'none'; + tooltip.style.transition = 'opacity 0.2s'; + tooltip.style.boxShadow = '0 2px 10px rgba(0,0,0,0.3)'; + document.body.appendChild(tooltip); + } + + const renderedPosition = node.renderedPosition(); + const containerRect = cy.container().getBoundingClientRect(); + + tooltip.innerHTML = description; + tooltip.style.left = (containerRect.left + renderedPosition.x + 25) + 'px'; + tooltip.style.top = (containerRect.top + renderedPosition.y - 15) + 'px'; + tooltip.style.opacity = '1'; + } + }); + + cy.nodes().unbind('mouseout').bind('mouseout', () => { + const tooltip = document.getElementById('node-tooltip'); + if (tooltip) { + tooltip.style.opacity = '0'; + } + }); + + } catch (error) { + console.error('Fehler beim Laden der Mindmap:', error); + alert('Die Mindmap konnte nicht geladen werden. Bitte prüfen Sie die Konsole für Details.'); + } + }; + + // Initial laden + await loadMindmap(); + + /* 5. 
Socket.IO für Echtzeit-Synchronisation */ + const socket = io(); + + socket.on('node_added', async (node) => { + // Kategorie-Informationen für Styling abrufen + const category = categories.find(c => c.id === node.category_id) || {}; + + cy.add({ + data: { + id: node.id.toString(), + name: node.name, + description: node.description, + color: node.color_code || category.color_code || '#6b7280', + icon: node.icon || category.icon, + category_id: node.category_id + } + }); + + // Layout neu anwenden, wenn nötig + if (!node.x || !node.y) { + cy.layout({ name: 'breadthfirst', directed: true, padding: 30 }).run(); + } + }); + + socket.on('node_updated', (node) => { + const cyNode = cy.$id(node.id.toString()); + if (cyNode.length > 0) { + // Kategorie-Informationen für Styling abrufen + const category = categories.find(c => c.id === node.category_id) || {}; + + cyNode.data({ + name: node.name, + description: node.description, + color: node.color_code || category.color_code || '#6b7280', + icon: node.icon || category.icon, + category_id: node.category_id + }); + + if (node.x && node.y) { + cyNode.position({ x: node.x, y: node.y }); + } + } + }); + + socket.on('node_deleted', (nodeId) => { + const cyNode = cy.$id(nodeId.toString()); + if (cyNode.length > 0) { + cy.remove(cyNode); + } + }); + + socket.on('relationship_added', (rel) => { + cy.add({ + data: { + id: `${rel.parent_id}_${rel.child_id}`, + source: rel.parent_id.toString(), + target: rel.child_id.toString() + } + }); + }); + + socket.on('relationship_deleted', (rel) => { + const edgeId = `${rel.parent_id}_${rel.child_id}`; + const cyEdge = cy.$id(edgeId); + if (cyEdge.length > 0) { + cy.remove(cyEdge); + } + }); + + socket.on('category_updated', async () => { + // Kategorien neu laden + categories = await get('/api/categories'); + // Nodes aktualisieren, die diese Kategorie verwenden + cy.nodes().forEach(node => { + const categoryId = node.data('category_id'); + if (categoryId) { + const category = categories.find(c => c.id === categoryId); + if (category) { + node.data('color', node.data('color_code') || category.color_code); + node.data('icon', node.data('icon') || category.icon); + } + } + }); + }); + + /* 6. 
UI-Interaktionen */ + // Knoten hinzufügen + const btnAddNode = document.getElementById('addNode'); + if (btnAddNode) { + btnAddNode.addEventListener('click', async () => { + const name = prompt('Knotenname eingeben:'); + if (!name) return; + + const description = prompt('Beschreibung (optional):'); + + // Kategorie auswählen + let categoryId = null; + if (categories.length > 0) { + const categoryOptions = categories.map((c, i) => `${i}: ${c.name}`).join('\n'); + const categoryChoice = prompt( + `Kategorie auswählen (Nummer eingeben):\n${categoryOptions}`, + '0' + ); + + if (categoryChoice !== null) { + const index = parseInt(categoryChoice, 10); + if (!isNaN(index) && index >= 0 && index < categories.length) { + categoryId = categories[index].id; + } + } + } + + // Knoten erstellen + await post('/api/mind_map_node', { + name, + description, + category_id: categoryId + }); + // Darstellung wird durch Socket.IO Event übernommen + }); + } + + // Verbindung hinzufügen + const btnAddEdge = document.getElementById('addEdge'); + if (btnAddEdge) { + btnAddEdge.addEventListener('click', async () => { + const sel = cy.$('node:selected'); + if (sel.length !== 2) { + alert('Bitte genau zwei Knoten auswählen (Parent → Child)'); + return; + } + + const [parent, child] = sel.map(node => node.id()); + await post('/api/node_relationship', { + parent_id: parent, + child_id: child + }); + // Darstellung wird durch Socket.IO Event übernommen + }); + } + + // Knoten bearbeiten + const btnEditNode = document.getElementById('editNode'); + if (btnEditNode) { + btnEditNode.addEventListener('click', async () => { + const sel = cy.$('node:selected'); + if (sel.length !== 1) { + alert('Bitte genau einen Knoten auswählen'); + return; + } + + const node = sel[0]; + const nodeData = node.data(); + + const name = prompt('Knotenname:', nodeData.name); + if (!name) return; + + const description = prompt('Beschreibung:', nodeData.description || ''); + + // Kategorie auswählen + let categoryId = nodeData.category_id; + if (categories.length > 0) { + const categoryOptions = categories.map((c, i) => `${i}: ${c.name}`).join('\n'); + const categoryChoice = prompt( + `Kategorie auswählen (Nummer eingeben):\n${categoryOptions}`, + categories.findIndex(c => c.id === categoryId).toString() + ); + + if (categoryChoice !== null) { + const index = parseInt(categoryChoice, 10); + if (!isNaN(index) && index >= 0 && index < categories.length) { + categoryId = categories[index].id; + } + } + } + + // Knoten aktualisieren + await post(`/api/mind_map_node/${nodeData.id}`, { + name, + description, + category_id: categoryId + }); + // Darstellung wird durch Socket.IO Event übernommen + }); + } + + // Knoten löschen + const btnDeleteNode = document.getElementById('deleteNode'); + if (btnDeleteNode) { + btnDeleteNode.addEventListener('click', async () => { + const sel = cy.$('node:selected'); + if (sel.length !== 1) { + alert('Bitte genau einen Knoten auswählen'); + return; + } + + if (confirm('Sind Sie sicher, dass Sie diesen Knoten löschen möchten?')) { + const nodeId = sel[0].id(); + await del(`/api/mind_map_node/${nodeId}`); + // Darstellung wird durch Socket.IO Event übernommen + } + }); + } + + // Verbindung löschen + const btnDeleteEdge = document.getElementById('deleteEdge'); + if (btnDeleteEdge) { + btnDeleteEdge.addEventListener('click', async () => { + const sel = cy.$('edge:selected'); + if (sel.length !== 1) { + alert('Bitte genau eine Verbindung auswählen'); + return; + } + + if (confirm('Sind Sie sicher, dass Sie diese Verbindung 
löschen möchten?')) { + const edge = sel[0]; + const parentId = edge.source().id(); + const childId = edge.target().id(); + + await del(`/api/node_relationship/${parentId}/${childId}`); + // Darstellung wird durch Socket.IO Event übernommen + } + }); + } + + // Layout aktualisieren + const btnReLayout = document.getElementById('reLayout'); + if (btnReLayout) { + btnReLayout.addEventListener('click', () => { + cy.layout({ + name: 'breadthfirst', + directed: true, + padding: 30, + spacingFactor: 1.2 + }).run(); + }); + } + + /* 7. Position speichern bei Drag & Drop */ + cy.on('dragfree', 'node', async (e) => { + const node = e.target; + const position = node.position(); + + await post(`/api/mind_map_node/${node.id()}/position`, { + x: Math.round(position.x), + y: Math.round(position.y) + }); + + // Andere Benutzer erhalten die Position über den node_updated Event + }); + + /* 8. Kontextmenü (optional) */ + const setupContextMenu = () => { + cy.on('cxttap', 'node', function(e) { + const node = e.target; + const nodeData = node.data(); + + // Position des Kontextmenüs berechnen + const renderedPosition = node.renderedPosition(); + const containerRect = cy.container().getBoundingClientRect(); + const menuX = containerRect.left + renderedPosition.x; + const menuY = containerRect.top + renderedPosition.y; + + // Kontextmenü erstellen oder aktualisieren + let contextMenu = document.getElementById('context-menu'); + if (!contextMenu) { + contextMenu = document.createElement('div'); + contextMenu.id = 'context-menu'; + contextMenu.style.position = 'absolute'; + contextMenu.style.backgroundColor = '#fff'; + contextMenu.style.border = '1px solid #ccc'; + contextMenu.style.borderRadius = '4px'; + contextMenu.style.padding = '5px 0'; + contextMenu.style.boxShadow = '0 2px 10px rgba(0,0,0,0.2)'; + contextMenu.style.zIndex = 1000; + document.body.appendChild(contextMenu); + } + + // Menüinhalte + contextMenu.innerHTML = ` + + + + `; + + // Styling für Menüpunkte + const menuItems = contextMenu.querySelectorAll('.menu-item'); + menuItems.forEach(item => { + item.style.padding = '8px 20px'; + item.style.cursor = 'pointer'; + item.style.fontSize = '14px'; + + item.addEventListener('mouseover', function() { + this.style.backgroundColor = '#f0f0f0'; + }); + + item.addEventListener('mouseout', function() { + this.style.backgroundColor = 'transparent'; + }); + + // Event-Handler + item.addEventListener('click', async function() { + const action = this.getAttribute('data-action'); + + switch(action) { + case 'edit': + // Knoten bearbeiten (gleiche Logik wie beim Edit-Button) + const name = prompt('Knotenname:', nodeData.name); + if (name) { + const description = prompt('Beschreibung:', nodeData.description || ''); + await post(`/api/mind_map_node/${nodeData.id}`, { name, description }); + } + break; + + case 'connect': + // Modus zum Verbinden aktivieren + cy.nodes().unselect(); + node.select(); + alert('Wählen Sie nun einen zweiten Knoten aus, um eine Verbindung zu erstellen'); + break; + + case 'delete': + if (confirm('Sind Sie sicher, dass Sie diesen Knoten löschen möchten?')) { + await del(`/api/mind_map_node/${nodeData.id}`); + } + break; + } + + // Menü schließen + contextMenu.style.display = 'none'; + }); + }); + + // Menü positionieren und anzeigen + contextMenu.style.left = menuX + 'px'; + contextMenu.style.top = menuY + 'px'; + contextMenu.style.display = 'block'; + + // Event-Listener zum Schließen des Menüs + const closeMenu = function() { + if (contextMenu) { + contextMenu.style.display = 'none'; + } 
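+ // Hinweis: closeMenu entfernt seinen eigenen Listener und läuft daher höchstens einmal pro geöffnetem Menü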
+ document.removeEventListener('click', closeMenu); + }; + + // Verzögerung, um den aktuellen Click nicht zu erfassen + setTimeout(() => { + document.addEventListener('click', closeMenu); + }, 0); + + e.preventDefault(); + }); + }; + + // Kontextmenü aktivieren (optional) + // setupContextMenu(); + + /* 9. Export-Funktion (optional) */ + const btnExport = document.getElementById('exportMindmap'); + if (btnExport) { + btnExport.addEventListener('click', () => { + const elements = cy.json().elements; + const exportData = { + nodes: elements.nodes.map(n => ({ + id: n.data.id, + name: n.data.name, + description: n.data.description, + category_id: n.data.category_id, + x: Math.round(n.position?.x || 0), + y: Math.round(n.position?.y || 0) + })), + relationships: elements.edges.map(e => ({ + parent_id: e.data.source, + child_id: e.data.target + })) + }; + + const blob = new Blob([JSON.stringify(exportData, null, 2)], {type: 'application/json'}); + const url = URL.createObjectURL(blob); + const a = document.createElement('a'); + a.href = url; + a.download = 'mindmap_export.json'; + document.body.appendChild(a); + a.click(); + document.body.removeChild(a); + URL.revokeObjectURL(url); + }); + } + + /* 10. Filter-Funktion nach Kategorien (optional) */ + const setupCategoryFilters = () => { + const filterContainer = document.getElementById('category-filters'); + if (!filterContainer || !categories.length) return; + + filterContainer.innerHTML = ''; + + // "Alle anzeigen" Option + const allBtn = document.createElement('button'); + allBtn.innerText = 'Alle Kategorien'; + allBtn.className = 'category-filter active'; + allBtn.onclick = () => { + document.querySelectorAll('.category-filter').forEach(btn => btn.classList.remove('active')); + allBtn.classList.add('active'); + cy.nodes().removeClass('filtered').show(); + cy.edges().show(); + }; + filterContainer.appendChild(allBtn); + + // Filter-Button pro Kategorie + categories.forEach(category => { + const btn = document.createElement('button'); + btn.innerText = category.name; + btn.className = 'category-filter'; + btn.style.backgroundColor = category.color_code; + btn.style.color = '#fff'; + btn.onclick = () => { + document.querySelectorAll('.category-filter').forEach(btn => btn.classList.remove('active')); + btn.classList.add('active'); + + const matchingNodes = cy.nodes().filter(node => node.data('category_id') === category.id); + cy.nodes().addClass('filtered').hide(); + matchingNodes.removeClass('filtered').show(); + + // Verbindungen zu/von diesen Knoten anzeigen + cy.edges().hide(); + matchingNodes.connectedEdges().show(); + }; + filterContainer.appendChild(btn); + }); + }; + + // Filter-Funktionalität aktivieren (optional) + // setupCategoryFilters(); + + /* 11. 
Suchfunktion (optional) */ + const searchInput = document.getElementById('search-mindmap'); + if (searchInput) { + searchInput.addEventListener('input', (e) => { + const searchTerm = e.target.value.toLowerCase(); + + if (!searchTerm) { + cy.nodes().removeClass('search-hidden').show(); + cy.edges().show(); + return; + } + + cy.nodes().forEach(node => { + const name = node.data('name').toLowerCase(); + const description = (node.data('description') || '').toLowerCase(); + + if (name.includes(searchTerm) || description.includes(searchTerm)) { + node.removeClass('search-hidden').show(); + node.connectedEdges().show(); + } else { + node.addClass('search-hidden').hide(); + // Kanten nur verstecken, wenn beide verbundenen Knoten versteckt sind + node.connectedEdges().forEach(edge => { + const otherNode = edge.source().id() === node.id() ? edge.target() : edge.source(); + if (otherNode.hasClass('search-hidden')) { + edge.hide(); + } + }); + } + }); + }); + } + + console.log('Mindmap erfolgreich initialisiert'); +})(); \ No newline at end of file diff --git a/static/js/modules/chatgpt-assistant.js b/static/js/modules/chatgpt-assistant.js new file mode 100644 index 0000000..0da90a2 --- /dev/null +++ b/static/js/modules/chatgpt-assistant.js @@ -0,0 +1,572 @@ +/** + * ChatGPT Assistent Modul + * Verwaltet die Interaktion mit der OpenAI API und die Benutzeroberfläche des Assistenten + */ + +class ChatGPTAssistant { + constructor() { + this.messages = []; + this.isOpen = false; + this.isLoading = false; + this.container = null; + this.chatHistory = null; + this.inputField = null; + this.suggestionArea = null; + this.maxRetries = 2; + this.retryCount = 0; + this.markdownParser = null; + this.initializeMarkdownParser(); + } + + /** + * Initialisiert den Markdown-Parser + */ + async initializeMarkdownParser() { + // Dynamisch marked.js laden, wenn noch nicht vorhanden + if (!window.marked) { + try { + // Prüfen, ob marked.js bereits im Dokument geladen ist + if (!document.querySelector('script[src*="marked"]')) { + // Falls nicht, Script-Tag erstellen und einfügen + const script = document.createElement('script'); + script.src = 'https://cdn.jsdelivr.net/npm/marked/marked.min.js'; + script.async = true; + + // Promise erstellen, das resolved wird, wenn das Script geladen wurde + await new Promise((resolve, reject) => { + script.onload = resolve; + script.onerror = reject; + document.head.appendChild(script); + }); + + console.log('Marked.js erfolgreich geladen'); + } + + // Marked konfigurieren + this.markdownParser = window.marked; + this.markdownParser.setOptions({ + gfm: true, + breaks: true, + sanitize: true, + smartLists: true, + smartypants: true + }); + } catch (error) { + console.error('Fehler beim Laden von marked.js:', error); + // Fallback-Parser, der nur einfache Absätze erkennt + this.markdownParser = { + parse: (text) => { + return text.split('\n').map(line => { + if (line.trim() === '') return '
<br>'; + return `<p>${line}</p>
`; + }).join(''); + } + }; + } + } else { + // Marked ist bereits geladen + this.markdownParser = window.marked; + } + } + + /** + * Initialisiert den Assistenten und fügt die UI zum DOM hinzu + */ + init() { + // Assistent-Container erstellen + this.createAssistantUI(); + + // Event-Listener hinzufügen + this.setupEventListeners(); + + // Ersten Willkommensnachricht anzeigen + this.addMessage("assistant", "Hallo! Ich bin dein KI-Assistent (4o-mini) und habe Zugriff auf die Wissensdatenbank. Wie kann ich dir helfen?\n\nDu kannst mir Fragen über:\n- **Gedanken** in der Datenbank\n- **Kategorien** und Wissenschaftsbereiche\n- **Mindmaps** und Wissensverknüpfungen\n\nstellen."); + + // Vorschläge anzeigen + this.showSuggestions([ + "Zeige mir Gedanken zur künstlichen Intelligenz", + "Welche Kategorien gibt es in der Datenbank?", + "Suche nach Mindmaps zum Thema Informatik" + ]); + + console.log('KI-Assistent initialisiert!'); + } + + /** + * Erstellt die UI-Elemente für den Assistenten + */ + createAssistantUI() { + // Hauptcontainer erstellen + this.container = document.createElement('div'); + this.container.id = 'chatgpt-assistant'; + this.container.className = 'fixed bottom-4 right-4 z-50 flex flex-col'; + + // Button zum Öffnen/Schließen des Assistenten + const toggleButton = document.createElement('button'); + toggleButton.id = 'assistant-toggle'; + toggleButton.className = 'ml-auto bg-primary-600 hover:bg-primary-700 text-white rounded-full p-3 shadow-lg transition-all duration-300 mb-2'; + toggleButton.innerHTML = ''; + + // Chat-Container + const chatContainer = document.createElement('div'); + chatContainer.id = 'assistant-chat'; + chatContainer.className = 'bg-white dark:bg-dark-800 rounded-lg shadow-xl overflow-hidden transition-all duration-300 w-80 md:w-96 max-h-0 opacity-0'; + + // Chat-Header + const header = document.createElement('div'); + header.className = 'bg-primary-600 text-white p-3 flex items-center justify-between'; + header.innerHTML = ` +
+ KI-Assistent (4o-mini)
+ + `; + + // Chat-Verlauf + this.chatHistory = document.createElement('div'); + this.chatHistory.id = 'assistant-history'; + this.chatHistory.className = 'p-3 overflow-y-auto max-h-96 space-y-3'; + + // Vorschlagsbereich + this.suggestionArea = document.createElement('div'); + this.suggestionArea.id = 'assistant-suggestions'; + this.suggestionArea.className = 'px-3 pb-2 flex flex-wrap gap-2 overflow-x-auto hidden'; + + // Chat-Eingabe + const inputContainer = document.createElement('div'); + inputContainer.className = 'border-t border-gray-200 dark:border-dark-600 p-3 flex items-center'; + + this.inputField = document.createElement('input'); + this.inputField.type = 'text'; + this.inputField.placeholder = 'Stelle eine Frage zur Wissensdatenbank...'; + this.inputField.className = 'flex-1 border border-gray-300 dark:border-dark-600 dark:bg-dark-700 dark:text-white rounded-l-lg px-3 py-2 focus:outline-none focus:ring-2 focus:ring-primary-500'; + + const sendButton = document.createElement('button'); + sendButton.id = 'assistant-send'; + sendButton.className = 'bg-primary-600 hover:bg-primary-700 text-white px-4 py-2 rounded-r-lg'; + sendButton.innerHTML = ''; + + // Elemente zusammenfügen + inputContainer.appendChild(this.inputField); + inputContainer.appendChild(sendButton); + + chatContainer.appendChild(header); + chatContainer.appendChild(this.chatHistory); + chatContainer.appendChild(this.suggestionArea); + chatContainer.appendChild(inputContainer); + + this.container.appendChild(toggleButton); + this.container.appendChild(chatContainer); + + // Zum DOM hinzufügen + document.body.appendChild(this.container); + } + + /** + * Richtet Event-Listener für die Benutzeroberfläche ein + */ + setupEventListeners() { + // Toggle-Button + const toggleButton = document.getElementById('assistant-toggle'); + if (toggleButton) { + toggleButton.addEventListener('click', () => this.toggleAssistant()); + } + + // Schließen-Button + const closeButton = document.getElementById('assistant-close'); + if (closeButton) { + closeButton.addEventListener('click', () => this.toggleAssistant(false)); + } + + // Senden-Button + const sendButton = document.getElementById('assistant-send'); + if (sendButton) { + sendButton.addEventListener('click', () => this.sendMessage()); + } + + // Enter-Taste im Eingabefeld + if (this.inputField) { + this.inputField.addEventListener('keyup', (e) => { + if (e.key === 'Enter') { + this.sendMessage(); + } + }); + } + + // Vorschläge klickbar machen + if (this.suggestionArea) { + this.suggestionArea.addEventListener('click', (e) => { + if (e.target.classList.contains('suggestion-pill')) { + this.inputField.value = e.target.textContent; + this.sendMessage(); + } + }); + } + } + + /** + * Öffnet oder schließt den Assistenten + * @param {boolean} state - Optional: erzwingt einen bestimmten Zustand + */ + toggleAssistant(state = null) { + const chatContainer = document.getElementById('assistant-chat'); + if (!chatContainer) return; + + this.isOpen = state !== null ? 
state : !this.isOpen; + + if (this.isOpen) { + chatContainer.classList.remove('max-h-0', 'opacity-0'); + chatContainer.classList.add('max-h-[32rem]', 'opacity-100'); + if (this.inputField) this.inputField.focus(); + + // Zeige Vorschläge wenn verfügbar + if (this.suggestionArea && this.suggestionArea.children.length > 0) { + this.suggestionArea.classList.remove('hidden'); + } + } else { + chatContainer.classList.remove('max-h-[32rem]', 'opacity-100'); + chatContainer.classList.add('max-h-0', 'opacity-0'); + } + } + + /** + * Fügt eine Nachricht zum Chat-Verlauf hinzu + * @param {string} sender - 'user' oder 'assistant' + * @param {string} text - Nachrichtentext + */ + addMessage(sender, text) { + // Nachricht zum Verlauf hinzufügen + this.messages.push({ role: sender, content: text }); + + // DOM-Element erstellen + const messageEl = document.createElement('div'); + messageEl.className = `flex ${sender === 'user' ? 'justify-end' : 'justify-start'}`; + + const bubble = document.createElement('div'); + bubble.className = sender === 'user' + ? 'bg-primary-100 dark:bg-primary-900 text-gray-800 dark:text-white rounded-lg py-2 px-3 max-w-[85%]' + : 'bg-gray-100 dark:bg-dark-700 text-gray-800 dark:text-white rounded-lg py-2 px-3 max-w-[85%]'; + + // Formatierung des Texts (mit Markdown für Assistent-Nachrichten) + let formattedText = ''; + + if (sender === 'assistant' && this.markdownParser) { + // Für Assistentnachrichten Markdown verwenden + try { + formattedText = this.markdownParser.parse(text); + + // CSS für Markdown-Formatierung hinzufügen + const markdownStyles = ` + .markdown-bubble h1, .markdown-bubble h2, .markdown-bubble h3, + .markdown-bubble h4, .markdown-bubble h5, .markdown-bubble h6 { + font-weight: bold; + margin-top: 0.5rem; + margin-bottom: 0.5rem; + } + .markdown-bubble h1 { font-size: 1.4rem; } + .markdown-bubble h2 { font-size: 1.3rem; } + .markdown-bubble h3 { font-size: 1.2rem; } + .markdown-bubble h4 { font-size: 1.1rem; } + .markdown-bubble ul, .markdown-bubble ol { + padding-left: 1.5rem; + margin: 0.5rem 0; + } + .markdown-bubble ul { list-style-type: disc; } + .markdown-bubble ol { list-style-type: decimal; } + .markdown-bubble p { margin: 0.5rem 0; } + .markdown-bubble code { + font-family: monospace; + background-color: rgba(0, 0, 0, 0.1); + padding: 1px 4px; + border-radius: 3px; + } + .markdown-bubble pre { + background-color: rgba(0, 0, 0, 0.1); + padding: 0.5rem; + border-radius: 4px; + overflow-x: auto; + margin: 0.5rem 0; + } + .markdown-bubble pre code { + background-color: transparent; + padding: 0; + } + .markdown-bubble blockquote { + border-left: 3px solid rgba(0, 0, 0, 0.2); + padding-left: 0.8rem; + margin: 0.5rem 0; + font-style: italic; + } + .dark .markdown-bubble code { + background-color: rgba(255, 255, 255, 0.1); + } + .dark .markdown-bubble pre { + background-color: rgba(255, 255, 255, 0.1); + } + .dark .markdown-bubble blockquote { + border-left-color: rgba(255, 255, 255, 0.2); + } + `; + + // Füge die Styles hinzu, wenn sie noch nicht vorhanden sind + if (!document.querySelector('#markdown-chat-styles')) { + const style = document.createElement('style'); + style.id = 'markdown-chat-styles'; + style.textContent = markdownStyles; + document.head.appendChild(style); + } + + // Klasse für Markdown-Formatierung hinzufügen + bubble.classList.add('markdown-bubble'); + } catch (error) { + console.error('Fehler bei der Markdown-Formatierung:', error); + // Fallback zur einfachen Formatierung + formattedText = text.split('\n').map(line => { + if 
(line.trim() === '') return '
<br>'; + return `<p>${line}</p>
`; + }).join(''); + } + } else { + // Für Benutzernachrichten einfache Formatierung + formattedText = text.split('\n').map(line => { + if (line.trim() === '') return '
<br>'; + return `<p>${line}</p>
`; + }).join(''); + } + + bubble.innerHTML = formattedText; + + messageEl.appendChild(bubble); + + if (this.chatHistory) { + this.chatHistory.appendChild(messageEl); + + // Scroll zum Ende des Verlaufs + this.chatHistory.scrollTop = this.chatHistory.scrollHeight; + } + } + + /** + * Zeigt Vorschläge als klickbare Pills an + * @param {string[]} suggestions - Liste von Vorschlägen + */ + showSuggestions(suggestions) { + if (!this.suggestionArea) return; + + // Vorherige Vorschläge entfernen + this.suggestionArea.innerHTML = ''; + + if (suggestions && suggestions.length > 0) { + suggestions.forEach(suggestion => { + const pill = document.createElement('button'); + pill.className = 'suggestion-pill text-sm bg-gray-200 dark:bg-dark-600 hover:bg-gray-300 dark:hover:bg-dark-500 text-gray-800 dark:text-gray-200 rounded-full px-3 py-1 mb-2 transition-colors'; + pill.textContent = suggestion; + this.suggestionArea.appendChild(pill); + }); + + this.suggestionArea.classList.remove('hidden'); + } else { + this.suggestionArea.classList.add('hidden'); + } + } + + /** + * Sendet die Benutzernachricht an den Server und zeigt die Antwort an + */ + async sendMessage() { + if (!this.inputField) return; + + const userInput = this.inputField.value.trim(); + if (!userInput || this.isLoading) return; + + // Vorschläge ausblenden + if (this.suggestionArea) { + this.suggestionArea.classList.add('hidden'); + } + + // Benutzernachricht anzeigen + this.addMessage('user', userInput); + + // Eingabefeld zurücksetzen + this.inputField.value = ''; + + // Ladeindikator anzeigen + this.isLoading = true; + this.showLoadingIndicator(); + + try { + console.log('Sende Anfrage an KI-Assistent API...'); + // Anfrage an den Server senden + const response = await fetch('/api/assistant', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + messages: this.messages + }), + cache: 'no-cache', // Kein Cache verwenden + credentials: 'same-origin' // Cookies senden + }); + + // Ladeindikator entfernen + this.removeLoadingIndicator(); + + if (!response.ok) { + throw new Error(`Serverfehler: ${response.status} ${response.statusText}`); + } + + const data = await response.json(); + console.log('Antwort erhalten:', data); + + // Antwort anzeigen + if (data.response) { + this.addMessage('assistant', data.response); + + // Neue Vorschläge basierend auf dem aktuellen Kontext anzeigen + this.generateContextualSuggestions(); + + // Erfolgreiche Anfrage zurücksetzen + this.retryCount = 0; + } else if (data.error) { + this.addMessage('assistant', `Fehler: ${data.error}`); + } else { + throw new Error('Unerwartetes Antwortformat vom Server'); + } + } catch (error) { + console.error('Fehler bei der Kommunikation mit dem Assistenten:', error); + + // Ladeindikator entfernen, falls noch vorhanden + this.removeLoadingIndicator(); + + // Fehlermeldung anzeigen oder Wiederholungsversuch starten + if (this.retryCount < this.maxRetries) { + this.retryCount++; + this.addMessage('assistant', 'Es gab ein Problem mit der Anfrage. 
Ich versuche es erneut...'); + + // Kurze Verzögerung vor dem erneuten Versuch + setTimeout(() => { + // Letzte Benutzernachricht aus dem Messages-Array entfernen + const lastUserMessage = this.messages[this.messages.length - 2].content; + this.messages = this.messages.slice(0, -2); // Entferne Benutzernachricht und Fehlermeldung + + // Erneuter Versand mit gleicher Nachricht + this.inputField.value = lastUserMessage; + this.sendMessage(); + }, 1500); + } else { + // Maximale Anzahl an Wiederholungsversuchen erreicht + this.addMessage('assistant', 'Es tut mir leid, aber es gab ein Problem bei der Verarbeitung deiner Anfrage. Bitte versuche es später noch einmal.'); + this.retryCount = 0; // Zurücksetzen für die nächste Anfrage + } + } finally { + this.isLoading = false; + } + } + + /** + * Generiert kontextbasierte Vorschläge basierend auf dem aktuellen Chat-Verlauf + */ + generateContextualSuggestions() { + // Basierend auf letzter Antwort des Assistenten, verschiedene Vorschläge generieren + const lastAssistantMessage = this.messages.findLast(msg => msg.role === 'assistant')?.content || ''; + + let suggestions = []; + + // Intelligente Vorschläge basierend auf Kontext + if (lastAssistantMessage.includes('Künstliche Intelligenz') || + lastAssistantMessage.includes('KI ') || + lastAssistantMessage.includes('AI ')) { + suggestions = [ + "Wie wird KI in der Wissenschaft eingesetzt?", + "Zeige mir Gedanken zum maschinellen Lernen", + "Was ist der Unterschied zwischen KI und ML?" + ]; + } else if (lastAssistantMessage.includes('Kategorie') || + lastAssistantMessage.includes('Kategorien')) { + suggestions = [ + "Zeige mir die Unterkategorien", + "Welche Gedanken gehören zu dieser Kategorie?", + "Liste alle Wissenschaftskategorien auf" + ]; + } else if (lastAssistantMessage.includes('Mindmap') || + lastAssistantMessage.includes('Visualisierung')) { + suggestions = [ + "Wie kann ich eine eigene Mindmap erstellen?", + "Zeige mir Beispiele für Mindmaps", + "Wie funktionieren die Verbindungen in Mindmaps?" + ]; + } else { + // Standardvorschläge + suggestions = [ + "Erzähle mir mehr dazu", + "Gibt es Beispiele dafür?", + "Wie kann ich diese Information nutzen?" + ]; + } + + this.showSuggestions(suggestions); + } + + /** + * Zeigt einen Ladeindikator im Chat an + */ + showLoadingIndicator() { + if (!this.chatHistory) return; + + // Entferne vorhandenen Ladeindikator (falls vorhanden) + this.removeLoadingIndicator(); + + const loadingEl = document.createElement('div'); + loadingEl.id = 'assistant-loading'; + loadingEl.className = 'flex justify-start'; + + const bubble = document.createElement('div'); + bubble.className = 'bg-gray-100 dark:bg-dark-700 text-gray-800 dark:text-white rounded-lg py-2 px-3'; + bubble.innerHTML = '
...'; + loadingEl.appendChild(bubble); + this.chatHistory.appendChild(loadingEl); + + // Scroll zum Ende des Verlaufs + this.chatHistory.scrollTop = this.chatHistory.scrollHeight; + } + + /** + * Entfernt den Ladeindikator aus dem Chat + */ + removeLoadingIndicator() { + const loadingIndicator = document.getElementById('assistant-loading'); + if (loadingIndicator) { + loadingIndicator.remove(); + } + } + + /** + * Öffnet den Assistenten und sendet eine vorgegebene Frage + * @param {string} question - Die zu stellende Frage + */ + async sendQuestion(question) { + if (!question || this.isLoading) return; + + // Assistenten öffnen + this.toggleAssistant(true); + + // Kurze Verzögerung, um sicherzustellen, dass die UI vollständig geöffnet ist + await new Promise(resolve => setTimeout(resolve, 300)); + + // Frage in Eingabefeld setzen + if (this.inputField) { + this.inputField.value = question; + + // Sende die Frage + this.sendMessage(); + } + } +} + +// Mache die Klasse global verfügbar +window.ChatGPTAssistant = ChatGPTAssistant; \ No newline at end of file diff --git a/static/js/modules/mindmap-page.js b/static/js/modules/mindmap-page.js new file mode 100644 index 0000000..a543504 --- /dev/null +++ b/static/js/modules/mindmap-page.js @@ -0,0 +1,777 @@ +/** + * Mindmap-Seite JavaScript + * Spezifische Funktionen für die Mindmap-Seite + */ + +// Füge das Modul zum globalen MindMap-Objekt hinzu +window.MindMap = window.MindMap || {}; + +// Registriere den Initialisierer im MindMap-Objekt +window.MindMap.pageInitializers = window.MindMap.pageInitializers || {}; +window.MindMap.pageInitializers.mindmap = initMindmapPage; + +// Initialisiere die Mindmap-Seite genau einmal, sobald das DOM bereit ist; +// ein Guard verhindert eine doppelte Initialisierung (sofortiger Aufruf plus DOMContentLoaded) +let mindmapPageInitialized = false; +function startMindmapPage() { + if (mindmapPageInitialized) return; + if (document.body && document.body.dataset && document.body.dataset.page === 'mindmap') { + mindmapPageInitialized = true; + initMindmapPage(); + } +} + +if (document.readyState === 'loading') { + document.addEventListener('DOMContentLoaded', startMindmapPage); +} else { + startMindmapPage(); +} + +/** + * Initialisiert die Mindmap-Seite + */ +function initMindmapPage() { + console.log('Mindmap-Seite Initialisierung startet...'); + console.log('D3 Bibliothek verfügbar:', typeof d3 !== 'undefined'); + console.log('MindMapVisualization verfügbar:', typeof MindMapVisualization !== 'undefined'); + + const mindmapContainer = document.getElementById('mindmap-container'); + const thoughtsContainer = document.getElementById('thoughts-container'); + + if (!mindmapContainer) { + console.error('Mindmap-Container nicht gefunden!'); + return; + } + console.log('Mindmap-Container gefunden:', mindmapContainer); + + // Prüfe, ob D3.js geladen ist + if (typeof d3 === 'undefined') { + console.error('D3.js ist nicht geladen!'); + mindmapContainer.innerHTML = ` +
+ D3.js konnte nicht geladen werden. Bitte laden Sie die Seite neu.
+ `; + return; + } + + // Prüfe, ob MindMapVisualization definiert ist + if (typeof MindMapVisualization === 'undefined') { + console.error('MindMapVisualization-Klasse ist nicht definiert!'); + mindmapContainer.innerHTML = ` +
+ MindMap-Visualisierung konnte nicht geladen werden. Bitte laden Sie die Seite neu.
+ `; + return; + } + + // Erstelle die Mindmap-Visualisierung + try { + console.log('Versuche, MindMapVisualization zu erstellen...'); + const mindmap = new MindMapVisualization('#mindmap-container', { + height: 600, + onNodeClick: handleNodeClick + }); + + // Globale Referenz für die Zoom-Buttons erstellen + window.mindmapInstance = mindmap; + + // Lade die Mindmap-Daten + mindmap.loadData(); + console.log('MindMapVisualization erfolgreich erstellt und geladen'); + } catch (error) { + console.error('Fehler beim Erstellen der MindMapVisualization:', error); + mindmapContainer.innerHTML = ` +
+ Fehler beim Erstellen der Mindmap-Visualisierung: ${error.message} + `; + return; + }
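+ // Hinweis: filterBySearchTerm wird von der MindMapVisualization-Klasse bereitgestellt + // (Annahme: die Methode blendet Knoten aus, deren Name nicht zum Suchbegriff passt)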
+ + // Suchfunktion für die Mindmap + const searchInput = document.getElementById('mindmap-search'); + if (searchInput) { + searchInput.addEventListener('input', function(e) { + // Die Variable 'mindmap' ist nur im try-Block sichtbar, daher wird die globale Instanz verwendet + if (window.mindmapInstance) { + window.mindmapInstance.filterBySearchTerm(e.target.value); + } + }); + } + + /** + * Behandelt Klicks auf Mindmap-Knoten + */ + async function handleNodeClick(node) { + if (!thoughtsContainer) return;
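+ // Erwartetes Antwortformat von /api/nodes/<id>/thoughts, abgeleitet aus createThoughtCard (Annahme): + // [{ "title": "...", "content": "...", "keywords": "ki,ml", "author": "...", "timestamp": "...", "color_code": "#4080ff" }, ...]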
+ + // Zeige Lade-Animation + thoughtsContainer.innerHTML = ` + Gedanken werden geladen ...
+ `; + + try { + // Lade Gedanken für den ausgewählten Knoten + const response = await fetch(`/api/nodes/${node.id}/thoughts`); + + if (!response.ok) { + throw new Error(`HTTP error! status: ${response.status}`); + } + + const thoughts = await response.json(); + + // Gedanken anzeigen + renderThoughts(thoughts, node.name); + + } catch (error) { + console.error('Fehler beim Laden der Gedanken:', error); + thoughtsContainer.innerHTML = ` +
+ Fehler beim Laden der Gedanken. + Bitte versuchen Sie es später erneut.
+ `; + } + } + + /** + * Rendert die Gedanken in den Container + */ + function renderThoughts(thoughts, nodeName) { + // Wenn keine Gedanken vorhanden sind + if (thoughts.length === 0) { + thoughtsContainer.innerHTML = ` +
+            <div class="flex flex-col items-center justify-center py-12 text-center">
+                <p class="mb-4 text-gray-400">
+                    Keine Gedanken für "${nodeName}" vorhanden.
+                </p>
+                <button id="add-thought-btn" class="btn btn-primary">
+                    Neuen Gedanken hinzufügen
+                </button>
+            </div>
+ `; + + // Event-Listener für den Button + document.getElementById('add-thought-btn').addEventListener('click', () => { + openAddThoughtModal(nodeName); + }); + + return; + } + + // Gedanken anzeigen + thoughtsContainer.innerHTML = ` +
+            <div class="flex items-center justify-between mb-4">
+                <h3 class="text-lg font-semibold">
+                    Gedanken zu "${nodeName}"
+                </h3>
+                <button id="add-thought-btn" class="btn btn-primary">
+                    Neuen Gedanken hinzufügen
+                </button>
+            </div>
+            <div id="thoughts-grid" class="grid grid-cols-1 md:grid-cols-2 gap-4"></div>
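+            <!-- #thoughts-grid is populated via createThoughtCard() below; the add-thought-btn listener is attached right after this template is injected. -->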
+ `; + + // Button-Event-Listener + document.getElementById('add-thought-btn').addEventListener('click', () => { + openAddThoughtModal(nodeName); + }); + + // Gedanken-Karten rendern + const thoughtsGrid = document.getElementById('thoughts-grid'); + thoughts.forEach((thought, index) => { + const card = createThoughtCard(thought); + + // Animation verzögern für gestaffeltes Erscheinen + setTimeout(() => { + card.classList.add('opacity-100'); + card.classList.remove('opacity-0', 'translate-y-4'); + }, index * 100); + + thoughtsGrid.appendChild(card); + }); + } + + /** + * Erstellt eine Gedanken-Karte + */ + function createThoughtCard(thought) { + const card = document.createElement('div'); + card.className = 'card transition-all duration-300 opacity-0 translate-y-4 transform hover:shadow-lg border-l-4'; + card.style.borderLeftColor = thought.color_code || '#4080ff'; + + // Karten-Inhalt + card.innerHTML = ` +
+            <div class="p-4">
+                <div class="flex items-start justify-between mb-2">
+                    <h4 class="font-semibold">
+                        ${thought.title}
+                    </h4>
+                    <span class="text-xs text-gray-400">${thought.timestamp}</span>
+                </div>
+                <p class="text-sm mb-3">
+                    ${thought.content}
+                </p>
+                ${thought.keywords ? `
+                    <div class="flex flex-wrap gap-1 mb-3">
+                        ${thought.keywords.split(',').map(keyword =>
+                            `<span class="tag">${keyword.trim()}</span>`
+                        ).join('')}
+                    </div>
+                ` : ''}
+                <div class="flex items-center justify-between text-xs text-gray-400">
+                    <span>
+                        ${thought.author}
+                    </span>
+                    <div class="flex gap-2">
+                        <button type="button" onclick="showComments(${thought.id})">Kommentare</button>
+                        <button type="button" onclick="showRelations(${thought.id})">Beziehungen</button>
+                    </div>
+                </div>
+            </div>
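+            <!-- The Kommentare/Beziehungen buttons assume each thought object exposes an id; showComments()/showRelations() are the window-level helpers defined further down in this module. -->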
+ `; + + return card; + } + + /** + * Öffnet das Modal zum Hinzufügen eines neuen Gedankens + */ + function openAddThoughtModal(nodeName) { + // Node-Information extrahieren + let nodeId, nodeTitle; + + if (typeof nodeName === 'string') { + // Wenn nur ein String übergeben wurde + nodeTitle = nodeName; + // Versuche nodeId aus der Mindmap zu finden + const nodeElement = d3.selectAll('.node-group').filter(d => d.name === nodeName); + if (nodeElement.size() > 0) { + nodeId = nodeElement.datum().id; + } + } else if (typeof nodeName === 'object') { + // Wenn ein Node-Objekt übergeben wurde + nodeId = nodeName.id; + nodeTitle = nodeName.name; + } else { + console.error('Ungültiger Node-Parameter', nodeName); + return; + } + + // Modal-Struktur erstellen + const modal = document.createElement('div'); + modal.className = 'fixed inset-0 z-50 flex items-center justify-center p-4'; + modal.innerHTML = ` + +
+            <div id="modal-backdrop" class="absolute inset-0 bg-black/60 backdrop-blur-sm"></div>
+            <div class="glass-effect relative w-full max-w-lg rounded-xl p-6">
+                <div class="flex items-center justify-between mb-4">
+                    <h3 class="text-lg font-semibold">
+                        Neuer Gedanke zu "${nodeTitle}"
+                    </h3>
+                    <button id="close-modal-btn" type="button" aria-label="Schließen">&times;</button>
+                </div>
+                <form id="add-thought-form">
+                    <input type="hidden" name="node_id" value="${nodeId || ''}">
+                    <div class="mb-3">
+                        <label for="title">Titel</label>
+                        <input type="text" id="title" name="title" required>
+                    </div>
+                    <div class="mb-3">
+                        <label for="content">Inhalt</label>
+                        <textarea id="content" name="content" rows="4" required></textarea>
+                    </div>
+                    <div class="mb-3">
+                        <label for="keywords">Schlagwörter (kommagetrennt)</label>
+                        <input type="text" id="keywords" name="keywords">
+                    </div>
+                    <div class="mb-3">
+                        <label for="abstract">Kurzbeschreibung</label>
+                        <textarea id="abstract" name="abstract" rows="2"></textarea>
+                    </div>
+                    <div class="mb-3 flex items-center gap-2">
+                        <label for="color_code">Farbe</label>
+                        <input type="color" id="color_code" name="color_code" value="#4080ff">
+                        <select id="predefined_colors">
+                            <option value="#4080ff">Blau</option>
+                            <option value="#a040ff">Violett</option>
+                            <option value="#205cf5">Dunkelblau</option>
+                        </select>
+                    </div>
+                    <div class="mb-3 relative">
+                        <input type="hidden" id="relation_type" name="relation_type">
+                        <input type="hidden" name="relation_target">
+                        <button id="open-relation-btn" type="button">
+                            <span>Beziehung wählen</span>
+                        </button>
+                        <div id="relation-menu" class="hidden absolute">
+                            <button type="button" class="relation-type-btn" data-type="builds_upon">Baut auf</button>
+                            <button type="button" class="relation-type-btn" data-type="contradicts">Widerspricht</button>
+                        </div>
+                    </div>
+                    <div class="flex justify-end gap-2">
+                        <button id="cancel-btn" type="button">Abbrechen</button>
+                        <button type="submit" class="btn btn-primary">Speichern</button>
+                    </div>
+                </form>
+            </div>
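+            <!-- Field name attributes must match the FormData keys read in the submit handler: node_id, title, content, keywords, abstract, color_code, relation_type, relation_target. The ids (#title, #cancel-btn, #open-relation-btn, ...) are queried by the listeners attached right below. -->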
+ `; + + document.body.appendChild(modal); + + // Focus auf das erste Feld setzen + setTimeout(() => { + modal.querySelector('#title').focus(); + }, 100); + + // Event-Listener hinzufügen + modal.querySelector('#modal-backdrop').addEventListener('click', closeModal); + modal.querySelector('#close-modal-btn').addEventListener('click', closeModal); + modal.querySelector('#cancel-btn').addEventListener('click', closeModal); + + // Farbauswahl-Event-Listener + const colorInput = modal.querySelector('#color_code'); + const predefinedColors = modal.querySelector('#predefined_colors'); + + predefinedColors.addEventListener('change', function() { + colorInput.value = this.value; + }); + + // Beziehungsmenü-Funktionalität + const relationBtn = modal.querySelector('#open-relation-btn'); + const relationMenu = modal.querySelector('#relation-menu'); + + relationBtn.addEventListener('click', function() { + relationMenu.classList.toggle('hidden'); + }); + + // Klick außerhalb des Menüs schließt es + document.addEventListener('click', function(event) { + if (!relationBtn.contains(event.target) && !relationMenu.contains(event.target)) { + relationMenu.classList.add('hidden'); + } + }); + + // Beziehungstyp-Auswahl + const relationTypeBtns = modal.querySelectorAll('.relation-type-btn'); + const relationTypeInput = modal.querySelector('#relation_type'); + + relationTypeBtns.forEach(btn => { + btn.addEventListener('click', function() { + const relationType = this.dataset.type; + relationTypeInput.value = relationType; + + // Sichtbare Anzeige aktualisieren + relationBtn.innerHTML = ` + + ${this.innerText.trim()} + + `; + + // Menü schließen + relationMenu.classList.add('hidden'); + }); + }); + + // Form-Submit-Handler + const form = modal.querySelector('#add-thought-form'); + form.addEventListener('submit', async (e) => { + e.preventDefault(); + + const formData = new FormData(form); + const thoughtData = { + node_id: formData.get('node_id'), + title: formData.get('title'), + content: formData.get('content'), + keywords: formData.get('keywords'), + abstract: formData.get('abstract'), + color_code: formData.get('color_code'), + relation_type: formData.get('relation_type'), + relation_target: formData.get('relation_target') + }; + + try { + const response = await fetch('/api/thoughts', { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify(thoughtData) + }); + + if (!response.ok) { + throw new Error('Fehler beim Speichern des Gedankens.'); + } + + // Modal schließen + closeModal(); + + // Gedanken neu laden + if (nodeId) { + handleNodeClick({ id: nodeId, name: nodeTitle }); + } + + // Erfolgsbenachrichtigung + if (window.MindMap && window.MindMap.showNotification) { + window.MindMap.showNotification('Gedanke erfolgreich gespeichert.', 'success'); + } + + } catch (error) { + console.error('Fehler beim Speichern:', error); + if (window.MindMap && window.MindMap.showNotification) { + window.MindMap.showNotification('Fehler beim Speichern des Gedankens.', 'error'); + } + } + }); + + // Modal schließen + function closeModal() { + modal.classList.add('opacity-0'); + setTimeout(() => { + modal.remove(); + }, 300); + } + } +} + +/** + * Füge globale Funktionen für das Mindmap-Objekt hinzu + */ +window.showComments = async function(thoughtId) { + try { + // Lade-Animation erstellen + const modal = createModalWithLoading('Kommentare werden geladen...'); + document.body.appendChild(modal); + + // Kommentare laden + const response = await fetch(`/api/comments/${thoughtId}`); + + 
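+        // fetch() only rejects on network failures, not on 4xx/5xx responses,
+        // so the HTTP status has to be checked explicitly before parsing.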
if (!response.ok) { + throw new Error(`HTTP error! status: ${response.status}`); + } + + const comments = await response.json(); + + // Modal mit Kommentaren aktualisieren + updateModalWithComments(modal, comments, thoughtId); + + } catch (error) { + console.error('Fehler beim Laden der Kommentare:', error); + if (window.MindMap && window.MindMap.showNotification) { + window.MindMap.showNotification('Fehler beim Laden der Kommentare.', 'error'); + } else { + alert('Fehler beim Laden der Kommentare.'); + } + } +}; + +/** + * Zeigt die Beziehungen eines Gedankens an + */ +window.showRelations = async function(thoughtId) { + try { + // Lade-Animation erstellen + const modal = createModalWithLoading('Beziehungen werden geladen...'); + document.body.appendChild(modal); + + // Beziehungen laden + const response = await fetch(`/api/thoughts/${thoughtId}/relations`); + + if (!response.ok) { + throw new Error(`HTTP error! status: ${response.status}`); + } + + const relations = await response.json(); + + // Modal mit Beziehungen aktualisieren + updateModalWithRelations(modal, relations, thoughtId); + + } catch (error) { + console.error('Fehler beim Laden der Beziehungen:', error); + if (window.MindMap && window.MindMap.showNotification) { + window.MindMap.showNotification('Fehler beim Laden der Beziehungen.', 'error'); + } else { + alert('Fehler beim Laden der Beziehungen.'); + } + } +}; + +/** + * Erstellt ein Modal mit Lade-Animation + */ +function createModalWithLoading(loadingText) { + const modal = document.createElement('div'); + modal.className = 'fixed inset-0 z-50 flex items-center justify-center p-4'; + modal.innerHTML = ` + +
+        <div id="modal-backdrop" class="absolute inset-0 bg-black/60 backdrop-blur-sm"></div>
+        <div class="glass-effect relative w-full max-w-lg rounded-xl p-6">
+            <div class="flex flex-col items-center justify-center py-8">
+                <div class="animate-spin rounded-full h-10 w-10 border-b-2 border-primary-400 mb-4"></div>
+                <p>
+                    ${loadingText}
+                </p>
+            </div>
+        </div>
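+        <!-- The .glass-effect container is the part that updateModalWithComments()/updateModalWithRelations() re-render once the data has loaded. -->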
+ `; + + // Event-Listener zum Schließen + modal.querySelector('#modal-backdrop').addEventListener('click', () => { + modal.remove(); + }); + + return modal; +} + +/** + * Aktualisiert das Modal mit Kommentaren + */ +function updateModalWithComments(modal, comments, thoughtId) { + const modalContent = modal.querySelector('.glass-effect'); + + modalContent.innerHTML = ` +
+        <div class="flex items-center justify-between mb-4">
+            <h3 class="text-lg font-semibold">
+                Kommentare
+            </h3>
+            <button id="close-modal-btn" type="button" aria-label="Schließen">&times;</button>
+        </div>
+        <div class="max-h-64 overflow-y-auto mb-4">
+            ${comments.length === 0 ?
+                '<p class="text-gray-400">Keine Kommentare vorhanden.</p>' :
+                comments.map(comment => `
+                    <div class="mb-3">
+                        <div class="flex items-center justify-between text-xs text-gray-400 mb-1">
+                            <span>${comment.author}</span>
+                            <span>${comment.timestamp}</span>
+                        </div>
+                        <p class="text-sm">
+                            ${comment.content}
+                        </p>
+                    </div>
+                `).join('')
+            }
+        </div>
+        <form id="comment-form">
+            <div class="mb-2">
+                <textarea name="content" rows="3" required placeholder="Kommentar schreiben..."></textarea>
+            </div>
+            <div class="flex justify-end">
+                <button type="submit" class="btn btn-primary">Senden</button>
+            </div>
+        </form>
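+        <!-- The textarea's name="content" is read below via form.elements.content when the comment is submitted. -->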
+ `; + + // Event-Listener hinzufügen + modalContent.querySelector('#close-modal-btn').addEventListener('click', () => { + modal.remove(); + }); + + // Kommentar-Formular + const form = modalContent.querySelector('#comment-form'); + form.addEventListener('submit', async (e) => { + e.preventDefault(); + + const content = form.elements.content.value; + + try { + const response = await fetch('/api/comments', { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + thought_id: thoughtId, + content: content + }) + }); + + if (!response.ok) { + throw new Error('Fehler beim Speichern des Kommentars.'); + } + + // Modal schließen + modal.remove(); + + // Erfolgsbenachrichtigung + MindMap.showNotification('Kommentar erfolgreich gespeichert.', 'success'); + + } catch (error) { + console.error('Fehler beim Speichern des Kommentars:', error); + MindMap.showNotification('Fehler beim Speichern des Kommentars.', 'error'); + } + }); +} + +/** + * Aktualisiert das Modal mit Beziehungen + */ +function updateModalWithRelations(modal, relations, thoughtId) { + const modalContent = modal.querySelector('.glass-effect'); + + modalContent.innerHTML = ` +
+        <div class="flex items-center justify-between mb-4">
+            <h3 class="text-lg font-semibold">
+                Beziehungen
+            </h3>
+            <button id="close-modal-btn" type="button" aria-label="Schließen">&times;</button>
+        </div>
+        <div class="max-h-64 overflow-y-auto mb-4">
+            ${relations.length === 0 ?
+                '<p class="text-gray-400">Keine Beziehungen vorhanden.</p>' :
+                relations.map(relation => `
+                    <div class="mb-3">
+                        <span class="tag">
+                            ${relation.relation_type}
+                        </span>
+                        <p class="text-sm">Ziel: Gedanke #${relation.target_id}</p>
+                        <p class="text-xs text-gray-400">Erstellt von ${relation.created_by} am ${relation.created_at}</p>
+                    </div>
+                `).join('')
+            }
+        </div>
+        <form id="relation-form">
+            <div class="flex gap-2 mb-2">
+                <input type="number" name="target_id" placeholder="Ziel-Gedanken-ID" required>
+                <select name="relation_type" required>
+                    <option value="builds_upon">Baut auf</option>
+                    <option value="contradicts">Widerspricht</option>
+                </select>
+            </div>
+            <div class="flex justify-end">
+                <button type="submit" class="btn btn-primary">Beziehung erstellen</button>
+            </div>
+        </form>
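+        <!-- target_id and relation_type match the form.elements accessed in the submit handler below; the relation types mirror the values the link renderer distinguishes (builds_upon, contradicts). -->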
+ `; + + // Event-Listener hinzufügen + modalContent.querySelector('#close-modal-btn').addEventListener('click', () => { + modal.remove(); + }); + + // Beziehungs-Formular + const form = modalContent.querySelector('#relation-form'); + form.addEventListener('submit', async (e) => { + e.preventDefault(); + + const formData = { + source_id: thoughtId, + target_id: parseInt(form.elements.target_id.value), + relation_type: form.elements.relation_type.value + }; + + try { + const response = await fetch('/api/relations', { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify(formData) + }); + + if (!response.ok) { + throw new Error('Fehler beim Erstellen der Beziehung.'); + } + + // Modal schließen + modal.remove(); + + // Erfolgsbenachrichtigung + MindMap.showNotification('Beziehung erfolgreich erstellt.', 'success'); + + } catch (error) { + console.error('Fehler beim Erstellen der Beziehung:', error); + MindMap.showNotification('Fehler beim Erstellen der Beziehung.', 'error'); + } + }); +} \ No newline at end of file diff --git a/static/js/modules/mindmap.js b/static/js/modules/mindmap.js new file mode 100644 index 0000000..0dfc9db --- /dev/null +++ b/static/js/modules/mindmap.js @@ -0,0 +1,844 @@ +/** + * MindMap D3.js Modul + * Visualisiert die Mindmap mit D3.js + */ + +class MindMapVisualization { + constructor(containerSelector, options = {}) { + this.containerSelector = containerSelector; + this.container = d3.select(containerSelector); + this.width = options.width || this.container.node().clientWidth || 800; + this.height = options.height || 600; + this.nodeRadius = options.nodeRadius || 14; + this.selectedNodeRadius = options.selectedNodeRadius || 20; + this.linkDistance = options.linkDistance || 150; + this.chargeStrength = options.chargeStrength || -900; + this.centerForce = options.centerForce || 0.15; + this.onNodeClick = options.onNodeClick || ((node) => console.log('Node clicked:', node)); + + this.nodes = []; + this.links = []; + this.simulation = null; + this.svg = null; + this.linkElements = null; + this.nodeElements = null; + this.textElements = null; + this.tooltipEnabled = options.tooltipEnabled !== undefined ? 
options.tooltipEnabled : true; + + this.mouseoverNode = null; + this.selectedNode = null; + + this.zoomFactor = 1; + this.tooltipDiv = null; + this.isLoading = true; + + // Lade die gemerkten Knoten + this.bookmarkedNodes = this.loadBookmarkedNodes(); + + // Sicherstellen, dass der Container bereit ist + if (this.container.node()) { + this.init(); + this.setupDefaultNodes(); + + // Sofortige Datenladung + window.setTimeout(() => { + this.loadData(); + }, 100); + } else { + console.error('Mindmap-Container nicht gefunden:', containerSelector); + } + } + + // Standardknoten als Fallback einrichten, falls die API nicht reagiert + setupDefaultNodes() { + // Basis-Mindmap mit Hauptthemen + const defaultNodes = [ + { id: "root", name: "Wissen", description: "Zentrale Wissensbasis", thought_count: 0 }, + { id: "philosophy", name: "Philosophie", description: "Philosophisches Denken", thought_count: 0 }, + { id: "science", name: "Wissenschaft", description: "Wissenschaftliche Erkenntnisse", thought_count: 0 }, + { id: "technology", name: "Technologie", description: "Technologische Entwicklungen", thought_count: 0 }, + { id: "arts", name: "Künste", description: "Künstlerische Ausdrucksformen", thought_count: 0 } + ]; + + const defaultLinks = [ + { source: "root", target: "philosophy" }, + { source: "root", target: "science" }, + { source: "root", target: "technology" }, + { source: "root", target: "arts" } + ]; + + // Als Fallback verwenden, falls die API fehlschlägt + this.defaultNodes = defaultNodes; + this.defaultLinks = defaultLinks; + } + + init() { + // SVG erstellen, wenn noch nicht vorhanden + if (!this.svg) { + // Container zuerst leeren + this.container.html(''); + + this.svg = this.container + .append('svg') + .attr('width', '100%') + .attr('height', this.height) + .attr('viewBox', `0 0 ${this.width} ${this.height}`) + .attr('class', 'mindmap-svg') + .call( + d3.zoom() + .scaleExtent([0.1, 5]) + .on('zoom', (event) => { + this.handleZoom(event.transform); + }) + ); + + // Hauptgruppe für alles, was zoom-transformierbar ist + this.g = this.svg.append('g'); + + // Tooltip initialisieren + if (!d3.select('body').select('.node-tooltip').size()) { + this.tooltipDiv = d3.select('body') + .append('div') + .attr('class', 'node-tooltip') + .style('opacity', 0) + .style('position', 'absolute') + .style('pointer-events', 'none') + .style('background', 'rgba(20, 20, 40, 0.9)') + .style('color', '#ffffff') + .style('border', '1px solid rgba(160, 80, 255, 0.2)') + .style('border-radius', '6px') + .style('padding', '8px 12px') + .style('font-size', '14px') + .style('max-width', '250px') + .style('box-shadow', '0 10px 25px rgba(0, 0, 0, 0.5), 0 0 10px rgba(160, 80, 255, 0.2)'); + } else { + this.tooltipDiv = d3.select('body').select('.node-tooltip'); + } + } + + // Force-Simulation initialisieren + this.simulation = d3.forceSimulation() + .force('link', d3.forceLink().id(d => d.id).distance(this.linkDistance)) + .force('charge', d3.forceManyBody().strength(this.chargeStrength)) + .force('center', d3.forceCenter(this.width / 2, this.height / 2).strength(this.centerForce)) + .force('collision', d3.forceCollide().radius(this.nodeRadius * 2)); + + // Globale Mindmap-Instanz für externe Zugriffe setzen + window.mindmapInstance = this; + } + + handleZoom(transform) { + this.g.attr('transform', transform); + this.zoomFactor = transform.k; + + // Knotengröße anpassen, um bei Zoom lesbar zu bleiben + if (this.nodeElements) { + this.nodeElements + .attr('r', d => (d === this.selectedNode ? 
this.selectedNodeRadius : this.nodeRadius) / Math.sqrt(transform.k)); + } + + // Textgröße anpassen + if (this.textElements) { + this.textElements + .style('font-size', `${12 / Math.sqrt(transform.k)}px`); + } + } + + async loadData() { + try { + // Ladeindikator anzeigen + this.showLoading(); + + // Verwende sofort die Standarddaten für eine schnelle erste Anzeige + this.nodes = [...this.defaultNodes]; + this.links = [...this.defaultLinks]; + + // Visualisierung sofort aktualisieren + this.isLoading = false; + this.updateVisualization(); + + // Status auf bereit setzen - don't wait for API + this.container.attr('data-status', 'ready'); + + // API-Aufruf mit kürzerem Timeout im Hintergrund durchführen + try { + const controller = new AbortController(); + const timeoutId = setTimeout(() => controller.abort(), 5000); // 5 Sekunden Timeout + + const response = await fetch('/api/mindmap', { + signal: controller.signal, + headers: { + 'Cache-Control': 'no-cache', + 'Pragma': 'no-cache' + } + }); + clearTimeout(timeoutId); + + if (!response.ok) { + console.warn(`HTTP Fehler: ${response.status}, versuche erneute Verbindung`); + + // Bei Verbindungsfehler versuchen, die Verbindung neu herzustellen + const retryResponse = await fetch('/api/refresh-mindmap', { + headers: { + 'Cache-Control': 'no-cache', + 'Pragma': 'no-cache' + } + }); + + if (!retryResponse.ok) { + throw new Error(`Retry failed with status: ${retryResponse.status}`); + } + + const retryData = await retryResponse.json(); + + if (!retryData.success || !retryData.nodes || retryData.nodes.length === 0) { + console.warn('Keine Mindmap-Daten nach Neuversuch, verwende weiterhin Standard-Daten.'); + return; // Keep using default data + } + + // Flache Liste von Knoten und Verbindungen erstellen + this.nodes = []; + this.links = []; + + // Knoten direkt übernehmen + retryData.nodes.forEach(node => { + this.nodes.push({ + id: node.id, + name: node.name, + description: node.description || '', + thought_count: node.thought_count || 0, + color: this.generateColorFromString(node.name), + }); + + // Verbindungen hinzufügen + if (node.connections && node.connections.length > 0) { + node.connections.forEach(conn => { + this.links.push({ + source: node.id, + target: conn.target + }); + }); + } + }); + + // Visualisierung aktualisieren mit den tatsächlichen Daten + this.updateVisualization(); + return; + } + + const data = await response.json(); + + if (!data || !data.nodes || data.nodes.length === 0) { + console.warn('Keine Mindmap-Daten vorhanden, verwende weiterhin Standard-Daten.'); + return; // Keep using default data + } + + // Flache Liste von Knoten und Verbindungen erstellen + this.nodes = []; + this.links = []; + this.processHierarchicalData(data.nodes); + + // Visualisierung aktualisieren mit den tatsächlichen Daten + this.updateVisualization(); + + } catch (error) { + console.warn('Fehler beim Laden der Mindmap-Daten, verwende Standarddaten:', error); + // Already using default data, no action needed + } + + } catch (error) { + console.error('Kritischer Fehler bei der Mindmap-Darstellung:', error); + this.showError('Fehler beim Laden der Mindmap-Daten. Bitte laden Sie die Seite neu.'); + this.container.attr('data-status', 'error'); + } + } + + showLoading() { + // Element nur leeren, wenn es noch kein SVG enthält + if (!this.container.select('svg').size()) { + this.container.html(` +
+                <div class="flex flex-col items-center justify-center h-full">
+                    <div class="animate-spin rounded-full h-10 w-10 border-b-2 border-primary-400 mb-4"></div>
+                    <p>
+                        Mindmap wird geladen...
+                    </p>
+                </div>
+ `); + } + } + + processHierarchicalData(hierarchicalNodes, parentId = null) { + hierarchicalNodes.forEach(node => { + // Knoten hinzufügen, wenn noch nicht vorhanden + if (!this.nodes.find(n => n.id === node.id)) { + this.nodes.push({ + id: node.id, + name: node.name, + description: node.description || '', + thought_count: node.thought_count || 0, + color: this.generateColorFromString(node.name), + }); + } + + // Verbindung zum Elternknoten hinzufügen + if (parentId !== null) { + this.links.push({ + source: parentId, + target: node.id + }); + } + + // Rekursiv für Kindknoten aufrufen + if (node.children && node.children.length > 0) { + this.processHierarchicalData(node.children, node.id); + } + }); + } + + generateColorFromString(str) { + // Erzeugt eine deterministische Farbe basierend auf dem String + let hash = 0; + for (let i = 0; i < str.length; i++) { + hash = str.charCodeAt(i) + ((hash << 5) - hash); + } + + // Verwende deterministische Farbe aus unserem Farbschema + const colors = [ + '#4080ff', // primary-400 + '#a040ff', // secondary-400 + '#205cf5', // primary-500 + '#8020f5', // secondary-500 + '#1040e0', // primary-600 + '#6010e0', // secondary-600 + ]; + + return colors[Math.abs(hash) % colors.length]; + } + + updateVisualization() { + // Starte die Visualisierung nur, wenn nicht mehr im Ladezustand + if (this.isLoading) return; + + // Container leeren, wenn Diagramm neu erstellt wird + if (!this.svg) { + this.container.html(''); + this.init(); + } + + // Performance-Optimierung: Deaktiviere Transition während des Datenladens + const useTransitions = false; + + // Links (Edges) erstellen + this.linkElements = this.g.selectAll('.link') + .data(this.links) + .join( + enter => enter.append('line') + .attr('class', 'link') + .attr('stroke', '#ffffff30') + .attr('stroke-width', 2) + .attr('stroke-dasharray', d => d.relation_type === 'contradicts' ? '5,5' : null) + .attr('marker-end', d => d.relation_type === 'builds_upon' ? 'url(#arrowhead)' : null), + update => update + .attr('stroke', '#ffffff30') + .attr('stroke-dasharray', d => d.relation_type === 'contradicts' ? '5,5' : null) + .attr('marker-end', d => d.relation_type === 'builds_upon' ? 'url(#arrowhead)' : null), + exit => exit.remove() + ); + + // Pfeilspitze für gerichtete Beziehungen hinzufügen (falls noch nicht vorhanden) + if (!this.svg.select('defs').node()) { + const defs = this.svg.append('defs'); + defs.append('marker') + .attr('id', 'arrowhead') + .attr('viewBox', '0 -5 10 10') + .attr('refX', 20) + .attr('refY', 0) + .attr('orient', 'auto') + .attr('markerWidth', 6) + .attr('markerHeight', 6) + .append('path') + .attr('d', 'M0,-5L10,0L0,5') + .attr('fill', '#ffffff50'); + } + + // Simplified Effekte definieren, falls noch nicht vorhanden + if (!this.svg.select('#glow').node()) { + const defs = this.svg.select('defs').size() ? 
this.svg.select('defs') : this.svg.append('defs'); + + // Glow-Effekt für Knoten + const filter = defs.append('filter') + .attr('id', 'glow') + .attr('x', '-50%') + .attr('y', '-50%') + .attr('width', '200%') + .attr('height', '200%'); + + filter.append('feGaussianBlur') + .attr('stdDeviation', '1') + .attr('result', 'blur'); + + filter.append('feComposite') + .attr('in', 'SourceGraphic') + .attr('in2', 'blur') + .attr('operator', 'over'); + + // Blur-Effekt für Schatten + const blurFilter = defs.append('filter') + .attr('id', 'blur') + .attr('x', '-50%') + .attr('y', '-50%') + .attr('width', '200%') + .attr('height', '200%'); + + blurFilter.append('feGaussianBlur') + .attr('stdDeviation', '1'); + } + + // Knoten-Gruppe erstellen/aktualisieren + const nodeGroups = this.g.selectAll('.node-group') + .data(this.nodes) + .join( + enter => { + const group = enter.append('g') + .attr('class', 'node-group') + .call(d3.drag() + .on('start', (event, d) => this.dragStarted(event, d)) + .on('drag', (event, d) => this.dragged(event, d)) + .on('end', (event, d) => this.dragEnded(event, d))); + + // Hintergrundschatten für besseren Kontrast + group.append('circle') + .attr('class', 'node-shadow') + .attr('r', d => this.nodeRadius * 1.2) + .attr('fill', 'rgba(0, 0, 0, 0.3)') + .attr('filter', 'url(#blur)'); + + // Kreis für jeden Knoten + group.append('circle') + .attr('class', d => `node ${this.isNodeBookmarked(d.id) ? 'bookmarked' : ''}`) + .attr('r', this.nodeRadius) + .attr('fill', d => d.color || this.generateColorFromString(d.name)) + .attr('stroke', d => this.isNodeBookmarked(d.id) ? '#FFD700' : '#ffffff50') + .attr('stroke-width', d => this.isNodeBookmarked(d.id) ? 3 : 2) + .attr('filter', 'url(#glow)'); + + // Text-Label mit besserem Kontrast + group.append('text') + .attr('class', 'node-label') + .attr('dy', '0.35em') + .attr('text-anchor', 'middle') + .attr('fill', '#ffffff') + .attr('stroke', 'rgba(0, 0, 0, 0.4)') + .attr('stroke-width', '0.7px') + .attr('paint-order', 'stroke') + .style('font-size', '12px') + .style('font-weight', '500') + .style('pointer-events', 'none') + .text(d => d.name.length > 12 ? d.name.slice(0, 10) + '...' : d.name); + + // Interaktivität hinzufügen + group + .on('mouseover', (event, d) => this.nodeMouseover(event, d)) + .on('mouseout', (event, d) => this.nodeMouseout(event, d)) + .on('click', (event, d) => this.nodeClicked(event, d)); + + return group; + }, + update => { + // Knoten aktualisieren + update.select('.node') + .attr('class', d => `node ${this.isNodeBookmarked(d.id) ? 'bookmarked' : ''}`) + .attr('fill', d => d.color || this.generateColorFromString(d.name)) + .attr('stroke', d => this.isNodeBookmarked(d.id) ? '#FFD700' : '#ffffff50') + .attr('stroke-width', d => this.isNodeBookmarked(d.id) ? 3 : 2); + + // Text aktualisieren + update.select('.node-label') + .text(d => d.name.length > 12 ? d.name.slice(0, 10) + '...' 
: d.name); + + return update; + }, + exit => exit.remove() + ); + + // Einzelne Elemente für direkten Zugriff speichern + this.nodeElements = this.g.selectAll('.node'); + this.textElements = this.g.selectAll('.node-label'); + + // Performance-Optimierung: Weniger Simulationsschritte für schnellere Stabilisierung + this.simulation + .nodes(this.nodes) + .on('tick', () => this.ticked()) + .alpha(0.3) // Reduzierter Wert für schnellere Stabilisierung + .alphaDecay(0.05); // Erhöhter Wert für schnellere Stabilisierung + + this.simulation.force('link') + .links(this.links); + + // Simulation neu starten + this.simulation.restart(); + + // Update connection counts + this.updateConnectionCounts(); + } + + ticked() { + // Linienpositionen aktualisieren + this.linkElements + .attr('x1', d => d.source.x) + .attr('y1', d => d.source.y) + .attr('x2', d => d.target.x) + .attr('y2', d => d.target.y); + + // Knotenpositionen aktualisieren + this.g.selectAll('.node-group') + .attr('transform', d => `translate(${d.x}, ${d.y})`); + } + + dragStarted(event, d) { + if (!event.active) this.simulation.alphaTarget(0.3).restart(); + d.fx = d.x; + d.fy = d.y; + } + + dragged(event, d) { + d.fx = event.x; + d.fy = event.y; + } + + dragEnded(event, d) { + if (!event.active) this.simulation.alphaTarget(0); + d.fx = null; + d.fy = null; + } + + nodeMouseover(event, d) { + this.mouseoverNode = d; + + // Tooltip anzeigen + if (this.tooltipEnabled) { + const isBookmarked = this.isNodeBookmarked(d.id); + const tooltipContent = ` +
+                <strong>${d.name}</strong>
+                ${d.description ? `
+                    <p class="tooltip-description">
+                        ${d.description}
+                    </p>
+                ` : ''}
+                <div class="tooltip-meta">
+                    Gedanken: ${d.thought_count}
+                </div>
+                <button id="bookmark-button" data-nodeid="${d.id}" style="pointer-events: all;">
+                    ${isBookmarked ? 'Gemerkt' : 'Merken'}
+                </button>
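+                <!-- pointer-events is re-enabled on the button because the tooltip container itself is created with pointer-events: none. -->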
+ `; + + this.tooltipDiv + .html(tooltipContent) + .style('left', (event.pageX + 10) + 'px') + .style('top', (event.pageY - 10) + 'px') + .transition() + .duration(200) + .style('opacity', 1); + + // Event-Listener für den Bookmark-Button hinzufügen + document.getElementById('bookmark-button').addEventListener('click', (e) => { + e.stopPropagation(); + const nodeId = e.currentTarget.getAttribute('data-nodeid'); + const isNowBookmarked = this.toggleBookmark(nodeId); + + // Button-Text aktualisieren + if (isNowBookmarked) { + e.currentTarget.innerHTML = ' Gemerkt'; + } else { + e.currentTarget.innerHTML = ' Merken'; + } + }); + } + + // Knoten visuell hervorheben + d3.select(event.currentTarget).select('circle') + .transition() + .duration(200) + .attr('r', this.nodeRadius * 1.2) + .attr('stroke', this.isNodeBookmarked(d.id) ? '#FFD700' : '#ffffff'); + } + + nodeMouseout(event, d) { + this.mouseoverNode = null; + + // Tooltip ausblenden + if (this.tooltipEnabled) { + this.tooltipDiv + .transition() + .duration(200) + .style('opacity', 0); + } + + // Knoten-Stil zurücksetzen, wenn nicht ausgewählt + const nodeElement = d3.select(event.currentTarget).select('circle'); + if (d !== this.selectedNode) { + const isBookmarked = this.isNodeBookmarked(d.id); + nodeElement + .transition() + .duration(200) + .attr('r', this.nodeRadius) + .attr('stroke', isBookmarked ? '#FFD700' : '#ffffff50') + .attr('stroke-width', isBookmarked ? 3 : 2); + } + } + + nodeClicked(event, d) { + // Frühere Auswahl zurücksetzen + if (this.selectedNode && this.selectedNode !== d) { + this.g.selectAll('.node') + .filter(n => n === this.selectedNode) + .transition() + .duration(200) + .attr('r', this.nodeRadius) + .attr('stroke', '#ffffff50'); + } + + // Neue Auswahl hervorheben + if (this.selectedNode !== d) { + this.selectedNode = d; + d3.select(event.currentTarget).select('circle') + .transition() + .duration(200) + .attr('r', this.selectedNodeRadius) + .attr('stroke', '#ffffff'); + } + + // Callback mit Node-Daten aufrufen + this.onNodeClick(d); + } + + showError(message) { + this.container.html(` +
+            <div class="flex flex-col items-center justify-center h-full text-center text-red-400">
+                <p>
+                    ${message}
+                </p>
+            </div>
+ `); + } + + // Fokussiert die Ansicht auf einen bestimmten Knoten + focusNode(nodeId) { + const node = this.nodes.find(n => n.id === nodeId); + if (!node) return; + + // Simuliere einen Klick auf den Knoten + const nodeElement = this.g.selectAll('.node-group') + .filter(d => d.id === nodeId); + + nodeElement.dispatch('click'); + + // Zentriere den Knoten in der Ansicht + const transform = d3.zoomIdentity + .translate(this.width / 2, this.height / 2) + .scale(1.2) + .translate(-node.x, -node.y); + + this.svg.transition() + .duration(750) + .call( + d3.zoom().transform, + transform + ); + } + + // Filtert die Mindmap basierend auf einem Suchbegriff + filterBySearchTerm(searchTerm) { + if (!searchTerm || searchTerm.trim() === '') { + // Alle Knoten anzeigen + this.g.selectAll('.node-group') + .style('opacity', 1) + .style('pointer-events', 'all'); + + this.g.selectAll('.link') + .style('opacity', 1); + + return; + } + + const searchLower = searchTerm.toLowerCase(); + const matchingNodes = this.nodes.filter(node => + node.name.toLowerCase().includes(searchLower) || + (node.description && node.description.toLowerCase().includes(searchLower)) + ); + + const matchingNodeIds = new Set(matchingNodes.map(n => n.id)); + + // Passende Knoten hervorheben, andere ausblenden + this.g.selectAll('.node-group') + .style('opacity', d => matchingNodeIds.has(d.id) ? 1 : 0.2) + .style('pointer-events', d => matchingNodeIds.has(d.id) ? 'all' : 'none'); + + // Verbindungen zwischen passenden Knoten hervorheben + this.g.selectAll('.link') + .style('opacity', d => + matchingNodeIds.has(d.source.id) && matchingNodeIds.has(d.target.id) ? 1 : 0.1 + ); + + // Auf den ersten passenden Knoten fokussieren, wenn vorhanden + if (matchingNodes.length > 0) { + this.focusNode(matchingNodes[0].id); + } + } + + /** + * Updates the thought_count property for each node based on existing connections + */ + updateConnectionCounts() { + // Reset all counts first + this.nodes.forEach(node => { + // Initialize thought_count if it doesn't exist + if (typeof node.thought_count !== 'number') { + node.thought_count = 0; + } + + // Count connections for this node + const connectedNodes = this.getConnectedNodes(node); + node.thought_count = connectedNodes.length; + }); + + // Update UI to show counts + this.updateNodeLabels(); + } + + /** + * Updates the visual representation of node labels to include connection counts + */ + updateNodeLabels() { + if (!this.textElements) return; + + this.textElements.text(d => { + if (d.thought_count > 0) { + return `${d.name} (${d.thought_count})`; + } + return d.name; + }); + } + + /** + * Adds a new connection between nodes and updates the counts + */ + addConnection(sourceNode, targetNode) { + if (!sourceNode || !targetNode) return false; + + // Check if connection already exists + if (this.isConnected(sourceNode, targetNode)) return false; + + // Add new connection + this.links.push({ + source: sourceNode.id, + target: targetNode.id + }); + + // Update counts + this.updateConnectionCounts(); + + // Update visualization + this.updateVisualization(); + + return true; + } + + // Lädt gemerkete Knoten aus dem LocalStorage + loadBookmarkedNodes() { + try { + const bookmarked = localStorage.getItem('bookmarkedNodes'); + return bookmarked ? 
JSON.parse(bookmarked) : []; + } catch (error) { + console.error('Fehler beim Laden der gemerkten Knoten:', error); + return []; + } + } + + // Speichert gemerkete Knoten im LocalStorage + saveBookmarkedNodes() { + try { + localStorage.setItem('bookmarkedNodes', JSON.stringify(this.bookmarkedNodes)); + } catch (error) { + console.error('Fehler beim Speichern der gemerkten Knoten:', error); + } + } + + // Prüft, ob ein Knoten gemerkt ist + isNodeBookmarked(nodeId) { + return this.bookmarkedNodes.includes(nodeId); + } + + // Merkt einen Knoten oder hebt die Markierung auf + toggleBookmark(nodeId) { + const index = this.bookmarkedNodes.indexOf(nodeId); + if (index === -1) { + // Node hinzufügen + this.bookmarkedNodes.push(nodeId); + this.updateNodeAppearance(nodeId, true); + } else { + // Node entfernen + this.bookmarkedNodes.splice(index, 1); + this.updateNodeAppearance(nodeId, false); + } + + // Änderungen speichern + this.saveBookmarkedNodes(); + + // Event auslösen für andere Komponenten + const event = new CustomEvent('nodeBookmarkToggled', { + detail: { + nodeId: nodeId, + isBookmarked: index === -1 + } + }); + document.dispatchEvent(event); + + return index === -1; // true wenn jetzt gemerkt, false wenn Markierung aufgehoben + } + + // Aktualisiert das Aussehen eines Knotens basierend auf Bookmark-Status + updateNodeAppearance(nodeId, isBookmarked) { + this.g.selectAll('.node-group') + .filter(d => d.id === nodeId) + .select('.node') + .classed('bookmarked', isBookmarked) + .attr('stroke', isBookmarked ? '#FFD700' : '#ffffff50') + .attr('stroke-width', isBookmarked ? 3 : 2); + } + + // Aktualisiert das Aussehen aller gemerkten Knoten + updateAllBookmarkedNodes() { + this.g.selectAll('.node-group') + .each((d) => { + const isBookmarked = this.isNodeBookmarked(d.id); + this.updateNodeAppearance(d.id, isBookmarked); + }); + } + + /** + * Gibt alle direkt verbundenen Knoten eines Knotens zurück + * @param {Object} node - Der Knoten, für den die Verbindungen gesucht werden + * @returns {Array} Array der verbundenen Knotenobjekte + */ + getConnectedNodes(node) { + if (!node || !this.links || !this.nodes) return []; + const nodeId = node.id; + const connectedIds = new Set(); + this.links.forEach(link => { + if (link.source === nodeId || (link.source && link.source.id === nodeId)) { + connectedIds.add(link.target.id ? link.target.id : link.target); + } + if (link.target === nodeId || (link.target && link.target.id === nodeId)) { + connectedIds.add(link.source.id ? 
link.source.id : link.source); + } + }); + return this.nodes.filter(n => connectedIds.has(n.id)); + } +} + +// Exportiere die Klasse für die Verwendung in anderen Modulen +window.MindMapVisualization = MindMapVisualization; \ No newline at end of file diff --git a/static/mindmap.js b/static/mindmap.js new file mode 100644 index 0000000..f9b706d --- /dev/null +++ b/static/mindmap.js @@ -0,0 +1,1904 @@ +/** + * MindMap D3.js Modul + * Visualisiert die Mindmap mit D3.js + */ + +class MindMapVisualization { + constructor(containerSelector, options = {}) { + this.containerSelector = containerSelector; + this.container = d3.select(containerSelector); + this.width = options.width || this.container.node().clientWidth || 800; + this.height = options.height || 600; + this.nodeRadius = options.nodeRadius || 22; + this.selectedNodeRadius = options.selectedNodeRadius || 28; + this.linkDistance = options.linkDistance || 150; + this.chargeStrength = options.chargeStrength || -1000; + this.centerForce = options.centerForce || 0.15; + this.onNodeClick = options.onNodeClick || ((node) => console.log('Node clicked:', node)); + + this.nodes = []; + this.links = []; + this.simulation = null; + this.svg = null; + this.linkElements = null; + this.nodeElements = null; + this.textElements = null; + this.tooltipEnabled = options.tooltipEnabled !== undefined ? options.tooltipEnabled : true; + + this.mouseoverNode = null; + this.selectedNode = null; + + this.zoomFactor = 1; + this.tooltipDiv = null; + this.isLoading = true; + + // Flash-Nachrichten-Container + this.flashContainer = null; + + // Erweiterte Farbpalette für Knotentypen + this.colorPalette = { + 'default': '#b38fff', + 'root': '#7e3ff2', + 'philosophy': '#58a9ff', + 'science': '#38b2ac', + 'technology': '#6366f1', + 'arts': '#ec4899', + 'ai': '#8b5cf6', + 'ethics': '#f59e0b', + 'math': '#06b6d4', + 'psychology': '#10b981', + 'biology': '#84cc16', + 'literature': '#f43f5e', + 'history': '#fb7185', + 'economics': '#fbbf24', + 'sociology': '#a78bfa', + 'design': '#f472b6', + 'languages': '#4ade80' + }; + + // Sicherstellen, dass der Container bereit ist + if (this.container.node()) { + this.init(); + this.setupDefaultNodes(); + this.setupFlashMessages(); + + // Sofortige Datenladung + window.setTimeout(() => { + this.loadData(); + }, 100); + } else { + console.error('Mindmap-Container nicht gefunden:', containerSelector); + } + } + + // Flash-Nachrichten-System einrichten + setupFlashMessages() { + // Flash-Container erstellen, falls er noch nicht existiert + if (!document.getElementById('mindmap-flash-container')) { + this.flashContainer = document.createElement('div'); + this.flashContainer.id = 'mindmap-flash-container'; + this.flashContainer.className = 'mindmap-flash-container'; + this.flashContainer.style.position = 'fixed'; + this.flashContainer.style.top = '20px'; + this.flashContainer.style.right = '20px'; + this.flashContainer.style.zIndex = '1000'; + this.flashContainer.style.maxWidth = '350px'; + this.flashContainer.style.display = 'flex'; + this.flashContainer.style.flexDirection = 'column'; + this.flashContainer.style.gap = '10px'; + document.body.appendChild(this.flashContainer); + } else { + this.flashContainer = document.getElementById('mindmap-flash-container'); + } + + // Prüfen, ob Server-seitige Flash-Nachrichten existieren und anzeigen + this.checkForServerFlashMessages(); + } + + // Prüft auf Server-seitige Flash-Nachrichten + async checkForServerFlashMessages() { + try { + const response = await fetch('/api/get_flash_messages'); + if 
(response.ok) { + const messages = await response.json(); + messages.forEach(message => { + this.showFlash(message.message, message.category); + }); + } + } catch (err) { + console.error('Fehler beim Abrufen der Flash-Nachrichten:', err); + } + } + + // Zeigt eine Flash-Nachricht an + showFlash(message, type = 'info', duration = 5000) { + if (!this.flashContainer) return; + + const flashElement = document.createElement('div'); + flashElement.className = `mindmap-flash flash-${type}`; + flashElement.style.padding = '12px 18px'; + flashElement.style.borderRadius = '8px'; + flashElement.style.boxShadow = '0 4px 12px rgba(0, 0, 0, 0.15)'; + flashElement.style.display = 'flex'; + flashElement.style.alignItems = 'center'; + flashElement.style.justifyContent = 'space-between'; + flashElement.style.fontSize = '14px'; + flashElement.style.fontWeight = '500'; + flashElement.style.backdropFilter = 'blur(10px)'; + flashElement.style.opacity = '0'; + flashElement.style.transform = 'translateY(-20px)'; + flashElement.style.transition = 'all 0.3s ease'; + + // Spezifische Stile je nach Nachrichtentyp + switch(type) { + case 'success': + flashElement.style.backgroundColor = 'rgba(34, 197, 94, 0.9)'; + flashElement.style.borderLeft = '5px solid #16a34a'; + flashElement.style.color = 'white'; + break; + case 'error': + flashElement.style.backgroundColor = 'rgba(239, 68, 68, 0.9)'; + flashElement.style.borderLeft = '5px solid #dc2626'; + flashElement.style.color = 'white'; + break; + case 'warning': + flashElement.style.backgroundColor = 'rgba(245, 158, 11, 0.9)'; + flashElement.style.borderLeft = '5px solid #d97706'; + flashElement.style.color = 'white'; + break; + default: // info + flashElement.style.backgroundColor = 'rgba(59, 130, 246, 0.9)'; + flashElement.style.borderLeft = '5px solid #2563eb'; + flashElement.style.color = 'white'; + } + + // Icon je nach Nachrichtentyp + let icon = ''; + switch(type) { + case 'success': + icon = ''; + break; + case 'error': + icon = ''; + break; + case 'warning': + icon = ''; + break; + default: + icon = ''; + } + + // Inhalt der Nachricht mit Icon + const contentWrapper = document.createElement('div'); + contentWrapper.style.display = 'flex'; + contentWrapper.style.alignItems = 'center'; + contentWrapper.style.gap = '12px'; + + const iconElement = document.createElement('div'); + iconElement.className = 'flash-icon'; + iconElement.innerHTML = icon; + + const textElement = document.createElement('div'); + textElement.className = 'flash-text'; + textElement.textContent = message; + + contentWrapper.appendChild(iconElement); + contentWrapper.appendChild(textElement); + + // Schließen-Button + const closeButton = document.createElement('button'); + closeButton.className = 'flash-close'; + closeButton.innerHTML = ''; + closeButton.style.background = 'none'; + closeButton.style.border = 'none'; + closeButton.style.color = 'currentColor'; + closeButton.style.cursor = 'pointer'; + closeButton.style.marginLeft = '15px'; + closeButton.style.padding = '3px'; + closeButton.style.fontSize = '14px'; + closeButton.style.opacity = '0.7'; + closeButton.style.transition = 'opacity 0.2s'; + + closeButton.addEventListener('mouseover', () => { + closeButton.style.opacity = '1'; + }); + + closeButton.addEventListener('mouseout', () => { + closeButton.style.opacity = '0.7'; + }); + + closeButton.addEventListener('click', () => { + this.removeFlash(flashElement); + }); + + // Zusammenfügen + flashElement.appendChild(contentWrapper); + flashElement.appendChild(closeButton); + + // Zum 
Container hinzufügen + this.flashContainer.appendChild(flashElement); + + // Animation einblenden + setTimeout(() => { + flashElement.style.opacity = '1'; + flashElement.style.transform = 'translateY(0)'; + }, 10); + + // Automatisches Ausblenden nach der angegebenen Zeit + if (duration > 0) { + setTimeout(() => { + this.removeFlash(flashElement); + }, duration); + } + + return flashElement; + } + + // Entfernt eine Flash-Nachricht mit Animation + removeFlash(flashElement) { + if (!flashElement) return; + + flashElement.style.opacity = '0'; + flashElement.style.transform = 'translateY(-20px)'; + + setTimeout(() => { + if (flashElement.parentNode) { + flashElement.parentNode.removeChild(flashElement); + } + }, 300); + } + + // Standardknoten als Fallback einrichten, falls die API nicht reagiert + setupDefaultNodes() { + // Basis-Mindmap mit Hauptthemen + const defaultNodes = [ + { id: "root", name: "Wissen", description: "Zentrale Wissensbasis", thought_count: 3 }, + { id: "philosophy", name: "Philosophie", description: "Philosophisches Denken", thought_count: 2 }, + { id: "science", name: "Wissenschaft", description: "Wissenschaftliche Erkenntnisse", thought_count: 5 }, + { id: "technology", name: "Technologie", description: "Technologische Entwicklungen", thought_count: 7 }, + { id: "arts", name: "Künste", description: "Künstlerische Ausdrucksformen", thought_count: 4 }, + { id: "ai", name: "Künstliche Intelligenz", description: "KI-Forschung und Anwendungen", thought_count: 6 }, + { id: "ethics", name: "Ethik", description: "Moralische Grundsätze", thought_count: 2 }, + { id: "math", name: "Mathematik", description: "Mathematische Konzepte", thought_count: 3 }, + { id: "psychology", name: "Psychologie", description: "Menschliches Verhalten und Kognition", thought_count: 4 }, + { id: "biology", name: "Biologie", description: "Lebenswissenschaften", thought_count: 3 }, + { id: "literature", name: "Literatur", description: "Literarische Werke und Analysen", thought_count: 2 } + ]; + + const defaultLinks = [ + { source: "root", target: "philosophy" }, + { source: "root", target: "science" }, + { source: "root", target: "technology" }, + { source: "root", target: "arts" }, + { source: "science", target: "math" }, + { source: "science", target: "biology" }, + { source: "technology", target: "ai" }, + { source: "philosophy", target: "ethics" }, + { source: "philosophy", target: "psychology" }, + { source: "arts", target: "literature" }, + { source: "ai", target: "ethics" }, + { source: "psychology", target: "biology" } + ]; + + // Als Fallback verwenden, falls die API fehlschlägt + this.defaultNodes = defaultNodes; + this.defaultLinks = defaultLinks; + } + + init() { + // SVG erstellen, wenn noch nicht vorhanden + if (!this.svg) { + // Container zuerst leeren und Loading-State ausblenden + const loadingOverlay = this.container.select('.mindmap-loading'); + if (loadingOverlay) { + loadingOverlay.style('opacity', 0.8); + } + + this.svg = this.container + .append('svg') + .attr('width', '100%') + .attr('height', this.height) + .attr('viewBox', `0 0 ${this.width} ${this.height}`) + .attr('class', 'mindmap-svg') + .call( + d3.zoom() + .scaleExtent([0.1, 5]) + .on('zoom', (event) => { + this.handleZoom(event.transform); + }) + ); + + // Hauptgruppe für alles, was zoom-transformierbar ist + this.g = this.svg.append('g'); + + // SVG-Definitionen für Filter und Effekte + const defs = this.g.append('defs'); + + // Verbesserte Glasmorphismus- und Glow-Effekte + + // Basis Glow-Effekt + 
D3Extensions.createGlowFilter(defs, 'glow-effect', '#b38fff', 8); + + // Spezifische Effekte für verschiedene Zustände + D3Extensions.createGlowFilter(defs, 'hover-glow', '#58a9ff', 6); + D3Extensions.createGlowFilter(defs, 'selected-glow', '#b38fff', 10); + + // Schatten für alle Knoten + D3Extensions.createShadowFilter(defs, 'shadow-effect'); + + // Glasmorphismus-Effekt für Knoten + D3Extensions.createGlassMorphismFilter(defs, 'glass-effect'); + + // Erweiterte Effekte + this.createAdvancedNodeEffects(defs); + + // Tooltip initialisieren mit verbessertem Glasmorphism-Stil + if (!d3.select('body').select('.node-tooltip').size()) { + this.tooltipDiv = d3.select('body') + .append('div') + .attr('class', 'node-tooltip') + .style('opacity', 0) + .style('position', 'absolute') + .style('pointer-events', 'none') + .style('background', 'rgba(24, 28, 45, 0.85)') + .style('color', '#ffffff') + .style('border', '1px solid rgba(179, 143, 255, 0.3)') + .style('border-radius', '16px') + .style('padding', '12px 16px') + .style('font-size', '14px') + .style('font-weight', '500') + .style('line-height', '1.5') + .style('max-width', '280px') + .style('box-shadow', '0 12px 30px rgba(0, 0, 0, 0.5), 0 0 15px rgba(179, 143, 255, 0.25)') + .style('backdrop-filter', 'blur(20px)') + .style('-webkit-backdrop-filter', 'blur(20px)') + .style('z-index', '1000'); + } else { + this.tooltipDiv = d3.select('body').select('.node-tooltip'); + } + + // Initialisierung abgeschlossen - Loading-Overlay ausblenden + setTimeout(() => { + if (loadingOverlay) { + loadingOverlay.transition() + .duration(500) + .style('opacity', 0) + .on('end', function() { + loadingOverlay.style('display', 'none'); + }); + } + }, 1000); + } + + // Force-Simulation initialisieren + this.simulation = d3.forceSimulation() + .force('link', d3.forceLink().id(d => d.id).distance(this.linkDistance)) + .force('charge', d3.forceManyBody().strength(this.chargeStrength)) + .force('center', d3.forceCenter(this.width / 2, this.height / 2).strength(this.centerForce)) + .force('collision', d3.forceCollide().radius(this.nodeRadius * 2.5)); + + // Globale Mindmap-Instanz für externe Zugriffe setzen + window.mindmapInstance = this; + } + + // Erstellt erweiterte Effekte für die Knoten + createAdvancedNodeEffects(defs) { + // Verbesserte innere Leuchteffekte für Knoten + const innerGlow = defs.append('filter') + .attr('id', 'inner-glow') + .attr('x', '-50%') + .attr('y', '-50%') + .attr('width', '200%') + .attr('height', '200%'); + + // Farbiger innerer Glühen + innerGlow.append('feGaussianBlur') + .attr('in', 'SourceAlpha') + .attr('stdDeviation', 2) + .attr('result', 'blur'); + + innerGlow.append('feOffset') + .attr('in', 'blur') + .attr('dx', 0) + .attr('dy', 0) + .attr('result', 'offsetBlur'); + + innerGlow.append('feFlood') + .attr('flood-color', 'rgba(179, 143, 255, 0.8)') + .attr('result', 'glowColor'); + + innerGlow.append('feComposite') + .attr('in', 'glowColor') + .attr('in2', 'offsetBlur') + .attr('operator', 'in') + .attr('result', 'innerGlow'); + + // Verbinden der Filter + const innerGlowMerge = innerGlow.append('feMerge'); + innerGlowMerge.append('feMergeNode') + .attr('in', 'innerGlow'); + innerGlowMerge.append('feMergeNode') + .attr('in', 'SourceGraphic'); + + // 3D-Glaseffekt mit verbesserter Tiefe + D3Extensions.create3DGlassEffect(defs, '3d-glass'); + + // Pulseffekt für Hervorhebung + const pulseFilter = defs.append('filter') + .attr('id', 'pulse-effect') + .attr('x', '-50%') + .attr('y', '-50%') + .attr('width', '200%') + .attr('height', 
'200%'); + + // Animation definieren + const pulseAnimation = pulseFilter.append('feComponentTransfer') + .append('feFuncA') + .attr('type', 'linear') + .attr('slope', '1.5'); + } + + // Behandelt die Zoom-Transformation für die SVG + handleZoom(transform) { + this.g.attr('transform', transform); + this.zoomFactor = transform.k; + + // Knotengröße an Zoom anpassen + if (this.nodeElements) { + this.nodeElements.selectAll('circle') + .attr('r', d => { + return d === this.selectedNode + ? this.selectedNodeRadius / Math.sqrt(transform.k) + : this.nodeRadius / Math.sqrt(transform.k); + }); + + this.textElements + .style('font-size', `${16 / Math.sqrt(transform.k)}px`); + } + } + + // Lädt die Mindmap-Daten + async loadData() { + try { + // Zeige Lade-Animation + this.showLoading(); + + // API-Aufruf durchführen, um die Kategorien und ihre Knoten zu laden + const response = await fetch('/api/mindmap/public'); + if (!response.ok) { + throw new Error('API-Fehler: ' + response.statusText); + } + + const data = await response.json(); + console.log('Geladene Mindmap-Daten:', data); + + // Verarbeite die hierarchischen Daten in flache Knoten und Links + const processed = this.processApiData(data); + this.nodes = processed.nodes; + this.links = processed.links; + + // Verbindungszählungen aktualisieren + this.updateConnectionCounts(); + + // Visualisierung aktualisieren + this.updateVisualization(); + + // Lade-Animation ausblenden + this.hideLoading(); + + // Erfolgreiche Ladung melden + this.showFlash('Mindmap-Daten erfolgreich geladen', 'success'); + } catch (error) { + console.error('Fehler beim Laden der Mindmap-Daten:', error); + + // Bei einem Fehler die Fallback-Daten verwenden + this.nodes = this.defaultNodes; + this.links = this.defaultLinks; + + // Verbindungszählungen auch für Fallback-Daten aktualisieren + this.updateConnectionCounts(); + + // Fehler anzeigen + this.showError('Mindmap-Daten konnten nicht geladen werden. Verwende Standarddaten.'); + this.showFlash('Fehler beim Laden der Mindmap-Daten. 
Standarddaten werden angezeigt.', 'error'); + + // Visualisierung auch im Fehlerfall aktualisieren + this.updateVisualization(); + this.hideLoading(); + } + } + + // Verarbeitet die API-Daten in das benötigte Format + processApiData(apiData) { + // Erstelle einen Root-Knoten, der alle Kategorien verbindet + const rootNode = { + id: "root", + name: "Wissen", + description: "Zentrale Wissensbasis", + thought_count: 0 + }; + + let nodes = [rootNode]; + let links = []; + + // Für jede Kategorie Knoten und Verbindungen erstellen + apiData.forEach(category => { + // Kategorie als Knoten hinzufügen + const categoryNode = { + id: `category_${category.id}`, + name: category.name, + description: category.description, + color_code: category.color_code, + icon: category.icon, + thought_count: 0, + type: 'category' + }; + + nodes.push(categoryNode); + + // Mit Root-Knoten verbinden + links.push({ + source: "root", + target: categoryNode.id + }); + + // Alle Knoten aus dieser Kategorie hinzufügen + if (category.nodes && category.nodes.length > 0) { + category.nodes.forEach(node => { + // Zähle die Gedanken für die Kategorie + categoryNode.thought_count += node.thought_count || 0; + + const mindmapNode = { + id: `node_${node.id}`, + name: node.name, + description: node.description || '', + color_code: node.color_code || category.color_code, + thought_count: node.thought_count || 0, + type: 'node', + categoryId: category.id + }; + + nodes.push(mindmapNode); + + // Mit Kategorie-Knoten verbinden + links.push({ + source: categoryNode.id, + target: mindmapNode.id + }); + }); + } + + // Rekursiv Unterkategorien verarbeiten + if (category.children && category.children.length > 0) { + this.processSubcategories(category.children, nodes, links, categoryNode.id); + } + }); + + // Root-Knoten-Gedankenzähler aktualisieren + rootNode.thought_count = nodes.reduce((sum, node) => sum + (node.thought_count || 0), 0); + + return { nodes, links }; + } + + // Verarbeitet Unterkategorien rekursiv + processSubcategories(subcategories, nodes, links, parentId) { + subcategories.forEach(category => { + // Kategorie als Knoten hinzufügen + const categoryNode = { + id: `category_${category.id}`, + name: category.name, + description: category.description, + color_code: category.color_code, + icon: category.icon, + thought_count: 0, + type: 'subcategory' + }; + + nodes.push(categoryNode); + + // Mit Eltern-Kategorie verbinden + links.push({ + source: parentId, + target: categoryNode.id + }); + + // Alle Knoten aus dieser Kategorie hinzufügen + if (category.nodes && category.nodes.length > 0) { + category.nodes.forEach(node => { + // Zähle die Gedanken für die Kategorie + categoryNode.thought_count += node.thought_count || 0; + + const mindmapNode = { + id: `node_${node.id}`, + name: node.name, + description: node.description || '', + color_code: node.color_code || category.color_code, + thought_count: node.thought_count || 0, + type: 'node', + categoryId: category.id + }; + + nodes.push(mindmapNode); + + // Mit Kategorie-Knoten verbinden + links.push({ + source: categoryNode.id, + target: mindmapNode.id + }); + }); + } + + // Rekursiv Unterkategorien verarbeiten + if (category.children && category.children.length > 0) { + this.processSubcategories(category.children, nodes, links, categoryNode.id); + } + }); + } + + // Zeigt den Ladebildschirm an + showLoading() { + const loadingOverlay = this.container.select('.mindmap-loading'); + + if (loadingOverlay && !loadingOverlay.empty()) { + loadingOverlay + .style('display', 'flex') + 
.style('opacity', 1); + + // Ladebalken-Animation + const progressBar = loadingOverlay.select('.loading-progress'); + if (!progressBar.empty()) { + let progress = 0; + const progressInterval = setInterval(() => { + progress += Math.random() * 15; + if (progress > 90) { + progress = 90 + Math.random() * 5; + clearInterval(progressInterval); + } + updateProgress(progress); + }, 200); + + function updateProgress(progress) { + progressBar.style('width', `${Math.min(progress, 95)}%`); + } + } + } + } + + // Blendet den Ladebildschirm aus + hideLoading() { + const loadingOverlay = this.container.select('.mindmap-loading'); + + if (loadingOverlay && !loadingOverlay.empty()) { + // Lade-Fortschritt auf 100% setzen + const progressBar = loadingOverlay.select('.loading-progress'); + if (!progressBar.empty()) { + progressBar.transition() + .duration(300) + .style('width', '100%'); + } + + // Overlay ausblenden + setTimeout(() => { + loadingOverlay.transition() + .duration(500) + .style('opacity', 0) + .on('end', function() { + loadingOverlay.style('display', 'none'); + }); + }, 400); + } + } + + // Verarbeitet hierarchische Daten in flache Knoten und Links + processHierarchicalData(hierarchicalNodes, parentId = null) { + let nodes = []; + let links = []; + + for (const node of hierarchicalNodes) { + nodes.push({ + id: node.id, + name: node.name, + description: node.description || '', + thought_count: node.thought_count || 0 + }); + + if (parentId) { + links.push({ + source: parentId, + target: node.id + }); + } + + if (node.children && node.children.length > 0) { + const { nodes: childNodes, links: childLinks } = this.processHierarchicalData(node.children, node.id); + nodes = [...nodes, ...childNodes]; + links = [...links, ...childLinks]; + } + } + + return { nodes, links }; + } + + // Generiert eine konsistente Farbe basierend auf dem Knotennamen + generateColorFromString(str) { + const colors = [ + '#b38fff', '#58a9ff', '#14b8a6', '#f472b6', '#84cc16', + '#f97316', '#4c1d95', '#2dd4bf', '#ec4899', '#eab308' + ]; + + let hash = 0; + for (let i = 0; i < str.length; i++) { + hash = str.charCodeAt(i) + ((hash << 5) - hash); + } + + return colors[Math.abs(hash) % colors.length]; + } + + /** + * Aktualisiert die Visualisierung basierend auf den aktuellen Daten + */ + updateVisualization() { + if (!this.g || !this.nodes.length) return; + + // Daten für Simulation vorbereiten + // Kopieren der Knoten und Links, um Referenzen zu erhalten + const nodes = this.nodes.map(d => Object.assign({}, d)); + const links = this.links.map(d => Object.assign({}, d)); + + // Links erstellen oder aktualisieren + this.linkElements = this.g.selectAll('.link') + .data(links, d => `${d.source.id || d.source}-${d.target.id || d.target}`); + + // Links entfernen, die nicht mehr existieren + this.linkElements.exit().remove(); + + // Neue Links erstellen + const linkEnter = this.linkElements + .enter().append('path') + .attr('class', 'link') + .attr('stroke-width', 2) + .attr('stroke', 'rgba(255, 255, 255, 0.3)') + .attr('fill', 'none') + .attr('marker-end', 'url(#arrowhead)'); + + // Alle Links aktualisieren + this.linkElements = linkEnter.merge(this.linkElements); + + // Pfeilspitzen für die Links definieren + if (!this.g.select('#arrowhead').size()) { + this.g.append('defs').append('marker') + .attr('id', 'arrowhead') + .attr('viewBox', '0 -5 10 10') + .attr('refX', 28) // Abstand vom Ende des Links zum Knoten + .attr('refY', 0) + .attr('orient', 'auto') + .attr('markerWidth', 6) + .attr('markerHeight', 6) + .attr('xoverflow', 
'visible') + .append('path') + .attr('d', 'M 0,-3 L 8,0 L 0,3') + .attr('fill', 'rgba(255, 255, 255, 0.6)'); + } + + // Knoten erstellen oder aktualisieren + this.nodeElements = this.g.selectAll('.node') + .data(nodes, d => d.id); + + // Knoten entfernen, die nicht mehr existieren + this.nodeElements.exit().remove(); + + // Container für neue Knoten erstellen + const nodeEnter = this.nodeElements + .enter().append('g') + .attr('class', 'node') + .call(d3.drag() + .on('start', (event, d) => this.dragStarted(event, d)) + .on('drag', (event, d) => this.dragged(event, d)) + .on('end', (event, d) => this.dragEnded(event, d)) + ) + .on('mouseover', (event, d) => this.nodeMouseover(event, d)) + .on('mouseout', (event, d) => this.nodeMouseout(event, d)) + .on('click', (event, d) => this.nodeClicked(event, d)); + + // Kreisformen für die Knoten hinzufügen + nodeEnter.append('circle') + .attr('r', d => this.nodeRadius) + .attr('fill', d => this.getNodeColor(d)) + .attr('stroke', 'rgba(255, 255, 255, 0.12)') + .attr('stroke-width', 2) + .style('filter', 'url(#glass-effect)'); + + // Label für die Knoten hinzufügen + nodeEnter.append('text') + .attr('class', 'node-label') + .attr('dy', 4) + .attr('text-anchor', 'middle') + .text(d => this.truncateNodeLabel(d.name)) + .style('font-size', d => D3Extensions.getAdaptiveFontSize(d.name, 16, 10) + 'px'); + + // Alle Knoten aktualisieren + this.nodeElements = nodeEnter.merge(this.nodeElements); + + // Simulation mit den neuen Daten aktualisieren + this.simulation + .nodes(nodes) + .force('link', d3.forceLink(links).id(d => d.id).distance(this.linkDistance)); + + // Simulation-Tick-Funktion setzen + this.simulation.on('tick', () => this.ticked()); + + // Simulation neustarten + this.simulation.alpha(1).restart(); + + // Nach kurzer Verzögerung die Knoten mit zusätzlichen Effekten versehen + setTimeout(() => { + this.nodeElements.selectAll('circle') + .transition() + .duration(500) + .attr('r', d => { + // Größe abhängig von der Anzahl der Gedanken + const baseRadius = this.nodeRadius; + const bonus = d.thought_count ? 
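+        // Muster-Skizze: das oben verwendete D3 "General Update Pattern" in Kurzform:
+        //   const sel = g.selectAll('.node').data(nodes, d => d.id); // Key-Funktion
+        //   sel.exit().remove();                    // entfallene Knoten löschen
+        //   const enter = sel.enter().append('g');  // neue Knoten anlegen
+        //   const alle = enter.merge(sel);          // Enter- und Update-Selektion vereinen
+        // Die Key-Funktion d => d.id stellt sicher, dass bestehende Knoten ihr
+        // DOM-Element (und damit ihre Position) über Daten-Updates hinweg behalten.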
Math.min(d.thought_count / 3, 6) : 0; + return baseRadius + bonus; + }); + }, 300); + } + + // Lange Knotenbeschriftungen abkürzen + truncateNodeLabel(label) { + if (!label) return ''; + + const maxLength = 18; // Maximale Zeichenlänge + + if (label.length <= maxLength) { + return label; + } else { + return label.substring(0, maxLength - 3) + '...'; + } + } + + // Bestimmt die Farbe eines Knotens basierend auf seinem Typ oder direkt angegebener Farbe + getNodeColor(node) { + // Direkt angegebene Farbe verwenden, wenn vorhanden + if (node.color_code) { + return node.color_code; + } + + // Kategorietyp-basierte Färbung + if (node.type === 'category' || node.type === 'subcategory') { + return this.colorPalette.root; + } + + // Fallback für verschiedene Knotentypen + return this.colorPalette[node.id] || this.colorPalette.default; + } + + // Aktualisiert die Positionen in jedem Simulationsschritt + ticked() { + if (!this.linkElements || !this.nodeElements) return; + + // Aktualisierung der Linkpositionen mit gebogenem Pfad + this.linkElements + .attr('d', d => { + const dx = d.target.x - d.source.x; + const dy = d.target.y - d.source.y; + const dr = Math.sqrt(dx * dx + dy * dy) * 1.5; // Kurvenstärke + return `M${d.source.x},${d.source.y}A${dr},${dr} 0 0,1 ${d.target.x},${d.target.y}`; + }); + + // Aktualisierung der Knotenpositionen + this.nodeElements + .attr('transform', d => `translate(${d.x},${d.y})`); + } + + // D3.js Drag-Funktionen + dragStarted(event, d) { + if (!event.active) this.simulation.alphaTarget(0.3).restart(); + d.fx = d.x; + d.fy = d.y; + } + + dragged(event, d) { + d.fx = event.x; + d.fy = event.y; + } + + dragEnded(event, d) { + if (!event.active) this.simulation.alphaTarget(0); + d.fx = null; + d.fy = null; + } + + // Hover-Effekte für Knoten + nodeMouseover(event, d) { + if (this.tooltipEnabled) { + // Tooltip-Inhalt erstellen + const tooltipContent = ` +
+                <div class="tooltip-title">${d.name}</div>
+                <div class="tooltip-description">${d.description || 'Keine Beschreibung verfügbar'}</div>
+                ${d.thought_count > 0 ? `<div class="tooltip-meta">${d.thought_count} Gedanken verknüpft</div>
` : ''} + `; + + this.tooltipDiv.html(tooltipContent) + .style('opacity', 0.95); + + // Tooltip positionieren (oberhalb des Nodes) + const nodeRect = event.target.getBoundingClientRect(); + const tooltipWidth = 250; + const tooltipHeight = 100; // Ungefähre Höhe des Tooltips + + const leftPos = nodeRect.left + (nodeRect.width / 2) - (tooltipWidth / 2); + const topPos = nodeRect.top - tooltipHeight - 10; // 10px Abstand + + this.tooltipDiv + .style('left', `${leftPos}px`) + .style('top', `${topPos}px`) + .style('width', `${tooltipWidth}px`); + } + + // Speichern des aktuellen Hover-Nodes + this.mouseoverNode = d; + + // Highlights für verbundene Nodes und Links hinzufügen + if (this.g) { + // Verbundene Nodes identifizieren + const connectedNodes = this.getConnectedNodesById(d.id); + const connectedNodeIds = connectedNodes.map(node => node.id); + + // Alle Nodes etwas transparenter machen + this.g.selectAll('.node') + .transition() + .duration(200) + .style('opacity', node => { + if (node.id === d.id || connectedNodeIds.includes(node.id)) { + return 1.0; + } else { + return 0.5; + } + }); + + // Den Hover-Node hervorheben mit größerem Radius + this.g.selectAll('.node') + .filter(node => node.id === d.id) + .select('circle') + .transition() + .duration(200) + .attr('r', this.nodeRadius * 1.2) + .style('filter', 'url(#hover-glow)') + .style('stroke', 'rgba(255, 255, 255, 0.25)'); + + // Verbundene Links hervorheben + this.g.selectAll('.link') + .transition() + .duration(200) + .style('opacity', link => { + const sourceId = link.source.id || link.source; + const targetId = link.target.id || link.target; + + if (sourceId === d.id || targetId === d.id) { + return 0.9; + } else { + return 0.3; + } + }) + .style('stroke-width', link => { + const sourceId = link.source.id || link.source; + const targetId = link.target.id || link.target; + + if (sourceId === d.id || targetId === d.id) { + return 3; + } else { + return 2; + } + }) + .style('stroke', link => { + const sourceId = link.source.id || link.source; + const targetId = link.target.id || link.target; + + if (sourceId === d.id || targetId === d.id) { + return 'rgba(179, 143, 255, 0.7)'; + } else { + return 'rgba(255, 255, 255, 0.3)'; + } + }); + } + } + + nodeMouseout(event, d) { + if (this.tooltipEnabled) { + this.tooltipDiv.transition() + .duration(200) + .style('opacity', 0); + } + + this.mouseoverNode = null; + + // Highlights zurücksetzen, falls kein Node ausgewählt ist + if (!this.selectedNode && this.g) { + // Alle Nodes wieder auf volle Deckkraft setzen + this.g.selectAll('.node') + .transition() + .duration(200) + .style('opacity', 1.0); + + // Hover-Node-Radius zurücksetzen + this.g.selectAll('.node') + .filter(node => node.id === d.id) + .select('circle') + .transition() + .duration(200) + .attr('r', this.nodeRadius) + .style('filter', 'none') + .style('stroke', 'rgba(255, 255, 255, 0.12)'); + + // Links zurücksetzen + this.g.selectAll('.link') + .transition() + .duration(200) + .style('opacity', 0.7) + .style('stroke-width', 2) + .style('stroke', 'rgba(255, 255, 255, 0.3)'); + } + // Falls ein Node ausgewählt ist, den Highlight-Status für diesen beibehalten + else if (this.selectedNode && this.g) { + const connectedNodes = this.getConnectedNodesById(this.selectedNode.id); + const connectedNodeIds = connectedNodes.map(node => node.id); + + // Alle Nodes auf den richtigen Highlight-Status setzen + this.g.selectAll('.node') + .transition() + .duration(200) + .style('opacity', node => { + if (node.id === this.selectedNode.id || 
connectedNodeIds.includes(node.id)) { + return 1.0; + } else { + return 0.5; + } + }); + + // Hover-Node zurücksetzen, wenn er nicht der ausgewählte ist + if (d.id !== this.selectedNode.id) { + this.g.selectAll('.node') + .filter(node => node.id === d.id) + .select('circle') + .transition() + .duration(200) + .attr('r', this.nodeRadius) + .style('filter', 'none') + .style('stroke', 'rgba(255, 255, 255, 0.12)'); + } + + // Links auf den richtigen Highlight-Status setzen + this.g.selectAll('.link') + .transition() + .duration(200) + .style('opacity', link => { + const sourceId = link.source.id || link.source; + const targetId = link.target.id || link.target; + + if (sourceId === this.selectedNode.id || targetId === this.selectedNode.id) { + return 0.9; + } else { + return 0.3; + } + }) + .style('stroke-width', link => { + const sourceId = link.source.id || link.source; + const targetId = link.target.id || link.target; + + if (sourceId === this.selectedNode.id || targetId === this.selectedNode.id) { + return 3; + } else { + return 2; + } + }) + .style('stroke', link => { + const sourceId = link.source.id || link.source; + const targetId = link.target.id || link.target; + + if (sourceId === this.selectedNode.id || targetId === this.selectedNode.id) { + return 'rgba(179, 143, 255, 0.7)'; + } else { + return 'rgba(255, 255, 255, 0.3)'; + } + }); + } + } + + // Findet alle verbundenen Knoten zu einem gegebenen Knoten + getConnectedNodes(node) { + if (!this.links || !this.nodes || !node) return []; + + // Sicherstellen, dass der Knoten eine ID hat + const nodeId = node.id || node; + + return this.nodes.filter(n => + this.links.some(link => { + const sourceId = link.source?.id || link.source; + const targetId = link.target?.id || link.target; + return (sourceId === nodeId && targetId === n.id) || + (targetId === nodeId && sourceId === n.id); + }) + ); + } + + // Prüft, ob zwei Knoten verbunden sind + isConnected(a, b) { + if (!this.links || !a || !b) return false; + + // Sicherstellen, dass die Knoten IDs haben + const aId = a.id || a; + const bId = b.id || b; + + return this.links.some(link => { + const sourceId = link.source?.id || link.source; + const targetId = link.target?.id || link.target; + return (sourceId === aId && targetId === bId) || + (targetId === aId && sourceId === bId); + }); + } + + // Überprüft, ob ein Link zwischen zwei Knoten existiert + hasLink(source, target) { + if (!this.links || !source || !target) return false; + + // Sicherstellen, dass die Knoten IDs haben + const sourceId = source.id || source; + const targetId = target.id || target; + + return this.links.some(link => { + const linkSourceId = link.source?.id || link.source; + const linkTargetId = link.target?.id || link.target; + return (linkSourceId === sourceId && linkTargetId === targetId) || + (linkTargetId === sourceId && linkSourceId === targetId); + }); + } + + // Sicherere Methode zum Abrufen verbundener Knoten, die Prüfungen enthält + getConnectedNodesById(nodeId) { + if (!this.links || !this.nodes || !nodeId) return []; + + return this.nodes.filter(n => + this.links.some(link => { + const sourceId = link.source.id || link.source; + const targetId = link.target.id || link.target; + return (sourceId === nodeId && targetId === n.id) || + (targetId === nodeId && sourceId === n.id); + }) + ); + } + + // Aktualisiert die Verbindungszählungen für alle Knoten + updateConnectionCounts() { + if (!this.nodes || !this.links) return; + + // Für jeden Knoten die Anzahl der Verbindungen berechnen + this.nodes.forEach(node 
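+        // Hinweis-Skizze: isConnected/hasLink durchsuchen this.links bei jedem Aufruf
+        // linear (O(|E|)). Für große Graphen wäre ein einmal aufgebauter Index denkbar:
+        //   const key = (a, b) => `${a}->${b}`;
+        //   const index = new Set(this.links.map(l =>
+        //       key(l.source.id || l.source, l.target.id || l.target)));
+        //   // Abfrage dann in O(1):
+        //   const verbunden = index.has(key(aId, bId)) || index.has(key(bId, aId));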
=> { + // Sichere Methode, um verbundene Knoten zu zählen + const connectedNodes = this.nodes.filter(n => + n.id !== node.id && this.links.some(link => { + const sourceId = link.source.id || link.source; + const targetId = link.target.id || link.target; + return (sourceId === node.id && targetId === n.id) || + (targetId === node.id && sourceId === n.id); + }) + ); + + // Speichere die Anzahl als Eigenschaft des Knotens + node.connectionCount = connectedNodes.length; + }); + } + + // Klick-Handler für Knoten + nodeClicked(event, d) { + event.preventDefault(); + event.stopPropagation(); + + // Selection-Handling: Knoten auswählen/abwählen + if (this.selectedNode === d) { + // Wenn der gleiche Knoten geklickt wird, Selektion aufheben + this.selectedNode = null; + this.nodeElements.classed('selected', false); + + this.nodeElements + .select('circle:not(.node-background):not(.thought-indicator)') + .transition() + .duration(300) + .attr('r', this.nodeRadius) + .style('filter', 'url(#glass-with-reflection)') + .attr('stroke-width', 2); + + // Gedankenbereich ausblenden, wenn vorhanden + const thoughtContainer = document.getElementById('thought-container'); + if (thoughtContainer) { + // Sanfte Ausblendanimation + thoughtContainer.style.transition = 'all 0.3s ease-out'; + thoughtContainer.style.opacity = '0'; + thoughtContainer.style.transform = 'translateY(10px)'; + + setTimeout(() => { + // Gedankenbereich komplett ausblenden + thoughtContainer.style.display = 'none'; + + // "Empty state" anzeigen oder andere UI-Anpassungen vornehmen + const emptyStateEl = document.getElementById('mindmap-empty-state'); + if (emptyStateEl) { + emptyStateEl.style.display = 'flex'; + emptyStateEl.style.opacity = '0'; + setTimeout(() => { + emptyStateEl.style.transition = 'all 0.5s ease'; + emptyStateEl.style.opacity = '1'; + }, 50); + } + }, 300); + } + + // Alle Kanten zurücksetzen + this.linkElements + .classed('highlighted', false) + .transition() + .duration(300) + .style('stroke', 'rgba(255, 255, 255, 0.3)') + .style('stroke-width', 2) + .style('opacity', 0.7); + + // Interface-Callback für Knoten-Abwahl + if (typeof window.onNodeDeselected === 'function') { + window.onNodeDeselected(); + } + + // Flash-Nachricht für abgewählten Knoten + this.showFlash('Knotenauswahl aufgehoben', 'info', 2000); + + return; + } + + // Bisher ausgewählten Knoten zurücksetzen + if (this.selectedNode) { + this.nodeElements + .filter(n => n === this.selectedNode) + .classed('selected', false) + .select('circle:not(.node-background):not(.thought-indicator)') + .transition() + .duration(300) + .attr('r', this.nodeRadius) + .style('filter', 'url(#glass-with-reflection)') + .attr('stroke-width', 2); + } + + // Neuen Knoten auswählen + this.selectedNode = d; + + // Selected-Klasse für den Knoten setzen + this.nodeElements + .classed('selected', n => n === d); + + // Visuelles Feedback für Auswahl + this.nodeElements + .filter(n => n === d) + .select('circle:not(.node-background):not(.thought-indicator)') + .transition() + .duration(300) + .attr('r', this.selectedNodeRadius) + .style('filter', 'url(#selected-glow)') + .attr('stroke-width', 3) + .attr('stroke', 'rgba(179, 143, 255, 0.6)'); + + // Verbundene Kanten hervorheben + const connectedLinks = this.links.filter(link => + link.source === d || link.source.id === d.id || + link.target === d || link.target.id === d.id + ); + + // Alle Kanten zurücksetzen und dann verbundene hervorheben + this.linkElements + .classed('highlighted', false) + .transition() + .duration(300) + 
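+        // Anmerkungs-Skizze zur ID-Konvention "node_123"/"category_456", die
+        // fetchThoughtsForNode weiter unten per split('_')[1] zerlegt; eine robustere
+        // Variante (Annahme, nicht Teil des Originals) wäre ein Regex-Abgleich:
+        //   const match = String(nodeId).match(/_(\d+)$/);
+        //   const id = match ? match[1] : null; // null statt undefined bei Fremdformat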
.style('stroke', 'rgba(255, 255, 255, 0.3)') + .style('stroke-width', 2) + .style('opacity', 0.7); + + this.linkElements + .filter(link => + connectedLinks.some(l => + (l.source === link.source || l.source.id === link.source.id) && + (l.target === link.target || l.target.id === link.target.id) + ) + ) + .classed('highlighted', true) + .transition() + .duration(300) + .style('stroke', 'rgba(179, 143, 255, 0.7)') + .style('stroke-width', 3) + .style('opacity', 0.9); + + // Knoten zentrieren + this.centerNodeInView(d); + + // Gedanken laden + this.loadThoughtsForNode(d); + + // Callback für UI-Integration + if (typeof this.onNodeClick === 'function') { + this.onNodeClick(d); + } + + // Interface-Callback für externe Verwendung + if (typeof window.onNodeSelected === 'function') { + window.onNodeSelected(d); + } + } + + // Lädt die Gedanken für einen Knoten und zeigt sie an + loadThoughtsForNode(node) { + // UI-Element für Gedanken finden + const thoughtContainer = document.getElementById('thought-container'); + const loadingIndicator = document.getElementById('thoughts-loading'); + const thoughtsList = document.getElementById('thoughts-list'); + const thoughtsTitle = document.getElementById('thoughts-title'); + const emptyStateEl = document.getElementById('mindmap-empty-state'); + + if (!thoughtContainer || !thoughtsList) { + console.error('Gedanken-Container nicht gefunden'); + this.showFlash('Fehler: Gedanken-Container nicht gefunden', 'error'); + return; + } + + // "Empty state" ausblenden + if (emptyStateEl) { + emptyStateEl.style.transition = 'all 0.3s ease'; + emptyStateEl.style.opacity = '0'; + setTimeout(() => { + emptyStateEl.style.display = 'none'; + }, 300); + } + + // Container anzeigen mit Animation + thoughtContainer.style.display = 'block'; + setTimeout(() => { + thoughtContainer.style.transition = 'all 0.4s ease'; + thoughtContainer.style.opacity = '1'; + thoughtContainer.style.transform = 'translateY(0)'; + }, 50); + + // Titel setzen + if (thoughtsTitle) { + thoughtsTitle.textContent = `Gedanken zu "${node.name}"`; + } + + // Ladeanimation anzeigen + if (loadingIndicator) { + loadingIndicator.style.display = 'flex'; + } + + // Bisherige Gedanken leeren + if (thoughtsList) { + thoughtsList.innerHTML = ''; + } + + // Flash-Nachricht über ausgewählten Knoten + this.showFlash(`Knoten "${node.name}" ausgewählt`, 'info'); + + // Verzögerung für Animation + setTimeout(() => { + // API-Aufruf für echte Daten aus der Datenbank + this.fetchThoughtsForNode(node.id) + .then(thoughts => { + // Ladeanimation ausblenden + if (loadingIndicator) { + loadingIndicator.style.display = 'none'; + } + + // Gedanken anzeigen oder "leer"-Zustand + if (thoughts && thoughts.length > 0) { + this.renderThoughts(thoughts, thoughtsList); + } else { + this.renderEmptyThoughts(thoughtsList, node); + this.showFlash(`Keine Gedanken zu "${node.name}" gefunden`, 'warning'); + } + }) + .catch(error => { + console.error('Fehler beim Laden der Gedanken:', error); + if (loadingIndicator) { + loadingIndicator.style.display = 'none'; + } + this.renderErrorState(thoughtsList); + this.showFlash('Fehler beim Laden der Gedanken. Bitte versuche es später erneut.', 'error'); + }); + }, 600); // Verzögerung für bessere UX + } + + // Holt Gedanken für einen Knoten aus der Datenbank + async fetchThoughtsForNode(nodeId) { + try { + // Extrahiere die tatsächliche ID aus dem nodeId Format (z.B. 
"node_123" oder "category_456") + const id = nodeId.toString().split('_')[1]; + if (!id) { + console.warn('Ungültige Node-ID: ', nodeId); + this.showFlash('Ungültige Knoten-ID: ' + nodeId, 'warning'); + return []; + } + + // API-Aufruf an den entsprechenden Endpunkt + const response = await fetch(`/api/nodes/${id}/thoughts`); + + if (!response.ok) { + throw new Error(`API-Fehler: ${response.statusText}`); + } + + const thoughts = await response.json(); + console.log('Geladene Gedanken für Knoten:', thoughts); + + if (thoughts.length > 0) { + this.showFlash(`${thoughts.length} Gedanken zum Thema geladen`, 'info'); + } else { + this.showFlash('Keine Gedanken für diesen Knoten gefunden', 'info'); + } + + return thoughts; + } catch (error) { + console.error('Fehler beim Laden der Gedanken für Knoten:', error); + this.showFlash('Fehler beim Laden der Gedanken', 'error'); + return []; + } + } + + // Rendert die Gedanken in der UI + renderThoughts(thoughts, container) { + if (!container) return; + + container.innerHTML = ''; + + thoughts.forEach(thought => { + const thoughtCard = document.createElement('div'); + thoughtCard.className = 'thought-card'; + thoughtCard.setAttribute('data-id', thought.id); + + const cardColor = thought.color_code || this.colorPalette.default; + + thoughtCard.innerHTML = ` +
+                <div class="thought-card-header" style="border-left: 3px solid ${cardColor}">
+                    <h4 class="thought-title">${thought.title}</h4>
+                    <div class="thought-meta">
+                        <span class="thought-date">${new Date(thought.created_at).toLocaleDateString('de-DE')}</span>
+                        <span class="thought-author">${thought.author ? `von ${thought.author.username}` : ''}</span>
+                    </div>
+                </div>
+                <div class="thought-card-body">
+                    <p class="thought-abstract">${thought.abstract || thought.content.substring(0, 150) + '...'}</p>
+                </div>
+                <a class="thought-link" href="/thoughts/${thought.id}">Gedanken öffnen</a>
+ + `; + + // Event-Listener für Klick auf Gedanken + thoughtCard.addEventListener('click', (e) => { + // Verhindern, dass der Link-Klick den Kartenklick auslöst + if (e.target.tagName === 'A') return; + window.location.href = `/thoughts/${thought.id}`; + }); + + container.appendChild(thoughtCard); + }); + } + + // Rendert eine Leermeldung, wenn keine Gedanken vorhanden sind + renderEmptyThoughts(container, node) { + if (!container) return; + + const emptyState = document.createElement('div'); + emptyState.className = 'empty-thoughts-state'; + + emptyState.innerHTML = ` +
+                <h4 class="empty-thoughts-title">Keine Gedanken verknüpft</h4>
+                <p class="empty-thoughts-text">Zu "${node.name}" sind noch keine Gedanken verknüpft.</p>
+ + `; + + container.appendChild(emptyState); + } + + // Rendert einen Fehlerzustand + renderErrorState(container) { + if (!container) return; + + const errorState = document.createElement('div'); + errorState.className = 'error-thoughts-state'; + + errorState.innerHTML = ` +
+                <h4 class="error-thoughts-title">Fehler beim Laden</h4>
+                <p class="error-thoughts-text">Die Gedanken konnten nicht geladen werden. Bitte versuche es später erneut.</p>
+                <button type="button" class="retry-button">Erneut versuchen</button>
+
+        `;
+
+        // Event-Listener für Retry-Button
+        const retryButton = errorState.querySelector('.retry-button');
+        if (retryButton && this.selectedNode) {
+            retryButton.addEventListener('click', () => {
+                this.loadThoughtsForNode(this.selectedNode);
+            });
+        }
+
+        container.appendChild(errorState);
+    }
+
+    // Zentriert einen Knoten in der Ansicht
+    centerNodeInView(node) {
+        // Sanfter Übergang zur Knotenzentrierung
+        const transform = d3.zoomTransform(this.svg.node());
+        const scale = transform.k;
+
+        const x = -node.x * scale + this.width / 2;
+        const y = -node.y * scale + this.height / 2;
+
+        this.svg.transition()
+            .duration(750)
+            .call(
+                d3.zoom().transform,
+                d3.zoomIdentity.translate(x, y).scale(scale)
+            );
+
+        // Flash-Nachricht für Zentrierung
+        if (node && node.name) {
+            this.showFlash(`Ansicht auf "${node.name}" zentriert`, 'info', 2000);
+        }
+    }
+
+    // Fehlermeldung anzeigen
+    showError(message) {
+        // Standard-Fehlermeldung als Banner
+        const errorBanner = d3.select('body').selectAll('.error-banner').data([0]);
+
+        const errorEnter = errorBanner.enter()
+            .append('div')
+            .attr('class', 'error-banner')
+            .style('position', 'fixed')
+            .style('bottom', '-100px')
+            .style('left', '50%')
+            .style('transform', 'translateX(-50%)')
+            .style('background', 'rgba(220, 38, 38, 0.9)')
+            .style('color', 'white')
+            .style('padding', '12px 20px')
+            .style('border-radius', '8px')
+            .style('z-index', '1000')
+            .style('box-shadow', '0 10px 25px rgba(0, 0, 0, 0.3)')
+            .style('font-weight', '500')
+            .style('max-width', '90%')
+            .style('text-align', 'center');
+
+        const banner = errorEnter.merge(errorBanner);
+        banner.html(` ${message}`)
+            .transition()
+            .duration(500)
+            .style('bottom', '20px')
+            .transition()
+            .delay(5000)
+            .duration(500)
+            .style('bottom', '-100px');
+
+        // Auch als Flash-Nachricht anzeigen
+        this.showFlash(message, 'error');
+    }
+
+    // Fokussieren auf einen bestimmten Knoten per ID
+    focusNode(nodeId) {
+        const targetNode = this.nodes.find(n => n.id === nodeId);
+        if (!targetNode) {
+            this.showFlash(`Knoten mit ID "${nodeId}" nicht gefunden`, 'error');
+            return;
+        }
+
+        // Ausgewählten Zustand zurücksetzen
+        this.selectedNode = null;
+
+        // Node-Klick simulieren; Stub-Event statt null, da nodeClicked
+        // preventDefault/stopPropagation auf dem Event aufruft
+        this.nodeClicked(new MouseEvent('click'), targetNode);
+
+        // Fokussieren mit einer Animation
+        if (targetNode.x && targetNode.y) {
+            const transform = d3.zoomIdentity
+                .translate(this.width / 2 - targetNode.x * 1.2, this.height / 2 - targetNode.y * 1.2)
+                .scale(1.2);
+
+            this.svg.transition()
+                .duration(750)
+                .call(
+                    d3.zoom().transform,
+                    transform
+                );
+
+            this.showFlash(`Fokus auf Knoten "${targetNode.name}" gesetzt`, 'success');
+        }
+    }
+
+    // Filtert Knoten nach Suchbegriff
+    filterBySearchTerm(searchTerm) {
+        if (!searchTerm || searchTerm.trim() === '') {
+            // Alle Knoten anzeigen, wenn kein Suchbegriff
+            this.nodeElements
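+        // Rechenweg-Skizze zu centerNodeInView oben: D3 bildet einen Punkt über
+        //   Bildschirm = Knoten * k + Translation
+        // ab. Soll der Knoten in der Bildmitte (w/2, h/2) liegen, folgt
+        //   tx = w/2 - node.x * k   und   ty = h/2 - node.y * k,
+        // also genau die oben berechneten Werte x und y (k = aktueller Zoomfaktor).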
.style('display', d => matchingNodeIds.includes(d.id) ? 'block' : 'none') + .selectAll('circle') + .style('opacity', 1); + + this.textElements + .style('opacity', d => matchingNodeIds.includes(d.id) ? 1 : 0.2); + + this.linkElements + .style('display', link => + matchingNodeIds.includes(link.source.id) && matchingNodeIds.includes(link.target.id) ? 'block' : 'none') + .style('stroke-opacity', 0.7); + + // Wenn nur ein Knoten gefunden wurde, darauf fokussieren + if (matchingNodes.length === 1) { + this.focusNode(matchingNodes[0].id); + } + + // Wenn mehr als ein Knoten gefunden wurde, Simulation mit reduzierter Stärke neu starten + if (matchingNodes.length > 1) { + this.simulation.alpha(0.3).restart(); + this.showFlash(`${matchingNodes.length} Knoten für "${searchTerm}" gefunden`, 'success'); + } else if (matchingNodes.length === 0) { + this.showFlash(`Keine Knoten für "${searchTerm}" gefunden`, 'warning'); + } + } +} + +// D3-Erweiterungen für spezielle Effekte +class D3Extensions { + static createGlowFilter(defs, id, color = '#b38fff', strength = 5) { + const filter = defs.append('filter') + .attr('id', id) + .attr('height', '300%') + .attr('width', '300%') + .attr('x', '-100%') + .attr('y', '-100%'); + + // Farbe und Sättigung + const colorMatrix = filter.append('feColorMatrix') + .attr('type', 'matrix') + .attr('values', ` + 1 0 0 0 ${color === '#b38fff' ? 0.7 : 0.35} + 0 1 0 0 ${color === '#58a9ff' ? 0.7 : 0.35} + 0 0 1 0 ${color === '#58a9ff' ? 0.7 : 0.55} + 0 0 0 1 0 + `) + .attr('result', 'colored'); + + // Weichzeichner für Glühen + const blur = filter.append('feGaussianBlur') + .attr('in', 'colored') + .attr('stdDeviation', strength) + .attr('result', 'blur'); + + // Kombination von Original und Glühen + const merge = filter.append('feMerge'); + + merge.append('feMergeNode') + .attr('in', 'blur'); + + merge.append('feMergeNode') + .attr('in', 'SourceGraphic'); + + return filter; + } + + static createShadowFilter(defs, id) { + const filter = defs.append('filter') + .attr('id', id) + .attr('height', '200%') + .attr('width', '200%') + .attr('x', '-50%') + .attr('y', '-50%'); + + // Offset der Lichtquelle + const offset = filter.append('feOffset') + .attr('in', 'SourceAlpha') + .attr('dx', 3) + .attr('dy', 4) + .attr('result', 'offset'); + + // Weichzeichnung für Schatten + const blur = filter.append('feGaussianBlur') + .attr('in', 'offset') + .attr('stdDeviation', 5) + .attr('result', 'blur'); + + // Schatten-Opazität + const opacity = filter.append('feComponentTransfer'); + + opacity.append('feFuncA') + .attr('type', 'linear') + .attr('slope', 0.3); + + // Zusammenführen + const merge = filter.append('feMerge'); + + merge.append('feMergeNode'); + merge.append('feMergeNode') + .attr('in', 'SourceGraphic'); + + return filter; + } + + static createGlassMorphismFilter(defs, id) { + const filter = defs.append('filter') + .attr('id', id) + .attr('width', '300%') + .attr('height', '300%') + .attr('x', '-100%') + .attr('y', '-100%'); + + // Basis-Hintergrundfarbe + const bgColor = filter.append('feFlood') + .attr('flood-color', 'rgba(24, 28, 45, 0.75)') + .attr('result', 'bgColor'); + + // Weichzeichnung des Originalelements + const blur = filter.append('feGaussianBlur') + .attr('in', 'SourceGraphic') + .attr('stdDeviation', '3') + .attr('result', 'blur'); + + // Komposition des Glaseffekts mit Original + const composite1 = filter.append('feComposite') + .attr('in', 'bgColor') + .attr('in2', 'blur') + .attr('operator', 'in') + .attr('result', 'glass'); + + // Leichter Farbakzent + const 
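+    // Hinweis-Skizze: Das weiter oben referenzierte D3Extensions.getAdaptiveFontSize
+    // ist in diesem Diff nicht enthalten; eine mögliche Umsetzung (reine Annahme)
+    // im Stil der übrigen statischen Helfer dieser Klasse:
+    //   static getAdaptiveFontSize(text, maxPx, minPx) {
+    //       const ueberlaenge = Math.max(0, (text || '').length - 10);
+    //       return Math.max(minPx, maxPx - ueberlaenge); // längere Labels => kleinere Schrift
+    //   }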
colorMatrix = filter.append('feColorMatrix') + .attr('in', 'glass') + .attr('type', 'matrix') + .attr('values', '1 0 0 0 0.1 0 1 0 0 0.1 0 0 1 0 0.3 0 0 0 1 0') + .attr('result', 'coloredGlass'); + + // Leichte Transparenz an den Rändern + const specLight = filter.append('feSpecularLighting') + .attr('in', 'blur') + .attr('surfaceScale', '3') + .attr('specularConstant', '0.75') + .attr('specularExponent', '20') + .attr('lighting-color', '#ffffff') + .attr('result', 'specLight'); + + specLight.append('fePointLight') + .attr('x', '-20') + .attr('y', '-30') + .attr('z', '120'); + + // Lichtkombination + const composite2 = filter.append('feComposite') + .attr('in', 'specLight') + .attr('in2', 'coloredGlass') + .attr('operator', 'in') + .attr('result', 'lightedGlass'); + + // Alle Effekte kombinieren + const merge = filter.append('feMerge') + .attr('result', 'glassMerge'); + + merge.append('feMergeNode') + .attr('in', 'coloredGlass'); + + merge.append('feMergeNode') + .attr('in', 'lightedGlass'); + + merge.append('feMergeNode') + .attr('in', 'SourceGraphic'); + + return filter; + } + + // Erstellt einen erweiterten 3D-Glaseffekt mit Lichtreflexion + static create3DGlassEffect(defs, id) { + const filter = defs.append('filter') + .attr('id', id) + .attr('width', '300%') + .attr('height', '300%') + .attr('x', '-100%') + .attr('y', '-100%'); + + // Hintergrund-Färbung mit Transparenz + const bgColor = filter.append('feFlood') + .attr('flood-color', 'rgba(24, 28, 45, 0.7)') + .attr('result', 'bgColor'); + + // Alpha-Kanal modifizieren + const composite1 = filter.append('feComposite') + .attr('in', 'bgColor') + .attr('in2', 'SourceAlpha') + .attr('operator', 'in') + .attr('result', 'shape'); + + // Leichte Unschärfe hinzufügen + const blur = filter.append('feGaussianBlur') + .attr('in', 'shape') + .attr('stdDeviation', '2') + .attr('result', 'blurredShape'); + + // Lichtquelle für 3D-Effekt + const specLight = filter.append('feSpecularLighting') + .attr('in', 'blurredShape') + .attr('surfaceScale', '5') + .attr('specularConstant', '1') + .attr('specularExponent', '20') + .attr('lighting-color', '#ffffff') + .attr('result', 'specLight'); + + specLight.append('fePointLight') + .attr('x', '50') + .attr('y', '-50') + .attr('z', '200'); + + // Farbmatrix für Lichttönung + const colorMatrix = filter.append('feColorMatrix') + .attr('in', 'specLight') + .attr('type', 'matrix') + .attr('values', '1 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 1 0') + .attr('result', 'coloredLight'); + + // Alle Effekte kombinieren + const merge = filter.append('feMerge'); + + merge.append('feMergeNode') + .attr('in', 'blurredShape'); + + merge.append('feMergeNode') + .attr('in', 'coloredLight'); + + merge.append('feMergeNode') + .attr('in', 'SourceGraphic'); + + return filter; + } +} + +// Globales Objekt für Zugriff außerhalb des Moduls +window.MindMapVisualization = MindMapVisualization; \ No newline at end of file diff --git a/static/neural-network-background-full.js b/static/neural-network-background-full.js new file mode 100644 index 0000000..266732f --- /dev/null +++ b/static/neural-network-background-full.js @@ -0,0 +1,1109 @@ +/** + * Neural Network Background Animation + * Modern, darker, mystical theme using WebGL + * Subtle flowing network aesthetic + */ + +class NeuralNetworkBackground { + constructor(options = {}) { + this.canvas = document.createElement('canvas'); + this.canvas.style.position = 'fixed'; + this.canvas.style.top = '0'; + this.canvas.style.left = '0'; + this.canvas.style.width = '100%'; + 
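+        // Verwendungs-Skizze (Aufruf außerhalb der Klasse; alle Optionen werden im
+        // Konstruktor ausgewertet):
+        //   const hintergrund = new NeuralNetworkBackground({
+        //       isDarkMode: true,
+        //       useWebGL: true,
+        //       frameRate: 30,
+        //       config: { nodeCount: 40 } // überschreibt einzelne Standardwerte
+        //   });
+        //   // Aufräumen: hintergrund.destroy(); entfernt Canvas und WebGL-Ressourcen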
+        this.canvas.style.height = '100%';
+        this.canvas.style.zIndex = '-1';
+        this.canvas.style.pointerEvents = 'none';
+        this.canvas.style.opacity = '0.6'; // Noch weiter reduzierte Opazität für subtileren Hintergrund
+
+        // Standardkonfiguration mit subtileren Werten
+        this.config = {
+            nodeCount: 50,              // Weniger Knoten
+            nodeSize: 1.2,              // Kleinere Knoten
+            nodeVariation: 0.6,         // Zufällige Größenvariation je Knoten (in createNodes verwendet)
+            connectionDistance: 150,    // Reduzierte Verbindungsdistanz
+            connectionOpacity: 0.3,     // Sanftere Verbindungslinien
+            clusterCount: 2,            // Weniger Cluster
+            clusterRadius: 380,         // Größerer Cluster-Radius für mehr Verteilung
+            clusteringFactor: 0.7,      // Anteil der Knoten, die einem Cluster zugeordnet werden
+            animationSpeed: 0.25,       // Langsamere Animation
+            flowDensity: 0.05,          // Deutlich weniger Flussanimationen
+            flowSpeed: 0.04,            // Langsamerer Fluss
+            flowLength: 0.04,           // Kürzere Flussstreifen
+            pulseSpeed: 0.0004,         // Sehr sanftes Pulsieren
+            activationFrequency: 0.4,   // Seltenere Aktivierungen
+            minConnectionsPerNode: 1,
+            maxConnectionsPerNode: 3,   // Weniger maximale Verbindungen
+            maxConnections: 3,          // Obergrenze je Knoten (in createConnections/renderNodesWebGL referenziert)
+            lineUnderFlow: true,        // Strichlinie unter Fluss anzeigen
+            lineUnderFlowOpacity: 0.2,  // Opazität der Strichlinie
+            lineUnderFlowWidth: 1.0,    // Breite der Strichlinie
+            sequentialFlows: true,      // Flüsse nacheinander anzeigen
+            maxSimultaneousFlows: 3     // Maximale Anzahl gleichzeitiger Flüsse
+        };
+
+        // Benutzerkonfiguration übernehmen (falls vorhanden)
+        this.config = { ...this.config, ...options.config };
+
+        // Dunkler Modus mit subtileren Farben
+        this.darkModeColors = {
+            background: '#050a12',         // Dunklerer Hintergrund
+            nodeColor: '#5a79b7',          // Sanfteres Blau
+            connectionColor: '#2a3c68',    // Gedämpfteres Blau
+            flowColor: '#4d6ea8',          // Sanfteres Flussblau
+            lineUnderFlowColor: '#223a5c', // Dunkleres Blau für Strichlinie
+            nodePulse: { r: 120, g: 150, b: 200 } // Sanftere Pulse-Farbe
+        };
+
+        // Heller Modus mit subtileren Farben
+        this.lightModeColors = {
+            background: '#f9fbff',         // Hellerer Hintergrund
+            nodeColor: '#b9d0f8',          // Helleres, sanfteres Blau
+            connectionColor: '#e0f0ff',    // Sehr helles Blau für subtilere Verbindungen
+            flowColor: '#92b2e6',          // Sanftes mittleres Blau
+            lineUnderFlowColor: '#c7d8f2', // Helles Blau für Strichlinie
+            nodePulse: { r: 110, g: 140, b: 200 } // Sanftere Pulse-Farbe
+        };
+
+        // Erweiterte Konfiguration
+        this.nodes = [];
+        this.connections = [];
+        this.flows = [];
+        this.isDarkMode = options.isDarkMode !== undefined ? options.isDarkMode : true;
+        this.useWebGL = options.useWebGL !== undefined ?
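+        // Hinweis-Skizze: { ...this.config, ...options.config } ist ein flaches Merge;
+        // options.config ersetzt nur Schlüssel der obersten Ebene, alle übrigen
+        // Standardwerte bleiben erhalten. { config: { nodeCount: 80 } } ändert also
+        // ausschließlich nodeCount.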
options.useWebGL : true; + this.frameRate = options.frameRate || 30; // Reduzierte Framerate für bessere Performance + this.lastFrameTime = 0; + this.frameInterval = 1000 / this.frameRate; + this.lastFlowTime = 0; // Zeit des letzten Fluss-Starts + this.flowInterval = 2000; // Mindestzeit zwischen neuen Flüssen (ms) + + // Ereignisbehandlung für Fenstergröße und Theme-Wechsel + this.resizeTimeout = null; + window.addEventListener('resize', this.onResize.bind(this)); + document.addEventListener('theme-changed', (e) => { + this.setDarkMode(e.detail.isDarkMode); + }); + + // Initialisierung + this.webglSetupComplete = false; + if (this.useWebGL && this.setupWebGL()) { + this.webglSetupComplete = true; + } else { + this.ctx = this.canvas.getContext('2d'); + } + + // If canvas already exists, remove it first + const existingCanvas = document.getElementById('neural-network-background'); + if (existingCanvas) { + existingCanvas.remove(); + } + + // Append to body as first child to ensure it's behind everything + if (document.body.firstChild) { + document.body.insertBefore(this.canvas, document.body.firstChild); + } else { + document.body.appendChild(this.canvas); + } + + // Animation properties + this.animationFrameId = null; + + // Initialize + this.init(); + + // Event listeners + document.addEventListener('darkModeToggled', (event) => { + this.isDarkMode = event.detail.isDark; + }); + + // Log that the background is initialized + console.log('Neural Network Background initialized'); + } + + init() { + this.resizeCanvas(); + + if (this.useWebGL) { + this.initWebGL(); + } + + this.createNodes(); + this.createConnections(); + this.startAnimation(); + } + + resizeCanvas() { + const pixelRatio = window.devicePixelRatio || 1; + const width = window.innerWidth; + const height = window.innerHeight; + + this.canvas.style.width = width + 'px'; + this.canvas.style.height = height + 'px'; + this.canvas.width = width * pixelRatio; + this.canvas.height = height * pixelRatio; + + if (this.useWebGL) { + this.gl.viewport(0, 0, this.canvas.width, this.canvas.height); + } else if (this.ctx) { + this.ctx.scale(pixelRatio, pixelRatio); + } + + // Recalculate node positions after resize + if (this.nodes.length) { + this.createNodes(); + this.createConnections(); + } + } + + initWebGL() { + // Vertex shader + const vsSource = ` + attribute vec2 aVertexPosition; + attribute float aPointSize; + uniform vec2 uResolution; + + void main() { + // Convert from pixel to clip space + vec2 position = (aVertexPosition / uResolution) * 2.0 - 1.0; + // Flip Y coordinate + position.y = -position.y; + + gl_Position = vec4(position, 0, 1); + gl_PointSize = aPointSize; + } + `; + + // Fragment shader - Softer glow effect + const fsSource = ` + precision mediump float; + uniform vec4 uColor; + + void main() { + float distance = length(gl_PointCoord - vec2(0.5, 0.5)); + + // Softer glow with smoother falloff + float alpha = 1.0 - smoothstep(0.1, 0.5, distance); + alpha = pow(alpha, 1.5); // Make the glow even softer + + gl_FragColor = vec4(uColor.rgb, uColor.a * alpha); + } + `; + + // Initialize shaders + const vertexShader = this.loadShader(this.gl.VERTEX_SHADER, vsSource); + const fragmentShader = this.loadShader(this.gl.FRAGMENT_SHADER, fsSource); + + // Create shader program + this.shaderProgram = this.gl.createProgram(); + this.gl.attachShader(this.shaderProgram, vertexShader); + this.gl.attachShader(this.shaderProgram, fragmentShader); + this.gl.linkProgram(this.shaderProgram); + + if 
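+        // Rechenweg-Skizze zum Vertex-Shader oben: Pixelkoordinaten (0..Breite) werden
+        // durch die Auflösung geteilt (0..1) und mit *2-1 in den Clip-Space (-1..+1)
+        // abgebildet; y wird gespiegelt, weil Pixel-y nach unten, Clip-y nach oben wächst.
+        //   Beispiel: x = Breite / 2  =>  0.5 * 2 - 1 = 0 (horizontale Bildmitte)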
(!this.gl.getProgramParameter(this.shaderProgram, this.gl.LINK_STATUS)) { + console.error('Unable to initialize the shader program: ' + + this.gl.getProgramInfoLog(this.shaderProgram)); + return; + } + + // Get attribute and uniform locations + this.programInfo = { + program: this.shaderProgram, + attribLocations: { + vertexPosition: this.gl.getAttribLocation(this.shaderProgram, 'aVertexPosition'), + pointSize: this.gl.getAttribLocation(this.shaderProgram, 'aPointSize') + }, + uniformLocations: { + resolution: this.gl.getUniformLocation(this.shaderProgram, 'uResolution'), + color: this.gl.getUniformLocation(this.shaderProgram, 'uColor') + } + }; + + // Create buffers + this.positionBuffer = this.gl.createBuffer(); + this.sizeBuffer = this.gl.createBuffer(); + + // Set clear color for WebGL context + const bgColor = this.hexToRgb(this.darkModeColors.background); + + this.gl.clearColor(bgColor.r/255, bgColor.g/255, bgColor.b/255, 1.0); + } + + loadShader(type, source) { + const shader = this.gl.createShader(type); + this.gl.shaderSource(shader, source); + this.gl.compileShader(shader); + + if (!this.gl.getShaderParameter(shader, this.gl.COMPILE_STATUS)) { + console.error('An error occurred compiling the shaders: ' + + this.gl.getShaderInfoLog(shader)); + this.gl.deleteShader(shader); + return null; + } + + return shader; + } + + createNodes() { + this.nodes = []; + const width = this.canvas.width / (window.devicePixelRatio || 1); + const height = this.canvas.height / (window.devicePixelRatio || 1); + + // Erstelle Cluster-Zentren für neuronale Netzwerkmuster + const clusterCount = Math.floor(5 + Math.random() * 4); // 5-8 Cluster + const clusters = []; + + for (let i = 0; i < clusterCount; i++) { + clusters.push({ + x: Math.random() * width, + y: Math.random() * height, + radius: 100 + Math.random() * 150 + }); + } + + // Create nodes with random positions and properties + for (let i = 0; i < this.config.nodeCount; i++) { + // Entscheide, ob dieser Knoten zu einem Cluster gehört oder nicht + const useCluster = Math.random() < this.config.clusteringFactor; + let x, y; + + if (useCluster && clusters.length > 0) { + // Wähle ein zufälliges Cluster + const cluster = clusters[Math.floor(Math.random() * clusters.length)]; + const angle = Math.random() * Math.PI * 2; + const distance = Math.random() * cluster.radius; + + // Platziere in der Nähe des Clusters mit einiger Streuung + x = cluster.x + Math.cos(angle) * distance; + y = cluster.y + Math.sin(angle) * distance; + + // Stelle sicher, dass es innerhalb des Bildschirms bleibt + x = Math.max(0, Math.min(width, x)); + y = Math.max(0, Math.min(height, y)); + } else { + // Zufällige Position außerhalb von Clustern + x = Math.random() * width; + y = Math.random() * height; + } + + // Bestimme die Knotengröße - wichtigere Knoten (in Clustern) sind größer + const nodeImportance = useCluster ? 
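+        // Muster-Skizze zur Cluster-Platzierung oben: Polarkoordinaten um das
+        // Clusterzentrum,
+        //   x = cx + Math.cos(phi) * r,  y = cy + Math.sin(phi) * r,
+        // mit phi in [0, 2*PI) und r in [0, radius). Anmerkung: r = Math.random() * radius
+        // häuft Punkte zur Mitte; flächig gleichmäßig wäre r = radius * Math.sqrt(Math.random()).
+        // Die lokal gewürfelte Clusteranzahl (5-8) übergeht zudem config.clusterCount/clusterRadius.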
1.2 : 0.8; + const size = this.config.nodeSize * nodeImportance + Math.random() * this.config.nodeVariation; + + const node = { + x: x, + y: y, + size: size, + speed: { + x: (Math.random() - 0.5) * this.config.animationSpeed, + y: (Math.random() - 0.5) * this.config.animationSpeed + }, + pulsePhase: Math.random() * Math.PI * 2, // Random starting phase + connections: [], + isActive: Math.random() < 0.3, // Some nodes start active for neural firing effect + lastFired: 0, // For neural firing animation + firingRate: 1000 + Math.random() * 4000 // Random firing rate in ms + }; + + this.nodes.push(node); + } + } + + createConnections() { + this.connections = []; + this.flows = []; // Reset flows + + // Create connections between nearby nodes + for (let i = 0; i < this.nodes.length; i++) { + const nodeA = this.nodes[i]; + nodeA.connections = []; + + // Sortiere andere Knoten nach Entfernung für bevorzugte nahe Verbindungen + const potentialConnections = []; + + for (let j = 0; j < this.nodes.length; j++) { + if (i === j) continue; + + const nodeB = this.nodes[j]; + const dx = nodeB.x - nodeA.x; + const dy = nodeB.y - nodeA.y; + const distance = Math.sqrt(dx * dx + dy * dy); + + if (distance < this.config.connectionDistance) { + potentialConnections.push({ + index: j, + distance: distance + }); + } + } + + // Sortiere nach Entfernung + potentialConnections.sort((a, b) => a.distance - b.distance); + + // Wähle die nächsten N Verbindungen, maximal maxConnections + const maxConn = Math.min( + this.config.maxConnections, + potentialConnections.length, + 1 + Math.floor(Math.random() * this.config.maxConnections) + ); + + for (let c = 0; c < maxConn; c++) { + const connection = potentialConnections[c]; + const j = connection.index; + const nodeB = this.nodes[j]; + const distance = connection.distance; + + // Create weighted connection (closer = stronger) + const connectionStrength = Math.max(0, 1 - distance / this.config.connectionDistance); + const connOpacity = connectionStrength * this.config.connectionOpacity; + + // Check if connection already exists + if (!this.connections.some(conn => + (conn.from === i && conn.to === j) || (conn.from === j && conn.to === i) + )) { + // Neue Verbindung startet mit progress=0 für animierten Aufbau + this.connections.push({ + from: i, + to: j, + distance: distance, + opacity: connOpacity, + strength: connectionStrength, + hasFlow: false, + lastActivated: 0, + progress: 0 // Animationsfortschritt für Verbindungsaufbau + }); + nodeA.connections.push(j); + nodeB.connections.push(i); + } + } + } + } + + startAnimation() { + this.animationFrameId = requestAnimationFrame(this.animate.bind(this)); + } + + animate() { + // Update nodes + const width = this.canvas.width / (window.devicePixelRatio || 1); + const height = this.canvas.height / (window.devicePixelRatio || 1); + const now = Date.now(); + + // Simulate neural firing + for (let i = 0; i < this.nodes.length; i++) { + const node = this.nodes[i]; + + // Check if node should fire based on its firing rate + if (now - node.lastFired > node.firingRate) { + node.isActive = true; + node.lastFired = now; + + // Activate connected nodes with probability based on connection strength + for (const connIndex of node.connections) { + // Find the connection + const conn = this.connections.find(c => + (c.from === i && c.to === connIndex) || (c.from === connIndex && c.to === i) + ); + + if (conn) { + // Mark connection as recently activated + conn.lastActivated = now; + + // Create a flow along this connection + if (Math.random() 
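+        // Formel-Skizze zur Verbindungsstärke oben:
+        //   strength = Math.max(0, 1 - distance / connectionDistance)
+        // distance = 0 ergibt 1.0 (stärkste Verbindung), distance = connectionDistance
+        // ergibt 0; die Linienopazität skaliert zusätzlich mit config.connectionOpacity.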
< conn.strength * 0.8) { + this.flows.push({ + connection: conn, + progress: 0, + direction: conn.from === i, // Flow from activated node + length: this.config.flowLength + Math.random() * 0.1, + intensity: 0.7 + Math.random() * 0.3 // Random intensity for variation + }); + } + + // Probability for connected node to activate + if (Math.random() < conn.strength * 0.5) { + this.nodes[connIndex].isActive = true; + this.nodes[connIndex].lastFired = now - Math.random() * 500; // Slight variation + } + } + } + } else if (now - node.lastFired > 300) { // Deactivate after short period + node.isActive = false; + } + } + + // Animierter Verbindungsaufbau: progress inkrementieren + for (const connection of this.connections) { + if (connection.progress < 1) { + // Langsamer Aufbau: Geschwindigkeit kann angepasst werden + connection.progress += 0.012; // Sehr langsam, für subtilen Effekt + if (connection.progress > 1) connection.progress = 1; + } + } + + // Update flows + this.updateFlows(); + + // Occasionally create new flows along connections + if (Math.random() < this.config.flowDensity) { + this.createNewFlow(); + } + + // Recalculate connections occasionally for a living network + if (Math.random() < 0.01) { // Only recalculate 1% of the time for performance + this.createConnections(); + } + + // Render + if (this.useWebGL) { + this.renderWebGL(); + } else { + this.renderCanvas(); + } + + // Continue animation + this.animationFrameId = requestAnimationFrame(this.animate.bind(this)); + } + + // New method to update flow animations + updateFlows() { + // Update existing flows + const now = Date.now(); + + for (let i = this.flows.length - 1; i >= 0; i--) { + const flow = this.flows[i]; + + // Update flow progress + flow.progress += this.config.flowSpeed / flow.connection.distance; + + // Update line progress (langsamer als der Blitz) + flow.lineProgress = Math.min(flow.progress * 1.5, 1.0); + + // Remove completed flows + if (flow.progress > 1.0) { + this.flows.splice(i, 1); + } + } + + // Generate new flows selten und kontrolliert + if (Math.random() < this.config.flowDensity && + (!this.config.sequentialFlows || + now - this.lastFlowTime >= this.flowInterval)) { + this.createNewFlow(); + } + } + + // New method to create flow animations + createNewFlow() { + if (this.connections.length === 0) return; + + // Überprüfen, ob maximale Anzahl gleichzeitiger Flüsse erreicht ist + if (this.config.sequentialFlows && + this.flows.length >= this.config.maxSimultaneousFlows) { + return; + } + + // Überprüfen, ob genügend Zeit seit dem letzten Fluss vergangen ist + const now = Date.now(); + if (this.config.sequentialFlows && + now - this.lastFlowTime < this.flowInterval) { + return; + } + + // Zeit des letzten Flusses aktualisieren + this.lastFlowTime = now; + + // Select a random connection with preference for more connected nodes + let connectionIdx = Math.floor(Math.random() * this.connections.length); + let attempts = 0; + + // Try to find a connection with more connected nodes + while (attempts < 5) { + const testIdx = Math.floor(Math.random() * this.connections.length); + const testConn = this.connections[testIdx]; + const fromNode = this.nodes[testConn.from]; + + if (fromNode.connections.length > 2) { + connectionIdx = testIdx; + break; + } + attempts++; + } + + const connection = this.connections[connectionIdx]; + + // Create a new flow along this connection + this.flows.push({ + connection: connection, + progress: 0, + direction: Math.random() > 0.5, // Randomly decide direction + length: 
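+        // Hinweis-Skizze zu updateFlows oben: progress wächst pro Frame um
+        // flowSpeed / distance, sodass Flüsse unabhängig von der Länge der Verbindung
+        // etwa dieselbe Pixelgeschwindigkeit haben; ein Fluss benötigt damit rund
+        // distance / flowSpeed Frames, bis progress den Wert 1 erreicht.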
this.config.flowLength + Math.random() * 0.1, // Slightly vary lengths + lineProgress: 0 // Fortschritt der unterliegenden Strichlinie (startet bei 0) + }); + } + + renderWebGL() { + this.gl.clear(this.gl.COLOR_BUFFER_BIT); + + const width = this.canvas.width / (window.devicePixelRatio || 1); + const height = this.canvas.height / (window.devicePixelRatio || 1); + + // Select shader program + this.gl.useProgram(this.programInfo.program); + + // Set resolution uniform + this.gl.uniform2f(this.programInfo.uniformLocations.resolution, width, height); + + // Draw connections first (behind nodes) + this.renderConnectionsWebGL(); + + // Draw flows on top of connections + this.renderFlowsWebGL(); + + // Draw nodes + this.renderNodesWebGL(); + } + + renderNodesWebGL() { + // Prepare node positions for WebGL + const positions = new Float32Array(this.nodes.length * 2); + const sizes = new Float32Array(this.nodes.length); + + const now = Date.now(); + + for (let i = 0; i < this.nodes.length; i++) { + const node = this.nodes[i]; + positions[i * 2] = node.x; + positions[i * 2 + 1] = node.y; + + // Maximales Pulsieren und sehr große Knoten + let pulse = Math.sin(node.pulsePhase) * 0.38 + 1.35; + const connectivityFactor = 1 + (node.connections.length / this.config.maxConnections) * 1.1; + sizes[i] = node.size * pulse * connectivityFactor; + } + + // Bind position buffer + this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.positionBuffer); + this.gl.bufferData(this.gl.ARRAY_BUFFER, positions, this.gl.STATIC_DRAW); + this.gl.vertexAttribPointer( + this.programInfo.attribLocations.vertexPosition, + 2, // components per vertex + this.gl.FLOAT, // data type + false, // normalize + 0, // stride + 0 // offset + ); + this.gl.enableVertexAttribArray(this.programInfo.attribLocations.vertexPosition); + + // Bind size buffer + this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.sizeBuffer); + this.gl.bufferData(this.gl.ARRAY_BUFFER, sizes, this.gl.STATIC_DRAW); + this.gl.vertexAttribPointer( + this.programInfo.attribLocations.pointSize, + 1, // components per vertex + this.gl.FLOAT, // data type + false, // normalize + 0, // stride + 0 // offset + ); + this.gl.enableVertexAttribArray(this.programInfo.attribLocations.pointSize); + + // Enable blending for all nodes + this.gl.enable(this.gl.BLEND); + this.gl.blendFunc(this.gl.SRC_ALPHA, this.gl.ONE); // Additive blending for glow + + // Draw each node individually with its own color + for (let i = 0; i < this.nodes.length; i++) { + const node = this.nodes[i]; + + // Set node color - more visible with active highlighting + const colorObj = this.isDarkMode ? this.darkModeColors : this.lightModeColors; + const nodeColor = this.hexToRgb(colorObj.nodeColor); + const nodePulseColor = this.hexToRgb(colorObj.nodePulse); + + // Use pulse color for active nodes + let r = nodeColor.r / 255; + let g = nodeColor.g / 255; + let b = nodeColor.b / 255; + + // Active nodes get brighter color + if (node.isActive) { + r = (r + nodePulseColor.r / 255) / 2; + g = (g + nodePulseColor.g / 255) / 2; + b = (b + nodePulseColor.b / 255) / 2; + } + + // Sehr sichtbare, maximal leuchtende Knoten + this.gl.uniform4f( + this.programInfo.uniformLocations.color, + r, g, b, + node.isActive ? 
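+        // Anmerkungs-Skizze zu renderNodesWebGL oben: ein drawArrays-Aufruf je Knoten
+        // erlaubt individuelle Farben über das Uniform uColor, kostet aber einen
+        // Draw Call pro Knoten; bei deutlich mehr Knoten wäre ein zusätzliches
+        // Farb-Attribut (vec4-Buffer je Vertex) die gängigere Alternative.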
1.0 : 0.92 // Maximal sichtbar + ); + + // Draw each node individually for better control + this.gl.drawArrays(this.gl.POINTS, i, 1); + } + } + + renderConnectionsWebGL() { + // Permanente Verbindungsstriche werden nicht mehr gezeichnet + // Diese Methode bleibt leer, damit keine Linien mehr erscheinen + } + + // New method to render the flowing animations + renderFlowsWebGL() { + // Für jeden Flow zuerst die unterliegende Strichlinie zeichnen, dann den Blitz + for (const flow of this.flows) { + const connection = flow.connection; + const fromNode = this.nodes[connection.from]; + const toNode = this.nodes[connection.to]; + const direction = flow.direction ? 1 : -1; + + // 1. Die unterliegende Strichlinie zeichnen + if (this.config.lineUnderFlow) { + const lineStart = direction > 0 ? fromNode : toNode; + const lineEnd = direction > 0 ? toNode : fromNode; + + // Strichlinie als gerade Linie mit voller Länge, aber mit Fortschritt + const lineEndPoint = { + x: lineStart.x + (lineEnd.x - lineStart.x) * flow.lineProgress, + y: lineStart.y + (lineEnd.y - lineStart.y) * flow.lineProgress + }; + + const positions = new Float32Array([ + lineStart.x, lineStart.y, + lineEndPoint.x, lineEndPoint.y + ]); + + this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.positionBuffer); + this.gl.bufferData(this.gl.ARRAY_BUFFER, positions, this.gl.STATIC_DRAW); + this.gl.vertexAttribPointer( + this.programInfo.attribLocations.vertexPosition, + 2, + this.gl.FLOAT, + false, + 0, + 0 + ); + this.gl.enableVertexAttribArray(this.programInfo.attribLocations.vertexPosition); + this.gl.disableVertexAttribArray(this.programInfo.attribLocations.pointSize); + + // Sanftere, transparentere Strichlinie + const colorObj = this.isDarkMode ? this.darkModeColors : this.lightModeColors; + const lineColor = this.hexToRgb(colorObj.lineUnderFlowColor || colorObj.connectionColor); + + this.gl.uniform4f( + this.programInfo.uniformLocations.color, + lineColor.r / 255, + lineColor.g / 255, + lineColor.b / 255, + this.config.lineUnderFlowOpacity // Sehr subtile Linie + ); + + this.gl.enable(this.gl.BLEND); + this.gl.blendFunc(this.gl.SRC_ALPHA, this.gl.ONE_MINUS_SRC_ALPHA); // Normales Alpha-Blending + this.gl.lineWidth(this.config.lineUnderFlowWidth); // Dünne Linie + this.gl.drawArrays(this.gl.LINES, 0, 2); + } + + // 2. 
Den Blitz selbst zeichnen + const startProgress = flow.progress; + const endProgress = Math.min(1, startProgress + flow.length); + if (startProgress >= 1 || endProgress <= 0) continue; + + let p1, p2; + if (direction > 0) { + p1 = { + x: fromNode.x + (toNode.x - fromNode.x) * startProgress, + y: fromNode.y + (toNode.y - fromNode.y) * startProgress + }; + p2 = { + x: fromNode.x + (toNode.x - fromNode.x) * endProgress, + y: fromNode.y + (toNode.y - fromNode.y) * endProgress + }; + } else { + p1 = { + x: toNode.x + (fromNode.x - toNode.x) * startProgress, + y: toNode.y + (fromNode.y - toNode.y) * startProgress + }; + p2 = { + x: toNode.x + (fromNode.x - toNode.x) * endProgress, + y: toNode.y + (fromNode.y - toNode.y) * endProgress + }; + } + + // Zickzack-Blitz generieren (weniger ausgeprägt) + const zigzag = this.generateZigZagPoints(p1, p2, 3, 5); // Weniger Segmente, kleinere Amplitude + for (let i = 0; i < zigzag.length - 1; i++) { + const positions = new Float32Array([ + zigzag[i].x, zigzag[i].y, + zigzag[i + 1].x, zigzag[i + 1].y + ]); + this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.positionBuffer); + this.gl.bufferData(this.gl.ARRAY_BUFFER, positions, this.gl.STATIC_DRAW); + this.gl.vertexAttribPointer( + this.programInfo.attribLocations.vertexPosition, + 2, + this.gl.FLOAT, + false, + 0, + 0 + ); + this.gl.enableVertexAttribArray(this.programInfo.attribLocations.vertexPosition); + this.gl.disableVertexAttribArray(this.programInfo.attribLocations.pointSize); + + // Dezenter, subtilerer Blitz + const colorObj = this.isDarkMode ? this.darkModeColors : this.lightModeColors; + const flowColor = this.hexToRgb(colorObj.flowColor); + this.gl.uniform4f( + this.programInfo.uniformLocations.color, + flowColor.r / 255, + flowColor.g / 255, + flowColor.b / 255, + 0.45 // Geringere Sichtbarkeit + ); + this.gl.enable(this.gl.BLEND); + this.gl.blendFunc(this.gl.SRC_ALPHA, this.gl.ONE); + this.gl.lineWidth(1.0); // Dünnerer Blitz für subtileren Effekt + this.gl.drawArrays(this.gl.LINES, 0, 2); + } + + // Funken erzeugen - weniger und subtilere elektrische Funken + const sparks = this.generateSparkPoints(zigzag, 2 + Math.floor(Math.random() * 2)); // Weniger Funken + for (const spark of sparks) { + // Sanfteres Weiß-Blau für subtilere elektrische Funken + this.gl.uniform4f( + this.programInfo.uniformLocations.color, + 0.85, 0.9, 0.95, 0.4 * spark.intensity // Geringere Intensität + ); + + // Position für den Funken setzen + const sparkPos = new Float32Array([spark.x, spark.y]); + this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.positionBuffer); + this.gl.bufferData(this.gl.ARRAY_BUFFER, sparkPos, this.gl.STATIC_DRAW); + + // Punktgröße setzen + const sizes = new Float32Array([spark.size * 2.0]); // Kleinere Funken + this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.sizeBuffer); + this.gl.bufferData(this.gl.ARRAY_BUFFER, sizes, this.gl.STATIC_DRAW); + + this.gl.vertexAttribPointer( + this.programInfo.attribLocations.vertexPosition, + 2, + this.gl.FLOAT, + false, + 0, + 0 + ); + this.gl.enableVertexAttribArray(this.programInfo.attribLocations.vertexPosition); + + this.gl.vertexAttribPointer( + this.programInfo.attribLocations.pointSize, + 1, + this.gl.FLOAT, + false, + 0, + 0 + ); + this.gl.enableVertexAttribArray(this.programInfo.attribLocations.pointSize); + + this.gl.drawArrays(this.gl.POINTS, 0, 1); + } + } + } + + renderCanvas() { + if (!this.ctx) return; + + const { background, nodeColor, connectionColor } = this.isDarkMode ? 
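+        // Rechenweg-Skizze zum Zickzack-Blitz oben (Details in generateZigZagPoints
+        // weiter unten): der Versatz steht senkrecht zur Verbindungsgeraden,
+        //   angle = Math.atan2(dy, dx) + Math.PI / 2,
+        // und das alternierende Vorzeichen über (i % 2) legt die Zwischenpunkte
+        // abwechselnd links und rechts der Geraden ab.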
+        this.darkModeColors : this.lightModeColors;
+
+        // Hintergrund sanft löschen mit leichter Transparenz für Nachleuchten
+        this.ctx.fillStyle = background;
+        this.ctx.globalAlpha = 0.22; // Sehr subtile Überblendung für sanfte Bewegung
+        this.ctx.fillRect(0, 0, this.canvas.width, this.canvas.height);
+        this.ctx.globalAlpha = 1.0;
+
+        // Verbindungen zeichnen mit distanzabhängiger Transparenz
+        this.connections.forEach(connection => {
+            // Verbindungen speichern Knoten-Indizes (from/to, siehe createConnections)
+            const source = this.nodes[connection.from];
+            const target = this.nodes[connection.to];
+            const strength = connection.strength;
+            const dx = target.x - source.x;
+            const dy = target.y - source.y;
+            const distance = Math.sqrt(dx * dx + dy * dy);
+
+            // Berechne Opazität basierend auf Distanz - je weiter entfernt, desto transparenter
+            const maxOpacity = 0.02; // Sehr subtile maximale Opazität
+            const opacityFactor = 1 - (distance / this.config.connectionDistance);
+            let opacity = maxOpacity * opacityFactor * strength * this.config.connectionOpacity;
+
+            // Subtilere Linien
+            this.ctx.beginPath();
+            this.ctx.moveTo(source.x, source.y);
+            this.ctx.lineTo(target.x, target.y);
+            this.ctx.lineWidth = 0.4; // Sehr dünne Linie
+            this.ctx.strokeStyle = connectionColor;
+            this.ctx.globalAlpha = opacity;
+            this.ctx.stroke();
+            this.ctx.globalAlpha = 1.0;
+        });
+
+        // Knoten zeichnen mit subtilerem Glühen und Pulsieren
+        this.nodes.forEach(node => {
+            // node.size ist bereits mit config.nodeSize skaliert (siehe createNodes)
+            const nodeBaseSize = node.size;
+            // Pulsieren über die beim Erzeugen gesetzte Phase (node.pulsePhase)
+            const pulse = Math.sin(Date.now() * this.config.pulseSpeed + node.pulsePhase) * 0.1 + 1; // Reduzierte Pulsamplitude
+            const glowSize = nodeBaseSize * 1.5; // Kleinerer Glowradius
+
+            // Aktive Knoten speziell hervorheben, aber subtiler
+            if (node.isActive) {
+                // Subtiles Glühen für aktive Knoten
+                const gradient = this.ctx.createRadialGradient(
+                    node.x, node.y, 0,
+                    node.x, node.y, glowSize * 2
+                );
+                this.ctx.globalAlpha = 0.09; // Sehr niedrige Opazität für subtiles Glühen
+                gradient.addColorStop(0, 'rgba(255, 255, 255, 0.15)');
+                gradient.addColorStop(1, 'rgba(255, 255, 255, 0)');
+
+                this.ctx.fillStyle = gradient;
+                this.ctx.beginPath();
+                this.ctx.arc(node.x, node.y, glowSize * 2, 0, Math.PI * 2);
+                this.ctx.fill();
+
+                // Zeichne aktiven Knoten
+                this.ctx.globalAlpha = 0.9;
+                this.ctx.fillStyle = nodeColor;
+                this.ctx.beginPath();
+                this.ctx.arc(node.x, node.y, nodeBaseSize * pulse, 0, Math.PI * 2);
+                this.ctx.fill();
+            } else {
+                // Inaktive Knoten sanfter darstellen
+                this.ctx.globalAlpha = 0.8;
+                this.ctx.fillStyle = nodeColor;
+                this.ctx.beginPath();
+                this.ctx.arc(node.x, node.y, nodeBaseSize * pulse * 0.8, 0, Math.PI * 2);
+                this.ctx.fill();
+            }
+            this.ctx.globalAlpha = 1.0;
+        });
+
+        // Flüsse zuletzt zeichnen, damit sie über Verbindungen und Knoten liegen
+        this.renderFlowsCanvas();
+    }
+
+    renderFlowsCanvas() {
+        if (!this.ctx) return;
+
+        const { flowColor } = this.isDarkMode ?
this.darkModeColors : this.lightModeColors; + + this.flows.forEach(flow => { + if (flow.progress > 1) return; + + const { source, target, progress } = flow; + + // Berechne aktuelle Position + const dx = target.x - source.x; + const dy = target.y - source.y; + const currentX = source.x + dx * progress; + const currentY = source.y + dy * progress; + + // Zick-Zack-Effekt reduzieren für sanftere Bewegung + const segmentCount = 4; // Weniger Segmente + const amplitude = 2; // Geringere Amplitude + const waveLength = 0.2; + + let prevX = source.x; + let prevY = source.y; + + this.ctx.beginPath(); + this.ctx.moveTo(prevX, prevY); + + // Sanftere "Elektrizitäts"-Linie + for (let i = 0; i <= segmentCount; i++) { + const segmentProgress = i / segmentCount; + if (segmentProgress > progress) break; + + const segX = source.x + dx * segmentProgress; + const segY = source.y + dy * segmentProgress; + + // Sanftere Wellen + const perpX = -dy * Math.sin(segmentProgress * Math.PI * 10) * amplitude; + const perpY = dx * Math.sin(segmentProgress * Math.PI * 10) * amplitude; + + const pointX = segX + perpX * Math.sin(this.time * 0.01); + const pointY = segY + perpY * Math.sin(this.time * 0.01); + + this.ctx.lineTo(pointX, pointY); + prevX = pointX; + prevY = pointY; + } + + // Linienausrichtung und -stil + this.ctx.lineCap = 'round'; + this.ctx.lineWidth = 1.2; // Dünnere Linie + + // Subtiler Gloweffekt + this.ctx.shadowColor = flowColor; + this.ctx.shadowBlur = 6; // Reduzierter Unschärferadius + + this.ctx.strokeStyle = flowColor; + this.ctx.globalAlpha = 0.7 - 0.5 * Math.abs(progress - 0.5); // Sanfteres Ein- und Ausblenden + this.ctx.stroke(); + this.ctx.globalAlpha = 1; + this.ctx.shadowBlur = 0; + + // Funkeneffekte am Ende der Linie, aber weniger und kleiner + if (progress > 0.9 && Math.random() < 0.3) { // Weniger Funken generieren + const sparkCount = Math.floor(Math.random() * 2) + 1; // Maximal 3 Funken + + for (let i = 0; i < sparkCount; i++) { + const sparkSize = Math.random() * 0.8 + 0.4; // Kleinere Funken + const sparkAngle = Math.random() * Math.PI * 2; + const sparkDistance = Math.random() * 5 + 2; + + const sparkX = currentX + Math.cos(sparkAngle) * sparkDistance; + const sparkY = currentY + Math.sin(sparkAngle) * sparkDistance; + + // Weniger intensive Funken + this.ctx.beginPath(); + this.ctx.arc(sparkX, sparkY, sparkSize, 0, Math.PI * 2); + this.ctx.fillStyle = flowColor; + this.ctx.globalAlpha = 0.4; // Reduzierte Opazität + this.ctx.fill(); + this.ctx.globalAlpha = 1; + } + } + }); + } + + // Helper method to convert hex to RGB + hexToRgb(hex) { + // Remove # if present + hex = hex.replace(/^#/, ''); + + // Handle rgba hex format + let alpha = 1; + if (hex.length === 8) { + alpha = parseInt(hex.slice(6, 8), 16) / 255; + hex = hex.slice(0, 6); + } + + // Parse hex values + const bigint = parseInt(hex, 16); + const r = (bigint >> 16) & 255; + const g = (bigint >> 8) & 255; + const b = bigint & 255; + + return { r, g, b, a: alpha }; + } + + // Cleanup method + destroy() { + if (this.animationFrameId) { + cancelAnimationFrame(this.animationFrameId); + } + + window.removeEventListener('resize', this.resizeCanvas.bind(this)); + + if (this.canvas && this.canvas.parentNode) { + this.canvas.parentNode.removeChild(this.canvas); + } + + if (this.gl) { + // Clean up WebGL resources + this.gl.deleteBuffer(this.positionBuffer); + this.gl.deleteBuffer(this.sizeBuffer); + this.gl.deleteProgram(this.shaderProgram); + } + } + + // Hilfsfunktion: Erzeuge Zickzack-Punkte für einen Blitz + 
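+    // Algorithm sketch: place segments - 1 interior points evenly along the
+    // straight line from start to end, then push each one off the line along
+    // the perpendicular by a random offset whose sign alternates per point.
+    // With segments = 5 this yields 6 points total (4 interior + both endpoints).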
+    generateZigZagPoints(start, end, segments = 5, amplitude = 8) {
+        const points = [start];
+        for (let i = 1; i < segments; i++) {
+            const t = i / segments;
+            const x = start.x + (end.x - start.x) * t;
+            const y = start.y + (end.y - start.y) * t;
+            // Perpendicular offset for the zigzag
+            const angle = Math.atan2(end.y - start.y, end.x - start.x) + Math.PI / 2;
+            const offset = (Math.random() - 0.5) * amplitude * (i % 2 === 0 ? 1 : -1);
+            points.push({
+                x: x + Math.cos(angle) * offset,
+                y: y + Math.sin(angle) * offset
+            });
+        }
+        points.push(end);
+        return points;
+    }
+
+    // Helper: generate spark points along a zigzag bolt
+    generateSparkPoints(zigzag, sparkCount = 4) {
+        const sparks = [];
+        // Moderate spark count for a subtler effect
+        const actualSparkCount = sparkCount + Math.floor(Math.random() * 2);
+
+        for (let i = 0; i < actualSparkCount; i++) {
+            // Distribute along the whole bolt
+            const seg = i * (zigzag.length - 1) / actualSparkCount;
+            const segIndex = Math.floor(seg);
+            const t = seg - segIndex;
+
+            // Base position
+            const x = zigzag[segIndex].x + (zigzag[segIndex + 1].x - zigzag[segIndex].x) * t;
+            const y = zigzag[segIndex].y + (zigzag[segIndex + 1].y - zigzag[segIndex].y) * t;
+
+            // Gentle offset away from the bolt
+            const angle = Math.random() * Math.PI * 2;
+            const distance = 2 + Math.random() * 4; // Reduced distance for a subtler spray
+
+            sparks.push({
+                x: x + Math.cos(angle) * distance,
+                y: y + Math.sin(angle) * distance,
+                size: 0.5 + Math.random() * 1, // Smaller sparks
+                intensity: 0.5 + Math.random() * 0.5 // Brightness factor consumed by renderFlowsWebGL
+            });
+        }
+        return sparks;
+    }
+
+    onResize() {
+        // Debounce rapid resize events
+        if (this.resizeTimeout) clearTimeout(this.resizeTimeout);
+
+        this.resizeTimeout = setTimeout(() => {
+            this.resizeCanvas();
+
+            // Rebuild nodes and connections to fit the new size
+            this.createNodes();
+            this.createConnections();
+        }, 200); // Wait 200 ms before applying the resize
+    }
+
+    setDarkMode(isDarkMode) {
+        this.isDarkMode = isDarkMode;
+
+        // Update the WebGL clear colour
+        if (this.useWebGL && this.gl) {
+            const bgColor = this.hexToRgb(
+                this.isDarkMode ?
this.darkModeColors.background : this.lightModeColors.background + ); + this.gl.clearColor(bgColor.r/255, bgColor.g/255, bgColor.b/255, 1.0); + } + } +} + +// Initialize when DOM is loaded +document.addEventListener('DOMContentLoaded', () => { + // Short delay to ensure DOM is fully loaded + setTimeout(() => { + if (!window.neuralNetworkBackground) { + console.log('Creating Neural Network Background'); + window.neuralNetworkBackground = new NeuralNetworkBackground(); + } + }, 100); +}); + +// Re-initialize when page is fully loaded (for safety) +window.addEventListener('load', () => { + if (!window.neuralNetworkBackground) { + console.log('Re-initializing Neural Network Background on full load'); + window.neuralNetworkBackground = new NeuralNetworkBackground(); + } +}); + +// Clean up when window is closed +window.addEventListener('beforeunload', () => { + if (window.neuralNetworkBackground) { + window.neuralNetworkBackground.destroy(); + } +}); \ No newline at end of file diff --git a/static/neural-network-background.js b/static/neural-network-background.js new file mode 100644 index 0000000..2cc56c2 --- /dev/null +++ b/static/neural-network-background.js @@ -0,0 +1,2096 @@ +/** + * Neural Network Background Animation + * Modern, darker, mystical theme using WebGL + * Subtle flowing network aesthetic + */ + +class NeuralNetworkBackground { + constructor() { + // Canvas setup + this.canvas = document.createElement('canvas'); + this.canvas.id = 'neural-network-background'; + this.canvas.style.position = 'fixed'; + this.canvas.style.top = '0'; + this.canvas.style.left = '0'; + this.canvas.style.width = '100%'; + this.canvas.style.height = '100%'; + this.canvas.style.zIndex = '-10'; // Ensure it's behind content but visible + this.canvas.style.pointerEvents = 'none'; + this.canvas.style.opacity = '1'; // Force visibility + this.canvas.style.transition = 'opacity 3.5s ease-in-out'; // Längere Übergangsanimation (von 1.5s auf 3.5s) + + // If canvas already exists, remove it first + const existingCanvas = document.getElementById('neural-network-background'); + if (existingCanvas) { + existingCanvas.remove(); + } + + // Append to body as first child to ensure it's behind everything + if (document.body.firstChild) { + document.body.insertBefore(this.canvas, document.body.firstChild); + } else { + document.body.appendChild(this.canvas); + } + + // WebGL context + this.gl = this.canvas.getContext('webgl') || this.canvas.getContext('experimental-webgl'); + if (!this.gl) { + console.warn('WebGL not supported, falling back to canvas rendering'); + this.gl = null; + this.ctx = this.canvas.getContext('2d'); + this.useWebGL = false; + } else { + this.useWebGL = true; + } + + // Animation properties + this.nodes = []; + this.connections = []; + this.flows = []; // Flow animations along connections + this.animationFrameId = null; + this.isDarkMode = true; // Always use dark mode for the background + this.isDestroying = false; // Flag für den Ausblendeprozess + + // Colors - Lila Farbpalette mit intensiveren Effekten + this.darkModeColors = { + background: '#040215', // Dunklerer Hintergrund mit leichtem Violett-Anteil + nodeColor: '#6a5498', // Lila-basierte Knotenfarbe + nodePulse: '#9c7fe0', // Helleres Lila für Pulsieren + connectionColor: '#4a3870', // Dunklere Lila-Verbindungen + flowColor: '#b47fea' // Lebendiges Lila für Blitze + }; + + // Farben für Light Mode auch mit Lila-Akzenten + this.lightModeColors = { + background: '#f8f6fc', // Sehr heller Lila-Hintergrund + nodeColor: '#6a5498', // Gleiche 
Lila-Töne wie im Dark Mode + nodePulse: '#9c7fe0', // für konsistentes Erscheinungsbild + connectionColor: '#7e70a5', // Helleres Lila für Verbindungen + flowColor: '#a06cd5' // Sanfteres Lila für Blitze + }; + + // Konfigurationsobjekt für schlangenartige, leuchtende Blitze und permanentes Netzwerk + this.config = { + nodeCount: 100, // Weniger Knoten für klarere Strukturen + nodeSize: 3.2, // Etwas größere Knoten für bessere Sichtbarkeit + nodeVariation: 8, // Reduzierte Varianz für konsistenteres Erscheinungsbild + connectionDistance: 100, // Größere Verbindungsdistanz für weitläufigeres Netzwerk + connectionOpacity: 0.3, // Erhöhte Opazität für sichtbarere dauerhafte Verbindungen + animationSpeed: 0.1, // Noch langsamere Animation für sanftere Bewegung + pulseSpeed: 0, // Sehr langsames Pulsieren für ruhige Animation + flowSpeed: 0.15, // Deutlich langsamere Blitze für schlangenartige Bewegung + flowDensity: 2, // Weniger gleichzeitige Blitze für bessere Verteilung + flowLength: 0.05, // Längere einzelne Flows für sichtbare "Schlangen" + maxConnections: 12, // Mehr Verbindungen pro Neuron für dichtere Netzwerke + clusteringFactor: 0.005, // Stärkeres Clustering für deutlichere Strukturen + linesFadeDuration: 600, // Noch längere Dauer fürs Ein-/Ausblenden für permanentes Netzwerk (ms) + linesWidth: 1.0, // Etwas dickere Linien für bessere Sichtbarkeit + linesOpacity: 0.75, // Höhere Opazität für das permanente Netzwerk + maxFlowCount: 1, // Weniger gleichzeitige Flows für bessere Übersicht + glowIntensity: 1.0, // Verstärkter Glow-Effekt + sparkCount: 1, // Mehr Funken + sparkSize: 1, // Größere Funken + blurRadius: 10, // Blur-Radius für Glow-Effekte + minActiveFlows: 1, // Minimale Anzahl aktiver Flows + flowForwardingRate: 1.0, // 100% Weiterleitungsrate für endlose Flows + maxFlowGenerations: 10 // Sehr hohe Generationsgrenze für endlose Animation + }; + + // Initialize + this.init(); + + // Event listeners + window.addEventListener('resize', this.resizeCanvas.bind(this)); + document.addEventListener('darkModeToggled', (event) => { + this.isDarkMode = event.detail.isDark; + }); + + // Log that the background is initialized + console.log('Neural Network Background initialized'); + } + + init() { + this.resizeCanvas(); + + if (this.useWebGL) { + this.initWebGL(); + } + + this.createNodes(); + this.createConnections(); + this.startAnimation(); + } + + resizeCanvas() { + const pixelRatio = window.devicePixelRatio || 1; + const width = window.innerWidth; + const height = window.innerHeight; + + this.canvas.style.width = width + 'px'; + this.canvas.style.height = height + 'px'; + this.canvas.width = width * pixelRatio; + this.canvas.height = height * pixelRatio; + + if (this.useWebGL) { + this.gl.viewport(0, 0, this.canvas.width, this.canvas.height); + } else if (this.ctx) { + this.ctx.scale(pixelRatio, pixelRatio); + } + + // Recalculate node positions after resize + if (this.nodes.length) { + this.createNodes(); + this.createConnections(); + } + } + + initWebGL() { + // Vertex shader + const vsSource = ` + attribute vec2 aVertexPosition; + attribute float aPointSize; + uniform vec2 uResolution; + + void main() { + // Convert from pixel to clip space + vec2 position = (aVertexPosition / uResolution) * 2.0 - 1.0; + // Flip Y coordinate + position.y = -position.y; + + gl_Position = vec4(position, 0, 1); + gl_PointSize = aPointSize; + } + `; + + // Fragment shader - Softer glow effect + const fsSource = ` + precision mediump float; + uniform vec4 uColor; + + void main() { + float distance 
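+                // Falloff sketch (illustrative values): alpha = 1.0 - smoothstep(0.1, 0.5, d)
+                // is 1.0 inside d <= 0.1 and 0.0 beyond d >= 0.5; at d = 0.3 it is 0.5,
+                // and pow(alpha, 1.5) lowers that to ~0.35 for a softer point-sprite edge.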
= length(gl_PointCoord - vec2(0.5, 0.5)); + + // Softer glow with smoother falloff + float alpha = 1.0 - smoothstep(0.1, 0.5, distance); + alpha = pow(alpha, 1.5); // Make the glow even softer + + gl_FragColor = vec4(uColor.rgb, uColor.a * alpha); + } + `; + + // Initialize shaders + const vertexShader = this.loadShader(this.gl.VERTEX_SHADER, vsSource); + const fragmentShader = this.loadShader(this.gl.FRAGMENT_SHADER, fsSource); + + // Create shader program + this.shaderProgram = this.gl.createProgram(); + this.gl.attachShader(this.shaderProgram, vertexShader); + this.gl.attachShader(this.shaderProgram, fragmentShader); + this.gl.linkProgram(this.shaderProgram); + + if (!this.gl.getProgramParameter(this.shaderProgram, this.gl.LINK_STATUS)) { + console.error('Unable to initialize the shader program: ' + + this.gl.getProgramInfoLog(this.shaderProgram)); + return; + } + + // Get attribute and uniform locations + this.programInfo = { + program: this.shaderProgram, + attribLocations: { + vertexPosition: this.gl.getAttribLocation(this.shaderProgram, 'aVertexPosition'), + pointSize: this.gl.getAttribLocation(this.shaderProgram, 'aPointSize') + }, + uniformLocations: { + resolution: this.gl.getUniformLocation(this.shaderProgram, 'uResolution'), + color: this.gl.getUniformLocation(this.shaderProgram, 'uColor') + } + }; + + // Create buffers + this.positionBuffer = this.gl.createBuffer(); + this.sizeBuffer = this.gl.createBuffer(); + + // Set clear color for WebGL context + const bgColor = this.hexToRgb(this.darkModeColors.background); + + this.gl.clearColor(bgColor.r/255, bgColor.g/255, bgColor.b/255, 1.0); + } + + loadShader(type, source) { + const shader = this.gl.createShader(type); + this.gl.shaderSource(shader, source); + this.gl.compileShader(shader); + + if (!this.gl.getShaderParameter(shader, this.gl.COMPILE_STATUS)) { + console.error('An error occurred compiling the shaders: ' + + this.gl.getShaderInfoLog(shader)); + this.gl.deleteShader(shader); + return null; + } + + return shader; + } + + createNodes() { + this.nodes = []; + const width = this.canvas.width / (window.devicePixelRatio || 1); + const height = this.canvas.height / (window.devicePixelRatio || 1); + + // Erstelle Cluster-Zentren für neuronale Netzwerkmuster mit erhöhter Komplexität + const clusterCount = Math.floor(8 + Math.random() * 6); // 8-13 Cluster für mehr Dichte + const clusters = []; + + for (let i = 0; i < clusterCount; i++) { + // Dynamischere Clustergrößen mit größerer Variationsbreite + const baseRadius = 120 + Math.random() * 200; // Größere Basisradien + const radiusVariation = 0.3 + Math.random() * 0.4; // 30-70% Variation + + clusters.push({ + x: Math.random() * width, + y: Math.random() * height, + radius: baseRadius * (1 + radiusVariation), // Dynamischere Größen + density: 0.7 + Math.random() * 0.3, // Dichtefaktor für Knotenverteilung + influence: 0.5 + Math.random() * 0.5 // Einflussstärke auf umliegende Knoten + }); + } + + // Create nodes with random positions and properties + for (let i = 0; i < this.config.nodeCount; i++) { + // Entscheide, ob dieser Knoten zu einem Cluster gehört oder nicht + const useCluster = Math.random() < this.config.clusteringFactor; + let x, y; + + if (useCluster && clusters.length > 0) { + // Wähle ein zufälliges Cluster + const cluster = clusters[Math.floor(Math.random() * clusters.length)]; + const angle = Math.random() * Math.PI * 2; + const distance = Math.random() * cluster.radius; + + // Platziere in der Nähe des Clusters mit einiger Streuung + x = cluster.x + 
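+                // Polar scatter around the cluster centre: with centre (400, 300),
+                // angle 0 and distance 50 (illustrative), the node lands at (450, 300).
+                // A uniformly random distance places more nodes per unit area near
+                // the centre, which is what gives clusters their dense core.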
Math.cos(angle) * distance; + y = cluster.y + Math.sin(angle) * distance; + + // Stelle sicher, dass es innerhalb des Bildschirms bleibt + x = Math.max(0, Math.min(width, x)); + y = Math.max(0, Math.min(height, y)); + } else { + // Zufällige Position außerhalb von Clustern + x = Math.random() * width; + y = Math.random() * height; + } + + // Bestimme die Knotengröße - wichtigere Knoten (in Clustern) sind deutlich größer + const nodeImportance = useCluster ? 1.8 : 0.6; // Erhöhter Kontrast zwischen Cluster- und Nicht-Cluster-Knoten + const size = this.config.nodeSize * nodeImportance + Math.random() * this.config.nodeVariation * 1.5; // Größere Variationsbreite + + const node = { + x: x, + y: y, + size: size, + speed: { + x: (Math.random() - 0.5) * this.config.animationSpeed, + y: (Math.random() - 0.5) * this.config.animationSpeed + }, + pulsePhase: Math.random() * Math.PI * 2, // Random starting phase + connections: [], + isActive: Math.random() < 0.3, // Some nodes start active for neural firing effect + lastFired: 0, // For neural firing animation + firingRate: 1000 + Math.random() * 4000 // Random firing rate in ms + }; + + this.nodes.push(node); + } + } + + createConnections() { + this.connections = []; + this.flows = []; // Reset flows + + // Create connections between nearby nodes + for (let i = 0; i < this.nodes.length; i++) { + const nodeA = this.nodes[i]; + nodeA.connections = []; + + // Sortiere andere Knoten nach Entfernung für bevorzugte nahe Verbindungen + const potentialConnections = []; + + for (let j = 0; j < this.nodes.length; j++) { + if (i === j) continue; + + const nodeB = this.nodes[j]; + const dx = nodeB.x - nodeA.x; + const dy = nodeB.y - nodeA.y; + const distance = Math.sqrt(dx * dx + dy * dy); + + // Erhöhe die Wahrscheinlichkeit für Verbindungen + if (distance < this.config.connectionDistance * 1.5) { // Erweiterter Verbindungsradius + potentialConnections.push({ + index: j, + distance: distance, + // Zufälliger Bonus für mehr Verbindungen + connectionBonus: Math.random() * 0.5 + }); + } + } + + // Sortiere nach Entfernung und Verbindungsbonus mit erweiterten Kriterien + potentialConnections.sort((a, b) => { + // Basis-Score aus Entfernung und Verbindungsbonus + const baseScoreA = a.distance * (1 - a.connectionBonus); + const baseScoreB = b.distance * (1 - b.connectionBonus); + + // Zusätzliche Faktoren für dynamischere Verbindungen + const randomFactorA = Math.random() * 0.2; // Leichte Zufallskomponente + const randomFactorB = Math.random() * 0.2; + + // Kombinierter Score mit Zufallskomponente + const scoreA = baseScoreA * (1 - randomFactorA); + const scoreB = baseScoreB * (1 - randomFactorB); + + return scoreA - scoreB; + }); + + // Erhöhe die maximale Anzahl der Verbindungen signifikant + const maxConn = Math.min( + this.config.maxConnections * 2.5, // Deutlich mehr Verbindungen erlaubt + potentialConnections.length, + 3 + Math.floor(Math.random() * this.config.maxConnections * 2) // Noch mehr zufällige Verbindungen + ); + for (let c = 0; c < maxConn; c++) { + const connection = potentialConnections[c]; + const j = connection.index; + const nodeB = this.nodes[j]; + const distance = connection.distance; + + // Stärkere Verbindungen durch den Bonus + const connectionStrength = Math.max(0, 1 - distance / (this.config.connectionDistance * 1.5)) + connection.connectionBonus; + const connOpacity = connectionStrength * this.config.connectionOpacity; + + // Check if connection already exists + if (!this.connections.some(conn => + (conn.from === i && conn.to === j) || 
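+                    // Connections are treated as undirected here: the pairs (i, j)
+                    // and (j, i) count as the same edge, so each pair is stored once.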
(conn.from === j && conn.to === i) + )) { + // Neue Verbindung mit Ein-/Ausblend-Status + this.connections.push({ + from: i, + to: j, + distance: distance, + opacity: connOpacity, + strength: connectionStrength, + hasFlow: false, + lastActivated: 0, + progress: 0, // Verbindung beginnt unsichtbar und baut sich auf + fadeState: 'in', // Status: 'in' = einblenden, 'visible' = sichtbar, 'out' = ausblenden + fadeStartTime: Date.now(), // Wann der Fade-Vorgang gestartet wurde + fadeTotalDuration: this.config.linesFadeDuration, // Feste Dauer ohne Zufall + visibleDuration: 10000, // Feste Sichtbarkeitsdauer ohne Zufall + fadeProgress: 0, // Aktueller Fortschritt des Fade-Vorgangs (0-1) + buildSpeed: 0 // Geschwindigkeit, mit der die Verbindung aufgebaut wird + }); + nodeA.connections.push(j); + nodeB.connections.push(i); + } + } + } + } + + startAnimation() { + this.animationFrameId = requestAnimationFrame(this.animate.bind(this)); + } + + animate() { + // Update nodes + const width = this.canvas.width / (window.devicePixelRatio || 1); + const height = this.canvas.height / (window.devicePixelRatio || 1); + const now = Date.now(); + + // Berechne den Fadeout-Faktor bei Deaktivierung (1.0 bis 0.0) + const fadeoutFactor = this.isDestroying ? parseFloat(this.canvas.style.opacity) || 0.98 : 1.0; + + // Für jeden Flow eine eindeutige ID vergeben, falls noch nicht vorhanden + for (const flow of this.flows) { + if (!flow.id) { + flow.id = now + '_' + Math.random().toString(36).substr(2, 9); + } + } + + // Setze zunächst alle Neuronen auf inaktiv + for (let i = 0; i < this.nodes.length; i++) { + // Update pulse phase for smoother animation + const node = this.nodes[i]; + + // Bei Ausblendung langsamer pulsieren lassen + const pulseSpeed = this.isDestroying + ? this.config.pulseSpeed * 0.7 + : this.config.pulseSpeed * (1 + (node.connections.length * 0.04)); + + node.pulsePhase += pulseSpeed; + + // Bei Ausblendung Bewegung reduzieren + const movementFactor = this.isDestroying ? 
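+            // Damping sketch: during teardown, drift is scaled to 30% so nodes
+            // glide to a halt instead of stopping abruptly. The sin/cos terms below
+            // keep the per-axis step within [0, 0.8] * node.speed (illustrative bound).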
0.3 : 1.0; + + // Animate node position with gentler movement + node.x += node.speed.x * (Math.sin(now * 0.0003) * 0.4 + 0.4) * movementFactor; + node.y += node.speed.y * (Math.cos(now * 0.0002) * 0.4 + 0.4) * movementFactor; + + // Keep nodes within bounds + if (node.x < 0 || node.x > width) { + node.speed.x *= -1; + node.x = Math.max(0, Math.min(width, node.x)); + } + if (node.y < 0 || node.y > height) { + node.speed.y *= -1; + node.y = Math.max(0, Math.min(height, node.y)); + } + + // Knoten bleiben länger aktiv nach dem Auslösen (2000ms für langanhaltende Aktivierung) + if (node.lastFired && now - node.lastFired < 2000) { + node.isActive = true; + } else { + node.isActive = false; + } + } + + // Aktiviere Neuronen basierend auf aktiven Flows + for (const flow of this.flows) { + // Aktiviere den Quellknoten (der Flow geht von ihm aus) + if (flow.sourceNodeIdx !== undefined) { + this.nodes[flow.sourceNodeIdx].isActive = true; + this.nodes[flow.sourceNodeIdx].lastFired = now; + } + + // Aktiviere den Zielknoten nur, wenn der Flow weit genug fortgeschritten ist + if (flow.targetNodeIdx !== undefined && flow.progress > 0.7) { + this.nodes[flow.targetNodeIdx].isActive = true; + this.nodes[flow.targetNodeIdx].lastFired = now; + } + } + + // Minimale Anzahl aktiver Flows sicherstellen für endlose Animation + const minRequiredFlows = Math.min(15, Math.floor(this.nodes.length * 0.1)); // Mindestens 10% der Knoten haben aktive Flows + const shouldCreateNewFlows = this.flows.length < minRequiredFlows; + + // Chance auf neue Flows + const flowChance = this.isDestroying ? 0.005 : (shouldCreateNewFlows ? 0.1 : 0.04); + + // Neue Flows mit höherer Wahrscheinlichkeit erzeugen, wenn zu wenige aktiv sind + if (Math.random() < flowChance) { + this.initiateMajorFlowCascade(now, fadeoutFactor); + } + + // Aktualisiere die Ein-/Ausblendung von Verbindungen + this.updateConnectionsFading(now); + + // Update flows with proper fading + this.updateFlows(now); + + // Regelmäßig neue Flows erstellen, um das Netzwerk lebendig zu halten + if (Math.random() < 0.07) { // Hohe Chance für kontinuierliche Animation + this.createNewFlow(now); + } + + // Rekonfiguriere Verbindungen sehr selten für ein sich langsam entwickelndes Netzwerk + // Dies verhindert, dass das Netzwerk statisch wird + if (Math.random() < 0.0005) { // Sehr seltene Neuverbindungen (0.05% Chance pro Frame) + this.createConnections(); + } + + // Render + if (this.useWebGL) { + this.renderWebGL(now); + } else { + this.renderCanvas(now); + } + + // Continue animation - garantiere endlose Animation + this.animationFrameId = requestAnimationFrame(this.animate.bind(this)); + } + + // Neue Methode: Starte eine größere Kaskade von Flows an strategischen Punkten + initiateMajorFlowCascade(now, fadeoutFactor = 1.0) { + // Finde gut vernetzte Knoten als Startpunkte + const topNodes = [...this.nodes] + .map((node, index) => ({ node, index, connections: node.connections.length })) + .filter(item => item.connections > 2) // Nur Knoten mit mindestens 3 Verbindungen + .sort((a, b) => b.connections - a.connections); // Sortiere nach Verbindungsanzahl + + if (topNodes.length === 0) return; + + // Wähle 1-3 Startknoten basierend auf Netzwerkgröße + const startNodeCount = Math.max(1, Math.min(3, Math.floor(topNodes.length / 20))); + + for (let i = 0; i < startNodeCount; i++) { + // Wähle einen der Top-Knoten, mit Präferenz für die am besten vernetzten + const nodeIndex = Math.floor(Math.random() * Math.min(10, topNodes.length)); + const startNodeData = 
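+            // Selection sketch: topNodes is sorted by degree (only nodes with at
+            // least 3 connections qualify), and the start node is drawn at random
+            // from the ten best-connected entries, so cascades tend to originate
+            // at network hubs.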
topNodes[nodeIndex]; + if (!startNodeData) continue; + + const startNode = this.nodes[startNodeData.index]; + if (!startNode || startNode.connections.length === 0) continue; + + // Aktiviere den Startknoten + startNode.isActive = true; + startNode.lastFired = now; + startNode.lastCascade = now; // Markiere den Zeitpunkt der letzten Kaskade + + // Starte Flows an mehreren Verbindungen dieses Knotens + const connectionsToActivate = Math.min( + Math.ceil(startNode.connections.length / 2), // Bis zu 50% der Verbindungen + 5 // Aber maximal 5 + ); + + // Sorgfältige Auswahl der Verbindungen für eine optimale Weitergabe + const selectedConnections = []; + const allConnections = [...startNode.connections]; + + // Sortiere Verbindungen nach Aktivierungszeit - bevorzuge solche, die länger nicht aktiviert wurden + const sortedConnections = allConnections + .map(connIdx => { + const connectedNode = this.nodes[connIdx]; + const conn = this.connections.find(c => + (c.from === startNodeData.index && c.to === connIdx) || + (c.from === connIdx && c.to === startNodeData.index) + ); + + return { + connIdx, + nodeConnections: connectedNode ? connectedNode.connections.length : 0, + lastActivated: conn ? conn.lastActivated || 0 : 0 + }; + }) + .sort((a, b) => { + // Priorisiere Verbindungen, die länger nicht aktiviert wurden und zu gut vernetzten Knoten führen + const timeFactorA = (now - a.lastActivated) / 10000; // Zeit seit letzter Aktivierung in 10-Sekunden-Einheiten + const timeFactorB = (now - b.lastActivated) / 10000; + + return (timeFactorB + b.nodeConnections * 0.1) - (timeFactorA + a.nodeConnections * 0.1); + }); + + // Wähle die besten Verbindungen aus + for (let j = 0; j < connectionsToActivate && j < sortedConnections.length; j++) { + selectedConnections.push(sortedConnections[j].connIdx); + } + + // Startversatz für Flows eines Knotens - Verzögere die Flows leicht für Welleneffekt + const baseDelay = 100; // Basisverzögerung in ms + + // Erstelle Flows mit leichter Verzögerung zwischen ihnen + selectedConnections.forEach((connIdx, idx) => { + const conn = this.connections.find(c => + (c.from === startNodeData.index && c.to === connIdx) || + (c.from === connIdx && c.to === startNodeData.index) + ); + + if (conn) { + // Verbindung aktivieren + conn.lastActivated = now; + + // Stelle sicher, dass die Verbindung dauerhaft sichtbar bleibt + if (conn.fadeState !== 'visible') { + conn.fadeState = 'visible'; + conn.fadeStartTime = now; + conn.visibleDuration = 600000 + Math.random() * 300000; // 10-15 Minuten sichtbar + } + + // Verbindung aufbauen + if (conn.progress < 1) { + conn.buildSpeed = 0.03 + Math.random() * 0.02; + } + + // Leichte Verzögerung zwischen den Flows für Welleneffekt + const delay = baseDelay + idx * 150; + + setTimeout(() => { + // Erstelle nur, wenn die Animation noch läuft + if (!this.isDestroying && this.animationFrameId) { + // Bestimme die Richtung des Flows + const direction = conn.from === startNodeData.index; + + // Erstelle den Flow + this.flows.push({ + id: now + '_cascade_' + idx + '_' + Math.random().toString(36).substr(2, 9), + connection: conn, + progress: 0, + direction: direction, + length: this.config.flowLength * (0.8 + Math.random() * 0.4), + creationTime: Date.now(), + totalDuration: 700 + Math.random() * 400, + sourceNodeIdx: direction ? conn.from : conn.to, + targetNodeIdx: direction ? 
conn.to : conn.from, + fadeFactor: fadeoutFactor, + isForwarded: false, + hasForwarded: false, + generation: 1, // Erste Generation + isCascadeRoot: true // Markiere als Wurzel einer Kaskade + }); + } + }, delay); + } + }); + } + } + + // Updated method to update flow animations with proper fading + updateFlows(now) { + // Track new flows to add after the loop to avoid modifying the array during iteration + const newFlows = []; + + // Zähle aktive Flows + const activeFlows = this.flows.length; + + // Wenn zu wenige Flows aktiv sind, starte neue Major Flow Cascades + if (activeFlows < this.config.minActiveFlows) { + this.initiateMajorFlowCascade(now); + } + + // Update existing flows + for (let i = this.flows.length - 1; i >= 0; i--) { + const flow = this.flows[i]; + + // Calculate flow age for fading effects + const flowAge = now - flow.creationTime; + const flowProgress = flowAge / flow.totalDuration; + + // Update flow progress with reduced speed für schlangenartige Bewegung + // Längere Generationen bewegen sich noch langsamer für sichtbare "Schlangen" + const generationSlowdown = Math.max(0.7, 1.0 - (flow.generation || 1) * 0.015); + flow.progress += (this.config.flowSpeed * generationSlowdown) / flow.connection.distance; + + // Aktiviere Quell- und Zielknoten basierend auf Flow-Fortschritt + if (flow.sourceNodeIdx !== undefined) { + // Quellknoten immer aktivieren, solange der Flow aktiv ist + this.nodes[flow.sourceNodeIdx].isActive = true; + this.nodes[flow.sourceNodeIdx].lastFired = now; + } + + // Zielknoten aktivieren und Weiterleitung starten, wenn der Flow ihn erreicht hat + if (flow.targetNodeIdx !== undefined && flow.progress > 0.80) { + const targetNode = this.nodes[flow.targetNodeIdx]; + targetNode.isActive = true; + targetNode.lastFired = now; + + // Stelle sicher, dass die Verbindung dauerhaft sichtbar bleibt + flow.connection.lastActivated = now; + if (flow.connection.fadeState !== 'visible') { + flow.connection.fadeState = 'visible'; + flow.connection.fadeStartTime = now; + flow.connection.visibleDuration = 600000 + Math.random() * 300000; // 10-15 Minuten sichtbar + } + + // Flow ist am Ziel angekommen, starte Weiterleitung zu anderen Knoten + // Jeder Flow leitet genau einmal weiter + if (!flow.hasForwarded && targetNode.connections.length > 0) { + flow.hasForwarded = true; + + // Durch reduzierte Anzahl der Weiterleitungen schaffen wir einen endloseren Eindruck + const connCount = Math.min(3, targetNode.connections.length); + const availableConnections = [...targetNode.connections]; + + // Keine Weiterleitung an die Quelle des Flows zurück + if (flow.sourceNodeIdx !== undefined && availableConnections.length > 1) { + const sourceIndex = availableConnections.indexOf(flow.sourceNodeIdx); + if (sourceIndex >= 0) { + availableConnections.splice(sourceIndex, 1); + } + } + + // Wenn noch Verbindungen übrig sind, leite weiter + if (availableConnections.length > 0) { + // Priorisiere ungenutzte Pfade für ein dynamischeres Netzwerk + const sortedConnections = availableConnections.map(nodeIdx => { + const conn = this.connections.find(c => + (c.from === flow.targetNodeIdx && c.to === nodeIdx) || + (c.from === nodeIdx && c.to === flow.targetNodeIdx) + ); + + const timeSinceLastActivation = conn ? 
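+                        // Score sketch (illustrative numbers): a connection idle for
+                        // 10 s contributes 10000 * 0.00001 = 0.1, a target node of
+                        // degree 4 contributes 4 * 0.2 = 0.8, plus up to 0.5 random
+                        // jitter; higher-scoring connections are forwarded to first.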
+                        now - (conn.lastActivated || 0) : 0;
+                        const connectionCount = this.nodes[nodeIdx].connections.length;
+
+                        return {
+                            nodeIdx,
+                            connectionCount,
+                            timeSinceLastActivation,
+                            score: (timeSinceLastActivation * 0.00001) +
+                                   (connectionCount * 0.2) +
+                                   (Math.random() * 0.5)
+                        };
+                    }).sort((a, b) => b.score - a.score);
+
+                    // Pick the prioritised connections
+                    for (let j = 0; j < connCount && j < sortedConnections.length; j++) {
+                        const nextNodeIdx = sortedConnections[j].nodeIdx;
+                        const nextConn = this.connections.find(c =>
+                            (c.from === flow.targetNodeIdx && c.to === nextNodeIdx) ||
+                            (c.from === nextNodeIdx && c.to === flow.targetNodeIdx)
+                        );
+
+                        if (nextConn) {
+                            nextConn.lastActivated = now;
+
+                            if (nextConn.fadeState !== 'visible') {
+                                nextConn.fadeState = 'visible';
+                                nextConn.fadeStartTime = now;
+                                nextConn.visibleDuration = 600000 + Math.random() * 300000;
+                            }
+
+                            if (nextConn.progress < 1) {
+                                nextConn.buildSpeed = 0.04 + Math.random() * 0.02;
+                            }
+
+                            const nextGeneration = (flow.generation || 1) + 1;
+
+                            if (nextGeneration <= this.config.maxFlowGenerations) {
+                                const direction = nextConn.from === flow.targetNodeIdx;
+
+                                // Delay scales with connection length
+                                const connectionLength = Math.sqrt(
+                                    Math.pow(this.nodes[nextConn.to].x - this.nodes[nextConn.from].x, 2) +
+                                    Math.pow(this.nodes[nextConn.to].y - this.nodes[nextConn.from].y, 2)
+                                );
+
+                                const baseDelay = 80;
+                                const lengthFactor = Math.min(1.2, connectionLength / 200);
+                                const genFactor = Math.max(0.2, 1.0 - (nextGeneration * 0.005));
+                                const delay = baseDelay * lengthFactor * genFactor;
+
+                                setTimeout(() => {
+                                    if (!this.isDestroying && this.animationFrameId) {
+                                        // Push directly onto this.flows: the local newFlows
+                                        // batch for this frame has already been merged by the
+                                        // time this timeout fires.
+                                        this.flows.push({
+                                            id: now + '_' + Math.random().toString(36).substr(2, 9),
+                                            connection: nextConn,
+                                            progress: 0,
+                                            direction: direction,
+                                            length: this.config.flowLength * (0.9 + Math.random() * 0.2),
+                                            creationTime: Date.now(),
+                                            totalDuration: 600 + Math.random() * 300 + (nextGeneration * 5),
+                                            sourceNodeIdx: direction ? nextConn.from : nextConn.to,
+                                            targetNodeIdx: direction ?
nextConn.to : nextConn.from, + fadeFactor: flow.fadeFactor || 1.0, + isForwarded: false, + generation: nextGeneration, + hasForwarded: false, + parentFlow: flow.id + }); + } + }, delay); + } + } + } + } + } + } + + // Stellen Sie sicher, dass die Verbindung aktiv bleibt + flow.connection.lastActivated = now; + + if (flow.connection.fadeState === 'visible') { + flow.connection.visibleDuration = Math.max( + flow.connection.visibleDuration || 300000, + 600000 + Math.random() * 300000 + ); + } + + // Remove completed flows, but ensure parent-child relationships for visible "snakes" + const isComplete = flow.progress > 1.0 || flowProgress >= 1.0; + const hasMaxGeneration = flow.generation && flow.generation > this.config.maxFlowGenerations; + + if (isComplete || hasMaxGeneration) { + this.flows.splice(i, 1); + } + } + + // Füge alle neuen Flows hinzu + this.flows.push(...newFlows); + } + + // Aktualisiert die Ein-/Ausblendung von Verbindungen mit deutlich langsameren Übergängen + updateConnectionsFading(now) { + for (const connection of this.connections) { + const elapsedTime = now - connection.fadeStartTime; + + // Update connection fade status + if (connection.fadeState === 'in') { + // Einblenden - langsamer für sanfteren Aufbau + connection.fadeProgress = Math.min(1.0, elapsedTime / (connection.fadeTotalDuration * 1.5)); + if (connection.fadeProgress >= 1.0) { + connection.fadeState = 'visible'; + connection.fadeStartTime = now; + } + } else if (connection.fadeState === 'visible') { + // Verbindung ist vollständig sichtbar - EXTREM lange Sichtbarkeitsdauer + // für permanente Netzwerkstrukturen + if (elapsedTime > connection.visibleDuration * 3.0) { // Erhöht von 2.5 auf 3.0 + // Prüfe, ob die Verbindung kürzlich aktiviert wurde + if (now - connection.lastActivated < 120000) { // 2 Minuten Aktivitätsschutz + // Wenn kürzlich aktiviert, verlängere die Sichtbarkeit + connection.fadeStartTime = now; + } else { + // Nur sehr alte, inaktive Verbindungen langsam ausblenden + connection.fadeState = 'out'; + connection.fadeStartTime = now; + connection.fadeProgress = 1.0; + } + } + } else if (connection.fadeState === 'out') { + // Ausblenden - EXTREM langsam für dauerhaft sichtbare Strukturen + connection.fadeProgress = Math.max(0.25, 1.0 - (elapsedTime / (connection.fadeTotalDuration * 10.0))); // Erhöht von 8.0 auf 10.0 + + // Verbindungen bleiben dauerhaft stark sichtbar + if (connection.fadeProgress <= 0.25) { // Minimum erhöht von 0.15 auf 0.25 + // Verbindungen niemals komplett verschwinden lassen + connection.fadeState = 'in'; + connection.fadeStartTime = now; + connection.fadeProgress = 0.25; // Höhere Grundsichtbarkeit + connection.visibleDuration = 600000 + Math.random() * 300000; // 10-15 Minuten sichtbar + } + } else if (connection.fadeState === 'hidden') { + // Keine Verbindungen verstecken, alle sichtbar halten + connection.fadeState = 'in'; + connection.fadeStartTime = now; + connection.fadeProgress = 0.25; // Höhere Grundsichtbarkeit + } + + // Verbindungen schneller vollständig aufbauen für permanentes Netzwerk + if (connection.progress < 1) { + // Erhöhte Basisgeschwindigkeit für schnelleren Aufbau + const baseBuildSpeed = 0.008; // Erhöht für schnelleren permanenten Aufbau + let buildSpeed = connection.buildSpeed || baseBuildSpeed; + + // Wenn kürzlich aktiviert, noch schneller aufbauen + if (now - connection.lastActivated < 2000) { + buildSpeed = Math.max(buildSpeed, 0.015); // Erhöht für schnelleren Aufbau + } + + connection.progress += buildSpeed; + + if (connection.progress > 
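+            // Build-up sketch: at the base speed of 0.008 per frame a connection
+            // needs 1 / 0.008 = 125 frames, i.e. roughly 2 s at 60 fps, to reach
+            // full length; recently activated connections build at ~0.015 instead.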
1) { + connection.progress = 1; + // Zurücksetzen der Aufbaugeschwindigkeit + connection.buildSpeed = 0; + } + } + } + } + + // Updated method to create flow animations with timing info + createNewFlow(now) { + if (this.connections.length === 0 || this.flows.length >= 8) return; + + // Verbesserte Auswahl zufälliger Verbindungen mit Präferenz für hoch vernetzte Knoten + // Sammle alle Verbindungen und bewerte sie nach der Anzahl ihrer Verbindungen + const weightedConnections = this.connections.map((conn, index) => { + const fromNode = this.nodes[conn.from]; + const toNode = this.nodes[conn.to]; + const connectionCount = fromNode.connections.length + toNode.connections.length; + return { + index, + connectionCount, + weight: connectionCount + Math.random() * 5 // Füge Zufall hinzu für Variabilität + }; + }); + + // Sortiere nach Gewicht (Verbindungsanzahl + Zufallsfaktor) + weightedConnections.sort((a, b) => b.weight - a.weight); + + // Wähle eine der Top-Verbindungen + const connectionIdx = weightedConnections[Math.floor(Math.random() * Math.min(15, weightedConnections.length))].index; + const connection = this.connections[connectionIdx]; + + // Verbindung als "im Aufbau" markieren, wenn sie noch nicht vollständig ist + if (connection.progress < 1) { + connection.buildSpeed = 0.02 + Math.random() * 0.01; // Schnellerer Aufbau während eines Blitzes + } + + // Stelle sicher, dass die Verbindung dauerhaft sichtbar bleibt + connection.lastActivated = now; + if (connection.fadeState !== 'visible') { + connection.fadeState = 'visible'; + connection.fadeStartTime = now; + connection.visibleDuration = 150000 + Math.random() * 90000; // 2.5-4 Minuten sichtbar + } + + // Create a new flow along this connection + this.flows.push({ + connection: connection, + progress: 0, + direction: Math.random() > 0.5, // Randomly decide direction + length: this.config.flowLength + Math.random() * 0.1, // Slightly vary lengths + creationTime: now, + totalDuration: 900 + Math.random() * 500, // Längere Gesamtdauer (900-1400ms) + sourceNodeIdx: connection.direction ? connection.from : connection.to, + targetNodeIdx: connection.direction ? connection.to : connection.from, + isForwarded: false, // Dieses Flag ermöglicht Weiterleitung + hasForwarded: false, // Noch nicht weitergeleitet + generation: 1 // Erste Generation + }); + } + + renderWebGL(now) { + this.gl.clear(this.gl.COLOR_BUFFER_BIT); + + const width = this.canvas.width / (window.devicePixelRatio || 1); + const height = this.canvas.height / (window.devicePixelRatio || 1); + + // Fade-Faktor für sanftes Ausblenden + const fadeoutFactor = this.isDestroying ? 
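+        // Draw-order sketch: connections first, then flows, then nodes, so the
+        // glowing points always sit on top. fadeoutFactor is read back from the
+        // canvas CSS opacity during teardown and scales every pass uniformly.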
parseFloat(this.canvas.style.opacity) || 0.98 : 1.0; + + // Select shader program + this.gl.useProgram(this.programInfo.program); + + // Set resolution uniform + this.gl.uniform2f(this.programInfo.uniformLocations.resolution, width, height); + + // Draw connections first (behind nodes) + this.renderConnectionsWebGL(now, fadeoutFactor); + + // Draw flows on top of connections + this.renderFlowsWebGL(now, fadeoutFactor); + + // Draw nodes + this.renderNodesWebGL(now, fadeoutFactor); + } + + renderNodesWebGL(now, fadeoutFactor = 1.0) { + // Prepare node positions for WebGL + const positions = new Float32Array(this.nodes.length * 2); + const sizes = new Float32Array(this.nodes.length); + + for (let i = 0; i < this.nodes.length; i++) { + const node = this.nodes[i]; + positions[i * 2] = node.x; + positions[i * 2 + 1] = node.y; + + // Sanftere Pulsation mit moderaterem Aktivierungsboost + const activationBoost = node.isActive ? 1.2 : 1.0; // Reduziert von 1.3 auf 1.2 + let pulse = (Math.sin(node.pulsePhase) * 0.2 + 1.1) * activationBoost; // Reduzierte Pulsation + + // Größe basierend auf Konnektivität und Wichtigkeit, aber subtiler + const connectivityFactor = 1 + (node.connections.length / this.config.maxConnections) * 0.8; // Reduziert von 1.1 auf 0.8 + sizes[i] = node.size * pulse * connectivityFactor; + } + + // Bind position buffer + this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.positionBuffer); + this.gl.bufferData(this.gl.ARRAY_BUFFER, positions, this.gl.STATIC_DRAW); + this.gl.vertexAttribPointer( + this.programInfo.attribLocations.vertexPosition, + 2, // components per vertex + this.gl.FLOAT, // data type + false, // normalize + 0, // stride + 0 // offset + ); + this.gl.enableVertexAttribArray(this.programInfo.attribLocations.vertexPosition); + + // Bind size buffer + this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.sizeBuffer); + this.gl.bufferData(this.gl.ARRAY_BUFFER, sizes, this.gl.STATIC_DRAW); + this.gl.vertexAttribPointer( + this.programInfo.attribLocations.pointSize, + 1, // components per vertex + this.gl.FLOAT, // data type + false, // normalize + 0, // stride + 0 // offset + ); + this.gl.enableVertexAttribArray(this.programInfo.attribLocations.pointSize); + + // Enable blending for all nodes + this.gl.enable(this.gl.BLEND); + this.gl.blendFunc(this.gl.SRC_ALPHA, this.gl.ONE); // Additive blending for glow + + // Draw each node individually with its own color + for (let i = 0; i < this.nodes.length; i++) { + const node = this.nodes[i]; + + // Set node color - sanftere Hervorhebung von aktiven Knoten + const colorObj = this.isDarkMode ? this.darkModeColors : this.lightModeColors; + const nodeColor = this.hexToRgb(colorObj.nodeColor); + const nodePulseColor = this.hexToRgb(colorObj.nodePulse); + + // Use pulse color for active nodes + let r = nodeColor.r / 255; + let g = nodeColor.g / 255; + let b = nodeColor.b / 255; + + // Active nodes get slightly brighter color - noch subtiler + if (node.isActive) { + r = (r * 0.8 + nodePulseColor.r / 255 * 0.2); // Reduziert von 0.7/0.3 auf 0.8/0.2 + g = (g * 0.8 + nodePulseColor.g / 255 * 0.2); + b = (b * 0.8 + nodePulseColor.b / 255 * 0.2); + } + + // Noch subtilere Knoten mit weiter reduzierter Opazität + // Berücksichtige fadeoutFactor für sanftes Ausblenden + const nodeOpacity = node.isActive ? 
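+            // Active nodes blend 80% base colour with 20% pulse colour and are
+            // drawn one at a time below (drawArrays(POINTS, i, 1)) with additive
+            // blending, so overlapping nodes brighten rather than occlude each other.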
0.65 : 0.55; // Reduziert von 0.75/0.65 auf 0.65/0.55 + this.gl.uniform4f( + this.programInfo.uniformLocations.color, + r, g, b, + nodeOpacity * fadeoutFactor // Reduzierte Opazität während des Ausblendprozesses + ); + + // Draw each node individually for better control + this.gl.drawArrays(this.gl.POINTS, i, 1); + } + } + + renderConnectionsWebGL(now, fadeoutFactor = 1.0) { + for (const connection of this.connections) { + // Überspringe Verbindungen, die komplett unsichtbar sind + if (connection.fadeState === 'hidden' || connection.fadeProgress <= 0) continue; + + const fromNode = this.nodes[connection.from]; + const toNode = this.nodes[connection.to]; + const progress = connection.progress || 1; + const x1 = fromNode.x; + const y1 = fromNode.y; + const x2 = fromNode.x + (toNode.x - fromNode.x) * progress; + const y2 = fromNode.y + (toNode.y - fromNode.y) * progress; + + // Calculate opacity based on fade state + let opacity = connection.opacity * connection.fadeProgress * 0.7; // Reduzierte Gesamtopazität auf 70% + + // Erhöhe kurzzeitig die Opazität bei kürzlich aktivierten Verbindungen + if (connection.lastActivated && now - connection.lastActivated < 800) { + const timeFactor = 1 - ((now - connection.lastActivated) / 800); + opacity = Math.max(opacity, timeFactor * 0.25); // Reduzierte Highlight-Opazität auf 25% + } + + // Berücksichtige fadeoutFactor für sanftes Ausblenden + opacity *= fadeoutFactor; + + const positions = new Float32Array([ + x1, y1, + x2, y2 + ]); + + this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.positionBuffer); + this.gl.bufferData(this.gl.ARRAY_BUFFER, positions, this.gl.STATIC_DRAW); + this.gl.vertexAttribPointer( + this.programInfo.attribLocations.vertexPosition, + 2, + this.gl.FLOAT, + false, + 0, + 0 + ); + this.gl.enableVertexAttribArray(this.programInfo.attribLocations.vertexPosition); + this.gl.disableVertexAttribArray(this.programInfo.attribLocations.pointSize); + + // Set color with calculated opacity + const colorObj = this.isDarkMode ? this.darkModeColors : this.lightModeColors; + const connColor = this.hexToRgb(colorObj.connectionColor); + this.gl.uniform4f( + this.programInfo.uniformLocations.color, + connColor.r / 255, + connColor.g / 255, + connColor.b / 255, + opacity + ); + + this.gl.enable(this.gl.BLEND); + this.gl.blendFunc(this.gl.SRC_ALPHA, this.gl.ONE); + this.gl.lineWidth(this.config.linesWidth); + this.gl.drawArrays(this.gl.LINES, 0, 2); + } + } + + // New method to render the flowing animations with subtilerer Appearance + renderFlowsWebGL(now, fadeoutFactor = 1.0) { + // Für jeden Flow einen dezenten, echten Blitz als Zickzack zeichnen und Funken erzeugen + for (const flow of this.flows) { + const connection = flow.connection; + const fromNode = this.nodes[connection.from]; + const toNode = this.nodes[connection.to]; + const startProgress = flow.progress; + const endProgress = Math.min(1, startProgress + flow.length); + if (startProgress >= 1 || endProgress <= 0) continue; + const direction = flow.direction ? 
1 : -1; + let p1, p2; + if (direction > 0) { + p1 = { + x: fromNode.x + (toNode.x - fromNode.x) * startProgress, + y: fromNode.y + (toNode.y - fromNode.y) * startProgress + }; + p2 = { + x: fromNode.x + (toNode.x - fromNode.x) * endProgress, + y: fromNode.y + (toNode.y - fromNode.y) * endProgress + }; + } else { + p1 = { + x: toNode.x + (fromNode.x - toNode.x) * startProgress, + y: toNode.y + (fromNode.y - toNode.y) * startProgress + }; + p2 = { + x: toNode.x + (fromNode.x - toNode.x) * endProgress, + y: toNode.y + (fromNode.y - toNode.y) * endProgress + }; + } + // Zickzack-Blitz generieren + const zigzag = this.generateZigZagPoints(p1, p2, 7, 8); // Reduzierte Zickzack-Amplitude + for (let i = 0; i < zigzag.length - 1; i++) { + const positions = new Float32Array([ + zigzag[i].x, zigzag[i].y, + zigzag[i + 1].x, zigzag[i + 1].y + ]); + this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.positionBuffer); + this.gl.bufferData(this.gl.ARRAY_BUFFER, positions, this.gl.STATIC_DRAW); + this.gl.vertexAttribPointer( + this.programInfo.attribLocations.vertexPosition, + 2, + this.gl.FLOAT, + false, + 0, + 0 + ); + this.gl.enableVertexAttribArray(this.programInfo.attribLocations.vertexPosition); + this.gl.disableVertexAttribArray(this.programInfo.attribLocations.pointSize); + // Dezenter, leuchtender Blitz + const colorObj = this.isDarkMode ? this.darkModeColors : this.lightModeColors; + const flowColor = this.hexToRgb(colorObj.flowColor); + // Definiere fadeFactor als 1.0, falls nicht von flow definiert + // Berücksichtige auch den fadeoutFactor für das Ausblenden der gesamten Animation + const fadeFactor = (flow.fadeFactor || 1.0) * fadeoutFactor; + this.gl.uniform4f( + this.programInfo.uniformLocations.color, + flowColor.r / 255, + flowColor.g / 255, + flowColor.b / 255, + 0.55 * fadeFactor // Reduziert von 0.7 auf 0.55 für subtilere Blitze + ); + this.gl.enable(this.gl.BLEND); + this.gl.blendFunc(this.gl.SRC_ALPHA, this.gl.ONE); + this.gl.lineWidth(1.2); // Schmaler Blitz + this.gl.drawArrays(this.gl.LINES, 0, 2); + } + // Funken erzeugen - subtilere elektrische Funken + const sparks = this.generateSparkPoints(zigzag, 5 + Math.floor(Math.random() * 3)); // Weniger Funken + for (const spark of sparks) { + // Helles Weiß-Blau für elektrische Funken, aber dezenter + this.gl.uniform4f( + this.programInfo.uniformLocations.color, + 0.85, 0.92, 0.98, 0.5 * fadeoutFactor // Reduzierte Opazität auf 50% + ); + + // Position für den Funken setzen + const sparkPos = new Float32Array([spark.x, spark.y]); + this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.positionBuffer); + this.gl.bufferData(this.gl.ARRAY_BUFFER, sparkPos, this.gl.STATIC_DRAW); + + // Punktgröße setzen - kleinere Funken + const sizes = new Float32Array([spark.size * 2.5]); // Reduziert von 3.0 auf 2.5 + this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.sizeBuffer); + this.gl.bufferData(this.gl.ARRAY_BUFFER, sizes, this.gl.STATIC_DRAW); + + this.gl.vertexAttribPointer( + this.programInfo.attribLocations.vertexPosition, + 2, + this.gl.FLOAT, + false, + 0, + 0 + ); + this.gl.enableVertexAttribArray(this.programInfo.attribLocations.vertexPosition); + + this.gl.vertexAttribPointer( + this.programInfo.attribLocations.pointSize, + 1, + this.gl.FLOAT, + false, + 0, + 0 + ); + this.gl.enableVertexAttribArray(this.programInfo.attribLocations.pointSize); + + this.gl.drawArrays(this.gl.POINTS, 0, 1); + } + } + } + + renderCanvas(now) { + // Clear canvas + const width = this.canvas.width / (window.devicePixelRatio || 1); + const height = this.canvas.height / 
(window.devicePixelRatio || 1); + + // Fade-Faktor für sanftes Ausblenden im Canvas-Modus + const fadeoutFactor = this.isDestroying ? parseFloat(this.canvas.style.opacity) || 0.98 : 1.0; + + this.ctx.clearRect(0, 0, width, height); + + // Set background + const backgroundColor = this.isDarkMode + ? this.darkModeColors.background + : this.lightModeColors.background; + + this.ctx.fillStyle = backgroundColor; + this.ctx.fillRect(0, 0, width, height); + + // Draw connections with fade effects + const connectionColor = this.isDarkMode + ? this.darkModeColors.connectionColor + : this.lightModeColors.connectionColor; + + for (const connection of this.connections) { + // Überspringe Verbindungen, die komplett unsichtbar sind + if (connection.fadeState === 'hidden' || connection.fadeProgress <= 0) continue; + + const fromNode = this.nodes[connection.from]; + const toNode = this.nodes[connection.to]; + + // Zeichne nur den Teil der Verbindung, der schon aufgebaut wurde + const progress = connection.progress || 0; + if (progress <= 0) continue; // Skip if no progress yet + + const x1 = fromNode.x; + const y1 = fromNode.y; + + // Endpunkt basiert auf dem aktuellen Fortschritt + const x2 = x1 + (toNode.x - x1) * progress; + const y2 = y1 + (toNode.y - y1) * progress; + + // Zeichne die unterliegende Linie mit Ein-/Ausblendung + this.ctx.beginPath(); + this.ctx.moveTo(x1, y1); + this.ctx.lineTo(x2, y2); + + const rgbColor = this.hexToRgb(connectionColor); + + // Calculate opacity based on fade state + let opacity = connection.opacity * connection.fadeProgress; + + // Erhöhe kurzzeitig die Opazität bei kürzlich aktivierten Verbindungen + if (connection.lastActivated && now - connection.lastActivated < 800) { + const timeFactor = 1 - ((now - connection.lastActivated) / 800); + opacity = Math.max(opacity, timeFactor * this.config.linesOpacity); + } + + // Berücksichtige fadeoutFactor für sanftes Ausblenden + opacity *= fadeoutFactor; + + this.ctx.strokeStyle = `rgba(${rgbColor.r}, ${rgbColor.g}, ${rgbColor.b}, ${opacity})`; + this.ctx.lineWidth = this.config.linesWidth; + + // Leichter Glow-Effekt für die Linien + if (opacity > 0.1) { + this.ctx.shadowColor = `rgba(${rgbColor.r}, ${rgbColor.g}, ${rgbColor.b}, ${0.15 * fadeoutFactor})`; + this.ctx.shadowBlur = 3; + } else { + this.ctx.shadowBlur = 0; + } + + this.ctx.stroke(); + + // Zeichne einen Fortschrittspunkt am Ende der sich aufbauenden Verbindung + if (progress < 0.95 && connection.fadeProgress > 0.5) { + this.ctx.beginPath(); + this.ctx.arc(x2, y2, 1.5, 0, Math.PI * 2); + this.ctx.fillStyle = `rgba(${rgbColor.r}, ${rgbColor.g}, ${rgbColor.b}, ${opacity * 1.3})`; + this.ctx.fill(); + } + + this.ctx.shadowBlur = 0; // Reset shadow for other elements + } + + // Draw flows with fading effect + this.renderFlowsCanvas(now, fadeoutFactor); + + // Draw nodes with enhanced animations + const nodeColor = this.isDarkMode + ? this.darkModeColors.nodeColor + : this.lightModeColors.nodeColor; + + const nodePulse = this.isDarkMode + ? this.darkModeColors.nodePulse + : this.lightModeColors.nodePulse; + + for (const node of this.nodes) { + // Verbesserte Pulsation mit Aktivierungsboost + const activationBoost = node.isActive ? 
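+            // Pulse sketch: sin(phase) * 0.45 + 1.4 keeps the base pulse within
+            // [0.95, 1.85]; an active node is boosted by a further 1.7x, so the
+            // effective size factor reaches roughly 3.1 (illustrative bounds).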
1.7 : 1.0; + const pulse = (Math.sin(node.pulsePhase) * 0.45 + 1.4) * activationBoost; + + // Größe basierend auf Konnektivität und Wichtigkeit + const connectivityFactor = 1 + (node.connections.length / this.config.maxConnections) * 1.4; + // Während des Ausblendprozesses die Knotengröße leicht reduzieren + const sizeReduction = this.isDestroying ? 0.85 : 1.0; + const nodeSize = node.size * pulse * connectivityFactor * sizeReduction; + + // Verbesserte Leuchtkraft und Glow-Effekt + const rgbNodeColor = this.hexToRgb(nodeColor); + const rgbPulseColor = this.hexToRgb(nodePulse); + + // Mische Farben basierend auf Aktivierung + let r, g, b; + if (node.isActive) { + r = (rgbNodeColor.r * 0.3 + rgbPulseColor.r * 0.7); + g = (rgbNodeColor.g * 0.3 + rgbPulseColor.g * 0.7); + b = (rgbNodeColor.b * 0.3 + rgbPulseColor.b * 0.7); + } else { + r = rgbNodeColor.r; + g = rgbNodeColor.g; + b = rgbNodeColor.b; + } + + // Äußerer Glow + const glow = this.ctx.createRadialGradient( + node.x, node.y, 0, + node.x, node.y, nodeSize * 4.5 + ); + + // Intensiveres Zentrum und weicherer Übergang + // Berücksichtige fadeoutFactor für sanftes Ausblenden + const activeOpacity = node.isActive ? 0.95 : 0.8; + glow.addColorStop(0, `rgba(${r}, ${g}, ${b}, ${activeOpacity * fadeoutFactor})`); + glow.addColorStop(0.4, `rgba(${r}, ${g}, ${b}, ${0.45 * fadeoutFactor})`); + glow.addColorStop(1, `rgba(${r}, ${g}, ${b}, 0)`); + + this.ctx.beginPath(); + this.ctx.arc(node.x, node.y, nodeSize, 0, Math.PI * 2); + this.ctx.fillStyle = glow; + // Globale Transparenz reduzieren beim Ausblenden + this.ctx.globalAlpha = (node.isActive ? 1.0 : 0.92) * fadeoutFactor; + this.ctx.fill(); + + // Innerer Kern für stärkeren Leuchteffekt + if (node.isActive) { + this.ctx.beginPath(); + this.ctx.arc(node.x, node.y, nodeSize * 0.4, 0, Math.PI * 2); + this.ctx.fillStyle = `rgba(${rgbPulseColor.r}, ${rgbPulseColor.g}, ${rgbPulseColor.b}, ${0.9 * fadeoutFactor})`; + this.ctx.fill(); + } + + this.ctx.globalAlpha = 1.0; + } + } + + // New method to render flows in Canvas mode with fading + renderFlowsCanvas(now, fadeoutFactor = 1.0) { + if (!this.flows.length) return; + + // Für jeden Flow in der Blitzanimation + for (const flow of this.flows) { + const connection = flow.connection; + const fromNode = this.nodes[connection.from]; + const toNode = this.nodes[connection.to]; + + // Berechne den Fortschritt der Connection und des Flows + const connProgress = connection.progress || 1; + + // Flussrichtung bestimmen + const [startNode, endNode] = flow.direction ? 
[fromNode, toNode] : [toNode, fromNode]; + + // Berücksichtige den Verbindungs-Fortschritt + const maxDistance = Math.min(connProgress, 1) * Math.sqrt( + Math.pow(endNode.x - startNode.x, 2) + + Math.pow(endNode.y - startNode.y, 2) + ); + + // Berechne den aktuellen Fortschritt mit Ein- und Ausblendung + const flowAge = now - flow.creationTime; + const flowLifetime = flow.totalDuration; + let fadeFactor = 1.0; + + // Sanftere Ein- und Ausblendung für Blitzeffekte + if (flowAge < flowLifetime * 0.3) { + // Einblenden - sanfter und länger + fadeFactor = flowAge / (flowLifetime * 0.3); + } else if (flowAge > flowLifetime * 0.7) { + // Ausblenden - sanfter und länger + fadeFactor = 1.0 - ((flowAge - flowLifetime * 0.7) / (flowLifetime * 0.3)); + } + + // Beim Ausblenden der gesamten Animation auch den Flow-Faktor anpassen + fadeFactor *= fadeoutFactor; + + // Flow-Fortschritt + const startProgress = Math.max(0.0, flow.progress - flow.length); + const endProgress = Math.min(flow.progress, connProgress); + + // Start- und Endpunkte basierend auf dem Fortschritt + const p1 = { + x: startNode.x + (endNode.x - startNode.x) * startProgress, + y: startNode.y + (endNode.y - startNode.y) * startProgress + }; + + const p2 = { + x: startNode.x + (endNode.x - startNode.x) * endProgress, + y: startNode.y + (endNode.y - startNode.y) * endProgress + }; + + if (endProgress > connProgress) continue; + + // Lila Gradient für den Blitz + const gradient = this.ctx.createLinearGradient(p1.x, p1.y, p2.x, p2.y); + gradient.addColorStop(0, 'rgba(255, 0, 255, 0.8)'); + gradient.addColorStop(0.5, 'rgba(200, 0, 255, 0.9)'); + gradient.addColorStop(1, 'rgba(255, 0, 255, 0.8)'); + + this.ctx.save(); + + // Untergrundspur mit stärkerem Glühen + this.ctx.beginPath(); + this.ctx.moveTo(p1.x, p1.y); + this.ctx.lineTo(p2.x, p2.y); + this.ctx.strokeStyle = gradient; + this.ctx.lineWidth = 20.0; + this.ctx.shadowColor = 'rgba(255, 0, 255, 0.4)'; + this.ctx.shadowBlur = 25; + this.ctx.stroke(); + + // Abgerundeter Zickzack-Blitz mit weicheren Kurven + const zigzag = this.generateZigZagPoints(p1, p2, 4, 6, true); + + // Hauptblitz mit Gradient + this.ctx.strokeStyle = gradient; + this.ctx.lineWidth = 1.5; + this.ctx.shadowColor = 'rgba(255, 0, 255, 0.5)'; + this.ctx.shadowBlur = 30; + this.ctx.beginPath(); + this.ctx.moveTo(zigzag[0].x, zigzag[0].y); + for (let i = 1; i < zigzag.length; i++) { + const cp1x = zigzag[i-1].x + (zigzag[i].x - zigzag[i-1].x) * 0.4; + const cp1y = zigzag[i-1].y + (zigzag[i].y - zigzag[i-1].y) * 0.4; + const cp2x = zigzag[i].x - (zigzag[i].x - zigzag[i-1].x) * 0.4; + const cp2y = zigzag[i].y - (zigzag[i].y - zigzag[i-1].y) * 0.4; + this.ctx.bezierCurveTo(cp1x, cp1y, cp2x, cp2y, zigzag[i].x, zigzag[i].y); + } + this.ctx.stroke(); + + // Funken mit lila Glühen + const sparks = this.generateSparkPoints(zigzag, 10 + Math.floor(Math.random() * 6)); + + const sparkGradient = this.ctx.createRadialGradient(0, 0, 0, 0, 0, 10); + sparkGradient.addColorStop(0, 'rgba(255, 0, 255, 0.95)'); + sparkGradient.addColorStop(0.5, 'rgba(200, 0, 255, 0.8)'); + sparkGradient.addColorStop(1, 'rgba(255, 0, 255, 0.6)'); + + for (const spark of sparks) { + this.ctx.beginPath(); + + // Weichere Sternform + const points = 4 + Math.floor(Math.random() * 3); + const outerRadius = spark.size * 2.0; + const innerRadius = spark.size * 0.5; + + for (let i = 0; i < points * 2; i++) { + const radius = i % 2 === 0 ? 
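+                // Star sketch: alternating outer/inner radii over 2 * points
+                // vertices traces a star polygon; e.g. points = 5 yields 10
+                // vertices, with every even index on the outer ring.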
+
+            // Sparks with a purple glow
+            const sparks = this.generateSparkPoints(zigzag, 10 + Math.floor(Math.random() * 6));
+
+            const sparkGradient = this.ctx.createRadialGradient(0, 0, 0, 0, 0, 10);
+            sparkGradient.addColorStop(0, 'rgba(255, 0, 255, 0.95)');
+            sparkGradient.addColorStop(0.5, 'rgba(200, 0, 255, 0.8)');
+            sparkGradient.addColorStop(1, 'rgba(255, 0, 255, 0.6)');
+
+            for (const spark of sparks) {
+                this.ctx.beginPath();
+
+                // Softer star shape
+                const points = 4 + Math.floor(Math.random() * 3);
+                const outerRadius = spark.size * 2.0;
+                const innerRadius = spark.size * 0.5;
+
+                for (let i = 0; i < points * 2; i++) {
+                    const radius = i % 2 === 0 ? outerRadius : innerRadius;
+                    const angle = (i * Math.PI) / points;
+                    const x = spark.x + Math.cos(angle) * radius;
+                    const y = spark.y + Math.sin(angle) * radius;
+
+                    if (i === 0) {
+                        this.ctx.moveTo(x, y);
+                    } else {
+                        this.ctx.lineTo(x, y);
+                    }
+                }
+
+                this.ctx.closePath();
+
+                // Intense purple glow
+                this.ctx.shadowColor = 'rgba(255, 0, 255, 0.8)';
+                this.ctx.shadowBlur = 25;
+                this.ctx.fillStyle = sparkGradient;
+                this.ctx.fill();
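+                // Geometry note (worked example): with points = 4 the loop emits 8
+                // vertices, alternating between outer and inner radius every PI/4
+                // radians, which traces a 4-pointed star around (spark.x, spark.y).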
+                // More intense inner glow for selected sparks, built from several layers
+                if (spark.size > 3 && Math.random() > 0.3) {
+                    // First glow layer - larger and softer
+                    this.ctx.beginPath();
+                    this.ctx.arc(spark.x, spark.y, spark.size * 0.8, 0, Math.PI * 2);
+                    this.ctx.fillStyle = this.isDarkMode
+                        ? `rgba(245, 252, 255, ${0.85 * fadeFactor})`
+                        : `rgba(230, 245, 255, ${0.8 * fadeFactor})`;
+                    this.ctx.shadowColor = this.isDarkMode
+                        ? `rgba(200, 225, 255, ${0.7 * fadeFactor})`
+                        : `rgba(180, 220, 255, ${0.6 * fadeFactor})`;
+                    this.ctx.shadowBlur = 15;
+                    this.ctx.fill();
+
+                    // Second glow layer - smaller and more intense
+                    this.ctx.beginPath();
+                    this.ctx.arc(spark.x, spark.y, spark.size * 0.5, 0, Math.PI * 2);
+                    this.ctx.fillStyle = this.isDarkMode
+                        ? `rgba(255, 255, 255, ${0.95 * fadeFactor})`
+                        : `rgba(240, 250, 255, ${0.9 * fadeFactor})`;
+                    this.ctx.shadowColor = this.isDarkMode
+                        ? `rgba(220, 235, 255, ${0.8 * fadeFactor})`
+                        : `rgba(200, 230, 255, ${0.7 * fadeFactor})`;
+                    this.ctx.shadowBlur = 20;
+                    this.ctx.fill();
+
+                    // Third glow layer - an even more intense core
+                    this.ctx.beginPath();
+                    this.ctx.arc(spark.x, spark.y, spark.size * 0.3, 0, Math.PI * 2);
+                    this.ctx.fillStyle = this.isDarkMode
+                        ? `rgba(255, 255, 255, ${0.98 * fadeFactor})`
+                        : `rgba(245, 252, 255, ${0.95 * fadeFactor})`;
+                    this.ctx.shadowColor = this.isDarkMode
+                        ? `rgba(230, 240, 255, ${0.9 * fadeFactor})`
+                        : `rgba(210, 235, 255, ${0.8 * fadeFactor})`;
+                    this.ctx.shadowBlur = 25;
+                    this.ctx.fill();
+
+                    // Fourth glow layer - pulsing effect
+                    const pulseSize = spark.size * (0.2 + Math.sin(Date.now() * 0.01) * 0.1);
+                    this.ctx.beginPath();
+                    this.ctx.arc(spark.x, spark.y, pulseSize, 0, Math.PI * 2);
+                    this.ctx.fillStyle = this.isDarkMode
+                        ? `rgba(255, 255, 255, ${0.99 * fadeFactor})`
+                        : `rgba(250, 255, 255, ${0.97 * fadeFactor})`;
+                    this.ctx.shadowColor = this.isDarkMode
+                        ? `rgba(240, 245, 255, ${0.95 * fadeFactor})`
+                        : `rgba(220, 240, 255, ${0.85 * fadeFactor})`;
+                    this.ctx.shadowBlur = 30;
+                    this.ctx.fill();
+                }
+            }
+
+            // More distinct, longer-lasting progress glow at the tip of the bolt
+            if (endProgress >= connProgress - 0.1 && connProgress < 0.98) {
+                // Assumption: rgbFlowColor was referenced here without being defined
+                // anywhere in this file; use the bolt's magenta base color so the tip
+                // matches the flow gradient.
+                const rgbFlowColor = { r: 255, g: 0, b: 255 };
+                const tipGlow = this.ctx.createRadialGradient(
+                    p2.x, p2.y, 0,
+                    p2.x, p2.y, 10
+                );
+                tipGlow.addColorStop(0, `rgba(${rgbFlowColor.r}, ${rgbFlowColor.g}, ${rgbFlowColor.b}, ${0.85 * fadeFactor})`);
+                tipGlow.addColorStop(0.5, `rgba(${rgbFlowColor.r}, ${rgbFlowColor.g}, ${rgbFlowColor.b}, ${0.4 * fadeFactor})`);
+                tipGlow.addColorStop(1, `rgba(${rgbFlowColor.r}, ${rgbFlowColor.g}, ${rgbFlowColor.b}, 0)`);
+
+                this.ctx.fillStyle = tipGlow;
+                this.ctx.beginPath();
+                this.ctx.arc(p2.x, p2.y, 10, 0, Math.PI * 2);
+                this.ctx.fill();
+            }
+
+            this.ctx.restore();
+        }
+    }
+
+    // Helper method to convert hex to RGB
+    hexToRgb(hex) {
+        // Remove # if present
+        hex = hex.replace(/^#/, '');
+
+        // Handle rgba hex format
+        let alpha = 1;
+        if (hex.length === 8) {
+            alpha = parseInt(hex.slice(6, 8), 16) / 255;
+            hex = hex.slice(0, 6);
+        }
+
+        // Parse hex values
+        const bigint = parseInt(hex, 16);
+        const r = (bigint >> 16) & 255;
+        const g = (bigint >> 8) & 255;
+        const b = bigint & 255;
+
+        return { r, g, b, a: alpha };
+    }
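+    // Usage example (illustrative): hexToRgb('#ff00ff') returns
+    // { r: 255, g: 0, b: 255, a: 1 }; an 8-digit value such as
+    // hexToRgb('#ff00ff80') additionally yields a ≈ 0.5 from the trailing
+    // alpha byte (0x80 / 255).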
+    // Cleanup method with a much gentler fade-out
+    destroy() {
+        // Start the gentle fade-out animation
+        this.isDestroying = true;
+
+        // Fade the canvas element out gently over a longer period
+        if (this.canvas) {
+            this.canvas.style.transition = 'opacity 5s ease-in-out'; // extended from 1.5 s to 5 s
+            this.canvas.style.opacity = '0';
+
+            // Wait for the transition to finish before releasing any resources
+            setTimeout(() => {
+                // Slow the animation down step by step instead of stopping it abruptly
+                const slowDownAnimation = () => {
+                    // Gradually slow the animation by reducing the frame rate
+                    if (this.animationFrameId) {
+                        cancelAnimationFrame(this.animationFrameId);
+
+                        // Animate with a decreasing frame rate
+                        setTimeout(() => {
+                            this.animationFrameId = requestAnimationFrame(this.animate.bind(this));
+
+                            // Stop completely after another 10 seconds
+                            if (this._destroyStartTime && (Date.now() - this._destroyStartTime > 10000)) {
+                                finalCleanup();
+                            } else {
+                                slowDownAnimation();
+                            }
+                        }, 500); // fixed 500 ms delay between frames
+                    }
+                };
+
+                // Final cleanup once the fade-out has fully completed
+                const finalCleanup = () => {
+                    // Stop the animation
+                    if (this.animationFrameId) {
+                        cancelAnimationFrame(this.animationFrameId);
+                        this.animationFrameId = null;
+                    }
+
+                    // Remove event listeners. Note: .bind() creates a new function, so
+                    // this call only detaches the listener if the same bound reference
+                    // was stored when the listener was registered.
+                    window.removeEventListener('resize', this.resizeCanvas.bind(this));
+
+                    // Remove the canvas element from the DOM once it has faded out
+                    if (this.canvas && this.canvas.parentNode) {
+                        this.canvas.parentNode.removeChild(this.canvas);
+                    }
+
+                    // Clean up WebGL resources
+                    if (this.gl) {
+                        this.gl.deleteBuffer(this.positionBuffer);
+                        this.gl.deleteBuffer(this.sizeBuffer);
+                        this.gl.deleteProgram(this.shaderProgram);
+                    }
+
+                    console.log('Neural network background deactivated successfully');
+                };
+
+                // Record the start time for the gradual slow-down
+                this._destroyStartTime = Date.now();
+
+                // Kick off the gradual slow-down
+                slowDownAnimation();
+            }, 5500); // wait slightly longer than the CSS transition
+        }
+    }
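+    // Teardown timeline (as implemented above): the canvas fades via a 5 s CSS
+    // transition, cleanup begins after 5.5 s, and the animation is then throttled
+    // in 500 ms steps until finalCleanup() runs at most ~10 s after that point.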
+    // New method to render the flowing animations with a more intense glow and purple gradient
+    renderFlowsWebGL(now, fadeoutFactor = 1.0) {
+        // For each flow, draw a glowing purple bolt that snakes along and spawns sparks
+        for (const flow of this.flows) {
+            const connection = flow.connection;
+            const fromNode = this.nodes[connection.from];
+            const toNode = this.nodes[connection.to];
+            const startProgress = flow.progress;
+            const endProgress = Math.min(1, startProgress + flow.length);
+            if (startProgress >= 1 || endProgress <= 0) continue;
+            const direction = flow.direction ? 1 : -1;
+            let p1, p2;
+            if (direction > 0) {
+                p1 = {
+                    x: fromNode.x + (toNode.x - fromNode.x) * startProgress,
+                    y: fromNode.y + (toNode.y - fromNode.y) * startProgress
+                };
+                p2 = {
+                    x: fromNode.x + (toNode.x - fromNode.x) * endProgress,
+                    y: fromNode.y + (toNode.y - fromNode.y) * endProgress
+                };
+            } else {
+                p1 = {
+                    x: toNode.x + (fromNode.x - toNode.x) * startProgress,
+                    y: toNode.y + (fromNode.y - toNode.y) * startProgress
+                };
+                p2 = {
+                    x: toNode.x + (fromNode.x - toNode.x) * endProgress,
+                    y: toNode.y + (fromNode.y - toNode.y) * endProgress
+                };
+            }
+
+            // Create snake-like motion via more pronounced zigzag patterns
+            const snakeSegments = 12; // more segments for smoother snaking
+            const snakeAmplitude = 12; // larger amplitude for a more visible snake motion
+            // Influence factor derived from the generation - higher generations wiggle more
+            const generationFactor = Math.min(1.5, (flow.generation || 1) / 10 + 0.8);
+            const zigzag = this.generateZigZagPoints(
+                p1, p2,
+                snakeSegments,
+                snakeAmplitude * generationFactor,
+                true // enable softer curves
+            );
+
+            // Purple gradient effect for the bolts - multi-pass rendering for an intense glow
+
+            // 1. Render outer glow - very wide and subtle
+            const outerGlowWidth = 6.0 * this.config.glowIntensity;
+            for (let i = 0; i < zigzag.length - 1; i++) {
+                const positions = new Float32Array([
+                    zigzag[i].x, zigzag[i].y,
+                    zigzag[i + 1].x, zigzag[i + 1].y
+                ]);
+                this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.positionBuffer);
+                this.gl.bufferData(this.gl.ARRAY_BUFFER, positions, this.gl.STATIC_DRAW);
+                this.gl.vertexAttribPointer(this.programInfo.attribLocations.vertexPosition, 2, this.gl.FLOAT, false, 0, 0);
+                this.gl.enableVertexAttribArray(this.programInfo.attribLocations.vertexPosition);
+                this.gl.disableVertexAttribArray(this.programInfo.attribLocations.pointSize);
+
+                // Outer glow in darker purple with low opacity
+                this.gl.uniform4f(
+                    this.programInfo.uniformLocations.color,
+                    0.45, 0.2, 0.7, 0.15 * fadeoutFactor // dark purple outer glow
+                );
+                this.gl.enable(this.gl.BLEND);
+                this.gl.blendFunc(this.gl.SRC_ALPHA, this.gl.ONE);
+                this.gl.lineWidth(outerGlowWidth);
+                this.gl.drawArrays(this.gl.LINES, 0, 2);
+            }
+
+            // 2. Middle glow layer - more opaque and focused
+            const middleGlowWidth = 3.5 * this.config.glowIntensity;
+            for (let i = 0; i < zigzag.length - 1; i++) {
+                const positions = new Float32Array([
+                    zigzag[i].x, zigzag[i].y,
+                    zigzag[i + 1].x, zigzag[i + 1].y
+                ]);
+                this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.positionBuffer);
+                this.gl.bufferData(this.gl.ARRAY_BUFFER, positions, this.gl.STATIC_DRAW);
+                this.gl.vertexAttribPointer(this.programInfo.attribLocations.vertexPosition, 2, this.gl.FLOAT, false, 0, 0);
+
+                // Middle glow in lighter purple with medium opacity
+                this.gl.uniform4f(
+                    this.programInfo.uniformLocations.color,
+                    0.65, 0.3, 0.85, 0.35 * fadeoutFactor // medium purple
+                );
+                this.gl.lineWidth(middleGlowWidth);
+                this.gl.drawArrays(this.gl.LINES, 0, 2);
+            }
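+            // Rendering note: blendFunc(SRC_ALPHA, ONE) is additive blending, so the
+            // wide outer pass, the narrower middle pass, and the thin core below stack
+            // into a bright center with a soft purple halo. Be aware that many WebGL
+            // implementations clamp lineWidth to 1, in which case the three passes
+            // differ only in color and alpha, not in width.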
+            // 3. Inner core - bright and vibrant
+            for (let i = 0; i < zigzag.length - 1; i++) {
+                const positions = new Float32Array([
+                    zigzag[i].x, zigzag[i].y,
+                    zigzag[i + 1].x, zigzag[i + 1].y
+                ]);
+                this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.positionBuffer);
+                this.gl.bufferData(this.gl.ARRAY_BUFFER, positions, this.gl.STATIC_DRAW);
+                this.gl.vertexAttribPointer(this.programInfo.attribLocations.vertexPosition, 2, this.gl.FLOAT, false, 0, 0);
+
+                // Inner core in radiant purple-white
+                this.gl.uniform4f(
+                    this.programInfo.uniformLocations.color,
+                    0.9, 0.7, 1.0, 0.8 * fadeoutFactor // whitish purple for an intense center
+                );
+                this.gl.lineWidth(1.6); // narrow core
+                this.gl.drawArrays(this.gl.LINES, 0, 2);
+            }
+
+            // Spawn a large number of more intense sparks
+            const sparkCount = this.config.sparkCount + Math.floor((flow.generation || 1) / 2); // more sparks for higher generations
+            const sparks = this.generateSparkPoints(zigzag, sparkCount);
+
+            // Render the sparks in several layers for a more intense glow
+            for (const spark of sparks) {
+                // 1. Outer glow of the sparks
+                this.gl.uniform4f(
+                    this.programInfo.uniformLocations.color,
+                    0.5, 0.2, 0.8, 0.3 * fadeoutFactor // purple glow with low opacity
+                );
+
+                // Set the spark position. Note: vertexAttribPointer reads whichever
+                // buffer is bound at call time, so the position pointer must be set
+                // here, while positionBuffer is still bound, not after sizeBuffer
+                // has been bound.
+                const sparkPos = new Float32Array([spark.x, spark.y]);
+                this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.positionBuffer);
+                this.gl.bufferData(this.gl.ARRAY_BUFFER, sparkPos, this.gl.STATIC_DRAW);
+                this.gl.vertexAttribPointer(this.programInfo.attribLocations.vertexPosition, 2, this.gl.FLOAT, false, 0, 0);
+                this.gl.enableVertexAttribArray(this.programInfo.attribLocations.vertexPosition);
+
+                // Larger glow around each spark
+                const sizes = new Float32Array([spark.size * this.config.sparkSize * 2.0]);
+                this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.sizeBuffer);
+                this.gl.bufferData(this.gl.ARRAY_BUFFER, sizes, this.gl.STATIC_DRAW);
+                this.gl.vertexAttribPointer(this.programInfo.attribLocations.pointSize, 1, this.gl.FLOAT, false, 0, 0);
+                this.gl.enableVertexAttribArray(this.programInfo.attribLocations.pointSize);
+
+                this.gl.drawArrays(this.gl.POINTS, 0, 1);
+
+                // 2. Middle layer of the sparks
+                this.gl.uniform4f(
+                    this.programInfo.uniformLocations.color,
+                    0.7, 0.4, 0.95, 0.6 * fadeoutFactor // medium purple, medium opacity
+                );
+
+                const middleSizes = new Float32Array([spark.size * this.config.sparkSize * 1.2]);
+                this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.sizeBuffer);
+                this.gl.bufferData(this.gl.ARRAY_BUFFER, middleSizes, this.gl.STATIC_DRAW);
+
+                this.gl.drawArrays(this.gl.POINTS, 0, 1);
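+                // The three POINTS draws per spark fake a radial falloff: a large dim
+                // purple disc, a medium brighter layer, and a small near-white core
+                // (below), each reusing the same position buffer with a different
+                // point size and color.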
+                // 3. Inner core of the sparks
+                this.gl.uniform4f(
+                    this.programInfo.uniformLocations.color,
+                    0.95, 0.85, 1.0, 0.9 * fadeoutFactor // almost white center for an intense glow
+                );
+
+                const innerSizes = new Float32Array([spark.size * this.config.sparkSize * 0.6]);
+                this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.sizeBuffer);
+                this.gl.bufferData(this.gl.ARRAY_BUFFER, innerSizes, this.gl.STATIC_DRAW);
+
+                this.gl.drawArrays(this.gl.POINTS, 0, 1);
+            }
+        }
+    }
+
+    // Helper: generate snake-like zigzag points, with an optional smoothing mode
+    generateZigZagPoints(start, end, segments = 5, amplitude = 8, smooth = false) {
+        const points = [start];
+        const mainAngle = Math.atan2(end.y - start.y, end.x - start.x);
+
+        // Compute the total length of the bolt
+        const totalDistance = Math.sqrt(
+            Math.pow(end.x - start.x, 2) +
+            Math.pow(end.y - start.y, 2)
+        );
+
+        // Dynamic amplitude based on distance
+        const baseAmplitude = amplitude * (totalDistance / 150);
+
+        // Wave for the snake-like effect
+        const waveFrequency = 2 + Math.random() * 1.5; // additional wave effect
+
+        // Previous offset, used for smoother transitions
+        let prevOffsetX = 0;
+        let prevOffsetY = 0;
+
+        for (let i = 1; i < segments; i++) {
+            const t = i / segments;
+            // Cubic easing for more natural motion
+            const easedT = smooth ? t*t*(3-2*t) : t;
+
+            const x = start.x + (end.x - start.x) * easedT;
+            const y = start.y + (end.y - start.y) * easedT;
+
+            // Snake motion via a sine wave that depends on both time and position
+            const wavePhase = waveFrequency * Math.PI * t + (Date.now() % 1000) / 1000 * Math.PI;
+            const waveAmplitude = Math.sin(wavePhase) * baseAmplitude;
+
+            // Direction perpendicular to the main direction, plus a little randomness
+            const perpendicularAngle = mainAngle + Math.PI/2 + (Math.random() * 0.3 - 0.15);
+
+            // For smoother transitions, interpolate between the previous and the new offset
+            let offsetX, offsetY;
+            if (smooth && i > 1) {
+                const newOffsetX = Math.cos(perpendicularAngle) * waveAmplitude;
+                const newOffsetY = Math.sin(perpendicularAngle) * waveAmplitude;
+
+                // Blend previous and new offset (30% previous, 70% new)
+                offsetX = prevOffsetX * 0.3 + newOffsetX * 0.7;
+                offsetY = prevOffsetY * 0.3 + newOffsetY * 0.7;
+
+                // Remember for the next point
+                prevOffsetX = offsetX;
+                prevOffsetY = offsetY;
+            } else {
+                offsetX = Math.cos(perpendicularAngle) * waveAmplitude;
+                offsetY = Math.sin(perpendicularAngle) * waveAmplitude;
+
+                prevOffsetX = offsetX;
+                prevOffsetY = offsetY;
+            }
+
+            points.push({
+                x: x + offsetX,
+                y: y + offsetY
+            });
+        }
+
+        points.push(end);
+        return points;
+    }
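+    // Easing note (worked example): the smooth path remaps t with t*t*(3 - 2*t)
+    // (smoothstep), e.g. t = 0.25 -> 0.15625 and t = 0.5 -> 0.5, so sample points
+    // cluster toward both endpoints and the bolt bends more gently near the nodes.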
+    // Helper: generate glowing spark points
+    generateSparkPoints(zigzag, sparkCount = 15) {
+        const sparks = [];
+        // More sparks for a more intense effect
+        const actualSparkCount = Math.min(sparkCount, zigzag.length * 3);
+
+        // Sparks at random spots along the bolt, with more variability
+        for (let i = 0; i < actualSparkCount; i++) {
+            // Pick a random zigzag segment, preferring corners
+            let segIndex;
+            if (Math.random() < 0.6 && zigzag.length > 3) {
+                // Place 60% of the sparks at corners (more sparks at turning points)
+                segIndex = 1 + Math.floor(Math.random() * (zigzag.length - 3));
+            } else {
+                // Distribute the rest uniformly
+                segIndex = Math.floor(Math.random() * (zigzag.length - 1));
+            }
+
+            // Determine the segment direction
+            const dx = zigzag[segIndex + 1].x - zigzag[segIndex].x;
+            const dy = zigzag[segIndex + 1].y - zigzag[segIndex].y;
+            const segmentAngle = Math.atan2(dy, dx);
+
+            // Random position along the segment, preferring the middle
+            let t;
+            if (Math.random() < 0.4) {
+                // 40% of the sparks sit closer to the middle of the segment
+                t = 0.3 + Math.random() * 0.4;
+            } else {
+                // Distribute the rest uniformly
+                t = Math.random();
+            }
+
+            const x = zigzag[segIndex].x + dx * t;
+            const y = zigzag[segIndex].y + dy * t;
+
+            // Dynamic offset for more intense sparks,
+            // tending perpendicular to the segment direction
+            const offsetAngle = segmentAngle + (Math.random() * Math.PI - Math.PI/2);
+            // Larger offset for sparks that fly further off the path
+            const offsetDistance = (0.8 + Math.random() * 2.0) * 6;
+
+            // Larger, more variable sparks with time-based pulsing
+            const baseSize = 0.8 + Math.random() * 1.0;
+            // Pulsing effect
+            const pulsePhase = (Date.now() % 1000) / 1000 * Math.PI * 2;
+            const pulseFactor = 0.8 + Math.sin(pulsePhase) * 0.2;
+
+            sparks.push({
+                x: x + Math.cos(offsetAngle) * offsetDistance,
+                y: y + Math.sin(offsetAngle) * offsetDistance,
+                size: baseSize * pulseFactor,
+                // Random angle for spark rotation
+                angle: Math.random() * Math.PI * 2
+            });
+        }
+
+        return sparks;
+    }
+}
+
+// Initialize when DOM is loaded
+document.addEventListener('DOMContentLoaded', () => {
+    // Short delay to ensure DOM is fully loaded
+    setTimeout(() => {
+        if (!window.neuralNetworkBackground) {
+            console.log('Creating Neural Network Background');
+            window.neuralNetworkBackground = new NeuralNetworkBackground();
+        }
+    }, 100);
+});
+
+// Re-initialize when page is fully loaded (for safety)
+window.addEventListener('load', () => {
+    if (!window.neuralNetworkBackground) {
+        console.log('Re-initializing Neural Network Background on full load');
+        window.neuralNetworkBackground = new NeuralNetworkBackground();
+    }
+});
+
+// Event listener to clean up when the window is closed
+window.addEventListener('beforeunload', function() {
+    if (window.neuralNetworkBackground) {
+        window.neuralNetworkBackground.destroy();
+    }
+});
diff --git a/static/style.css b/static/style.css
new file mode 100644
index 0000000..e7c6d52
--- /dev/null
+++ b/static/style.css
@@ -0,0 +1,508 @@
+/* Main Systades Styles - Dark Mystical Theme */
+
+/* Import Fonts */
+@import url('https://fonts.googleapis.com/css2?family=Inter:wght@300;400;500;600;700&family=JetBrains+Mono:wght@400;500;700&display=swap');
+
+/* Root Variables */
+:root {
+    /* Light Theme Colors */
+    --light-bg-primary: #f8fafc;
+    --light-bg-secondary: #f1f5f9;
+    --light-text-primary: #1e293b;
+    --light-text-secondary: #475569;
+    --light-accent-primary: #7c3aed;
+    --light-accent-secondary: #8b5cf6;
+    --light-border: #e2e8f0;
+
+    /* Dark Theme Colors */
+    --dark-bg-primary: #0a0e19;
+    --dark-bg-secondary: #111827;
+    --dark-text-primary: #f9fafb;
+    --dark-text-secondary: #e5e7eb;
+    --dark-accent-primary: #6d28d9;
+    --dark-accent-secondary: #8b5cf6;
+    --dark-border: #1f2937;
+
+    /* Common */
+    --font-sans: 'Inter', system-ui, -apple-system, sans-serif;
+    --font-mono: 'JetBrains Mono', monospace;
+    --shadow-sm: 0 1px 2px 0 rgba(0, 0, 0, 0.05);
+    --shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.1), 0 2px 4px -1px rgba(0, 0, 0, 0.06);
+    --shadow-md: 0 10px 15px -3px rgba(0, 0, 0, 0.1), 0 4px 6px -2px rgba(0, 0, 0, 0.05);
+    --shadow-lg: 0 20px 25px -5px rgba(0, 0, 0, 0.1), 0 10px 10px -5px rgba(0, 0, 0, 0.04);
+
+    /* Transitions */
+    --transition-fast: 150ms ease-in-out;
+    --transition-normal: 300ms ease-in-out;
+    --transition-slow: 500ms ease-in-out;
+}
+
+/* Base Elements */ +body { + font-family: var(--font-sans); + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; + transition: background-color var(--transition-normal), color var(--transition-normal); + background-color: transparent !important; /* Ensure background is transparent */ +} + +/* HTML root element should also be transparent */ +html { + background-color: transparent !important; +} + +html.dark { + background-color: transparent !important; +} + +/* Theme Specific - keep the color but remove background */ +body { + color: var(--light-text-primary); +} + +body.dark { + color: var(--dark-text-primary); + background-color: transparent; +} + +/* Ensure proper contrast in both modes */ +body:not(.dark) { + --text-primary: var(--light-text-primary); + --text-secondary: var(--light-text-secondary); + --bg-primary: var(--light-bg-primary); + --bg-secondary: var(--light-bg-secondary); +} + +body.dark { + --text-primary: var(--dark-text-primary); + --text-secondary: var(--dark-text-secondary); + --bg-primary: var(--dark-bg-primary); + --bg-secondary: var(--dark-bg-secondary); +} + +/* Typography */ +h1, h2, h3, h4, h5, h6 { + font-weight: 600; + line-height: 1.2; +} + +p { + line-height: 1.6; +} + +.gradient-text { + background-clip: text; + -webkit-background-clip: text; + color: transparent; + position: relative; +} + +body .gradient-text { + background-image: linear-gradient(135deg, var(--light-accent-primary), var(--light-accent-secondary)); +} + +body.dark .gradient-text { + background-image: linear-gradient(135deg, var(--dark-accent-primary), var(--dark-accent-secondary)); +} + +/* Subtle glow for dark mode gradient text */ +body.dark .gradient-text::after { + content: ""; + position: absolute; + top: 0; + left: 0; + width: 100%; + height: 100%; + filter: blur(8px); + opacity: 0.3; + background-image: inherit; + z-index: -1; + pointer-events: none; +} + +/* Containers and Layout */ +.container { + width: 100%; + margin-left: auto; + margin-right: auto; + padding-left: 1rem; + padding-right: 1rem; +} + +@media (min-width: 640px) { + .container { + max-width: 640px; + } +} + +@media (min-width: 768px) { + .container { + max-width: 768px; + } +} + +@media (min-width: 1024px) { + .container { + max-width: 1024px; + } +} + +@media (min-width: 1280px) { + .container { + max-width: 1280px; + } +} + +/* Glass Morphism */ +.glass-morphism { + backdrop-filter: blur(10px); + -webkit-backdrop-filter: blur(10px); +} + +body .glass-navbar-light { + background-color: rgba(255, 255, 255, 0.8); + border-color: rgba(226, 232, 240, 0.5); + box-shadow: 0 4px 30px rgba(0, 0, 0, 0.1); +} + +body.dark .glass-navbar-dark { + background-color: rgba(10, 14, 25, 0.8); + border-color: rgba(31, 41, 55, 0.5); + box-shadow: 0 4px 30px rgba(0, 0, 0, 0.3); +} + +/* Navigation */ +.nav-link { + padding: 0.5rem 0.75rem; + border-radius: 0.5rem; + transition: all var(--transition-normal); +} + +body .nav-link { + color: var(--light-text-secondary); +} + +body.dark .nav-link { + color: var(--dark-text-secondary); +} + +body .nav-link:hover { + color: var(--light-text-primary); + background-color: rgba(241, 245, 249, 0.5); +} + +body.dark .nav-link:hover { + color: var(--dark-text-primary); + background-color: rgba(31, 41, 55, 0.5); +} + +body .nav-link-light-active { + color: var(--light-accent-primary); + background-color: rgba(139, 92, 246, 0.1); +} + +body.dark .nav-link-active { + color: var(--dark-accent-secondary); + background-color: rgba(109, 40, 217, 0.15); +} + +/* Buttons */ +.btn { 
+ padding: 0.5rem 1rem; + border-radius: 0.5rem; + font-weight: 500; + transition: all var(--transition-normal); + display: inline-flex; + align-items: center; + justify-content: center; +} + +body .btn-primary { + background-color: var(--light-accent-primary); + color: white; +} + +body.dark .btn-primary { + background-color: var(--dark-accent-primary); + color: white; +} + +body .btn-primary:hover { + background-color: var(--light-accent-secondary); + box-shadow: 0 0 15px rgba(124, 58, 237, 0.3); +} + +body.dark .btn-primary:hover { + background-color: var(--dark-accent-secondary); + box-shadow: 0 0 15px rgba(109, 40, 217, 0.5); +} + +body .btn-secondary { + background-color: transparent; + border: 1px solid var(--light-border); + color: var(--light-text-primary); +} + +body.dark .btn-secondary { + background-color: transparent; + border: 1px solid var(--dark-border); + color: var(--dark-text-primary); +} + +body .btn-secondary:hover { + background-color: var(--light-bg-secondary); + border-color: var(--light-accent-secondary); +} + +body.dark .btn-secondary:hover { + background-color: var(--dark-bg-secondary); + border-color: var(--dark-accent-secondary); +} + +/* Cards */ +.card { + border-radius: 0.75rem; + overflow: hidden; + transition: all var(--transition-normal); +} + +body .card { + background-color: white; + border: 1px solid var(--light-border); + box-shadow: var(--shadow); +} + +body.dark .card { + background-color: var(--dark-bg-secondary); + border: 1px solid var(--dark-border); + box-shadow: var(--shadow); +} + +body .card:hover { + transform: translateY(-5px); + box-shadow: var(--shadow-md); +} + +body.dark .card:hover { + transform: translateY(-5px); + box-shadow: 0 10px 15px -3px rgba(0, 0, 0, 0.3), 0 4px 6px -2px rgba(0, 0, 0, 0.2); +} + +/* Form Elements */ +.form-input { + width: 100%; + padding: 0.5rem 0.75rem; + border-radius: 0.5rem; + transition: all var(--transition-normal); +} + +body .form-input { + background-color: white; + border: 1px solid var(--light-border); + color: var(--light-text-primary); +} + +body.dark .form-input { + background-color: var(--dark-bg-secondary); + border: 1px solid var(--dark-border); + color: var(--dark-text-primary); +} + +body .form-input:focus { + outline: none; + border-color: var(--light-accent-secondary); + box-shadow: 0 0 0 3px rgba(139, 92, 246, 0.15); +} + +body.dark .form-input:focus { + outline: none; + border-color: var(--dark-accent-secondary); + box-shadow: 0 0 0 3px rgba(139, 92, 246, 0.25); +} + +/* Animations */ +@keyframes fadeIn { + from { opacity: 0; } + to { opacity: 1; } +} + +.animate-fade-in { + animation: fadeIn 0.5s ease-out forwards; +} + +@keyframes float { + 0%, 100% { transform: translateY(0); } + 50% { transform: translateY(-10px); } +} + +.animate-float { + animation: float 5s ease-in-out infinite; +} + +@keyframes pulse { + 0%, 100% { opacity: 0.7; } + 50% { opacity: 1; } +} + +.animate-pulse { + animation: pulse 3s ease-in-out infinite; +} + +/* Utilities */ +.sr-only { + position: absolute; + width: 1px; + height: 1px; + padding: 0; + margin: -1px; + overflow: hidden; + clip: rect(0, 0, 0, 0); + white-space: nowrap; + border-width: 0; +} + +.shadow-elevation { + transition: box-shadow var(--transition-normal), transform var(--transition-normal); +} + +body .shadow-elevation { + box-shadow: var(--shadow); +} + +body.dark .shadow-elevation { + box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.3), 0 2px 4px -1px rgba(0, 0, 0, 0.2); +} + +body .shadow-elevation:hover { + box-shadow: var(--shadow-md); + 
transform: translateY(-2px); +} + +body.dark .shadow-elevation:hover { + box-shadow: 0 10px 15px -3px rgba(0, 0, 0, 0.4), 0 4px 6px -2px rgba(0, 0, 0, 0.3); + transform: translateY(-2px); +} + +/* Tooltips */ +.tooltip { + position: relative; +} + +.tooltip:hover::before { + content: attr(data-tooltip); + position: absolute; + bottom: 100%; + left: 50%; + transform: translateX(-50%); + padding: 0.25rem 0.5rem; + border-radius: 0.25rem; + font-size: 0.75rem; + white-space: nowrap; + z-index: 10; + opacity: 0; + animation: fadeIn 0.3s ease-out forwards; +} + +body .tooltip:hover::before { + background-color: var(--light-text-primary); + color: white; +} + +body.dark .tooltip:hover::before { + background-color: var(--dark-text-primary); + color: var(--dark-bg-primary); +} + +/* Mystical elements */ +.mystical-border { + position: relative; + overflow: hidden; +} + +.mystical-border::after { + content: ''; + position: absolute; + top: 0; + left: 0; + right: 0; + bottom: 0; + border: 1px solid; + border-radius: inherit; + pointer-events: none; + opacity: 0.3; + transition: opacity var(--transition-normal); +} + +body .mystical-border::after { + border-color: var(--light-accent-primary); +} + +body.dark .mystical-border::after { + border-color: var(--dark-accent-primary); +} + +.mystical-border:hover::after { + opacity: 0.6; +} + +/* Responsive Design Helpers */ +@media (max-width: 640px) { + .container { + padding-left: 1rem; + padding-right: 1rem; + } + + .hero-heading { + font-size: 2rem; + } + + .section-heading { + font-size: 1.5rem; + } +} + +/* Accessibility */ +:focus-visible { + outline: 2px solid; + outline-offset: 2px; +} + +body :focus-visible { + outline-color: var(--light-accent-primary); +} + +body.dark :focus-visible { + outline-color: var(--dark-accent-primary); +} + +/* Scrollbar styling */ +::-webkit-scrollbar { + width: 0.5rem; + height: 0.5rem; +} + +body ::-webkit-scrollbar-track { + background: var(--light-bg-secondary); +} + +body.dark ::-webkit-scrollbar-track { + background: var(--dark-bg-secondary); +} + +body ::-webkit-scrollbar-thumb { + background: var(--light-accent-primary); + border-radius: 0.25rem; +} + +body.dark ::-webkit-scrollbar-thumb { + background: var(--dark-accent-primary); + border-radius: 0.25rem; +} + +body ::-webkit-scrollbar-thumb:hover { + background: var(--light-accent-secondary); +} + +body.dark ::-webkit-scrollbar-thumb:hover { + background: var(--dark-accent-secondary); +} \ No newline at end of file diff --git a/static/three.min.js b/static/three.min.js new file mode 100644 index 0000000..61ce799 --- /dev/null +++ b/static/three.min.js @@ -0,0 +1,6 @@ +/** + * @license + * Copyright 2010-2021 Three.js Authors + * SPDX-License-Identifier: MIT + */ +!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports):"function"==typeof define&&define.amd?define(["exports"],e):e((t="undefined"!=typeof globalThis?globalThis:t||self).THREE={})}(this,(function(t){"use strict";const e="136",n=100,i=300,r=301,s=302,a=303,o=304,l=306,c=307,h=1e3,u=1001,d=1002,p=1003,m=1004,f=1005,g=1006,v=1007,y=1008,x=1009,_=1012,b=1014,M=1015,w=1016,S=1020,T=1022,E=1023,A=1026,L=1027,R=33776,C=33777,P=33778,D=33779,I=35840,N=35841,z=35842,B=35843,F=37492,O=37496,U=2300,H=2301,G=2302,k=2400,V=2401,W=2402,j=2500,q=2501,X=3e3,J=3001,Y=7680,Z=35044,Q=35048,K="300 es";class ${addEventListener(t,e){void 0===this._listeners&&(this._listeners={});const n=this._listeners;void 0===n[t]&&(n[t]=[]),-1===n[t].indexOf(e)&&n[t].push(e)}hasEventListener(t,e){if(void 
0===this._listeners)return!1;const n=this._listeners;return void 0!==n[t]&&-1!==n[t].indexOf(e)}removeEventListener(t,e){if(void 0===this._listeners)return;const n=this._listeners[t];if(void 0!==n){const t=n.indexOf(e);-1!==t&&n.splice(t,1)}}dispatchEvent(t){if(void 0===this._listeners)return;const e=this._listeners[t.type];if(void 0!==e){t.target=this;const n=e.slice(0);for(let e=0,i=n.length;e>8&255]+tt[t>>16&255]+tt[t>>24&255]+"-"+tt[255&e]+tt[e>>8&255]+"-"+tt[e>>16&15|64]+tt[e>>24&255]+"-"+tt[63&n|128]+tt[n>>8&255]+"-"+tt[n>>16&255]+tt[n>>24&255]+tt[255&i]+tt[i>>8&255]+tt[i>>16&255]+tt[i>>24&255]).toUpperCase()}function st(t,e,n){return Math.max(e,Math.min(n,t))}function at(t,e){return(t%e+e)%e}function ot(t,e,n){return(1-n)*t+n*e}function lt(t){return 0==(t&t-1)&&0!==t}function ct(t){return Math.pow(2,Math.ceil(Math.log(t)/Math.LN2))}function ht(t){return Math.pow(2,Math.floor(Math.log(t)/Math.LN2))}var ut=Object.freeze({__proto__:null,DEG2RAD:nt,RAD2DEG:it,generateUUID:rt,clamp:st,euclideanModulo:at,mapLinear:function(t,e,n,i,r){return i+(t-e)*(r-i)/(n-e)},inverseLerp:function(t,e,n){return t!==e?(n-t)/(e-t):0},lerp:ot,damp:function(t,e,n,i){return ot(t,e,1-Math.exp(-n*i))},pingpong:function(t,e=1){return e-Math.abs(at(t,2*e)-e)},smoothstep:function(t,e,n){return t<=e?0:t>=n?1:(t=(t-e)/(n-e))*t*(3-2*t)},smootherstep:function(t,e,n){return t<=e?0:t>=n?1:(t=(t-e)/(n-e))*t*t*(t*(6*t-15)+10)},randInt:function(t,e){return t+Math.floor(Math.random()*(e-t+1))},randFloat:function(t,e){return t+Math.random()*(e-t)},randFloatSpread:function(t){return t*(.5-Math.random())},seededRandom:function(t){return void 0!==t&&(et=t%2147483647),et=16807*et%2147483647,(et-1)/2147483646},degToRad:function(t){return t*nt},radToDeg:function(t){return t*it},isPowerOfTwo:lt,ceilPowerOfTwo:ct,floorPowerOfTwo:ht,setQuaternionFromProperEuler:function(t,e,n,i,r){const s=Math.cos,a=Math.sin,o=s(n/2),l=a(n/2),c=s((e+i)/2),h=a((e+i)/2),u=s((e-i)/2),d=a((e-i)/2),p=s((i-e)/2),m=a((i-e)/2);switch(r){case"XYX":t.set(o*h,l*u,l*d,o*c);break;case"YZY":t.set(l*d,o*h,l*u,o*c);break;case"ZXZ":t.set(l*u,l*d,o*h,o*c);break;case"XZX":t.set(o*h,l*m,l*p,o*c);break;case"YXY":t.set(l*p,o*h,l*m,o*c);break;case"ZYZ":t.set(l*m,l*p,o*h,o*c);break;default:console.warn("THREE.MathUtils: .setQuaternionFromProperEuler() encountered an unknown order: "+r)}}});class dt{constructor(t=0,e=0){this.x=t,this.y=e}get width(){return this.x}set width(t){this.x=t}get height(){return this.y}set height(t){this.y=t}set(t,e){return this.x=t,this.y=e,this}setScalar(t){return this.x=t,this.y=t,this}setX(t){return this.x=t,this}setY(t){return this.y=t,this}setComponent(t,e){switch(t){case 0:this.x=e;break;case 1:this.y=e;break;default:throw new Error("index is out of range: "+t)}return this}getComponent(t){switch(t){case 0:return this.x;case 1:return this.y;default:throw new Error("index is out of range: "+t)}}clone(){return new this.constructor(this.x,this.y)}copy(t){return this.x=t.x,this.y=t.y,this}add(t,e){return void 0!==e?(console.warn("THREE.Vector2: .add() now only accepts one argument. Use .addVectors( a, b ) instead."),this.addVectors(t,e)):(this.x+=t.x,this.y+=t.y,this)}addScalar(t){return this.x+=t,this.y+=t,this}addVectors(t,e){return this.x=t.x+e.x,this.y=t.y+e.y,this}addScaledVector(t,e){return this.x+=t.x*e,this.y+=t.y*e,this}sub(t,e){return void 0!==e?(console.warn("THREE.Vector2: .sub() now only accepts one argument. 
Use .subVectors( a, b ) instead."),this.subVectors(t,e)):(this.x-=t.x,this.y-=t.y,this)}subScalar(t){return this.x-=t,this.y-=t,this}subVectors(t,e){return this.x=t.x-e.x,this.y=t.y-e.y,this}multiply(t){return this.x*=t.x,this.y*=t.y,this}multiplyScalar(t){return this.x*=t,this.y*=t,this}divide(t){return this.x/=t.x,this.y/=t.y,this}divideScalar(t){return this.multiplyScalar(1/t)}applyMatrix3(t){const e=this.x,n=this.y,i=t.elements;return this.x=i[0]*e+i[3]*n+i[6],this.y=i[1]*e+i[4]*n+i[7],this}min(t){return this.x=Math.min(this.x,t.x),this.y=Math.min(this.y,t.y),this}max(t){return this.x=Math.max(this.x,t.x),this.y=Math.max(this.y,t.y),this}clamp(t,e){return this.x=Math.max(t.x,Math.min(e.x,this.x)),this.y=Math.max(t.y,Math.min(e.y,this.y)),this}clampScalar(t,e){return this.x=Math.max(t,Math.min(e,this.x)),this.y=Math.max(t,Math.min(e,this.y)),this}clampLength(t,e){const n=this.length();return this.divideScalar(n||1).multiplyScalar(Math.max(t,Math.min(e,n)))}floor(){return this.x=Math.floor(this.x),this.y=Math.floor(this.y),this}ceil(){return this.x=Math.ceil(this.x),this.y=Math.ceil(this.y),this}round(){return this.x=Math.round(this.x),this.y=Math.round(this.y),this}roundToZero(){return this.x=this.x<0?Math.ceil(this.x):Math.floor(this.x),this.y=this.y<0?Math.ceil(this.y):Math.floor(this.y),this}negate(){return this.x=-this.x,this.y=-this.y,this}dot(t){return this.x*t.x+this.y*t.y}cross(t){return this.x*t.y-this.y*t.x}lengthSq(){return this.x*this.x+this.y*this.y}length(){return Math.sqrt(this.x*this.x+this.y*this.y)}manhattanLength(){return Math.abs(this.x)+Math.abs(this.y)}normalize(){return this.divideScalar(this.length()||1)}angle(){return Math.atan2(-this.y,-this.x)+Math.PI}distanceTo(t){return Math.sqrt(this.distanceToSquared(t))}distanceToSquared(t){const e=this.x-t.x,n=this.y-t.y;return e*e+n*n}manhattanDistanceTo(t){return Math.abs(this.x-t.x)+Math.abs(this.y-t.y)}setLength(t){return this.normalize().multiplyScalar(t)}lerp(t,e){return this.x+=(t.x-this.x)*e,this.y+=(t.y-this.y)*e,this}lerpVectors(t,e,n){return this.x=t.x+(e.x-t.x)*n,this.y=t.y+(e.y-t.y)*n,this}equals(t){return t.x===this.x&&t.y===this.y}fromArray(t,e=0){return this.x=t[e],this.y=t[e+1],this}toArray(t=[],e=0){return t[e]=this.x,t[e+1]=this.y,t}fromBufferAttribute(t,e,n){return void 0!==n&&console.warn("THREE.Vector2: offset has been removed from .fromBufferAttribute()."),this.x=t.getX(e),this.y=t.getY(e),this}rotateAround(t,e){const n=Math.cos(e),i=Math.sin(e),r=this.x-t.x,s=this.y-t.y;return this.x=r*n-s*i+t.x,this.y=r*i+s*n+t.y,this}random(){return this.x=Math.random(),this.y=Math.random(),this}*[Symbol.iterator](){yield this.x,yield this.y}}dt.prototype.isVector2=!0;class pt{constructor(){this.elements=[1,0,0,0,1,0,0,0,1],arguments.length>0&&console.error("THREE.Matrix3: the constructor no longer reads arguments. 
use .set() instead.")}set(t,e,n,i,r,s,a,o,l){const c=this.elements;return c[0]=t,c[1]=i,c[2]=a,c[3]=e,c[4]=r,c[5]=o,c[6]=n,c[7]=s,c[8]=l,this}identity(){return this.set(1,0,0,0,1,0,0,0,1),this}copy(t){const e=this.elements,n=t.elements;return e[0]=n[0],e[1]=n[1],e[2]=n[2],e[3]=n[3],e[4]=n[4],e[5]=n[5],e[6]=n[6],e[7]=n[7],e[8]=n[8],this}extractBasis(t,e,n){return t.setFromMatrix3Column(this,0),e.setFromMatrix3Column(this,1),n.setFromMatrix3Column(this,2),this}setFromMatrix4(t){const e=t.elements;return this.set(e[0],e[4],e[8],e[1],e[5],e[9],e[2],e[6],e[10]),this}multiply(t){return this.multiplyMatrices(this,t)}premultiply(t){return this.multiplyMatrices(t,this)}multiplyMatrices(t,e){const n=t.elements,i=e.elements,r=this.elements,s=n[0],a=n[3],o=n[6],l=n[1],c=n[4],h=n[7],u=n[2],d=n[5],p=n[8],m=i[0],f=i[3],g=i[6],v=i[1],y=i[4],x=i[7],_=i[2],b=i[5],M=i[8];return r[0]=s*m+a*v+o*_,r[3]=s*f+a*y+o*b,r[6]=s*g+a*x+o*M,r[1]=l*m+c*v+h*_,r[4]=l*f+c*y+h*b,r[7]=l*g+c*x+h*M,r[2]=u*m+d*v+p*_,r[5]=u*f+d*y+p*b,r[8]=u*g+d*x+p*M,this}multiplyScalar(t){const e=this.elements;return e[0]*=t,e[3]*=t,e[6]*=t,e[1]*=t,e[4]*=t,e[7]*=t,e[2]*=t,e[5]*=t,e[8]*=t,this}determinant(){const t=this.elements,e=t[0],n=t[1],i=t[2],r=t[3],s=t[4],a=t[5],o=t[6],l=t[7],c=t[8];return e*s*c-e*a*l-n*r*c+n*a*o+i*r*l-i*s*o}invert(){const t=this.elements,e=t[0],n=t[1],i=t[2],r=t[3],s=t[4],a=t[5],o=t[6],l=t[7],c=t[8],h=c*s-a*l,u=a*o-c*r,d=l*r-s*o,p=e*h+n*u+i*d;if(0===p)return this.set(0,0,0,0,0,0,0,0,0);const m=1/p;return t[0]=h*m,t[1]=(i*l-c*n)*m,t[2]=(a*n-i*s)*m,t[3]=u*m,t[4]=(c*e-i*o)*m,t[5]=(i*r-a*e)*m,t[6]=d*m,t[7]=(n*o-l*e)*m,t[8]=(s*e-n*r)*m,this}transpose(){let t;const e=this.elements;return t=e[1],e[1]=e[3],e[3]=t,t=e[2],e[2]=e[6],e[6]=t,t=e[5],e[5]=e[7],e[7]=t,this}getNormalMatrix(t){return this.setFromMatrix4(t).invert().transpose()}transposeIntoArray(t){const e=this.elements;return t[0]=e[0],t[1]=e[3],t[2]=e[6],t[3]=e[1],t[4]=e[4],t[5]=e[7],t[6]=e[2],t[7]=e[5],t[8]=e[8],this}setUvTransform(t,e,n,i,r,s,a){const o=Math.cos(r),l=Math.sin(r);return this.set(n*o,n*l,-n*(o*s+l*a)+s+t,-i*l,i*o,-i*(-l*s+o*a)+a+e,0,0,1),this}scale(t,e){const n=this.elements;return n[0]*=t,n[3]*=t,n[6]*=t,n[1]*=e,n[4]*=e,n[7]*=e,this}rotate(t){const e=Math.cos(t),n=Math.sin(t),i=this.elements,r=i[0],s=i[3],a=i[6],o=i[1],l=i[4],c=i[7];return i[0]=e*r+n*o,i[3]=e*s+n*l,i[6]=e*a+n*c,i[1]=-n*r+e*o,i[4]=-n*s+e*l,i[7]=-n*a+e*c,this}translate(t,e){const n=this.elements;return n[0]+=t*n[2],n[3]+=t*n[5],n[6]+=t*n[8],n[1]+=e*n[2],n[4]+=e*n[5],n[7]+=e*n[8],this}equals(t){const e=this.elements,n=t.elements;for(let t=0;t<9;t++)if(e[t]!==n[t])return!1;return!0}fromArray(t,e=0){for(let n=0;n<9;n++)this.elements[n]=t[n+e];return this}toArray(t=[],e=0){const n=this.elements;return t[e]=n[0],t[e+1]=n[1],t[e+2]=n[2],t[e+3]=n[3],t[e+4]=n[4],t[e+5]=n[5],t[e+6]=n[6],t[e+7]=n[7],t[e+8]=n[8],t}clone(){return(new this.constructor).fromArray(this.elements)}}function mt(t){if(0===t.length)return-1/0;let e=t[0];for(let n=1,i=t.length;ne&&(e=t[n]);return e}pt.prototype.isMatrix3=!0;const ft={Int8Array:Int8Array,Uint8Array:Uint8Array,Uint8ClampedArray:Uint8ClampedArray,Int16Array:Int16Array,Uint16Array:Uint16Array,Int32Array:Int32Array,Uint32Array:Uint32Array,Float32Array:Float32Array,Float64Array:Float64Array};function gt(t,e){return new ft[t](e)}function vt(t){return document.createElementNS("http://www.w3.org/1999/xhtml",t)}let yt;class xt{static getDataURL(t){if(/^data:/i.test(t.src))return t.src;if("undefined"==typeof HTMLCanvasElement)return t.src;let e;if(t instanceof 
HTMLCanvasElement)e=t;else{void 0===yt&&(yt=vt("canvas")),yt.width=t.width,yt.height=t.height;const n=yt.getContext("2d");t instanceof ImageData?n.putImageData(t,0,0):n.drawImage(t,0,0,t.width,t.height),e=yt}return e.width>2048||e.height>2048?(console.warn("THREE.ImageUtils.getDataURL: Image converted to jpg for performance reasons",t),e.toDataURL("image/jpeg",.6)):e.toDataURL("image/png")}}let _t=0;class bt extends ${constructor(t=bt.DEFAULT_IMAGE,e=bt.DEFAULT_MAPPING,n=1001,i=1001,r=1006,s=1008,a=1023,o=1009,l=1,c=3e3){super(),Object.defineProperty(this,"id",{value:_t++}),this.uuid=rt(),this.name="",this.image=t,this.mipmaps=[],this.mapping=e,this.wrapS=n,this.wrapT=i,this.magFilter=r,this.minFilter=s,this.anisotropy=l,this.format=a,this.internalFormat=null,this.type=o,this.offset=new dt(0,0),this.repeat=new dt(1,1),this.center=new dt(0,0),this.rotation=0,this.matrixAutoUpdate=!0,this.matrix=new pt,this.generateMipmaps=!0,this.premultiplyAlpha=!1,this.flipY=!0,this.unpackAlignment=4,this.encoding=c,this.userData={},this.version=0,this.onUpdate=null,this.isRenderTargetTexture=!1}updateMatrix(){this.matrix.setUvTransform(this.offset.x,this.offset.y,this.repeat.x,this.repeat.y,this.rotation,this.center.x,this.center.y)}clone(){return(new this.constructor).copy(this)}copy(t){return this.name=t.name,this.image=t.image,this.mipmaps=t.mipmaps.slice(0),this.mapping=t.mapping,this.wrapS=t.wrapS,this.wrapT=t.wrapT,this.magFilter=t.magFilter,this.minFilter=t.minFilter,this.anisotropy=t.anisotropy,this.format=t.format,this.internalFormat=t.internalFormat,this.type=t.type,this.offset.copy(t.offset),this.repeat.copy(t.repeat),this.center.copy(t.center),this.rotation=t.rotation,this.matrixAutoUpdate=t.matrixAutoUpdate,this.matrix.copy(t.matrix),this.generateMipmaps=t.generateMipmaps,this.premultiplyAlpha=t.premultiplyAlpha,this.flipY=t.flipY,this.unpackAlignment=t.unpackAlignment,this.encoding=t.encoding,this.userData=JSON.parse(JSON.stringify(t.userData)),this}toJSON(t){const e=void 0===t||"string"==typeof t;if(!e&&void 0!==t.textures[this.uuid])return t.textures[this.uuid];const n={metadata:{version:4.5,type:"Texture",generator:"Texture.toJSON"},uuid:this.uuid,name:this.name,mapping:this.mapping,repeat:[this.repeat.x,this.repeat.y],offset:[this.offset.x,this.offset.y],center:[this.center.x,this.center.y],rotation:this.rotation,wrap:[this.wrapS,this.wrapT],format:this.format,type:this.type,encoding:this.encoding,minFilter:this.minFilter,magFilter:this.magFilter,anisotropy:this.anisotropy,flipY:this.flipY,premultiplyAlpha:this.premultiplyAlpha,unpackAlignment:this.unpackAlignment};if(void 0!==this.image){const i=this.image;if(void 0===i.uuid&&(i.uuid=rt()),!e&&void 0===t.images[i.uuid]){let e;if(Array.isArray(i)){e=[];for(let t=0,n=i.length;t1)switch(this.wrapS){case h:t.x=t.x-Math.floor(t.x);break;case u:t.x=t.x<0?0:1;break;case d:1===Math.abs(Math.floor(t.x)%2)?t.x=Math.ceil(t.x)-t.x:t.x=t.x-Math.floor(t.x)}if(t.y<0||t.y>1)switch(this.wrapT){case h:t.y=t.y-Math.floor(t.y);break;case u:t.y=t.y<0?0:1;break;case d:1===Math.abs(Math.floor(t.y)%2)?t.y=Math.ceil(t.y)-t.y:t.y=t.y-Math.floor(t.y)}return this.flipY&&(t.y=1-t.y),t}set needsUpdate(t){!0===t&&this.version++}}function Mt(t){return"undefined"!=typeof HTMLImageElement&&t instanceof HTMLImageElement||"undefined"!=typeof HTMLCanvasElement&&t instanceof HTMLCanvasElement||"undefined"!=typeof ImageBitmap&&t instanceof 
ImageBitmap?xt.getDataURL(t):t.data?{data:Array.prototype.slice.call(t.data),width:t.width,height:t.height,type:t.data.constructor.name}:(console.warn("THREE.Texture: Unable to serialize Texture."),{})}bt.DEFAULT_IMAGE=void 0,bt.DEFAULT_MAPPING=i,bt.prototype.isTexture=!0;class wt{constructor(t=0,e=0,n=0,i=1){this.x=t,this.y=e,this.z=n,this.w=i}get width(){return this.z}set width(t){this.z=t}get height(){return this.w}set height(t){this.w=t}set(t,e,n,i){return this.x=t,this.y=e,this.z=n,this.w=i,this}setScalar(t){return this.x=t,this.y=t,this.z=t,this.w=t,this}setX(t){return this.x=t,this}setY(t){return this.y=t,this}setZ(t){return this.z=t,this}setW(t){return this.w=t,this}setComponent(t,e){switch(t){case 0:this.x=e;break;case 1:this.y=e;break;case 2:this.z=e;break;case 3:this.w=e;break;default:throw new Error("index is out of range: "+t)}return this}getComponent(t){switch(t){case 0:return this.x;case 1:return this.y;case 2:return this.z;case 3:return this.w;default:throw new Error("index is out of range: "+t)}}clone(){return new this.constructor(this.x,this.y,this.z,this.w)}copy(t){return this.x=t.x,this.y=t.y,this.z=t.z,this.w=void 0!==t.w?t.w:1,this}add(t,e){return void 0!==e?(console.warn("THREE.Vector4: .add() now only accepts one argument. Use .addVectors( a, b ) instead."),this.addVectors(t,e)):(this.x+=t.x,this.y+=t.y,this.z+=t.z,this.w+=t.w,this)}addScalar(t){return this.x+=t,this.y+=t,this.z+=t,this.w+=t,this}addVectors(t,e){return this.x=t.x+e.x,this.y=t.y+e.y,this.z=t.z+e.z,this.w=t.w+e.w,this}addScaledVector(t,e){return this.x+=t.x*e,this.y+=t.y*e,this.z+=t.z*e,this.w+=t.w*e,this}sub(t,e){return void 0!==e?(console.warn("THREE.Vector4: .sub() now only accepts one argument. Use .subVectors( a, b ) instead."),this.subVectors(t,e)):(this.x-=t.x,this.y-=t.y,this.z-=t.z,this.w-=t.w,this)}subScalar(t){return this.x-=t,this.y-=t,this.z-=t,this.w-=t,this}subVectors(t,e){return this.x=t.x-e.x,this.y=t.y-e.y,this.z=t.z-e.z,this.w=t.w-e.w,this}multiply(t){return this.x*=t.x,this.y*=t.y,this.z*=t.z,this.w*=t.w,this}multiplyScalar(t){return this.x*=t,this.y*=t,this.z*=t,this.w*=t,this}applyMatrix4(t){const e=this.x,n=this.y,i=this.z,r=this.w,s=t.elements;return this.x=s[0]*e+s[4]*n+s[8]*i+s[12]*r,this.y=s[1]*e+s[5]*n+s[9]*i+s[13]*r,this.z=s[2]*e+s[6]*n+s[10]*i+s[14]*r,this.w=s[3]*e+s[7]*n+s[11]*i+s[15]*r,this}divideScalar(t){return this.multiplyScalar(1/t)}setAxisAngleFromQuaternion(t){this.w=2*Math.acos(t.w);const e=Math.sqrt(1-t.w*t.w);return e<1e-4?(this.x=1,this.y=0,this.z=0):(this.x=t.x/e,this.y=t.y/e,this.z=t.z/e),this}setAxisAngleFromRotationMatrix(t){let e,n,i,r;const s=.01,a=.1,o=t.elements,l=o[0],c=o[4],h=o[8],u=o[1],d=o[5],p=o[9],m=o[2],f=o[6],g=o[10];if(Math.abs(c-u)o&&t>v?tv?o=0?1:-1,i=1-e*e;if(i>Number.EPSILON){const r=Math.sqrt(i),s=Math.atan2(r,e*n);t=Math.sin(t*s)/r,a=Math.sin(a*s)/r}const r=a*n;if(o=o*t+u*r,l=l*t+d*r,c=c*t+p*r,h=h*t+m*r,t===1-a){const t=1/Math.sqrt(o*o+l*l+c*c+h*h);o*=t,l*=t,c*=t,h*=t}}t[e]=o,t[e+1]=l,t[e+2]=c,t[e+3]=h}static multiplyQuaternionsFlat(t,e,n,i,r,s){const a=n[i],o=n[i+1],l=n[i+2],c=n[i+3],h=r[s],u=r[s+1],d=r[s+2],p=r[s+3];return t[e]=a*p+c*h+o*d-l*u,t[e+1]=o*p+c*u+l*h-a*d,t[e+2]=l*p+c*d+a*u-o*h,t[e+3]=c*p-a*h-o*u-l*d,t}get x(){return this._x}set x(t){this._x=t,this._onChangeCallback()}get y(){return this._y}set y(t){this._y=t,this._onChangeCallback()}get z(){return this._z}set z(t){this._z=t,this._onChangeCallback()}get w(){return this._w}set w(t){this._w=t,this._onChangeCallback()}set(t,e,n,i){return 
this._x=t,this._y=e,this._z=n,this._w=i,this._onChangeCallback(),this}clone(){return new this.constructor(this._x,this._y,this._z,this._w)}copy(t){return this._x=t.x,this._y=t.y,this._z=t.z,this._w=t.w,this._onChangeCallback(),this}setFromEuler(t,e){if(!t||!t.isEuler)throw new Error("THREE.Quaternion: .setFromEuler() now expects an Euler rotation rather than a Vector3 and order.");const n=t._x,i=t._y,r=t._z,s=t._order,a=Math.cos,o=Math.sin,l=a(n/2),c=a(i/2),h=a(r/2),u=o(n/2),d=o(i/2),p=o(r/2);switch(s){case"XYZ":this._x=u*c*h+l*d*p,this._y=l*d*h-u*c*p,this._z=l*c*p+u*d*h,this._w=l*c*h-u*d*p;break;case"YXZ":this._x=u*c*h+l*d*p,this._y=l*d*h-u*c*p,this._z=l*c*p-u*d*h,this._w=l*c*h+u*d*p;break;case"ZXY":this._x=u*c*h-l*d*p,this._y=l*d*h+u*c*p,this._z=l*c*p+u*d*h,this._w=l*c*h-u*d*p;break;case"ZYX":this._x=u*c*h-l*d*p,this._y=l*d*h+u*c*p,this._z=l*c*p-u*d*h,this._w=l*c*h+u*d*p;break;case"YZX":this._x=u*c*h+l*d*p,this._y=l*d*h+u*c*p,this._z=l*c*p-u*d*h,this._w=l*c*h-u*d*p;break;case"XZY":this._x=u*c*h-l*d*p,this._y=l*d*h-u*c*p,this._z=l*c*p+u*d*h,this._w=l*c*h+u*d*p;break;default:console.warn("THREE.Quaternion: .setFromEuler() encountered an unknown order: "+s)}return!1!==e&&this._onChangeCallback(),this}setFromAxisAngle(t,e){const n=e/2,i=Math.sin(n);return this._x=t.x*i,this._y=t.y*i,this._z=t.z*i,this._w=Math.cos(n),this._onChangeCallback(),this}setFromRotationMatrix(t){const e=t.elements,n=e[0],i=e[4],r=e[8],s=e[1],a=e[5],o=e[9],l=e[2],c=e[6],h=e[10],u=n+a+h;if(u>0){const t=.5/Math.sqrt(u+1);this._w=.25/t,this._x=(c-o)*t,this._y=(r-l)*t,this._z=(s-i)*t}else if(n>a&&n>h){const t=2*Math.sqrt(1+n-a-h);this._w=(c-o)/t,this._x=.25*t,this._y=(i+s)/t,this._z=(r+l)/t}else if(a>h){const t=2*Math.sqrt(1+a-n-h);this._w=(r-l)/t,this._x=(i+s)/t,this._y=.25*t,this._z=(o+c)/t}else{const t=2*Math.sqrt(1+h-n-a);this._w=(s-i)/t,this._x=(r+l)/t,this._y=(o+c)/t,this._z=.25*t}return this._onChangeCallback(),this}setFromUnitVectors(t,e){let n=t.dot(e)+1;return nMath.abs(t.z)?(this._x=-t.y,this._y=t.x,this._z=0,this._w=n):(this._x=0,this._y=-t.z,this._z=t.y,this._w=n)):(this._x=t.y*e.z-t.z*e.y,this._y=t.z*e.x-t.x*e.z,this._z=t.x*e.y-t.y*e.x,this._w=n),this.normalize()}angleTo(t){return 2*Math.acos(Math.abs(st(this.dot(t),-1,1)))}rotateTowards(t,e){const n=this.angleTo(t);if(0===n)return this;const i=Math.min(1,e/n);return this.slerp(t,i),this}identity(){return this.set(0,0,0,1)}invert(){return this.conjugate()}conjugate(){return this._x*=-1,this._y*=-1,this._z*=-1,this._onChangeCallback(),this}dot(t){return this._x*t._x+this._y*t._y+this._z*t._z+this._w*t._w}lengthSq(){return this._x*this._x+this._y*this._y+this._z*this._z+this._w*this._w}length(){return Math.sqrt(this._x*this._x+this._y*this._y+this._z*this._z+this._w*this._w)}normalize(){let t=this.length();return 0===t?(this._x=0,this._y=0,this._z=0,this._w=1):(t=1/t,this._x=this._x*t,this._y=this._y*t,this._z=this._z*t,this._w=this._w*t),this._onChangeCallback(),this}multiply(t,e){return void 0!==e?(console.warn("THREE.Quaternion: .multiply() now only accepts one argument. 
Use .multiplyQuaternions( a, b ) instead."),this.multiplyQuaternions(t,e)):this.multiplyQuaternions(this,t)}premultiply(t){return this.multiplyQuaternions(t,this)}multiplyQuaternions(t,e){const n=t._x,i=t._y,r=t._z,s=t._w,a=e._x,o=e._y,l=e._z,c=e._w;return this._x=n*c+s*a+i*l-r*o,this._y=i*c+s*o+r*a-n*l,this._z=r*c+s*l+n*o-i*a,this._w=s*c-n*a-i*o-r*l,this._onChangeCallback(),this}slerp(t,e){if(0===e)return this;if(1===e)return this.copy(t);const n=this._x,i=this._y,r=this._z,s=this._w;let a=s*t._w+n*t._x+i*t._y+r*t._z;if(a<0?(this._w=-t._w,this._x=-t._x,this._y=-t._y,this._z=-t._z,a=-a):this.copy(t),a>=1)return this._w=s,this._x=n,this._y=i,this._z=r,this;const o=1-a*a;if(o<=Number.EPSILON){const t=1-e;return this._w=t*s+e*this._w,this._x=t*n+e*this._x,this._y=t*i+e*this._y,this._z=t*r+e*this._z,this.normalize(),this._onChangeCallback(),this}const l=Math.sqrt(o),c=Math.atan2(l,a),h=Math.sin((1-e)*c)/l,u=Math.sin(e*c)/l;return this._w=s*h+this._w*u,this._x=n*h+this._x*u,this._y=i*h+this._y*u,this._z=r*h+this._z*u,this._onChangeCallback(),this}slerpQuaternions(t,e,n){this.copy(t).slerp(e,n)}random(){const t=Math.random(),e=Math.sqrt(1-t),n=Math.sqrt(t),i=2*Math.PI*Math.random(),r=2*Math.PI*Math.random();return this.set(e*Math.cos(i),n*Math.sin(r),n*Math.cos(r),e*Math.sin(i))}equals(t){return t._x===this._x&&t._y===this._y&&t._z===this._z&&t._w===this._w}fromArray(t,e=0){return this._x=t[e],this._y=t[e+1],this._z=t[e+2],this._w=t[e+3],this._onChangeCallback(),this}toArray(t=[],e=0){return t[e]=this._x,t[e+1]=this._y,t[e+2]=this._z,t[e+3]=this._w,t}fromBufferAttribute(t,e){return this._x=t.getX(e),this._y=t.getY(e),this._z=t.getZ(e),this._w=t.getW(e),this}_onChange(t){return this._onChangeCallback=t,this}_onChangeCallback(){}}At.prototype.isQuaternion=!0;class Lt{constructor(t=0,e=0,n=0){this.x=t,this.y=e,this.z=n}set(t,e,n){return void 0===n&&(n=this.z),this.x=t,this.y=e,this.z=n,this}setScalar(t){return this.x=t,this.y=t,this.z=t,this}setX(t){return this.x=t,this}setY(t){return this.y=t,this}setZ(t){return this.z=t,this}setComponent(t,e){switch(t){case 0:this.x=e;break;case 1:this.y=e;break;case 2:this.z=e;break;default:throw new Error("index is out of range: "+t)}return this}getComponent(t){switch(t){case 0:return this.x;case 1:return this.y;case 2:return this.z;default:throw new Error("index is out of range: "+t)}}clone(){return new this.constructor(this.x,this.y,this.z)}copy(t){return this.x=t.x,this.y=t.y,this.z=t.z,this}add(t,e){return void 0!==e?(console.warn("THREE.Vector3: .add() now only accepts one argument. Use .addVectors( a, b ) instead."),this.addVectors(t,e)):(this.x+=t.x,this.y+=t.y,this.z+=t.z,this)}addScalar(t){return this.x+=t,this.y+=t,this.z+=t,this}addVectors(t,e){return this.x=t.x+e.x,this.y=t.y+e.y,this.z=t.z+e.z,this}addScaledVector(t,e){return this.x+=t.x*e,this.y+=t.y*e,this.z+=t.z*e,this}sub(t,e){return void 0!==e?(console.warn("THREE.Vector3: .sub() now only accepts one argument. Use .subVectors( a, b ) instead."),this.subVectors(t,e)):(this.x-=t.x,this.y-=t.y,this.z-=t.z,this)}subScalar(t){return this.x-=t,this.y-=t,this.z-=t,this}subVectors(t,e){return this.x=t.x-e.x,this.y=t.y-e.y,this.z=t.z-e.z,this}multiply(t,e){return void 0!==e?(console.warn("THREE.Vector3: .multiply() now only accepts one argument. 
Use .multiplyVectors( a, b ) instead."),this.multiplyVectors(t,e)):(this.x*=t.x,this.y*=t.y,this.z*=t.z,this)}multiplyScalar(t){return this.x*=t,this.y*=t,this.z*=t,this}multiplyVectors(t,e){return this.x=t.x*e.x,this.y=t.y*e.y,this.z=t.z*e.z,this}applyEuler(t){return t&&t.isEuler||console.error("THREE.Vector3: .applyEuler() now expects an Euler rotation rather than a Vector3 and order."),this.applyQuaternion(Ct.setFromEuler(t))}applyAxisAngle(t,e){return this.applyQuaternion(Ct.setFromAxisAngle(t,e))}applyMatrix3(t){const e=this.x,n=this.y,i=this.z,r=t.elements;return this.x=r[0]*e+r[3]*n+r[6]*i,this.y=r[1]*e+r[4]*n+r[7]*i,this.z=r[2]*e+r[5]*n+r[8]*i,this}applyNormalMatrix(t){return this.applyMatrix3(t).normalize()}applyMatrix4(t){const e=this.x,n=this.y,i=this.z,r=t.elements,s=1/(r[3]*e+r[7]*n+r[11]*i+r[15]);return this.x=(r[0]*e+r[4]*n+r[8]*i+r[12])*s,this.y=(r[1]*e+r[5]*n+r[9]*i+r[13])*s,this.z=(r[2]*e+r[6]*n+r[10]*i+r[14])*s,this}applyQuaternion(t){const e=this.x,n=this.y,i=this.z,r=t.x,s=t.y,a=t.z,o=t.w,l=o*e+s*i-a*n,c=o*n+a*e-r*i,h=o*i+r*n-s*e,u=-r*e-s*n-a*i;return this.x=l*o+u*-r+c*-a-h*-s,this.y=c*o+u*-s+h*-r-l*-a,this.z=h*o+u*-a+l*-s-c*-r,this}project(t){return this.applyMatrix4(t.matrixWorldInverse).applyMatrix4(t.projectionMatrix)}unproject(t){return this.applyMatrix4(t.projectionMatrixInverse).applyMatrix4(t.matrixWorld)}transformDirection(t){const e=this.x,n=this.y,i=this.z,r=t.elements;return this.x=r[0]*e+r[4]*n+r[8]*i,this.y=r[1]*e+r[5]*n+r[9]*i,this.z=r[2]*e+r[6]*n+r[10]*i,this.normalize()}divide(t){return this.x/=t.x,this.y/=t.y,this.z/=t.z,this}divideScalar(t){return this.multiplyScalar(1/t)}min(t){return this.x=Math.min(this.x,t.x),this.y=Math.min(this.y,t.y),this.z=Math.min(this.z,t.z),this}max(t){return this.x=Math.max(this.x,t.x),this.y=Math.max(this.y,t.y),this.z=Math.max(this.z,t.z),this}clamp(t,e){return this.x=Math.max(t.x,Math.min(e.x,this.x)),this.y=Math.max(t.y,Math.min(e.y,this.y)),this.z=Math.max(t.z,Math.min(e.z,this.z)),this}clampScalar(t,e){return this.x=Math.max(t,Math.min(e,this.x)),this.y=Math.max(t,Math.min(e,this.y)),this.z=Math.max(t,Math.min(e,this.z)),this}clampLength(t,e){const n=this.length();return this.divideScalar(n||1).multiplyScalar(Math.max(t,Math.min(e,n)))}floor(){return this.x=Math.floor(this.x),this.y=Math.floor(this.y),this.z=Math.floor(this.z),this}ceil(){return this.x=Math.ceil(this.x),this.y=Math.ceil(this.y),this.z=Math.ceil(this.z),this}round(){return this.x=Math.round(this.x),this.y=Math.round(this.y),this.z=Math.round(this.z),this}roundToZero(){return this.x=this.x<0?Math.ceil(this.x):Math.floor(this.x),this.y=this.y<0?Math.ceil(this.y):Math.floor(this.y),this.z=this.z<0?Math.ceil(this.z):Math.floor(this.z),this}negate(){return this.x=-this.x,this.y=-this.y,this.z=-this.z,this}dot(t){return this.x*t.x+this.y*t.y+this.z*t.z}lengthSq(){return this.x*this.x+this.y*this.y+this.z*this.z}length(){return Math.sqrt(this.x*this.x+this.y*this.y+this.z*this.z)}manhattanLength(){return Math.abs(this.x)+Math.abs(this.y)+Math.abs(this.z)}normalize(){return this.divideScalar(this.length()||1)}setLength(t){return this.normalize().multiplyScalar(t)}lerp(t,e){return this.x+=(t.x-this.x)*e,this.y+=(t.y-this.y)*e,this.z+=(t.z-this.z)*e,this}lerpVectors(t,e,n){return this.x=t.x+(e.x-t.x)*n,this.y=t.y+(e.y-t.y)*n,this.z=t.z+(e.z-t.z)*n,this}cross(t,e){return void 0!==e?(console.warn("THREE.Vector3: .cross() now only accepts one argument. 
[three.js bundle, continued: the rest of THREE.Vector3 (cross/crossVectors, projectOnVector/projectOnPlane, reflect, angleTo, distance queries, spherical/cylindrical and matrix-column setters, random/randomDirection) plus the bounding-volume and picking math: THREE.Box3 (setFrom* builders, expand/contains/intersects tests, union/intersect, applyMatrix4), THREE.Sphere, and THREE.Ray (closest-point queries, intersectSphere/intersectPlane/intersectBox/intersectTriangle).]
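Box3 (`Pt`), Sphere (`Zt`) and Ray (`re`) together carry the picking math that Mesh.raycast builds on later in the bundle: a cheap bounding-sphere test first, then a box test, then exact triangle intersection. A minimal sketch under the same assumed import:

```js
import * as THREE from 'three';

// A unit cube centred at the origin, and a ray fired at it along -Z.
const box = new THREE.Box3(
  new THREE.Vector3(-0.5, -0.5, -0.5),
  new THREE.Vector3(0.5, 0.5, 0.5)
);
const ray = new THREE.Ray(new THREE.Vector3(0, 0, 5), new THREE.Vector3(0, 0, -1));

// intersectBox() writes the entry point into the target vector, or returns null.
const hit = new THREE.Vector3();
if (ray.intersectBox(box, hit) !== null) {
  console.log(hit); // (0, 0, 0.5): the near face of the cube
}

// The cheaper sphere pre-test used before exact triangle intersection:
const sphere = box.getBoundingSphere(new THREE.Sphere());
console.log(ray.intersectsSphere(sphere)); // true
```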
[three.js bundle, continued: THREE.Matrix4 (set/copy, basis extraction, rotation matrices from Euler angles, quaternions, or axis-angle, multiplyMatrices, determinant, transpose, invert, scale, compose/decompose, makePerspective/makeOrthographic), THREE.Euler (order-aware conversion to and from rotation matrices and quaternions), the THREE.Layers bitmask, the THREE.Object3D scene-graph base class with its JSON serialization, and the THREE.Triangle statics (normals, barycentric coordinates, containment tests).]
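Matrix4 (`se`) ties the earlier classes together: compose() builds a translate-rotate-scale transform from a Vector3 position, a Quaternion rotation and a Vector3 scale, and decompose() inverts that. A sketch, same assumed import:

```js
import * as THREE from 'three';

// compose() packs translation, rotation and scale into one 4x4 matrix;
// decompose() recovers the three parts from such a matrix.
const position = new THREE.Vector3(1, 2, 3);
const rotation = new THREE.Quaternion().setFromEuler(new THREE.Euler(0, Math.PI / 2, 0, 'XYZ'));
const scale = new THREE.Vector3(2, 2, 2);

const m = new THREE.Matrix4().compose(position, rotation, scale);

const p = new THREE.Vector3();
const q = new THREE.Quaternion();
const s = new THREE.Vector3();
m.decompose(p, q, s); // p ≈ (1, 2, 3), q ≈ rotation, s ≈ (2, 2, 2)
```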
[three.js bundle, continued: the THREE.Triangle instance methods (area, midpoint, normal, UV interpolation, closest point on triangle), the THREE.Material base class (blending, depth/stencil state, clipping planes, setValues, JSON serialization, copy/dispose), the CSS named-color table, sRGB/linear conversion helpers, and THREE.Color (hex, RGB, HSL, and CSS-string parsing plus color-space conversion).]
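Color (`Ze`) accepts hex numbers, CSS `rgb()`/`hsl()` strings, short and long hex strings, and the named-color table above, all normalized to 0..1 float channels. A sketch (Color.equals() sits in a stretch of the class not visible in this excerpt and is assumed here; same assumed import):

```js
import * as THREE from 'three';

// The same orange through three of the accepted notations.
const a = new THREE.Color(0xff8800);
const b = new THREE.Color('rgb(255, 136, 0)');
const c = new THREE.Color('#f80'); // 3-digit hex is expanded per channel

console.log(a.getHexString());          // "ff8800"
console.log(a.equals(b), a.equals(c));  // true true
```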
[three.js bundle, continued: THREE.BufferGeometry (index and vertex attributes, groups, transforms, bounding volumes, morph attributes, JSON round-tripping), THREE.Mesh including its raycast implementation, THREE.BoxGeometry, THREE.ShaderMaterial, and the camera family: THREE.Camera, THREE.PerspectiveCamera (fov/film helpers, view offsets), THREE.CubeCamera, THREE.CubeTexture, and THREE.WebGLCubeRenderTarget.]
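Putting the geometry, material, mesh and camera classes from this stretch together, a minimal (render-less) scene setup looks roughly as below. Mesh construction and the position/lookAt helpers come from the Object3D base class summarized earlier; the shaders are deliberately trivial:

```js
import * as THREE from 'three';

const geometry = new THREE.BoxGeometry(1, 1, 1); // width, height, depth
const material = new THREE.ShaderMaterial({
  // Minimal passthrough shaders, just to exercise the class.
  vertexShader:
    'void main() { gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0); }',
  fragmentShader: 'void main() { gl_FragColor = vec4(1.0, 0.5, 0.0, 1.0); }',
});
const mesh = new THREE.Mesh(geometry, material);

// PerspectiveCamera(fov in degrees, aspect, near, far); the defaults visible
// in the class above are (50, 1, 0.1, 2000).
const camera = new THREE.PerspectiveCamera(50, 16 / 9, 0.1, 2000);
camera.position.set(0, 0, 5);
camera.lookAt(mesh.position);
```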
[three.js bundle, continued: THREE.WebGLCubeRenderTarget (including fromEquirectangularTexture for converting equirectangular environment maps to cubemaps), THREE.Plane, THREE.Frustum, internal animation-loop and WebGL buffer-attribute helpers, and the start of the GLSL ShaderChunk table (BRDF and sheen lighting functions, bump mapping, clipping planes, vertex colors, common defines, cube-UV environment sampling); the excerpt ends mid-chunk.]
m0 ) / ( r0 - r1 ) + m0;\n\t\t} else if ( roughness >= r4 ) {\n\t\t\tmip = ( r1 - roughness ) * ( m4 - m1 ) / ( r1 - r4 ) + m1;\n\t\t} else if ( roughness >= r5 ) {\n\t\t\tmip = ( r4 - roughness ) * ( m5 - m4 ) / ( r4 - r5 ) + m4;\n\t\t} else if ( roughness >= r6 ) {\n\t\t\tmip = ( r5 - roughness ) * ( m6 - m5 ) / ( r5 - r6 ) + m5;\n\t\t} else {\n\t\t\tmip = - 2.0 * log2( 1.16 * roughness );\t\t}\n\t\treturn mip;\n\t}\n\tvec4 textureCubeUV( sampler2D envMap, vec3 sampleDir, float roughness ) {\n\t\tfloat mip = clamp( roughnessToMip( roughness ), m0, cubeUV_maxMipLevel );\n\t\tfloat mipF = fract( mip );\n\t\tfloat mipInt = floor( mip );\n\t\tvec3 color0 = bilinearCubeUV( envMap, sampleDir, mipInt );\n\t\tif ( mipF == 0.0 ) {\n\t\t\treturn vec4( color0, 1.0 );\n\t\t} else {\n\t\t\tvec3 color1 = bilinearCubeUV( envMap, sampleDir, mipInt + 1.0 );\n\t\t\treturn vec4( mix( color0, color1, mipF ), 1.0 );\n\t\t}\n\t}\n#endif",defaultnormal_vertex:"vec3 transformedNormal = objectNormal;\n#ifdef USE_INSTANCING\n\tmat3 m = mat3( instanceMatrix );\n\ttransformedNormal /= vec3( dot( m[ 0 ], m[ 0 ] ), dot( m[ 1 ], m[ 1 ] ), dot( m[ 2 ], m[ 2 ] ) );\n\ttransformedNormal = m * transformedNormal;\n#endif\ntransformedNormal = normalMatrix * transformedNormal;\n#ifdef FLIP_SIDED\n\ttransformedNormal = - transformedNormal;\n#endif\n#ifdef USE_TANGENT\n\tvec3 transformedTangent = ( modelViewMatrix * vec4( objectTangent, 0.0 ) ).xyz;\n\t#ifdef FLIP_SIDED\n\t\ttransformedTangent = - transformedTangent;\n\t#endif\n#endif",displacementmap_pars_vertex:"#ifdef USE_DISPLACEMENTMAP\n\tuniform sampler2D displacementMap;\n\tuniform float displacementScale;\n\tuniform float displacementBias;\n#endif",displacementmap_vertex:"#ifdef USE_DISPLACEMENTMAP\n\ttransformed += normalize( objectNormal ) * ( texture2D( displacementMap, vUv ).x * displacementScale + displacementBias );\n#endif",emissivemap_fragment:"#ifdef USE_EMISSIVEMAP\n\tvec4 emissiveColor = texture2D( emissiveMap, vUv );\n\temissiveColor.rgb = emissiveMapTexelToLinear( emissiveColor ).rgb;\n\ttotalEmissiveRadiance *= emissiveColor.rgb;\n#endif",emissivemap_pars_fragment:"#ifdef USE_EMISSIVEMAP\n\tuniform sampler2D emissiveMap;\n#endif",encodings_fragment:"gl_FragColor = linearToOutputTexel( gl_FragColor );",encodings_pars_fragment:"vec4 LinearToLinear( in vec4 value ) {\n\treturn value;\n}\nvec4 sRGBToLinear( in vec4 value ) {\n\treturn vec4( mix( pow( value.rgb * 0.9478672986 + vec3( 0.0521327014 ), vec3( 2.4 ) ), value.rgb * 0.0773993808, vec3( lessThanEqual( value.rgb, vec3( 0.04045 ) ) ) ), value.a );\n}\nvec4 LinearTosRGB( in vec4 value ) {\n\treturn vec4( mix( pow( value.rgb, vec3( 0.41666 ) ) * 1.055 - vec3( 0.055 ), value.rgb * 12.92, vec3( lessThanEqual( value.rgb, vec3( 0.0031308 ) ) ) ), value.a );\n}",envmap_fragment:"#ifdef USE_ENVMAP\n\t#ifdef ENV_WORLDPOS\n\t\tvec3 cameraToFrag;\n\t\tif ( isOrthographic ) {\n\t\t\tcameraToFrag = normalize( vec3( - viewMatrix[ 0 ][ 2 ], - viewMatrix[ 1 ][ 2 ], - viewMatrix[ 2 ][ 2 ] ) );\n\t\t} else {\n\t\t\tcameraToFrag = normalize( vWorldPosition - cameraPosition );\n\t\t}\n\t\tvec3 worldNormal = inverseTransformDirection( normal, viewMatrix );\n\t\t#ifdef ENVMAP_MODE_REFLECTION\n\t\t\tvec3 reflectVec = reflect( cameraToFrag, worldNormal );\n\t\t#else\n\t\t\tvec3 reflectVec = refract( cameraToFrag, worldNormal, refractionRatio );\n\t\t#endif\n\t#else\n\t\tvec3 reflectVec = vReflect;\n\t#endif\n\t#ifdef ENVMAP_TYPE_CUBE\n\t\tvec4 envColor = textureCube( envMap, vec3( flipEnvMap * reflectVec.x, reflectVec.yz ) 
);\n\t\tenvColor = envMapTexelToLinear( envColor );\n\t#elif defined( ENVMAP_TYPE_CUBE_UV )\n\t\tvec4 envColor = textureCubeUV( envMap, reflectVec, 0.0 );\n\t#else\n\t\tvec4 envColor = vec4( 0.0 );\n\t#endif\n\t#ifdef ENVMAP_BLENDING_MULTIPLY\n\t\toutgoingLight = mix( outgoingLight, outgoingLight * envColor.xyz, specularStrength * reflectivity );\n\t#elif defined( ENVMAP_BLENDING_MIX )\n\t\toutgoingLight = mix( outgoingLight, envColor.xyz, specularStrength * reflectivity );\n\t#elif defined( ENVMAP_BLENDING_ADD )\n\t\toutgoingLight += envColor.xyz * specularStrength * reflectivity;\n\t#endif\n#endif",envmap_common_pars_fragment:"#ifdef USE_ENVMAP\n\tuniform float envMapIntensity;\n\tuniform float flipEnvMap;\n\t#ifdef ENVMAP_TYPE_CUBE\n\t\tuniform samplerCube envMap;\n\t#else\n\t\tuniform sampler2D envMap;\n\t#endif\n\t\n#endif",envmap_pars_fragment:"#ifdef USE_ENVMAP\n\tuniform float reflectivity;\n\t#if defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( PHONG )\n\t\t#define ENV_WORLDPOS\n\t#endif\n\t#ifdef ENV_WORLDPOS\n\t\tvarying vec3 vWorldPosition;\n\t\tuniform float refractionRatio;\n\t#else\n\t\tvarying vec3 vReflect;\n\t#endif\n#endif",envmap_pars_vertex:"#ifdef USE_ENVMAP\n\t#if defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) ||defined( PHONG )\n\t\t#define ENV_WORLDPOS\n\t#endif\n\t#ifdef ENV_WORLDPOS\n\t\t\n\t\tvarying vec3 vWorldPosition;\n\t#else\n\t\tvarying vec3 vReflect;\n\t\tuniform float refractionRatio;\n\t#endif\n#endif",envmap_physical_pars_fragment:"#if defined( USE_ENVMAP )\n\t#ifdef ENVMAP_MODE_REFRACTION\n\t\tuniform float refractionRatio;\n\t#endif\n\tvec3 getIBLIrradiance( const in vec3 normal ) {\n\t\t#if defined( ENVMAP_TYPE_CUBE_UV )\n\t\t\tvec3 worldNormal = inverseTransformDirection( normal, viewMatrix );\n\t\t\tvec4 envMapColor = textureCubeUV( envMap, worldNormal, 1.0 );\n\t\t\treturn PI * envMapColor.rgb * envMapIntensity;\n\t\t#else\n\t\t\treturn vec3( 0.0 );\n\t\t#endif\n\t}\n\tvec3 getIBLRadiance( const in vec3 viewDir, const in vec3 normal, const in float roughness ) {\n\t\t#if defined( ENVMAP_TYPE_CUBE_UV )\n\t\t\tvec3 reflectVec;\n\t\t\t#ifdef ENVMAP_MODE_REFLECTION\n\t\t\t\treflectVec = reflect( - viewDir, normal );\n\t\t\t\treflectVec = normalize( mix( reflectVec, normal, roughness * roughness) );\n\t\t\t#else\n\t\t\t\treflectVec = refract( - viewDir, normal, refractionRatio );\n\t\t\t#endif\n\t\t\treflectVec = inverseTransformDirection( reflectVec, viewMatrix );\n\t\t\tvec4 envMapColor = textureCubeUV( envMap, reflectVec, roughness );\n\t\t\treturn envMapColor.rgb * envMapIntensity;\n\t\t#else\n\t\t\treturn vec3( 0.0 );\n\t\t#endif\n\t}\n#endif",envmap_vertex:"#ifdef USE_ENVMAP\n\t#ifdef ENV_WORLDPOS\n\t\tvWorldPosition = worldPosition.xyz;\n\t#else\n\t\tvec3 cameraToVertex;\n\t\tif ( isOrthographic ) {\n\t\t\tcameraToVertex = normalize( vec3( - viewMatrix[ 0 ][ 2 ], - viewMatrix[ 1 ][ 2 ], - viewMatrix[ 2 ][ 2 ] ) );\n\t\t} else {\n\t\t\tcameraToVertex = normalize( worldPosition.xyz - cameraPosition );\n\t\t}\n\t\tvec3 worldNormal = inverseTransformDirection( transformedNormal, viewMatrix );\n\t\t#ifdef ENVMAP_MODE_REFLECTION\n\t\t\tvReflect = reflect( cameraToVertex, worldNormal );\n\t\t#else\n\t\t\tvReflect = refract( cameraToVertex, worldNormal, refractionRatio );\n\t\t#endif\n\t#endif\n#endif",fog_vertex:"#ifdef USE_FOG\n\tvFogDepth = - mvPosition.z;\n#endif",fog_pars_vertex:"#ifdef USE_FOG\n\tvarying float vFogDepth;\n#endif",fog_fragment:"#ifdef USE_FOG\n\t#ifdef FOG_EXP2\n\t\tfloat fogFactor = 1.0 - exp( - fogDensity * 
fogDensity * vFogDepth * vFogDepth );\n\t#else\n\t\tfloat fogFactor = smoothstep( fogNear, fogFar, vFogDepth );\n\t#endif\n\tgl_FragColor.rgb = mix( gl_FragColor.rgb, fogColor, fogFactor );\n#endif",fog_pars_fragment:"#ifdef USE_FOG\n\tuniform vec3 fogColor;\n\tvarying float vFogDepth;\n\t#ifdef FOG_EXP2\n\t\tuniform float fogDensity;\n\t#else\n\t\tuniform float fogNear;\n\t\tuniform float fogFar;\n\t#endif\n#endif",gradientmap_pars_fragment:"#ifdef USE_GRADIENTMAP\n\tuniform sampler2D gradientMap;\n#endif\nvec3 getGradientIrradiance( vec3 normal, vec3 lightDirection ) {\n\tfloat dotNL = dot( normal, lightDirection );\n\tvec2 coord = vec2( dotNL * 0.5 + 0.5, 0.0 );\n\t#ifdef USE_GRADIENTMAP\n\t\treturn vec3( texture2D( gradientMap, coord ).r );\n\t#else\n\t\treturn ( coord.x < 0.7 ) ? vec3( 0.7 ) : vec3( 1.0 );\n\t#endif\n}",lightmap_fragment:"#ifdef USE_LIGHTMAP\n\tvec4 lightMapTexel = texture2D( lightMap, vUv2 );\n\tvec3 lightMapIrradiance = lightMapTexelToLinear( lightMapTexel ).rgb * lightMapIntensity;\n\t#ifndef PHYSICALLY_CORRECT_LIGHTS\n\t\tlightMapIrradiance *= PI;\n\t#endif\n\treflectedLight.indirectDiffuse += lightMapIrradiance;\n#endif",lightmap_pars_fragment:"#ifdef USE_LIGHTMAP\n\tuniform sampler2D lightMap;\n\tuniform float lightMapIntensity;\n#endif",lights_lambert_vertex:"vec3 diffuse = vec3( 1.0 );\nGeometricContext geometry;\ngeometry.position = mvPosition.xyz;\ngeometry.normal = normalize( transformedNormal );\ngeometry.viewDir = ( isOrthographic ) ? vec3( 0, 0, 1 ) : normalize( -mvPosition.xyz );\nGeometricContext backGeometry;\nbackGeometry.position = geometry.position;\nbackGeometry.normal = -geometry.normal;\nbackGeometry.viewDir = geometry.viewDir;\nvLightFront = vec3( 0.0 );\nvIndirectFront = vec3( 0.0 );\n#ifdef DOUBLE_SIDED\n\tvLightBack = vec3( 0.0 );\n\tvIndirectBack = vec3( 0.0 );\n#endif\nIncidentLight directLight;\nfloat dotNL;\nvec3 directLightColor_Diffuse;\nvIndirectFront += getAmbientLightIrradiance( ambientLightColor );\nvIndirectFront += getLightProbeIrradiance( lightProbe, geometry.normal );\n#ifdef DOUBLE_SIDED\n\tvIndirectBack += getAmbientLightIrradiance( ambientLightColor );\n\tvIndirectBack += getLightProbeIrradiance( lightProbe, backGeometry.normal );\n#endif\n#if NUM_POINT_LIGHTS > 0\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_POINT_LIGHTS; i ++ ) {\n\t\tgetPointLightInfo( pointLights[ i ], geometry, directLight );\n\t\tdotNL = dot( geometry.normal, directLight.direction );\n\t\tdirectLightColor_Diffuse = directLight.color;\n\t\tvLightFront += saturate( dotNL ) * directLightColor_Diffuse;\n\t\t#ifdef DOUBLE_SIDED\n\t\t\tvLightBack += saturate( - dotNL ) * directLightColor_Diffuse;\n\t\t#endif\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if NUM_SPOT_LIGHTS > 0\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_SPOT_LIGHTS; i ++ ) {\n\t\tgetSpotLightInfo( spotLights[ i ], geometry, directLight );\n\t\tdotNL = dot( geometry.normal, directLight.direction );\n\t\tdirectLightColor_Diffuse = directLight.color;\n\t\tvLightFront += saturate( dotNL ) * directLightColor_Diffuse;\n\t\t#ifdef DOUBLE_SIDED\n\t\t\tvLightBack += saturate( - dotNL ) * directLightColor_Diffuse;\n\t\t#endif\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if NUM_DIR_LIGHTS > 0\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_DIR_LIGHTS; i ++ ) {\n\t\tgetDirectionalLightInfo( directionalLights[ i ], geometry, directLight );\n\t\tdotNL = dot( geometry.normal, directLight.direction );\n\t\tdirectLightColor_Diffuse = directLight.color;\n\t\tvLightFront += saturate( 
dotNL ) * directLightColor_Diffuse;\n\t\t#ifdef DOUBLE_SIDED\n\t\t\tvLightBack += saturate( - dotNL ) * directLightColor_Diffuse;\n\t\t#endif\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if NUM_HEMI_LIGHTS > 0\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_HEMI_LIGHTS; i ++ ) {\n\t\tvIndirectFront += getHemisphereLightIrradiance( hemisphereLights[ i ], geometry.normal );\n\t\t#ifdef DOUBLE_SIDED\n\t\t\tvIndirectBack += getHemisphereLightIrradiance( hemisphereLights[ i ], backGeometry.normal );\n\t\t#endif\n\t}\n\t#pragma unroll_loop_end\n#endif",lights_pars_begin:"uniform bool receiveShadow;\nuniform vec3 ambientLightColor;\nuniform vec3 lightProbe[ 9 ];\nvec3 shGetIrradianceAt( in vec3 normal, in vec3 shCoefficients[ 9 ] ) {\n\tfloat x = normal.x, y = normal.y, z = normal.z;\n\tvec3 result = shCoefficients[ 0 ] * 0.886227;\n\tresult += shCoefficients[ 1 ] * 2.0 * 0.511664 * y;\n\tresult += shCoefficients[ 2 ] * 2.0 * 0.511664 * z;\n\tresult += shCoefficients[ 3 ] * 2.0 * 0.511664 * x;\n\tresult += shCoefficients[ 4 ] * 2.0 * 0.429043 * x * y;\n\tresult += shCoefficients[ 5 ] * 2.0 * 0.429043 * y * z;\n\tresult += shCoefficients[ 6 ] * ( 0.743125 * z * z - 0.247708 );\n\tresult += shCoefficients[ 7 ] * 2.0 * 0.429043 * x * z;\n\tresult += shCoefficients[ 8 ] * 0.429043 * ( x * x - y * y );\n\treturn result;\n}\nvec3 getLightProbeIrradiance( const in vec3 lightProbe[ 9 ], const in vec3 normal ) {\n\tvec3 worldNormal = inverseTransformDirection( normal, viewMatrix );\n\tvec3 irradiance = shGetIrradianceAt( worldNormal, lightProbe );\n\treturn irradiance;\n}\nvec3 getAmbientLightIrradiance( const in vec3 ambientLightColor ) {\n\tvec3 irradiance = ambientLightColor;\n\treturn irradiance;\n}\nfloat getDistanceAttenuation( const in float lightDistance, const in float cutoffDistance, const in float decayExponent ) {\n\t#if defined ( PHYSICALLY_CORRECT_LIGHTS )\n\t\tfloat distanceFalloff = 1.0 / max( pow( lightDistance, decayExponent ), 0.01 );\n\t\tif ( cutoffDistance > 0.0 ) {\n\t\t\tdistanceFalloff *= pow2( saturate( 1.0 - pow4( lightDistance / cutoffDistance ) ) );\n\t\t}\n\t\treturn distanceFalloff;\n\t#else\n\t\tif ( cutoffDistance > 0.0 && decayExponent > 0.0 ) {\n\t\t\treturn pow( saturate( - lightDistance / cutoffDistance + 1.0 ), decayExponent );\n\t\t}\n\t\treturn 1.0;\n\t#endif\n}\nfloat getSpotAttenuation( const in float coneCosine, const in float penumbraCosine, const in float angleCosine ) {\n\treturn smoothstep( coneCosine, penumbraCosine, angleCosine );\n}\n#if NUM_DIR_LIGHTS > 0\n\tstruct DirectionalLight {\n\t\tvec3 direction;\n\t\tvec3 color;\n\t};\n\tuniform DirectionalLight directionalLights[ NUM_DIR_LIGHTS ];\n\tvoid getDirectionalLightInfo( const in DirectionalLight directionalLight, const in GeometricContext geometry, out IncidentLight light ) {\n\t\tlight.color = directionalLight.color;\n\t\tlight.direction = directionalLight.direction;\n\t\tlight.visible = true;\n\t}\n#endif\n#if NUM_POINT_LIGHTS > 0\n\tstruct PointLight {\n\t\tvec3 position;\n\t\tvec3 color;\n\t\tfloat distance;\n\t\tfloat decay;\n\t};\n\tuniform PointLight pointLights[ NUM_POINT_LIGHTS ];\n\tvoid getPointLightInfo( const in PointLight pointLight, const in GeometricContext geometry, out IncidentLight light ) {\n\t\tvec3 lVector = pointLight.position - geometry.position;\n\t\tlight.direction = normalize( lVector );\n\t\tfloat lightDistance = length( lVector );\n\t\tlight.color = pointLight.color;\n\t\tlight.color *= getDistanceAttenuation( lightDistance, pointLight.distance, pointLight.decay 
);\n\t\tlight.visible = ( light.color != vec3( 0.0 ) );\n\t}\n#endif\n#if NUM_SPOT_LIGHTS > 0\n\tstruct SpotLight {\n\t\tvec3 position;\n\t\tvec3 direction;\n\t\tvec3 color;\n\t\tfloat distance;\n\t\tfloat decay;\n\t\tfloat coneCos;\n\t\tfloat penumbraCos;\n\t};\n\tuniform SpotLight spotLights[ NUM_SPOT_LIGHTS ];\n\tvoid getSpotLightInfo( const in SpotLight spotLight, const in GeometricContext geometry, out IncidentLight light ) {\n\t\tvec3 lVector = spotLight.position - geometry.position;\n\t\tlight.direction = normalize( lVector );\n\t\tfloat angleCos = dot( light.direction, spotLight.direction );\n\t\tfloat spotAttenuation = getSpotAttenuation( spotLight.coneCos, spotLight.penumbraCos, angleCos );\n\t\tif ( spotAttenuation > 0.0 ) {\n\t\t\tfloat lightDistance = length( lVector );\n\t\t\tlight.color = spotLight.color * spotAttenuation;\n\t\t\tlight.color *= getDistanceAttenuation( lightDistance, spotLight.distance, spotLight.decay );\n\t\t\tlight.visible = ( light.color != vec3( 0.0 ) );\n\t\t} else {\n\t\t\tlight.color = vec3( 0.0 );\n\t\t\tlight.visible = false;\n\t\t}\n\t}\n#endif\n#if NUM_RECT_AREA_LIGHTS > 0\n\tstruct RectAreaLight {\n\t\tvec3 color;\n\t\tvec3 position;\n\t\tvec3 halfWidth;\n\t\tvec3 halfHeight;\n\t};\n\tuniform sampler2D ltc_1;\tuniform sampler2D ltc_2;\n\tuniform RectAreaLight rectAreaLights[ NUM_RECT_AREA_LIGHTS ];\n#endif\n#if NUM_HEMI_LIGHTS > 0\n\tstruct HemisphereLight {\n\t\tvec3 direction;\n\t\tvec3 skyColor;\n\t\tvec3 groundColor;\n\t};\n\tuniform HemisphereLight hemisphereLights[ NUM_HEMI_LIGHTS ];\n\tvec3 getHemisphereLightIrradiance( const in HemisphereLight hemiLight, const in vec3 normal ) {\n\t\tfloat dotNL = dot( normal, hemiLight.direction );\n\t\tfloat hemiDiffuseWeight = 0.5 * dotNL + 0.5;\n\t\tvec3 irradiance = mix( hemiLight.groundColor, hemiLight.skyColor, hemiDiffuseWeight );\n\t\treturn irradiance;\n\t}\n#endif",lights_toon_fragment:"ToonMaterial material;\nmaterial.diffuseColor = diffuseColor.rgb;",lights_toon_pars_fragment:"varying vec3 vViewPosition;\nstruct ToonMaterial {\n\tvec3 diffuseColor;\n};\nvoid RE_Direct_Toon( const in IncidentLight directLight, const in GeometricContext geometry, const in ToonMaterial material, inout ReflectedLight reflectedLight ) {\n\tvec3 irradiance = getGradientIrradiance( geometry.normal, directLight.direction ) * directLight.color;\n\treflectedLight.directDiffuse += irradiance * BRDF_Lambert( material.diffuseColor );\n}\nvoid RE_IndirectDiffuse_Toon( const in vec3 irradiance, const in GeometricContext geometry, const in ToonMaterial material, inout ReflectedLight reflectedLight ) {\n\treflectedLight.indirectDiffuse += irradiance * BRDF_Lambert( material.diffuseColor );\n}\n#define RE_Direct\t\t\t\tRE_Direct_Toon\n#define RE_IndirectDiffuse\t\tRE_IndirectDiffuse_Toon\n#define Material_LightProbeLOD( material )\t(0)",lights_phong_fragment:"BlinnPhongMaterial material;\nmaterial.diffuseColor = diffuseColor.rgb;\nmaterial.specularColor = specular;\nmaterial.specularShininess = shininess;\nmaterial.specularStrength = specularStrength;",lights_phong_pars_fragment:"varying vec3 vViewPosition;\nstruct BlinnPhongMaterial {\n\tvec3 diffuseColor;\n\tvec3 specularColor;\n\tfloat specularShininess;\n\tfloat specularStrength;\n};\nvoid RE_Direct_BlinnPhong( const in IncidentLight directLight, const in GeometricContext geometry, const in BlinnPhongMaterial material, inout ReflectedLight reflectedLight ) {\n\tfloat dotNL = saturate( dot( geometry.normal, directLight.direction ) );\n\tvec3 irradiance = dotNL * 
directLight.color;\n\treflectedLight.directDiffuse += irradiance * BRDF_Lambert( material.diffuseColor );\n\treflectedLight.directSpecular += irradiance * BRDF_BlinnPhong( directLight.direction, geometry.viewDir, geometry.normal, material.specularColor, material.specularShininess ) * material.specularStrength;\n}\nvoid RE_IndirectDiffuse_BlinnPhong( const in vec3 irradiance, const in GeometricContext geometry, const in BlinnPhongMaterial material, inout ReflectedLight reflectedLight ) {\n\treflectedLight.indirectDiffuse += irradiance * BRDF_Lambert( material.diffuseColor );\n}\n#define RE_Direct\t\t\t\tRE_Direct_BlinnPhong\n#define RE_IndirectDiffuse\t\tRE_IndirectDiffuse_BlinnPhong\n#define Material_LightProbeLOD( material )\t(0)",lights_physical_fragment:"PhysicalMaterial material;\nmaterial.diffuseColor = diffuseColor.rgb * ( 1.0 - metalnessFactor );\nvec3 dxy = max( abs( dFdx( geometryNormal ) ), abs( dFdy( geometryNormal ) ) );\nfloat geometryRoughness = max( max( dxy.x, dxy.y ), dxy.z );\nmaterial.roughness = max( roughnessFactor, 0.0525 );material.roughness += geometryRoughness;\nmaterial.roughness = min( material.roughness, 1.0 );\n#ifdef IOR\n\t#ifdef SPECULAR\n\t\tfloat specularIntensityFactor = specularIntensity;\n\t\tvec3 specularColorFactor = specularColor;\n\t\t#ifdef USE_SPECULARINTENSITYMAP\n\t\t\tspecularIntensityFactor *= texture2D( specularIntensityMap, vUv ).a;\n\t\t#endif\n\t\t#ifdef USE_SPECULARCOLORMAP\n\t\t\tspecularColorFactor *= specularColorMapTexelToLinear( texture2D( specularColorMap, vUv ) ).rgb;\n\t\t#endif\n\t\tmaterial.specularF90 = mix( specularIntensityFactor, 1.0, metalnessFactor );\n\t#else\n\t\tfloat specularIntensityFactor = 1.0;\n\t\tvec3 specularColorFactor = vec3( 1.0 );\n\t\tmaterial.specularF90 = 1.0;\n\t#endif\n\tmaterial.specularColor = mix( min( pow2( ( ior - 1.0 ) / ( ior + 1.0 ) ) * specularColorFactor, vec3( 1.0 ) ) * specularIntensityFactor, diffuseColor.rgb, metalnessFactor );\n#else\n\tmaterial.specularColor = mix( vec3( 0.04 ), diffuseColor.rgb, metalnessFactor );\n\tmaterial.specularF90 = 1.0;\n#endif\n#ifdef USE_CLEARCOAT\n\tmaterial.clearcoat = clearcoat;\n\tmaterial.clearcoatRoughness = clearcoatRoughness;\n\tmaterial.clearcoatF0 = vec3( 0.04 );\n\tmaterial.clearcoatF90 = 1.0;\n\t#ifdef USE_CLEARCOATMAP\n\t\tmaterial.clearcoat *= texture2D( clearcoatMap, vUv ).x;\n\t#endif\n\t#ifdef USE_CLEARCOAT_ROUGHNESSMAP\n\t\tmaterial.clearcoatRoughness *= texture2D( clearcoatRoughnessMap, vUv ).y;\n\t#endif\n\tmaterial.clearcoat = saturate( material.clearcoat );\tmaterial.clearcoatRoughness = max( material.clearcoatRoughness, 0.0525 );\n\tmaterial.clearcoatRoughness += geometryRoughness;\n\tmaterial.clearcoatRoughness = min( material.clearcoatRoughness, 1.0 );\n#endif\n#ifdef USE_SHEEN\n\tmaterial.sheenColor = sheenColor;\n\t#ifdef USE_SHEENCOLORMAP\n\t\tmaterial.sheenColor *= sheenColorMapTexelToLinear( texture2D( sheenColorMap, vUv ) ).rgb;\n\t#endif\n\tmaterial.sheenRoughness = clamp( sheenRoughness, 0.07, 1.0 );\n\t#ifdef USE_SHEENROUGHNESSMAP\n\t\tmaterial.sheenRoughness *= texture2D( sheenRoughnessMap, vUv ).a;\n\t#endif\n#endif",lights_physical_pars_fragment:"struct PhysicalMaterial {\n\tvec3 diffuseColor;\n\tfloat roughness;\n\tvec3 specularColor;\n\tfloat specularF90;\n\t#ifdef USE_CLEARCOAT\n\t\tfloat clearcoat;\n\t\tfloat clearcoatRoughness;\n\t\tvec3 clearcoatF0;\n\t\tfloat clearcoatF90;\n\t#endif\n\t#ifdef USE_SHEEN\n\t\tvec3 sheenColor;\n\t\tfloat sheenRoughness;\n\t#endif\n};\nvec3 clearcoatSpecular = vec3( 0.0 );\nvec3 
sheenSpecular = vec3( 0.0 );\nfloat IBLSheenBRDF( const in vec3 normal, const in vec3 viewDir, const in float roughness) {\n\tfloat dotNV = saturate( dot( normal, viewDir ) );\n\tfloat r2 = roughness * roughness;\n\tfloat a = roughness < 0.25 ? -339.2 * r2 + 161.4 * roughness - 25.9 : -8.48 * r2 + 14.3 * roughness - 9.95;\n\tfloat b = roughness < 0.25 ? 44.0 * r2 - 23.7 * roughness + 3.26 : 1.97 * r2 - 3.27 * roughness + 0.72;\n\tfloat DG = exp( a * dotNV + b ) + ( roughness < 0.25 ? 0.0 : 0.1 * ( roughness - 0.25 ) );\n\treturn saturate( DG * RECIPROCAL_PI );\n}\nvec2 DFGApprox( const in vec3 normal, const in vec3 viewDir, const in float roughness ) {\n\tfloat dotNV = saturate( dot( normal, viewDir ) );\n\tconst vec4 c0 = vec4( - 1, - 0.0275, - 0.572, 0.022 );\n\tconst vec4 c1 = vec4( 1, 0.0425, 1.04, - 0.04 );\n\tvec4 r = roughness * c0 + c1;\n\tfloat a004 = min( r.x * r.x, exp2( - 9.28 * dotNV ) ) * r.x + r.y;\n\tvec2 fab = vec2( - 1.04, 1.04 ) * a004 + r.zw;\n\treturn fab;\n}\nvec3 EnvironmentBRDF( const in vec3 normal, const in vec3 viewDir, const in vec3 specularColor, const in float specularF90, const in float roughness ) {\n\tvec2 fab = DFGApprox( normal, viewDir, roughness );\n\treturn specularColor * fab.x + specularF90 * fab.y;\n}\nvoid computeMultiscattering( const in vec3 normal, const in vec3 viewDir, const in vec3 specularColor, const in float specularF90, const in float roughness, inout vec3 singleScatter, inout vec3 multiScatter ) {\n\tvec2 fab = DFGApprox( normal, viewDir, roughness );\n\tvec3 FssEss = specularColor * fab.x + specularF90 * fab.y;\n\tfloat Ess = fab.x + fab.y;\n\tfloat Ems = 1.0 - Ess;\n\tvec3 Favg = specularColor + ( 1.0 - specularColor ) * 0.047619;\tvec3 Fms = FssEss * Favg / ( 1.0 - Ems * Favg );\n\tsingleScatter += FssEss;\n\tmultiScatter += Fms * Ems;\n}\n#if NUM_RECT_AREA_LIGHTS > 0\n\tvoid RE_Direct_RectArea_Physical( const in RectAreaLight rectAreaLight, const in GeometricContext geometry, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {\n\t\tvec3 normal = geometry.normal;\n\t\tvec3 viewDir = geometry.viewDir;\n\t\tvec3 position = geometry.position;\n\t\tvec3 lightPos = rectAreaLight.position;\n\t\tvec3 halfWidth = rectAreaLight.halfWidth;\n\t\tvec3 halfHeight = rectAreaLight.halfHeight;\n\t\tvec3 lightColor = rectAreaLight.color;\n\t\tfloat roughness = material.roughness;\n\t\tvec3 rectCoords[ 4 ];\n\t\trectCoords[ 0 ] = lightPos + halfWidth - halfHeight;\t\trectCoords[ 1 ] = lightPos - halfWidth - halfHeight;\n\t\trectCoords[ 2 ] = lightPos - halfWidth + halfHeight;\n\t\trectCoords[ 3 ] = lightPos + halfWidth + halfHeight;\n\t\tvec2 uv = LTC_Uv( normal, viewDir, roughness );\n\t\tvec4 t1 = texture2D( ltc_1, uv );\n\t\tvec4 t2 = texture2D( ltc_2, uv );\n\t\tmat3 mInv = mat3(\n\t\t\tvec3( t1.x, 0, t1.y ),\n\t\t\tvec3(\t\t0, 1,\t\t0 ),\n\t\t\tvec3( t1.z, 0, t1.w )\n\t\t);\n\t\tvec3 fresnel = ( material.specularColor * t2.x + ( vec3( 1.0 ) - material.specularColor ) * t2.y );\n\t\treflectedLight.directSpecular += lightColor * fresnel * LTC_Evaluate( normal, viewDir, position, mInv, rectCoords );\n\t\treflectedLight.directDiffuse += lightColor * material.diffuseColor * LTC_Evaluate( normal, viewDir, position, mat3( 1.0 ), rectCoords );\n\t}\n#endif\nvoid RE_Direct_Physical( const in IncidentLight directLight, const in GeometricContext geometry, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {\n\tfloat dotNL = saturate( dot( geometry.normal, directLight.direction ) );\n\tvec3 irradiance = dotNL * 
directLight.color;\n\t#ifdef USE_CLEARCOAT\n\t\tfloat dotNLcc = saturate( dot( geometry.clearcoatNormal, directLight.direction ) );\n\t\tvec3 ccIrradiance = dotNLcc * directLight.color;\n\t\tclearcoatSpecular += ccIrradiance * BRDF_GGX( directLight.direction, geometry.viewDir, geometry.clearcoatNormal, material.clearcoatF0, material.clearcoatF90, material.clearcoatRoughness );\n\t#endif\n\t#ifdef USE_SHEEN\n\t\tsheenSpecular += irradiance * BRDF_Sheen( directLight.direction, geometry.viewDir, geometry.normal, material.sheenColor, material.sheenRoughness );\n\t#endif\n\treflectedLight.directSpecular += irradiance * BRDF_GGX( directLight.direction, geometry.viewDir, geometry.normal, material.specularColor, material.specularF90, material.roughness );\n\treflectedLight.directDiffuse += irradiance * BRDF_Lambert( material.diffuseColor );\n}\nvoid RE_IndirectDiffuse_Physical( const in vec3 irradiance, const in GeometricContext geometry, const in PhysicalMaterial material, inout ReflectedLight reflectedLight ) {\n\treflectedLight.indirectDiffuse += irradiance * BRDF_Lambert( material.diffuseColor );\n}\nvoid RE_IndirectSpecular_Physical( const in vec3 radiance, const in vec3 irradiance, const in vec3 clearcoatRadiance, const in GeometricContext geometry, const in PhysicalMaterial material, inout ReflectedLight reflectedLight) {\n\t#ifdef USE_CLEARCOAT\n\t\tclearcoatSpecular += clearcoatRadiance * EnvironmentBRDF( geometry.clearcoatNormal, geometry.viewDir, material.clearcoatF0, material.clearcoatF90, material.clearcoatRoughness );\n\t#endif\n\t#ifdef USE_SHEEN\n\t\tsheenSpecular += irradiance * material.sheenColor * IBLSheenBRDF( geometry.normal, geometry.viewDir, material.sheenRoughness );\n\t#endif\n\tvec3 singleScattering = vec3( 0.0 );\n\tvec3 multiScattering = vec3( 0.0 );\n\tvec3 cosineWeightedIrradiance = irradiance * RECIPROCAL_PI;\n\tcomputeMultiscattering( geometry.normal, geometry.viewDir, material.specularColor, material.specularF90, material.roughness, singleScattering, multiScattering );\n\tvec3 diffuse = material.diffuseColor * ( 1.0 - ( singleScattering + multiScattering ) );\n\treflectedLight.indirectSpecular += radiance * singleScattering;\n\treflectedLight.indirectSpecular += multiScattering * cosineWeightedIrradiance;\n\treflectedLight.indirectDiffuse += diffuse * cosineWeightedIrradiance;\n}\n#define RE_Direct\t\t\t\tRE_Direct_Physical\n#define RE_Direct_RectArea\t\tRE_Direct_RectArea_Physical\n#define RE_IndirectDiffuse\t\tRE_IndirectDiffuse_Physical\n#define RE_IndirectSpecular\t\tRE_IndirectSpecular_Physical\nfloat computeSpecularOcclusion( const in float dotNV, const in float ambientOcclusion, const in float roughness ) {\n\treturn saturate( pow( dotNV + ambientOcclusion, exp2( - 16.0 * roughness - 1.0 ) ) - 1.0 + ambientOcclusion );\n}",lights_fragment_begin:"\nGeometricContext geometry;\ngeometry.position = - vViewPosition;\ngeometry.normal = normal;\ngeometry.viewDir = ( isOrthographic ) ? 
vec3( 0, 0, 1 ) : normalize( vViewPosition );\n#ifdef USE_CLEARCOAT\n\tgeometry.clearcoatNormal = clearcoatNormal;\n#endif\nIncidentLight directLight;\n#if ( NUM_POINT_LIGHTS > 0 ) && defined( RE_Direct )\n\tPointLight pointLight;\n\t#if defined( USE_SHADOWMAP ) && NUM_POINT_LIGHT_SHADOWS > 0\n\tPointLightShadow pointLightShadow;\n\t#endif\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_POINT_LIGHTS; i ++ ) {\n\t\tpointLight = pointLights[ i ];\n\t\tgetPointLightInfo( pointLight, geometry, directLight );\n\t\t#if defined( USE_SHADOWMAP ) && ( UNROLLED_LOOP_INDEX < NUM_POINT_LIGHT_SHADOWS )\n\t\tpointLightShadow = pointLightShadows[ i ];\n\t\tdirectLight.color *= all( bvec2( directLight.visible, receiveShadow ) ) ? getPointShadow( pointShadowMap[ i ], pointLightShadow.shadowMapSize, pointLightShadow.shadowBias, pointLightShadow.shadowRadius, vPointShadowCoord[ i ], pointLightShadow.shadowCameraNear, pointLightShadow.shadowCameraFar ) : 1.0;\n\t\t#endif\n\t\tRE_Direct( directLight, geometry, material, reflectedLight );\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if ( NUM_SPOT_LIGHTS > 0 ) && defined( RE_Direct )\n\tSpotLight spotLight;\n\t#if defined( USE_SHADOWMAP ) && NUM_SPOT_LIGHT_SHADOWS > 0\n\tSpotLightShadow spotLightShadow;\n\t#endif\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_SPOT_LIGHTS; i ++ ) {\n\t\tspotLight = spotLights[ i ];\n\t\tgetSpotLightInfo( spotLight, geometry, directLight );\n\t\t#if defined( USE_SHADOWMAP ) && ( UNROLLED_LOOP_INDEX < NUM_SPOT_LIGHT_SHADOWS )\n\t\tspotLightShadow = spotLightShadows[ i ];\n\t\tdirectLight.color *= all( bvec2( directLight.visible, receiveShadow ) ) ? getShadow( spotShadowMap[ i ], spotLightShadow.shadowMapSize, spotLightShadow.shadowBias, spotLightShadow.shadowRadius, vSpotShadowCoord[ i ] ) : 1.0;\n\t\t#endif\n\t\tRE_Direct( directLight, geometry, material, reflectedLight );\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if ( NUM_DIR_LIGHTS > 0 ) && defined( RE_Direct )\n\tDirectionalLight directionalLight;\n\t#if defined( USE_SHADOWMAP ) && NUM_DIR_LIGHT_SHADOWS > 0\n\tDirectionalLightShadow directionalLightShadow;\n\t#endif\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_DIR_LIGHTS; i ++ ) {\n\t\tdirectionalLight = directionalLights[ i ];\n\t\tgetDirectionalLightInfo( directionalLight, geometry, directLight );\n\t\t#if defined( USE_SHADOWMAP ) && ( UNROLLED_LOOP_INDEX < NUM_DIR_LIGHT_SHADOWS )\n\t\tdirectionalLightShadow = directionalLightShadows[ i ];\n\t\tdirectLight.color *= all( bvec2( directLight.visible, receiveShadow ) ) ? 
getShadow( directionalShadowMap[ i ], directionalLightShadow.shadowMapSize, directionalLightShadow.shadowBias, directionalLightShadow.shadowRadius, vDirectionalShadowCoord[ i ] ) : 1.0;\n\t\t#endif\n\t\tRE_Direct( directLight, geometry, material, reflectedLight );\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if ( NUM_RECT_AREA_LIGHTS > 0 ) && defined( RE_Direct_RectArea )\n\tRectAreaLight rectAreaLight;\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_RECT_AREA_LIGHTS; i ++ ) {\n\t\trectAreaLight = rectAreaLights[ i ];\n\t\tRE_Direct_RectArea( rectAreaLight, geometry, material, reflectedLight );\n\t}\n\t#pragma unroll_loop_end\n#endif\n#if defined( RE_IndirectDiffuse )\n\tvec3 iblIrradiance = vec3( 0.0 );\n\tvec3 irradiance = getAmbientLightIrradiance( ambientLightColor );\n\tirradiance += getLightProbeIrradiance( lightProbe, geometry.normal );\n\t#if ( NUM_HEMI_LIGHTS > 0 )\n\t\t#pragma unroll_loop_start\n\t\tfor ( int i = 0; i < NUM_HEMI_LIGHTS; i ++ ) {\n\t\t\tirradiance += getHemisphereLightIrradiance( hemisphereLights[ i ], geometry.normal );\n\t\t}\n\t\t#pragma unroll_loop_end\n\t#endif\n#endif\n#if defined( RE_IndirectSpecular )\n\tvec3 radiance = vec3( 0.0 );\n\tvec3 clearcoatRadiance = vec3( 0.0 );\n#endif",lights_fragment_maps:"#if defined( RE_IndirectDiffuse )\n\t#ifdef USE_LIGHTMAP\n\t\tvec4 lightMapTexel = texture2D( lightMap, vUv2 );\n\t\tvec3 lightMapIrradiance = lightMapTexelToLinear( lightMapTexel ).rgb * lightMapIntensity;\n\t\t#ifndef PHYSICALLY_CORRECT_LIGHTS\n\t\t\tlightMapIrradiance *= PI;\n\t\t#endif\n\t\tirradiance += lightMapIrradiance;\n\t#endif\n\t#if defined( USE_ENVMAP ) && defined( STANDARD ) && defined( ENVMAP_TYPE_CUBE_UV )\n\t\tiblIrradiance += getIBLIrradiance( geometry.normal );\n\t#endif\n#endif\n#if defined( USE_ENVMAP ) && defined( RE_IndirectSpecular )\n\tradiance += getIBLRadiance( geometry.viewDir, geometry.normal, material.roughness );\n\t#ifdef USE_CLEARCOAT\n\t\tclearcoatRadiance += getIBLRadiance( geometry.viewDir, geometry.clearcoatNormal, material.clearcoatRoughness );\n\t#endif\n#endif",lights_fragment_end:"#if defined( RE_IndirectDiffuse )\n\tRE_IndirectDiffuse( irradiance, geometry, material, reflectedLight );\n#endif\n#if defined( RE_IndirectSpecular )\n\tRE_IndirectSpecular( radiance, iblIrradiance, clearcoatRadiance, geometry, material, reflectedLight );\n#endif",logdepthbuf_fragment:"#if defined( USE_LOGDEPTHBUF ) && defined( USE_LOGDEPTHBUF_EXT )\n\tgl_FragDepthEXT = vIsPerspective == 0.0 ? 
gl_FragCoord.z : log2( vFragDepth ) * logDepthBufFC * 0.5;\n#endif",logdepthbuf_pars_fragment:"#if defined( USE_LOGDEPTHBUF ) && defined( USE_LOGDEPTHBUF_EXT )\n\tuniform float logDepthBufFC;\n\tvarying float vFragDepth;\n\tvarying float vIsPerspective;\n#endif",logdepthbuf_pars_vertex:"#ifdef USE_LOGDEPTHBUF\n\t#ifdef USE_LOGDEPTHBUF_EXT\n\t\tvarying float vFragDepth;\n\t\tvarying float vIsPerspective;\n\t#else\n\t\tuniform float logDepthBufFC;\n\t#endif\n#endif",logdepthbuf_vertex:"#ifdef USE_LOGDEPTHBUF\n\t#ifdef USE_LOGDEPTHBUF_EXT\n\t\tvFragDepth = 1.0 + gl_Position.w;\n\t\tvIsPerspective = float( isPerspectiveMatrix( projectionMatrix ) );\n\t#else\n\t\tif ( isPerspectiveMatrix( projectionMatrix ) ) {\n\t\t\tgl_Position.z = log2( max( EPSILON, gl_Position.w + 1.0 ) ) * logDepthBufFC - 1.0;\n\t\t\tgl_Position.z *= gl_Position.w;\n\t\t}\n\t#endif\n#endif",map_fragment:"#ifdef USE_MAP\n\tvec4 texelColor = texture2D( map, vUv );\n\ttexelColor = mapTexelToLinear( texelColor );\n\tdiffuseColor *= texelColor;\n#endif",map_pars_fragment:"#ifdef USE_MAP\n\tuniform sampler2D map;\n#endif",map_particle_fragment:"#if defined( USE_MAP ) || defined( USE_ALPHAMAP )\n\tvec2 uv = ( uvTransform * vec3( gl_PointCoord.x, 1.0 - gl_PointCoord.y, 1 ) ).xy;\n#endif\n#ifdef USE_MAP\n\tvec4 mapTexel = texture2D( map, uv );\n\tdiffuseColor *= mapTexelToLinear( mapTexel );\n#endif\n#ifdef USE_ALPHAMAP\n\tdiffuseColor.a *= texture2D( alphaMap, uv ).g;\n#endif",map_particle_pars_fragment:"#if defined( USE_MAP ) || defined( USE_ALPHAMAP )\n\tuniform mat3 uvTransform;\n#endif\n#ifdef USE_MAP\n\tuniform sampler2D map;\n#endif\n#ifdef USE_ALPHAMAP\n\tuniform sampler2D alphaMap;\n#endif",metalnessmap_fragment:"float metalnessFactor = metalness;\n#ifdef USE_METALNESSMAP\n\tvec4 texelMetalness = texture2D( metalnessMap, vUv );\n\tmetalnessFactor *= texelMetalness.b;\n#endif",metalnessmap_pars_fragment:"#ifdef USE_METALNESSMAP\n\tuniform sampler2D metalnessMap;\n#endif",morphnormal_vertex:"#ifdef USE_MORPHNORMALS\n\tobjectNormal *= morphTargetBaseInfluence;\n\t#ifdef MORPHTARGETS_TEXTURE\n\t\tfor ( int i = 0; i < MORPHTARGETS_COUNT; i ++ ) {\n\t\t\tif ( morphTargetInfluences[ i ] > 0.0 ) objectNormal += getMorph( gl_VertexID, i, 1, 2 ) * morphTargetInfluences[ i ];\n\t\t}\n\t#else\n\t\tobjectNormal += morphNormal0 * morphTargetInfluences[ 0 ];\n\t\tobjectNormal += morphNormal1 * morphTargetInfluences[ 1 ];\n\t\tobjectNormal += morphNormal2 * morphTargetInfluences[ 2 ];\n\t\tobjectNormal += morphNormal3 * morphTargetInfluences[ 3 ];\n\t#endif\n#endif",morphtarget_pars_vertex:"#ifdef USE_MORPHTARGETS\n\tuniform float morphTargetBaseInfluence;\n\t#ifdef MORPHTARGETS_TEXTURE\n\t\tuniform float morphTargetInfluences[ MORPHTARGETS_COUNT ];\n\t\tuniform sampler2DArray morphTargetsTexture;\n\t\tuniform vec2 morphTargetsTextureSize;\n\t\tvec3 getMorph( const in int vertexIndex, const in int morphTargetIndex, const in int offset, const in int stride ) {\n\t\t\tfloat texelIndex = float( vertexIndex * stride + offset );\n\t\t\tfloat y = floor( texelIndex / morphTargetsTextureSize.x );\n\t\t\tfloat x = texelIndex - y * morphTargetsTextureSize.x;\n\t\t\tvec3 morphUV = vec3( ( x + 0.5 ) / morphTargetsTextureSize.x, y / morphTargetsTextureSize.y, morphTargetIndex );\n\t\t\treturn texture( morphTargetsTexture, morphUV ).xyz;\n\t\t}\n\t#else\n\t\t#ifndef USE_MORPHNORMALS\n\t\t\tuniform float morphTargetInfluences[ 8 ];\n\t\t#else\n\t\t\tuniform float morphTargetInfluences[ 4 ];\n\t\t#endif\n\t#endif\n#endif",morphtarget_vertex:"#ifdef 
USE_MORPHTARGETS\n\ttransformed *= morphTargetBaseInfluence;\n\t#ifdef MORPHTARGETS_TEXTURE\n\t\tfor ( int i = 0; i < MORPHTARGETS_COUNT; i ++ ) {\n\t\t\t#ifndef USE_MORPHNORMALS\n\t\t\t\tif ( morphTargetInfluences[ i ] > 0.0 ) transformed += getMorph( gl_VertexID, i, 0, 1 ) * morphTargetInfluences[ i ];\n\t\t\t#else\n\t\t\t\tif ( morphTargetInfluences[ i ] > 0.0 ) transformed += getMorph( gl_VertexID, i, 0, 2 ) * morphTargetInfluences[ i ];\n\t\t\t#endif\n\t\t}\n\t#else\n\t\ttransformed += morphTarget0 * morphTargetInfluences[ 0 ];\n\t\ttransformed += morphTarget1 * morphTargetInfluences[ 1 ];\n\t\ttransformed += morphTarget2 * morphTargetInfluences[ 2 ];\n\t\ttransformed += morphTarget3 * morphTargetInfluences[ 3 ];\n\t\t#ifndef USE_MORPHNORMALS\n\t\t\ttransformed += morphTarget4 * morphTargetInfluences[ 4 ];\n\t\t\ttransformed += morphTarget5 * morphTargetInfluences[ 5 ];\n\t\t\ttransformed += morphTarget6 * morphTargetInfluences[ 6 ];\n\t\t\ttransformed += morphTarget7 * morphTargetInfluences[ 7 ];\n\t\t#endif\n\t#endif\n#endif",normal_fragment_begin:"float faceDirection = gl_FrontFacing ? 1.0 : - 1.0;\n#ifdef FLAT_SHADED\n\tvec3 fdx = vec3( dFdx( vViewPosition.x ), dFdx( vViewPosition.y ), dFdx( vViewPosition.z ) );\n\tvec3 fdy = vec3( dFdy( vViewPosition.x ), dFdy( vViewPosition.y ), dFdy( vViewPosition.z ) );\n\tvec3 normal = normalize( cross( fdx, fdy ) );\n#else\n\tvec3 normal = normalize( vNormal );\n\t#ifdef DOUBLE_SIDED\n\t\tnormal = normal * faceDirection;\n\t#endif\n\t#ifdef USE_TANGENT\n\t\tvec3 tangent = normalize( vTangent );\n\t\tvec3 bitangent = normalize( vBitangent );\n\t\t#ifdef DOUBLE_SIDED\n\t\t\ttangent = tangent * faceDirection;\n\t\t\tbitangent = bitangent * faceDirection;\n\t\t#endif\n\t\t#if defined( TANGENTSPACE_NORMALMAP ) || defined( USE_CLEARCOAT_NORMALMAP )\n\t\t\tmat3 vTBN = mat3( tangent, bitangent, normal );\n\t\t#endif\n\t#endif\n#endif\nvec3 geometryNormal = normal;",normal_fragment_maps:"#ifdef OBJECTSPACE_NORMALMAP\n\tnormal = texture2D( normalMap, vUv ).xyz * 2.0 - 1.0;\n\t#ifdef FLIP_SIDED\n\t\tnormal = - normal;\n\t#endif\n\t#ifdef DOUBLE_SIDED\n\t\tnormal = normal * faceDirection;\n\t#endif\n\tnormal = normalize( normalMatrix * normal );\n#elif defined( TANGENTSPACE_NORMALMAP )\n\tvec3 mapN = texture2D( normalMap, vUv ).xyz * 2.0 - 1.0;\n\tmapN.xy *= normalScale;\n\t#ifdef USE_TANGENT\n\t\tnormal = normalize( vTBN * mapN );\n\t#else\n\t\tnormal = perturbNormal2Arb( - vViewPosition, normal, mapN, faceDirection );\n\t#endif\n#elif defined( USE_BUMPMAP )\n\tnormal = perturbNormalArb( - vViewPosition, normal, dHdxy_fwd(), faceDirection );\n#endif",normal_pars_fragment:"#ifndef FLAT_SHADED\n\tvarying vec3 vNormal;\n\t#ifdef USE_TANGENT\n\t\tvarying vec3 vTangent;\n\t\tvarying vec3 vBitangent;\n\t#endif\n#endif",normal_pars_vertex:"#ifndef FLAT_SHADED\n\tvarying vec3 vNormal;\n\t#ifdef USE_TANGENT\n\t\tvarying vec3 vTangent;\n\t\tvarying vec3 vBitangent;\n\t#endif\n#endif",normal_vertex:"#ifndef FLAT_SHADED\n\tvNormal = normalize( transformedNormal );\n\t#ifdef USE_TANGENT\n\t\tvTangent = normalize( transformedTangent );\n\t\tvBitangent = normalize( cross( vNormal, vTangent ) * tangent.w );\n\t#endif\n#endif",normalmap_pars_fragment:"#ifdef USE_NORMALMAP\n\tuniform sampler2D normalMap;\n\tuniform vec2 normalScale;\n#endif\n#ifdef OBJECTSPACE_NORMALMAP\n\tuniform mat3 normalMatrix;\n#endif\n#if ! 
defined ( USE_TANGENT ) && ( defined ( TANGENTSPACE_NORMALMAP ) || defined ( USE_CLEARCOAT_NORMALMAP ) )\n\tvec3 perturbNormal2Arb( vec3 eye_pos, vec3 surf_norm, vec3 mapN, float faceDirection ) {\n\t\tvec3 q0 = vec3( dFdx( eye_pos.x ), dFdx( eye_pos.y ), dFdx( eye_pos.z ) );\n\t\tvec3 q1 = vec3( dFdy( eye_pos.x ), dFdy( eye_pos.y ), dFdy( eye_pos.z ) );\n\t\tvec2 st0 = dFdx( vUv.st );\n\t\tvec2 st1 = dFdy( vUv.st );\n\t\tvec3 N = surf_norm;\n\t\tvec3 q1perp = cross( q1, N );\n\t\tvec3 q0perp = cross( N, q0 );\n\t\tvec3 T = q1perp * st0.x + q0perp * st1.x;\n\t\tvec3 B = q1perp * st0.y + q0perp * st1.y;\n\t\tfloat det = max( dot( T, T ), dot( B, B ) );\n\t\tfloat scale = ( det == 0.0 ) ? 0.0 : faceDirection * inversesqrt( det );\n\t\treturn normalize( T * ( mapN.x * scale ) + B * ( mapN.y * scale ) + N * mapN.z );\n\t}\n#endif",clearcoat_normal_fragment_begin:"#ifdef USE_CLEARCOAT\n\tvec3 clearcoatNormal = geometryNormal;\n#endif",clearcoat_normal_fragment_maps:"#ifdef USE_CLEARCOAT_NORMALMAP\n\tvec3 clearcoatMapN = texture2D( clearcoatNormalMap, vUv ).xyz * 2.0 - 1.0;\n\tclearcoatMapN.xy *= clearcoatNormalScale;\n\t#ifdef USE_TANGENT\n\t\tclearcoatNormal = normalize( vTBN * clearcoatMapN );\n\t#else\n\t\tclearcoatNormal = perturbNormal2Arb( - vViewPosition, clearcoatNormal, clearcoatMapN, faceDirection );\n\t#endif\n#endif",clearcoat_pars_fragment:"#ifdef USE_CLEARCOATMAP\n\tuniform sampler2D clearcoatMap;\n#endif\n#ifdef USE_CLEARCOAT_ROUGHNESSMAP\n\tuniform sampler2D clearcoatRoughnessMap;\n#endif\n#ifdef USE_CLEARCOAT_NORMALMAP\n\tuniform sampler2D clearcoatNormalMap;\n\tuniform vec2 clearcoatNormalScale;\n#endif",output_fragment:"#ifdef OPAQUE\ndiffuseColor.a = 1.0;\n#endif\n#ifdef USE_TRANSMISSION\ndiffuseColor.a *= transmissionAlpha + 0.1;\n#endif\ngl_FragColor = vec4( outgoingLight, diffuseColor.a );",packing:"vec3 packNormalToRGB( const in vec3 normal ) {\n\treturn normalize( normal ) * 0.5 + 0.5;\n}\nvec3 unpackRGBToNormal( const in vec3 rgb ) {\n\treturn 2.0 * rgb.xyz - 1.0;\n}\nconst float PackUpscale = 256. / 255.;const float UnpackDownscale = 255. / 256.;\nconst vec3 PackFactors = vec3( 256. * 256. * 256., 256. * 256., 256. );\nconst vec4 UnpackFactors = UnpackDownscale / vec4( PackFactors, 1. );\nconst float ShiftRight8 = 1. 
/ 256.;\nvec4 packDepthToRGBA( const in float v ) {\n\tvec4 r = vec4( fract( v * PackFactors ), v );\n\tr.yzw -= r.xyz * ShiftRight8;\treturn r * PackUpscale;\n}\nfloat unpackRGBAToDepth( const in vec4 v ) {\n\treturn dot( v, UnpackFactors );\n}\nvec4 pack2HalfToRGBA( vec2 v ) {\n\tvec4 r = vec4( v.x, fract( v.x * 255.0 ), v.y, fract( v.y * 255.0 ) );\n\treturn vec4( r.x - r.y / 255.0, r.y, r.z - r.w / 255.0, r.w );\n}\nvec2 unpackRGBATo2Half( vec4 v ) {\n\treturn vec2( v.x + ( v.y / 255.0 ), v.z + ( v.w / 255.0 ) );\n}\nfloat viewZToOrthographicDepth( const in float viewZ, const in float near, const in float far ) {\n\treturn ( viewZ + near ) / ( near - far );\n}\nfloat orthographicDepthToViewZ( const in float linearClipZ, const in float near, const in float far ) {\n\treturn linearClipZ * ( near - far ) - near;\n}\nfloat viewZToPerspectiveDepth( const in float viewZ, const in float near, const in float far ) {\n\treturn ( ( near + viewZ ) * far ) / ( ( far - near ) * viewZ );\n}\nfloat perspectiveDepthToViewZ( const in float invClipZ, const in float near, const in float far ) {\n\treturn ( near * far ) / ( ( far - near ) * invClipZ - far );\n}",premultiplied_alpha_fragment:"#ifdef PREMULTIPLIED_ALPHA\n\tgl_FragColor.rgb *= gl_FragColor.a;\n#endif",project_vertex:"vec4 mvPosition = vec4( transformed, 1.0 );\n#ifdef USE_INSTANCING\n\tmvPosition = instanceMatrix * mvPosition;\n#endif\nmvPosition = modelViewMatrix * mvPosition;\ngl_Position = projectionMatrix * mvPosition;",dithering_fragment:"#ifdef DITHERING\n\tgl_FragColor.rgb = dithering( gl_FragColor.rgb );\n#endif",dithering_pars_fragment:"#ifdef DITHERING\n\tvec3 dithering( vec3 color ) {\n\t\tfloat grid_position = rand( gl_FragCoord.xy );\n\t\tvec3 dither_shift_RGB = vec3( 0.25 / 255.0, -0.25 / 255.0, 0.25 / 255.0 );\n\t\tdither_shift_RGB = mix( 2.0 * dither_shift_RGB, -2.0 * dither_shift_RGB, grid_position );\n\t\treturn color + dither_shift_RGB;\n\t}\n#endif",roughnessmap_fragment:"float roughnessFactor = roughness;\n#ifdef USE_ROUGHNESSMAP\n\tvec4 texelRoughness = texture2D( roughnessMap, vUv );\n\troughnessFactor *= texelRoughness.g;\n#endif",roughnessmap_pars_fragment:"#ifdef USE_ROUGHNESSMAP\n\tuniform sampler2D roughnessMap;\n#endif",shadowmap_pars_fragment:"#ifdef USE_SHADOWMAP\n\t#if NUM_DIR_LIGHT_SHADOWS > 0\n\t\tuniform sampler2D directionalShadowMap[ NUM_DIR_LIGHT_SHADOWS ];\n\t\tvarying vec4 vDirectionalShadowCoord[ NUM_DIR_LIGHT_SHADOWS ];\n\t\tstruct DirectionalLightShadow {\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t};\n\t\tuniform DirectionalLightShadow directionalLightShadows[ NUM_DIR_LIGHT_SHADOWS ];\n\t#endif\n\t#if NUM_SPOT_LIGHT_SHADOWS > 0\n\t\tuniform sampler2D spotShadowMap[ NUM_SPOT_LIGHT_SHADOWS ];\n\t\tvarying vec4 vSpotShadowCoord[ NUM_SPOT_LIGHT_SHADOWS ];\n\t\tstruct SpotLightShadow {\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t};\n\t\tuniform SpotLightShadow spotLightShadows[ NUM_SPOT_LIGHT_SHADOWS ];\n\t#endif\n\t#if NUM_POINT_LIGHT_SHADOWS > 0\n\t\tuniform sampler2D pointShadowMap[ NUM_POINT_LIGHT_SHADOWS ];\n\t\tvarying vec4 vPointShadowCoord[ NUM_POINT_LIGHT_SHADOWS ];\n\t\tstruct PointLightShadow {\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t\tfloat shadowCameraNear;\n\t\t\tfloat shadowCameraFar;\n\t\t};\n\t\tuniform PointLightShadow pointLightShadows[ NUM_POINT_LIGHT_SHADOWS 
];\n\t#endif\n\tfloat texture2DCompare( sampler2D depths, vec2 uv, float compare ) {\n\t\treturn step( compare, unpackRGBAToDepth( texture2D( depths, uv ) ) );\n\t}\n\tvec2 texture2DDistribution( sampler2D shadow, vec2 uv ) {\n\t\treturn unpackRGBATo2Half( texture2D( shadow, uv ) );\n\t}\n\tfloat VSMShadow (sampler2D shadow, vec2 uv, float compare ){\n\t\tfloat occlusion = 1.0;\n\t\tvec2 distribution = texture2DDistribution( shadow, uv );\n\t\tfloat hard_shadow = step( compare , distribution.x );\n\t\tif (hard_shadow != 1.0 ) {\n\t\t\tfloat distance = compare - distribution.x ;\n\t\t\tfloat variance = max( 0.00000, distribution.y * distribution.y );\n\t\t\tfloat softness_probability = variance / (variance + distance * distance );\t\t\tsoftness_probability = clamp( ( softness_probability - 0.3 ) / ( 0.95 - 0.3 ), 0.0, 1.0 );\t\t\tocclusion = clamp( max( hard_shadow, softness_probability ), 0.0, 1.0 );\n\t\t}\n\t\treturn occlusion;\n\t}\n\tfloat getShadow( sampler2D shadowMap, vec2 shadowMapSize, float shadowBias, float shadowRadius, vec4 shadowCoord ) {\n\t\tfloat shadow = 1.0;\n\t\tshadowCoord.xyz /= shadowCoord.w;\n\t\tshadowCoord.z += shadowBias;\n\t\tbvec4 inFrustumVec = bvec4 ( shadowCoord.x >= 0.0, shadowCoord.x <= 1.0, shadowCoord.y >= 0.0, shadowCoord.y <= 1.0 );\n\t\tbool inFrustum = all( inFrustumVec );\n\t\tbvec2 frustumTestVec = bvec2( inFrustum, shadowCoord.z <= 1.0 );\n\t\tbool frustumTest = all( frustumTestVec );\n\t\tif ( frustumTest ) {\n\t\t#if defined( SHADOWMAP_TYPE_PCF )\n\t\t\tvec2 texelSize = vec2( 1.0 ) / shadowMapSize;\n\t\t\tfloat dx0 = - texelSize.x * shadowRadius;\n\t\t\tfloat dy0 = - texelSize.y * shadowRadius;\n\t\t\tfloat dx1 = + texelSize.x * shadowRadius;\n\t\t\tfloat dy1 = + texelSize.y * shadowRadius;\n\t\t\tfloat dx2 = dx0 / 2.0;\n\t\t\tfloat dy2 = dy0 / 2.0;\n\t\t\tfloat dx3 = dx1 / 2.0;\n\t\t\tfloat dy3 = dy1 / 2.0;\n\t\t\tshadow = (\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx0, dy0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( 0.0, dy0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx1, dy0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx2, dy2 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( 0.0, dy2 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx3, dy2 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx0, 0.0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx2, 0.0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy, shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx3, 0.0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx1, 0.0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx2, dy3 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( 0.0, dy3 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx3, dy3 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx0, dy1 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( 0.0, dy1 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, shadowCoord.xy + vec2( dx1, dy1 ), shadowCoord.z )\n\t\t\t) * ( 1.0 / 17.0 );\n\t\t#elif defined( SHADOWMAP_TYPE_PCF_SOFT )\n\t\t\tvec2 texelSize = vec2( 1.0 ) / 
shadowMapSize;\n\t\t\tfloat dx = texelSize.x;\n\t\t\tfloat dy = texelSize.y;\n\t\t\tvec2 uv = shadowCoord.xy;\n\t\t\tvec2 f = fract( uv * shadowMapSize + 0.5 );\n\t\t\tuv -= f * texelSize;\n\t\t\tshadow = (\n\t\t\t\ttexture2DCompare( shadowMap, uv, shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, uv + vec2( dx, 0.0 ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, uv + vec2( 0.0, dy ), shadowCoord.z ) +\n\t\t\t\ttexture2DCompare( shadowMap, uv + texelSize, shadowCoord.z ) +\n\t\t\t\tmix( texture2DCompare( shadowMap, uv + vec2( -dx, 0.0 ), shadowCoord.z ), \n\t\t\t\t\t texture2DCompare( shadowMap, uv + vec2( 2.0 * dx, 0.0 ), shadowCoord.z ),\n\t\t\t\t\t f.x ) +\n\t\t\t\tmix( texture2DCompare( shadowMap, uv + vec2( -dx, dy ), shadowCoord.z ), \n\t\t\t\t\t texture2DCompare( shadowMap, uv + vec2( 2.0 * dx, dy ), shadowCoord.z ),\n\t\t\t\t\t f.x ) +\n\t\t\t\tmix( texture2DCompare( shadowMap, uv + vec2( 0.0, -dy ), shadowCoord.z ), \n\t\t\t\t\t texture2DCompare( shadowMap, uv + vec2( 0.0, 2.0 * dy ), shadowCoord.z ),\n\t\t\t\t\t f.y ) +\n\t\t\t\tmix( texture2DCompare( shadowMap, uv + vec2( dx, -dy ), shadowCoord.z ), \n\t\t\t\t\t texture2DCompare( shadowMap, uv + vec2( dx, 2.0 * dy ), shadowCoord.z ),\n\t\t\t\t\t f.y ) +\n\t\t\t\tmix( mix( texture2DCompare( shadowMap, uv + vec2( -dx, -dy ), shadowCoord.z ), \n\t\t\t\t\t\t\ttexture2DCompare( shadowMap, uv + vec2( 2.0 * dx, -dy ), shadowCoord.z ),\n\t\t\t\t\t\t\tf.x ),\n\t\t\t\t\t mix( texture2DCompare( shadowMap, uv + vec2( -dx, 2.0 * dy ), shadowCoord.z ), \n\t\t\t\t\t\t\ttexture2DCompare( shadowMap, uv + vec2( 2.0 * dx, 2.0 * dy ), shadowCoord.z ),\n\t\t\t\t\t\t\tf.x ),\n\t\t\t\t\t f.y )\n\t\t\t) * ( 1.0 / 9.0 );\n\t\t#elif defined( SHADOWMAP_TYPE_VSM )\n\t\t\tshadow = VSMShadow( shadowMap, shadowCoord.xy, shadowCoord.z );\n\t\t#else\n\t\t\tshadow = texture2DCompare( shadowMap, shadowCoord.xy, shadowCoord.z );\n\t\t#endif\n\t\t}\n\t\treturn shadow;\n\t}\n\tvec2 cubeToUV( vec3 v, float texelSizeY ) {\n\t\tvec3 absV = abs( v );\n\t\tfloat scaleToCube = 1.0 / max( absV.x, max( absV.y, absV.z ) );\n\t\tabsV *= scaleToCube;\n\t\tv *= scaleToCube * ( 1.0 - 2.0 * texelSizeY );\n\t\tvec2 planar = v.xy;\n\t\tfloat almostATexel = 1.5 * texelSizeY;\n\t\tfloat almostOne = 1.0 - almostATexel;\n\t\tif ( absV.z >= almostOne ) {\n\t\t\tif ( v.z > 0.0 )\n\t\t\t\tplanar.x = 4.0 - v.x;\n\t\t} else if ( absV.x >= almostOne ) {\n\t\t\tfloat signX = sign( v.x );\n\t\t\tplanar.x = v.z * signX + 2.0 * signX;\n\t\t} else if ( absV.y >= almostOne ) {\n\t\t\tfloat signY = sign( v.y );\n\t\t\tplanar.x = v.x + 2.0 * signY + 2.0;\n\t\t\tplanar.y = v.z * signY - 2.0;\n\t\t}\n\t\treturn vec2( 0.125, 0.25 ) * planar + vec2( 0.375, 0.75 );\n\t}\n\tfloat getPointShadow( sampler2D shadowMap, vec2 shadowMapSize, float shadowBias, float shadowRadius, vec4 shadowCoord, float shadowCameraNear, float shadowCameraFar ) {\n\t\tvec2 texelSize = vec2( 1.0 ) / ( shadowMapSize * vec2( 4.0, 2.0 ) );\n\t\tvec3 lightToPosition = shadowCoord.xyz;\n\t\tfloat dp = ( length( lightToPosition ) - shadowCameraNear ) / ( shadowCameraFar - shadowCameraNear );\t\tdp += shadowBias;\n\t\tvec3 bd3D = normalize( lightToPosition );\n\t\t#if defined( SHADOWMAP_TYPE_PCF ) || defined( SHADOWMAP_TYPE_PCF_SOFT ) || defined( SHADOWMAP_TYPE_VSM )\n\t\t\tvec2 offset = vec2( - 1, 1 ) * shadowRadius * texelSize.y;\n\t\t\treturn (\n\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.xyy, texelSize.y ), dp ) +\n\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.yyy, texelSize.y ), dp ) 
+\n\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.xyx, texelSize.y ), dp ) +\n\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.yyx, texelSize.y ), dp ) +\n\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D, texelSize.y ), dp ) +\n\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.xxy, texelSize.y ), dp ) +\n\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.yxy, texelSize.y ), dp ) +\n\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.xxx, texelSize.y ), dp ) +\n\t\t\t\ttexture2DCompare( shadowMap, cubeToUV( bd3D + offset.yxx, texelSize.y ), dp )\n\t\t\t) * ( 1.0 / 9.0 );\n\t\t#else\n\t\t\treturn texture2DCompare( shadowMap, cubeToUV( bd3D, texelSize.y ), dp );\n\t\t#endif\n\t}\n#endif",shadowmap_pars_vertex:"#ifdef USE_SHADOWMAP\n\t#if NUM_DIR_LIGHT_SHADOWS > 0\n\t\tuniform mat4 directionalShadowMatrix[ NUM_DIR_LIGHT_SHADOWS ];\n\t\tvarying vec4 vDirectionalShadowCoord[ NUM_DIR_LIGHT_SHADOWS ];\n\t\tstruct DirectionalLightShadow {\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t};\n\t\tuniform DirectionalLightShadow directionalLightShadows[ NUM_DIR_LIGHT_SHADOWS ];\n\t#endif\n\t#if NUM_SPOT_LIGHT_SHADOWS > 0\n\t\tuniform mat4 spotShadowMatrix[ NUM_SPOT_LIGHT_SHADOWS ];\n\t\tvarying vec4 vSpotShadowCoord[ NUM_SPOT_LIGHT_SHADOWS ];\n\t\tstruct SpotLightShadow {\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t};\n\t\tuniform SpotLightShadow spotLightShadows[ NUM_SPOT_LIGHT_SHADOWS ];\n\t#endif\n\t#if NUM_POINT_LIGHT_SHADOWS > 0\n\t\tuniform mat4 pointShadowMatrix[ NUM_POINT_LIGHT_SHADOWS ];\n\t\tvarying vec4 vPointShadowCoord[ NUM_POINT_LIGHT_SHADOWS ];\n\t\tstruct PointLightShadow {\n\t\t\tfloat shadowBias;\n\t\t\tfloat shadowNormalBias;\n\t\t\tfloat shadowRadius;\n\t\t\tvec2 shadowMapSize;\n\t\t\tfloat shadowCameraNear;\n\t\t\tfloat shadowCameraFar;\n\t\t};\n\t\tuniform PointLightShadow pointLightShadows[ NUM_POINT_LIGHT_SHADOWS ];\n\t#endif\n#endif",shadowmap_vertex:"#ifdef USE_SHADOWMAP\n\t#if NUM_DIR_LIGHT_SHADOWS > 0 || NUM_SPOT_LIGHT_SHADOWS > 0 || NUM_POINT_LIGHT_SHADOWS > 0\n\t\tvec3 shadowWorldNormal = inverseTransformDirection( transformedNormal, viewMatrix );\n\t\tvec4 shadowWorldPosition;\n\t#endif\n\t#if NUM_DIR_LIGHT_SHADOWS > 0\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_DIR_LIGHT_SHADOWS; i ++ ) {\n\t\tshadowWorldPosition = worldPosition + vec4( shadowWorldNormal * directionalLightShadows[ i ].shadowNormalBias, 0 );\n\t\tvDirectionalShadowCoord[ i ] = directionalShadowMatrix[ i ] * shadowWorldPosition;\n\t}\n\t#pragma unroll_loop_end\n\t#endif\n\t#if NUM_SPOT_LIGHT_SHADOWS > 0\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_SPOT_LIGHT_SHADOWS; i ++ ) {\n\t\tshadowWorldPosition = worldPosition + vec4( shadowWorldNormal * spotLightShadows[ i ].shadowNormalBias, 0 );\n\t\tvSpotShadowCoord[ i ] = spotShadowMatrix[ i ] * shadowWorldPosition;\n\t}\n\t#pragma unroll_loop_end\n\t#endif\n\t#if NUM_POINT_LIGHT_SHADOWS > 0\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_POINT_LIGHT_SHADOWS; i ++ ) {\n\t\tshadowWorldPosition = worldPosition + vec4( shadowWorldNormal * pointLightShadows[ i ].shadowNormalBias, 0 );\n\t\tvPointShadowCoord[ i ] = pointShadowMatrix[ i ] * shadowWorldPosition;\n\t}\n\t#pragma unroll_loop_end\n\t#endif\n#endif",shadowmask_pars_fragment:"float getShadowMask() {\n\tfloat shadow = 1.0;\n\t#ifdef USE_SHADOWMAP\n\t#if NUM_DIR_LIGHT_SHADOWS > 
0\n\tDirectionalLightShadow directionalLight;\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_DIR_LIGHT_SHADOWS; i ++ ) {\n\t\tdirectionalLight = directionalLightShadows[ i ];\n\t\tshadow *= receiveShadow ? getShadow( directionalShadowMap[ i ], directionalLight.shadowMapSize, directionalLight.shadowBias, directionalLight.shadowRadius, vDirectionalShadowCoord[ i ] ) : 1.0;\n\t}\n\t#pragma unroll_loop_end\n\t#endif\n\t#if NUM_SPOT_LIGHT_SHADOWS > 0\n\tSpotLightShadow spotLight;\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_SPOT_LIGHT_SHADOWS; i ++ ) {\n\t\tspotLight = spotLightShadows[ i ];\n\t\tshadow *= receiveShadow ? getShadow( spotShadowMap[ i ], spotLight.shadowMapSize, spotLight.shadowBias, spotLight.shadowRadius, vSpotShadowCoord[ i ] ) : 1.0;\n\t}\n\t#pragma unroll_loop_end\n\t#endif\n\t#if NUM_POINT_LIGHT_SHADOWS > 0\n\tPointLightShadow pointLight;\n\t#pragma unroll_loop_start\n\tfor ( int i = 0; i < NUM_POINT_LIGHT_SHADOWS; i ++ ) {\n\t\tpointLight = pointLightShadows[ i ];\n\t\tshadow *= receiveShadow ? getPointShadow( pointShadowMap[ i ], pointLight.shadowMapSize, pointLight.shadowBias, pointLight.shadowRadius, vPointShadowCoord[ i ], pointLight.shadowCameraNear, pointLight.shadowCameraFar ) : 1.0;\n\t}\n\t#pragma unroll_loop_end\n\t#endif\n\t#endif\n\treturn shadow;\n}",skinbase_vertex:"#ifdef USE_SKINNING\n\tmat4 boneMatX = getBoneMatrix( skinIndex.x );\n\tmat4 boneMatY = getBoneMatrix( skinIndex.y );\n\tmat4 boneMatZ = getBoneMatrix( skinIndex.z );\n\tmat4 boneMatW = getBoneMatrix( skinIndex.w );\n#endif",skinning_pars_vertex:"#ifdef USE_SKINNING\n\tuniform mat4 bindMatrix;\n\tuniform mat4 bindMatrixInverse;\n\t#ifdef BONE_TEXTURE\n\t\tuniform highp sampler2D boneTexture;\n\t\tuniform int boneTextureSize;\n\t\tmat4 getBoneMatrix( const in float i ) {\n\t\t\tfloat j = i * 4.0;\n\t\t\tfloat x = mod( j, float( boneTextureSize ) );\n\t\t\tfloat y = floor( j / float( boneTextureSize ) );\n\t\t\tfloat dx = 1.0 / float( boneTextureSize );\n\t\t\tfloat dy = 1.0 / float( boneTextureSize );\n\t\t\ty = dy * ( y + 0.5 );\n\t\t\tvec4 v1 = texture2D( boneTexture, vec2( dx * ( x + 0.5 ), y ) );\n\t\t\tvec4 v2 = texture2D( boneTexture, vec2( dx * ( x + 1.5 ), y ) );\n\t\t\tvec4 v3 = texture2D( boneTexture, vec2( dx * ( x + 2.5 ), y ) );\n\t\t\tvec4 v4 = texture2D( boneTexture, vec2( dx * ( x + 3.5 ), y ) );\n\t\t\tmat4 bone = mat4( v1, v2, v3, v4 );\n\t\t\treturn bone;\n\t\t}\n\t#else\n\t\tuniform mat4 boneMatrices[ MAX_BONES ];\n\t\tmat4 getBoneMatrix( const in float i ) {\n\t\t\tmat4 bone = boneMatrices[ int(i) ];\n\t\t\treturn bone;\n\t\t}\n\t#endif\n#endif",skinning_vertex:"#ifdef USE_SKINNING\n\tvec4 skinVertex = bindMatrix * vec4( transformed, 1.0 );\n\tvec4 skinned = vec4( 0.0 );\n\tskinned += boneMatX * skinVertex * skinWeight.x;\n\tskinned += boneMatY * skinVertex * skinWeight.y;\n\tskinned += boneMatZ * skinVertex * skinWeight.z;\n\tskinned += boneMatW * skinVertex * skinWeight.w;\n\ttransformed = ( bindMatrixInverse * skinned ).xyz;\n#endif",skinnormal_vertex:"#ifdef USE_SKINNING\n\tmat4 skinMatrix = mat4( 0.0 );\n\tskinMatrix += skinWeight.x * boneMatX;\n\tskinMatrix += skinWeight.y * boneMatY;\n\tskinMatrix += skinWeight.z * boneMatZ;\n\tskinMatrix += skinWeight.w * boneMatW;\n\tskinMatrix = bindMatrixInverse * skinMatrix * bindMatrix;\n\tobjectNormal = vec4( skinMatrix * vec4( objectNormal, 0.0 ) ).xyz;\n\t#ifdef USE_TANGENT\n\t\tobjectTangent = vec4( skinMatrix * vec4( objectTangent, 0.0 ) ).xyz;\n\t#endif\n#endif",specularmap_fragment:"float 
specularStrength;\n#ifdef USE_SPECULARMAP\n\tvec4 texelSpecular = texture2D( specularMap, vUv );\n\tspecularStrength = texelSpecular.r;\n#else\n\tspecularStrength = 1.0;\n#endif",specularmap_pars_fragment:"#ifdef USE_SPECULARMAP\n\tuniform sampler2D specularMap;\n#endif",tonemapping_fragment:"#if defined( TONE_MAPPING )\n\tgl_FragColor.rgb = toneMapping( gl_FragColor.rgb );\n#endif",tonemapping_pars_fragment:"#ifndef saturate\n#define saturate( a ) clamp( a, 0.0, 1.0 )\n#endif\nuniform float toneMappingExposure;\nvec3 LinearToneMapping( vec3 color ) {\n\treturn toneMappingExposure * color;\n}\nvec3 ReinhardToneMapping( vec3 color ) {\n\tcolor *= toneMappingExposure;\n\treturn saturate( color / ( vec3( 1.0 ) + color ) );\n}\nvec3 OptimizedCineonToneMapping( vec3 color ) {\n\tcolor *= toneMappingExposure;\n\tcolor = max( vec3( 0.0 ), color - 0.004 );\n\treturn pow( ( color * ( 6.2 * color + 0.5 ) ) / ( color * ( 6.2 * color + 1.7 ) + 0.06 ), vec3( 2.2 ) );\n}\nvec3 RRTAndODTFit( vec3 v ) {\n\tvec3 a = v * ( v + 0.0245786 ) - 0.000090537;\n\tvec3 b = v * ( 0.983729 * v + 0.4329510 ) + 0.238081;\n\treturn a / b;\n}\nvec3 ACESFilmicToneMapping( vec3 color ) {\n\tconst mat3 ACESInputMat = mat3(\n\t\tvec3( 0.59719, 0.07600, 0.02840 ),\t\tvec3( 0.35458, 0.90834, 0.13383 ),\n\t\tvec3( 0.04823, 0.01566, 0.83777 )\n\t);\n\tconst mat3 ACESOutputMat = mat3(\n\t\tvec3(\t1.60475, -0.10208, -0.00327 ),\t\tvec3( -0.53108,\t1.10813, -0.07276 ),\n\t\tvec3( -0.07367, -0.00605,\t1.07602 )\n\t);\n\tcolor *= toneMappingExposure / 0.6;\n\tcolor = ACESInputMat * color;\n\tcolor = RRTAndODTFit( color );\n\tcolor = ACESOutputMat * color;\n\treturn saturate( color );\n}\nvec3 CustomToneMapping( vec3 color ) { return color; }",transmission_fragment:"#ifdef USE_TRANSMISSION\n\tfloat transmissionAlpha = 1.0;\n\tfloat transmissionFactor = transmission;\n\tfloat thicknessFactor = thickness;\n\t#ifdef USE_TRANSMISSIONMAP\n\t\ttransmissionFactor *= texture2D( transmissionMap, vUv ).r;\n\t#endif\n\t#ifdef USE_THICKNESSMAP\n\t\tthicknessFactor *= texture2D( thicknessMap, vUv ).g;\n\t#endif\n\tvec3 pos = vWorldPosition;\n\tvec3 v = normalize( cameraPosition - pos );\n\tvec3 n = inverseTransformDirection( normal, viewMatrix );\n\tvec4 transmission = getIBLVolumeRefraction(\n\t\tn, v, roughnessFactor, material.diffuseColor, material.specularColor, material.specularF90,\n\t\tpos, modelMatrix, viewMatrix, projectionMatrix, ior, thicknessFactor,\n\t\tattenuationColor, attenuationDistance );\n\ttotalDiffuse = mix( totalDiffuse, transmission.rgb, transmissionFactor );\n\ttransmissionAlpha = mix( transmissionAlpha, transmission.a, transmissionFactor );\n#endif",transmission_pars_fragment:"#ifdef USE_TRANSMISSION\n\tuniform float transmission;\n\tuniform float thickness;\n\tuniform float attenuationDistance;\n\tuniform vec3 attenuationColor;\n\t#ifdef USE_TRANSMISSIONMAP\n\t\tuniform sampler2D transmissionMap;\n\t#endif\n\t#ifdef USE_THICKNESSMAP\n\t\tuniform sampler2D thicknessMap;\n\t#endif\n\tuniform vec2 transmissionSamplerSize;\n\tuniform sampler2D transmissionSamplerMap;\n\tuniform mat4 modelMatrix;\n\tuniform mat4 projectionMatrix;\n\tvarying vec3 vWorldPosition;\n\tvec3 getVolumeTransmissionRay( vec3 n, vec3 v, float thickness, float ior, mat4 modelMatrix ) {\n\t\tvec3 refractionVector = refract( - v, normalize( n ), 1.0 / ior );\n\t\tvec3 modelScale;\n\t\tmodelScale.x = length( vec3( modelMatrix[ 0 ].xyz ) );\n\t\tmodelScale.y = length( vec3( modelMatrix[ 1 ].xyz ) );\n\t\tmodelScale.z = length( vec3( modelMatrix[ 2 ].xyz ) 
);\n\t\treturn normalize( refractionVector ) * thickness * modelScale;\n\t}\n\tfloat applyIorToRoughness( float roughness, float ior ) {\n\t\treturn roughness * clamp( ior * 2.0 - 2.0, 0.0, 1.0 );\n\t}\n\tvec4 getTransmissionSample( vec2 fragCoord, float roughness, float ior ) {\n\t\tfloat framebufferLod = log2( transmissionSamplerSize.x ) * applyIorToRoughness( roughness, ior );\n\t\t#ifdef TEXTURE_LOD_EXT\n\t\t\treturn texture2DLodEXT( transmissionSamplerMap, fragCoord.xy, framebufferLod );\n\t\t#else\n\t\t\treturn texture2D( transmissionSamplerMap, fragCoord.xy, framebufferLod );\n\t\t#endif\n\t}\n\tvec3 applyVolumeAttenuation( vec3 radiance, float transmissionDistance, vec3 attenuationColor, float attenuationDistance ) {\n\t\tif ( attenuationDistance == 0.0 ) {\n\t\t\treturn radiance;\n\t\t} else {\n\t\t\tvec3 attenuationCoefficient = -log( attenuationColor ) / attenuationDistance;\n\t\t\tvec3 transmittance = exp( - attenuationCoefficient * transmissionDistance );\t\t\treturn transmittance * radiance;\n\t\t}\n\t}\n\tvec4 getIBLVolumeRefraction( vec3 n, vec3 v, float roughness, vec3 diffuseColor, vec3 specularColor, float specularF90,\n\t\tvec3 position, mat4 modelMatrix, mat4 viewMatrix, mat4 projMatrix, float ior, float thickness,\n\t\tvec3 attenuationColor, float attenuationDistance ) {\n\t\tvec3 transmissionRay = getVolumeTransmissionRay( n, v, thickness, ior, modelMatrix );\n\t\tvec3 refractedRayExit = position + transmissionRay;\n\t\tvec4 ndcPos = projMatrix * viewMatrix * vec4( refractedRayExit, 1.0 );\n\t\tvec2 refractionCoords = ndcPos.xy / ndcPos.w;\n\t\trefractionCoords += 1.0;\n\t\trefractionCoords /= 2.0;\n\t\tvec4 transmittedLight = getTransmissionSample( refractionCoords, roughness, ior );\n\t\tvec3 attenuatedColor = applyVolumeAttenuation( transmittedLight.rgb, length( transmissionRay ), attenuationColor, attenuationDistance );\n\t\tvec3 F = EnvironmentBRDF( n, v, specularColor, specularF90, roughness );\n\t\treturn vec4( ( 1.0 - F ) * attenuatedColor * diffuseColor, transmittedLight.a );\n\t}\n#endif",uv_pars_fragment:"#if ( defined( USE_UV ) && ! 
defined( UVS_VERTEX_ONLY ) )\n\tvarying vec2 vUv;\n#endif",uv_pars_vertex:"#ifdef USE_UV\n\t#ifdef UVS_VERTEX_ONLY\n\t\tvec2 vUv;\n\t#else\n\t\tvarying vec2 vUv;\n\t#endif\n\tuniform mat3 uvTransform;\n#endif",uv_vertex:"#ifdef USE_UV\n\tvUv = ( uvTransform * vec3( uv, 1 ) ).xy;\n#endif",uv2_pars_fragment:"#if defined( USE_LIGHTMAP ) || defined( USE_AOMAP )\n\tvarying vec2 vUv2;\n#endif",uv2_pars_vertex:"#if defined( USE_LIGHTMAP ) || defined( USE_AOMAP )\n\tattribute vec2 uv2;\n\tvarying vec2 vUv2;\n\tuniform mat3 uv2Transform;\n#endif",uv2_vertex:"#if defined( USE_LIGHTMAP ) || defined( USE_AOMAP )\n\tvUv2 = ( uv2Transform * vec3( uv2, 1 ) ).xy;\n#endif",worldpos_vertex:"#if defined( USE_ENVMAP ) || defined( DISTANCE ) || defined ( USE_SHADOWMAP ) || defined ( USE_TRANSMISSION )\n\tvec4 worldPosition = vec4( transformed, 1.0 );\n\t#ifdef USE_INSTANCING\n\t\tworldPosition = instanceMatrix * worldPosition;\n\t#endif\n\tworldPosition = modelMatrix * worldPosition;\n#endif",background_vert:"varying vec2 vUv;\nuniform mat3 uvTransform;\nvoid main() {\n\tvUv = ( uvTransform * vec3( uv, 1 ) ).xy;\n\tgl_Position = vec4( position.xy, 1.0, 1.0 );\n}",background_frag:"uniform sampler2D t2D;\nvarying vec2 vUv;\nvoid main() {\n\tvec4 texColor = texture2D( t2D, vUv );\n\tgl_FragColor = mapTexelToLinear( texColor );\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n}",cube_vert:"varying vec3 vWorldDirection;\n#include <common>\nvoid main() {\n\tvWorldDirection = transformDirection( position, modelMatrix );\n\t#include <begin_vertex>\n\t#include <project_vertex>\n\tgl_Position.z = gl_Position.w;\n}",cube_frag:"#include <envmap_common_pars_fragment>\nuniform float opacity;\nvarying vec3 vWorldDirection;\n#include <cube_uv_reflection_fragment>\nvoid main() {\n\tvec3 vReflect = vWorldDirection;\n\t#include <envmap_fragment>\n\tgl_FragColor = envColor;\n\tgl_FragColor.a *= opacity;\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n}",depth_vert:"#include <common>\n#include <uv_pars_vertex>\n#include <displacementmap_pars_vertex>\n#include <morphtarget_pars_vertex>\n#include <skinning_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvarying vec2 vHighPrecisionZW;\nvoid main() {\n\t#include <uv_vertex>\n\t#include <skinbase_vertex>\n\t#ifdef USE_DISPLACEMENTMAP\n\t\t#include <beginnormal_vertex>\n\t\t#include <morphnormal_vertex>\n\t\t#include <skinnormal_vertex>\n\t#endif\n\t#include <begin_vertex>\n\t#include <morphtarget_vertex>\n\t#include <skinning_vertex>\n\t#include <displacementmap_vertex>\n\t#include <project_vertex>\n\t#include <logdepthbuf_vertex>\n\t#include <clipping_planes_vertex>\n\tvHighPrecisionZW = gl_Position.zw;\n}",depth_frag:"#if DEPTH_PACKING == 3200\n\tuniform float opacity;\n#endif\n#include <common>\n#include <packing>\n#include <uv_pars_fragment>\n#include <map_pars_fragment>\n#include <alphamap_pars_fragment>\n#include <alphatest_pars_fragment>\n#include <logdepthbuf_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvarying vec2 vHighPrecisionZW;\nvoid main() {\n\t#include <clipping_planes_fragment>\n\tvec4 diffuseColor = vec4( 1.0 );\n\t#if DEPTH_PACKING == 3200\n\t\tdiffuseColor.a = opacity;\n\t#endif\n\t#include <map_fragment>\n\t#include <alphamap_fragment>\n\t#include <alphatest_fragment>\n\t#include <logdepthbuf_fragment>\n\tfloat fragCoordZ = 0.5 * vHighPrecisionZW[0] / vHighPrecisionZW[1] + 0.5;\n\t#if DEPTH_PACKING == 3200\n\t\tgl_FragColor = vec4( vec3( 1.0 - fragCoordZ ), opacity );\n\t#elif DEPTH_PACKING == 3201\n\t\tgl_FragColor = packDepthToRGBA( fragCoordZ );\n\t#endif\n}",distanceRGBA_vert:"#define DISTANCE\nvarying vec3 vWorldPosition;\n#include <common>\n#include <uv_pars_vertex>\n#include <displacementmap_pars_vertex>\n#include <morphtarget_pars_vertex>\n#include <skinning_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvoid main() {\n\t#include <uv_vertex>\n\t#include <skinbase_vertex>\n\t#ifdef USE_DISPLACEMENTMAP\n\t\t#include <beginnormal_vertex>\n\t\t#include <morphnormal_vertex>\n\t\t#include <skinnormal_vertex>\n\t#endif\n\t#include <begin_vertex>\n\t#include <morphtarget_vertex>\n\t#include <skinning_vertex>\n\t#include <displacementmap_vertex>\n\t#include <project_vertex>\n\t#include <worldpos_vertex>\n\t#include <clipping_planes_vertex>\n\tvWorldPosition = worldPosition.xyz;\n}",distanceRGBA_frag:"#define DISTANCE\nuniform vec3 referencePosition;\nuniform float nearDistance;\nuniform float farDistance;\nvarying vec3 vWorldPosition;\n#include <common>\n#include <packing>\n#include <uv_pars_fragment>\n#include <map_pars_fragment>\n#include <alphamap_pars_fragment>\n#include <alphatest_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvoid main () {\n\t#include <clipping_planes_fragment>\n\tvec4 diffuseColor = vec4( 1.0 );\n\t#include <map_fragment>\n\t#include <alphamap_fragment>\n\t#include <alphatest_fragment>\n\tfloat dist = length( vWorldPosition - referencePosition );\n\tdist = ( dist - nearDistance ) / ( farDistance - nearDistance );\n\tdist = saturate( dist );\n\tgl_FragColor = packDepthToRGBA( dist );\n}",equirect_vert:"varying vec3 vWorldDirection;\n#include <common>\nvoid main() {\n\tvWorldDirection = transformDirection( position, modelMatrix );\n\t#include <begin_vertex>\n\t#include <project_vertex>\n}",equirect_frag:"uniform sampler2D tEquirect;\nvarying vec3 vWorldDirection;\n#include <common>\nvoid main() {\n\tvec3 direction = normalize( vWorldDirection );\n\tvec2 sampleUV = equirectUv( direction );\n\tvec4 texColor = texture2D( tEquirect, sampleUV );\n\tgl_FragColor = mapTexelToLinear( texColor );\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n}",linedashed_vert:"uniform float scale;\nattribute float lineDistance;\nvarying float vLineDistance;\n#include <common>\n#include <color_pars_vertex>\n#include <fog_pars_vertex>\n#include <morphtarget_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvoid main() {\n\tvLineDistance = scale * lineDistance;\n\t#include <color_vertex>\n\t#include <begin_vertex>\n\t#include <morphtarget_vertex>\n\t#include <project_vertex>\n\t#include <logdepthbuf_vertex>\n\t#include <clipping_planes_vertex>\n\t#include <fog_vertex>\n}",linedashed_frag:"uniform vec3 diffuse;\nuniform float opacity;\nuniform float dashSize;\nuniform float totalSize;\nvarying float vLineDistance;\n#include <common>\n#include <color_pars_fragment>\n#include <fog_pars_fragment>\n#include <logdepthbuf_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvoid main() {\n\t#include <clipping_planes_fragment>\n\tif ( mod( vLineDistance, totalSize ) > dashSize ) {\n\t\tdiscard;\n\t}\n\tvec3 outgoingLight = vec3( 0.0 );\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include <logdepthbuf_fragment>\n\t#include <color_fragment>\n\toutgoingLight = diffuseColor.rgb;\n\t#include <output_fragment>\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n\t#include <fog_fragment>\n\t#include <premultiplied_alpha_fragment>\n}",meshbasic_vert:"#include <common>\n#include <uv_pars_vertex>\n#include <uv2_pars_vertex>\n#include <envmap_pars_vertex>\n#include <color_pars_vertex>\n#include <fog_pars_vertex>\n#include <morphtarget_pars_vertex>\n#include <skinning_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvoid main() {\n\t#include <uv_vertex>\n\t#include <uv2_vertex>\n\t#include <color_vertex>\n\t#if defined ( USE_ENVMAP ) || defined ( USE_SKINNING )\n\t\t#include <beginnormal_vertex>\n\t\t#include <morphnormal_vertex>\n\t\t#include <skinbase_vertex>\n\t\t#include <skinnormal_vertex>\n\t\t#include <defaultnormal_vertex>\n\t#endif\n\t#include <begin_vertex>\n\t#include <morphtarget_vertex>\n\t#include <skinning_vertex>\n\t#include <project_vertex>\n\t#include <logdepthbuf_vertex>\n\t#include <clipping_planes_vertex>\n\t#include <worldpos_vertex>\n\t#include <envmap_vertex>\n\t#include <fog_vertex>\n}",meshbasic_frag:"uniform vec3 diffuse;\nuniform float opacity;\n#ifndef FLAT_SHADED\n\tvarying vec3 vNormal;\n#endif\n#include <common>\n#include <dithering_pars_fragment>\n#include <color_pars_fragment>\n#include <uv_pars_fragment>\n#include <uv2_pars_fragment>\n#include <map_pars_fragment>\n#include <alphamap_pars_fragment>\n#include <alphatest_pars_fragment>\n#include <aomap_pars_fragment>\n#include <lightmap_pars_fragment>\n#include <envmap_common_pars_fragment>\n#include <envmap_pars_fragment>\n#include <cube_uv_reflection_fragment>\n#include <fog_pars_fragment>\n#include <specularmap_pars_fragment>\n#include <logdepthbuf_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvoid main() {\n\t#include <clipping_planes_fragment>\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include <logdepthbuf_fragment>\n\t#include <map_fragment>\n\t#include <color_fragment>\n\t#include <alphamap_fragment>\n\t#include <alphatest_fragment>\n\t#include <specularmap_fragment>\n\tReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );\n\t#ifdef USE_LIGHTMAP\n\t\tvec4 lightMapTexel= texture2D( lightMap, vUv2 );\n\t\treflectedLight.indirectDiffuse += lightMapTexelToLinear( lightMapTexel ).rgb * lightMapIntensity;\n\t#else\n\t\treflectedLight.indirectDiffuse += vec3( 1.0 );\n\t#endif\n\t#include <aomap_fragment>\n\treflectedLight.indirectDiffuse *= diffuseColor.rgb;\n\tvec3 outgoingLight = reflectedLight.indirectDiffuse;\n\t#include <envmap_fragment>\n\t#include <output_fragment>\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n\t#include <fog_fragment>\n\t#include <premultiplied_alpha_fragment>\n\t#include <dithering_fragment>\n}",meshlambert_vert:"#define LAMBERT\nvarying vec3 vLightFront;\nvarying vec3 vIndirectFront;\n#ifdef DOUBLE_SIDED\n\tvarying vec3 vLightBack;\n\tvarying vec3 vIndirectBack;\n#endif\n#include <common>\n#include <uv_pars_vertex>\n#include <uv2_pars_vertex>\n#include <envmap_pars_vertex>\n#include <bsdfs>\n#include <lights_pars_begin>\n#include <color_pars_vertex>\n#include <fog_pars_vertex>\n#include <morphtarget_pars_vertex>\n#include <skinning_pars_vertex>\n#include <shadowmap_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvoid main() {\n\t#include <uv_vertex>\n\t#include <uv2_vertex>\n\t#include <color_vertex>\n\t#include <beginnormal_vertex>\n\t#include <morphnormal_vertex>\n\t#include <skinbase_vertex>\n\t#include <skinnormal_vertex>\n\t#include <defaultnormal_vertex>\n\t#include <begin_vertex>\n\t#include <morphtarget_vertex>\n\t#include <skinning_vertex>\n\t#include <project_vertex>\n\t#include <logdepthbuf_vertex>\n\t#include <clipping_planes_vertex>\n\t#include <worldpos_vertex>\n\t#include <envmap_vertex>\n\t#include <lights_lambert_vertex>\n\t#include <shadowmap_vertex>\n\t#include <fog_vertex>\n}",meshlambert_frag:"uniform vec3 diffuse;\nuniform vec3 emissive;\nuniform float opacity;\nvarying vec3 vLightFront;\nvarying vec3 vIndirectFront;\n#ifdef DOUBLE_SIDED\n\tvarying vec3 vLightBack;\n\tvarying vec3 vIndirectBack;\n#endif\n#include <common>\n#include <packing>\n#include <dithering_pars_fragment>\n#include <color_pars_fragment>\n#include <uv_pars_fragment>\n#include <uv2_pars_fragment>\n#include <map_pars_fragment>\n#include <alphamap_pars_fragment>\n#include <alphatest_pars_fragment>\n#include <aomap_pars_fragment>\n#include <lightmap_pars_fragment>\n#include <emissivemap_pars_fragment>\n#include <envmap_common_pars_fragment>\n#include <envmap_pars_fragment>\n#include <cube_uv_reflection_fragment>\n#include <bsdfs>\n#include <lights_pars_begin>\n#include <fog_pars_fragment>\n#include <shadowmap_pars_fragment>\n#include <shadowmask_pars_fragment>\n#include <specularmap_pars_fragment>\n#include <logdepthbuf_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvoid main() {\n\t#include <clipping_planes_fragment>\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\tReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );\n\tvec3 totalEmissiveRadiance = emissive;\n\t#include <logdepthbuf_fragment>\n\t#include <map_fragment>\n\t#include <color_fragment>\n\t#include <alphamap_fragment>\n\t#include <alphatest_fragment>\n\t#include <specularmap_fragment>\n\t#include <emissivemap_fragment>\n\t#ifdef DOUBLE_SIDED\n\t\treflectedLight.indirectDiffuse += ( gl_FrontFacing ) ? vIndirectFront : vIndirectBack;\n\t#else\n\t\treflectedLight.indirectDiffuse += vIndirectFront;\n\t#endif\n\t#include <lightmap_fragment>\n\treflectedLight.indirectDiffuse *= BRDF_Lambert( diffuseColor.rgb );\n\t#ifdef DOUBLE_SIDED\n\t\treflectedLight.directDiffuse = ( gl_FrontFacing ) ? vLightFront : vLightBack;\n\t#else\n\t\treflectedLight.directDiffuse = vLightFront;\n\t#endif\n\treflectedLight.directDiffuse *= BRDF_Lambert( diffuseColor.rgb ) * getShadowMask();\n\t#include <aomap_fragment>\n\tvec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + totalEmissiveRadiance;\n\t#include <envmap_fragment>\n\t#include <output_fragment>\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n\t#include <fog_fragment>\n\t#include <premultiplied_alpha_fragment>\n\t#include <dithering_fragment>\n}",meshmatcap_vert:"#define MATCAP\nvarying vec3 vViewPosition;\n#include <common>\n#include <uv_pars_vertex>\n#include <color_pars_vertex>\n#include <displacementmap_pars_vertex>\n#include <fog_pars_vertex>\n#include <normal_pars_vertex>\n#include <morphtarget_pars_vertex>\n#include <skinning_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvoid main() {\n\t#include <uv_vertex>\n\t#include <color_vertex>\n\t#include <beginnormal_vertex>\n\t#include <morphnormal_vertex>\n\t#include <skinbase_vertex>\n\t#include <skinnormal_vertex>\n\t#include <defaultnormal_vertex>\n\t#include <normal_vertex>\n\t#include <begin_vertex>\n\t#include <morphtarget_vertex>\n\t#include <skinning_vertex>\n\t#include <displacementmap_vertex>\n\t#include <project_vertex>\n\t#include <logdepthbuf_vertex>\n\t#include <clipping_planes_vertex>\n\t#include <fog_vertex>\n\tvViewPosition = - mvPosition.xyz;\n}",meshmatcap_frag:"#define MATCAP\nuniform vec3 diffuse;\nuniform float opacity;\nuniform sampler2D matcap;\nvarying vec3 vViewPosition;\n#include <common>\n#include <dithering_pars_fragment>\n#include <color_pars_fragment>\n#include <uv_pars_fragment>\n#include <map_pars_fragment>\n#include <alphamap_pars_fragment>\n#include <alphatest_pars_fragment>\n#include <fog_pars_fragment>\n#include <normal_pars_fragment>\n#include <bumpmap_pars_fragment>\n#include <normalmap_pars_fragment>\n#include <logdepthbuf_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvoid main() {\n\t#include <clipping_planes_fragment>\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include <logdepthbuf_fragment>\n\t#include <map_fragment>\n\t#include <color_fragment>\n\t#include <alphamap_fragment>\n\t#include <alphatest_fragment>\n\t#include <normal_fragment_begin>\n\t#include <normal_fragment_maps>\n\tvec3 viewDir = normalize( vViewPosition );\n\tvec3 x = normalize( vec3( viewDir.z, 0.0, - viewDir.x ) );\n\tvec3 y = cross( viewDir, x );\n\tvec2 uv = vec2( dot( x, normal ), dot( y, normal ) ) * 0.495 + 0.5;\n\t#ifdef USE_MATCAP\n\t\tvec4 matcapColor = texture2D( matcap, uv );\n\t\tmatcapColor = matcapTexelToLinear( matcapColor );\n\t#else\n\t\tvec4 matcapColor = vec4( 1.0 );\n\t#endif\n\tvec3 outgoingLight = diffuseColor.rgb * matcapColor.rgb;\n\t#include <output_fragment>\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n\t#include <fog_fragment>\n\t#include <premultiplied_alpha_fragment>\n\t#include <dithering_fragment>\n}",meshnormal_vert:"#define NORMAL\n#if defined( FLAT_SHADED ) || defined( USE_BUMPMAP ) || defined( TANGENTSPACE_NORMALMAP )\n\tvarying vec3 vViewPosition;\n#endif\n#include <common>\n#include <uv_pars_vertex>\n#include <displacementmap_pars_vertex>\n#include <normal_pars_vertex>\n#include <morphtarget_pars_vertex>\n#include <skinning_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvoid main() {\n\t#include <uv_vertex>\n\t#include <beginnormal_vertex>\n\t#include <morphnormal_vertex>\n\t#include <skinbase_vertex>\n\t#include <skinnormal_vertex>\n\t#include <defaultnormal_vertex>\n\t#include <normal_vertex>\n\t#include <begin_vertex>\n\t#include <morphtarget_vertex>\n\t#include <skinning_vertex>\n\t#include <displacementmap_vertex>\n\t#include <project_vertex>\n\t#include <logdepthbuf_vertex>\n\t#include <clipping_planes_vertex>\n#if defined( FLAT_SHADED ) || defined( USE_BUMPMAP ) || defined( TANGENTSPACE_NORMALMAP )\n\tvViewPosition = - mvPosition.xyz;\n#endif\n}",meshnormal_frag:"#define NORMAL\nuniform float opacity;\n#if defined( FLAT_SHADED ) || defined( USE_BUMPMAP ) || defined( TANGENTSPACE_NORMALMAP )\n\tvarying vec3 vViewPosition;\n#endif\n#include <packing>\n#include <uv_pars_fragment>\n#include <normal_pars_fragment>\n#include <bumpmap_pars_fragment>\n#include <normalmap_pars_fragment>\n#include <logdepthbuf_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvoid main() {\n\t#include <clipping_planes_fragment>\n\t#include <logdepthbuf_fragment>\n\t#include <normal_fragment_begin>\n\t#include <normal_fragment_maps>\n\tgl_FragColor = vec4( packNormalToRGB( normal ), opacity );\n}",meshphong_vert:"#define PHONG\nvarying vec3 vViewPosition;\n#include <common>\n#include <uv_pars_vertex>\n#include <uv2_pars_vertex>\n#include <displacementmap_pars_vertex>\n#include <envmap_pars_vertex>\n#include <color_pars_vertex>\n#include <fog_pars_vertex>\n#include <normal_pars_vertex>\n#include <morphtarget_pars_vertex>\n#include <skinning_pars_vertex>\n#include <shadowmap_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvoid main() {\n\t#include <uv_vertex>\n\t#include <uv2_vertex>\n\t#include <color_vertex>\n\t#include <beginnormal_vertex>\n\t#include <morphnormal_vertex>\n\t#include <skinbase_vertex>\n\t#include <skinnormal_vertex>\n\t#include <defaultnormal_vertex>\n\t#include <normal_vertex>\n\t#include <begin_vertex>\n\t#include <morphtarget_vertex>\n\t#include <skinning_vertex>\n\t#include <displacementmap_vertex>\n\t#include <project_vertex>\n\t#include <logdepthbuf_vertex>\n\t#include <clipping_planes_vertex>\n\tvViewPosition = - mvPosition.xyz;\n\t#include <worldpos_vertex>\n\t#include <envmap_vertex>\n\t#include <shadowmap_vertex>\n\t#include <fog_vertex>\n}",meshphong_frag:"#define PHONG\nuniform vec3 diffuse;\nuniform vec3 emissive;\nuniform vec3 specular;\nuniform float shininess;\nuniform float opacity;\n#include <common>\n#include <packing>\n#include <dithering_pars_fragment>\n#include <color_pars_fragment>\n#include <uv_pars_fragment>\n#include <uv2_pars_fragment>\n#include <map_pars_fragment>\n#include <alphamap_pars_fragment>\n#include <alphatest_pars_fragment>\n#include <aomap_pars_fragment>\n#include <lightmap_pars_fragment>\n#include <emissivemap_pars_fragment>\n#include <envmap_common_pars_fragment>\n#include <envmap_pars_fragment>\n#include <cube_uv_reflection_fragment>\n#include <fog_pars_fragment>\n#include <bsdfs>\n#include <lights_pars_begin>\n#include <normal_pars_fragment>\n#include <lights_phong_pars_fragment>\n#include <shadowmap_pars_fragment>\n#include <bumpmap_pars_fragment>\n#include <normalmap_pars_fragment>\n#include <specularmap_pars_fragment>\n#include <logdepthbuf_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvoid main() {\n\t#include <clipping_planes_fragment>\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\tReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );\n\tvec3 totalEmissiveRadiance = emissive;\n\t#include <logdepthbuf_fragment>\n\t#include <map_fragment>\n\t#include <color_fragment>\n\t#include <alphamap_fragment>\n\t#include <alphatest_fragment>\n\t#include <specularmap_fragment>\n\t#include <normal_fragment_begin>\n\t#include <normal_fragment_maps>\n\t#include <emissivemap_fragment>\n\t#include <lights_phong_fragment>\n\t#include <lights_fragment_begin>\n\t#include <lights_fragment_maps>\n\t#include <lights_fragment_end>\n\t#include <aomap_fragment>\n\tvec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + reflectedLight.directSpecular + reflectedLight.indirectSpecular + totalEmissiveRadiance;\n\t#include <envmap_fragment>\n\t#include <output_fragment>\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n\t#include <fog_fragment>\n\t#include <premultiplied_alpha_fragment>\n\t#include <dithering_fragment>\n}",meshphysical_vert:"#define STANDARD\nvarying vec3 vViewPosition;\n#ifdef USE_TRANSMISSION\n\tvarying vec3 vWorldPosition;\n#endif\n#include <common>\n#include <uv_pars_vertex>\n#include <uv2_pars_vertex>\n#include <displacementmap_pars_vertex>\n#include <color_pars_vertex>\n#include <fog_pars_vertex>\n#include <normal_pars_vertex>\n#include <morphtarget_pars_vertex>\n#include <skinning_pars_vertex>\n#include <shadowmap_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvoid main() {\n\t#include <uv_vertex>\n\t#include <uv2_vertex>\n\t#include <color_vertex>\n\t#include <beginnormal_vertex>\n\t#include <morphnormal_vertex>\n\t#include <skinbase_vertex>\n\t#include <skinnormal_vertex>\n\t#include <defaultnormal_vertex>\n\t#include <normal_vertex>\n\t#include <begin_vertex>\n\t#include <morphtarget_vertex>\n\t#include <skinning_vertex>\n\t#include <displacementmap_vertex>\n\t#include <project_vertex>\n\t#include <logdepthbuf_vertex>\n\t#include <clipping_planes_vertex>\n\tvViewPosition = - mvPosition.xyz;\n\t#include <worldpos_vertex>\n\t#include <shadowmap_vertex>\n\t#include <fog_vertex>\n#ifdef USE_TRANSMISSION\n\tvWorldPosition = worldPosition.xyz;\n#endif\n}",meshphysical_frag:"#define STANDARD\n#ifdef PHYSICAL\n\t#define IOR\n\t#define SPECULAR\n#endif\nuniform vec3 diffuse;\nuniform vec3 emissive;\nuniform float roughness;\nuniform float metalness;\nuniform float opacity;\n#ifdef IOR\n\tuniform float ior;\n#endif\n#ifdef SPECULAR\n\tuniform float specularIntensity;\n\tuniform vec3 specularColor;\n\t#ifdef USE_SPECULARINTENSITYMAP\n\t\tuniform sampler2D specularIntensityMap;\n\t#endif\n\t#ifdef USE_SPECULARCOLORMAP\n\t\tuniform sampler2D specularColorMap;\n\t#endif\n#endif\n#ifdef USE_CLEARCOAT\n\tuniform float clearcoat;\n\tuniform float clearcoatRoughness;\n#endif\n#ifdef USE_SHEEN\n\tuniform vec3 sheenColor;\n\tuniform float sheenRoughness;\n\t#ifdef USE_SHEENCOLORMAP\n\t\tuniform sampler2D sheenColorMap;\n\t#endif\n\t#ifdef USE_SHEENROUGHNESSMAP\n\t\tuniform sampler2D sheenRoughnessMap;\n\t#endif\n#endif\nvarying vec3 vViewPosition;\n#include <common>\n#include <packing>\n#include <dithering_pars_fragment>\n#include <color_pars_fragment>\n#include <uv_pars_fragment>\n#include <uv2_pars_fragment>\n#include <map_pars_fragment>\n#include <alphamap_pars_fragment>\n#include <alphatest_pars_fragment>\n#include <aomap_pars_fragment>\n#include <lightmap_pars_fragment>\n#include <emissivemap_pars_fragment>\n#include <bsdfs>\n#include <cube_uv_reflection_fragment>\n#include <envmap_common_pars_fragment>\n#include <envmap_physical_pars_fragment>\n#include <fog_pars_fragment>\n#include <lights_pars_begin>\n#include <normal_pars_fragment>\n#include <lights_physical_pars_fragment>\n#include <transmission_pars_fragment>\n#include <shadowmap_pars_fragment>\n#include <bumpmap_pars_fragment>\n#include <normalmap_pars_fragment>\n#include <clearcoat_pars_fragment>\n#include <roughnessmap_pars_fragment>\n#include <metalnessmap_pars_fragment>\n#include <logdepthbuf_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvoid main() {\n\t#include <clipping_planes_fragment>\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\tReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );\n\tvec3 totalEmissiveRadiance = emissive;\n\t#include <logdepthbuf_fragment>\n\t#include <map_fragment>\n\t#include <color_fragment>\n\t#include <alphamap_fragment>\n\t#include <alphatest_fragment>\n\t#include <roughnessmap_fragment>\n\t#include <metalnessmap_fragment>\n\t#include <normal_fragment_begin>\n\t#include <normal_fragment_maps>\n\t#include <clearcoat_normal_fragment_begin>\n\t#include <clearcoat_normal_fragment_maps>\n\t#include <emissivemap_fragment>\n\t#include <lights_physical_fragment>\n\t#include <lights_fragment_begin>\n\t#include <lights_fragment_maps>\n\t#include <lights_fragment_end>\n\t#include <aomap_fragment>\n\tvec3 totalDiffuse = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse;\n\tvec3 totalSpecular = reflectedLight.directSpecular + reflectedLight.indirectSpecular;\n\t#include <transmission_fragment>\n\tvec3 outgoingLight = totalDiffuse + totalSpecular + totalEmissiveRadiance;\n\t#ifdef USE_SHEEN\n\t\tfloat sheenEnergyComp = 1.0 - 0.157 * max3( material.sheenColor );\n\t\toutgoingLight = outgoingLight * sheenEnergyComp + sheenSpecular;\n\t#endif\n\t#ifdef USE_CLEARCOAT\n\t\tfloat dotNVcc = saturate( dot( geometry.clearcoatNormal, geometry.viewDir ) );\n\t\tvec3 Fcc = F_Schlick( material.clearcoatF0, material.clearcoatF90, dotNVcc );\n\t\toutgoingLight = outgoingLight * ( 1.0 - material.clearcoat * Fcc ) + clearcoatSpecular * material.clearcoat;\n\t#endif\n\t#include <output_fragment>\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n\t#include <fog_fragment>\n\t#include <premultiplied_alpha_fragment>\n\t#include <dithering_fragment>\n}",meshtoon_vert:"#define TOON\nvarying vec3 vViewPosition;\n#include <common>\n#include <uv_pars_vertex>\n#include <uv2_pars_vertex>\n#include <displacementmap_pars_vertex>\n#include <color_pars_vertex>\n#include <fog_pars_vertex>\n#include <normal_pars_vertex>\n#include <morphtarget_pars_vertex>\n#include <skinning_pars_vertex>\n#include <shadowmap_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvoid main() {\n\t#include <uv_vertex>\n\t#include <uv2_vertex>\n\t#include <color_vertex>\n\t#include <beginnormal_vertex>\n\t#include <morphnormal_vertex>\n\t#include <skinbase_vertex>\n\t#include <skinnormal_vertex>\n\t#include <defaultnormal_vertex>\n\t#include <normal_vertex>\n\t#include <begin_vertex>\n\t#include <morphtarget_vertex>\n\t#include <skinning_vertex>\n\t#include <displacementmap_vertex>\n\t#include <project_vertex>\n\t#include <logdepthbuf_vertex>\n\t#include <clipping_planes_vertex>\n\tvViewPosition = - mvPosition.xyz;\n\t#include <worldpos_vertex>\n\t#include <shadowmap_vertex>\n\t#include <fog_vertex>\n}",meshtoon_frag:"#define TOON\nuniform vec3 diffuse;\nuniform vec3 emissive;\nuniform float opacity;\n#include <common>\n#include <packing>\n#include <dithering_pars_fragment>\n#include <color_pars_fragment>\n#include <uv_pars_fragment>\n#include <uv2_pars_fragment>\n#include <map_pars_fragment>\n#include <alphamap_pars_fragment>\n#include <alphatest_pars_fragment>\n#include <aomap_pars_fragment>\n#include <lightmap_pars_fragment>\n#include <emissivemap_pars_fragment>\n#include <gradientmap_pars_fragment>\n#include <fog_pars_fragment>\n#include <bsdfs>\n#include <lights_pars_begin>\n#include <normal_pars_fragment>\n#include <lights_toon_pars_fragment>\n#include <shadowmap_pars_fragment>\n#include <bumpmap_pars_fragment>\n#include <normalmap_pars_fragment>\n#include <logdepthbuf_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvoid main() {\n\t#include <clipping_planes_fragment>\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\tReflectedLight reflectedLight = ReflectedLight( vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ), vec3( 0.0 ) );\n\tvec3 totalEmissiveRadiance = emissive;\n\t#include <logdepthbuf_fragment>\n\t#include <map_fragment>\n\t#include <color_fragment>\n\t#include <alphamap_fragment>\n\t#include <alphatest_fragment>\n\t#include <normal_fragment_begin>\n\t#include <normal_fragment_maps>\n\t#include <emissivemap_fragment>\n\t#include <lights_toon_fragment>\n\t#include <lights_fragment_begin>\n\t#include <lights_fragment_maps>\n\t#include <lights_fragment_end>\n\t#include <aomap_fragment>\n\tvec3 outgoingLight = reflectedLight.directDiffuse + reflectedLight.indirectDiffuse + totalEmissiveRadiance;\n\t#include <output_fragment>\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n\t#include <fog_fragment>\n\t#include <premultiplied_alpha_fragment>\n\t#include <dithering_fragment>\n}",points_vert:"uniform float size;\nuniform float scale;\n#include <common>\n#include <color_pars_vertex>\n#include <fog_pars_vertex>\n#include <morphtarget_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvoid main() {\n\t#include <color_vertex>\n\t#include <begin_vertex>\n\t#include <morphtarget_vertex>\n\t#include <project_vertex>\n\tgl_PointSize = size;\n\t#ifdef USE_SIZEATTENUATION\n\t\tbool isPerspective = isPerspectiveMatrix( projectionMatrix );\n\t\tif ( isPerspective ) gl_PointSize *= ( scale / - mvPosition.z );\n\t#endif\n\t#include <logdepthbuf_vertex>\n\t#include <clipping_planes_vertex>\n\t#include <worldpos_vertex>\n\t#include <fog_vertex>\n}",points_frag:"uniform vec3 diffuse;\nuniform float opacity;\n#include <common>\n#include <color_pars_fragment>\n#include <map_particle_pars_fragment>\n#include <alphatest_pars_fragment>\n#include <fog_pars_fragment>\n#include <logdepthbuf_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvoid main() {\n\t#include <clipping_planes_fragment>\n\tvec3 outgoingLight = vec3( 0.0 );\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include <logdepthbuf_fragment>\n\t#include <map_particle_fragment>\n\t#include <color_fragment>\n\t#include <alphatest_fragment>\n\toutgoingLight = diffuseColor.rgb;\n\t#include <output_fragment>\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n\t#include <fog_fragment>\n\t#include <premultiplied_alpha_fragment>\n}",shadow_vert:"#include <common>\n#include <fog_pars_vertex>\n#include <morphtarget_pars_vertex>\n#include <skinning_pars_vertex>\n#include <shadowmap_pars_vertex>\nvoid main() {\n\t#include <beginnormal_vertex>\n\t#include <morphnormal_vertex>\n\t#include <skinbase_vertex>\n\t#include <skinnormal_vertex>\n\t#include <defaultnormal_vertex>\n\t#include <begin_vertex>\n\t#include <morphtarget_vertex>\n\t#include <skinning_vertex>\n\t#include <project_vertex>\n\t#include <worldpos_vertex>\n\t#include <shadowmap_vertex>\n\t#include <fog_vertex>\n}",shadow_frag:"uniform vec3 color;\nuniform float opacity;\n#include <common>\n#include <packing>\n#include <fog_pars_fragment>\n#include <bsdfs>\n#include <lights_pars_begin>\n#include <shadowmap_pars_fragment>\n#include <shadowmask_pars_fragment>\nvoid main() {\n\tgl_FragColor = vec4( color, opacity * ( 1.0 - getShadowMask() ) );\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n\t#include <fog_fragment>\n}",sprite_vert:"uniform float rotation;\nuniform vec2 center;\n#include <common>\n#include <uv_pars_vertex>\n#include <fog_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvoid main() {\n\t#include <uv_vertex>\n\tvec4 mvPosition = modelViewMatrix * vec4( 0.0, 0.0, 0.0, 1.0 );\n\tvec2 scale;\n\tscale.x = length( vec3( modelMatrix[ 0 ].x, modelMatrix[ 0 ].y, modelMatrix[ 0 ].z ) );\n\tscale.y = length( vec3( modelMatrix[ 1 ].x, modelMatrix[ 1 ].y, modelMatrix[ 1 ].z ) );\n\t#ifndef USE_SIZEATTENUATION\n\t\tbool isPerspective = isPerspectiveMatrix( projectionMatrix );\n\t\tif ( isPerspective ) scale *= - mvPosition.z;\n\t#endif\n\tvec2 alignedPosition = ( position.xy - ( center - vec2( 0.5 ) ) ) * scale;\n\tvec2 rotatedPosition;\n\trotatedPosition.x = cos( rotation ) * alignedPosition.x - sin( rotation ) * alignedPosition.y;\n\trotatedPosition.y = sin( rotation ) * alignedPosition.x + cos( rotation ) * alignedPosition.y;\n\tmvPosition.xy += rotatedPosition;\n\tgl_Position = projectionMatrix * mvPosition;\n\t#include <logdepthbuf_vertex>\n\t#include <clipping_planes_vertex>\n\t#include <fog_vertex>\n}",sprite_frag:"uniform vec3 diffuse;\nuniform float opacity;\n#include <common>\n#include <uv_pars_fragment>\n#include <map_pars_fragment>\n#include <alphamap_pars_fragment>\n#include <alphatest_pars_fragment>\n#include <fog_pars_fragment>\n#include <logdepthbuf_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvoid main() {\n\t#include <clipping_planes_fragment>\n\tvec3 outgoingLight = vec3( 0.0 );\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include <logdepthbuf_fragment>\n\t#include <map_fragment>\n\t#include <alphamap_fragment>\n\t#include <alphatest_fragment>\n\toutgoingLight = diffuseColor.rgb;\n\t#include <output_fragment>\n\t#include <tonemapping_fragment>\n\t#include <encodings_fragment>\n\t#include <fog_fragment>\n}"},oi={common:{diffuse:{value:new Ze(16777215)},opacity:{value:1},map:{value:null},uvTransform:{value:new pt},uv2Transform:{value:new pt},alphaMap:{value:null},alphaTest:{value:0}},specularmap:{specularMap:{value:null}},envmap:{envMap:{value:null},flipEnvMap:{value:-1},reflectivity:{value:1},ior:{value:1.5},refractionRatio:{value:.98}},aomap:{aoMap:{value:null},aoMapIntensity:{value:1}},lightmap:{lightMap:{value:null},lightMapIntensity:{value:1}},emissivemap:{emissiveMap:{value:null}},bumpmap:{bumpMap:{value:null},bumpScale:{value:1}},normalmap:{normalMap:{value:null},normalScale:{value:new dt(1,1)}},displacementmap:{displacementMap:{value:null},displacementScale:{value:1},displacementBias:{value:0}},roughnessmap:{roughnessMap:{value:null}},metalnessmap:{metalnessMap:{value:null}},gradientmap:{gradientMap:{value:null}},fog:{fogDensity:{value:25e-5},fogNear:{value:1},fogFar:{value:2e3},fogColor:{value:new 
Ze(16777215)}},lights:{ambientLightColor:{value:[]},lightProbe:{value:[]},directionalLights:{value:[],properties:{direction:{},color:{}}},directionalLightShadows:{value:[],properties:{shadowBias:{},shadowNormalBias:{},shadowRadius:{},shadowMapSize:{}}},directionalShadowMap:{value:[]},directionalShadowMatrix:{value:[]},spotLights:{value:[],properties:{color:{},position:{},direction:{},distance:{},coneCos:{},penumbraCos:{},decay:{}}},spotLightShadows:{value:[],properties:{shadowBias:{},shadowNormalBias:{},shadowRadius:{},shadowMapSize:{}}},spotShadowMap:{value:[]},spotShadowMatrix:{value:[]},pointLights:{value:[],properties:{color:{},position:{},decay:{},distance:{}}},pointLightShadows:{value:[],properties:{shadowBias:{},shadowNormalBias:{},shadowRadius:{},shadowMapSize:{},shadowCameraNear:{},shadowCameraFar:{}}},pointShadowMap:{value:[]},pointShadowMatrix:{value:[]},hemisphereLights:{value:[],properties:{direction:{},skyColor:{},groundColor:{}}},rectAreaLights:{value:[],properties:{color:{},position:{},width:{},height:{}}},ltc_1:{value:null},ltc_2:{value:null}},points:{diffuse:{value:new Ze(16777215)},opacity:{value:1},size:{value:1},scale:{value:1},map:{value:null},alphaMap:{value:null},alphaTest:{value:0},uvTransform:{value:new pt}},sprite:{diffuse:{value:new Ze(16777215)},opacity:{value:1},center:{value:new dt(.5,.5)},rotation:{value:0},map:{value:null},alphaMap:{value:null},alphaTest:{value:0},uvTransform:{value:new pt}}},li={basic:{uniforms:Gn([oi.common,oi.specularmap,oi.envmap,oi.aomap,oi.lightmap,oi.fog]),vertexShader:ai.meshbasic_vert,fragmentShader:ai.meshbasic_frag},lambert:{uniforms:Gn([oi.common,oi.specularmap,oi.envmap,oi.aomap,oi.lightmap,oi.emissivemap,oi.fog,oi.lights,{emissive:{value:new Ze(0)}}]),vertexShader:ai.meshlambert_vert,fragmentShader:ai.meshlambert_frag},phong:{uniforms:Gn([oi.common,oi.specularmap,oi.envmap,oi.aomap,oi.lightmap,oi.emissivemap,oi.bumpmap,oi.normalmap,oi.displacementmap,oi.fog,oi.lights,{emissive:{value:new Ze(0)},specular:{value:new Ze(1118481)},shininess:{value:30}}]),vertexShader:ai.meshphong_vert,fragmentShader:ai.meshphong_frag},standard:{uniforms:Gn([oi.common,oi.envmap,oi.aomap,oi.lightmap,oi.emissivemap,oi.bumpmap,oi.normalmap,oi.displacementmap,oi.roughnessmap,oi.metalnessmap,oi.fog,oi.lights,{emissive:{value:new Ze(0)},roughness:{value:1},metalness:{value:0},envMapIntensity:{value:1}}]),vertexShader:ai.meshphysical_vert,fragmentShader:ai.meshphysical_frag},toon:{uniforms:Gn([oi.common,oi.aomap,oi.lightmap,oi.emissivemap,oi.bumpmap,oi.normalmap,oi.displacementmap,oi.gradientmap,oi.fog,oi.lights,{emissive:{value:new Ze(0)}}]),vertexShader:ai.meshtoon_vert,fragmentShader:ai.meshtoon_frag},matcap:{uniforms:Gn([oi.common,oi.bumpmap,oi.normalmap,oi.displacementmap,oi.fog,{matcap:{value:null}}]),vertexShader:ai.meshmatcap_vert,fragmentShader:ai.meshmatcap_frag},points:{uniforms:Gn([oi.points,oi.fog]),vertexShader:ai.points_vert,fragmentShader:ai.points_frag},dashed:{uniforms:Gn([oi.common,oi.fog,{scale:{value:1},dashSize:{value:1},totalSize:{value:2}}]),vertexShader:ai.linedashed_vert,fragmentShader:ai.linedashed_frag},depth:{uniforms:Gn([oi.common,oi.displacementmap]),vertexShader:ai.depth_vert,fragmentShader:ai.depth_frag},normal:{uniforms:Gn([oi.common,oi.bumpmap,oi.normalmap,oi.displacementmap,{opacity:{value:1}}]),vertexShader:ai.meshnormal_vert,fragmentShader:ai.meshnormal_frag},sprite:{uniforms:Gn([oi.sprite,oi.fog]),vertexShader:ai.sprite_vert,fragmentShader:ai.sprite_frag},background:{uniforms:{uvTransform:{value:new 
pt},t2D:{value:null}},vertexShader:ai.background_vert,fragmentShader:ai.background_frag},cube:{uniforms:Gn([oi.envmap,{opacity:{value:1}}]),vertexShader:ai.cube_vert,fragmentShader:ai.cube_frag},equirect:{uniforms:{tEquirect:{value:null}},vertexShader:ai.equirect_vert,fragmentShader:ai.equirect_frag},distanceRGBA:{uniforms:Gn([oi.common,oi.displacementmap,{referencePosition:{value:new Lt},nearDistance:{value:1},farDistance:{value:1e3}}]),vertexShader:ai.distanceRGBA_vert,fragmentShader:ai.distanceRGBA_frag},shadow:{uniforms:Gn([oi.lights,oi.fog,{color:{value:new Ze(0)},opacity:{value:1}}]),vertexShader:ai.shadow_vert,fragmentShader:ai.shadow_frag}};function ci(t,e,n,i,r){const s=new Ze(0);let a,o,c=0,h=null,u=0,d=null;function p(t,e){n.buffers.color.setClear(t.r,t.g,t.b,e,r)}return{getClearColor:function(){return s},setClearColor:function(t,e=1){s.set(t),c=e,p(s,c)},getClearAlpha:function(){return c},setClearAlpha:function(t){c=t,p(s,c)},render:function(n,r){let m=!1,f=!0===r.isScene?r.background:null;f&&f.isTexture&&(f=e.get(f));const g=t.xr,v=g.getSession&&g.getSession();v&&"additive"===v.environmentBlendMode&&(f=null),null===f?p(s,c):f&&f.isColor&&(p(f,1),m=!0),(t.autoClear||m)&&t.clear(t.autoClearColor,t.autoClearDepth,t.autoClearStencil),f&&(f.isCubeTexture||f.mapping===l)?(void 0===o&&(o=new Fn(new Un(1,1,1),new Vn({name:"BackgroundCubeMaterial",uniforms:Hn(li.cube.uniforms),vertexShader:li.cube.vertexShader,fragmentShader:li.cube.fragmentShader,side:1,depthTest:!1,depthWrite:!1,fog:!1})),o.geometry.deleteAttribute("normal"),o.geometry.deleteAttribute("uv"),o.onBeforeRender=function(t,e,n){this.matrixWorld.copyPosition(n.matrixWorld)},Object.defineProperty(o.material,"envMap",{get:function(){return this.uniforms.envMap.value}}),i.update(o)),o.material.uniforms.envMap.value=f,o.material.uniforms.flipEnvMap.value=f.isCubeTexture&&!1===f.isRenderTargetTexture?-1:1,h===f&&u===f.version&&d===t.toneMapping||(o.material.needsUpdate=!0,h=f,u=f.version,d=t.toneMapping),n.unshift(o,o.geometry,o.material,0,0,null)):f&&f.isTexture&&(void 0===a&&(a=new Fn(new si(2,2),new Vn({name:"BackgroundMaterial",uniforms:Hn(li.background.uniforms),vertexShader:li.background.vertexShader,fragmentShader:li.background.fragmentShader,side:0,depthTest:!1,depthWrite:!1,fog:!1})),a.geometry.deleteAttribute("normal"),Object.defineProperty(a.material,"map",{get:function(){return this.uniforms.t2D.value}}),i.update(a)),a.material.uniforms.t2D.value=f,!0===f.matrixAutoUpdate&&f.updateMatrix(),a.material.uniforms.uvTransform.value.copy(f.matrix),h===f&&u===f.version&&d===t.toneMapping||(a.material.needsUpdate=!0,h=f,u=f.version,d=t.toneMapping),n.unshift(a,a.geometry,a.material,0,0,null))}}}function hi(t,e,n,i){const r=t.getParameter(34921),s=i.isWebGL2?null:e.get("OES_vertex_array_object"),a=i.isWebGL2||null!==s,o={},l=d(null);let c=l;function h(e){return i.isWebGL2?t.bindVertexArray(e):s.bindVertexArrayOES(e)}function u(e){return i.isWebGL2?t.deleteVertexArray(e):s.deleteVertexArrayOES(e)}function d(t){const e=[],n=[],i=[];for(let t=0;t=0){let s=l[e];if(void 0===s&&("instanceMatrix"===e&&r.instanceMatrix&&(s=r.instanceMatrix),"instanceColor"===e&&r.instanceColor&&(s=r.instanceColor)),void 0!==s){const e=s.normalized,a=s.itemSize,l=n.get(s);if(void 0===l)continue;const c=l.buffer,h=l.type,u=l.bytesPerElement;if(s.isInterleavedBufferAttribute){const n=s.data,l=n.stride,d=s.offset;if(n&&n.isInstancedInterleavedBuffer){for(let 
t=0;t0&&t.getShaderPrecisionFormat(35632,36338).precision>0)return"highp";e="mediump"}return"mediump"===e&&t.getShaderPrecisionFormat(35633,36337).precision>0&&t.getShaderPrecisionFormat(35632,36337).precision>0?"mediump":"lowp"}const s="undefined"!=typeof WebGL2RenderingContext&&t instanceof WebGL2RenderingContext||"undefined"!=typeof WebGL2ComputeRenderingContext&&t instanceof WebGL2ComputeRenderingContext;let a=void 0!==n.precision?n.precision:"highp";const o=r(a);o!==a&&(console.warn("THREE.WebGLRenderer:",a,"not supported, using",o,"instead."),a=o);const l=s||e.has("WEBGL_draw_buffers"),c=!0===n.logarithmicDepthBuffer,h=t.getParameter(34930),u=t.getParameter(35660),d=t.getParameter(3379),p=t.getParameter(34076),m=t.getParameter(34921),f=t.getParameter(36347),g=t.getParameter(36348),v=t.getParameter(36349),y=u>0,x=s||e.has("OES_texture_float");return{isWebGL2:s,drawBuffers:l,getMaxAnisotropy:function(){if(void 0!==i)return i;if(!0===e.has("EXT_texture_filter_anisotropic")){const n=e.get("EXT_texture_filter_anisotropic");i=t.getParameter(n.MAX_TEXTURE_MAX_ANISOTROPY_EXT)}else i=0;return i},getMaxPrecision:r,precision:a,logarithmicDepthBuffer:c,maxTextures:h,maxVertexTextures:u,maxTextureSize:d,maxCubemapSize:p,maxAttributes:m,maxVertexUniforms:f,maxVaryings:g,maxFragmentUniforms:v,vertexTextures:y,floatFragmentTextures:x,floatVertexTextures:y&&x,maxSamples:s?t.getParameter(36183):0}}function pi(t){const e=this;let n=null,i=0,r=!1,s=!1;const a=new $n,o=new pt,l={value:null,needsUpdate:!1};function c(){l.value!==n&&(l.value=n,l.needsUpdate=i>0),e.numPlanes=i,e.numIntersection=0}function h(t,n,i,r){const s=null!==t?t.length:0;let c=null;if(0!==s){if(c=l.value,!0!==r||null===c){const e=i+4*s,r=n.matrixWorldInverse;o.getNormalMatrix(r),(null===c||c.length0){const a=t.getRenderTarget(),o=new Yn(s.height/2);return o.fromEquirectangularTexture(t,r),e.set(r,o),t.setRenderTarget(a),r.addEventListener("dispose",i),n(o.texture,r.mapping)}return null}}}return r},dispose:function(){e=new WeakMap}}}li.physical={uniforms:Gn([li.standard.uniforms,{clearcoat:{value:0},clearcoatMap:{value:null},clearcoatRoughness:{value:0},clearcoatRoughnessMap:{value:null},clearcoatNormalScale:{value:new dt(1,1)},clearcoatNormalMap:{value:null},sheen:{value:0},sheenColor:{value:new Ze(0)},sheenColorMap:{value:null},sheenRoughness:{value:0},sheenRoughnessMap:{value:null},transmission:{value:0},transmissionMap:{value:null},transmissionSamplerSize:{value:new dt},transmissionSamplerMap:{value:null},thickness:{value:0},thicknessMap:{value:null},attenuationDistance:{value:0},attenuationColor:{value:new Ze(0)},specularIntensity:{value:0},specularIntensityMap:{value:null},specularColor:{value:new Ze(1,1,1)},specularColorMap:{value:null}}]),vertexShader:ai.meshphysical_vert,fragmentShader:ai.meshphysical_frag};class fi extends Wn{constructor(t=-1,e=1,n=1,i=-1,r=.1,s=2e3){super(),this.type="OrthographicCamera",this.zoom=1,this.view=null,this.left=t,this.right=e,this.top=n,this.bottom=i,this.near=r,this.far=s,this.updateProjectionMatrix()}copy(t,e){return 
super.copy(t,e),this.left=t.left,this.right=t.right,this.top=t.top,this.bottom=t.bottom,this.near=t.near,this.far=t.far,this.zoom=t.zoom,this.view=null===t.view?null:Object.assign({},t.view),this}setViewOffset(t,e,n,i,r,s){null===this.view&&(this.view={enabled:!0,fullWidth:1,fullHeight:1,offsetX:0,offsetY:0,width:1,height:1}),this.view.enabled=!0,this.view.fullWidth=t,this.view.fullHeight=e,this.view.offsetX=n,this.view.offsetY=i,this.view.width=r,this.view.height=s,this.updateProjectionMatrix()}clearViewOffset(){null!==this.view&&(this.view.enabled=!1),this.updateProjectionMatrix()}updateProjectionMatrix(){const t=(this.right-this.left)/(2*this.zoom),e=(this.top-this.bottom)/(2*this.zoom),n=(this.right+this.left)/2,i=(this.top+this.bottom)/2;let r=n-t,s=n+t,a=i+e,o=i-e;if(null!==this.view&&this.view.enabled){const t=(this.right-this.left)/this.view.fullWidth/this.zoom,e=(this.top-this.bottom)/this.view.fullHeight/this.zoom;r+=t*this.view.offsetX,s=r+t*this.view.width,a-=e*this.view.offsetY,o=a-e*this.view.height}this.projectionMatrix.makeOrthographic(r,s,a,o,this.near,this.far),this.projectionMatrixInverse.copy(this.projectionMatrix).invert()}toJSON(t){const e=super.toJSON(t);return e.object.zoom=this.zoom,e.object.left=this.left,e.object.right=this.right,e.object.top=this.top,e.object.bottom=this.bottom,e.object.near=this.near,e.object.far=this.far,null!==this.view&&(e.object.view=Object.assign({},this.view)),e}}fi.prototype.isOrthographicCamera=!0;class gi extends Vn{constructor(t){super(t),this.type="RawShaderMaterial"}}gi.prototype.isRawShaderMaterial=!0;const vi=Math.pow(2,8),yi=[.125,.215,.35,.446,.526,.582],xi=5+yi.length,_i=20,bi={[X]:0,[J]:1},Mi=new fi,{_lodPlanes:wi,_sizeLods:Si,_sigmas:Ti}=Di(),Ei=new Ze;let Ai=null;const Li=(1+Math.sqrt(5))/2,Ri=1/Li,Ci=[new Lt(1,1,1),new Lt(-1,1,1),new Lt(1,1,-1),new Lt(-1,1,-1),new Lt(0,Li,Ri),new Lt(0,Li,-Ri),new Lt(Ri,0,Li),new Lt(-Ri,0,Li),new Lt(Li,Ri,0),new Lt(-Li,Ri,0)];class Pi{constructor(t){this._renderer=t,this._pingPongRenderTarget=null,this._blurMaterial=function(t){const e=new Float32Array(t),n=new Lt(0,1,0);return new gi({name:"SphericalGaussianBlur",defines:{n:t},uniforms:{envMap:{value:null},samples:{value:1},weights:{value:e},latitudinal:{value:!1},dTheta:{value:0},mipInt:{value:0},poleAxis:{value:n}},vertexShader:Fi(),fragmentShader:`\n\n\t\t\tprecision mediump float;\n\t\t\tprecision mediump int;\n\n\t\t\tvarying vec3 vOutputDirection;\n\n\t\t\tuniform sampler2D envMap;\n\t\t\tuniform int samples;\n\t\t\tuniform float weights[ n ];\n\t\t\tuniform bool latitudinal;\n\t\t\tuniform float dTheta;\n\t\t\tuniform float mipInt;\n\t\t\tuniform vec3 poleAxis;\n\n\t\t\t${Oi()}\n\n\t\t\t#define ENVMAP_TYPE_CUBE_UV\n\t\t\t#include <cube_uv_reflection_fragment>\n\n\t\t\tvec3 getSample( float theta, vec3 axis ) {\n\n\t\t\t\tfloat cosTheta = cos( theta );\n\t\t\t\t// Rodrigues' axis-angle rotation\n\t\t\t\tvec3 sampleDirection = vOutputDirection * cosTheta\n\t\t\t\t\t+ cross( axis, vOutputDirection ) * sin( theta )\n\t\t\t\t\t+ axis * dot( axis, vOutputDirection ) * ( 1.0 - cosTheta );\n\n\t\t\t\treturn bilinearCubeUV( envMap, sampleDirection, mipInt );\n\n\t\t\t}\n\n\t\t\tvoid main() {\n\n\t\t\t\tvec3 axis = latitudinal ? 
poleAxis : cross( poleAxis, vOutputDirection );\n\n\t\t\t\tif ( all( equal( axis, vec3( 0.0 ) ) ) ) {\n\n\t\t\t\t\taxis = vec3( vOutputDirection.z, 0.0, - vOutputDirection.x );\n\n\t\t\t\t}\n\n\t\t\t\taxis = normalize( axis );\n\n\t\t\t\tgl_FragColor = vec4( 0.0, 0.0, 0.0, 1.0 );\n\t\t\t\tgl_FragColor.rgb += weights[ 0 ] * getSample( 0.0, axis );\n\n\t\t\t\tfor ( int i = 1; i < n; i++ ) {\n\n\t\t\t\t\tif ( i >= samples ) {\n\n\t\t\t\t\t\tbreak;\n\n\t\t\t\t\t}\n\n\t\t\t\t\tfloat theta = dTheta * float( i );\n\t\t\t\t\tgl_FragColor.rgb += weights[ i ] * getSample( -1.0 * theta, axis );\n\t\t\t\t\tgl_FragColor.rgb += weights[ i ] * getSample( theta, axis );\n\n\t\t\t\t}\n\n\t\t\t}\n\t\t`,blending:0,depthTest:!1,depthWrite:!1})}(_i),this._equirectShader=null,this._cubemapShader=null,this._compileMaterial(this._blurMaterial)}fromScene(t,e=0,n=.1,i=100){Ai=this._renderer.getRenderTarget();const r=this._allocateTargets();return this._sceneToCubeUV(t,n,i,r),e>0&&this._blur(r,0,0,e),this._applyPMREM(r),this._cleanup(r),r}fromEquirectangular(t){return this._fromTexture(t)}fromCubemap(t){return this._fromTexture(t)}compileCubemapShader(){null===this._cubemapShader&&(this._cubemapShader=Bi(),this._compileMaterial(this._cubemapShader))}compileEquirectangularShader(){null===this._equirectShader&&(this._equirectShader=zi(),this._compileMaterial(this._equirectShader))}dispose(){this._blurMaterial.dispose(),null!==this._cubemapShader&&this._cubemapShader.dispose(),null!==this._equirectShader&&this._equirectShader.dispose();for(let t=0;t2?vi:0,vi,vi),o.setRenderTarget(i),d&&o.render(u,r),o.render(t,r)}u.geometry.dispose(),u.material.dispose(),o.toneMapping=c,o.autoClear=l,t.background=p}_setEncoding(t,e){!0===this._renderer.capabilities.isWebGL2&&e.format===E&&e.type===x&&e.encoding===J?t.value=bi[3e3]:t.value=bi[e.encoding]}_textureToCubeUV(t,e){const n=this._renderer,i=t.mapping===r||t.mapping===s;i?null==this._cubemapShader&&(this._cubemapShader=Bi()):null==this._equirectShader&&(this._equirectShader=zi());const a=i?this._cubemapShader:this._equirectShader,o=new Fn(wi[0],a),l=a.uniforms;l.envMap.value=t,i||l.texelSize.value.set(1/t.image.width,1/t.image.height),this._setEncoding(l.inputEncoding,t),Ni(e,0,0,3*vi,2*vi),n.setRenderTarget(e),n.render(o,Mi)}_applyPMREM(t){const e=this._renderer,n=e.autoClear;e.autoClear=!1;for(let e=1;e_i&&console.warn(`sigmaRadians, ${r}, is too large and will clip, as it requested ${m} samples when the maximum is set to 20`);const f=[];let g=0;for(let t=0;t<_i;++t){const e=t/p,n=Math.exp(-e*e/2);f.push(n),0==t?g+=n:t4?i-8+4:0),3*v,2*v),o.setRenderTarget(e),o.render(c,Mi)}}function Di(){const t=[],e=[],n=[];let i=8;for(let r=0;r4?a=yi[r-8+4-1]:0==r&&(a=0),n.push(a);const o=1/(s-1),l=-o/2,c=1+o/2,h=[l,l,c,l,c,c,l,l,c,c,l,c],u=6,d=6,p=3,m=2,f=1,g=new Float32Array(p*d*u),v=new Float32Array(m*d*u),y=new Float32Array(f*d*u);for(let t=0;t2?0:-1,i=[e,n,0,e+2/3,n,0,e+2/3,n+1,0,e,n,0,e+2/3,n+1,0,e,n+1,0];g.set(i,p*d*t),v.set(h,m*d*t);const r=[t,t,t,t,t,t];y.set(r,f*d*t)}const x=new xn;x.setAttribute("position",new tn(g,p)),x.setAttribute("uv",new tn(v,m)),x.setAttribute("faceIndex",new tn(y,f)),t.push(x),i>4&&i--}return{_lodPlanes:t,_sizeLods:e,_sigmas:n}}function Ii(t){const e=new St(3*vi,3*vi,t);return e.texture.mapping=l,e.texture.name="PMREM.cubeUv",e.scissorTest=!0,e}function Ni(t,e,n,i,r){t.viewport.set(e,n,i,r),t.scissor.set(e,n,i,r)}function zi(){const t=new dt(1,1);return new 
gi({name:"EquirectangularToCubeUV",uniforms:{envMap:{value:null},texelSize:{value:t},inputEncoding:{value:bi[3e3]}},vertexShader:Fi(),fragmentShader:`\n\n\t\t\tprecision mediump float;\n\t\t\tprecision mediump int;\n\n\t\t\tvarying vec3 vOutputDirection;\n\n\t\t\tuniform sampler2D envMap;\n\t\t\tuniform vec2 texelSize;\n\n\t\t\t${Oi()}\n\n\t\t\t#include <common>\n\n\t\t\tvoid main() {\n\n\t\t\t\tgl_FragColor = vec4( 0.0, 0.0, 0.0, 1.0 );\n\n\t\t\t\tvec3 outputDirection = normalize( vOutputDirection );\n\t\t\t\tvec2 uv = equirectUv( outputDirection );\n\n\t\t\t\tvec2 f = fract( uv / texelSize - 0.5 );\n\t\t\t\tuv -= f * texelSize;\n\t\t\t\tvec3 tl = envMapTexelToLinear( texture2D ( envMap, uv ) ).rgb;\n\t\t\t\tuv.x += texelSize.x;\n\t\t\t\tvec3 tr = envMapTexelToLinear( texture2D ( envMap, uv ) ).rgb;\n\t\t\t\tuv.y += texelSize.y;\n\t\t\t\tvec3 br = envMapTexelToLinear( texture2D ( envMap, uv ) ).rgb;\n\t\t\t\tuv.x -= texelSize.x;\n\t\t\t\tvec3 bl = envMapTexelToLinear( texture2D ( envMap, uv ) ).rgb;\n\n\t\t\t\tvec3 tm = mix( tl, tr, f.x );\n\t\t\t\tvec3 bm = mix( bl, br, f.x );\n\t\t\t\tgl_FragColor.rgb = mix( tm, bm, f.y );\n\n\t\t\t}\n\t\t`,blending:0,depthTest:!1,depthWrite:!1})}function Bi(){return new gi({name:"CubemapToCubeUV",uniforms:{envMap:{value:null},inputEncoding:{value:bi[3e3]}},vertexShader:Fi(),fragmentShader:`\n\n\t\t\tprecision mediump float;\n\t\t\tprecision mediump int;\n\n\t\t\tvarying vec3 vOutputDirection;\n\n\t\t\tuniform samplerCube envMap;\n\n\t\t\t${Oi()}\n\n\t\t\tvoid main() {\n\n\t\t\t\tgl_FragColor = envMapTexelToLinear( textureCube( envMap, vec3( - vOutputDirection.x, vOutputDirection.yz ) ) );\n\n\t\t\t}\n\t\t`,blending:0,depthTest:!1,depthWrite:!1})}function Fi(){return"\n\n\t\tprecision mediump float;\n\t\tprecision mediump int;\n\n\t\tattribute vec3 position;\n\t\tattribute vec2 uv;\n\t\tattribute float faceIndex;\n\n\t\tvarying vec3 vOutputDirection;\n\n\t\t// RH coordinate system; PMREM face-indexing convention\n\t\tvec3 getDirection( vec2 uv, float face ) {\n\n\t\t\tuv = 2.0 * uv - 1.0;\n\n\t\t\tvec3 direction = vec3( uv, 1.0 );\n\n\t\t\tif ( face == 0.0 ) {\n\n\t\t\t\tdirection = direction.zyx; // ( 1, v, u ) pos x\n\n\t\t\t} else if ( face == 1.0 ) {\n\n\t\t\t\tdirection = direction.xzy;\n\t\t\t\tdirection.xz *= -1.0; // ( -u, 1, -v ) pos y\n\n\t\t\t} else if ( face == 2.0 ) {\n\n\t\t\t\tdirection.x *= -1.0; // ( -u, v, 1 ) pos z\n\n\t\t\t} else if ( face == 3.0 ) {\n\n\t\t\t\tdirection = direction.zyx;\n\t\t\t\tdirection.xz *= -1.0; // ( -1, v, -u ) neg x\n\n\t\t\t} else if ( face == 4.0 ) {\n\n\t\t\t\tdirection = direction.xzy;\n\t\t\t\tdirection.xy *= -1.0; // ( -u, -1, v ) neg y\n\n\t\t\t} else if ( face == 5.0 ) {\n\n\t\t\t\tdirection.z *= -1.0; // ( u, v, -1 ) neg z\n\n\t\t\t}\n\n\t\t\treturn direction;\n\n\t\t}\n\n\t\tvoid main() {\n\n\t\t\tvOutputDirection = getDirection( uv, faceIndex );\n\t\t\tgl_Position = vec4( position, 1.0 );\n\n\t\t}\n\t"}function Oi(){return"\n\n\t\tuniform int inputEncoding;\n\n\t\t#include <encodings_pars_fragment>\n\n\t\tvec4 inputTexelToLinear( vec4 value ) {\n\n\t\t\tif ( inputEncoding == 0 ) {\n\n\t\t\t\treturn value;\n\n\t\t\t} else {\n\n\t\t\t\treturn sRGBToLinear( value );\n\n\t\t\t}\n\n\t\t}\n\n\t\tvec4 envMapTexelToLinear( vec4 color ) {\n\n\t\t\treturn inputTexelToLinear( color );\n\n\t\t}\n\t"}function Ui(t){let e=new WeakMap,n=null;function i(t){const n=t.target;n.removeEventListener("dispose",i);const r=e.get(n);void 0!==r&&(e.delete(n),r.dispose())}return{get:function(l){if(l&&l.isTexture&&!1===l.isRenderTargetTexture){const 
c=l.mapping,h=c===a||c===o,u=c===r||c===s;if(h||u){if(e.has(l))return e.get(l).texture;{const r=l.image;if(h&&r&&r.height>0||u&&r&&function(t){let e=0;const n=6;for(let i=0;i65535?ln:an)(n,1);o.version=a;const l=s.get(t);l&&e.remove(l),s.set(t,o)}return{get:function(t,e){return!0===r[e.id]||(e.addEventListener("dispose",a),r[e.id]=!0,n.memory.geometries++),e},update:function(t){const n=t.attributes;for(const t in n)e.update(n[t],34962);const i=t.morphAttributes;for(const t in i){const n=i[t];for(let t=0,i=n.length;te.maxTextureSize&&(u=Math.ceil(h/e.maxTextureSize),h=e.maxTextureSize);const d=new Float32Array(h*u*4*i),p=new Wi(d,h,u,i);p.format=E,p.type=M,p.needsUpdate=!0;const m=4*l;for(let e=0;e0)return t;const r=e*n;let s=er[r];if(void 0===s&&(s=new Float32Array(r),er[r]=s),0!==e){i.toArray(s,0);for(let i=1,r=0;i!==e;++i)r+=n,t[i].toArray(s,r)}return s}function or(t,e){if(t.length!==e.length)return!1;for(let n=0,i=t.length;n/gm;function ds(t){return t.replace(us,ps)}function ps(t,e){const n=ai[e];if(void 0===n)throw new Error("Can not resolve #include <"+e+">");return ds(n)}const ms=/#pragma unroll_loop[\s]+?for \( int i \= (\d+)\; i < (\d+)\; i \+\+ \) \{([\s\S]+?)(?=\})\}/g,fs=/#pragma unroll_loop_start\s+for\s*\(\s*int\s+i\s*=\s*(\d+)\s*;\s*i\s*<\s*(\d+)\s*;\s*i\s*\+\+\s*\)\s*{([\s\S]+?)}\s+#pragma unroll_loop_end/g;function gs(t){return t.replace(fs,ys).replace(ms,vs)}function vs(t,e,n,i){return console.warn("WebGLProgram: #pragma unroll_loop shader syntax is deprecated. Please use #pragma unroll_loop_start syntax instead."),ys(t,e,n,i)}function ys(t,e,n,i){let r="";for(let t=parseInt(e);t0&&(x+="\n"),_=[g,v].filter(ls).join("\n"),_.length>0&&(_+="\n")):(x=[xs(n),"#define SHADER_NAME "+n.shaderName,v,n.instancing?"#define USE_INSTANCING":"",n.instancingColor?"#define USE_INSTANCING_COLOR":"",n.supportsVertexTextures?"#define VERTEX_TEXTURES":"","#define MAX_BONES "+n.maxBones,n.useFog&&n.fog?"#define USE_FOG":"",n.useFog&&n.fogExp2?"#define FOG_EXP2":"",n.map?"#define USE_MAP":"",n.envMap?"#define USE_ENVMAP":"",n.envMap?"#define "+m:"",n.lightMap?"#define USE_LIGHTMAP":"",n.aoMap?"#define USE_AOMAP":"",n.emissiveMap?"#define USE_EMISSIVEMAP":"",n.bumpMap?"#define USE_BUMPMAP":"",n.normalMap?"#define USE_NORMALMAP":"",n.normalMap&&n.objectSpaceNormalMap?"#define OBJECTSPACE_NORMALMAP":"",n.normalMap&&n.tangentSpaceNormalMap?"#define TANGENTSPACE_NORMALMAP":"",n.clearcoatMap?"#define USE_CLEARCOATMAP":"",n.clearcoatRoughnessMap?"#define USE_CLEARCOAT_ROUGHNESSMAP":"",n.clearcoatNormalMap?"#define USE_CLEARCOAT_NORMALMAP":"",n.displacementMap&&n.supportsVertexTextures?"#define USE_DISPLACEMENTMAP":"",n.specularMap?"#define USE_SPECULARMAP":"",n.specularIntensityMap?"#define USE_SPECULARINTENSITYMAP":"",n.specularColorMap?"#define USE_SPECULARCOLORMAP":"",n.roughnessMap?"#define USE_ROUGHNESSMAP":"",n.metalnessMap?"#define USE_METALNESSMAP":"",n.alphaMap?"#define USE_ALPHAMAP":"",n.transmission?"#define USE_TRANSMISSION":"",n.transmissionMap?"#define USE_TRANSMISSIONMAP":"",n.thicknessMap?"#define USE_THICKNESSMAP":"",n.sheenColorMap?"#define USE_SHEENCOLORMAP":"",n.sheenRoughnessMap?"#define USE_SHEENROUGHNESSMAP":"",n.vertexTangents?"#define USE_TANGENT":"",n.vertexColors?"#define USE_COLOR":"",n.vertexAlphas?"#define USE_COLOR_ALPHA":"",n.vertexUvs?"#define USE_UV":"",n.uvsVertexOnly?"#define UVS_VERTEX_ONLY":"",n.flatShading?"#define FLAT_SHADED":"",n.skinning?"#define USE_SKINNING":"",n.useVertexTexture?"#define BONE_TEXTURE":"",n.morphTargets?"#define 
USE_MORPHTARGETS":"",n.morphNormals&&!1===n.flatShading?"#define USE_MORPHNORMALS":"",n.morphTargets&&n.isWebGL2?"#define MORPHTARGETS_TEXTURE":"",n.morphTargets&&n.isWebGL2?"#define MORPHTARGETS_COUNT "+n.morphTargetsCount:"",n.doubleSided?"#define DOUBLE_SIDED":"",n.flipSided?"#define FLIP_SIDED":"",n.shadowMapEnabled?"#define USE_SHADOWMAP":"",n.shadowMapEnabled?"#define "+d:"",n.sizeAttenuation?"#define USE_SIZEATTENUATION":"",n.logarithmicDepthBuffer?"#define USE_LOGDEPTHBUF":"",n.logarithmicDepthBuffer&&n.rendererExtensionFragDepth?"#define USE_LOGDEPTHBUF_EXT":"","uniform mat4 modelMatrix;","uniform mat4 modelViewMatrix;","uniform mat4 projectionMatrix;","uniform mat4 viewMatrix;","uniform mat3 normalMatrix;","uniform vec3 cameraPosition;","uniform bool isOrthographic;","#ifdef USE_INSTANCING","\tattribute mat4 instanceMatrix;","#endif","#ifdef USE_INSTANCING_COLOR","\tattribute vec3 instanceColor;","#endif","attribute vec3 position;","attribute vec3 normal;","attribute vec2 uv;","#ifdef USE_TANGENT","\tattribute vec4 tangent;","#endif","#if defined( USE_COLOR_ALPHA )","\tattribute vec4 color;","#elif defined( USE_COLOR )","\tattribute vec3 color;","#endif","#if ( defined( USE_MORPHTARGETS ) && ! defined( MORPHTARGETS_TEXTURE ) )","\tattribute vec3 morphTarget0;","\tattribute vec3 morphTarget1;","\tattribute vec3 morphTarget2;","\tattribute vec3 morphTarget3;","\t#ifdef USE_MORPHNORMALS","\t\tattribute vec3 morphNormal0;","\t\tattribute vec3 morphNormal1;","\t\tattribute vec3 morphNormal2;","\t\tattribute vec3 morphNormal3;","\t#else","\t\tattribute vec3 morphTarget4;","\t\tattribute vec3 morphTarget5;","\t\tattribute vec3 morphTarget6;","\t\tattribute vec3 morphTarget7;","\t#endif","#endif","#ifdef USE_SKINNING","\tattribute vec4 skinIndex;","\tattribute vec4 skinWeight;","#endif","\n"].filter(ls).join("\n"),_=[g,xs(n),"#define SHADER_NAME "+n.shaderName,v,n.useFog&&n.fog?"#define USE_FOG":"",n.useFog&&n.fogExp2?"#define FOG_EXP2":"",n.map?"#define USE_MAP":"",n.matcap?"#define USE_MATCAP":"",n.envMap?"#define USE_ENVMAP":"",n.envMap?"#define "+p:"",n.envMap?"#define "+m:"",n.envMap?"#define "+f:"",n.lightMap?"#define USE_LIGHTMAP":"",n.aoMap?"#define USE_AOMAP":"",n.emissiveMap?"#define USE_EMISSIVEMAP":"",n.bumpMap?"#define USE_BUMPMAP":"",n.normalMap?"#define USE_NORMALMAP":"",n.normalMap&&n.objectSpaceNormalMap?"#define OBJECTSPACE_NORMALMAP":"",n.normalMap&&n.tangentSpaceNormalMap?"#define TANGENTSPACE_NORMALMAP":"",n.clearcoat?"#define USE_CLEARCOAT":"",n.clearcoatMap?"#define USE_CLEARCOATMAP":"",n.clearcoatRoughnessMap?"#define USE_CLEARCOAT_ROUGHNESSMAP":"",n.clearcoatNormalMap?"#define USE_CLEARCOAT_NORMALMAP":"",n.specularMap?"#define USE_SPECULARMAP":"",n.specularIntensityMap?"#define USE_SPECULARINTENSITYMAP":"",n.specularColorMap?"#define USE_SPECULARCOLORMAP":"",n.roughnessMap?"#define USE_ROUGHNESSMAP":"",n.metalnessMap?"#define USE_METALNESSMAP":"",n.alphaMap?"#define USE_ALPHAMAP":"",n.alphaTest?"#define USE_ALPHATEST":"",n.sheen?"#define USE_SHEEN":"",n.sheenColorMap?"#define USE_SHEENCOLORMAP":"",n.sheenRoughnessMap?"#define USE_SHEENROUGHNESSMAP":"",n.transmission?"#define USE_TRANSMISSION":"",n.transmissionMap?"#define USE_TRANSMISSIONMAP":"",n.thicknessMap?"#define USE_THICKNESSMAP":"",n.vertexTangents?"#define USE_TANGENT":"",n.vertexColors||n.instancingColor?"#define USE_COLOR":"",n.vertexAlphas?"#define USE_COLOR_ALPHA":"",n.vertexUvs?"#define USE_UV":"",n.uvsVertexOnly?"#define UVS_VERTEX_ONLY":"",n.gradientMap?"#define 
USE_GRADIENTMAP":"",n.flatShading?"#define FLAT_SHADED":"",n.doubleSided?"#define DOUBLE_SIDED":"",n.flipSided?"#define FLIP_SIDED":"",n.shadowMapEnabled?"#define USE_SHADOWMAP":"",n.shadowMapEnabled?"#define "+d:"",n.premultipliedAlpha?"#define PREMULTIPLIED_ALPHA":"",n.physicallyCorrectLights?"#define PHYSICALLY_CORRECT_LIGHTS":"",n.logarithmicDepthBuffer?"#define USE_LOGDEPTHBUF":"",n.logarithmicDepthBuffer&&n.rendererExtensionFragDepth?"#define USE_LOGDEPTHBUF_EXT":"",(n.extensionShaderTextureLOD||n.envMap)&&n.rendererExtensionShaderTextureLod?"#define TEXTURE_LOD_EXT":"","uniform mat4 viewMatrix;","uniform vec3 cameraPosition;","uniform bool isOrthographic;",0!==n.toneMapping?"#define TONE_MAPPING":"",0!==n.toneMapping?ai.tonemapping_pars_fragment:"",0!==n.toneMapping?os("toneMapping",n.toneMapping):"",n.dithering?"#define DITHERING":"",n.format===T?"#define OPAQUE":"",ai.encodings_pars_fragment,n.map?ss("mapTexelToLinear",n.mapEncoding):"",n.matcap?ss("matcapTexelToLinear",n.matcapEncoding):"",n.envMap?ss("envMapTexelToLinear",n.envMapEncoding):"",n.emissiveMap?ss("emissiveMapTexelToLinear",n.emissiveMapEncoding):"",n.specularColorMap?ss("specularColorMapTexelToLinear",n.specularColorMapEncoding):"",n.sheenColorMap?ss("sheenColorMapTexelToLinear",n.sheenColorMapEncoding):"",n.lightMap?ss("lightMapTexelToLinear",n.lightMapEncoding):"",as("linearToOutputTexel",n.outputEncoding),n.depthPacking?"#define DEPTH_PACKING "+n.depthPacking:"","\n"].filter(ls).join("\n")),h=ds(h),h=cs(h,n),h=hs(h,n),u=ds(u),u=cs(u,n),u=hs(u,n),h=gs(h),u=gs(u),n.isWebGL2&&!0!==n.isRawShaderMaterial&&(b="#version 300 es\n",x=["precision mediump sampler2DArray;","#define attribute in","#define varying out","#define texture2D texture"].join("\n")+"\n"+x,_=["#define varying in",n.glslVersion===K?"":"layout(location = 0) out highp vec4 pc_fragColor;",n.glslVersion===K?"":"#define gl_FragColor pc_fragColor","#define gl_FragDepthEXT gl_FragDepth","#define texture2D texture","#define textureCube texture","#define texture2DProj textureProj","#define texture2DLodEXT textureLod","#define texture2DProjLodEXT textureProjLod","#define textureCubeLodEXT textureLod","#define texture2DGradEXT textureGrad","#define texture2DProjGradEXT textureProjGrad","#define textureCubeGradEXT textureGrad"].join("\n")+"\n"+_);const M=b+_+u,w=es(a,35633,b+x+h),S=es(a,35632,M);if(a.attachShader(y,w),a.attachShader(y,S),void 0!==n.index0AttributeName?a.bindAttribLocation(y,0,n.index0AttributeName):!0===n.morphTargets&&a.bindAttribLocation(y,0,"position"),a.linkProgram(y),t.debug.checkShaderErrors){const t=a.getProgramInfoLog(y).trim(),e=a.getShaderInfoLog(w).trim(),n=a.getShaderInfoLog(S).trim();let i=!0,r=!0;if(!1===a.getProgramParameter(y,35714)){i=!1;const e=rs(a,w,"vertex"),n=rs(a,S,"fragment");console.error("THREE.WebGLProgram: Shader Error "+a.getError()+" - VALIDATE_STATUS "+a.getProgramParameter(y,35715)+"\n\nProgram Info Log: "+t+"\n"+e+"\n"+n)}else""!==t?console.warn("THREE.WebGLProgram: Program Info Log:",t):""!==e&&""!==n||(r=!1);r&&(this.diagnostics={runnable:i,programLog:t,vertexShader:{log:e,prefix:x},fragmentShader:{log:n,prefix:_}})}let E,A;return a.deleteShader(w),a.deleteShader(S),this.getUniforms=function(){return void 0===E&&(E=new ts(a,y)),E},this.getAttributes=function(){return void 0===A&&(A=function(t,e){const n={},i=t.getProgramParameter(e,35721);for(let 
r=0;r0,I=s.clearcoat>0;return{isWebGL2:d,shaderID:T,shaderName:s.type,vertexShader:A,fragmentShader:L,defines:s.defines,customVertexShaderID:R,customFragmentShaderID:C,isRawShaderMaterial:!0===s.isRawShaderMaterial,glslVersion:s.glslVersion,precision:v,instancing:!0===b.isInstancedMesh,instancingColor:!0===b.isInstancedMesh&&null!==b.instanceColor,supportsVertexTextures:g,outputEncoding:null!==P?_(P.texture):t.outputEncoding,map:!!s.map,mapEncoding:_(s.map),matcap:!!s.matcap,matcapEncoding:_(s.matcap),envMap:!!S,envMapMode:S&&S.mapping,envMapEncoding:_(S),envMapCubeUV:!!S&&(S.mapping===l||S.mapping===c),lightMap:!!s.lightMap,lightMapEncoding:_(s.lightMap),aoMap:!!s.aoMap,emissiveMap:!!s.emissiveMap,emissiveMapEncoding:_(s.emissiveMap),bumpMap:!!s.bumpMap,normalMap:!!s.normalMap,objectSpaceNormalMap:1===s.normalMapType,tangentSpaceNormalMap:0===s.normalMapType,clearcoat:I,clearcoatMap:I&&!!s.clearcoatMap,clearcoatRoughnessMap:I&&!!s.clearcoatRoughnessMap,clearcoatNormalMap:I&&!!s.clearcoatNormalMap,displacementMap:!!s.displacementMap,roughnessMap:!!s.roughnessMap,metalnessMap:!!s.metalnessMap,specularMap:!!s.specularMap,specularIntensityMap:!!s.specularIntensityMap,specularColorMap:!!s.specularColorMap,specularColorMapEncoding:_(s.specularColorMap),alphaMap:!!s.alphaMap,alphaTest:D,gradientMap:!!s.gradientMap,sheen:s.sheen>0,sheenColorMap:!!s.sheenColorMap,sheenColorMapEncoding:_(s.sheenColorMap),sheenRoughnessMap:!!s.sheenRoughnessMap,transmission:s.transmission>0,transmissionMap:!!s.transmissionMap,thicknessMap:!!s.thicknessMap,combine:s.combine,vertexTangents:!!s.normalMap&&!!b.geometry&&!!b.geometry.attributes.tangent,vertexColors:s.vertexColors,vertexAlphas:!0===s.vertexColors&&!!b.geometry&&!!b.geometry.attributes.color&&4===b.geometry.attributes.color.itemSize,vertexUvs:!!(s.map||s.bumpMap||s.normalMap||s.specularMap||s.alphaMap||s.emissiveMap||s.roughnessMap||s.metalnessMap||s.clearcoatMap||s.clearcoatRoughnessMap||s.clearcoatNormalMap||s.displacementMap||s.transmissionMap||s.thicknessMap||s.specularIntensityMap||s.specularColorMap||s.sheenColorMap||s.sheenRoughnessMap),uvsVertexOnly:!(s.map||s.bumpMap||s.normalMap||s.specularMap||s.alphaMap||s.emissiveMap||s.roughnessMap||s.metalnessMap||s.clearcoatNormalMap||s.transmission>0||s.transmissionMap||s.thicknessMap||s.specularIntensityMap||s.specularColorMap||s.sheen>0||s.sheenColorMap||s.sheenRoughnessMap||!s.displacementMap),fog:!!M,useFog:s.fog,fogExp2:M&&M.isFogExp2,flatShading:!!s.flatShading,sizeAttenuation:s.sizeAttenuation,logarithmicDepthBuffer:p,skinning:!0===b.isSkinnedMesh&&E>0,maxBones:E,useVertexTexture:m,morphTargets:!!b.geometry&&!!b.geometry.morphAttributes.position,morphNormals:!!b.geometry&&!!b.geometry.morphAttributes.normal,morphTargetsCount:b.geometry&&b.geometry.morphAttributes.position?b.geometry.morphAttributes.position.length:0,numDirLights:o.directional.length,numPointLights:o.point.length,numSpotLights:o.spot.length,numRectAreaLights:o.rectArea.length,numHemiLights:o.hemi.length,numDirLightShadows:o.directionalShadowMap.length,numPointLightShadows:o.pointShadowMap.length,numSpotLightShadows:o.spotShadowMap.length,numClippingPlanes:a.numPlanes,numClipIntersection:a.numIntersection,format:s.format,dithering:s.dithering,shadowMapEnabled:t.shadowMap.enabled&&u.length>0,shadowMapType:t.shadowMap.type,toneMapping:s.toneMapped?t.toneMapping:0,physicallyCorrectLights:t.physicallyCorrectLights,premultipliedAlpha:s.premultipliedAlpha,doubleSided:2===s.side,flipSided:1===s.side,depthPacking:void 
0!==s.depthPacking&&s.depthPacking,index0AttributeName:s.index0AttributeName,extensionDerivatives:s.extensions&&s.extensions.derivatives,extensionFragDepth:s.extensions&&s.extensions.fragDepth,extensionDrawBuffers:s.extensions&&s.extensions.drawBuffers,extensionShaderTextureLOD:s.extensions&&s.extensions.shaderTextureLOD,rendererExtensionFragDepth:d||i.has("EXT_frag_depth"),rendererExtensionDrawBuffers:d||i.has("WEBGL_draw_buffers"),rendererExtensionShaderTextureLod:d||i.has("EXT_shader_texture_lod"),customProgramCacheKey:s.customProgramCacheKey()}},getProgramCacheKey:function(e){const n=[];if(e.shaderID?n.push(e.shaderID):(n.push(e.customVertexShaderID),n.push(e.customFragmentShaderID)),void 0!==e.defines)for(const t in e.defines)n.push(t),n.push(e.defines[t]);return!1===e.isRawShaderMaterial&&(!function(t,e){t.push(e.precision),t.push(e.outputEncoding),t.push(e.mapEncoding),t.push(e.matcapEncoding),t.push(e.envMapMode),t.push(e.envMapEncoding),t.push(e.lightMapEncoding),t.push(e.emissiveMapEncoding),t.push(e.combine),t.push(e.vertexUvs),t.push(e.fogExp2),t.push(e.sizeAttenuation),t.push(e.maxBones),t.push(e.morphTargetsCount),t.push(e.numDirLights),t.push(e.numPointLights),t.push(e.numSpotLights),t.push(e.numHemiLights),t.push(e.numRectAreaLights),t.push(e.numDirLightShadows),t.push(e.numPointLightShadows),t.push(e.numSpotLightShadows),t.push(e.shadowMapType),t.push(e.toneMapping),t.push(e.numClippingPlanes),t.push(e.numClipIntersection),t.push(e.format),t.push(e.specularColorMapEncoding),t.push(e.sheenColorMapEncoding)}(n,e),function(t,e){o.disableAll(),e.isWebGL2&&o.enable(0);e.supportsVertexTextures&&o.enable(1);e.instancing&&o.enable(2);e.instancingColor&&o.enable(3);e.map&&o.enable(4);e.matcap&&o.enable(5);e.envMap&&o.enable(6);e.envMapCubeUV&&o.enable(7);e.lightMap&&o.enable(8);e.aoMap&&o.enable(9);e.emissiveMap&&o.enable(10);e.bumpMap&&o.enable(11);e.normalMap&&o.enable(12);e.objectSpaceNormalMap&&o.enable(13);e.tangentSpaceNormalMap&&o.enable(14);e.clearcoat&&o.enable(15);e.clearcoatMap&&o.enable(16);e.clearcoatRoughnessMap&&o.enable(17);e.clearcoatNormalMap&&o.enable(18);e.displacementMap&&o.enable(19);e.specularMap&&o.enable(20);e.roughnessMap&&o.enable(21);e.metalnessMap&&o.enable(22);e.gradientMap&&o.enable(23);e.alphaMap&&o.enable(24);e.alphaTest&&o.enable(25);e.vertexColors&&o.enable(26);e.vertexAlphas&&o.enable(27);e.vertexUvs&&o.enable(28);e.vertexTangents&&o.enable(29);e.uvsVertexOnly&&o.enable(30);e.fog&&o.enable(31);t.push(o.mask),o.disableAll(),e.useFog&&o.enable(0);e.flatShading&&o.enable(1);e.logarithmicDepthBuffer&&o.enable(2);e.skinning&&o.enable(3);e.useVertexTexture&&o.enable(4);e.morphTargets&&o.enable(5);e.morphNormals&&o.enable(6);e.premultipliedAlpha&&o.enable(7);e.shadowMapEnabled&&o.enable(8);e.physicallyCorrectLights&&o.enable(9);e.doubleSided&&o.enable(10);e.flipSided&&o.enable(11);e.depthPacking&&o.enable(12);e.dithering&&o.enable(13);e.specularIntensityMap&&o.enable(14);e.specularColorMap&&o.enable(15);e.transmission&&o.enable(16);e.transmissionMap&&o.enable(17);e.thicknessMap&&o.enable(18);e.sheen&&o.enable(19);e.sheenColorMap&&o.enable(20);e.sheenRoughnessMap&&o.enable(21);t.push(o.mask)}(n,e),n.push(t.outputEncoding)),n.push(e.customProgramCacheKey),n.join()},getUniforms:function(t){const e=y[t.type];let n;if(e){const t=li[e];n=kn.clone(t.uniforms)}else n=t.uniforms;return n},acquireProgram:function(e,n){let i;for(let t=0,e=u.length;t0?i.push(h):!0===a.transparent?r.push(h):n.push(h)},unshift:function(t,e,a,o,l,c){const 
h=s(t,e,a,o,l,c);a.transmission>0?i.unshift(h):!0===a.transparent?r.unshift(h):n.unshift(h)},finish:function(){for(let n=e,i=t.length;n1&&n.sort(t||Es),i.length>1&&i.sort(e||As),r.length>1&&r.sort(e||As)}}}function Rs(){let t=new WeakMap;return{get:function(e,n){let i;return!1===t.has(e)?(i=new Ls,t.set(e,[i])):n>=t.get(e).length?(i=new Ls,t.get(e).push(i)):i=t.get(e)[n],i},dispose:function(){t=new WeakMap}}}function Cs(){const t={};return{get:function(e){if(void 0!==t[e.id])return t[e.id];let n;switch(e.type){case"DirectionalLight":n={direction:new Lt,color:new Ze};break;case"SpotLight":n={position:new Lt,direction:new Lt,color:new Ze,distance:0,coneCos:0,penumbraCos:0,decay:0};break;case"PointLight":n={position:new Lt,color:new Ze,distance:0,decay:0};break;case"HemisphereLight":n={direction:new Lt,skyColor:new Ze,groundColor:new Ze};break;case"RectAreaLight":n={color:new Ze,position:new Lt,halfWidth:new Lt,halfHeight:new Lt}}return t[e.id]=n,n}}}let Ps=0;function Ds(t,e){return(e.castShadow?1:0)-(t.castShadow?1:0)}function Is(t,e){const n=new Cs,i=function(){const t={};return{get:function(e){if(void 0!==t[e.id])return t[e.id];let n;switch(e.type){case"DirectionalLight":case"SpotLight":n={shadowBias:0,shadowNormalBias:0,shadowRadius:1,shadowMapSize:new dt};break;case"PointLight":n={shadowBias:0,shadowNormalBias:0,shadowRadius:1,shadowMapSize:new dt,shadowCameraNear:1,shadowCameraFar:1e3}}return t[e.id]=n,n}}}(),r={version:0,hash:{directionalLength:-1,pointLength:-1,spotLength:-1,rectAreaLength:-1,hemiLength:-1,numDirectionalShadows:-1,numPointShadows:-1,numSpotShadows:-1},ambient:[0,0,0],probe:[],directional:[],directionalShadow:[],directionalShadowMap:[],directionalShadowMatrix:[],spot:[],spotShadow:[],spotShadowMap:[],spotShadowMatrix:[],rectArea:[],rectAreaLTC1:null,rectAreaLTC2:null,point:[],pointShadow:[],pointShadowMap:[],pointShadowMatrix:[],hemi:[]};for(let t=0;t<9;t++)r.probe.push(new Lt);const s=new Lt,a=new se,o=new se;return{setup:function(s,a){let o=0,l=0,c=0;for(let t=0;t<9;t++)r.probe[t].set(0,0,0);let h=0,u=0,d=0,p=0,m=0,f=0,g=0,v=0;s.sort(Ds);const y=!0!==a?Math.PI:1;for(let t=0,e=s.length;t0&&(e.isWebGL2||!0===t.has("OES_texture_float_linear")?(r.rectAreaLTC1=oi.LTC_FLOAT_1,r.rectAreaLTC2=oi.LTC_FLOAT_2):!0===t.has("OES_texture_half_float_linear")?(r.rectAreaLTC1=oi.LTC_HALF_1,r.rectAreaLTC2=oi.LTC_HALF_2):console.error("THREE.WebGLRenderer: Unable to use RectAreaLight. 
Missing WebGL extensions.")),r.ambient[0]=o,r.ambient[1]=l,r.ambient[2]=c;const x=r.hash;x.directionalLength===h&&x.pointLength===u&&x.spotLength===d&&x.rectAreaLength===p&&x.hemiLength===m&&x.numDirectionalShadows===f&&x.numPointShadows===g&&x.numSpotShadows===v||(r.directional.length=h,r.spot.length=d,r.rectArea.length=p,r.point.length=u,r.hemi.length=m,r.directionalShadow.length=f,r.directionalShadowMap.length=f,r.pointShadow.length=g,r.pointShadowMap.length=g,r.spotShadow.length=v,r.spotShadowMap.length=v,r.directionalShadowMatrix.length=f,r.pointShadowMatrix.length=g,r.spotShadowMatrix.length=v,x.directionalLength=h,x.pointLength=u,x.spotLength=d,x.rectAreaLength=p,x.hemiLength=m,x.numDirectionalShadows=f,x.numPointShadows=g,x.numSpotShadows=v,r.version=Ps++)},setupView:function(t,e){let n=0,i=0,l=0,c=0,h=0;const u=e.matrixWorldInverse;for(let e=0,d=t.length;e=n.get(i).length?(s=new Ns(t,e),n.get(i).push(s)):s=n.get(i)[r],s},dispose:function(){n=new WeakMap}}}class Bs extends Ve{constructor(t){super(),this.type="MeshDepthMaterial",this.depthPacking=3200,this.map=null,this.alphaMap=null,this.displacementMap=null,this.displacementScale=1,this.displacementBias=0,this.wireframe=!1,this.wireframeLinewidth=1,this.fog=!1,this.setValues(t)}copy(t){return super.copy(t),this.depthPacking=t.depthPacking,this.map=t.map,this.alphaMap=t.alphaMap,this.displacementMap=t.displacementMap,this.displacementScale=t.displacementScale,this.displacementBias=t.displacementBias,this.wireframe=t.wireframe,this.wireframeLinewidth=t.wireframeLinewidth,this}}Bs.prototype.isMeshDepthMaterial=!0;class Fs extends Ve{constructor(t){super(),this.type="MeshDistanceMaterial",this.referencePosition=new Lt,this.nearDistance=1,this.farDistance=1e3,this.map=null,this.alphaMap=null,this.displacementMap=null,this.displacementScale=1,this.displacementBias=0,this.fog=!1,this.setValues(t)}copy(t){return super.copy(t),this.referencePosition.copy(t.referencePosition),this.nearDistance=t.nearDistance,this.farDistance=t.farDistance,this.map=t.map,this.alphaMap=t.alphaMap,this.displacementMap=t.displacementMap,this.displacementScale=t.displacementScale,this.displacementBias=t.displacementBias,this}}Fs.prototype.isMeshDistanceMaterial=!0;function Os(t,e,n){let i=new ni;const r=new dt,s=new dt,a=new wt,o=new Bs({depthPacking:3201}),l=new Fs,c={},h=n.maxTextureSize,u={0:1,1:0,2:2},d=new Vn({defines:{VSM_SAMPLES:8},uniforms:{shadow_pass:{value:null},resolution:{value:new dt},radius:{value:4}},vertexShader:"void main() {\n\tgl_Position = vec4( position, 1.0 );\n}",fragmentShader:"uniform sampler2D shadow_pass;\nuniform vec2 resolution;\nuniform float radius;\n#include \nvoid main() {\n\tconst float samples = float( VSM_SAMPLES );\n\tfloat mean = 0.0;\n\tfloat squared_mean = 0.0;\n\tfloat uvStride = samples <= 1.0 ? 0.0 : 2.0 / ( samples - 1.0 );\n\tfloat uvStart = samples <= 1.0 ? 
0.0 : - 1.0;\n\tfor ( float i = 0.0; i < samples; i ++ ) {\n\t\tfloat uvOffset = uvStart + i * uvStride;\n\t\t#ifdef HORIZONTAL_PASS\n\t\t\tvec2 distribution = unpackRGBATo2Half( texture2D( shadow_pass, ( gl_FragCoord.xy + vec2( uvOffset, 0.0 ) * radius ) / resolution ) );\n\t\t\tmean += distribution.x;\n\t\t\tsquared_mean += distribution.y * distribution.y + distribution.x * distribution.x;\n\t\t#else\n\t\t\tfloat depth = unpackRGBAToDepth( texture2D( shadow_pass, ( gl_FragCoord.xy + vec2( 0.0, uvOffset ) * radius ) / resolution ) );\n\t\t\tmean += depth;\n\t\t\tsquared_mean += depth * depth;\n\t\t#endif\n\t}\n\tmean = mean / samples;\n\tsquared_mean = squared_mean / samples;\n\tfloat std_dev = sqrt( squared_mean - mean * mean );\n\tgl_FragColor = pack2HalfToRGBA( vec2( mean, std_dev ) );\n}"}),m=d.clone();m.defines.HORIZONTAL_PASS=1;const f=new xn;f.setAttribute("position",new tn(new Float32Array([-1,-1,.5,3,-1,.5,-1,3,.5]),3));const v=new Fn(f,d),y=this;function x(n,i){const r=e.update(v);d.defines.VSM_SAMPLES!==n.blurSamples&&(d.defines.VSM_SAMPLES=n.blurSamples,m.defines.VSM_SAMPLES=n.blurSamples,d.needsUpdate=!0,m.needsUpdate=!0),d.uniforms.shadow_pass.value=n.map.texture,d.uniforms.resolution.value=n.mapSize,d.uniforms.radius.value=n.radius,t.setRenderTarget(n.mapPass),t.clear(),t.renderBufferDirect(i,null,r,d,v,null),m.uniforms.shadow_pass.value=n.mapPass.texture,m.uniforms.resolution.value=n.mapSize,m.uniforms.radius.value=n.radius,t.setRenderTarget(n.map),t.clear(),t.renderBufferDirect(i,null,r,m,v,null)}function _(e,n,i,r,s,a,h){let d=null;const p=!0===r.isPointLight?e.customDistanceMaterial:e.customDepthMaterial;if(d=void 0!==p?p:!0===r.isPointLight?l:o,t.localClippingEnabled&&!0===i.clipShadows&&0!==i.clippingPlanes.length||i.displacementMap&&0!==i.displacementScale||i.alphaMap&&i.alphaTest>0){const t=d.uuid,e=i.uuid;let n=c[t];void 0===n&&(n={},c[t]=n);let r=n[e];void 0===r&&(r=d.clone(),n[e]=r),d=r}return d.visible=i.visible,d.wireframe=i.wireframe,d.side=3===h?null!==i.shadowSide?i.shadowSide:i.side:null!==i.shadowSide?i.shadowSide:u[i.side],d.alphaMap=i.alphaMap,d.alphaTest=i.alphaTest,d.clipShadows=i.clipShadows,d.clippingPlanes=i.clippingPlanes,d.clipIntersection=i.clipIntersection,d.displacementMap=i.displacementMap,d.displacementScale=i.displacementScale,d.displacementBias=i.displacementBias,d.wireframeLinewidth=i.wireframeLinewidth,d.linewidth=i.linewidth,!0===r.isPointLight&&!0===d.isMeshDistanceMaterial&&(d.referencePosition.setFromMatrixPosition(r.matrixWorld),d.nearDistance=s,d.farDistance=a),d}function b(n,r,s,a,o){if(!1===n.visible)return;if(n.layers.test(r.layers)&&(n.isMesh||n.isLine||n.isPoints)&&(n.castShadow||n.receiveShadow&&3===o)&&(!n.frustumCulled||i.intersectsObject(n))){n.modelViewMatrix.multiplyMatrices(s.matrixWorldInverse,n.matrixWorld);const i=e.update(n),r=n.material;if(Array.isArray(r)){const e=i.groups;for(let l=0,c=e.length;lh||r.y>h)&&(r.x>h&&(s.x=Math.floor(h/m.x),r.x=s.x*m.x,u.mapSize.x=s.x),r.y>h&&(s.y=Math.floor(h/m.y),r.y=s.y*m.y,u.mapSize.y=s.y)),null===u.map&&!u.isPointLightShadow&&3===this.type){const t={minFilter:g,magFilter:g,format:E};u.map=new St(r.x,r.y,t),u.map.texture.name=c.name+".shadowMap",u.mapPass=new St(r.x,r.y,t),u.camera.updateProjectionMatrix()}if(null===u.map){const t={minFilter:p,magFilter:p,format:E};u.map=new St(r.x,r.y,t),u.map.texture.name=c.name+".shadowMap",u.camera.updateProjectionMatrix()}t.setRenderTarget(u.map),t.clear();const f=u.getViewportCount();for(let t=0;t=1):-1!==L.indexOf("OpenGL 
ES")&&(A=parseFloat(/^OpenGL ES (\d)/.exec(L)[1]),E=A>=2);let R=null,C={};const P=t.getParameter(3088),D=t.getParameter(2978),I=(new wt).fromArray(P),N=(new wt).fromArray(D);function z(e,n,i){const r=new Uint8Array(4),s=t.createTexture();t.bindTexture(e,s),t.texParameteri(e,10241,9728),t.texParameteri(e,10240,9728);for(let e=0;ei||t.height>i)&&(r=i/Math.max(t.width,t.height)),r<1||!0===e){if("undefined"!=typeof HTMLImageElement&&t instanceof HTMLImageElement||"undefined"!=typeof HTMLCanvasElement&&t instanceof HTMLCanvasElement||"undefined"!=typeof ImageBitmap&&t instanceof ImageBitmap){const i=e?ht:Math.floor,s=i(r*t.width),a=i(r*t.height);void 0===D&&(D=N(s,a));const o=n?N(s,a):D;o.width=s,o.height=a;return o.getContext("2d").drawImage(t,0,0,s,a),console.warn("THREE.WebGLRenderer: Texture has been resized from ("+t.width+"x"+t.height+") to ("+s+"x"+a+")."),o}return"data"in t&&console.warn("THREE.WebGLRenderer: Image in DataTexture is too big ("+t.width+"x"+t.height+")."),t}return t}function B(t){return lt(t.width)&<(t.height)}function F(t,e){return t.generateMipmaps&&e&&t.minFilter!==p&&t.minFilter!==g}function O(e){t.generateMipmap(e)}function U(n,i,r,s){if(!1===o)return i;if(null!==n){if(void 0!==t[n])return t[n];console.warn("THREE.WebGLRenderer: Attempt to use non-existing WebGL internal format '"+n+"'")}let a=i;return 6403===i&&(5126===r&&(a=33326),5131===r&&(a=33325),5121===r&&(a=33321)),6407===i&&(5126===r&&(a=34837),5131===r&&(a=34843),5121===r&&(a=32849)),6408===i&&(5126===r&&(a=34836),5131===r&&(a=34842),5121===r&&(a=s===J?35907:32856)),33325!==a&&33326!==a&&34842!==a&&34836!==a||e.get("EXT_color_buffer_float"),a}function H(t,e,n){return!0===F(t,n)||t.isFramebufferTexture&&t.minFilter!==p&&t.minFilter!==g?Math.log2(Math.max(e.width,e.height))+1:void 0!==t.mipmaps&&t.mipmaps.length>0?t.mipmaps.length:t.isCompressedTexture&&Array.isArray(t.image)?e.mipmaps.length:1}function G(t){return t===p||t===m||t===f?9728:9729}function k(e){const n=e.target;n.removeEventListener("dispose",k),function(e){const n=i.get(e);if(void 0===n.__webglInit)return;t.deleteTexture(n.__webglTexture),i.remove(e)}(n),n.isVideoTexture&&P.delete(n),a.memory.textures--}function V(e){const n=e.target;n.removeEventListener("dispose",V),function(e){const n=e.texture,r=i.get(e),s=i.get(n);if(!e)return;void 0!==s.__webglTexture&&(t.deleteTexture(s.__webglTexture),a.memory.textures--);e.depthTexture&&e.depthTexture.dispose();if(e.isWebGLCubeRenderTarget)for(let e=0;e<6;e++)t.deleteFramebuffer(r.__webglFramebuffer[e]),r.__webglDepthbuffer&&t.deleteRenderbuffer(r.__webglDepthbuffer[e]);else t.deleteFramebuffer(r.__webglFramebuffer),r.__webglDepthbuffer&&t.deleteRenderbuffer(r.__webglDepthbuffer),r.__webglMultisampledFramebuffer&&t.deleteFramebuffer(r.__webglMultisampledFramebuffer),r.__webglColorRenderbuffer&&t.deleteRenderbuffer(r.__webglColorRenderbuffer),r.__webglDepthRenderbuffer&&t.deleteRenderbuffer(r.__webglDepthRenderbuffer);if(e.isWebGLMultipleRenderTargets)for(let e=0,r=n.length;e0&&r.__version!==t.version){const n=t.image;if(void 0===n)console.warn("THREE.WebGLRenderer: Texture marked for update but image is undefined");else{if(!1!==n.complete)return void K(r,t,e);console.warn("THREE.WebGLRenderer: Texture marked for update but image is incomplete")}}n.activeTexture(33984+e),n.bindTexture(3553,r.__webglTexture)}function q(e,r){const 
a=i.get(e);e.version>0&&a.__version!==e.version?function(e,i,r){if(6!==i.image.length)return;Q(e,i),n.activeTexture(33984+r),n.bindTexture(34067,e.__webglTexture),t.pixelStorei(37440,i.flipY),t.pixelStorei(37441,i.premultiplyAlpha),t.pixelStorei(3317,i.unpackAlignment),t.pixelStorei(37443,0);const a=i&&(i.isCompressedTexture||i.image[0].isCompressedTexture),l=i.image[0]&&i.image[0].isDataTexture,h=[];for(let t=0;t<6;t++)h[t]=a||l?l?i.image[t].image:i.image[t]:z(i.image[t],!1,!0,c);const u=h[0],d=B(u)||o,p=s.convert(i.format),m=s.convert(i.type),f=U(i.internalFormat,p,m,i.encoding),g=o&&!0!==i.isVideoTexture,v=void 0===e.__version;let y,x=H(i,u,d);if(Z(34067,i,d),a){g&&v&&n.texStorage2D(34067,x,f,u.width,u.height);for(let t=0;t<6;t++){y=h[t].mipmaps;for(let e=0;e0&&x++,n.texStorage2D(34067,x,f,h[0].width,h[0].height));for(let t=0;t<6;t++)if(l){g?n.texSubImage2D(34069+t,0,0,0,h[t].width,h[t].height,p,m,h[t].data):n.texImage2D(34069+t,0,f,h[t].width,h[t].height,0,p,m,h[t].data);for(let e=0;e1||i.get(s).__currentAnisotropy)&&(t.texParameterf(n,a.TEXTURE_MAX_ANISOTROPY_EXT,Math.min(s.anisotropy,r.getMaxAnisotropy())),i.get(s).__currentAnisotropy=s.anisotropy)}}function Q(e,n){void 0===e.__webglInit&&(e.__webglInit=!0,n.addEventListener("dispose",k),e.__webglTexture=t.createTexture(),a.memory.textures++)}function K(e,i,r){let a=3553;i.isDataTexture2DArray&&(a=35866),i.isDataTexture3D&&(a=32879),Q(e,i),n.activeTexture(33984+r),n.bindTexture(a,e.__webglTexture),t.pixelStorei(37440,i.flipY),t.pixelStorei(37441,i.premultiplyAlpha),t.pixelStorei(3317,i.unpackAlignment),t.pixelStorei(37443,0);const l=function(t){return!o&&(t.wrapS!==u||t.wrapT!==u||t.minFilter!==p&&t.minFilter!==g)}(i)&&!1===B(i.image),c=z(i.image,l,!1,x),h=B(c)||o,d=s.convert(i.format);let m,f=s.convert(i.type),v=U(i.internalFormat,d,f,i.encoding);Z(a,i,h);const y=i.mipmaps,w=o&&!0!==i.isVideoTexture,R=void 0===e.__version,C=H(i,c,h);if(i.isDepthTexture)v=6402,o?v=i.type===M?36012:i.type===b?33190:i.type===S?35056:33189:i.type===M&&console.error("WebGLRenderer: Floating point depth texture requires WebGL2."),i.format===A&&6402===v&&i.type!==_&&i.type!==b&&(console.warn("THREE.WebGLRenderer: Use UnsignedShortType or UnsignedIntType for DepthFormat DepthTexture."),i.type=_,f=s.convert(i.type)),i.format===L&&6402===v&&(v=34041,i.type!==S&&(console.warn("THREE.WebGLRenderer: Use UnsignedInt248Type for DepthStencilFormat DepthTexture."),i.type=S,f=s.convert(i.type))),w&&R?n.texStorage2D(3553,1,v,c.width,c.height):n.texImage2D(3553,0,v,c.width,c.height,0,d,f,null);else if(i.isDataTexture)if(y.length>0&&h){w&&R&&n.texStorage2D(3553,C,v,y[0].width,y[0].height);for(let t=0,e=y.length;t0&&h){w&&R&&n.texStorage2D(3553,C,v,y[0].width,y[0].height);for(let t=0,e=y.length;t=l&&console.warn("THREE.WebGLTextures: Trying to use "+t+" texture units while this GPU supports only "+l),W+=1,t},this.resetTextureUnits=function(){W=0},this.setTexture2D=j,this.setTexture2DArray=function(t,e){const r=i.get(t);t.version>0&&r.__version!==t.version?K(r,t,e):(n.activeTexture(33984+e),n.bindTexture(35866,r.__webglTexture))},this.setTexture3D=function(t,e){const r=i.get(t);t.version>0&&r.__version!==t.version?K(r,t,e):(n.activeTexture(33984+e),n.bindTexture(32879,r.__webglTexture))},this.setTextureCube=q,this.rebindTextures=function(t,e,n){const r=i.get(t);void 0!==e&&$(r.__webglFramebuffer,t,t.texture,36064,3553),void 0!==n&&et(t)},this.setupRenderTarget=function(e){const 
l=e.texture,c=i.get(e),h=i.get(l);e.addEventListener("dispose",V),!0!==e.isWebGLMultipleRenderTargets&&(void 0===h.__webglTexture&&(h.__webglTexture=t.createTexture()),h.__version=l.version,a.memory.textures++);const u=!0===e.isWebGLCubeRenderTarget,d=!0===e.isWebGLMultipleRenderTargets,p=l.isDataTexture3D||l.isDataTexture2DArray,m=B(e)||o;if(!o||l.format!==T||l.type!==M&&l.type!==w||(l.format=E,console.warn("THREE.WebGLRenderer: Rendering to textures with RGB format is not supported. Using RGBA format instead.")),u){c.__webglFramebuffer=[];for(let e=0;e<6;e++)c.__webglFramebuffer[e]=t.createFramebuffer()}else if(c.__webglFramebuffer=t.createFramebuffer(),d)if(r.drawBuffers){const n=e.texture;for(let e=0,r=n.length;eo+c?(l.inputState.pinching=!1,this.dispatchEvent({type:"pinchend",handedness:t.handedness,target:this})):!l.inputState.pinching&&a<=o-c&&(l.inputState.pinching=!0,this.dispatchEvent({type:"pinchstart",handedness:t.handedness,target:this}))}else null!==o&&t.gripSpace&&(r=e.getPose(t.gripSpace,n),null!==r&&(o.matrix.fromArray(r.transform.matrix),o.matrix.decompose(o.position,o.rotation,o.scale),r.linearVelocity?(o.hasLinearVelocity=!0,o.linearVelocity.copy(r.linearVelocity)):o.hasLinearVelocity=!1,r.angularVelocity?(o.hasAngularVelocity=!0,o.angularVelocity.copy(r.angularVelocity)):o.hasAngularVelocity=!1));return null!==a&&(a.visible=null!==i),null!==o&&(o.visible=null!==r),null!==l&&(l.visible=null!==s),this}}class qs extends bt{constructor(t,e,n,i,r,s,a,o,l,c){if((c=void 0!==c?c:A)!==A&&c!==L)throw new Error("DepthTexture format must be either THREE.DepthFormat or THREE.DepthStencilFormat");void 0===n&&c===A&&(n=_),void 0===n&&c===L&&(n=S),super(null,i,r,s,a,o,c,n,l),this.image={width:t,height:e},this.magFilter=void 0!==a?a:p,this.minFilter=void 0!==o?o:p,this.flipY=!1,this.generateMipmaps=!1}}qs.prototype.isDepthTexture=!0;class Xs extends ${constructor(t,e){super();const n=this;let i=null,r=1,s=null,a="local-floor";const o=t.extensions.has("WEBGL_multisampled_render_to_texture");let l=null,c=null,h=null,u=null,d=!1,p=null;const m=e.getContextAttributes();let f=null,g=null;const v=[],y=new Map,b=new jn;b.layers.enable(1),b.viewport=new wt;const M=new jn;M.layers.enable(2),M.viewport=new wt;const w=[b,M],R=new ks;R.layers.enable(1),R.layers.enable(2);let C=null,P=null;function D(t){const e=y.get(t.inputSource);e&&e.dispatchEvent({type:t.type,data:t.inputSource})}function I(){y.forEach((function(t,e){t.disconnect(e)})),y.clear(),C=null,P=null,t.setRenderTarget(f),u=null,h=null,c=null,i=null,g=null,U.stop(),n.isPresenting=!1,n.dispatchEvent({type:"sessionend"})}function N(t){const e=i.inputSources;for(let t=0;t0&&(e.alphaTest.value=n.alphaTest);const i=t.get(n).envMap;let 
r,s;i&&(e.envMap.value=i,e.flipEnvMap.value=i.isCubeTexture&&!1===i.isRenderTargetTexture?-1:1,e.reflectivity.value=n.reflectivity,e.ior.value=n.ior,e.refractionRatio.value=n.refractionRatio),n.lightMap&&(e.lightMap.value=n.lightMap,e.lightMapIntensity.value=n.lightMapIntensity),n.aoMap&&(e.aoMap.value=n.aoMap,e.aoMapIntensity.value=n.aoMapIntensity),n.map?r=n.map:n.specularMap?r=n.specularMap:n.displacementMap?r=n.displacementMap:n.normalMap?r=n.normalMap:n.bumpMap?r=n.bumpMap:n.roughnessMap?r=n.roughnessMap:n.metalnessMap?r=n.metalnessMap:n.alphaMap?r=n.alphaMap:n.emissiveMap?r=n.emissiveMap:n.clearcoatMap?r=n.clearcoatMap:n.clearcoatNormalMap?r=n.clearcoatNormalMap:n.clearcoatRoughnessMap?r=n.clearcoatRoughnessMap:n.specularIntensityMap?r=n.specularIntensityMap:n.specularColorMap?r=n.specularColorMap:n.transmissionMap?r=n.transmissionMap:n.thicknessMap?r=n.thicknessMap:n.sheenColorMap?r=n.sheenColorMap:n.sheenRoughnessMap&&(r=n.sheenRoughnessMap),void 0!==r&&(r.isWebGLRenderTarget&&(r=r.texture),!0===r.matrixAutoUpdate&&r.updateMatrix(),e.uvTransform.value.copy(r.matrix)),n.aoMap?s=n.aoMap:n.lightMap&&(s=n.lightMap),void 0!==s&&(s.isWebGLRenderTarget&&(s=s.texture),!0===s.matrixAutoUpdate&&s.updateMatrix(),e.uv2Transform.value.copy(s.matrix))}function n(e,n){e.roughness.value=n.roughness,e.metalness.value=n.metalness,n.roughnessMap&&(e.roughnessMap.value=n.roughnessMap),n.metalnessMap&&(e.metalnessMap.value=n.metalnessMap),n.emissiveMap&&(e.emissiveMap.value=n.emissiveMap),n.bumpMap&&(e.bumpMap.value=n.bumpMap,e.bumpScale.value=n.bumpScale,1===n.side&&(e.bumpScale.value*=-1)),n.normalMap&&(e.normalMap.value=n.normalMap,e.normalScale.value.copy(n.normalScale),1===n.side&&e.normalScale.value.negate()),n.displacementMap&&(e.displacementMap.value=n.displacementMap,e.displacementScale.value=n.displacementScale,e.displacementBias.value=n.displacementBias);t.get(n).envMap&&(e.envMapIntensity.value=n.envMapIntensity)}return{refreshFogUniforms:function(t,e){t.fogColor.value.copy(e.color),e.isFog?(t.fogNear.value=e.near,t.fogFar.value=e.far):e.isFogExp2&&(t.fogDensity.value=e.density)},refreshMaterialUniforms:function(t,i,r,s,a){i.isMeshBasicMaterial?e(t,i):i.isMeshLambertMaterial?(e(t,i),function(t,e){e.emissiveMap&&(t.emissiveMap.value=e.emissiveMap)}(t,i)):i.isMeshToonMaterial?(e(t,i),function(t,e){e.gradientMap&&(t.gradientMap.value=e.gradientMap);e.emissiveMap&&(t.emissiveMap.value=e.emissiveMap);e.bumpMap&&(t.bumpMap.value=e.bumpMap,t.bumpScale.value=e.bumpScale,1===e.side&&(t.bumpScale.value*=-1));e.normalMap&&(t.normalMap.value=e.normalMap,t.normalScale.value.copy(e.normalScale),1===e.side&&t.normalScale.value.negate());e.displacementMap&&(t.displacementMap.value=e.displacementMap,t.displacementScale.value=e.displacementScale,t.displacementBias.value=e.displacementBias)}(t,i)):i.isMeshPhongMaterial?(e(t,i),function(t,e){t.specular.value.copy(e.specular),t.shininess.value=Math.max(e.shininess,1e-4),e.emissiveMap&&(t.emissiveMap.value=e.emissiveMap);e.bumpMap&&(t.bumpMap.value=e.bumpMap,t.bumpScale.value=e.bumpScale,1===e.side&&(t.bumpScale.value*=-1));e.normalMap&&(t.normalMap.value=e.normalMap,t.normalScale.value.copy(e.normalScale),1===e.side&&t.normalScale.value.negate());e.displacementMap&&(t.displacementMap.value=e.displacementMap,t.displacementScale.value=e.displacementScale,t.displacementBias.value=e.displacementBias)}(t,i)):i.isMeshStandardMaterial?(e(t,i),i.isMeshPhysicalMaterial?function(t,e,i){n(t,e),t.ior.value=e.ior,e.sheen>0&&(t.sheenColor.value.copy(e.sheenColor).multiplyScal
ar(e.sheen),t.sheenRoughness.value=e.sheenRoughness,e.sheenColorMap&&(t.sheenColorMap.value=e.sheenColorMap),e.sheenRoughnessMap&&(t.sheenRoughnessMap.value=e.sheenRoughnessMap));e.clearcoat>0&&(t.clearcoat.value=e.clearcoat,t.clearcoatRoughness.value=e.clearcoatRoughness,e.clearcoatMap&&(t.clearcoatMap.value=e.clearcoatMap),e.clearcoatRoughnessMap&&(t.clearcoatRoughnessMap.value=e.clearcoatRoughnessMap),e.clearcoatNormalMap&&(t.clearcoatNormalScale.value.copy(e.clearcoatNormalScale),t.clearcoatNormalMap.value=e.clearcoatNormalMap,1===e.side&&t.clearcoatNormalScale.value.negate()));e.transmission>0&&(t.transmission.value=e.transmission,t.transmissionSamplerMap.value=i.texture,t.transmissionSamplerSize.value.set(i.width,i.height),e.transmissionMap&&(t.transmissionMap.value=e.transmissionMap),t.thickness.value=e.thickness,e.thicknessMap&&(t.thicknessMap.value=e.thicknessMap),t.attenuationDistance.value=e.attenuationDistance,t.attenuationColor.value.copy(e.attenuationColor));t.specularIntensity.value=e.specularIntensity,t.specularColor.value.copy(e.specularColor),e.specularIntensityMap&&(t.specularIntensityMap.value=e.specularIntensityMap);e.specularColorMap&&(t.specularColorMap.value=e.specularColorMap)}(t,i,a):n(t,i)):i.isMeshMatcapMaterial?(e(t,i),function(t,e){e.matcap&&(t.matcap.value=e.matcap);e.bumpMap&&(t.bumpMap.value=e.bumpMap,t.bumpScale.value=e.bumpScale,1===e.side&&(t.bumpScale.value*=-1));e.normalMap&&(t.normalMap.value=e.normalMap,t.normalScale.value.copy(e.normalScale),1===e.side&&t.normalScale.value.negate());e.displacementMap&&(t.displacementMap.value=e.displacementMap,t.displacementScale.value=e.displacementScale,t.displacementBias.value=e.displacementBias)}(t,i)):i.isMeshDepthMaterial?(e(t,i),function(t,e){e.displacementMap&&(t.displacementMap.value=e.displacementMap,t.displacementScale.value=e.displacementScale,t.displacementBias.value=e.displacementBias)}(t,i)):i.isMeshDistanceMaterial?(e(t,i),function(t,e){e.displacementMap&&(t.displacementMap.value=e.displacementMap,t.displacementScale.value=e.displacementScale,t.displacementBias.value=e.displacementBias);t.referencePosition.value.copy(e.referencePosition),t.nearDistance.value=e.nearDistance,t.farDistance.value=e.farDistance}(t,i)):i.isMeshNormalMaterial?(e(t,i),function(t,e){e.bumpMap&&(t.bumpMap.value=e.bumpMap,t.bumpScale.value=e.bumpScale,1===e.side&&(t.bumpScale.value*=-1));e.normalMap&&(t.normalMap.value=e.normalMap,t.normalScale.value.copy(e.normalScale),1===e.side&&t.normalScale.value.negate());e.displacementMap&&(t.displacementMap.value=e.displacementMap,t.displacementScale.value=e.displacementScale,t.displacementBias.value=e.displacementBias)}(t,i)):i.isLineBasicMaterial?(function(t,e){t.diffuse.value.copy(e.color),t.opacity.value=e.opacity}(t,i),i.isLineDashedMaterial&&function(t,e){t.dashSize.value=e.dashSize,t.totalSize.value=e.dashSize+e.gapSize,t.scale.value=e.scale}(t,i)):i.isPointsMaterial?function(t,e,n,i){t.diffuse.value.copy(e.color),t.opacity.value=e.opacity,t.size.value=e.size*n,t.scale.value=.5*i,e.map&&(t.map.value=e.map);e.alphaMap&&(t.alphaMap.value=e.alphaMap);e.alphaTest>0&&(t.alphaTest.value=e.alphaTest);let r;e.map?r=e.map:e.alphaMap&&(r=e.alphaMap);void 0!==r&&(!0===r.matrixAutoUpdate&&r.updateMatrix(),t.uvTransform.value.copy(r.matrix))}(t,i,r,s):i.isSpriteMaterial?function(t,e){t.diffuse.value.copy(e.color),t.opacity.value=e.opacity,t.rotation.value=e.rotation,e.map&&(t.map.value=e.map);e.alphaMap&&(t.alphaMap.value=e.alphaMap);e.alphaTest>0&&(t.alphaTest.value=e.alphaTest);let 
n;e.map?n=e.map:e.alphaMap&&(n=e.alphaMap);void 0!==n&&(!0===n.matrixAutoUpdate&&n.updateMatrix(),t.uvTransform.value.copy(n.matrix))}(t,i):i.isShadowMaterial?(t.color.value.copy(i.color),t.opacity.value=i.opacity):i.isShaderMaterial&&(i.uniformsNeedUpdate=!1)}}}function Ys(t={}){const e=void 0!==t.canvas?t.canvas:function(){const t=vt("canvas");return t.style.display="block",t}(),n=void 0!==t.context?t.context:null,i=void 0!==t.alpha&&t.alpha,r=void 0===t.depth||t.depth,s=void 0===t.stencil||t.stencil,a=void 0!==t.antialias&&t.antialias,o=void 0===t.premultipliedAlpha||t.premultipliedAlpha,l=void 0!==t.preserveDrawingBuffer&&t.preserveDrawingBuffer,c=void 0!==t.powerPreference?t.powerPreference:"default",h=void 0!==t.failIfMajorPerformanceCaveat&&t.failIfMajorPerformanceCaveat;let d=null,m=null;const f=[],g=[];this.domElement=e,this.debug={checkShaderErrors:!0},this.autoClear=!0,this.autoClearColor=!0,this.autoClearDepth=!0,this.autoClearStencil=!0,this.sortObjects=!0,this.clippingPlanes=[],this.localClippingEnabled=!1,this.outputEncoding=X,this.physicallyCorrectLights=!1,this.toneMapping=0,this.toneMappingExposure=1;const v=this;let _=!1,b=0,S=0,T=null,A=-1,L=null;const R=new wt,C=new wt;let P=null,D=e.width,I=e.height,N=1,z=null,B=null;const F=new wt(0,0,D,I),O=new wt(0,0,D,I);let U=!1;const H=[],G=new ni;let k=!1,V=!1,W=null;const j=new se,q=new Lt,J={background:null,fog:null,environment:null,overrideMaterial:null,isScene:!0};function Y(){return null===T?N:1}let Z,Q,K,$,tt,et,nt,it,rt,st,at,ot,lt,ct,ht,ut,dt,pt,mt,ft,gt,yt,xt,_t=n;function bt(t,n){for(let i=0;i0&&function(t,e,n){if(null===W){const t=!0===a&&!0===Q.isWebGL2;W=new(t?Et:St)(1024,1024,{generateMipmaps:!0,type:null!==yt.convert(w)?w:x,minFilter:y,magFilter:p,wrapS:u,wrapT:u,useRenderToTexture:Z.has("WEBGL_multisampled_render_to_texture")})}const i=v.getRenderTarget();v.setRenderTarget(W),v.clear();const r=v.toneMapping;v.toneMapping=0,Ft(t,e,n),v.toneMapping=r,et.updateMultisampleRenderTarget(W),et.updateRenderTargetMipmap(W),v.setRenderTarget(i)}(r,e,n),i&&K.viewport(R.copy(i)),r.length>0&&Ft(r,e,n),s.length>0&&Ft(s,e,n),o.length>0&&Ft(o,e,n)}function Ft(t,e,n){const i=!0===e.isScene?e.overrideMaterial:null;for(let r=0,s=t.length;r0?g[g.length-1]:null,f.pop(),d=f.length>0?f[f.length-1]:null},this.getActiveCubeFace=function(){return b},this.getActiveMipmapLevel=function(){return S},this.getRenderTarget=function(){return T},this.setRenderTargetTextures=function(t,e,n){tt.get(t.texture).__webglTexture=e,tt.get(t.depthTexture).__webglTexture=n;const i=tt.get(t);i.__hasExternalTextures=!0,i.__hasExternalTextures&&(i.__autoAllocateDepthBuffer=void 0===n,i.__autoAllocateDepthBuffer||t.useRenderToTexture&&(console.warn("render-to-texture extension was disabled because an external texture was provided"),t.useRenderToTexture=!1,t.useRenderbuffer=!0))},this.setRenderTargetFramebuffer=function(t,e){const n=tt.get(t);n.__webglFramebuffer=e,n.__useDefaultFramebuffer=void 0===e},this.setRenderTarget=function(t,e=0,n=0){T=t,b=e,S=n;let i=!0;if(t){const e=tt.get(t);void 0!==e.__useDefaultFramebuffer?(K.bindFramebuffer(36160,null),i=!1):void 0===e.__webglFramebuffer?et.setupRenderTarget(t):e.__hasExternalTextures&&et.rebindTextures(t,tt.get(t.texture).__webglTexture,tt.get(t.depthTexture).__webglTexture)}let r=null,s=!1,a=!1;if(t){const n=t.texture;(n.isDataTexture3D||n.isDataTexture2DArray)&&(a=!0);const 
i=tt.get(t).__webglFramebuffer;t.isWebGLCubeRenderTarget?(r=i[e],s=!0):r=t.useRenderbuffer?tt.get(t).__webglMultisampledFramebuffer:i,R.copy(t.viewport),C.copy(t.scissor),P=t.scissorTest}else R.copy(F).multiplyScalar(N).floor(),C.copy(O).multiplyScalar(N).floor(),P=U;if(K.bindFramebuffer(36160,r)&&Q.drawBuffers&&i){let e=!1;if(t)if(t.isWebGLMultipleRenderTargets){const n=t.texture;if(H.length!==n.length||36064!==H[0]){for(let t=0,e=n.length;t=0&&e<=t.width-i&&n>=0&&n<=t.height-r&&_t.readPixels(e,n,i,r,yt.convert(o),yt.convert(l),s):console.error("THREE.WebGLRenderer.readRenderTargetPixels: readPixels from renderTarget failed. Framebuffer not complete.")}finally{const t=null!==T?tt.get(T).__webglFramebuffer:null;K.bindFramebuffer(36160,t)}}},this.copyFramebufferToTexture=function(t,e,n=0){if(!0!==e.isFramebufferTexture)return void console.error("THREE.WebGLRenderer: copyFramebufferToTexture() can only be used with FramebufferTexture.");const i=Math.pow(2,-n),r=Math.floor(e.image.width*i),s=Math.floor(e.image.height*i);et.setTexture2D(e,0),_t.copyTexSubImage2D(3553,n,0,0,t.x,t.y,r,s),K.unbindTexture()},this.copyTextureToTexture=function(t,e,n,i=0){const r=e.image.width,s=e.image.height,a=yt.convert(n.format),o=yt.convert(n.type);et.setTexture2D(n,0),_t.pixelStorei(37440,n.flipY),_t.pixelStorei(37441,n.premultiplyAlpha),_t.pixelStorei(3317,n.unpackAlignment),e.isDataTexture?_t.texSubImage2D(3553,i,t.x,t.y,r,s,a,o,e.image.data):e.isCompressedTexture?_t.compressedTexSubImage2D(3553,i,t.x,t.y,e.mipmaps[0].width,e.mipmaps[0].height,a,e.mipmaps[0].data):_t.texSubImage2D(3553,i,t.x,t.y,a,o,e.image),0===i&&n.generateMipmaps&&_t.generateMipmap(3553),K.unbindTexture()},this.copyTextureToTexture3D=function(t,e,n,i,r=0){if(v.isWebGL1Renderer)return void console.warn("THREE.WebGLRenderer.copyTextureToTexture3D: can only be used with WebGL2.");const s=t.max.x-t.min.x+1,a=t.max.y-t.min.y+1,o=t.max.z-t.min.z+1,l=yt.convert(i.format),c=yt.convert(i.type);let h;if(i.isDataTexture3D)et.setTexture3D(i,0),h=32879;else{if(!i.isDataTexture2DArray)return void console.warn("THREE.WebGLRenderer.copyTextureToTexture3D: only supports THREE.DataTexture3D and THREE.DataTexture2DArray.");et.setTexture2DArray(i,0),h=35866}_t.pixelStorei(37440,i.flipY),_t.pixelStorei(37441,i.premultiplyAlpha),_t.pixelStorei(3317,i.unpackAlignment);const u=_t.getParameter(3314),d=_t.getParameter(32878),p=_t.getParameter(3316),m=_t.getParameter(3315),f=_t.getParameter(32877),g=n.isCompressedTexture?n.mipmaps[0]:n.image;_t.pixelStorei(3314,g.width),_t.pixelStorei(32878,g.height),_t.pixelStorei(3316,t.min.x),_t.pixelStorei(3315,t.min.y),_t.pixelStorei(32877,t.min.z),n.isDataTexture||n.isDataTexture3D?_t.texSubImage3D(h,r,e.x,e.y,e.z,s,a,o,l,c,g.data):n.isCompressedTexture?(console.warn("THREE.WebGLRenderer.copyTextureToTexture3D: untested support for compressed srcTexture."),_t.compressedTexSubImage3D(h,r,e.x,e.y,e.z,s,a,o,l,g.data)):_t.texSubImage3D(h,r,e.x,e.y,e.z,s,a,o,l,c,g),_t.pixelStorei(3314,u),_t.pixelStorei(32878,d),_t.pixelStorei(3316,p),_t.pixelStorei(3315,m),_t.pixelStorei(32877,f),0===r&&i.generateMipmaps&&_t.generateMipmap(h),K.unbindTexture()},this.initTexture=function(t){et.setTexture2D(t,0),K.unbindTexture()},this.resetState=function(){b=0,S=0,T=null,K.reset(),xt.reset()},"undefined"!=typeof __THREE_DEVTOOLS__&&__THREE_DEVTOOLS__.dispatchEvent(new CustomEvent("observe",{detail:this}))}Ys.prototype.isWebGLRenderer=!0;class Zs extends Ys{}Zs.prototype.isWebGL1Renderer=!0;class Qs{constructor(t,e=25e-5){this.name="",this.color=new 
Ze(t),this.density=e}clone(){return new Qs(this.color,this.density)}toJSON(){return{type:"FogExp2",color:this.color.getHex(),density:this.density}}}Qs.prototype.isFogExp2=!0;class Ks{constructor(t,e=1,n=1e3){this.name="",this.color=new Ze(t),this.near=e,this.far=n}clone(){return new Ks(this.color,this.near,this.far)}toJSON(){return{type:"Fog",color:this.color.getHex(),near:this.near,far:this.far}}}Ks.prototype.isFog=!0;class $s extends Ce{constructor(){super(),this.type="Scene",this.background=null,this.environment=null,this.fog=null,this.overrideMaterial=null,this.autoUpdate=!0,"undefined"!=typeof __THREE_DEVTOOLS__&&__THREE_DEVTOOLS__.dispatchEvent(new CustomEvent("observe",{detail:this}))}copy(t,e){return super.copy(t,e),null!==t.background&&(this.background=t.background.clone()),null!==t.environment&&(this.environment=t.environment.clone()),null!==t.fog&&(this.fog=t.fog.clone()),null!==t.overrideMaterial&&(this.overrideMaterial=t.overrideMaterial.clone()),this.autoUpdate=t.autoUpdate,this.matrixAutoUpdate=t.matrixAutoUpdate,this}toJSON(t){const e=super.toJSON(t);return null!==this.fog&&(e.object.fog=this.fog.toJSON()),e}}$s.prototype.isScene=!0;class ta{constructor(t,e){this.array=t,this.stride=e,this.count=void 0!==t?t.length/e:0,this.usage=Z,this.updateRange={offset:0,count:-1},this.version=0,this.uuid=rt()}onUploadCallback(){}set needsUpdate(t){!0===t&&this.version++}setUsage(t){return this.usage=t,this}copy(t){return this.array=new t.array.constructor(t.array),this.count=t.count,this.stride=t.stride,this.usage=t.usage,this}copyAt(t,e,n){t*=this.stride,n*=e.stride;for(let i=0,r=this.stride;it.far||e.push({distance:o,point:sa.clone(),uv:Ge.getUV(sa,ua,da,pa,ma,fa,ga,new dt),face:null,object:this})}copy(t){return super.copy(t),void 0!==t.center&&this.center.copy(t.center),this.material=t.material,this}}function ya(t,e,n,i,r,s){la.subVectors(t,n).addScalar(.5).multiply(i),void 0!==r?(ca.x=s*la.x-r*la.y,ca.y=r*la.x+s*la.y):ca.copy(la),t.copy(e),t.x+=ca.x,t.y+=ca.y,t.applyMatrix4(ha)}va.prototype.isSprite=!0;const xa=new Lt,_a=new Lt;class ba extends Ce{constructor(){super(),this._currentLevel=0,this.type="LOD",Object.defineProperties(this,{levels:{enumerable:!0,value:[]},isLOD:{value:!0}}),this.autoUpdate=!0}copy(t){super.copy(t,!1);const e=t.levels;for(let t=0,n=e.length;t0){let n,i;for(n=1,i=e.length;n0){xa.setFromMatrixPosition(this.matrixWorld);const n=t.ray.origin.distanceTo(xa);this.getObjectForDistance(n).raycast(t,e)}}update(t){const e=this.levels;if(e.length>1){xa.setFromMatrixPosition(t.matrixWorld),_a.setFromMatrixPosition(this.matrixWorld);const n=xa.distanceTo(_a)/t.zoom;let i,r;for(e[0].object.visible=!0,i=1,r=e.length;i=e[i].distance;i++)e[i-1].object.visible=!1,e[i].object.visible=!0;for(this._currentLevel=i-1;io)continue;u.applyMatrix4(this.matrixWorld);const d=t.ray.origin.distanceTo(u);dt.far||e.push({distance:d,point:h.clone().applyMatrix4(this.matrixWorld),index:n,face:null,faceIndex:null,object:this})}}else{for(let n=Math.max(0,s.start),i=Math.min(r.count,s.start+s.count)-1;no)continue;u.applyMatrix4(this.matrixWorld);const i=t.ray.origin.distanceTo(u);it.far||e.push({distance:i,point:h.clone().applyMatrix4(this.matrixWorld),index:n,face:null,faceIndex:null,object:this})}}}else n.isGeometry&&console.error("THREE.Line.raycast() no longer supports THREE.Geometry. 
Use THREE.BufferGeometry instead.")}updateMorphTargets(){const t=this.geometry;if(t.isBufferGeometry){const e=t.morphAttributes,n=Object.keys(e);if(n.length>0){const t=e[n[0]];if(void 0!==t){this.morphTargetInfluences=[],this.morphTargetDictionary={};for(let e=0,n=t.length;e0&&console.error("THREE.Line.updateMorphTargets() does not support THREE.Geometry. Use THREE.BufferGeometry instead.")}}}ja.prototype.isLine=!0;const qa=new Lt,Xa=new Lt;class Ja extends ja{constructor(t,e){super(t,e),this.type="LineSegments"}computeLineDistances(){const t=this.geometry;if(t.isBufferGeometry)if(null===t.index){const e=t.attributes.position,n=[];for(let t=0,i=e.count;t0){const t=e[n[0]];if(void 0!==t){this.morphTargetInfluences=[],this.morphTargetDictionary={};for(let e=0,n=t.length;e0&&console.error("THREE.Points.updateMorphTargets() does not support THREE.Geometry. Use THREE.BufferGeometry instead.")}}}function no(t,e,n,i,r,s,a){const o=Ka.distanceSqToPoint(t);if(or.far)return;s.push({distance:l,distanceToRay:Math.sqrt(o),point:n,index:e,face:null,object:a})}}eo.prototype.isPoints=!0;class io extends bt{constructor(t,e,n,i,r,s,a,o,l){super(t,e,n,i,r,s,a,o,l),this.format=void 0!==a?a:T,this.minFilter=void 0!==s?s:g,this.magFilter=void 0!==r?r:g,this.generateMipmaps=!1;const c=this;"requestVideoFrameCallback"in t&&t.requestVideoFrameCallback((function e(){c.needsUpdate=!0,t.requestVideoFrameCallback(e)}))}clone(){return new this.constructor(this.image).copy(this)}update(){const t=this.image;!1==="requestVideoFrameCallback"in t&&t.readyState>=t.HAVE_CURRENT_DATA&&(this.needsUpdate=!0)}}io.prototype.isVideoTexture=!0;class ro extends bt{constructor(t,e,n){super({width:t,height:e}),this.format=n,this.magFilter=p,this.minFilter=p,this.generateMipmaps=!1,this.needsUpdate=!0}}ro.prototype.isFramebufferTexture=!0;class so extends bt{constructor(t,e,n,i,r,s,a,o,l,c,h,u){super(null,s,a,o,l,c,i,r,h,u),this.image={width:e,height:n},this.mipmaps=t,this.flipY=!1,this.generateMipmaps=!1}}so.prototype.isCompressedTexture=!0;class ao extends bt{constructor(t,e,n,i,r,s,a,o,l){super(t,e,n,i,r,s,a,o,l),this.needsUpdate=!0}}ao.prototype.isCanvasTexture=!0;class oo extends xn{constructor(t=1,e=8,n=0,i=2*Math.PI){super(),this.type="CircleGeometry",this.parameters={radius:t,segments:e,thetaStart:n,thetaLength:i},e=Math.max(3,e);const r=[],s=[],a=[],o=[],l=new Lt,c=new dt;s.push(0,0,0),a.push(0,0,1),o.push(.5,.5);for(let r=0,h=3;r<=e;r++,h+=3){const u=n+r/e*i;l.x=t*Math.cos(u),l.y=t*Math.sin(u),s.push(l.x,l.y,l.z),a.push(0,0,1),c.x=(s[h]/t+1)/2,c.y=(s[h+1]/t+1)/2,o.push(c.x,c.y)}for(let t=1;t<=e;t++)r.push(t,t+1,0);this.setIndex(r),this.setAttribute("position",new hn(s,3)),this.setAttribute("normal",new hn(a,3)),this.setAttribute("uv",new hn(o,2))}static fromJSON(t){return new oo(t.radius,t.segments,t.thetaStart,t.thetaLength)}}class lo extends xn{constructor(t=1,e=1,n=1,i=8,r=1,s=!1,a=0,o=2*Math.PI){super(),this.type="CylinderGeometry",this.parameters={radiusTop:t,radiusBottom:e,height:n,radialSegments:i,heightSegments:r,openEnded:s,thetaStart:a,thetaLength:o};const l=this;i=Math.floor(i),r=Math.floor(r);const c=[],h=[],u=[],d=[];let p=0;const m=[],f=n/2;let g=0;function v(n){const r=p,s=new dt,m=new Lt;let v=0;const y=!0===n?t:e,x=!0===n?1:-1;for(let t=1;t<=i;t++)h.push(0,f*x,0),u.push(0,x,0),d.push(.5,.5),p++;const _=p;for(let t=0;t<=i;t++){const e=t/i*o+a,n=Math.cos(e),r=Math.sin(e);m.x=y*r,m.y=f*x,m.z=y*n,h.push(m.x,m.y,m.z),u.push(0,x,0),s.x=.5*n+.5,s.y=.5*r*x+.5,d.push(s.x,s.y),p++}for(let 
t=0;t0&&v(!0),e>0&&v(!1)),this.setIndex(c),this.setAttribute("position",new hn(h,3)),this.setAttribute("normal",new hn(u,3)),this.setAttribute("uv",new hn(d,2))}static fromJSON(t){return new lo(t.radiusTop,t.radiusBottom,t.height,t.radialSegments,t.heightSegments,t.openEnded,t.thetaStart,t.thetaLength)}}class co extends lo{constructor(t=1,e=1,n=8,i=1,r=!1,s=0,a=2*Math.PI){super(0,t,e,n,i,r,s,a),this.type="ConeGeometry",this.parameters={radius:t,height:e,radialSegments:n,heightSegments:i,openEnded:r,thetaStart:s,thetaLength:a}}static fromJSON(t){return new co(t.radius,t.height,t.radialSegments,t.heightSegments,t.openEnded,t.thetaStart,t.thetaLength)}}class ho extends xn{constructor(t=[],e=[],n=1,i=0){super(),this.type="PolyhedronGeometry",this.parameters={vertices:t,indices:e,radius:n,detail:i};const r=[],s=[];function a(t,e,n,i){const r=i+1,s=[];for(let i=0;i<=r;i++){s[i]=[];const a=t.clone().lerp(n,i/r),o=e.clone().lerp(n,i/r),l=r-i;for(let t=0;t<=l;t++)s[i][t]=0===t&&i===r?a:a.clone().lerp(o,t/l)}for(let t=0;t.9&&a<.1&&(e<.2&&(s[t+0]+=1),n<.2&&(s[t+2]+=1),i<.2&&(s[t+4]+=1))}}()}(),this.setAttribute("position",new hn(r,3)),this.setAttribute("normal",new hn(r.slice(),3)),this.setAttribute("uv",new hn(s,2)),0===i?this.computeVertexNormals():this.normalizeNormals()}static fromJSON(t){return new ho(t.vertices,t.indices,t.radius,t.details)}}class uo extends ho{constructor(t=1,e=0){const n=(1+Math.sqrt(5))/2,i=1/n;super([-1,-1,-1,-1,-1,1,-1,1,-1,-1,1,1,1,-1,-1,1,-1,1,1,1,-1,1,1,1,0,-i,-n,0,-i,n,0,i,-n,0,i,n,-i,-n,0,-i,n,0,i,-n,0,i,n,0,-n,0,-i,n,0,-i,-n,0,i,n,0,i],[3,11,7,3,7,15,3,15,13,7,19,17,7,17,6,7,6,15,17,4,8,17,8,10,17,10,6,8,0,16,8,16,2,8,2,10,0,12,1,0,1,18,0,18,16,6,10,2,6,2,13,6,13,15,2,16,18,2,18,3,2,3,13,18,1,9,18,9,11,18,11,3,4,14,12,4,12,0,4,0,8,11,9,5,11,5,19,11,19,7,19,5,14,19,14,4,19,4,17,1,12,14,1,14,5,1,5,9],t,e),this.type="DodecahedronGeometry",this.parameters={radius:t,detail:e}}static fromJSON(t){return new uo(t.radius,t.detail)}}const po=new Lt,mo=new Lt,fo=new Lt,go=new Ge;class vo extends xn{constructor(t=null,e=1){if(super(),this.type="EdgesGeometry",this.parameters={geometry:t,thresholdAngle:e},null!==t){const n=4,i=Math.pow(10,n),r=Math.cos(nt*e),s=t.getIndex(),a=t.getAttribute("position"),o=s?s.count:a.count,l=[0,0,0],c=["a","b","c"],h=new Array(3),u={},d=[];for(let t=0;t0)){l=i;break}l=i-1}if(i=l,n[i]===s)return i/(r-1);const c=n[i];return(i+(s-c)/(n[i+1]-c))/(r-1)}getTangent(t,e){const n=1e-4;let i=t-n,r=t+n;i<0&&(i=0),r>1&&(r=1);const s=this.getPoint(i),a=this.getPoint(r),o=e||(s.isVector2?new dt:new Lt);return o.copy(a).sub(s).normalize(),o}getTangentAt(t,e){const n=this.getUtoTmapping(t);return this.getTangent(n,e)}computeFrenetFrames(t,e){const n=new Lt,i=[],r=[],s=[],a=new Lt,o=new se;for(let e=0;e<=t;e++){const n=e/t;i[e]=this.getTangentAt(n,new Lt)}r[0]=new Lt,s[0]=new Lt;let l=Number.MAX_VALUE;const c=Math.abs(i[0].x),h=Math.abs(i[0].y),u=Math.abs(i[0].z);c<=l&&(l=c,n.set(1,0,0)),h<=l&&(l=h,n.set(0,1,0)),u<=l&&n.set(0,0,1),a.crossVectors(i[0],n).normalize(),r[0].crossVectors(i[0],a),s[0].crossVectors(i[0],r[0]);for(let e=1;e<=t;e++){if(r[e]=r[e-1].clone(),s[e]=s[e-1].clone(),a.crossVectors(i[e-1],i[e]),a.length()>Number.EPSILON){a.normalize();const t=Math.acos(st(i[e-1].dot(i[e]),-1,1));r[e].applyMatrix4(o.makeRotationAxis(a,t))}s[e].crossVectors(i[e],r[e])}if(!0===e){let e=Math.acos(st(r[0].dot(r[t]),-1,1));e/=t,i[0].dot(a.crossVectors(r[0],r[t]))>0&&(e=-e);for(let 
n=1;n<=t;n++)r[n].applyMatrix4(o.makeRotationAxis(i[n],e*n)),s[n].crossVectors(i[n],r[n])}return{tangents:i,normals:r,binormals:s}}clone(){return(new this.constructor).copy(this)}copy(t){return this.arcLengthDivisions=t.arcLengthDivisions,this}toJSON(){const t={metadata:{version:4.5,type:"Curve",generator:"Curve.toJSON"}};return t.arcLengthDivisions=this.arcLengthDivisions,t.type=this.type,t}fromJSON(t){return this.arcLengthDivisions=t.arcLengthDivisions,this}}class xo extends yo{constructor(t=0,e=0,n=1,i=1,r=0,s=2*Math.PI,a=!1,o=0){super(),this.type="EllipseCurve",this.aX=t,this.aY=e,this.xRadius=n,this.yRadius=i,this.aStartAngle=r,this.aEndAngle=s,this.aClockwise=a,this.aRotation=o}getPoint(t,e){const n=e||new dt,i=2*Math.PI;let r=this.aEndAngle-this.aStartAngle;const s=Math.abs(r)i;)r-=i;r0?0:(Math.floor(Math.abs(l)/r)+1)*r:0===c&&l===r-1&&(l=r-2,c=1),this.closed||l>0?a=i[(l-1)%r]:(Mo.subVectors(i[0],i[1]).add(i[0]),a=Mo);const h=i[l%r],u=i[(l+1)%r];if(this.closed||l+2i.length-2?i.length-1:s+1],h=i[s>i.length-3?i.length-1:s+2];return n.set(Ao(a,o.x,l.x,c.x,h.x),Ao(a,o.y,l.y,c.y,h.y)),n}copy(t){super.copy(t),this.points=[];for(let e=0,n=t.points.length;e=n){const t=i[r]-n,s=this.curves[r],a=s.getLength(),o=0===a?0:1-t/a;return s.getPointAt(o,e)}r++}return null}getLength(){const t=this.getCurveLengths();return t[t.length-1]}updateArcLengths(){this.needsUpdate=!0,this.cacheLengths=null,this.getCurveLengths()}getCurveLengths(){if(this.cacheLengths&&this.cacheLengths.length===this.curves.length)return this.cacheLengths;const t=[];let e=0;for(let n=0,i=this.curves.length;n1&&!e[e.length-1].equals(e[0])&&e.push(e[0]),e}copy(t){super.copy(t),this.curves=[];for(let e=0,n=t.curves.length;e0){const t=l.getPoint(0);t.equals(this.currentPoint)||this.lineTo(t.x,t.y)}this.curves.push(l);const c=l.getPoint(1);return this.currentPoint.copy(c),this}copy(t){return super.copy(t),this.currentPoint.copy(t.currentPoint),this}toJSON(){const t=super.toJSON();return t.currentPoint=this.currentPoint.toArray(),t}fromJSON(t){return super.fromJSON(t),this.currentPoint.fromArray(t.currentPoint),this}}class Ho extends Uo{constructor(t){super(t),this.uuid=rt(),this.type="Shape",this.holes=[]}getPointsHoles(t){const e=[];for(let n=0,i=this.holes.length;n80*n){o=c=t[0],l=h=t[1];for(let e=n;ec&&(c=u),d>h&&(h=d);p=Math.max(c-o,h-l),p=0!==p?1/p:0}return Wo(s,a,n,o,l,p),a};function ko(t,e,n,i,r){let s,a;if(r===function(t,e,n,i){let r=0;for(let s=e,a=n-i;s0)for(s=e;s=e;s-=i)a=cl(s,t[s],t[s+1],a);return a&&il(a,a.next)&&(hl(a),a=a.next),a}function Vo(t,e){if(!t)return t;e||(e=t);let n,i=t;do{if(n=!1,i.steiner||!il(i,i.next)&&0!==nl(i.prev,i,i.next))i=i.next;else{if(hl(i),i=e=i.prev,i===i.next)break;n=!0}}while(n||i!==e);return e}function Wo(t,e,n,i,r,s,a){if(!t)return;!a&&s&&function(t,e,n,i){let r=t;do{null===r.z&&(r.z=Ko(r.x,r.y,e,n,i)),r.prevZ=r.prev,r.nextZ=r.next,r=r.next}while(r!==t);r.prevZ.nextZ=null,r.prevZ=null,function(t){let e,n,i,r,s,a,o,l,c=1;do{for(n=t,t=null,s=null,a=0;n;){for(a++,i=n,o=0,e=0;e0||l>0&&i;)0!==o&&(0===l||!i||n.z<=i.z)?(r=n,n=n.nextZ,o--):(r=i,i=i.nextZ,l--),s?s.nextZ=r:t=r,r.prevZ=s,s=r;n=i}s.nextZ=null,c*=2}while(a>1)}(r)}(t,i,r,s);let o,l,c=t;for(;t.prev!==t.next;)if(o=t.prev,l=t.next,s?qo(t,i,r,s):jo(t))e.push(o.i/n),e.push(t.i/n),e.push(l.i/n),hl(t),t=l.next,c=l.next;else if((t=l)===c){a?1===a?Wo(t=Xo(Vo(t),e,n),e,n,i,r,s,2):2===a&&Jo(t,e,n,i,r,s):Wo(Vo(t),e,n,i,r,s,1);break}}function jo(t){const e=t.prev,n=t,i=t.next;if(nl(e,n,i)>=0)return!1;let 
r=t.next.next;for(;r!==t.prev;){if(tl(e.x,e.y,n.x,n.y,i.x,i.y,r.x,r.y)&&nl(r.prev,r,r.next)>=0)return!1;r=r.next}return!0}function qo(t,e,n,i){const r=t.prev,s=t,a=t.next;if(nl(r,s,a)>=0)return!1;const o=r.xs.x?r.x>a.x?r.x:a.x:s.x>a.x?s.x:a.x,h=r.y>s.y?r.y>a.y?r.y:a.y:s.y>a.y?s.y:a.y,u=Ko(o,l,e,n,i),d=Ko(c,h,e,n,i);let p=t.prevZ,m=t.nextZ;for(;p&&p.z>=u&&m&&m.z<=d;){if(p!==t.prev&&p!==t.next&&tl(r.x,r.y,s.x,s.y,a.x,a.y,p.x,p.y)&&nl(p.prev,p,p.next)>=0)return!1;if(p=p.prevZ,m!==t.prev&&m!==t.next&&tl(r.x,r.y,s.x,s.y,a.x,a.y,m.x,m.y)&&nl(m.prev,m,m.next)>=0)return!1;m=m.nextZ}for(;p&&p.z>=u;){if(p!==t.prev&&p!==t.next&&tl(r.x,r.y,s.x,s.y,a.x,a.y,p.x,p.y)&&nl(p.prev,p,p.next)>=0)return!1;p=p.prevZ}for(;m&&m.z<=d;){if(m!==t.prev&&m!==t.next&&tl(r.x,r.y,s.x,s.y,a.x,a.y,m.x,m.y)&&nl(m.prev,m,m.next)>=0)return!1;m=m.nextZ}return!0}function Xo(t,e,n){let i=t;do{const r=i.prev,s=i.next.next;!il(r,s)&&rl(r,i,i.next,s)&&ol(r,s)&&ol(s,r)&&(e.push(r.i/n),e.push(i.i/n),e.push(s.i/n),hl(i),hl(i.next),i=t=s),i=i.next}while(i!==t);return Vo(i)}function Jo(t,e,n,i,r,s){let a=t;do{let t=a.next.next;for(;t!==a.prev;){if(a.i!==t.i&&el(a,t)){let o=ll(a,t);return a=Vo(a,a.next),o=Vo(o,o.next),Wo(a,e,n,i,r,s),void Wo(o,e,n,i,r,s)}t=t.next}a=a.next}while(a!==t)}function Yo(t,e){return t.x-e.x}function Zo(t,e){if(e=function(t,e){let n=e;const i=t.x,r=t.y;let s,a=-1/0;do{if(r<=n.y&&r>=n.next.y&&n.next.y!==n.y){const t=n.x+(r-n.y)*(n.next.x-n.x)/(n.next.y-n.y);if(t<=i&&t>a){if(a=t,t===i){if(r===n.y)return n;if(r===n.next.y)return n.next}s=n.x=n.x&&n.x>=l&&i!==n.x&&tl(rs.x||n.x===s.x&&Qo(s,n)))&&(s=n,u=h)),n=n.next}while(n!==o);return s}(t,e),e){const n=ll(e,t);Vo(e,e.next),Vo(n,n.next)}}function Qo(t,e){return nl(t.prev,t,e.prev)<0&&nl(e.next,t,t.next)<0}function Ko(t,e,n,i,r){return(t=1431655765&((t=858993459&((t=252645135&((t=16711935&((t=32767*(t-n)*r)|t<<8))|t<<4))|t<<2))|t<<1))|(e=1431655765&((e=858993459&((e=252645135&((e=16711935&((e=32767*(e-i)*r)|e<<8))|e<<4))|e<<2))|e<<1))<<1}function $o(t){let e=t,n=t;do{(e.x=0&&(t-a)*(i-o)-(n-a)*(e-o)>=0&&(n-a)*(s-o)-(r-a)*(i-o)>=0}function el(t,e){return t.next.i!==e.i&&t.prev.i!==e.i&&!function(t,e){let n=t;do{if(n.i!==t.i&&n.next.i!==t.i&&n.i!==e.i&&n.next.i!==e.i&&rl(n,n.next,t,e))return!0;n=n.next}while(n!==t);return!1}(t,e)&&(ol(t,e)&&ol(e,t)&&function(t,e){let n=t,i=!1;const r=(t.x+e.x)/2,s=(t.y+e.y)/2;do{n.y>s!=n.next.y>s&&n.next.y!==n.y&&r<(n.next.x-n.x)*(s-n.y)/(n.next.y-n.y)+n.x&&(i=!i),n=n.next}while(n!==t);return i}(t,e)&&(nl(t.prev,t,e.prev)||nl(t,e.prev,e))||il(t,e)&&nl(t.prev,t,t.next)>0&&nl(e.prev,e,e.next)>0)}function nl(t,e,n){return(e.y-t.y)*(n.x-e.x)-(e.x-t.x)*(n.y-e.y)}function il(t,e){return t.x===e.x&&t.y===e.y}function rl(t,e,n,i){const r=al(nl(t,e,n)),s=al(nl(t,e,i)),a=al(nl(n,i,t)),o=al(nl(n,i,e));return r!==s&&a!==o||(!(0!==r||!sl(t,n,e))||(!(0!==s||!sl(t,i,e))||(!(0!==a||!sl(n,t,i))||!(0!==o||!sl(n,e,i)))))}function sl(t,e,n){return e.x<=Math.max(t.x,n.x)&&e.x>=Math.min(t.x,n.x)&&e.y<=Math.max(t.y,n.y)&&e.y>=Math.min(t.y,n.y)}function al(t){return t>0?1:t<0?-1:0}function ol(t,e){return nl(t.prev,t,t.next)<0?nl(t,e,t.next)>=0&&nl(t,t.prev,e)>=0:nl(t,e,t.prev)<0||nl(t,t.next,e)<0}function ll(t,e){const n=new ul(t.i,t.x,t.y),i=new ul(e.i,e.x,e.y),r=t.next,s=e.prev;return t.next=e,e.prev=t,n.next=r,r.prev=n,i.next=n,n.prev=i,s.next=i,i.prev=s,i}function cl(t,e,n,i){const r=new ul(t,e,n);return i?(r.next=i.next,r.prev=i,i.next.prev=r,i.next=r):(r.prev=r,r.next=r),r}function 
hl(t){t.next.prev=t.prev,t.prev.next=t.next,t.prevZ&&(t.prevZ.nextZ=t.nextZ),t.nextZ&&(t.nextZ.prevZ=t.prevZ)}function ul(t,e,n){this.i=t,this.x=e,this.y=n,this.prev=null,this.next=null,this.z=null,this.prevZ=null,this.nextZ=null,this.steiner=!1}class dl{static area(t){const e=t.length;let n=0;for(let i=e-1,r=0;r2&&t[e-1].equals(t[0])&&t.pop()}function ml(t,e){for(let n=0;nNumber.EPSILON){const u=Math.sqrt(h),d=Math.sqrt(l*l+c*c),p=e.x-o/u,m=e.y+a/u,f=((n.x-c/d-p)*c-(n.y+l/d-m)*l)/(a*c-o*l);i=p+a*f-t.x,r=m+o*f-t.y;const g=i*i+r*r;if(g<=2)return new dt(i,r);s=Math.sqrt(g/2)}else{let t=!1;a>Number.EPSILON?l>Number.EPSILON&&(t=!0):a<-Number.EPSILON?l<-Number.EPSILON&&(t=!0):Math.sign(o)===Math.sign(c)&&(t=!0),t?(i=-o,r=a,s=Math.sqrt(h)):(i=a,r=o,s=Math.sqrt(h/2))}return new dt(i/s,r/s)}const P=[];for(let t=0,e=E.length,n=e-1,i=t+1;t=0;t--){const e=t/p,n=h*Math.cos(e*Math.PI/2),i=u*Math.sin(e*Math.PI/2)+d;for(let t=0,e=E.length;t=0;){const i=n;let r=n-1;r<0&&(r=t.length-1);for(let t=0,n=o+2*p;t0)&&d.push(e,r,l),(t!==n-1||o0!=t>0&&this.version++,this._sheen=t}get clearcoat(){return this._clearcoat}set clearcoat(t){this._clearcoat>0!=t>0&&this.version++,this._clearcoat=t}get transmission(){return this._transmission}set transmission(t){this._transmission>0!=t>0&&this.version++,this._transmission=t}copy(t){return super.copy(t),this.defines={STANDARD:"",PHYSICAL:""},this.clearcoat=t.clearcoat,this.clearcoatMap=t.clearcoatMap,this.clearcoatRoughness=t.clearcoatRoughness,this.clearcoatRoughnessMap=t.clearcoatRoughnessMap,this.clearcoatNormalMap=t.clearcoatNormalMap,this.clearcoatNormalScale.copy(t.clearcoatNormalScale),this.ior=t.ior,this.sheen=t.sheen,this.sheenColor.copy(t.sheenColor),this.sheenColorMap=t.sheenColorMap,this.sheenRoughness=t.sheenRoughness,this.sheenRoughnessMap=t.sheenRoughnessMap,this.transmission=t.transmission,this.transmissionMap=t.transmissionMap,this.thickness=t.thickness,this.thicknessMap=t.thicknessMap,this.attenuationDistance=t.attenuationDistance,this.attenuationColor.copy(t.attenuationColor),this.specularIntensity=t.specularIntensity,this.specularIntensityMap=t.specularIntensityMap,this.specularColor.copy(t.specularColor),this.specularColorMap=t.specularColorMap,this}}Dl.prototype.isMeshPhysicalMaterial=!0;class Il extends Ve{constructor(t){super(),this.type="MeshPhongMaterial",this.color=new Ze(16777215),this.specular=new Ze(1118481),this.shininess=30,this.map=null,this.lightMap=null,this.lightMapIntensity=1,this.aoMap=null,this.aoMapIntensity=1,this.emissive=new Ze(0),this.emissiveIntensity=1,this.emissiveMap=null,this.bumpMap=null,this.bumpScale=1,this.normalMap=null,this.normalMapType=0,this.normalScale=new dt(1,1),this.displacementMap=null,this.displacementScale=1,this.displacementBias=0,this.specularMap=null,this.alphaMap=null,this.envMap=null,this.combine=0,this.reflectivity=1,this.refractionRatio=.98,this.wireframe=!1,this.wireframeLinewidth=1,this.wireframeLinecap="round",this.wireframeLinejoin="round",this.flatShading=!1,this.setValues(t)}copy(t){return 
super.copy(t),this.color.copy(t.color),this.specular.copy(t.specular),this.shininess=t.shininess,this.map=t.map,this.lightMap=t.lightMap,this.lightMapIntensity=t.lightMapIntensity,this.aoMap=t.aoMap,this.aoMapIntensity=t.aoMapIntensity,this.emissive.copy(t.emissive),this.emissiveMap=t.emissiveMap,this.emissiveIntensity=t.emissiveIntensity,this.bumpMap=t.bumpMap,this.bumpScale=t.bumpScale,this.normalMap=t.normalMap,this.normalMapType=t.normalMapType,this.normalScale.copy(t.normalScale),this.displacementMap=t.displacementMap,this.displacementScale=t.displacementScale,this.displacementBias=t.displacementBias,this.specularMap=t.specularMap,this.alphaMap=t.alphaMap,this.envMap=t.envMap,this.combine=t.combine,this.reflectivity=t.reflectivity,this.refractionRatio=t.refractionRatio,this.wireframe=t.wireframe,this.wireframeLinewidth=t.wireframeLinewidth,this.wireframeLinecap=t.wireframeLinecap,this.wireframeLinejoin=t.wireframeLinejoin,this.flatShading=t.flatShading,this}}Il.prototype.isMeshPhongMaterial=!0;class Nl extends Ve{constructor(t){super(),this.defines={TOON:""},this.type="MeshToonMaterial",this.color=new Ze(16777215),this.map=null,this.gradientMap=null,this.lightMap=null,this.lightMapIntensity=1,this.aoMap=null,this.aoMapIntensity=1,this.emissive=new Ze(0),this.emissiveIntensity=1,this.emissiveMap=null,this.bumpMap=null,this.bumpScale=1,this.normalMap=null,this.normalMapType=0,this.normalScale=new dt(1,1),this.displacementMap=null,this.displacementScale=1,this.displacementBias=0,this.alphaMap=null,this.wireframe=!1,this.wireframeLinewidth=1,this.wireframeLinecap="round",this.wireframeLinejoin="round",this.setValues(t)}copy(t){return super.copy(t),this.color.copy(t.color),this.map=t.map,this.gradientMap=t.gradientMap,this.lightMap=t.lightMap,this.lightMapIntensity=t.lightMapIntensity,this.aoMap=t.aoMap,this.aoMapIntensity=t.aoMapIntensity,this.emissive.copy(t.emissive),this.emissiveMap=t.emissiveMap,this.emissiveIntensity=t.emissiveIntensity,this.bumpMap=t.bumpMap,this.bumpScale=t.bumpScale,this.normalMap=t.normalMap,this.normalMapType=t.normalMapType,this.normalScale.copy(t.normalScale),this.displacementMap=t.displacementMap,this.displacementScale=t.displacementScale,this.displacementBias=t.displacementBias,this.alphaMap=t.alphaMap,this.wireframe=t.wireframe,this.wireframeLinewidth=t.wireframeLinewidth,this.wireframeLinecap=t.wireframeLinecap,this.wireframeLinejoin=t.wireframeLinejoin,this}}Nl.prototype.isMeshToonMaterial=!0;class zl extends Ve{constructor(t){super(),this.type="MeshNormalMaterial",this.bumpMap=null,this.bumpScale=1,this.normalMap=null,this.normalMapType=0,this.normalScale=new dt(1,1),this.displacementMap=null,this.displacementScale=1,this.displacementBias=0,this.wireframe=!1,this.wireframeLinewidth=1,this.fog=!1,this.flatShading=!1,this.setValues(t)}copy(t){return super.copy(t),this.bumpMap=t.bumpMap,this.bumpScale=t.bumpScale,this.normalMap=t.normalMap,this.normalMapType=t.normalMapType,this.normalScale.copy(t.normalScale),this.displacementMap=t.displacementMap,this.displacementScale=t.displacementScale,this.displacementBias=t.displacementBias,this.wireframe=t.wireframe,this.wireframeLinewidth=t.wireframeLinewidth,this.flatShading=t.flatShading,this}}zl.prototype.isMeshNormalMaterial=!0;class Bl extends Ve{constructor(t){super(),this.type="MeshLambertMaterial",this.color=new Ze(16777215),this.map=null,this.lightMap=null,this.lightMapIntensity=1,this.aoMap=null,this.aoMapIntensity=1,this.emissive=new 
Ze(0),this.emissiveIntensity=1,this.emissiveMap=null,this.specularMap=null,this.alphaMap=null,this.envMap=null,this.combine=0,this.reflectivity=1,this.refractionRatio=.98,this.wireframe=!1,this.wireframeLinewidth=1,this.wireframeLinecap="round",this.wireframeLinejoin="round",this.setValues(t)}copy(t){return super.copy(t),this.color.copy(t.color),this.map=t.map,this.lightMap=t.lightMap,this.lightMapIntensity=t.lightMapIntensity,this.aoMap=t.aoMap,this.aoMapIntensity=t.aoMapIntensity,this.emissive.copy(t.emissive),this.emissiveMap=t.emissiveMap,this.emissiveIntensity=t.emissiveIntensity,this.specularMap=t.specularMap,this.alphaMap=t.alphaMap,this.envMap=t.envMap,this.combine=t.combine,this.reflectivity=t.reflectivity,this.refractionRatio=t.refractionRatio,this.wireframe=t.wireframe,this.wireframeLinewidth=t.wireframeLinewidth,this.wireframeLinecap=t.wireframeLinecap,this.wireframeLinejoin=t.wireframeLinejoin,this}}Bl.prototype.isMeshLambertMaterial=!0;class Fl extends Ve{constructor(t){super(),this.defines={MATCAP:""},this.type="MeshMatcapMaterial",this.color=new Ze(16777215),this.matcap=null,this.map=null,this.bumpMap=null,this.bumpScale=1,this.normalMap=null,this.normalMapType=0,this.normalScale=new dt(1,1),this.displacementMap=null,this.displacementScale=1,this.displacementBias=0,this.alphaMap=null,this.flatShading=!1,this.setValues(t)}copy(t){return super.copy(t),this.defines={MATCAP:""},this.color.copy(t.color),this.matcap=t.matcap,this.map=t.map,this.bumpMap=t.bumpMap,this.bumpScale=t.bumpScale,this.normalMap=t.normalMap,this.normalMapType=t.normalMapType,this.normalScale.copy(t.normalScale),this.displacementMap=t.displacementMap,this.displacementScale=t.displacementScale,this.displacementBias=t.displacementBias,this.alphaMap=t.alphaMap,this.flatShading=t.flatShading,this}}Fl.prototype.isMeshMatcapMaterial=!0;class Ol extends Ua{constructor(t){super(),this.type="LineDashedMaterial",this.scale=1,this.dashSize=3,this.gapSize=1,this.setValues(t)}copy(t){return super.copy(t),this.scale=t.scale,this.dashSize=t.dashSize,this.gapSize=t.gapSize,this}}Ol.prototype.isLineDashedMaterial=!0;var Ul=Object.freeze({__proto__:null,ShadowMaterial:Cl,SpriteMaterial:ia,RawShaderMaterial:gi,ShaderMaterial:Vn,PointsMaterial:Za,MeshPhysicalMaterial:Dl,MeshStandardMaterial:Pl,MeshPhongMaterial:Il,MeshToonMaterial:Nl,MeshNormalMaterial:zl,MeshLambertMaterial:Bl,MeshDepthMaterial:Bs,MeshDistanceMaterial:Fs,MeshBasicMaterial:Qe,MeshMatcapMaterial:Fl,LineDashedMaterial:Ol,LineBasicMaterial:Ua,Material:Ve});const Hl={arraySlice:function(t,e,n){return Hl.isTypedArray(t)?new t.constructor(t.subarray(e,void 0!==n?n:t.length)):t.slice(e,n)},convertArray:function(t,e,n){return!t||!n&&t.constructor===e?t:"number"==typeof e.BYTES_PER_ELEMENT?new e(t):Array.prototype.slice.call(t)},isTypedArray:function(t){return ArrayBuffer.isView(t)&&!(t instanceof DataView)},getKeyframeOrder:function(t){const e=t.length,n=new Array(e);for(let t=0;t!==e;++t)n[t]=t;return n.sort((function(e,n){return t[e]-t[n]})),n},sortedArray:function(t,e,n){const i=t.length,r=new t.constructor(i);for(let s=0,a=0;a!==i;++s){const i=n[s]*e;for(let n=0;n!==e;++n)r[a++]=t[i+n]}return r},flattenJSON:function(t,e,n,i){let r=1,s=t[0];for(;void 0!==s&&void 0===s[i];)s=t[r++];if(void 0===s)return;let a=s[i];if(void 0!==a)if(Array.isArray(a))do{a=s[i],void 0!==a&&(e.push(s.time),n.push.apply(n,a)),s=t[r++]}while(void 0!==s);else if(void 0!==a.toArray)do{a=s[i],void 0!==a&&(e.push(s.time),a.toArray(n,n.length)),s=t[r++]}while(void 0!==s);else do{a=s[i],void 
0!==a&&(e.push(s.time),n.push(a)),s=t[r++]}while(void 0!==s)},subclip:function(t,e,n,i,r=30){const s=t.clone();s.name=e;const a=[];for(let t=0;t=i)){l.push(e.times[t]);for(let n=0;ns.tracks[t].times[0]&&(o=s.tracks[t].times[0]);for(let t=0;t=i.times[u]){const t=u*l+o,e=t+l-o;d=Hl.arraySlice(i.values,t,e)}else{const t=i.createInterpolant(),e=o,n=l-o;t.evaluate(s),d=Hl.arraySlice(t.resultBuffer,e,n)}if("quaternion"===r){(new At).fromArray(d).normalize().conjugate().toArray(d)}const p=a.times.length;for(let t=0;t=r)break t;{const a=e[1];t=r)break e}s=n,n=0}}for(;n>>1;te;)--s;if(++s,0!==r||s!==i){r>=s&&(s=Math.max(s,1),r=s-1);const t=this.getValueSize();this.times=Hl.arraySlice(n,r,s),this.values=Hl.arraySlice(this.values,r*t,s*t)}return this}validate(){let t=!0;const e=this.getValueSize();e-Math.floor(e)!=0&&(console.error("THREE.KeyframeTrack: Invalid value size in track.",this),t=!1);const n=this.times,i=this.values,r=n.length;0===r&&(console.error("THREE.KeyframeTrack: Track is empty.",this),t=!1);let s=null;for(let e=0;e!==r;e++){const i=n[e];if("number"==typeof i&&isNaN(i)){console.error("THREE.KeyframeTrack: Time is not a valid number.",this,e,i),t=!1;break}if(null!==s&&s>i){console.error("THREE.KeyframeTrack: Out of order keys.",this,e,i,s),t=!1;break}s=i}if(void 0!==i&&Hl.isTypedArray(i))for(let e=0,n=i.length;e!==n;++e){const n=i[e];if(isNaN(n)){console.error("THREE.KeyframeTrack: Value is not a valid number.",this,e,n),t=!1;break}}return t}optimize(){const t=Hl.arraySlice(this.times),e=Hl.arraySlice(this.values),n=this.getValueSize(),i=this.getInterpolation()===G,r=t.length-1;let s=1;for(let a=1;a0){t[s]=t[r];for(let t=r*n,i=s*n,a=0;a!==n;++a)e[i+a]=e[t+a];++s}return s!==t.length?(this.times=Hl.arraySlice(t,0,s),this.values=Hl.arraySlice(e,0,s*n)):(this.times=t,this.values=e),this}clone(){const t=Hl.arraySlice(this.times,0),e=Hl.arraySlice(this.values,0),n=new(0,this.constructor)(this.name,t,e);return n.createInterpolant=this.createInterpolant,n}}jl.prototype.TimeBufferType=Float32Array,jl.prototype.ValueBufferType=Float32Array,jl.prototype.DefaultInterpolation=H;class ql extends jl{}ql.prototype.ValueTypeName="bool",ql.prototype.ValueBufferType=Array,ql.prototype.DefaultInterpolation=U,ql.prototype.InterpolantFactoryMethodLinear=void 0,ql.prototype.InterpolantFactoryMethodSmooth=void 0;class Xl extends jl{}Xl.prototype.ValueTypeName="color";class Jl extends jl{}Jl.prototype.ValueTypeName="number";class Yl extends Gl{constructor(t,e,n,i){super(t,e,n,i)}interpolate_(t,e,n,i){const r=this.resultBuffer,s=this.sampleValues,a=this.valueSize,o=(n-e)/(i-e);let l=t*a;for(let t=l+a;l!==t;l+=4)At.slerpFlat(r,0,s,l-a,s,l,o);return r}}class Zl extends jl{InterpolantFactoryMethodLinear(t){return new Yl(this.times,this.values,this.getValueSize(),t)}}Zl.prototype.ValueTypeName="quaternion",Zl.prototype.DefaultInterpolation=H,Zl.prototype.InterpolantFactoryMethodSmooth=void 0;class Ql extends jl{}Ql.prototype.ValueTypeName="string",Ql.prototype.ValueBufferType=Array,Ql.prototype.DefaultInterpolation=U,Ql.prototype.InterpolantFactoryMethodLinear=void 0,Ql.prototype.InterpolantFactoryMethodSmooth=void 0;class Kl extends jl{}Kl.prototype.ValueTypeName="vector";class $l{constructor(t,e=-1,n,i=2500){this.name=t,this.tracks=n,this.duration=e,this.blendMode=i,this.uuid=rt(),this.duration<0&&this.resetDuration()}static parse(t){const e=[],n=t.tracks,i=1/(t.fps||1);for(let t=0,r=n.length;t!==r;++t)e.push(tc(n[t]).scale(i));const r=new this(t.name,t.duration,e,t.blendMode);return r.uuid=t.uuid,r}static 
toJSON(t){const e=[],n=t.tracks,i={name:t.name,duration:t.duration,tracks:e,uuid:t.uuid,blendMode:t.blendMode};for(let t=0,i=n.length;t!==i;++t)e.push(jl.toJSON(n[t]));return i}static CreateFromMorphTargetSequence(t,e,n,i){const r=e.length,s=[];for(let t=0;t1){const t=s[1];let e=i[t];e||(i[t]=e=[]),e.push(n)}}const s=[];for(const t in i)s.push(this.CreateFromMorphTargetSequence(t,i[t],e,n));return s}static parseAnimation(t,e){if(!t)return console.error("THREE.AnimationClip: No animation in JSONLoader data."),null;const n=function(t,e,n,i,r){if(0!==n.length){const s=[],a=[];Hl.flattenJSON(n,s,a,i),0!==s.length&&r.push(new t(e,s,a))}},i=[],r=t.name||"default",s=t.fps||30,a=t.blendMode;let o=t.length||-1;const l=t.hierarchy||[];for(let t=0;t{e&&e(r),this.manager.itemEnd(t)}),0),r;if(void 0!==sc[t])return void sc[t].push({onLoad:e,onProgress:n,onError:i});sc[t]=[],sc[t].push({onLoad:e,onProgress:n,onError:i});const s=new Request(t,{headers:new Headers(this.requestHeader),credentials:this.withCredentials?"include":"same-origin"});fetch(s).then((e=>{if(200===e.status||0===e.status){if(0===e.status&&console.warn("THREE.FileLoader: HTTP Status 0 received."),"undefined"==typeof ReadableStream||void 0===e.body.getReader)return e;const n=sc[t],i=e.body.getReader(),r=e.headers.get("Content-Length"),s=r?parseInt(r):0,a=0!==s;let o=0;const l=new ReadableStream({start(t){!function e(){i.read().then((({done:i,value:r})=>{if(i)t.close();else{o+=r.byteLength;const i=new ProgressEvent("progress",{lengthComputable:a,loaded:o,total:s});for(let t=0,e=n.length;t{switch(this.responseType){case"arraybuffer":return t.arrayBuffer();case"blob":return t.blob();case"document":return t.text().then((t=>(new DOMParser).parseFromString(t,this.mimeType)));case"json":return t.json();default:return t.text()}})).then((e=>{ec.add(t,e);const n=sc[t];delete sc[t];for(let t=0,i=n.length;t{const n=sc[t];if(void 0===n)throw this.manager.itemError(t),e;delete sc[t];for(let t=0,i=n.length;t{this.manager.itemEnd(t)})),this.manager.itemStart(t)}setResponseType(t){return this.responseType=t,this}setMimeType(t){return this.mimeType=t,this}}class oc extends rc{constructor(t){super(t)}load(t,e,n,i){void 0!==this.path&&(t=this.path+t),t=this.manager.resolveURL(t);const r=this,s=ec.get(t);if(void 0!==s)return r.manager.itemStart(t),setTimeout((function(){e&&e(s),r.manager.itemEnd(t)}),0),s;const a=vt("img");function o(){c(),ec.add(t,this),e&&e(this),r.manager.itemEnd(t)}function l(e){c(),i&&i(e),r.manager.itemError(t),r.manager.itemEnd(t)}function c(){a.removeEventListener("load",o,!1),a.removeEventListener("error",l,!1)}return a.addEventListener("load",o,!1),a.addEventListener("error",l,!1),"data:"!==t.substr(0,5)&&void 0!==this.crossOrigin&&(a.crossOrigin=this.crossOrigin),r.manager.itemStart(t),a.src=t,a}}class lc extends rc{constructor(t){super(t)}load(t,e,n,i){const r=new Jn,s=new oc(this.manager);s.setCrossOrigin(this.crossOrigin),s.setPath(this.path);let a=0;function o(n){s.load(t[n],(function(t){r.images[n]=t,a++,6===a&&(r.needsUpdate=!0,e&&e(r))}),void 0,i)}for(let e=0;e0:i.vertexColors=t.vertexColors),void 0!==t.uniforms)for(const e in t.uniforms){const r=t.uniforms[e];switch(i.uniforms[e]={},r.type){case"t":i.uniforms[e].value=n(r.value);break;case"c":i.uniforms[e].value=(new Ze).setHex(r.value);break;case"v2":i.uniforms[e].value=(new dt).fromArray(r.value);break;case"v3":i.uniforms[e].value=(new Lt).fromArray(r.value);break;case"v4":i.uniforms[e].value=(new wt).fromArray(r.value);break;case"m3":i.uniforms[e].value=(new 
pt).fromArray(r.value);break;case"m4":i.uniforms[e].value=(new se).fromArray(r.value);break;default:i.uniforms[e].value=r.value}}if(void 0!==t.defines&&(i.defines=t.defines),void 0!==t.vertexShader&&(i.vertexShader=t.vertexShader),void 0!==t.fragmentShader&&(i.fragmentShader=t.fragmentShader),void 0!==t.extensions)for(const e in t.extensions)i.extensions[e]=t.extensions[e];if(void 0!==t.shading&&(i.flatShading=1===t.shading),void 0!==t.size&&(i.size=t.size),void 0!==t.sizeAttenuation&&(i.sizeAttenuation=t.sizeAttenuation),void 0!==t.map&&(i.map=n(t.map)),void 0!==t.matcap&&(i.matcap=n(t.matcap)),void 0!==t.alphaMap&&(i.alphaMap=n(t.alphaMap)),void 0!==t.bumpMap&&(i.bumpMap=n(t.bumpMap)),void 0!==t.bumpScale&&(i.bumpScale=t.bumpScale),void 0!==t.normalMap&&(i.normalMap=n(t.normalMap)),void 0!==t.normalMapType&&(i.normalMapType=t.normalMapType),void 0!==t.normalScale){let e=t.normalScale;!1===Array.isArray(e)&&(e=[e,e]),i.normalScale=(new dt).fromArray(e)}return void 0!==t.displacementMap&&(i.displacementMap=n(t.displacementMap)),void 0!==t.displacementScale&&(i.displacementScale=t.displacementScale),void 0!==t.displacementBias&&(i.displacementBias=t.displacementBias),void 0!==t.roughnessMap&&(i.roughnessMap=n(t.roughnessMap)),void 0!==t.metalnessMap&&(i.metalnessMap=n(t.metalnessMap)),void 0!==t.emissiveMap&&(i.emissiveMap=n(t.emissiveMap)),void 0!==t.emissiveIntensity&&(i.emissiveIntensity=t.emissiveIntensity),void 0!==t.specularMap&&(i.specularMap=n(t.specularMap)),void 0!==t.specularIntensityMap&&(i.specularIntensityMap=n(t.specularIntensityMap)),void 0!==t.specularColorMap&&(i.specularColorMap=n(t.specularColorMap)),void 0!==t.envMap&&(i.envMap=n(t.envMap)),void 0!==t.envMapIntensity&&(i.envMapIntensity=t.envMapIntensity),void 0!==t.reflectivity&&(i.reflectivity=t.reflectivity),void 0!==t.refractionRatio&&(i.refractionRatio=t.refractionRatio),void 0!==t.lightMap&&(i.lightMap=n(t.lightMap)),void 0!==t.lightMapIntensity&&(i.lightMapIntensity=t.lightMapIntensity),void 0!==t.aoMap&&(i.aoMap=n(t.aoMap)),void 0!==t.aoMapIntensity&&(i.aoMapIntensity=t.aoMapIntensity),void 0!==t.gradientMap&&(i.gradientMap=n(t.gradientMap)),void 0!==t.clearcoatMap&&(i.clearcoatMap=n(t.clearcoatMap)),void 0!==t.clearcoatRoughnessMap&&(i.clearcoatRoughnessMap=n(t.clearcoatRoughnessMap)),void 0!==t.clearcoatNormalMap&&(i.clearcoatNormalMap=n(t.clearcoatNormalMap)),void 0!==t.clearcoatNormalScale&&(i.clearcoatNormalScale=(new dt).fromArray(t.clearcoatNormalScale)),void 0!==t.transmissionMap&&(i.transmissionMap=n(t.transmissionMap)),void 0!==t.thicknessMap&&(i.thicknessMap=n(t.thicknessMap)),void 0!==t.sheenColorMap&&(i.sheenColorMap=n(t.sheenColorMap)),void 0!==t.sheenRoughnessMap&&(i.sheenRoughnessMap=n(t.sheenRoughnessMap)),i}setTextures(t){return this.textures=t,this}}class Pc{static decodeText(t){if("undefined"!=typeof TextDecoder)return(new TextDecoder).decode(t);let e="";for(let n=0,i=t.length;n0){this.source.connect(this.filters[0]);for(let t=1,e=this.filters.length;t0){this.source.disconnect(this.filters[0]);for(let t=1,e=this.filters.length;t0&&this._mixBufferRegionAdditive(n,i,this._addIndex*e,1,e);for(let t=e,r=e+e;t!==r;++t)if(n[t]!==n[t+e]){a.setValue(n,i);break}}saveOriginalState(){const t=this.binding,e=this.buffer,n=this.valueSize,i=n*this._origIndex;t.getValue(e,i);for(let t=n,r=i;t!==r;++t)e[t]=e[i+t%n];this._setIdentity(),this.cumulativeWeight=0,this.cumulativeWeightAdditive=0}restoreOriginalState(){const t=3*this.valueSize;this.binding.setValue(this.buffer,t)}_setAdditiveIdentityNumeric(){const 
t=this._addIndex*this.valueSize,e=t+this.valueSize;for(let n=t;n=.5)for(let i=0;i!==r;++i)t[e+i]=t[n+i]}_slerp(t,e,n,i){At.slerpFlat(t,e,t,e,t,n,i)}_slerpAdditive(t,e,n,i,r){const s=this._workIndex*r;At.multiplyQuaternionsFlat(t,s,t,e,t,n),At.slerpFlat(t,e,t,e,t,s,i)}_lerp(t,e,n,i,r){const s=1-i;for(let a=0;a!==r;++a){const r=e+a;t[r]=t[r]*s+t[n+a]*i}}_lerpAdditive(t,e,n,i,r){for(let s=0;s!==r;++s){const r=e+s;t[r]=t[r]+t[n+s]*i}}}const sh="\\[\\]\\.:\\/",ah=new RegExp("[\\[\\]\\.:\\/]","g"),oh="[^\\[\\]\\.:\\/]",lh="[^"+sh.replace("\\.","")+"]",ch=/((?:WC+[\/:])*)/.source.replace("WC",oh),hh=/(WCOD+)?/.source.replace("WCOD",lh),uh=/(?:\.(WC+)(?:\[(.+)\])?)?/.source.replace("WC",oh),dh=/\.(WC+)(?:\[(.+)\])?/.source.replace("WC",oh),ph=new RegExp("^"+ch+hh+uh+dh+"$"),mh=["material","materials","bones"];class fh{constructor(t,e,n){this.path=e,this.parsedPath=n||fh.parseTrackName(e),this.node=fh.findNode(t,this.parsedPath.nodeName)||t,this.rootNode=t,this.getValue=this._getValue_unbound,this.setValue=this._setValue_unbound}static create(t,e,n){return t&&t.isAnimationObjectGroup?new fh.Composite(t,e,n):new fh(t,e,n)}static sanitizeNodeName(t){return t.replace(/\s/g,"_").replace(ah,"")}static parseTrackName(t){const e=ph.exec(t);if(!e)throw new Error("PropertyBinding: Cannot parse trackName: "+t);const n={nodeName:e[2],objectName:e[3],objectIndex:e[4],propertyName:e[5],propertyIndex:e[6]},i=n.nodeName&&n.nodeName.lastIndexOf(".");if(void 0!==i&&-1!==i){const t=n.nodeName.substring(i+1);-1!==mh.indexOf(t)&&(n.nodeName=n.nodeName.substring(0,i),n.objectName=t)}if(null===n.propertyName||0===n.propertyName.length)throw new Error("PropertyBinding: can not parse propertyName from trackName: "+t);return n}static findNode(t,e){if(!e||""===e||"."===e||-1===e||e===t.name||e===t.uuid)return t;if(t.skeleton){const n=t.skeleton.getBoneByName(e);if(void 0!==n)return n}if(t.children){const n=function(t){for(let i=0;i=r){const s=r++,c=t[s];e[c.uuid]=l,t[l]=c,e[o]=s,t[s]=a;for(let t=0,e=i;t!==e;++t){const e=n[t],i=e[s],r=e[l];e[l]=i,e[s]=r}}}this.nCachedObjects_=r}uncache(){const t=this._objects,e=this._indicesByUUID,n=this._bindings,i=n.length;let r=this.nCachedObjects_,s=t.length;for(let a=0,o=arguments.length;a!==o;++a){const o=arguments[a].uuid,l=e[o];if(void 0!==l)if(delete e[o],l0&&(e[a.uuid]=l),t[l]=a,t.pop();for(let t=0,e=i;t!==e;++t){const e=n[t];e[l]=e[r],e.pop()}}}this.nCachedObjects_=r}subscribe_(t,e){const n=this._bindingsIndicesByPath;let i=n[t];const r=this._bindings;if(void 0!==i)return r[i];const s=this._paths,a=this._parsedPaths,o=this._objects,l=o.length,c=this.nCachedObjects_,h=new Array(l);i=r.length,n[t]=i,s.push(t),a.push(e),r.push(h);for(let n=c,i=o.length;n!==i;++n){const i=o[n];h[n]=new fh(i,t,e)}return h}unsubscribe_(t){const e=this._bindingsIndicesByPath,n=e[t];if(void 0!==n){const i=this._paths,r=this._parsedPaths,s=this._bindings,a=s.length-1,o=s[a];e[t[a]]=n,s[n]=o,s.pop(),r[n]=r[a],r.pop(),i[n]=i[a],i.pop()}}}gh.prototype.isAnimationObjectGroup=!0;class vh{constructor(t,e,n=null,i=e.blendMode){this._mixer=t,this._clip=e,this._localRoot=n,this.blendMode=i;const r=e.tracks,s=r.length,a=new Array(s),o={endingStart:k,endingEnd:k};for(let t=0;t!==s;++t){const e=r[t].createInterpolant(null);a[t]=e,e.settings=o}this._interpolantSettings=o,this._interpolants=a,this._propertyBindings=new 
Array(s),this._cacheIndex=null,this._byClipCacheIndex=null,this._timeScaleInterpolant=null,this._weightInterpolant=null,this.loop=2201,this._loopCount=-1,this._startTime=null,this.time=0,this.timeScale=1,this._effectiveTimeScale=1,this.weight=1,this._effectiveWeight=1,this.repetitions=1/0,this.paused=!1,this.enabled=!0,this.clampWhenFinished=!1,this.zeroSlopeAtStart=!0,this.zeroSlopeAtEnd=!0}play(){return this._mixer._activateAction(this),this}stop(){return this._mixer._deactivateAction(this),this.reset()}reset(){return this.paused=!1,this.enabled=!0,this.time=0,this._loopCount=-1,this._startTime=null,this.stopFading().stopWarping()}isRunning(){return this.enabled&&!this.paused&&0!==this.timeScale&&null===this._startTime&&this._mixer._isActiveAction(this)}isScheduled(){return this._mixer._isActiveAction(this)}startAt(t){return this._startTime=t,this}setLoop(t,e){return this.loop=t,this.repetitions=e,this}setEffectiveWeight(t){return this.weight=t,this._effectiveWeight=this.enabled?t:0,this.stopFading()}getEffectiveWeight(){return this._effectiveWeight}fadeIn(t){return this._scheduleFading(t,0,1)}fadeOut(t){return this._scheduleFading(t,1,0)}crossFadeFrom(t,e,n){if(t.fadeOut(e),this.fadeIn(e),n){const n=this._clip.duration,i=t._clip.duration,r=i/n,s=n/i;t.warp(1,r,e),this.warp(s,1,e)}return this}crossFadeTo(t,e,n){return t.crossFadeFrom(this,e,n)}stopFading(){const t=this._weightInterpolant;return null!==t&&(this._weightInterpolant=null,this._mixer._takeBackControlInterpolant(t)),this}setEffectiveTimeScale(t){return this.timeScale=t,this._effectiveTimeScale=this.paused?0:t,this.stopWarping()}getEffectiveTimeScale(){return this._effectiveTimeScale}setDuration(t){return this.timeScale=this._clip.duration/t,this.stopWarping()}syncWith(t){return this.time=t.time,this.timeScale=t.timeScale,this.stopWarping()}halt(t){return this.warp(this._effectiveTimeScale,0,t)}warp(t,e,n){const i=this._mixer,r=i.time,s=this.timeScale;let a=this._timeScaleInterpolant;null===a&&(a=i._lendControlInterpolant(),this._timeScaleInterpolant=a);const o=a.parameterPositions,l=a.sampleValues;return o[0]=r,o[1]=r+n,l[0]=t/s,l[1]=e/s,this}stopWarping(){const t=this._timeScaleInterpolant;return null!==t&&(this._timeScaleInterpolant=null,this._mixer._takeBackControlInterpolant(t)),this}getMixer(){return this._mixer}getClip(){return this._clip}getRoot(){return this._localRoot||this._mixer._root}_update(t,e,n,i){if(!this.enabled)return void this._updateWeight(t);const r=this._startTime;if(null!==r){const i=(t-r)*n;if(i<0||0===n)return;this._startTime=null,e=n*i}e*=this._updateTimeScale(t);const s=this._updateTime(e),a=this._updateWeight(t);if(a>0){const t=this._interpolants,e=this._propertyBindings;if(this.blendMode===q)for(let n=0,i=t.length;n!==i;++n)t[n].evaluate(s),e[n].accumulateAdditive(a);else for(let n=0,r=t.length;n!==r;++n)t[n].evaluate(s),e[n].accumulate(i,a)}}_updateWeight(t){let e=0;if(this.enabled){e=this.weight;const n=this._weightInterpolant;if(null!==n){const i=n.evaluate(t)[0];e*=i,t>n.parameterPositions[1]&&(this.stopFading(),0===i&&(this.enabled=!1))}}return this._effectiveWeight=e,e}_updateTimeScale(t){let e=0;if(!this.paused){e=this.timeScale;const n=this._timeScaleInterpolant;if(null!==n){e*=n.evaluate(t)[0],t>n.parameterPositions[1]&&(this.stopWarping(),0===e?this.paused=!0:this.timeScale=e)}}return this._effectiveTimeScale=e,e}_updateTime(t){const e=this._clip.duration,n=this.loop;let i=this.time+t,r=this._loopCount;const 
s=2202===n;if(0===t)return-1===r?i:s&&1==(1&r)?e-i:i;if(2200===n){-1===r&&(this._loopCount=0,this._setEndings(!0,!0,!1));t:{if(i>=e)i=e;else{if(!(i<0)){this.time=i;break t}i=0}this.clampWhenFinished?this.paused=!0:this.enabled=!1,this.time=i,this._mixer.dispatchEvent({type:"finished",action:this,direction:t<0?-1:1})}}else{if(-1===r&&(t>=0?(r=0,this._setEndings(!0,0===this.repetitions,s)):this._setEndings(0===this.repetitions,!0,s)),i>=e||i<0){const n=Math.floor(i/e);i-=e*n,r+=Math.abs(n);const a=this.repetitions-r;if(a<=0)this.clampWhenFinished?this.paused=!0:this.enabled=!1,i=t>0?e:0,this.time=i,this._mixer.dispatchEvent({type:"finished",action:this,direction:t>0?1:-1});else{if(1===a){const e=t<0;this._setEndings(e,!e,s)}else this._setEndings(!1,!1,s);this._loopCount=r,this.time=i,this._mixer.dispatchEvent({type:"loop",action:this,loopDelta:n})}}else this.time=i;if(s&&1==(1&r))return e-i}return i}_setEndings(t,e,n){const i=this._interpolantSettings;n?(i.endingStart=V,i.endingEnd=V):(i.endingStart=t?this.zeroSlopeAtStart?V:k:W,i.endingEnd=e?this.zeroSlopeAtEnd?V:k:W)}_scheduleFading(t,e,n){const i=this._mixer,r=i.time;let s=this._weightInterpolant;null===s&&(s=i._lendControlInterpolant(),this._weightInterpolant=s);const a=s.parameterPositions,o=s.sampleValues;return a[0]=r,o[0]=e,a[1]=r+t,o[1]=n,this}}class yh extends ${constructor(t){super(),this._root=t,this._initMemoryManager(),this._accuIndex=0,this.time=0,this.timeScale=1}_bindAction(t,e){const n=t._localRoot||this._root,i=t._clip.tracks,r=i.length,s=t._propertyBindings,a=t._interpolants,o=n.uuid,l=this._bindingsByRootAndName;let c=l[o];void 0===c&&(c={},l[o]=c);for(let t=0;t!==r;++t){const r=i[t],l=r.name;let h=c[l];if(void 0!==h)s[t]=h;else{if(h=s[t],void 0!==h){null===h._cacheIndex&&(++h.referenceCount,this._addInactiveBinding(h,o,l));continue}const i=e&&e._propertyBindings[t].binding.parsedPath;h=new rh(fh.create(n,l,i),r.ValueTypeName,r.getValueSize()),++h.referenceCount,this._addInactiveBinding(h,o,l),s[t]=h}a[t].resultBuffer=h.buffer}}_activateAction(t){if(!this._isActiveAction(t)){if(null===t._cacheIndex){const e=(t._localRoot||this._root).uuid,n=t._clip.uuid,i=this._actionsByClip[n];this._bindAction(t,i&&i.knownActions[0]),this._addInactiveAction(t,n,e)}const e=t._propertyBindings;for(let t=0,n=e.length;t!==n;++t){const n=e[t];0==n.useCount++&&(this._lendBinding(n),n.saveOriginalState())}this._lendAction(t)}}_deactivateAction(t){if(this._isActiveAction(t)){const e=t._propertyBindings;for(let t=0,n=e.length;t!==n;++t){const n=e[t];0==--n.useCount&&(n.restoreOriginalState(),this._takeBackBinding(n))}this._takeBackAction(t)}}_initMemoryManager(){this._actions=[],this._nActiveActions=0,this._actionsByClip={},this._bindings=[],this._nActiveBindings=0,this._bindingsByRootAndName={},this._controlInterpolants=[],this._nActiveControlInterpolants=0;const t=this;this.stats={actions:{get total(){return t._actions.length},get inUse(){return t._nActiveActions}},bindings:{get total(){return t._bindings.length},get inUse(){return t._nActiveBindings}},controlInterpolants:{get total(){return t._controlInterpolants.length},get inUse(){return t._nActiveControlInterpolants}}}}_isActiveAction(t){const e=t._cacheIndex;return null!==e&&e=0;--e)t[e].stop();return this}update(t){t*=this.timeScale;const e=this._actions,n=this._nActiveActions,i=this.time+=t,r=Math.sign(t),s=this._accuIndex^=1;for(let a=0;a!==n;++a){e[a]._update(i,t,r,s)}const a=this._bindings,o=this._nActiveBindings;for(let t=0;t!==o;++t)a[t].apply(s);return 
this}setTime(t){this.time=0;for(let t=0;tthis.max.x||t.ythis.max.y)}containsBox(t){return this.min.x<=t.min.x&&t.max.x<=this.max.x&&this.min.y<=t.min.y&&t.max.y<=this.max.y}getParameter(t,e){return e.set((t.x-this.min.x)/(this.max.x-this.min.x),(t.y-this.min.y)/(this.max.y-this.min.y))}intersectsBox(t){return!(t.max.xthis.max.x||t.max.ythis.max.y)}clampPoint(t,e){return e.copy(t).clamp(this.min,this.max)}distanceToPoint(t){return Sh.copy(t).clamp(this.min,this.max).sub(t).length()}intersect(t){return this.min.max(t.min),this.max.min(t.max),this}union(t){return this.min.min(t.min),this.max.max(t.max),this}translate(t){return this.min.add(t),this.max.add(t),this}equals(t){return t.min.equals(this.min)&&t.max.equals(this.max)}}Th.prototype.isBox2=!0;const Eh=new Lt,Ah=new Lt;class Lh{constructor(t=new Lt,e=new Lt){this.start=t,this.end=e}set(t,e){return this.start.copy(t),this.end.copy(e),this}copy(t){return this.start.copy(t.start),this.end.copy(t.end),this}getCenter(t){return t.addVectors(this.start,this.end).multiplyScalar(.5)}delta(t){return t.subVectors(this.end,this.start)}distanceSq(){return this.start.distanceToSquared(this.end)}distance(){return this.start.distanceTo(this.end)}at(t,e){return this.delta(e).multiplyScalar(t).add(this.start)}closestPointToPointParameter(t,e){Eh.subVectors(t,this.start),Ah.subVectors(this.end,this.start);const n=Ah.dot(Ah);let i=Ah.dot(Eh)/n;return e&&(i=st(i,0,1)),i}closestPointToPoint(t,e,n){const i=this.closestPointToPointParameter(t,e);return this.delta(n).multiplyScalar(i).add(this.start)}applyMatrix4(t){return this.start.applyMatrix4(t),this.end.applyMatrix4(t),this}equals(t){return t.start.equals(this.start)&&t.end.equals(this.end)}clone(){return(new this.constructor).copy(this)}}const Rh=new Lt;const Ch=new Lt,Ph=new se,Dh=new se;class Ih extends Ja{constructor(t){const e=Nh(t),n=new xn,i=[],r=[],s=new Ze(0,0,1),a=new Ze(0,1,0);for(let t=0;t.99999)this.quaternion.set(0,0,0,1);else if(t.y<-.99999)this.quaternion.set(1,0,0,0);else{Xh.set(t.z,0,-t.x).normalize();const e=Math.acos(t.y);this.quaternion.setFromAxisAngle(Xh,e)}}setLength(t,e=.2*t,n=.2*e){this.line.scale.set(1,Math.max(1e-4,t-e),1),this.line.updateMatrix(),this.cone.scale.set(n,e,n),this.cone.position.y=t,this.cone.updateMatrix()}setColor(t){this.line.material.color.set(t),this.cone.material.color.set(t)}copy(t){return super.copy(t,!1),this.line.copy(t.line),this.cone.copy(t.cone),this}},t.Audio=Kc,t.AudioAnalyser=ih,t.AudioContext=Uc,t.AudioListener=class extends Ce{constructor(){super(),this.type="AudioListener",this.context=Uc.getContext(),this.gain=this.context.createGain(),this.gain.connect(this.context.destination),this.filter=null,this.timeDelta=0,this._clock=new qc}getInput(){return this.gain}removeFilter(){return null!==this.filter&&(this.gain.disconnect(this.filter),this.filter.disconnect(this.context.destination),this.gain.connect(this.context.destination),this.filter=null),this}getFilter(){return this.filter}setFilter(t){return null!==this.filter?(this.gain.disconnect(this.filter),this.filter.disconnect(this.context.destination)):this.gain.disconnect(this.context.destination),this.filter=t,this.gain.connect(this.filter),this.filter.connect(this.context.destination),this}getMasterVolume(){return this.gain.gain.value}setMasterVolume(t){return this.gain.gain.setTargetAtTime(t,this.context.currentTime,.01),this}updateMatrixWorld(t){super.updateMatrixWorld(t);const 
e=this.context.listener,n=this.up;if(this.timeDelta=this._clock.getDelta(),this.matrixWorld.decompose(Jc,Yc,Zc),Qc.set(0,0,-1).applyQuaternion(Yc),e.positionX){const t=this.context.currentTime+this.timeDelta;e.positionX.linearRampToValueAtTime(Jc.x,t),e.positionY.linearRampToValueAtTime(Jc.y,t),e.positionZ.linearRampToValueAtTime(Jc.z,t),e.forwardX.linearRampToValueAtTime(Qc.x,t),e.forwardY.linearRampToValueAtTime(Qc.y,t),e.forwardZ.linearRampToValueAtTime(Qc.z,t),e.upX.linearRampToValueAtTime(n.x,t),e.upY.linearRampToValueAtTime(n.y,t),e.upZ.linearRampToValueAtTime(n.z,t)}else e.setPosition(Jc.x,Jc.y,Jc.z),e.setOrientation(Qc.x,Qc.y,Qc.z,n.x,n.y,n.z)}},t.AudioLoader=Hc,t.AxesHelper=Zh,t.AxisHelper=function(t){return console.warn("THREE.AxisHelper has been renamed to THREE.AxesHelper."),new Zh(t)},t.BackSide=1,t.BasicDepthPacking=3200,t.BasicShadowMap=0,t.BinaryTextureLoader=function(t){return console.warn("THREE.BinaryTextureLoader has been renamed to THREE.DataTextureLoader."),new cc(t)},t.Bone=La,t.BooleanKeyframeTrack=ql,t.BoundingBoxHelper=function(t,e){return console.warn("THREE.BoundingBoxHelper has been deprecated. Creating a THREE.BoxHelper instead."),new qh(t,e)},t.Box2=Th,t.Box3=Pt,t.Box3Helper=class extends Ja{constructor(t,e=16776960){const n=new Uint16Array([0,1,1,2,2,3,3,0,4,5,5,6,6,7,7,4,0,4,1,5,2,6,3,7]),i=new xn;i.setIndex(new tn(n,1)),i.setAttribute("position",new hn([1,1,1,-1,1,1,-1,-1,1,1,-1,1,1,1,-1,-1,1,-1,-1,-1,-1,1,-1,-1],3)),super(i,new Ua({color:e,toneMapped:!1})),this.box=t,this.type="Box3Helper",this.geometry.computeBoundingSphere()}updateMatrixWorld(t){const e=this.box;e.isEmpty()||(e.getCenter(this.position),e.getSize(this.scale),this.scale.multiplyScalar(.5),super.updateMatrixWorld(t))}},t.BoxBufferGeometry=Un,t.BoxGeometry=Un,t.BoxHelper=qh,t.BufferAttribute=tn,t.BufferGeometry=xn,t.BufferGeometryLoader=Ic,t.ByteType=1010,t.Cache=ec,t.Camera=Wn,t.CameraHelper=class extends Ja{constructor(t){const e=new xn,n=new Ua({color:16777215,vertexColors:!0,toneMapped:!1}),i=[],r=[],s={},a=new Ze(16755200),o=new Ze(16711680),l=new Ze(43775),c=new Ze(16777215),h=new Ze(3355443);function u(t,e,n){d(t,n),d(e,n)}function d(t,e){i.push(0,0,0),r.push(e.r,e.g,e.b),void 0===s[t]&&(s[t]=[]),s[t].push(i.length/3-1)}u("n1","n2",a),u("n2","n4",a),u("n4","n3",a),u("n3","n1",a),u("f1","f2",a),u("f2","f4",a),u("f4","f3",a),u("f3","f1",a),u("n1","f1",a),u("n2","f2",a),u("n3","f3",a),u("n4","f4",a),u("p","n1",o),u("p","n2",o),u("p","n3",o),u("p","n4",o),u("u1","u2",l),u("u2","u3",l),u("u3","u1",l),u("c","t",c),u("p","c",h),u("cn1","cn2",h),u("cn3","cn4",h),u("cf1","cf2",h),u("cf3","cf4",h),e.setAttribute("position",new hn(i,3)),e.setAttribute("color",new hn(r,3)),super(e,n),this.type="CameraHelper",this.camera=t,this.camera.updateProjectionMatrix&&this.camera.updateProjectionMatrix(),this.matrix=t.matrixWorld,this.matrixAutoUpdate=!1,this.pointMap=s,this.update()}update(){const 
t=this.geometry,e=this.pointMap;Vh.projectionMatrixInverse.copy(this.camera.projectionMatrixInverse),Wh("c",e,t,Vh,0,0,-1),Wh("t",e,t,Vh,0,0,1),Wh("n1",e,t,Vh,-1,-1,-1),Wh("n2",e,t,Vh,1,-1,-1),Wh("n3",e,t,Vh,-1,1,-1),Wh("n4",e,t,Vh,1,1,-1),Wh("f1",e,t,Vh,-1,-1,1),Wh("f2",e,t,Vh,1,-1,1),Wh("f3",e,t,Vh,-1,1,1),Wh("f4",e,t,Vh,1,1,1),Wh("u1",e,t,Vh,.7,1.1,-1),Wh("u2",e,t,Vh,-.7,1.1,-1),Wh("u3",e,t,Vh,0,2,-1),Wh("cf1",e,t,Vh,-1,0,1),Wh("cf2",e,t,Vh,1,0,1),Wh("cf3",e,t,Vh,0,-1,1),Wh("cf4",e,t,Vh,0,1,1),Wh("cn1",e,t,Vh,-1,0,-1),Wh("cn2",e,t,Vh,1,0,-1),Wh("cn3",e,t,Vh,0,-1,-1),Wh("cn4",e,t,Vh,0,1,-1),t.getAttribute("position").needsUpdate=!0}dispose(){this.geometry.dispose(),this.material.dispose()}},t.CanvasRenderer=function(){console.error("THREE.CanvasRenderer has been removed")},t.CanvasTexture=ao,t.CatmullRomCurve3=Eo,t.CineonToneMapping=3,t.CircleBufferGeometry=oo,t.CircleGeometry=oo,t.ClampToEdgeWrapping=u,t.Clock=qc,t.Color=Ze,t.ColorKeyframeTrack=Xl,t.CompressedTexture=so,t.CompressedTextureLoader=class extends rc{constructor(t){super(t)}load(t,e,n,i){const r=this,s=[],a=new so,o=new ac(this.manager);o.setPath(this.path),o.setResponseType("arraybuffer"),o.setRequestHeader(this.requestHeader),o.setWithCredentials(r.withCredentials);let l=0;function c(c){o.load(t[c],(function(t){const n=r.parse(t,!0);s[c]={width:n.width,height:n.height,format:n.format,mipmaps:n.mipmaps},l+=1,6===l&&(1===n.mipmapCount&&(a.minFilter=g),a.image=s,a.format=n.format,a.needsUpdate=!0,e&&e(a))}),n,i)}if(Array.isArray(t))for(let e=0,n=t.length;e65504&&(console.warn("THREE.DataUtils.toHalfFloat(): value exceeds 65504."),t=65504),Qh[0]=t;const e=Kh[0];let n=e>>16&32768,i=e>>12&2047;const r=e>>23&255;return r<103?n:r>142?(n|=31744,n|=(255==r?0:1)&&8388607&e,n):r<113?(i|=2048,n|=(i>>114-r)+(i>>113-r&1),n):(n|=r-112<<10|i>>1,n+=1&i,n)}},t.DecrementStencilOp=7683,t.DecrementWrapStencilOp=34056,t.DefaultLoadingManager=ic,t.DepthFormat=A,t.DepthStencilFormat=L,t.DepthTexture=qs,t.DirectionalLight=Tc,t.DirectionalLightHelper=class extends Ce{constructor(t,e,n){super(),this.light=t,this.light.updateMatrixWorld(),this.matrix=t.matrixWorld,this.matrixAutoUpdate=!1,this.color=n,void 0===e&&(e=1);let i=new xn;i.setAttribute("position",new hn([-e,e,0,e,e,0,e,-e,0,-e,-e,0,-e,e,0],3));const r=new Ua({fog:!1,toneMapped:!1});this.lightPlane=new ja(i,r),this.add(this.lightPlane),i=new xn,i.setAttribute("position",new hn([0,0,0,0,0,1],3)),this.targetLine=new ja(i,r),this.add(this.targetLine),this.update()}dispose(){this.lightPlane.geometry.dispose(),this.lightPlane.material.dispose(),this.targetLine.geometry.dispose(),this.targetLine.material.dispose()}update(){Uh.setFromMatrixPosition(this.light.matrixWorld),Hh.setFromMatrixPosition(this.light.target.matrixWorld),Gh.subVectors(Hh,Uh),this.lightPlane.lookAt(Hh),void 0!==this.color?(this.lightPlane.material.color.set(this.color),this.targetLine.material.color.set(this.color)):(this.lightPlane.material.color.copy(this.light.color),this.targetLine.material.color.copy(this.light.color)),this.targetLine.lookAt(Hh),this.targetLine.scale.z=Gh.length()}},t.DiscreteInterpolant=Wl,t.DodecahedronBufferGeometry=uo,t.DodecahedronGeometry=uo,t.DoubleSide=2,t.DstAlphaFactor=206,t.DstColorFactor=208,t.DynamicBufferAttribute=function(t,e){return console.warn("THREE.DynamicBufferAttribute has been removed. 
Use new THREE.BufferAttribute().setUsage( THREE.DynamicDrawUsage ) instead."),new tn(t,e).setUsage(Q)},t.DynamicCopyUsage=35050,t.DynamicDrawUsage=Q,t.DynamicReadUsage=35049,t.EdgesGeometry=vo,t.EdgesHelper=function(t,e){return console.warn("THREE.EdgesHelper has been removed. Use THREE.EdgesGeometry instead."),new Ja(new vo(t.geometry),new Ua({color:void 0!==e?e:16777215}))},t.EllipseCurve=xo,t.EqualDepth=4,t.EqualStencilFunc=514,t.EquirectangularReflectionMapping=a,t.EquirectangularRefractionMapping=o,t.Euler=fe,t.EventDispatcher=$,t.ExtrudeBufferGeometry=fl,t.ExtrudeGeometry=fl,t.FaceColors=1,t.FileLoader=ac,t.FlatShading=1,t.Float16BufferAttribute=cn,t.Float32Attribute=function(t,e){return console.warn("THREE.Float32Attribute has been removed. Use new THREE.Float32BufferAttribute() instead."),new hn(t,e)},t.Float32BufferAttribute=hn,t.Float64Attribute=function(t,e){return console.warn("THREE.Float64Attribute has been removed. Use new THREE.Float64BufferAttribute() instead."),new un(t,e)},t.Float64BufferAttribute=un,t.FloatType=M,t.Fog=Ks,t.FogExp2=Qs,t.Font=function(){console.error("THREE.Font has been moved to /examples/jsm/loaders/FontLoader.js")},t.FontLoader=function(){console.error("THREE.FontLoader has been moved to /examples/jsm/loaders/FontLoader.js")},t.FramebufferTexture=ro,t.FrontSide=0,t.Frustum=ni,t.GLBufferAttribute=bh,t.GLSL1="100",t.GLSL3=K,t.GreaterDepth=6,t.GreaterEqualDepth=5,t.GreaterEqualStencilFunc=518,t.GreaterStencilFunc=516,t.GridHelper=Oh,t.Group=Vs,t.HalfFloatType=w,t.HemisphereLight=dc,t.HemisphereLightHelper=class extends Ce{constructor(t,e,n){super(),this.light=t,this.light.updateMatrixWorld(),this.matrix=t.matrixWorld,this.matrixAutoUpdate=!1,this.color=n;const i=new xl(e);i.rotateY(.5*Math.PI),this.material=new Qe({wireframe:!0,fog:!1,toneMapped:!1}),void 0===this.color&&(this.material.vertexColors=!0);const r=i.getAttribute("position"),s=new Float32Array(3*r.count);i.setAttribute("color",new tn(s,3)),this.add(new Fn(i,this.material)),this.update()}dispose(){this.children[0].geometry.dispose(),this.children[0].material.dispose()}update(){const t=this.children[0];if(void 0!==this.color)this.material.color.set(this.color);else{const e=t.geometry.getAttribute("color");Bh.copy(this.light.color),Fh.copy(this.light.groundColor);for(let t=0,n=e.count;t0){const n=new nc(e);r=new oc(n),r.setCrossOrigin(this.crossOrigin);for(let e=0,n=t.length;e0){i=new oc(this.manager),i.setCrossOrigin(this.crossOrigin);for(let e=0,i=t.length;eNumber.EPSILON){if(l<0&&(n=e[s],o=-o,a=e[r],l=-l),t.ya.y)continue;if(t.y===n.y){if(t.x===n.x)return!0}else{const e=l*(t.x-n.x)-o*(t.y-n.y);if(0===e)return!0;if(e<0)continue;i=!i}}else{if(t.y!==n.y)continue;if(a.x<=t.x&&t.x<=n.x||n.x<=t.x&&t.x<=a.x)return!0}}return i}const r=dl.isClockWise,s=this.subPaths;if(0===s.length)return[];if(!0===e)return n(s);let a,o,l;const c=[];if(1===s.length)return o=s[0],l=new Ho,l.curves=o.curves,c.push(l),c;let h=!r(s[0].getPoints());h=t?!h:h;const u=[],d=[];let p,m,f=[],g=0;d[g]=void 0,f[g]=[];for(let e=0,n=s.length;e1){let t=!1;const e=[];for(let t=0,e=d.length;t0&&(t||(f=u))}for(let t=0,e=d.length;t +

Admin-Bereich

+ + +
+
+ + + + +
+ + +
+
+

Benutzerverwaltung

+ +
+ +
+ + + + + + + + + + + + + {% for user in users %} + + + + + + + + + {% endfor %} + +
IDBenutzernameE-MailAdminGedankenAktionen
{{ user.id }}{{ user.username }}{{ user.email }} + {% if user.is_admin %} + Admin + {% else %} + User + {% endif %} + {{ user.thoughts|length }} + + +
+
+
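The table above assumes the admin view passes `users`, `nodes`, `thoughts`, and `now` into the template. A minimal sketch of such a route, under the assumption that the models live in models.py and an `admin_required` decorator exists in app.py (module layout and names are illustrative, not confirmed by this diff):

```python
# Sketch only: assumed imports; the diff does not show app.py/models.py.
from datetime import datetime

from flask import render_template
from flask_login import login_required

from app import app, admin_required            # assumed layout
from models import User, MindMapNode, Thought  # assumed models

@app.route('/admin')
@login_required
@admin_required
def admin():
    users = User.query.order_by(User.id).all()
    nodes = MindMapNode.query.order_by(MindMapNode.id).all()
    thoughts = Thought.query.order_by(Thought.timestamp.desc()).all()
    # `now` feeds the timestamps of the static demo entries in the system-log card
    return render_template('admin.html', users=users, nodes=nodes,
                           thoughts=thoughts, now=datetime.now())
```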
+ + +
+
+

Mindmap-Knoten Verwaltung

+ +
+ +
+ + + + + + + + + + + + {% for node in nodes %} + + + + + + + + {% endfor %} + +
IDNameElternknotenGedankenAktionen
{{ node.id }}{{ node.name }} + {% if node.parent %} + {{ node.parent.name }} + {% else %} + Wurzelknoten + {% endif %} + {{ node.thoughts|length }} + + +
+
+
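The "Elternknoten" column relies on `node.parent` being either another node or `None` (rendered as "Wurzelknoten"). A plausible self-referential declaration for this, sketched under the assumption of a standard SQLAlchemy declarative model (not shown in this diff):

```python
# Hypothetical excerpt from models.py: a self-referential hierarchy,
# so node.parent is None for root nodes ("Wurzelknoten" in the table).
class MindMapNode(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(100), nullable=False)
    parent_id = db.Column(db.Integer, db.ForeignKey('mind_map_node.id'))
    parent = db.relationship('MindMapNode', remote_side=[id],
                             backref='children')
```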
+ + +
+
+

Gedanken-Verwaltung

+
+
+ +
+ +
+
+ +
+
+ +
+ + + + + + + + + + + + + {% for thought in thoughts %} + + + + + + + + + {% endfor %} + +
IDTitelAutorDatumBewertungAktionen
{{ thought.id }}{{ thought.title }}{{ thought.author.username }}{{ thought.timestamp.strftime('%d.%m.%Y') }} +
+ {{ "%.1f"|format(thought.average_rating) }} +
+ {# halber Stern nur bei Nachkommarest >= 0.5 #} {% for i in range(5) %} + {% if i < thought.average_rating|int %} + + {% elif (thought.average_rating - i) >= 0.5 %} + + {% else %} + + {% endif %} + {% endfor %} +
+
+
+ + + +
+
+
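Both the numeric value and the star row assume a `thought.average_rating` attribute. One plausible implementation is a read-only property over the related rating rows; a sketch, assuming a `ratings` relationship whose rows carry a numeric `value`:

```python
# Hypothetical property on the Thought model in models.py.
class Thought(db.Model):
    # ... columns and relationships as defined elsewhere ...

    @property
    def average_rating(self):
        if not self.ratings:   # no ratings yet -> 0.0 keeps the template simple
            return 0.0
        return sum(r.value for r in self.ratings) / len(self.ratings)
```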
+ + +
+

Systemstatistiken

+ +
+
+
+
+ +
+
+

Benutzer

+

{{ users|length }}

+
+
+
+ +
+
+
+ +
+
+

Knoten

+

{{ nodes|length }}

+
+
+
+ +
+
+
+ +
+
+

Gedanken

+

{{ thoughts|length }}

+
+
+
+ +
+
+
+ +
+
+

Kommentare

+

+ {# namespace() noetig: ein einfaches set ueberlebt den Schleifen-Scope in Jinja2 nicht #} + {% set ns = namespace(comment_count=0) %} + {% for thought in thoughts %} + {% set ns.comment_count = ns.comment_count + thought.comments|length %} + {% endfor %} + {{ ns.comment_count }} +

+
+
+
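With the `namespace()` fix above the sum works in the template, but the same figure can also be computed in the route and passed in, which keeps the template free of arithmetic (one-liner sketch, assuming the `comments` relationship):

```python
comment_count = sum(len(t.comments) for t in thoughts)  # pass via render_template(..., comment_count=comment_count)
```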
+
+ +
+
+

Aktive Benutzer

+
+

Hier würde ein Aktivitätsdiagramm angezeigt werden

+
+
+ +
+

Gedanken pro Kategorie

+
+

Hier würde eine Verteilungsstatistik angezeigt werden

+
+
+
+
+
+ + +
+

System-Log

+
+
[INFO] [{{ now.strftime('%Y-%m-%d %H:%M:%S') }}] System gestartet
+
[INFO] [{{ now.strftime('%Y-%m-%d %H:%M:%S') }}] Admin-Bereich aufgerufen von {{ current_user.username }}
+
[WARN] [{{ now.strftime('%Y-%m-%d %H:%M:%S') }}] Hohe Serverauslastung erkannt
+
[INFO] [{{ now.strftime('%Y-%m-%d %H:%M:%S') }}] Backup erfolgreich erstellt
+
[ERROR] [{{ now.strftime('%Y-%m-%d %H:%M:%S') }}] API-Zugriffsfehler (Timeout) bei externer Anfrage
+
+
+ +{% endblock %} + +{% block extra_js %} + +{% endblock %} \ No newline at end of file diff --git a/templates/agb.html b/templates/agb.html new file mode 100644 index 0000000..06de99c --- /dev/null +++ b/templates/agb.html @@ -0,0 +1,71 @@ +{% extends "base.html" %} + +{% block title %}AGB{% endblock %} + +{% block content %} +
+
+

Allgemeine Geschäftsbedingungen

+ +
+

1. Geltungsbereich

+

Diese Allgemeinen Geschäftsbedingungen (nachfolgend "AGB") gelten für die Nutzung der MindMap-Plattform (nachfolgend "Plattform"), die von der MindMap GmbH, Musterstraße 123, 12345 Musterstadt (nachfolgend "Anbieter") betrieben wird.

+

Mit der Registrierung und/oder Nutzung der Plattform erkennt der Nutzer diese AGB an. Die Nutzung der Plattform ist nur zulässig, wenn der Nutzer diese AGB akzeptiert.

+
+ +
+

2. Leistungsbeschreibung

+

Die Plattform bietet dem Nutzer die Möglichkeit, komplexe Informationen in Form von Mindmaps zu visualisieren, zu organisieren und zu teilen. Der genaue Funktionsumfang ergibt sich aus der jeweiligen Leistungsbeschreibung auf der Website des Anbieters.

+

Der Anbieter ist berechtigt, die angebotenen Dienste zu ändern, neue Dienste unentgeltlich oder entgeltlich verfügbar zu machen und die Bereitstellung unentgeltlicher Dienste einzustellen. Der Anbieter wird hierbei jeweils auf die berechtigten Interessen des Nutzers Rücksicht nehmen.

+
+ +
+

3. Registrierung und Nutzerkonto

+

Die Nutzung bestimmter Funktionen der Plattform setzt die Registrierung eines Nutzerkontos voraus. Die Registrierung ist nur volljährigen und voll geschäftsfähigen natürlichen Personen erlaubt.

+

Der Nutzer verpflichtet sich, bei der Registrierung wahrheitsgemäße und vollständige Angaben zu machen und diese Daten stets aktuell zu halten. Es ist nicht gestattet, mehrere Nutzerkonten zu erstellen.

+

Der Nutzer ist verpflichtet, seine Zugangsdaten geheim zu halten und vor dem Zugriff durch unbefugte Dritte zu schützen. Der Anbieter wird den Nutzer niemals nach seinem Passwort fragen.

+
+ +
+

4. Nutzungsrechte

+

Der Anbieter gewährt dem Nutzer für die Dauer der Vertragslaufzeit ein einfaches, nicht übertragbares Recht zur Nutzung der Plattform im vertraglich vereinbarten Umfang.

+

Der Nutzer räumt dem Anbieter an den von ihm auf der Plattform eingestellten Inhalten ein einfaches, übertragbares, unterlizenzierbares, räumlich und zeitlich unbeschränktes Nutzungsrecht ein, soweit dies für den Betrieb der Plattform erforderlich ist.

+

Der Nutzer garantiert, dass er über alle Rechte an den von ihm eingestellten Inhalten verfügt und durch diese keine Rechte Dritter verletzt werden.

+
+ +
+

5. Pflichten des Nutzers

+

Der Nutzer verpflichtet sich, die Plattform nur im Einklang mit diesen AGB und den geltenden Gesetzen zu nutzen. Insbesondere ist es dem Nutzer untersagt:

+
+
  • die Plattform für rechtswidrige oder betrügerische Zwecke zu nutzen
  • rechtswidrige, beleidigende, diskriminierende oder anderweitig anstößige Inhalte zu verbreiten
  • Schadsoftware, Viren oder andere schädliche Computercodes zu verbreiten
  • die normale Funktion der Plattform zu stören oder übermäßig zu belasten
  • auf die Plattform mit automatisierten Mitteln zuzugreifen (wie z. B. Bots, Scraper)
  • die Plattform zu reverse-engineeren, zu dekompilieren oder zu disassemblieren
+

Der Anbieter behält sich das Recht vor, bei Verstößen gegen diese Pflichten entsprechende Maßnahmen zu ergreifen, einschließlich der Sperrung des Nutzerkontos.

+
+ +
+

6. Verfügbarkeit und Wartung

+

Der Anbieter ist bemüht, eine hohe Verfügbarkeit der Plattform zu gewährleisten, kann jedoch keine unterbrechungsfreie Verfügbarkeit garantieren. Insbesondere können Wartungsarbeiten, Sicherheits- oder Kapazitätsprobleme sowie Ereignisse, die außerhalb des Einflussbereichs des Anbieters liegen, zu vorübergehenden Unterbrechungen führen.

+

Der Anbieter wird planmäßige Wartungsarbeiten, sofern möglich, vorher ankündigen und zu Zeiten durchführen, in denen die Nutzung der Plattform typischerweise gering ist.

+
+ +
+

7. Haftung

+

Der Anbieter haftet unbeschränkt für Vorsatz und grobe Fahrlässigkeit sowie nach dem Produkthaftungsgesetz. Für leichte Fahrlässigkeit haftet der Anbieter nur bei Verletzung einer wesentlichen Vertragspflicht und der Höhe nach beschränkt auf die bei Vertragsschluss vorhersehbaren und vertragstypischen Schäden. Wesentliche Vertragspflichten sind solche, deren Erfüllung die ordnungsgemäße Durchführung des Vertrags überhaupt erst ermöglicht und auf deren Einhaltung der Nutzer regelmäßig vertrauen darf.

+

Diese Haftungsbeschränkung gilt nicht für Schäden aus der Verletzung des Lebens, des Körpers oder der Gesundheit.

+
+ +
+

8. Schlussbestimmungen

+

Es gilt das Recht der Bundesrepublik Deutschland unter Ausschluss des UN-Kaufrechts.

+

Sollten einzelne Bestimmungen dieser AGB unwirksam sein oder werden, bleibt die Wirksamkeit der übrigen Bestimmungen unberührt.

+

Der Anbieter behält sich vor, diese AGB jederzeit zu ändern. Änderungen werden dem Nutzer rechtzeitig vor ihrem Inkrafttreten mitgeteilt. Die Änderungen gelten als akzeptiert, wenn der Nutzer ihnen nicht innerhalb von vier Wochen nach Erhalt der Mitteilung widerspricht.

+

Stand: Mai 2023

+
+
+
+{% endblock %} \ No newline at end of file diff --git a/templates/base.html b/templates/base.html new file mode 100644 index 0000000..de71aea --- /dev/null +++ b/templates/base.html @@ -0,0 +1,614 @@ + + + + + + Systades - {% block title %}{% endblock %} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + {% block extra_css %}{% endblock %} + + + + + + + +
+ + + + +
+
+ + Start + + + Mindmap + + + Suche + + + + {% if current_user.is_authenticated %} + + Profil + + {% endif %} +
+
+ + +
+ {% block content %}{% endblock %} +
+ + + +
+ + + {% block scripts %}{% endblock %} + + + + + + + + \ No newline at end of file diff --git a/templates/datenschutz.html b/templates/datenschutz.html new file mode 100644 index 0000000..32d090f --- /dev/null +++ b/templates/datenschutz.html @@ -0,0 +1,64 @@ +{% extends "base.html" %} + +{% block title %}Datenschutz{% endblock %} + +{% block content %} +
+
+

Datenschutzerklärung

+ +
+

1. Datenschutz auf einen Blick

+ +

Allgemeine Hinweise

+

Die folgenden Hinweise geben einen einfachen Überblick darüber, was mit Ihren personenbezogenen Daten passiert, wenn Sie diese Website besuchen. Personenbezogene Daten sind alle Daten, mit denen Sie persönlich identifiziert werden können. Ausführliche Informationen zum Thema Datenschutz entnehmen Sie unserer unter diesem Text aufgeführten Datenschutzerklärung.

+ +

Datenerfassung auf dieser Website

+

Wer ist verantwortlich für die Datenerfassung auf dieser Website?

+

Die Datenverarbeitung auf dieser Website erfolgt durch den Websitebetreiber. Dessen Kontaktdaten können Sie dem Impressum dieser Website entnehmen.

+ +

Wie erfassen wir Ihre Daten?

+

Ihre Daten werden zum einen dadurch erhoben, dass Sie uns diese mitteilen. Hierbei kann es sich z. B. um Daten handeln, die Sie in ein Kontaktformular eingeben.

+

Andere Daten werden automatisch oder nach Ihrer Einwilligung beim Besuch der Website durch unsere IT-Systeme erfasst. Das sind vor allem technische Daten (z. B. Internetbrowser, Betriebssystem oder Uhrzeit des Seitenaufrufs). Die Erfassung dieser Daten erfolgt automatisch, sobald Sie diese Website betreten.

+ +

Wofür nutzen wir Ihre Daten?

+

Ein Teil der Daten wird erhoben, um eine fehlerfreie Bereitstellung der Website zu gewährleisten. Andere Daten können zur Analyse Ihres Nutzerverhaltens verwendet werden.

+ +

Welche Rechte haben Sie bezüglich Ihrer Daten?

+

Sie haben jederzeit das Recht, unentgeltlich Auskunft über Herkunft, Empfänger und Zweck Ihrer gespeicherten personenbezogenen Daten zu erhalten. Sie haben außerdem ein Recht, die Berichtigung oder Löschung dieser Daten zu verlangen. Wenn Sie eine Einwilligung zur Datenverarbeitung erteilt haben, können Sie diese Einwilligung jederzeit für die Zukunft widerrufen. Außerdem haben Sie das Recht, unter bestimmten Umständen die Einschränkung der Verarbeitung Ihrer personenbezogenen Daten zu verlangen.

+
+ +
+

2. Allgemeine Hinweise und Pflichtinformationen

+ +

Datenschutz

+

Die Betreiber dieser Seiten nehmen den Schutz Ihrer persönlichen Daten sehr ernst. Wir behandeln Ihre personenbezogenen Daten vertraulich und entsprechend der gesetzlichen Datenschutzvorschriften sowie dieser Datenschutzerklärung.

+

Wenn Sie diese Website benutzen, werden verschiedene personenbezogene Daten erhoben. Personenbezogene Daten sind Daten, mit denen Sie persönlich identifiziert werden können. Die vorliegende Datenschutzerklärung erläutert, welche Daten wir erheben und wofür wir sie nutzen. Sie erläutert auch, wie und zu welchem Zweck das geschieht.

+

Wir weisen darauf hin, dass die Datenübertragung im Internet (z. B. bei der Kommunikation per E-Mail) Sicherheitslücken aufweisen kann. Ein lückenloser Schutz der Daten vor dem Zugriff durch Dritte ist nicht möglich.

+ +

Hinweis zur verantwortlichen Stelle

+

Die verantwortliche Stelle für die Datenverarbeitung auf dieser Website ist:

+

+ MindMap GmbH
+ Musterstraße 123
+ 12345 Musterstadt
+ Deutschland +

+

+ Telefon: +49 (0) 123 456789
+ E-Mail: info@mindmap-example.com +

+

Verantwortliche Stelle ist die natürliche oder juristische Person, die allein oder gemeinsam mit anderen über die Zwecke und Mittel der Verarbeitung von personenbezogenen Daten (z. B. Namen, E-Mail-Adressen o. Ä.) entscheidet.

+
+ +
+

3. Datenerfassung auf dieser Website

+ +

Cookies

+

Unsere Internetseiten verwenden so genannte "Cookies". Cookies sind kleine Datenpakete und richten auf Ihrem Endgerät keinen Schaden an. Sie werden entweder vorübergehend für die Dauer einer Sitzung (Session-Cookies) oder dauerhaft (permanente Cookies) auf Ihrem Endgerät gespeichert. Session-Cookies werden nach Ende Ihres Besuchs automatisch gelöscht. Permanente Cookies bleiben auf Ihrem Endgerät gespeichert, bis Sie diese selbst löschen oder eine automatische Löschung durch Ihren Webbrowser erfolgt.

+

Cookies können von uns (First-Party-Cookies) oder von Drittunternehmen stammen (sog. Third-Party-Cookies). Third-Party-Cookies ermöglichen die Einbindung bestimmter Dienstleistungen von Drittunternehmen innerhalb von Webseiten (z. B. Cookies zur Abwicklung von Zahlungsdienstleistungen).

+

Die meisten Browser bieten Ihnen die Möglichkeit, das Setzen von Cookies für bestimmte Webseiten zu verbieten oder Cookies vor dem Akzeptieren jeweils einzeln zu bestätigen. Ebenso können Sie sich benachrichtigen lassen, sobald Ihr Browser ein neues Cookie empfängt.

+
+
+
+{% endblock %} \ No newline at end of file diff --git a/templates/errors/403.html b/templates/errors/403.html new file mode 100644 index 0000000..1d0f1e0 --- /dev/null +++ b/templates/errors/403.html @@ -0,0 +1,33 @@ +{% extends "base.html" %} + +{% block title %}403 - Zugriff verweigert{% endblock %} + +{% block content %} +
+
+
+
+
+

403

+
+ +
+
+
+

Zugriff verweigert

+

Sie haben nicht die erforderlichen Berechtigungen, um auf diese Seite zuzugreifen. Bitte melden Sie sich an oder nutzen Sie ein Konto mit entsprechenden Rechten.

+ +
+
+
+

Benötigen Sie Hilfe? Kontaktieren Sie uns

+
+
+{% endblock %} \ No newline at end of file diff --git a/templates/errors/404.html b/templates/errors/404.html new file mode 100644 index 0000000..f9efbf1 --- /dev/null +++ b/templates/errors/404.html @@ -0,0 +1,33 @@ +{% extends "base.html" %} + +{% block title %}404 - Seite nicht gefunden{% endblock %} + +{% block content %} +
+
+
+
+
+

404

+
+ +
+
+
+

Seite nicht gefunden

+

Die gesuchte Seite existiert nicht oder wurde verschoben. Bitte prüfen Sie die URL oder nutzen Sie die Navigation.

+ +
+
+
+

Benötigen Sie Hilfe? Kontaktieren Sie uns

+
+
+{% endblock %} \ No newline at end of file diff --git a/templates/errors/429.html b/templates/errors/429.html new file mode 100644 index 0000000..b3c68b8 --- /dev/null +++ b/templates/errors/429.html @@ -0,0 +1,33 @@ +{% extends "base.html" %} + +{% block title %}429 - Zu viele Anfragen{% endblock %} + +{% block content %} +
+
+
+
+
+

429

+
+ +
+
+
+

Zu viele Anfragen

+

Sie haben zu viele Anfragen in kurzer Zeit gestellt. Bitte warten Sie einen Moment und versuchen Sie es dann erneut.

+ +
+
+
+

Benötigen Sie Hilfe? Kontaktieren Sie uns

+
+
+{% endblock %} \ No newline at end of file diff --git a/templates/errors/500.html b/templates/errors/500.html new file mode 100644 index 0000000..d8269ee --- /dev/null +++ b/templates/errors/500.html @@ -0,0 +1,33 @@ +{% extends "base.html" %} + +{% block title %}500 - Serverfehler{% endblock %} + +{% block content %} +
+
+
+
+
+

500

+
+ +
+
+
+

Interner Serverfehler

+

Es ist ein Fehler auf unserem Server aufgetreten. Unser Team wurde informiert und arbeitet bereits an einer Lösung. Bitte versuchen Sie es später erneut.

+ +
+
+
+

Benötigen Sie Hilfe? Kontaktieren Sie uns

+
+
+{% endblock %} \ No newline at end of file diff --git a/templates/impressum.html b/templates/impressum.html new file mode 100644 index 0000000..f34bca7 --- /dev/null +++ b/templates/impressum.html @@ -0,0 +1,64 @@ +{% extends "base.html" %} + +{% block title %}Impressum{% endblock %} + +{% block content %} +
+
+

Impressum

+ +
+

Angaben gemäß § 5 TMG und § 55 RStV

+

+ Diese Website wird privat betrieben von:
+ Marwin Medczinski
+ Fasanenstraße 30
+ 16761 Hennigsdorf
+ Deutschland +

+ + +

+ +

+ Kontakt:
+ Telefon: +49 (0) 173 8041824
+ E-Mail: medczinski.marwin@gmx.de +

+
+ +
+

Inhaltlich Verantwortlicher gemäß § 55 Abs. 2 RStV

+

+ Marwin Medczinski
+ Fasanenstraße 30
+ 16761 Hennigsdorf +

+
+ +
+

Hinweis zur Streitbeilegung

+

Die Europäische Kommission stellt eine Plattform zur Online-Streitbeilegung (OS) bereit: https://ec.europa.eu/consumers/odr/

+

Ich bin nicht bereit oder verpflichtet, an Streitbeilegungsverfahren vor einer Verbraucherschlichtungsstelle teilzunehmen.

+
+ +
+

Haftungsausschluss

+ +

Haftung für Inhalte

+

Die Inhalte dieser Seiten wurden mit größter Sorgfalt erstellt. Für die Richtigkeit, Vollständigkeit und Aktualität der Inhalte kann ich jedoch keine Gewähr übernehmen. Als Diensteanbieter bin ich gemäß § 7 Abs. 1 TMG für eigene Inhalte auf diesen Seiten nach den allgemeinen Gesetzen verantwortlich. Nach §§ 8 bis 10 TMG bin ich als Diensteanbieter jedoch nicht verpflichtet, übermittelte oder gespeicherte fremde Informationen zu überwachen oder nach Umständen zu forschen, die auf eine rechtswidrige Tätigkeit hinweisen.

+

Verpflichtungen zur Entfernung oder Sperrung der Nutzung von Informationen nach den allgemeinen Gesetzen bleiben hiervon unberührt. Eine diesbezügliche Haftung ist jedoch erst ab dem Zeitpunkt der Kenntnis einer konkreten Rechtsverletzung möglich. Bei Bekanntwerden von entsprechenden Rechtsverletzungen werde ich diese Inhalte umgehend entfernen.

+ +

Haftung für Links

+

Diese Website enthält Links zu externen Webseiten Dritter, auf deren Inhalte ich keinen Einfluss habe. Deshalb kann ich für diese fremden Inhalte auch keine Gewähr übernehmen. Für die Inhalte der verlinkten Seiten ist stets der jeweilige Anbieter oder Betreiber der Seiten verantwortlich. Die verlinkten Seiten wurden zum Zeitpunkt der Verlinkung auf mögliche Rechtsverstöße überprüft. Rechtswidrige Inhalte waren zum Zeitpunkt der Verlinkung nicht erkennbar.

+

Eine permanente inhaltliche Kontrolle der verlinkten Seiten ist jedoch ohne konkrete Anhaltspunkte einer Rechtsverletzung nicht zumutbar. Bei Bekanntwerden von Rechtsverletzungen werde ich derartige Links umgehend entfernen.

+
+ +
+

Urheberrecht

+

Die durch mich erstellten Inhalte und Werke auf diesen Seiten unterliegen dem deutschen Urheberrecht. Die Vervielfältigung, Bearbeitung, Verbreitung und jede Art der Verwertung außerhalb der Grenzen des Urheberrechtes bedürfen meiner schriftlichen Zustimmung. Downloads und Kopien dieser Seite sind nur für den privaten, nicht kommerziellen Gebrauch gestattet.

+

Soweit die Inhalte auf dieser Seite nicht von mir erstellt wurden, werden die Urheberrechte Dritter beachtet. Insbesondere werden Inhalte Dritter als solche gekennzeichnet. Sollten Sie trotzdem auf eine Urheberrechtsverletzung aufmerksam werden, bitte ich um einen entsprechenden Hinweis. Bei Bekanntwerden von Rechtsverletzungen werde ich derartige Inhalte umgehend entfernen.

+
+
+
+{% endblock %} \ No newline at end of file diff --git a/templates/index.html b/templates/index.html new file mode 100644 index 0000000..8f183f1 --- /dev/null +++ b/templates/index.html @@ -0,0 +1,520 @@ +{% extends "base.html" %} + +{% block title %}Wissensnetzwerk{% endblock %} + +{% block extra_css %} + +{% endblock %} + +{% block content %} + +
+ +
+
+

+
+ Wissen +
+
+ neu +
+
+ vernetzen +
+
+
+

+
+

+ Erkunde komplexe Ideen visuell, schaffe Verbindungen und teile deine Gedanken + in einem interaktiven Wissensnetzwerk. +

+
+
+ + + + + Mindmap erkunden + + + + + {% if not current_user.is_authenticated %} + + + + + Konto erstellen + + + + + {% endif %} +
+
+ + +
+
+
+
Systades
+
WISSEN VERNETZEN
+
+
+
+
+
+ + +
+
+ +
+
+

Was ist Systades?

+

+ Ein modernes Werkzeug zum Visualisieren, Erforschen und Teilen von Wissen in einem interaktiven + Netzwerk, das dir hilft, Verbindungen zu entdecken und dein Wissen zu organisieren. +

+
+ + +
+ + + + + + + + +
+
+
+ + +
+
+ +
+
+

Dein KI-Assistent

+

+ Unser integrierter KI-Assistent hilft dir, Wissen zu organisieren, Verbindungen zu finden und + Einsichten zu gewinnen. +

+
+ + +
+
+ +
+
+
+ +
+ Systades Assistent (4o-mini) +
+
+ +
+
+ + +
+ +
+
+ +
+
+
+

Hallo! Ich bin dein Systades-Assistent. Wie kann ich dir heute helfen?

+

Du kannst mir Fragen zu:

+
+
  • Gedanken in der Datenbank
  • Kategorien und Wissensgebieten
  • Mindmaps und Visualisierungsmöglichkeiten
+

stellen.

+
+
+
+ + +
+
+

+ Kann ich mit deiner Hilfe eine Mindmap zum Thema Künstliche Intelligenz erstellen? +

+
+
+ +
+
+ + +
+
+ +
+
+
+

Ja, natürlich! Ich kann dir dabei helfen, eine Mindmap zum Thema Künstliche Intelligenz zu erstellen.

+

Du kannst wie folgt vorgehen:

+
+
  1. Gehe zur Mindmap-Ansicht
  2. Suche nach dem Knoten "Künstliche Intelligenz" unter der Kategorie "Technologie"
  3. Füge diesen Knoten zu deiner persönlichen Mindmap hinzu
  4. Ergänze verwandte Themen wie Machine Learning, Neural Networks oder Data Science
+

Soll ich dir noch mehr spezifische Informationen zu KI-Teilgebieten geben?

+
+
+
+
+ + +
+
+ + +
+ +
+ Beispiele: + + + +
+
+
+
+ + +
+ +
+
+
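The chat panel above is static demo markup. As a rough sketch of how the input field could talk to the backend, here is a minimal client call in Python; the endpoint path /api/assistant, the payload shape, and the reply field are assumptions for illustration, not a confirmed API:

import requests

def ask_assistant(message: str, base_url: str = "http://127.0.0.1:5000") -> str:
    """Send one chat message to the (assumed) assistant endpoint and return the reply."""
    response = requests.post(
        f"{base_url}/api/assistant",
        json={"message": message},      # payload shape is an assumption
        timeout=30,
    )
    response.raise_for_status()
    return response.json().get("reply", "")  # reply field name is an assumption

if __name__ == "__main__":
    print(ask_assistant("Erstelle eine Mindmap zum Thema Künstliche Intelligenz"))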
+ + +
+
+ +
+
+

So funktioniert's

+

+ In wenigen einfachen Schritten kannst du mit Systades beginnen, dein Wissen zu organisieren. +

+
+ + +
+ + + + + + + + +
+
+
+ + +
+
+ +
+

Bereit, dein Wissen zu vernetzen?

+

+ Tritt unserer wachsenden Community bei und entdecke eine neue Art, Wissen zu organisieren und zu teilen. +

+
+ + Mindmap erkunden + + {% if not current_user.is_authenticated %} + + Konto erstellen + + {% endif %} +
+
+
+{% endblock %} + +{% block extra_js %} + +{% endblock %} \ No newline at end of file diff --git a/templates/layout.html b/templates/layout.html new file mode 100644 index 0000000..33d7242 --- /dev/null +++ b/templates/layout.html @@ -0,0 +1,11 @@ + +
+ +
+ + +
+ +
\ No newline at end of file diff --git a/templates/login.html b/templates/login.html new file mode 100644 index 0000000..903afa8 --- /dev/null +++ b/templates/login.html @@ -0,0 +1,55 @@ +{% extends "base.html" %} + +{% block title %}Anmelden{% endblock %} + +{% block content %} +
+
+
+
+

+ + Anmelden +

+ +
+
+ +
+
+ +
+ +
+
+ +
+ +
+
+ +
+ +
+
+ +
+ +
+ +
+

Noch kein Konto? Registrieren

+
+
+
+
+
+
+{% endblock %} \ No newline at end of file diff --git a/templates/mindmap.html b/templates/mindmap.html new file mode 100644 index 0000000..fb1dea4 --- /dev/null +++ b/templates/mindmap.html @@ -0,0 +1,234 @@ + + + + + + Interaktive Mindmap + + + + + + + + + + + + + +
+
+

Interaktive Mindmap

+
+ +
+
+ +
+ + + + + + + +
+ +
+ +
+ +
+ +
+ Mindmap-Anwendung © 2023 +
+
+ + + + + + + + \ No newline at end of file diff --git a/templates/my_account.html b/templates/my_account.html new file mode 100644 index 0000000..3359758 --- /dev/null +++ b/templates/my_account.html @@ -0,0 +1,320 @@ +{% extends "base.html" %} + +{% block title %}Meine Merkliste{% endblock %} + +{% block extra_css %} + +{% endblock %} + +{% block content %} + +
+ +
+

+ Meine Merkliste +

+

+ Deine persönliche Sammlung gemerkter Wissensbereiche und Beiträge +

+
+ + +
+

+ + Meine Mindmap +

+ +
+
+
+
+ +
+

Deine persönliche Mindmap ist leer

+

+ Merke dir Wissensbereiche und Gedanken in der Hauptmindmap, um deine persönliche Mindmap zu erstellen. +

+ + Zur Mindmap + +
+
+
+
+ + +
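The empty state above tells the user to bookmark ("merken") nodes from the main mindmap. A minimal sketch of the server side of such an action, assuming an association model along the lines of UserMindmapNode in models.py; its exact columns are not visible in this diff and are assumptions:

from flask import jsonify
from flask_login import current_user, login_required

from app import app
from models import db, MindMapNode, UserMindmapNode  # UserMindmapNode fields assumed

@app.route('/api/mindmap/bookmark/<int:node_id>', methods=['POST'])  # path is hypothetical
@login_required
def bookmark_node(node_id):
    # Look up the public node and attach it to the current user's personal mindmap.
    node = MindMapNode.query.get_or_404(node_id)
    entry = UserMindmapNode(user_id=current_user.id, node_id=node.id)  # assumed columns
    db.session.add(entry)
    db.session.commit()
    return jsonify({'status': 'ok', 'node_id': node.id})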
+ +
+

+ + Gemerkte Wissensbereiche +

+ +
+ +
+
+
+
+
+
+
+ + +
+

+ + Gemerkte Gedanken +

+ +
+ +
+
+
+
+
+
+
+
+
+ + + +{% endblock %} \ No newline at end of file diff --git a/templates/profile.html b/templates/profile.html new file mode 100644 index 0000000..a243f45 --- /dev/null +++ b/templates/profile.html @@ -0,0 +1,800 @@ +{% extends "base.html" %} + +{% block title %}Profil{% endblock %} + +{% block extra_css %} + +{% endblock %} + +{% block content %} +
+ +
+ +
+
+ Profilbild +
+ +
+
+ +
+ + +
+ +
+
+ +
+
{{ stats.thought_count if stats and stats.thought_count else 0 }}
+
Gedanken
+
+ + +
+
+ +
+
{{ stats.connections_count if stats and stats.connections_count else 0 }}
+
Verbindungen
+
+ + +
+
+ +
+
{{ stats.followers_count if stats and stats.followers_count else 0 }}
+
Follower
+
+ + +
+
+ +
+
{{ stats.contributions_count if stats and stats.contributions_count else 0 }}
+
Beiträge
+
+ + +
+
+ +
+
{{ stats.rating if stats and stats.rating else '0.0' }}
+
Bewertung
+
+
+
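The cards above read their numbers from a stats mapping handed to the template. A minimal sketch of how the rendering route could assemble it; the Thought.user_id foreign key and the follower/contribution counters are assumptions, since no such models are visible in this diff:

from flask import render_template
from flask_login import current_user, login_required

from app import app
from models import Thought

@app.route('/profile')
@login_required
def profile():
    stats = {
        'thought_count': Thought.query.filter_by(user_id=current_user.id).count(),  # FK name assumed
        'connections_count': 0,    # assumption: could be derived from thought relations
        'followers_count': 0,      # assumption: no follower model shown
        'contributions_count': 0,  # assumption
        'rating': '0.0',
    }
    return render_template('profile.html', stats=stats, activities=[])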
+ + +
+ +
+
Aktivitäten
+
Gedanken
+
Sammlungen
+
Verbindungen
+
Einstellungen
+
+ + +
+ +
+
+ {% if activities %} + {% for activity in activities %} +
+
+
{{ activity.title }}
+
{{ activity.date }}
+
+
+

{{ activity.content }}

+
+ +
+ {% endfor %} + {% else %} +
+ +

Noch keine Aktivitäten vorhanden

+
+ {% endif %} +
+
+ + + + + + + + + +
+
+
+{% endblock %} + +{% block extra_js %} + +{% endblock %} \ No newline at end of file diff --git a/templates/register.html b/templates/register.html new file mode 100644 index 0000000..0d20104 --- /dev/null +++ b/templates/register.html @@ -0,0 +1,115 @@ +{% extends "base.html" %} + +{% block title %}Registrieren{% endblock %} + +{% block content %} +
+
+
+

+ + Registrieren +

+ +
+
+ +
+ + + + +
+ +
+ +
+ +
+ + + + +
+ +
+ +
+ +
+ + + + + +
+ +
+ +
+ +
+ +
+

Bereits registriert? Anmelden

+
+
+
+
+
+ + +{% endblock %} \ No newline at end of file diff --git a/templates/search.html b/templates/search.html new file mode 100644 index 0000000..54507aa --- /dev/null +++ b/templates/search.html @@ -0,0 +1,105 @@ +{% extends "base.html" %} + +{% block title %}Suche{% endblock %} + +{% block content %} +
+ +
+
+

Erweiterte Suche

+
+
+ +
+ + + + +
+
+ +
+ +
+ + + + +
+
+ +
+ +
+ + + + +
+
+ +
+ +
+ + + + +
+
+ +
+ +
+ + + + +
+
+ + +
+
+
+ + +
+
+

Suchergebnisse

+

Nutze die Filter links, um deine Suche zu präzisieren.

+
+ +
+ +
+ +
Wissen entdecken
+

Gib einen Suchbegriff ein, um in der wissenschaftlichen Wissensdatenbank zu suchen.

+
+
+
+
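How the results pane could be filled server-side, as a minimal sketch; the title and content columns on Thought are assumptions about models.py:

from flask import request, render_template

from app import app
from models import Thought

@app.route('/search')
def search():
    q = request.args.get('q', '').strip()
    results = []
    if q:
        pattern = f"%{q}%"
        # Case-insensitive substring match over title and body text (column names assumed).
        results = Thought.query.filter(
            Thought.title.ilike(pattern) | Thought.content.ilike(pattern)
        ).limit(50).all()
    return render_template('search.html', results=results, query=q)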
+{% endblock %} \ No newline at end of file diff --git a/templates/settings.html b/templates/settings.html new file mode 100644 index 0000000..9b71b90 --- /dev/null +++ b/templates/settings.html @@ -0,0 +1,269 @@ +{% extends "base.html" %} + +{% block title %}Einstellungen{% endblock %} + +{% block content %} +
+ +
+

+ Einstellungen +

+

+ Verwalten Sie Ihr Konto und passen Sie Ihre Benutzererfahrung an. +

+
+ + +
+ +
+
+ +
+
+ + +
+ +
+

+ + Konto-Einstellungen +

+ +
+
+
+ {{ current_user.username[0] | upper }} +
+
+

{{ current_user.username }}

+

{{ current_user.email }}

+

+ Mitglied seit {{ current_user.created_at.strftime('%d.%m.%Y') if current_user.created_at else 'unbekannt' }} +

+
+
+
+ +
+
+
+ + +

Der Benutzername kann derzeit nicht geändert werden.

+
+ +
+ + +

Die E-Mail-Adresse kann derzeit nicht geändert werden.

+
+
+ +
+

Passwort ändern

+ +
+
+ + +
+ +
+ + +

Mindestens 6 Zeichen

+
+ +
+ + +
+
+
+ +
+ +
+
+
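The form above only hints at the rules ("Mindestens 6 Zeichen"). A sketch of the matching server-side handler, assuming a hypothetical POST route and form field names; set_password exists on User (see utils/user_manager.py), while check_password is assumed to be its counterpart:

from flask import request, flash, redirect, url_for
from flask_login import current_user, login_required

from app import app
from models import db

@app.route('/settings/password', methods=['POST'])  # route path is hypothetical
@login_required
def change_password():
    current = request.form.get('current_password', '')  # field names are assumptions
    new = request.form.get('new_password', '')
    confirm = request.form.get('confirm_password', '')

    if not current_user.check_password(current):        # helper name assumed
        flash('Aktuelles Passwort ist falsch.', 'error')
    elif len(new) < 6:
        flash('Das neue Passwort muss mindestens 6 Zeichen lang sein.', 'error')
    elif new != confirm:
        flash('Die Passwörter stimmen nicht überein.', 'error')
    else:
        current_user.set_password(new)
        db.session.commit()
        flash('Passwort erfolgreich geändert.', 'success')
    return redirect(url_for('settings'))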
+ + + + + + + + + +
+
+
+{% endblock %} + +{% block extra_js %} + +{% endblock %} \ No newline at end of file diff --git a/templates/ueber_uns.html b/templates/ueber_uns.html new file mode 100644 index 0000000..2e507ef --- /dev/null +++ b/templates/ueber_uns.html @@ -0,0 +1,79 @@ +{% extends "base.html" %} + +{% block title %}Über uns{% endblock %} + +{% block content %} +
+
+

Über uns

+ +
+

Unsere Vision

+

+ Systades ist ein innovatives Projekt, das darauf abzielt, das Teilen und Vernetzen von Wissen und Gedanken zu revolutionieren. Unsere Plattform ermöglicht es Nutzern, ihre Ideen in interaktiven Mindmaps zu organisieren und mit anderen zu teilen, wodurch ein kollaboratives Netzwerk des Wissens entsteht. +

+

+ Wir glauben daran, dass Wissen am wertvollsten ist, wenn es geteilt und vernetzt wird. Durch die Verbindung verschiedener Perspektiven und Denkansätze entstehen neue Erkenntnisse und Innovationen. +

+
+ +
+

Das Team

+

+ Till Tomczak und Marwin Medczinski arbeiten gemeinsam daran, Systades kontinuierlich zu verbessern und weiterzuentwickeln. +

+ + +
+ + +
+
+ +
+

Unsere Mission

+

+ Wir setzen uns dafür ein, eine Plattform zu schaffen, die: +

+
+
  • Intuitive Werkzeuge für die Organisation und Visualisierung von Wissen bereitstellt
  • Die Zusammenarbeit und den Austausch zwischen Nutzern fördert
  • Kreativität und innovative Denkansätze unterstützt
  • Einen sicheren und respektvollen Raum für intellektuellen Austausch bietet
+
+ +
+

Technologie & Innovation

+

+ Systades nutzt modernste Technologien und innovative Ansätze, um eine optimale Nutzererfahrung zu gewährleisten. Unsere Plattform wird kontinuierlich weiterentwickelt, um neue Funktionen und Verbesserungen zu integrieren. +

+

+ Wir legen besonderen Wert auf: +

+
+
  • Intuitive Benutzeroberfläche
  • Hohe Performance und Zuverlässigkeit
  • Datensicherheit und Privatsphäre
  • Barrierefreiheit und Inklusivität
+
+ +
+

Kontakt & Feedback

+

+ Wir freuen uns über Ihr Feedback und Ihre Ideen zur Verbesserung von Systades. Gemeinsam können wir die Plattform weiter optimieren und an die Bedürfnisse unserer Nutzer anpassen. +

+

+ Kontaktieren Sie uns gerne für Fragen, Anregungen oder Kooperationsmöglichkeiten. +

+
+
+
+{% endblock %} \ No newline at end of file diff --git a/test_app.py b/test_app.py new file mode 100644 index 0000000..ac388ea --- /dev/null +++ b/test_app.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import sqlite3 +import os +import requests +import time + +print("Systades Anwendungstest") +print("=======================") + +# Prüfen, ob die Datenbank existiert +db_path = 'systades.db' +if not os.path.exists(db_path): + print(f"Datenbank {db_path} existiert nicht.") + exit(1) + +# Datenbankprüfung +conn = sqlite3.connect(db_path) +cursor = conn.cursor() + +# Prüfen, ob die User-Tabelle existiert und Benutzer enthält +cursor.execute("SELECT COUNT(*) FROM user;") +user_count = cursor.fetchone()[0] +print(f"Anzahl der Benutzer in der Datenbank: {user_count}") + +if user_count == 0: + print("WARNUNG: Keine Benutzer in der Datenbank gefunden!") + print("Bitte führen Sie das Skript 'create_default_users.py' aus, um Standardbenutzer zu erstellen.") + conn.close() + exit(1) + +# Tabellenschema prüfen +cursor.execute("PRAGMA table_info(user);") +columns = cursor.fetchall() +column_names = [column[1] for column in columns] +print(f"Spalten in der User-Tabelle: {', '.join(column_names)}") + +# Überprüfen, ob die password-Spalte existiert +if 'password' not in column_names: + print("FEHLER: Die Spalte 'password' fehlt in der Tabelle 'user'!") + print("Bitte führen Sie das Skript 'fix_user_table.py' aus, um die Datenbank zu reparieren.") + conn.close() + exit(1) + +# Benutzer für Testlogin abrufen +cursor.execute("SELECT username, email FROM user LIMIT 1;") +test_user = cursor.fetchone() +if test_user: + print(f"Testbenutzer für Login: {test_user[0]} (E-Mail: {test_user[1]})") +else: + print("FEHLER: Konnte keinen Testbenutzer abrufen.") + +conn.close() + +print("\nDie Datenbank scheint korrekt konfiguriert zu sein.") +print("Sie können nun die Anwendung starten und sich mit den folgenden Zugangsdaten anmelden:") +print(" Admin: username=admin, password=admin") +print(" User: username=user, password=user") +print("\nTest abgeschlossen.") \ No newline at end of file diff --git a/utils/__init__.py b/utils/__init__.py new file mode 100644 index 0000000..359d89d --- /dev/null +++ b/utils/__init__.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +""" +Utility functions for the website application. +This package contains various utilities for database management, +user management, and server administration. 
+""" + +# Import utilities that don't depend on app.py first +from .db_check import check_db_connection, initialize_db_if_needed + +# Define the list of all available utilities +__all__ = [ + # Database utilities + 'check_db_connection', + 'initialize_db_if_needed', + 'fix_database_schema', + 'rebuild_database', + 'test_database_connection', + 'test_models', + 'print_database_stats', + 'run_all_tests', + + # User management + 'list_users', + 'create_user', + 'reset_password', + 'delete_user', + 'create_admin_user', + + # Server management + 'run_development_server', +] + +# Import remaining modules that might depend on app +from .db_fix import fix_database_schema +from .db_rebuild import rebuild_database +from .db_test import test_database_connection, test_models, print_database_stats, run_all_tests +from .user_manager import list_users, create_user, reset_password, delete_user, create_admin_user +from .server import run_development_server \ No newline at end of file diff --git a/utils/__pycache__/__init__.cpython-311.pyc b/utils/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..613d2b4 Binary files /dev/null and b/utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/utils/__pycache__/__init__.cpython-313.pyc b/utils/__pycache__/__init__.cpython-313.pyc new file mode 100644 index 0000000..032f9c9 Binary files /dev/null and b/utils/__pycache__/__init__.cpython-313.pyc differ diff --git a/utils/__pycache__/db_check.cpython-311.pyc b/utils/__pycache__/db_check.cpython-311.pyc new file mode 100644 index 0000000..0b4f11f Binary files /dev/null and b/utils/__pycache__/db_check.cpython-311.pyc differ diff --git a/utils/__pycache__/db_check.cpython-313.pyc b/utils/__pycache__/db_check.cpython-313.pyc new file mode 100644 index 0000000..d3bf2ad Binary files /dev/null and b/utils/__pycache__/db_check.cpython-313.pyc differ diff --git a/utils/__pycache__/db_fix.cpython-311.pyc b/utils/__pycache__/db_fix.cpython-311.pyc new file mode 100644 index 0000000..fe2d90b Binary files /dev/null and b/utils/__pycache__/db_fix.cpython-311.pyc differ diff --git a/utils/__pycache__/db_fix.cpython-313.pyc b/utils/__pycache__/db_fix.cpython-313.pyc new file mode 100644 index 0000000..a3db687 Binary files /dev/null and b/utils/__pycache__/db_fix.cpython-313.pyc differ diff --git a/utils/__pycache__/db_rebuild.cpython-311.pyc b/utils/__pycache__/db_rebuild.cpython-311.pyc new file mode 100644 index 0000000..a059a74 Binary files /dev/null and b/utils/__pycache__/db_rebuild.cpython-311.pyc differ diff --git a/utils/__pycache__/db_rebuild.cpython-313.pyc b/utils/__pycache__/db_rebuild.cpython-313.pyc new file mode 100644 index 0000000..8e4ac13 Binary files /dev/null and b/utils/__pycache__/db_rebuild.cpython-313.pyc differ diff --git a/utils/__pycache__/db_test.cpython-311.pyc b/utils/__pycache__/db_test.cpython-311.pyc new file mode 100644 index 0000000..9fc961e Binary files /dev/null and b/utils/__pycache__/db_test.cpython-311.pyc differ diff --git a/utils/__pycache__/db_test.cpython-313.pyc b/utils/__pycache__/db_test.cpython-313.pyc new file mode 100644 index 0000000..098065f Binary files /dev/null and b/utils/__pycache__/db_test.cpython-313.pyc differ diff --git a/utils/__pycache__/server.cpython-311.pyc b/utils/__pycache__/server.cpython-311.pyc new file mode 100644 index 0000000..9e1d591 Binary files /dev/null and b/utils/__pycache__/server.cpython-311.pyc differ diff --git a/utils/__pycache__/server.cpython-313.pyc b/utils/__pycache__/server.cpython-313.pyc new file mode 100644 
index 0000000..e7a51f4 Binary files /dev/null and b/utils/__pycache__/server.cpython-313.pyc differ diff --git a/utils/__pycache__/user_manager.cpython-311.pyc b/utils/__pycache__/user_manager.cpython-311.pyc new file mode 100644 index 0000000..40def1d Binary files /dev/null and b/utils/__pycache__/user_manager.cpython-311.pyc differ diff --git a/utils/__pycache__/user_manager.cpython-313.pyc b/utils/__pycache__/user_manager.cpython-313.pyc new file mode 100644 index 0000000..398b652 Binary files /dev/null and b/utils/__pycache__/user_manager.cpython-313.pyc differ diff --git a/utils/db_check.py b/utils/db_check.py new file mode 100644 index 0000000..17ec701 --- /dev/null +++ b/utils/db_check.py @@ -0,0 +1,79 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from flask import current_app +from sqlalchemy.exc import SQLAlchemyError +from sqlalchemy import text +import time + +def check_db_connection(db): + """ + Überprüft die Datenbankverbindung und versucht ggf. die Verbindung wiederherzustellen + + Args: + db: SQLAlchemy-Instanz + + Returns: + bool: True, wenn die Verbindung erfolgreich ist, sonst False + """ + max_retries = 3 + retry_count = 0 + + while retry_count < max_retries: + try: + # Führe eine einfache Abfrage durch, um die Verbindung zu testen + with current_app.app_context(): + db.session.execute(text('SELECT 1')) + return True + except SQLAlchemyError as e: + retry_count += 1 + print(f"DB-Verbindungsfehler (Versuch {retry_count}/{max_retries}): {str(e)}") + + if retry_count < max_retries: + # Warte vor dem nächsten Versuch + time.sleep(1) + + # Versuche die Verbindung zurückzusetzen + try: + db.session.rollback() + except: + pass + + return False + +def initialize_db_if_needed(db, initialize_function=None): + """ + Initialisiert die Datenbank, falls erforderlich + + Args: + db: SQLAlchemy-Instanz + initialize_function: Funktion, die aufgerufen wird, um die Datenbank zu initialisieren + + Returns: + bool: True, wenn die Datenbank bereit ist, sonst False + """ + # Prüfe die Verbindung + if not check_db_connection(db): + return False + + # Prüfe, ob die Tabellen existieren + try: + with current_app.app_context(): + # Führe eine Testabfrage auf einer Tabelle durch + db.session.execute(text('SELECT COUNT(*) FROM user')) + except SQLAlchemyError: + # Tabellen existieren nicht, erstelle sie + try: + with current_app.app_context(): + db.create_all() + + # Rufe die Initialisierungsfunktion auf, falls vorhanden + if initialize_function and callable(initialize_function): + initialize_function() + + return True + except Exception as e: + print(f"Fehler bei DB-Initialisierung: {str(e)}") + return False + + return True \ No newline at end of file diff --git a/utils/db_fix.py b/utils/db_fix.py new file mode 100644 index 0000000..2271457 --- /dev/null +++ b/utils/db_fix.py @@ -0,0 +1,78 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import os +import sqlite3 +from datetime import datetime +import sys +import importlib.util + +# Add the parent directory to path so we can import the app +parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.insert(0, parent_dir) + +from app import app, db_path +from models import db + +def ensure_db_dir(): + """Make sure the database directory exists.""" + os.makedirs(os.path.dirname(db_path), exist_ok=True) + +def fix_database_schema(): + """Fix the database schema by adding missing columns.""" + with app.app_context(): + # Ensure directory exists + ensure_db_dir() + + # Check if database exists, create tables if needed 
+ if not os.path.exists(db_path): + print("Database doesn't exist. Creating all tables from scratch...") + db.create_all() + print("Database tables created successfully!") + return + + # Connect to existing database + print(f"Connecting to database: {db_path}") + conn = sqlite3.connect(db_path) + cursor = conn.cursor() + + # Check if User table exists + cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='user'") + if not cursor.fetchone(): + print("User table doesn't exist. Creating all tables from scratch...") + conn.close() + db.create_all() + print("Database tables created successfully!") + return + + # Check existing columns + cursor.execute("PRAGMA table_info(user)") + columns = cursor.fetchall() + column_names = [col[1] for col in columns] + print("Existing columns in User table:", column_names) + + # Add missing columns + if 'created_at' not in column_names: + print("Adding 'created_at' column to User table...") + cursor.execute("ALTER TABLE user ADD COLUMN created_at TIMESTAMP") + + if 'last_login' not in column_names: + print("Adding 'last_login' column to User table...") + cursor.execute("ALTER TABLE user ADD COLUMN last_login TIMESTAMP") + + if 'avatar' not in column_names: + print("Adding 'avatar' column to User table...") + cursor.execute("ALTER TABLE user ADD COLUMN avatar VARCHAR(200)") + + if 'bio' not in column_names: + print("Adding 'bio' column to User table...") + cursor.execute("ALTER TABLE user ADD COLUMN bio TEXT") + + # Commit the changes + conn.commit() + conn.close() + print("Database schema updated successfully!") + return True + +if __name__ == "__main__": + fix_database_schema() \ No newline at end of file diff --git a/utils/db_rebuild.py b/utils/db_rebuild.py new file mode 100644 index 0000000..58a714f --- /dev/null +++ b/utils/db_rebuild.py @@ -0,0 +1,91 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import os +import sqlite3 +from datetime import datetime +import sys +import importlib.util + +# Add the parent directory to path so we can import the app +parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.insert(0, parent_dir) + +# Import models directly to avoid circular import +from models import db, User, Category + +def rebuild_database(app_instance=None): + """Completely rebuilds the database by dropping and recreating all tables.""" + if app_instance is None: + # Only import app if it's not provided as a parameter + from app import app as app_instance + from app import db_path + else: + # Get db_path from app_instance config + db_path = app_instance.config['SQLALCHEMY_DATABASE_URI'].replace('sqlite:///', '') + + with app_instance.app_context(): + print(f"Database path: {db_path}") + + # Back up existing database if it exists + if os.path.exists(db_path): + backup_path = db_path + '.backup' + try: + import shutil + shutil.copy2(db_path, backup_path) + print(f"Backed up existing database to {backup_path}") + except Exception as e: + print(f"Warning: Could not create backup - {str(e)}") + + # Ensure directory exists + os.makedirs(os.path.dirname(db_path), exist_ok=True) + + # Drop all tables and recreate them + print("Dropping all tables...") + db.drop_all() + + print("Creating all tables...") + db.create_all() + + # Create admin user + print("Creating admin user...") + admin = User( + username='admin', + email='admin@example.com', + is_admin=True, + created_at=datetime.utcnow() + ) + admin.set_password('admin') + db.session.add(admin) + + # Create regular user + print("Creating regular user...") 
+ user = User( + username='user', + email='user@example.com', + is_admin=False, + created_at=datetime.utcnow() + ) + user.set_password('user') + db.session.add(user) + + try: + # Commit to generate user IDs + db.session.commit() + print("Users created successfully!") + + # Create default categories + print("Creating default categories...") + # Instead of directly importing create_default_categories, call it through app_instance + create_default_categories_func = getattr(sys.modules['app'], 'create_default_categories') + create_default_categories_func() + + print("Database rebuild completed successfully!") + return True + except Exception as e: + db.session.rollback() + print(f"Error during database rebuild: {str(e)}") + raise + +if __name__ == "__main__": + rebuild_database() \ No newline at end of file diff --git a/utils/db_test.py b/utils/db_test.py new file mode 100644 index 0000000..f3653fa --- /dev/null +++ b/utils/db_test.py @@ -0,0 +1,120 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import os +import sys +import sqlite3 + +# Add the parent directory to path so we can import the app +parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.insert(0, parent_dir) + +from app import app, db_path +from models import db, User, Thought, MindMapNode, Category + +def test_database_connection(): + """Test if the database exists and can be connected to.""" + try: + if not os.path.exists(db_path): + print(f"Database file does not exist: {db_path}") + return False + + conn = sqlite3.connect(db_path) + cursor = conn.cursor() + cursor.execute("PRAGMA integrity_check") + result = cursor.fetchone() + conn.close() + + if result and result[0] == "ok": + print(f"Database integrity check passed: {db_path}") + return True + else: + print(f"Database integrity check failed: {result}") + return False + except Exception as e: + print(f"Error testing database connection: {e}") + return False + +def test_models(): + """Test if all models are properly defined and can be queried.""" + with app.app_context(): + try: + print("\nTesting User model...") + user_count = User.query.count() + print(f" Found {user_count} users") + + print("\nTesting Category model...") + category_count = Category.query.count() + print(f" Found {category_count} categories") + + print("\nTesting MindMapNode model...") + node_count = MindMapNode.query.count() + print(f" Found {node_count} mindmap nodes") + + print("\nTesting Thought model...") + thought_count = Thought.query.count() + print(f" Found {thought_count} thoughts") + + if user_count == 0: + print("\nWARNING: No users found in the database. You might need to create an admin user.") + + return True + except Exception as e: + print(f"Error testing models: {e}") + return False + +def print_database_stats(): + """Print database statistics.""" + with app.app_context(): + try: + stats = [] + stats.append(("Users", User.query.count())) + stats.append(("Categories", Category.query.count())) + stats.append(("Mindmap Nodes", MindMapNode.query.count())) + stats.append(("Thoughts", Thought.query.count())) + + print("\nDatabase Statistics:") + print("-" * 40) + for name, count in stats: + print(f"{name:<20} : {count}") + print("-" * 40) + + return True + except Exception as e: + print(f"Error generating database statistics: {e}") + return False + +def run_all_tests(): + """Run all database tests.""" + success = True + + print("=" * 60) + print("STARTING DATABASE TESTS") + print("=" * 60) + + # Test database connection + print("\n1. 
Testing database connection...") + if not test_database_connection(): + success = False + + # Test models + print("\n2. Testing database models...") + if not test_models(): + success = False + + # Print statistics + print("\n3. Database statistics:") + if not print_database_stats(): + success = False + + print("\n" + "=" * 60) + if success: + print("All database tests completed successfully!") + else: + print("Some database tests failed. Check the output above for details.") + print("=" * 60) + + return success + +if __name__ == "__main__": + run_all_tests() \ No newline at end of file diff --git a/utils/download_resources.py b/utils/download_resources.py new file mode 100644 index 0000000..b6148b6 --- /dev/null +++ b/utils/download_resources.py @@ -0,0 +1,225 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +""" +Dieses Skript lädt externe Ressourcen wie Font Awesome, Tailwind CSS und Alpine.js herunter +und installiert sie lokal, um die Abhängigkeit von externen CDNs zu vermeiden und +die Content Security Policy zu erfüllen. +""" + +import os +import sys +import requests +import zipfile +import io +import shutil +import subprocess +from pathlib import Path + +BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + +# URLs für die Ressourcen +RESOURCES = { + 'alpine.js': 'https://cdn.jsdelivr.net/npm/alpinejs@3.12.3/dist/cdn.min.js', + 'font_awesome': 'https://use.fontawesome.com/releases/v6.4.0/fontawesome-free-6.4.0-web.zip', + 'inter_font_300': 'https://fonts.gstatic.com/s/inter/v18/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa1ZL7.woff2', + 'inter_font_400': 'https://fonts.gstatic.com/s/inter/v18/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2ZL7SUc.woff2', + 'inter_font_500': 'https://fonts.gstatic.com/s/inter/v18/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa1pL7SUc.woff2', + 'inter_font_600': 'https://fonts.gstatic.com/s/inter/v18/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa2pL7SUc.woff2', + 'inter_font_700': 'https://fonts.gstatic.com/s/inter/v18/UcC73FwrK3iLTeHuS_nVMrMxCp50SjIa25L7SUc.woff2', + 'jetbrains_font_400': 'https://fonts.gstatic.com/s/jetbrainsmono/v20/tDbv2o-flEEny0FZhsfKu5WU4zr3E_BX0PnT8RD8yKwBNntkaToggR7BYRbKPxDcwg.woff2', + 'jetbrains_font_500': 'https://fonts.gstatic.com/s/jetbrainsmono/v20/tDbv2o-flEEny0FZhsfKu5WU4zr3E_BX0PnT8RD8yKwBNntkaToggR7BYRbKPx3cwhsk.woff2', + 'jetbrains_font_700': 'https://fonts.gstatic.com/s/jetbrainsmono/v20/tDbv2o-flEEny0FZhsfKu5WU4zr3E_BX0PnT8RD8yKwBNntkaToggR7BYRbKPxTcwhsk.woff2', +} + +# Zielverzeichnisse +DIRECTORIES = { + 'js': os.path.join(BASE_DIR, 'static', 'js'), + 'css': os.path.join(BASE_DIR, 'static', 'css'), + 'fonts': os.path.join(BASE_DIR, 'static', 'fonts'), + 'webfonts': os.path.join(BASE_DIR, 'static', 'webfonts'), +} + +def download_file(url, filepath): + """Lädt eine Datei von einer URL herunter und speichert sie lokal.""" + response = requests.get(url, stream=True) + if response.status_code == 200: + with open(filepath, 'wb') as f: + for chunk in response.iter_content(chunk_size=8192): + f.write(chunk) + print(f"✅ Heruntergeladen: {os.path.basename(filepath)}") + return True + else: + print(f"❌ Fehler beim Herunterladen von {url}: {response.status_code}") + return False + +def extract_zip(zip_data, extract_to): + """Extrahiert eine ZIP-Datei in das angegebene Verzeichnis.""" + with zipfile.ZipFile(io.BytesIO(zip_data)) as zip_ref: + zip_ref.extractall(extract_to) + +def setup_directories(): + """Erstellt die benötigten Verzeichnisse, falls sie nicht existieren.""" + for directory in DIRECTORIES.values(): + os.makedirs(directory, exist_ok=True) + 
print(f"📁 Verzeichnis erstellt/überprüft: {directory}") + +def download_alpine(): + """Lädt Alpine.js herunter.""" + url = RESOURCES['alpine.js'] + filepath = os.path.join(DIRECTORIES['js'], 'alpine.min.js') + download_file(url, filepath) + +def download_font_awesome(): + """Lädt Font Awesome herunter und extrahiert die Dateien.""" + url = RESOURCES['font_awesome'] + response = requests.get(url) + if response.status_code == 200: + # Temporäres Verzeichnis für die Extraktion + temp_dir = os.path.join(BASE_DIR, 'temp_fontawesome') + os.makedirs(temp_dir, exist_ok=True) + + # ZIP-Datei extrahieren + extract_zip(response.content, temp_dir) + + # CSS-Datei kopieren + fa_dir = os.path.join(temp_dir, 'fontawesome-free-6.4.0-web') + css_source = os.path.join(fa_dir, 'css', 'all.min.css') + css_dest = os.path.join(DIRECTORIES['css'], 'all.min.css') + shutil.copyfile(css_source, css_dest) + print(f"✅ Font Awesome CSS kopiert nach {css_dest}") + + # Webfonts-Verzeichnis kopieren + webfonts_source = os.path.join(fa_dir, 'webfonts') + shutil.rmtree(DIRECTORIES['webfonts'], ignore_errors=True) + shutil.copytree(webfonts_source, DIRECTORIES['webfonts']) + print(f"✅ Font Awesome Webfonts kopiert nach {DIRECTORIES['webfonts']}") + + # Temporäres Verzeichnis löschen + shutil.rmtree(temp_dir) + + return True + else: + print(f"❌ Fehler beim Herunterladen von Font Awesome: {response.status_code}") + return False + +def download_google_fonts(): + """Lädt die Google Fonts (Inter und JetBrains Mono) herunter.""" + font_files = { + 'inter-light.woff2': RESOURCES['inter_font_300'], + 'inter-regular.woff2': RESOURCES['inter_font_400'], + 'inter-medium.woff2': RESOURCES['inter_font_500'], + 'inter-semibold.woff2': RESOURCES['inter_font_600'], + 'inter-bold.woff2': RESOURCES['inter_font_700'], + 'jetbrainsmono-regular.woff2': RESOURCES['jetbrains_font_400'], + 'jetbrainsmono-medium.woff2': RESOURCES['jetbrains_font_500'], + 'jetbrainsmono-bold.woff2': RESOURCES['jetbrains_font_700'], + } + + for filename, url in font_files.items(): + filepath = os.path.join(DIRECTORIES['fonts'], filename) + download_file(url, filepath) + +def setup_tailwind(): + """Richtet Tailwind CSS ein.""" + # Tailwind-Konfiguration erstellen, falls sie nicht existiert + tailwind_config = os.path.join(BASE_DIR, 'tailwind.config.js') + if not os.path.exists(tailwind_config): + with open(tailwind_config, 'w') as f: + f.write("""/** @type {import('tailwindcss').Config} */ +module.exports = { + darkMode: 'class', + content: [ + "./templates/**/*.html", + "./static/**/*.js", + ], + theme: { + extend: { + fontFamily: { + sans: ['Inter', 'ui-sans-serif', 'system-ui', '-apple-system', 'sans-serif'], + mono: ['JetBrains Mono', 'ui-monospace', 'monospace'] + }, + colors: { + primary: { + 50: '#f5f3ff', + 100: '#ede9fe', + 200: '#ddd6fe', + 300: '#c4b5fd', + 400: '#a78bfa', + 500: '#8b5cf6', + 600: '#7c3aed', + 700: '#6d28d9', + 800: '#5b21b6', + 900: '#4c1d95' + }, + secondary: { + 50: '#ecfdf5', + 100: '#d1fae5', + 200: '#a7f3d0', + 300: '#6ee7b7', + 400: '#34d399', + 500: '#10b981', + 600: '#059669', + 700: '#047857', + 800: '#065f46', + 900: '#064e3b' + }, + dark: { + 500: '#374151', + 600: '#1f2937', + 700: '#111827', + 800: '#0e1220', + 900: '#0a0e19' + } + } + } + }, + plugins: [], +} +""") + print(f"✅ Tailwind-Konfiguration erstellt: {tailwind_config}") + + # Input-CSS-Datei erstellen + input_css_dir = os.path.join(DIRECTORIES['css'], 'src') + os.makedirs(input_css_dir, exist_ok=True) + + input_css = os.path.join(input_css_dir, 'input.css') + if not 
os.path.exists(input_css): + with open(input_css, 'w') as f: + f.write("""@tailwind base; +@tailwind components; +@tailwind utilities; +""") + print(f"✅ Tailwind Input-CSS erstellt: {input_css}") + + # Hinweis zur Kompilierung anzeigen + print("\n📋 Um Tailwind CSS zu kompilieren, führe folgenden Befehl aus:") + print("npm install -D tailwindcss") + print(f"npx tailwindcss -i {input_css} -o {os.path.join(DIRECTORIES['css'], 'tailwind.min.css')} --minify") + +def main(): + """Hauptfunktion: Lädt alle benötigten Ressourcen herunter.""" + print("🚀 Starte den Download externer Ressourcen...") + setup_directories() + + # Alpine.js herunterladen + print("\n📦 Lade Alpine.js herunter...") + download_alpine() + + # Font Awesome herunterladen + print("\n📦 Lade Font Awesome herunter...") + download_font_awesome() + + # Google Fonts herunterladen + print("\n📦 Lade Google Fonts herunter...") + download_google_fonts() + + # Tailwind CSS einrichten + print("\n📦 Richte Tailwind CSS ein...") + setup_tailwind() + + print("\n✅ Alle Ressourcen wurden erfolgreich heruntergeladen und eingerichtet!") + print("🔒 Die Webseite sollte nun ohne externe CDNs funktionieren und die Content Security Policy erfüllen.") + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/utils/server.py b/utils/server.py new file mode 100644 index 0000000..2f36df4 --- /dev/null +++ b/utils/server.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import os +import sys + +# Add the parent directory to path so we can import the app +parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.insert(0, parent_dir) + +from app import app + +def run_development_server(host='127.0.0.1', port=5000, debug=True): + """Run the Flask development server.""" + try: + print(f"Starting development server on http://{host}:{port}") + print("Press CTRL+C to stop the server") + app.run(host=host, port=port, debug=debug) + return True + except Exception as e: + print(f"Error starting development server: {e}") + return False + +if __name__ == "__main__": + import argparse + + parser = argparse.ArgumentParser(description='Run the development server') + parser.add_argument('--host', default='127.0.0.1', help='Host to bind to') + parser.add_argument('--port', type=int, default=5000, help='Port to bind to') + parser.add_argument('--debug', action='store_true', default=True, help='Enable debug mode') + + args = parser.parse_args() + + run_development_server(host=args.host, port=args.port, debug=args.debug) \ No newline at end of file diff --git a/utils/user_manager.py b/utils/user_manager.py new file mode 100644 index 0000000..081fff0 --- /dev/null +++ b/utils/user_manager.py @@ -0,0 +1,159 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import os +import sys +from datetime import datetime + +# Add the parent directory to path so we can import the app +parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.insert(0, parent_dir) + +from app import app +from models import db, User + +def list_users(): + """List all users in the database.""" + with app.app_context(): + try: + users = User.query.all() + if not users: + print("No users found in the database.") + return [] + + print("Found {} users:".format(len(users))) + print("-" * 60) + print("{:<5} {:<20} {:<30} {:<10}".format("ID", "Username", "Email", "Admin")) + print("-" * 60) + + for user in users: + print("{:<5} {:<20} {:<30} {:<10}".format( + user.id, user.username, user.email, "Yes" if user.is_admin else 
"No" + )) + return users + except Exception as e: + print(f"Error listing users: {e}") + return [] + +def create_user(username, email, password, is_admin=False): + """Create a new user in the database.""" + with app.app_context(): + try: + # Check if user already exists + existing_user = User.query.filter_by(username=username).first() + if existing_user: + print(f"User with username '{username}' already exists.") + return False + + # Check if email already exists + existing_email = User.query.filter_by(email=email).first() + if existing_email: + print(f"User with email '{email}' already exists.") + return False + + # Create new user + user = User( + username=username, + email=email, + is_admin=is_admin, + created_at=datetime.utcnow() + ) + user.set_password(password) + + db.session.add(user) + db.session.commit() + + print(f"User '{username}' created successfully!") + return True + + except Exception as e: + db.session.rollback() + print(f"Error creating user: {e}") + return False + +def reset_password(username, new_password): + """Reset password for a user.""" + with app.app_context(): + try: + user = User.query.filter_by(username=username).first() + if not user: + print(f"User '{username}' not found.") + return False + + user.set_password(new_password) + db.session.commit() + + print(f"Password for user '{username}' reset successfully!") + return True + + except Exception as e: + db.session.rollback() + print(f"Error resetting password: {e}") + return False + +def delete_user(username): + """Delete a user from the database.""" + with app.app_context(): + try: + user = User.query.filter_by(username=username).first() + if not user: + print(f"User '{username}' not found.") + return False + + db.session.delete(user) + db.session.commit() + + print(f"User '{username}' deleted successfully!") + return True + + except Exception as e: + db.session.rollback() + print(f"Error deleting user: {e}") + return False + +def create_admin_user(): + """Create an admin user in the database.""" + return create_user('admin', 'admin@example.com', 'admin', is_admin=True) + +if __name__ == "__main__": + import argparse + + parser = argparse.ArgumentParser(description='User management utility') + subparsers = parser.add_subparsers(dest='command', help='Command to execute') + + # List users command + list_parser = subparsers.add_parser('list', help='List all users') + + # Create user command + create_parser = subparsers.add_parser('create', help='Create a new user') + create_parser.add_argument('--username', '-u', required=True, help='Username') + create_parser.add_argument('--email', '-e', required=True, help='Email address') + create_parser.add_argument('--password', '-p', required=True, help='Password') + create_parser.add_argument('--admin', '-a', action='store_true', help='Make user an admin') + + # Reset password command + reset_parser = subparsers.add_parser('reset-password', help='Reset a user password') + reset_parser.add_argument('--username', '-u', required=True, help='Username') + reset_parser.add_argument('--password', '-p', required=True, help='New password') + + # Delete user command + delete_parser = subparsers.add_parser('delete', help='Delete a user') + delete_parser.add_argument('--username', '-u', required=True, help='Username to delete') + + # Create admin command (shortcut) + admin_parser = subparsers.add_parser('create-admin', help='Create the default admin user') + + args = parser.parse_args() + + if args.command == 'list': + list_users() + elif args.command == 'create': + 
create_user(args.username, args.email, args.password, args.admin) + elif args.command == 'reset-password': + reset_password(args.username, args.password) + elif args.command == 'delete': + delete_user(args.username) + elif args.command == 'create-admin': + create_admin_user() + else: + parser.print_help() \ No newline at end of file diff --git a/venv/bin/Activate.ps1 b/venv/bin/Activate.ps1 new file mode 100644 index 0000000..eeea358 --- /dev/null +++ b/venv/bin/Activate.ps1 @@ -0,0 +1,247 @@ +<# +.Synopsis +Activate a Python virtual environment for the current PowerShell session. + +.Description +Pushes the python executable for a virtual environment to the front of the +$Env:PATH environment variable and sets the prompt to signify that you are +in a Python virtual environment. Makes use of the command line switches as +well as the `pyvenv.cfg` file values present in the virtual environment. + +.Parameter VenvDir +Path to the directory that contains the virtual environment to activate. The +default value for this is the parent of the directory that the Activate.ps1 +script is located within. + +.Parameter Prompt +The prompt prefix to display when this virtual environment is activated. By +default, this prompt is the name of the virtual environment folder (VenvDir) +surrounded by parentheses and followed by a single space (ie. '(.venv) '). + +.Example +Activate.ps1 +Activates the Python virtual environment that contains the Activate.ps1 script. + +.Example +Activate.ps1 -Verbose +Activates the Python virtual environment that contains the Activate.ps1 script, +and shows extra information about the activation as it executes. + +.Example +Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv +Activates the Python virtual environment located in the specified location. + +.Example +Activate.ps1 -Prompt "MyPython" +Activates the Python virtual environment that contains the Activate.ps1 script, +and prefixes the current prompt with the specified string (surrounded in +parentheses) while the virtual environment is active. + +.Notes +On Windows, it may be required to enable this Activate.ps1 script by setting the +execution policy for the user. You can do this by issuing the following PowerShell +command: + +PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser + +For more information on Execution Policies: +https://go.microsoft.com/fwlink/?LinkID=135170 + +#> +Param( + [Parameter(Mandatory = $false)] + [String] + $VenvDir, + [Parameter(Mandatory = $false)] + [String] + $Prompt +) + +<# Function declarations --------------------------------------------------- #> + +<# +.Synopsis +Remove all shell session elements added by the Activate script, including the +addition of the virtual environment's Python executable from the beginning of +the PATH variable. + +.Parameter NonDestructive +If present, do not remove this function from the global namespace for the +session. 
+ +#> +function global:deactivate ([switch]$NonDestructive) { + # Revert to original values + + # The prior prompt: + if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) { + Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt + Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT + } + + # The prior PYTHONHOME: + if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) { + Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME + Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME + } + + # The prior PATH: + if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) { + Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH + Remove-Item -Path Env:_OLD_VIRTUAL_PATH + } + + # Just remove the VIRTUAL_ENV altogether: + if (Test-Path -Path Env:VIRTUAL_ENV) { + Remove-Item -Path env:VIRTUAL_ENV + } + + # Just remove VIRTUAL_ENV_PROMPT altogether. + if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) { + Remove-Item -Path env:VIRTUAL_ENV_PROMPT + } + + # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether: + if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) { + Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force + } + + # Leave deactivate function in the global namespace if requested: + if (-not $NonDestructive) { + Remove-Item -Path function:deactivate + } +} + +<# +.Description +Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the +given folder, and returns them in a map. + +For each line in the pyvenv.cfg file, if that line can be parsed into exactly +two strings separated by `=` (with any amount of whitespace surrounding the =) +then it is considered a `key = value` line. The left hand string is the key, +the right hand is the value. + +If the value starts with a `'` or a `"` then the first and last character is +stripped from the value before being captured. + +.Parameter ConfigDir +Path to the directory that contains the `pyvenv.cfg` file. +#> +function Get-PyVenvConfig( + [String] + $ConfigDir +) { + Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg" + + # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue). + $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue + + # An empty map will be returned if no config file is found. + $pyvenvConfig = @{ } + + if ($pyvenvConfigPath) { + + Write-Verbose "File exists, parse `key = value` lines" + $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath + + $pyvenvConfigContent | ForEach-Object { + $keyval = $PSItem -split "\s*=\s*", 2 + if ($keyval[0] -and $keyval[1]) { + $val = $keyval[1] + + # Remove extraneous quotations around a string value. 
+ if ("'""".Contains($val.Substring(0, 1))) { + $val = $val.Substring(1, $val.Length - 2) + } + + $pyvenvConfig[$keyval[0]] = $val + Write-Verbose "Adding Key: '$($keyval[0])'='$val'" + } + } + } + return $pyvenvConfig +} + + +<# Begin Activate script --------------------------------------------------- #> + +# Determine the containing directory of this script +$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition +$VenvExecDir = Get-Item -Path $VenvExecPath + +Write-Verbose "Activation script is located in path: '$VenvExecPath'" +Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)" +Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)" + +# Set values required in priority: CmdLine, ConfigFile, Default +# First, get the location of the virtual environment, it might not be +# VenvExecDir if specified on the command line. +if ($VenvDir) { + Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values" +} +else { + Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir." + $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/") + Write-Verbose "VenvDir=$VenvDir" +} + +# Next, read the `pyvenv.cfg` file to determine any required value such +# as `prompt`. +$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir + +# Next, set the prompt from the command line, or the config file, or +# just use the name of the virtual environment folder. +if ($Prompt) { + Write-Verbose "Prompt specified as argument, using '$Prompt'" +} +else { + Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value" + if ($pyvenvCfg -and $pyvenvCfg['prompt']) { + Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'" + $Prompt = $pyvenvCfg['prompt']; + } + else { + Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)" + Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'" + $Prompt = Split-Path -Path $venvDir -Leaf + } +} + +Write-Verbose "Prompt = '$Prompt'" +Write-Verbose "VenvDir='$VenvDir'" + +# Deactivate any currently active virtual environment, but leave the +# deactivate function in place. +deactivate -nondestructive + +# Now set the environment variable VIRTUAL_ENV, used by many tools to determine +# that there is an activated venv. 
+$env:VIRTUAL_ENV = $VenvDir + +if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) { + + Write-Verbose "Setting prompt to '$Prompt'" + + # Set the prompt to include the env name + # Make sure _OLD_VIRTUAL_PROMPT is global + function global:_OLD_VIRTUAL_PROMPT { "" } + Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT + New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt + + function global:prompt { + Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) " + _OLD_VIRTUAL_PROMPT + } + $env:VIRTUAL_ENV_PROMPT = $Prompt +} + +# Clear PYTHONHOME +if (Test-Path -Path Env:PYTHONHOME) { + Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME + Remove-Item -Path Env:PYTHONHOME +} + +# Add the venv to the PATH +Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH +$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH" diff --git a/venv/bin/activate b/venv/bin/activate new file mode 100644 index 0000000..25d4262 --- /dev/null +++ b/venv/bin/activate @@ -0,0 +1,69 @@ +# This file must be used with "source bin/activate" *from bash* +# you cannot run it directly + +deactivate () { + # reset old environment variables + if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then + PATH="${_OLD_VIRTUAL_PATH:-}" + export PATH + unset _OLD_VIRTUAL_PATH + fi + if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then + PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}" + export PYTHONHOME + unset _OLD_VIRTUAL_PYTHONHOME + fi + + # This should detect bash and zsh, which have a hash command that must + # be called to get it to forget past commands. Without forgetting + # past commands the $PATH changes we made may not be respected + if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then + hash -r 2> /dev/null + fi + + if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then + PS1="${_OLD_VIRTUAL_PS1:-}" + export PS1 + unset _OLD_VIRTUAL_PS1 + fi + + unset VIRTUAL_ENV + unset VIRTUAL_ENV_PROMPT + if [ ! "${1:-}" = "nondestructive" ] ; then + # Self destruct! + unset -f deactivate + fi +} + +# unset irrelevant variables +deactivate nondestructive + +VIRTUAL_ENV="/home/core/dev/website/venv" +export VIRTUAL_ENV + +_OLD_VIRTUAL_PATH="$PATH" +PATH="$VIRTUAL_ENV/bin:$PATH" +export PATH + +# unset PYTHONHOME if set +# this will fail if PYTHONHOME is set to the empty string (which is bad anyway) +# could use `if (set -u; : $PYTHONHOME) ;` in bash +if [ -n "${PYTHONHOME:-}" ] ; then + _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}" + unset PYTHONHOME +fi + +if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then + _OLD_VIRTUAL_PS1="${PS1:-}" + PS1="(venv) ${PS1:-}" + export PS1 + VIRTUAL_ENV_PROMPT="(venv) " + export VIRTUAL_ENV_PROMPT +fi + +# This should detect bash and zsh, which have a hash command that must +# be called to get it to forget past commands. Without forgetting +# past commands the $PATH changes we made may not be respected +if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then + hash -r 2> /dev/null +fi diff --git a/venv/bin/activate.csh b/venv/bin/activate.csh new file mode 100644 index 0000000..c0db2e6 --- /dev/null +++ b/venv/bin/activate.csh @@ -0,0 +1,26 @@ +# This file must be used with "source bin/activate.csh" *from csh*. +# You cannot run it directly. +# Created by Davide Di Blasi . 
+# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
+
+alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
+
+# Unset irrelevant variables.
+deactivate nondestructive
+
+setenv VIRTUAL_ENV "/home/core/dev/website/venv"
+
+set _OLD_VIRTUAL_PATH="$PATH"
+setenv PATH "$VIRTUAL_ENV/bin:$PATH"
+
+
+set _OLD_VIRTUAL_PROMPT="$prompt"
+
+if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
+    set prompt = "(venv) $prompt"
+    setenv VIRTUAL_ENV_PROMPT "(venv) "
+endif
+
+alias pydoc python -m pydoc
+
+rehash
diff --git a/venv/bin/activate.fish b/venv/bin/activate.fish
new file mode 100644
index 0000000..0a39ada
--- /dev/null
+++ b/venv/bin/activate.fish
@@ -0,0 +1,69 @@
+# This file must be used with "source <venv>/bin/activate.fish" *from fish*
+# (https://fishshell.com/); you cannot run it directly.
+
+function deactivate -d "Exit virtual environment and return to normal shell environment"
+    # reset old environment variables
+    if test -n "$_OLD_VIRTUAL_PATH"
+        set -gx PATH $_OLD_VIRTUAL_PATH
+        set -e _OLD_VIRTUAL_PATH
+    end
+    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
+        set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
+        set -e _OLD_VIRTUAL_PYTHONHOME
+    end
+
+    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
+        set -e _OLD_FISH_PROMPT_OVERRIDE
+        # prevents error when using nested fish instances (Issue #93858)
+        if functions -q _old_fish_prompt
+            functions -e fish_prompt
+            functions -c _old_fish_prompt fish_prompt
+            functions -e _old_fish_prompt
+        end
+    end
+
+    set -e VIRTUAL_ENV
+    set -e VIRTUAL_ENV_PROMPT
+    if test "$argv[1]" != "nondestructive"
+        # Self-destruct!
+        functions -e deactivate
+    end
+end
+
+# Unset irrelevant variables.
+deactivate nondestructive
+
+set -gx VIRTUAL_ENV "/home/core/dev/website/venv"
+
+set -gx _OLD_VIRTUAL_PATH $PATH
+set -gx PATH "$VIRTUAL_ENV/bin" $PATH
+
+# Unset PYTHONHOME if set.
+if set -q PYTHONHOME
+    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
+    set -e PYTHONHOME
+end
+
+if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
+    # fish uses a function instead of an env var to generate the prompt.
+
+    # Save the current fish_prompt function as the function _old_fish_prompt.
+    functions -c fish_prompt _old_fish_prompt
+
+    # With the original prompt function renamed, we can override with our own.
+    function fish_prompt
+        # Save the return status of the last command.
+        set -l old_status $status
+
+        # Output the venv prompt; color taken from the blue of the Python logo.
+        printf "%s%s%s" (set_color 4B8BBE) "(venv) " (set_color normal)
+
+        # Restore the return status of the previous command.
+        echo "exit $old_status" | .
+        # Output the original/"old" prompt.
+ _old_fish_prompt + end + + set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" + set -gx VIRTUAL_ENV_PROMPT "(venv) " +end diff --git a/venv/bin/distro b/venv/bin/distro new file mode 100644 index 0000000..b625a09 --- /dev/null +++ b/venv/bin/distro @@ -0,0 +1,8 @@ +#!/home/core/dev/website/venv/bin/python3.11 +# -*- coding: utf-8 -*- +import re +import sys +from distro.distro import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/dotenv b/venv/bin/dotenv new file mode 100644 index 0000000..471e572 --- /dev/null +++ b/venv/bin/dotenv @@ -0,0 +1,8 @@ +#!/home/core/dev/website/venv/bin/python3.11 +# -*- coding: utf-8 -*- +import re +import sys +from dotenv.__main__ import cli +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(cli()) diff --git a/venv/bin/email_validator b/venv/bin/email_validator new file mode 100644 index 0000000..0da256d --- /dev/null +++ b/venv/bin/email_validator @@ -0,0 +1,8 @@ +#!/home/core/dev/website/venv/bin/python3.11 +# -*- coding: utf-8 -*- +import re +import sys +from email_validator.__main__ import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/flask b/venv/bin/flask new file mode 100644 index 0000000..8622b32 --- /dev/null +++ b/venv/bin/flask @@ -0,0 +1,8 @@ +#!/home/core/dev/website/venv/bin/python3.11 +# -*- coding: utf-8 -*- +import re +import sys +from flask.cli import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/gunicorn b/venv/bin/gunicorn new file mode 100644 index 0000000..ebe682a --- /dev/null +++ b/venv/bin/gunicorn @@ -0,0 +1,8 @@ +#!/home/core/dev/website/venv/bin/python3.11 +# -*- coding: utf-8 -*- +import re +import sys +from gunicorn.app.wsgiapp import run +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(run()) diff --git a/venv/bin/httpx b/venv/bin/httpx new file mode 100644 index 0000000..7605020 --- /dev/null +++ b/venv/bin/httpx @@ -0,0 +1,8 @@ +#!/home/core/dev/website/venv/bin/python3.11 +# -*- coding: utf-8 -*- +import re +import sys +from httpx import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/normalizer b/venv/bin/normalizer new file mode 100644 index 0000000..586ea1e --- /dev/null +++ b/venv/bin/normalizer @@ -0,0 +1,8 @@ +#!/home/core/dev/website/venv/bin/python3.11 +# -*- coding: utf-8 -*- +import re +import sys +from charset_normalizer import cli +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(cli.cli_detect()) diff --git a/venv/bin/openai b/venv/bin/openai new file mode 100644 index 0000000..00efed6 --- /dev/null +++ b/venv/bin/openai @@ -0,0 +1,8 @@ +#!/home/core/dev/website/venv/bin/python3.11 +# -*- coding: utf-8 -*- +import re +import sys +from openai.cli import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/pip b/venv/bin/pip new file mode 100644 index 0000000..259be76 --- /dev/null +++ b/venv/bin/pip @@ -0,0 +1,8 @@ +#!/home/core/dev/website/venv/bin/python3.11 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = 
re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
diff --git a/venv/bin/pip3 b/venv/bin/pip3
new file mode 100644
index 0000000..259be76
--- /dev/null
+++ b/venv/bin/pip3
@@ -0,0 +1,8 @@
+#!/home/core/dev/website/venv/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from pip._internal.cli.main import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
diff --git a/venv/bin/pip3.11 b/venv/bin/pip3.11
new file mode 100644
index 0000000..259be76
--- /dev/null
+++ b/venv/bin/pip3.11
@@ -0,0 +1,8 @@
+#!/home/core/dev/website/venv/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from pip._internal.cli.main import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
diff --git a/venv/bin/py.test b/venv/bin/py.test
new file mode 100644
index 0000000..863f8ac
--- /dev/null
+++ b/venv/bin/py.test
@@ -0,0 +1,8 @@
+#!/home/core/dev/website/venv/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from pytest import console_main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(console_main())
diff --git a/venv/bin/pytest b/venv/bin/pytest
new file mode 100644
index 0000000..863f8ac
--- /dev/null
+++ b/venv/bin/pytest
@@ -0,0 +1,8 @@
+#!/home/core/dev/website/venv/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from pytest import console_main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(console_main())
diff --git a/venv/bin/python b/venv/bin/python
new file mode 100644
index 0000000..948489e
Binary files /dev/null and b/venv/bin/python differ
diff --git a/venv/bin/python3 b/venv/bin/python3
new file mode 100644
index 0000000..948489e
Binary files /dev/null and b/venv/bin/python3 differ
diff --git a/venv/bin/python3.11 b/venv/bin/python3.11
new file mode 100644
index 0000000..948489e
Binary files /dev/null and b/venv/bin/python3.11 differ
diff --git a/venv/bin/tqdm b/venv/bin/tqdm
new file mode 100644
index 0000000..7a137ea
--- /dev/null
+++ b/venv/bin/tqdm
@@ -0,0 +1,8 @@
+#!/home/core/dev/website/venv/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from tqdm.cli import main
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())
diff --git a/venv/include/site/python3.11/greenlet/greenlet.h b/venv/include/site/python3.11/greenlet/greenlet.h
new file mode 100644
index 0000000..d02a16e
--- /dev/null
+++ b/venv/include/site/python3.11/greenlet/greenlet.h
@@ -0,0 +1,164 @@
+/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
+
+/* Greenlet object interface */
+
+#ifndef Py_GREENLETOBJECT_H
+#define Py_GREENLETOBJECT_H
+
+
+#include <Python.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* This is deprecated and undocumented. It does not change.
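+   (The macro below stays at "1.0.0" regardless of the installed greenlet
+   release.)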
*/ +#define GREENLET_VERSION "1.0.0" + +#ifndef GREENLET_MODULE +#define implementation_ptr_t void* +#endif + +typedef struct _greenlet { + PyObject_HEAD + PyObject* weakreflist; + PyObject* dict; + implementation_ptr_t pimpl; +} PyGreenlet; + +#define PyGreenlet_Check(op) (op && PyObject_TypeCheck(op, &PyGreenlet_Type)) + + +/* C API functions */ + +/* Total number of symbols that are exported */ +#define PyGreenlet_API_pointers 12 + +#define PyGreenlet_Type_NUM 0 +#define PyExc_GreenletError_NUM 1 +#define PyExc_GreenletExit_NUM 2 + +#define PyGreenlet_New_NUM 3 +#define PyGreenlet_GetCurrent_NUM 4 +#define PyGreenlet_Throw_NUM 5 +#define PyGreenlet_Switch_NUM 6 +#define PyGreenlet_SetParent_NUM 7 + +#define PyGreenlet_MAIN_NUM 8 +#define PyGreenlet_STARTED_NUM 9 +#define PyGreenlet_ACTIVE_NUM 10 +#define PyGreenlet_GET_PARENT_NUM 11 + +#ifndef GREENLET_MODULE +/* This section is used by modules that uses the greenlet C API */ +static void** _PyGreenlet_API = NULL; + +# define PyGreenlet_Type \ + (*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM]) + +# define PyExc_GreenletError \ + ((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM]) + +# define PyExc_GreenletExit \ + ((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM]) + +/* + * PyGreenlet_New(PyObject *args) + * + * greenlet.greenlet(run, parent=None) + */ +# define PyGreenlet_New \ + (*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \ + _PyGreenlet_API[PyGreenlet_New_NUM]) + +/* + * PyGreenlet_GetCurrent(void) + * + * greenlet.getcurrent() + */ +# define PyGreenlet_GetCurrent \ + (*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM]) + +/* + * PyGreenlet_Throw( + * PyGreenlet *greenlet, + * PyObject *typ, + * PyObject *val, + * PyObject *tb) + * + * g.throw(...) + */ +# define PyGreenlet_Throw \ + (*(PyObject * (*)(PyGreenlet * self, \ + PyObject * typ, \ + PyObject * val, \ + PyObject * tb)) \ + _PyGreenlet_API[PyGreenlet_Throw_NUM]) + +/* + * PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args) + * + * g.switch(*args, **kwargs) + */ +# define PyGreenlet_Switch \ + (*(PyObject * \ + (*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \ + _PyGreenlet_API[PyGreenlet_Switch_NUM]) + +/* + * PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent) + * + * g.parent = new_parent + */ +# define PyGreenlet_SetParent \ + (*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \ + _PyGreenlet_API[PyGreenlet_SetParent_NUM]) + +/* + * PyGreenlet_GetParent(PyObject* greenlet) + * + * return greenlet.parent; + * + * This could return NULL even if there is no exception active. + * If it does not return NULL, you are responsible for decrementing the + * reference count. + */ +# define PyGreenlet_GetParent \ + (*(PyGreenlet* (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_GET_PARENT_NUM]) + +/* + * deprecated, undocumented alias. + */ +# define PyGreenlet_GET_PARENT PyGreenlet_GetParent + +# define PyGreenlet_MAIN \ + (*(int (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_MAIN_NUM]) + +# define PyGreenlet_STARTED \ + (*(int (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_STARTED_NUM]) + +# define PyGreenlet_ACTIVE \ + (*(int (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_ACTIVE_NUM]) + + + + +/* Macro that imports greenlet and initializes C API */ +/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we + keep the older definition to be sure older code that might have a copy of + the header still works. 
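+   Typical usage, as a sketch: call PyGreenlet_Import() once from an extension
+   module's init function, before any other PyGreenlet_* API is used.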
*/ +# define PyGreenlet_Import() \ + { \ + _PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \ + } + +#endif /* GREENLET_MODULE */ + +#ifdef __cplusplus +} +#endif +#endif /* !Py_GREENLETOBJECT_H */ diff --git a/venv/lib/python3.11/site-packages/Flask-2.2.5.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/Flask-2.2.5.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.11/site-packages/Flask-2.2.5.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.11/site-packages/Flask-2.2.5.dist-info/LICENSE.rst b/venv/lib/python3.11/site-packages/Flask-2.2.5.dist-info/LICENSE.rst new file mode 100644 index 0000000..9d227a0 --- /dev/null +++ b/venv/lib/python3.11/site-packages/Flask-2.2.5.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2010 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.11/site-packages/Flask-2.2.5.dist-info/METADATA b/venv/lib/python3.11/site-packages/Flask-2.2.5.dist-info/METADATA new file mode 100644 index 0000000..2526be4 --- /dev/null +++ b/venv/lib/python3.11/site-packages/Flask-2.2.5.dist-info/METADATA @@ -0,0 +1,123 @@ +Metadata-Version: 2.1 +Name: Flask +Version: 2.2.5 +Summary: A simple framework for building complex web applications. 
+Home-page: https://palletsprojects.com/p/flask +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://flask.palletsprojects.com/ +Project-URL: Changes, https://flask.palletsprojects.com/changes/ +Project-URL: Source Code, https://github.com/pallets/flask/ +Project-URL: Issue Tracker, https://github.com/pallets/flask/issues/ +Project-URL: Twitter, https://twitter.com/PalletsTeam +Project-URL: Chat, https://discord.gg/pallets +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Framework :: Flask +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application +Classifier: Topic :: Software Development :: Libraries :: Application Frameworks +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE.rst +Requires-Dist: Werkzeug (>=2.2.2) +Requires-Dist: Jinja2 (>=3.0) +Requires-Dist: itsdangerous (>=2.0) +Requires-Dist: click (>=8.0) +Requires-Dist: importlib-metadata (>=3.6.0) ; python_version < "3.10" +Provides-Extra: async +Requires-Dist: asgiref (>=3.2) ; extra == 'async' +Provides-Extra: dotenv +Requires-Dist: python-dotenv ; extra == 'dotenv' + +Flask +===== + +Flask is a lightweight `WSGI`_ web application framework. It is designed +to make getting started quick and easy, with the ability to scale up to +complex applications. It began as a simple wrapper around `Werkzeug`_ +and `Jinja`_ and has become one of the most popular Python web +application frameworks. + +Flask offers suggestions, but doesn't enforce any dependencies or +project layout. It is up to the developer to choose the tools and +libraries they want to use. There are many extensions provided by the +community that make adding new functionality easy. + +.. _WSGI: https://wsgi.readthedocs.io/ +.. _Werkzeug: https://werkzeug.palletsprojects.com/ +.. _Jinja: https://jinja.palletsprojects.com/ + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + $ pip install -U Flask + +.. _pip: https://pip.pypa.io/en/stable/getting-started/ + + +A Simple Example +---------------- + +.. code-block:: python + + # save this as app.py + from flask import Flask + + app = Flask(__name__) + + @app.route("/") + def hello(): + return "Hello, World!" + +.. code-block:: text + + $ flask run + * Running on http://127.0.0.1:5000/ (Press CTRL+C to quit) + + +Contributing +------------ + +For guidance on setting up a development environment and how to make a +contribution to Flask, see the `contributing guidelines`_. + +.. _contributing guidelines: https://github.com/pallets/flask/blob/main/CONTRIBUTING.rst + + +Donate +------ + +The Pallets organization develops and supports Flask and the libraries +it uses. In order to grow the community of contributors and users, and +allow the maintainers to devote more time to the projects, `please +donate today`_. + +.. 
_please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://flask.palletsprojects.com/ +- Changes: https://flask.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/Flask/ +- Source Code: https://github.com/pallets/flask/ +- Issue Tracker: https://github.com/pallets/flask/issues/ +- Website: https://palletsprojects.com/p/flask/ +- Twitter: https://twitter.com/PalletsTeam +- Chat: https://discord.gg/pallets diff --git a/venv/lib/python3.11/site-packages/Flask-2.2.5.dist-info/RECORD b/venv/lib/python3.11/site-packages/Flask-2.2.5.dist-info/RECORD new file mode 100644 index 0000000..ac93719 --- /dev/null +++ b/venv/lib/python3.11/site-packages/Flask-2.2.5.dist-info/RECORD @@ -0,0 +1,54 @@ +../../../bin/flask,sha256=6fWTGy7Nkz970qr1oQpRe3U74s3GtrbIej78CF8-MSI,234 +Flask-2.2.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Flask-2.2.5.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 +Flask-2.2.5.dist-info/METADATA,sha256=rZTjr5v4M7HB-zC-w2Y0ZU96OYSGBb-Hm15jlLJhs3g,3889 +Flask-2.2.5.dist-info/RECORD,, +Flask-2.2.5.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +Flask-2.2.5.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 +Flask-2.2.5.dist-info/entry_points.txt,sha256=s3MqQpduU25y4dq3ftBYD6bMVdVnbMpZP-sUNw0zw0k,41 +Flask-2.2.5.dist-info/top_level.txt,sha256=dvi65F6AeGWVU0TBpYiC04yM60-FX1gJFkK31IKQr5c,6 +flask/__init__.py,sha256=GJgAILDWhW_DQljuoJ4pk9zBUy70zPPu-VZ6kLyiVI4,2890 +flask/__main__.py,sha256=bYt9eEaoRQWdejEHFD8REx9jxVEdZptECFsV7F49Ink,30 +flask/__pycache__/__init__.cpython-311.pyc,, +flask/__pycache__/__main__.cpython-311.pyc,, +flask/__pycache__/app.cpython-311.pyc,, +flask/__pycache__/blueprints.cpython-311.pyc,, +flask/__pycache__/cli.cpython-311.pyc,, +flask/__pycache__/config.cpython-311.pyc,, +flask/__pycache__/ctx.cpython-311.pyc,, +flask/__pycache__/debughelpers.cpython-311.pyc,, +flask/__pycache__/globals.cpython-311.pyc,, +flask/__pycache__/helpers.cpython-311.pyc,, +flask/__pycache__/logging.cpython-311.pyc,, +flask/__pycache__/scaffold.cpython-311.pyc,, +flask/__pycache__/sessions.cpython-311.pyc,, +flask/__pycache__/signals.cpython-311.pyc,, +flask/__pycache__/templating.cpython-311.pyc,, +flask/__pycache__/testing.cpython-311.pyc,, +flask/__pycache__/typing.cpython-311.pyc,, +flask/__pycache__/views.cpython-311.pyc,, +flask/__pycache__/wrappers.cpython-311.pyc,, +flask/app.py,sha256=ue4tEeDnr3m-eSEwz7OJ1_wafSYl3fl6eo-NLFgNNJQ,99141 +flask/blueprints.py,sha256=fenhKP_Sh5eU6qtWeHacg1GVeun4pQzK2vq8sNDd1hY,27266 +flask/cli.py,sha256=pLmnWObe_G4_ZAFQdh7kgwqPMxRXm4oUhaUSBpJMeq4,33532 +flask/config.py,sha256=Ubo_juzSYsAKqD2vD3vm6mjsPo3EOJDdSEzYq8lKTJI,12585 +flask/ctx.py,sha256=bGEQQuF2_cHqZ3ZNMeMeEG8HOLJkDlL88u2BBxCrRao,14829 +flask/debughelpers.py,sha256=_RvAL3TW5lqMJeCVWtTU6rSDJC7jnRaBL6OEkVmooyU,5511 +flask/globals.py,sha256=EX0XdX73BTWdVF0UHDSNet2ER3kI6sKveo3_o5IOs98,3187 +flask/helpers.py,sha256=XTHRgLlyxeEzR988q63-4OY8RswTscR-5exFxK10CLU,25280 +flask/json/__init__.py,sha256=TOwldHT3_kFaXHlORKi9yCWt7dbPNB0ovdHHQWlSRzY,11175 +flask/json/__pycache__/__init__.cpython-311.pyc,, +flask/json/__pycache__/provider.cpython-311.pyc,, +flask/json/__pycache__/tag.cpython-311.pyc,, +flask/json/provider.py,sha256=jXCNypf11PN4ngQjEt6LnSdCWQ1yHIAkNLHlXQlCB-A,10674 +flask/json/tag.py,sha256=fys3HBLssWHuMAIJuTcf2K0bCtosePBKXIWASZEEjnU,8857 +flask/logging.py,sha256=WYng0bLTRS_CJrocGcCLJpibHf1lygHE_pg-KoUIQ4w,2293 
+flask/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +flask/scaffold.py,sha256=EKx-Tr5BXLzeKKvq3ZAi_2oUQVZuC4OJSJTocyDXsSo,35958 +flask/sessions.py,sha256=adWCRnJYETJcjjhlcvUgZR5S0DMqKQctS0nzkY9g9Us,15927 +flask/signals.py,sha256=H7QwDciK-dtBxinjKpexpglP0E6k0MJILiFWTItfmqU,2136 +flask/templating.py,sha256=1P4OzvSnA2fsJTYgQT3G4owVKsuOz8XddCiR6jMHGJ0,7419 +flask/testing.py,sha256=JtHRQY7mIH39SM4S51svAr8e7Xk87dqMb30Z6Dyv9TA,10706 +flask/typing.py,sha256=KgxegTF9v9WvuongeF8LooIvpZPauzGrq9ZXf3gBlYc,2969 +flask/views.py,sha256=LulttWL4owVFlgwrJi8GCNM4inC3xbs2IBlY31bdCS4,6765 +flask/wrappers.py,sha256=el3tn1LgSUV0eNGgYMjKICT5I7qGJgbpIhvci4nrwQ8,5702 diff --git a/venv/lib/python3.11/site-packages/Flask-2.2.5.dist-info/REQUESTED b/venv/lib/python3.11/site-packages/Flask-2.2.5.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.11/site-packages/Flask-2.2.5.dist-info/WHEEL b/venv/lib/python3.11/site-packages/Flask-2.2.5.dist-info/WHEEL new file mode 100644 index 0000000..1f37c02 --- /dev/null +++ b/venv/lib/python3.11/site-packages/Flask-2.2.5.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.40.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.11/site-packages/Flask-2.2.5.dist-info/entry_points.txt b/venv/lib/python3.11/site-packages/Flask-2.2.5.dist-info/entry_points.txt new file mode 100644 index 0000000..137232d --- /dev/null +++ b/venv/lib/python3.11/site-packages/Flask-2.2.5.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +flask = flask.cli:main diff --git a/venv/lib/python3.11/site-packages/Flask-2.2.5.dist-info/top_level.txt b/venv/lib/python3.11/site-packages/Flask-2.2.5.dist-info/top_level.txt new file mode 100644 index 0000000..7e10602 --- /dev/null +++ b/venv/lib/python3.11/site-packages/Flask-2.2.5.dist-info/top_level.txt @@ -0,0 +1 @@ +flask diff --git a/venv/lib/python3.11/site-packages/Flask_Cors-4.0.0.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/Flask_Cors-4.0.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.11/site-packages/Flask_Cors-4.0.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.11/site-packages/Flask_Cors-4.0.0.dist-info/LICENSE b/venv/lib/python3.11/site-packages/Flask_Cors-4.0.0.dist-info/LICENSE new file mode 100644 index 0000000..46d932f --- /dev/null +++ b/venv/lib/python3.11/site-packages/Flask_Cors-4.0.0.dist-info/LICENSE @@ -0,0 +1,7 @@ +Copyright (C) 2016 Cory Dolphin, Olin College + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/venv/lib/python3.11/site-packages/Flask_Cors-4.0.0.dist-info/METADATA b/venv/lib/python3.11/site-packages/Flask_Cors-4.0.0.dist-info/METADATA new file mode 100644 index 0000000..bd0c55a --- /dev/null +++ b/venv/lib/python3.11/site-packages/Flask_Cors-4.0.0.dist-info/METADATA @@ -0,0 +1,147 @@ +Metadata-Version: 2.1 +Name: Flask-Cors +Version: 4.0.0 +Summary: A Flask extension adding a decorator for CORS support +Home-page: https://github.com/corydolphin/flask-cors +Author: Cory Dolphin +Author-email: corydolphin@gmail.com +License: MIT +Platform: any +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules +License-File: LICENSE +Requires-Dist: Flask (>=0.9) + +Flask-CORS +========== + +|Build Status| |Latest Version| |Supported Python versions| +|License| + +A Flask extension for handling Cross Origin Resource Sharing (CORS), making cross-origin AJAX possible. + +This package has a simple philosophy: when you want to enable CORS, you wish to enable it for all use cases on a domain. +This means no mucking around with different allowed headers, methods, etc. + +By default, submission of cookies across domains is disabled due to the security implications. +Please see the documentation for how to enable credential'ed requests, and please make sure you add some sort of `CSRF `__ protection before doing so! + +Installation +------------ + +Install the extension with using pip, or easy\_install. + +.. code:: bash + + $ pip install -U flask-cors + +Usage +----- + +This package exposes a Flask extension which by default enables CORS support on all routes, for all origins and methods. +It allows parameterization of all CORS headers on a per-resource level. +The package also contains a decorator, for those who prefer this approach. + +Simple Usage +~~~~~~~~~~~~ + +In the simplest case, initialize the Flask-Cors extension with default arguments in order to allow CORS for all domains on all routes. +See the full list of options in the `documentation `__. + +.. code:: python + + + from flask import Flask + from flask_cors import CORS + + app = Flask(__name__) + CORS(app) + + @app.route("/") + def helloWorld(): + return "Hello, cross-origin-world!" + +Resource specific CORS +^^^^^^^^^^^^^^^^^^^^^^ + +Alternatively, you can specify CORS options on a resource and origin level of granularity by passing a dictionary as the `resources` option, mapping paths to a set of options. +See the full list of options in the `documentation `__. + +.. 
code:: python + + app = Flask(__name__) + cors = CORS(app, resources={r"/api/*": {"origins": "*"}}) + + @app.route("/api/v1/users") + def list_users(): + return "user example" + +Route specific CORS via decorator +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +This extension also exposes a simple decorator to decorate flask routes with. +Simply add ``@cross_origin()`` below a call to Flask's ``@app.route(..)`` to allow CORS on a given route. +See the full list of options in the `decorator documentation `__. + +.. code:: python + + @app.route("/") + @cross_origin() + def helloWorld(): + return "Hello, cross-origin-world!" + +Documentation +------------- + +For a full list of options, please see the full `documentation `__ + +Troubleshooting +--------------- + +If things aren't working as you expect, enable logging to help understand what is going on under the hood, and why. + +.. code:: python + + logging.getLogger('flask_cors').level = logging.DEBUG + + +Tests +----- + +A simple set of tests is included in ``test/``. +To run, install nose, and simply invoke ``nosetests`` or ``python setup.py test`` to exercise the tests. + +If nosetests does not work for you, due to it no longer working with newer python versions. +You can use pytest to run the tests instead. + +Contributing +------------ + +Questions, comments or improvements? +Please create an issue on `Github `__, tweet at `@corydolphin `__ or send me an email. +I do my best to include every contribution proposed in any way that I can. + +Credits +------- + +This Flask extension is based upon the `Decorator for the HTTP Access Control `__ written by Armin Ronacher. + +.. |Build Status| image:: https://api.travis-ci.org/corydolphin/flask-cors.svg?branch=master + :target: https://travis-ci.org/corydolphin/flask-cors +.. |Latest Version| image:: https://img.shields.io/pypi/v/Flask-Cors.svg + :target: https://pypi.python.org/pypi/Flask-Cors/ +.. |Supported Python versions| image:: https://img.shields.io/pypi/pyversions/Flask-Cors.svg + :target: https://img.shields.io/pypi/pyversions/Flask-Cors.svg +.. 
|License| image:: http://img.shields.io/:license-mit-blue.svg + :target: https://pypi.python.org/pypi/Flask-Cors/ diff --git a/venv/lib/python3.11/site-packages/Flask_Cors-4.0.0.dist-info/RECORD b/venv/lib/python3.11/site-packages/Flask_Cors-4.0.0.dist-info/RECORD new file mode 100644 index 0000000..9c9513b --- /dev/null +++ b/venv/lib/python3.11/site-packages/Flask_Cors-4.0.0.dist-info/RECORD @@ -0,0 +1,17 @@ +Flask_Cors-4.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Flask_Cors-4.0.0.dist-info/LICENSE,sha256=bhob3FSDTB4HQMvOXV9vLK4chG_Sp_SCsRZJWU-vvV0,1069 +Flask_Cors-4.0.0.dist-info/METADATA,sha256=iien2vLs6EIqceJgaNEJ6FPinwfjzFWSNl7XOkuyc10,5419 +Flask_Cors-4.0.0.dist-info/RECORD,, +Flask_Cors-4.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +Flask_Cors-4.0.0.dist-info/WHEEL,sha256=a-zpFRIJzOq5QfuhBzbhiA1eHTzNCJn8OdRvhdNX0Rk,110 +Flask_Cors-4.0.0.dist-info/top_level.txt,sha256=aWye_0QNZPp_QtPF4ZluLHqnyVLT9CPJsfiGhwqkWuo,11 +flask_cors/__init__.py,sha256=wZDCvPTHspA2g1VV7KyKN7R-uCdBnirTlsCzgPDcQtI,792 +flask_cors/__pycache__/__init__.cpython-311.pyc,, +flask_cors/__pycache__/core.cpython-311.pyc,, +flask_cors/__pycache__/decorator.cpython-311.pyc,, +flask_cors/__pycache__/extension.cpython-311.pyc,, +flask_cors/__pycache__/version.cpython-311.pyc,, +flask_cors/core.py,sha256=e1u_o5SOcS_gMWGjcQrkyk91uPICnzZ3AXZvy5jQ_FE,14063 +flask_cors/decorator.py,sha256=BeJsyX1wYhVKWN04FAhb6z8YqffiRr7wKqwzHPap4bw,5009 +flask_cors/extension.py,sha256=nP4Zq_BhgDVWwPdIl_f-uucNxD38pXUo-dkL-voXc58,7832 +flask_cors/version.py,sha256=61rJjfThnbRdElpSP2tm31hPmFnHJmcwoPhtqA0Bi_Q,22 diff --git a/venv/lib/python3.11/site-packages/Flask_Cors-4.0.0.dist-info/REQUESTED b/venv/lib/python3.11/site-packages/Flask_Cors-4.0.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.11/site-packages/Flask_Cors-4.0.0.dist-info/WHEEL b/venv/lib/python3.11/site-packages/Flask_Cors-4.0.0.dist-info/WHEEL new file mode 100644 index 0000000..f771c29 --- /dev/null +++ b/venv/lib/python3.11/site-packages/Flask_Cors-4.0.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.40.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/lib/python3.11/site-packages/Flask_Cors-4.0.0.dist-info/top_level.txt b/venv/lib/python3.11/site-packages/Flask_Cors-4.0.0.dist-info/top_level.txt new file mode 100644 index 0000000..27af988 --- /dev/null +++ b/venv/lib/python3.11/site-packages/Flask_Cors-4.0.0.dist-info/top_level.txt @@ -0,0 +1 @@ +flask_cors diff --git a/venv/lib/python3.11/site-packages/Flask_Login-0.6.2.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/Flask_Login-0.6.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.11/site-packages/Flask_Login-0.6.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.11/site-packages/Flask_Login-0.6.2.dist-info/LICENSE b/venv/lib/python3.11/site-packages/Flask_Login-0.6.2.dist-info/LICENSE new file mode 100644 index 0000000..0446381 --- /dev/null +++ b/venv/lib/python3.11/site-packages/Flask_Login-0.6.2.dist-info/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2011 Matthew Frazier + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, 
and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/venv/lib/python3.11/site-packages/Flask_Login-0.6.2.dist-info/METADATA b/venv/lib/python3.11/site-packages/Flask_Login-0.6.2.dist-info/METADATA new file mode 100644 index 0000000..ad35eb4 --- /dev/null +++ b/venv/lib/python3.11/site-packages/Flask_Login-0.6.2.dist-info/METADATA @@ -0,0 +1,183 @@ +Metadata-Version: 2.1 +Name: Flask-Login +Version: 0.6.2 +Summary: User authentication and session management for Flask. +Home-page: https://github.com/maxcountryman/flask-login +Author: Matthew Frazier +Author-email: leafstormrush@gmail.com +Maintainer: Max Countryman +License: MIT +Project-URL: Documentation, https://flask-login.readthedocs.io/ +Project-URL: Changes, https://github.com/maxcountryman/flask-login/blob/main/CHANGES.md +Project-URL: Source Code, https://github.com/maxcountryman/flask-login +Project-URL: Issue Tracker, https://github.com/maxcountryman/flask-login/issues +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Web Environment +Classifier: Framework :: Flask +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: Flask (>=1.0.4) +Requires-Dist: Werkzeug (>=1.0.1) + +# Flask-Login + +![Tests](https://github.com/maxcountryman/flask-login/workflows/Tests/badge.svg) +[![coverage](https://coveralls.io/repos/maxcountryman/flask-login/badge.svg?branch=main&service=github)](https://coveralls.io/github/maxcountryman/flask-login?branch=main) +[![Software License](https://img.shields.io/badge/license-MIT-brightgreen.svg)](LICENSE) + +Flask-Login provides user session management for Flask. It handles the common +tasks of logging in, logging out, and remembering your users' sessions over +extended periods of time. + +Flask-Login is not bound to any particular database system or permissions +model. The only requirement is that your user objects implement a few methods, +and that you provide a callback to the extension capable of loading users from +their ID. + +## Installation + +Install the extension with pip: + +```sh +$ pip install flask-login +``` + +## Usage + +Once installed, the Flask-Login is easy to use. Let's walk through setting up +a basic application. Also please note that this is a very basic guide: we will +be taking shortcuts here that you should never take in a real application. + +To begin we'll set up a Flask app: + +```python +import flask + +app = flask.Flask(__name__) +app.secret_key = 'super secret string' # Change this! 
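+# (Illustrative, not part of the upstream README:) a strong random key can be
+# generated with the standard library, e.g.
+#   python -c "import secrets; print(secrets.token_hex(32))"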
+``` + +Flask-Login works via a login manager. To kick things off, we'll set up the +login manager by instantiating it and telling it about our Flask app: + +```python +import flask_login + +login_manager = flask_login.LoginManager() + +login_manager.init_app(app) +``` + +To keep things simple we're going to use a dictionary to represent a database +of users. In a real application, this would be an actual persistence layer. +However it's important to point out this is a feature of Flask-Login: it +doesn't care how your data is stored so long as you tell it how to retrieve it! + +```python +# Our mock database. +users = {'foo@bar.tld': {'password': 'secret'}} +``` + +We also need to tell Flask-Login how to load a user from a Flask request and +from its session. To do this we need to define our user object, a +`user_loader` callback, and a `request_loader` callback. + +```python +class User(flask_login.UserMixin): + pass + + +@login_manager.user_loader +def user_loader(email): + if email not in users: + return + + user = User() + user.id = email + return user + + +@login_manager.request_loader +def request_loader(request): + email = request.form.get('email') + if email not in users: + return + + user = User() + user.id = email + return user +``` + +Now we're ready to define our views. We can start with a login view, which will +populate the session with authentication bits. After that we can define a view +that requires authentication. + +```python +@app.route('/login', methods=['GET', 'POST']) +def login(): + if flask.request.method == 'GET': + return ''' +
+               <form action='login' method='POST'>
+                <input type='text' name='email' id='email' placeholder='email'/>
+                <input type='password' name='password' id='password' placeholder='password'/>
+                <input type='submit' name='submit'/>
+               </form>
+ ''' + + email = flask.request.form['email'] + if email in users and flask.request.form['password'] == users[email]['password']: + user = User() + user.id = email + flask_login.login_user(user) + return flask.redirect(flask.url_for('protected')) + + return 'Bad login' + + +@app.route('/protected') +@flask_login.login_required +def protected(): + return 'Logged in as: ' + flask_login.current_user.id +``` + +Finally we can define a view to clear the session and log users out: + +```python +@app.route('/logout') +def logout(): + flask_login.logout_user() + return 'Logged out' +``` + +We now have a basic working application that makes use of session-based +authentication. To round things off, we should provide a callback for login +failures: + +```python +@login_manager.unauthorized_handler +def unauthorized_handler(): + return 'Unauthorized', 401 +``` + +Documentation for Flask-Login is available on [ReadTheDocs](https://flask-login.readthedocs.io/en/latest/). +For complete understanding of available configuration, please refer to the [source code](https://github.com/maxcountryman/flask-login). + + +## Contributing + +We welcome contributions! If you would like to hack on Flask-Login, please +follow these steps: + +1. Fork this repository +2. Make your changes +3. Install the dev requirements with `pip install -r requirements/dev.txt` +4. Submit a pull request after running `tox` (ensure it does not error!) + +Please give us adequate time to review your submission. Thanks! diff --git a/venv/lib/python3.11/site-packages/Flask_Login-0.6.2.dist-info/RECORD b/venv/lib/python3.11/site-packages/Flask_Login-0.6.2.dist-info/RECORD new file mode 100644 index 0000000..867bb7f --- /dev/null +++ b/venv/lib/python3.11/site-packages/Flask_Login-0.6.2.dist-info/RECORD @@ -0,0 +1,23 @@ +Flask_Login-0.6.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Flask_Login-0.6.2.dist-info/LICENSE,sha256=ep37nF2iBO0TcPO2LBPimSoS2h2nB_R-FWiX7rQ0Tls,1059 +Flask_Login-0.6.2.dist-info/METADATA,sha256=e64WTcUek0WEtAZum9pOOw7IyDWFfyZ2YQnw5TMdemY,5803 +Flask_Login-0.6.2.dist-info/RECORD,, +Flask_Login-0.6.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +Flask_Login-0.6.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +Flask_Login-0.6.2.dist-info/top_level.txt,sha256=OuXmIpiFnXLvW-iBbW2km7ZIy5EZvwSBnYaOC3Kt7j8,12 +flask_login/__about__.py,sha256=8yOFICsyQVOw1GiWVrJW8spiEtBp97DskGmwiFP5aZQ,389 +flask_login/__init__.py,sha256=wYQiQCikT_Ndp3PhOD-1gRTGCrUPIE-FrjQUrT9aVAg,2681 +flask_login/__pycache__/__about__.cpython-311.pyc,, +flask_login/__pycache__/__init__.cpython-311.pyc,, +flask_login/__pycache__/config.cpython-311.pyc,, +flask_login/__pycache__/login_manager.cpython-311.pyc,, +flask_login/__pycache__/mixins.cpython-311.pyc,, +flask_login/__pycache__/signals.cpython-311.pyc,, +flask_login/__pycache__/test_client.cpython-311.pyc,, +flask_login/__pycache__/utils.cpython-311.pyc,, +flask_login/config.py,sha256=YAocv18La7YGQyNY5aT7rU1GQIZnX6pvchwqx3kA9p8,1813 +flask_login/login_manager.py,sha256=h20F_iv3mqc6rIJ4-V6_XookzOUl8Rcpasua-dCByQY,20073 +flask_login/mixins.py,sha256=gPd7otMRljxw0eUhUMbHsnEBc_jK2cYdxg5KFLuJcoI,1528 +flask_login/signals.py,sha256=xCMoFHKU1RTVt1NY-Gfl0OiVKpiyNt6YJw_PsgkjY3w,2464 +flask_login/test_client.py,sha256=6mrjiBRLGJpgvvFlLypXPTBLiMp0BAN-Ft-uogqC81g,517 +flask_login/utils.py,sha256=RFSbIMPr58gMPRWx3aQ-Co1gZASld-A1W30zwvZDzYM,14020 diff --git a/venv/lib/python3.11/site-packages/Flask_Login-0.6.2.dist-info/REQUESTED 
b/venv/lib/python3.11/site-packages/Flask_Login-0.6.2.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.11/site-packages/Flask_Login-0.6.2.dist-info/WHEEL b/venv/lib/python3.11/site-packages/Flask_Login-0.6.2.dist-info/WHEEL new file mode 100644 index 0000000..becc9a6 --- /dev/null +++ b/venv/lib/python3.11/site-packages/Flask_Login-0.6.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.11/site-packages/Flask_Login-0.6.2.dist-info/top_level.txt b/venv/lib/python3.11/site-packages/Flask_Login-0.6.2.dist-info/top_level.txt new file mode 100644 index 0000000..31514bd --- /dev/null +++ b/venv/lib/python3.11/site-packages/Flask_Login-0.6.2.dist-info/top_level.txt @@ -0,0 +1 @@ +flask_login diff --git a/venv/lib/python3.11/site-packages/MarkupSafe-3.0.2.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/MarkupSafe-3.0.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.11/site-packages/MarkupSafe-3.0.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.11/site-packages/MarkupSafe-3.0.2.dist-info/LICENSE.txt b/venv/lib/python3.11/site-packages/MarkupSafe-3.0.2.dist-info/LICENSE.txt new file mode 100644 index 0000000..9d227a0 --- /dev/null +++ b/venv/lib/python3.11/site-packages/MarkupSafe-3.0.2.dist-info/LICENSE.txt @@ -0,0 +1,28 @@ +Copyright 2010 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.11/site-packages/MarkupSafe-3.0.2.dist-info/METADATA b/venv/lib/python3.11/site-packages/MarkupSafe-3.0.2.dist-info/METADATA new file mode 100644 index 0000000..82261f2 --- /dev/null +++ b/venv/lib/python3.11/site-packages/MarkupSafe-3.0.2.dist-info/METADATA @@ -0,0 +1,92 @@ +Metadata-Version: 2.1 +Name: MarkupSafe +Version: 3.0.2 +Summary: Safely add untrusted strings to HTML/XML markup. 
+Maintainer-email: Pallets <contact@palletsprojects.com>
+License: Copyright 2010 Pallets
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+
+ 1. Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in the
+    documentation and/or other materials provided with the distribution.
+
+ 3. Neither the name of the copyright holder nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+ PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+ TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+Project-URL: Donate, https://palletsprojects.com/donate
+Project-URL: Documentation, https://markupsafe.palletsprojects.com/
+Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/
+Project-URL: Source, https://github.com/pallets/markupsafe/
+Project-URL: Chat, https://discord.gg/pallets
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Text Processing :: Markup :: HTML
+Classifier: Typing :: Typed
+Requires-Python: >=3.9
+Description-Content-Type: text/markdown
+License-File: LICENSE.txt
+
+# MarkupSafe
+
+MarkupSafe implements a text object that escapes characters so it is
+safe to use in HTML and XML. Characters that have special meanings are
+replaced so that they display as the actual characters. This mitigates
+injection attacks, meaning untrusted user input can safely be displayed
+on a page.
+
+
+## Examples
+
+```pycon
+>>> from markupsafe import Markup, escape
+
+>>> # escape replaces special characters and wraps in Markup
+>>> escape("<script>alert(document.cookie);</script>")
+Markup('&lt;script&gt;alert(document.cookie);&lt;/script&gt;')
+
+>>> # wrap in Markup to mark text "safe" and prevent escaping
+>>> Markup("<strong>Hello</strong>")
+Markup('<strong>hello</strong>')
+
+>>> escape(Markup("<strong>Hello</strong>"))
+Markup('<strong>hello</strong>')
+
+>>> # Markup is a str subclass
+>>> # methods and operators escape their arguments
+>>> template = Markup("Hello <em>{name}</em>")
+>>> template.format(name='"World"')
+Markup('Hello <em>&#34;World&#34;</em>')
+```
+
+## Donate
+
+The Pallets organization develops and supports MarkupSafe and other
+popular packages. In order to grow the community of contributors and
+users, and allow the maintainers to devote more time to the projects,
+[please donate today][].
+ +[please donate today]: https://palletsprojects.com/donate diff --git a/venv/lib/python3.11/site-packages/MarkupSafe-3.0.2.dist-info/RECORD b/venv/lib/python3.11/site-packages/MarkupSafe-3.0.2.dist-info/RECORD new file mode 100644 index 0000000..85566c9 --- /dev/null +++ b/venv/lib/python3.11/site-packages/MarkupSafe-3.0.2.dist-info/RECORD @@ -0,0 +1,14 @@ +MarkupSafe-3.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +MarkupSafe-3.0.2.dist-info/LICENSE.txt,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 +MarkupSafe-3.0.2.dist-info/METADATA,sha256=aAwbZhSmXdfFuMM-rEHpeiHRkBOGESyVLJIuwzHP-nw,3975 +MarkupSafe-3.0.2.dist-info/RECORD,, +MarkupSafe-3.0.2.dist-info/WHEEL,sha256=OhaudQk1f3YCu0uQO5v6u-i01XPoX70c0R3T_XY-jOo,151 +MarkupSafe-3.0.2.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11 +markupsafe/__init__.py,sha256=sr-U6_27DfaSrj5jnHYxWN-pvhM27sjlDplMDPZKm7k,13214 +markupsafe/__pycache__/__init__.cpython-311.pyc,, +markupsafe/__pycache__/_native.cpython-311.pyc,, +markupsafe/_native.py,sha256=hSLs8Jmz5aqayuengJJ3kdT5PwNpBWpKrmQSdipndC8,210 +markupsafe/_speedups.c,sha256=O7XulmTo-epI6n2FtMVOrJXl8EAaIwD2iNYmBI5SEoQ,4149 +markupsafe/_speedups.cpython-311-x86_64-linux-gnu.so,sha256=6IDH6Z1ajjClhfGerTB8WLb81uXUpLD8e-e1WzCirVY,43456 +markupsafe/_speedups.pyi,sha256=ENd1bYe7gbBUf2ywyYWOGUpnXOHNJ-cgTNqetlW8h5k,41 +markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.11/site-packages/MarkupSafe-3.0.2.dist-info/WHEEL b/venv/lib/python3.11/site-packages/MarkupSafe-3.0.2.dist-info/WHEEL new file mode 100644 index 0000000..35db8b0 --- /dev/null +++ b/venv/lib/python3.11/site-packages/MarkupSafe-3.0.2.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: setuptools (75.2.0) +Root-Is-Purelib: false +Tag: cp311-cp311-manylinux_2_17_x86_64 +Tag: cp311-cp311-manylinux2014_x86_64 + diff --git a/venv/lib/python3.11/site-packages/MarkupSafe-3.0.2.dist-info/top_level.txt b/venv/lib/python3.11/site-packages/MarkupSafe-3.0.2.dist-info/top_level.txt new file mode 100644 index 0000000..75bf729 --- /dev/null +++ b/venv/lib/python3.11/site-packages/MarkupSafe-3.0.2.dist-info/top_level.txt @@ -0,0 +1 @@ +markupsafe diff --git a/venv/lib/python3.11/site-packages/Werkzeug-2.2.3.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/Werkzeug-2.2.3.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.11/site-packages/Werkzeug-2.2.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.11/site-packages/Werkzeug-2.2.3.dist-info/LICENSE.rst b/venv/lib/python3.11/site-packages/Werkzeug-2.2.3.dist-info/LICENSE.rst new file mode 100644 index 0000000..c37cae4 --- /dev/null +++ b/venv/lib/python3.11/site-packages/Werkzeug-2.2.3.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2007 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. 
Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.11/site-packages/Werkzeug-2.2.3.dist-info/METADATA b/venv/lib/python3.11/site-packages/Werkzeug-2.2.3.dist-info/METADATA new file mode 100644 index 0000000..647bfc8 --- /dev/null +++ b/venv/lib/python3.11/site-packages/Werkzeug-2.2.3.dist-info/METADATA @@ -0,0 +1,126 @@ +Metadata-Version: 2.1 +Name: Werkzeug +Version: 2.2.3 +Summary: The comprehensive WSGI web application library. +Home-page: https://palletsprojects.com/p/werkzeug/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://werkzeug.palletsprojects.com/ +Project-URL: Changes, https://werkzeug.palletsprojects.com/changes/ +Project-URL: Source Code, https://github.com/pallets/werkzeug/ +Project-URL: Issue Tracker, https://github.com/pallets/werkzeug/issues/ +Project-URL: Twitter, https://twitter.com/PalletsTeam +Project-URL: Chat, https://discord.gg/pallets +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware +Classifier: Topic :: Software Development :: Libraries :: Application Frameworks +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE.rst +Requires-Dist: MarkupSafe (>=2.1.1) +Provides-Extra: watchdog +Requires-Dist: watchdog ; extra == 'watchdog' + +Werkzeug +======== + +*werkzeug* German noun: "tool". Etymology: *werk* ("work"), *zeug* ("stuff") + +Werkzeug is a comprehensive `WSGI`_ web application library. It began as +a simple collection of various utilities for WSGI applications and has +become one of the most advanced WSGI utility libraries. + +It includes: + +- An interactive debugger that allows inspecting stack traces and + source code in the browser with an interactive interpreter for any + frame in the stack. +- A full-featured request object with objects to interact with + headers, query args, form data, files, and cookies. +- A response object that can wrap other WSGI applications and handle + streaming data. 
+- A routing system for matching URLs to endpoints and generating URLs + for endpoints, with an extensible system for capturing variables + from URLs. +- HTTP utilities to handle entity tags, cache control, dates, user + agents, cookies, files, and more. +- A threaded WSGI server for use while developing applications + locally. +- A test client for simulating HTTP requests during testing without + requiring running a server. + +Werkzeug doesn't enforce any dependencies. It is up to the developer to +choose a template engine, database adapter, and even how to handle +requests. It can be used to build all sorts of end user applications +such as blogs, wikis, or bulletin boards. + +`Flask`_ wraps Werkzeug, using it to handle the details of WSGI while +providing more structure and patterns for defining powerful +applications. + +.. _WSGI: https://wsgi.readthedocs.io/en/latest/ +.. _Flask: https://www.palletsprojects.com/p/flask/ + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + pip install -U Werkzeug + +.. _pip: https://pip.pypa.io/en/stable/getting-started/ + + +A Simple Example +---------------- + +.. code-block:: python + + from werkzeug.wrappers import Request, Response + + @Request.application + def application(request): + return Response('Hello, World!') + + if __name__ == '__main__': + from werkzeug.serving import run_simple + run_simple('localhost', 4000, application) + + +Donate +------ + +The Pallets organization develops and supports Werkzeug and other +popular packages. In order to grow the community of contributors and +users, and allow the maintainers to devote more time to the projects, +`please donate today`_. + +.. _please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://werkzeug.palletsprojects.com/ +- Changes: https://werkzeug.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/Werkzeug/ +- Source Code: https://github.com/pallets/werkzeug/ +- Issue Tracker: https://github.com/pallets/werkzeug/issues/ +- Website: https://palletsprojects.com/p/werkzeug/ +- Twitter: https://twitter.com/PalletsTeam +- Chat: https://discord.gg/pallets diff --git a/venv/lib/python3.11/site-packages/Werkzeug-2.2.3.dist-info/RECORD b/venv/lib/python3.11/site-packages/Werkzeug-2.2.3.dist-info/RECORD new file mode 100644 index 0000000..c3ceb57 --- /dev/null +++ b/venv/lib/python3.11/site-packages/Werkzeug-2.2.3.dist-info/RECORD @@ -0,0 +1,99 @@ +Werkzeug-2.2.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Werkzeug-2.2.3.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 +Werkzeug-2.2.3.dist-info/METADATA,sha256=TIyameVEp5p52N9E1mTWWabY6g1sB0Dm25vznZQeXPQ,4416 +Werkzeug-2.2.3.dist-info/RECORD,, +Werkzeug-2.2.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +Werkzeug-2.2.3.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 +Werkzeug-2.2.3.dist-info/top_level.txt,sha256=QRyj2VjwJoQkrwjwFIOlB8Xg3r9un0NtqVHQF-15xaw,9 +werkzeug/__init__.py,sha256=Hr0lQweC21HXPVBemSpBJUIzrbq2mn8h70J1h30QcqY,188 +werkzeug/__pycache__/__init__.cpython-311.pyc,, +werkzeug/__pycache__/_internal.cpython-311.pyc,, +werkzeug/__pycache__/_reloader.cpython-311.pyc,, +werkzeug/__pycache__/datastructures.cpython-311.pyc,, +werkzeug/__pycache__/exceptions.cpython-311.pyc,, +werkzeug/__pycache__/formparser.cpython-311.pyc,, +werkzeug/__pycache__/http.cpython-311.pyc,, +werkzeug/__pycache__/local.cpython-311.pyc,, 
+werkzeug/__pycache__/security.cpython-311.pyc,, +werkzeug/__pycache__/serving.cpython-311.pyc,, +werkzeug/__pycache__/test.cpython-311.pyc,, +werkzeug/__pycache__/testapp.cpython-311.pyc,, +werkzeug/__pycache__/urls.cpython-311.pyc,, +werkzeug/__pycache__/user_agent.cpython-311.pyc,, +werkzeug/__pycache__/utils.cpython-311.pyc,, +werkzeug/__pycache__/wsgi.cpython-311.pyc,, +werkzeug/_internal.py,sha256=4lwshe63pFlCo0h2IMcmvhbugA50QXQvfLD5VoY5c4Q,16271 +werkzeug/_reloader.py,sha256=hiP0z4bi6p_8UIJOtq7K0BV2dqCik5yztWLsDXeI_WE,14285 +werkzeug/datastructures.py,sha256=v2WYfs1rb1OuQgXyLripHQFwgodrfTNCd5P5f8n3ueA,97081 +werkzeug/datastructures.pyi,sha256=HRzDLc7A6qnwluhNqn6AT76CsLZIkAbVVqxn0AbfV-s,34506 +werkzeug/debug/__init__.py,sha256=wfJ2OmljsO5C0e0sXJpTUiG6bwGU6uHtFDDDMfJfQJk,18877 +werkzeug/debug/__pycache__/__init__.cpython-311.pyc,, +werkzeug/debug/__pycache__/console.cpython-311.pyc,, +werkzeug/debug/__pycache__/repr.cpython-311.pyc,, +werkzeug/debug/__pycache__/tbtools.cpython-311.pyc,, +werkzeug/debug/console.py,sha256=dechqiCtHfs0AQZWZofUC1S97tCuvwDgT0gdha5KwWM,6208 +werkzeug/debug/repr.py,sha256=vF3TLnYBohYr8V6Gz13PTJspQs42uv3gUJSzSbmHJBo,9472 +werkzeug/debug/shared/ICON_LICENSE.md,sha256=DhA6Y1gUl5Jwfg0NFN9Rj4VWITt8tUx0IvdGf0ux9-s,222 +werkzeug/debug/shared/console.png,sha256=bxax6RXXlvOij_KeqvSNX0ojJf83YbnZ7my-3Gx9w2A,507 +werkzeug/debug/shared/debugger.js,sha256=tg42SZs1SVmYWZ-_Fj5ELK5-FLHnGNQrei0K2By8Bw8,10521 +werkzeug/debug/shared/less.png,sha256=-4-kNRaXJSONVLahrQKUxMwXGm9R4OnZ9SxDGpHlIR4,191 +werkzeug/debug/shared/more.png,sha256=GngN7CioHQoV58rH6ojnkYi8c_qED2Aka5FO5UXrReY,200 +werkzeug/debug/shared/style.css,sha256=-xSxzUEZGw_IqlDR5iZxitNl8LQUjBM-_Y4UAvXVH8g,6078 +werkzeug/debug/tbtools.py,sha256=6iohJovtBSFRAcgX7_aRY4r3e19PLj3FavYB3RM4CmA,13263 +werkzeug/exceptions.py,sha256=8-KOXguQkOLoBUdN-7x_WyHT92TcAmjTWNwG4t8QYIg,26527 +werkzeug/formparser.py,sha256=DBRbbAnzspYUBzgfxPaZC7MjGAK_m5QTvdWoyvrhw4o,16516 +werkzeug/http.py,sha256=NqJjYCt8tKn2XOEKPApq4L3q8zb8YFq3GFOe5gsonI4,42776 +werkzeug/local.py,sha256=v-HEqr4bLpLHl4upCj97MOfUyCjW10Tp6mcNaFRFyew,22288 +werkzeug/middleware/__init__.py,sha256=qfqgdT5npwG9ses3-FXQJf3aB95JYP1zchetH_T3PUw,500 +werkzeug/middleware/__pycache__/__init__.cpython-311.pyc,, +werkzeug/middleware/__pycache__/dispatcher.cpython-311.pyc,, +werkzeug/middleware/__pycache__/http_proxy.cpython-311.pyc,, +werkzeug/middleware/__pycache__/lint.cpython-311.pyc,, +werkzeug/middleware/__pycache__/profiler.cpython-311.pyc,, +werkzeug/middleware/__pycache__/proxy_fix.cpython-311.pyc,, +werkzeug/middleware/__pycache__/shared_data.cpython-311.pyc,, +werkzeug/middleware/dispatcher.py,sha256=Fh_w-KyWnTSYF-Lfv5dimQ7THSS7afPAZMmvc4zF1gg,2580 +werkzeug/middleware/http_proxy.py,sha256=HE8VyhS7CR-E1O6_9b68huv8FLgGGR1DLYqkS3Xcp3Q,7558 +werkzeug/middleware/lint.py,sha256=1w_UVKkAwq5wjjtCcDCDZwhAhWzPSZ0aDyUmbjAEeXw,13952 +werkzeug/middleware/profiler.py,sha256=7pWYDYPC774S0-HYLkG3Uge58PGUMX7tWp_Cor3etvo,4883 +werkzeug/middleware/proxy_fix.py,sha256=l7LC_LDu0Yd4SvUxS5SFigAJMzcIOGm6LNKl9IXJBSU,6974 +werkzeug/middleware/shared_data.py,sha256=fXjrEkuqxUVLG1DLrOdQLc96QQdjftCBZ1oM5oK89h4,9528 +werkzeug/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +werkzeug/routing/__init__.py,sha256=HpvahY7WwkLdV4Cq3Bsc3GrqNon4u6t8-vhbb9E5o00,4819 +werkzeug/routing/__pycache__/__init__.cpython-311.pyc,, +werkzeug/routing/__pycache__/converters.cpython-311.pyc,, +werkzeug/routing/__pycache__/exceptions.cpython-311.pyc,, +werkzeug/routing/__pycache__/map.cpython-311.pyc,, 
+werkzeug/routing/__pycache__/matcher.cpython-311.pyc,, +werkzeug/routing/__pycache__/rules.cpython-311.pyc,, +werkzeug/routing/converters.py,sha256=05bkekg64vLC6mqqK4ddBh589WH9yBsjtW8IJhdUBvw,6968 +werkzeug/routing/exceptions.py,sha256=RklUDL9ajOv2fTcRNj4pb18Bs4Y-GKk4rIeTSfsqkks,4737 +werkzeug/routing/map.py,sha256=XN4ZjzEF1SfMxtdov89SDE-1_U78KVnnobTfnHzqbmE,36757 +werkzeug/routing/matcher.py,sha256=6VvQYCCOjyj1JKUZKuAiVA_U1nXtvvJ70pSbBUdL_1k,7509 +werkzeug/routing/rules.py,sha256=3YsPpI9ZGcNmFiV2Go2Td1DvZ9ZdaMMnvGP1o17aMfc,31836 +werkzeug/sansio/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +werkzeug/sansio/__pycache__/__init__.cpython-311.pyc,, +werkzeug/sansio/__pycache__/http.cpython-311.pyc,, +werkzeug/sansio/__pycache__/multipart.cpython-311.pyc,, +werkzeug/sansio/__pycache__/request.cpython-311.pyc,, +werkzeug/sansio/__pycache__/response.cpython-311.pyc,, +werkzeug/sansio/__pycache__/utils.cpython-311.pyc,, +werkzeug/sansio/http.py,sha256=k3nREBfU-r8fXCfSTKQenys25q9bzUOvdY-OVGrqztA,5107 +werkzeug/sansio/multipart.py,sha256=vMZ85cvLD55clUTcTin2DtBv2GQRGh0_fExklnXKHoI,10055 +werkzeug/sansio/request.py,sha256=SiGcx2cz-l81TlCCrKrT2fePqC64hN8fSg5Ig6J6vRs,20175 +werkzeug/sansio/response.py,sha256=UTl-teQDDjovrZMkjj3ZQsHw-JtiFak5JfKEk1_vBYU,26026 +werkzeug/sansio/utils.py,sha256=EjbqdHdT-JZWgjUQaaWSgBUIRprXUkrsMQQqJlJHpVU,4847 +werkzeug/security.py,sha256=7TVI0L62emBHAh-1RHB_KlwGYcE08pPCyU674Ho4aNE,4653 +werkzeug/serving.py,sha256=XCiHFbMCFCgecKycgajhF4rFsGoemeN0xW1eTQqNt-g,37558 +werkzeug/test.py,sha256=uMahfM81RqEN3d3Sp4SkN36Pi8oZpV6dTgFY0cW1_2c,48126 +werkzeug/testapp.py,sha256=RJhT_2JweNiMKe304N3bF1zaIeMpRx-CIMERdeydfTY,9404 +werkzeug/urls.py,sha256=Q9Si-eVh7yxk3rwkzrwGRm146FXVXgg9lBP3k0HUfVM,36600 +werkzeug/user_agent.py,sha256=WclZhpvgLurMF45hsioSbS75H1Zb4iMQGKN3_yZ2oKo,1420 +werkzeug/utils.py,sha256=BDX5_7OCMVgl-ib84bCEdBG5MVvrxaSlfdg7Cxh4ND0,25174 +werkzeug/wrappers/__init__.py,sha256=kGyK7rOud3qCxll_jFyW15YarJhj1xtdf3ocx9ZheB8,120 +werkzeug/wrappers/__pycache__/__init__.cpython-311.pyc,, +werkzeug/wrappers/__pycache__/request.cpython-311.pyc,, +werkzeug/wrappers/__pycache__/response.cpython-311.pyc,, +werkzeug/wrappers/request.py,sha256=XmpTThXytTdznbPJnIsfdoIAvdi-THzTJQ9DsvARhn4,24026 +werkzeug/wrappers/response.py,sha256=ii1IaN2eUfoB-tBqbn_46fCB_SVVL8Fu4qd6cu0AlEY,34963 +werkzeug/wsgi.py,sha256=-VKI2iwCgLb-VToIZeBpdutkTETxy9HkIwgcFC5orkU,36060 diff --git a/venv/lib/python3.11/site-packages/Werkzeug-2.2.3.dist-info/REQUESTED b/venv/lib/python3.11/site-packages/Werkzeug-2.2.3.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.11/site-packages/Werkzeug-2.2.3.dist-info/WHEEL b/venv/lib/python3.11/site-packages/Werkzeug-2.2.3.dist-info/WHEEL new file mode 100644 index 0000000..57e3d84 --- /dev/null +++ b/venv/lib/python3.11/site-packages/Werkzeug-2.2.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.38.4) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.11/site-packages/Werkzeug-2.2.3.dist-info/top_level.txt b/venv/lib/python3.11/site-packages/Werkzeug-2.2.3.dist-info/top_level.txt new file mode 100644 index 0000000..6fe8da8 --- /dev/null +++ b/venv/lib/python3.11/site-packages/Werkzeug-2.2.3.dist-info/top_level.txt @@ -0,0 +1 @@ +werkzeug diff --git a/venv/lib/python3.11/site-packages/__pycache__/py.cpython-311.pyc b/venv/lib/python3.11/site-packages/__pycache__/py.cpython-311.pyc new file mode 100644 index 0000000..af81bb9 Binary files /dev/null and 
b/venv/lib/python3.11/site-packages/__pycache__/py.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/__pycache__/typing_extensions.cpython-311.pyc b/venv/lib/python3.11/site-packages/__pycache__/typing_extensions.cpython-311.pyc new file mode 100644 index 0000000..fb73306 Binary files /dev/null and b/venv/lib/python3.11/site-packages/__pycache__/typing_extensions.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_distutils_hack/__init__.py b/venv/lib/python3.11/site-packages/_distutils_hack/__init__.py new file mode 100644 index 0000000..f987a53 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_distutils_hack/__init__.py @@ -0,0 +1,222 @@ +# don't import any costly modules +import sys +import os + + +is_pypy = '__pypy__' in sys.builtin_module_names + + +def warn_distutils_present(): + if 'distutils' not in sys.modules: + return + if is_pypy and sys.version_info < (3, 7): + # PyPy for 3.6 unconditionally imports distutils, so bypass the warning + # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250 + return + import warnings + + warnings.warn( + "Distutils was imported before Setuptools, but importing Setuptools " + "also replaces the `distutils` module in `sys.modules`. This may lead " + "to undesirable behaviors or errors. To avoid these issues, avoid " + "using distutils directly, ensure that setuptools is installed in the " + "traditional way (e.g. not an editable install), and/or make sure " + "that setuptools is always imported before distutils." + ) + + +def clear_distutils(): + if 'distutils' not in sys.modules: + return + import warnings + + warnings.warn("Setuptools is replacing distutils.") + mods = [ + name + for name in sys.modules + if name == "distutils" or name.startswith("distutils.") + ] + for name in mods: + del sys.modules[name] + + +def enabled(): + """ + Allow selection of distutils by environment variable. + """ + which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local') + return which == 'local' + + +def ensure_local_distutils(): + import importlib + + clear_distutils() + + # With the DistutilsMetaFinder in place, + # perform an import to cause distutils to be + # loaded from setuptools._distutils. Ref #2906. + with shim(): + importlib.import_module('distutils') + + # check that submodules load as expected + core = importlib.import_module('distutils.core') + assert '_distutils' in core.__file__, core.__file__ + assert 'setuptools._distutils.log' not in sys.modules + + +def do_override(): + """ + Ensure that the local copy of distutils is preferred over stdlib. + + See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401 + for more motivation. + """ + if enabled(): + warn_distutils_present() + ensure_local_distutils() + + +class _TrivialRe: + def __init__(self, *patterns): + self._patterns = patterns + + def match(self, string): + return all(pat in string for pat in self._patterns) + + +class DistutilsMetaFinder: + def find_spec(self, fullname, path, target=None): + # optimization: only consider top level modules and those + # found in the CPython test suite. 
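+        # Clarifying note (added comment, not in upstream setuptools):
+        # `path` is None only for top-level imports; submodule imports
+        # arrive with their parent package's __path__, so they are left
+        # to the regular finders unless they belong to the CPython
+        # ``test`` suite handled by spec_for_sensitive_tests() below.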
+ if path is not None and not fullname.startswith('test.'): + return + + method_name = 'spec_for_{fullname}'.format(**locals()) + method = getattr(self, method_name, lambda: None) + return method() + + def spec_for_distutils(self): + if self.is_cpython(): + return + + import importlib + import importlib.abc + import importlib.util + + try: + mod = importlib.import_module('setuptools._distutils') + except Exception: + # There are a couple of cases where setuptools._distutils + # may not be present: + # - An older Setuptools without a local distutils is + # taking precedence. Ref #2957. + # - Path manipulation during sitecustomize removes + # setuptools from the path but only after the hook + # has been loaded. Ref #2980. + # In either case, fall back to stdlib behavior. + return + + class DistutilsLoader(importlib.abc.Loader): + def create_module(self, spec): + mod.__name__ = 'distutils' + return mod + + def exec_module(self, module): + pass + + return importlib.util.spec_from_loader( + 'distutils', DistutilsLoader(), origin=mod.__file__ + ) + + @staticmethod + def is_cpython(): + """ + Suppress supplying distutils for CPython (build and tests). + Ref #2965 and #3007. + """ + return os.path.isfile('pybuilddir.txt') + + def spec_for_pip(self): + """ + Ensure stdlib distutils when running under pip. + See pypa/pip#8761 for rationale. + """ + if self.pip_imported_during_build(): + return + clear_distutils() + self.spec_for_distutils = lambda: None + + @classmethod + def pip_imported_during_build(cls): + """ + Detect if pip is being imported in a build script. Ref #2355. + """ + import traceback + + return any( + cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None) + ) + + @staticmethod + def frame_file_is_setup(frame): + """ + Return True if the indicated frame suggests a setup.py file. + """ + # some frames may not have __file__ (#2940) + return frame.f_globals.get('__file__', '').endswith('setup.py') + + def spec_for_sensitive_tests(self): + """ + Ensure stdlib distutils when running select tests under CPython. 
+ + python/cpython#91169 + """ + clear_distutils() + self.spec_for_distutils = lambda: None + + sensitive_tests = ( + [ + 'test.test_distutils', + 'test.test_peg_generator', + 'test.test_importlib', + ] + if sys.version_info < (3, 10) + else [ + 'test.test_distutils', + ] + ) + + +for name in DistutilsMetaFinder.sensitive_tests: + setattr( + DistutilsMetaFinder, + f'spec_for_{name}', + DistutilsMetaFinder.spec_for_sensitive_tests, + ) + + +DISTUTILS_FINDER = DistutilsMetaFinder() + + +def add_shim(): + DISTUTILS_FINDER in sys.meta_path or insert_shim() + + +class shim: + def __enter__(self): + insert_shim() + + def __exit__(self, exc, value, tb): + remove_shim() + + +def insert_shim(): + sys.meta_path.insert(0, DISTUTILS_FINDER) + + +def remove_shim(): + try: + sys.meta_path.remove(DISTUTILS_FINDER) + except ValueError: + pass diff --git a/venv/lib/python3.11/site-packages/_distutils_hack/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/_distutils_hack/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..b33d963 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_distutils_hack/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_distutils_hack/__pycache__/override.cpython-311.pyc b/venv/lib/python3.11/site-packages/_distutils_hack/__pycache__/override.cpython-311.pyc new file mode 100644 index 0000000..a8f640c Binary files /dev/null and b/venv/lib/python3.11/site-packages/_distutils_hack/__pycache__/override.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_distutils_hack/override.py b/venv/lib/python3.11/site-packages/_distutils_hack/override.py new file mode 100644 index 0000000..2cc433a --- /dev/null +++ b/venv/lib/python3.11/site-packages/_distutils_hack/override.py @@ -0,0 +1 @@ +__import__('_distutils_hack').do_override() diff --git a/venv/lib/python3.11/site-packages/_pytest/__init__.py b/venv/lib/python3.11/site-packages/_pytest/__init__.py new file mode 100644 index 0000000..8a406c5 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/__init__.py @@ -0,0 +1,9 @@ +__all__ = ["__version__", "version_tuple"] + +try: + from ._version import version as __version__, version_tuple +except ImportError: # pragma: no cover + # broken installation, we don't even try + # unknown only works because we do poor mans version compare + __version__ = "unknown" + version_tuple = (0, 0, "unknown") # type:ignore[assignment] diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..94f3f63 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/_argcomplete.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/_argcomplete.cpython-311.pyc new file mode 100644 index 0000000..f6d91a2 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/_argcomplete.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/_version.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/_version.cpython-311.pyc new file mode 100644 index 0000000..8b5af07 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/_version.cpython-311.pyc differ diff --git 
a/venv/lib/python3.11/site-packages/_pytest/__pycache__/cacheprovider.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/cacheprovider.cpython-311.pyc new file mode 100644 index 0000000..e9f8216 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/cacheprovider.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/capture.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/capture.cpython-311.pyc new file mode 100644 index 0000000..d65f386 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/capture.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/compat.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/compat.cpython-311.pyc new file mode 100644 index 0000000..b8a466f Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/compat.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/debugging.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/debugging.cpython-311.pyc new file mode 100644 index 0000000..537dd26 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/debugging.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/deprecated.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/deprecated.cpython-311.pyc new file mode 100644 index 0000000..b9eab5b Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/deprecated.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/doctest.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/doctest.cpython-311.pyc new file mode 100644 index 0000000..8166cd0 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/doctest.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/faulthandler.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/faulthandler.cpython-311.pyc new file mode 100644 index 0000000..ac55083 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/faulthandler.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/fixtures.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/fixtures.cpython-311.pyc new file mode 100644 index 0000000..68fc7ca Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/fixtures.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/freeze_support.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/freeze_support.cpython-311.pyc new file mode 100644 index 0000000..b0a1828 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/freeze_support.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/helpconfig.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/helpconfig.cpython-311.pyc new file mode 100644 index 0000000..7201441 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/helpconfig.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/hookspec.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/hookspec.cpython-311.pyc new file mode 100644 
index 0000000..cdc60ca Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/hookspec.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/junitxml.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/junitxml.cpython-311.pyc new file mode 100644 index 0000000..e58d0be Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/junitxml.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/legacypath.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/legacypath.cpython-311.pyc new file mode 100644 index 0000000..03f3290 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/legacypath.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/logging.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/logging.cpython-311.pyc new file mode 100644 index 0000000..88bb59a Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/logging.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/main.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/main.cpython-311.pyc new file mode 100644 index 0000000..18e9fb5 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/main.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/monkeypatch.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/monkeypatch.cpython-311.pyc new file mode 100644 index 0000000..504cba8 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/monkeypatch.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/nodes.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/nodes.cpython-311.pyc new file mode 100644 index 0000000..48612ff Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/nodes.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/nose.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/nose.cpython-311.pyc new file mode 100644 index 0000000..453aae1 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/nose.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/outcomes.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/outcomes.cpython-311.pyc new file mode 100644 index 0000000..49fe42e Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/outcomes.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/pastebin.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/pastebin.cpython-311.pyc new file mode 100644 index 0000000..0f8a176 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/pastebin.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/pathlib.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/pathlib.cpython-311.pyc new file mode 100644 index 0000000..1a907ff Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/pathlib.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/pytester.cpython-311.pyc 
b/venv/lib/python3.11/site-packages/_pytest/__pycache__/pytester.cpython-311.pyc new file mode 100644 index 0000000..f2f9a84 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/pytester.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/pytester_assertions.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/pytester_assertions.cpython-311.pyc new file mode 100644 index 0000000..b6b420a Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/pytester_assertions.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/python.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/python.cpython-311.pyc new file mode 100644 index 0000000..8835548 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/python.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/python_api.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/python_api.cpython-311.pyc new file mode 100644 index 0000000..a990a92 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/python_api.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/python_path.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/python_path.cpython-311.pyc new file mode 100644 index 0000000..e1d9729 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/python_path.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/recwarn.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/recwarn.cpython-311.pyc new file mode 100644 index 0000000..a9c7ea3 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/recwarn.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/reports.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/reports.cpython-311.pyc new file mode 100644 index 0000000..13f845f Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/reports.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/runner.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/runner.cpython-311.pyc new file mode 100644 index 0000000..449a2b3 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/runner.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/scope.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/scope.cpython-311.pyc new file mode 100644 index 0000000..dc03a9e Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/scope.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/setuponly.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/setuponly.cpython-311.pyc new file mode 100644 index 0000000..d5abccf Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/setuponly.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/setupplan.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/setupplan.cpython-311.pyc new file mode 100644 index 0000000..6dfea48 Binary files /dev/null and 
b/venv/lib/python3.11/site-packages/_pytest/__pycache__/setupplan.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/skipping.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/skipping.cpython-311.pyc new file mode 100644 index 0000000..88fa1fe Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/skipping.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/stash.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/stash.cpython-311.pyc new file mode 100644 index 0000000..6303636 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/stash.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/stepwise.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/stepwise.cpython-311.pyc new file mode 100644 index 0000000..40d3b8e Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/stepwise.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/terminal.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/terminal.cpython-311.pyc new file mode 100644 index 0000000..72e4b59 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/terminal.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/threadexception.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/threadexception.cpython-311.pyc new file mode 100644 index 0000000..8cf356b Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/threadexception.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/timing.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/timing.cpython-311.pyc new file mode 100644 index 0000000..4c32878 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/timing.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/tmpdir.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/tmpdir.cpython-311.pyc new file mode 100644 index 0000000..488c9c0 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/tmpdir.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/unittest.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/unittest.cpython-311.pyc new file mode 100644 index 0000000..ccbfddc Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/unittest.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/unraisableexception.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/unraisableexception.cpython-311.pyc new file mode 100644 index 0000000..71a4318 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/unraisableexception.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/warning_types.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/__pycache__/warning_types.cpython-311.pyc new file mode 100644 index 0000000..81de92e Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/warning_types.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/__pycache__/warnings.cpython-311.pyc 
b/venv/lib/python3.11/site-packages/_pytest/__pycache__/warnings.cpython-311.pyc new file mode 100644 index 0000000..4b5ef8f Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/__pycache__/warnings.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/_argcomplete.py b/venv/lib/python3.11/site-packages/_pytest/_argcomplete.py new file mode 100644 index 0000000..6a80837 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/_argcomplete.py @@ -0,0 +1,116 @@ +"""Allow bash-completion for argparse with argcomplete if installed. + +Needs argcomplete>=0.5.6 for python 3.2/3.3 (older versions fail +to find the magic string, so _ARGCOMPLETE env. var is never set, and +this does not need special code). + +Function try_argcomplete(parser) should be called directly before +the call to ArgumentParser.parse_args(). + +The filescompleter is what you normally would use on the positional +arguments specification, in order to get "dirname/" after "dirn" +instead of the default "dirname ": + + optparser.add_argument(Config._file_or_dir, nargs='*').completer=filescompleter + +Other, application specific, completers should go in the file +doing the add_argument calls as they need to be specified as .completer +attributes as well. (If argcomplete is not installed, the function the +attribute points to will not be used). + +SPEEDUP +======= + +The generic argcomplete script for bash-completion +(/etc/bash_completion.d/python-argcomplete.sh) +uses a python program to determine startup script generated by pip. +You can speed up completion somewhat by changing this script to include + # PYTHON_ARGCOMPLETE_OK +so the python-argcomplete-check-easy-install-script does not +need to be called to find the entry point of the code and see if that is +marked with PYTHON_ARGCOMPLETE_OK. + +INSTALL/DEBUGGING +================= + +To include this support in another application that has setup.py generated +scripts: + +- Add the line: + # PYTHON_ARGCOMPLETE_OK + near the top of the main python entry point. + +- Include in the file calling parse_args(): + from _argcomplete import try_argcomplete, filescompleter + Call try_argcomplete just before parse_args(), and optionally add + filescompleter to the positional arguments' add_argument(). + +If things do not work right away: + +- Switch on argcomplete debugging with (also helpful when doing custom + completers): + export _ARC_DEBUG=1 + +- Run: + python-argcomplete-check-easy-install-script $(which appname) + echo $? + will echo 0 if the magic line has been found, 1 if not. + +- Sometimes it helps to find early on errors using: + _ARGCOMPLETE=1 _ARC_DEBUG=1 appname + which should throw a KeyError: 'COMPLINE' (which is properly set by the + global argcomplete script). +""" +import argparse +import os +import sys +from glob import glob +from typing import Any +from typing import List +from typing import Optional + + +class FastFilesCompleter: + """Fast file completer class.""" + + def __init__(self, directories: bool = True) -> None: + self.directories = directories + + def __call__(self, prefix: str, **kwargs: Any) -> List[str]: + # Only called on non option completions. + if os.sep in prefix[1:]: + prefix_dir = len(os.path.dirname(prefix) + os.sep) + else: + prefix_dir = 0 + completion = [] + globbed = [] + if "*" not in prefix and "?" not in prefix: + # We are on unix, otherwise no bash. 
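+            # Clarifying note (added comment, not in upstream pytest): a
+            # bare "*" glob would miss dotfiles, so hidden entries are
+            # globbed explicitly with ".*" before "*" is appended below.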
+ if not prefix or prefix[-1] == os.sep: + globbed.extend(glob(prefix + ".*")) + prefix += "*" + globbed.extend(glob(prefix)) + for x in sorted(globbed): + if os.path.isdir(x): + x += "/" + # Append stripping the prefix (like bash, not like compgen). + completion.append(x[prefix_dir:]) + return completion + + +if os.environ.get("_ARGCOMPLETE"): + try: + import argcomplete.completers + except ImportError: + sys.exit(-1) + filescompleter: Optional[FastFilesCompleter] = FastFilesCompleter() + + def try_argcomplete(parser: argparse.ArgumentParser) -> None: + argcomplete.autocomplete(parser, always_complete_options=False) + +else: + + def try_argcomplete(parser: argparse.ArgumentParser) -> None: + pass + + filescompleter = None diff --git a/venv/lib/python3.11/site-packages/_pytest/_code/__init__.py b/venv/lib/python3.11/site-packages/_pytest/_code/__init__.py new file mode 100644 index 0000000..511d0dd --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/_code/__init__.py @@ -0,0 +1,22 @@ +"""Python inspection/code generation API.""" +from .code import Code +from .code import ExceptionInfo +from .code import filter_traceback +from .code import Frame +from .code import getfslineno +from .code import Traceback +from .code import TracebackEntry +from .source import getrawcode +from .source import Source + +__all__ = [ + "Code", + "ExceptionInfo", + "filter_traceback", + "Frame", + "getfslineno", + "getrawcode", + "Traceback", + "TracebackEntry", + "Source", +] diff --git a/venv/lib/python3.11/site-packages/_pytest/_code/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/_code/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..ee7f6e4 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/_code/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/_code/__pycache__/code.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/_code/__pycache__/code.cpython-311.pyc new file mode 100644 index 0000000..018087c Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/_code/__pycache__/code.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/_code/__pycache__/source.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/_code/__pycache__/source.cpython-311.pyc new file mode 100644 index 0000000..2ca178b Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/_code/__pycache__/source.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/_code/code.py b/venv/lib/python3.11/site-packages/_pytest/_code/code.py new file mode 100644 index 0000000..9b05133 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/_code/code.py @@ -0,0 +1,1337 @@ +import ast +import dataclasses +import inspect +import os +import re +import sys +import traceback +from inspect import CO_VARARGS +from inspect import CO_VARKEYWORDS +from io import StringIO +from pathlib import Path +from traceback import format_exception_only +from types import CodeType +from types import FrameType +from types import TracebackType +from typing import Any +from typing import Callable +from typing import ClassVar +from typing import Dict +from typing import Generic +from typing import Iterable +from typing import List +from typing import Mapping +from typing import Optional +from typing import overload +from typing import Pattern +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import Type +from 
typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +import pluggy + +import _pytest +from _pytest._code.source import findsource +from _pytest._code.source import getrawcode +from _pytest._code.source import getstatementrange_ast +from _pytest._code.source import Source +from _pytest._io import TerminalWriter +from _pytest._io.saferepr import safeformat +from _pytest._io.saferepr import saferepr +from _pytest.compat import final +from _pytest.compat import get_real_func +from _pytest.deprecated import check_ispytest +from _pytest.pathlib import absolutepath +from _pytest.pathlib import bestrelpath + +if TYPE_CHECKING: + from typing_extensions import Final + from typing_extensions import Literal + from typing_extensions import SupportsIndex + + _TracebackStyle = Literal["long", "short", "line", "no", "native", "value", "auto"] + +if sys.version_info[:2] < (3, 11): + from exceptiongroup import BaseExceptionGroup + + +class Code: + """Wrapper around Python code objects.""" + + __slots__ = ("raw",) + + def __init__(self, obj: CodeType) -> None: + self.raw = obj + + @classmethod + def from_function(cls, obj: object) -> "Code": + return cls(getrawcode(obj)) + + def __eq__(self, other): + return self.raw == other.raw + + # Ignore type because of https://github.com/python/mypy/issues/4266. + __hash__ = None # type: ignore + + @property + def firstlineno(self) -> int: + return self.raw.co_firstlineno - 1 + + @property + def name(self) -> str: + return self.raw.co_name + + @property + def path(self) -> Union[Path, str]: + """Return a path object pointing to source code, or an ``str`` in + case of ``OSError`` / non-existing file.""" + if not self.raw.co_filename: + return "" + try: + p = absolutepath(self.raw.co_filename) + # maybe don't try this checking + if not p.exists(): + raise OSError("path check failed.") + return p + except OSError: + # XXX maybe try harder like the weird logic + # in the standard lib [linecache.updatecache] does? + return self.raw.co_filename + + @property + def fullsource(self) -> Optional["Source"]: + """Return a _pytest._code.Source object for the full source file of the code.""" + full, _ = findsource(self.raw) + return full + + def source(self) -> "Source": + """Return a _pytest._code.Source object for the code object's source only.""" + # return source only for that part of code + return Source(self.raw) + + def getargs(self, var: bool = False) -> Tuple[str, ...]: + """Return a tuple with the argument names for the code object. + + If 'var' is set True also return the names of the variable and + keyword arguments when present. + """ + # Handy shortcut for getting args. 
+        raw = self.raw
+        argcount = raw.co_argcount
+        if var:
+            argcount += raw.co_flags & CO_VARARGS
+            argcount += raw.co_flags & CO_VARKEYWORDS
+        return raw.co_varnames[:argcount]
+
+
+class Frame:
+    """Wrapper around a Python frame holding f_locals and f_globals
+    in which expressions can be evaluated."""
+
+    __slots__ = ("raw",)
+
+    def __init__(self, frame: FrameType) -> None:
+        self.raw = frame
+
+    @property
+    def lineno(self) -> int:
+        return self.raw.f_lineno - 1
+
+    @property
+    def f_globals(self) -> Dict[str, Any]:
+        return self.raw.f_globals
+
+    @property
+    def f_locals(self) -> Dict[str, Any]:
+        return self.raw.f_locals
+
+    @property
+    def code(self) -> Code:
+        return Code(self.raw.f_code)
+
+    @property
+    def statement(self) -> "Source":
+        """Statement this frame is at."""
+        if self.code.fullsource is None:
+            return Source("")
+        return self.code.fullsource.getstatement(self.lineno)
+
+    def eval(self, code, **vars):
+        """Evaluate 'code' in the frame.
+
+        'vars' are optional additional local variables.
+
+        Returns the result of the evaluation.
+        """
+        f_locals = self.f_locals.copy()
+        f_locals.update(vars)
+        return eval(code, self.f_globals, f_locals)
+
+    def repr(self, object: object) -> str:
+        """Return a 'safe' (non-recursive, one-line) string repr for 'object'."""
+        return saferepr(object)
+
+    def getargs(self, var: bool = False):
+        """Return a list of tuples (name, value) for all arguments.
+
+        If 'var' is set True, also include the variable and keyword arguments
+        when present.
+        """
+        retval = []
+        for arg in self.code.getargs(var):
+            try:
+                retval.append((arg, self.f_locals[arg]))
+            except KeyError:
+                pass  # this can occur when using Psyco
+        return retval
+
+
+class TracebackEntry:
+    """A single entry in a Traceback."""
+
+    __slots__ = ("_rawentry", "_repr_style")
+
+    def __init__(
+        self,
+        rawentry: TracebackType,
+        repr_style: Optional['Literal["short", "long"]'] = None,
+    ) -> None:
+        self._rawentry: "Final" = rawentry
+        self._repr_style: "Final" = repr_style
+
+    def with_repr_style(
+        self, repr_style: Optional['Literal["short", "long"]']
+    ) -> "TracebackEntry":
+        return TracebackEntry(self._rawentry, repr_style)
+
+    @property
+    def lineno(self) -> int:
+        return self._rawentry.tb_lineno - 1
+
+    @property
+    def frame(self) -> Frame:
+        return Frame(self._rawentry.tb_frame)
+
+    @property
+    def relline(self) -> int:
+        return self.lineno - self.frame.code.firstlineno
+
+    def __repr__(self) -> str:
+        return "<TracebackEntry %s:%d>" % (self.frame.code.path, self.lineno + 1)
+
+    @property
+    def statement(self) -> "Source":
+        """_pytest._code.Source object for the current statement."""
+        source = self.frame.code.fullsource
+        assert source is not None
+        return source.getstatement(self.lineno)
+
+    @property
+    def path(self) -> Union[Path, str]:
+        """Path to the source code."""
+        return self.frame.code.path
+
+    @property
+    def locals(self) -> Dict[str, Any]:
+        """Locals of underlying frame."""
+        return self.frame.f_locals
+
+    def getfirstlinesource(self) -> int:
+        return self.frame.code.firstlineno
+
+    def getsource(
+        self, astcache: Optional[Dict[Union[str, Path], ast.AST]] = None
+    ) -> Optional["Source"]:
+        """Return failing source code."""
+        # we use the passed in astcache to not reparse asttrees
+        # within exception info printing
+        source = self.frame.code.fullsource
+        if source is None:
+            return None
+        key = astnode = None
+        if astcache is not None:
+            key = self.frame.code.path
+            if key is not None:
+                astnode = astcache.get(key, None)
+        start = self.getfirstlinesource()
+        try:
+            astnode, _, end = 
getstatementrange_ast( + self.lineno, source, astnode=astnode + ) + except SyntaxError: + end = self.lineno + 1 + else: + if key is not None and astcache is not None: + astcache[key] = astnode + return source[start:end] + + source = property(getsource) + + def ishidden(self, excinfo: Optional["ExceptionInfo[BaseException]"]) -> bool: + """Return True if the current frame has a var __tracebackhide__ + resolving to True. + + If __tracebackhide__ is a callable, it gets called with the + ExceptionInfo instance and can decide whether to hide the traceback. + + Mostly for internal use. + """ + tbh: Union[ + bool, Callable[[Optional[ExceptionInfo[BaseException]]], bool] + ] = False + for maybe_ns_dct in (self.frame.f_locals, self.frame.f_globals): + # in normal cases, f_locals and f_globals are dictionaries + # however via `exec(...)` / `eval(...)` they can be other types + # (even incorrect types!). + # as such, we suppress all exceptions while accessing __tracebackhide__ + try: + tbh = maybe_ns_dct["__tracebackhide__"] + except Exception: + pass + else: + break + if tbh and callable(tbh): + return tbh(excinfo) + return tbh + + def __str__(self) -> str: + name = self.frame.code.name + try: + line = str(self.statement).lstrip() + except KeyboardInterrupt: + raise + except BaseException: + line = "???" + # This output does not quite match Python's repr for traceback entries, + # but changing it to do so would break certain plugins. See + # https://github.com/pytest-dev/pytest/pull/7535/ for details. + return " File %r:%d in %s\n %s\n" % ( + str(self.path), + self.lineno + 1, + name, + line, + ) + + @property + def name(self) -> str: + """co_name of underlying code.""" + return self.frame.code.raw.co_name + + +class Traceback(List[TracebackEntry]): + """Traceback objects encapsulate and offer higher level access to Traceback entries.""" + + def __init__( + self, + tb: Union[TracebackType, Iterable[TracebackEntry]], + ) -> None: + """Initialize from given python traceback object and ExceptionInfo.""" + if isinstance(tb, TracebackType): + + def f(cur: TracebackType) -> Iterable[TracebackEntry]: + cur_: Optional[TracebackType] = cur + while cur_ is not None: + yield TracebackEntry(cur_) + cur_ = cur_.tb_next + + super().__init__(f(tb)) + else: + super().__init__(tb) + + def cut( + self, + path: Optional[Union["os.PathLike[str]", str]] = None, + lineno: Optional[int] = None, + firstlineno: Optional[int] = None, + excludepath: Optional["os.PathLike[str]"] = None, + ) -> "Traceback": + """Return a Traceback instance wrapping part of this Traceback. + + By providing any combination of path, lineno and firstlineno, the + first frame to start the to-be-returned traceback is determined. + + This allows cutting the first part of a Traceback instance e.g. + for formatting reasons (removing some uninteresting bits that deal + with handling of the exception/traceback). 
+ """ + path_ = None if path is None else os.fspath(path) + excludepath_ = None if excludepath is None else os.fspath(excludepath) + for x in self: + code = x.frame.code + codepath = code.path + if path is not None and str(codepath) != path_: + continue + if ( + excludepath is not None + and isinstance(codepath, Path) + and excludepath_ in (str(p) for p in codepath.parents) # type: ignore[operator] + ): + continue + if lineno is not None and x.lineno != lineno: + continue + if firstlineno is not None and x.frame.code.firstlineno != firstlineno: + continue + return Traceback(x._rawentry) + return self + + @overload + def __getitem__(self, key: "SupportsIndex") -> TracebackEntry: + ... + + @overload + def __getitem__(self, key: slice) -> "Traceback": + ... + + def __getitem__( + self, key: Union["SupportsIndex", slice] + ) -> Union[TracebackEntry, "Traceback"]: + if isinstance(key, slice): + return self.__class__(super().__getitem__(key)) + else: + return super().__getitem__(key) + + def filter( + self, + # TODO(py38): change to positional only. + _excinfo_or_fn: Union[ + "ExceptionInfo[BaseException]", + Callable[[TracebackEntry], bool], + ], + ) -> "Traceback": + """Return a Traceback instance with certain items removed. + + If the filter is an `ExceptionInfo`, removes all the ``TracebackEntry``s + which are hidden (see ishidden() above). + + Otherwise, the filter is a function that gets a single argument, a + ``TracebackEntry`` instance, and should return True when the item should + be added to the ``Traceback``, False when not. + """ + if isinstance(_excinfo_or_fn, ExceptionInfo): + fn = lambda x: not x.ishidden(_excinfo_or_fn) # noqa: E731 + else: + fn = _excinfo_or_fn + return Traceback(filter(fn, self)) + + def recursionindex(self) -> Optional[int]: + """Return the index of the frame/TracebackEntry where recursion originates if + appropriate, None if no recursion occurred.""" + cache: Dict[Tuple[Any, int, int], List[Dict[str, Any]]] = {} + for i, entry in enumerate(self): + # id for the code.raw is needed to work around + # the strange metaprogramming in the decorator lib from pypi + # which generates code objects that have hash/value equality + # XXX needs a test + key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno + # print "checking for recursion at", key + values = cache.setdefault(key, []) + if values: + f = entry.frame + loc = f.f_locals + for otherloc in values: + if otherloc == loc: + return i + values.append(entry.frame.f_locals) + return None + + +E = TypeVar("E", bound=BaseException, covariant=True) + + +@final +@dataclasses.dataclass +class ExceptionInfo(Generic[E]): + """Wraps sys.exc_info() objects and offers help for navigating the traceback.""" + + _assert_start_repr: ClassVar = "AssertionError('assert " + + _excinfo: Optional[Tuple[Type["E"], "E", TracebackType]] + _striptext: str + _traceback: Optional[Traceback] + + def __init__( + self, + excinfo: Optional[Tuple[Type["E"], "E", TracebackType]], + striptext: str = "", + traceback: Optional[Traceback] = None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self._excinfo = excinfo + self._striptext = striptext + self._traceback = traceback + + @classmethod + def from_exception( + cls, + # Ignoring error: "Cannot use a covariant type variable as a parameter". + # This is OK to ignore because this class is (conceptually) readonly. + # See https://github.com/python/mypy/issues/7049. 
+ exception: E, # type: ignore[misc] + exprinfo: Optional[str] = None, + ) -> "ExceptionInfo[E]": + """Return an ExceptionInfo for an existing exception. + + The exception must have a non-``None`` ``__traceback__`` attribute, + otherwise this function fails with an assertion error. This means that + the exception must have been raised, or added a traceback with the + :py:meth:`~BaseException.with_traceback()` method. + + :param exprinfo: + A text string helping to determine if we should strip + ``AssertionError`` from the output. Defaults to the exception + message/``__str__()``. + + .. versionadded:: 7.4 + """ + assert ( + exception.__traceback__ + ), "Exceptions passed to ExcInfo.from_exception(...) must have a non-None __traceback__." + exc_info = (type(exception), exception, exception.__traceback__) + return cls.from_exc_info(exc_info, exprinfo) + + @classmethod + def from_exc_info( + cls, + exc_info: Tuple[Type[E], E, TracebackType], + exprinfo: Optional[str] = None, + ) -> "ExceptionInfo[E]": + """Like :func:`from_exception`, but using old-style exc_info tuple.""" + _striptext = "" + if exprinfo is None and isinstance(exc_info[1], AssertionError): + exprinfo = getattr(exc_info[1], "msg", None) + if exprinfo is None: + exprinfo = saferepr(exc_info[1]) + if exprinfo and exprinfo.startswith(cls._assert_start_repr): + _striptext = "AssertionError: " + + return cls(exc_info, _striptext, _ispytest=True) + + @classmethod + def from_current( + cls, exprinfo: Optional[str] = None + ) -> "ExceptionInfo[BaseException]": + """Return an ExceptionInfo matching the current traceback. + + .. warning:: + + Experimental API + + :param exprinfo: + A text string helping to determine if we should strip + ``AssertionError`` from the output. Defaults to the exception + message/``__str__()``. 
+ """ + tup = sys.exc_info() + assert tup[0] is not None, "no current exception" + assert tup[1] is not None, "no current exception" + assert tup[2] is not None, "no current exception" + exc_info = (tup[0], tup[1], tup[2]) + return ExceptionInfo.from_exc_info(exc_info, exprinfo) + + @classmethod + def for_later(cls) -> "ExceptionInfo[E]": + """Return an unfilled ExceptionInfo.""" + return cls(None, _ispytest=True) + + def fill_unfilled(self, exc_info: Tuple[Type[E], E, TracebackType]) -> None: + """Fill an unfilled ExceptionInfo created with ``for_later()``.""" + assert self._excinfo is None, "ExceptionInfo was already filled" + self._excinfo = exc_info + + @property + def type(self) -> Type[E]: + """The exception class.""" + assert ( + self._excinfo is not None + ), ".type can only be used after the context manager exits" + return self._excinfo[0] + + @property + def value(self) -> E: + """The exception value.""" + assert ( + self._excinfo is not None + ), ".value can only be used after the context manager exits" + return self._excinfo[1] + + @property + def tb(self) -> TracebackType: + """The exception raw traceback.""" + assert ( + self._excinfo is not None + ), ".tb can only be used after the context manager exits" + return self._excinfo[2] + + @property + def typename(self) -> str: + """The type name of the exception.""" + assert ( + self._excinfo is not None + ), ".typename can only be used after the context manager exits" + return self.type.__name__ + + @property + def traceback(self) -> Traceback: + """The traceback.""" + if self._traceback is None: + self._traceback = Traceback(self.tb) + return self._traceback + + @traceback.setter + def traceback(self, value: Traceback) -> None: + self._traceback = value + + def __repr__(self) -> str: + if self._excinfo is None: + return "" + return "<{} {} tblen={}>".format( + self.__class__.__name__, saferepr(self._excinfo[1]), len(self.traceback) + ) + + def exconly(self, tryshort: bool = False) -> str: + """Return the exception as a string. + + When 'tryshort' resolves to True, and the exception is an + AssertionError, only the actual exception part of the exception + representation is returned (so 'AssertionError: ' is removed from + the beginning). + """ + lines = format_exception_only(self.type, self.value) + text = "".join(lines) + text = text.rstrip() + if tryshort: + if text.startswith(self._striptext): + text = text[len(self._striptext) :] + return text + + def errisinstance( + self, exc: Union[Type[BaseException], Tuple[Type[BaseException], ...]] + ) -> bool: + """Return True if the exception is an instance of exc. + + Consider using ``isinstance(excinfo.value, exc)`` instead. + """ + return isinstance(self.value, exc) + + def _getreprcrash(self) -> Optional["ReprFileLocation"]: + # Find last non-hidden traceback entry that led to the exception of the + # traceback, or None if all hidden. 
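
The accessors above are the usual entry point into this module; a minimal usage sketch (illustrative only, not part of the vendored file; `from_exception` needs pytest >= 7.4):

    from _pytest._code import ExceptionInfo

    try:
        raise ValueError("boom")
    except ValueError as exc:
        info = ExceptionInfo.from_exception(exc)
        print(info.typename)                       # "ValueError"
        print(info.exconly())                      # "ValueError: boom"
        print(len(info.traceback))                 # number of TracebackEntry items
        print(isinstance(info.value, ValueError))  # preferred over errisinstance()
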
+ for i in range(-1, -len(self.traceback) - 1, -1): + entry = self.traceback[i] + if not entry.ishidden(self): + path, lineno = entry.frame.code.raw.co_filename, entry.lineno + exconly = self.exconly(tryshort=True) + return ReprFileLocation(path, lineno + 1, exconly) + return None + + def getrepr( + self, + showlocals: bool = False, + style: "_TracebackStyle" = "long", + abspath: bool = False, + tbfilter: Union[ + bool, Callable[["ExceptionInfo[BaseException]"], Traceback] + ] = True, + funcargs: bool = False, + truncate_locals: bool = True, + chain: bool = True, + ) -> Union["ReprExceptionInfo", "ExceptionChainRepr"]: + """Return str()able representation of this exception info. + + :param bool showlocals: + Show locals per traceback entry. + Ignored if ``style=="native"``. + + :param str style: + long|short|line|no|native|value traceback style. + + :param bool abspath: + If paths should be changed to absolute or left unchanged. + + :param tbfilter: + A filter for traceback entries. + + * If false, don't hide any entries. + * If true, hide internal entries and entries that contain a local + variable ``__tracebackhide__ = True``. + * If a callable, delegates the filtering to the callable. + + Ignored if ``style`` is ``"native"``. + + :param bool funcargs: + Show fixtures ("funcargs" for legacy purposes) per traceback entry. + + :param bool truncate_locals: + With ``showlocals==True``, make sure locals can be safely represented as strings. + + :param bool chain: + If chained exceptions in Python 3 should be shown. + + .. versionchanged:: 3.9 + + Added the ``chain`` parameter. + """ + if style == "native": + return ReprExceptionInfo( + reprtraceback=ReprTracebackNative( + traceback.format_exception( + self.type, + self.value, + self.traceback[0]._rawentry if self.traceback else None, + ) + ), + reprcrash=self._getreprcrash(), + ) + + fmt = FormattedExcinfo( + showlocals=showlocals, + style=style, + abspath=abspath, + tbfilter=tbfilter, + funcargs=funcargs, + truncate_locals=truncate_locals, + chain=chain, + ) + return fmt.repr_excinfo(self) + + def match(self, regexp: Union[str, Pattern[str]]) -> "Literal[True]": + """Check whether the regular expression `regexp` matches the string + representation of the exception using :func:`python:re.search`. + + If it matches `True` is returned, otherwise an `AssertionError` is raised. + """ + __tracebackhide__ = True + value = str(self.value) + msg = f"Regex pattern did not match.\n Regex: {regexp!r}\n Input: {value!r}" + if regexp == value: + msg += "\n Did you mean to `re.escape()` the regex?" + assert re.search(regexp, value), msg + # Return True to allow for "assert excinfo.match()". + return True + + +@dataclasses.dataclass +class FormattedExcinfo: + """Presenting information about failing Functions and Generators.""" + + # for traceback entries + flow_marker: ClassVar = ">" + fail_marker: ClassVar = "E" + + showlocals: bool = False + style: "_TracebackStyle" = "long" + abspath: bool = True + tbfilter: Union[bool, Callable[[ExceptionInfo[BaseException]], Traceback]] = True + funcargs: bool = False + truncate_locals: bool = True + chain: bool = True + astcache: Dict[Union[str, Path], ast.AST] = dataclasses.field( + default_factory=dict, init=False, repr=False + ) + + def _getindent(self, source: "Source") -> int: + # Figure out indent for the given source. 
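
As a sketch of the two high-level consumers documented above, `getrepr()` and `match()` (illustrative, not part of the vendored file):

    from _pytest._code import ExceptionInfo

    try:
        {}["missing"]
    except KeyError as exc:
        info = ExceptionInfo.from_exception(exc)
        print(info.getrepr(style="short"))  # str()-able representation of the failure
        info.match(r"missing")              # True, or AssertionError if the regex does not match
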
+ try: + s = str(source.getstatement(len(source) - 1)) + except KeyboardInterrupt: + raise + except BaseException: + try: + s = str(source[-1]) + except KeyboardInterrupt: + raise + except BaseException: + return 0 + return 4 + (len(s) - len(s.lstrip())) + + def _getentrysource(self, entry: TracebackEntry) -> Optional["Source"]: + source = entry.getsource(self.astcache) + if source is not None: + source = source.deindent() + return source + + def repr_args(self, entry: TracebackEntry) -> Optional["ReprFuncArgs"]: + if self.funcargs: + args = [] + for argname, argvalue in entry.frame.getargs(var=True): + args.append((argname, saferepr(argvalue))) + return ReprFuncArgs(args) + return None + + def get_source( + self, + source: Optional["Source"], + line_index: int = -1, + excinfo: Optional[ExceptionInfo[BaseException]] = None, + short: bool = False, + ) -> List[str]: + """Return formatted and marked up source lines.""" + lines = [] + if source is not None and line_index < 0: + line_index += len(source) + if source is None or line_index >= len(source.lines) or line_index < 0: + # `line_index` could still be outside `range(len(source.lines))` if + # we're processing AST with pathological position attributes. + source = Source("???") + line_index = 0 + space_prefix = " " + if short: + lines.append(space_prefix + source.lines[line_index].strip()) + else: + for line in source.lines[:line_index]: + lines.append(space_prefix + line) + lines.append(self.flow_marker + " " + source.lines[line_index]) + for line in source.lines[line_index + 1 :]: + lines.append(space_prefix + line) + if excinfo is not None: + indent = 4 if short else self._getindent(source) + lines.extend(self.get_exconly(excinfo, indent=indent, markall=True)) + return lines + + def get_exconly( + self, + excinfo: ExceptionInfo[BaseException], + indent: int = 4, + markall: bool = False, + ) -> List[str]: + lines = [] + indentstr = " " * indent + # Get the real exception information out. + exlines = excinfo.exconly(tryshort=True).split("\n") + failindent = self.fail_marker + indentstr[1:] + for line in exlines: + lines.append(failindent + line) + if not markall: + failindent = indentstr + return lines + + def repr_locals(self, locals: Mapping[str, object]) -> Optional["ReprLocals"]: + if self.showlocals: + lines = [] + keys = [loc for loc in locals if loc[0] != "@"] + keys.sort() + for name in keys: + value = locals[name] + if name == "__builtins__": + lines.append("__builtins__ = ") + else: + # This formatting could all be handled by the + # _repr() function, which is only reprlib.Repr in + # disguise, so is very configurable. 
+ if self.truncate_locals: + str_repr = saferepr(value) + else: + str_repr = safeformat(value) + # if len(str_repr) < 70 or not isinstance(value, (list, tuple, dict)): + lines.append(f"{name:<10} = {str_repr}") + # else: + # self._line("%-10s =\\" % (name,)) + # # XXX + # pprint.pprint(value, stream=self.excinfowriter) + return ReprLocals(lines) + return None + + def repr_traceback_entry( + self, + entry: Optional[TracebackEntry], + excinfo: Optional[ExceptionInfo[BaseException]] = None, + ) -> "ReprEntry": + lines: List[str] = [] + style = ( + entry._repr_style + if entry is not None and entry._repr_style is not None + else self.style + ) + if style in ("short", "long") and entry is not None: + source = self._getentrysource(entry) + if source is None: + source = Source("???") + line_index = 0 + else: + line_index = entry.lineno - entry.getfirstlinesource() + short = style == "short" + reprargs = self.repr_args(entry) if not short else None + s = self.get_source(source, line_index, excinfo, short=short) + lines.extend(s) + if short: + message = "in %s" % (entry.name) + else: + message = excinfo and excinfo.typename or "" + entry_path = entry.path + path = self._makepath(entry_path) + reprfileloc = ReprFileLocation(path, entry.lineno + 1, message) + localsrepr = self.repr_locals(entry.locals) + return ReprEntry(lines, reprargs, localsrepr, reprfileloc, style) + elif style == "value": + if excinfo: + lines.extend(str(excinfo.value).split("\n")) + return ReprEntry(lines, None, None, None, style) + else: + if excinfo: + lines.extend(self.get_exconly(excinfo, indent=4)) + return ReprEntry(lines, None, None, None, style) + + def _makepath(self, path: Union[Path, str]) -> str: + if not self.abspath and isinstance(path, Path): + try: + np = bestrelpath(Path.cwd(), path) + except OSError: + return str(path) + if len(np) < len(str(path)): + return np + return str(path) + + def repr_traceback(self, excinfo: ExceptionInfo[BaseException]) -> "ReprTraceback": + traceback = excinfo.traceback + if callable(self.tbfilter): + traceback = self.tbfilter(excinfo) + elif self.tbfilter: + traceback = traceback.filter(excinfo) + + if isinstance(excinfo.value, RecursionError): + traceback, extraline = self._truncate_recursive_traceback(traceback) + else: + extraline = None + + if not traceback: + if extraline is None: + extraline = "All traceback entries are hidden. Pass `--full-trace` to see hidden and internal frames." + entries = [self.repr_traceback_entry(None, excinfo)] + return ReprTraceback(entries, extraline, style=self.style) + + last = traceback[-1] + if self.style == "value": + entries = [self.repr_traceback_entry(last, excinfo)] + return ReprTraceback(entries, None, style=self.style) + + entries = [ + self.repr_traceback_entry(entry, excinfo if last == entry else None) + for entry in traceback + ] + return ReprTraceback(entries, extraline, style=self.style) + + def _truncate_recursive_traceback( + self, traceback: Traceback + ) -> Tuple[Traceback, Optional[str]]: + """Truncate the given recursive traceback trying to find the starting + point of the recursion. + + The detection is done by going through each traceback entry and + finding the point in which the locals of the frame are equal to the + locals of a previous frame (see ``recursionindex()``). + + Handle the situation where the recursion process might raise an + exception (for example comparing numpy arrays using equality raises a + TypeError), in which case we do our best to warn the user of the + error and show a limited traceback. 
+ """ + try: + recursionindex = traceback.recursionindex() + except Exception as e: + max_frames = 10 + extraline: Optional[str] = ( + "!!! Recursion error detected, but an error occurred locating the origin of recursion.\n" + " The following exception happened when comparing locals in the stack frame:\n" + " {exc_type}: {exc_msg}\n" + " Displaying first and last {max_frames} stack frames out of {total}." + ).format( + exc_type=type(e).__name__, + exc_msg=str(e), + max_frames=max_frames, + total=len(traceback), + ) + # Type ignored because adding two instances of a List subtype + # currently incorrectly has type List instead of the subtype. + traceback = traceback[:max_frames] + traceback[-max_frames:] # type: ignore + else: + if recursionindex is not None: + extraline = "!!! Recursion detected (same locals & position)" + traceback = traceback[: recursionindex + 1] + else: + extraline = None + + return traceback, extraline + + def repr_excinfo( + self, excinfo: ExceptionInfo[BaseException] + ) -> "ExceptionChainRepr": + repr_chain: List[ + Tuple[ReprTraceback, Optional[ReprFileLocation], Optional[str]] + ] = [] + e: Optional[BaseException] = excinfo.value + excinfo_: Optional[ExceptionInfo[BaseException]] = excinfo + descr = None + seen: Set[int] = set() + while e is not None and id(e) not in seen: + seen.add(id(e)) + + if excinfo_: + # Fall back to native traceback as a temporary workaround until + # full support for exception groups added to ExceptionInfo. + # See https://github.com/pytest-dev/pytest/issues/9159 + if isinstance(e, BaseExceptionGroup): + reprtraceback: Union[ + ReprTracebackNative, ReprTraceback + ] = ReprTracebackNative( + traceback.format_exception( + type(excinfo_.value), + excinfo_.value, + excinfo_.traceback[0]._rawentry, + ) + ) + else: + reprtraceback = self.repr_traceback(excinfo_) + reprcrash = excinfo_._getreprcrash() + else: + # Fallback to native repr if the exception doesn't have a traceback: + # ExceptionInfo objects require a full traceback to work. + reprtraceback = ReprTracebackNative( + traceback.format_exception(type(e), e, None) + ) + reprcrash = None + repr_chain += [(reprtraceback, reprcrash, descr)] + + if e.__cause__ is not None and self.chain: + e = e.__cause__ + excinfo_ = ExceptionInfo.from_exception(e) if e.__traceback__ else None + descr = "The above exception was the direct cause of the following exception:" + elif ( + e.__context__ is not None and not e.__suppress_context__ and self.chain + ): + e = e.__context__ + excinfo_ = ExceptionInfo.from_exception(e) if e.__traceback__ else None + descr = "During handling of the above exception, another exception occurred:" + else: + e = None + repr_chain.reverse() + return ExceptionChainRepr(repr_chain) + + +@dataclasses.dataclass(eq=False) +class TerminalRepr: + def __str__(self) -> str: + # FYI this is called from pytest-xdist's serialization of exception + # information. + io = StringIO() + tw = TerminalWriter(file=io) + self.toterminal(tw) + return io.getvalue().strip() + + def __repr__(self) -> str: + return f"<{self.__class__} instance at {id(self):0x}>" + + def toterminal(self, tw: TerminalWriter) -> None: + raise NotImplementedError() + + +# This class is abstract -- only subclasses are instantiated. +@dataclasses.dataclass(eq=False) +class ExceptionRepr(TerminalRepr): + # Provided by subclasses. 
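
Because `repr_excinfo()` walks `__cause__`/`__context__`, chained exceptions come back as a single `ExceptionChainRepr` with the familiar "direct cause" separator lines. A sketch (illustrative, not part of the vendored file):

    from _pytest._code import ExceptionInfo

    try:
        try:
            1 / 0
        except ZeroDivisionError as inner:
            raise ValueError("bad input") from inner
    except ValueError as exc:
        rep = ExceptionInfo.from_exception(exc).getrepr(chain=True)
        print(rep)  # ZeroDivisionError section, the "direct cause" line, then ValueError
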
+    reprtraceback: "ReprTraceback"
+    reprcrash: Optional["ReprFileLocation"]
+    sections: List[Tuple[str, str, str]] = dataclasses.field(
+        init=False, default_factory=list
+    )
+
+    def addsection(self, name: str, content: str, sep: str = "-") -> None:
+        self.sections.append((name, content, sep))
+
+    def toterminal(self, tw: TerminalWriter) -> None:
+        for name, content, sep in self.sections:
+            tw.sep(sep, name)
+            tw.line(content)
+
+
+@dataclasses.dataclass(eq=False)
+class ExceptionChainRepr(ExceptionRepr):
+    chain: Sequence[Tuple["ReprTraceback", Optional["ReprFileLocation"], Optional[str]]]
+
+    def __init__(
+        self,
+        chain: Sequence[
+            Tuple["ReprTraceback", Optional["ReprFileLocation"], Optional[str]]
+        ],
+    ) -> None:
+        # reprcrash and reprtraceback of the outermost (the newest) exception
+        # in the chain.
+        super().__init__(
+            reprtraceback=chain[-1][0],
+            reprcrash=chain[-1][1],
+        )
+        self.chain = chain
+
+    def toterminal(self, tw: TerminalWriter) -> None:
+        for element in self.chain:
+            element[0].toterminal(tw)
+            if element[2] is not None:
+                tw.line("")
+                tw.line(element[2], yellow=True)
+        super().toterminal(tw)
+
+
+@dataclasses.dataclass(eq=False)
+class ReprExceptionInfo(ExceptionRepr):
+    reprtraceback: "ReprTraceback"
+    reprcrash: Optional["ReprFileLocation"]
+
+    def toterminal(self, tw: TerminalWriter) -> None:
+        self.reprtraceback.toterminal(tw)
+        super().toterminal(tw)
+
+
+@dataclasses.dataclass(eq=False)
+class ReprTraceback(TerminalRepr):
+    reprentries: Sequence[Union["ReprEntry", "ReprEntryNative"]]
+    extraline: Optional[str]
+    style: "_TracebackStyle"
+
+    entrysep: ClassVar = "_ "
+
+    def toterminal(self, tw: TerminalWriter) -> None:
+        # The entries might have different styles.
+        for i, entry in enumerate(self.reprentries):
+            if entry.style == "long":
+                tw.line("")
+            entry.toterminal(tw)
+            if i < len(self.reprentries) - 1:
+                next_entry = self.reprentries[i + 1]
+                if (
+                    entry.style == "long"
+                    or entry.style == "short"
+                    and next_entry.style == "long"
+                ):
+                    tw.sep(self.entrysep)
+
+        if self.extraline:
+            tw.line(self.extraline)
+
+
+class ReprTracebackNative(ReprTraceback):
+    def __init__(self, tblines: Sequence[str]) -> None:
+        self.reprentries = [ReprEntryNative(tblines)]
+        self.extraline = None
+        self.style = "native"
+
+
+@dataclasses.dataclass(eq=False)
+class ReprEntryNative(TerminalRepr):
+    lines: Sequence[str]
+
+    style: ClassVar["_TracebackStyle"] = "native"
+
+    def toterminal(self, tw: TerminalWriter) -> None:
+        tw.write("".join(self.lines))
+
+
+@dataclasses.dataclass(eq=False)
+class ReprEntry(TerminalRepr):
+    lines: Sequence[str]
+    reprfuncargs: Optional["ReprFuncArgs"]
+    reprlocals: Optional["ReprLocals"]
+    reprfileloc: Optional["ReprFileLocation"]
+    style: "_TracebackStyle"
+
+    def _write_entry_lines(self, tw: TerminalWriter) -> None:
+        """Write the source code portions of a list of traceback entries with syntax highlighting.
+
+        Usually entries are lines like these:
+
+            "    x = 1"
+            ">   assert x == 2"
+            "E   assert 1 == 2"
+
+        This function takes care of rendering the "source" portions of it (the lines without
+        the "E" prefix) using syntax highlighting, taking care not to highlight the ">"
+        character, as doing so might break line continuations.
+ """ + + if not self.lines: + return + + # separate indents and source lines that are not failures: we want to + # highlight the code but not the indentation, which may contain markers + # such as "> assert 0" + fail_marker = f"{FormattedExcinfo.fail_marker} " + indent_size = len(fail_marker) + indents: List[str] = [] + source_lines: List[str] = [] + failure_lines: List[str] = [] + for index, line in enumerate(self.lines): + is_failure_line = line.startswith(fail_marker) + if is_failure_line: + # from this point on all lines are considered part of the failure + failure_lines.extend(self.lines[index:]) + break + else: + if self.style == "value": + source_lines.append(line) + else: + indents.append(line[:indent_size]) + source_lines.append(line[indent_size:]) + + tw._write_source(source_lines, indents) + + # failure lines are always completely red and bold + for line in failure_lines: + tw.line(line, bold=True, red=True) + + def toterminal(self, tw: TerminalWriter) -> None: + if self.style == "short": + if self.reprfileloc: + self.reprfileloc.toterminal(tw) + self._write_entry_lines(tw) + if self.reprlocals: + self.reprlocals.toterminal(tw, indent=" " * 8) + return + + if self.reprfuncargs: + self.reprfuncargs.toterminal(tw) + + self._write_entry_lines(tw) + + if self.reprlocals: + tw.line("") + self.reprlocals.toterminal(tw) + if self.reprfileloc: + if self.lines: + tw.line("") + self.reprfileloc.toterminal(tw) + + def __str__(self) -> str: + return "{}\n{}\n{}".format( + "\n".join(self.lines), self.reprlocals, self.reprfileloc + ) + + +@dataclasses.dataclass(eq=False) +class ReprFileLocation(TerminalRepr): + path: str + lineno: int + message: str + + def __post_init__(self) -> None: + self.path = str(self.path) + + def toterminal(self, tw: TerminalWriter) -> None: + # Filename and lineno output for each entry, using an output format + # that most editors understand. + msg = self.message + i = msg.find("\n") + if i != -1: + msg = msg[:i] + tw.write(self.path, bold=True, red=True) + tw.line(f":{self.lineno}: {msg}") + + +@dataclasses.dataclass(eq=False) +class ReprLocals(TerminalRepr): + lines: Sequence[str] + + def toterminal(self, tw: TerminalWriter, indent="") -> None: + for line in self.lines: + tw.line(indent + line) + + +@dataclasses.dataclass(eq=False) +class ReprFuncArgs(TerminalRepr): + args: Sequence[Tuple[str, object]] + + def toterminal(self, tw: TerminalWriter) -> None: + if self.args: + linesofar = "" + for name, value in self.args: + ns = f"{name} = {value}" + if len(ns) + len(linesofar) + 2 > tw.fullwidth: + if linesofar: + tw.line(linesofar) + linesofar = ns + else: + if linesofar: + linesofar += ", " + ns + else: + linesofar = ns + if linesofar: + tw.line(linesofar) + tw.line("") + + +def getfslineno(obj: object) -> Tuple[Union[str, Path], int]: + """Return source location (path, lineno) for the given object. + + If the source cannot be determined return ("", -1). + + The line number is 0-based. + """ + # xxx let decorators etc specify a sane ordering + # NOTE: this used to be done in _pytest.compat.getfslineno, initially added + # in 6ec13a2b9. It ("place_as") appears to be something very custom. 
+ obj = get_real_func(obj) + if hasattr(obj, "place_as"): + obj = obj.place_as # type: ignore[attr-defined] + + try: + code = Code.from_function(obj) + except TypeError: + try: + fn = inspect.getsourcefile(obj) or inspect.getfile(obj) # type: ignore[arg-type] + except TypeError: + return "", -1 + + fspath = fn and absolutepath(fn) or "" + lineno = -1 + if fspath: + try: + _, lineno = findsource(obj) + except OSError: + pass + return fspath, lineno + + return code.path, code.firstlineno + + +# Relative paths that we use to filter traceback entries from appearing to the user; +# see filter_traceback. +# note: if we need to add more paths than what we have now we should probably use a list +# for better maintenance. + +_PLUGGY_DIR = Path(pluggy.__file__.rstrip("oc")) +# pluggy is either a package or a single module depending on the version +if _PLUGGY_DIR.name == "__init__.py": + _PLUGGY_DIR = _PLUGGY_DIR.parent +_PYTEST_DIR = Path(_pytest.__file__).parent + + +def filter_traceback(entry: TracebackEntry) -> bool: + """Return True if a TracebackEntry instance should be included in tracebacks. + + We hide traceback entries of: + + * dynamically generated code (no code to show up for it); + * internal traceback from pytest or its internal libraries, py and pluggy. + """ + # entry.path might sometimes return a str object when the entry + # points to dynamically generated code. + # See https://bitbucket.org/pytest-dev/py/issues/71. + raw_filename = entry.frame.code.raw.co_filename + is_generated = "<" in raw_filename and ">" in raw_filename + if is_generated: + return False + + # entry.path might point to a non-existing file, in which case it will + # also return a str object. See #1133. + p = Path(entry.path) + + parents = p.parents + if _PLUGGY_DIR in parents: + return False + if _PYTEST_DIR in parents: + return False + + return True diff --git a/venv/lib/python3.11/site-packages/_pytest/_code/source.py b/venv/lib/python3.11/site-packages/_pytest/_code/source.py new file mode 100644 index 0000000..208cfb8 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/_code/source.py @@ -0,0 +1,217 @@ +import ast +import inspect +import textwrap +import tokenize +import types +import warnings +from bisect import bisect_right +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Optional +from typing import overload +from typing import Tuple +from typing import Union + + +class Source: + """An immutable object holding a source code fragment. + + When using Source(...), the source lines are deindented. + """ + + def __init__(self, obj: object = None) -> None: + if not obj: + self.lines: List[str] = [] + elif isinstance(obj, Source): + self.lines = obj.lines + elif isinstance(obj, (tuple, list)): + self.lines = deindent(x.rstrip("\n") for x in obj) + elif isinstance(obj, str): + self.lines = deindent(obj.split("\n")) + else: + try: + rawcode = getrawcode(obj) + src = inspect.getsource(rawcode) + except TypeError: + src = inspect.getsource(obj) # type: ignore[arg-type] + self.lines = deindent(src.split("\n")) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Source): + return NotImplemented + return self.lines == other.lines + + # Ignore type because of https://github.com/python/mypy/issues/4266. + __hash__ = None # type: ignore + + @overload + def __getitem__(self, key: int) -> str: + ... + + @overload + def __getitem__(self, key: slice) -> "Source": + ... 
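
A quick construction sketch for `Source` (illustrative, not part of the vendored file): strings are split and deindented on construction, equality compares the resulting line lists, and slicing returns a new `Source`:

    from _pytest._code.source import Source

    src = Source("    if failed:\n        raise ValueError()")
    print(str(src))                 # "if failed:\n    raise ValueError()"
    print(src[0])                   # "if failed:"
    print(src == Source(str(src)))  # True
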
+
+    def __getitem__(self, key: Union[int, slice]) -> Union[str, "Source"]:
+        if isinstance(key, int):
+            return self.lines[key]
+        else:
+            if key.step not in (None, 1):
+                raise IndexError("cannot slice a Source with a step")
+            newsource = Source()
+            newsource.lines = self.lines[key.start : key.stop]
+            return newsource
+
+    def __iter__(self) -> Iterator[str]:
+        return iter(self.lines)
+
+    def __len__(self) -> int:
+        return len(self.lines)
+
+    def strip(self) -> "Source":
+        """Return new Source object with trailing and leading blank lines removed."""
+        start, end = 0, len(self)
+        while start < end and not self.lines[start].strip():
+            start += 1
+        while end > start and not self.lines[end - 1].strip():
+            end -= 1
+        source = Source()
+        source.lines[:] = self.lines[start:end]
+        return source
+
+    def indent(self, indent: str = " " * 4) -> "Source":
+        """Return a copy of the source object with all lines indented by the
+        given indent-string."""
+        newsource = Source()
+        newsource.lines = [(indent + line) for line in self.lines]
+        return newsource
+
+    def getstatement(self, lineno: int) -> "Source":
+        """Return Source statement which contains the given line number
+        (counted from 0)."""
+        start, end = self.getstatementrange(lineno)
+        return self[start:end]
+
+    def getstatementrange(self, lineno: int) -> Tuple[int, int]:
+        """Return (start, end) tuple which spans the minimal statement region
+        containing the given lineno."""
+        if not (0 <= lineno < len(self)):
+            raise IndexError("lineno out of range")
+        ast, start, end = getstatementrange_ast(lineno, self)
+        return start, end
+
+    def deindent(self) -> "Source":
+        """Return a new Source object deindented."""
+        newsource = Source()
+        newsource.lines[:] = deindent(self.lines)
+        return newsource
+
+    def __str__(self) -> str:
+        return "\n".join(self.lines)
+
+
+#
+# helper functions
+#
+
+
+def findsource(obj) -> Tuple[Optional[Source], int]:
+    try:
+        sourcelines, lineno = inspect.findsource(obj)
+    except Exception:
+        return None, -1
+    source = Source()
+    source.lines = [line.rstrip() for line in sourcelines]
+    return source, lineno
+
+
+def getrawcode(obj: object, trycall: bool = True) -> types.CodeType:
+    """Return code object for given function."""
+    try:
+        return obj.__code__  # type: ignore[attr-defined,no-any-return]
+    except AttributeError:
+        pass
+    if trycall:
+        call = getattr(obj, "__call__", None)
+        if call and not isinstance(obj, type):
+            return getrawcode(call, trycall=False)
+    raise TypeError(f"could not get code object for {obj!r}")
+
+
+def deindent(lines: Iterable[str]) -> List[str]:
+    return textwrap.dedent("\n".join(lines)).splitlines()
+
+
+def get_statement_startend2(lineno: int, node: ast.AST) -> Tuple[int, Optional[int]]:
+    # Flatten all statements and except handlers into one lineno-list.
+    # AST's line numbers start indexing at 1.
+    values: List[int] = []
+    for x in ast.walk(node):
+        if isinstance(x, (ast.stmt, ast.ExceptHandler)):
+            # Before Python 3.8, the lineno of a decorated class or function pointed at the decorator.
+            # Since Python 3.8, the lineno points to the class/def, so need to include the decorators.
+            if isinstance(x, (ast.ClassDef, ast.FunctionDef, ast.AsyncFunctionDef)):
+                for d in x.decorator_list:
+                    values.append(d.lineno - 1)
+            values.append(x.lineno - 1)
+            for name in ("finalbody", "orelse"):
+                val: Optional[List[ast.stmt]] = getattr(x, name, None)
+                if val:
+                    # Treat the finally/orelse part as its own statement.
+ values.append(val[0].lineno - 1 - 1) + values.sort() + insert_index = bisect_right(values, lineno) + start = values[insert_index - 1] + if insert_index >= len(values): + end = None + else: + end = values[insert_index] + return start, end + + +def getstatementrange_ast( + lineno: int, + source: Source, + assertion: bool = False, + astnode: Optional[ast.AST] = None, +) -> Tuple[ast.AST, int, int]: + if astnode is None: + content = str(source) + # See #4260: + # Don't produce duplicate warnings when compiling source to find AST. + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + astnode = ast.parse(content, "source", "exec") + + start, end = get_statement_startend2(lineno, astnode) + # We need to correct the end: + # - ast-parsing strips comments + # - there might be empty lines + # - we might have lesser indented code blocks at the end + if end is None: + end = len(source.lines) + + if end > start + 1: + # Make sure we don't span differently indented code blocks + # by using the BlockFinder helper used which inspect.getsource() uses itself. + block_finder = inspect.BlockFinder() + # If we start with an indented line, put blockfinder to "started" mode. + block_finder.started = source.lines[start][0].isspace() + it = ((x + "\n") for x in source.lines[start:end]) + try: + for tok in tokenize.generate_tokens(lambda: next(it)): + block_finder.tokeneater(*tok) + except (inspect.EndOfBlock, IndentationError): + end = block_finder.last + start + except Exception: + pass + + # The end might still point to a comment or empty line, correct it. + while end: + line = source.lines[end - 1].lstrip() + if line.startswith("#") or not line: + end -= 1 + else: + break + return astnode, start, end diff --git a/venv/lib/python3.11/site-packages/_pytest/_io/__init__.py b/venv/lib/python3.11/site-packages/_pytest/_io/__init__.py new file mode 100644 index 0000000..db001e9 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/_io/__init__.py @@ -0,0 +1,8 @@ +from .terminalwriter import get_terminal_width +from .terminalwriter import TerminalWriter + + +__all__ = [ + "TerminalWriter", + "get_terminal_width", +] diff --git a/venv/lib/python3.11/site-packages/_pytest/_io/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/_io/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..93eded4 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/_io/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/_io/__pycache__/saferepr.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/_io/__pycache__/saferepr.cpython-311.pyc new file mode 100644 index 0000000..a1fe6ea Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/_io/__pycache__/saferepr.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/_io/__pycache__/terminalwriter.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/_io/__pycache__/terminalwriter.cpython-311.pyc new file mode 100644 index 0000000..72d6fc0 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/_io/__pycache__/terminalwriter.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/_io/__pycache__/wcwidth.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/_io/__pycache__/wcwidth.cpython-311.pyc new file mode 100644 index 0000000..5e21e73 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/_io/__pycache__/wcwidth.cpython-311.pyc differ diff --git 
a/venv/lib/python3.11/site-packages/_pytest/_io/saferepr.py b/venv/lib/python3.11/site-packages/_pytest/_io/saferepr.py new file mode 100644 index 0000000..c701872 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/_io/saferepr.py @@ -0,0 +1,180 @@ +import pprint +import reprlib +from typing import Any +from typing import Dict +from typing import IO +from typing import Optional + + +def _try_repr_or_str(obj: object) -> str: + try: + return repr(obj) + except (KeyboardInterrupt, SystemExit): + raise + except BaseException: + return f'{type(obj).__name__}("{obj}")' + + +def _format_repr_exception(exc: BaseException, obj: object) -> str: + try: + exc_info = _try_repr_or_str(exc) + except (KeyboardInterrupt, SystemExit): + raise + except BaseException as exc: + exc_info = f"unpresentable exception ({_try_repr_or_str(exc)})" + return "<[{} raised in repr()] {} object at 0x{:x}>".format( + exc_info, type(obj).__name__, id(obj) + ) + + +def _ellipsize(s: str, maxsize: int) -> str: + if len(s) > maxsize: + i = max(0, (maxsize - 3) // 2) + j = max(0, maxsize - 3 - i) + return s[:i] + "..." + s[len(s) - j :] + return s + + +class SafeRepr(reprlib.Repr): + """ + repr.Repr that limits the resulting size of repr() and includes + information on exceptions raised during the call. + """ + + def __init__(self, maxsize: Optional[int], use_ascii: bool = False) -> None: + """ + :param maxsize: + If not None, will truncate the resulting repr to that specific size, using ellipsis + somewhere in the middle to hide the extra text. + If None, will not impose any size limits on the returning repr. + """ + super().__init__() + # ``maxstring`` is used by the superclass, and needs to be an int; using a + # very large number in case maxsize is None, meaning we want to disable + # truncation. + self.maxstring = maxsize if maxsize is not None else 1_000_000_000 + self.maxsize = maxsize + self.use_ascii = use_ascii + + def repr(self, x: object) -> str: + try: + if self.use_ascii: + s = ascii(x) + else: + s = super().repr(x) + + except (KeyboardInterrupt, SystemExit): + raise + except BaseException as exc: + s = _format_repr_exception(exc, x) + if self.maxsize is not None: + s = _ellipsize(s, self.maxsize) + return s + + def repr_instance(self, x: object, level: int) -> str: + try: + s = repr(x) + except (KeyboardInterrupt, SystemExit): + raise + except BaseException as exc: + s = _format_repr_exception(exc, x) + if self.maxsize is not None: + s = _ellipsize(s, self.maxsize) + return s + + +def safeformat(obj: object) -> str: + """Return a pretty printed string for the given object. + + Failing __repr__ functions of user instances will be represented + with a short exception info. + """ + try: + return pprint.pformat(obj) + except Exception as exc: + return _format_repr_exception(exc, obj) + + +# Maximum size of overall repr of objects to display during assertion errors. +DEFAULT_REPR_MAX_SIZE = 240 + + +def saferepr( + obj: object, maxsize: Optional[int] = DEFAULT_REPR_MAX_SIZE, use_ascii: bool = False +) -> str: + """Return a size-limited safe repr-string for the given object. + + Failing __repr__ functions of user instances will be represented + with a short exception info and 'saferepr' generally takes + care to never raise exceptions itself. + + This function is a wrapper around the Repr/reprlib functionality of the + stdlib. 
+    """
+
+    return SafeRepr(maxsize, use_ascii).repr(obj)
+
+
+def saferepr_unlimited(obj: object, use_ascii: bool = True) -> str:
+    """Return an unlimited-size safe repr-string for the given object.
+
+    As with saferepr, failing __repr__ functions of user instances
+    will be represented with a short exception info.
+
+    This function is a wrapper around simple repr.
+
+    Note: a cleaner solution would be to alter ``saferepr`` this way
+    when maxsize=None, but that might affect some other code.
+    """
+    try:
+        if use_ascii:
+            return ascii(obj)
+        return repr(obj)
+    except Exception as exc:
+        return _format_repr_exception(exc, obj)
+
+
+class AlwaysDispatchingPrettyPrinter(pprint.PrettyPrinter):
+    """PrettyPrinter that always dispatches (regardless of width)."""
+
+    def _format(
+        self,
+        object: object,
+        stream: IO[str],
+        indent: int,
+        allowance: int,
+        context: Dict[int, Any],
+        level: int,
+    ) -> None:
+        # Type ignored because _dispatch is private.
+        p = self._dispatch.get(type(object).__repr__, None)  # type: ignore[attr-defined]
+
+        objid = id(object)
+        if objid in context or p is None:
+            # Type ignored because _format is private.
+            super()._format(  # type: ignore[misc]
+                object,
+                stream,
+                indent,
+                allowance,
+                context,
+                level,
+            )
+            return
+
+        context[objid] = 1
+        p(self, object, stream, indent, allowance, context, level + 1)
+        del context[objid]
+
+
+def _pformat_dispatch(
+    object: object,
+    indent: int = 1,
+    width: int = 80,
+    depth: Optional[int] = None,
+    *,
+    compact: bool = False,
+) -> str:
+    return AlwaysDispatchingPrettyPrinter(
+        indent=indent, width=width, depth=depth, compact=compact
+    ).pformat(object)
diff --git a/venv/lib/python3.11/site-packages/_pytest/_io/terminalwriter.py b/venv/lib/python3.11/site-packages/_pytest/_io/terminalwriter.py
new file mode 100644
index 0000000..379035d
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/_pytest/_io/terminalwriter.py
@@ -0,0 +1,233 @@
+"""Helper functions for writing to terminals and files."""
+import os
+import shutil
+import sys
+from typing import Optional
+from typing import Sequence
+from typing import TextIO
+
+from .wcwidth import wcswidth
+from _pytest.compat import final
+
+
+# This code was initially copied from py 1.8.1, file _io/terminalwriter.py.
+
+
+def get_terminal_width() -> int:
+    width, _ = shutil.get_terminal_size(fallback=(80, 24))
+
+    # The Windows get_terminal_size may be bogus, let's sanify a bit.
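
The point of these helpers is that `repr()` itself may raise; a sketch of `saferepr` (illustrative, not part of the vendored file):

    from _pytest._io.saferepr import saferepr

    class Broken:
        def __repr__(self):
            raise RuntimeError("boom")

    print(saferepr(Broken()))    # "<[RuntimeError('boom') raised in repr()] Broken object at 0x...>"
    print(saferepr("x" * 1000))  # ellipsized around DEFAULT_REPR_MAX_SIZE (240) characters
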
+ if width < 40: + width = 80 + + return width + + +def should_do_markup(file: TextIO) -> bool: + if os.environ.get("PY_COLORS") == "1": + return True + if os.environ.get("PY_COLORS") == "0": + return False + if "NO_COLOR" in os.environ: + return False + if "FORCE_COLOR" in os.environ: + return True + return ( + hasattr(file, "isatty") and file.isatty() and os.environ.get("TERM") != "dumb" + ) + + +@final +class TerminalWriter: + _esctable = dict( + black=30, + red=31, + green=32, + yellow=33, + blue=34, + purple=35, + cyan=36, + white=37, + Black=40, + Red=41, + Green=42, + Yellow=43, + Blue=44, + Purple=45, + Cyan=46, + White=47, + bold=1, + light=2, + blink=5, + invert=7, + ) + + def __init__(self, file: Optional[TextIO] = None) -> None: + if file is None: + file = sys.stdout + if hasattr(file, "isatty") and file.isatty() and sys.platform == "win32": + try: + import colorama + except ImportError: + pass + else: + file = colorama.AnsiToWin32(file).stream + assert file is not None + self._file = file + self.hasmarkup = should_do_markup(file) + self._current_line = "" + self._terminal_width: Optional[int] = None + self.code_highlight = True + + @property + def fullwidth(self) -> int: + if self._terminal_width is not None: + return self._terminal_width + return get_terminal_width() + + @fullwidth.setter + def fullwidth(self, value: int) -> None: + self._terminal_width = value + + @property + def width_of_current_line(self) -> int: + """Return an estimate of the width so far in the current line.""" + return wcswidth(self._current_line) + + def markup(self, text: str, **markup: bool) -> str: + for name in markup: + if name not in self._esctable: + raise ValueError(f"unknown markup: {name!r}") + if self.hasmarkup: + esc = [self._esctable[name] for name, on in markup.items() if on] + if esc: + text = "".join("\x1b[%sm" % cod for cod in esc) + text + "\x1b[0m" + return text + + def sep( + self, + sepchar: str, + title: Optional[str] = None, + fullwidth: Optional[int] = None, + **markup: bool, + ) -> None: + if fullwidth is None: + fullwidth = self.fullwidth + # The goal is to have the line be as long as possible + # under the condition that len(line) <= fullwidth. + if sys.platform == "win32": + # If we print in the last column on windows we are on a + # new line but there is no way to verify/neutralize this + # (we may not know the exact line width). + # So let's be defensive to avoid empty lines in the output. + fullwidth -= 1 + if title is not None: + # we want 2 + 2*len(fill) + len(title) <= fullwidth + # i.e. 2 + 2*len(sepchar)*N + len(title) <= fullwidth + # 2*len(sepchar)*N <= fullwidth - len(title) - 2 + # N <= (fullwidth - len(title) - 2) // (2*len(sepchar)) + N = max((fullwidth - len(title) - 2) // (2 * len(sepchar)), 1) + fill = sepchar * N + line = f"{fill} {title} {fill}" + else: + # we want len(sepchar)*N <= fullwidth + # i.e. N <= fullwidth // len(sepchar) + line = sepchar * (fullwidth // len(sepchar)) + # In some situations there is room for an extra sepchar at the right, + # in particular if we consider that with a sepchar like "_ " the + # trailing space is not important at the end of the line. 
+ if len(line) + len(sepchar.rstrip()) <= fullwidth: + line += sepchar.rstrip() + + self.line(line, **markup) + + def write(self, msg: str, *, flush: bool = False, **markup: bool) -> None: + if msg: + current_line = msg.rsplit("\n", 1)[-1] + if "\n" in msg: + self._current_line = current_line + else: + self._current_line += current_line + + msg = self.markup(msg, **markup) + + try: + self._file.write(msg) + except UnicodeEncodeError: + # Some environments don't support printing general Unicode + # strings, due to misconfiguration or otherwise; in that case, + # print the string escaped to ASCII. + # When the Unicode situation improves we should consider + # letting the error propagate instead of masking it (see #7475 + # for one brief attempt). + msg = msg.encode("unicode-escape").decode("ascii") + self._file.write(msg) + + if flush: + self.flush() + + def line(self, s: str = "", **markup: bool) -> None: + self.write(s, **markup) + self.write("\n") + + def flush(self) -> None: + self._file.flush() + + def _write_source(self, lines: Sequence[str], indents: Sequence[str] = ()) -> None: + """Write lines of source code possibly highlighted. + + Keeping this private for now because the API is clunky. We should discuss how + to evolve the terminal writer so we can have more precise color support, for example + being able to write part of a line in one color and the rest in another, and so on. + """ + if indents and len(indents) != len(lines): + raise ValueError( + "indents size ({}) should have same size as lines ({})".format( + len(indents), len(lines) + ) + ) + if not indents: + indents = [""] * len(lines) + source = "\n".join(lines) + new_lines = self._highlight(source).splitlines() + for indent, new_line in zip(indents, new_lines): + self.line(indent + new_line) + + def _highlight(self, source: str) -> str: + """Highlight the given source code if we have markup support.""" + from _pytest.config.exceptions import UsageError + + if not self.hasmarkup or not self.code_highlight: + return source + try: + from pygments.formatters.terminal import TerminalFormatter + from pygments.lexers.python import PythonLexer + from pygments import highlight + import pygments.util + except ImportError: + return source + else: + try: + highlighted: str = highlight( + source, + PythonLexer(), + TerminalFormatter( + bg=os.getenv("PYTEST_THEME_MODE", "dark"), + style=os.getenv("PYTEST_THEME"), + ), + ) + return highlighted + except pygments.util.ClassNotFound: + raise UsageError( + "PYTEST_THEME environment variable had an invalid value: '{}'. " + "Only valid pygment styles are allowed.".format( + os.getenv("PYTEST_THEME") + ) + ) + except pygments.util.OptionError: + raise UsageError( + "PYTEST_THEME_MODE environment variable had an invalid value: '{}'. " + "The only allowed values are 'dark' and 'light'.".format( + os.getenv("PYTEST_THEME_MODE") + ) + ) diff --git a/venv/lib/python3.11/site-packages/_pytest/_io/wcwidth.py b/venv/lib/python3.11/site-packages/_pytest/_io/wcwidth.py new file mode 100644 index 0000000..e5c7bf4 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/_io/wcwidth.py @@ -0,0 +1,55 @@ +import unicodedata +from functools import lru_cache + + +@lru_cache(100) +def wcwidth(c: str) -> int: + """Determine how many columns are needed to display a character in a terminal. + + Returns -1 if the character is not printable. + Returns 0, 1 or 2 for other characters. + """ + o = ord(c) + + # ASCII fast path. 
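
A writing sketch for the `TerminalWriter` defined above (illustrative, not part of the vendored file; markup is forced on because a StringIO is not a tty):

    import io
    from _pytest._io import TerminalWriter

    buf = io.StringIO()
    tw = TerminalWriter(file=buf)
    tw.hasmarkup = True  # assumption for the demo: force ANSI escape codes
    tw.sep("=", "session starts", bold=True)
    tw.line("collected 3 items", green=True)
    print(buf.getvalue())
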
+    if 0x20 <= o < 0x07F:
+        return 1
+
+    # Some Cf/Zp/Zl characters which should be zero-width.
+    if (
+        o == 0x0000
+        or 0x200B <= o <= 0x200F
+        or 0x2028 <= o <= 0x202E
+        or 0x2060 <= o <= 0x2063
+    ):
+        return 0
+
+    category = unicodedata.category(c)
+
+    # Control characters.
+    if category == "Cc":
+        return -1
+
+    # Combining characters with zero width.
+    if category in ("Me", "Mn"):
+        return 0
+
+    # Full/Wide east asian characters.
+    if unicodedata.east_asian_width(c) in ("F", "W"):
+        return 2
+
+    return 1
+
+
+def wcswidth(s: str) -> int:
+    """Determine how many columns are needed to display a string in a terminal.
+
+    Returns -1 if the string contains non-printable characters.
+    """
+    width = 0
+    for c in unicodedata.normalize("NFC", s):
+        wc = wcwidth(c)
+        if wc < 0:
+            return -1
+        width += wc
+    return width
diff --git a/venv/lib/python3.11/site-packages/_pytest/_py/__init__.py b/venv/lib/python3.11/site-packages/_pytest/_py/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/venv/lib/python3.11/site-packages/_pytest/_py/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/_py/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..815660f
Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/_py/__pycache__/__init__.cpython-311.pyc differ
diff --git a/venv/lib/python3.11/site-packages/_pytest/_py/__pycache__/error.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/_py/__pycache__/error.cpython-311.pyc
new file mode 100644
index 0000000..7c339c3
Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/_py/__pycache__/error.cpython-311.pyc differ
diff --git a/venv/lib/python3.11/site-packages/_pytest/_py/__pycache__/path.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/_py/__pycache__/path.cpython-311.pyc
new file mode 100644
index 0000000..3fe42ae
Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/_py/__pycache__/path.cpython-311.pyc differ
diff --git a/venv/lib/python3.11/site-packages/_pytest/_py/error.py b/venv/lib/python3.11/site-packages/_pytest/_py/error.py
new file mode 100644
index 0000000..0b8f2d5
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/_pytest/_py/error.py
@@ -0,0 +1,109 @@
+"""create errno-specific classes for IO or os calls."""
+from __future__ import annotations
+
+import errno
+import os
+import sys
+from typing import Callable
+from typing import TYPE_CHECKING
+from typing import TypeVar
+
+if TYPE_CHECKING:
+    from typing_extensions import ParamSpec
+
+    P = ParamSpec("P")
+
+R = TypeVar("R")
+
+
+class Error(EnvironmentError):
+    def __repr__(self) -> str:
+        return "{}.{} {!r}: {} ".format(
+            self.__class__.__module__,
+            self.__class__.__name__,
+            self.__class__.__doc__,
+            " ".join(map(str, self.args)),
+            # repr(self.args)
+        )
+
+    def __str__(self) -> str:
+        s = "[{}]: {}".format(
+            self.__class__.__doc__,
+            " ".join(map(str, self.args)),
+        )
+        return s
+
+
+_winerrnomap = {
+    2: errno.ENOENT,
+    3: errno.ENOENT,
+    17: errno.EEXIST,
+    18: errno.EXDEV,
+    13: errno.EBUSY,  # empty cd drive, but ENOMEDIUM seems unavailable
+    22: errno.ENOTDIR,
+    20: errno.ENOTDIR,
+    267: errno.ENOTDIR,
+    5: errno.EACCES,  # anything better?
+}
+
+
+class ErrorMaker:
+    """lazily provides Exception classes for each possible POSIX errno
+    (as defined per the 'errno' module). All such instances
+    subclass EnvironmentError.
+ """ + + _errno2class: dict[int, type[Error]] = {} + + def __getattr__(self, name: str) -> type[Error]: + if name[0] == "_": + raise AttributeError(name) + eno = getattr(errno, name) + cls = self._geterrnoclass(eno) + setattr(self, name, cls) + return cls + + def _geterrnoclass(self, eno: int) -> type[Error]: + try: + return self._errno2class[eno] + except KeyError: + clsname = errno.errorcode.get(eno, "UnknownErrno%d" % (eno,)) + errorcls = type( + clsname, + (Error,), + {"__module__": "py.error", "__doc__": os.strerror(eno)}, + ) + self._errno2class[eno] = errorcls + return errorcls + + def checked_call( + self, func: Callable[P, R], *args: P.args, **kwargs: P.kwargs + ) -> R: + """Call a function and raise an errno-exception if applicable.""" + __tracebackhide__ = True + try: + return func(*args, **kwargs) + except Error: + raise + except OSError as value: + if not hasattr(value, "errno"): + raise + errno = value.errno + if sys.platform == "win32": + try: + cls = self._geterrnoclass(_winerrnomap[errno]) + except KeyError: + raise value + else: + # we are not on Windows, or we got a proper OSError + cls = self._geterrnoclass(errno) + + raise cls(f"{func.__name__}{args!r}") + + +_error_maker = ErrorMaker() +checked_call = _error_maker.checked_call + + +def __getattr__(attr: str) -> type[Error]: + return getattr(_error_maker, attr) # type: ignore[no-any-return] diff --git a/venv/lib/python3.11/site-packages/_pytest/_py/path.py b/venv/lib/python3.11/site-packages/_pytest/_py/path.py new file mode 100644 index 0000000..73a070d --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/_py/path.py @@ -0,0 +1,1475 @@ +"""local path implementation.""" +from __future__ import annotations + +import atexit +import fnmatch +import importlib.util +import io +import os +import posixpath +import sys +import uuid +import warnings +from contextlib import contextmanager +from os.path import abspath +from os.path import dirname +from os.path import exists +from os.path import isabs +from os.path import isdir +from os.path import isfile +from os.path import islink +from os.path import normpath +from stat import S_ISDIR +from stat import S_ISLNK +from stat import S_ISREG +from typing import Any +from typing import Callable +from typing import cast +from typing import overload +from typing import TYPE_CHECKING + +from . import error + +if TYPE_CHECKING: + from typing import Literal + +# Moved from local.py. +iswin32 = sys.platform == "win32" or (getattr(os, "_name", False) == "nt") + + +class Checkers: + _depend_on_existence = "exists", "link", "dir", "file" + + def __init__(self, path): + self.path = path + + def dotfile(self): + return self.path.basename.startswith(".") + + def ext(self, arg): + if not arg.startswith("."): + arg = "." 
+ arg + return self.path.ext == arg + + def basename(self, arg): + return self.path.basename == arg + + def basestarts(self, arg): + return self.path.basename.startswith(arg) + + def relto(self, arg): + return self.path.relto(arg) + + def fnmatch(self, arg): + return self.path.fnmatch(arg) + + def endswith(self, arg): + return str(self.path).endswith(arg) + + def _evaluate(self, kw): + from .._code.source import getrawcode + + for name, value in kw.items(): + invert = False + meth = None + try: + meth = getattr(self, name) + except AttributeError: + if name[:3] == "not": + invert = True + try: + meth = getattr(self, name[3:]) + except AttributeError: + pass + if meth is None: + raise TypeError(f"no {name!r} checker available for {self.path!r}") + try: + if getrawcode(meth).co_argcount > 1: + if (not meth(value)) ^ invert: + return False + else: + if bool(value) ^ bool(meth()) ^ invert: + return False + except (error.ENOENT, error.ENOTDIR, error.EBUSY): + # EBUSY feels not entirely correct, + # but its kind of necessary since ENOMEDIUM + # is not accessible in python + for name in self._depend_on_existence: + if name in kw: + if kw.get(name): + return False + name = "not" + name + if name in kw: + if not kw.get(name): + return False + return True + + _statcache: Stat + + def _stat(self) -> Stat: + try: + return self._statcache + except AttributeError: + try: + self._statcache = self.path.stat() + except error.ELOOP: + self._statcache = self.path.lstat() + return self._statcache + + def dir(self): + return S_ISDIR(self._stat().mode) + + def file(self): + return S_ISREG(self._stat().mode) + + def exists(self): + return self._stat() + + def link(self): + st = self.path.lstat() + return S_ISLNK(st.mode) + + +class NeverRaised(Exception): + pass + + +class Visitor: + def __init__(self, fil, rec, ignore, bf, sort): + if isinstance(fil, str): + fil = FNMatcher(fil) + if isinstance(rec, str): + self.rec: Callable[[LocalPath], bool] = FNMatcher(rec) + elif not hasattr(rec, "__call__") and rec: + self.rec = lambda path: True + else: + self.rec = rec + self.fil = fil + self.ignore = ignore + self.breadthfirst = bf + self.optsort = cast(Callable[[Any], Any], sorted) if sort else (lambda x: x) + + def gen(self, path): + try: + entries = path.listdir() + except self.ignore: + return + rec = self.rec + dirs = self.optsort( + [p for p in entries if p.check(dir=1) and (rec is None or rec(p))] + ) + if not self.breadthfirst: + for subdir in dirs: + for p in self.gen(subdir): + yield p + for p in self.optsort(entries): + if self.fil is None or self.fil(p): + yield p + if self.breadthfirst: + for subdir in dirs: + for p in self.gen(subdir): + yield p + + +class FNMatcher: + def __init__(self, pattern): + self.pattern = pattern + + def __call__(self, path): + pattern = self.pattern + + if ( + pattern.find(path.sep) == -1 + and iswin32 + and pattern.find(posixpath.sep) != -1 + ): + # Running on Windows, the pattern has no Windows path separators, + # and the pattern has one or more Posix path separators. Replace + # the Posix path separators with the Windows path separator. + pattern = pattern.replace(posixpath.sep, path.sep) + + if pattern.find(path.sep) == -1: + name = path.basename + else: + name = str(path) # path.strpath # XXX svn? + if not os.path.isabs(pattern): + pattern = "*" + path.sep + pattern + return fnmatch.fnmatch(name, pattern) + + +def map_as_list(func, iter): + return list(map(func, iter)) + + +class Stat: + if TYPE_CHECKING: + + @property + def size(self) -> int: + ... 
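
The errno machinery in error.py above is normally consumed through `checked_call()`, which re-raises OS-level failures as errno-specific `Error` subclasses. A sketch (illustrative, not part of the vendored file):

    import os
    from _pytest._py import error

    try:
        error.checked_call(os.rename, "no-such-file", "target")
    except error.ENOENT as exc:
        print(exc)  # "[No such file or directory]: rename('no-such-file', 'target')"
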
+ + @property + def mtime(self) -> float: + ... + + def __getattr__(self, name: str) -> Any: + return getattr(self._osstatresult, "st_" + name) + + def __init__(self, path, osstatresult): + self.path = path + self._osstatresult = osstatresult + + @property + def owner(self): + if iswin32: + raise NotImplementedError("XXX win32") + import pwd + + entry = error.checked_call(pwd.getpwuid, self.uid) # type:ignore[attr-defined] + return entry[0] + + @property + def group(self): + """Return group name of file.""" + if iswin32: + raise NotImplementedError("XXX win32") + import grp + + entry = error.checked_call(grp.getgrgid, self.gid) # type:ignore[attr-defined] + return entry[0] + + def isdir(self): + return S_ISDIR(self._osstatresult.st_mode) + + def isfile(self): + return S_ISREG(self._osstatresult.st_mode) + + def islink(self): + self.path.lstat() + return S_ISLNK(self._osstatresult.st_mode) + + +def getuserid(user): + import pwd + + if not isinstance(user, int): + user = pwd.getpwnam(user)[2] # type:ignore[attr-defined] + return user + + +def getgroupid(group): + import grp + + if not isinstance(group, int): + group = grp.getgrnam(group)[2] # type:ignore[attr-defined] + return group + + +class LocalPath: + """Object oriented interface to os.path and other local filesystem + related information. + """ + + class ImportMismatchError(ImportError): + """raised on pyimport() if there is a mismatch of __file__'s""" + + sep = os.sep + + def __init__(self, path=None, expanduser=False): + """Initialize and return a local Path instance. + + Path can be relative to the current directory. + If path is None it defaults to the current working directory. + If expanduser is True, tilde-expansion is performed. + Note that Path instances always carry an absolute path. + Note also that passing in a local path object will simply return + the exact same path object. Use new() to get a new copy. + """ + if path is None: + self.strpath = error.checked_call(os.getcwd) + else: + try: + path = os.fspath(path) + except TypeError: + raise ValueError( + "can only pass None, Path instances " + "or non-empty strings to LocalPath" + ) + if expanduser: + path = os.path.expanduser(path) + self.strpath = abspath(path) + + if sys.platform != "win32": + + def chown(self, user, group, rec=0): + """Change ownership to the given user and group. + user and group may be specified by a number or + by a name. if rec is True change ownership + recursively. 
+ """ + uid = getuserid(user) + gid = getgroupid(group) + if rec: + for x in self.visit(rec=lambda x: x.check(link=0)): + if x.check(link=0): + error.checked_call(os.chown, str(x), uid, gid) + error.checked_call(os.chown, str(self), uid, gid) + + def readlink(self) -> str: + """Return value of a symbolic link.""" + # https://github.com/python/mypy/issues/12278 + return error.checked_call(os.readlink, self.strpath) # type: ignore[arg-type,return-value] + + def mklinkto(self, oldname): + """Posix style hard link to another name.""" + error.checked_call(os.link, str(oldname), str(self)) + + def mksymlinkto(self, value, absolute=1): + """Create a symbolic link with the given value (pointing to another name).""" + if absolute: + error.checked_call(os.symlink, str(value), self.strpath) + else: + base = self.common(value) + # with posix local paths '/' is always a common base + relsource = self.__class__(value).relto(base) + reldest = self.relto(base) + n = reldest.count(self.sep) + target = self.sep.join(("..",) * n + (relsource,)) + error.checked_call(os.symlink, target, self.strpath) + + def __div__(self, other): + return self.join(os.fspath(other)) + + __truediv__ = __div__ # py3k + + @property + def basename(self): + """Basename part of path.""" + return self._getbyspec("basename")[0] + + @property + def dirname(self): + """Dirname part of path.""" + return self._getbyspec("dirname")[0] + + @property + def purebasename(self): + """Pure base name of the path.""" + return self._getbyspec("purebasename")[0] + + @property + def ext(self): + """Extension of the path (including the '.').""" + return self._getbyspec("ext")[0] + + def read_binary(self): + """Read and return a bytestring from reading the path.""" + with self.open("rb") as f: + return f.read() + + def read_text(self, encoding): + """Read and return a Unicode string from reading the path.""" + with self.open("r", encoding=encoding) as f: + return f.read() + + def read(self, mode="r"): + """Read and return a bytestring from reading the path.""" + with self.open(mode) as f: + return f.read() + + def readlines(self, cr=1): + """Read and return a list of lines from the path. if cr is False, the + newline will be removed from the end of each line.""" + mode = "r" + + if not cr: + content = self.read(mode) + return content.split("\n") + else: + f = self.open(mode) + try: + return f.readlines() + finally: + f.close() + + def load(self): + """(deprecated) return object unpickled from self.read()""" + f = self.open("rb") + try: + import pickle + + return error.checked_call(pickle.load, f) + finally: + f.close() + + def move(self, target): + """Move this path to target.""" + if target.relto(self): + raise error.EINVAL(target, "cannot move path into a subdirectory of itself") + try: + self.rename(target) + except error.EXDEV: # invalid cross-device link + self.copy(target) + self.remove() + + def fnmatch(self, pattern): + """Return true if the basename/fullname matches the glob-'pattern'. + + valid pattern characters:: + + * matches everything + ? matches any single character + [seq] matches any character in seq + [!seq] matches any char not in seq + + If the pattern contains a path-separator then the full path + is used for pattern matching and a '*' is prepended to the + pattern. + + if the pattern doesn't contain a path-separator the pattern + is only matched against the basename. + """ + return FNMatcher(pattern)(self) + + def relto(self, relpath): + """Return a string which is the relative part of the path + to the given 'relpath'. 
+ """ + if not isinstance(relpath, (str, LocalPath)): + raise TypeError(f"{relpath!r}: not a string or path object") + strrelpath = str(relpath) + if strrelpath and strrelpath[-1] != self.sep: + strrelpath += self.sep + # assert strrelpath[-1] == self.sep + # assert strrelpath[-2] != self.sep + strself = self.strpath + if sys.platform == "win32" or getattr(os, "_name", None) == "nt": + if os.path.normcase(strself).startswith(os.path.normcase(strrelpath)): + return strself[len(strrelpath) :] + elif strself.startswith(strrelpath): + return strself[len(strrelpath) :] + return "" + + def ensure_dir(self, *args): + """Ensure the path joined with args is a directory.""" + return self.ensure(*args, **{"dir": True}) + + def bestrelpath(self, dest): + """Return a string which is a relative path from self + (assumed to be a directory) to dest such that + self.join(bestrelpath) == dest and if not such + path can be determined return dest. + """ + try: + if self == dest: + return os.curdir + base = self.common(dest) + if not base: # can be the case on windows + return str(dest) + self2base = self.relto(base) + reldest = dest.relto(base) + if self2base: + n = self2base.count(self.sep) + 1 + else: + n = 0 + lst = [os.pardir] * n + if reldest: + lst.append(reldest) + target = dest.sep.join(lst) + return target + except AttributeError: + return str(dest) + + def exists(self): + return self.check() + + def isdir(self): + return self.check(dir=1) + + def isfile(self): + return self.check(file=1) + + def parts(self, reverse=False): + """Return a root-first list of all ancestor directories + plus the path itself. + """ + current = self + lst = [self] + while 1: + last = current + current = current.dirpath() + if last == current: + break + lst.append(current) + if not reverse: + lst.reverse() + return lst + + def common(self, other): + """Return the common part shared with the other path + or None if there is no common part. + """ + last = None + for x, y in zip(self.parts(), other.parts()): + if x != y: + return last + last = x + return last + + def __add__(self, other): + """Return new path object with 'other' added to the basename""" + return self.new(basename=self.basename + str(other)) + + def visit(self, fil=None, rec=None, ignore=NeverRaised, bf=False, sort=False): + """Yields all paths below the current one + + fil is a filter (glob pattern or callable), if not matching the + path will not be yielded, defaulting to None (everything is + returned) + + rec is a filter (glob pattern or callable) that controls whether + a node is descended, defaulting to None + + ignore is an Exception class that is ignoredwhen calling dirlist() + on any of the paths (by default, all exceptions are reported) + + bf if True will cause a breadthfirst search instead of the + default depthfirst. Default: False + + sort if True will sort entries within each directory level. 
+ """ + yield from Visitor(fil, rec, ignore, bf, sort).gen(self) + + def _sortlist(self, res, sort): + if sort: + if hasattr(sort, "__call__"): + warnings.warn( + DeprecationWarning( + "listdir(sort=callable) is deprecated and breaks on python3" + ), + stacklevel=3, + ) + res.sort(sort) + else: + res.sort() + + def __fspath__(self): + return self.strpath + + def __hash__(self): + s = self.strpath + if iswin32: + s = s.lower() + return hash(s) + + def __eq__(self, other): + s1 = os.fspath(self) + try: + s2 = os.fspath(other) + except TypeError: + return False + if iswin32: + s1 = s1.lower() + try: + s2 = s2.lower() + except AttributeError: + return False + return s1 == s2 + + def __ne__(self, other): + return not (self == other) + + def __lt__(self, other): + return os.fspath(self) < os.fspath(other) + + def __gt__(self, other): + return os.fspath(self) > os.fspath(other) + + def samefile(self, other): + """Return True if 'other' references the same file as 'self'.""" + other = os.fspath(other) + if not isabs(other): + other = abspath(other) + if self == other: + return True + if not hasattr(os.path, "samefile"): + return False + return error.checked_call(os.path.samefile, self.strpath, other) + + def remove(self, rec=1, ignore_errors=False): + """Remove a file or directory (or a directory tree if rec=1). + if ignore_errors is True, errors while removing directories will + be ignored. + """ + if self.check(dir=1, link=0): + if rec: + # force remove of readonly files on windows + if iswin32: + self.chmod(0o700, rec=1) + import shutil + + error.checked_call( + shutil.rmtree, self.strpath, ignore_errors=ignore_errors + ) + else: + error.checked_call(os.rmdir, self.strpath) + else: + if iswin32: + self.chmod(0o700) + error.checked_call(os.remove, self.strpath) + + def computehash(self, hashtype="md5", chunksize=524288): + """Return hexdigest of hashvalue for this file.""" + try: + try: + import hashlib as mod + except ImportError: + if hashtype == "sha1": + hashtype = "sha" + mod = __import__(hashtype) + hash = getattr(mod, hashtype)() + except (AttributeError, ImportError): + raise ValueError(f"Don't know how to compute {hashtype!r} hash") + f = self.open("rb") + try: + while 1: + buf = f.read(chunksize) + if not buf: + return hash.hexdigest() + hash.update(buf) + finally: + f.close() + + def new(self, **kw): + """Create a modified version of this path. + the following keyword arguments modify various path parts:: + + a:/some/path/to/a/file.ext + xx drive + xxxxxxxxxxxxxxxxx dirname + xxxxxxxx basename + xxxx purebasename + xxx ext + """ + obj = object.__new__(self.__class__) + if not kw: + obj.strpath = self.strpath + return obj + drive, dirname, basename, purebasename, ext = self._getbyspec( + "drive,dirname,basename,purebasename,ext" + ) + if "basename" in kw: + if "purebasename" in kw or "ext" in kw: + raise ValueError("invalid specification %r" % kw) + else: + pb = kw.setdefault("purebasename", purebasename) + try: + ext = kw["ext"] + except KeyError: + pass + else: + if ext and not ext.startswith("."): + ext = "." 
+ ext + kw["basename"] = pb + ext + + if "dirname" in kw and not kw["dirname"]: + kw["dirname"] = drive + else: + kw.setdefault("dirname", dirname) + kw.setdefault("sep", self.sep) + obj.strpath = normpath("%(dirname)s%(sep)s%(basename)s" % kw) + return obj + + def _getbyspec(self, spec: str) -> list[str]: + """See new for what 'spec' can be.""" + res = [] + parts = self.strpath.split(self.sep) + + args = filter(None, spec.split(",")) + for name in args: + if name == "drive": + res.append(parts[0]) + elif name == "dirname": + res.append(self.sep.join(parts[:-1])) + else: + basename = parts[-1] + if name == "basename": + res.append(basename) + else: + i = basename.rfind(".") + if i == -1: + purebasename, ext = basename, "" + else: + purebasename, ext = basename[:i], basename[i:] + if name == "purebasename": + res.append(purebasename) + elif name == "ext": + res.append(ext) + else: + raise ValueError("invalid part specification %r" % name) + return res + + def dirpath(self, *args, **kwargs): + """Return the directory path joined with any given path arguments.""" + if not kwargs: + path = object.__new__(self.__class__) + path.strpath = dirname(self.strpath) + if args: + path = path.join(*args) + return path + return self.new(basename="").join(*args, **kwargs) + + def join(self, *args: os.PathLike[str], abs: bool = False) -> LocalPath: + """Return a new path by appending all 'args' as path + components. if abs=1 is used restart from root if any + of the args is an absolute path. + """ + sep = self.sep + strargs = [os.fspath(arg) for arg in args] + strpath = self.strpath + if abs: + newargs: list[str] = [] + for arg in reversed(strargs): + if isabs(arg): + strpath = arg + strargs = newargs + break + newargs.insert(0, arg) + # special case for when we have e.g. strpath == "/" + actual_sep = "" if strpath.endswith(sep) else sep + for arg in strargs: + arg = arg.strip(sep) + if iswin32: + # allow unix style paths even on windows. + arg = arg.strip("/") + arg = arg.replace("/", sep) + strpath = strpath + actual_sep + arg + actual_sep = sep + obj = object.__new__(self.__class__) + obj.strpath = normpath(strpath) + return obj + + def open(self, mode="r", ensure=False, encoding=None): + """Return an opened file with the given mode. + + If ensure is True, create parent directories if needed. + """ + if ensure: + self.dirpath().ensure(dir=1) + if encoding: + return error.checked_call(io.open, self.strpath, mode, encoding=encoding) + return error.checked_call(open, self.strpath, mode) + + def _fastjoin(self, name): + child = object.__new__(self.__class__) + child.strpath = self.strpath + self.sep + name + return child + + def islink(self): + return islink(self.strpath) + + def check(self, **kw): + """Check a path for existence and properties. + + Without arguments, return True if the path exists, otherwise False. 
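# --- editor's example (illustrative, not part of the vendored diff) ---
# The check() keyword protocol from above, including the "not" prefix that
# Checkers._evaluate() resolves.  Assumes _pytest._py.path.
from _pytest._py.path import local

p = local.mkdtemp().ensure("notes.txt")
assert p.check(file=1)      # fast path: os.path.isfile
assert p.check(dir=0)       # fast path: negated os.path.isdir
assert p.check(ext=".txt")  # falls through to Checkers._evaluate
assert p.check(notdir=1)    # "not" prefix inverts the dir checker
p.dirpath().remove(rec=1)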
+
+        valid checkers::
+
+            file=1    # is a file
+            file=0    # is not a file (may not even exist)
+            dir=1     # is a dir
+            link=1    # is a link
+            exists=1  # exists
+
+        You can specify multiple checker definitions, for example::
+
+            path.check(file=1, link=1)  # a link pointing to a file
+        """
+        if not kw:
+            return exists(self.strpath)
+        if len(kw) == 1:
+            if "dir" in kw:
+                return not kw["dir"] ^ isdir(self.strpath)
+            if "file" in kw:
+                return not kw["file"] ^ isfile(self.strpath)
+        if not kw:
+            kw = {"exists": 1}
+        return Checkers(self)._evaluate(kw)
+
+    _patternchars = set("*?[" + os.sep)
+
+    def listdir(self, fil=None, sort=None):
+        """List directory contents, possibly filtered by the given fil func
+        and possibly sorted.
+        """
+        if fil is None and sort is None:
+            names = error.checked_call(os.listdir, self.strpath)
+            return map_as_list(self._fastjoin, names)
+        if isinstance(fil, str):
+            if not self._patternchars.intersection(fil):
+                child = self._fastjoin(fil)
+                if exists(child.strpath):
+                    return [child]
+                return []
+            fil = FNMatcher(fil)
+        names = error.checked_call(os.listdir, self.strpath)
+        res = []
+        for name in names:
+            child = self._fastjoin(name)
+            if fil is None or fil(child):
+                res.append(child)
+        self._sortlist(res, sort)
+        return res
+
+    def size(self) -> int:
+        """Return size of the underlying file object."""
+        return self.stat().size
+
+    def mtime(self) -> float:
+        """Return last modification time of the path."""
+        return self.stat().mtime
+
+    def copy(self, target, mode=False, stat=False):
+        """Copy path to target.
+
+        If mode is True, will copy permission from path to target.
+        If stat is True, copy permission, last modification
+        time, last access time, and flags from path to target.
+        """
+        if self.check(file=1):
+            if target.check(dir=1):
+                target = target.join(self.basename)
+            assert self != target
+            copychunked(self, target)
+            if mode:
+                copymode(self.strpath, target.strpath)
+            if stat:
+                copystat(self, target)
+        else:
+
+            def rec(p):
+                return p.check(link=0)
+
+            for x in self.visit(rec=rec):
+                relpath = x.relto(self)
+                newx = target.join(relpath)
+                newx.dirpath().ensure(dir=1)
+                if x.check(link=1):
+                    newx.mksymlinkto(x.readlink())
+                    continue
+                elif x.check(file=1):
+                    copychunked(x, newx)
+                elif x.check(dir=1):
+                    newx.ensure(dir=1)
+                if mode:
+                    copymode(x.strpath, newx.strpath)
+                if stat:
+                    copystat(x, newx)
+
+    def rename(self, target):
+        """Rename this path to target."""
+        target = os.fspath(target)
+        return error.checked_call(os.rename, self.strpath, target)
+
+    def dump(self, obj, bin=1):
+        """Pickle object into path location."""
+        f = self.open("wb")
+        import pickle
+
+        try:
+            error.checked_call(pickle.dump, obj, f, bin)
+        finally:
+            f.close()
+
+    def mkdir(self, *args):
+        """Create & return the directory joined with args."""
+        p = self.join(*args)
+        error.checked_call(os.mkdir, os.fspath(p))
+        return p
+
+    def write_binary(self, data, ensure=False):
+        """Write binary data into path.  If ensure is True, create
+        missing parent directories.
+        """
+        if ensure:
+            self.dirpath().ensure(dir=1)
+        with self.open("wb") as f:
+            f.write(data)
+
+    def write_text(self, data, encoding, ensure=False):
+        """Write text data into path using the specified encoding.
+        If ensure is True, create missing parent directories.
+        """
+        if ensure:
+            self.dirpath().ensure(dir=1)
+        with self.open("w", encoding=encoding) as f:
+            f.write(data)
+
+    def write(self, data, mode="w", ensure=False):
+        """Write data into path.  If ensure is True, create
+        missing parent directories.
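# --- editor's example (illustrative, not part of the vendored diff) ---
# The ensure=True convention shared by the write helpers above: missing
# parent directories are created on demand.  Paths are hypothetical.
from _pytest._py.path import local

root = local.mkdtemp()
cfg = root.join("conf", "app", "settings.ini")
cfg.write_text("retries = 3\n", encoding="utf-8", ensure=True)  # makes conf/app/
assert cfg.read_text(encoding="utf-8").startswith("retries")
root.remove(rec=1)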
+ """ + if ensure: + self.dirpath().ensure(dir=1) + if "b" in mode: + if not isinstance(data, bytes): + raise ValueError("can only process bytes") + else: + if not isinstance(data, str): + if not isinstance(data, bytes): + data = str(data) + else: + data = data.decode(sys.getdefaultencoding()) + f = self.open(mode) + try: + f.write(data) + finally: + f.close() + + def _ensuredirs(self): + parent = self.dirpath() + if parent == self: + return self + if parent.check(dir=0): + parent._ensuredirs() + if self.check(dir=0): + try: + self.mkdir() + except error.EEXIST: + # race condition: file/dir created by another thread/process. + # complain if it is not a dir + if self.check(dir=0): + raise + return self + + def ensure(self, *args, **kwargs): + """Ensure that an args-joined path exists (by default as + a file). if you specify a keyword argument 'dir=True' + then the path is forced to be a directory path. + """ + p = self.join(*args) + if kwargs.get("dir", 0): + return p._ensuredirs() + else: + p.dirpath()._ensuredirs() + if not p.check(file=1): + p.open("wb").close() + return p + + @overload + def stat(self, raising: Literal[True] = ...) -> Stat: + ... + + @overload + def stat(self, raising: Literal[False]) -> Stat | None: + ... + + def stat(self, raising: bool = True) -> Stat | None: + """Return an os.stat() tuple.""" + if raising: + return Stat(self, error.checked_call(os.stat, self.strpath)) + try: + return Stat(self, os.stat(self.strpath)) + except KeyboardInterrupt: + raise + except Exception: + return None + + def lstat(self) -> Stat: + """Return an os.lstat() tuple.""" + return Stat(self, error.checked_call(os.lstat, self.strpath)) + + def setmtime(self, mtime=None): + """Set modification time for the given path. if 'mtime' is None + (the default) then the file's mtime is set to current time. + + Note that the resolution for 'mtime' is platform dependent. + """ + if mtime is None: + return error.checked_call(os.utime, self.strpath, mtime) + try: + return error.checked_call(os.utime, self.strpath, (-1, mtime)) + except error.EINVAL: + return error.checked_call(os.utime, self.strpath, (self.atime(), mtime)) + + def chdir(self): + """Change directory to self and return old current directory""" + try: + old = self.__class__() + except error.ENOENT: + old = None + error.checked_call(os.chdir, self.strpath) + return old + + @contextmanager + def as_cwd(self): + """ + Return a context manager, which changes to the path's dir during the + managed "with" context. + On __enter__ it returns the old dir, which might be ``None``. + """ + old = self.chdir() + try: + yield old + finally: + if old is not None: + old.chdir() + + def realpath(self): + """Return a new path which contains no symbolic links.""" + return self.__class__(os.path.realpath(self.strpath)) + + def atime(self): + """Return last access time of the path.""" + return self.stat().atime + + def __repr__(self): + return "local(%r)" % self.strpath + + def __str__(self): + """Return string representation of the Path.""" + return self.strpath + + def chmod(self, mode, rec=0): + """Change permissions to the given mode. If mode is an + integer it directly encodes the os-specific modes. + if rec is True perform recursively. 
+ """ + if not isinstance(mode, int): + raise TypeError(f"mode {mode!r} must be an integer") + if rec: + for x in self.visit(rec=rec): + error.checked_call(os.chmod, str(x), mode) + error.checked_call(os.chmod, self.strpath, mode) + + def pypkgpath(self): + """Return the Python package path by looking for the last + directory upwards which still contains an __init__.py. + Return None if a pkgpath can not be determined. + """ + pkgpath = None + for parent in self.parts(reverse=True): + if parent.isdir(): + if not parent.join("__init__.py").exists(): + break + if not isimportable(parent.basename): + break + pkgpath = parent + return pkgpath + + def _ensuresyspath(self, ensuremode, path): + if ensuremode: + s = str(path) + if ensuremode == "append": + if s not in sys.path: + sys.path.append(s) + else: + if s != sys.path[0]: + sys.path.insert(0, s) + + def pyimport(self, modname=None, ensuresyspath=True): + """Return path as an imported python module. + + If modname is None, look for the containing package + and construct an according module name. + The module will be put/looked up in sys.modules. + if ensuresyspath is True then the root dir for importing + the file (taking __init__.py files into account) will + be prepended to sys.path if it isn't there already. + If ensuresyspath=="append" the root dir will be appended + if it isn't already contained in sys.path. + if ensuresyspath is False no modification of syspath happens. + + Special value of ensuresyspath=="importlib" is intended + purely for using in pytest, it is capable only of importing + separate .py files outside packages, e.g. for test suite + without any __init__.py file. It effectively allows having + same-named test modules in different places and offers + mild opt-in via this option. Note that it works only in + recent versions of python. + """ + if not self.check(): + raise error.ENOENT(self) + + if ensuresyspath == "importlib": + if modname is None: + modname = self.purebasename + spec = importlib.util.spec_from_file_location(modname, str(self)) + if spec is None or spec.loader is None: + raise ImportError( + f"Can't find module {modname} at location {str(self)}" + ) + mod = importlib.util.module_from_spec(spec) + spec.loader.exec_module(mod) + return mod + + pkgpath = None + if modname is None: + pkgpath = self.pypkgpath() + if pkgpath is not None: + pkgroot = pkgpath.dirpath() + names = self.new(ext="").relto(pkgroot).split(self.sep) + if names[-1] == "__init__": + names.pop() + modname = ".".join(names) + else: + pkgroot = self.dirpath() + modname = self.purebasename + + self._ensuresyspath(ensuresyspath, pkgroot) + __import__(modname) + mod = sys.modules[modname] + if self.basename == "__init__.py": + return mod # we don't check anything as we might + # be in a namespace package ... 
too icky to check + modfile = mod.__file__ + assert modfile is not None + if modfile[-4:] in (".pyc", ".pyo"): + modfile = modfile[:-1] + elif modfile.endswith("$py.class"): + modfile = modfile[:-9] + ".py" + if modfile.endswith(os.sep + "__init__.py"): + if self.basename != "__init__.py": + modfile = modfile[:-12] + try: + issame = self.samefile(modfile) + except error.ENOENT: + issame = False + if not issame: + ignore = os.getenv("PY_IGNORE_IMPORTMISMATCH") + if ignore != "1": + raise self.ImportMismatchError(modname, modfile, self) + return mod + else: + try: + return sys.modules[modname] + except KeyError: + # we have a custom modname, do a pseudo-import + import types + + mod = types.ModuleType(modname) + mod.__file__ = str(self) + sys.modules[modname] = mod + try: + with open(str(self), "rb") as f: + exec(f.read(), mod.__dict__) + except BaseException: + del sys.modules[modname] + raise + return mod + + def sysexec(self, *argv: os.PathLike[str], **popen_opts: Any) -> str: + """Return stdout text from executing a system child process, + where the 'self' path points to executable. + The process is directly invoked and not through a system shell. + """ + from subprocess import Popen, PIPE + + popen_opts.pop("stdout", None) + popen_opts.pop("stderr", None) + proc = Popen( + [str(self)] + [str(arg) for arg in argv], + **popen_opts, + stdout=PIPE, + stderr=PIPE, + ) + stdout: str | bytes + stdout, stderr = proc.communicate() + ret = proc.wait() + if isinstance(stdout, bytes): + stdout = stdout.decode(sys.getdefaultencoding()) + if ret != 0: + if isinstance(stderr, bytes): + stderr = stderr.decode(sys.getdefaultencoding()) + raise RuntimeError( + ret, + ret, + str(self), + stdout, + stderr, + ) + return stdout + + @classmethod + def sysfind(cls, name, checker=None, paths=None): + """Return a path object found by looking at the systems + underlying PATH specification. If the checker is not None + it will be invoked to filter matching paths. If a binary + cannot be found, None is returned + Note: This is probably not working on plain win32 systems + but may work on cygwin. + """ + if isabs(name): + p = local(name) + if p.check(file=1): + return p + else: + if paths is None: + if iswin32: + paths = os.environ["Path"].split(";") + if "" not in paths and "." not in paths: + paths.append(".") + try: + systemroot = os.environ["SYSTEMROOT"] + except KeyError: + pass + else: + paths = [ + path.replace("%SystemRoot%", systemroot) for path in paths + ] + else: + paths = os.environ["PATH"].split(":") + tryadd = [] + if iswin32: + tryadd += os.environ["PATHEXT"].split(os.pathsep) + tryadd.append("") + + for x in paths: + for addext in tryadd: + p = local(x).join(name, abs=True) + addext + try: + if p.check(file=1): + if checker: + if not checker(p): + continue + return p + except error.EACCES: + pass + return None + + @classmethod + def _gethomedir(cls): + try: + x = os.environ["HOME"] + except KeyError: + try: + x = os.environ["HOMEDRIVE"] + os.environ["HOMEPATH"] + except KeyError: + return None + return cls(x) + + # """ + # special class constructors for local filesystem paths + # """ + @classmethod + def get_temproot(cls): + """Return the system's temporary directory + (where tempfiles are usually created in) + """ + import tempfile + + return local(tempfile.gettempdir()) + + @classmethod + def mkdtemp(cls, rootdir=None): + """Return a Path object pointing to a fresh new temporary directory + (which we created ourself). 
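# --- editor's example (illustrative, not part of the vendored diff) ---
# sysfind() walks PATH much like shutil.which(); sysexec() then runs the
# found binary directly (no shell).  "python3" may be absent on some systems.
from _pytest._py.path import local

exe = local.sysfind("python3")                # None if not on PATH
if exe is not None:
    print(exe)                                # e.g. local('/usr/bin/python3')
    print(exe.sysexec("-c", "print(6 * 7)"))  # child stdout: "42\n"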
+ """ + import tempfile + + if rootdir is None: + rootdir = cls.get_temproot() + return cls(error.checked_call(tempfile.mkdtemp, dir=str(rootdir))) + + @classmethod + def make_numbered_dir( + cls, prefix="session-", rootdir=None, keep=3, lock_timeout=172800 + ): # two days + """Return unique directory with a number greater than the current + maximum one. The number is assumed to start directly after prefix. + if keep is true directories with a number less than (maxnum-keep) + will be removed. If .lock files are used (lock_timeout non-zero), + algorithm is multi-process safe. + """ + if rootdir is None: + rootdir = cls.get_temproot() + + nprefix = prefix.lower() + + def parse_num(path): + """Parse the number out of a path (if it matches the prefix)""" + nbasename = path.basename.lower() + if nbasename.startswith(nprefix): + try: + return int(nbasename[len(nprefix) :]) + except ValueError: + pass + + def create_lockfile(path): + """Exclusively create lockfile. Throws when failed""" + mypid = os.getpid() + lockfile = path.join(".lock") + if hasattr(lockfile, "mksymlinkto"): + lockfile.mksymlinkto(str(mypid)) + else: + fd = error.checked_call( + os.open, str(lockfile), os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o644 + ) + with os.fdopen(fd, "w") as f: + f.write(str(mypid)) + return lockfile + + def atexit_remove_lockfile(lockfile): + """Ensure lockfile is removed at process exit""" + mypid = os.getpid() + + def try_remove_lockfile(): + # in a fork() situation, only the last process should + # remove the .lock, otherwise the other processes run the + # risk of seeing their temporary dir disappear. For now + # we remove the .lock in the parent only (i.e. we assume + # that the children finish before the parent). + if os.getpid() != mypid: + return + try: + lockfile.remove() + except error.Error: + pass + + atexit.register(try_remove_lockfile) + + # compute the maximum number currently in use with the prefix + lastmax = None + while True: + maxnum = -1 + for path in rootdir.listdir(): + num = parse_num(path) + if num is not None: + maxnum = max(maxnum, num) + + # make the new directory + try: + udir = rootdir.mkdir(prefix + str(maxnum + 1)) + if lock_timeout: + lockfile = create_lockfile(udir) + atexit_remove_lockfile(lockfile) + except (error.EEXIST, error.ENOENT, error.EBUSY): + # race condition (1): another thread/process created the dir + # in the meantime - try again + # race condition (2): another thread/process spuriously acquired + # lock treating empty directory as candidate + # for removal - try again + # race condition (3): another thread/process tried to create the lock at + # the same time (happened in Python 3.3 on Windows) + # https://ci.appveyor.com/project/pytestbot/py/build/1.0.21/job/ffi85j4c0lqwsfwa + if lastmax == maxnum: + raise + lastmax = maxnum + continue + break + + def get_mtime(path): + """Read file modification time""" + try: + return path.lstat().mtime + except error.Error: + pass + + garbage_prefix = prefix + "garbage-" + + def is_garbage(path): + """Check if path denotes directory scheduled for removal""" + bn = path.basename + return bn.startswith(garbage_prefix) + + # prune old directories + udir_time = get_mtime(udir) + if keep and udir_time: + for path in rootdir.listdir(): + num = parse_num(path) + if num is not None and num <= (maxnum - keep): + try: + # try acquiring lock to remove directory as exclusive user + if lock_timeout: + create_lockfile(path) + except (error.EEXIST, error.ENOENT, error.EBUSY): + path_time = get_mtime(path) + if not path_time: + # assume 
directory doesn't exist now + continue + if abs(udir_time - path_time) < lock_timeout: + # assume directory with lockfile exists + # and lock timeout hasn't expired yet + continue + + # path dir locked for exclusive use + # and scheduled for removal to avoid another thread/process + # treating it as a new directory or removal candidate + garbage_path = rootdir.join(garbage_prefix + str(uuid.uuid4())) + try: + path.rename(garbage_path) + garbage_path.remove(rec=1) + except KeyboardInterrupt: + raise + except Exception: # this might be error.Error, WindowsError ... + pass + if is_garbage(path): + try: + path.remove(rec=1) + except KeyboardInterrupt: + raise + except Exception: # this might be error.Error, WindowsError ... + pass + + # make link... + try: + username = os.environ["USER"] # linux, et al + except KeyError: + try: + username = os.environ["USERNAME"] # windows + except KeyError: + username = "current" + + src = str(udir) + dest = src[: src.rfind("-")] + "-" + username + try: + os.unlink(dest) + except OSError: + pass + try: + os.symlink(src, dest) + except (OSError, AttributeError, NotImplementedError): + pass + + return udir + + +def copymode(src, dest): + """Copy permission from src to dst.""" + import shutil + + shutil.copymode(src, dest) + + +def copystat(src, dest): + """Copy permission, last modification time, + last access time, and flags from src to dst.""" + import shutil + + shutil.copystat(str(src), str(dest)) + + +def copychunked(src, dest): + chunksize = 524288 # half a meg of bytes + fsrc = src.open("rb") + try: + fdest = dest.open("wb") + try: + while 1: + buf = fsrc.read(chunksize) + if not buf: + break + fdest.write(buf) + finally: + fdest.close() + finally: + fsrc.close() + + +def isimportable(name): + if name and (name[0].isalpha() or name[0] == "_"): + name = name.replace("_", "") + return not name or name.isalnum() + + +local = LocalPath diff --git a/venv/lib/python3.11/site-packages/_pytest/_version.py b/venv/lib/python3.11/site-packages/_pytest/_version.py new file mode 100644 index 0000000..f5d7211 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/_version.py @@ -0,0 +1,4 @@ +# file generated by setuptools_scm +# don't change, don't track in version control +__version__ = version = '7.4.0' +__version_tuple__ = version_tuple = (7, 4, 0) diff --git a/venv/lib/python3.11/site-packages/_pytest/assertion/__init__.py b/venv/lib/python3.11/site-packages/_pytest/assertion/__init__.py new file mode 100644 index 0000000..a46e581 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/assertion/__init__.py @@ -0,0 +1,181 @@ +"""Support for presenting detailed information in failing assertions.""" +import sys +from typing import Any +from typing import Generator +from typing import List +from typing import Optional +from typing import TYPE_CHECKING + +from _pytest.assertion import rewrite +from _pytest.assertion import truncate +from _pytest.assertion import util +from _pytest.assertion.rewrite import assertstate_key +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.nodes import Item + +if TYPE_CHECKING: + from _pytest.main import Session + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("debugconfig") + group.addoption( + "--assert", + action="store", + dest="assertmode", + choices=("rewrite", "plain"), + default="rewrite", + metavar="MODE", + help=( + "Control assertion debugging tools.\n" + "'plain' performs no assertion debugging.\n" + 
"'rewrite' (the default) rewrites assert statements in test modules" + " on import to provide assert expression information." + ), + ) + parser.addini( + "enable_assertion_pass_hook", + type="bool", + default=False, + help="Enables the pytest_assertion_pass hook. " + "Make sure to delete any previously generated pyc cache files.", + ) + + +def register_assert_rewrite(*names: str) -> None: + """Register one or more module names to be rewritten on import. + + This function will make sure that this module or all modules inside + the package will get their assert statements rewritten. + Thus you should make sure to call this before the module is + actually imported, usually in your __init__.py if you are a plugin + using a package. + + :param names: The module names to register. + """ + for name in names: + if not isinstance(name, str): + msg = "expected module names as *args, got {0} instead" # type: ignore[unreachable] + raise TypeError(msg.format(repr(names))) + for hook in sys.meta_path: + if isinstance(hook, rewrite.AssertionRewritingHook): + importhook = hook + break + else: + # TODO(typing): Add a protocol for mark_rewrite() and use it + # for importhook and for PytestPluginManager.rewrite_hook. + importhook = DummyRewriteHook() # type: ignore + importhook.mark_rewrite(*names) + + +class DummyRewriteHook: + """A no-op import hook for when rewriting is disabled.""" + + def mark_rewrite(self, *names: str) -> None: + pass + + +class AssertionState: + """State for the assertion plugin.""" + + def __init__(self, config: Config, mode) -> None: + self.mode = mode + self.trace = config.trace.root.get("assertion") + self.hook: Optional[rewrite.AssertionRewritingHook] = None + + +def install_importhook(config: Config) -> rewrite.AssertionRewritingHook: + """Try to install the rewrite hook, raise SystemError if it fails.""" + config.stash[assertstate_key] = AssertionState(config, "rewrite") + config.stash[assertstate_key].hook = hook = rewrite.AssertionRewritingHook(config) + sys.meta_path.insert(0, hook) + config.stash[assertstate_key].trace("installed rewrite import hook") + + def undo() -> None: + hook = config.stash[assertstate_key].hook + if hook is not None and hook in sys.meta_path: + sys.meta_path.remove(hook) + + config.add_cleanup(undo) + return hook + + +def pytest_collection(session: "Session") -> None: + # This hook is only called when test modules are collected + # so for example not in the managing process of pytest-xdist + # (which does not collect test modules). + assertstate = session.config.stash.get(assertstate_key, None) + if assertstate: + if assertstate.hook is not None: + assertstate.hook.set_session(session) + + +@hookimpl(tryfirst=True, hookwrapper=True) +def pytest_runtest_protocol(item: Item) -> Generator[None, None, None]: + """Setup the pytest_assertrepr_compare and pytest_assertion_pass hooks. + + The rewrite module will use util._reprcompare if it exists to use custom + reporting via the pytest_assertrepr_compare hook. This sets up this custom + comparison for the test. + """ + + ihook = item.ihook + + def callbinrepr(op, left: object, right: object) -> Optional[str]: + """Call the pytest_assertrepr_compare hook and prepare the result. + + This uses the first result from the hook and then ensures the + following: + * Overly verbose explanations are truncated unless configured otherwise + (eg. if running in verbose mode). + * Embedded newlines are escaped to help util.format_explanation() + later. 
+ * If the rewrite mode is used embedded %-characters are replaced + to protect later % formatting. + + The result can be formatted by util.format_explanation() for + pretty printing. + """ + hook_result = ihook.pytest_assertrepr_compare( + config=item.config, op=op, left=left, right=right + ) + for new_expl in hook_result: + if new_expl: + new_expl = truncate.truncate_if_required(new_expl, item) + new_expl = [line.replace("\n", "\\n") for line in new_expl] + res = "\n~".join(new_expl) + if item.config.getvalue("assertmode") == "rewrite": + res = res.replace("%", "%%") + return res + return None + + saved_assert_hooks = util._reprcompare, util._assertion_pass + util._reprcompare = callbinrepr + util._config = item.config + + if ihook.pytest_assertion_pass.get_hookimpls(): + + def call_assertion_pass_hook(lineno: int, orig: str, expl: str) -> None: + ihook.pytest_assertion_pass(item=item, lineno=lineno, orig=orig, expl=expl) + + util._assertion_pass = call_assertion_pass_hook + + yield + + util._reprcompare, util._assertion_pass = saved_assert_hooks + util._config = None + + +def pytest_sessionfinish(session: "Session") -> None: + assertstate = session.config.stash.get(assertstate_key, None) + if assertstate: + if assertstate.hook is not None: + assertstate.hook.set_session(None) + + +def pytest_assertrepr_compare( + config: Config, op: str, left: Any, right: Any +) -> Optional[List[str]]: + return util.assertrepr_compare(config=config, op=op, left=left, right=right) diff --git a/venv/lib/python3.11/site-packages/_pytest/assertion/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/assertion/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..4f0a706 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/assertion/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/assertion/__pycache__/rewrite.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/assertion/__pycache__/rewrite.cpython-311.pyc new file mode 100644 index 0000000..364385d Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/assertion/__pycache__/rewrite.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/assertion/__pycache__/truncate.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/assertion/__pycache__/truncate.cpython-311.pyc new file mode 100644 index 0000000..e93715d Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/assertion/__pycache__/truncate.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/assertion/__pycache__/util.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/assertion/__pycache__/util.cpython-311.pyc new file mode 100644 index 0000000..1d483d2 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/assertion/__pycache__/util.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/assertion/rewrite.py b/venv/lib/python3.11/site-packages/_pytest/assertion/rewrite.py new file mode 100644 index 0000000..ab83fee --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/assertion/rewrite.py @@ -0,0 +1,1185 @@ +"""Rewrite assertion AST to produce nice error messages.""" +import ast +import errno +import functools +import importlib.abc +import importlib.machinery +import importlib.util +import io +import itertools +import marshal +import os +import struct +import sys +import tokenize +import types +from pathlib import Path +from pathlib import 
PurePath +from typing import Callable +from typing import Dict +from typing import IO +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Optional +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union + +from _pytest._io.saferepr import DEFAULT_REPR_MAX_SIZE +from _pytest._io.saferepr import saferepr +from _pytest._version import version +from _pytest.assertion import util +from _pytest.assertion.util import ( # noqa: F401 + format_explanation as _format_explanation, +) +from _pytest.config import Config +from _pytest.main import Session +from _pytest.pathlib import absolutepath +from _pytest.pathlib import fnmatch_ex +from _pytest.stash import StashKey + +if TYPE_CHECKING: + from _pytest.assertion import AssertionState + +if sys.version_info >= (3, 8): + namedExpr = ast.NamedExpr + astNameConstant = ast.Constant + astStr = ast.Constant + astNum = ast.Constant +else: + namedExpr = ast.Expr + astNameConstant = ast.NameConstant + astStr = ast.Str + astNum = ast.Num + + +assertstate_key = StashKey["AssertionState"]() + +# pytest caches rewritten pycs in pycache dirs +PYTEST_TAG = f"{sys.implementation.cache_tag}-pytest-{version}" +PYC_EXT = ".py" + (__debug__ and "c" or "o") +PYC_TAIL = "." + PYTEST_TAG + PYC_EXT + + +class AssertionRewritingHook(importlib.abc.MetaPathFinder, importlib.abc.Loader): + """PEP302/PEP451 import hook which rewrites asserts.""" + + def __init__(self, config: Config) -> None: + self.config = config + try: + self.fnpats = config.getini("python_files") + except ValueError: + self.fnpats = ["test_*.py", "*_test.py"] + self.session: Optional[Session] = None + self._rewritten_names: Dict[str, Path] = {} + self._must_rewrite: Set[str] = set() + # flag to guard against trying to rewrite a pyc file while we are already writing another pyc file, + # which might result in infinite recursion (#3506) + self._writing_pyc = False + self._basenames_to_check_rewrite = {"conftest"} + self._marked_for_rewrite_cache: Dict[str, bool] = {} + self._session_paths_checked = False + + def set_session(self, session: Optional[Session]) -> None: + self.session = session + self._session_paths_checked = False + + # Indirection so we can mock calls to find_spec originated from the hook during testing + _find_spec = importlib.machinery.PathFinder.find_spec + + def find_spec( + self, + name: str, + path: Optional[Sequence[Union[str, bytes]]] = None, + target: Optional[types.ModuleType] = None, + ) -> Optional[importlib.machinery.ModuleSpec]: + if self._writing_pyc: + return None + state = self.config.stash[assertstate_key] + if self._early_rewrite_bailout(name, state): + return None + state.trace("find_module called for: %s" % name) + + # Type ignored because mypy is confused about the `self` binding here. 
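# --- editor's example (illustrative, not part of the vendored diff) ---
# How the PYTEST_TAG / PYC_EXT / PYC_TAIL constants above combine into the
# cache filename that exec_module() writes.  Values vary per interpreter;
# the version string here is just the one pinned in this diff (7.4.0).
import sys

tag = f"{sys.implementation.cache_tag}-pytest-7.4.0"
tail = "." + tag + (".pyc" if __debug__ else ".pyo")
print("test_demo.py"[:-3] + tail)
# e.g. test_demo.cpython-311-pytest-7.4.0.pyc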
+ spec = self._find_spec(name, path) # type: ignore + if ( + # the import machinery could not find a file to import + spec is None + # this is a namespace package (without `__init__.py`) + # there's nothing to rewrite there + or spec.origin is None + # we can only rewrite source files + or not isinstance(spec.loader, importlib.machinery.SourceFileLoader) + # if the file doesn't exist, we can't rewrite it + or not os.path.exists(spec.origin) + ): + return None + else: + fn = spec.origin + + if not self._should_rewrite(name, fn, state): + return None + + return importlib.util.spec_from_file_location( + name, + fn, + loader=self, + submodule_search_locations=spec.submodule_search_locations, + ) + + def create_module( + self, spec: importlib.machinery.ModuleSpec + ) -> Optional[types.ModuleType]: + return None # default behaviour is fine + + def exec_module(self, module: types.ModuleType) -> None: + assert module.__spec__ is not None + assert module.__spec__.origin is not None + fn = Path(module.__spec__.origin) + state = self.config.stash[assertstate_key] + + self._rewritten_names[module.__name__] = fn + + # The requested module looks like a test file, so rewrite it. This is + # the most magical part of the process: load the source, rewrite the + # asserts, and load the rewritten source. We also cache the rewritten + # module code in a special pyc. We must be aware of the possibility of + # concurrent pytest processes rewriting and loading pycs. To avoid + # tricky race conditions, we maintain the following invariant: The + # cached pyc is always a complete, valid pyc. Operations on it must be + # atomic. POSIX's atomic rename comes in handy. + write = not sys.dont_write_bytecode + cache_dir = get_cache_dir(fn) + if write: + ok = try_makedirs(cache_dir) + if not ok: + write = False + state.trace(f"read only directory: {cache_dir}") + + cache_name = fn.name[:-3] + PYC_TAIL + pyc = cache_dir / cache_name + # Notice that even if we're in a read-only directory, I'm going + # to check for a cached pyc. This may not be optimal... + co = _read_pyc(fn, pyc, state.trace) + if co is None: + state.trace(f"rewriting {fn!r}") + source_stat, co = _rewrite_test(fn, self.config) + if write: + self._writing_pyc = True + try: + _write_pyc(state, co, source_stat, pyc) + finally: + self._writing_pyc = False + else: + state.trace(f"found cached rewritten pyc for {fn}") + exec(co, module.__dict__) + + def _early_rewrite_bailout(self, name: str, state: "AssertionState") -> bool: + """A fast way to get out of rewriting modules. + + Profiling has shown that the call to PathFinder.find_spec (inside of + the find_spec from this class) is a major slowdown, so, this method + tries to filter what we're sure won't be rewritten before getting to + it. + """ + if self.session is not None and not self._session_paths_checked: + self._session_paths_checked = True + for initial_path in self.session._initialpaths: + # Make something as c:/projects/my_project/path.py -> + # ['c:', 'projects', 'my_project', 'path.py'] + parts = str(initial_path).split(os.sep) + # add 'path' to basenames to be checked. + self._basenames_to_check_rewrite.add(os.path.splitext(parts[-1])[0]) + + # Note: conftest already by default in _basenames_to_check_rewrite. + parts = name.split(".") + if parts[-1] in self._basenames_to_check_rewrite: + return False + + # For matching the name it must be as if it was a filename. 
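# --- editor's example (illustrative, not part of the vendored diff) ---
# The filename matching used by _early_rewrite_bailout()/_should_rewrite():
# fnmatch_ex() matches bare patterns against the basename only.
from pathlib import PurePath
from _pytest.pathlib import fnmatch_ex

for pat in ("test_*.py", "*_test.py"):
    print(pat, fnmatch_ex(pat, PurePath("pkg/test_demo.py")))
# test_*.py True
# *_test.py False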
+ path = PurePath(*parts).with_suffix(".py") + + for pat in self.fnpats: + # if the pattern contains subdirectories ("tests/**.py" for example) we can't bail out based + # on the name alone because we need to match against the full path + if os.path.dirname(pat): + return False + if fnmatch_ex(pat, path): + return False + + if self._is_marked_for_rewrite(name, state): + return False + + state.trace(f"early skip of rewriting module: {name}") + return True + + def _should_rewrite(self, name: str, fn: str, state: "AssertionState") -> bool: + # always rewrite conftest files + if os.path.basename(fn) == "conftest.py": + state.trace(f"rewriting conftest file: {fn!r}") + return True + + if self.session is not None: + if self.session.isinitpath(absolutepath(fn)): + state.trace(f"matched test file (was specified on cmdline): {fn!r}") + return True + + # modules not passed explicitly on the command line are only + # rewritten if they match the naming convention for test files + fn_path = PurePath(fn) + for pat in self.fnpats: + if fnmatch_ex(pat, fn_path): + state.trace(f"matched test file {fn!r}") + return True + + return self._is_marked_for_rewrite(name, state) + + def _is_marked_for_rewrite(self, name: str, state: "AssertionState") -> bool: + try: + return self._marked_for_rewrite_cache[name] + except KeyError: + for marked in self._must_rewrite: + if name == marked or name.startswith(marked + "."): + state.trace(f"matched marked file {name!r} (from {marked!r})") + self._marked_for_rewrite_cache[name] = True + return True + + self._marked_for_rewrite_cache[name] = False + return False + + def mark_rewrite(self, *names: str) -> None: + """Mark import names as needing to be rewritten. + + The named module or package as well as any nested modules will + be rewritten on import. + """ + already_imported = ( + set(names).intersection(sys.modules).difference(self._rewritten_names) + ) + for name in already_imported: + mod = sys.modules[name] + if not AssertionRewriter.is_rewrite_disabled( + mod.__doc__ or "" + ) and not isinstance(mod.__loader__, type(self)): + self._warn_already_imported(name) + self._must_rewrite.update(names) + self._marked_for_rewrite_cache.clear() + + def _warn_already_imported(self, name: str) -> None: + from _pytest.warning_types import PytestAssertRewriteWarning + + self.config.issue_config_time_warning( + PytestAssertRewriteWarning( + "Module already imported so cannot be rewritten: %s" % name + ), + stacklevel=5, + ) + + def get_data(self, pathname: Union[str, bytes]) -> bytes: + """Optional PEP302 get_data API.""" + with open(pathname, "rb") as f: + return f.read() + + if sys.version_info >= (3, 10): + if sys.version_info >= (3, 12): + from importlib.resources.abc import TraversableResources + else: + from importlib.abc import TraversableResources + + def get_resource_reader(self, name: str) -> TraversableResources: # type: ignore + if sys.version_info < (3, 11): + from importlib.readers import FileReader + else: + from importlib.resources.readers import FileReader + + return FileReader( # type:ignore[no-any-return] + types.SimpleNamespace(path=self._rewritten_names[name]) + ) + + +def _write_pyc_fp( + fp: IO[bytes], source_stat: os.stat_result, co: types.CodeType +) -> None: + # Technically, we don't have to have the same pyc format as + # (C)Python, since these "pycs" should never be seen by builtin + # import. However, there's little reason to deviate. 
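# --- editor's example (illustrative, not part of the vendored diff) ---
# The public entry point into mark_rewrite() above: a plugin package calls
# pytest.register_assert_rewrite() *before* importing its own submodules.
# "myplugin.helpers" is a hypothetical module name.
import pytest

pytest.register_assert_rewrite("myplugin.helpers")
# import myplugin.helpers   # only now, so its asserts get rewritten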
+    fp.write(importlib.util.MAGIC_NUMBER)
+    # https://www.python.org/dev/peps/pep-0552/
+    flags = b"\x00\x00\x00\x00"
+    fp.write(flags)
+    # as of now, bytecode header expects 32-bit numbers for size and mtime (#4903)
+    mtime = int(source_stat.st_mtime) & 0xFFFFFFFF
+    size = source_stat.st_size & 0xFFFFFFFF
+    # "<LL" stands for 2 unsigned longs, little-endian.
+    fp.write(struct.pack("<LL", mtime, size))
+    fp.write(marshal.dumps(co))
+
+
+def _write_pyc(
+    state: "AssertionState",
+    co: types.CodeType,
+    source_stat: os.stat_result,
+    pyc: Path,
+) -> bool:
+    proc_pyc = f"{pyc}.{os.getpid()}"
+    try:
+        with open(proc_pyc, "wb") as fp:
+            _write_pyc_fp(fp, source_stat, co)
+    except OSError as e:
+        state.trace(f"error writing pyc file at {proc_pyc}: errno={e.errno}")
+        return False
+
+    try:
+        os.replace(proc_pyc, pyc)
+    except OSError as e:
+        state.trace(f"error writing pyc file at {pyc}: {e}")
+        # we ignore any failure to write the cache file
+        # there are many reasons: permission denied, the pycache dir being a
+        # file, etc.
+        return False
+    return True
+
+
+def _rewrite_test(fn: Path, config: Config) -> Tuple[os.stat_result, types.CodeType]:
+    """Read and rewrite *fn* and return the code object."""
+    stat = os.stat(fn)
+    source = fn.read_bytes()
+    strfn = str(fn)
+    tree = ast.parse(source, filename=strfn)
+    rewrite_asserts(tree, source, strfn, config)
+    co = compile(tree, strfn, "exec", dont_inherit=True)
+    return stat, co
+
+
+def _read_pyc(
+    source: Path, pyc: Path, trace: Callable[[str], None] = lambda x: None
+) -> Optional[types.CodeType]:
+    """Possibly read a pytest pyc containing rewritten code.
+
+    Return rewritten code if successful or None if not.
+    """
+    try:
+        fp = open(pyc, "rb")
+    except OSError:
+        return None
+    with fp:
+        try:
+            stat_result = os.stat(source)
+            mtime = int(stat_result.st_mtime)
+            size = stat_result.st_size
+            data = fp.read(16)
+        except OSError as e:
+            trace(f"_read_pyc({source}): OSError {e}")
+            return None
+        # Check for invalid or out of date pyc file.
+        if len(data) != 16:
+            trace("_read_pyc(%s): invalid pyc (too short)" % source)
+            return None
+        if data[:4] != importlib.util.MAGIC_NUMBER:
+            trace("_read_pyc(%s): invalid pyc (bad magic number)" % source)
+            return None
+        if data[4:8] != b"\x00\x00\x00\x00":
+            trace("_read_pyc(%s): invalid pyc (unsupported flags)" % source)
+            return None
+        mtime_data = data[8:12]
+        if int.from_bytes(mtime_data, "little") != mtime & 0xFFFFFFFF:
+            trace("_read_pyc(%s): out of date" % source)
+            return None
+        size_data = data[12:16]
+        if int.from_bytes(size_data, "little") != size & 0xFFFFFFFF:
+            trace("_read_pyc(%s): invalid pyc (incorrect size)" % source)
+            return None
+        try:
+            co = marshal.load(fp)
+        except Exception as e:
+            trace(f"_read_pyc({source}): marshal.load error {e}")
+            return None
+        if not isinstance(co, types.CodeType):
+            trace("_read_pyc(%s): not a code object" % source)
+            return None
+        return co
+
+
+def rewrite_asserts(
+    mod: ast.Module,
+    source: bytes,
+    module_path: Optional[str] = None,
+    config: Optional[Config] = None,
+) -> None:
+    """Rewrite the assert statements in mod."""
+    AssertionRewriter(module_path, config, source).run(mod)
+
+
+def _saferepr(obj: object) -> str:
+    r"""Get a safe repr of an object for assertion error messages.
+
+    The assertion formatting (util.format_explanation()) requires
+    newlines to be escaped since they are a special character for it.
+    Normally assertion.util.format_explanation() does this, but a
+    custom repr may itself contain one of the special escape
+    sequences; especially '\n{' and '\n}' are likely to be present in
+    JSON reprs.
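# --- editor's example (illustrative, not part of the vendored diff) ---
# The 16-byte header _write_pyc_fp() produces and _read_pyc() validates:
# magic, zeroed PEP 552 flags, then mtime and size as little-endian u32s.
import importlib.util
import struct

header = (
    importlib.util.MAGIC_NUMBER                 # 4 bytes: interpreter magic
    + b"\x00\x00\x00\x00"                       # 4 bytes: flags
    + struct.pack("<LL", 1_700_000_000, 1234)   # 8 bytes: mtime, size
)
assert header[:4] == importlib.util.MAGIC_NUMBER
mtime, size = struct.unpack("<LL", header[8:16])
print(mtime, size)   # 1700000000 1234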
+ """ + maxsize = _get_maxsize_for_saferepr(util._config) + return saferepr(obj, maxsize=maxsize).replace("\n", "\\n") + + +def _get_maxsize_for_saferepr(config: Optional[Config]) -> Optional[int]: + """Get `maxsize` configuration for saferepr based on the given config object.""" + verbosity = config.getoption("verbose") if config is not None else 0 + if verbosity >= 2: + return None + if verbosity >= 1: + return DEFAULT_REPR_MAX_SIZE * 10 + return DEFAULT_REPR_MAX_SIZE + + +def _format_assertmsg(obj: object) -> str: + r"""Format the custom assertion message given. + + For strings this simply replaces newlines with '\n~' so that + util.format_explanation() will preserve them instead of escaping + newlines. For other objects saferepr() is used first. + """ + # reprlib appears to have a bug which means that if a string + # contains a newline it gets escaped, however if an object has a + # .__repr__() which contains newlines it does not get escaped. + # However in either case we want to preserve the newline. + replaces = [("\n", "\n~"), ("%", "%%")] + if not isinstance(obj, str): + obj = saferepr(obj) + replaces.append(("\\n", "\n~")) + + for r1, r2 in replaces: + obj = obj.replace(r1, r2) + + return obj + + +def _should_repr_global_name(obj: object) -> bool: + if callable(obj): + return False + + try: + return not hasattr(obj, "__name__") + except Exception: + return True + + +def _format_boolop(explanations: Iterable[str], is_or: bool) -> str: + explanation = "(" + (is_or and " or " or " and ").join(explanations) + ")" + return explanation.replace("%", "%%") + + +def _call_reprcompare( + ops: Sequence[str], + results: Sequence[bool], + expls: Sequence[str], + each_obj: Sequence[object], +) -> str: + for i, res, expl in zip(range(len(ops)), results, expls): + try: + done = not res + except Exception: + done = True + if done: + break + if util._reprcompare is not None: + custom = util._reprcompare(ops[i], each_obj[i], each_obj[i + 1]) + if custom is not None: + return custom + return expl + + +def _call_assertion_pass(lineno: int, orig: str, expl: str) -> None: + if util._assertion_pass is not None: + util._assertion_pass(lineno, orig, expl) + + +def _check_if_assertion_pass_impl() -> bool: + """Check if any plugins implement the pytest_assertion_pass hook + in order not to generate explanation unnecessarily (might be expensive).""" + return True if util._assertion_pass else False + + +UNARY_MAP = {ast.Not: "not %s", ast.Invert: "~%s", ast.USub: "-%s", ast.UAdd: "+%s"} + +BINOP_MAP = { + ast.BitOr: "|", + ast.BitXor: "^", + ast.BitAnd: "&", + ast.LShift: "<<", + ast.RShift: ">>", + ast.Add: "+", + ast.Sub: "-", + ast.Mult: "*", + ast.Div: "/", + ast.FloorDiv: "//", + ast.Mod: "%%", # escaped for string formatting + ast.Eq: "==", + ast.NotEq: "!=", + ast.Lt: "<", + ast.LtE: "<=", + ast.Gt: ">", + ast.GtE: ">=", + ast.Pow: "**", + ast.Is: "is", + ast.IsNot: "is not", + ast.In: "in", + ast.NotIn: "not in", + ast.MatMult: "@", +} + + +def traverse_node(node: ast.AST) -> Iterator[ast.AST]: + """Recursively yield node and all its children in depth-first order.""" + yield node + for child in ast.iter_child_nodes(node): + yield from traverse_node(child) + + +@functools.lru_cache(maxsize=1) +def _get_assertion_exprs(src: bytes) -> Dict[int, str]: + """Return a mapping from {lineno: "assertion test expression"}.""" + ret: Dict[int, str] = {} + + depth = 0 + lines: List[str] = [] + assert_lineno: Optional[int] = None + seen_lines: Set[int] = set() + + def _write_and_reset() -> None: + nonlocal depth, 
lines, assert_lineno, seen_lines
+        assert assert_lineno is not None
+        ret[assert_lineno] = "".join(lines).rstrip().rstrip("\\")
+        depth = 0
+        lines = []
+        assert_lineno = None
+        seen_lines = set()
+
+    tokens = tokenize.tokenize(io.BytesIO(src).readline)
+    for tp, source, (lineno, offset), _, line in tokens:
+        if tp == tokenize.NAME and source == "assert":
+            assert_lineno = lineno
+        elif assert_lineno is not None:
+            # keep track of depth for the assert-message `,` lookup
+            if tp == tokenize.OP and source in "([{":
+                depth += 1
+            elif tp == tokenize.OP and source in ")]}":
+                depth -= 1
+
+            if not lines:
+                lines.append(line[offset:])
+                seen_lines.add(lineno)
+            # a non-nested comma separates the expression from the message
+            elif depth == 0 and tp == tokenize.OP and source == ",":
+                # one-line assert with message
+                if lineno in seen_lines and len(lines) == 1:
+                    offset_in_trimmed = offset + len(lines[-1]) - len(line)
+                    lines[-1] = lines[-1][:offset_in_trimmed]
+                # multi-line assert with message
+                elif lineno in seen_lines:
+                    lines[-1] = lines[-1][:offset]
+                # multi-line assert with an escaped newline before the message
+                else:
+                    lines.append(line[:offset])
+                _write_and_reset()
+            elif tp in {tokenize.NEWLINE, tokenize.ENDMARKER}:
+                _write_and_reset()
+            elif lines and lineno not in seen_lines:
+                lines.append(line)
+                seen_lines.add(lineno)
+
+    return ret
+
+
+class AssertionRewriter(ast.NodeVisitor):
+    """Assertion rewriting implementation.
+
+    The main entry point is to call .run() with an ast.Module instance;
+    this will then find all the assert statements and rewrite them to
+    provide intermediate values and a detailed assertion error.  See
+    http://pybites.blogspot.be/2011/07/behind-scenes-of-pytests-new-assertion.html
+    for an overview of how this works.
+
+    The entry point here is .run() which will iterate over all the
+    statements in an ast.Module and for each ast.Assert statement it
+    finds call .visit() with it.  Then .visit_Assert() takes over and
+    is responsible for creating new ast statements to replace the
+    original assert statement: it rewrites the test of an assertion
+    to provide intermediate values and replaces it with an if statement
+    which raises an assertion error with a detailed explanation in
+    case the expression is false, and calls the pytest_assertion_pass
+    hook if the expression is true.
+
+    For this .visit_Assert() uses the visitor pattern to visit all the
+    AST nodes of the ast.Assert.test field, each visit call returning
+    an AST node and the corresponding explanation string.  During this,
+    state is kept in several instance attributes:
+
+    :statements: All the AST statements which will replace the assert
+       statement.
+
+    :variables: This is populated by .variable() with each variable
+       used by the statements so that they can all be set to None at
+       the end of the statements.
+
+    :variable_counter: Counter to create new unique variables needed
+       by statements.  Variables are created using .variable() and
+       have the form of "@py_assert0".
+
+    :expl_stmts: The AST statements which will be executed to get
+       data from the assertion.  This is the code which will construct
+       the detailed assertion message that is used in the AssertionError
+       or for the pytest_assertion_pass hook.
+
+    :explanation_specifiers: A dict filled by .explanation_param()
+       with %-formatting placeholders and their corresponding
+       expressions to use in the building of an assertion message.
+       This is used by .pop_format_context() to build a message.
+ + :stack: A stack of the explanation_specifiers dicts maintained by + .push_format_context() and .pop_format_context() which allows + to build another %-formatted string while already building one. + + :variables_overwrite: A dict filled with references to variables + that change value within an assert. This happens when a variable is + reassigned with the walrus operator + + This state, except the variables_overwrite, is reset on every new assert + statement visited and used by the other visitors. + """ + + def __init__( + self, module_path: Optional[str], config: Optional[Config], source: bytes + ) -> None: + super().__init__() + self.module_path = module_path + self.config = config + if config is not None: + self.enable_assertion_pass_hook = config.getini( + "enable_assertion_pass_hook" + ) + else: + self.enable_assertion_pass_hook = False + self.source = source + self.variables_overwrite: Dict[str, str] = {} + + def run(self, mod: ast.Module) -> None: + """Find all assert statements in *mod* and rewrite them.""" + if not mod.body: + # Nothing to do. + return + + # We'll insert some special imports at the top of the module, but after any + # docstrings and __future__ imports, so first figure out where that is. + doc = getattr(mod, "docstring", None) + expect_docstring = doc is None + if doc is not None and self.is_rewrite_disabled(doc): + return + pos = 0 + item = None + for item in mod.body: + if ( + expect_docstring + and isinstance(item, ast.Expr) + and isinstance(item.value, astStr) + ): + if sys.version_info >= (3, 8): + doc = item.value.value + else: + doc = item.value.s + if self.is_rewrite_disabled(doc): + return + expect_docstring = False + elif ( + isinstance(item, ast.ImportFrom) + and item.level == 0 + and item.module == "__future__" + ): + pass + else: + break + pos += 1 + # Special case: for a decorated function, set the lineno to that of the + # first decorator, not the `def`. Issue #4984. + if isinstance(item, ast.FunctionDef) and item.decorator_list: + lineno = item.decorator_list[0].lineno + else: + lineno = item.lineno + # Now actually insert the special imports. + if sys.version_info >= (3, 10): + aliases = [ + ast.alias("builtins", "@py_builtins", lineno=lineno, col_offset=0), + ast.alias( + "_pytest.assertion.rewrite", + "@pytest_ar", + lineno=lineno, + col_offset=0, + ), + ] + else: + aliases = [ + ast.alias("builtins", "@py_builtins"), + ast.alias("_pytest.assertion.rewrite", "@pytest_ar"), + ] + imports = [ + ast.Import([alias], lineno=lineno, col_offset=0) for alias in aliases + ] + mod.body[pos:pos] = imports + + # Collect asserts. + nodes: List[ast.AST] = [mod] + while nodes: + node = nodes.pop() + for name, field in ast.iter_fields(node): + if isinstance(field, list): + new: List[ast.AST] = [] + for i, child in enumerate(field): + if isinstance(child, ast.Assert): + # Transform assert. + new.extend(self.visit(child)) + else: + new.append(child) + if isinstance(child, ast.AST): + nodes.append(child) + setattr(node, name, new) + elif ( + isinstance(field, ast.AST) + # Don't recurse into expressions as they can't contain + # asserts. + and not isinstance(field, ast.expr) + ): + nodes.append(field) + + @staticmethod + def is_rewrite_disabled(docstring: str) -> bool: + return "PYTEST_DONT_REWRITE" in docstring + + def variable(self) -> str: + """Get a new variable.""" + # Use a character invalid in python identifiers to avoid clashing. 
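+        # E.g. successive calls yield "@py_assert0", "@py_assert1", ...;
+        # since "@" cannot appear in a Python identifier, these temporaries
+        # can never shadow names from the test module.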
+ name = "@py_assert" + str(next(self.variable_counter)) + self.variables.append(name) + return name + + def assign(self, expr: ast.expr) -> ast.Name: + """Give *expr* a name.""" + name = self.variable() + self.statements.append(ast.Assign([ast.Name(name, ast.Store())], expr)) + return ast.Name(name, ast.Load()) + + def display(self, expr: ast.expr) -> ast.expr: + """Call saferepr on the expression.""" + return self.helper("_saferepr", expr) + + def helper(self, name: str, *args: ast.expr) -> ast.expr: + """Call a helper in this module.""" + py_name = ast.Name("@pytest_ar", ast.Load()) + attr = ast.Attribute(py_name, name, ast.Load()) + return ast.Call(attr, list(args), []) + + def builtin(self, name: str) -> ast.Attribute: + """Return the builtin called *name*.""" + builtin_name = ast.Name("@py_builtins", ast.Load()) + return ast.Attribute(builtin_name, name, ast.Load()) + + def explanation_param(self, expr: ast.expr) -> str: + """Return a new named %-formatting placeholder for expr. + + This creates a %-formatting placeholder for expr in the + current formatting context, e.g. ``%(py0)s``. The placeholder + and expr are placed in the current format context so that it + can be used on the next call to .pop_format_context(). + """ + specifier = "py" + str(next(self.variable_counter)) + self.explanation_specifiers[specifier] = expr + return "%(" + specifier + ")s" + + def push_format_context(self) -> None: + """Create a new formatting context. + + The format context is used for when an explanation wants to + have a variable value formatted in the assertion message. In + this case the value required can be added using + .explanation_param(). Finally .pop_format_context() is used + to format a string of %-formatted values as added by + .explanation_param(). + """ + self.explanation_specifiers: Dict[str, ast.expr] = {} + self.stack.append(self.explanation_specifiers) + + def pop_format_context(self, expl_expr: ast.expr) -> ast.Name: + """Format the %-formatted string with current format context. + + The expl_expr should be an str ast.expr instance constructed from + the %-placeholders created by .explanation_param(). This will + add the required code to format said string to .expl_stmts and + return the ast.Name instance of the formatted string. + """ + current = self.stack.pop() + if self.stack: + self.explanation_specifiers = self.stack[-1] + keys = [astStr(key) for key in current.keys()] + format_dict = ast.Dict(keys, list(current.values())) + form = ast.BinOp(expl_expr, ast.Mod(), format_dict) + name = "@py_format" + str(next(self.variable_counter)) + if self.enable_assertion_pass_hook: + self.format_variables.append(name) + self.expl_stmts.append(ast.Assign([ast.Name(name, ast.Store())], form)) + return ast.Name(name, ast.Load()) + + def generic_visit(self, node: ast.AST) -> Tuple[ast.Name, str]: + """Handle expressions we don't have custom code for.""" + assert isinstance(node, ast.expr) + res = self.assign(node) + return res, self.explanation_param(self.display(res)) + + def visit_Assert(self, assert_: ast.Assert) -> List[ast.stmt]: + """Return the AST statements to replace the ast.Assert instance. + + This rewrites the test of an assertion to provide + intermediate values and replace it with an if statement which + raises an assertion error with a detailed explanation in case + the expression is false. 
+ """ + if isinstance(assert_.test, ast.Tuple) and len(assert_.test.elts) >= 1: + from _pytest.warning_types import PytestAssertRewriteWarning + import warnings + + # TODO: This assert should not be needed. + assert self.module_path is not None + warnings.warn_explicit( + PytestAssertRewriteWarning( + "assertion is always true, perhaps remove parentheses?" + ), + category=None, + filename=self.module_path, + lineno=assert_.lineno, + ) + + self.statements: List[ast.stmt] = [] + self.variables: List[str] = [] + self.variable_counter = itertools.count() + + if self.enable_assertion_pass_hook: + self.format_variables: List[str] = [] + + self.stack: List[Dict[str, ast.expr]] = [] + self.expl_stmts: List[ast.stmt] = [] + self.push_format_context() + # Rewrite assert into a bunch of statements. + top_condition, explanation = self.visit(assert_.test) + + negation = ast.UnaryOp(ast.Not(), top_condition) + + if self.enable_assertion_pass_hook: # Experimental pytest_assertion_pass hook + msg = self.pop_format_context(astStr(explanation)) + + # Failed + if assert_.msg: + assertmsg = self.helper("_format_assertmsg", assert_.msg) + gluestr = "\n>assert " + else: + assertmsg = astStr("") + gluestr = "assert " + err_explanation = ast.BinOp(astStr(gluestr), ast.Add(), msg) + err_msg = ast.BinOp(assertmsg, ast.Add(), err_explanation) + err_name = ast.Name("AssertionError", ast.Load()) + fmt = self.helper("_format_explanation", err_msg) + exc = ast.Call(err_name, [fmt], []) + raise_ = ast.Raise(exc, None) + statements_fail = [] + statements_fail.extend(self.expl_stmts) + statements_fail.append(raise_) + + # Passed + fmt_pass = self.helper("_format_explanation", msg) + orig = _get_assertion_exprs(self.source)[assert_.lineno] + hook_call_pass = ast.Expr( + self.helper( + "_call_assertion_pass", + astNum(assert_.lineno), + astStr(orig), + fmt_pass, + ) + ) + # If any hooks implement assert_pass hook + hook_impl_test = ast.If( + self.helper("_check_if_assertion_pass_impl"), + self.expl_stmts + [hook_call_pass], + [], + ) + statements_pass = [hook_impl_test] + + # Test for assertion condition + main_test = ast.If(negation, statements_fail, statements_pass) + self.statements.append(main_test) + if self.format_variables: + variables = [ + ast.Name(name, ast.Store()) for name in self.format_variables + ] + clear_format = ast.Assign(variables, astNameConstant(None)) + self.statements.append(clear_format) + + else: # Original assertion rewriting + # Create failure message. + body = self.expl_stmts + self.statements.append(ast.If(negation, body, [])) + if assert_.msg: + assertmsg = self.helper("_format_assertmsg", assert_.msg) + explanation = "\n>assert " + explanation + else: + assertmsg = astStr("") + explanation = "assert " + explanation + template = ast.BinOp(assertmsg, ast.Add(), astStr(explanation)) + msg = self.pop_format_context(template) + fmt = self.helper("_format_explanation", msg) + err_name = ast.Name("AssertionError", ast.Load()) + exc = ast.Call(err_name, [fmt], []) + raise_ = ast.Raise(exc, None) + + body.append(raise_) + + # Clear temporary variables by setting them to None. + if self.variables: + variables = [ast.Name(name, ast.Store()) for name in self.variables] + clear = ast.Assign(variables, astNameConstant(None)) + self.statements.append(clear) + # Fix locations (line numbers/column offsets). 
+ for stmt in self.statements: + for node in traverse_node(stmt): + ast.copy_location(node, assert_) + return self.statements + + def visit_NamedExpr(self, name: namedExpr) -> Tuple[namedExpr, str]: + # This method handles the 'walrus operator' repr of the target + # name if it's a local variable or _should_repr_global_name() + # thinks it's acceptable. + locs = ast.Call(self.builtin("locals"), [], []) + target_id = name.target.id # type: ignore[attr-defined] + inlocs = ast.Compare(astStr(target_id), [ast.In()], [locs]) + dorepr = self.helper("_should_repr_global_name", name) + test = ast.BoolOp(ast.Or(), [inlocs, dorepr]) + expr = ast.IfExp(test, self.display(name), astStr(target_id)) + return name, self.explanation_param(expr) + + def visit_Name(self, name: ast.Name) -> Tuple[ast.Name, str]: + # Display the repr of the name if it's a local variable or + # _should_repr_global_name() thinks it's acceptable. + locs = ast.Call(self.builtin("locals"), [], []) + inlocs = ast.Compare(astStr(name.id), [ast.In()], [locs]) + dorepr = self.helper("_should_repr_global_name", name) + test = ast.BoolOp(ast.Or(), [inlocs, dorepr]) + expr = ast.IfExp(test, self.display(name), astStr(name.id)) + return name, self.explanation_param(expr) + + def visit_BoolOp(self, boolop: ast.BoolOp) -> Tuple[ast.Name, str]: + res_var = self.variable() + expl_list = self.assign(ast.List([], ast.Load())) + app = ast.Attribute(expl_list, "append", ast.Load()) + is_or = int(isinstance(boolop.op, ast.Or)) + body = save = self.statements + fail_save = self.expl_stmts + levels = len(boolop.values) - 1 + self.push_format_context() + # Process each operand, short-circuiting if needed. + for i, v in enumerate(boolop.values): + if i: + fail_inner: List[ast.stmt] = [] + # cond is set in a prior loop iteration below + self.expl_stmts.append(ast.If(cond, fail_inner, [])) # noqa + self.expl_stmts = fail_inner + # Check if the left operand is a namedExpr and the value has already been visited + if ( + isinstance(v, ast.Compare) + and isinstance(v.left, namedExpr) + and v.left.target.id + in [ + ast_expr.id + for ast_expr in boolop.values[:i] + if hasattr(ast_expr, "id") + ] + ): + pytest_temp = self.variable() + self.variables_overwrite[ + v.left.target.id + ] = v.left # type:ignore[assignment] + v.left.target.id = pytest_temp + self.push_format_context() + res, expl = self.visit(v) + body.append(ast.Assign([ast.Name(res_var, ast.Store())], res)) + expl_format = self.pop_format_context(astStr(expl)) + call = ast.Call(app, [expl_format], []) + self.expl_stmts.append(ast.Expr(call)) + if i < levels: + cond: ast.expr = res + if is_or: + cond = ast.UnaryOp(ast.Not(), cond) + inner: List[ast.stmt] = [] + self.statements.append(ast.If(cond, inner, [])) + self.statements = body = inner + self.statements = save + self.expl_stmts = fail_save + expl_template = self.helper("_format_boolop", expl_list, astNum(is_or)) + expl = self.pop_format_context(expl_template) + return ast.Name(res_var, ast.Load()), self.explanation_param(expl) + + def visit_UnaryOp(self, unary: ast.UnaryOp) -> Tuple[ast.Name, str]: + pattern = UNARY_MAP[unary.op.__class__] + operand_res, operand_expl = self.visit(unary.operand) + res = self.assign(ast.UnaryOp(unary.op, operand_res)) + return res, pattern % (operand_expl,) + + def visit_BinOp(self, binop: ast.BinOp) -> Tuple[ast.Name, str]: + symbol = BINOP_MAP[binop.op.__class__] + left_expr, left_expl = self.visit(binop.left) + right_expr, right_expl = self.visit(binop.right) + explanation = f"({left_expl} {symbol} 
{right_expl})" + res = self.assign(ast.BinOp(left_expr, binop.op, right_expr)) + return res, explanation + + def visit_Call(self, call: ast.Call) -> Tuple[ast.Name, str]: + new_func, func_expl = self.visit(call.func) + arg_expls = [] + new_args = [] + new_kwargs = [] + for arg in call.args: + if isinstance(arg, ast.Name) and arg.id in self.variables_overwrite: + arg = self.variables_overwrite[arg.id] # type:ignore[assignment] + res, expl = self.visit(arg) + arg_expls.append(expl) + new_args.append(res) + for keyword in call.keywords: + if ( + isinstance(keyword.value, ast.Name) + and keyword.value.id in self.variables_overwrite + ): + keyword.value = self.variables_overwrite[ + keyword.value.id + ] # type:ignore[assignment] + res, expl = self.visit(keyword.value) + new_kwargs.append(ast.keyword(keyword.arg, res)) + if keyword.arg: + arg_expls.append(keyword.arg + "=" + expl) + else: # **args have `arg` keywords with an .arg of None + arg_expls.append("**" + expl) + + expl = "{}({})".format(func_expl, ", ".join(arg_expls)) + new_call = ast.Call(new_func, new_args, new_kwargs) + res = self.assign(new_call) + res_expl = self.explanation_param(self.display(res)) + outer_expl = f"{res_expl}\n{{{res_expl} = {expl}\n}}" + return res, outer_expl + + def visit_Starred(self, starred: ast.Starred) -> Tuple[ast.Starred, str]: + # A Starred node can appear in a function call. + res, expl = self.visit(starred.value) + new_starred = ast.Starred(res, starred.ctx) + return new_starred, "*" + expl + + def visit_Attribute(self, attr: ast.Attribute) -> Tuple[ast.Name, str]: + if not isinstance(attr.ctx, ast.Load): + return self.generic_visit(attr) + value, value_expl = self.visit(attr.value) + res = self.assign(ast.Attribute(value, attr.attr, ast.Load())) + res_expl = self.explanation_param(self.display(res)) + pat = "%s\n{%s = %s.%s\n}" + expl = pat % (res_expl, res_expl, value_expl, attr.attr) + return res, expl + + def visit_Compare(self, comp: ast.Compare) -> Tuple[ast.expr, str]: + self.push_format_context() + # We first check if we have overwritten a variable in the previous assert + if isinstance(comp.left, ast.Name) and comp.left.id in self.variables_overwrite: + comp.left = self.variables_overwrite[ + comp.left.id + ] # type:ignore[assignment] + if isinstance(comp.left, namedExpr): + self.variables_overwrite[ + comp.left.target.id + ] = comp.left # type:ignore[assignment] + left_res, left_expl = self.visit(comp.left) + if isinstance(comp.left, (ast.Compare, ast.BoolOp)): + left_expl = f"({left_expl})" + res_variables = [self.variable() for i in range(len(comp.ops))] + load_names = [ast.Name(v, ast.Load()) for v in res_variables] + store_names = [ast.Name(v, ast.Store()) for v in res_variables] + it = zip(range(len(comp.ops)), comp.ops, comp.comparators) + expls = [] + syms = [] + results = [left_res] + for i, op, next_operand in it: + if ( + isinstance(next_operand, namedExpr) + and isinstance(left_res, ast.Name) + and next_operand.target.id == left_res.id + ): + next_operand.target.id = self.variable() + self.variables_overwrite[ + left_res.id + ] = next_operand # type:ignore[assignment] + next_res, next_expl = self.visit(next_operand) + if isinstance(next_operand, (ast.Compare, ast.BoolOp)): + next_expl = f"({next_expl})" + results.append(next_res) + sym = BINOP_MAP[op.__class__] + syms.append(astStr(sym)) + expl = f"{left_expl} {sym} {next_expl}" + expls.append(astStr(expl)) + res_expr = ast.Compare(left_res, [op], [next_res]) + self.statements.append(ast.Assign([store_names[i]], res_expr)) + 
left_res, left_expl = next_res, next_expl + # Use pytest.assertion.util._reprcompare if that's available. + expl_call = self.helper( + "_call_reprcompare", + ast.Tuple(syms, ast.Load()), + ast.Tuple(load_names, ast.Load()), + ast.Tuple(expls, ast.Load()), + ast.Tuple(results, ast.Load()), + ) + if len(comp.ops) > 1: + res: ast.expr = ast.BoolOp(ast.And(), load_names) + else: + res = load_names[0] + + return res, self.explanation_param(self.pop_format_context(expl_call)) + + +def try_makedirs(cache_dir: Path) -> bool: + """Attempt to create the given directory and sub-directories exist. + + Returns True if successful or if it already exists. + """ + try: + os.makedirs(cache_dir, exist_ok=True) + except (FileNotFoundError, NotADirectoryError, FileExistsError): + # One of the path components was not a directory: + # - we're in a zip file + # - it is a file + return False + except PermissionError: + return False + except OSError as e: + # as of now, EROFS doesn't have an equivalent OSError-subclass + if e.errno == errno.EROFS: + return False + raise + return True + + +def get_cache_dir(file_path: Path) -> Path: + """Return the cache directory to write .pyc files for the given .py file path.""" + if sys.version_info >= (3, 8) and sys.pycache_prefix: + # given: + # prefix = '/tmp/pycs' + # path = '/home/user/proj/test_app.py' + # we want: + # '/tmp/pycs/home/user/proj' + return Path(sys.pycache_prefix) / Path(*file_path.parts[1:-1]) + else: + # classic pycache directory + return file_path.parent / "__pycache__" diff --git a/venv/lib/python3.11/site-packages/_pytest/assertion/truncate.py b/venv/lib/python3.11/site-packages/_pytest/assertion/truncate.py new file mode 100644 index 0000000..dfd6f65 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/assertion/truncate.py @@ -0,0 +1,115 @@ +"""Utilities for truncating assertion output. + +Current default behaviour is to truncate assertion explanations at +~8 terminal lines, unless running in "-vv" mode or running on CI. +""" +from typing import List +from typing import Optional + +from _pytest.assertion import util +from _pytest.nodes import Item + + +DEFAULT_MAX_LINES = 8 +DEFAULT_MAX_CHARS = 8 * 80 +USAGE_MSG = "use '-vv' to show" + + +def truncate_if_required( + explanation: List[str], item: Item, max_length: Optional[int] = None +) -> List[str]: + """Truncate this assertion explanation if the given test item is eligible.""" + if _should_truncate_item(item): + return _truncate_explanation(explanation) + return explanation + + +def _should_truncate_item(item: Item) -> bool: + """Whether or not this test item is eligible for truncation.""" + verbose = item.config.option.verbose + return verbose < 2 and not util.running_on_ci() + + +def _truncate_explanation( + input_lines: List[str], + max_lines: Optional[int] = None, + max_chars: Optional[int] = None, +) -> List[str]: + """Truncate given list of strings that makes up the assertion explanation. + + Truncates to either 8 lines, or 640 characters - whichever the input reaches + first, taking the truncation explanation into account. The remaining lines + will be replaced by a usage message. 
+ """ + if max_lines is None: + max_lines = DEFAULT_MAX_LINES + if max_chars is None: + max_chars = DEFAULT_MAX_CHARS + + # Check if truncation required + input_char_count = len("".join(input_lines)) + # The length of the truncation explanation depends on the number of lines + # removed but is at least 68 characters: + # The real value is + # 64 (for the base message: + # '...\n...Full output truncated (1 line hidden), use '-vv' to show")' + # ) + # + 1 (for plural) + # + int(math.log10(len(input_lines) - max_lines)) (number of hidden line, at least 1) + # + 3 for the '...' added to the truncated line + # But if there's more than 100 lines it's very likely that we're going to + # truncate, so we don't need the exact value using log10. + tolerable_max_chars = ( + max_chars + 70 # 64 + 1 (for plural) + 2 (for '99') + 3 for '...' + ) + # The truncation explanation add two lines to the output + tolerable_max_lines = max_lines + 2 + if ( + len(input_lines) <= tolerable_max_lines + and input_char_count <= tolerable_max_chars + ): + return input_lines + # Truncate first to max_lines, and then truncate to max_chars if necessary + truncated_explanation = input_lines[:max_lines] + truncated_char = True + # We reevaluate the need to truncate chars following removal of some lines + if len("".join(truncated_explanation)) > tolerable_max_chars: + truncated_explanation = _truncate_by_char_count( + truncated_explanation, max_chars + ) + else: + truncated_char = False + + truncated_line_count = len(input_lines) - len(truncated_explanation) + if truncated_explanation[-1]: + # Add ellipsis and take into account part-truncated final line + truncated_explanation[-1] = truncated_explanation[-1] + "..." + if truncated_char: + # It's possible that we did not remove any char from this line + truncated_line_count += 1 + else: + # Add proper ellipsis when we were able to fit a full line exactly + truncated_explanation[-1] = "..." 
+ return truncated_explanation + [ + "", + f"...Full output truncated ({truncated_line_count} line" + f"{'' if truncated_line_count == 1 else 's'} hidden), {USAGE_MSG}", + ] + + +def _truncate_by_char_count(input_lines: List[str], max_chars: int) -> List[str]: + # Find point at which input length exceeds total allowed length + iterated_char_count = 0 + for iterated_index, input_line in enumerate(input_lines): + if iterated_char_count + len(input_line) > max_chars: + break + iterated_char_count += len(input_line) + + # Create truncated explanation with modified final line + truncated_result = input_lines[:iterated_index] + final_line = input_lines[iterated_index] + if final_line: + final_line_truncate_point = max_chars - iterated_char_count + final_line = final_line[:final_line_truncate_point] + truncated_result.append(final_line) + return truncated_result diff --git a/venv/lib/python3.11/site-packages/_pytest/assertion/util.py b/venv/lib/python3.11/site-packages/_pytest/assertion/util.py new file mode 100644 index 0000000..fc5dfdb --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/assertion/util.py @@ -0,0 +1,522 @@ +"""Utilities for assertion debugging.""" +import collections.abc +import os +import pprint +from typing import AbstractSet +from typing import Any +from typing import Callable +from typing import Iterable +from typing import List +from typing import Mapping +from typing import Optional +from typing import Sequence +from unicodedata import normalize + +import _pytest._code +from _pytest import outcomes +from _pytest._io.saferepr import _pformat_dispatch +from _pytest._io.saferepr import saferepr +from _pytest._io.saferepr import saferepr_unlimited +from _pytest.config import Config + +# The _reprcompare attribute on the util module is used by the new assertion +# interpretation code and assertion rewriter to detect this plugin was +# loaded and in turn call the hooks defined here as part of the +# DebugInterpreter. +_reprcompare: Optional[Callable[[str, object, object], Optional[str]]] = None + +# Works similarly as _reprcompare attribute. Is populated with the hook call +# when pytest_runtest_setup is called. +_assertion_pass: Optional[Callable[[int, str, str], None]] = None + +# Config object which is assigned during pytest_runtest_protocol. +_config: Optional[Config] = None + + +def format_explanation(explanation: str) -> str: + r"""Format an explanation. + + Normally all embedded newlines are escaped, however there are + three exceptions: \n{, \n} and \n~. The first two are intended + cover nested explanations, see function and attribute explanations + for examples (.visit_Call(), visit_Attribute()). The last one is + for when one explanation needs to span multiple lines, e.g. when + displaying diffs. + """ + lines = _split_explanation(explanation) + result = _format_lines(lines) + return "\n".join(result) + + +def _split_explanation(explanation: str) -> List[str]: + r"""Return a list of individual lines in the explanation. + + This will return a list of lines split on '\n{', '\n}' and '\n~'. + Any other newlines will be escaped and appear in the line as the + literal '\n' characters. + """ + raw_lines = (explanation or "").split("\n") + lines = [raw_lines[0]] + for values in raw_lines[1:]: + if values and values[0] in ["{", "}", "~", ">"]: + lines.append(values) + else: + lines[-1] += "\\n" + values + return lines + + +def _format_lines(lines: Sequence[str]) -> List[str]: + """Format the individual lines. 
+
+    This will replace the '{', '}' and '~' characters of our mini formatting
+    language with the proper 'where ...', 'and ...' and ' + ...' text, taking
+    care of indentation along the way.
+
+    Return a list of formatted lines.
+    """
+    result = list(lines[:1])
+    stack = [0]
+    stackcnt = [0]
+    for line in lines[1:]:
+        if line.startswith("{"):
+            if stackcnt[-1]:
+                s = "and   "
+            else:
+                s = "where "
+            stack.append(len(result))
+            stackcnt[-1] += 1
+            stackcnt.append(0)
+            result.append(" +" + "  " * (len(stack) - 1) + s + line[1:])
+        elif line.startswith("}"):
+            stack.pop()
+            stackcnt.pop()
+            result[stack[-1]] += line[1:]
+        else:
+            assert line[0] in ["~", ">"]
+            stack[-1] += 1
+            indent = len(stack) if line.startswith("~") else len(stack) - 1
+            result.append("  " * indent + line[1:])
+    assert len(stack) == 1
+    return result
+
+
+def issequence(x: Any) -> bool:
+    return isinstance(x, collections.abc.Sequence) and not isinstance(x, str)
+
+
+def istext(x: Any) -> bool:
+    return isinstance(x, str)
+
+
+def isdict(x: Any) -> bool:
+    return isinstance(x, dict)
+
+
+def isset(x: Any) -> bool:
+    return isinstance(x, (set, frozenset))
+
+
+def isnamedtuple(obj: Any) -> bool:
+    return isinstance(obj, tuple) and getattr(obj, "_fields", None) is not None
+
+
+def isdatacls(obj: Any) -> bool:
+    return getattr(obj, "__dataclass_fields__", None) is not None
+
+
+def isattrs(obj: Any) -> bool:
+    return getattr(obj, "__attrs_attrs__", None) is not None
+
+
+def isiterable(obj: Any) -> bool:
+    try:
+        iter(obj)
+        return not istext(obj)
+    except TypeError:
+        return False
+
+
+def has_default_eq(
+    obj: object,
+) -> bool:
+    """Check if an instance of an object contains the default eq.
+
+    First, we check if the object's __eq__ attribute has __code__;
+    if so, we check the equality of the method's code filename
+    (__code__.co_filename) against the default one generated by the
+    dataclass and attr modules: for dataclasses the default co_filename
+    is <string>, while for attrs classes the __eq__ filename should
+    contain "attrs generated eq".
+    """
+    # inspired from https://github.com/willmcgugan/rich/blob/07d51ffc1aee6f16bd2e5a25b4e82850fb9ed778/rich/pretty.py#L68
+    if hasattr(obj.__eq__, "__code__") and hasattr(obj.__eq__.__code__, "co_filename"):
+        code_filename = obj.__eq__.__code__.co_filename
+
+        if isattrs(obj):
+            return "attrs generated eq" in code_filename
+
+        return code_filename == "<string>"  # data class
+    return True
+
+
+def assertrepr_compare(
+    config, op: str, left: Any, right: Any, use_ascii: bool = False
+) -> Optional[List[str]]:
+    """Return specialised explanations for some operators/operands."""
+    verbose = config.getoption("verbose")
+
+    # Strings which normalize equal are often hard to distinguish when
+    # printed; use ascii() to make this easier.
+    # See issue #3246.
+    use_ascii = (
+        isinstance(left, str)
+        and isinstance(right, str)
+        and normalize("NFD", left) == normalize("NFD", right)
+    )
+
+    if verbose > 1:
+        left_repr = saferepr_unlimited(left, use_ascii=use_ascii)
+        right_repr = saferepr_unlimited(right, use_ascii=use_ascii)
+    else:
+        # XXX: "15 chars indentation" is wrong
+        #      ("E       AssertionError: assert "); should use term width.
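+        # E.g. for op == "==" this works out to (80 - 15 - 2 - 2) // 2 == 30
+        # characters per side before saferepr truncates the operand reprs.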
+ maxsize = ( + 80 - 15 - len(op) - 2 + ) // 2 # 15 chars indentation, 1 space around op + + left_repr = saferepr(left, maxsize=maxsize, use_ascii=use_ascii) + right_repr = saferepr(right, maxsize=maxsize, use_ascii=use_ascii) + + summary = f"{left_repr} {op} {right_repr}" + + explanation = None + try: + if op == "==": + explanation = _compare_eq_any(left, right, verbose) + elif op == "not in": + if istext(left) and istext(right): + explanation = _notin_text(left, right, verbose) + except outcomes.Exit: + raise + except Exception: + explanation = [ + "(pytest_assertion plugin: representation of details failed: {}.".format( + _pytest._code.ExceptionInfo.from_current()._getreprcrash() + ), + " Probably an object has a faulty __repr__.)", + ] + + if not explanation: + return None + + return [summary] + explanation + + +def _compare_eq_any(left: Any, right: Any, verbose: int = 0) -> List[str]: + explanation = [] + if istext(left) and istext(right): + explanation = _diff_text(left, right, verbose) + else: + from _pytest.python_api import ApproxBase + + if isinstance(left, ApproxBase) or isinstance(right, ApproxBase): + # Although the common order should be obtained == expected, this ensures both ways + approx_side = left if isinstance(left, ApproxBase) else right + other_side = right if isinstance(left, ApproxBase) else left + + explanation = approx_side._repr_compare(other_side) + elif type(left) == type(right) and ( + isdatacls(left) or isattrs(left) or isnamedtuple(left) + ): + # Note: unlike dataclasses/attrs, namedtuples compare only the + # field values, not the type or field names. But this branch + # intentionally only handles the same-type case, which was often + # used in older code bases before dataclasses/attrs were available. + explanation = _compare_eq_cls(left, right, verbose) + elif issequence(left) and issequence(right): + explanation = _compare_eq_sequence(left, right, verbose) + elif isset(left) and isset(right): + explanation = _compare_eq_set(left, right, verbose) + elif isdict(left) and isdict(right): + explanation = _compare_eq_dict(left, right, verbose) + + if isiterable(left) and isiterable(right): + expl = _compare_eq_iterable(left, right, verbose) + explanation.extend(expl) + + return explanation + + +def _diff_text(left: str, right: str, verbose: int = 0) -> List[str]: + """Return the explanation for the diff between text. + + Unless --verbose is used this will skip leading and trailing + characters which are identical to keep the diff minimal. 
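+
+    For example, two 50-character strings that first differ at index 45
+    would have their first 35 identical characters skipped, with a note
+    like "Skipping 35 identical leading characters in diff, use -v to
+    show".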
+ """ + from difflib import ndiff + + explanation: List[str] = [] + + if verbose < 1: + i = 0 # just in case left or right has zero length + for i in range(min(len(left), len(right))): + if left[i] != right[i]: + break + if i > 42: + i -= 10 # Provide some context + explanation = [ + "Skipping %s identical leading characters in diff, use -v to show" % i + ] + left = left[i:] + right = right[i:] + if len(left) == len(right): + for i in range(len(left)): + if left[-i] != right[-i]: + break + if i > 42: + i -= 10 # Provide some context + explanation += [ + "Skipping {} identical trailing " + "characters in diff, use -v to show".format(i) + ] + left = left[:-i] + right = right[:-i] + keepends = True + if left.isspace() or right.isspace(): + left = repr(str(left)) + right = repr(str(right)) + explanation += ["Strings contain only whitespace, escaping them using repr()"] + # "right" is the expected base against which we compare "left", + # see https://github.com/pytest-dev/pytest/issues/3333 + explanation += [ + line.strip("\n") + for line in ndiff(right.splitlines(keepends), left.splitlines(keepends)) + ] + return explanation + + +def _surrounding_parens_on_own_lines(lines: List[str]) -> None: + """Move opening/closing parenthesis/bracket to own lines.""" + opening = lines[0][:1] + if opening in ["(", "[", "{"]: + lines[0] = " " + lines[0][1:] + lines[:] = [opening] + lines + closing = lines[-1][-1:] + if closing in [")", "]", "}"]: + lines[-1] = lines[-1][:-1] + "," + lines[:] = lines + [closing] + + +def _compare_eq_iterable( + left: Iterable[Any], right: Iterable[Any], verbose: int = 0 +) -> List[str]: + if verbose <= 0 and not running_on_ci(): + return ["Use -v to get more diff"] + # dynamic import to speedup pytest + import difflib + + left_formatting = pprint.pformat(left).splitlines() + right_formatting = pprint.pformat(right).splitlines() + + # Re-format for different output lengths. 
+ lines_left = len(left_formatting) + lines_right = len(right_formatting) + if lines_left != lines_right: + left_formatting = _pformat_dispatch(left).splitlines() + right_formatting = _pformat_dispatch(right).splitlines() + + if lines_left > 1 or lines_right > 1: + _surrounding_parens_on_own_lines(left_formatting) + _surrounding_parens_on_own_lines(right_formatting) + + explanation = ["Full diff:"] + # "right" is the expected base against which we compare "left", + # see https://github.com/pytest-dev/pytest/issues/3333 + explanation.extend( + line.rstrip() for line in difflib.ndiff(right_formatting, left_formatting) + ) + return explanation + + +def _compare_eq_sequence( + left: Sequence[Any], right: Sequence[Any], verbose: int = 0 +) -> List[str]: + comparing_bytes = isinstance(left, bytes) and isinstance(right, bytes) + explanation: List[str] = [] + len_left = len(left) + len_right = len(right) + for i in range(min(len_left, len_right)): + if left[i] != right[i]: + if comparing_bytes: + # when comparing bytes, we want to see their ascii representation + # instead of their numeric values (#5260) + # using a slice gives us the ascii representation: + # >>> s = b'foo' + # >>> s[0] + # 102 + # >>> s[0:1] + # b'f' + left_value = left[i : i + 1] + right_value = right[i : i + 1] + else: + left_value = left[i] + right_value = right[i] + + explanation += [f"At index {i} diff: {left_value!r} != {right_value!r}"] + break + + if comparing_bytes: + # when comparing bytes, it doesn't help to show the "sides contain one or more + # items" longer explanation, so skip it + + return explanation + + len_diff = len_left - len_right + if len_diff: + if len_diff > 0: + dir_with_more = "Left" + extra = saferepr(left[len_right]) + else: + len_diff = 0 - len_diff + dir_with_more = "Right" + extra = saferepr(right[len_left]) + + if len_diff == 1: + explanation += [f"{dir_with_more} contains one more item: {extra}"] + else: + explanation += [ + "%s contains %d more items, first extra item: %s" + % (dir_with_more, len_diff, extra) + ] + return explanation + + +def _compare_eq_set( + left: AbstractSet[Any], right: AbstractSet[Any], verbose: int = 0 +) -> List[str]: + explanation = [] + diff_left = left - right + diff_right = right - left + if diff_left: + explanation.append("Extra items in the left set:") + for item in diff_left: + explanation.append(saferepr(item)) + if diff_right: + explanation.append("Extra items in the right set:") + for item in diff_right: + explanation.append(saferepr(item)) + return explanation + + +def _compare_eq_dict( + left: Mapping[Any, Any], right: Mapping[Any, Any], verbose: int = 0 +) -> List[str]: + explanation: List[str] = [] + set_left = set(left) + set_right = set(right) + common = set_left.intersection(set_right) + same = {k: left[k] for k in common if left[k] == right[k]} + if same and verbose < 2: + explanation += ["Omitting %s identical items, use -vv to show" % len(same)] + elif same: + explanation += ["Common items:"] + explanation += pprint.pformat(same).splitlines() + diff = {k for k in common if left[k] != right[k]} + if diff: + explanation += ["Differing items:"] + for k in diff: + explanation += [saferepr({k: left[k]}) + " != " + saferepr({k: right[k]})] + extra_left = set_left - set_right + len_extra_left = len(extra_left) + if len_extra_left: + explanation.append( + "Left contains %d more item%s:" + % (len_extra_left, "" if len_extra_left == 1 else "s") + ) + explanation.extend( + pprint.pformat({k: left[k] for k in extra_left}).splitlines() + ) + extra_right = 
set_right - set_left + len_extra_right = len(extra_right) + if len_extra_right: + explanation.append( + "Right contains %d more item%s:" + % (len_extra_right, "" if len_extra_right == 1 else "s") + ) + explanation.extend( + pprint.pformat({k: right[k] for k in extra_right}).splitlines() + ) + return explanation + + +def _compare_eq_cls(left: Any, right: Any, verbose: int) -> List[str]: + if not has_default_eq(left): + return [] + if isdatacls(left): + import dataclasses + + all_fields = dataclasses.fields(left) + fields_to_check = [info.name for info in all_fields if info.compare] + elif isattrs(left): + all_fields = left.__attrs_attrs__ + fields_to_check = [field.name for field in all_fields if getattr(field, "eq")] + elif isnamedtuple(left): + fields_to_check = left._fields + else: + assert False + + indent = " " + same = [] + diff = [] + for field in fields_to_check: + if getattr(left, field) == getattr(right, field): + same.append(field) + else: + diff.append(field) + + explanation = [] + if same or diff: + explanation += [""] + if same and verbose < 2: + explanation.append("Omitting %s identical items, use -vv to show" % len(same)) + elif same: + explanation += ["Matching attributes:"] + explanation += pprint.pformat(same).splitlines() + if diff: + explanation += ["Differing attributes:"] + explanation += pprint.pformat(diff).splitlines() + for field in diff: + field_left = getattr(left, field) + field_right = getattr(right, field) + explanation += [ + "", + "Drill down into differing attribute %s:" % field, + ("%s%s: %r != %r") % (indent, field, field_left, field_right), + ] + explanation += [ + indent + line + for line in _compare_eq_any(field_left, field_right, verbose) + ] + return explanation + + +def _notin_text(term: str, text: str, verbose: int = 0) -> List[str]: + index = text.find(term) + head = text[:index] + tail = text[index + len(term) :] + correct_text = head + tail + diff = _diff_text(text, correct_text, verbose) + newdiff = ["%s is contained here:" % saferepr(term, maxsize=42)] + for line in diff: + if line.startswith("Skipping"): + continue + if line.startswith("- "): + continue + if line.startswith("+ "): + newdiff.append(" " + line[2:]) + else: + newdiff.append(line) + return newdiff + + +def running_on_ci() -> bool: + """Check if we're currently running on a CI system.""" + env_vars = ["CI", "BUILD_NUMBER"] + return any(var in os.environ for var in env_vars) diff --git a/venv/lib/python3.11/site-packages/_pytest/cacheprovider.py b/venv/lib/python3.11/site-packages/_pytest/cacheprovider.py new file mode 100644 index 0000000..855716d --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/cacheprovider.py @@ -0,0 +1,598 @@ +"""Implementation of the cache provider.""" +# This plugin was not named "cache" to avoid conflicts with the external +# pytest-cache version. 
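+# Rough layout of the cache directory managed below (see the Cache class):
+#   <cache_dir>/v/...  -- JSON values stored via Cache.set()/Cache.get()
+#   <cache_dir>/d/...  -- directories created via Cache.mkdir()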
+import dataclasses +import json +import os +from pathlib import Path +from typing import Dict +from typing import Generator +from typing import Iterable +from typing import List +from typing import Optional +from typing import Set +from typing import Union + +from .pathlib import resolve_from_str +from .pathlib import rm_rf +from .reports import CollectReport +from _pytest import nodes +from _pytest._io import TerminalWriter +from _pytest.compat import final +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import FixtureRequest +from _pytest.main import Session +from _pytest.nodes import File +from _pytest.python import Package +from _pytest.reports import TestReport + +README_CONTENT = """\ +# pytest cache directory # + +This directory contains data from the pytest's cache plugin, +which provides the `--lf` and `--ff` options, as well as the `cache` fixture. + +**Do not** commit this to version control. + +See [the docs](https://docs.pytest.org/en/stable/how-to/cache.html) for more information. +""" + +CACHEDIR_TAG_CONTENT = b"""\ +Signature: 8a477f597d28d172789f06886806bc55 +# This file is a cache directory tag created by pytest. +# For information about cache directory tags, see: +# https://bford.info/cachedir/spec.html +""" + + +@final +@dataclasses.dataclass +class Cache: + """Instance of the `cache` fixture.""" + + _cachedir: Path = dataclasses.field(repr=False) + _config: Config = dataclasses.field(repr=False) + + # Sub-directory under cache-dir for directories created by `mkdir()`. + _CACHE_PREFIX_DIRS = "d" + + # Sub-directory under cache-dir for values created by `set()`. + _CACHE_PREFIX_VALUES = "v" + + def __init__( + self, cachedir: Path, config: Config, *, _ispytest: bool = False + ) -> None: + check_ispytest(_ispytest) + self._cachedir = cachedir + self._config = config + + @classmethod + def for_config(cls, config: Config, *, _ispytest: bool = False) -> "Cache": + """Create the Cache instance for a Config. + + :meta private: + """ + check_ispytest(_ispytest) + cachedir = cls.cache_dir_from_config(config, _ispytest=True) + if config.getoption("cacheclear") and cachedir.is_dir(): + cls.clear_cache(cachedir, _ispytest=True) + return cls(cachedir, config, _ispytest=True) + + @classmethod + def clear_cache(cls, cachedir: Path, _ispytest: bool = False) -> None: + """Clear the sub-directories used to hold cached directories and values. + + :meta private: + """ + check_ispytest(_ispytest) + for prefix in (cls._CACHE_PREFIX_DIRS, cls._CACHE_PREFIX_VALUES): + d = cachedir / prefix + if d.is_dir(): + rm_rf(d) + + @staticmethod + def cache_dir_from_config(config: Config, *, _ispytest: bool = False) -> Path: + """Get the path to the cache directory for a Config. + + :meta private: + """ + check_ispytest(_ispytest) + return resolve_from_str(config.getini("cache_dir"), config.rootpath) + + def warn(self, fmt: str, *, _ispytest: bool = False, **args: object) -> None: + """Issue a cache warning. + + :meta private: + """ + check_ispytest(_ispytest) + import warnings + from _pytest.warning_types import PytestCacheWarning + + warnings.warn( + PytestCacheWarning(fmt.format(**args) if args else fmt), + self._config.hook, + stacklevel=3, + ) + + def mkdir(self, name: str) -> Path: + """Return a directory path object with the given name. 
+ + If the directory does not yet exist, it will be created. You can use + it to manage files to e.g. store/retrieve database dumps across test + sessions. + + .. versionadded:: 7.0 + + :param name: + Must be a string not containing a ``/`` separator. + Make sure the name contains your plugin or application + identifiers to prevent clashes with other cache users. + """ + path = Path(name) + if len(path.parts) > 1: + raise ValueError("name is not allowed to contain path separators") + res = self._cachedir.joinpath(self._CACHE_PREFIX_DIRS, path) + res.mkdir(exist_ok=True, parents=True) + return res + + def _getvaluepath(self, key: str) -> Path: + return self._cachedir.joinpath(self._CACHE_PREFIX_VALUES, Path(key)) + + def get(self, key: str, default): + """Return the cached value for the given key. + + If no value was yet cached or the value cannot be read, the specified + default is returned. + + :param key: + Must be a ``/`` separated value. Usually the first + name is the name of your plugin or your application. + :param default: + The value to return in case of a cache-miss or invalid cache value. + """ + path = self._getvaluepath(key) + try: + with path.open("r", encoding="UTF-8") as f: + return json.load(f) + except (ValueError, OSError): + return default + + def set(self, key: str, value: object) -> None: + """Save value for the given key. + + :param key: + Must be a ``/`` separated value. Usually the first + name is the name of your plugin or your application. + :param value: + Must be of any combination of basic python types, + including nested types like lists of dictionaries. + """ + path = self._getvaluepath(key) + try: + if path.parent.is_dir(): + cache_dir_exists_already = True + else: + cache_dir_exists_already = self._cachedir.exists() + path.parent.mkdir(exist_ok=True, parents=True) + except OSError as exc: + self.warn( + f"could not create cache path {path}: {exc}", + _ispytest=True, + ) + return + if not cache_dir_exists_already: + self._ensure_supporting_files() + data = json.dumps(value, ensure_ascii=False, indent=2) + try: + f = path.open("w", encoding="UTF-8") + except OSError as exc: + self.warn( + f"cache could not write path {path}: {exc}", + _ispytest=True, + ) + else: + with f: + f.write(data) + + def _ensure_supporting_files(self) -> None: + """Create supporting files in the cache dir that are not really part of the cache.""" + readme_path = self._cachedir / "README.md" + readme_path.write_text(README_CONTENT, encoding="UTF-8") + + gitignore_path = self._cachedir.joinpath(".gitignore") + msg = "# Created by pytest automatically.\n*\n" + gitignore_path.write_text(msg, encoding="UTF-8") + + cachedir_tag_path = self._cachedir.joinpath("CACHEDIR.TAG") + cachedir_tag_path.write_bytes(CACHEDIR_TAG_CONTENT) + + +class LFPluginCollWrapper: + def __init__(self, lfplugin: "LFPlugin") -> None: + self.lfplugin = lfplugin + self._collected_at_least_one_failure = False + + @hookimpl(hookwrapper=True) + def pytest_make_collect_report(self, collector: nodes.Collector): + if isinstance(collector, (Session, Package)): + out = yield + res: CollectReport = out.get_result() + + # Sort any lf-paths to the beginning. + lf_paths = self.lfplugin._last_failed_paths + + # Use stable sort to priorize last failed. + def sort_key(node: Union[nodes.Item, nodes.Collector]) -> bool: + # Package.path is the __init__.py file, we need the directory. 
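+                # E.g. with reverse=True in the sorted() call below, nodes
+                # whose path is in lf_paths (key True) come first; the sort
+                # is stable, so order within each group is preserved.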
+ if isinstance(node, Package): + path = node.path.parent + else: + path = node.path + return path in lf_paths + + res.result = sorted( + res.result, + key=sort_key, + reverse=True, + ) + return + + elif isinstance(collector, File): + if collector.path in self.lfplugin._last_failed_paths: + out = yield + res = out.get_result() + result = res.result + lastfailed = self.lfplugin.lastfailed + + # Only filter with known failures. + if not self._collected_at_least_one_failure: + if not any(x.nodeid in lastfailed for x in result): + return + self.lfplugin.config.pluginmanager.register( + LFPluginCollSkipfiles(self.lfplugin), "lfplugin-collskip" + ) + self._collected_at_least_one_failure = True + + session = collector.session + result[:] = [ + x + for x in result + if x.nodeid in lastfailed + # Include any passed arguments (not trivial to filter). + or session.isinitpath(x.path) + # Keep all sub-collectors. + or isinstance(x, nodes.Collector) + ] + return + yield + + +class LFPluginCollSkipfiles: + def __init__(self, lfplugin: "LFPlugin") -> None: + self.lfplugin = lfplugin + + @hookimpl + def pytest_make_collect_report( + self, collector: nodes.Collector + ) -> Optional[CollectReport]: + # Packages are Files, but we only want to skip test-bearing Files, + # so don't filter Packages. + if isinstance(collector, File) and not isinstance(collector, Package): + if collector.path not in self.lfplugin._last_failed_paths: + self.lfplugin._skipped_files += 1 + + return CollectReport( + collector.nodeid, "passed", longrepr=None, result=[] + ) + return None + + +class LFPlugin: + """Plugin which implements the --lf (run last-failing) option.""" + + def __init__(self, config: Config) -> None: + self.config = config + active_keys = "lf", "failedfirst" + self.active = any(config.getoption(key) for key in active_keys) + assert config.cache + self.lastfailed: Dict[str, bool] = config.cache.get("cache/lastfailed", {}) + self._previously_failed_count: Optional[int] = None + self._report_status: Optional[str] = None + self._skipped_files = 0 # count skipped files during collection due to --lf + + if config.getoption("lf"): + self._last_failed_paths = self.get_last_failed_paths() + config.pluginmanager.register( + LFPluginCollWrapper(self), "lfplugin-collwrapper" + ) + + def get_last_failed_paths(self) -> Set[Path]: + """Return a set with all Paths of the previously failed nodeids and + their parents.""" + rootpath = self.config.rootpath + result = set() + for nodeid in self.lastfailed: + path = rootpath / nodeid.split("::")[0] + result.add(path) + result.update(path.parents) + return {x for x in result if x.exists()} + + def pytest_report_collectionfinish(self) -> Optional[str]: + if self.active and self.config.getoption("verbose") >= 0: + return "run-last-failure: %s" % self._report_status + return None + + def pytest_runtest_logreport(self, report: TestReport) -> None: + if (report.when == "call" and report.passed) or report.skipped: + self.lastfailed.pop(report.nodeid, None) + elif report.failed: + self.lastfailed[report.nodeid] = True + + def pytest_collectreport(self, report: CollectReport) -> None: + passed = report.outcome in ("passed", "skipped") + if passed: + if report.nodeid in self.lastfailed: + self.lastfailed.pop(report.nodeid) + self.lastfailed.update((item.nodeid, True) for item in report.result) + else: + self.lastfailed[report.nodeid] = True + + @hookimpl(hookwrapper=True, tryfirst=True) + def pytest_collection_modifyitems( + self, config: Config, items: List[nodes.Item] + ) -> Generator[None, 
None, None]: + yield + + if not self.active: + return + + if self.lastfailed: + previously_failed = [] + previously_passed = [] + for item in items: + if item.nodeid in self.lastfailed: + previously_failed.append(item) + else: + previously_passed.append(item) + self._previously_failed_count = len(previously_failed) + + if not previously_failed: + # Running a subset of all tests with recorded failures + # only outside of it. + self._report_status = "%d known failures not in selected tests" % ( + len(self.lastfailed), + ) + else: + if self.config.getoption("lf"): + items[:] = previously_failed + config.hook.pytest_deselected(items=previously_passed) + else: # --failedfirst + items[:] = previously_failed + previously_passed + + noun = "failure" if self._previously_failed_count == 1 else "failures" + suffix = " first" if self.config.getoption("failedfirst") else "" + self._report_status = "rerun previous {count} {noun}{suffix}".format( + count=self._previously_failed_count, suffix=suffix, noun=noun + ) + + if self._skipped_files > 0: + files_noun = "file" if self._skipped_files == 1 else "files" + self._report_status += " (skipped {files} {files_noun})".format( + files=self._skipped_files, files_noun=files_noun + ) + else: + self._report_status = "no previously failed tests, " + if self.config.getoption("last_failed_no_failures") == "none": + self._report_status += "deselecting all items." + config.hook.pytest_deselected(items=items[:]) + items[:] = [] + else: + self._report_status += "not deselecting items." + + def pytest_sessionfinish(self, session: Session) -> None: + config = self.config + if config.getoption("cacheshow") or hasattr(config, "workerinput"): + return + + assert config.cache is not None + saved_lastfailed = config.cache.get("cache/lastfailed", {}) + if saved_lastfailed != self.lastfailed: + config.cache.set("cache/lastfailed", self.lastfailed) + + +class NFPlugin: + """Plugin which implements the --nf (run new-first) option.""" + + def __init__(self, config: Config) -> None: + self.config = config + self.active = config.option.newfirst + assert config.cache is not None + self.cached_nodeids = set(config.cache.get("cache/nodeids", [])) + + @hookimpl(hookwrapper=True, tryfirst=True) + def pytest_collection_modifyitems( + self, items: List[nodes.Item] + ) -> Generator[None, None, None]: + yield + + if self.active: + new_items: Dict[str, nodes.Item] = {} + other_items: Dict[str, nodes.Item] = {} + for item in items: + if item.nodeid not in self.cached_nodeids: + new_items[item.nodeid] = item + else: + other_items[item.nodeid] = item + + items[:] = self._get_increasing_order( + new_items.values() + ) + self._get_increasing_order(other_items.values()) + self.cached_nodeids.update(new_items) + else: + self.cached_nodeids.update(item.nodeid for item in items) + + def _get_increasing_order(self, items: Iterable[nodes.Item]) -> List[nodes.Item]: + return sorted(items, key=lambda item: item.path.stat().st_mtime, reverse=True) # type: ignore[no-any-return] + + def pytest_sessionfinish(self) -> None: + config = self.config + if config.getoption("cacheshow") or hasattr(config, "workerinput"): + return + + if config.getoption("collectonly"): + return + + assert config.cache is not None + config.cache.set("cache/nodeids", sorted(self.cached_nodeids)) + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group.addoption( + "--lf", + "--last-failed", + action="store_true", + dest="lf", + help="Rerun only the tests that failed " + "at the last run (or all if 
none failed)", + ) + group.addoption( + "--ff", + "--failed-first", + action="store_true", + dest="failedfirst", + help="Run all tests, but run the last failures first. " + "This may re-order tests and thus lead to " + "repeated fixture setup/teardown.", + ) + group.addoption( + "--nf", + "--new-first", + action="store_true", + dest="newfirst", + help="Run tests from new files first, then the rest of the tests " + "sorted by file mtime", + ) + group.addoption( + "--cache-show", + action="append", + nargs="?", + dest="cacheshow", + help=( + "Show cache contents, don't perform collection or tests. " + "Optional argument: glob (default: '*')." + ), + ) + group.addoption( + "--cache-clear", + action="store_true", + dest="cacheclear", + help="Remove all cache contents at start of test run", + ) + cache_dir_default = ".pytest_cache" + if "TOX_ENV_DIR" in os.environ: + cache_dir_default = os.path.join(os.environ["TOX_ENV_DIR"], cache_dir_default) + parser.addini("cache_dir", default=cache_dir_default, help="Cache directory path") + group.addoption( + "--lfnf", + "--last-failed-no-failures", + action="store", + dest="last_failed_no_failures", + choices=("all", "none"), + default="all", + help="Which tests to run with no previously (known) failures", + ) + + +def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]: + if config.option.cacheshow and not config.option.help: + from _pytest.main import wrap_session + + return wrap_session(config, cacheshow) + return None + + +@hookimpl(tryfirst=True) +def pytest_configure(config: Config) -> None: + config.cache = Cache.for_config(config, _ispytest=True) + config.pluginmanager.register(LFPlugin(config), "lfplugin") + config.pluginmanager.register(NFPlugin(config), "nfplugin") + + +@fixture +def cache(request: FixtureRequest) -> Cache: + """Return a cache object that can persist state between testing sessions. + + cache.get(key, default) + cache.set(key, value) + + Keys must be ``/`` separated strings, where the first part is usually the + name of your plugin or application to avoid clashes with other cache users. + + Values can be any object handled by the json stdlib module. + """ + assert request.config.cache is not None + return request.config.cache + + +def pytest_report_header(config: Config) -> Optional[str]: + """Display cachedir with --cache-show and if non-default.""" + if config.option.verbose > 0 or config.getini("cache_dir") != ".pytest_cache": + assert config.cache is not None + cachedir = config.cache._cachedir + # TODO: evaluate generating upward relative paths + # starting with .., ../.. 
if sensible + + try: + displaypath = cachedir.relative_to(config.rootpath) + except ValueError: + displaypath = cachedir + return f"cachedir: {displaypath}" + return None + + +def cacheshow(config: Config, session: Session) -> int: + from pprint import pformat + + assert config.cache is not None + + tw = TerminalWriter() + tw.line("cachedir: " + str(config.cache._cachedir)) + if not config.cache._cachedir.is_dir(): + tw.line("cache is empty") + return 0 + + glob = config.option.cacheshow[0] + if glob is None: + glob = "*" + + dummy = object() + basedir = config.cache._cachedir + vdir = basedir / Cache._CACHE_PREFIX_VALUES + tw.sep("-", "cache values for %r" % glob) + for valpath in sorted(x for x in vdir.rglob(glob) if x.is_file()): + key = str(valpath.relative_to(vdir)) + val = config.cache.get(key, dummy) + if val is dummy: + tw.line("%s contains unreadable content, will be ignored" % key) + else: + tw.line("%s contains:" % key) + for line in pformat(val).splitlines(): + tw.line(" " + line) + + ddir = basedir / Cache._CACHE_PREFIX_DIRS + if ddir.is_dir(): + contents = sorted(ddir.rglob(glob)) + tw.sep("-", "cache directories for %r" % glob) + for p in contents: + # if p.is_dir(): + # print("%s/" % p.relative_to(basedir)) + if p.is_file(): + key = str(p.relative_to(basedir)) + tw.line(f"{key} is a file of length {p.stat().st_size:d}") + return 0 diff --git a/venv/lib/python3.11/site-packages/_pytest/capture.py b/venv/lib/python3.11/site-packages/_pytest/capture.py new file mode 100644 index 0000000..a8ca086 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/capture.py @@ -0,0 +1,1082 @@ +"""Per-test stdout/stderr capturing mechanism.""" +import abc +import collections +import contextlib +import io +import os +import sys +from io import UnsupportedOperation +from tempfile import TemporaryFile +from types import TracebackType +from typing import Any +from typing import AnyStr +from typing import BinaryIO +from typing import Generator +from typing import Generic +from typing import Iterable +from typing import Iterator +from typing import List +from typing import NamedTuple +from typing import Optional +from typing import TextIO +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import Union + +from _pytest.compat import final +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import SubRequest +from _pytest.nodes import Collector +from _pytest.nodes import File +from _pytest.nodes import Item + +if TYPE_CHECKING: + from typing_extensions import Final + from typing_extensions import Literal + + _CaptureMethod = Literal["fd", "sys", "no", "tee-sys"] + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group._addoption( + "--capture", + action="store", + default="fd", + metavar="method", + choices=["fd", "sys", "no", "tee-sys"], + help="Per-test capturing method: one of fd|sys|no|tee-sys", + ) + group._addoption( + "-s", + action="store_const", + const="no", + dest="capture", + help="Shortcut for --capture=no", + ) + + +def _colorama_workaround() -> None: + """Ensure colorama is imported so that it attaches to the correct stdio + handles on Windows. + + colorama uses the terminal on import time. So if something does the + first import of colorama while I/O capture is active, colorama will + fail in various ways. 
+ """ + if sys.platform.startswith("win32"): + try: + import colorama # noqa: F401 + except ImportError: + pass + + +def _windowsconsoleio_workaround(stream: TextIO) -> None: + """Workaround for Windows Unicode console handling. + + Python 3.6 implemented Unicode console handling for Windows. This works + by reading/writing to the raw console handle using + ``{Read,Write}ConsoleW``. + + The problem is that we are going to ``dup2`` over the stdio file + descriptors when doing ``FDCapture`` and this will ``CloseHandle`` the + handles used by Python to write to the console. Though there is still some + weirdness and the console handle seems to only be closed randomly and not + on the first call to ``CloseHandle``, or maybe it gets reopened with the + same handle value when we suspend capturing. + + The workaround in this case will reopen stdio with a different fd which + also means a different handle by replicating the logic in + "Py_lifecycle.c:initstdio/create_stdio". + + :param stream: + In practice ``sys.stdout`` or ``sys.stderr``, but given + here as parameter for unittesting purposes. + + See https://github.com/pytest-dev/py/issues/103. + """ + if not sys.platform.startswith("win32") or hasattr(sys, "pypy_version_info"): + return + + # Bail out if ``stream`` doesn't seem like a proper ``io`` stream (#2666). + if not hasattr(stream, "buffer"): # type: ignore[unreachable] + return + + buffered = hasattr(stream.buffer, "raw") + raw_stdout = stream.buffer.raw if buffered else stream.buffer # type: ignore[attr-defined] + + if not isinstance(raw_stdout, io._WindowsConsoleIO): # type: ignore[attr-defined] + return + + def _reopen_stdio(f, mode): + if not buffered and mode[0] == "w": + buffering = 0 + else: + buffering = -1 + + return io.TextIOWrapper( + open(os.dup(f.fileno()), mode, buffering), + f.encoding, + f.errors, + f.newlines, + f.line_buffering, + ) + + sys.stdin = _reopen_stdio(sys.stdin, "rb") + sys.stdout = _reopen_stdio(sys.stdout, "wb") + sys.stderr = _reopen_stdio(sys.stderr, "wb") + + +@hookimpl(hookwrapper=True) +def pytest_load_initial_conftests(early_config: Config): + ns = early_config.known_args_namespace + if ns.capture == "fd": + _windowsconsoleio_workaround(sys.stdout) + _colorama_workaround() + pluginmanager = early_config.pluginmanager + capman = CaptureManager(ns.capture) + pluginmanager.register(capman, "capturemanager") + + # Make sure that capturemanager is properly reset at final shutdown. + early_config.add_cleanup(capman.stop_global_capturing) + + # Finally trigger conftest loading but while capturing (issue #93). + capman.start_global_capturing() + outcome = yield + capman.suspend_global_capture() + if outcome.excinfo is not None: + out, err = capman.read_global_capture() + sys.stdout.write(out) + sys.stderr.write(err) + + +# IO Helpers. + + +class EncodedFile(io.TextIOWrapper): + __slots__ = () + + @property + def name(self) -> str: + # Ensure that file.name is a string. Workaround for a Python bug + # fixed in >=3.7.4: https://bugs.python.org/issue36015 + return repr(self.buffer) + + @property + def mode(self) -> str: + # TextIOWrapper doesn't expose a mode, but at least some of our + # tests check it. 
+ return self.buffer.mode.replace("b", "") + + +class CaptureIO(io.TextIOWrapper): + def __init__(self) -> None: + super().__init__(io.BytesIO(), encoding="UTF-8", newline="", write_through=True) + + def getvalue(self) -> str: + assert isinstance(self.buffer, io.BytesIO) + return self.buffer.getvalue().decode("UTF-8") + + +class TeeCaptureIO(CaptureIO): + def __init__(self, other: TextIO) -> None: + self._other = other + super().__init__() + + def write(self, s: str) -> int: + super().write(s) + return self._other.write(s) + + +class DontReadFromInput(TextIO): + @property + def encoding(self) -> str: + return sys.__stdin__.encoding + + def read(self, size: int = -1) -> str: + raise OSError( + "pytest: reading from stdin while output is captured! Consider using `-s`." + ) + + readline = read + + def __next__(self) -> str: + return self.readline() + + def readlines(self, hint: Optional[int] = -1) -> List[str]: + raise OSError( + "pytest: reading from stdin while output is captured! Consider using `-s`." + ) + + def __iter__(self) -> Iterator[str]: + return self + + def fileno(self) -> int: + raise UnsupportedOperation("redirected stdin is pseudofile, has no fileno()") + + def flush(self) -> None: + raise UnsupportedOperation("redirected stdin is pseudofile, has no flush()") + + def isatty(self) -> bool: + return False + + def close(self) -> None: + pass + + def readable(self) -> bool: + return False + + def seek(self, offset: int, whence: int = 0) -> int: + raise UnsupportedOperation("redirected stdin is pseudofile, has no seek(int)") + + def seekable(self) -> bool: + return False + + def tell(self) -> int: + raise UnsupportedOperation("redirected stdin is pseudofile, has no tell()") + + def truncate(self, size: Optional[int] = None) -> int: + raise UnsupportedOperation("cannot truncate stdin") + + def write(self, data: str) -> int: + raise UnsupportedOperation("cannot write to stdin") + + def writelines(self, lines: Iterable[str]) -> None: + raise UnsupportedOperation("Cannot write to stdin") + + def writable(self) -> bool: + return False + + def __enter__(self) -> "DontReadFromInput": + return self + + def __exit__( + self, + type: Optional[Type[BaseException]], + value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> None: + pass + + @property + def buffer(self) -> BinaryIO: + # The str/bytes doesn't actually matter in this type, so OK to fake. + return self # type: ignore[return-value] + + +# Capture classes. 
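The sys-level capture classes below all reduce to one trick: swap the `sys.stdout` / `sys.stderr` objects for an in-memory text buffer (a `CaptureIO`), read the buffer on `snap()`, and put the original object back on `done()`. A minimal, self-contained sketch of that idea, independent of this module (`DemoCaptureIO` is a hypothetical stand-in for the `CaptureIO` class above):

```python
import io
import sys


class DemoCaptureIO(io.TextIOWrapper):
    """In-memory text stream, mirroring CaptureIO above."""

    def __init__(self) -> None:
        # write_through=True pushes every write straight into the BytesIO.
        super().__init__(io.BytesIO(), encoding="UTF-8", newline="", write_through=True)

    def getvalue(self) -> str:
        assert isinstance(self.buffer, io.BytesIO)
        return self.buffer.getvalue().decode("UTF-8")


old = sys.stdout
sys.stdout = DemoCaptureIO()           # roughly what SysCapture.start() does
try:
    print("hello")                     # lands in the in-memory buffer
    captured = sys.stdout.getvalue()   # roughly SysCapture.snap()
finally:
    sys.stdout = old                   # roughly SysCapture.done()

assert captured == "hello\n"
```

The FD-level classes further down apply the same life cycle one layer lower, redirecting the raw file descriptors with `os.dup2()` instead of reassigning the `sys` attributes.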
+ + +class CaptureBase(abc.ABC, Generic[AnyStr]): + EMPTY_BUFFER: AnyStr + + @abc.abstractmethod + def __init__(self, fd: int) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def start(self) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def done(self) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def suspend(self) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def resume(self) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def writeorg(self, data: AnyStr) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def snap(self) -> AnyStr: + raise NotImplementedError() + + +patchsysdict = {0: "stdin", 1: "stdout", 2: "stderr"} + + +class NoCapture(CaptureBase[str]): + EMPTY_BUFFER = "" + + def __init__(self, fd: int) -> None: + pass + + def start(self) -> None: + pass + + def done(self) -> None: + pass + + def suspend(self) -> None: + pass + + def resume(self) -> None: + pass + + def snap(self) -> str: + return "" + + def writeorg(self, data: str) -> None: + pass + + +class SysCaptureBase(CaptureBase[AnyStr]): + def __init__( + self, fd: int, tmpfile: Optional[TextIO] = None, *, tee: bool = False + ) -> None: + name = patchsysdict[fd] + self._old: TextIO = getattr(sys, name) + self.name = name + if tmpfile is None: + if name == "stdin": + tmpfile = DontReadFromInput() + else: + tmpfile = CaptureIO() if not tee else TeeCaptureIO(self._old) + self.tmpfile = tmpfile + self._state = "initialized" + + def repr(self, class_name: str) -> str: + return "<{} {} _old={} _state={!r} tmpfile={!r}>".format( + class_name, + self.name, + hasattr(self, "_old") and repr(self._old) or "", + self._state, + self.tmpfile, + ) + + def __repr__(self) -> str: + return "<{} {} _old={} _state={!r} tmpfile={!r}>".format( + self.__class__.__name__, + self.name, + hasattr(self, "_old") and repr(self._old) or "", + self._state, + self.tmpfile, + ) + + def _assert_state(self, op: str, states: Tuple[str, ...]) -> None: + assert ( + self._state in states + ), "cannot {} in state {!r}: expected one of {}".format( + op, self._state, ", ".join(states) + ) + + def start(self) -> None: + self._assert_state("start", ("initialized",)) + setattr(sys, self.name, self.tmpfile) + self._state = "started" + + def done(self) -> None: + self._assert_state("done", ("initialized", "started", "suspended", "done")) + if self._state == "done": + return + setattr(sys, self.name, self._old) + del self._old + self.tmpfile.close() + self._state = "done" + + def suspend(self) -> None: + self._assert_state("suspend", ("started", "suspended")) + setattr(sys, self.name, self._old) + self._state = "suspended" + + def resume(self) -> None: + self._assert_state("resume", ("started", "suspended")) + if self._state == "started": + return + setattr(sys, self.name, self.tmpfile) + self._state = "started" + + +class SysCaptureBinary(SysCaptureBase[bytes]): + EMPTY_BUFFER = b"" + + def snap(self) -> bytes: + self._assert_state("snap", ("started", "suspended")) + self.tmpfile.seek(0) + res = self.tmpfile.buffer.read() + self.tmpfile.seek(0) + self.tmpfile.truncate() + return res + + def writeorg(self, data: bytes) -> None: + self._assert_state("writeorg", ("started", "suspended")) + self._old.flush() + self._old.buffer.write(data) + self._old.buffer.flush() + + +class SysCapture(SysCaptureBase[str]): + EMPTY_BUFFER = "" + + def snap(self) -> str: + self._assert_state("snap", ("started", "suspended")) + assert isinstance(self.tmpfile, CaptureIO) + res = 
self.tmpfile.getvalue() + self.tmpfile.seek(0) + self.tmpfile.truncate() + return res + + def writeorg(self, data: str) -> None: + self._assert_state("writeorg", ("started", "suspended")) + self._old.write(data) + self._old.flush() + + +class FDCaptureBase(CaptureBase[AnyStr]): + def __init__(self, targetfd: int) -> None: + self.targetfd = targetfd + + try: + os.fstat(targetfd) + except OSError: + # FD capturing is conceptually simple -- create a temporary file, + # redirect the FD to it, redirect back when done. But when the + # target FD is invalid it throws a wrench into this lovely scheme. + # + # Tests themselves shouldn't care if the FD is valid, FD capturing + # should work regardless of external circumstances. So falling back + # to just sys capturing is not a good option. + # + # Further complications are the need to support suspend() and the + # possibility of FD reuse (e.g. the tmpfile getting the very same + # target FD). The following approach is robust, I believe. + self.targetfd_invalid: Optional[int] = os.open(os.devnull, os.O_RDWR) + os.dup2(self.targetfd_invalid, targetfd) + else: + self.targetfd_invalid = None + self.targetfd_save = os.dup(targetfd) + + if targetfd == 0: + self.tmpfile = open(os.devnull, encoding="utf-8") + self.syscapture: CaptureBase[str] = SysCapture(targetfd) + else: + self.tmpfile = EncodedFile( + TemporaryFile(buffering=0), + encoding="utf-8", + errors="replace", + newline="", + write_through=True, + ) + if targetfd in patchsysdict: + self.syscapture = SysCapture(targetfd, self.tmpfile) + else: + self.syscapture = NoCapture(targetfd) + + self._state = "initialized" + + def __repr__(self) -> str: + return "<{} {} oldfd={} _state={!r} tmpfile={!r}>".format( + self.__class__.__name__, + self.targetfd, + self.targetfd_save, + self._state, + self.tmpfile, + ) + + def _assert_state(self, op: str, states: Tuple[str, ...]) -> None: + assert ( + self._state in states + ), "cannot {} in state {!r}: expected one of {}".format( + op, self._state, ", ".join(states) + ) + + def start(self) -> None: + """Start capturing on targetfd using memorized tmpfile.""" + self._assert_state("start", ("initialized",)) + os.dup2(self.tmpfile.fileno(), self.targetfd) + self.syscapture.start() + self._state = "started" + + def done(self) -> None: + """Stop capturing, restore streams, return original capture file, + seeked to position zero.""" + self._assert_state("done", ("initialized", "started", "suspended", "done")) + if self._state == "done": + return + os.dup2(self.targetfd_save, self.targetfd) + os.close(self.targetfd_save) + if self.targetfd_invalid is not None: + if self.targetfd_invalid != self.targetfd: + os.close(self.targetfd) + os.close(self.targetfd_invalid) + self.syscapture.done() + self.tmpfile.close() + self._state = "done" + + def suspend(self) -> None: + self._assert_state("suspend", ("started", "suspended")) + if self._state == "suspended": + return + self.syscapture.suspend() + os.dup2(self.targetfd_save, self.targetfd) + self._state = "suspended" + + def resume(self) -> None: + self._assert_state("resume", ("started", "suspended")) + if self._state == "started": + return + self.syscapture.resume() + os.dup2(self.tmpfile.fileno(), self.targetfd) + self._state = "started" + + +class FDCaptureBinary(FDCaptureBase[bytes]): + """Capture IO to/from a given OS-level file descriptor. + + snap() produces `bytes`. 
+ """ + + EMPTY_BUFFER = b"" + + def snap(self) -> bytes: + self._assert_state("snap", ("started", "suspended")) + self.tmpfile.seek(0) + res = self.tmpfile.buffer.read() + self.tmpfile.seek(0) + self.tmpfile.truncate() + return res + + def writeorg(self, data: bytes) -> None: + """Write to original file descriptor.""" + self._assert_state("writeorg", ("started", "suspended")) + os.write(self.targetfd_save, data) + + +class FDCapture(FDCaptureBase[str]): + """Capture IO to/from a given OS-level file descriptor. + + snap() produces text. + """ + + EMPTY_BUFFER = "" + + def snap(self) -> str: + self._assert_state("snap", ("started", "suspended")) + self.tmpfile.seek(0) + res = self.tmpfile.read() + self.tmpfile.seek(0) + self.tmpfile.truncate() + return res + + def writeorg(self, data: str) -> None: + """Write to original file descriptor.""" + self._assert_state("writeorg", ("started", "suspended")) + # XXX use encoding of original stream + os.write(self.targetfd_save, data.encode("utf-8")) + + +# MultiCapture + + +# Generic NamedTuple only supported since Python 3.11. +if sys.version_info >= (3, 11) or TYPE_CHECKING: + + @final + class CaptureResult(NamedTuple, Generic[AnyStr]): + """The result of :method:`CaptureFixture.readouterr`.""" + + out: AnyStr + err: AnyStr + +else: + + class CaptureResult( + collections.namedtuple("CaptureResult", ["out", "err"]), Generic[AnyStr] + ): + """The result of :method:`CaptureFixture.readouterr`.""" + + __slots__ = () + + +class MultiCapture(Generic[AnyStr]): + _state = None + _in_suspended = False + + def __init__( + self, + in_: Optional[CaptureBase[AnyStr]], + out: Optional[CaptureBase[AnyStr]], + err: Optional[CaptureBase[AnyStr]], + ) -> None: + self.in_: Optional[CaptureBase[AnyStr]] = in_ + self.out: Optional[CaptureBase[AnyStr]] = out + self.err: Optional[CaptureBase[AnyStr]] = err + + def __repr__(self) -> str: + return "".format( + self.out, + self.err, + self.in_, + self._state, + self._in_suspended, + ) + + def start_capturing(self) -> None: + self._state = "started" + if self.in_: + self.in_.start() + if self.out: + self.out.start() + if self.err: + self.err.start() + + def pop_outerr_to_orig(self) -> Tuple[AnyStr, AnyStr]: + """Pop current snapshot out/err capture and flush to orig streams.""" + out, err = self.readouterr() + if out: + assert self.out is not None + self.out.writeorg(out) + if err: + assert self.err is not None + self.err.writeorg(err) + return out, err + + def suspend_capturing(self, in_: bool = False) -> None: + self._state = "suspended" + if self.out: + self.out.suspend() + if self.err: + self.err.suspend() + if in_ and self.in_: + self.in_.suspend() + self._in_suspended = True + + def resume_capturing(self) -> None: + self._state = "started" + if self.out: + self.out.resume() + if self.err: + self.err.resume() + if self._in_suspended: + assert self.in_ is not None + self.in_.resume() + self._in_suspended = False + + def stop_capturing(self) -> None: + """Stop capturing and reset capturing streams.""" + if self._state == "stopped": + raise ValueError("was already stopped") + self._state = "stopped" + if self.out: + self.out.done() + if self.err: + self.err.done() + if self.in_: + self.in_.done() + + def is_started(self) -> bool: + """Whether actively capturing -- not suspended or stopped.""" + return self._state == "started" + + def readouterr(self) -> CaptureResult[AnyStr]: + out = self.out.snap() if self.out else "" + err = self.err.snap() if self.err else "" + # TODO: This type error is real, need to fix. 
+        return CaptureResult(out, err)  # type: ignore[arg-type]
+
+
+def _get_multicapture(method: "_CaptureMethod") -> MultiCapture[str]:
+    if method == "fd":
+        return MultiCapture(in_=FDCapture(0), out=FDCapture(1), err=FDCapture(2))
+    elif method == "sys":
+        return MultiCapture(in_=SysCapture(0), out=SysCapture(1), err=SysCapture(2))
+    elif method == "no":
+        return MultiCapture(in_=None, out=None, err=None)
+    elif method == "tee-sys":
+        return MultiCapture(
+            in_=None, out=SysCapture(1, tee=True), err=SysCapture(2, tee=True)
+        )
+    raise ValueError(f"unknown capturing method: {method!r}")
+
+
+# CaptureManager and CaptureFixture
+
+
+class CaptureManager:
+    """The capture plugin.
+
+    Makes sure that the appropriate capture method is enabled/disabled during
+    collection and each test phase (setup, call, teardown). After each of
+    those points, the captured output is obtained and attached to the
+    collection/runtest report.
+
+    There are two levels of capture:
+
+    * global: enabled by default and can be suppressed by the ``-s``
+      option. This is always enabled/disabled during collection and each test
+      phase.
+
+    * fixture: when a test function or one of its fixtures depends on the
+      ``capsys`` or ``capfd`` fixtures. In this case special handling is
+      needed to ensure the fixtures take precedence over the global capture.
+    """
+
+    def __init__(self, method: "_CaptureMethod") -> None:
+        self._method: Final = method
+        self._global_capturing: Optional[MultiCapture[str]] = None
+        self._capture_fixture: Optional[CaptureFixture[Any]] = None
+
+    def __repr__(self) -> str:
+        return "<CaptureManager _method={!r} _global_capturing={!r} _capture_fixture={!r}>".format(
+            self._method, self._global_capturing, self._capture_fixture
+        )
+
+    def is_capturing(self) -> Union[str, bool]:
+        if self.is_globally_capturing():
+            return "global"
+        if self._capture_fixture:
+            return "fixture %s" % self._capture_fixture.request.fixturename
+        return False
+
+    # Global capturing control
+
+    def is_globally_capturing(self) -> bool:
+        return self._method != "no"
+
+    def start_global_capturing(self) -> None:
+        assert self._global_capturing is None
+        self._global_capturing = _get_multicapture(self._method)
+        self._global_capturing.start_capturing()
+
+    def stop_global_capturing(self) -> None:
+        if self._global_capturing is not None:
+            self._global_capturing.pop_outerr_to_orig()
+            self._global_capturing.stop_capturing()
+            self._global_capturing = None
+
+    def resume_global_capture(self) -> None:
+        # During teardown of the python process, and on rare occasions, capture
+        # attributes can be `None` while trying to resume global capture.
+        if self._global_capturing is not None:
+            self._global_capturing.resume_capturing()
+
+    def suspend_global_capture(self, in_: bool = False) -> None:
+        if self._global_capturing is not None:
+            self._global_capturing.suspend_capturing(in_=in_)
+
+    def suspend(self, in_: bool = False) -> None:
+        # Need to undo local capsys-et-al if it exists before disabling global capture.
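+        # (The fixture capture is layered on top of the global capture, so it
+        # is suspended first here; resume() below restores the global capture
+        # before the fixture one.)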
+ self.suspend_fixture() + self.suspend_global_capture(in_) + + def resume(self) -> None: + self.resume_global_capture() + self.resume_fixture() + + def read_global_capture(self) -> CaptureResult[str]: + assert self._global_capturing is not None + return self._global_capturing.readouterr() + + # Fixture Control + + def set_fixture(self, capture_fixture: "CaptureFixture[Any]") -> None: + if self._capture_fixture: + current_fixture = self._capture_fixture.request.fixturename + requested_fixture = capture_fixture.request.fixturename + capture_fixture.request.raiseerror( + "cannot use {} and {} at the same time".format( + requested_fixture, current_fixture + ) + ) + self._capture_fixture = capture_fixture + + def unset_fixture(self) -> None: + self._capture_fixture = None + + def activate_fixture(self) -> None: + """If the current item is using ``capsys`` or ``capfd``, activate + them so they take precedence over the global capture.""" + if self._capture_fixture: + self._capture_fixture._start() + + def deactivate_fixture(self) -> None: + """Deactivate the ``capsys`` or ``capfd`` fixture of this item, if any.""" + if self._capture_fixture: + self._capture_fixture.close() + + def suspend_fixture(self) -> None: + if self._capture_fixture: + self._capture_fixture._suspend() + + def resume_fixture(self) -> None: + if self._capture_fixture: + self._capture_fixture._resume() + + # Helper context managers + + @contextlib.contextmanager + def global_and_fixture_disabled(self) -> Generator[None, None, None]: + """Context manager to temporarily disable global and current fixture capturing.""" + do_fixture = self._capture_fixture and self._capture_fixture._is_started() + if do_fixture: + self.suspend_fixture() + do_global = self._global_capturing and self._global_capturing.is_started() + if do_global: + self.suspend_global_capture() + try: + yield + finally: + if do_global: + self.resume_global_capture() + if do_fixture: + self.resume_fixture() + + @contextlib.contextmanager + def item_capture(self, when: str, item: Item) -> Generator[None, None, None]: + self.resume_global_capture() + self.activate_fixture() + try: + yield + finally: + self.deactivate_fixture() + self.suspend_global_capture(in_=False) + + out, err = self.read_global_capture() + item.add_report_section(when, "stdout", out) + item.add_report_section(when, "stderr", err) + + # Hooks + + @hookimpl(hookwrapper=True) + def pytest_make_collect_report(self, collector: Collector): + if isinstance(collector, File): + self.resume_global_capture() + outcome = yield + self.suspend_global_capture() + out, err = self.read_global_capture() + rep = outcome.get_result() + if out: + rep.sections.append(("Captured stdout", out)) + if err: + rep.sections.append(("Captured stderr", err)) + else: + yield + + @hookimpl(hookwrapper=True) + def pytest_runtest_setup(self, item: Item) -> Generator[None, None, None]: + with self.item_capture("setup", item): + yield + + @hookimpl(hookwrapper=True) + def pytest_runtest_call(self, item: Item) -> Generator[None, None, None]: + with self.item_capture("call", item): + yield + + @hookimpl(hookwrapper=True) + def pytest_runtest_teardown(self, item: Item) -> Generator[None, None, None]: + with self.item_capture("teardown", item): + yield + + @hookimpl(tryfirst=True) + def pytest_keyboard_interrupt(self) -> None: + self.stop_global_capturing() + + @hookimpl(tryfirst=True) + def pytest_internalerror(self) -> None: + self.stop_global_capturing() + + +class CaptureFixture(Generic[AnyStr]): + """Object returned by the 
:fixture:`capsys`, :fixture:`capsysbinary`,
+    :fixture:`capfd` and :fixture:`capfdbinary` fixtures."""
+
+    def __init__(
+        self,
+        captureclass: Type[CaptureBase[AnyStr]],
+        request: SubRequest,
+        *,
+        _ispytest: bool = False,
+    ) -> None:
+        check_ispytest(_ispytest)
+        self.captureclass: Type[CaptureBase[AnyStr]] = captureclass
+        self.request = request
+        self._capture: Optional[MultiCapture[AnyStr]] = None
+        self._captured_out: AnyStr = self.captureclass.EMPTY_BUFFER
+        self._captured_err: AnyStr = self.captureclass.EMPTY_BUFFER
+
+    def _start(self) -> None:
+        if self._capture is None:
+            self._capture = MultiCapture(
+                in_=None,
+                out=self.captureclass(1),
+                err=self.captureclass(2),
+            )
+            self._capture.start_capturing()
+
+    def close(self) -> None:
+        if self._capture is not None:
+            out, err = self._capture.pop_outerr_to_orig()
+            self._captured_out += out
+            self._captured_err += err
+            self._capture.stop_capturing()
+            self._capture = None
+
+    def readouterr(self) -> CaptureResult[AnyStr]:
+        """Read and return the captured output so far, resetting the internal
+        buffer.
+
+        :returns:
+            The captured content as a namedtuple with ``out`` and ``err``
+            string attributes.
+        """
+        captured_out, captured_err = self._captured_out, self._captured_err
+        if self._capture is not None:
+            out, err = self._capture.readouterr()
+            captured_out += out
+            captured_err += err
+        self._captured_out = self.captureclass.EMPTY_BUFFER
+        self._captured_err = self.captureclass.EMPTY_BUFFER
+        return CaptureResult(captured_out, captured_err)
+
+    def _suspend(self) -> None:
+        """Suspend this fixture's own capturing temporarily."""
+        if self._capture is not None:
+            self._capture.suspend_capturing()
+
+    def _resume(self) -> None:
+        """Resume this fixture's own capturing temporarily."""
+        if self._capture is not None:
+            self._capture.resume_capturing()
+
+    def _is_started(self) -> bool:
+        """Whether actively capturing -- not disabled or closed."""
+        if self._capture is not None:
+            return self._capture.is_started()
+        return False
+
+    @contextlib.contextmanager
+    def disabled(self) -> Generator[None, None, None]:
+        """Temporarily disable capturing while inside the ``with`` block."""
+        capmanager: CaptureManager = self.request.config.pluginmanager.getplugin(
+            "capturemanager"
+        )
+        with capmanager.global_and_fixture_disabled():
+            yield
+
+
+# The fixtures.
+
+
+@fixture
+def capsys(request: SubRequest) -> Generator[CaptureFixture[str], None, None]:
+    r"""Enable text capturing of writes to ``sys.stdout`` and ``sys.stderr``.
+
+    The captured output is made available via ``capsys.readouterr()`` method
+    calls, which return a ``(out, err)`` namedtuple.
+    ``out`` and ``err`` will be ``str`` objects.
+
+    Returns an instance of :class:`CaptureFixture[str] <pytest.CaptureFixture>`.
+
+    Example:
+
+    .. code-block:: python
+
+        def test_output(capsys):
+            print("hello")
+            captured = capsys.readouterr()
+            assert captured.out == "hello\n"
+    """
+    capman: CaptureManager = request.config.pluginmanager.getplugin("capturemanager")
+    capture_fixture = CaptureFixture(SysCapture, request, _ispytest=True)
+    capman.set_fixture(capture_fixture)
+    capture_fixture._start()
+    yield capture_fixture
+    capture_fixture.close()
+    capman.unset_fixture()
+
+
+@fixture
+def capsysbinary(request: SubRequest) -> Generator[CaptureFixture[bytes], None, None]:
+    r"""Enable bytes capturing of writes to ``sys.stdout`` and ``sys.stderr``.
+
+    The captured output is made available via ``capsysbinary.readouterr()``
+    method calls, which return a ``(out, err)`` namedtuple.
+    ``out`` and ``err`` will be ``bytes`` objects.
+
+    Returns an instance of :class:`CaptureFixture[bytes] <pytest.CaptureFixture>`.
+
+    Example:
+
+    .. code-block:: python
+
+        def test_output(capsysbinary):
+            print("hello")
+            captured = capsysbinary.readouterr()
+            assert captured.out == b"hello\n"
+    """
+    capman: CaptureManager = request.config.pluginmanager.getplugin("capturemanager")
+    capture_fixture = CaptureFixture(SysCaptureBinary, request, _ispytest=True)
+    capman.set_fixture(capture_fixture)
+    capture_fixture._start()
+    yield capture_fixture
+    capture_fixture.close()
+    capman.unset_fixture()
+
+
+@fixture
+def capfd(request: SubRequest) -> Generator[CaptureFixture[str], None, None]:
+    r"""Enable text capturing of writes to file descriptors ``1`` and ``2``.
+
+    The captured output is made available via ``capfd.readouterr()`` method
+    calls, which return a ``(out, err)`` namedtuple.
+    ``out`` and ``err`` will be ``str`` objects.
+
+    Returns an instance of :class:`CaptureFixture[str] <pytest.CaptureFixture>`.
+
+    Example:
+
+    .. code-block:: python
+
+        def test_system_echo(capfd):
+            os.system('echo "hello"')
+            captured = capfd.readouterr()
+            assert captured.out == "hello\n"
+    """
+    capman: CaptureManager = request.config.pluginmanager.getplugin("capturemanager")
+    capture_fixture = CaptureFixture(FDCapture, request, _ispytest=True)
+    capman.set_fixture(capture_fixture)
+    capture_fixture._start()
+    yield capture_fixture
+    capture_fixture.close()
+    capman.unset_fixture()
+
+
+@fixture
+def capfdbinary(request: SubRequest) -> Generator[CaptureFixture[bytes], None, None]:
+    r"""Enable bytes capturing of writes to file descriptors ``1`` and ``2``.
+
+    The captured output is made available via ``capfdbinary.readouterr()`` method
+    calls, which return a ``(out, err)`` namedtuple.
+    ``out`` and ``err`` will be ``bytes`` objects.
+
+    Returns an instance of :class:`CaptureFixture[bytes] <pytest.CaptureFixture>`.
+
+    Example:
+
+    .. code-block:: python
+
+        def test_system_echo(capfdbinary):
+            os.system('echo "hello"')
+            captured = capfdbinary.readouterr()
+            assert captured.out == b"hello\n"
+
+    """
+    capman: CaptureManager = request.config.pluginmanager.getplugin("capturemanager")
+    capture_fixture = CaptureFixture(FDCaptureBinary, request, _ispytest=True)
+    capman.set_fixture(capture_fixture)
+    capture_fixture._start()
+    yield capture_fixture
+    capture_fixture.close()
+    capman.unset_fixture()
diff --git a/venv/lib/python3.11/site-packages/_pytest/compat.py b/venv/lib/python3.11/site-packages/_pytest/compat.py
new file mode 100644
index 0000000..352211d
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/_pytest/compat.py
@@ -0,0 +1,426 @@
+"""Python version compatibility code."""
+from __future__ import annotations
+
+import dataclasses
+import enum
+import functools
+import inspect
+import os
+import sys
+from inspect import Parameter
+from inspect import signature
+from pathlib import Path
+from typing import Any
+from typing import Callable
+from typing import Generic
+from typing import NoReturn
+from typing import TYPE_CHECKING
+from typing import TypeVar
+
+import py
+
+# fmt: off
+# Workaround for https://github.com/sphinx-doc/sphinx/issues/10351.
+# If `overload` is imported from `compat` instead of from `typing`,
+# Sphinx doesn't recognize it as `overload` and the API docs for
+# overloaded functions look good again. But type checkers handle
+# it fine.
+# fmt: on
+if True:
+    from typing import overload as overload
+
+if TYPE_CHECKING:
+    from typing_extensions import Final
+
+
+_T = TypeVar("_T")
+_S = TypeVar("_S")
+
+#: Constant to prepare for pylib path replacements/lazy proxies later on.
+# Intended for removal in pytest 8.0 or 9.0.
+
+# fmt: off
+# intentional space to create a fake difference for the verification
+LEGACY_PATH = py.path. local
+# fmt: on
+
+
+def legacy_path(path: str | os.PathLike[str]) -> LEGACY_PATH:
+    """Internal wrapper to prepare lazy proxies for legacy_path instances."""
+    return LEGACY_PATH(path)
+
+
+# fmt: off
+# Singleton type for NOTSET, as described in:
+# https://www.python.org/dev/peps/pep-0484/#support-for-singleton-types-in-unions
+class NotSetType(enum.Enum):
+    token = 0
+NOTSET: Final = NotSetType.token  # noqa: E305
+# fmt: on
+
+if sys.version_info >= (3, 8):
+    import importlib.metadata
+
+    importlib_metadata = importlib.metadata
+else:
+    import importlib_metadata as importlib_metadata  # noqa: F401
+
+
+def _format_args(func: Callable[..., Any]) -> str:
+    return str(signature(func))
+
+
+def is_generator(func: object) -> bool:
+    genfunc = inspect.isgeneratorfunction(func)
+    return genfunc and not iscoroutinefunction(func)
+
+
+def iscoroutinefunction(func: object) -> bool:
+    """Return True if func is a coroutine function (a function defined with async
+    def syntax, and doesn't contain yield), or a function decorated with
+    @asyncio.coroutine.
+
+    Note: copied and modified from Python 3.5's builtin coroutines.py to avoid
+    importing asyncio directly, which in turn also initializes the "logging"
+    module as a side-effect (see issue #8).
+    """
+    return inspect.iscoroutinefunction(func) or getattr(func, "_is_coroutine", False)
+
+
+def is_async_function(func: object) -> bool:
+    """Return True if the given function seems to be an async function or
+    an async generator."""
+    return iscoroutinefunction(func) or inspect.isasyncgenfunction(func)
+
+
+def getlocation(function, curdir: str | None = None) -> str:
+    function = get_real_func(function)
+    fn = Path(inspect.getfile(function))
+    lineno = function.__code__.co_firstlineno
+    if curdir is not None:
+        try:
+            relfn = fn.relative_to(curdir)
+        except ValueError:
+            pass
+        else:
+            return "%s:%d" % (relfn, lineno + 1)
+    return "%s:%d" % (fn, lineno + 1)
+
+
+def num_mock_patch_args(function) -> int:
+    """Return number of arguments used up by mock arguments (if any)."""
+    patchings = getattr(function, "patchings", None)
+    if not patchings:
+        return 0
+
+    mock_sentinel = getattr(sys.modules.get("mock"), "DEFAULT", object())
+    ut_mock_sentinel = getattr(sys.modules.get("unittest.mock"), "DEFAULT", object())
+
+    return len(
+        [
+            p
+            for p in patchings
+            if not p.attribute_name
+            and (p.new is mock_sentinel or p.new is ut_mock_sentinel)
+        ]
+    )
+
+
+def getfuncargnames(
+    function: Callable[..., Any],
+    *,
+    name: str = "",
+    is_method: bool = False,
+    cls: type | None = None,
+) -> tuple[str, ...]:
+    """Return the names of a function's mandatory arguments.
+
+    Should return the names of all function arguments that:
+    * Aren't bound to an instance or type as in instance or class methods.
+    * Don't have default values.
+    * Aren't bound with functools.partial.
+    * Aren't replaced with mocks.
+
+    The is_method and cls arguments indicate that the function should
+    be treated as a bound method even though it's not, unless (in the
+    case of cls only) the function is a static method.
+ + The name parameter should be the original name in which the function was collected. + """ + # TODO(RonnyPfannschmidt): This function should be refactored when we + # revisit fixtures. The fixture mechanism should ask the node for + # the fixture names, and not try to obtain directly from the + # function object well after collection has occurred. + + # The parameters attribute of a Signature object contains an + # ordered mapping of parameter names to Parameter instances. This + # creates a tuple of the names of the parameters that don't have + # defaults. + try: + parameters = signature(function).parameters + except (ValueError, TypeError) as e: + from _pytest.outcomes import fail + + fail( + f"Could not determine arguments of {function!r}: {e}", + pytrace=False, + ) + + arg_names = tuple( + p.name + for p in parameters.values() + if ( + p.kind is Parameter.POSITIONAL_OR_KEYWORD + or p.kind is Parameter.KEYWORD_ONLY + ) + and p.default is Parameter.empty + ) + if not name: + name = function.__name__ + + # If this function should be treated as a bound method even though + # it's passed as an unbound method or function, remove the first + # parameter name. + if is_method or ( + # Not using `getattr` because we don't want to resolve the staticmethod. + # Not using `cls.__dict__` because we want to check the entire MRO. + cls + and not isinstance( + inspect.getattr_static(cls, name, default=None), staticmethod + ) + ): + arg_names = arg_names[1:] + # Remove any names that will be replaced with mocks. + if hasattr(function, "__wrapped__"): + arg_names = arg_names[num_mock_patch_args(function) :] + return arg_names + + +def get_default_arg_names(function: Callable[..., Any]) -> tuple[str, ...]: + # Note: this code intentionally mirrors the code at the beginning of + # getfuncargnames, to get the arguments which were excluded from its result + # because they had default values. + return tuple( + p.name + for p in signature(function).parameters.values() + if p.kind in (Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY) + and p.default is not Parameter.empty + ) + + +_non_printable_ascii_translate_table = { + i: f"\\x{i:02x}" for i in range(128) if i not in range(32, 127) +} +_non_printable_ascii_translate_table.update( + {ord("\t"): "\\t", ord("\r"): "\\r", ord("\n"): "\\n"} +) + + +def _translate_non_printable(s: str) -> str: + return s.translate(_non_printable_ascii_translate_table) + + +STRING_TYPES = bytes, str + + +def _bytes_to_ascii(val: bytes) -> str: + return val.decode("ascii", "backslashreplace") + + +def ascii_escaped(val: bytes | str) -> str: + r"""If val is pure ASCII, return it as an str, otherwise, escape + bytes objects into a sequence of escaped bytes: + + b'\xc3\xb4\xc5\xd6' -> r'\xc3\xb4\xc5\xd6' + + and escapes unicode objects into a sequence of escaped unicode + ids, e.g.: + + r'4\nV\U00043efa\x0eMXWB\x1e\u3028\u15fd\xcd\U0007d944' + + Note: + The obvious "v.decode('unicode-escape')" will return + valid UTF-8 unicode if it finds them in bytes, but we + want to return escaped bytes for any byte, even if they match + a UTF-8 string. + """ + if isinstance(val, bytes): + ret = _bytes_to_ascii(val) + else: + ret = val.encode("unicode_escape").decode("ascii") + return _translate_non_printable(ret) + + +@dataclasses.dataclass +class _PytestWrapper: + """Dummy wrapper around a function object for internal use only. 
+ + Used to correctly unwrap the underlying function object when we are + creating fixtures, because we wrap the function object ourselves with a + decorator to issue warnings when the fixture function is called directly. + """ + + obj: Any + + +def get_real_func(obj): + """Get the real function object of the (possibly) wrapped object by + functools.wraps or functools.partial.""" + start_obj = obj + for i in range(100): + # __pytest_wrapped__ is set by @pytest.fixture when wrapping the fixture function + # to trigger a warning if it gets called directly instead of by pytest: we don't + # want to unwrap further than this otherwise we lose useful wrappings like @mock.patch (#3774) + new_obj = getattr(obj, "__pytest_wrapped__", None) + if isinstance(new_obj, _PytestWrapper): + obj = new_obj.obj + break + new_obj = getattr(obj, "__wrapped__", None) + if new_obj is None: + break + obj = new_obj + else: + from _pytest._io.saferepr import saferepr + + raise ValueError( + ("could not find real function of {start}\nstopped at {current}").format( + start=saferepr(start_obj), current=saferepr(obj) + ) + ) + if isinstance(obj, functools.partial): + obj = obj.func + return obj + + +def get_real_method(obj, holder): + """Attempt to obtain the real function object that might be wrapping + ``obj``, while at the same time returning a bound method to ``holder`` if + the original object was a bound method.""" + try: + is_method = hasattr(obj, "__func__") + obj = get_real_func(obj) + except Exception: # pragma: no cover + return obj + if is_method and hasattr(obj, "__get__") and callable(obj.__get__): + obj = obj.__get__(holder) + return obj + + +def getimfunc(func): + try: + return func.__func__ + except AttributeError: + return func + + +def safe_getattr(object: Any, name: str, default: Any) -> Any: + """Like getattr but return default upon any Exception or any OutcomeException. + + Attribute access can potentially fail for 'evil' Python objects. + See issue #214. + It catches OutcomeException because of #2490 (issue #580), new outcomes + are derived from BaseException instead of Exception (for more details + check #2707). + """ + from _pytest.outcomes import TEST_OUTCOME + + try: + return getattr(object, name, default) + except TEST_OUTCOME: + return default + + +def safe_isclass(obj: object) -> bool: + """Ignore any exception via isinstance on Python 3.""" + try: + return inspect.isclass(obj) + except Exception: + return False + + +if TYPE_CHECKING: + if sys.version_info >= (3, 8): + from typing import final as final + else: + from typing_extensions import final as final +elif sys.version_info >= (3, 8): + from typing import final as final +else: + + def final(f): + return f + + +if sys.version_info >= (3, 8): + from functools import cached_property as cached_property +else: + + class cached_property(Generic[_S, _T]): + __slots__ = ("func", "__doc__") + + def __init__(self, func: Callable[[_S], _T]) -> None: + self.func = func + self.__doc__ = func.__doc__ + + @overload + def __get__( + self, instance: None, owner: type[_S] | None = ... + ) -> cached_property[_S, _T]: + ... + + @overload + def __get__(self, instance: _S, owner: type[_S] | None = ...) -> _T: + ... + + def __get__(self, instance, owner=None): + if instance is None: + return self + value = instance.__dict__[self.func.__name__] = self.func(instance) + return value + + +def get_user_id() -> int | None: + """Return the current user id, or None if we cannot get it reliably on the current platform.""" + # win32 does not have a getuid() function. 
+    # On Emscripten, getuid() is a stub that always returns 0.
+    if sys.platform in ("win32", "emscripten"):
+        return None
+    # getuid shouldn't fail, but cpython defines such a case.
+    # Let's hope for the best.
+    uid = os.getuid()
+    return uid if uid != -1 else None
+
+
+# Perform exhaustiveness checking.
+#
+# Consider this example:
+#
+#     MyUnion = Union[int, str]
+#
+#     def handle(x: MyUnion) -> int:
+#         if isinstance(x, int):
+#             return 1
+#         elif isinstance(x, str):
+#             return 2
+#         else:
+#             raise Exception('unreachable')
+#
+# Now suppose we add a new variant:
+#
+#     MyUnion = Union[int, str, bytes]
+#
+# After doing this, we must remember to go and update the handle
+# function to handle the new variant.
+#
+# With `assert_never` we can do better:
+#
+#     # raise Exception('unreachable')
+#     return assert_never(x)
+#
+# Now, if we forget to handle the new variant, the type-checker will emit a
+# compile-time error, instead of the runtime error we would have gotten
+# previously.
+#
+# This also works for Enums (if you use `is` to compare) and Literals.
+def assert_never(value: NoReturn) -> NoReturn:
+    assert False, f"Unhandled value: {value} ({type(value).__name__})"
diff --git a/venv/lib/python3.11/site-packages/_pytest/config/__init__.py b/venv/lib/python3.11/site-packages/_pytest/config/__init__.py
new file mode 100644
index 0000000..c9a4b7f
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/_pytest/config/__init__.py
@@ -0,0 +1,1816 @@
+"""Command line options, ini-file and conftest.py processing."""
+import argparse
+import collections.abc
+import copy
+import dataclasses
+import enum
+import glob
+import inspect
+import os
+import re
+import shlex
+import sys
+import types
+import warnings
+from functools import lru_cache
+from pathlib import Path
+from textwrap import dedent
+from types import FunctionType
+from types import TracebackType
+from typing import Any
+from typing import Callable
+from typing import cast
+from typing import Dict
+from typing import Generator
+from typing import IO
+from typing import Iterable
+from typing import Iterator
+from typing import List
+from typing import Optional
+from typing import Sequence
+from typing import Set
+from typing import TextIO
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import Union
+
+from pluggy import HookimplMarker
+from pluggy import HookspecMarker
+from pluggy import PluginManager
+
+import _pytest._code
+import _pytest.deprecated
+import _pytest.hookspec
+from .exceptions import PrintHelp as PrintHelp
+from .exceptions import UsageError as UsageError
+from .findpaths import determine_setup
+from _pytest._code import ExceptionInfo
+from _pytest._code import filter_traceback
+from _pytest._io import TerminalWriter
+from _pytest.compat import final
+from _pytest.compat import importlib_metadata  # type: ignore[attr-defined]
+from _pytest.outcomes import fail
+from _pytest.outcomes import Skipped
+from _pytest.pathlib import absolutepath
+from _pytest.pathlib import bestrelpath
+from _pytest.pathlib import import_path
+from _pytest.pathlib import ImportMode
+from _pytest.pathlib import resolve_package_path
+from _pytest.stash import Stash
+from _pytest.warning_types import PytestConfigWarning
+from _pytest.warning_types import warn_explicit_for
+
+if TYPE_CHECKING:
+    from _pytest._code.code import _TracebackStyle
+    from _pytest.terminal import TerminalReporter
+    from .argparsing import Argument
+
+
+_PluggyPlugin = object
+"""A type to represent plugin objects.
+ +Plugins can be any namespace, so we can't narrow it down much, but we use an +alias to make the intent clear. + +Ideally this type would be provided by pluggy itself. +""" + + +hookimpl = HookimplMarker("pytest") +hookspec = HookspecMarker("pytest") + + +@final +class ExitCode(enum.IntEnum): + """Encodes the valid exit codes by pytest. + + Currently users and plugins may supply other exit codes as well. + + .. versionadded:: 5.0 + """ + + #: Tests passed. + OK = 0 + #: Tests failed. + TESTS_FAILED = 1 + #: pytest was interrupted. + INTERRUPTED = 2 + #: An internal error got in the way. + INTERNAL_ERROR = 3 + #: pytest was misused. + USAGE_ERROR = 4 + #: pytest couldn't find tests. + NO_TESTS_COLLECTED = 5 + + +class ConftestImportFailure(Exception): + def __init__( + self, + path: Path, + excinfo: Tuple[Type[Exception], Exception, TracebackType], + ) -> None: + super().__init__(path, excinfo) + self.path = path + self.excinfo = excinfo + + def __str__(self) -> str: + return "{}: {} (from {})".format( + self.excinfo[0].__name__, self.excinfo[1], self.path + ) + + +def filter_traceback_for_conftest_import_failure( + entry: _pytest._code.TracebackEntry, +) -> bool: + """Filter tracebacks entries which point to pytest internals or importlib. + + Make a special case for importlib because we use it to import test modules and conftest files + in _pytest.pathlib.import_path. + """ + return filter_traceback(entry) and "importlib" not in str(entry.path).split(os.sep) + + +def main( + args: Optional[Union[List[str], "os.PathLike[str]"]] = None, + plugins: Optional[Sequence[Union[str, _PluggyPlugin]]] = None, +) -> Union[int, ExitCode]: + """Perform an in-process test run. + + :param args: List of command line arguments. + :param plugins: List of plugin objects to be auto-registered during initialization. + + :returns: An exit code. + """ + try: + try: + config = _prepareconfig(args, plugins) + except ConftestImportFailure as e: + exc_info = ExceptionInfo.from_exc_info(e.excinfo) + tw = TerminalWriter(sys.stderr) + tw.line(f"ImportError while loading conftest '{e.path}'.", red=True) + exc_info.traceback = exc_info.traceback.filter( + filter_traceback_for_conftest_import_failure + ) + exc_repr = ( + exc_info.getrepr(style="short", chain=False) + if exc_info.traceback + else exc_info.exconly() + ) + formatted_tb = str(exc_repr) + for line in formatted_tb.splitlines(): + tw.line(line.rstrip(), red=True) + return ExitCode.USAGE_ERROR + else: + try: + ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( + config=config + ) + try: + return ExitCode(ret) + except ValueError: + return ret + finally: + config._ensure_unconfigure() + except UsageError as e: + tw = TerminalWriter(sys.stderr) + for msg in e.args: + tw.line(f"ERROR: {msg}\n", red=True) + return ExitCode.USAGE_ERROR + + +def console_main() -> int: + """The CLI entry point of pytest. + + This function is not meant for programmable use; use `main()` instead. 
+ """ + # https://docs.python.org/3/library/signal.html#note-on-sigpipe + try: + code = main() + sys.stdout.flush() + return code + except BrokenPipeError: + # Python flushes standard streams on exit; redirect remaining output + # to devnull to avoid another BrokenPipeError at shutdown + devnull = os.open(os.devnull, os.O_WRONLY) + os.dup2(devnull, sys.stdout.fileno()) + return 1 # Python exits with error code 1 on EPIPE + + +class cmdline: # compatibility namespace + main = staticmethod(main) + + +def filename_arg(path: str, optname: str) -> str: + """Argparse type validator for filename arguments. + + :path: Path of filename. + :optname: Name of the option. + """ + if os.path.isdir(path): + raise UsageError(f"{optname} must be a filename, given: {path}") + return path + + +def directory_arg(path: str, optname: str) -> str: + """Argparse type validator for directory arguments. + + :path: Path of directory. + :optname: Name of the option. + """ + if not os.path.isdir(path): + raise UsageError(f"{optname} must be a directory, given: {path}") + return path + + +# Plugins that cannot be disabled via "-p no:X" currently. +essential_plugins = ( + "mark", + "main", + "runner", + "fixtures", + "helpconfig", # Provides -p. +) + +default_plugins = essential_plugins + ( + "python", + "terminal", + "debugging", + "unittest", + "capture", + "skipping", + "legacypath", + "tmpdir", + "monkeypatch", + "recwarn", + "pastebin", + "nose", + "assertion", + "junitxml", + "doctest", + "cacheprovider", + "freeze_support", + "setuponly", + "setupplan", + "stepwise", + "warnings", + "logging", + "reports", + "python_path", + *(["unraisableexception", "threadexception"] if sys.version_info >= (3, 8) else []), + "faulthandler", +) + +builtin_plugins = set(default_plugins) +builtin_plugins.add("pytester") +builtin_plugins.add("pytester_assertions") + + +def get_config( + args: Optional[List[str]] = None, + plugins: Optional[Sequence[Union[str, _PluggyPlugin]]] = None, +) -> "Config": + # subsequent calls to main will create a fresh instance + pluginmanager = PytestPluginManager() + config = Config( + pluginmanager, + invocation_params=Config.InvocationParams( + args=args or (), + plugins=plugins, + dir=Path.cwd(), + ), + ) + + if args is not None: + # Handle any "-p no:plugin" args. + pluginmanager.consider_preparse(args, exclude_only=True) + + for spec in default_plugins: + pluginmanager.import_plugin(spec) + + return config + + +def get_plugin_manager() -> "PytestPluginManager": + """Obtain a new instance of the + :py:class:`pytest.PytestPluginManager`, with default plugins + already loaded. + + This function can be used by integration with other tools, like hooking + into pytest to run tests into an IDE. 
+ """ + return get_config().pluginmanager + + +def _prepareconfig( + args: Optional[Union[List[str], "os.PathLike[str]"]] = None, + plugins: Optional[Sequence[Union[str, _PluggyPlugin]]] = None, +) -> "Config": + if args is None: + args = sys.argv[1:] + elif isinstance(args, os.PathLike): + args = [os.fspath(args)] + elif not isinstance(args, list): + msg = ( # type:ignore[unreachable] + "`args` parameter expected to be a list of strings, got: {!r} (type: {})" + ) + raise TypeError(msg.format(args, type(args))) + + config = get_config(args, plugins) + pluginmanager = config.pluginmanager + try: + if plugins: + for plugin in plugins: + if isinstance(plugin, str): + pluginmanager.consider_pluginarg(plugin) + else: + pluginmanager.register(plugin) + config = pluginmanager.hook.pytest_cmdline_parse( + pluginmanager=pluginmanager, args=args + ) + return config + except BaseException: + config._ensure_unconfigure() + raise + + +def _get_directory(path: Path) -> Path: + """Get the directory of a path - itself if already a directory.""" + if path.is_file(): + return path.parent + else: + return path + + +def _get_legacy_hook_marks( + method: Any, + hook_type: str, + opt_names: Tuple[str, ...], +) -> Dict[str, bool]: + if TYPE_CHECKING: + # abuse typeguard from importlib to avoid massive method type union thats lacking a alias + assert inspect.isroutine(method) + known_marks: set[str] = {m.name for m in getattr(method, "pytestmark", [])} + must_warn: list[str] = [] + opts: dict[str, bool] = {} + for opt_name in opt_names: + opt_attr = getattr(method, opt_name, AttributeError) + if opt_attr is not AttributeError: + must_warn.append(f"{opt_name}={opt_attr}") + opts[opt_name] = True + elif opt_name in known_marks: + must_warn.append(f"{opt_name}=True") + opts[opt_name] = True + else: + opts[opt_name] = False + if must_warn: + hook_opts = ", ".join(must_warn) + message = _pytest.deprecated.HOOK_LEGACY_MARKING.format( + type=hook_type, + fullname=method.__qualname__, + hook_opts=hook_opts, + ) + warn_explicit_for(cast(FunctionType, method), message) + return opts + + +@final +class PytestPluginManager(PluginManager): + """A :py:class:`pluggy.PluginManager ` with + additional pytest-specific functionality: + + * Loading plugins from the command line, ``PYTEST_PLUGINS`` env variable and + ``pytest_plugins`` global variables found in plugins being loaded. + * ``conftest.py`` loading during start-up. + """ + + def __init__(self) -> None: + import _pytest.assertion + + super().__init__("pytest") + + # -- State related to local conftest plugins. + # All loaded conftest modules. + self._conftest_plugins: Set[types.ModuleType] = set() + # All conftest modules applicable for a directory. + # This includes the directory's own conftest modules as well + # as those of its parent directories. + self._dirpath2confmods: Dict[Path, List[types.ModuleType]] = {} + # Cutoff directory above which conftests are no longer discovered. + self._confcutdir: Optional[Path] = None + # If set, conftest loading is skipped. + self._noconftest = False + + # _getconftestmodules()'s call to _get_directory() causes a stat + # storm when it's called potentially thousands of times in a test + # session (#9478), often with the same path, so cache it. 
+ self._get_directory = lru_cache(256)(_get_directory) + + self._duplicatepaths: Set[Path] = set() + + # plugins that were explicitly skipped with pytest.skip + # list of (module name, skip reason) + # previously we would issue a warning when a plugin was skipped, but + # since we refactored warnings as first citizens of Config, they are + # just stored here to be used later. + self.skipped_plugins: List[Tuple[str, str]] = [] + + self.add_hookspecs(_pytest.hookspec) + self.register(self) + if os.environ.get("PYTEST_DEBUG"): + err: IO[str] = sys.stderr + encoding: str = getattr(err, "encoding", "utf8") + try: + err = open( + os.dup(err.fileno()), + mode=err.mode, + buffering=1, + encoding=encoding, + ) + except Exception: + pass + self.trace.root.setwriter(err.write) + self.enable_tracing() + + # Config._consider_importhook will set a real object if required. + self.rewrite_hook = _pytest.assertion.DummyRewriteHook() + # Used to know when we are importing conftests after the pytest_configure stage. + self._configured = False + + def parse_hookimpl_opts(self, plugin: _PluggyPlugin, name: str): + # pytest hooks are always prefixed with "pytest_", + # so we avoid accessing possibly non-readable attributes + # (see issue #1073). + if not name.startswith("pytest_"): + return + # Ignore names which can not be hooks. + if name == "pytest_plugins": + return + + opts = super().parse_hookimpl_opts(plugin, name) + if opts is not None: + return opts + + method = getattr(plugin, name) + # Consider only actual functions for hooks (#3775). + if not inspect.isroutine(method): + return + # Collect unmarked hooks as long as they have the `pytest_' prefix. + return _get_legacy_hook_marks( + method, "impl", ("tryfirst", "trylast", "optionalhook", "hookwrapper") + ) + + def parse_hookspec_opts(self, module_or_class, name: str): + opts = super().parse_hookspec_opts(module_or_class, name) + if opts is None: + method = getattr(module_or_class, name) + if name.startswith("pytest_"): + opts = _get_legacy_hook_marks( + method, + "spec", + ("firstresult", "historic"), + ) + return opts + + def register( + self, plugin: _PluggyPlugin, name: Optional[str] = None + ) -> Optional[str]: + if name in _pytest.deprecated.DEPRECATED_EXTERNAL_PLUGINS: + warnings.warn( + PytestConfigWarning( + "{} plugin has been merged into the core, " + "please remove it from your requirements.".format( + name.replace("_", "-") + ) + ) + ) + return None + ret: Optional[str] = super().register(plugin, name) + if ret: + self.hook.pytest_plugin_registered.call_historic( + kwargs=dict(plugin=plugin, manager=self) + ) + + if isinstance(plugin, types.ModuleType): + self.consider_module(plugin) + return ret + + def getplugin(self, name: str): + # Support deprecated naming because plugins (xdist e.g.) use it. + plugin: Optional[_PluggyPlugin] = self.get_plugin(name) + return plugin + + def hasplugin(self, name: str) -> bool: + """Return whether a plugin with the given name is registered.""" + return bool(self.get_plugin(name)) + + def pytest_configure(self, config: "Config") -> None: + """:meta private:""" + # XXX now that the pluginmanager exposes hookimpl(tryfirst...) + # we should remove tryfirst/trylast as markers. + config.addinivalue_line( + "markers", + "tryfirst: mark a hook implementation function such that the " + "plugin machinery will try to call it first/as early as possible. 
" + "DEPRECATED, use @pytest.hookimpl(tryfirst=True) instead.", + ) + config.addinivalue_line( + "markers", + "trylast: mark a hook implementation function such that the " + "plugin machinery will try to call it last/as late as possible. " + "DEPRECATED, use @pytest.hookimpl(trylast=True) instead.", + ) + self._configured = True + + # + # Internal API for local conftest plugin handling. + # + def _set_initial_conftests( + self, + args: Sequence[Union[str, Path]], + pyargs: bool, + noconftest: bool, + rootpath: Path, + confcutdir: Optional[Path], + importmode: Union[ImportMode, str], + ) -> None: + """Load initial conftest files given a preparsed "namespace". + + As conftest files may add their own command line options which have + arguments ('--my-opt somepath') we might get some false positives. + All builtin and 3rd party plugins will have been loaded, however, so + common options will not confuse our logic here. + """ + current = Path.cwd() + self._confcutdir = absolutepath(current / confcutdir) if confcutdir else None + self._noconftest = noconftest + self._using_pyargs = pyargs + foundanchor = False + for intitial_path in args: + path = str(intitial_path) + # remove node-id syntax + i = path.find("::") + if i != -1: + path = path[:i] + anchor = absolutepath(current / path) + + # Ensure we do not break if what appears to be an anchor + # is in fact a very long option (#10169). + try: + anchor_exists = anchor.exists() + except OSError: # pragma: no cover + anchor_exists = False + if anchor_exists: + self._try_load_conftest(anchor, importmode, rootpath) + foundanchor = True + if not foundanchor: + self._try_load_conftest(current, importmode, rootpath) + + def _is_in_confcutdir(self, path: Path) -> bool: + """Whether a path is within the confcutdir. + + When false, should not load conftest. + """ + if self._confcutdir is None: + return True + return path not in self._confcutdir.parents + + def _try_load_conftest( + self, anchor: Path, importmode: Union[str, ImportMode], rootpath: Path + ) -> None: + self._getconftestmodules(anchor, importmode, rootpath) + # let's also consider test* subdirs + if anchor.is_dir(): + for x in anchor.glob("test*"): + if x.is_dir(): + self._getconftestmodules(x, importmode, rootpath) + + def _getconftestmodules( + self, path: Path, importmode: Union[str, ImportMode], rootpath: Path + ) -> Sequence[types.ModuleType]: + if self._noconftest: + return [] + + directory = self._get_directory(path) + + # Optimization: avoid repeated searches in the same directory. + # Assumes always called with same importmode and rootpath. + existing_clist = self._dirpath2confmods.get(directory) + if existing_clist is not None: + return existing_clist + + # XXX these days we may rather want to use config.rootpath + # and allow users to opt into looking into the rootdir parent + # directories instead of requiring to specify confcutdir. 
+ clist = [] + for parent in reversed((directory, *directory.parents)): + if self._is_in_confcutdir(parent): + conftestpath = parent / "conftest.py" + if conftestpath.is_file(): + mod = self._importconftest(conftestpath, importmode, rootpath) + clist.append(mod) + self._dirpath2confmods[directory] = clist + return clist + + def _rget_with_confmod( + self, + name: str, + path: Path, + importmode: Union[str, ImportMode], + rootpath: Path, + ) -> Tuple[types.ModuleType, Any]: + modules = self._getconftestmodules(path, importmode, rootpath=rootpath) + for mod in reversed(modules): + try: + return mod, getattr(mod, name) + except AttributeError: + continue + raise KeyError(name) + + def _importconftest( + self, conftestpath: Path, importmode: Union[str, ImportMode], rootpath: Path + ) -> types.ModuleType: + existing = self.get_plugin(str(conftestpath)) + if existing is not None: + return cast(types.ModuleType, existing) + + pkgpath = resolve_package_path(conftestpath) + if pkgpath is None: + _ensure_removed_sysmodule(conftestpath.stem) + + try: + mod = import_path(conftestpath, mode=importmode, root=rootpath) + except Exception as e: + assert e.__traceback__ is not None + exc_info = (type(e), e, e.__traceback__) + raise ConftestImportFailure(conftestpath, exc_info) from e + + self._check_non_top_pytest_plugins(mod, conftestpath) + + self._conftest_plugins.add(mod) + dirpath = conftestpath.parent + if dirpath in self._dirpath2confmods: + for path, mods in self._dirpath2confmods.items(): + if dirpath in path.parents or path == dirpath: + assert mod not in mods + mods.append(mod) + self.trace(f"loading conftestmodule {mod!r}") + self.consider_conftest(mod) + return mod + + def _check_non_top_pytest_plugins( + self, + mod: types.ModuleType, + conftestpath: Path, + ) -> None: + if ( + hasattr(mod, "pytest_plugins") + and self._configured + and not self._using_pyargs + ): + msg = ( + "Defining 'pytest_plugins' in a non-top-level conftest is no longer supported:\n" + "It affects the entire test suite instead of just below the conftest as expected.\n" + " {}\n" + "Please move it to a top level conftest file at the rootdir:\n" + " {}\n" + "For more information, visit:\n" + " https://docs.pytest.org/en/stable/deprecations.html#pytest-plugins-in-non-top-level-conftest-files" + ) + fail(msg.format(conftestpath, self._confcutdir), pytrace=False) + + # + # API for bootstrapping plugin loading + # + # + + def consider_preparse( + self, args: Sequence[str], *, exclude_only: bool = False + ) -> None: + """:meta private:""" + i = 0 + n = len(args) + while i < n: + opt = args[i] + i += 1 + if isinstance(opt, str): + if opt == "-p": + try: + parg = args[i] + except IndexError: + return + i += 1 + elif opt.startswith("-p"): + parg = opt[2:] + else: + continue + parg = parg.strip() + if exclude_only and not parg.startswith("no:"): + continue + self.consider_pluginarg(parg) + + def consider_pluginarg(self, arg: str) -> None: + """:meta private:""" + if arg.startswith("no:"): + name = arg[3:] + if name in essential_plugins: + raise UsageError("plugin %s cannot be disabled" % name) + + # PR #4304: remove stepwise if cacheprovider is blocked. + if name == "cacheprovider": + self.set_blocked("stepwise") + self.set_blocked("pytest_stepwise") + + self.set_blocked(name) + if not name.startswith("pytest_"): + self.set_blocked("pytest_" + name) + else: + name = arg + # Unblock the plugin. None indicates that it has been blocked. + # There is no interface with pluggy for this. 
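+ # -1 below is a sentinel default distinct from None: get() returning None
+ # means the plugin was explicitly blocked, while -1 means the name was
+ # never registered at all.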
+ if self._name2plugin.get(name, -1) is None:
+ del self._name2plugin[name]
+ if not name.startswith("pytest_"):
+ if self._name2plugin.get("pytest_" + name, -1) is None:
+ del self._name2plugin["pytest_" + name]
+ self.import_plugin(arg, consider_entry_points=True)
+
+ def consider_conftest(self, conftestmodule: types.ModuleType) -> None:
+ """:meta private:"""
+ self.register(conftestmodule, name=conftestmodule.__file__)
+
+ def consider_env(self) -> None:
+ """:meta private:"""
+ self._import_plugin_specs(os.environ.get("PYTEST_PLUGINS"))
+
+ def consider_module(self, mod: types.ModuleType) -> None:
+ """:meta private:"""
+ self._import_plugin_specs(getattr(mod, "pytest_plugins", []))
+
+ def _import_plugin_specs(
+ self, spec: Union[None, types.ModuleType, str, Sequence[str]]
+ ) -> None:
+ plugins = _get_plugin_specs_as_list(spec)
+ for import_spec in plugins:
+ self.import_plugin(import_spec)
+
+ def import_plugin(self, modname: str, consider_entry_points: bool = False) -> None:
+ """Import a plugin with ``modname``.
+
+ If ``consider_entry_points`` is True, entry point names are also
+ considered to find a plugin.
+ """
+ # Most often modname refers to builtin modules, e.g. "pytester",
+ # "terminal" or "capture". Those plugins are registered under their
+ # basename for historic purposes but must be imported with the
+ # _pytest prefix.
+ assert isinstance(modname, str), (
+ "module name as text required, got %r" % modname
+ )
+ if self.is_blocked(modname) or self.get_plugin(modname) is not None:
+ return
+
+ importspec = "_pytest." + modname if modname in builtin_plugins else modname
+ self.rewrite_hook.mark_rewrite(importspec)
+
+ if consider_entry_points:
+ loaded = self.load_setuptools_entrypoints("pytest11", name=modname)
+ if loaded:
+ return
+
+ try:
+ __import__(importspec)
+ except ImportError as e:
+ raise ImportError(
+ f'Error importing plugin "{modname}": {e.args[0]}'
+ ).with_traceback(e.__traceback__) from e
+
+ except Skipped as e:
+ self.skipped_plugins.append((modname, e.msg or ""))
+ else:
+ mod = sys.modules[importspec]
+ self.register(mod, modname)
+
+
+def _get_plugin_specs_as_list(
+ specs: Union[None, types.ModuleType, str, Sequence[str]]
+) -> List[str]:
+ """Parse a plugins specification into a list of plugin names."""
+ # None means empty.
+ if specs is None:
+ return []
+ # Workaround for #3899 - a submodule which happens to be called "pytest_plugins".
+ if isinstance(specs, types.ModuleType):
+ return []
+ # Comma-separated list.
+ if isinstance(specs, str):
+ return specs.split(",") if specs else []
+ # Direct specification.
+ if isinstance(specs, collections.abc.Sequence):
+ return list(specs)
+ raise UsageError(
+ "Plugins may be specified as a sequence or a ','-separated string of plugin names. Got: %r"
+ % specs
+ )
+
+
+def _ensure_removed_sysmodule(modname: str) -> None:
+ try:
+ del sys.modules[modname]
+ except KeyError:
+ pass
+
+
+class Notset:
+ def __repr__(self):
+ return "<NOTSET>"
+
+
+notset = Notset()
+
+
+def _iter_rewritable_modules(package_files: Iterable[str]) -> Iterator[str]:
+ """Given an iterable of file names in a source distribution, return the "names" that should
+ be marked for assertion rewrite.
+
+ For example the package "pytest_mock/__init__.py" should be added as "pytest_mock" in
+ the assertion rewrite mechanism.
+
+ This function has to deal with dist-info based distributions and egg based distributions
+ (which are still very much in use for "editable" installs). 
+
+ Here are the file names as seen in a dist-info based distribution:
+
+ pytest_mock/__init__.py
+ pytest_mock/_version.py
+ pytest_mock/plugin.py
+ pytest_mock.egg-info/PKG-INFO
+
+ Here are the file names as seen in an egg based distribution:
+
+ src/pytest_mock/__init__.py
+ src/pytest_mock/_version.py
+ src/pytest_mock/plugin.py
+ src/pytest_mock.egg-info/PKG-INFO
+ LICENSE
+ setup.py
+
+ We have to take into account those two distribution flavors in order to determine which
+ names should be considered for assertion rewriting.
+
+ More information:
+ https://github.com/pytest-dev/pytest-mock/issues/167
+ """
+ package_files = list(package_files)
+ seen_some = False
+ for fn in package_files:
+ is_simple_module = "/" not in fn and fn.endswith(".py")
+ is_package = fn.count("/") == 1 and fn.endswith("__init__.py")
+ if is_simple_module:
+ module_name, _ = os.path.splitext(fn)
+ # we ignore "setup.py" at the root of the distribution
+ # as well as editable installation finder modules made by setuptools
+ if module_name != "setup" and not module_name.startswith("__editable__"):
+ seen_some = True
+ yield module_name
+ elif is_package:
+ package_name = os.path.dirname(fn)
+ seen_some = True
+ yield package_name
+
+ if not seen_some:
+ # At this point we did not find any packages or modules suitable for assertion
+ # rewriting, so we try again by stripping the first path component (to account for
+ # "src" based source trees for example).
+ # This approach lets us have the common case continue to be fast, as egg-distributions
+ # are rarer.
+ new_package_files = []
+ for fn in package_files:
+ parts = fn.split("/")
+ new_fn = "/".join(parts[1:])
+ if new_fn:
+ new_package_files.append(new_fn)
+ if new_package_files:
+ yield from _iter_rewritable_modules(new_package_files)
+
+
+@final
+class Config:
+ """Access to configuration values, pluginmanager and plugin hooks.
+
+ :param PytestPluginManager pluginmanager:
+ A pytest PluginManager.
+
+ :param InvocationParams invocation_params:
+ Object containing parameters regarding the :func:`pytest.main`
+ invocation.
+ """
+
+ @final
+ @dataclasses.dataclass(frozen=True)
+ class InvocationParams:
+ """Holds parameters passed during :func:`pytest.main`.
+
+ The object attributes are read-only.
+
+ .. versionadded:: 5.1
+
+ .. note::
+
+ Note that the environment variable ``PYTEST_ADDOPTS`` and the ``addopts``
+ ini option are handled by pytest, and are not included in the ``args`` attribute.
+
+ Plugins accessing ``InvocationParams`` must be aware of that.
+ """
+
+ args: Tuple[str, ...]
+ """The command-line arguments as passed to :func:`pytest.main`."""
+ plugins: Optional[Sequence[Union[str, _PluggyPlugin]]]
+ """Extra plugins, might be `None`."""
+ dir: Path
+ """The directory from which :func:`pytest.main` was invoked."""
+
+ def __init__(
+ self,
+ *,
+ args: Iterable[str],
+ plugins: Optional[Sequence[Union[str, _PluggyPlugin]]],
+ dir: Path,
+ ) -> None:
+ object.__setattr__(self, "args", tuple(args))
+ object.__setattr__(self, "plugins", plugins)
+ object.__setattr__(self, "dir", dir)
+
+ class ArgsSource(enum.Enum):
+ """Indicates the source of the test arguments.
+
+ .. versionadded:: 7.2
+ """
+
+ #: Command line arguments.
+ ARGS = enum.auto()
+ #: Invocation directory.
+ INCOVATION_DIR = enum.auto()
+ #: 'testpaths' configuration value. 
+ TESTPATHS = enum.auto() + + def __init__( + self, + pluginmanager: PytestPluginManager, + *, + invocation_params: Optional[InvocationParams] = None, + ) -> None: + from .argparsing import Parser, FILE_OR_DIR + + if invocation_params is None: + invocation_params = self.InvocationParams( + args=(), plugins=None, dir=Path.cwd() + ) + + self.option = argparse.Namespace() + """Access to command line option as attributes. + + :type: argparse.Namespace + """ + + self.invocation_params = invocation_params + """The parameters with which pytest was invoked. + + :type: InvocationParams + """ + + _a = FILE_OR_DIR + self._parser = Parser( + usage=f"%(prog)s [options] [{_a}] [{_a}] [...]", + processopt=self._processopt, + _ispytest=True, + ) + self.pluginmanager = pluginmanager + """The plugin manager handles plugin registration and hook invocation. + + :type: PytestPluginManager + """ + + self.stash = Stash() + """A place where plugins can store information on the config for their + own use. + + :type: Stash + """ + # Deprecated alias. Was never public. Can be removed in a few releases. + self._store = self.stash + + from .compat import PathAwareHookProxy + + self.trace = self.pluginmanager.trace.root.get("config") + self.hook = PathAwareHookProxy(self.pluginmanager.hook) + self._inicache: Dict[str, Any] = {} + self._override_ini: Sequence[str] = () + self._opt2dest: Dict[str, str] = {} + self._cleanup: List[Callable[[], None]] = [] + self.pluginmanager.register(self, "pytestconfig") + self._configured = False + self.hook.pytest_addoption.call_historic( + kwargs=dict(parser=self._parser, pluginmanager=self.pluginmanager) + ) + self.args_source = Config.ArgsSource.ARGS + self.args: List[str] = [] + + if TYPE_CHECKING: + from _pytest.cacheprovider import Cache + + self.cache: Optional[Cache] = None + + @property + def rootpath(self) -> Path: + """The path to the :ref:`rootdir `. + + :type: pathlib.Path + + .. versionadded:: 6.1 + """ + return self._rootpath + + @property + def inipath(self) -> Optional[Path]: + """The path to the :ref:`configfile `. + + :type: Optional[pathlib.Path] + + .. versionadded:: 6.1 + """ + return self._inipath + + def add_cleanup(self, func: Callable[[], None]) -> None: + """Add a function to be called when the config object gets out of + use (usually coinciding with pytest_unconfigure).""" + self._cleanup.append(func) + + def _do_configure(self) -> None: + assert not self._configured + self._configured = True + with warnings.catch_warnings(): + warnings.simplefilter("default") + self.hook.pytest_configure.call_historic(kwargs=dict(config=self)) + + def _ensure_unconfigure(self) -> None: + if self._configured: + self._configured = False + self.hook.pytest_unconfigure(config=self) + self.hook.pytest_configure._call_history = [] + while self._cleanup: + fin = self._cleanup.pop() + fin() + + def get_terminal_writer(self) -> TerminalWriter: + terminalreporter: TerminalReporter = self.pluginmanager.get_plugin( + "terminalreporter" + ) + return terminalreporter._tw + + def pytest_cmdline_parse( + self, pluginmanager: PytestPluginManager, args: List[str] + ) -> "Config": + try: + self.parse(args) + except UsageError: + # Handle --version and --help here in a minimal fashion. + # This gets done via helpconfig normally, but its + # pytest_cmdline_main is not called in case of errors. 
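+ # Illustrative invocation: "pytest --version --no-such-flag" raises a
+ # UsageError in parse() above, yet the version is still printed below
+ # before the error is re-raised.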
+ if getattr(self.option, "version", False) or "--version" in args: + from _pytest.helpconfig import showversion + + showversion(self) + elif ( + getattr(self.option, "help", False) or "--help" in args or "-h" in args + ): + self._parser._getparser().print_help() + sys.stdout.write( + "\nNOTE: displaying only minimal help due to UsageError.\n\n" + ) + + raise + + return self + + def notify_exception( + self, + excinfo: ExceptionInfo[BaseException], + option: Optional[argparse.Namespace] = None, + ) -> None: + if option and getattr(option, "fulltrace", False): + style: _TracebackStyle = "long" + else: + style = "native" + excrepr = excinfo.getrepr( + funcargs=True, showlocals=getattr(option, "showlocals", False), style=style + ) + res = self.hook.pytest_internalerror(excrepr=excrepr, excinfo=excinfo) + if not any(res): + for line in str(excrepr).split("\n"): + sys.stderr.write("INTERNALERROR> %s\n" % line) + sys.stderr.flush() + + def cwd_relative_nodeid(self, nodeid: str) -> str: + # nodeid's are relative to the rootpath, compute relative to cwd. + if self.invocation_params.dir != self.rootpath: + fullpath = self.rootpath / nodeid + nodeid = bestrelpath(self.invocation_params.dir, fullpath) + return nodeid + + @classmethod + def fromdictargs(cls, option_dict, args) -> "Config": + """Constructor usable for subprocesses.""" + config = get_config(args) + config.option.__dict__.update(option_dict) + config.parse(args, addopts=False) + for x in config.option.plugins: + config.pluginmanager.consider_pluginarg(x) + return config + + def _processopt(self, opt: "Argument") -> None: + for name in opt._short_opts + opt._long_opts: + self._opt2dest[name] = opt.dest + + if hasattr(opt, "default"): + if not hasattr(self.option, opt.dest): + setattr(self.option, opt.dest, opt.default) + + @hookimpl(trylast=True) + def pytest_load_initial_conftests(self, early_config: "Config") -> None: + # We haven't fully parsed the command line arguments yet, so + # early_config.args it not set yet. But we need it for + # discovering the initial conftests. So "pre-run" the logic here. + # It will be done for real in `parse()`. 
+ args, args_source = early_config._decide_args( + args=early_config.known_args_namespace.file_or_dir, + pyargs=early_config.known_args_namespace.pyargs, + testpaths=early_config.getini("testpaths"), + invocation_dir=early_config.invocation_params.dir, + rootpath=early_config.rootpath, + warn=False, + ) + self.pluginmanager._set_initial_conftests( + args=args, + pyargs=early_config.known_args_namespace.pyargs, + noconftest=early_config.known_args_namespace.noconftest, + rootpath=early_config.rootpath, + confcutdir=early_config.known_args_namespace.confcutdir, + importmode=early_config.known_args_namespace.importmode, + ) + + def _initini(self, args: Sequence[str]) -> None: + ns, unknown_args = self._parser.parse_known_and_unknown_args( + args, namespace=copy.copy(self.option) + ) + rootpath, inipath, inicfg = determine_setup( + ns.inifilename, + ns.file_or_dir + unknown_args, + rootdir_cmd_arg=ns.rootdir or None, + config=self, + ) + self._rootpath = rootpath + self._inipath = inipath + self.inicfg = inicfg + self._parser.extra_info["rootdir"] = str(self.rootpath) + self._parser.extra_info["inifile"] = str(self.inipath) + self._parser.addini("addopts", "Extra command line options", "args") + self._parser.addini("minversion", "Minimally required pytest version") + self._parser.addini( + "required_plugins", + "Plugins that must be present for pytest to run", + type="args", + default=[], + ) + self._override_ini = ns.override_ini or () + + def _consider_importhook(self, args: Sequence[str]) -> None: + """Install the PEP 302 import hook if using assertion rewriting. + + Needs to parse the --assert= option from the commandline + and find all the installed plugins to mark them for rewriting + by the importhook. + """ + ns, unknown_args = self._parser.parse_known_and_unknown_args(args) + mode = getattr(ns, "assertmode", "plain") + if mode == "rewrite": + import _pytest.assertion + + try: + hook = _pytest.assertion.install_importhook(self) + except SystemError: + mode = "plain" + else: + self._mark_plugins_for_rewrite(hook) + self._warn_about_missing_assertion(mode) + + def _mark_plugins_for_rewrite(self, hook) -> None: + """Given an importhook, mark for rewrite any top-level + modules or packages in the distribution package for + all pytest plugins.""" + self.pluginmanager.rewrite_hook = hook + + if os.environ.get("PYTEST_DISABLE_PLUGIN_AUTOLOAD"): + # We don't autoload from setuptools entry points, no need to continue. + return + + package_files = ( + str(file) + for dist in importlib_metadata.distributions() + if any(ep.group == "pytest11" for ep in dist.entry_points) + for file in dist.files or [] + ) + + for name in _iter_rewritable_modules(package_files): + hook.mark_rewrite(name) + + def _validate_args(self, args: List[str], via: str) -> List[str]: + """Validate known args.""" + self._parser._config_source_hint = via # type: ignore + try: + self._parser.parse_known_and_unknown_args( + args, namespace=copy.copy(self.option) + ) + finally: + del self._parser._config_source_hint # type: ignore + + return args + + def _decide_args( + self, + *, + args: List[str], + pyargs: List[str], + testpaths: List[str], + invocation_dir: Path, + rootpath: Path, + warn: bool, + ) -> Tuple[List[str], ArgsSource]: + """Decide the args (initial paths/nodeids) to use given the relevant inputs. + + :param warn: Whether can issue warnings. 
+ """ + if args: + source = Config.ArgsSource.ARGS + result = args + else: + if invocation_dir == rootpath: + source = Config.ArgsSource.TESTPATHS + if pyargs: + result = testpaths + else: + result = [] + for path in testpaths: + result.extend(sorted(glob.iglob(path, recursive=True))) + if testpaths and not result: + if warn: + warning_text = ( + "No files were found in testpaths; " + "consider removing or adjusting your testpaths configuration. " + "Searching recursively from the current directory instead." + ) + self.issue_config_time_warning( + PytestConfigWarning(warning_text), stacklevel=3 + ) + else: + result = [] + if not result: + source = Config.ArgsSource.INCOVATION_DIR + result = [str(invocation_dir)] + return result, source + + def _preparse(self, args: List[str], addopts: bool = True) -> None: + if addopts: + env_addopts = os.environ.get("PYTEST_ADDOPTS", "") + if len(env_addopts): + args[:] = ( + self._validate_args(shlex.split(env_addopts), "via PYTEST_ADDOPTS") + + args + ) + self._initini(args) + if addopts: + args[:] = ( + self._validate_args(self.getini("addopts"), "via addopts config") + args + ) + + self.known_args_namespace = self._parser.parse_known_args( + args, namespace=copy.copy(self.option) + ) + self._checkversion() + self._consider_importhook(args) + self.pluginmanager.consider_preparse(args, exclude_only=False) + if not os.environ.get("PYTEST_DISABLE_PLUGIN_AUTOLOAD"): + # Don't autoload from setuptools entry point. Only explicitly specified + # plugins are going to be loaded. + self.pluginmanager.load_setuptools_entrypoints("pytest11") + self.pluginmanager.consider_env() + + self.known_args_namespace = self._parser.parse_known_args( + args, namespace=copy.copy(self.known_args_namespace) + ) + + self._validate_plugins() + self._warn_about_skipped_plugins() + + if self.known_args_namespace.strict: + self.issue_config_time_warning( + _pytest.deprecated.STRICT_OPTION, stacklevel=2 + ) + + if self.known_args_namespace.confcutdir is None: + if self.inipath is not None: + confcutdir = str(self.inipath.parent) + else: + confcutdir = str(self.rootpath) + self.known_args_namespace.confcutdir = confcutdir + try: + self.hook.pytest_load_initial_conftests( + early_config=self, args=args, parser=self._parser + ) + except ConftestImportFailure as e: + if self.known_args_namespace.help or self.known_args_namespace.version: + # we don't want to prevent --help/--version to work + # so just let is pass and print a warning at the end + self.issue_config_time_warning( + PytestConfigWarning(f"could not load initial conftests: {e.path}"), + stacklevel=2, + ) + else: + raise + + @hookimpl(hookwrapper=True) + def pytest_collection(self) -> Generator[None, None, None]: + # Validate invalid ini keys after collection is done so we take in account + # options added by late-loading conftest files. + yield + self._validate_config_options() + + def _checkversion(self) -> None: + import pytest + + minver = self.inicfg.get("minversion", None) + if minver: + # Imported lazily to improve start-up time. 
+ from packaging.version import Version
+
+ if not isinstance(minver, str):
+ raise pytest.UsageError(
+ "%s: 'minversion' must be a single value" % self.inipath
+ )
+
+ if Version(minver) > Version(pytest.__version__):
+ raise pytest.UsageError(
+ "%s: 'minversion' requires pytest-%s, actual pytest-%s"
+ % (
+ self.inipath,
+ minver,
+ pytest.__version__,
+ )
+ )
+
+ def _validate_config_options(self) -> None:
+ for key in sorted(self._get_unknown_ini_keys()):
+ self._warn_or_fail_if_strict(f"Unknown config option: {key}\n")
+
+ def _validate_plugins(self) -> None:
+ required_plugins = sorted(self.getini("required_plugins"))
+ if not required_plugins:
+ return
+
+ # Imported lazily to improve start-up time.
+ from packaging.version import Version
+ from packaging.requirements import InvalidRequirement, Requirement
+
+ plugin_info = self.pluginmanager.list_plugin_distinfo()
+ plugin_dist_info = {dist.project_name: dist.version for _, dist in plugin_info}
+
+ missing_plugins = []
+ for required_plugin in required_plugins:
+ try:
+ req = Requirement(required_plugin)
+ except InvalidRequirement:
+ missing_plugins.append(required_plugin)
+ continue
+
+ if req.name not in plugin_dist_info:
+ missing_plugins.append(required_plugin)
+ elif not req.specifier.contains(
+ Version(plugin_dist_info[req.name]), prereleases=True
+ ):
+ missing_plugins.append(required_plugin)
+
+ if missing_plugins:
+ raise UsageError(
+ "Missing required plugins: {}".format(", ".join(missing_plugins)),
+ )
+
+ def _warn_or_fail_if_strict(self, message: str) -> None:
+ if self.known_args_namespace.strict_config:
+ raise UsageError(message)
+
+ self.issue_config_time_warning(PytestConfigWarning(message), stacklevel=3)
+
+ def _get_unknown_ini_keys(self) -> List[str]:
+ parser_inicfg = self._parser._inidict
+ return [name for name in self.inicfg if name not in parser_inicfg]
+
+ def parse(self, args: List[str], addopts: bool = True) -> None:
+ # Parse given cmdline arguments into this config object.
+ assert (
+ self.args == []
+ ), "can only parse cmdline args at most once per Config object"
+ self.hook.pytest_addhooks.call_historic(
+ kwargs=dict(pluginmanager=self.pluginmanager)
+ )
+ self._preparse(args, addopts=addopts)
+ # XXX deprecated hook:
+ self.hook.pytest_cmdline_preparse(config=self, args=args)
+ self._parser.after_preparse = True # type: ignore
+ try:
+ args = self._parser.parse_setoption(
+ args, self.option, namespace=self.option
+ )
+ self.args, self.args_source = self._decide_args(
+ args=args,
+ pyargs=self.known_args_namespace.pyargs,
+ testpaths=self.getini("testpaths"),
+ invocation_dir=self.invocation_params.dir,
+ rootpath=self.rootpath,
+ warn=True,
+ )
+ except PrintHelp:
+ pass
+
+ def issue_config_time_warning(self, warning: Warning, stacklevel: int) -> None:
+ """Issue and handle a warning during the "configure" stage.
+
+ During ``pytest_configure`` we can't capture warnings using the ``catch_warnings_for_item``
+ function because it is not possible to have hookwrappers around ``pytest_configure``.
+
+ This function is mainly intended for plugins that need to issue warnings during
+ ``pytest_configure`` (or similar stages).
+
+ :param warning: The warning instance.
+ :param stacklevel: stacklevel forwarded to warnings.warn. 
+ """ + if self.pluginmanager.is_blocked("warnings"): + return + + cmdline_filters = self.known_args_namespace.pythonwarnings or [] + config_filters = self.getini("filterwarnings") + + with warnings.catch_warnings(record=True) as records: + warnings.simplefilter("always", type(warning)) + apply_warning_filters(config_filters, cmdline_filters) + warnings.warn(warning, stacklevel=stacklevel) + + if records: + frame = sys._getframe(stacklevel - 1) + location = frame.f_code.co_filename, frame.f_lineno, frame.f_code.co_name + self.hook.pytest_warning_recorded.call_historic( + kwargs=dict( + warning_message=records[0], + when="config", + nodeid="", + location=location, + ) + ) + + def addinivalue_line(self, name: str, line: str) -> None: + """Add a line to an ini-file option. The option must have been + declared but might not yet be set in which case the line becomes + the first line in its value.""" + x = self.getini(name) + assert isinstance(x, list) + x.append(line) # modifies the cached list inline + + def getini(self, name: str): + """Return configuration value from an :ref:`ini file `. + + If the specified name hasn't been registered through a prior + :func:`parser.addini ` call (usually from a + plugin), a ValueError is raised. + """ + try: + return self._inicache[name] + except KeyError: + self._inicache[name] = val = self._getini(name) + return val + + # Meant for easy monkeypatching by legacypath plugin. + # Can be inlined back (with no cover removed) once legacypath is gone. + def _getini_unknown_type(self, name: str, type: str, value: Union[str, List[str]]): + msg = f"unknown configuration type: {type}" + raise ValueError(msg, value) # pragma: no cover + + def _getini(self, name: str): + try: + description, type, default = self._parser._inidict[name] + except KeyError as e: + raise ValueError(f"unknown configuration value: {name!r}") from e + override_value = self._get_override_ini_value(name) + if override_value is None: + try: + value = self.inicfg[name] + except KeyError: + if default is not None: + return default + if type is None: + return "" + return [] + else: + value = override_value + # Coerce the values based on types. + # + # Note: some coercions are only required if we are reading from .ini files, because + # the file format doesn't contain type information, but when reading from toml we will + # get either str or list of str values (see _parse_ini_config_from_pyproject_toml). + # For example: + # + # ini: + # a_line_list = "tests acceptance" + # in this case, we need to split the string to obtain a list of strings. + # + # toml: + # a_line_list = ["tests", "acceptance"] + # in this case, we already have a list ready to use. + # + if type == "paths": + # TODO: This assert is probably not valid in all cases. 
+ assert self.inipath is not None
+ dp = self.inipath.parent
+ input_values = shlex.split(value) if isinstance(value, str) else value
+ return [dp / x for x in input_values]
+ elif type == "args":
+ return shlex.split(value) if isinstance(value, str) else value
+ elif type == "linelist":
+ if isinstance(value, str):
+ return [t for t in map(lambda x: x.strip(), value.split("\n")) if t]
+ else:
+ return value
+ elif type == "bool":
+ return _strtobool(str(value).strip())
+ elif type == "string":
+ return value
+ elif type is None:
+ return value
+ else:
+ return self._getini_unknown_type(name, type, value)
+
+ def _getconftest_pathlist(
+ self, name: str, path: Path, rootpath: Path
+ ) -> Optional[List[Path]]:
+ try:
+ mod, relroots = self.pluginmanager._rget_with_confmod(
+ name, path, self.getoption("importmode"), rootpath
+ )
+ except KeyError:
+ return None
+ assert mod.__file__ is not None
+ modpath = Path(mod.__file__).parent
+ values: List[Path] = []
+ for relroot in relroots:
+ if isinstance(relroot, os.PathLike):
+ relroot = Path(relroot)
+ else:
+ relroot = relroot.replace("/", os.sep)
+ relroot = absolutepath(modpath / relroot)
+ values.append(relroot)
+ return values
+
+ def _get_override_ini_value(self, name: str) -> Optional[str]:
+ value = None
+ # override_ini is a list of "ini=value" options.
+ # Always use the last item if multiple values are set for the same ini-name,
+ # e.g. -o foo=bar1 -o foo=bar2 will set foo to bar2.
+ for ini_config in self._override_ini:
+ try:
+ key, user_ini_value = ini_config.split("=", 1)
+ except ValueError as e:
+ raise UsageError(
+ "-o/--override-ini expects option=value style (got: {!r}).".format(
+ ini_config
+ )
+ ) from e
+ else:
+ if key == name:
+ value = user_ini_value
+ return value
+
+ def getoption(self, name: str, default=notset, skip: bool = False):
+ """Return command line option value.
+
+ :param name: Name of the option. You may also specify
+ the literal ``--OPT`` option instead of the "dest" option name.
+ :param default: Default value if no option of that name exists.
+ :param skip: If True, raise pytest.skip if option does not exist
+ or has a None value.
+ """
+ name = self._opt2dest.get(name, name)
+ try:
+ val = getattr(self.option, name)
+ if val is None and skip:
+ raise AttributeError(name)
+ return val
+ except AttributeError as e:
+ if default is not notset:
+ return default
+ if skip:
+ import pytest
+
+ pytest.skip(f"no {name!r} option found")
+ raise ValueError(f"no option named {name!r}") from e
+
+ def getvalue(self, name: str, path=None):
+ """Deprecated, use getoption() instead."""
+ return self.getoption(name)
+
+ def getvalueorskip(self, name: str, path=None):
+ """Deprecated, use getoption(skip=True) instead."""
+ return self.getoption(name, skip=True)
+
+ def _warn_about_missing_assertion(self, mode: str) -> None:
+ if not _assertion_supported():
+ if mode == "plain":
+ warning_text = (
+ "ASSERTIONS ARE NOT EXECUTED"
+ " and FAILING TESTS WILL PASS. Are you"
+ " using python -O?" 
+ ) + else: + warning_text = ( + "assertions not in test modules or" + " plugins will be ignored" + " because assert statements are not executed " + "by the underlying Python interpreter " + "(are you using python -O?)\n" + ) + self.issue_config_time_warning( + PytestConfigWarning(warning_text), + stacklevel=3, + ) + + def _warn_about_skipped_plugins(self) -> None: + for module_name, msg in self.pluginmanager.skipped_plugins: + self.issue_config_time_warning( + PytestConfigWarning(f"skipped plugin {module_name!r}: {msg}"), + stacklevel=2, + ) + + +def _assertion_supported() -> bool: + try: + assert False + except AssertionError: + return True + else: + return False # type: ignore[unreachable] + + +def create_terminal_writer( + config: Config, file: Optional[TextIO] = None +) -> TerminalWriter: + """Create a TerminalWriter instance configured according to the options + in the config object. + + Every code which requires a TerminalWriter object and has access to a + config object should use this function. + """ + tw = TerminalWriter(file=file) + + if config.option.color == "yes": + tw.hasmarkup = True + elif config.option.color == "no": + tw.hasmarkup = False + + if config.option.code_highlight == "yes": + tw.code_highlight = True + elif config.option.code_highlight == "no": + tw.code_highlight = False + + return tw + + +def _strtobool(val: str) -> bool: + """Convert a string representation of truth to True or False. + + True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values + are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if + 'val' is anything else. + + .. note:: Copied from distutils.util. + """ + val = val.lower() + if val in ("y", "yes", "t", "true", "on", "1"): + return True + elif val in ("n", "no", "f", "false", "off", "0"): + return False + else: + raise ValueError(f"invalid truth value {val!r}") + + +@lru_cache(maxsize=50) +def parse_warning_filter( + arg: str, *, escape: bool +) -> Tuple["warnings._ActionKind", str, Type[Warning], str, int]: + """Parse a warnings filter string. + + This is copied from warnings._setoption with the following changes: + + * Does not apply the filter. + * Escaping is optional. + * Raises UsageError so we get nice error messages on failure. 
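+
+ For example (with escape=False), "ignore::DeprecationWarning:myproject.*"
+ parses into ("ignore", "", DeprecationWarning, "myproject.*", 0). 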
+ """ + __tracebackhide__ = True + error_template = dedent( + f"""\ + while parsing the following warning configuration: + + {arg} + + This error occurred: + + {{error}} + """ + ) + + parts = arg.split(":") + if len(parts) > 5: + doc_url = ( + "https://docs.python.org/3/library/warnings.html#describing-warning-filters" + ) + error = dedent( + f"""\ + Too many fields ({len(parts)}), expected at most 5 separated by colons: + + action:message:category:module:line + + For more information please consult: {doc_url} + """ + ) + raise UsageError(error_template.format(error=error)) + + while len(parts) < 5: + parts.append("") + action_, message, category_, module, lineno_ = (s.strip() for s in parts) + try: + action: "warnings._ActionKind" = warnings._getaction(action_) # type: ignore[attr-defined] + except warnings._OptionError as e: + raise UsageError(error_template.format(error=str(e))) + try: + category: Type[Warning] = _resolve_warning_category(category_) + except Exception: + exc_info = ExceptionInfo.from_current() + exception_text = exc_info.getrepr(style="native") + raise UsageError(error_template.format(error=exception_text)) + if message and escape: + message = re.escape(message) + if module and escape: + module = re.escape(module) + r"\Z" + if lineno_: + try: + lineno = int(lineno_) + if lineno < 0: + raise ValueError("number is negative") + except ValueError as e: + raise UsageError( + error_template.format(error=f"invalid lineno {lineno_!r}: {e}") + ) + else: + lineno = 0 + return action, message, category, module, lineno + + +def _resolve_warning_category(category: str) -> Type[Warning]: + """ + Copied from warnings._getcategory, but changed so it lets exceptions (specially ImportErrors) + propagate so we can get access to their tracebacks (#9218). + """ + __tracebackhide__ = True + if not category: + return Warning + + if "." not in category: + import builtins as m + + klass = category + else: + module, _, klass = category.rpartition(".") + m = __import__(module, None, None, [klass]) + cat = getattr(m, klass) + if not issubclass(cat, Warning): + raise UsageError(f"{cat} is not a Warning subclass") + return cast(Type[Warning], cat) + + +def apply_warning_filters( + config_filters: Iterable[str], cmdline_filters: Iterable[str] +) -> None: + """Applies pytest-configured filters to the warnings module""" + # Filters should have this precedence: cmdline options, config. + # Filters should be applied in the inverse order of precedence. 
+ for arg in config_filters: + warnings.filterwarnings(*parse_warning_filter(arg, escape=False)) + + for arg in cmdline_filters: + warnings.filterwarnings(*parse_warning_filter(arg, escape=True)) diff --git a/venv/lib/python3.11/site-packages/_pytest/config/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/config/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..82964d2 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/config/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/config/__pycache__/argparsing.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/config/__pycache__/argparsing.cpython-311.pyc new file mode 100644 index 0000000..6a2ee9b Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/config/__pycache__/argparsing.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/config/__pycache__/compat.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/config/__pycache__/compat.cpython-311.pyc new file mode 100644 index 0000000..125e258 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/config/__pycache__/compat.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/config/__pycache__/exceptions.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/config/__pycache__/exceptions.cpython-311.pyc new file mode 100644 index 0000000..7220a8d Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/config/__pycache__/exceptions.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/config/__pycache__/findpaths.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/config/__pycache__/findpaths.cpython-311.pyc new file mode 100644 index 0000000..470ff35 Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/config/__pycache__/findpaths.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/_pytest/config/argparsing.py b/venv/lib/python3.11/site-packages/_pytest/config/argparsing.py new file mode 100644 index 0000000..d3f0191 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/config/argparsing.py @@ -0,0 +1,551 @@ +import argparse +import os +import sys +import warnings +from gettext import gettext +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import List +from typing import Mapping +from typing import NoReturn +from typing import Optional +from typing import Sequence +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union + +import _pytest._io +from _pytest.compat import final +from _pytest.config.exceptions import UsageError +from _pytest.deprecated import ARGUMENT_PERCENT_DEFAULT +from _pytest.deprecated import ARGUMENT_TYPE_STR +from _pytest.deprecated import ARGUMENT_TYPE_STR_CHOICE +from _pytest.deprecated import check_ispytest + +if TYPE_CHECKING: + from typing_extensions import Literal + +FILE_OR_DIR = "file_or_dir" + + +@final +class Parser: + """Parser for command line arguments and ini-file values. + + :ivar extra_info: Dict of generic param -> value to display in case + there's an error processing the command line arguments. 
+ """ + + prog: Optional[str] = None + + def __init__( + self, + usage: Optional[str] = None, + processopt: Optional[Callable[["Argument"], None]] = None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self._anonymous = OptionGroup("Custom options", parser=self, _ispytest=True) + self._groups: List[OptionGroup] = [] + self._processopt = processopt + self._usage = usage + self._inidict: Dict[str, Tuple[str, Optional[str], Any]] = {} + self._ininames: List[str] = [] + self.extra_info: Dict[str, Any] = {} + + def processoption(self, option: "Argument") -> None: + if self._processopt: + if option.dest: + self._processopt(option) + + def getgroup( + self, name: str, description: str = "", after: Optional[str] = None + ) -> "OptionGroup": + """Get (or create) a named option Group. + + :param name: Name of the option group. + :param description: Long description for --help output. + :param after: Name of another group, used for ordering --help output. + :returns: The option group. + + The returned group object has an ``addoption`` method with the same + signature as :func:`parser.addoption ` but + will be shown in the respective group in the output of + ``pytest --help``. + """ + for group in self._groups: + if group.name == name: + return group + group = OptionGroup(name, description, parser=self, _ispytest=True) + i = 0 + for i, grp in enumerate(self._groups): + if grp.name == after: + break + self._groups.insert(i + 1, group) + return group + + def addoption(self, *opts: str, **attrs: Any) -> None: + """Register a command line option. + + :param opts: + Option names, can be short or long options. + :param attrs: + Same attributes as the argparse library's :py:func:`add_argument() + ` function accepts. + + After command line parsing, options are available on the pytest config + object via ``config.option.NAME`` where ``NAME`` is usually set + by passing a ``dest`` attribute, for example + ``addoption("--long", dest="NAME", ...)``. + """ + self._anonymous.addoption(*opts, **attrs) + + def parse( + self, + args: Sequence[Union[str, "os.PathLike[str]"]], + namespace: Optional[argparse.Namespace] = None, + ) -> argparse.Namespace: + from _pytest._argcomplete import try_argcomplete + + self.optparser = self._getparser() + try_argcomplete(self.optparser) + strargs = [os.fspath(x) for x in args] + return self.optparser.parse_args(strargs, namespace=namespace) + + def _getparser(self) -> "MyOptionParser": + from _pytest._argcomplete import filescompleter + + optparser = MyOptionParser(self, self.extra_info, prog=self.prog) + groups = self._groups + [self._anonymous] + for group in groups: + if group.options: + desc = group.description or group.name + arggroup = optparser.add_argument_group(desc) + for option in group.options: + n = option.names() + a = option.attrs() + arggroup.add_argument(*n, **a) + file_or_dir_arg = optparser.add_argument(FILE_OR_DIR, nargs="*") + # bash like autocompletion for dirs (appending '/') + # Type ignored because typeshed doesn't know about argcomplete. 
+ file_or_dir_arg.completer = filescompleter # type: ignore + return optparser + + def parse_setoption( + self, + args: Sequence[Union[str, "os.PathLike[str]"]], + option: argparse.Namespace, + namespace: Optional[argparse.Namespace] = None, + ) -> List[str]: + parsedoption = self.parse(args, namespace=namespace) + for name, value in parsedoption.__dict__.items(): + setattr(option, name, value) + return cast(List[str], getattr(parsedoption, FILE_OR_DIR)) + + def parse_known_args( + self, + args: Sequence[Union[str, "os.PathLike[str]"]], + namespace: Optional[argparse.Namespace] = None, + ) -> argparse.Namespace: + """Parse the known arguments at this point. + + :returns: An argparse namespace object. + """ + return self.parse_known_and_unknown_args(args, namespace=namespace)[0] + + def parse_known_and_unknown_args( + self, + args: Sequence[Union[str, "os.PathLike[str]"]], + namespace: Optional[argparse.Namespace] = None, + ) -> Tuple[argparse.Namespace, List[str]]: + """Parse the known arguments at this point, and also return the + remaining unknown arguments. + + :returns: + A tuple containing an argparse namespace object for the known + arguments, and a list of the unknown arguments. + """ + optparser = self._getparser() + strargs = [os.fspath(x) for x in args] + return optparser.parse_known_args(strargs, namespace=namespace) + + def addini( + self, + name: str, + help: str, + type: Optional[ + "Literal['string', 'paths', 'pathlist', 'args', 'linelist', 'bool']" + ] = None, + default: Any = None, + ) -> None: + """Register an ini-file option. + + :param name: + Name of the ini-variable. + :param type: + Type of the variable. Can be: + + * ``string``: a string + * ``bool``: a boolean + * ``args``: a list of strings, separated as in a shell + * ``linelist``: a list of strings, separated by line breaks + * ``paths``: a list of :class:`pathlib.Path`, separated as in a shell + * ``pathlist``: a list of ``py.path``, separated as in a shell + + .. versionadded:: 7.0 + The ``paths`` variable type. + + Defaults to ``string`` if ``None`` or not passed. + :param default: + Default value if no ini-file option exists but is queried. + + The value of ini-variables can be retrieved via a call to + :py:func:`config.getini(name) `. + """ + assert type in (None, "string", "paths", "pathlist", "args", "linelist", "bool") + self._inidict[name] = (help, type, default) + self._ininames.append(name) + + +class ArgumentError(Exception): + """Raised if an Argument instance is created with invalid or + inconsistent arguments.""" + + def __init__(self, msg: str, option: Union["Argument", str]) -> None: + self.msg = msg + self.option_id = str(option) + + def __str__(self) -> str: + if self.option_id: + return f"option {self.option_id}: {self.msg}" + else: + return self.msg + + +class Argument: + """Class that mimics the necessary behaviour of optparse.Option. + + It's currently a least effort implementation and ignoring choices + and integer prefixes. 
+ + https://docs.python.org/3/library/optparse.html#optparse-standard-option-types + """ + + _typ_map = {"int": int, "string": str, "float": float, "complex": complex} + + def __init__(self, *names: str, **attrs: Any) -> None: + """Store params in private vars for use in add_argument.""" + self._attrs = attrs + self._short_opts: List[str] = [] + self._long_opts: List[str] = [] + if "%default" in (attrs.get("help") or ""): + warnings.warn(ARGUMENT_PERCENT_DEFAULT, stacklevel=3) + try: + typ = attrs["type"] + except KeyError: + pass + else: + # This might raise a keyerror as well, don't want to catch that. + if isinstance(typ, str): + if typ == "choice": + warnings.warn( + ARGUMENT_TYPE_STR_CHOICE.format(typ=typ, names=names), + stacklevel=4, + ) + # argparse expects a type here take it from + # the type of the first element + attrs["type"] = type(attrs["choices"][0]) + else: + warnings.warn( + ARGUMENT_TYPE_STR.format(typ=typ, names=names), stacklevel=4 + ) + attrs["type"] = Argument._typ_map[typ] + # Used in test_parseopt -> test_parse_defaultgetter. + self.type = attrs["type"] + else: + self.type = typ + try: + # Attribute existence is tested in Config._processopt. + self.default = attrs["default"] + except KeyError: + pass + self._set_opt_strings(names) + dest: Optional[str] = attrs.get("dest") + if dest: + self.dest = dest + elif self._long_opts: + self.dest = self._long_opts[0][2:].replace("-", "_") + else: + try: + self.dest = self._short_opts[0][1:] + except IndexError as e: + self.dest = "???" # Needed for the error repr. + raise ArgumentError("need a long or short option", self) from e + + def names(self) -> List[str]: + return self._short_opts + self._long_opts + + def attrs(self) -> Mapping[str, Any]: + # Update any attributes set by processopt. + attrs = "default dest help".split() + attrs.append(self.dest) + for attr in attrs: + try: + self._attrs[attr] = getattr(self, attr) + except AttributeError: + pass + if self._attrs.get("help"): + a = self._attrs["help"] + a = a.replace("%default", "%(default)s") + # a = a.replace('%prog', '%(prog)s') + self._attrs["help"] = a + return self._attrs + + def _set_opt_strings(self, opts: Sequence[str]) -> None: + """Directly from optparse. + + Might not be necessary as this is passed to argparse later on. 
+ """ + for opt in opts: + if len(opt) < 2: + raise ArgumentError( + "invalid option string %r: " + "must be at least two characters long" % opt, + self, + ) + elif len(opt) == 2: + if not (opt[0] == "-" and opt[1] != "-"): + raise ArgumentError( + "invalid short option string %r: " + "must be of the form -x, (x any non-dash char)" % opt, + self, + ) + self._short_opts.append(opt) + else: + if not (opt[0:2] == "--" and opt[2] != "-"): + raise ArgumentError( + "invalid long option string %r: " + "must start with --, followed by non-dash" % opt, + self, + ) + self._long_opts.append(opt) + + def __repr__(self) -> str: + args: List[str] = [] + if self._short_opts: + args += ["_short_opts: " + repr(self._short_opts)] + if self._long_opts: + args += ["_long_opts: " + repr(self._long_opts)] + args += ["dest: " + repr(self.dest)] + if hasattr(self, "type"): + args += ["type: " + repr(self.type)] + if hasattr(self, "default"): + args += ["default: " + repr(self.default)] + return "Argument({})".format(", ".join(args)) + + +class OptionGroup: + """A group of options shown in its own section.""" + + def __init__( + self, + name: str, + description: str = "", + parser: Optional[Parser] = None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self.name = name + self.description = description + self.options: List[Argument] = [] + self.parser = parser + + def addoption(self, *opts: str, **attrs: Any) -> None: + """Add an option to this group. + + If a shortened version of a long option is specified, it will + be suppressed in the help. ``addoption('--twowords', '--two-words')`` + results in help showing ``--two-words`` only, but ``--twowords`` gets + accepted **and** the automatic destination is in ``args.twowords``. + + :param opts: + Option names, can be short or long options. + :param attrs: + Same attributes as the argparse library's :py:func:`add_argument() + ` function accepts. + """ + conflict = set(opts).intersection( + name for opt in self.options for name in opt.names() + ) + if conflict: + raise ValueError("option names %s already added" % conflict) + option = Argument(*opts, **attrs) + self._addoption_instance(option, shortupper=False) + + def _addoption(self, *opts: str, **attrs: Any) -> None: + option = Argument(*opts, **attrs) + self._addoption_instance(option, shortupper=True) + + def _addoption_instance(self, option: "Argument", shortupper: bool = False) -> None: + if not shortupper: + for opt in option._short_opts: + if opt[0] == "-" and opt[1].islower(): + raise ValueError("lowercase shortoptions reserved") + if self.parser: + self.parser.processoption(option) + self.options.append(option) + + +class MyOptionParser(argparse.ArgumentParser): + def __init__( + self, + parser: Parser, + extra_info: Optional[Dict[str, Any]] = None, + prog: Optional[str] = None, + ) -> None: + self._parser = parser + super().__init__( + prog=prog, + usage=parser._usage, + add_help=False, + formatter_class=DropShorterLongHelpFormatter, + allow_abbrev=False, + ) + # extra_info is a dict of (param -> value) to display if there's + # an usage error to provide more contextual information to the user. + self.extra_info = extra_info if extra_info else {} + + def error(self, message: str) -> NoReturn: + """Transform argparse error message into UsageError.""" + msg = f"{self.prog}: error: {message}" + + if hasattr(self._parser, "_config_source_hint"): + # Type ignored because the attribute is set dynamically. 
+ msg = f"{msg} ({self._parser._config_source_hint})" # type: ignore + + raise UsageError(self.format_usage() + msg) + + # Type ignored because typeshed has a very complex type in the superclass. + def parse_args( # type: ignore + self, + args: Optional[Sequence[str]] = None, + namespace: Optional[argparse.Namespace] = None, + ) -> argparse.Namespace: + """Allow splitting of positional arguments.""" + parsed, unrecognized = self.parse_known_args(args, namespace) + if unrecognized: + for arg in unrecognized: + if arg and arg[0] == "-": + lines = ["unrecognized arguments: %s" % (" ".join(unrecognized))] + for k, v in sorted(self.extra_info.items()): + lines.append(f" {k}: {v}") + self.error("\n".join(lines)) + getattr(parsed, FILE_OR_DIR).extend(unrecognized) + return parsed + + if sys.version_info[:2] < (3, 9): # pragma: no cover + # Backport of https://github.com/python/cpython/pull/14316 so we can + # disable long --argument abbreviations without breaking short flags. + def _parse_optional( + self, arg_string: str + ) -> Optional[Tuple[Optional[argparse.Action], str, Optional[str]]]: + if not arg_string: + return None + if not arg_string[0] in self.prefix_chars: + return None + if arg_string in self._option_string_actions: + action = self._option_string_actions[arg_string] + return action, arg_string, None + if len(arg_string) == 1: + return None + if "=" in arg_string: + option_string, explicit_arg = arg_string.split("=", 1) + if option_string in self._option_string_actions: + action = self._option_string_actions[option_string] + return action, option_string, explicit_arg + if self.allow_abbrev or not arg_string.startswith("--"): + option_tuples = self._get_option_tuples(arg_string) + if len(option_tuples) > 1: + msg = gettext( + "ambiguous option: %(option)s could match %(matches)s" + ) + options = ", ".join(option for _, option, _ in option_tuples) + self.error(msg % {"option": arg_string, "matches": options}) + elif len(option_tuples) == 1: + (option_tuple,) = option_tuples + return option_tuple + if self._negative_number_matcher.match(arg_string): + if not self._has_negative_number_optionals: + return None + if " " in arg_string: + return None + return None, arg_string, None + + +class DropShorterLongHelpFormatter(argparse.HelpFormatter): + """Shorten help for long options that differ only in extra hyphens. + + - Collapse **long** options that are the same except for extra hyphens. + - Shortcut if there are only two options and one of them is a short one. + - Cache result on the action object as this is called at least 2 times. + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + # Use more accurate terminal width. 
+ if "width" not in kwargs: + kwargs["width"] = _pytest._io.get_terminal_width() + super().__init__(*args, **kwargs) + + def _format_action_invocation(self, action: argparse.Action) -> str: + orgstr = super()._format_action_invocation(action) + if orgstr and orgstr[0] != "-": # only optional arguments + return orgstr + res: Optional[str] = getattr(action, "_formatted_action_invocation", None) + if res: + return res + options = orgstr.split(", ") + if len(options) == 2 and (len(options[0]) == 2 or len(options[1]) == 2): + # a shortcut for '-h, --help' or '--abc', '-a' + action._formatted_action_invocation = orgstr # type: ignore + return orgstr + return_list = [] + short_long: Dict[str, str] = {} + for option in options: + if len(option) == 2 or option[2] == " ": + continue + if not option.startswith("--"): + raise ArgumentError( + 'long optional argument without "--": [%s]' % (option), option + ) + xxoption = option[2:] + shortened = xxoption.replace("-", "") + if shortened not in short_long or len(short_long[shortened]) < len( + xxoption + ): + short_long[shortened] = xxoption + # now short_long has been filled out to the longest with dashes + # **and** we keep the right option ordering from add_argument + for option in options: + if len(option) == 2 or option[2] == " ": + return_list.append(option) + if option[2:] == short_long.get(option.replace("-", "")): + return_list.append(option.replace(" ", "=", 1)) + formatted_action_invocation = ", ".join(return_list) + action._formatted_action_invocation = formatted_action_invocation # type: ignore + return formatted_action_invocation + + def _split_lines(self, text, width): + """Wrap lines after splitting on original newlines. + + This allows to have explicit line breaks in the help text. + """ + import textwrap + + lines = [] + for line in text.splitlines(): + lines.extend(textwrap.wrap(line.strip(), width)) + return lines diff --git a/venv/lib/python3.11/site-packages/_pytest/config/compat.py b/venv/lib/python3.11/site-packages/_pytest/config/compat.py new file mode 100644 index 0000000..5bd922a --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/config/compat.py @@ -0,0 +1,70 @@ +import functools +import warnings +from pathlib import Path +from typing import Optional + +from ..compat import LEGACY_PATH +from ..compat import legacy_path +from ..deprecated import HOOK_LEGACY_PATH_ARG +from _pytest.nodes import _check_path + +# hookname: (Path, LEGACY_PATH) +imply_paths_hooks = { + "pytest_ignore_collect": ("collection_path", "path"), + "pytest_collect_file": ("file_path", "path"), + "pytest_pycollect_makemodule": ("module_path", "path"), + "pytest_report_header": ("start_path", "startdir"), + "pytest_report_collectionfinish": ("start_path", "startdir"), +} + + +class PathAwareHookProxy: + """ + this helper wraps around hook callers + until pluggy supports fixingcalls, this one will do + + it currently doesn't return full hook caller proxies for fixed hooks, + this may have to be changed later depending on bugs + """ + + def __init__(self, hook_caller): + self.__hook_caller = hook_caller + + def __dir__(self): + return dir(self.__hook_caller) + + def __getattr__(self, key, _wraps=functools.wraps): + hook = getattr(self.__hook_caller, key) + if key not in imply_paths_hooks: + self.__dict__[key] = hook + return hook + else: + path_var, fspath_var = imply_paths_hooks[key] + + @_wraps(hook) + def fixed_hook(**kw): + path_value: Optional[Path] = kw.pop(path_var, None) + fspath_value: Optional[LEGACY_PATH] = kw.pop(fspath_var, None) + if 
fspath_value is not None: + warnings.warn( + HOOK_LEGACY_PATH_ARG.format( + pylib_path_arg=fspath_var, pathlib_path_arg=path_var + ), + stacklevel=2, + ) + if path_value is not None: + if fspath_value is not None: + _check_path(path_value, fspath_value) + else: + fspath_value = legacy_path(path_value) + else: + assert fspath_value is not None + path_value = Path(fspath_value) + + kw[path_var] = path_value + kw[fspath_var] = fspath_value + return hook(**kw) + + fixed_hook.__name__ = key + self.__dict__[key] = fixed_hook + return fixed_hook diff --git a/venv/lib/python3.11/site-packages/_pytest/config/exceptions.py b/venv/lib/python3.11/site-packages/_pytest/config/exceptions.py new file mode 100644 index 0000000..4f1320e --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/config/exceptions.py @@ -0,0 +1,11 @@ +from _pytest.compat import final + + +@final +class UsageError(Exception): + """Error in pytest usage or invocation.""" + + +class PrintHelp(Exception): + """Raised when pytest should print its help to skip the rest of the + argument parsing and validation.""" diff --git a/venv/lib/python3.11/site-packages/_pytest/config/findpaths.py b/venv/lib/python3.11/site-packages/_pytest/config/findpaths.py new file mode 100644 index 0000000..234b9e1 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/config/findpaths.py @@ -0,0 +1,225 @@ +import os +import sys +from pathlib import Path +from typing import Dict +from typing import Iterable +from typing import List +from typing import Optional +from typing import Sequence +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union + +import iniconfig + +from .exceptions import UsageError +from _pytest.outcomes import fail +from _pytest.pathlib import absolutepath +from _pytest.pathlib import commonpath + +if TYPE_CHECKING: + from . import Config + + +def _parse_ini_config(path: Path) -> iniconfig.IniConfig: + """Parse the given generic '.ini' file using legacy IniConfig parser, returning + the parsed object. + + Raise UsageError if the file cannot be parsed. + """ + try: + return iniconfig.IniConfig(str(path)) + except iniconfig.ParseError as exc: + raise UsageError(str(exc)) from exc + + +def load_config_dict_from_file( + filepath: Path, +) -> Optional[Dict[str, Union[str, List[str]]]]: + """Load pytest configuration from the given file path, if supported. + + Return None if the file does not contain valid pytest configuration. + """ + + # Configuration from ini files are obtained from the [pytest] section, if present. + if filepath.suffix == ".ini": + iniconfig = _parse_ini_config(filepath) + + if "pytest" in iniconfig: + return dict(iniconfig["pytest"].items()) + else: + # "pytest.ini" files are always the source of configuration, even if empty. + if filepath.name == "pytest.ini": + return {} + + # '.cfg' files are considered if they contain a "[tool:pytest]" section. + elif filepath.suffix == ".cfg": + iniconfig = _parse_ini_config(filepath) + + if "tool:pytest" in iniconfig.sections: + return dict(iniconfig["tool:pytest"].items()) + elif "pytest" in iniconfig.sections: + # If a setup.cfg contains a "[pytest]" section, we raise a failure to indicate users that + # plain "[pytest]" sections in setup.cfg files is no longer supported (#3086). + fail(CFG_PYTEST_SECTION.format(filename="setup.cfg"), pytrace=False) + + # '.toml' files are considered if they contain a [tool.pytest.ini_options] table. 
+ elif filepath.suffix == ".toml": + if sys.version_info >= (3, 11): + import tomllib + else: + import tomli as tomllib + + toml_text = filepath.read_text(encoding="utf-8") + try: + config = tomllib.loads(toml_text) + except tomllib.TOMLDecodeError as exc: + raise UsageError(f"{filepath}: {exc}") from exc + + result = config.get("tool", {}).get("pytest", {}).get("ini_options", None) + if result is not None: + # TOML supports richer data types than ini files (strings, arrays, floats, ints, etc), + # however we need to convert all scalar values to str for compatibility with the rest + # of the configuration system, which expects strings only. + def make_scalar(v: object) -> Union[str, List[str]]: + return v if isinstance(v, list) else str(v) + + return {k: make_scalar(v) for k, v in result.items()} + + return None + + +def locate_config( + args: Iterable[Path], +) -> Tuple[Optional[Path], Optional[Path], Dict[str, Union[str, List[str]]]]: + """Search in the list of arguments for a valid ini-file for pytest, + and return a tuple of (rootdir, inifile, cfg-dict).""" + config_names = [ + "pytest.ini", + ".pytest.ini", + "pyproject.toml", + "tox.ini", + "setup.cfg", + ] + args = [x for x in args if not str(x).startswith("-")] + if not args: + args = [Path.cwd()] + for arg in args: + argpath = absolutepath(arg) + for base in (argpath, *argpath.parents): + for config_name in config_names: + p = base / config_name + if p.is_file(): + ini_config = load_config_dict_from_file(p) + if ini_config is not None: + return base, p, ini_config + return None, None, {} + + +def get_common_ancestor(paths: Iterable[Path]) -> Path: + common_ancestor: Optional[Path] = None + for path in paths: + if not path.exists(): + continue + if common_ancestor is None: + common_ancestor = path + else: + if common_ancestor in path.parents or path == common_ancestor: + continue + elif path in common_ancestor.parents: + common_ancestor = path + else: + shared = commonpath(path, common_ancestor) + if shared is not None: + common_ancestor = shared + if common_ancestor is None: + common_ancestor = Path.cwd() + elif common_ancestor.is_file(): + common_ancestor = common_ancestor.parent + return common_ancestor + + +def get_dirs_from_args(args: Iterable[str]) -> List[Path]: + def is_option(x: str) -> bool: + return x.startswith("-") + + def get_file_part_from_node_id(x: str) -> str: + return x.split("::")[0] + + def get_dir_from_path(path: Path) -> Path: + if path.is_dir(): + return path + return path.parent + + def safe_exists(path: Path) -> bool: + # This can throw on paths that contain characters unrepresentable at the OS level, + # or with invalid syntax on Windows (https://bugs.python.org/issue35306) + try: + return path.exists() + except OSError: + return False + + # These look like paths but may not exist + possible_paths = ( + absolutepath(get_file_part_from_node_id(arg)) + for arg in args + if not is_option(arg) + ) + + return [get_dir_from_path(path) for path in possible_paths if safe_exists(path)] + + +CFG_PYTEST_SECTION = "[pytest] section in {filename} files is no longer supported, change to [tool:pytest] instead." 
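+# Illustrative sketch (not part of the upstream module): given the search
+# order in locate_config() above, a minimal pyproject.toml that pytest would
+# pick up as the configuration source looks like this; only the
+# [tool.pytest.ini_options] table counts, a bare [tool.pytest] is ignored:
+#
+#   [tool.pytest.ini_options]
+#   addopts = "-ra"
+#   testpaths = ["tests"]
+#
+# load_config_dict_from_file() would return these values with scalars cast to
+# str and the list kept as-is, per make_scalar() above.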
+ + +def determine_setup( + inifile: Optional[str], + args: Sequence[str], + rootdir_cmd_arg: Optional[str] = None, + config: Optional["Config"] = None, +) -> Tuple[Path, Optional[Path], Dict[str, Union[str, List[str]]]]: + rootdir = None + dirs = get_dirs_from_args(args) + if inifile: + inipath_ = absolutepath(inifile) + inipath: Optional[Path] = inipath_ + inicfg = load_config_dict_from_file(inipath_) or {} + if rootdir_cmd_arg is None: + rootdir = inipath_.parent + else: + ancestor = get_common_ancestor(dirs) + rootdir, inipath, inicfg = locate_config([ancestor]) + if rootdir is None and rootdir_cmd_arg is None: + for possible_rootdir in (ancestor, *ancestor.parents): + if (possible_rootdir / "setup.py").is_file(): + rootdir = possible_rootdir + break + else: + if dirs != [ancestor]: + rootdir, inipath, inicfg = locate_config(dirs) + if rootdir is None: + if config is not None: + cwd = config.invocation_params.dir + else: + cwd = Path.cwd() + rootdir = get_common_ancestor([cwd, ancestor]) + if is_fs_root(rootdir): + rootdir = ancestor + if rootdir_cmd_arg: + rootdir = absolutepath(os.path.expandvars(rootdir_cmd_arg)) + if not rootdir.is_dir(): + raise UsageError( + "Directory '{}' not found. Check your '--rootdir' option.".format( + rootdir + ) + ) + assert rootdir is not None + return rootdir, inipath, inicfg or {} + + +def is_fs_root(p: Path) -> bool: + r""" + Return True if the given path is pointing to the root of the + file system ("/" on Unix and "C:\\" on Windows for example). + """ + return os.path.splitdrive(str(p))[1] == os.sep diff --git a/venv/lib/python3.11/site-packages/_pytest/debugging.py b/venv/lib/python3.11/site-packages/_pytest/debugging.py new file mode 100644 index 0000000..a3f8080 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/debugging.py @@ -0,0 +1,391 @@ +"""Interactive debugging with PDB, the Python Debugger.""" +import argparse +import functools +import sys +import types +import unittest +from typing import Any +from typing import Callable +from typing import Generator +from typing import List +from typing import Optional +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import Union + +from _pytest import outcomes +from _pytest._code import ExceptionInfo +from _pytest.config import Config +from _pytest.config import ConftestImportFailure +from _pytest.config import hookimpl +from _pytest.config import PytestPluginManager +from _pytest.config.argparsing import Parser +from _pytest.config.exceptions import UsageError +from _pytest.nodes import Node +from _pytest.reports import BaseReport + +if TYPE_CHECKING: + from _pytest.capture import CaptureManager + from _pytest.runner import CallInfo + + +def _validate_usepdb_cls(value: str) -> Tuple[str, str]: + """Validate syntax of --pdbcls option.""" + try: + modname, classname = value.split(":") + except ValueError as e: + raise argparse.ArgumentTypeError( + f"{value!r} is not in the format 'modname:classname'" + ) from e + return (modname, classname) + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group._addoption( + "--pdb", + dest="usepdb", + action="store_true", + help="Start the interactive Python debugger on errors or KeyboardInterrupt", + ) + group._addoption( + "--pdbcls", + dest="usepdb_cls", + metavar="modulename:classname", + type=_validate_usepdb_cls, + help="Specify a custom interactive Python debugger for use with --pdb." 
+ "For example: --pdbcls=IPython.terminal.debugger:TerminalPdb", + ) + group._addoption( + "--trace", + dest="trace", + action="store_true", + help="Immediately break when running each test", + ) + + +def pytest_configure(config: Config) -> None: + import pdb + + if config.getvalue("trace"): + config.pluginmanager.register(PdbTrace(), "pdbtrace") + if config.getvalue("usepdb"): + config.pluginmanager.register(PdbInvoke(), "pdbinvoke") + + pytestPDB._saved.append( + (pdb.set_trace, pytestPDB._pluginmanager, pytestPDB._config) + ) + pdb.set_trace = pytestPDB.set_trace + pytestPDB._pluginmanager = config.pluginmanager + pytestPDB._config = config + + # NOTE: not using pytest_unconfigure, since it might get called although + # pytest_configure was not (if another plugin raises UsageError). + def fin() -> None: + ( + pdb.set_trace, + pytestPDB._pluginmanager, + pytestPDB._config, + ) = pytestPDB._saved.pop() + + config.add_cleanup(fin) + + +class pytestPDB: + """Pseudo PDB that defers to the real pdb.""" + + _pluginmanager: Optional[PytestPluginManager] = None + _config: Optional[Config] = None + _saved: List[ + Tuple[Callable[..., None], Optional[PytestPluginManager], Optional[Config]] + ] = [] + _recursive_debug = 0 + _wrapped_pdb_cls: Optional[Tuple[Type[Any], Type[Any]]] = None + + @classmethod + def _is_capturing(cls, capman: Optional["CaptureManager"]) -> Union[str, bool]: + if capman: + return capman.is_capturing() + return False + + @classmethod + def _import_pdb_cls(cls, capman: Optional["CaptureManager"]): + if not cls._config: + import pdb + + # Happens when using pytest.set_trace outside of a test. + return pdb.Pdb + + usepdb_cls = cls._config.getvalue("usepdb_cls") + + if cls._wrapped_pdb_cls and cls._wrapped_pdb_cls[0] == usepdb_cls: + return cls._wrapped_pdb_cls[1] + + if usepdb_cls: + modname, classname = usepdb_cls + + try: + __import__(modname) + mod = sys.modules[modname] + + # Handle --pdbcls=pdb:pdb.Pdb (useful e.g. with pdbpp). + parts = classname.split(".") + pdb_cls = getattr(mod, parts[0]) + for part in parts[1:]: + pdb_cls = getattr(pdb_cls, part) + except Exception as exc: + value = ":".join((modname, classname)) + raise UsageError( + f"--pdbcls: could not import {value!r}: {exc}" + ) from exc + else: + import pdb + + pdb_cls = pdb.Pdb + + wrapped_cls = cls._get_pdb_wrapper_class(pdb_cls, capman) + cls._wrapped_pdb_cls = (usepdb_cls, wrapped_cls) + return wrapped_cls + + @classmethod + def _get_pdb_wrapper_class(cls, pdb_cls, capman: Optional["CaptureManager"]): + import _pytest.config + + # Type ignored because mypy doesn't support "dynamic" + # inheritance like this. 
+        class PytestPdbWrapper(pdb_cls):  # type: ignore[valid-type,misc]
+            _pytest_capman = capman
+            _continued = False
+
+            def do_debug(self, arg):
+                cls._recursive_debug += 1
+                ret = super().do_debug(arg)
+                cls._recursive_debug -= 1
+                return ret
+
+            def do_continue(self, arg):
+                ret = super().do_continue(arg)
+                if cls._recursive_debug == 0:
+                    assert cls._config is not None
+                    tw = _pytest.config.create_terminal_writer(cls._config)
+                    tw.line()
+
+                    capman = self._pytest_capman
+                    capturing = pytestPDB._is_capturing(capman)
+                    if capturing:
+                        if capturing == "global":
+                            tw.sep(">", "PDB continue (IO-capturing resumed)")
+                        else:
+                            tw.sep(
+                                ">",
+                                "PDB continue (IO-capturing resumed for %s)"
+                                % capturing,
+                            )
+                        assert capman is not None
+                        capman.resume()
+                    else:
+                        tw.sep(">", "PDB continue")
+                assert cls._pluginmanager is not None
+                cls._pluginmanager.hook.pytest_leave_pdb(config=cls._config, pdb=self)
+                self._continued = True
+                return ret
+
+            do_c = do_cont = do_continue
+
+            def do_quit(self, arg):
+                """Raise Exit outcome when the quit command is used in pdb.
+
+                This is a bit of a hack: it would be better if BdbQuit could
+                be handled, but that would require wrapping the whole pytest
+                run and adjusting the report, etc.
+                """
+                ret = super().do_quit(arg)
+
+                if cls._recursive_debug == 0:
+                    outcomes.exit("Quitting debugger")
+
+                return ret
+
+            do_q = do_quit
+            do_exit = do_quit
+
+            def setup(self, f, tb):
+                """Suspend capturing on setup().
+
+                Needed after do_continue resumed and another breakpoint is
+                entered again.
+                """
+                ret = super().setup(f, tb)
+                if not ret and self._continued:
+                    # pdb.setup() returns True if the command wants to exit
+                    # from the interaction: do not suspend capturing then.
+                    if self._pytest_capman:
+                        self._pytest_capman.suspend_global_capture(in_=True)
+                return ret
+
+            def get_stack(self, f, t):
+                stack, i = super().get_stack(f, t)
+                if f is None:
+                    # Find last non-hidden frame.
+                    i = max(0, len(stack) - 1)
+                    while i and stack[i][0].f_locals.get("__tracebackhide__", False):
+                        i -= 1
+                return stack, i
+
+        return PytestPdbWrapper
+
+    @classmethod
+    def _init_pdb(cls, method, *args, **kwargs):
+        """Initialize PDB debugging, dropping any IO capturing."""
+        import _pytest.config
+
+        if cls._pluginmanager is None:
+            capman: Optional[CaptureManager] = None
+        else:
+            capman = cls._pluginmanager.getplugin("capturemanager")
+        if capman:
+            capman.suspend(in_=True)
+
+        if cls._config:
+            tw = _pytest.config.create_terminal_writer(cls._config)
+            tw.line()
+
+            if cls._recursive_debug == 0:
+                # Handle header similar to pdb.set_trace in py37+.
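+                # For example (a sketch): calling pdb.set_trace(header="checkpoint")
+                # under a pytest run reaches this code via pytestPDB.set_trace;
+                # the header is popped from **kwargs and printed by
+                # tw.sep(">", header) below instead of the capture-status banner.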
+                header = kwargs.pop("header", None)
+                if header is not None:
+                    tw.sep(">", header)
+                else:
+                    capturing = cls._is_capturing(capman)
+                    if capturing == "global":
+                        tw.sep(">", f"PDB {method} (IO-capturing turned off)")
+                    elif capturing:
+                        tw.sep(
+                            ">",
+                            "PDB %s (IO-capturing turned off for %s)"
+                            % (method, capturing),
+                        )
+                    else:
+                        tw.sep(">", f"PDB {method}")
+
+        _pdb = cls._import_pdb_cls(capman)(**kwargs)
+
+        if cls._pluginmanager:
+            cls._pluginmanager.hook.pytest_enter_pdb(config=cls._config, pdb=_pdb)
+        return _pdb
+
+    @classmethod
+    def set_trace(cls, *args, **kwargs) -> None:
+        """Invoke debugging via ``Pdb.set_trace``, dropping any IO capturing."""
+        frame = sys._getframe().f_back
+        _pdb = cls._init_pdb("set_trace", *args, **kwargs)
+        _pdb.set_trace(frame)
+
+
+class PdbInvoke:
+    def pytest_exception_interact(
+        self, node: Node, call: "CallInfo[Any]", report: BaseReport
+    ) -> None:
+        capman = node.config.pluginmanager.getplugin("capturemanager")
+        if capman:
+            capman.suspend_global_capture(in_=True)
+            out, err = capman.read_global_capture()
+            sys.stdout.write(out)
+            sys.stdout.write(err)
+        assert call.excinfo is not None
+
+        if not isinstance(call.excinfo.value, unittest.SkipTest):
+            _enter_pdb(node, call.excinfo, report)
+
+    def pytest_internalerror(self, excinfo: ExceptionInfo[BaseException]) -> None:
+        tb = _postmortem_traceback(excinfo)
+        post_mortem(tb)
+
+
+class PdbTrace:
+    @hookimpl(hookwrapper=True)
+    def pytest_pyfunc_call(self, pyfuncitem) -> Generator[None, None, None]:
+        wrap_pytest_function_for_tracing(pyfuncitem)
+        yield
+
+
+def wrap_pytest_function_for_tracing(pyfuncitem):
+    """Replace the Python function object of the given Function item with a
+    wrapper which actually enters pdb before calling the python function
+    itself, effectively leaving the user in the pdb prompt at the first
+    statement of the function."""
+    _pdb = pytestPDB._init_pdb("runcall")
+    testfunction = pyfuncitem.obj
+
+    # we can't just return `partial(pdb.runcall, testfunction)` because (on
+    # python < 3.7.4) runcall's first param is `func`, which means we'd get
+    # an exception if one of the kwargs to testfunction was called `func`.
+    @functools.wraps(testfunction)
+    def wrapper(*args, **kwargs):
+        func = functools.partial(testfunction, *args, **kwargs)
+        _pdb.runcall(func)
+
+    pyfuncitem.obj = wrapper
+
+
+def maybe_wrap_pytest_function_for_tracing(pyfuncitem):
+    """Wrap the given pytest Function item for tracing support if --trace was
+    given on the command line."""
+    if pyfuncitem.config.getvalue("trace"):
+        wrap_pytest_function_for_tracing(pyfuncitem)
+
+
+def _enter_pdb(
+    node: Node, excinfo: ExceptionInfo[BaseException], rep: BaseReport
+) -> BaseReport:
+    # XXX we re-use the TerminalReporter's terminalwriter
+    # because this seems to avoid some encoding related troubles
+    # for not completely clear reasons.
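+    # Note (a sketch of the flow): node.config.option.showcapture holds the
+    # value of pytest's --show-capture option (one of "no", "stdout",
+    # "stderr", "log", "all"); the loop below replays the selected captured
+    # sections before handing control to the debugger.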
+    tw = node.config.pluginmanager.getplugin("terminalreporter")._tw
+    tw.line()
+
+    showcapture = node.config.option.showcapture
+
+    for sectionname, content in (
+        ("stdout", rep.capstdout),
+        ("stderr", rep.capstderr),
+        ("log", rep.caplog),
+    ):
+        if showcapture in (sectionname, "all") and content:
+            tw.sep(">", "captured " + sectionname)
+            if content[-1:] == "\n":
+                content = content[:-1]
+            tw.line(content)
+
+    tw.sep(">", "traceback")
+    rep.toterminal(tw)
+    tw.sep(">", "entering PDB")
+    tb = _postmortem_traceback(excinfo)
+    rep._pdbshown = True  # type: ignore[attr-defined]
+    post_mortem(tb)
+    return rep
+
+
+def _postmortem_traceback(excinfo: ExceptionInfo[BaseException]) -> types.TracebackType:
+    from doctest import UnexpectedException
+
+    if isinstance(excinfo.value, UnexpectedException):
+        # A doctest.UnexpectedException is not useful for post_mortem.
+        # Use the underlying exception instead:
+        return excinfo.value.exc_info[2]
+    elif isinstance(excinfo.value, ConftestImportFailure):
+        # A config.ConftestImportFailure is not useful for post_mortem.
+        # Use the underlying exception instead:
+        return excinfo.value.excinfo[2]
+    else:
+        assert excinfo._excinfo is not None
+        return excinfo._excinfo[2]
+
+
+def post_mortem(t: types.TracebackType) -> None:
+    p = pytestPDB._init_pdb("post_mortem")
+    p.reset()
+    p.interaction(None, t)
+    if p.quitting:
+        outcomes.exit("Quitting debugger")
diff --git a/venv/lib/python3.11/site-packages/_pytest/deprecated.py b/venv/lib/python3.11/site-packages/_pytest/deprecated.py
new file mode 100644
index 0000000..b9c10df
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/_pytest/deprecated.py
@@ -0,0 +1,146 @@
+"""Deprecation messages and bits of code used elsewhere in the codebase that
+are planned to be removed in the next pytest release.
+
+Keeping it in a central location makes it easy to track what is deprecated and should
+be removed when the time comes.
+
+All constants defined in this module should be either instances of
+:class:`PytestWarning`, or :class:`UnformattedWarning`
+in case of warnings which need to format their messages.
+"""
+from warnings import warn
+
+from _pytest.warning_types import PytestDeprecationWarning
+from _pytest.warning_types import PytestRemovedIn8Warning
+from _pytest.warning_types import UnformattedWarning
+
+# set of plugins which have been integrated into the core; we use this list to ignore
+# them during registration to avoid conflicts
+DEPRECATED_EXTERNAL_PLUGINS = {
+    "pytest_catchlog",
+    "pytest_capturelog",
+    "pytest_faulthandler",
+}
+
+NOSE_SUPPORT = UnformattedWarning(
+    PytestRemovedIn8Warning,
+    "Support for nose tests is deprecated and will be removed in a future release.\n"
+    "{nodeid} is using nose method: `{method}` ({stage})\n"
+    "See docs: https://docs.pytest.org/en/stable/deprecations.html#support-for-tests-written-for-nose",
+)
+
+NOSE_SUPPORT_METHOD = UnformattedWarning(
+    PytestRemovedIn8Warning,
+    "Support for nose tests is deprecated and will be removed in a future release.\n"
+    "{nodeid} is using nose-specific method: `{method}(self)`\n"
+    "To remove this warning, rename it to `{method}_method(self)`\n"
+    "See docs: https://docs.pytest.org/en/stable/deprecations.html#support-for-tests-written-for-nose",
+)
+
+
+# This can be* removed in pytest 8, but it's harmless and common, so no rush to remove.
+# * If you're in the future: "could have been".
+YIELD_FIXTURE = PytestDeprecationWarning(
+    "@pytest.yield_fixture is deprecated.\n"
+    "Use @pytest.fixture instead; they are the same."
+) + +WARNING_CMDLINE_PREPARSE_HOOK = PytestRemovedIn8Warning( + "The pytest_cmdline_preparse hook is deprecated and will be removed in a future release. \n" + "Please use pytest_load_initial_conftests hook instead." +) + +FSCOLLECTOR_GETHOOKPROXY_ISINITPATH = PytestRemovedIn8Warning( + "The gethookproxy() and isinitpath() methods of FSCollector and Package are deprecated; " + "use self.session.gethookproxy() and self.session.isinitpath() instead. " +) + +STRICT_OPTION = PytestRemovedIn8Warning( + "The --strict option is deprecated, use --strict-markers instead." +) + +# This deprecation is never really meant to be removed. +PRIVATE = PytestDeprecationWarning("A private pytest class or function was used.") + +ARGUMENT_PERCENT_DEFAULT = PytestRemovedIn8Warning( + 'pytest now uses argparse. "%default" should be changed to "%(default)s"', +) + +ARGUMENT_TYPE_STR_CHOICE = UnformattedWarning( + PytestRemovedIn8Warning, + "`type` argument to addoption() is the string {typ!r}." + " For choices this is optional and can be omitted, " + " but when supplied should be a type (for example `str` or `int`)." + " (options: {names})", +) + +ARGUMENT_TYPE_STR = UnformattedWarning( + PytestRemovedIn8Warning, + "`type` argument to addoption() is the string {typ!r}, " + " but when supplied should be a type (for example `str` or `int`)." + " (options: {names})", +) + + +HOOK_LEGACY_PATH_ARG = UnformattedWarning( + PytestRemovedIn8Warning, + "The ({pylib_path_arg}: py.path.local) argument is deprecated, please use ({pathlib_path_arg}: pathlib.Path)\n" + "see https://docs.pytest.org/en/latest/deprecations.html" + "#py-path-local-arguments-for-hooks-replaced-with-pathlib-path", +) + +NODE_CTOR_FSPATH_ARG = UnformattedWarning( + PytestRemovedIn8Warning, + "The (fspath: py.path.local) argument to {node_type_name} is deprecated. " + "Please use the (path: pathlib.Path) argument instead.\n" + "See https://docs.pytest.org/en/latest/deprecations.html" + "#fspath-argument-for-node-constructors-replaced-with-pathlib-path", +) + +WARNS_NONE_ARG = PytestRemovedIn8Warning( + "Passing None has been deprecated.\n" + "See https://docs.pytest.org/en/latest/how-to/capture-warnings.html" + "#additional-use-cases-of-warnings-in-tests" + " for alternatives in common use cases." +) + +KEYWORD_MSG_ARG = UnformattedWarning( + PytestRemovedIn8Warning, + "pytest.{func}(msg=...) is now deprecated, use pytest.{func}(reason=...) instead", +) + +INSTANCE_COLLECTOR = PytestRemovedIn8Warning( + "The pytest.Instance collector type is deprecated and is no longer used. " + "See https://docs.pytest.org/en/latest/deprecations.html#the-pytest-instance-collector", +) +HOOK_LEGACY_MARKING = UnformattedWarning( + PytestDeprecationWarning, + "The hook{type} {fullname} uses old-style configuration options (marks or attributes).\n" + "Please use the pytest.hook{type}({hook_opts}) decorator instead\n" + " to configure the hooks.\n" + " See https://docs.pytest.org/en/latest/deprecations.html" + "#configuring-hook-specs-impls-using-markers", +) + +# You want to make some `__init__` or function "private". +# +# def my_private_function(some, args): +# ... +# +# Do this: +# +# def my_private_function(some, args, *, _ispytest: bool = False): +# check_ispytest(_ispytest) +# ... +# +# Change all internal/allowed calls to +# +# my_private_function(some, args, _ispytest=True) +# +# All other calls will get the default _ispytest=False and trigger +# the warning (possibly error in the future). 
+ + +def check_ispytest(ispytest: bool) -> None: + if not ispytest: + warn(PRIVATE, stacklevel=3) diff --git a/venv/lib/python3.11/site-packages/_pytest/doctest.py b/venv/lib/python3.11/site-packages/_pytest/doctest.py new file mode 100644 index 0000000..455ad62 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/doctest.py @@ -0,0 +1,751 @@ +"""Discover and run doctests in modules and test files.""" +import bdb +import inspect +import os +import platform +import sys +import traceback +import types +import warnings +from contextlib import contextmanager +from pathlib import Path +from typing import Any +from typing import Callable +from typing import Dict +from typing import Generator +from typing import Iterable +from typing import List +from typing import Optional +from typing import Pattern +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import Union + +from _pytest import outcomes +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import ReprFileLocation +from _pytest._code.code import TerminalRepr +from _pytest._io import TerminalWriter +from _pytest.compat import safe_getattr +from _pytest.config import Config +from _pytest.config.argparsing import Parser +from _pytest.fixtures import fixture +from _pytest.fixtures import FixtureRequest +from _pytest.nodes import Collector +from _pytest.nodes import Item +from _pytest.outcomes import OutcomeException +from _pytest.outcomes import skip +from _pytest.pathlib import fnmatch_ex +from _pytest.pathlib import import_path +from _pytest.python import Module +from _pytest.python_api import approx +from _pytest.warning_types import PytestWarning + +if TYPE_CHECKING: + import doctest + +DOCTEST_REPORT_CHOICE_NONE = "none" +DOCTEST_REPORT_CHOICE_CDIFF = "cdiff" +DOCTEST_REPORT_CHOICE_NDIFF = "ndiff" +DOCTEST_REPORT_CHOICE_UDIFF = "udiff" +DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE = "only_first_failure" + +DOCTEST_REPORT_CHOICES = ( + DOCTEST_REPORT_CHOICE_NONE, + DOCTEST_REPORT_CHOICE_CDIFF, + DOCTEST_REPORT_CHOICE_NDIFF, + DOCTEST_REPORT_CHOICE_UDIFF, + DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE, +) + +# Lazy definition of runner class +RUNNER_CLASS = None +# Lazy definition of output checker class +CHECKER_CLASS: Optional[Type["doctest.OutputChecker"]] = None + + +def pytest_addoption(parser: Parser) -> None: + parser.addini( + "doctest_optionflags", + "Option flags for doctests", + type="args", + default=["ELLIPSIS"], + ) + parser.addini( + "doctest_encoding", "Encoding used for doctest files", default="utf-8" + ) + group = parser.getgroup("collect") + group.addoption( + "--doctest-modules", + action="store_true", + default=False, + help="Run doctests in all .py modules", + dest="doctestmodules", + ) + group.addoption( + "--doctest-report", + type=str.lower, + default="udiff", + help="Choose another output format for diffs on doctest failure", + choices=DOCTEST_REPORT_CHOICES, + dest="doctestreport", + ) + group.addoption( + "--doctest-glob", + action="append", + default=[], + metavar="pat", + help="Doctests file matching pattern, default: test*.txt", + dest="doctestglob", + ) + group.addoption( + "--doctest-ignore-import-errors", + action="store_true", + default=False, + help="Ignore doctest ImportErrors", + dest="doctest_ignore_import_errors", + ) + group.addoption( + "--doctest-continue-on-failure", + action="store_true", + default=False, + help="For a given doctest, continue to run after the first failure", + 
dest="doctest_continue_on_failure", + ) + + +def pytest_unconfigure() -> None: + global RUNNER_CLASS + + RUNNER_CLASS = None + + +def pytest_collect_file( + file_path: Path, + parent: Collector, +) -> Optional[Union["DoctestModule", "DoctestTextfile"]]: + config = parent.config + if file_path.suffix == ".py": + if config.option.doctestmodules and not any( + (_is_setup_py(file_path), _is_main_py(file_path)) + ): + mod: DoctestModule = DoctestModule.from_parent(parent, path=file_path) + return mod + elif _is_doctest(config, file_path, parent): + txt: DoctestTextfile = DoctestTextfile.from_parent(parent, path=file_path) + return txt + return None + + +def _is_setup_py(path: Path) -> bool: + if path.name != "setup.py": + return False + contents = path.read_bytes() + return b"setuptools" in contents or b"distutils" in contents + + +def _is_doctest(config: Config, path: Path, parent: Collector) -> bool: + if path.suffix in (".txt", ".rst") and parent.session.isinitpath(path): + return True + globs = config.getoption("doctestglob") or ["test*.txt"] + return any(fnmatch_ex(glob, path) for glob in globs) + + +def _is_main_py(path: Path) -> bool: + return path.name == "__main__.py" + + +class ReprFailDoctest(TerminalRepr): + def __init__( + self, reprlocation_lines: Sequence[Tuple[ReprFileLocation, Sequence[str]]] + ) -> None: + self.reprlocation_lines = reprlocation_lines + + def toterminal(self, tw: TerminalWriter) -> None: + for reprlocation, lines in self.reprlocation_lines: + for line in lines: + tw.line(line) + reprlocation.toterminal(tw) + + +class MultipleDoctestFailures(Exception): + def __init__(self, failures: Sequence["doctest.DocTestFailure"]) -> None: + super().__init__() + self.failures = failures + + +def _init_runner_class() -> Type["doctest.DocTestRunner"]: + import doctest + + class PytestDoctestRunner(doctest.DebugRunner): + """Runner to collect failures. + + Note that the out variable in this case is a list instead of a + stdout-like object. 
+ """ + + def __init__( + self, + checker: Optional["doctest.OutputChecker"] = None, + verbose: Optional[bool] = None, + optionflags: int = 0, + continue_on_failure: bool = True, + ) -> None: + super().__init__(checker=checker, verbose=verbose, optionflags=optionflags) + self.continue_on_failure = continue_on_failure + + def report_failure( + self, + out, + test: "doctest.DocTest", + example: "doctest.Example", + got: str, + ) -> None: + failure = doctest.DocTestFailure(test, example, got) + if self.continue_on_failure: + out.append(failure) + else: + raise failure + + def report_unexpected_exception( + self, + out, + test: "doctest.DocTest", + example: "doctest.Example", + exc_info: Tuple[Type[BaseException], BaseException, types.TracebackType], + ) -> None: + if isinstance(exc_info[1], OutcomeException): + raise exc_info[1] + if isinstance(exc_info[1], bdb.BdbQuit): + outcomes.exit("Quitting debugger") + failure = doctest.UnexpectedException(test, example, exc_info) + if self.continue_on_failure: + out.append(failure) + else: + raise failure + + return PytestDoctestRunner + + +def _get_runner( + checker: Optional["doctest.OutputChecker"] = None, + verbose: Optional[bool] = None, + optionflags: int = 0, + continue_on_failure: bool = True, +) -> "doctest.DocTestRunner": + # We need this in order to do a lazy import on doctest + global RUNNER_CLASS + if RUNNER_CLASS is None: + RUNNER_CLASS = _init_runner_class() + # Type ignored because the continue_on_failure argument is only defined on + # PytestDoctestRunner, which is lazily defined so can't be used as a type. + return RUNNER_CLASS( # type: ignore + checker=checker, + verbose=verbose, + optionflags=optionflags, + continue_on_failure=continue_on_failure, + ) + + +class DoctestItem(Item): + def __init__( + self, + name: str, + parent: "Union[DoctestTextfile, DoctestModule]", + runner: Optional["doctest.DocTestRunner"] = None, + dtest: Optional["doctest.DocTest"] = None, + ) -> None: + super().__init__(name, parent) + self.runner = runner + self.dtest = dtest + self.obj = None + self.fixture_request: Optional[FixtureRequest] = None + + @classmethod + def from_parent( # type: ignore + cls, + parent: "Union[DoctestTextfile, DoctestModule]", + *, + name: str, + runner: "doctest.DocTestRunner", + dtest: "doctest.DocTest", + ): + # incompatible signature due to imposed limits on subclass + """The public named constructor.""" + return super().from_parent(name=name, parent=parent, runner=runner, dtest=dtest) + + def setup(self) -> None: + if self.dtest is not None: + self.fixture_request = _setup_fixtures(self) + globs = dict(getfixture=self.fixture_request.getfixturevalue) + for name, value in self.fixture_request.getfixturevalue( + "doctest_namespace" + ).items(): + globs[name] = value + self.dtest.globs.update(globs) + + def runtest(self) -> None: + assert self.dtest is not None + assert self.runner is not None + _check_all_skipped(self.dtest) + self._disable_output_capturing_for_darwin() + failures: List["doctest.DocTestFailure"] = [] + # Type ignored because we change the type of `out` from what + # doctest expects. + self.runner.run(self.dtest, out=failures) # type: ignore[arg-type] + if failures: + raise MultipleDoctestFailures(failures) + + def _disable_output_capturing_for_darwin(self) -> None: + """Disable output capturing. 
Otherwise, stdout is lost to doctest (#985).""" + if platform.system() != "Darwin": + return + capman = self.config.pluginmanager.getplugin("capturemanager") + if capman: + capman.suspend_global_capture(in_=True) + out, err = capman.read_global_capture() + sys.stdout.write(out) + sys.stderr.write(err) + + # TODO: Type ignored -- breaks Liskov Substitution. + def repr_failure( # type: ignore[override] + self, + excinfo: ExceptionInfo[BaseException], + ) -> Union[str, TerminalRepr]: + import doctest + + failures: Optional[ + Sequence[Union[doctest.DocTestFailure, doctest.UnexpectedException]] + ] = None + if isinstance( + excinfo.value, (doctest.DocTestFailure, doctest.UnexpectedException) + ): + failures = [excinfo.value] + elif isinstance(excinfo.value, MultipleDoctestFailures): + failures = excinfo.value.failures + + if failures is None: + return super().repr_failure(excinfo) + + reprlocation_lines = [] + for failure in failures: + example = failure.example + test = failure.test + filename = test.filename + if test.lineno is None: + lineno = None + else: + lineno = test.lineno + example.lineno + 1 + message = type(failure).__name__ + # TODO: ReprFileLocation doesn't expect a None lineno. + reprlocation = ReprFileLocation(filename, lineno, message) # type: ignore[arg-type] + checker = _get_checker() + report_choice = _get_report_choice(self.config.getoption("doctestreport")) + if lineno is not None: + assert failure.test.docstring is not None + lines = failure.test.docstring.splitlines(False) + # add line numbers to the left of the error message + assert test.lineno is not None + lines = [ + "%03d %s" % (i + test.lineno + 1, x) for (i, x) in enumerate(lines) + ] + # trim docstring error lines to 10 + lines = lines[max(example.lineno - 9, 0) : example.lineno + 1] + else: + lines = [ + "EXAMPLE LOCATION UNKNOWN, not showing all tests of that example" + ] + indent = ">>>" + for line in example.source.splitlines(): + lines.append(f"??? {indent} {line}") + indent = "..." 
+ if isinstance(failure, doctest.DocTestFailure): + lines += checker.output_difference( + example, failure.got, report_choice + ).split("\n") + else: + inner_excinfo = ExceptionInfo.from_exc_info(failure.exc_info) + lines += ["UNEXPECTED EXCEPTION: %s" % repr(inner_excinfo.value)] + lines += [ + x.strip("\n") for x in traceback.format_exception(*failure.exc_info) + ] + reprlocation_lines.append((reprlocation, lines)) + return ReprFailDoctest(reprlocation_lines) + + def reportinfo(self) -> Tuple[Union["os.PathLike[str]", str], Optional[int], str]: + assert self.dtest is not None + return self.path, self.dtest.lineno, "[doctest] %s" % self.name + + +def _get_flag_lookup() -> Dict[str, int]: + import doctest + + return dict( + DONT_ACCEPT_TRUE_FOR_1=doctest.DONT_ACCEPT_TRUE_FOR_1, + DONT_ACCEPT_BLANKLINE=doctest.DONT_ACCEPT_BLANKLINE, + NORMALIZE_WHITESPACE=doctest.NORMALIZE_WHITESPACE, + ELLIPSIS=doctest.ELLIPSIS, + IGNORE_EXCEPTION_DETAIL=doctest.IGNORE_EXCEPTION_DETAIL, + COMPARISON_FLAGS=doctest.COMPARISON_FLAGS, + ALLOW_UNICODE=_get_allow_unicode_flag(), + ALLOW_BYTES=_get_allow_bytes_flag(), + NUMBER=_get_number_flag(), + ) + + +def get_optionflags(parent): + optionflags_str = parent.config.getini("doctest_optionflags") + flag_lookup_table = _get_flag_lookup() + flag_acc = 0 + for flag in optionflags_str: + flag_acc |= flag_lookup_table[flag] + return flag_acc + + +def _get_continue_on_failure(config): + continue_on_failure = config.getvalue("doctest_continue_on_failure") + if continue_on_failure: + # We need to turn off this if we use pdb since we should stop at + # the first failure. + if config.getvalue("usepdb"): + continue_on_failure = False + return continue_on_failure + + +class DoctestTextfile(Module): + obj = None + + def collect(self) -> Iterable[DoctestItem]: + import doctest + + # Inspired by doctest.testfile; ideally we would use it directly, + # but it doesn't support passing a custom checker. 
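+        # Illustrative sketch (not from upstream): a file collected here, e.g.
+        # one matching the default --doctest-glob pattern "test*.txt", holds
+        # plain doctest examples such as:
+        #
+        #   >>> 1 + 1
+        #   2
+        #
+        # The whole file is parsed into a single DocTest, so at most one
+        # DoctestItem is yielded below.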
+        encoding = self.config.getini("doctest_encoding")
+        text = self.path.read_text(encoding)
+        filename = str(self.path)
+        name = self.path.name
+        globs = {"__name__": "__main__"}
+
+        optionflags = get_optionflags(self)
+
+        runner = _get_runner(
+            verbose=False,
+            optionflags=optionflags,
+            checker=_get_checker(),
+            continue_on_failure=_get_continue_on_failure(self.config),
+        )
+
+        parser = doctest.DocTestParser()
+        test = parser.get_doctest(text, globs, name, filename, 0)
+        if test.examples:
+            yield DoctestItem.from_parent(
+                self, name=test.name, runner=runner, dtest=test
+            )
+
+
+def _check_all_skipped(test: "doctest.DocTest") -> None:
+    """Raise pytest.skip() if all examples in the given DocTest have the SKIP
+    option set."""
+    import doctest
+
+    all_skipped = all(x.options.get(doctest.SKIP, False) for x in test.examples)
+    if all_skipped:
+        skip("all tests skipped by +SKIP option")
+
+
+def _is_mocked(obj: object) -> bool:
+    """Return whether an object is possibly a mock object, by checking the
+    existence of a highly improbable attribute."""
+    return (
+        safe_getattr(obj, "pytest_mock_example_attribute_that_shouldnt_exist", None)
+        is not None
+    )
+
+
+@contextmanager
+def _patch_unwrap_mock_aware() -> Generator[None, None, None]:
+    """Context manager which replaces ``inspect.unwrap`` with a version
+    that's aware of mock objects and doesn't recurse into them."""
+    real_unwrap = inspect.unwrap
+
+    def _mock_aware_unwrap(
+        func: Callable[..., Any], *, stop: Optional[Callable[[Any], Any]] = None
+    ) -> Any:
+        try:
+            if stop is None or stop is _is_mocked:
+                return real_unwrap(func, stop=_is_mocked)
+            _stop = stop
+            return real_unwrap(func, stop=lambda obj: _is_mocked(obj) or _stop(func))
+        except Exception as e:
+            warnings.warn(
+                "Got %r when unwrapping %r. This is usually caused "
+                "by a violation of Python's object protocol; see e.g. "
+                "https://github.com/pytest-dev/pytest/issues/5080" % (e, func),
+                PytestWarning,
+            )
+            raise
+
+    inspect.unwrap = _mock_aware_unwrap
+    try:
+        yield
+    finally:
+        inspect.unwrap = real_unwrap
+
+
+class DoctestModule(Module):
+    def collect(self) -> Iterable[DoctestItem]:
+        import doctest
+
+        class MockAwareDocTestFinder(doctest.DocTestFinder):
+            """A hackish doctest finder that overrides stdlib internals to fix a stdlib bug.
+
+            https://github.com/pytest-dev/pytest/issues/3456
+            https://bugs.python.org/issue25532
+            """
+
+            def _find_lineno(self, obj, source_lines):
+                """Doctest code does not take `@property` into account; this
+                is a hackish way to fix it. https://bugs.python.org/issue17446
+
+                Wrapped Doctests will need to be unwrapped so the correct
+                line number is returned. This will be reported upstream. #8796
+                """
+                if isinstance(obj, property):
+                    obj = getattr(obj, "fget", obj)
+
+                if hasattr(obj, "__wrapped__"):
+                    # Get the main obj in case of it being wrapped
+                    obj = inspect.unwrap(obj)
+
+                # Type ignored because this is a private function.
+                return super()._find_lineno(  # type:ignore[misc]
+                    obj,
+                    source_lines,
+                )
+
+            def _find(
+                self, tests, obj, name, module, source_lines, globs, seen
+            ) -> None:
+                if _is_mocked(obj):
+                    return
+                with _patch_unwrap_mock_aware():
+                    # Type ignored because this is a private function.
+                    super()._find(  # type:ignore[misc]
+                        tests, obj, name, module, source_lines, globs, seen
+                    )
+
+        if self.path.name == "conftest.py":
+            module = self.config.pluginmanager._importconftest(
+                self.path,
+                self.config.getoption("importmode"),
+                rootpath=self.config.rootpath,
+            )
+        else:
+            try:
+                module = import_path(
+                    self.path,
+                    root=self.config.rootpath,
+                    mode=self.config.getoption("importmode"),
+                )
+            except ImportError:
+                if self.config.getvalue("doctest_ignore_import_errors"):
+                    skip("unable to import module %r" % self.path)
+                else:
+                    raise
+        # Uses internal doctest module parsing mechanism.
+        finder = MockAwareDocTestFinder()
+        optionflags = get_optionflags(self)
+        runner = _get_runner(
+            verbose=False,
+            optionflags=optionflags,
+            checker=_get_checker(),
+            continue_on_failure=_get_continue_on_failure(self.config),
+        )
+
+        for test in finder.find(module, module.__name__):
+            if test.examples:  # skip empty doctests
+                yield DoctestItem.from_parent(
+                    self, name=test.name, runner=runner, dtest=test
+                )
+
+
+def _setup_fixtures(doctest_item: DoctestItem) -> FixtureRequest:
+    """Used by DoctestTextfile and DoctestItem to set up fixture information."""
+
+    def func() -> None:
+        pass
+
+    doctest_item.funcargs = {}  # type: ignore[attr-defined]
+    fm = doctest_item.session._fixturemanager
+    doctest_item._fixtureinfo = fm.getfixtureinfo(  # type: ignore[attr-defined]
+        node=doctest_item, func=func, cls=None, funcargs=False
+    )
+    fixture_request = FixtureRequest(doctest_item, _ispytest=True)
+    fixture_request._fillfixtures()
+    return fixture_request
+
+
+def _init_checker_class() -> Type["doctest.OutputChecker"]:
+    import doctest
+    import re
+
+    class LiteralsOutputChecker(doctest.OutputChecker):
+        # Based on doctest_nose_plugin.py from the nltk project
+        # (https://github.com/nltk/nltk) and on the "numtest" doctest extension
+        # by Sebastien Boisgerault (https://github.com/boisgera/numtest).
+
+        _unicode_literal_re = re.compile(r"(\W|^)[uU]([rR]?[\'\"])", re.UNICODE)
+        _bytes_literal_re = re.compile(r"(\W|^)[bB]([rR]?[\'\"])", re.UNICODE)
+        _number_re = re.compile(
+            r"""
+            (?P<number>
+                (?P<mantissa>
+                    (?P<integer1> [+-]?\d*)\.(?P<fraction>\d+)
+                    |
+                    (?P<integer2> [+-]?\d+)\.
+                )
+                (?:
+                    [Ee]
+                    (?P<exponent1> [+-]?\d+)
+                )?
+            |
+                (?P<integer3> [+-]?\d+)
+                (?:
+                    [Ee]
+                    (?P<exponent2> [+-]?\d+)
+                )
+            )
+            """,
+            re.VERBOSE,
+        )
+
+        def check_output(self, want: str, got: str, optionflags: int) -> bool:
+            if super().check_output(want, got, optionflags):
+                return True
+
+            allow_unicode = optionflags & _get_allow_unicode_flag()
+            allow_bytes = optionflags & _get_allow_bytes_flag()
+            allow_number = optionflags & _get_number_flag()
+
+            if not allow_unicode and not allow_bytes and not allow_number:
+                return False
+
+            def remove_prefixes(regex: Pattern[str], txt: str) -> str:
+                return re.sub(regex, r"\1\2", txt)
+
+            if allow_unicode:
+                want = remove_prefixes(self._unicode_literal_re, want)
+                got = remove_prefixes(self._unicode_literal_re, got)
+
+            if allow_bytes:
+                want = remove_prefixes(self._bytes_literal_re, want)
+                got = remove_prefixes(self._bytes_literal_re, got)
+
+            if allow_number:
+                got = self._remove_unwanted_precision(want, got)
+
+            return super().check_output(want, got, optionflags)
+
+        def _remove_unwanted_precision(self, want: str, got: str) -> str:
+            wants = list(self._number_re.finditer(want))
+            gots = list(self._number_re.finditer(got))
+            if len(wants) != len(gots):
+                return got
+            offset = 0
+            for w, g in zip(wants, gots):
+                fraction: Optional[str] = w.group("fraction")
+                exponent: Optional[str] = w.group("exponent1")
+                if exponent is None:
+                    exponent = w.group("exponent2")
+                precision = 0 if fraction is None else len(fraction)
+                if exponent is not None:
+                    precision -= int(exponent)
+                if float(w.group()) == approx(float(g.group()), abs=10**-precision):
+                    # They're close enough. Replace the text we actually
+                    # got with the text we want, so that it will match when we
+                    # check the string literally.
+                    got = (
+                        got[: g.start() + offset] + w.group() + got[g.end() + offset :]
+                    )
+                    offset += w.end() - w.start() - (g.end() - g.start())
+            return got
+
+    return LiteralsOutputChecker
+
+
+def _get_checker() -> "doctest.OutputChecker":
+    """Return a doctest.OutputChecker subclass that supports some
+    additional options:
+
+    * ALLOW_UNICODE and ALLOW_BYTES options to ignore u'' and b''
+      prefixes (respectively) in string literals. Useful when the same
+      doctest should run in Python 2 and Python 3.
+
+    * NUMBER to ignore floating-point differences smaller than the
+      precision of the literal number in the doctest.
+
+    An inner class is used to avoid importing "doctest" at the module
+    level.
+    """
+    global CHECKER_CLASS
+    if CHECKER_CLASS is None:
+        CHECKER_CLASS = _init_checker_class()
+    return CHECKER_CLASS()
+
+
+def _get_allow_unicode_flag() -> int:
+    """Register and return the ALLOW_UNICODE flag."""
+    import doctest
+
+    return doctest.register_optionflag("ALLOW_UNICODE")
+
+
+def _get_allow_bytes_flag() -> int:
+    """Register and return the ALLOW_BYTES flag."""
+    import doctest
+
+    return doctest.register_optionflag("ALLOW_BYTES")
+
+
+def _get_number_flag() -> int:
+    """Register and return the NUMBER flag."""
+    import doctest
+
+    return doctest.register_optionflag("NUMBER")
+
+
+def _get_report_choice(key: str) -> int:
+    """Return the actual `doctest` module flag value.
+
+    We want to do it as late as possible to avoid importing `doctest` and all
+    its dependencies when parsing options, as it adds overhead and breaks tests.
+ """ + import doctest + + return { + DOCTEST_REPORT_CHOICE_UDIFF: doctest.REPORT_UDIFF, + DOCTEST_REPORT_CHOICE_CDIFF: doctest.REPORT_CDIFF, + DOCTEST_REPORT_CHOICE_NDIFF: doctest.REPORT_NDIFF, + DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE: doctest.REPORT_ONLY_FIRST_FAILURE, + DOCTEST_REPORT_CHOICE_NONE: 0, + }[key] + + +@fixture(scope="session") +def doctest_namespace() -> Dict[str, Any]: + """Fixture that returns a :py:class:`dict` that will be injected into the + namespace of doctests. + + Usually this fixture is used in conjunction with another ``autouse`` fixture: + + .. code-block:: python + + @pytest.fixture(autouse=True) + def add_np(doctest_namespace): + doctest_namespace["np"] = numpy + + For more details: :ref:`doctest_namespace`. + """ + return dict() diff --git a/venv/lib/python3.11/site-packages/_pytest/faulthandler.py b/venv/lib/python3.11/site-packages/_pytest/faulthandler.py new file mode 100644 index 0000000..af879aa --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/faulthandler.py @@ -0,0 +1,95 @@ +import io +import os +import sys +from typing import Generator + +import pytest +from _pytest.config import Config +from _pytest.config.argparsing import Parser +from _pytest.nodes import Item +from _pytest.stash import StashKey + + +fault_handler_stderr_fd_key = StashKey[int]() +fault_handler_originally_enabled_key = StashKey[bool]() + + +def pytest_addoption(parser: Parser) -> None: + help = ( + "Dump the traceback of all threads if a test takes " + "more than TIMEOUT seconds to finish" + ) + parser.addini("faulthandler_timeout", help, default=0.0) + + +def pytest_configure(config: Config) -> None: + import faulthandler + + config.stash[fault_handler_stderr_fd_key] = os.dup(get_stderr_fileno()) + config.stash[fault_handler_originally_enabled_key] = faulthandler.is_enabled() + faulthandler.enable(file=config.stash[fault_handler_stderr_fd_key]) + + +def pytest_unconfigure(config: Config) -> None: + import faulthandler + + faulthandler.disable() + # Close the dup file installed during pytest_configure. + if fault_handler_stderr_fd_key in config.stash: + os.close(config.stash[fault_handler_stderr_fd_key]) + del config.stash[fault_handler_stderr_fd_key] + if config.stash.get(fault_handler_originally_enabled_key, False): + # Re-enable the faulthandler if it was originally enabled. + faulthandler.enable(file=get_stderr_fileno()) + + +def get_stderr_fileno() -> int: + try: + fileno = sys.stderr.fileno() + # The Twisted Logger will return an invalid file descriptor since it is not backed + # by an FD. So, let's also forward this to the same code path as with pytest-xdist. + if fileno == -1: + raise AttributeError() + return fileno + except (AttributeError, io.UnsupportedOperation): + # pytest-xdist monkeypatches sys.stderr with an object that is not an actual file. + # https://docs.python.org/3/library/faulthandler.html#issue-with-file-descriptors + # This is potentially dangerous, but the best we can do. 
+ return sys.__stderr__.fileno() + + +def get_timeout_config_value(config: Config) -> float: + return float(config.getini("faulthandler_timeout") or 0.0) + + +@pytest.hookimpl(hookwrapper=True, trylast=True) +def pytest_runtest_protocol(item: Item) -> Generator[None, None, None]: + timeout = get_timeout_config_value(item.config) + if timeout > 0: + import faulthandler + + stderr = item.config.stash[fault_handler_stderr_fd_key] + faulthandler.dump_traceback_later(timeout, file=stderr) + try: + yield + finally: + faulthandler.cancel_dump_traceback_later() + else: + yield + + +@pytest.hookimpl(tryfirst=True) +def pytest_enter_pdb() -> None: + """Cancel any traceback dumping due to timeout before entering pdb.""" + import faulthandler + + faulthandler.cancel_dump_traceback_later() + + +@pytest.hookimpl(tryfirst=True) +def pytest_exception_interact() -> None: + """Cancel any traceback dumping due to an interactive exception being + raised.""" + import faulthandler + + faulthandler.cancel_dump_traceback_later() diff --git a/venv/lib/python3.11/site-packages/_pytest/fixtures.py b/venv/lib/python3.11/site-packages/_pytest/fixtures.py new file mode 100644 index 0000000..0462504 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/fixtures.py @@ -0,0 +1,1713 @@ +import dataclasses +import functools +import inspect +import os +import sys +import warnings +from collections import defaultdict +from collections import deque +from contextlib import suppress +from pathlib import Path +from types import TracebackType +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import Generator +from typing import Generic +from typing import Iterable +from typing import Iterator +from typing import List +from typing import MutableMapping +from typing import NoReturn +from typing import Optional +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +import _pytest +from _pytest import nodes +from _pytest._code import getfslineno +from _pytest._code.code import FormattedExcinfo +from _pytest._code.code import TerminalRepr +from _pytest._io import TerminalWriter +from _pytest.compat import _format_args +from _pytest.compat import _PytestWrapper +from _pytest.compat import assert_never +from _pytest.compat import final +from _pytest.compat import get_real_func +from _pytest.compat import get_real_method +from _pytest.compat import getfuncargnames +from _pytest.compat import getimfunc +from _pytest.compat import getlocation +from _pytest.compat import is_generator +from _pytest.compat import NOTSET +from _pytest.compat import NotSetType +from _pytest.compat import overload +from _pytest.compat import safe_getattr +from _pytest.config import _PluggyPlugin +from _pytest.config import Config +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.deprecated import YIELD_FIXTURE +from _pytest.mark import Mark +from _pytest.mark import ParameterSet +from _pytest.mark.structures import MarkDecorator +from _pytest.outcomes import fail +from _pytest.outcomes import skip +from _pytest.outcomes import TEST_OUTCOME +from _pytest.pathlib import absolutepath +from _pytest.pathlib import bestrelpath +from _pytest.scope import HIGH_SCOPES +from _pytest.scope import Scope +from _pytest.stash import StashKey + + +if TYPE_CHECKING: + from typing import Deque + + from 
_pytest.scope import _ScopeName + from _pytest.main import Session + from _pytest.python import CallSpec2 + from _pytest.python import Metafunc + + +# The value of the fixture -- return/yield of the fixture function (type variable). +FixtureValue = TypeVar("FixtureValue") +# The type of the fixture function (type variable). +FixtureFunction = TypeVar("FixtureFunction", bound=Callable[..., object]) +# The type of a fixture function (type alias generic in fixture value). +_FixtureFunc = Union[ + Callable[..., FixtureValue], Callable[..., Generator[FixtureValue, None, None]] +] +# The type of FixtureDef.cached_result (type alias generic in fixture value). +_FixtureCachedResult = Union[ + Tuple[ + # The result. + FixtureValue, + # Cache key. + object, + None, + ], + Tuple[ + None, + # Cache key. + object, + # Exc info if raised. + Tuple[Type[BaseException], BaseException, TracebackType], + ], +] + + +@dataclasses.dataclass(frozen=True) +class PseudoFixtureDef(Generic[FixtureValue]): + cached_result: "_FixtureCachedResult[FixtureValue]" + _scope: Scope + + +def pytest_sessionstart(session: "Session") -> None: + session._fixturemanager = FixtureManager(session) + + +def get_scope_package( + node: nodes.Item, + fixturedef: "FixtureDef[object]", +) -> Optional[Union[nodes.Item, nodes.Collector]]: + from _pytest.python import Package + + current: Optional[Union[nodes.Item, nodes.Collector]] = node + fixture_package_name = "{}/{}".format(fixturedef.baseid, "__init__.py") + while current and ( + not isinstance(current, Package) or fixture_package_name != current.nodeid + ): + current = current.parent # type: ignore[assignment] + if current is None: + return node.session + return current + + +def get_scope_node( + node: nodes.Node, scope: Scope +) -> Optional[Union[nodes.Item, nodes.Collector]]: + import _pytest.python + + if scope is Scope.Function: + return node.getparent(nodes.Item) + elif scope is Scope.Class: + return node.getparent(_pytest.python.Class) + elif scope is Scope.Module: + return node.getparent(_pytest.python.Module) + elif scope is Scope.Package: + return node.getparent(_pytest.python.Package) + elif scope is Scope.Session: + return node.getparent(_pytest.main.Session) + else: + assert_never(scope) + + +# Used for storing artificial fixturedefs for direct parametrization. +name2pseudofixturedef_key = StashKey[Dict[str, "FixtureDef[Any]"]]() + + +def add_funcarg_pseudo_fixture_def( + collector: nodes.Collector, metafunc: "Metafunc", fixturemanager: "FixtureManager" +) -> None: + # This function will transform all collected calls to functions + # if they use direct funcargs (i.e. direct parametrization) + # because we want later test execution to be able to rely on + # an existing FixtureDef structure for all arguments. + # XXX we can probably avoid this algorithm if we modify CallSpec2 + # to directly care for creating the fixturedefs within its methods. + if not metafunc._calls[0].funcargs: + # This function call does not have direct parametrization. + return + # Collect funcargs of all callspecs into a list of values. 
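+    # For example (a sketch, not upstream code): a directly parametrized test
+    #
+    #   @pytest.mark.parametrize("x", [1, 2])
+    #   def test_double(x):
+    #       assert x * 2 in (2, 4)
+    #
+    # arrives here with callspec.funcargs == {"x": 1} / {"x": 2}; the loop
+    # below moves each value into callspec.params and records its index so a
+    # pseudo FixtureDef can serve it at setup time.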
+ arg2params: Dict[str, List[object]] = {} + arg2scope: Dict[str, Scope] = {} + for callspec in metafunc._calls: + for argname, argvalue in callspec.funcargs.items(): + assert argname not in callspec.params + callspec.params[argname] = argvalue + arg2params_list = arg2params.setdefault(argname, []) + callspec.indices[argname] = len(arg2params_list) + arg2params_list.append(argvalue) + if argname not in arg2scope: + scope = callspec._arg2scope.get(argname, Scope.Function) + arg2scope[argname] = scope + callspec.funcargs.clear() + + # Register artificial FixtureDef's so that later at test execution + # time we can rely on a proper FixtureDef to exist for fixture setup. + arg2fixturedefs = metafunc._arg2fixturedefs + for argname, valuelist in arg2params.items(): + # If we have a scope that is higher than function, we need + # to make sure we only ever create an according fixturedef on + # a per-scope basis. We thus store and cache the fixturedef on the + # node related to the scope. + scope = arg2scope[argname] + node = None + if scope is not Scope.Function: + node = get_scope_node(collector, scope) + if node is None: + assert scope is Scope.Class and isinstance( + collector, _pytest.python.Module + ) + # Use module-level collector for class-scope (for now). + node = collector + if node is None: + name2pseudofixturedef = None + else: + default: Dict[str, FixtureDef[Any]] = {} + name2pseudofixturedef = node.stash.setdefault( + name2pseudofixturedef_key, default + ) + if name2pseudofixturedef is not None and argname in name2pseudofixturedef: + arg2fixturedefs[argname] = [name2pseudofixturedef[argname]] + else: + fixturedef = FixtureDef( + fixturemanager=fixturemanager, + baseid="", + argname=argname, + func=get_direct_param_fixture_func, + scope=arg2scope[argname], + params=valuelist, + unittest=False, + ids=None, + ) + arg2fixturedefs[argname] = [fixturedef] + if name2pseudofixturedef is not None: + name2pseudofixturedef[argname] = fixturedef + + +def getfixturemarker(obj: object) -> Optional["FixtureFunctionMarker"]: + """Return fixturemarker or None if it doesn't exist or raised + exceptions.""" + return cast( + Optional[FixtureFunctionMarker], + safe_getattr(obj, "_pytestfixturefunction", None), + ) + + +# Parametrized fixture key, helper alias for code below. +_Key = Tuple[object, ...] + + +def get_parametrized_fixture_keys(item: nodes.Item, scope: Scope) -> Iterator[_Key]: + """Return list of keys for all parametrized arguments which match + the specified scope.""" + assert scope is not Scope.Function + try: + callspec = item.callspec # type: ignore[attr-defined] + except AttributeError: + pass + else: + cs: CallSpec2 = callspec + # cs.indices.items() is random order of argnames. Need to + # sort this so that different calls to + # get_parametrized_fixture_keys will be deterministic. + for argname, param_index in sorted(cs.indices.items()): + if cs._arg2scope[argname] != scope: + continue + if scope is Scope.Session: + key: _Key = (argname, param_index) + elif scope is Scope.Package: + key = (argname, param_index, item.path.parent) + elif scope is Scope.Module: + key = (argname, param_index, item.path) + elif scope is Scope.Class: + item_cls = item.cls # type: ignore[attr-defined] + key = (argname, param_index, item.path, item_cls) + else: + assert_never(scope) + yield key + + +# Algorithm for sorting on a per-parametrized resource setup basis. 
+# It is called for Session scope first and performs sorting +# down to the lower scopes such as to minimize number of "high scope" +# setups and teardowns. + + +def reorder_items(items: Sequence[nodes.Item]) -> List[nodes.Item]: + argkeys_cache: Dict[Scope, Dict[nodes.Item, Dict[_Key, None]]] = {} + items_by_argkey: Dict[Scope, Dict[_Key, Deque[nodes.Item]]] = {} + for scope in HIGH_SCOPES: + d: Dict[nodes.Item, Dict[_Key, None]] = {} + argkeys_cache[scope] = d + item_d: Dict[_Key, Deque[nodes.Item]] = defaultdict(deque) + items_by_argkey[scope] = item_d + for item in items: + keys = dict.fromkeys(get_parametrized_fixture_keys(item, scope), None) + if keys: + d[item] = keys + for key in keys: + item_d[key].append(item) + items_dict = dict.fromkeys(items, None) + return list( + reorder_items_atscope(items_dict, argkeys_cache, items_by_argkey, Scope.Session) + ) + + +def fix_cache_order( + item: nodes.Item, + argkeys_cache: Dict[Scope, Dict[nodes.Item, Dict[_Key, None]]], + items_by_argkey: Dict[Scope, Dict[_Key, "Deque[nodes.Item]"]], +) -> None: + for scope in HIGH_SCOPES: + for key in argkeys_cache[scope].get(item, []): + items_by_argkey[scope][key].appendleft(item) + + +def reorder_items_atscope( + items: Dict[nodes.Item, None], + argkeys_cache: Dict[Scope, Dict[nodes.Item, Dict[_Key, None]]], + items_by_argkey: Dict[Scope, Dict[_Key, "Deque[nodes.Item]"]], + scope: Scope, +) -> Dict[nodes.Item, None]: + if scope is Scope.Function or len(items) < 3: + return items + ignore: Set[Optional[_Key]] = set() + items_deque = deque(items) + items_done: Dict[nodes.Item, None] = {} + scoped_items_by_argkey = items_by_argkey[scope] + scoped_argkeys_cache = argkeys_cache[scope] + while items_deque: + no_argkey_group: Dict[nodes.Item, None] = {} + slicing_argkey = None + while items_deque: + item = items_deque.popleft() + if item in items_done or item in no_argkey_group: + continue + argkeys = dict.fromkeys( + (k for k in scoped_argkeys_cache.get(item, []) if k not in ignore), None + ) + if not argkeys: + no_argkey_group[item] = None + else: + slicing_argkey, _ = argkeys.popitem() + # We don't have to remove relevant items from later in the + # deque because they'll just be ignored. + matching_items = [ + i for i in scoped_items_by_argkey[slicing_argkey] if i in items + ] + for i in reversed(matching_items): + fix_cache_order(i, argkeys_cache, items_by_argkey) + items_deque.appendleft(i) + break + if no_argkey_group: + no_argkey_group = reorder_items_atscope( + no_argkey_group, argkeys_cache, items_by_argkey, scope.next_lower() + ) + for item in no_argkey_group: + items_done[item] = None + ignore.add(slicing_argkey) + return items_done + + +def get_direct_param_fixture_func(request: "FixtureRequest") -> Any: + return request.param + + +@dataclasses.dataclass +class FuncFixtureInfo: + __slots__ = ("argnames", "initialnames", "names_closure", "name2fixturedefs") + + # Original function argument names. + argnames: Tuple[str, ...] + # Argnames that function immediately requires. These include argnames + + # fixture names specified via usefixtures and via autouse=True in fixture + # definitions. + initialnames: Tuple[str, ...] + names_closure: List[str] + name2fixturedefs: Dict[str, Sequence["FixtureDef[Any]"]] + + def prune_dependency_tree(self) -> None: + """Recompute names_closure from initialnames and name2fixturedefs. + + Can only reduce names_closure, which means that the new closure will + always be a subset of the old one. The order is preserved. 
+ + This method is needed because direct parametrization may shadow some + of the fixtures that were included in the originally built dependency + tree. In this way the dependency tree can get pruned, and the closure + of argnames may get reduced. + """ + closure: Set[str] = set() + working_set = set(self.initialnames) + while working_set: + argname = working_set.pop() + # Argname may be smth not included in the original names_closure, + # in which case we ignore it. This currently happens with pseudo + # FixtureDefs which wrap 'get_direct_param_fixture_func(request)'. + # So they introduce the new dependency 'request' which might have + # been missing in the original tree (closure). + if argname not in closure and argname in self.names_closure: + closure.add(argname) + if argname in self.name2fixturedefs: + working_set.update(self.name2fixturedefs[argname][-1].argnames) + + self.names_closure[:] = sorted(closure, key=self.names_closure.index) + + +class FixtureRequest: + """A request for a fixture from a test or fixture function. + + A request object gives access to the requesting test context and has + an optional ``param`` attribute in case the fixture is parametrized + indirectly. + """ + + def __init__(self, pyfuncitem, *, _ispytest: bool = False) -> None: + check_ispytest(_ispytest) + self._pyfuncitem = pyfuncitem + #: Fixture for which this request is being performed. + self.fixturename: Optional[str] = None + self._scope = Scope.Function + self._fixture_defs: Dict[str, FixtureDef[Any]] = {} + fixtureinfo: FuncFixtureInfo = pyfuncitem._fixtureinfo + self._arg2fixturedefs = fixtureinfo.name2fixturedefs.copy() + self._arg2index: Dict[str, int] = {} + self._fixturemanager: FixtureManager = pyfuncitem.session._fixturemanager + # Notes on the type of `param`: + # -`request.param` is only defined in parametrized fixtures, and will raise + # AttributeError otherwise. Python typing has no notion of "undefined", so + # this cannot be reflected in the type. + # - Technically `param` is only (possibly) defined on SubRequest, not + # FixtureRequest, but the typing of that is still in flux so this cheats. + # - In the future we might consider using a generic for the param type, but + # for now just using Any. + self.param: Any + + @property + def scope(self) -> "_ScopeName": + """Scope string, one of "function", "class", "module", "package", "session".""" + return self._scope.value + + @property + def fixturenames(self) -> List[str]: + """Names of all active fixtures in this request.""" + result = list(self._pyfuncitem._fixtureinfo.names_closure) + result.extend(set(self._fixture_defs).difference(result)) + return result + + @property + def node(self): + """Underlying collection node (depends on current request scope).""" + scope = self._scope + if scope is Scope.Function: + # This might also be a non-function Item despite its attribute name. + node: Optional[Union[nodes.Item, nodes.Collector]] = self._pyfuncitem + elif scope is Scope.Package: + # FIXME: _fixturedef is not defined on FixtureRequest (this class), + # but on FixtureRequest (a subclass). + node = get_scope_package(self._pyfuncitem, self._fixturedef) # type: ignore[attr-defined] + else: + node = get_scope_node(self._pyfuncitem, scope) + if node is None and scope is Scope.Class: + # Fallback to function item itself. 
+ node = self._pyfuncitem + assert node, 'Could not obtain a node for scope "{}" for function {!r}'.format( + scope, self._pyfuncitem + ) + return node + + def _getnextfixturedef(self, argname: str) -> "FixtureDef[Any]": + fixturedefs = self._arg2fixturedefs.get(argname, None) + if fixturedefs is None: + # We arrive here because of a dynamic call to + # getfixturevalue(argname) usage which was naturally + # not known at parsing/collection time. + assert self._pyfuncitem.parent is not None + parentid = self._pyfuncitem.parent.nodeid + fixturedefs = self._fixturemanager.getfixturedefs(argname, parentid) + # TODO: Fix this type ignore. Either add assert or adjust types. + # Can this be None here? + self._arg2fixturedefs[argname] = fixturedefs # type: ignore[assignment] + # fixturedefs list is immutable so we maintain a decreasing index. + index = self._arg2index.get(argname, 0) - 1 + if fixturedefs is None or (-index > len(fixturedefs)): + raise FixtureLookupError(argname, self) + self._arg2index[argname] = index + return fixturedefs[index] + + @property + def config(self) -> Config: + """The pytest config object associated with this request.""" + return self._pyfuncitem.config # type: ignore[no-any-return] + + @property + def function(self): + """Test function object if the request has a per-function scope.""" + if self.scope != "function": + raise AttributeError( + f"function not available in {self.scope}-scoped context" + ) + return self._pyfuncitem.obj + + @property + def cls(self): + """Class (can be None) where the test function was collected.""" + if self.scope not in ("class", "function"): + raise AttributeError(f"cls not available in {self.scope}-scoped context") + clscol = self._pyfuncitem.getparent(_pytest.python.Class) + if clscol: + return clscol.obj + + @property + def instance(self): + """Instance (can be None) on which test function was collected.""" + # unittest support hack, see _pytest.unittest.TestCaseFunction. + try: + return self._pyfuncitem._testcase + except AttributeError: + function = getattr(self, "function", None) + return getattr(function, "__self__", None) + + @property + def module(self): + """Python module object where the test function was collected.""" + if self.scope not in ("function", "class", "module"): + raise AttributeError(f"module not available in {self.scope}-scoped context") + return self._pyfuncitem.getparent(_pytest.python.Module).obj + + @property + def path(self) -> Path: + """Path where the test function was collected.""" + if self.scope not in ("function", "class", "module", "package"): + raise AttributeError(f"path not available in {self.scope}-scoped context") + # TODO: Remove ignore once _pyfuncitem is properly typed. + return self._pyfuncitem.path # type: ignore + + @property + def keywords(self) -> MutableMapping[str, Any]: + """Keywords/markers dictionary for the underlying node.""" + node: nodes.Node = self.node + return node.keywords + + @property + def session(self) -> "Session": + """Pytest session object.""" + return self._pyfuncitem.session # type: ignore[no-any-return] + + def addfinalizer(self, finalizer: Callable[[], object]) -> None: + """Add finalizer/teardown function to be called without arguments after + the last test within the requesting test context finished execution.""" + # XXX usually this method is shadowed by fixturedef specific ones. + self.node.addfinalizer(finalizer) + + def applymarker(self, marker: Union[str, MarkDecorator]) -> None: + """Apply a marker to a single test function invocation. 
+ + This method is useful if you don't want to have a keyword/marker + on all function invocations. + + :param marker: + An object created by a call to ``pytest.mark.NAME(...)``. + """ + self.node.add_marker(marker) + + def raiseerror(self, msg: Optional[str]) -> NoReturn: + """Raise a FixtureLookupError exception. + + :param msg: + An optional custom error message. + """ + raise self._fixturemanager.FixtureLookupError(None, self, msg) + + def _fillfixtures(self) -> None: + item = self._pyfuncitem + fixturenames = getattr(item, "fixturenames", self.fixturenames) + for argname in fixturenames: + if argname not in item.funcargs: + item.funcargs[argname] = self.getfixturevalue(argname) + + def getfixturevalue(self, argname: str) -> Any: + """Dynamically run a named fixture function. + + Declaring fixtures via function argument is recommended where possible. + But if you can only decide whether to use another fixture at test + setup time, you may use this function to retrieve it inside a fixture + or test function body. + + This method can be used during the test setup phase or the test run + phase, but during the test teardown phase a fixture's value may not + be available. + + :param argname: + The fixture name. + :raises pytest.FixtureLookupError: + If the given fixture could not be found. + """ + fixturedef = self._get_active_fixturedef(argname) + assert fixturedef.cached_result is not None, ( + f'The fixture value for "{argname}" is not available. ' + "This can happen when the fixture has already been torn down." + ) + return fixturedef.cached_result[0] + + def _get_active_fixturedef( + self, argname: str + ) -> Union["FixtureDef[object]", PseudoFixtureDef[object]]: + try: + return self._fixture_defs[argname] + except KeyError: + try: + fixturedef = self._getnextfixturedef(argname) + except FixtureLookupError: + if argname == "request": + cached_result = (self, [0], None) + return PseudoFixtureDef(cached_result, Scope.Function) + raise + # Remove indent to prevent the python3 exception + # from leaking into the call. + self._compute_fixture_value(fixturedef) + self._fixture_defs[argname] = fixturedef + return fixturedef + + def _get_fixturestack(self) -> List["FixtureDef[Any]"]: + current = self + values: List[FixtureDef[Any]] = [] + while isinstance(current, SubRequest): + values.append(current._fixturedef) # type: ignore[has-type] + current = current._parent_request + values.reverse() + return values + + def _compute_fixture_value(self, fixturedef: "FixtureDef[object]") -> None: + """Create a SubRequest based on "self" and call the execute method + of the given FixtureDef object. + + This will force the FixtureDef object to throw away any previous + results and compute a new fixture value, which will be stored into + the FixtureDef object itself. + """ + # prepare a subrequest object before calling fixture function + # (latter managed by fixturedef) + argname = fixturedef.argname + funcitem = self._pyfuncitem + scope = fixturedef._scope + try: + callspec = funcitem.callspec + except AttributeError: + callspec = None + if callspec is not None and argname in callspec.params: + param = callspec.params[argname] + param_index = callspec.indices[argname] + # If a parametrize invocation set a scope it will override + # the static scope defined with the fixture function. 
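# Illustrative sketch (hypothetical module, not pytest source) of such an
# override: an indirect parametrization can widen a fixture's scope:
#
#     import pytest
#
#     @pytest.fixture
#     def conn(request):
#         return open_conn(request.param)  # "open_conn" is a stand-in
#
#     @pytest.mark.parametrize("conn", ["a", "b"], indirect=True, scope="module")
#     def test_conn(conn):
#         ...
#
# Here callspec._arg2scope["conn"] is Scope.Module and wins over the
# fixture's static function scope in the lookup below.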
+ with suppress(KeyError): + scope = callspec._arg2scope[argname] + else: + param = NOTSET + param_index = 0 + has_params = fixturedef.params is not None + fixtures_not_supported = getattr(funcitem, "nofuncargs", False) + if has_params and fixtures_not_supported: + msg = ( + "{name} does not support fixtures, maybe unittest.TestCase subclass?\n" + "Node id: {nodeid}\n" + "Function type: {typename}" + ).format( + name=funcitem.name, + nodeid=funcitem.nodeid, + typename=type(funcitem).__name__, + ) + fail(msg, pytrace=False) + if has_params: + frame = inspect.stack()[3] + frameinfo = inspect.getframeinfo(frame[0]) + source_path = absolutepath(frameinfo.filename) + source_lineno = frameinfo.lineno + try: + source_path_str = str( + source_path.relative_to(funcitem.config.rootpath) + ) + except ValueError: + source_path_str = str(source_path) + msg = ( + "The requested fixture has no parameter defined for test:\n" + " {}\n\n" + "Requested fixture '{}' defined in:\n{}" + "\n\nRequested here:\n{}:{}".format( + funcitem.nodeid, + fixturedef.argname, + getlocation(fixturedef.func, funcitem.config.rootpath), + source_path_str, + source_lineno, + ) + ) + fail(msg, pytrace=False) + + subrequest = SubRequest( + self, scope, param, param_index, fixturedef, _ispytest=True + ) + + # Check if a higher-level scoped fixture accesses a lower level one. + subrequest._check_scope(argname, self._scope, scope) + try: + # Call the fixture function. + fixturedef.execute(request=subrequest) + finally: + self._schedule_finalizers(fixturedef, subrequest) + + def _schedule_finalizers( + self, fixturedef: "FixtureDef[object]", subrequest: "SubRequest" + ) -> None: + # If fixture function failed it might have registered finalizers. + subrequest.node.addfinalizer(lambda: fixturedef.finish(request=subrequest)) + + def _check_scope( + self, + argname: str, + invoking_scope: Scope, + requested_scope: Scope, + ) -> None: + if argname == "request": + return + if invoking_scope > requested_scope: + # Try to report something helpful. 
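# Illustrative sketch (hypothetical fixtures, not pytest source) of a
# mismatch this check reports:
#
#     import pytest
#
#     @pytest.fixture
#     def token():                    # function-scoped
#         return "t"
#
#     @pytest.fixture(scope="session")
#     def client(token):              # session scope requests narrower scope
#         return make_client(token)   # "make_client" is a stand-in
#
# Requesting "client" then fails with "ScopeMismatch: You tried to access
# the function scoped fixture token with a session scoped request object".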
+ text = "\n".join(self._factorytraceback()) + fail( + f"ScopeMismatch: You tried to access the {requested_scope.value} scoped " + f"fixture {argname} with a {invoking_scope.value} scoped request object, " + f"involved factories:\n{text}", + pytrace=False, + ) + + def _factorytraceback(self) -> List[str]: + lines = [] + for fixturedef in self._get_fixturestack(): + factory = fixturedef.func + fs, lineno = getfslineno(factory) + if isinstance(fs, Path): + session: Session = self._pyfuncitem.session + p = bestrelpath(session.path, fs) + else: + p = fs + args = _format_args(factory) + lines.append("%s:%d: def %s%s" % (p, lineno + 1, factory.__name__, args)) + return lines + + def __repr__(self) -> str: + return "<FixtureRequest for %r>" % (self.node) + + +@final +class SubRequest(FixtureRequest): + """A sub request for handling getting a fixture from a test function/fixture.""" + + def __init__( + self, + request: "FixtureRequest", + scope: Scope, + param: Any, + param_index: int, + fixturedef: "FixtureDef[object]", + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self._parent_request = request + self.fixturename = fixturedef.argname + if param is not NOTSET: + self.param = param + self.param_index = param_index + self._scope = scope + self._fixturedef = fixturedef + self._pyfuncitem = request._pyfuncitem + self._fixture_defs = request._fixture_defs + self._arg2fixturedefs = request._arg2fixturedefs + self._arg2index = request._arg2index + self._fixturemanager = request._fixturemanager + + def __repr__(self) -> str: + return f"<SubRequest {self.fixturename!r} for {self._pyfuncitem!r}>" + + def addfinalizer(self, finalizer: Callable[[], object]) -> None: + """Add finalizer/teardown function to be called without arguments after + the last test within the requesting test context finished execution.""" + self._fixturedef.addfinalizer(finalizer) + + def _schedule_finalizers( + self, fixturedef: "FixtureDef[object]", subrequest: "SubRequest" + ) -> None: + # If the executing fixturedef was not explicitly requested in the argument list (via + # getfixturevalue inside the fixture call) then ensure this fixture def will be finished + # first. + if fixturedef.argname not in self.fixturenames: + fixturedef.addfinalizer( + functools.partial(self._fixturedef.finish, request=self) + ) + super()._schedule_finalizers(fixturedef, subrequest) + + +@final +class FixtureLookupError(LookupError): + """Could not return a requested fixture (missing or invalid).""" + + def __init__( + self, argname: Optional[str], request: FixtureRequest, msg: Optional[str] = None + ) -> None: + self.argname = argname + self.request = request + self.fixturestack = request._get_fixturestack() + self.msg = msg + + def formatrepr(self) -> "FixtureLookupErrorRepr": + tblines: List[str] = [] + addline = tblines.append + stack = [self.request._pyfuncitem.obj] + stack.extend(map(lambda x: x.func, self.fixturestack)) + msg = self.msg + if msg is not None: + # The last fixture raised an error, so let's present + # it at the requesting side.
+ stack = stack[:-1] + for function in stack: + fspath, lineno = getfslineno(function) + try: + lines, _ = inspect.getsourcelines(get_real_func(function)) + except (OSError, IndexError, TypeError): + error_msg = "file %s, line %s: source code not available" + addline(error_msg % (fspath, lineno + 1)) + else: + addline(f"file {fspath}, line {lineno + 1}") + for i, line in enumerate(lines): + line = line.rstrip() + addline(" " + line) + if line.lstrip().startswith("def"): + break + + if msg is None: + fm = self.request._fixturemanager + available = set() + parentid = self.request._pyfuncitem.parent.nodeid + for name, fixturedefs in fm._arg2fixturedefs.items(): + faclist = list(fm._matchfactories(fixturedefs, parentid)) + if faclist: + available.add(name) + if self.argname in available: + msg = " recursive dependency involving fixture '{}' detected".format( + self.argname + ) + else: + msg = f"fixture '{self.argname}' not found" + msg += "\n available fixtures: {}".format(", ".join(sorted(available))) + msg += "\n use 'pytest --fixtures [testpath]' for help on them." + + return FixtureLookupErrorRepr(fspath, lineno, tblines, msg, self.argname) + + +class FixtureLookupErrorRepr(TerminalRepr): + def __init__( + self, + filename: Union[str, "os.PathLike[str]"], + firstlineno: int, + tblines: Sequence[str], + errorstring: str, + argname: Optional[str], + ) -> None: + self.tblines = tblines + self.errorstring = errorstring + self.filename = filename + self.firstlineno = firstlineno + self.argname = argname + + def toterminal(self, tw: TerminalWriter) -> None: + # tw.line("FixtureLookupError: %s" %(self.argname), red=True) + for tbline in self.tblines: + tw.line(tbline.rstrip()) + lines = self.errorstring.split("\n") + if lines: + tw.line( + f"{FormattedExcinfo.fail_marker} {lines[0].strip()}", + red=True, + ) + for line in lines[1:]: + tw.line( + f"{FormattedExcinfo.flow_marker} {line.strip()}", + red=True, + ) + tw.line() + tw.line("%s:%d" % (os.fspath(self.filename), self.firstlineno + 1)) + + +def fail_fixturefunc(fixturefunc, msg: str) -> NoReturn: + fs, lineno = getfslineno(fixturefunc) + location = f"{fs}:{lineno + 1}" + source = _pytest._code.Source(fixturefunc) + fail(msg + ":\n\n" + str(source.indent()) + "\n" + location, pytrace=False) + + +def call_fixture_func( + fixturefunc: "_FixtureFunc[FixtureValue]", request: FixtureRequest, kwargs +) -> FixtureValue: + if is_generator(fixturefunc): + fixturefunc = cast( + Callable[..., Generator[FixtureValue, None, None]], fixturefunc + ) + generator = fixturefunc(**kwargs) + try: + fixture_result = next(generator) + except StopIteration: + raise ValueError(f"{request.fixturename} did not yield a value") from None + finalizer = functools.partial(_teardown_yield_fixture, fixturefunc, generator) + request.addfinalizer(finalizer) + else: + fixturefunc = cast(Callable[..., FixtureValue], fixturefunc) + fixture_result = fixturefunc(**kwargs) + return fixture_result + + +def _teardown_yield_fixture(fixturefunc, it) -> None: + """Execute the teardown of a fixture function by advancing the iterator + after the yield and ensure the iteration ends (if not it means there is + more than one yield in the function).""" + try: + next(it) + except StopIteration: + pass + else: + fail_fixturefunc(fixturefunc, "fixture function has more than one 'yield'") + + +def _eval_scope_callable( + scope_callable: "Callable[[str, Config], _ScopeName]", + fixture_name: str, + config: Config, +) -> "_ScopeName": + try: + # Type ignored because there is no typing mechanism to 
specify + # keyword arguments, currently. + result = scope_callable(fixture_name=fixture_name, config=config) # type: ignore[call-arg] + except Exception as e: + raise TypeError( + "Error evaluating {} while defining fixture '{}'.\n" + "Expected a function with the signature (*, fixture_name, config)".format( + scope_callable, fixture_name + ) + ) from e + if not isinstance(result, str): + fail( + "Expected {} to return a 'str' while defining fixture '{}', but it returned:\n" + "{!r}".format(scope_callable, fixture_name, result), + pytrace=False, + ) + return result + + +@final +class FixtureDef(Generic[FixtureValue]): + """A container for a fixture definition.""" + + def __init__( + self, + fixturemanager: "FixtureManager", + baseid: Optional[str], + argname: str, + func: "_FixtureFunc[FixtureValue]", + scope: Union[Scope, "_ScopeName", Callable[[str, Config], "_ScopeName"], None], + params: Optional[Sequence[object]], + unittest: bool = False, + ids: Optional[ + Union[Tuple[Optional[object], ...], Callable[[Any], Optional[object]]] + ] = None, + ) -> None: + self._fixturemanager = fixturemanager + # The "base" node ID for the fixture. + # + # This is a node ID prefix. A fixture is only available to a node (e.g. + # a `Function` item) if the fixture's baseid is a parent of the node's + # nodeid (see the `iterparentnodeids` function for what constitutes a + # "parent" and a "prefix" in this context). + # + # For a fixture found in a Collector's object (e.g. a `Module`s module, + # a `Class`'s class), the baseid is the Collector's nodeid. + # + # For a fixture found in a conftest plugin, the baseid is the conftest's + # directory path relative to the rootdir. + # + # For other plugins, the baseid is the empty string (always matches). + self.baseid = baseid or "" + # Whether the fixture was found from a node or a conftest in the + # collection tree. Will be false for fixtures defined in non-conftest + # plugins. + self.has_location = baseid is not None + # The fixture factory function. + self.func = func + # The name by which the fixture may be requested. + self.argname = argname + if scope is None: + scope = Scope.Function + elif callable(scope): + scope = _eval_scope_callable(scope, argname, fixturemanager.config) + if isinstance(scope, str): + scope = Scope.from_user( + scope, descr=f"Fixture '{func.__name__}'", where=baseid + ) + self._scope = scope + # If the fixture is directly parametrized, the parameter values. + self.params: Optional[Sequence[object]] = params + # If the fixture is directly parametrized, a tuple of explicit IDs to + # assign to the parameter values, or a callable to generate an ID given + # a parameter value. + self.ids = ids + # The names requested by the fixtures. + self.argnames = getfuncargnames(func, name=argname, is_method=unittest) + # Whether the fixture was collected from a unittest TestCase class. + # Note that it really only makes sense to define autouse fixtures in + # unittest TestCases. + self.unittest = unittest + # If the fixture was executed, the current value of the fixture. + # Can change if the fixture is executed with different parameters. 
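# Sketch of the caching contract (hypothetical fixture, not pytest source):
# cache_key() below is request.param for parametrized fixtures, so
#
#     @pytest.fixture(params=[1, 2])
#     def num(request):
#         return request.param
#
# is set up once per parameter: consecutive tests using num[1] reuse the
# cached value, and switching to num[2] first tears the old instance down
# (see execute() below).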
+ self.cached_result: Optional[_FixtureCachedResult[FixtureValue]] = None + self._finalizers: List[Callable[[], object]] = [] + + @property + def scope(self) -> "_ScopeName": + """Scope string, one of "function", "class", "module", "package", "session".""" + return self._scope.value + + def addfinalizer(self, finalizer: Callable[[], object]) -> None: + self._finalizers.append(finalizer) + + def finish(self, request: SubRequest) -> None: + exc = None + try: + while self._finalizers: + try: + func = self._finalizers.pop() + func() + except BaseException as e: + # XXX Only first exception will be seen by user, + # ideally all should be reported. + if exc is None: + exc = e + if exc: + raise exc + finally: + ihook = request.node.ihook + ihook.pytest_fixture_post_finalizer(fixturedef=self, request=request) + # Even if finalization fails, we invalidate the cached fixture + # value and remove all finalizers because they may be bound methods + # which will keep instances alive. + self.cached_result = None + self._finalizers = [] + + def execute(self, request: SubRequest) -> FixtureValue: + # Get required arguments and register our own finish() + # with their finalization. + for argname in self.argnames: + fixturedef = request._get_active_fixturedef(argname) + if argname != "request": + # PseudoFixtureDef is only for "request". + assert isinstance(fixturedef, FixtureDef) + fixturedef.addfinalizer(functools.partial(self.finish, request=request)) + + my_cache_key = self.cache_key(request) + if self.cached_result is not None: + # note: comparison with `==` can fail (or be expensive) for e.g. + # numpy arrays (#6497). + cache_key = self.cached_result[1] + if my_cache_key is cache_key: + if self.cached_result[2] is not None: + _, val, tb = self.cached_result[2] + raise val.with_traceback(tb) + else: + result = self.cached_result[0] + return result + # We have a previous but differently parametrized fixture instance + # so we need to tear it down before creating a new one. + self.finish(request) + assert self.cached_result is None + + ihook = request.node.ihook + result = ihook.pytest_fixture_setup(fixturedef=self, request=request) + return result + + def cache_key(self, request: SubRequest) -> object: + return request.param_index if not hasattr(request, "param") else request.param + + def __repr__(self) -> str: + return "<FixtureDef argname={!r} scope={!r} baseid={!r}>".format( + self.argname, self.scope, self.baseid + ) + + +def resolve_fixture_function( + fixturedef: FixtureDef[FixtureValue], request: FixtureRequest +) -> "_FixtureFunc[FixtureValue]": + """Get the actual callable that can be called to obtain the fixture + value, dealing with unittest-specific instances and bound methods.""" + fixturefunc = fixturedef.func + if fixturedef.unittest: + if request.instance is not None: + # Bind the unbound method to the TestCase instance. + fixturefunc = fixturedef.func.__get__(request.instance) # type: ignore[union-attr] + else: + # The fixture function needs to be bound to the actual + # request.instance so that code working with "fixturedef" behaves + # as expected. + if request.instance is not None: + # Handle the case where fixture is defined not in a test class, but some other class + # (for example a plugin class with a fixture), see #2270.
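# Illustrative sketch of the #2270 case (hypothetical plugin, not pytest
# source): a fixture defined on a plugin class rather than a test class:
#
#     class MyPlugin:
#         @pytest.fixture
#         def greeting(self):
#             return "hello"
#
# When a test method requests "greeting", request.instance is the test
# class instance, not a MyPlugin instance, so the already-bound method is
# returned as-is instead of being re-bound below.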
+ if hasattr(fixturefunc, "__self__") and not isinstance( + request.instance, fixturefunc.__self__.__class__ # type: ignore[union-attr] + ): + return fixturefunc + fixturefunc = getimfunc(fixturedef.func) + if fixturefunc != fixturedef.func: + fixturefunc = fixturefunc.__get__(request.instance) # type: ignore[union-attr] + return fixturefunc + + +def pytest_fixture_setup( + fixturedef: FixtureDef[FixtureValue], request: SubRequest +) -> FixtureValue: + """Execution of fixture setup.""" + kwargs = {} + for argname in fixturedef.argnames: + fixdef = request._get_active_fixturedef(argname) + assert fixdef.cached_result is not None + result, arg_cache_key, exc = fixdef.cached_result + request._check_scope(argname, request._scope, fixdef._scope) + kwargs[argname] = result + + fixturefunc = resolve_fixture_function(fixturedef, request) + my_cache_key = fixturedef.cache_key(request) + try: + result = call_fixture_func(fixturefunc, request, kwargs) + except TEST_OUTCOME: + exc_info = sys.exc_info() + assert exc_info[0] is not None + if isinstance( + exc_info[1], skip.Exception + ) and not fixturefunc.__name__.startswith("xunit_setup"): + exc_info[1]._use_item_location = True # type: ignore[attr-defined] + fixturedef.cached_result = (None, my_cache_key, exc_info) + raise + fixturedef.cached_result = (result, my_cache_key, None) + return result + + +def _ensure_immutable_ids( + ids: Optional[Union[Sequence[Optional[object]], Callable[[Any], Optional[object]]]] +) -> Optional[Union[Tuple[Optional[object], ...], Callable[[Any], Optional[object]]]]: + if ids is None: + return None + if callable(ids): + return ids + return tuple(ids) + + +def _params_converter( + params: Optional[Iterable[object]], +) -> Optional[Tuple[object, ...]]: + return tuple(params) if params is not None else None + + +def wrap_function_to_error_out_if_called_directly( + function: FixtureFunction, + fixture_marker: "FixtureFunctionMarker", +) -> FixtureFunction: + """Wrap the given fixture function so we can raise an error about it being called directly, + instead of used as an argument in a test function.""" + message = ( + 'Fixture "{name}" called directly. Fixtures are not meant to be called directly,\n' + "but are created automatically when test functions request them as parameters.\n" + "See https://docs.pytest.org/en/stable/explanation/fixtures.html for more information about fixtures, and\n" + "https://docs.pytest.org/en/stable/deprecations.html#calling-fixtures-directly about how to update your code." + ).format(name=fixture_marker.name or function.__name__) + + @functools.wraps(function) + def result(*args, **kwargs): + fail(message, pytrace=False) + + # Keep reference to the original function in our own custom attribute so we don't unwrap + # further than this point and lose useful wrappings like @mock.patch (#3774). 
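# What the wrapper guards against (hypothetical usage, not pytest source):
#
#     @pytest.fixture
#     def data():
#         return [1, 2, 3]
#
#     def test_bad():
#         values = data()  # fails: 'Fixture "data" called directly.'
#
# Tests must request the fixture as a parameter instead: def test_ok(data).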
+ result.__pytest_wrapped__ = _PytestWrapper(function) # type: ignore[attr-defined] + + return cast(FixtureFunction, result) + + +@final +@dataclasses.dataclass(frozen=True) +class FixtureFunctionMarker: + scope: "Union[_ScopeName, Callable[[str, Config], _ScopeName]]" + params: Optional[Tuple[object, ...]] + autouse: bool = False + ids: Optional[ + Union[Tuple[Optional[object], ...], Callable[[Any], Optional[object]]] + ] = None + name: Optional[str] = None + + _ispytest: dataclasses.InitVar[bool] = False + + def __post_init__(self, _ispytest: bool) -> None: + check_ispytest(_ispytest) + + def __call__(self, function: FixtureFunction) -> FixtureFunction: + if inspect.isclass(function): + raise ValueError("class fixtures not supported (maybe in the future)") + + if getattr(function, "_pytestfixturefunction", False): + raise ValueError( + "fixture is being applied more than once to the same function" + ) + + function = wrap_function_to_error_out_if_called_directly(function, self) + + name = self.name or function.__name__ + if name == "request": + location = getlocation(function) + fail( + "'request' is a reserved word for fixtures, use another name:\n {}".format( + location + ), + pytrace=False, + ) + + # Type ignored because https://github.com/python/mypy/issues/2087. + function._pytestfixturefunction = self # type: ignore[attr-defined] + return function + + +@overload +def fixture( + fixture_function: FixtureFunction, + *, + scope: "Union[_ScopeName, Callable[[str, Config], _ScopeName]]" = ..., + params: Optional[Iterable[object]] = ..., + autouse: bool = ..., + ids: Optional[ + Union[Sequence[Optional[object]], Callable[[Any], Optional[object]]] + ] = ..., + name: Optional[str] = ..., +) -> FixtureFunction: + ... + + +@overload +def fixture( # noqa: F811 + fixture_function: None = ..., + *, + scope: "Union[_ScopeName, Callable[[str, Config], _ScopeName]]" = ..., + params: Optional[Iterable[object]] = ..., + autouse: bool = ..., + ids: Optional[ + Union[Sequence[Optional[object]], Callable[[Any], Optional[object]]] + ] = ..., + name: Optional[str] = None, +) -> FixtureFunctionMarker: + ... + + +def fixture( # noqa: F811 + fixture_function: Optional[FixtureFunction] = None, + *, + scope: "Union[_ScopeName, Callable[[str, Config], _ScopeName]]" = "function", + params: Optional[Iterable[object]] = None, + autouse: bool = False, + ids: Optional[ + Union[Sequence[Optional[object]], Callable[[Any], Optional[object]]] + ] = None, + name: Optional[str] = None, +) -> Union[FixtureFunctionMarker, FixtureFunction]: + """Decorator to mark a fixture factory function. + + This decorator can be used, with or without parameters, to define a + fixture function. + + The name of the fixture function can later be referenced to cause its + invocation ahead of running tests: test modules or classes can use the + ``pytest.mark.usefixtures(fixturename)`` marker. + + Test functions can directly use fixture names as input arguments in which + case the fixture instance returned from the fixture function will be + injected. + + Fixtures can provide their values to test functions using ``return`` or + ``yield`` statements. When using ``yield`` the code block after the + ``yield`` statement is executed as teardown code regardless of the test + outcome, and must yield exactly once. + + :param scope: + The scope for which this fixture is shared; one of ``"function"`` + (default), ``"class"``, ``"module"``, ``"package"`` or ``"session"``. 
+ + This parameter may also be a callable which receives ``(fixture_name, config)`` + as parameters, and must return a ``str`` with one of the values mentioned above. + + See :ref:`dynamic scope` in the docs for more information. + + :param params: + An optional list of parameters which will cause multiple invocations + of the fixture function and all of the tests using it. The current + parameter is available in ``request.param``. + + :param autouse: + If True, the fixture func is activated for all tests that can see it. + If False (the default), an explicit reference is needed to activate + the fixture. + + :param ids: + Sequence of ids each corresponding to the params so that they are + part of the test id. If no ids are provided they will be generated + automatically from the params. + + :param name: + The name of the fixture. This defaults to the name of the decorated + function. If a fixture is used in the same module in which it is + defined, the function name of the fixture will be shadowed by the + function arg that requests the fixture; one way to resolve this is to + name the decorated function ``fixture_<fixturename>`` and then use + ``@pytest.fixture(name='<fixturename>')``. + """ + fixture_marker = FixtureFunctionMarker( + scope=scope, + params=tuple(params) if params is not None else None, + autouse=autouse, + ids=None if ids is None else ids if callable(ids) else tuple(ids), + name=name, + _ispytest=True, + ) + + # Direct decoration. + if fixture_function: + return fixture_marker(fixture_function) + + return fixture_marker + + +def yield_fixture( + fixture_function=None, + *args, + scope="function", + params=None, + autouse=False, + ids=None, + name=None, +): + """(Return a) decorator to mark a yield-fixture factory function. + + .. deprecated:: 3.0 + Use :py:func:`pytest.fixture` directly instead. + """ + warnings.warn(YIELD_FIXTURE, stacklevel=2) + return fixture( + fixture_function, + *args, + scope=scope, + params=params, + autouse=autouse, + ids=ids, + name=name, + ) + + +@fixture(scope="session") +def pytestconfig(request: FixtureRequest) -> Config: + """Session-scoped fixture that returns the session's :class:`pytest.Config` + object. + + Example:: + + def test_foo(pytestconfig): + if pytestconfig.getoption("verbose") > 0: + ... + + """ + return request.config + + +def pytest_addoption(parser: Parser) -> None: + parser.addini( + "usefixtures", + type="args", + default=[], + help="List of default fixtures to be used with this project", + ) + + +class FixtureManager: + """pytest fixture definitions and information is stored and managed + from this class. + + During collection fm.parsefactories() is called multiple times to parse + fixture function definitions into FixtureDef objects and internal + data structures. + + During collection of test functions, metafunc-mechanics instantiate + a FuncFixtureInfo object which is cached per node/func-name. + This FuncFixtureInfo object is later retrieved by Function nodes + which themselves offer a fixturenames attribute. + + The FuncFixtureInfo object holds information about fixtures and FixtureDefs + relevant for a particular function. An initial list of fixtures is + assembled like this: + + - ini-defined usefixtures + - autouse-marked fixtures along the collection chain up from the function + - usefixtures markers at module/class/function level + - test function funcargs + + Subsequently the funcfixtureinfo.fixturenames attribute is computed + as the closure of the fixtures needed to setup the initial fixtures, + i.e.
fixtures needed by fixture functions themselves are appended + to the fixturenames list. + + Upon the test-setup phases all fixturenames are instantiated, retrieved + by a lookup of their FuncFixtureInfo. + """ + + FixtureLookupError = FixtureLookupError + FixtureLookupErrorRepr = FixtureLookupErrorRepr + + def __init__(self, session: "Session") -> None: + self.session = session + self.config: Config = session.config + self._arg2fixturedefs: Dict[str, List[FixtureDef[Any]]] = {} + self._holderobjseen: Set[object] = set() + # A mapping from a nodeid to a list of autouse fixtures it defines. + self._nodeid_autousenames: Dict[str, List[str]] = { + "": self.config.getini("usefixtures"), + } + session.config.pluginmanager.register(self, "funcmanage") + + def _get_direct_parametrize_args(self, node: nodes.Node) -> List[str]: + """Return all direct parametrization arguments of a node, so we don't + mistake them for fixtures. + + Check https://github.com/pytest-dev/pytest/issues/5036. + + These things are done later as well when dealing with parametrization + so this could be improved. + """ + parametrize_argnames: List[str] = [] + for marker in node.iter_markers(name="parametrize"): + if not marker.kwargs.get("indirect", False): + p_argnames, _ = ParameterSet._parse_parametrize_args( + *marker.args, **marker.kwargs + ) + parametrize_argnames.extend(p_argnames) + + return parametrize_argnames + + def getfixtureinfo( + self, node: nodes.Node, func, cls, funcargs: bool = True + ) -> FuncFixtureInfo: + if funcargs and not getattr(node, "nofuncargs", False): + argnames = getfuncargnames(func, name=node.name, cls=cls) + else: + argnames = () + + usefixtures = tuple( + arg for mark in node.iter_markers(name="usefixtures") for arg in mark.args + ) + initialnames = usefixtures + argnames + fm = node.session._fixturemanager + initialnames, names_closure, arg2fixturedefs = fm.getfixtureclosure( + initialnames, node, ignore_args=self._get_direct_parametrize_args(node) + ) + return FuncFixtureInfo(argnames, initialnames, names_closure, arg2fixturedefs) + + def pytest_plugin_registered(self, plugin: _PluggyPlugin) -> None: + nodeid = None + try: + p = absolutepath(plugin.__file__) # type: ignore[attr-defined] + except AttributeError: + pass + else: + # Construct the base nodeid which is later used to check + # what fixtures are visible for particular tests (as denoted + # by their test id). + if p.name.startswith("conftest.py"): + try: + nodeid = str(p.parent.relative_to(self.config.rootpath)) + except ValueError: + nodeid = "" + if nodeid == ".": + nodeid = "" + if os.sep != nodes.SEP: + nodeid = nodeid.replace(os.sep, nodes.SEP) + + self.parsefactories(plugin, nodeid) + + def _getautousenames(self, nodeid: str) -> Iterator[str]: + """Return the names of autouse fixtures applicable to nodeid.""" + for parentnodeid in nodes.iterparentnodeids(nodeid): + basenames = self._nodeid_autousenames.get(parentnodeid) + if basenames: + yield from basenames + + def getfixtureclosure( + self, + fixturenames: Tuple[str, ...], + parentnode: nodes.Node, + ignore_args: Sequence[str] = (), + ) -> Tuple[Tuple[str, ...], List[str], Dict[str, Sequence[FixtureDef[Any]]]]: + # Collect the closure of all fixtures, starting with the given + # fixturenames as the initial set. 
As we have to visit all + # factory definitions anyway, we also return an arg2fixturedefs + # mapping so that the caller can reuse it and does not have + # to re-discover fixturedefs again for each fixturename + # (discovering matching fixtures for a given name/node is expensive). + + parentid = parentnode.nodeid + fixturenames_closure = list(self._getautousenames(parentid)) + + def merge(otherlist: Iterable[str]) -> None: + for arg in otherlist: + if arg not in fixturenames_closure: + fixturenames_closure.append(arg) + + merge(fixturenames) + + # At this point, fixturenames_closure contains what we call "initialnames", + # which is a set of fixturenames the function immediately requests. We + # need to return it as well, so save this. + initialnames = tuple(fixturenames_closure) + + arg2fixturedefs: Dict[str, Sequence[FixtureDef[Any]]] = {} + lastlen = -1 + while lastlen != len(fixturenames_closure): + lastlen = len(fixturenames_closure) + for argname in fixturenames_closure: + if argname in ignore_args: + continue + if argname in arg2fixturedefs: + continue + fixturedefs = self.getfixturedefs(argname, parentid) + if fixturedefs: + arg2fixturedefs[argname] = fixturedefs + merge(fixturedefs[-1].argnames) + + def sort_by_scope(arg_name: str) -> Scope: + try: + fixturedefs = arg2fixturedefs[arg_name] + except KeyError: + return Scope.Function + else: + return fixturedefs[-1]._scope + + fixturenames_closure.sort(key=sort_by_scope, reverse=True) + return initialnames, fixturenames_closure, arg2fixturedefs + + def pytest_generate_tests(self, metafunc: "Metafunc") -> None: + """Generate new tests based on parametrized fixtures used by the given metafunc""" + + def get_parametrize_mark_argnames(mark: Mark) -> Sequence[str]: + args, _ = ParameterSet._parse_parametrize_args(*mark.args, **mark.kwargs) + return args + + for argname in metafunc.fixturenames: + # Get the FixtureDefs for the argname. + fixture_defs = metafunc._arg2fixturedefs.get(argname) + if not fixture_defs: + # Will raise FixtureLookupError at setup time if not parametrized somewhere + # else (e.g @pytest.mark.parametrize) + continue + + # If the test itself parametrizes using this argname, give it + # precedence. + if any( + argname in get_parametrize_mark_argnames(mark) + for mark in metafunc.definition.iter_markers("parametrize") + ): + continue + + # In the common case we only look at the fixture def with the + # closest scope (last in the list). But if the fixture overrides + # another fixture, while requesting the super fixture, keep going + # in case the super fixture is parametrized (#1953). + for fixturedef in reversed(fixture_defs): + # Fixture is parametrized, apply it and stop. + if fixturedef.params is not None: + metafunc.parametrize( + argname, + fixturedef.params, + indirect=True, + scope=fixturedef.scope, + ids=fixturedef.ids, + ) + break + + # Not requesting the overridden super fixture, stop. + if argname not in fixturedef.argnames: + break + + # Try next super fixture, if any. + + def pytest_collection_modifyitems(self, items: List[nodes.Item]) -> None: + # Separate parametrized setups. 
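# Illustrative sketch of the #1953 override handled above (hypothetical
# files, not pytest source):
#
#     # conftest.py
#     @pytest.fixture(params=[1, 2])
#     def val(request):
#         return request.param
#
#     # test_mod.py
#     @pytest.fixture
#     def val(val):  # overrides, but still requests the super fixture
#         return val * 10
#
# The reversed() walk keeps going past the unparametrized override so the
# parametrized super fixture still produces two test invocations.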
+ items[:] = reorder_items(items) + + @overload + def parsefactories( + self, + node_or_obj: nodes.Node, + *, + unittest: bool = ..., + ) -> None: + raise NotImplementedError() + + @overload + def parsefactories( # noqa: F811 + self, + node_or_obj: object, + nodeid: Optional[str], + *, + unittest: bool = ..., + ) -> None: + raise NotImplementedError() + + def parsefactories( # noqa: F811 + self, + node_or_obj: Union[nodes.Node, object], + nodeid: Union[str, NotSetType, None] = NOTSET, + *, + unittest: bool = False, + ) -> None: + """Collect fixtures from a collection node or object. + + Found fixtures are parsed into `FixtureDef`s and saved. + + If `node_or_object` is a collection node (with an underlying Python + object), the node's object is traversed and the node's nodeid is used to + determine the fixtures' visibilty. `nodeid` must not be specified in + this case. + + If `node_or_object` is an object (e.g. a plugin), the object is + traversed and the given `nodeid` is used to determine the fixtures' + visibility. `nodeid` must be specified in this case; None and "" mean + total visibility. + """ + if nodeid is not NOTSET: + holderobj = node_or_obj + else: + assert isinstance(node_or_obj, nodes.Node) + holderobj = cast(object, node_or_obj.obj) # type: ignore[attr-defined] + assert isinstance(node_or_obj.nodeid, str) + nodeid = node_or_obj.nodeid + if holderobj in self._holderobjseen: + return + + self._holderobjseen.add(holderobj) + autousenames = [] + for name in dir(holderobj): + # ugly workaround for one of the fspath deprecated property of node + # todo: safely generalize + if isinstance(holderobj, nodes.Node) and name == "fspath": + continue + + # The attribute can be an arbitrary descriptor, so the attribute + # access below can raise. safe_getatt() ignores such exceptions. + obj = safe_getattr(holderobj, name, None) + marker = getfixturemarker(obj) + if not isinstance(marker, FixtureFunctionMarker): + # Magic globals with __getattr__ might have got us a wrong + # fixture attribute. + continue + + if marker.name: + name = marker.name + + # During fixture definition we wrap the original fixture function + # to issue a warning if called directly, so here we unwrap it in + # order to not emit the warning when pytest itself calls the + # fixture function. + obj = get_real_method(obj, holderobj) + + fixture_def = FixtureDef( + fixturemanager=self, + baseid=nodeid, + argname=name, + func=obj, + scope=marker.scope, + params=marker.params, + unittest=unittest, + ids=marker.ids, + ) + + faclist = self._arg2fixturedefs.setdefault(name, []) + if fixture_def.has_location: + faclist.append(fixture_def) + else: + # fixturedefs with no location are at the front + # so this inserts the current fixturedef after the + # existing fixturedefs from external plugins but + # before the fixturedefs provided in conftests. + i = len([f for f in faclist if not f.has_location]) + faclist.insert(i, fixture_def) + if marker.autouse: + autousenames.append(name) + + if autousenames: + self._nodeid_autousenames.setdefault(nodeid or "", []).extend(autousenames) + + def getfixturedefs( + self, argname: str, nodeid: str + ) -> Optional[Sequence[FixtureDef[Any]]]: + """Get a list of fixtures which are applicable to the given node id. + + :param str argname: Name of the fixture to search for. + :param str nodeid: Full node id of the requesting test. 
+ :rtype: Sequence[FixtureDef] + """ + try: + fixturedefs = self._arg2fixturedefs[argname] + except KeyError: + return None + return tuple(self._matchfactories(fixturedefs, nodeid)) + + def _matchfactories( + self, fixturedefs: Iterable[FixtureDef[Any]], nodeid: str + ) -> Iterator[FixtureDef[Any]]: + parentnodeids = set(nodes.iterparentnodeids(nodeid)) + for fixturedef in fixturedefs: + if fixturedef.baseid in parentnodeids: + yield fixturedef diff --git a/venv/lib/python3.11/site-packages/_pytest/freeze_support.py b/venv/lib/python3.11/site-packages/_pytest/freeze_support.py new file mode 100644 index 0000000..9f8ea23 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/freeze_support.py @@ -0,0 +1,44 @@ +"""Provides a function to report all internal modules for using freezing +tools.""" +import types +from typing import Iterator +from typing import List +from typing import Union + + +def freeze_includes() -> List[str]: + """Return a list of module names used by pytest that should be + included by cx_freeze.""" + import _pytest + + result = list(_iter_all_modules(_pytest)) + return result + + +def _iter_all_modules( + package: Union[str, types.ModuleType], + prefix: str = "", +) -> Iterator[str]: + """Iterate over the names of all modules that can be found in the given + package, recursively. + + >>> import _pytest + >>> list(_iter_all_modules(_pytest)) + ['_pytest._argcomplete', '_pytest._code.code', ...] + """ + import os + import pkgutil + + if isinstance(package, str): + path = package + else: + # Type ignored because typeshed doesn't define ModuleType.__path__ + # (only defined on packages). + package_path = package.__path__ # type: ignore[attr-defined] + path, prefix = package_path[0], package.__name__ + "." + for _, name, is_package in pkgutil.iter_modules([path]): + if is_package: + for m in _iter_all_modules(os.path.join(path, name), prefix=name + "."): + yield prefix + m + else: + yield prefix + name diff --git a/venv/lib/python3.11/site-packages/_pytest/helpconfig.py b/venv/lib/python3.11/site-packages/_pytest/helpconfig.py new file mode 100644 index 0000000..4308706 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/helpconfig.py @@ -0,0 +1,266 @@ +"""Version info, help messages, tracing configuration.""" +import os +import sys +from argparse import Action +from typing import List +from typing import Optional +from typing import Union + +import pytest +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import PrintHelp +from _pytest.config.argparsing import Parser + + +class HelpAction(Action): + """An argparse Action that will raise an exception in order to skip the + rest of the argument parsing when --help is passed. + + This prevents argparse from quitting due to missing required arguments + when any are defined, for example by ``pytest_addoption``. + This is similar to the way that the builtin argparse --help option is + implemented by raising SystemExit. + """ + + def __init__(self, option_strings, dest=None, default=False, help=None): + super().__init__( + option_strings=option_strings, + dest=dest, + const=True, + default=default, + nargs=0, + help=help, + ) + + def __call__(self, parser, namespace, values, option_string=None): + setattr(namespace, self.dest, self.const) + + # We should only skip the rest of the parsing after preparse is done. 
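# Illustrative effect (sketch under assumed plugin options, not pytest
# source): without this Action, "pytest -h" could abort with "the following
# arguments are required: ..." whenever a plugin registered a required
# option, e.g.:
#
#     def pytest_addoption(parser):
#         parser.addoption("--env", required=True)  # hypothetical option
#
# HelpAction instead records the flag and defers raising PrintHelp until
# preparsing has finished, so the full help text can still be shown.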
+        if getattr(parser._parser, "after_preparse", False):
+            raise PrintHelp
+
+
+def pytest_addoption(parser: Parser) -> None:
+    group = parser.getgroup("debugconfig")
+    group.addoption(
+        "--version",
+        "-V",
+        action="count",
+        default=0,
+        dest="version",
+        help="Display pytest version and information about plugins. "
+        "When given twice, also display information about plugins.",
+    )
+    group._addoption(
+        "-h",
+        "--help",
+        action=HelpAction,
+        dest="help",
+        help="Show help message and configuration info",
+    )
+    group._addoption(
+        "-p",
+        action="append",
+        dest="plugins",
+        default=[],
+        metavar="name",
+        help="Early-load given plugin module name or entry point (multi-allowed). "
+        "To avoid loading of plugins, use the `no:` prefix, e.g. "
+        "`no:doctest`.",
+    )
+    group.addoption(
+        "--traceconfig",
+        "--trace-config",
+        action="store_true",
+        default=False,
+        help="Trace considerations of conftest.py files",
+    )
+    group.addoption(
+        "--debug",
+        action="store",
+        nargs="?",
+        const="pytestdebug.log",
+        dest="debug",
+        metavar="DEBUG_FILE_NAME",
+        help="Store internal tracing debug information in this log file. "
+        "This file is opened with 'w' and truncated as a result, care advised. "
+        "Default: pytestdebug.log.",
+    )
+    group._addoption(
+        "-o",
+        "--override-ini",
+        dest="override_ini",
+        action="append",
+        help='Override ini option with "option=value" style, '
+        "e.g. `-o xfail_strict=True -o cache_dir=cache`.",
+    )
+
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_cmdline_parse():
+    outcome = yield
+    config: Config = outcome.get_result()
+
+    if config.option.debug:
+        # --debug | --debug <file> was provided.
+        path = config.option.debug
+        debugfile = open(path, "w", encoding="utf-8")
+        debugfile.write(
+            "versions pytest-%s, "
+            "python-%s\ncwd=%s\nargs=%s\n\n"
+            % (
+                pytest.__version__,
+                ".".join(map(str, sys.version_info)),
+                os.getcwd(),
+                config.invocation_params.args,
+            )
+        )
+        config.trace.root.setwriter(debugfile.write)
+        undo_tracing = config.pluginmanager.enable_tracing()
+        sys.stderr.write("writing pytest debug information to %s\n" % path)
+
+        def unset_tracing() -> None:
+            debugfile.close()
+            sys.stderr.write("wrote pytest debug information to %s\n" % debugfile.name)
+            config.trace.root.setwriter(None)
+            undo_tracing()
+
+        config.add_cleanup(unset_tracing)
+
+
+def showversion(config: Config) -> None:
+    if config.option.version > 1:
+        sys.stdout.write(
+            "This is pytest version {}, imported from {}\n".format(
+                pytest.__version__, pytest.__file__
+            )
+        )
+        plugininfo = getpluginversioninfo(config)
+        if plugininfo:
+            for line in plugininfo:
+                sys.stdout.write(line + "\n")
+    else:
+        sys.stdout.write(f"pytest {pytest.__version__}\n")
+
+
+def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]:
+    if config.option.version > 0:
+        showversion(config)
+        return 0
+    elif config.option.help:
+        config._do_configure()
+        showhelp(config)
+        config._ensure_unconfigure()
+        return 0
+    return None
+
+
+def showhelp(config: Config) -> None:
+    import textwrap
+
+    reporter = config.pluginmanager.get_plugin("terminalreporter")
+    tw = reporter._tw
+    tw.write(config._parser.optparser.format_help())
+    tw.line()
+    tw.line(
+        "[pytest] ini-options in the first "
+        "pytest.ini|tox.ini|setup.cfg|pyproject.toml file found:"
+    )
+    tw.line()
+
+    columns = tw.fullwidth  # costly call
+    indent_len = 24  # based on argparse's max_help_position=24
+    indent = " " * indent_len
+    for name in config._parser._ininames:
+        help, type, default = config._parser._inidict[name]
+        if type is None:
+            type = "string"
+        if help is None:
+            raise TypeError(f"help argument cannot be None for {name}")
+        spec = f"{name} ({type}):"
+        tw.write("  %s" % spec)
+        spec_len = len(spec)
+        if spec_len > (indent_len - 3):
+            # Display help starting at a new line.
+            tw.line()
+            helplines = textwrap.wrap(
+                help,
+                columns,
+                initial_indent=indent,
+                subsequent_indent=indent,
+                break_on_hyphens=False,
+            )
+
+            for line in helplines:
+                tw.line(line)
+        else:
+            # Display help starting after the spec, following lines indented.
+            tw.write(" " * (indent_len - spec_len - 2))
+            wrapped = textwrap.wrap(help, columns - indent_len, break_on_hyphens=False)
+
+            if wrapped:
+                tw.line(wrapped[0])
+                for line in wrapped[1:]:
+                    tw.line(indent + line)
+
+    tw.line()
+    tw.line("Environment variables:")
+    vars = [
+        ("PYTEST_ADDOPTS", "Extra command line options"),
+        ("PYTEST_PLUGINS", "Comma-separated plugins to load during startup"),
+        ("PYTEST_DISABLE_PLUGIN_AUTOLOAD", "Set to disable plugin auto-loading"),
+        ("PYTEST_DEBUG", "Set to enable debug tracing of pytest's internals"),
+    ]
+    for name, help in vars:
+        tw.line(f"  {name:<24} {help}")
+    tw.line()
+    tw.line()
+
+    tw.line("to see available markers type: pytest --markers")
+    tw.line("to see available fixtures type: pytest --fixtures")
+    tw.line(
+        "(shown according to specified file_or_dir or current dir "
+        "if not specified; fixtures with leading '_' are only shown "
+        "with the '-v' option)"
+    )
+
+    for warningreport in reporter.stats.get("warnings", []):
+        tw.line("warning : " + warningreport.message, red=True)
+    return
+
+
+conftest_options = [("pytest_plugins", "list of plugin names to load")]
+
+
+def getpluginversioninfo(config: Config) -> List[str]:
+    lines = []
+    plugininfo = config.pluginmanager.list_plugin_distinfo()
+    if plugininfo:
+        lines.append("setuptools registered plugins:")
+        for plugin, dist in plugininfo:
+            loc = getattr(plugin, "__file__", repr(plugin))
+            content = f"{dist.project_name}-{dist.version} at {loc}"
+            lines.append("  " + content)
+    return lines
+
+
+def pytest_report_header(config: Config) -> List[str]:
+    lines = []
+    if config.option.debug or config.option.traceconfig:
+        lines.append(f"using: pytest-{pytest.__version__}")
+
+        verinfo = getpluginversioninfo(config)
+        if verinfo:
+            lines.extend(verinfo)
+
+    if config.option.traceconfig:
+        lines.append("active plugins:")
+        items = config.pluginmanager.list_name_plugin()
+        for name, plugin in items:
+            if hasattr(plugin, "__file__"):
+                r = plugin.__file__
+            else:
+                r = repr(plugin)
+            lines.append(f"  {name:<20}: {r}")
+    return lines
diff --git a/venv/lib/python3.11/site-packages/_pytest/hookspec.py b/venv/lib/python3.11/site-packages/_pytest/hookspec.py
new file mode 100644
index 0000000..1f7c368
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/_pytest/hookspec.py
@@ -0,0 +1,979 @@
+"""Hook specifications for pytest plugins which are invoked by pytest itself
+and by builtin plugins."""
+from pathlib import Path
+from typing import Any
+from typing import Dict
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+
+from pluggy import HookspecMarker
+
+from _pytest.deprecated import WARNING_CMDLINE_PREPARSE_HOOK
+
+if TYPE_CHECKING:
+    import pdb
+    import warnings
+    from typing_extensions import Literal
+
+    from _pytest._code.code import ExceptionRepr
+    from _pytest._code.code import ExceptionInfo
+    from _pytest.config import Config
+    from _pytest.config import ExitCode
+    from _pytest.config import PytestPluginManager
+    from _pytest.config import _PluggyPlugin
+    from _pytest.config.argparsing import Parser
+    from _pytest.fixtures import FixtureDef
+    from _pytest.fixtures import SubRequest
+    from _pytest.main import Session
+    from _pytest.nodes import Collector
+    from _pytest.nodes import Item
+    from _pytest.outcomes import Exit
+    from _pytest.python import Class
+    from _pytest.python import Function
+    from _pytest.python import Metafunc
+    from _pytest.python import Module
+    from _pytest.reports import CollectReport
+    from _pytest.reports import TestReport
+    from _pytest.runner import CallInfo
+    from _pytest.terminal import TerminalReporter
+    from _pytest.terminal import TestShortLogReport
+    from _pytest.compat import LEGACY_PATH
+
+
+hookspec = HookspecMarker("pytest")
+
+# -------------------------------------------------------------------------
+# Initialization hooks called for every plugin
+# -------------------------------------------------------------------------
+
+
+@hookspec(historic=True)
+def pytest_addhooks(pluginmanager: "PytestPluginManager") -> None:
+    """Called at plugin registration time to allow adding new hooks via a call to
+    ``pluginmanager.add_hookspecs(module_or_class, prefix)``.
+
+    :param pytest.PytestPluginManager pluginmanager: The pytest plugin manager.
+
+    .. note::
+        This hook is incompatible with ``hookwrapper=True``.
+    """
+
+
+@hookspec(historic=True)
+def pytest_plugin_registered(
+    plugin: "_PluggyPlugin", manager: "PytestPluginManager"
+) -> None:
+    """A new pytest plugin got registered.
+
+    :param plugin: The plugin module or instance.
+    :param pytest.PytestPluginManager manager: pytest plugin manager.
+
+    .. note::
+        This hook is incompatible with ``hookwrapper=True``.
+    """
+
+
+@hookspec(historic=True)
+def pytest_addoption(parser: "Parser", pluginmanager: "PytestPluginManager") -> None:
+    """Register argparse-style options and ini-style config values,
+    called once at the beginning of a test run.
+
+    .. note::
+
+        This function should be implemented only in plugins or ``conftest.py``
+        files situated at the tests root directory due to how pytest
+        :ref:`discovers plugins during startup <pluginorder>`.
+
+    :param pytest.Parser parser:
+        To add command line options, call
+        :py:func:`parser.addoption(...) <pytest.Parser.addoption>`.
+        To add ini-file values call :py:func:`parser.addini(...)
+        <pytest.Parser.addini>`.
+
+    :param pytest.PytestPluginManager pluginmanager:
+        The pytest plugin manager, which can be used to install :py:func:`hookspec`'s
+        or :py:func:`hookimpl`'s and allow one plugin to call another plugin's hooks
+        to change how command line options are added.
+
+    Options can later be accessed through the
+    :py:class:`config <pytest.Config>` object, respectively:
+
+    - :py:func:`config.getoption(name) <pytest.Config.getoption>` to
+      retrieve the value of a command line option.
+
+    - :py:func:`config.getini(name) <pytest.Config.getini>` to retrieve
+      a value read from an ini-style file.
+
+    The config object is passed around on many internal objects via the ``.config``
+    attribute or can be retrieved as the ``pytestconfig`` fixture.
+
+    .. note::
+        This hook is incompatible with ``hookwrapper=True``.
+    """
+
+
+@hookspec(historic=True)
+def pytest_configure(config: "Config") -> None:
+    """Allow plugins and conftest files to perform initial configuration.
+
+    This hook is called for every plugin and initial conftest file
+    after command line options have been parsed.
+
+    After that, the hook is called for other conftest files as they are
+    imported.
+
+    ..
note:: + This hook is incompatible with ``hookwrapper=True``. + + :param pytest.Config config: The pytest config object. + """ + + +# ------------------------------------------------------------------------- +# Bootstrapping hooks called for plugins registered early enough: +# internal and 3rd party plugins. +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_cmdline_parse( + pluginmanager: "PytestPluginManager", args: List[str] +) -> Optional["Config"]: + """Return an initialized :class:`~pytest.Config`, parsing the specified args. + + Stops at first non-None result, see :ref:`firstresult`. + + .. note:: + This hook will only be called for plugin classes passed to the + ``plugins`` arg when using `pytest.main`_ to perform an in-process + test run. + + :param pluginmanager: The pytest plugin manager. + :param args: List of arguments passed on the command line. + :returns: A pytest config object. + """ + + +@hookspec(warn_on_impl=WARNING_CMDLINE_PREPARSE_HOOK) +def pytest_cmdline_preparse(config: "Config", args: List[str]) -> None: + """(**Deprecated**) modify command line arguments before option parsing. + + This hook is considered deprecated and will be removed in a future pytest version. Consider + using :hook:`pytest_load_initial_conftests` instead. + + .. note:: + This hook will not be called for ``conftest.py`` files, only for setuptools plugins. + + :param config: The pytest config object. + :param args: Arguments passed on the command line. + """ + + +@hookspec(firstresult=True) +def pytest_cmdline_main(config: "Config") -> Optional[Union["ExitCode", int]]: + """Called for performing the main command line action. The default + implementation will invoke the configure hooks and runtest_mainloop. + + Stops at first non-None result, see :ref:`firstresult`. + + :param config: The pytest config object. + :returns: The exit code. + """ + + +def pytest_load_initial_conftests( + early_config: "Config", parser: "Parser", args: List[str] +) -> None: + """Called to implement the loading of initial conftest files ahead + of command line option parsing. + + .. note:: + This hook will not be called for ``conftest.py`` files, only for setuptools plugins. + + :param early_config: The pytest config object. + :param args: Arguments passed on the command line. + :param parser: To add command line options. + """ + + +# ------------------------------------------------------------------------- +# collection hooks +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_collection(session: "Session") -> Optional[object]: + """Perform the collection phase for the given session. + + Stops at first non-None result, see :ref:`firstresult`. + The return value is not used, but only stops further processing. + + The default collection phase is this (see individual hooks for full details): + + 1. Starting from ``session`` as the initial collector: + + 1. ``pytest_collectstart(collector)`` + 2. ``report = pytest_make_collect_report(collector)`` + 3. ``pytest_exception_interact(collector, call, report)`` if an interactive exception occurred + 4. For each collected node: + + 1. If an item, ``pytest_itemcollected(item)`` + 2. If a collector, recurse into it. + + 5. ``pytest_collectreport(report)`` + + 2. ``pytest_collection_modifyitems(session, config, items)`` + + 1. ``pytest_deselected(items)`` for any deselected items (may be called multiple times) + + 3. 
``pytest_collection_finish(session)`` + 4. Set ``session.items`` to the list of collected items + 5. Set ``session.testscollected`` to the number of collected items + + You can implement this hook to only perform some action before collection, + for example the terminal plugin uses it to start displaying the collection + counter (and returns `None`). + + :param session: The pytest session object. + """ + + +def pytest_collection_modifyitems( + session: "Session", config: "Config", items: List["Item"] +) -> None: + """Called after collection has been performed. May filter or re-order + the items in-place. + + :param session: The pytest session object. + :param config: The pytest config object. + :param items: List of item objects. + """ + + +def pytest_collection_finish(session: "Session") -> None: + """Called after collection has been performed and modified. + + :param session: The pytest session object. + """ + + +@hookspec(firstresult=True) +def pytest_ignore_collect( + collection_path: Path, path: "LEGACY_PATH", config: "Config" +) -> Optional[bool]: + """Return True to prevent considering this path for collection. + + This hook is consulted for all files and directories prior to calling + more specific hooks. + + Stops at first non-None result, see :ref:`firstresult`. + + :param collection_path: The path to analyze. + :param path: The path to analyze (deprecated). + :param config: The pytest config object. + + .. versionchanged:: 7.0.0 + The ``collection_path`` parameter was added as a :class:`pathlib.Path` + equivalent of the ``path`` parameter. The ``path`` parameter + has been deprecated. + """ + + +def pytest_collect_file( + file_path: Path, path: "LEGACY_PATH", parent: "Collector" +) -> "Optional[Collector]": + """Create a :class:`~pytest.Collector` for the given path, or None if not relevant. + + The new node needs to have the specified ``parent`` as a parent. + + :param file_path: The path to analyze. + :param path: The path to collect (deprecated). + + .. versionchanged:: 7.0.0 + The ``file_path`` parameter was added as a :class:`pathlib.Path` + equivalent of the ``path`` parameter. The ``path`` parameter + has been deprecated. + """ + + +# logging hooks for collection + + +def pytest_collectstart(collector: "Collector") -> None: + """Collector starts collecting. + + :param collector: + The collector. + """ + + +def pytest_itemcollected(item: "Item") -> None: + """We just collected a test item. + + :param item: + The item. + """ + + +def pytest_collectreport(report: "CollectReport") -> None: + """Collector finished collecting. + + :param report: + The collect report. + """ + + +def pytest_deselected(items: Sequence["Item"]) -> None: + """Called for deselected test items, e.g. by keyword. + + May be called multiple times. + + :param items: + The items. + """ + + +@hookspec(firstresult=True) +def pytest_make_collect_report(collector: "Collector") -> "Optional[CollectReport]": + """Perform :func:`collector.collect() ` and return + a :class:`~pytest.CollectReport`. + + Stops at first non-None result, see :ref:`firstresult`. + + :param collector: + The collector. + """ + + +# ------------------------------------------------------------------------- +# Python test function related hooks +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_pycollect_makemodule( + module_path: Path, path: "LEGACY_PATH", parent +) -> Optional["Module"]: + """Return a :class:`pytest.Module` collector or None for the given path. 
+ + This hook will be called for each matching test module path. + The :hook:`pytest_collect_file` hook needs to be used if you want to + create test modules for files that do not match as a test module. + + Stops at first non-None result, see :ref:`firstresult`. + + :param module_path: The path of the module to collect. + :param path: The path of the module to collect (deprecated). + + .. versionchanged:: 7.0.0 + The ``module_path`` parameter was added as a :class:`pathlib.Path` + equivalent of the ``path`` parameter. + + The ``path`` parameter has been deprecated in favor of ``fspath``. + """ + + +@hookspec(firstresult=True) +def pytest_pycollect_makeitem( + collector: Union["Module", "Class"], name: str, obj: object +) -> Union[None, "Item", "Collector", List[Union["Item", "Collector"]]]: + """Return a custom item/collector for a Python object in a module, or None. + + Stops at first non-None result, see :ref:`firstresult`. + + :param collector: + The module/class collector. + :param name: + The name of the object in the module/class. + :param obj: + The object. + :returns: + The created items/collectors. + """ + + +@hookspec(firstresult=True) +def pytest_pyfunc_call(pyfuncitem: "Function") -> Optional[object]: + """Call underlying test function. + + Stops at first non-None result, see :ref:`firstresult`. + + :param pyfuncitem: + The function item. + """ + + +def pytest_generate_tests(metafunc: "Metafunc") -> None: + """Generate (multiple) parametrized calls to a test function. + + :param metafunc: + The :class:`~pytest.Metafunc` helper for the test function. + """ + + +@hookspec(firstresult=True) +def pytest_make_parametrize_id( + config: "Config", val: object, argname: str +) -> Optional[str]: + """Return a user-friendly string representation of the given ``val`` + that will be used by @pytest.mark.parametrize calls, or None if the hook + doesn't know about ``val``. + + The parameter name is available as ``argname``, if required. + + Stops at first non-None result, see :ref:`firstresult`. + + :param config: The pytest config object. + :param val: The parametrized value. + :param str argname: The automatic parameter name produced by pytest. + """ + + +# ------------------------------------------------------------------------- +# runtest related hooks +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_runtestloop(session: "Session") -> Optional[object]: + """Perform the main runtest loop (after collection finished). + + The default hook implementation performs the runtest protocol for all items + collected in the session (``session.items``), unless the collection failed + or the ``collectonly`` pytest option is set. + + If at any point :py:func:`pytest.exit` is called, the loop is + terminated immediately. + + If at any point ``session.shouldfail`` or ``session.shouldstop`` are set, the + loop is terminated after the runtest protocol for the current item is finished. + + :param session: The pytest session object. + + Stops at first non-None result, see :ref:`firstresult`. + The return value is not used, but only stops further processing. + """ + + +@hookspec(firstresult=True) +def pytest_runtest_protocol( + item: "Item", nextitem: "Optional[Item]" +) -> Optional[object]: + """Perform the runtest protocol for a single test item. 
+
+    The default runtest protocol is this (see individual hooks for full details):
+
+    - ``pytest_runtest_logstart(nodeid, location)``
+
+    - Setup phase:
+        - ``call = pytest_runtest_setup(item)`` (wrapped in ``CallInfo(when="setup")``)
+        - ``report = pytest_runtest_makereport(item, call)``
+        - ``pytest_runtest_logreport(report)``
+        - ``pytest_exception_interact(call, report)`` if an interactive exception occurred
+
+    - Call phase, if the setup passed and the ``setuponly`` pytest option is not set:
+        - ``call = pytest_runtest_call(item)`` (wrapped in ``CallInfo(when="call")``)
+        - ``report = pytest_runtest_makereport(item, call)``
+        - ``pytest_runtest_logreport(report)``
+        - ``pytest_exception_interact(call, report)`` if an interactive exception occurred
+
+    - Teardown phase:
+        - ``call = pytest_runtest_teardown(item, nextitem)`` (wrapped in ``CallInfo(when="teardown")``)
+        - ``report = pytest_runtest_makereport(item, call)``
+        - ``pytest_runtest_logreport(report)``
+        - ``pytest_exception_interact(call, report)`` if an interactive exception occurred
+
+    - ``pytest_runtest_logfinish(nodeid, location)``
+
+    :param item: Test item for which the runtest protocol is performed.
+    :param nextitem: The scheduled-to-be-next test item (or None if this is the end my friend).
+
+    Stops at first non-None result, see :ref:`firstresult`.
+    The return value is not used, but only stops further processing.
+    """
+
+
+def pytest_runtest_logstart(
+    nodeid: str, location: Tuple[str, Optional[int], str]
+) -> None:
+    """Called at the start of running the runtest protocol for a single item.
+
+    See :hook:`pytest_runtest_protocol` for a description of the runtest protocol.
+
+    :param nodeid: Full node ID of the item.
+    :param location: A tuple of ``(filename, lineno, testname)``
+        where ``filename`` is a file path relative to ``config.rootpath``
+        and ``lineno`` is 0-based.
+    """
+
+
+def pytest_runtest_logfinish(
+    nodeid: str, location: Tuple[str, Optional[int], str]
+) -> None:
+    """Called at the end of running the runtest protocol for a single item.
+
+    See :hook:`pytest_runtest_protocol` for a description of the runtest protocol.
+
+    :param nodeid: Full node ID of the item.
+    :param location: A tuple of ``(filename, lineno, testname)``
+        where ``filename`` is a file path relative to ``config.rootpath``
+        and ``lineno`` is 0-based.
+    """
+
+
+def pytest_runtest_setup(item: "Item") -> None:
+    """Called to perform the setup phase for a test item.
+
+    The default implementation runs ``setup()`` on ``item`` and all of its
+    parents (which haven't been setup yet). This includes obtaining the
+    values of fixtures required by the item (which haven't been obtained
+    yet).
+
+    :param item:
+        The item.
+    """
+
+
+def pytest_runtest_call(item: "Item") -> None:
+    """Called to run the test for test item (the call phase).
+
+    The default implementation calls ``item.runtest()``.
+
+    :param item:
+        The item.
+    """
+
+
+def pytest_runtest_teardown(item: "Item", nextitem: Optional["Item"]) -> None:
+    """Called to perform the teardown phase for a test item.
+
+    The default implementation runs the finalizers and calls ``teardown()``
+    on ``item`` and all of its parents (which need to be torn down). This
+    includes running the teardown phase of fixtures required by the item (if
+    they go out of scope).
+
+    :param item:
+        The item.
+    :param nextitem:
+        The scheduled-to-be-next test item (None if no further test item is
+        scheduled). This argument is used to perform exact teardowns, i.e.
+        calling just enough finalizers so that nextitem only needs to call
+        setup functions.
+    """
+
+
+@hookspec(firstresult=True)
+def pytest_runtest_makereport(
+    item: "Item", call: "CallInfo[None]"
+) -> Optional["TestReport"]:
+    """Called to create a :class:`~pytest.TestReport` for each of
+    the setup, call and teardown runtest phases of a test item.
+
+    See :hook:`pytest_runtest_protocol` for a description of the runtest protocol.
+
+    :param item: The item.
+    :param call: The :class:`~pytest.CallInfo` for the phase.
+
+    Stops at first non-None result, see :ref:`firstresult`.
+    """
+
+
+def pytest_runtest_logreport(report: "TestReport") -> None:
+    """Process the :class:`~pytest.TestReport` produced for each
+    of the setup, call and teardown runtest phases of an item.
+
+    See :hook:`pytest_runtest_protocol` for a description of the runtest protocol.
+    """
+
+
+@hookspec(firstresult=True)
+def pytest_report_to_serializable(
+    config: "Config",
+    report: Union["CollectReport", "TestReport"],
+) -> Optional[Dict[str, Any]]:
+    """Serialize the given report object into a data structure suitable for
+    sending over the wire, e.g. converted to JSON.
+
+    :param config: The pytest config object.
+    :param report: The report.
+    """
+
+
+@hookspec(firstresult=True)
+def pytest_report_from_serializable(
+    config: "Config",
+    data: Dict[str, Any],
+) -> Optional[Union["CollectReport", "TestReport"]]:
+    """Restore a report object previously serialized with
+    :hook:`pytest_report_to_serializable`.
+
+    :param config: The pytest config object.
+    """
+
+
+# -------------------------------------------------------------------------
+# Fixture related hooks
+# -------------------------------------------------------------------------
+
+
+@hookspec(firstresult=True)
+def pytest_fixture_setup(
+    fixturedef: "FixtureDef[Any]", request: "SubRequest"
+) -> Optional[object]:
+    """Perform fixture setup execution.
+
+    :param fixturedef:
+        The fixture definition object.
+    :param request:
+        The fixture request object.
+    :returns:
+        The return value of the call to the fixture function.
+
+    Stops at first non-None result, see :ref:`firstresult`.
+
+    .. note::
+        If the fixture function returns None, other implementations of
+        this hook function will continue to be called, according to the
+        behavior of the :ref:`firstresult` option.
+    """
+
+
+def pytest_fixture_post_finalizer(
+    fixturedef: "FixtureDef[Any]", request: "SubRequest"
+) -> None:
+    """Called after fixture teardown, but before the cache is cleared, so
+    the fixture result ``fixturedef.cached_result`` is still available (not
+    ``None``).
+
+    :param fixturedef:
+        The fixture definition object.
+    :param request:
+        The fixture request object.
+    """
+
+
+# -------------------------------------------------------------------------
+# test session related hooks
+# -------------------------------------------------------------------------
+
+
+def pytest_sessionstart(session: "Session") -> None:
+    """Called after the ``Session`` object has been created and before performing collection
+    and entering the run test loop.
+
+    :param session: The pytest session object.
+    """
+
+
+def pytest_sessionfinish(
+    session: "Session",
+    exitstatus: Union[int, "ExitCode"],
+) -> None:
+    """Called after whole test run finished, right before returning the exit status to the system.
+
+    :param session: The pytest session object.
+    :param exitstatus: The status which pytest will return to the system.
+    """
+
+
+def pytest_unconfigure(config: "Config") -> None:
+    """Called before test process is exited.
+
+    :param config: The pytest config object.
+    """
+
+
+# -------------------------------------------------------------------------
+# hooks for customizing the assert methods
+# -------------------------------------------------------------------------
+
+
+def pytest_assertrepr_compare(
+    config: "Config", op: str, left: object, right: object
+) -> Optional[List[str]]:
+    """Return explanation for comparisons in failing assert expressions.
+
+    Return None for no custom explanation, otherwise return a list
+    of strings. The strings will be joined by newlines but any newlines
+    *in* a string will be escaped. Note that all but the first line will
+    be indented slightly, the intention is for the first line to be a summary.
+
+    :param config: The pytest config object.
+    :param op: The operator, e.g. `"=="`, `"!="`, `"not in"`.
+    :param left: The left operand.
+    :param right: The right operand.
+    """
+
+
+def pytest_assertion_pass(item: "Item", lineno: int, orig: str, expl: str) -> None:
+    """Called whenever an assertion passes.
+
+    .. versionadded:: 5.0
+
+    Use this hook to do some processing after a passing assertion.
+    The original assertion information is available in the `orig` string
+    and the pytest introspected assertion information is available in the
+    `expl` string.
+
+    This hook must be explicitly enabled by the ``enable_assertion_pass_hook``
+    ini-file option:
+
+    .. code-block:: ini
+
+        [pytest]
+        enable_assertion_pass_hook=true
+
+    You need to **clean the .pyc** files in your project directory and interpreter libraries
+    when enabling this option, as assertions will need to be re-written.
+
+    :param item: pytest item object of current test.
+    :param lineno: Line number of the assert statement.
+    :param orig: String with the original assertion.
+    :param expl: String with the assert explanation.
+    """
+
+
+# -------------------------------------------------------------------------
+# Hooks for influencing reporting (invoked from _pytest_terminal).
+# -------------------------------------------------------------------------
+
+
+def pytest_report_header(  # type:ignore[empty-body]
+    config: "Config", start_path: Path, startdir: "LEGACY_PATH"
+) -> Union[str, List[str]]:
+    """Return a string or list of strings to be displayed as header info for terminal reporting.
+
+    :param config: The pytest config object.
+    :param start_path: The starting dir.
+    :param startdir: The starting dir (deprecated).
+
+    .. note::
+
+        Lines returned by a plugin are displayed before those of plugins which
+        ran before it.
+        If you want to have your line(s) displayed first, use
+        :ref:`trylast=True <plugin-hookorder>`.
+
+    .. note::
+
+        This function should be implemented only in plugins or ``conftest.py``
+        files situated at the tests root directory due to how pytest
+        :ref:`discovers plugins during startup <pluginorder>`.
+
+    .. versionchanged:: 7.0.0
+        The ``start_path`` parameter was added as a :class:`pathlib.Path`
+        equivalent of the ``startdir`` parameter. The ``startdir`` parameter
+        has been deprecated.
+    """
+
+
+def pytest_report_collectionfinish(  # type:ignore[empty-body]
+    config: "Config",
+    start_path: Path,
+    startdir: "LEGACY_PATH",
+    items: Sequence["Item"],
+) -> Union[str, List[str]]:
+    """Return a string or list of strings to be displayed after collection
+    has finished successfully.
+
+    These strings will be displayed after the standard "collected X items" message.
+
+    .. versionadded:: 3.2
+
+    :param config: The pytest config object.
+    :param start_path: The starting dir.
+    :param startdir: The starting dir (deprecated).
+    :param items: List of pytest items that are going to be executed; this list should not be modified.
+
+    .. note::
+
+        Lines returned by a plugin are displayed before those of plugins which
+        ran before it.
+        If you want to have your line(s) displayed first, use
+        :ref:`trylast=True <plugin-hookorder>`.
+
+    .. versionchanged:: 7.0.0
+        The ``start_path`` parameter was added as a :class:`pathlib.Path`
+        equivalent of the ``startdir`` parameter. The ``startdir`` parameter
+        has been deprecated.
+    """
+
+
+@hookspec(firstresult=True)
+def pytest_report_teststatus(  # type:ignore[empty-body]
+    report: Union["CollectReport", "TestReport"], config: "Config"
+) -> "TestShortLogReport | Tuple[str, str, Union[str, Tuple[str, Mapping[str, bool]]]]":
+    """Return result-category, shortletter and verbose word for status
+    reporting.
+
+    The result-category is a category in which to count the result, for
+    example "passed", "skipped", "error" or the empty string.
+
+    The shortletter is shown as testing progresses, for example ".", "s",
+    "E" or the empty string.
+
+    The verbose word is shown as testing progresses in verbose mode, for
+    example "PASSED", "SKIPPED", "ERROR" or the empty string.
+
+    pytest may style these implicitly according to the report outcome.
+    To provide explicit styling, return a tuple for the verbose word,
+    for example ``"rerun", "R", ("RERUN", {"yellow": True})``.
+
+    :param report: The report object whose status is to be returned.
+    :param config: The pytest config object.
+    :returns: The test status.
+
+    Stops at first non-None result, see :ref:`firstresult`.
+    """
+
+
+def pytest_terminal_summary(
+    terminalreporter: "TerminalReporter",
+    exitstatus: "ExitCode",
+    config: "Config",
+) -> None:
+    """Add a section to terminal summary reporting.
+
+    :param terminalreporter: The internal terminal reporter object.
+    :param exitstatus: The exit status that will be reported back to the OS.
+    :param config: The pytest config object.
+
+    .. versionadded:: 4.2
+        The ``config`` parameter.
+    """
+
+
+@hookspec(historic=True)
+def pytest_warning_recorded(
+    warning_message: "warnings.WarningMessage",
+    when: "Literal['config', 'collect', 'runtest']",
+    nodeid: str,
+    location: Optional[Tuple[str, int, str]],
+) -> None:
+    """Process a warning captured by the internal pytest warnings plugin.
+
+    :param warning_message:
+        The captured warning. This is the same object produced by :py:func:`warnings.catch_warnings`, and contains
+        the same attributes as the parameters of :py:func:`warnings.showwarning`.
+
+    :param when:
+        Indicates when the warning was captured. Possible values:
+
+        * ``"config"``: during pytest configuration/initialization stage.
+        * ``"collect"``: during test collection.
+        * ``"runtest"``: during test execution.
+
+    :param nodeid:
+        Full id of the item.
+
+    :param location:
+        When available, holds information about the execution context of the captured
+        warning (filename, linenumber, function). ``function`` evaluates to <module>
+        when the execution context is at the module level.
+
+    ..
versionadded:: 6.0 + """ + + +# ------------------------------------------------------------------------- +# Hooks for influencing skipping +# ------------------------------------------------------------------------- + + +def pytest_markeval_namespace( # type:ignore[empty-body] + config: "Config", +) -> Dict[str, Any]: + """Called when constructing the globals dictionary used for + evaluating string conditions in xfail/skipif markers. + + This is useful when the condition for a marker requires + objects that are expensive or impossible to obtain during + collection time, which is required by normal boolean + conditions. + + .. versionadded:: 6.2 + + :param config: The pytest config object. + :returns: A dictionary of additional globals to add. + """ + + +# ------------------------------------------------------------------------- +# error handling and internal debugging hooks +# ------------------------------------------------------------------------- + + +def pytest_internalerror( + excrepr: "ExceptionRepr", + excinfo: "ExceptionInfo[BaseException]", +) -> Optional[bool]: + """Called for internal errors. + + Return True to suppress the fallback handling of printing an + INTERNALERROR message directly to sys.stderr. + + :param excrepr: The exception repr object. + :param excinfo: The exception info. + """ + + +def pytest_keyboard_interrupt( + excinfo: "ExceptionInfo[Union[KeyboardInterrupt, Exit]]", +) -> None: + """Called for keyboard interrupt. + + :param excinfo: The exception info. + """ + + +def pytest_exception_interact( + node: Union["Item", "Collector"], + call: "CallInfo[Any]", + report: Union["CollectReport", "TestReport"], +) -> None: + """Called when an exception was raised which can potentially be + interactively handled. + + May be called during collection (see :hook:`pytest_make_collect_report`), + in which case ``report`` is a :class:`CollectReport`. + + May be called during runtest of an item (see :hook:`pytest_runtest_protocol`), + in which case ``report`` is a :class:`TestReport`. + + This hook is not called if the exception that was raised is an internal + exception like ``skip.Exception``. + + :param node: + The item or collector. + :param call: + The call information. Contains the exception. + :param report: + The collection or test report. + """ + + +def pytest_enter_pdb(config: "Config", pdb: "pdb.Pdb") -> None: + """Called upon pdb.set_trace(). + + Can be used by plugins to take special action just before the python + debugger enters interactive mode. + + :param config: The pytest config object. + :param pdb: The Pdb instance. + """ + + +def pytest_leave_pdb(config: "Config", pdb: "pdb.Pdb") -> None: + """Called when leaving pdb (e.g. with continue after pdb.set_trace()). + + Can be used by plugins to take special action just after the python + debugger leaves interactive mode. + + :param config: The pytest config object. + :param pdb: The Pdb instance. + """ diff --git a/venv/lib/python3.11/site-packages/_pytest/junitxml.py b/venv/lib/python3.11/site-packages/_pytest/junitxml.py new file mode 100644 index 0000000..9242d46 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/junitxml.py @@ -0,0 +1,699 @@ +"""Report test results in JUnit-XML format, for use with Jenkins and build +integration servers. + +Based on initial code from Ross Lawley. 
+
+Output conforms to
+https://github.com/jenkinsci/xunit-plugin/blob/master/src/main/resources/org/jenkinsci/plugins/xunit/types/model/xsd/junit-10.xsd
+"""
+import functools
+import os
+import platform
+import re
+import xml.etree.ElementTree as ET
+from datetime import datetime
+from typing import Callable
+from typing import Dict
+from typing import List
+from typing import Match
+from typing import Optional
+from typing import Tuple
+from typing import Union
+
+import pytest
+from _pytest import nodes
+from _pytest import timing
+from _pytest._code.code import ExceptionRepr
+from _pytest._code.code import ReprFileLocation
+from _pytest.config import Config
+from _pytest.config import filename_arg
+from _pytest.config.argparsing import Parser
+from _pytest.fixtures import FixtureRequest
+from _pytest.reports import TestReport
+from _pytest.stash import StashKey
+from _pytest.terminal import TerminalReporter
+
+
+xml_key = StashKey["LogXML"]()
+
+
+def bin_xml_escape(arg: object) -> str:
+    r"""Visually escape invalid XML characters.
+
+    For example, transforms
+        'hello\aworld\b'
+    into
+        'hello#x07world#x08'
+    Note that the #xABs are *not* XML escapes - missing the ampersand &#.
+    The idea is to escape visually for the user rather than for XML itself.
+    """
+
+    def repl(matchobj: Match[str]) -> str:
+        i = ord(matchobj.group())
+        if i <= 0xFF:
+            return "#x%02X" % i
+        else:
+            return "#x%04X" % i
+
+    # The spec range of valid chars is:
+    # Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
+    # For an unknown(?) reason, we disallow #x7F (DEL) as well.
+    illegal_xml_re = (
+        "[^\u0009\u000A\u000D\u0020-\u007E\u0080-\uD7FF\uE000-\uFFFD\u10000-\u10FFFF]"
+    )
+    return re.sub(illegal_xml_re, repl, str(arg))
+
+
+def merge_family(left, right) -> None:
+    result = {}
+    for kl, vl in left.items():
+        for kr, vr in right.items():
+            if not isinstance(vl, list):
+                raise TypeError(type(vl))
+            result[kl] = vl + vr
+    left.update(result)
+
+
+families = {}
+families["_base"] = {"testcase": ["classname", "name"]}
+families["_base_legacy"] = {"testcase": ["file", "line", "url"]}
+
+# xUnit 1.x inherits legacy attributes.
+families["xunit1"] = families["_base"].copy()
+merge_family(families["xunit1"], families["_base_legacy"])
+
+# xUnit 2.x uses strict base attributes.
+families["xunit2"] = families["_base"] + + +class _NodeReporter: + def __init__(self, nodeid: Union[str, TestReport], xml: "LogXML") -> None: + self.id = nodeid + self.xml = xml + self.add_stats = self.xml.add_stats + self.family = self.xml.family + self.duration = 0.0 + self.properties: List[Tuple[str, str]] = [] + self.nodes: List[ET.Element] = [] + self.attrs: Dict[str, str] = {} + + def append(self, node: ET.Element) -> None: + self.xml.add_stats(node.tag) + self.nodes.append(node) + + def add_property(self, name: str, value: object) -> None: + self.properties.append((str(name), bin_xml_escape(value))) + + def add_attribute(self, name: str, value: object) -> None: + self.attrs[str(name)] = bin_xml_escape(value) + + def make_properties_node(self) -> Optional[ET.Element]: + """Return a Junit node containing custom properties, if any.""" + if self.properties: + properties = ET.Element("properties") + for name, value in self.properties: + properties.append(ET.Element("property", name=name, value=value)) + return properties + return None + + def record_testreport(self, testreport: TestReport) -> None: + names = mangle_test_address(testreport.nodeid) + existing_attrs = self.attrs + classnames = names[:-1] + if self.xml.prefix: + classnames.insert(0, self.xml.prefix) + attrs: Dict[str, str] = { + "classname": ".".join(classnames), + "name": bin_xml_escape(names[-1]), + "file": testreport.location[0], + } + if testreport.location[1] is not None: + attrs["line"] = str(testreport.location[1]) + if hasattr(testreport, "url"): + attrs["url"] = testreport.url + self.attrs = attrs + self.attrs.update(existing_attrs) # Restore any user-defined attributes. + + # Preserve legacy testcase behavior. + if self.family == "xunit1": + return + + # Filter out attributes not permitted by this test family. + # Including custom attributes because they are not valid here. 
+ temp_attrs = {} + for key in self.attrs.keys(): + if key in families[self.family]["testcase"]: + temp_attrs[key] = self.attrs[key] + self.attrs = temp_attrs + + def to_xml(self) -> ET.Element: + testcase = ET.Element("testcase", self.attrs, time="%.3f" % self.duration) + properties = self.make_properties_node() + if properties is not None: + testcase.append(properties) + testcase.extend(self.nodes) + return testcase + + def _add_simple(self, tag: str, message: str, data: Optional[str] = None) -> None: + node = ET.Element(tag, message=message) + node.text = bin_xml_escape(data) + self.append(node) + + def write_captured_output(self, report: TestReport) -> None: + if not self.xml.log_passing_tests and report.passed: + return + + content_out = report.capstdout + content_log = report.caplog + content_err = report.capstderr + if self.xml.logging == "no": + return + content_all = "" + if self.xml.logging in ["log", "all"]: + content_all = self._prepare_content(content_log, " Captured Log ") + if self.xml.logging in ["system-out", "out-err", "all"]: + content_all += self._prepare_content(content_out, " Captured Out ") + self._write_content(report, content_all, "system-out") + content_all = "" + if self.xml.logging in ["system-err", "out-err", "all"]: + content_all += self._prepare_content(content_err, " Captured Err ") + self._write_content(report, content_all, "system-err") + content_all = "" + if content_all: + self._write_content(report, content_all, "system-out") + + def _prepare_content(self, content: str, header: str) -> str: + return "\n".join([header.center(80, "-"), content, ""]) + + def _write_content(self, report: TestReport, content: str, jheader: str) -> None: + tag = ET.Element(jheader) + tag.text = bin_xml_escape(content) + self.append(tag) + + def append_pass(self, report: TestReport) -> None: + self.add_stats("passed") + + def append_failure(self, report: TestReport) -> None: + # msg = str(report.longrepr.reprtraceback.extraline) + if hasattr(report, "wasxfail"): + self._add_simple("skipped", "xfail-marked test passes unexpectedly") + else: + assert report.longrepr is not None + reprcrash: Optional[ReprFileLocation] = getattr( + report.longrepr, "reprcrash", None + ) + if reprcrash is not None: + message = reprcrash.message + else: + message = str(report.longrepr) + message = bin_xml_escape(message) + self._add_simple("failure", message, str(report.longrepr)) + + def append_collect_error(self, report: TestReport) -> None: + # msg = str(report.longrepr.reprtraceback.extraline) + assert report.longrepr is not None + self._add_simple("error", "collection failure", str(report.longrepr)) + + def append_collect_skipped(self, report: TestReport) -> None: + self._add_simple("skipped", "collection skipped", str(report.longrepr)) + + def append_error(self, report: TestReport) -> None: + assert report.longrepr is not None + reprcrash: Optional[ReprFileLocation] = getattr( + report.longrepr, "reprcrash", None + ) + if reprcrash is not None: + reason = reprcrash.message + else: + reason = str(report.longrepr) + + if report.when == "teardown": + msg = f'failed on teardown with "{reason}"' + else: + msg = f'failed on setup with "{reason}"' + self._add_simple("error", bin_xml_escape(msg), str(report.longrepr)) + + def append_skipped(self, report: TestReport) -> None: + if hasattr(report, "wasxfail"): + xfailreason = report.wasxfail + if xfailreason.startswith("reason: "): + xfailreason = xfailreason[8:] + xfailreason = bin_xml_escape(xfailreason) + skipped = ET.Element("skipped", 
type="pytest.xfail", message=xfailreason) + self.append(skipped) + else: + assert isinstance(report.longrepr, tuple) + filename, lineno, skipreason = report.longrepr + if skipreason.startswith("Skipped: "): + skipreason = skipreason[9:] + details = f"{filename}:{lineno}: {skipreason}" + + skipped = ET.Element("skipped", type="pytest.skip", message=skipreason) + skipped.text = bin_xml_escape(details) + self.append(skipped) + self.write_captured_output(report) + + def finalize(self) -> None: + data = self.to_xml() + self.__dict__.clear() + # Type ignored because mypy doesn't like overriding a method. + # Also the return value doesn't match... + self.to_xml = lambda: data # type: ignore[assignment] + + +def _warn_incompatibility_with_xunit2( + request: FixtureRequest, fixture_name: str +) -> None: + """Emit a PytestWarning about the given fixture being incompatible with newer xunit revisions.""" + from _pytest.warning_types import PytestWarning + + xml = request.config.stash.get(xml_key, None) + if xml is not None and xml.family not in ("xunit1", "legacy"): + request.node.warn( + PytestWarning( + "{fixture_name} is incompatible with junit_family '{family}' (use 'legacy' or 'xunit1')".format( + fixture_name=fixture_name, family=xml.family + ) + ) + ) + + +@pytest.fixture +def record_property(request: FixtureRequest) -> Callable[[str, object], None]: + """Add extra properties to the calling test. + + User properties become part of the test report and are available to the + configured reporters, like JUnit XML. + + The fixture is callable with ``name, value``. The value is automatically + XML-encoded. + + Example:: + + def test_function(record_property): + record_property("example_key", 1) + """ + _warn_incompatibility_with_xunit2(request, "record_property") + + def append_property(name: str, value: object) -> None: + request.node.user_properties.append((name, value)) + + return append_property + + +@pytest.fixture +def record_xml_attribute(request: FixtureRequest) -> Callable[[str, object], None]: + """Add extra xml attributes to the tag for the calling test. + + The fixture is callable with ``name, value``. The value is + automatically XML-encoded. + """ + from _pytest.warning_types import PytestExperimentalApiWarning + + request.node.warn( + PytestExperimentalApiWarning("record_xml_attribute is an experimental feature") + ) + + _warn_incompatibility_with_xunit2(request, "record_xml_attribute") + + # Declare noop + def add_attr_noop(name: str, value: object) -> None: + pass + + attr_func = add_attr_noop + + xml = request.config.stash.get(xml_key, None) + if xml is not None: + node_reporter = xml.node_reporter(request.node.nodeid) + attr_func = node_reporter.add_attribute + + return attr_func + + +def _check_record_param_type(param: str, v: str) -> None: + """Used by record_testsuite_property to check that the given parameter name is of the proper + type.""" + __tracebackhide__ = True + if not isinstance(v, str): + msg = "{param} parameter needs to be a string, but {g} given" # type: ignore[unreachable] + raise TypeError(msg.format(param=param, g=type(v).__name__)) + + +@pytest.fixture(scope="session") +def record_testsuite_property(request: FixtureRequest) -> Callable[[str, object], None]: + """Record a new ```` tag as child of the root ````. + + This is suitable to writing global information regarding the entire test + suite, and is compatible with ``xunit2`` JUnit family. + + This is a ``session``-scoped fixture which is called with ``(name, value)``. Example: + + .. 
code-block:: python + + def test_foo(record_testsuite_property): + record_testsuite_property("ARCH", "PPC") + record_testsuite_property("STORAGE_TYPE", "CEPH") + + :param name: + The property name. + :param value: + The property value. Will be converted to a string. + + .. warning:: + + Currently this fixture **does not work** with the + `pytest-xdist `__ plugin. See + :issue:`7767` for details. + """ + + __tracebackhide__ = True + + def record_func(name: str, value: object) -> None: + """No-op function in case --junitxml was not passed in the command-line.""" + __tracebackhide__ = True + _check_record_param_type("name", name) + + xml = request.config.stash.get(xml_key, None) + if xml is not None: + record_func = xml.add_global_property # noqa + return record_func + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("terminal reporting") + group.addoption( + "--junitxml", + "--junit-xml", + action="store", + dest="xmlpath", + metavar="path", + type=functools.partial(filename_arg, optname="--junitxml"), + default=None, + help="Create junit-xml style report file at given path", + ) + group.addoption( + "--junitprefix", + "--junit-prefix", + action="store", + metavar="str", + default=None, + help="Prepend prefix to classnames in junit-xml output", + ) + parser.addini( + "junit_suite_name", "Test suite name for JUnit report", default="pytest" + ) + parser.addini( + "junit_logging", + "Write captured log messages to JUnit report: " + "one of no|log|system-out|system-err|out-err|all", + default="no", + ) + parser.addini( + "junit_log_passing_tests", + "Capture log information for passing tests to JUnit report: ", + type="bool", + default=True, + ) + parser.addini( + "junit_duration_report", + "Duration time to report: one of total|call", + default="total", + ) # choices=['total', 'call']) + parser.addini( + "junit_family", + "Emit XML for schema: one of legacy|xunit1|xunit2", + default="xunit2", + ) + + +def pytest_configure(config: Config) -> None: + xmlpath = config.option.xmlpath + # Prevent opening xmllog on worker nodes (xdist). + if xmlpath and not hasattr(config, "workerinput"): + junit_family = config.getini("junit_family") + config.stash[xml_key] = LogXML( + xmlpath, + config.option.junitprefix, + config.getini("junit_suite_name"), + config.getini("junit_logging"), + config.getini("junit_duration_report"), + junit_family, + config.getini("junit_log_passing_tests"), + ) + config.pluginmanager.register(config.stash[xml_key]) + + +def pytest_unconfigure(config: Config) -> None: + xml = config.stash.get(xml_key, None) + if xml: + del config.stash[xml_key] + config.pluginmanager.unregister(xml) + + +def mangle_test_address(address: str) -> List[str]: + path, possible_open_bracket, params = address.partition("[") + names = path.split("::") + # Convert file path to dotted path. + names[0] = names[0].replace(nodes.SEP, ".") + names[0] = re.sub(r"\.py$", "", names[0]) + # Put any params back. 
+ names[-1] += possible_open_bracket + params + return names + + +class LogXML: + def __init__( + self, + logfile, + prefix: Optional[str], + suite_name: str = "pytest", + logging: str = "no", + report_duration: str = "total", + family="xunit1", + log_passing_tests: bool = True, + ) -> None: + logfile = os.path.expanduser(os.path.expandvars(logfile)) + self.logfile = os.path.normpath(os.path.abspath(logfile)) + self.prefix = prefix + self.suite_name = suite_name + self.logging = logging + self.log_passing_tests = log_passing_tests + self.report_duration = report_duration + self.family = family + self.stats: Dict[str, int] = dict.fromkeys( + ["error", "passed", "failure", "skipped"], 0 + ) + self.node_reporters: Dict[ + Tuple[Union[str, TestReport], object], _NodeReporter + ] = {} + self.node_reporters_ordered: List[_NodeReporter] = [] + self.global_properties: List[Tuple[str, str]] = [] + + # List of reports that failed on call but teardown is pending. + self.open_reports: List[TestReport] = [] + self.cnt_double_fail_tests = 0 + + # Replaces convenience family with real family. + if self.family == "legacy": + self.family = "xunit1" + + def finalize(self, report: TestReport) -> None: + nodeid = getattr(report, "nodeid", report) + # Local hack to handle xdist report order. + workernode = getattr(report, "node", None) + reporter = self.node_reporters.pop((nodeid, workernode)) + if reporter is not None: + reporter.finalize() + + def node_reporter(self, report: Union[TestReport, str]) -> _NodeReporter: + nodeid: Union[str, TestReport] = getattr(report, "nodeid", report) + # Local hack to handle xdist report order. + workernode = getattr(report, "node", None) + + key = nodeid, workernode + + if key in self.node_reporters: + # TODO: breaks for --dist=each + return self.node_reporters[key] + + reporter = _NodeReporter(nodeid, self) + + self.node_reporters[key] = reporter + self.node_reporters_ordered.append(reporter) + + return reporter + + def add_stats(self, key: str) -> None: + if key in self.stats: + self.stats[key] += 1 + + def _opentestcase(self, report: TestReport) -> _NodeReporter: + reporter = self.node_reporter(report) + reporter.record_testreport(report) + return reporter + + def pytest_runtest_logreport(self, report: TestReport) -> None: + """Handle a setup/call/teardown report, generating the appropriate + XML tags as necessary. + + Note: due to plugins like xdist, this hook may be called in interlaced + order with reports from other nodes. For example: + + Usual call order: + -> setup node1 + -> call node1 + -> teardown node1 + -> setup node2 + -> call node2 + -> teardown node2 + + Possible call order in xdist: + -> setup node1 + -> call node1 + -> setup node2 + -> call node2 + -> teardown node2 + -> teardown node1 + """ + close_report = None + if report.passed: + if report.when == "call": # ignore setup/teardown + reporter = self._opentestcase(report) + reporter.append_pass(report) + elif report.failed: + if report.when == "teardown": + # The following vars are needed when xdist plugin is used. + report_wid = getattr(report, "worker_id", None) + report_ii = getattr(report, "item_index", None) + close_report = next( + ( + rep + for rep in self.open_reports + if ( + rep.nodeid == report.nodeid + and getattr(rep, "item_index", None) == report_ii + and getattr(rep, "worker_id", None) == report_wid + ) + ), + None, + ) + if close_report: + # We need to open new testcase in case we have failure in + # call and error in teardown in order to follow junit + # schema. 
+                    self.finalize(close_report)
+                    self.cnt_double_fail_tests += 1
+            reporter = self._opentestcase(report)
+            if report.when == "call":
+                reporter.append_failure(report)
+                self.open_reports.append(report)
+                if not self.log_passing_tests:
+                    reporter.write_captured_output(report)
+            else:
+                reporter.append_error(report)
+        elif report.skipped:
+            reporter = self._opentestcase(report)
+            reporter.append_skipped(report)
+        self.update_testcase_duration(report)
+        if report.when == "teardown":
+            reporter = self._opentestcase(report)
+            reporter.write_captured_output(report)
+
+            for propname, propvalue in report.user_properties:
+                reporter.add_property(propname, str(propvalue))
+
+            self.finalize(report)
+            report_wid = getattr(report, "worker_id", None)
+            report_ii = getattr(report, "item_index", None)
+            close_report = next(
+                (
+                    rep
+                    for rep in self.open_reports
+                    if (
+                        rep.nodeid == report.nodeid
+                        and getattr(rep, "item_index", None) == report_ii
+                        and getattr(rep, "worker_id", None) == report_wid
+                    )
+                ),
+                None,
+            )
+            if close_report:
+                self.open_reports.remove(close_report)
+
+    def update_testcase_duration(self, report: TestReport) -> None:
+        """Accumulate total duration for nodeid from given report and update
+        the Junit.testcase with the new total if already created."""
+        if self.report_duration == "total" or report.when == self.report_duration:
+            reporter = self.node_reporter(report)
+            reporter.duration += getattr(report, "duration", 0.0)
+
+    def pytest_collectreport(self, report: TestReport) -> None:
+        if not report.passed:
+            reporter = self._opentestcase(report)
+            if report.failed:
+                reporter.append_collect_error(report)
+            else:
+                reporter.append_collect_skipped(report)
+
+    def pytest_internalerror(self, excrepr: ExceptionRepr) -> None:
+        reporter = self.node_reporter("internal")
+        reporter.attrs.update(classname="pytest", name="internal")
+        reporter._add_simple("error", "internal error", str(excrepr))
+
+    def pytest_sessionstart(self) -> None:
+        self.suite_start_time = timing.time()
+
+    def pytest_sessionfinish(self) -> None:
+        dirname = os.path.dirname(os.path.abspath(self.logfile))
+        # exist_ok avoids filesystem race conditions between checking path existence and requesting creation
+        os.makedirs(dirname, exist_ok=True)
+
+        with open(self.logfile, "w", encoding="utf-8") as logfile:
+            suite_stop_time = timing.time()
+            suite_time_delta = suite_stop_time - self.suite_start_time
+
+            numtests = (
+                self.stats["passed"]
+                + self.stats["failure"]
+                + self.stats["skipped"]
+                + self.stats["error"]
+                - self.cnt_double_fail_tests
+            )
+            logfile.write('<?xml version="1.0" encoding="utf-8"?>')
+
+            suite_node = ET.Element(
+                "testsuite",
+                name=self.suite_name,
+                errors=str(self.stats["error"]),
+                failures=str(self.stats["failure"]),
+                skipped=str(self.stats["skipped"]),
+                tests=str(numtests),
+                time="%.3f" % suite_time_delta,
+                timestamp=datetime.fromtimestamp(self.suite_start_time).isoformat(),
+                hostname=platform.node(),
+            )
+            global_properties = self._get_global_properties_node()
+            if global_properties is not None:
+                suite_node.append(global_properties)
+            for node_reporter in self.node_reporters_ordered:
+                suite_node.append(node_reporter.to_xml())
+            testsuites = ET.Element("testsuites")
+            testsuites.append(suite_node)
+            logfile.write(ET.tostring(testsuites, encoding="unicode"))
+
+    def pytest_terminal_summary(self, terminalreporter: TerminalReporter) -> None:
+        terminalreporter.write_sep("-", f"generated xml file: {self.logfile}")
+
+    def add_global_property(self, name: str, value: object) -> None:
+        __tracebackhide__ = True
+
_check_record_param_type("name", name) + self.global_properties.append((name, bin_xml_escape(value))) + + def _get_global_properties_node(self) -> Optional[ET.Element]: + """Return a Junit node containing custom properties, if any.""" + if self.global_properties: + properties = ET.Element("properties") + for name, value in self.global_properties: + properties.append(ET.Element("property", name=name, value=value)) + return properties + return None diff --git a/venv/lib/python3.11/site-packages/_pytest/legacypath.py b/venv/lib/python3.11/site-packages/_pytest/legacypath.py new file mode 100644 index 0000000..af1d0c0 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/legacypath.py @@ -0,0 +1,479 @@ +"""Add backward compatibility support for the legacy py path type.""" +import dataclasses +import shlex +import subprocess +from pathlib import Path +from typing import List +from typing import Optional +from typing import TYPE_CHECKING +from typing import Union + +from iniconfig import SectionWrapper + +from _pytest.cacheprovider import Cache +from _pytest.compat import final +from _pytest.compat import LEGACY_PATH +from _pytest.compat import legacy_path +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config import PytestPluginManager +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import FixtureRequest +from _pytest.main import Session +from _pytest.monkeypatch import MonkeyPatch +from _pytest.nodes import Collector +from _pytest.nodes import Item +from _pytest.nodes import Node +from _pytest.pytester import HookRecorder +from _pytest.pytester import Pytester +from _pytest.pytester import RunResult +from _pytest.terminal import TerminalReporter +from _pytest.tmpdir import TempPathFactory + +if TYPE_CHECKING: + from typing_extensions import Final + + import pexpect + + +@final +class Testdir: + """ + Similar to :class:`Pytester`, but this class works with legacy legacy_path objects instead. + + All methods just forward to an internal :class:`Pytester` instance, converting results + to `legacy_path` objects as necessary. 
+ """ + + __test__ = False + + CLOSE_STDIN: "Final" = Pytester.CLOSE_STDIN + TimeoutExpired: "Final" = Pytester.TimeoutExpired + + def __init__(self, pytester: Pytester, *, _ispytest: bool = False) -> None: + check_ispytest(_ispytest) + self._pytester = pytester + + @property + def tmpdir(self) -> LEGACY_PATH: + """Temporary directory where tests are executed.""" + return legacy_path(self._pytester.path) + + @property + def test_tmproot(self) -> LEGACY_PATH: + return legacy_path(self._pytester._test_tmproot) + + @property + def request(self): + return self._pytester._request + + @property + def plugins(self): + return self._pytester.plugins + + @plugins.setter + def plugins(self, plugins): + self._pytester.plugins = plugins + + @property + def monkeypatch(self) -> MonkeyPatch: + return self._pytester._monkeypatch + + def make_hook_recorder(self, pluginmanager) -> HookRecorder: + """See :meth:`Pytester.make_hook_recorder`.""" + return self._pytester.make_hook_recorder(pluginmanager) + + def chdir(self) -> None: + """See :meth:`Pytester.chdir`.""" + return self._pytester.chdir() + + def finalize(self) -> None: + """See :meth:`Pytester._finalize`.""" + return self._pytester._finalize() + + def makefile(self, ext, *args, **kwargs) -> LEGACY_PATH: + """See :meth:`Pytester.makefile`.""" + if ext and not ext.startswith("."): + # pytester.makefile is going to throw a ValueError in a way that + # testdir.makefile did not, because + # pathlib.Path is stricter suffixes than py.path + # This ext arguments is likely user error, but since testdir has + # allowed this, we will prepend "." as a workaround to avoid breaking + # testdir usage that worked before + ext = "." + ext + return legacy_path(self._pytester.makefile(ext, *args, **kwargs)) + + def makeconftest(self, source) -> LEGACY_PATH: + """See :meth:`Pytester.makeconftest`.""" + return legacy_path(self._pytester.makeconftest(source)) + + def makeini(self, source) -> LEGACY_PATH: + """See :meth:`Pytester.makeini`.""" + return legacy_path(self._pytester.makeini(source)) + + def getinicfg(self, source: str) -> SectionWrapper: + """See :meth:`Pytester.getinicfg`.""" + return self._pytester.getinicfg(source) + + def makepyprojecttoml(self, source) -> LEGACY_PATH: + """See :meth:`Pytester.makepyprojecttoml`.""" + return legacy_path(self._pytester.makepyprojecttoml(source)) + + def makepyfile(self, *args, **kwargs) -> LEGACY_PATH: + """See :meth:`Pytester.makepyfile`.""" + return legacy_path(self._pytester.makepyfile(*args, **kwargs)) + + def maketxtfile(self, *args, **kwargs) -> LEGACY_PATH: + """See :meth:`Pytester.maketxtfile`.""" + return legacy_path(self._pytester.maketxtfile(*args, **kwargs)) + + def syspathinsert(self, path=None) -> None: + """See :meth:`Pytester.syspathinsert`.""" + return self._pytester.syspathinsert(path) + + def mkdir(self, name) -> LEGACY_PATH: + """See :meth:`Pytester.mkdir`.""" + return legacy_path(self._pytester.mkdir(name)) + + def mkpydir(self, name) -> LEGACY_PATH: + """See :meth:`Pytester.mkpydir`.""" + return legacy_path(self._pytester.mkpydir(name)) + + def copy_example(self, name=None) -> LEGACY_PATH: + """See :meth:`Pytester.copy_example`.""" + return legacy_path(self._pytester.copy_example(name)) + + def getnode(self, config: Config, arg) -> Optional[Union[Item, Collector]]: + """See :meth:`Pytester.getnode`.""" + return self._pytester.getnode(config, arg) + + def getpathnode(self, path): + """See :meth:`Pytester.getpathnode`.""" + return self._pytester.getpathnode(path) + + def genitems(self, colitems: 
List[Union[Item, Collector]]) -> List[Item]: + """See :meth:`Pytester.genitems`.""" + return self._pytester.genitems(colitems) + + def runitem(self, source): + """See :meth:`Pytester.runitem`.""" + return self._pytester.runitem(source) + + def inline_runsource(self, source, *cmdlineargs): + """See :meth:`Pytester.inline_runsource`.""" + return self._pytester.inline_runsource(source, *cmdlineargs) + + def inline_genitems(self, *args): + """See :meth:`Pytester.inline_genitems`.""" + return self._pytester.inline_genitems(*args) + + def inline_run(self, *args, plugins=(), no_reraise_ctrlc: bool = False): + """See :meth:`Pytester.inline_run`.""" + return self._pytester.inline_run( + *args, plugins=plugins, no_reraise_ctrlc=no_reraise_ctrlc + ) + + def runpytest_inprocess(self, *args, **kwargs) -> RunResult: + """See :meth:`Pytester.runpytest_inprocess`.""" + return self._pytester.runpytest_inprocess(*args, **kwargs) + + def runpytest(self, *args, **kwargs) -> RunResult: + """See :meth:`Pytester.runpytest`.""" + return self._pytester.runpytest(*args, **kwargs) + + def parseconfig(self, *args) -> Config: + """See :meth:`Pytester.parseconfig`.""" + return self._pytester.parseconfig(*args) + + def parseconfigure(self, *args) -> Config: + """See :meth:`Pytester.parseconfigure`.""" + return self._pytester.parseconfigure(*args) + + def getitem(self, source, funcname="test_func"): + """See :meth:`Pytester.getitem`.""" + return self._pytester.getitem(source, funcname) + + def getitems(self, source): + """See :meth:`Pytester.getitems`.""" + return self._pytester.getitems(source) + + def getmodulecol(self, source, configargs=(), withinit=False): + """See :meth:`Pytester.getmodulecol`.""" + return self._pytester.getmodulecol( + source, configargs=configargs, withinit=withinit + ) + + def collect_by_name( + self, modcol: Collector, name: str + ) -> Optional[Union[Item, Collector]]: + """See :meth:`Pytester.collect_by_name`.""" + return self._pytester.collect_by_name(modcol, name) + + def popen( + self, + cmdargs, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + stdin=CLOSE_STDIN, + **kw, + ): + """See :meth:`Pytester.popen`.""" + return self._pytester.popen(cmdargs, stdout, stderr, stdin, **kw) + + def run(self, *cmdargs, timeout=None, stdin=CLOSE_STDIN) -> RunResult: + """See :meth:`Pytester.run`.""" + return self._pytester.run(*cmdargs, timeout=timeout, stdin=stdin) + + def runpython(self, script) -> RunResult: + """See :meth:`Pytester.runpython`.""" + return self._pytester.runpython(script) + + def runpython_c(self, command): + """See :meth:`Pytester.runpython_c`.""" + return self._pytester.runpython_c(command) + + def runpytest_subprocess(self, *args, timeout=None) -> RunResult: + """See :meth:`Pytester.runpytest_subprocess`.""" + return self._pytester.runpytest_subprocess(*args, timeout=timeout) + + def spawn_pytest( + self, string: str, expect_timeout: float = 10.0 + ) -> "pexpect.spawn": + """See :meth:`Pytester.spawn_pytest`.""" + return self._pytester.spawn_pytest(string, expect_timeout=expect_timeout) + + def spawn(self, cmd: str, expect_timeout: float = 10.0) -> "pexpect.spawn": + """See :meth:`Pytester.spawn`.""" + return self._pytester.spawn(cmd, expect_timeout=expect_timeout) + + def __repr__(self) -> str: + return f"" + + def __str__(self) -> str: + return str(self.tmpdir) + + +class LegacyTestdirPlugin: + @staticmethod + @fixture + def testdir(pytester: Pytester) -> Testdir: + """ + Identical to :fixture:`pytester`, and provides an instance whose methods return + legacy ``LEGACY_PATH`` 
objects instead when applicable. + + New code should avoid using :fixture:`testdir` in favor of :fixture:`pytester`. + """ + return Testdir(pytester, _ispytest=True) + + +@final +@dataclasses.dataclass +class TempdirFactory: + """Backward compatibility wrapper that implements :class:`py.path.local` + for :class:`TempPathFactory`. + + .. note:: + These days, it is preferred to use ``tmp_path_factory``. + + :ref:`About the tmpdir and tmpdir_factory fixtures`. + + """ + + _tmppath_factory: TempPathFactory + + def __init__( + self, tmppath_factory: TempPathFactory, *, _ispytest: bool = False + ) -> None: + check_ispytest(_ispytest) + self._tmppath_factory = tmppath_factory + + def mktemp(self, basename: str, numbered: bool = True) -> LEGACY_PATH: + """Same as :meth:`TempPathFactory.mktemp`, but returns a :class:`py.path.local` object.""" + return legacy_path(self._tmppath_factory.mktemp(basename, numbered).resolve()) + + def getbasetemp(self) -> LEGACY_PATH: + """Same as :meth:`TempPathFactory.getbasetemp`, but returns a :class:`py.path.local` object.""" + return legacy_path(self._tmppath_factory.getbasetemp().resolve()) + + +class LegacyTmpdirPlugin: + @staticmethod + @fixture(scope="session") + def tmpdir_factory(request: FixtureRequest) -> TempdirFactory: + """Return a :class:`pytest.TempdirFactory` instance for the test session.""" + # Set dynamically by pytest_configure(). + return request.config._tmpdirhandler # type: ignore + + @staticmethod + @fixture + def tmpdir(tmp_path: Path) -> LEGACY_PATH: + """Return a temporary directory path object which is unique to each test + function invocation, created as a sub directory of the base temporary + directory. + + By default, a new base temporary directory is created each test session, + and old bases are removed after 3 sessions, to aid in debugging. If + ``--basetemp`` is used then it is cleared each session. See :ref:`base + temporary directory`. + + The returned object is a `legacy_path`_ object. + + .. note:: + These days, it is preferred to use ``tmp_path``. + + :ref:`About the tmpdir and tmpdir_factory fixtures`. + + .. _legacy_path: https://py.readthedocs.io/en/latest/path.html + """ + return legacy_path(tmp_path) + + +def Cache_makedir(self: Cache, name: str) -> LEGACY_PATH: + """Return a directory path object with the given name. + + Same as :func:`mkdir`, but returns a legacy py path instance. + """ + return legacy_path(self.mkdir(name)) + + +def FixtureRequest_fspath(self: FixtureRequest) -> LEGACY_PATH: + """(deprecated) The file system path of the test module which collected this test.""" + return legacy_path(self.path) + + +def TerminalReporter_startdir(self: TerminalReporter) -> LEGACY_PATH: + """The directory from which pytest was invoked. + + Prefer to use ``startpath`` which is a :class:`pathlib.Path`. + + :type: LEGACY_PATH + """ + return legacy_path(self.startpath) + + +def Config_invocation_dir(self: Config) -> LEGACY_PATH: + """The directory from which pytest was invoked. + + Prefer to use :attr:`invocation_params.dir `, + which is a :class:`pathlib.Path`. + + :type: LEGACY_PATH + """ + return legacy_path(str(self.invocation_params.dir)) + + +def Config_rootdir(self: Config) -> LEGACY_PATH: + """The path to the :ref:`rootdir `. + + Prefer to use :attr:`rootpath`, which is a :class:`pathlib.Path`. + + :type: LEGACY_PATH + """ + return legacy_path(str(self.rootpath)) + + +def Config_inifile(self: Config) -> Optional[LEGACY_PATH]: + """The path to the :ref:`configfile `. 
+ + Prefer to use :attr:`inipath`, which is a :class:`pathlib.Path`. + + :type: Optional[LEGACY_PATH] + """ + return legacy_path(str(self.inipath)) if self.inipath else None + + +def Session_stardir(self: Session) -> LEGACY_PATH: + """The path from which pytest was invoked. + + Prefer to use ``startpath`` which is a :class:`pathlib.Path`. + + :type: LEGACY_PATH + """ + return legacy_path(self.startpath) + + +def Config__getini_unknown_type( + self, name: str, type: str, value: Union[str, List[str]] +): + if type == "pathlist": + # TODO: This assert is probably not valid in all cases. + assert self.inipath is not None + dp = self.inipath.parent + input_values = shlex.split(value) if isinstance(value, str) else value + return [legacy_path(str(dp / x)) for x in input_values] + else: + raise ValueError(f"unknown configuration type: {type}", value) + + +def Node_fspath(self: Node) -> LEGACY_PATH: + """(deprecated) returns a legacy_path copy of self.path""" + return legacy_path(self.path) + + +def Node_fspath_set(self: Node, value: LEGACY_PATH) -> None: + self.path = Path(value) + + +@hookimpl(tryfirst=True) +def pytest_load_initial_conftests(early_config: Config) -> None: + """Monkeypatch legacy path attributes in several classes, as early as possible.""" + mp = MonkeyPatch() + early_config.add_cleanup(mp.undo) + + # Add Cache.makedir(). + mp.setattr(Cache, "makedir", Cache_makedir, raising=False) + + # Add FixtureRequest.fspath property. + mp.setattr(FixtureRequest, "fspath", property(FixtureRequest_fspath), raising=False) + + # Add TerminalReporter.startdir property. + mp.setattr( + TerminalReporter, "startdir", property(TerminalReporter_startdir), raising=False + ) + + # Add Config.{invocation_dir,rootdir,inifile} properties. + mp.setattr(Config, "invocation_dir", property(Config_invocation_dir), raising=False) + mp.setattr(Config, "rootdir", property(Config_rootdir), raising=False) + mp.setattr(Config, "inifile", property(Config_inifile), raising=False) + + # Add Session.startdir property. + mp.setattr(Session, "startdir", property(Session_stardir), raising=False) + + # Add pathlist configuration type. + mp.setattr(Config, "_getini_unknown_type", Config__getini_unknown_type) + + # Add Node.fspath property. + mp.setattr(Node, "fspath", property(Node_fspath, Node_fspath_set), raising=False) + + +@hookimpl +def pytest_configure(config: Config) -> None: + """Installs the LegacyTmpdirPlugin if the ``tmpdir`` plugin is also installed.""" + if config.pluginmanager.has_plugin("tmpdir"): + mp = MonkeyPatch() + config.add_cleanup(mp.undo) + # Create TmpdirFactory and attach it to the config object. + # + # This is to comply with existing plugins which expect the handler to be + # available at pytest_configure time, but ideally should be moved entirely + # to the tmpdir_factory session fixture. + try: + tmp_path_factory = config._tmp_path_factory # type: ignore[attr-defined] + except AttributeError: + # tmpdir plugin is blocked. + pass + else: + _tmpdirhandler = TempdirFactory(tmp_path_factory, _ispytest=True) + mp.setattr(config, "_tmpdirhandler", _tmpdirhandler, raising=False) + + config.pluginmanager.register(LegacyTmpdirPlugin, "legacypath-tmpdir") + + +@hookimpl +def pytest_plugin_registered(plugin: object, manager: PytestPluginManager) -> None: + # pytester is not loaded by default and is commonly loaded from a conftest, + # so checking for it in `pytest_configure` is not enough. 
+    is_pytester = plugin is manager.get_plugin("pytester")
+    if is_pytester and not manager.is_registered(LegacyTestdirPlugin):
+        manager.register(LegacyTestdirPlugin, "legacypath-pytester")
diff --git a/venv/lib/python3.11/site-packages/_pytest/logging.py b/venv/lib/python3.11/site-packages/_pytest/logging.py
new file mode 100644
index 0000000..8381346
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/_pytest/logging.py
@@ -0,0 +1,918 @@
+"""Access and control log capturing."""
+import io
+import logging
+import os
+import re
+from contextlib import contextmanager
+from contextlib import nullcontext
+from datetime import datetime
+from datetime import timedelta
+from datetime import timezone
+from io import StringIO
+from logging import LogRecord
+from pathlib import Path
+from typing import AbstractSet
+from typing import Dict
+from typing import Generator
+from typing import List
+from typing import Mapping
+from typing import Optional
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import TypeVar
+from typing import Union
+
+from _pytest import nodes
+from _pytest._io import TerminalWriter
+from _pytest.capture import CaptureManager
+from _pytest.compat import final
+from _pytest.config import _strtobool
+from _pytest.config import Config
+from _pytest.config import create_terminal_writer
+from _pytest.config import hookimpl
+from _pytest.config import UsageError
+from _pytest.config.argparsing import Parser
+from _pytest.deprecated import check_ispytest
+from _pytest.fixtures import fixture
+from _pytest.fixtures import FixtureRequest
+from _pytest.main import Session
+from _pytest.stash import StashKey
+from _pytest.terminal import TerminalReporter
+
+if TYPE_CHECKING:
+    logging_StreamHandler = logging.StreamHandler[StringIO]
+
+    from typing_extensions import Literal
+else:
+    logging_StreamHandler = logging.StreamHandler
+
+DEFAULT_LOG_FORMAT = "%(levelname)-8s %(name)s:%(filename)s:%(lineno)d %(message)s"
+DEFAULT_LOG_DATE_FORMAT = "%H:%M:%S"
+_ANSI_ESCAPE_SEQ = re.compile(r"\x1b\[[\d;]+m")
+caplog_handler_key = StashKey["LogCaptureHandler"]()
+caplog_records_key = StashKey[Dict[str, List[logging.LogRecord]]]()
+
+
+def _remove_ansi_escape_sequences(text: str) -> str:
+    return _ANSI_ESCAPE_SEQ.sub("", text)
+
+
+class DatetimeFormatter(logging.Formatter):
+    """A logging formatter which formats records with
+    :func:`datetime.datetime.strftime` instead of :func:`time.strftime`
+    when the date format string contains microseconds.
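+
+    For example (an illustrative sketch; the format strings are arbitrary)::
+
+        formatter = DatetimeFormatter(
+            "%(asctime)s %(message)s", datefmt="%H:%M:%S.%f"
+        )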
+ """ + + def formatTime(self, record: LogRecord, datefmt=None) -> str: + if datefmt and "%f" in datefmt: + ct = self.converter(record.created) + tz = timezone(timedelta(seconds=ct.tm_gmtoff), ct.tm_zone) + # Construct `datetime.datetime` object from `struct_time` + # and msecs information from `record` + dt = datetime(*ct[0:6], microsecond=round(record.msecs * 1000), tzinfo=tz) + return dt.strftime(datefmt) + # Use `logging.Formatter` for non-microsecond formats + return super().formatTime(record, datefmt) + + +class ColoredLevelFormatter(DatetimeFormatter): + """A logging formatter which colorizes the %(levelname)..s part of the + log format passed to __init__.""" + + LOGLEVEL_COLOROPTS: Mapping[int, AbstractSet[str]] = { + logging.CRITICAL: {"red"}, + logging.ERROR: {"red", "bold"}, + logging.WARNING: {"yellow"}, + logging.WARN: {"yellow"}, + logging.INFO: {"green"}, + logging.DEBUG: {"purple"}, + logging.NOTSET: set(), + } + LEVELNAME_FMT_REGEX = re.compile(r"%\(levelname\)([+-.]?\d*(?:\.\d+)?s)") + + def __init__(self, terminalwriter: TerminalWriter, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + self._terminalwriter = terminalwriter + self._original_fmt = self._style._fmt + self._level_to_fmt_mapping: Dict[int, str] = {} + + for level, color_opts in self.LOGLEVEL_COLOROPTS.items(): + self.add_color_level(level, *color_opts) + + def add_color_level(self, level: int, *color_opts: str) -> None: + """Add or update color opts for a log level. + + :param level: + Log level to apply a style to, e.g. ``logging.INFO``. + :param color_opts: + ANSI escape sequence color options. Capitalized colors indicates + background color, i.e. ``'green', 'Yellow', 'bold'`` will give bold + green text on yellow background. + + .. warning:: + This is an experimental API. + """ + + assert self._fmt is not None + levelname_fmt_match = self.LEVELNAME_FMT_REGEX.search(self._fmt) + if not levelname_fmt_match: + return + levelname_fmt = levelname_fmt_match.group() + + formatted_levelname = levelname_fmt % {"levelname": logging.getLevelName(level)} + + # add ANSI escape sequences around the formatted levelname + color_kwargs = {name: True for name in color_opts} + colorized_formatted_levelname = self._terminalwriter.markup( + formatted_levelname, **color_kwargs + ) + self._level_to_fmt_mapping[level] = self.LEVELNAME_FMT_REGEX.sub( + colorized_formatted_levelname, self._fmt + ) + + def format(self, record: logging.LogRecord) -> str: + fmt = self._level_to_fmt_mapping.get(record.levelno, self._original_fmt) + self._style._fmt = fmt + return super().format(record) + + +class PercentStyleMultiline(logging.PercentStyle): + """A logging style with special support for multiline messages. + + If the message of a record consists of multiple lines, this style + formats the message as if each line were logged separately. + """ + + def __init__(self, fmt: str, auto_indent: Union[int, str, bool, None]) -> None: + super().__init__(fmt) + self._auto_indent = self._get_auto_indent(auto_indent) + + @staticmethod + def _get_auto_indent(auto_indent_option: Union[int, str, bool, None]) -> int: + """Determine the current auto indentation setting. + + Specify auto indent behavior (on/off/fixed) by passing in + extra={"auto_indent": [value]} to the call to logging.log() or + using a --log-auto-indent [value] command line or the + log_auto_indent [value] config option. + + Default behavior is auto-indent off. 
+ + Using the string "True" or "on" or the boolean True as the value + turns auto indent on, using the string "False" or "off" or the + boolean False or the int 0 turns it off, and specifying a + positive integer fixes the indentation position to the value + specified. + + Any other values for the option are invalid, and will silently be + converted to the default. + + :param None|bool|int|str auto_indent_option: + User specified option for indentation from command line, config + or extra kwarg. Accepts int, bool or str. str option accepts the + same range of values as boolean config options, as well as + positive integers represented in str form. + + :returns: + Indentation value, which can be + -1 (automatically determine indentation) or + 0 (auto-indent turned off) or + >0 (explicitly set indentation position). + """ + + if auto_indent_option is None: + return 0 + elif isinstance(auto_indent_option, bool): + if auto_indent_option: + return -1 + else: + return 0 + elif isinstance(auto_indent_option, int): + return int(auto_indent_option) + elif isinstance(auto_indent_option, str): + try: + return int(auto_indent_option) + except ValueError: + pass + try: + if _strtobool(auto_indent_option): + return -1 + except ValueError: + return 0 + + return 0 + + def format(self, record: logging.LogRecord) -> str: + if "\n" in record.message: + if hasattr(record, "auto_indent"): + # Passed in from the "extra={}" kwarg on the call to logging.log(). + auto_indent = self._get_auto_indent(record.auto_indent) # type: ignore[attr-defined] + else: + auto_indent = self._auto_indent + + if auto_indent: + lines = record.message.splitlines() + formatted = self._fmt % {**record.__dict__, "message": lines[0]} + + if auto_indent < 0: + indentation = _remove_ansi_escape_sequences(formatted).find( + lines[0] + ) + else: + # Optimizes logging by allowing a fixed indentation. + indentation = auto_indent + lines[0] = formatted + return ("\n" + " " * indentation).join(lines) + return self._fmt % record.__dict__ + + +def get_option_ini(config: Config, *names: str): + for name in names: + ret = config.getoption(name) # 'default' arg won't work as expected + if ret is None: + ret = config.getini(name) + if ret: + return ret + + +def pytest_addoption(parser: Parser) -> None: + """Add options to control log capturing.""" + group = parser.getgroup("logging") + + def add_option_ini(option, dest, default=None, type=None, **kwargs): + parser.addini( + dest, default=default, type=type, help="Default value for " + option + ) + group.addoption(option, dest=dest, **kwargs) + + add_option_ini( + "--log-level", + dest="log_level", + default=None, + metavar="LEVEL", + help=( + "Level of messages to catch/display." + " Not set by default, so it depends on the root/parent log handler's" + ' effective level, where it is "WARNING" by default.' 
+        ),
+    )
+    add_option_ini(
+        "--log-format",
+        dest="log_format",
+        default=DEFAULT_LOG_FORMAT,
+        help="Log format used by the logging module",
+    )
+    add_option_ini(
+        "--log-date-format",
+        dest="log_date_format",
+        default=DEFAULT_LOG_DATE_FORMAT,
+        help="Log date format used by the logging module",
+    )
+    parser.addini(
+        "log_cli",
+        default=False,
+        type="bool",
+        help='Enable log display during test run (also known as "live logging")',
+    )
+    add_option_ini(
+        "--log-cli-level", dest="log_cli_level", default=None, help="CLI logging level"
+    )
+    add_option_ini(
+        "--log-cli-format",
+        dest="log_cli_format",
+        default=None,
+        help="Log format used by the logging module",
+    )
+    add_option_ini(
+        "--log-cli-date-format",
+        dest="log_cli_date_format",
+        default=None,
+        help="Log date format used by the logging module",
+    )
+    add_option_ini(
+        "--log-file",
+        dest="log_file",
+        default=None,
+        help="Path to a file where logging will be written to",
+    )
+    add_option_ini(
+        "--log-file-level",
+        dest="log_file_level",
+        default=None,
+        help="Log file logging level",
+    )
+    add_option_ini(
+        "--log-file-format",
+        dest="log_file_format",
+        default=DEFAULT_LOG_FORMAT,
+        help="Log format used by the logging module",
+    )
+    add_option_ini(
+        "--log-file-date-format",
+        dest="log_file_date_format",
+        default=DEFAULT_LOG_DATE_FORMAT,
+        help="Log date format used by the logging module",
+    )
+    add_option_ini(
+        "--log-auto-indent",
+        dest="log_auto_indent",
+        default=None,
+        help="Auto-indent multiline messages passed to the logging module. Accepts true|on, false|off or an integer.",
+    )
+    group.addoption(
+        "--log-disable",
+        action="append",
+        default=[],
+        dest="logger_disable",
+        help="Disable a logger by name. Can be passed multiple times.",
+    )
+
+
+_HandlerType = TypeVar("_HandlerType", bound=logging.Handler)
+
+
+# Not using @contextmanager for performance reasons.
+class catching_logs:
+    """Context manager that prepares the whole logging machinery properly."""
+
+    __slots__ = ("handler", "level", "orig_level")
+
+    def __init__(self, handler: _HandlerType, level: Optional[int] = None) -> None:
+        self.handler = handler
+        self.level = level
+
+    def __enter__(self):
+        root_logger = logging.getLogger()
+        if self.level is not None:
+            self.handler.setLevel(self.level)
+        root_logger.addHandler(self.handler)
+        if self.level is not None:
+            self.orig_level = root_logger.level
+            root_logger.setLevel(min(self.orig_level, self.level))
+        return self.handler
+
+    def __exit__(self, type, value, traceback):
+        root_logger = logging.getLogger()
+        if self.level is not None:
+            root_logger.setLevel(self.orig_level)
+        root_logger.removeHandler(self.handler)
+
+
+class LogCaptureHandler(logging_StreamHandler):
+    """A logging handler that stores log records and the log text."""
+
+    def __init__(self) -> None:
+        """Create a new log handler."""
+        super().__init__(StringIO())
+        self.records: List[logging.LogRecord] = []
+
+    def emit(self, record: logging.LogRecord) -> None:
+        """Keep the log records in a list in addition to the log text."""
+        self.records.append(record)
+        super().emit(record)
+
+    def reset(self) -> None:
+        self.records = []
+        self.stream = StringIO()
+
+    def clear(self) -> None:
+        self.records.clear()
+        self.stream = StringIO()
+
+    def handleError(self, record: logging.LogRecord) -> None:
+        if logging.raiseExceptions:
+            # Fail the test if the log message is bad (emit failed).
+            # The default behavior of logging is to print "Logging error"
+            # to stderr with the call stack and some extra details.
+            # pytest wants to make such mistakes visible during testing.
+            raise
+
+
+@final
+class LogCaptureFixture:
+    """Provides access and control of log capturing."""
+
+    def __init__(self, item: nodes.Node, *, _ispytest: bool = False) -> None:
+        check_ispytest(_ispytest)
+        self._item = item
+        self._initial_handler_level: Optional[int] = None
+        # Dict of log name -> log level.
+        self._initial_logger_levels: Dict[Optional[str], int] = {}
+        self._initial_disabled_logging_level: Optional[int] = None
+
+    def _finalize(self) -> None:
+        """Finalize the fixture.
+
+        This restores the log levels and the disabled logging levels changed by :meth:`set_level`.
+        """
+        # Restore log levels.
+        if self._initial_handler_level is not None:
+            self.handler.setLevel(self._initial_handler_level)
+        for logger_name, level in self._initial_logger_levels.items():
+            logger = logging.getLogger(logger_name)
+            logger.setLevel(level)
+        # Disable logging at the original disabled logging level.
+        if self._initial_disabled_logging_level is not None:
+            logging.disable(self._initial_disabled_logging_level)
+            self._initial_disabled_logging_level = None
+
+    @property
+    def handler(self) -> LogCaptureHandler:
+        """Get the logging handler used by the fixture."""
+        return self._item.stash[caplog_handler_key]
+
+    def get_records(
+        self, when: "Literal['setup', 'call', 'teardown']"
+    ) -> List[logging.LogRecord]:
+        """Get the logging records for one of the possible test phases.
+
+        :param when:
+            Which test phase to obtain the records from.
+            Valid values are: "setup", "call" and "teardown".
+
+        :returns: The list of captured records at the given stage.
+
+        .. versionadded:: 3.4
+        """
+        return self._item.stash[caplog_records_key].get(when, [])
+
+    @property
+    def text(self) -> str:
+        """The formatted log text."""
+        return _remove_ansi_escape_sequences(self.handler.stream.getvalue())
+
+    @property
+    def records(self) -> List[logging.LogRecord]:
+        """The list of log records."""
+        return self.handler.records
+
+    @property
+    def record_tuples(self) -> List[Tuple[str, int, str]]:
+        """A list of a stripped down version of log records intended
+        for use in assertion comparison.
+
+        The format of the tuple is:
+
+            (logger_name, log_level, message)
+        """
+        return [(r.name, r.levelno, r.getMessage()) for r in self.records]
+
+    @property
+    def messages(self) -> List[str]:
+        """A list of format-interpolated log messages.
+
+        Unlike 'records', which contains the format string and parameters for
+        interpolation, log messages in this list are all interpolated.
+
+        Unlike 'text', which contains the output from the handler, log
+        messages in this list are unadorned with levels, timestamps, etc,
+        making exact comparisons more reliable.
+
+        Note that traceback or stack info (from :func:`logging.exception` or
+        the `exc_info` or `stack_info` arguments to the logging functions) is
+        not included, as this is added by the formatter in the handler.
+
+        .. versionadded:: 3.7
+        """
+        return [r.getMessage() for r in self.records]
+
+    def clear(self) -> None:
+        """Reset the list of log records and the captured log text."""
+        self.handler.clear()
+
+    def _force_enable_logging(
+        self, level: Union[int, str], logger_obj: logging.Logger
+    ) -> int:
+        """Enable the desired logging level if the global level was disabled via ``logging.disable()``.
+
+        Only enables logging levels greater than or equal to the requested ``level``.
+
+        Does nothing if the desired ``level`` wasn't disabled.
+
+        :param level:
+            The logger level caplog should capture.
+ All logging is enabled if a non-standard logging level string is supplied. + Valid level strings are in :data:`logging._nameToLevel`. + :param logger_obj: The logger object to check. + + :return: The original disabled logging level. + """ + original_disable_level: int = logger_obj.manager.disable # type: ignore[attr-defined] + + if isinstance(level, str): + # Try to translate the level string to an int for `logging.disable()` + level = logging.getLevelName(level) + + if not isinstance(level, int): + # The level provided was not valid, so just un-disable all logging. + logging.disable(logging.NOTSET) + elif not logger_obj.isEnabledFor(level): + # Each level is `10` away from other levels. + # https://docs.python.org/3/library/logging.html#logging-levels + disable_level = max(level - 10, logging.NOTSET) + logging.disable(disable_level) + + return original_disable_level + + def set_level(self, level: Union[int, str], logger: Optional[str] = None) -> None: + """Set the threshold level of a logger for the duration of a test. + + Logging messages which are less severe than this level will not be captured. + + .. versionchanged:: 3.4 + The levels of the loggers changed by this function will be + restored to their initial values at the end of the test. + + Will enable the requested logging level if it was disabled via :meth:`logging.disable`. + + :param level: The level. + :param logger: The logger to update. If not given, the root logger. + """ + logger_obj = logging.getLogger(logger) + # Save the original log-level to restore it during teardown. + self._initial_logger_levels.setdefault(logger, logger_obj.level) + logger_obj.setLevel(level) + if self._initial_handler_level is None: + self._initial_handler_level = self.handler.level + self.handler.setLevel(level) + initial_disabled_logging_level = self._force_enable_logging(level, logger_obj) + if self._initial_disabled_logging_level is None: + self._initial_disabled_logging_level = initial_disabled_logging_level + + @contextmanager + def at_level( + self, level: Union[int, str], logger: Optional[str] = None + ) -> Generator[None, None, None]: + """Context manager that sets the level for capturing of logs. After + the end of the 'with' statement the level is restored to its original + value. + + Will enable the requested logging level if it was disabled via :meth:`logging.disable`. + + :param level: The level. + :param logger: The logger to update. If not given, the root logger. + """ + logger_obj = logging.getLogger(logger) + orig_level = logger_obj.level + logger_obj.setLevel(level) + handler_orig_level = self.handler.level + self.handler.setLevel(level) + original_disable_level = self._force_enable_logging(level, logger_obj) + try: + yield + finally: + logger_obj.setLevel(orig_level) + self.handler.setLevel(handler_orig_level) + logging.disable(original_disable_level) + + +@fixture +def caplog(request: FixtureRequest) -> Generator[LogCaptureFixture, None, None]: + """Access and control log capturing. 
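+
+    A minimal usage sketch (illustrative; the logger name is arbitrary)::
+
+        import logging
+
+        def test_warning_recorded(caplog):
+            caplog.set_level(logging.WARNING)
+            logging.getLogger("myapp").warning("careful")
+            assert "careful" in caplog.text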
+ + Captured logs are available through the following properties/methods:: + + * caplog.messages -> list of format-interpolated log messages + * caplog.text -> string containing formatted log output + * caplog.records -> list of logging.LogRecord instances + * caplog.record_tuples -> list of (logger_name, level, message) tuples + * caplog.clear() -> clear captured records and formatted log output string + """ + result = LogCaptureFixture(request.node, _ispytest=True) + yield result + result._finalize() + + +def get_log_level_for_setting(config: Config, *setting_names: str) -> Optional[int]: + for setting_name in setting_names: + log_level = config.getoption(setting_name) + if log_level is None: + log_level = config.getini(setting_name) + if log_level: + break + else: + return None + + if isinstance(log_level, str): + log_level = log_level.upper() + try: + return int(getattr(logging, log_level, log_level)) + except ValueError as e: + # Python logging does not recognise this as a logging level + raise UsageError( + "'{}' is not recognized as a logging level name for " + "'{}'. Please consider passing the " + "logging level num instead.".format(log_level, setting_name) + ) from e + + +# run after terminalreporter/capturemanager are configured +@hookimpl(trylast=True) +def pytest_configure(config: Config) -> None: + config.pluginmanager.register(LoggingPlugin(config), "logging-plugin") + + +class LoggingPlugin: + """Attaches to the logging module and captures log messages for each test.""" + + def __init__(self, config: Config) -> None: + """Create a new plugin to capture log messages. + + The formatter can be safely shared across all handlers so + create a single one for the entire test session here. + """ + self._config = config + + # Report logging. + self.formatter = self._create_formatter( + get_option_ini(config, "log_format"), + get_option_ini(config, "log_date_format"), + get_option_ini(config, "log_auto_indent"), + ) + self.log_level = get_log_level_for_setting(config, "log_level") + self.caplog_handler = LogCaptureHandler() + self.caplog_handler.setFormatter(self.formatter) + self.report_handler = LogCaptureHandler() + self.report_handler.setFormatter(self.formatter) + + # File logging. + self.log_file_level = get_log_level_for_setting(config, "log_file_level") + log_file = get_option_ini(config, "log_file") or os.devnull + if log_file != os.devnull: + directory = os.path.dirname(os.path.abspath(log_file)) + if not os.path.isdir(directory): + os.makedirs(directory) + + self.log_file_handler = _FileHandler(log_file, mode="w", encoding="UTF-8") + log_file_format = get_option_ini(config, "log_file_format", "log_format") + log_file_date_format = get_option_ini( + config, "log_file_date_format", "log_date_format" + ) + + log_file_formatter = DatetimeFormatter( + log_file_format, datefmt=log_file_date_format + ) + self.log_file_handler.setFormatter(log_file_formatter) + + # CLI/live logging. + self.log_cli_level = get_log_level_for_setting( + config, "log_cli_level", "log_level" + ) + if self._log_cli_enabled(): + terminal_reporter = config.pluginmanager.get_plugin("terminalreporter") + capture_manager = config.pluginmanager.get_plugin("capturemanager") + # if capturemanager plugin is disabled, live logging still works. 
+ self.log_cli_handler: Union[ + _LiveLoggingStreamHandler, _LiveLoggingNullHandler + ] = _LiveLoggingStreamHandler(terminal_reporter, capture_manager) + else: + self.log_cli_handler = _LiveLoggingNullHandler() + log_cli_formatter = self._create_formatter( + get_option_ini(config, "log_cli_format", "log_format"), + get_option_ini(config, "log_cli_date_format", "log_date_format"), + get_option_ini(config, "log_auto_indent"), + ) + self.log_cli_handler.setFormatter(log_cli_formatter) + self._disable_loggers(loggers_to_disable=config.option.logger_disable) + + def _disable_loggers(self, loggers_to_disable: List[str]) -> None: + if not loggers_to_disable: + return + + for name in loggers_to_disable: + logger = logging.getLogger(name) + logger.disabled = True + + def _create_formatter(self, log_format, log_date_format, auto_indent): + # Color option doesn't exist if terminal plugin is disabled. + color = getattr(self._config.option, "color", "no") + if color != "no" and ColoredLevelFormatter.LEVELNAME_FMT_REGEX.search( + log_format + ): + formatter: logging.Formatter = ColoredLevelFormatter( + create_terminal_writer(self._config), log_format, log_date_format + ) + else: + formatter = DatetimeFormatter(log_format, log_date_format) + + formatter._style = PercentStyleMultiline( + formatter._style._fmt, auto_indent=auto_indent + ) + + return formatter + + def set_log_path(self, fname: str) -> None: + """Set the filename parameter for Logging.FileHandler(). + + Creates parent directory if it does not exist. + + .. warning:: + This is an experimental API. + """ + fpath = Path(fname) + + if not fpath.is_absolute(): + fpath = self._config.rootpath / fpath + + if not fpath.parent.exists(): + fpath.parent.mkdir(exist_ok=True, parents=True) + + # https://github.com/python/mypy/issues/11193 + stream: io.TextIOWrapper = fpath.open(mode="w", encoding="UTF-8") # type: ignore[assignment] + old_stream = self.log_file_handler.setStream(stream) + if old_stream: + old_stream.close() + + def _log_cli_enabled(self): + """Return whether live logging is enabled.""" + enabled = self._config.getoption( + "--log-cli-level" + ) is not None or self._config.getini("log_cli") + if not enabled: + return False + + terminal_reporter = self._config.pluginmanager.get_plugin("terminalreporter") + if terminal_reporter is None: + # terminal reporter is disabled e.g. by pytest-xdist. + return False + + return True + + @hookimpl(hookwrapper=True, tryfirst=True) + def pytest_sessionstart(self) -> Generator[None, None, None]: + self.log_cli_handler.set_when("sessionstart") + + with catching_logs(self.log_cli_handler, level=self.log_cli_level): + with catching_logs(self.log_file_handler, level=self.log_file_level): + yield + + @hookimpl(hookwrapper=True, tryfirst=True) + def pytest_collection(self) -> Generator[None, None, None]: + self.log_cli_handler.set_when("collection") + + with catching_logs(self.log_cli_handler, level=self.log_cli_level): + with catching_logs(self.log_file_handler, level=self.log_file_level): + yield + + @hookimpl(hookwrapper=True) + def pytest_runtestloop(self, session: Session) -> Generator[None, None, None]: + if session.config.option.collectonly: + yield + return + + if self._log_cli_enabled() and self._config.getoption("verbose") < 1: + # The verbose flag is needed to avoid messy test progress output. + self._config.option.verbose = 1 + + with catching_logs(self.log_cli_handler, level=self.log_cli_level): + with catching_logs(self.log_file_handler, level=self.log_file_level): + yield # Run all the tests. 
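+
+    # Note: the logstart/logreport/logfinish hooks below only switch the live
+    # logging handler to the current test phase; per-test capturing itself is
+    # done in _runtest_for().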
+ + @hookimpl + def pytest_runtest_logstart(self) -> None: + self.log_cli_handler.reset() + self.log_cli_handler.set_when("start") + + @hookimpl + def pytest_runtest_logreport(self) -> None: + self.log_cli_handler.set_when("logreport") + + def _runtest_for(self, item: nodes.Item, when: str) -> Generator[None, None, None]: + """Implement the internals of the pytest_runtest_xxx() hooks.""" + with catching_logs( + self.caplog_handler, + level=self.log_level, + ) as caplog_handler, catching_logs( + self.report_handler, + level=self.log_level, + ) as report_handler: + caplog_handler.reset() + report_handler.reset() + item.stash[caplog_records_key][when] = caplog_handler.records + item.stash[caplog_handler_key] = caplog_handler + + yield + + log = report_handler.stream.getvalue().strip() + item.add_report_section(when, "log", log) + + @hookimpl(hookwrapper=True) + def pytest_runtest_setup(self, item: nodes.Item) -> Generator[None, None, None]: + self.log_cli_handler.set_when("setup") + + empty: Dict[str, List[logging.LogRecord]] = {} + item.stash[caplog_records_key] = empty + yield from self._runtest_for(item, "setup") + + @hookimpl(hookwrapper=True) + def pytest_runtest_call(self, item: nodes.Item) -> Generator[None, None, None]: + self.log_cli_handler.set_when("call") + + yield from self._runtest_for(item, "call") + + @hookimpl(hookwrapper=True) + def pytest_runtest_teardown(self, item: nodes.Item) -> Generator[None, None, None]: + self.log_cli_handler.set_when("teardown") + + yield from self._runtest_for(item, "teardown") + del item.stash[caplog_records_key] + del item.stash[caplog_handler_key] + + @hookimpl + def pytest_runtest_logfinish(self) -> None: + self.log_cli_handler.set_when("finish") + + @hookimpl(hookwrapper=True, tryfirst=True) + def pytest_sessionfinish(self) -> Generator[None, None, None]: + self.log_cli_handler.set_when("sessionfinish") + + with catching_logs(self.log_cli_handler, level=self.log_cli_level): + with catching_logs(self.log_file_handler, level=self.log_file_level): + yield + + @hookimpl + def pytest_unconfigure(self) -> None: + # Close the FileHandler explicitly. + # (logging.shutdown might have lost the weakref?!) + self.log_file_handler.close() + + +class _FileHandler(logging.FileHandler): + """A logging FileHandler with pytest tweaks.""" + + def handleError(self, record: logging.LogRecord) -> None: + # Handled by LogCaptureHandler. + pass + + +class _LiveLoggingStreamHandler(logging_StreamHandler): + """A logging StreamHandler used by the live logging feature: it will + write a newline before the first log message in each test. + + During live logging we must also explicitly disable stdout/stderr + capturing otherwise it will get captured and won't appear in the + terminal. + """ + + # Officially stream needs to be a IO[str], but TerminalReporter + # isn't. So force it. 
+ stream: TerminalReporter = None # type: ignore + + def __init__( + self, + terminal_reporter: TerminalReporter, + capture_manager: Optional[CaptureManager], + ) -> None: + super().__init__(stream=terminal_reporter) # type: ignore[arg-type] + self.capture_manager = capture_manager + self.reset() + self.set_when(None) + self._test_outcome_written = False + + def reset(self) -> None: + """Reset the handler; should be called before the start of each test.""" + self._first_record_emitted = False + + def set_when(self, when: Optional[str]) -> None: + """Prepare for the given test phase (setup/call/teardown).""" + self._when = when + self._section_name_shown = False + if when == "start": + self._test_outcome_written = False + + def emit(self, record: logging.LogRecord) -> None: + ctx_manager = ( + self.capture_manager.global_and_fixture_disabled() + if self.capture_manager + else nullcontext() + ) + with ctx_manager: + if not self._first_record_emitted: + self.stream.write("\n") + self._first_record_emitted = True + elif self._when in ("teardown", "finish"): + if not self._test_outcome_written: + self._test_outcome_written = True + self.stream.write("\n") + if not self._section_name_shown and self._when: + self.stream.section("live log " + self._when, sep="-", bold=True) + self._section_name_shown = True + super().emit(record) + + def handleError(self, record: logging.LogRecord) -> None: + # Handled by LogCaptureHandler. + pass + + +class _LiveLoggingNullHandler(logging.NullHandler): + """A logging handler used when live logging is disabled.""" + + def reset(self) -> None: + pass + + def set_when(self, when: str) -> None: + pass + + def handleError(self, record: logging.LogRecord) -> None: + # Handled by LogCaptureHandler. + pass diff --git a/venv/lib/python3.11/site-packages/_pytest/main.py b/venv/lib/python3.11/site-packages/_pytest/main.py new file mode 100644 index 0000000..155d430 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/main.py @@ -0,0 +1,907 @@ +"""Core implementation of the testing process: init, session, runtest loop.""" +import argparse +import dataclasses +import fnmatch +import functools +import importlib +import os +import sys +from pathlib import Path +from typing import Callable +from typing import Dict +from typing import FrozenSet +from typing import Iterator +from typing import List +from typing import Optional +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import Union + +import _pytest._code +from _pytest import nodes +from _pytest.compat import final +from _pytest.compat import overload +from _pytest.config import Config +from _pytest.config import directory_arg +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config import PytestPluginManager +from _pytest.config import UsageError +from _pytest.config.argparsing import Parser +from _pytest.fixtures import FixtureManager +from _pytest.outcomes import exit +from _pytest.pathlib import absolutepath +from _pytest.pathlib import bestrelpath +from _pytest.pathlib import fnmatch_ex +from _pytest.pathlib import visit +from _pytest.reports import CollectReport +from _pytest.reports import TestReport +from _pytest.runner import collect_one_node +from _pytest.runner import SetupState + + +if TYPE_CHECKING: + from typing_extensions import Literal + + +def pytest_addoption(parser: Parser) -> None: + parser.addini( + "norecursedirs", + "Directory patterns to avoid for 
recursion", + type="args", + default=[ + "*.egg", + ".*", + "_darcs", + "build", + "CVS", + "dist", + "node_modules", + "venv", + "{arch}", + ], + ) + parser.addini( + "testpaths", + "Directories to search for tests when no files or directories are given on the " + "command line", + type="args", + default=[], + ) + group = parser.getgroup("general", "Running and selection options") + group._addoption( + "-x", + "--exitfirst", + action="store_const", + dest="maxfail", + const=1, + help="Exit instantly on first error or failed test", + ) + group = parser.getgroup("pytest-warnings") + group.addoption( + "-W", + "--pythonwarnings", + action="append", + help="Set which warnings to report, see -W option of Python itself", + ) + parser.addini( + "filterwarnings", + type="linelist", + help="Each line specifies a pattern for " + "warnings.filterwarnings. " + "Processed after -W/--pythonwarnings.", + ) + group._addoption( + "--maxfail", + metavar="num", + action="store", + type=int, + dest="maxfail", + default=0, + help="Exit after first num failures or errors", + ) + group._addoption( + "--strict-config", + action="store_true", + help="Any warnings encountered while parsing the `pytest` section of the " + "configuration file raise errors", + ) + group._addoption( + "--strict-markers", + action="store_true", + help="Markers not registered in the `markers` section of the configuration " + "file raise errors", + ) + group._addoption( + "--strict", + action="store_true", + help="(Deprecated) alias to --strict-markers", + ) + group._addoption( + "-c", + "--config-file", + metavar="FILE", + type=str, + dest="inifilename", + help="Load configuration from `FILE` instead of trying to locate one of the " + "implicit configuration files.", + ) + group._addoption( + "--continue-on-collection-errors", + action="store_true", + default=False, + dest="continue_on_collection_errors", + help="Force test execution even if collection errors occur", + ) + group._addoption( + "--rootdir", + action="store", + dest="rootdir", + help="Define root directory for tests. 
Can be relative path: 'root_dir', './root_dir', " + "'root_dir/another_dir/'; absolute path: '/home/user/root_dir'; path with variables: " + "'$HOME/root_dir'.", + ) + + group = parser.getgroup("collect", "collection") + group.addoption( + "--collectonly", + "--collect-only", + "--co", + action="store_true", + help="Only collect tests, don't execute them", + ) + group.addoption( + "--pyargs", + action="store_true", + help="Try to interpret all arguments as Python packages", + ) + group.addoption( + "--ignore", + action="append", + metavar="path", + help="Ignore path during collection (multi-allowed)", + ) + group.addoption( + "--ignore-glob", + action="append", + metavar="path", + help="Ignore path pattern during collection (multi-allowed)", + ) + group.addoption( + "--deselect", + action="append", + metavar="nodeid_prefix", + help="Deselect item (via node id prefix) during collection (multi-allowed)", + ) + group.addoption( + "--confcutdir", + dest="confcutdir", + default=None, + metavar="dir", + type=functools.partial(directory_arg, optname="--confcutdir"), + help="Only load conftest.py's relative to specified dir", + ) + group.addoption( + "--noconftest", + action="store_true", + dest="noconftest", + default=False, + help="Don't load any conftest.py files", + ) + group.addoption( + "--keepduplicates", + "--keep-duplicates", + action="store_true", + dest="keepduplicates", + default=False, + help="Keep duplicate tests", + ) + group.addoption( + "--collect-in-virtualenv", + action="store_true", + dest="collect_in_virtualenv", + default=False, + help="Don't ignore tests in a local virtualenv directory", + ) + group.addoption( + "--import-mode", + default="prepend", + choices=["prepend", "append", "importlib"], + dest="importmode", + help="Prepend/append to sys.path when importing test modules and conftest " + "files. Default: prepend.", + ) + + group = parser.getgroup("debugconfig", "test session debugging and configuration") + group.addoption( + "--basetemp", + dest="basetemp", + default=None, + type=validate_basetemp, + metavar="dir", + help=( + "Base temporary directory for this test run. 
" + "(Warning: this directory is removed if it exists.)" + ), + ) + + +def validate_basetemp(path: str) -> str: + # GH 7119 + msg = "basetemp must not be empty, the current working directory or any parent directory of it" + + # empty path + if not path: + raise argparse.ArgumentTypeError(msg) + + def is_ancestor(base: Path, query: Path) -> bool: + """Return whether query is an ancestor of base.""" + if base == query: + return True + return query in base.parents + + # check if path is an ancestor of cwd + if is_ancestor(Path.cwd(), Path(path).absolute()): + raise argparse.ArgumentTypeError(msg) + + # check symlinks for ancestors + if is_ancestor(Path.cwd().resolve(), Path(path).resolve()): + raise argparse.ArgumentTypeError(msg) + + return path + + +def wrap_session( + config: Config, doit: Callable[[Config, "Session"], Optional[Union[int, ExitCode]]] +) -> Union[int, ExitCode]: + """Skeleton command line program.""" + session = Session.from_config(config) + session.exitstatus = ExitCode.OK + initstate = 0 + try: + try: + config._do_configure() + initstate = 1 + config.hook.pytest_sessionstart(session=session) + initstate = 2 + session.exitstatus = doit(config, session) or 0 + except UsageError: + session.exitstatus = ExitCode.USAGE_ERROR + raise + except Failed: + session.exitstatus = ExitCode.TESTS_FAILED + except (KeyboardInterrupt, exit.Exception): + excinfo = _pytest._code.ExceptionInfo.from_current() + exitstatus: Union[int, ExitCode] = ExitCode.INTERRUPTED + if isinstance(excinfo.value, exit.Exception): + if excinfo.value.returncode is not None: + exitstatus = excinfo.value.returncode + if initstate < 2: + sys.stderr.write(f"{excinfo.typename}: {excinfo.value.msg}\n") + config.hook.pytest_keyboard_interrupt(excinfo=excinfo) + session.exitstatus = exitstatus + except BaseException: + session.exitstatus = ExitCode.INTERNAL_ERROR + excinfo = _pytest._code.ExceptionInfo.from_current() + try: + config.notify_exception(excinfo, config.option) + except exit.Exception as exc: + if exc.returncode is not None: + session.exitstatus = exc.returncode + sys.stderr.write(f"{type(exc).__name__}: {exc}\n") + else: + if isinstance(excinfo.value, SystemExit): + sys.stderr.write("mainloop: caught unexpected SystemExit!\n") + + finally: + # Explicitly break reference cycle. 
+ excinfo = None # type: ignore + os.chdir(session.startpath) + if initstate >= 2: + try: + config.hook.pytest_sessionfinish( + session=session, exitstatus=session.exitstatus + ) + except exit.Exception as exc: + if exc.returncode is not None: + session.exitstatus = exc.returncode + sys.stderr.write(f"{type(exc).__name__}: {exc}\n") + config._ensure_unconfigure() + return session.exitstatus + + +def pytest_cmdline_main(config: Config) -> Union[int, ExitCode]: + return wrap_session(config, _main) + + +def _main(config: Config, session: "Session") -> Optional[Union[int, ExitCode]]: + """Default command line protocol for initialization, session, + running tests and reporting.""" + config.hook.pytest_collection(session=session) + config.hook.pytest_runtestloop(session=session) + + if session.testsfailed: + return ExitCode.TESTS_FAILED + elif session.testscollected == 0: + return ExitCode.NO_TESTS_COLLECTED + return None + + +def pytest_collection(session: "Session") -> None: + session.perform_collect() + + +def pytest_runtestloop(session: "Session") -> bool: + if session.testsfailed and not session.config.option.continue_on_collection_errors: + raise session.Interrupted( + "%d error%s during collection" + % (session.testsfailed, "s" if session.testsfailed != 1 else "") + ) + + if session.config.option.collectonly: + return True + + for i, item in enumerate(session.items): + nextitem = session.items[i + 1] if i + 1 < len(session.items) else None + item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) + if session.shouldfail: + raise session.Failed(session.shouldfail) + if session.shouldstop: + raise session.Interrupted(session.shouldstop) + return True + + +def _in_venv(path: Path) -> bool: + """Attempt to detect if ``path`` is the root of a Virtual Environment by + checking for the existence of the appropriate activate script.""" + bindir = path.joinpath("Scripts" if sys.platform.startswith("win") else "bin") + try: + if not bindir.is_dir(): + return False + except OSError: + return False + activates = ( + "activate", + "activate.csh", + "activate.fish", + "Activate", + "Activate.bat", + "Activate.ps1", + ) + return any(fname.name in activates for fname in bindir.iterdir()) + + +def pytest_ignore_collect(collection_path: Path, config: Config) -> Optional[bool]: + ignore_paths = config._getconftest_pathlist( + "collect_ignore", path=collection_path.parent, rootpath=config.rootpath + ) + ignore_paths = ignore_paths or [] + excludeopt = config.getoption("ignore") + if excludeopt: + ignore_paths.extend(absolutepath(x) for x in excludeopt) + + if collection_path in ignore_paths: + return True + + ignore_globs = config._getconftest_pathlist( + "collect_ignore_glob", path=collection_path.parent, rootpath=config.rootpath + ) + ignore_globs = ignore_globs or [] + excludeglobopt = config.getoption("ignore_glob") + if excludeglobopt: + ignore_globs.extend(absolutepath(x) for x in excludeglobopt) + + if any(fnmatch.fnmatch(str(collection_path), str(glob)) for glob in ignore_globs): + return True + + allow_in_venv = config.getoption("collect_in_virtualenv") + if not allow_in_venv and _in_venv(collection_path): + return True + + if collection_path.is_dir(): + norecursepatterns = config.getini("norecursedirs") + if any(fnmatch_ex(pat, collection_path) for pat in norecursepatterns): + return True + + return None + + +def pytest_collection_modifyitems(items: List[nodes.Item], config: Config) -> None: + deselect_prefixes = tuple(config.getoption("deselect") or []) + if not deselect_prefixes: + 
return + + remaining = [] + deselected = [] + for colitem in items: + if colitem.nodeid.startswith(deselect_prefixes): + deselected.append(colitem) + else: + remaining.append(colitem) + + if deselected: + config.hook.pytest_deselected(items=deselected) + items[:] = remaining + + +class FSHookProxy: + def __init__(self, pm: PytestPluginManager, remove_mods) -> None: + self.pm = pm + self.remove_mods = remove_mods + + def __getattr__(self, name: str): + x = self.pm.subset_hook_caller(name, remove_plugins=self.remove_mods) + self.__dict__[name] = x + return x + + +class Interrupted(KeyboardInterrupt): + """Signals that the test run was interrupted.""" + + __module__ = "builtins" # For py3. + + +class Failed(Exception): + """Signals a stop as failed test run.""" + + +@dataclasses.dataclass +class _bestrelpath_cache(Dict[Path, str]): + __slots__ = ("path",) + + path: Path + + def __missing__(self, path: Path) -> str: + r = bestrelpath(self.path, path) + self[path] = r + return r + + +@final +class Session(nodes.FSCollector): + Interrupted = Interrupted + Failed = Failed + # Set on the session by runner.pytest_sessionstart. + _setupstate: SetupState + # Set on the session by fixtures.pytest_sessionstart. + _fixturemanager: FixtureManager + exitstatus: Union[int, ExitCode] + + def __init__(self, config: Config) -> None: + super().__init__( + path=config.rootpath, + fspath=None, + parent=None, + config=config, + session=self, + nodeid="", + ) + self.testsfailed = 0 + self.testscollected = 0 + self.shouldstop: Union[bool, str] = False + self.shouldfail: Union[bool, str] = False + self.trace = config.trace.root.get("collection") + self._initialpaths: FrozenSet[Path] = frozenset() + + self._bestrelpathcache: Dict[Path, str] = _bestrelpath_cache(config.rootpath) + + self.config.pluginmanager.register(self, name="session") + + @classmethod + def from_config(cls, config: Config) -> "Session": + session: Session = cls._create(config=config) + return session + + def __repr__(self) -> str: + return "<%s %s exitstatus=%r testsfailed=%d testscollected=%d>" % ( + self.__class__.__name__, + self.name, + getattr(self, "exitstatus", ""), + self.testsfailed, + self.testscollected, + ) + + @property + def startpath(self) -> Path: + """The path from which pytest was invoked. + + .. versionadded:: 7.0.0 + """ + return self.config.invocation_params.dir + + def _node_location_to_relpath(self, node_path: Path) -> str: + # bestrelpath is a quite slow function. + return self._bestrelpathcache[node_path] + + @hookimpl(tryfirst=True) + def pytest_collectstart(self) -> None: + if self.shouldfail: + raise self.Failed(self.shouldfail) + if self.shouldstop: + raise self.Interrupted(self.shouldstop) + + @hookimpl(tryfirst=True) + def pytest_runtest_logreport( + self, report: Union[TestReport, CollectReport] + ) -> None: + if report.failed and not hasattr(report, "wasxfail"): + self.testsfailed += 1 + maxfail = self.config.getvalue("maxfail") + if maxfail and self.testsfailed >= maxfail: + self.shouldfail = "stopping after %d failures" % (self.testsfailed) + + pytest_collectreport = pytest_runtest_logreport + + def isinitpath(self, path: Union[str, "os.PathLike[str]"]) -> bool: + # Optimization: Path(Path(...)) is much slower than isinstance. + path_ = path if isinstance(path, Path) else Path(path) + return path_ in self._initialpaths + + def gethookproxy(self, fspath: "os.PathLike[str]"): + # Optimization: Path(Path(...)) is much slower than isinstance. 
+ path = fspath if isinstance(fspath, Path) else Path(fspath) + pm = self.config.pluginmanager + # Check if we have the common case of running + # hooks with all conftest.py files. + my_conftestmodules = pm._getconftestmodules( + path, + self.config.getoption("importmode"), + rootpath=self.config.rootpath, + ) + remove_mods = pm._conftest_plugins.difference(my_conftestmodules) + if remove_mods: + # One or more conftests are not in use at this fspath. + from .config.compat import PathAwareHookProxy + + proxy = PathAwareHookProxy(FSHookProxy(pm, remove_mods)) + else: + # All plugins are active for this fspath. + proxy = self.config.hook + return proxy + + def _recurse(self, direntry: "os.DirEntry[str]") -> bool: + if direntry.name == "__pycache__": + return False + fspath = Path(direntry.path) + ihook = self.gethookproxy(fspath.parent) + if ihook.pytest_ignore_collect(collection_path=fspath, config=self.config): + return False + return True + + def _collectfile( + self, fspath: Path, handle_dupes: bool = True + ) -> Sequence[nodes.Collector]: + assert ( + fspath.is_file() + ), "{!r} is not a file (isdir={!r}, exists={!r}, islink={!r})".format( + fspath, fspath.is_dir(), fspath.exists(), fspath.is_symlink() + ) + ihook = self.gethookproxy(fspath) + if not self.isinitpath(fspath): + if ihook.pytest_ignore_collect(collection_path=fspath, config=self.config): + return () + + if handle_dupes: + keepduplicates = self.config.getoption("keepduplicates") + if not keepduplicates: + duplicate_paths = self.config.pluginmanager._duplicatepaths + if fspath in duplicate_paths: + return () + else: + duplicate_paths.add(fspath) + + return ihook.pytest_collect_file(file_path=fspath, parent=self) # type: ignore[no-any-return] + + @overload + def perform_collect( + self, args: Optional[Sequence[str]] = ..., genitems: "Literal[True]" = ... + ) -> Sequence[nodes.Item]: + ... + + @overload + def perform_collect( # noqa: F811 + self, args: Optional[Sequence[str]] = ..., genitems: bool = ... + ) -> Sequence[Union[nodes.Item, nodes.Collector]]: + ... + + def perform_collect( # noqa: F811 + self, args: Optional[Sequence[str]] = None, genitems: bool = True + ) -> Sequence[Union[nodes.Item, nodes.Collector]]: + """Perform the collection phase for this session. + + This is called by the default :hook:`pytest_collection` hook + implementation; see the documentation of this hook for more details. + For testing purposes, it may also be called directly on a fresh + ``Session``. + + This function normally recursively expands any collectors collected + from the session to their items, and only items are returned. For + testing purposes, this may be suppressed by passing ``genitems=False``, + in which case the return value contains these collectors unexpanded, + and ``session.items`` is empty. 
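+
+        A sketch of such direct use (illustrative; assumes a fully configured
+        ``config`` object and a hypothetical ``tests/test_foo.py``)::
+
+            session = Session.from_config(config)
+            items = session.perform_collect(["tests/test_foo.py"])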
+ """ + if args is None: + args = self.config.args + + self.trace("perform_collect", self, args) + self.trace.root.indent += 1 + + self._notfound: List[Tuple[str, Sequence[nodes.Collector]]] = [] + self._initial_parts: List[Tuple[Path, List[str]]] = [] + self.items: List[nodes.Item] = [] + + hook = self.config.hook + + items: Sequence[Union[nodes.Item, nodes.Collector]] = self.items + try: + initialpaths: List[Path] = [] + for arg in args: + fspath, parts = resolve_collection_argument( + self.config.invocation_params.dir, + arg, + as_pypath=self.config.option.pyargs, + ) + self._initial_parts.append((fspath, parts)) + initialpaths.append(fspath) + self._initialpaths = frozenset(initialpaths) + rep = collect_one_node(self) + self.ihook.pytest_collectreport(report=rep) + self.trace.root.indent -= 1 + if self._notfound: + errors = [] + for arg, collectors in self._notfound: + if collectors: + errors.append( + f"not found: {arg}\n(no name {arg!r} in any of {collectors!r})" + ) + else: + errors.append(f"found no collectors for {arg}") + + raise UsageError(*errors) + if not genitems: + items = rep.result + else: + if rep.passed: + for node in rep.result: + self.items.extend(self.genitems(node)) + + self.config.pluginmanager.check_pending() + hook.pytest_collection_modifyitems( + session=self, config=self.config, items=items + ) + finally: + hook.pytest_collection_finish(session=self) + + self.testscollected = len(items) + return items + + def collect(self) -> Iterator[Union[nodes.Item, nodes.Collector]]: + from _pytest.python import Package + + # Keep track of any collected nodes in here, so we don't duplicate fixtures. + node_cache1: Dict[Path, Sequence[nodes.Collector]] = {} + node_cache2: Dict[Tuple[Type[nodes.Collector], Path], nodes.Collector] = {} + + # Keep track of any collected collectors in matchnodes paths, so they + # are not collected more than once. + matchnodes_cache: Dict[Tuple[Type[nodes.Collector], str], CollectReport] = {} + + # Directories of pkgs with dunder-init files. + pkg_roots: Dict[Path, Package] = {} + + for argpath, names in self._initial_parts: + self.trace("processing argument", (argpath, names)) + self.trace.root.indent += 1 + + # Start with a Session root, and delve to argpath item (dir or file) + # and stack all Packages found on the way. + # No point in finding packages when collecting doctests. + if not self.config.getoption("doctestmodules", False): + pm = self.config.pluginmanager + for parent in (argpath, *argpath.parents): + if not pm._is_in_confcutdir(argpath): + break + + if parent.is_dir(): + pkginit = parent / "__init__.py" + if pkginit.is_file() and pkginit not in node_cache1: + col = self._collectfile(pkginit, handle_dupes=False) + if col: + if isinstance(col[0], Package): + pkg_roots[parent] = col[0] + node_cache1[col[0].path] = [col[0]] + + # If it's a directory argument, recurse and look for any Subpackages. + # Let the Package collector deal with subnodes, don't collect here. + if argpath.is_dir(): + assert not names, f"invalid arg {(argpath, names)!r}" + + seen_dirs: Set[Path] = set() + for direntry in visit(argpath, self._recurse): + if not direntry.is_file(): + continue + + path = Path(direntry.path) + dirpath = path.parent + + if dirpath not in seen_dirs: + # Collect packages first. + seen_dirs.add(dirpath) + pkginit = dirpath / "__init__.py" + if pkginit.exists(): + for x in self._collectfile(pkginit): + yield x + if isinstance(x, Package): + pkg_roots[dirpath] = x + if dirpath in pkg_roots: + # Do not collect packages here. 
+ continue + + for x in self._collectfile(path): + key2 = (type(x), x.path) + if key2 in node_cache2: + yield node_cache2[key2] + else: + node_cache2[key2] = x + yield x + else: + assert argpath.is_file() + + if argpath in node_cache1: + col = node_cache1[argpath] + else: + collect_root = pkg_roots.get(argpath.parent, self) + col = collect_root._collectfile(argpath, handle_dupes=False) + if col: + node_cache1[argpath] = col + + matching = [] + work: List[ + Tuple[Sequence[Union[nodes.Item, nodes.Collector]], Sequence[str]] + ] = [(col, names)] + while work: + self.trace("matchnodes", col, names) + self.trace.root.indent += 1 + + matchnodes, matchnames = work.pop() + for node in matchnodes: + if not matchnames: + matching.append(node) + continue + if not isinstance(node, nodes.Collector): + continue + key = (type(node), node.nodeid) + if key in matchnodes_cache: + rep = matchnodes_cache[key] + else: + rep = collect_one_node(node) + matchnodes_cache[key] = rep + if rep.passed: + submatchnodes = [] + for r in rep.result: + # TODO: Remove parametrized workaround once collection structure contains + # parametrization. + if ( + r.name == matchnames[0] + or r.name.split("[")[0] == matchnames[0] + ): + submatchnodes.append(r) + if submatchnodes: + work.append((submatchnodes, matchnames[1:])) + else: + # Report collection failures here to avoid failing to run some test + # specified in the command line because the module could not be + # imported (#134). + node.ihook.pytest_collectreport(report=rep) + + self.trace("matchnodes finished -> ", len(matching), "nodes") + self.trace.root.indent -= 1 + + if not matching: + report_arg = "::".join((str(argpath), *names)) + self._notfound.append((report_arg, col)) + continue + + # If __init__.py was the only file requested, then the matched + # node will be the corresponding Package (by default), and the + # first yielded item will be the __init__ Module itself, so + # just use that. If this special case isn't taken, then all the + # files in the package will be yielded. + if argpath.name == "__init__.py" and isinstance(matching[0], Package): + try: + yield next(iter(matching[0].collect())) + except StopIteration: + # The package collects nothing with only an __init__.py + # file in it, which gets ignored by the default + # "python_files" option. 
+                        pass
+                    continue
+
+            yield from matching
+
+            self.trace.root.indent -= 1
+
+    def genitems(
+        self, node: Union[nodes.Item, nodes.Collector]
+    ) -> Iterator[nodes.Item]:
+        self.trace("genitems", node)
+        if isinstance(node, nodes.Item):
+            node.ihook.pytest_itemcollected(item=node)
+            yield node
+        else:
+            assert isinstance(node, nodes.Collector)
+            rep = collect_one_node(node)
+            if rep.passed:
+                for subnode in rep.result:
+                    yield from self.genitems(subnode)
+            node.ihook.pytest_collectreport(report=rep)
+
+
+def search_pypath(module_name: str) -> str:
+    """Search sys.path for the given dotted module name, and return its file system path."""
+    try:
+        spec = importlib.util.find_spec(module_name)
+    # AttributeError: looks like package module, but actually filename
+    # ImportError: module does not exist
+    # ValueError: not a module name
+    except (AttributeError, ImportError, ValueError):
+        return module_name
+    if spec is None or spec.origin is None or spec.origin == "namespace":
+        return module_name
+    elif spec.submodule_search_locations:
+        return os.path.dirname(spec.origin)
+    else:
+        return spec.origin
+
+
+def resolve_collection_argument(
+    invocation_path: Path, arg: str, *, as_pypath: bool = False
+) -> Tuple[Path, List[str]]:
+    """Parse path arguments optionally containing selection parts and return (fspath, names).
+
+    Command-line arguments can point to files and/or directories, and optionally contain
+    parts for specific tests selection, for example:
+
+        "pkg/tests/test_foo.py::TestClass::test_foo"
+
+    This function ensures the path exists, and returns a tuple:
+
+        (Path("/full/path/to/pkg/tests/test_foo.py"), ["TestClass", "test_foo"])
+
+    When as_pypath is True, expects that the command-line argument actually contains
+    module paths instead of file-system paths:
+
+        "pkg.tests.test_foo::TestClass::test_foo"
+
+    In which case we search sys.path for a matching module, and then return the *path* to the
+    found module.
+
+    If the path doesn't exist, raise UsageError.
+    If the path is a directory and selection parts are present, raise UsageError.
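Editor's aside (not part of the patch): a minimal sketch of the behaviour the docstring above describes. The `tests/test_foo.py` path is hypothetical and must exist, otherwise `UsageError` is raised.

```python
from pathlib import Path
from _pytest.main import resolve_collection_argument

fspath, names = resolve_collection_argument(
    Path.cwd(), "tests/test_foo.py::TestClass::test_foo[case1]"
)
# fspath -> absolute Path to tests/test_foo.py
# names  -> ["TestClass", "test_foo[case1]"]  (the "[...]" suffix is re-attached
#           to the last selection component after splitting on "::")
```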
+    """
+    base, squacket, rest = str(arg).partition("[")
+    strpath, *parts = base.split("::")
+    if parts:
+        parts[-1] = f"{parts[-1]}{squacket}{rest}"
+    if as_pypath:
+        strpath = search_pypath(strpath)
+    fspath = invocation_path / strpath
+    fspath = absolutepath(fspath)
+    if not fspath.exists():
+        msg = (
+            "module or package not found: {arg} (missing __init__.py?)"
+            if as_pypath
+            else "file or directory not found: {arg}"
+        )
+        raise UsageError(msg.format(arg=arg))
+    if parts and fspath.is_dir():
+        msg = (
+            "package argument cannot contain :: selection parts: {arg}"
+            if as_pypath
+            else "directory argument cannot contain :: selection parts: {arg}"
+        )
+        raise UsageError(msg.format(arg=arg))
+    return fspath, parts
diff --git a/venv/lib/python3.11/site-packages/_pytest/mark/__init__.py b/venv/lib/python3.11/site-packages/_pytest/mark/__init__.py
new file mode 100644
index 0000000..de46b4c
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/_pytest/mark/__init__.py
@@ -0,0 +1,269 @@
+"""Generic mechanism for marking and selecting python functions."""
+import dataclasses
+from typing import AbstractSet
+from typing import Collection
+from typing import List
+from typing import Optional
+from typing import TYPE_CHECKING
+from typing import Union
+
+from .expression import Expression
+from .expression import ParseError
+from .structures import EMPTY_PARAMETERSET_OPTION
+from .structures import get_empty_parameterset_mark
+from .structures import Mark
+from .structures import MARK_GEN
+from .structures import MarkDecorator
+from .structures import MarkGenerator
+from .structures import ParameterSet
+from _pytest.config import Config
+from _pytest.config import ExitCode
+from _pytest.config import hookimpl
+from _pytest.config import UsageError
+from _pytest.config.argparsing import Parser
+from _pytest.stash import StashKey
+
+if TYPE_CHECKING:
+    from _pytest.nodes import Item
+
+
+__all__ = [
+    "MARK_GEN",
+    "Mark",
+    "MarkDecorator",
+    "MarkGenerator",
+    "ParameterSet",
+    "get_empty_parameterset_mark",
+]
+
+
+old_mark_config_key = StashKey[Optional[Config]]()
+
+
+def param(
+    *values: object,
+    marks: Union[MarkDecorator, Collection[Union[MarkDecorator, Mark]]] = (),
+    id: Optional[str] = None,
+) -> ParameterSet:
+    """Specify a parameter in `pytest.mark.parametrize`_ calls or
+    :ref:`parametrized fixtures <fixture-parametrize-marks>`.
+
+    .. code-block:: python
+
+        @pytest.mark.parametrize(
+            "test_input,expected",
+            [
+                ("3+5", 8),
+                pytest.param("6*9", 42, marks=pytest.mark.xfail),
+            ],
+        )
+        def test_eval(test_input, expected):
+            assert eval(test_input) == expected
+
+    :param values: Variable args of the values of the parameter set, in order.
+    :param marks: A single mark or a list of marks to be applied to this parameter set.
+    :param id: The id to attribute to this parameter set.
+    """
+    return ParameterSet.param(*values, marks=marks, id=id)
+
+
+def pytest_addoption(parser: Parser) -> None:
+    group = parser.getgroup("general")
+    group._addoption(
+        "-k",
+        action="store",
+        dest="keyword",
+        default="",
+        metavar="EXPRESSION",
+        help="Only run tests which match the given substring expression. "
+        "An expression is a Python evaluatable expression "
+        "where all names are substring-matched against test names "
+        "and their parent classes. Example: -k 'test_method or test_"
+        "other' matches all test functions and classes whose name "
+        "contains 'test_method' or 'test_other', while -k 'not test_method' "
+        "matches those that don't contain 'test_method' in their names. 
" + "-k 'not test_method and not test_other' will eliminate the matches. " + "Additionally keywords are matched to classes and functions " + "containing extra names in their 'extra_keyword_matches' set, " + "as well as functions which have names assigned directly to them. " + "The matching is case-insensitive.", + ) + + group._addoption( + "-m", + action="store", + dest="markexpr", + default="", + metavar="MARKEXPR", + help="Only run tests matching given mark expression. " + "For example: -m 'mark1 and not mark2'.", + ) + + group.addoption( + "--markers", + action="store_true", + help="show markers (builtin, plugin and per-project ones).", + ) + + parser.addini("markers", "Markers for test functions", "linelist") + parser.addini(EMPTY_PARAMETERSET_OPTION, "Default marker for empty parametersets") + + +@hookimpl(tryfirst=True) +def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]: + import _pytest.config + + if config.option.markers: + config._do_configure() + tw = _pytest.config.create_terminal_writer(config) + for line in config.getini("markers"): + parts = line.split(":", 1) + name = parts[0] + rest = parts[1] if len(parts) == 2 else "" + tw.write("@pytest.mark.%s:" % name, bold=True) + tw.line(rest) + tw.line() + config._ensure_unconfigure() + return 0 + + return None + + +@dataclasses.dataclass +class KeywordMatcher: + """A matcher for keywords. + + Given a list of names, matches any substring of one of these names. The + string inclusion check is case-insensitive. + + Will match on the name of colitem, including the names of its parents. + Only matches names of items which are either a :class:`Class` or a + :class:`Function`. + + Additionally, matches on names in the 'extra_keyword_matches' set of + any item, as well as names directly assigned to test functions. + """ + + __slots__ = ("_names",) + + _names: AbstractSet[str] + + @classmethod + def from_item(cls, item: "Item") -> "KeywordMatcher": + mapped_names = set() + + # Add the names of the current item and any parent items. + import pytest + + for node in item.listchain(): + if not isinstance(node, pytest.Session): + mapped_names.add(node.name) + + # Add the names added as extra keywords to current or parent items. + mapped_names.update(item.listextrakeywords()) + + # Add the names attached to the current function through direct assignment. + function_obj = getattr(item, "function", None) + if function_obj: + mapped_names.update(function_obj.__dict__) + + # Add the markers to the keywords as we no longer handle them correctly. + mapped_names.update(mark.name for mark in item.iter_markers()) + + return cls(mapped_names) + + def __call__(self, subname: str) -> bool: + subname = subname.lower() + names = (name.lower() for name in self._names) + + for name in names: + if subname in name: + return True + return False + + +def deselect_by_keyword(items: "List[Item]", config: Config) -> None: + keywordexpr = config.option.keyword.lstrip() + if not keywordexpr: + return + + expr = _parse_expression(keywordexpr, "Wrong expression passed to '-k'") + + remaining = [] + deselected = [] + for colitem in items: + if not expr.evaluate(KeywordMatcher.from_item(colitem)): + deselected.append(colitem) + else: + remaining.append(colitem) + + if deselected: + config.hook.pytest_deselected(items=deselected) + items[:] = remaining + + +@dataclasses.dataclass +class MarkMatcher: + """A matcher for markers which are present. + + Tries to match on any marker names, attached to the given colitem. 
+    """
+
+    __slots__ = ("own_mark_names",)
+
+    own_mark_names: AbstractSet[str]
+
+    @classmethod
+    def from_item(cls, item: "Item") -> "MarkMatcher":
+        mark_names = {mark.name for mark in item.iter_markers()}
+        return cls(mark_names)
+
+    def __call__(self, name: str) -> bool:
+        return name in self.own_mark_names
+
+
+def deselect_by_mark(items: "List[Item]", config: Config) -> None:
+    matchexpr = config.option.markexpr
+    if not matchexpr:
+        return
+
+    expr = _parse_expression(matchexpr, "Wrong expression passed to '-m'")
+    remaining: List[Item] = []
+    deselected: List[Item] = []
+    for item in items:
+        if expr.evaluate(MarkMatcher.from_item(item)):
+            remaining.append(item)
+        else:
+            deselected.append(item)
+    if deselected:
+        config.hook.pytest_deselected(items=deselected)
+        items[:] = remaining
+
+
+def _parse_expression(expr: str, exc_message: str) -> Expression:
+    try:
+        return Expression.compile(expr)
+    except ParseError as e:
+        raise UsageError(f"{exc_message}: {expr}: {e}") from None
+
+
+def pytest_collection_modifyitems(items: "List[Item]", config: Config) -> None:
+    deselect_by_keyword(items, config)
+    deselect_by_mark(items, config)
+
+
+def pytest_configure(config: Config) -> None:
+    config.stash[old_mark_config_key] = MARK_GEN._config
+    MARK_GEN._config = config
+
+    empty_parameterset = config.getini(EMPTY_PARAMETERSET_OPTION)
+
+    if empty_parameterset not in ("skip", "xfail", "fail_at_collect", None, ""):
+        raise UsageError(
+            "{!s} must be one of skip, xfail or fail_at_collect"
+            " but it is {!r}".format(EMPTY_PARAMETERSET_OPTION, empty_parameterset)
+        )
+
+
+def pytest_unconfigure(config: Config) -> None:
+    MARK_GEN._config = config.stash.get(old_mark_config_key, None)
diff --git a/venv/lib/python3.11/site-packages/_pytest/mark/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/mark/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..818b3fd
Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/mark/__pycache__/__init__.cpython-311.pyc differ
diff --git a/venv/lib/python3.11/site-packages/_pytest/mark/__pycache__/expression.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/mark/__pycache__/expression.cpython-311.pyc
new file mode 100644
index 0000000..28568f3
Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/mark/__pycache__/expression.cpython-311.pyc differ
diff --git a/venv/lib/python3.11/site-packages/_pytest/mark/__pycache__/structures.cpython-311.pyc b/venv/lib/python3.11/site-packages/_pytest/mark/__pycache__/structures.cpython-311.pyc
new file mode 100644
index 0000000..6248243
Binary files /dev/null and b/venv/lib/python3.11/site-packages/_pytest/mark/__pycache__/structures.cpython-311.pyc differ
diff --git a/venv/lib/python3.11/site-packages/_pytest/mark/expression.py b/venv/lib/python3.11/site-packages/_pytest/mark/expression.py
new file mode 100644
index 0000000..9287bce
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/_pytest/mark/expression.py
@@ -0,0 +1,228 @@
+r"""Evaluate match expressions, as used by `-k` and `-m`.
+
+The grammar is:
+
+expression: expr? EOF
+expr:       and_expr ('or' and_expr)*
+and_expr:   not_expr ('and' not_expr)*
+not_expr:   'not' not_expr | '(' expr ')' | ident
+ident:      (\w|:|\+|-|\.|\[|\]|\\|/)+
+
+The semantics are:
+
+- Empty expression evaluates to False.
+- ident evaluates to True or False according to a provided matcher function.
+- or/and/not evaluate according to the usual boolean semantics.
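Editor's aside (not part of the patch): a sketch of the grammar in action, using the `Expression` API defined below in this module.

```python
from _pytest.mark.expression import Expression

matcher = {"http", "fast"}.__contains__
assert Expression.compile("http and not slow").evaluate(matcher)
assert not Expression.compile("slow or db").evaluate(matcher)
# An empty expression evaluates to False, per the semantics above.
assert not Expression.compile("").evaluate(matcher)
```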
+""" +import ast +import dataclasses +import enum +import re +import sys +import types +from typing import Callable +from typing import Iterator +from typing import Mapping +from typing import NoReturn +from typing import Optional +from typing import Sequence + +if sys.version_info >= (3, 8): + astNameConstant = ast.Constant +else: + astNameConstant = ast.NameConstant + + +__all__ = [ + "Expression", + "ParseError", +] + + +class TokenType(enum.Enum): + LPAREN = "left parenthesis" + RPAREN = "right parenthesis" + OR = "or" + AND = "and" + NOT = "not" + IDENT = "identifier" + EOF = "end of input" + + +@dataclasses.dataclass(frozen=True) +class Token: + __slots__ = ("type", "value", "pos") + type: TokenType + value: str + pos: int + + +class ParseError(Exception): + """The expression contains invalid syntax. + + :param column: The column in the line where the error occurred (1-based). + :param message: A description of the error. + """ + + def __init__(self, column: int, message: str) -> None: + self.column = column + self.message = message + + def __str__(self) -> str: + return f"at column {self.column}: {self.message}" + + +class Scanner: + __slots__ = ("tokens", "current") + + def __init__(self, input: str) -> None: + self.tokens = self.lex(input) + self.current = next(self.tokens) + + def lex(self, input: str) -> Iterator[Token]: + pos = 0 + while pos < len(input): + if input[pos] in (" ", "\t"): + pos += 1 + elif input[pos] == "(": + yield Token(TokenType.LPAREN, "(", pos) + pos += 1 + elif input[pos] == ")": + yield Token(TokenType.RPAREN, ")", pos) + pos += 1 + else: + match = re.match(r"(:?\w|:|\+|-|\.|\[|\]|\\|/)+", input[pos:]) + if match: + value = match.group(0) + if value == "or": + yield Token(TokenType.OR, value, pos) + elif value == "and": + yield Token(TokenType.AND, value, pos) + elif value == "not": + yield Token(TokenType.NOT, value, pos) + else: + yield Token(TokenType.IDENT, value, pos) + pos += len(value) + else: + raise ParseError( + pos + 1, + f'unexpected character "{input[pos]}"', + ) + yield Token(TokenType.EOF, "", pos) + + def accept(self, type: TokenType, *, reject: bool = False) -> Optional[Token]: + if self.current.type is type: + token = self.current + if token.type is not TokenType.EOF: + self.current = next(self.tokens) + return token + if reject: + self.reject((type,)) + return None + + def reject(self, expected: Sequence[TokenType]) -> NoReturn: + raise ParseError( + self.current.pos + 1, + "expected {}; got {}".format( + " OR ".join(type.value for type in expected), + self.current.type.value, + ), + ) + + +# True, False and None are legal match expression identifiers, +# but illegal as Python identifiers. To fix this, this prefix +# is added to identifiers in the conversion to Python AST. 
+IDENT_PREFIX = "$"
+
+
+def expression(s: Scanner) -> ast.Expression:
+    if s.accept(TokenType.EOF):
+        ret: ast.expr = astNameConstant(False)
+    else:
+        ret = expr(s)
+        s.accept(TokenType.EOF, reject=True)
+    return ast.fix_missing_locations(ast.Expression(ret))
+
+
+def expr(s: Scanner) -> ast.expr:
+    ret = and_expr(s)
+    while s.accept(TokenType.OR):
+        rhs = and_expr(s)
+        ret = ast.BoolOp(ast.Or(), [ret, rhs])
+    return ret
+
+
+def and_expr(s: Scanner) -> ast.expr:
+    ret = not_expr(s)
+    while s.accept(TokenType.AND):
+        rhs = not_expr(s)
+        ret = ast.BoolOp(ast.And(), [ret, rhs])
+    return ret
+
+
+def not_expr(s: Scanner) -> ast.expr:
+    if s.accept(TokenType.NOT):
+        return ast.UnaryOp(ast.Not(), not_expr(s))
+    if s.accept(TokenType.LPAREN):
+        ret = expr(s)
+        s.accept(TokenType.RPAREN, reject=True)
+        return ret
+    ident = s.accept(TokenType.IDENT)
+    if ident:
+        return ast.Name(IDENT_PREFIX + ident.value, ast.Load())
+    s.reject((TokenType.NOT, TokenType.LPAREN, TokenType.IDENT))
+
+
+class MatcherAdapter(Mapping[str, bool]):
+    """Adapts a matcher function to a locals mapping as required by eval()."""
+
+    def __init__(self, matcher: Callable[[str], bool]) -> None:
+        self.matcher = matcher
+
+    def __getitem__(self, key: str) -> bool:
+        return self.matcher(key[len(IDENT_PREFIX) :])
+
+    def __iter__(self) -> Iterator[str]:
+        raise NotImplementedError()
+
+    def __len__(self) -> int:
+        raise NotImplementedError()
+
+
+class Expression:
+    """A compiled match expression as used by -k and -m.
+
+    The expression can be evaluated against different matchers.
+    """
+
+    __slots__ = ("code",)
+
+    def __init__(self, code: types.CodeType) -> None:
+        self.code = code
+
+    @classmethod
+    def compile(self, input: str) -> "Expression":
+        """Compile a match expression.
+
+        :param input: The input expression - one line.
+        """
+        astexpr = expression(Scanner(input))
+        code: types.CodeType = compile(
+            astexpr,
+            filename="<pytest match expression>",
+            mode="eval",
+        )
+        return Expression(code)
+
+    def evaluate(self, matcher: Callable[[str], bool]) -> bool:
+        """Evaluate the match expression.
+
+        :param matcher:
+            Given an identifier, should return whether it matches or not.
+            Should be prepared to handle arbitrary strings as input.
+
+        :returns: Whether the expression matches or not.
+ """ + ret: bool = eval(self.code, {"__builtins__": {}}, MatcherAdapter(matcher)) + return ret diff --git a/venv/lib/python3.11/site-packages/_pytest/mark/structures.py b/venv/lib/python3.11/site-packages/_pytest/mark/structures.py new file mode 100644 index 0000000..8dbff1d --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/mark/structures.py @@ -0,0 +1,616 @@ +import collections.abc +import dataclasses +import inspect +import warnings +from typing import Any +from typing import Callable +from typing import Collection +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Mapping +from typing import MutableMapping +from typing import NamedTuple +from typing import Optional +from typing import overload +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from .._code import getfslineno +from ..compat import ascii_escaped +from ..compat import final +from ..compat import NOTSET +from ..compat import NotSetType +from _pytest.config import Config +from _pytest.deprecated import check_ispytest +from _pytest.outcomes import fail +from _pytest.warning_types import PytestUnknownMarkWarning + +if TYPE_CHECKING: + from ..nodes import Node + + +EMPTY_PARAMETERSET_OPTION = "empty_parameter_set_mark" + + +def istestfunc(func) -> bool: + return callable(func) and getattr(func, "__name__", "") != "" + + +def get_empty_parameterset_mark( + config: Config, argnames: Sequence[str], func +) -> "MarkDecorator": + from ..nodes import Collector + + fs, lineno = getfslineno(func) + reason = "got empty parameter set %r, function %s at %s:%d" % ( + argnames, + func.__name__, + fs, + lineno, + ) + + requested_mark = config.getini(EMPTY_PARAMETERSET_OPTION) + if requested_mark in ("", None, "skip"): + mark = MARK_GEN.skip(reason=reason) + elif requested_mark == "xfail": + mark = MARK_GEN.xfail(reason=reason, run=False) + elif requested_mark == "fail_at_collect": + f_name = func.__name__ + _, lineno = getfslineno(func) + raise Collector.CollectError( + "Empty parameter set in '%s' at line %d" % (f_name, lineno + 1) + ) + else: + raise LookupError(requested_mark) + return mark + + +class ParameterSet(NamedTuple): + values: Sequence[Union[object, NotSetType]] + marks: Collection[Union["MarkDecorator", "Mark"]] + id: Optional[str] + + @classmethod + def param( + cls, + *values: object, + marks: Union["MarkDecorator", Collection[Union["MarkDecorator", "Mark"]]] = (), + id: Optional[str] = None, + ) -> "ParameterSet": + if isinstance(marks, MarkDecorator): + marks = (marks,) + else: + assert isinstance(marks, collections.abc.Collection) + + if id is not None: + if not isinstance(id, str): + raise TypeError(f"Expected id to be a string, got {type(id)}: {id!r}") + id = ascii_escaped(id) + return cls(values, marks, id) + + @classmethod + def extract_from( + cls, + parameterset: Union["ParameterSet", Sequence[object], object], + force_tuple: bool = False, + ) -> "ParameterSet": + """Extract from an object or objects. + + :param parameterset: + A legacy style parameterset that may or may not be a tuple, + and may or may not be wrapped into a mess of mark objects. + + :param force_tuple: + Enforce tuple wrapping so single argument tuple values + don't get decomposed and break tests. 
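Editor's aside (not part of the patch): a sketch of the `force_tuple` behaviour described above; the values are chosen purely for illustration.

```python
from _pytest.mark.structures import ParameterSet

# With a single parametrize argname, scalars are wrapped so they do not get
# decomposed: extract_from(1, force_tuple=True) -> values == (1,).
assert ParameterSet.extract_from(1, force_tuple=True).values == (1,)
# With multiple argnames, a tuple maps one value per name.
assert ParameterSet.extract_from((1, 2)).values == (1, 2)
# Already-built pytest.param(...) sets pass through unchanged.
ps = ParameterSet.param(3, id="three")
assert ParameterSet.extract_from(ps) is ps
```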
+ """ + + if isinstance(parameterset, cls): + return parameterset + if force_tuple: + return cls.param(parameterset) + else: + # TODO: Refactor to fix this type-ignore. Currently the following + # passes type-checking but crashes: + # + # @pytest.mark.parametrize(('x', 'y'), [1, 2]) + # def test_foo(x, y): pass + return cls(parameterset, marks=[], id=None) # type: ignore[arg-type] + + @staticmethod + def _parse_parametrize_args( + argnames: Union[str, Sequence[str]], + argvalues: Iterable[Union["ParameterSet", Sequence[object], object]], + *args, + **kwargs, + ) -> Tuple[Sequence[str], bool]: + if isinstance(argnames, str): + argnames = [x.strip() for x in argnames.split(",") if x.strip()] + force_tuple = len(argnames) == 1 + else: + force_tuple = False + return argnames, force_tuple + + @staticmethod + def _parse_parametrize_parameters( + argvalues: Iterable[Union["ParameterSet", Sequence[object], object]], + force_tuple: bool, + ) -> List["ParameterSet"]: + return [ + ParameterSet.extract_from(x, force_tuple=force_tuple) for x in argvalues + ] + + @classmethod + def _for_parametrize( + cls, + argnames: Union[str, Sequence[str]], + argvalues: Iterable[Union["ParameterSet", Sequence[object], object]], + func, + config: Config, + nodeid: str, + ) -> Tuple[Sequence[str], List["ParameterSet"]]: + argnames, force_tuple = cls._parse_parametrize_args(argnames, argvalues) + parameters = cls._parse_parametrize_parameters(argvalues, force_tuple) + del argvalues + + if parameters: + # Check all parameter sets have the correct number of values. + for param in parameters: + if len(param.values) != len(argnames): + msg = ( + '{nodeid}: in "parametrize" the number of names ({names_len}):\n' + " {names}\n" + "must be equal to the number of values ({values_len}):\n" + " {values}" + ) + fail( + msg.format( + nodeid=nodeid, + values=param.values, + names=argnames, + names_len=len(argnames), + values_len=len(param.values), + ), + pytrace=False, + ) + else: + # Empty parameter set (likely computed at runtime): create a single + # parameter set with NOTSET values, with the "empty parameter set" mark applied to it. + mark = get_empty_parameterset_mark(config, argnames, func) + parameters.append( + ParameterSet(values=(NOTSET,) * len(argnames), marks=[mark], id=None) + ) + return argnames, parameters + + +@final +@dataclasses.dataclass(frozen=True) +class Mark: + """A pytest mark.""" + + #: Name of the mark. + name: str + #: Positional arguments of the mark decorator. + args: Tuple[Any, ...] + #: Keyword arguments of the mark decorator. + kwargs: Mapping[str, Any] + + #: Source Mark for ids with parametrize Marks. + _param_ids_from: Optional["Mark"] = dataclasses.field(default=None, repr=False) + #: Resolved/generated ids with parametrize Marks. + _param_ids_generated: Optional[Sequence[str]] = dataclasses.field( + default=None, repr=False + ) + + def __init__( + self, + name: str, + args: Tuple[Any, ...], + kwargs: Mapping[str, Any], + param_ids_from: Optional["Mark"] = None, + param_ids_generated: Optional[Sequence[str]] = None, + *, + _ispytest: bool = False, + ) -> None: + """:meta private:""" + check_ispytest(_ispytest) + # Weirdness to bypass frozen=True. 
+ object.__setattr__(self, "name", name) + object.__setattr__(self, "args", args) + object.__setattr__(self, "kwargs", kwargs) + object.__setattr__(self, "_param_ids_from", param_ids_from) + object.__setattr__(self, "_param_ids_generated", param_ids_generated) + + def _has_param_ids(self) -> bool: + return "ids" in self.kwargs or len(self.args) >= 4 + + def combined_with(self, other: "Mark") -> "Mark": + """Return a new Mark which is a combination of this + Mark and another Mark. + + Combines by appending args and merging kwargs. + + :param Mark other: The mark to combine with. + :rtype: Mark + """ + assert self.name == other.name + + # Remember source of ids with parametrize Marks. + param_ids_from: Optional[Mark] = None + if self.name == "parametrize": + if other._has_param_ids(): + param_ids_from = other + elif self._has_param_ids(): + param_ids_from = self + + return Mark( + self.name, + self.args + other.args, + dict(self.kwargs, **other.kwargs), + param_ids_from=param_ids_from, + _ispytest=True, + ) + + +# A generic parameter designating an object to which a Mark may +# be applied -- a test function (callable) or class. +# Note: a lambda is not allowed, but this can't be represented. +Markable = TypeVar("Markable", bound=Union[Callable[..., object], type]) + + +@dataclasses.dataclass +class MarkDecorator: + """A decorator for applying a mark on test functions and classes. + + ``MarkDecorators`` are created with ``pytest.mark``:: + + mark1 = pytest.mark.NAME # Simple MarkDecorator + mark2 = pytest.mark.NAME(name1=value) # Parametrized MarkDecorator + + and can then be applied as decorators to test functions:: + + @mark2 + def test_function(): + pass + + When a ``MarkDecorator`` is called, it does the following: + + 1. If called with a single class as its only positional argument and no + additional keyword arguments, it attaches the mark to the class so it + gets applied automatically to all test cases found in that class. + + 2. If called with a single function as its only positional argument and + no additional keyword arguments, it attaches the mark to the function, + containing all the arguments already stored internally in the + ``MarkDecorator``. + + 3. When called in any other case, it returns a new ``MarkDecorator`` + instance with the original ``MarkDecorator``'s content updated with + the arguments passed to this call. + + Note: The rules above prevent a ``MarkDecorator`` from storing only a + single function or class reference as its positional argument with no + additional keyword or positional arguments. You can work around this by + using `with_args()`. + """ + + mark: Mark + + def __init__(self, mark: Mark, *, _ispytest: bool = False) -> None: + """:meta private:""" + check_ispytest(_ispytest) + self.mark = mark + + @property + def name(self) -> str: + """Alias for mark.name.""" + return self.mark.name + + @property + def args(self) -> Tuple[Any, ...]: + """Alias for mark.args.""" + return self.mark.args + + @property + def kwargs(self) -> Mapping[str, Any]: + """Alias for mark.kwargs.""" + return self.mark.kwargs + + @property + def markname(self) -> str: + """:meta private:""" + return self.name # for backward-compat (2.4.1 had this attr) + + def with_args(self, *args: object, **kwargs: object) -> "MarkDecorator": + """Return a MarkDecorator with extra arguments added. + + Unlike calling the MarkDecorator, with_args() can be used even + if the sole argument is a callable/class. 
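Editor's aside (not part of the patch): a sketch of the distinction drawn above, using a hypothetical `timeout` mark and handler.

```python
import pytest


def handler():
    ...

# Calling the decorator with a sole callable would *apply* the mark to it;
# with_args() instead stores the callable as a mark argument.
deco = pytest.mark.timeout.with_args(handler, seconds=5)
assert deco.args == (handler,)
assert deco.kwargs == {"seconds": 5}
```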
+ """ + mark = Mark(self.name, args, kwargs, _ispytest=True) + return MarkDecorator(self.mark.combined_with(mark), _ispytest=True) + + # Type ignored because the overloads overlap with an incompatible + # return type. Not much we can do about that. Thankfully mypy picks + # the first match so it works out even if we break the rules. + @overload + def __call__(self, arg: Markable) -> Markable: # type: ignore[misc] + pass + + @overload + def __call__(self, *args: object, **kwargs: object) -> "MarkDecorator": + pass + + def __call__(self, *args: object, **kwargs: object): + """Call the MarkDecorator.""" + if args and not kwargs: + func = args[0] + is_class = inspect.isclass(func) + if len(args) == 1 and (istestfunc(func) or is_class): + store_mark(func, self.mark) + return func + return self.with_args(*args, **kwargs) + + +def get_unpacked_marks( + obj: Union[object, type], + *, + consider_mro: bool = True, +) -> List[Mark]: + """Obtain the unpacked marks that are stored on an object. + + If obj is a class and consider_mro is true, return marks applied to + this class and all of its super-classes in MRO order. If consider_mro + is false, only return marks applied directly to this class. + """ + if isinstance(obj, type): + if not consider_mro: + mark_lists = [obj.__dict__.get("pytestmark", [])] + else: + mark_lists = [x.__dict__.get("pytestmark", []) for x in obj.__mro__] + mark_list = [] + for item in mark_lists: + if isinstance(item, list): + mark_list.extend(item) + else: + mark_list.append(item) + else: + mark_attribute = getattr(obj, "pytestmark", []) + if isinstance(mark_attribute, list): + mark_list = mark_attribute + else: + mark_list = [mark_attribute] + return list(normalize_mark_list(mark_list)) + + +def normalize_mark_list( + mark_list: Iterable[Union[Mark, MarkDecorator]] +) -> Iterable[Mark]: + """ + Normalize an iterable of Mark or MarkDecorator objects into a list of marks + by retrieving the `mark` attribute on MarkDecorator instances. + + :param mark_list: marks to normalize + :returns: A new list of the extracted Mark objects + """ + for mark in mark_list: + mark_obj = getattr(mark, "mark", mark) + if not isinstance(mark_obj, Mark): + raise TypeError(f"got {repr(mark_obj)} instead of Mark") + yield mark_obj + + +def store_mark(obj, mark: Mark) -> None: + """Store a Mark on an object. + + This is used to implement the Mark declarations/decorators correctly. + """ + assert isinstance(mark, Mark), mark + # Always reassign name to avoid updating pytestmark in a reference that + # was only borrowed. + obj.pytestmark = [*get_unpacked_marks(obj, consider_mro=False), mark] + + +# Typing for builtin pytest marks. This is cheating; it gives builtin marks +# special privilege, and breaks modularity. But practicality beats purity... +if TYPE_CHECKING: + from _pytest.scope import _ScopeName + + class _SkipMarkDecorator(MarkDecorator): + @overload # type: ignore[override,misc,no-overload-impl] + def __call__(self, arg: Markable) -> Markable: + ... + + @overload + def __call__(self, reason: str = ...) -> "MarkDecorator": + ... + + class _SkipifMarkDecorator(MarkDecorator): + def __call__( # type: ignore[override] + self, + condition: Union[str, bool] = ..., + *conditions: Union[str, bool], + reason: str = ..., + ) -> MarkDecorator: + ... + + class _XfailMarkDecorator(MarkDecorator): + @overload # type: ignore[override,misc,no-overload-impl] + def __call__(self, arg: Markable) -> Markable: + ... 
+ + @overload + def __call__( + self, + condition: Union[str, bool] = ..., + *conditions: Union[str, bool], + reason: str = ..., + run: bool = ..., + raises: Union[Type[BaseException], Tuple[Type[BaseException], ...]] = ..., + strict: bool = ..., + ) -> MarkDecorator: + ... + + class _ParametrizeMarkDecorator(MarkDecorator): + def __call__( # type: ignore[override] + self, + argnames: Union[str, Sequence[str]], + argvalues: Iterable[Union[ParameterSet, Sequence[object], object]], + *, + indirect: Union[bool, Sequence[str]] = ..., + ids: Optional[ + Union[ + Iterable[Union[None, str, float, int, bool]], + Callable[[Any], Optional[object]], + ] + ] = ..., + scope: Optional[_ScopeName] = ..., + ) -> MarkDecorator: + ... + + class _UsefixturesMarkDecorator(MarkDecorator): + def __call__(self, *fixtures: str) -> MarkDecorator: # type: ignore[override] + ... + + class _FilterwarningsMarkDecorator(MarkDecorator): + def __call__(self, *filters: str) -> MarkDecorator: # type: ignore[override] + ... + + +@final +class MarkGenerator: + """Factory for :class:`MarkDecorator` objects - exposed as + a ``pytest.mark`` singleton instance. + + Example:: + + import pytest + + @pytest.mark.slowtest + def test_function(): + pass + + applies a 'slowtest' :class:`Mark` on ``test_function``. + """ + + # See TYPE_CHECKING above. + if TYPE_CHECKING: + skip: _SkipMarkDecorator + skipif: _SkipifMarkDecorator + xfail: _XfailMarkDecorator + parametrize: _ParametrizeMarkDecorator + usefixtures: _UsefixturesMarkDecorator + filterwarnings: _FilterwarningsMarkDecorator + + def __init__(self, *, _ispytest: bool = False) -> None: + check_ispytest(_ispytest) + self._config: Optional[Config] = None + self._markers: Set[str] = set() + + def __getattr__(self, name: str) -> MarkDecorator: + """Generate a new :class:`MarkDecorator` with the given name.""" + if name[0] == "_": + raise AttributeError("Marker name must NOT start with underscore") + + if self._config is not None: + # We store a set of markers as a performance optimisation - if a mark + # name is in the set we definitely know it, but a mark may be known and + # not in the set. We therefore start by updating the set! + if name not in self._markers: + for line in self._config.getini("markers"): + # example lines: "skipif(condition): skip the given test if..." + # or "hypothesis: tests which use Hypothesis", so to get the + # marker name we split on both `:` and `(`. + marker = line.split(":")[0].split("(")[0].strip() + self._markers.add(marker) + + # If the name is not in the set of known marks after updating, + # then it really is time to issue a warning or an error. + if name not in self._markers: + if self._config.option.strict_markers or self._config.option.strict: + fail( + f"{name!r} not found in `markers` configuration option", + pytrace=False, + ) + + # Raise a specific error for common misspellings of "parametrize". + if name in ["parameterize", "parametrise", "parameterise"]: + __tracebackhide__ = True + fail(f"Unknown '{name}' mark, did you mean 'parametrize'?") + + warnings.warn( + "Unknown pytest.mark.%s - is this a typo? 
You can register "
+                    "custom marks to avoid this warning - for details, see "
+                    "https://docs.pytest.org/en/stable/how-to/mark.html" % name,
+                    PytestUnknownMarkWarning,
+                    2,
+                )
+
+        return MarkDecorator(Mark(name, (), {}, _ispytest=True), _ispytest=True)
+
+
+MARK_GEN = MarkGenerator(_ispytest=True)
+
+
+@final
+class NodeKeywords(MutableMapping[str, Any]):
+    __slots__ = ("node", "parent", "_markers")
+
+    def __init__(self, node: "Node") -> None:
+        self.node = node
+        self.parent = node.parent
+        self._markers = {node.name: True}
+
+    def __getitem__(self, key: str) -> Any:
+        try:
+            return self._markers[key]
+        except KeyError:
+            if self.parent is None:
+                raise
+            return self.parent.keywords[key]
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        self._markers[key] = value
+
+    # Note: we could've avoided explicitly implementing some of the methods
+    # below and use the collections.abc fallback, but that would be slow.
+
+    def __contains__(self, key: object) -> bool:
+        return (
+            key in self._markers
+            or self.parent is not None
+            and key in self.parent.keywords
+        )
+
+    def update(  # type: ignore[override]
+        self,
+        other: Union[Mapping[str, Any], Iterable[Tuple[str, Any]]] = (),
+        **kwds: Any,
+    ) -> None:
+        self._markers.update(other)
+        self._markers.update(kwds)
+
+    def __delitem__(self, key: str) -> None:
+        raise ValueError("cannot delete key in keywords dict")
+
+    def __iter__(self) -> Iterator[str]:
+        # Doesn't need to be fast.
+        yield from self._markers
+        if self.parent is not None:
+            for keyword in self.parent.keywords:
+                # self._markers and self.parent.keywords can have duplicates.
+                if keyword not in self._markers:
+                    yield keyword
+
+    def __len__(self) -> int:
+        # Doesn't need to be fast.
+        return sum(1 for keyword in self)
+
+    def __repr__(self) -> str:
+        return f"<NodeKeywords for node {self.node}>"
diff --git a/venv/lib/python3.11/site-packages/_pytest/monkeypatch.py b/venv/lib/python3.11/site-packages/_pytest/monkeypatch.py
new file mode 100644
index 0000000..9e51ff3
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/_pytest/monkeypatch.py
@@ -0,0 +1,421 @@
+"""Monkeypatching and mocking functionality."""
+import os
+import re
+import sys
+import warnings
+from contextlib import contextmanager
+from typing import Any
+from typing import Generator
+from typing import List
+from typing import Mapping
+from typing import MutableMapping
+from typing import Optional
+from typing import overload
+from typing import Tuple
+from typing import TypeVar
+from typing import Union
+
+from _pytest.compat import final
+from _pytest.fixtures import fixture
+from _pytest.warning_types import PytestWarning
+
+RE_IMPORT_ERROR_NAME = re.compile(r"^No module named (.*)$")
+
+
+K = TypeVar("K")
+V = TypeVar("V")
+
+
+@fixture
+def monkeypatch() -> Generator["MonkeyPatch", None, None]:
+    """A convenient fixture for monkey-patching.
+
+    The fixture provides these methods to modify objects, dictionaries, or
+    :data:`os.environ`:
+
+    * :meth:`monkeypatch.setattr(obj, name, value, raising=True) <pytest.MonkeyPatch.setattr>`
+    * :meth:`monkeypatch.delattr(obj, name, raising=True) <pytest.MonkeyPatch.delattr>`
+    * :meth:`monkeypatch.setitem(mapping, name, value) <pytest.MonkeyPatch.setitem>`
+    * :meth:`monkeypatch.delitem(obj, name, raising=True) <pytest.MonkeyPatch.delitem>`
+    * :meth:`monkeypatch.setenv(name, value, prepend=None) <pytest.MonkeyPatch.setenv>`
+    * :meth:`monkeypatch.delenv(name, raising=True) <pytest.MonkeyPatch.delenv>`
+    * :meth:`monkeypatch.syspath_prepend(path) <pytest.MonkeyPatch.syspath_prepend>`
+    * :meth:`monkeypatch.chdir(path) <pytest.MonkeyPatch.chdir>`
+    * :meth:`monkeypatch.context() <pytest.MonkeyPatch.context>`
+
+    All modifications will be undone after the requesting test function or
+    fixture has finished. The ``raising`` parameter determines if a :class:`KeyError`
+    or :class:`AttributeError` will be raised if the set/deletion operation does not have the
+    specified target.
+
+    To undo modifications done by the fixture in a contained scope,
+    use :meth:`context() <pytest.MonkeyPatch.context>`.
+    """
+    mpatch = MonkeyPatch()
+    yield mpatch
+    mpatch.undo()
+
+
+def resolve(name: str) -> object:
+    # Simplified from zope.dottedname.
+    parts = name.split(".")
+
+    used = parts.pop(0)
+    found: object = __import__(used)
+    for part in parts:
+        used += "." + part
+        try:
+            found = getattr(found, part)
+        except AttributeError:
+            pass
+        else:
+            continue
+        # We use explicit un-nesting of the handling block in order
+        # to avoid nested exceptions.
+        try:
+            __import__(used)
+        except ImportError as ex:
+            expected = str(ex).split()[-1]
+            if expected == used:
+                raise
+            else:
+                raise ImportError(f"import error in {used}: {ex}") from ex
+        found = annotated_getattr(found, part, used)
+    return found
+
+
+def annotated_getattr(obj: object, name: str, ann: str) -> object:
+    try:
+        obj = getattr(obj, name)
+    except AttributeError as e:
+        raise AttributeError(
+            "{!r} object at {} has no attribute {!r}".format(
+                type(obj).__name__, ann, name
+            )
+        ) from e
+    return obj
+
+
+def derive_importpath(import_path: str, raising: bool) -> Tuple[str, object]:
+    if not isinstance(import_path, str) or "." not in import_path:
+        raise TypeError(f"must be absolute import path string, not {import_path!r}")
+    module, attr = import_path.rsplit(".", 1)
+    target = resolve(module)
+    if raising:
+        annotated_getattr(target, attr, ann=module)
+    return attr, target
+
+
+class Notset:
+    def __repr__(self) -> str:
+        return "<notset>"
+
+
+notset = Notset()
+
+
+@final
+class MonkeyPatch:
+    """Helper to conveniently monkeypatch attributes/items/environment
+    variables/syspath.
+
+    Returned by the :fixture:`monkeypatch` fixture.
+
+    .. versionchanged:: 6.2
+        Can now also be used directly as `pytest.MonkeyPatch()`, for when
+        the fixture is not available. In this case, use
+        :meth:`with MonkeyPatch.context() as mp: <context>` or remember to call
+        :meth:`undo` explicitly.
+    """
+
+    def __init__(self) -> None:
+        self._setattr: List[Tuple[object, str, object]] = []
+        self._setitem: List[Tuple[Mapping[Any, Any], object, object]] = []
+        self._cwd: Optional[str] = None
+        self._savesyspath: Optional[List[str]] = None
+
+    @classmethod
+    @contextmanager
+    def context(cls) -> Generator["MonkeyPatch", None, None]:
+        """Context manager that returns a new :class:`MonkeyPatch` object
+        which undoes any patching done inside the ``with`` block upon exit.
+
+        Example:
+
+        .. code-block:: python
+
+            import functools
+
+
+            def test_partial(monkeypatch):
+                with monkeypatch.context() as m:
+                    m.setattr(functools, "partial", 3)
+
+        Useful in situations where it is desired to undo some patches before the test ends,
+        such as mocking ``stdlib`` functions that might break pytest itself if mocked (for examples
+        of this see :issue:`3290`).
+        """
+        m = cls()
+        try:
+            yield m
+        finally:
+            m.undo()
+
+    @overload
+    def setattr(
+        self,
+        target: str,
+        name: object,
+        value: Notset = ...,
+        raising: bool = ...,
+    ) -> None:
+        ...
+
+    @overload
+    def setattr(
+        self,
+        target: object,
+        name: str,
+        value: object,
+        raising: bool = ...,
+    ) -> None:
+        ...
+
+    def setattr(
+        self,
+        target: Union[str, object],
+        name: Union[object, str],
+        value: object = notset,
+        raising: bool = True,
+    ) -> None:
+        """
+        Set attribute value on target, memorizing the old value.
+
+        For example:
+
+        .. code-block:: python
+
+            import os
+
+            monkeypatch.setattr(os, "getcwd", lambda: "/")
+
+        The code above replaces the :func:`os.getcwd` function by a ``lambda`` which
+        always returns ``"/"``.
+
+        For convenience, you can specify a string as ``target`` which
+        will be interpreted as a dotted import path, with the last part
+        being the attribute name:
+
+        .. code-block:: python
+
+            monkeypatch.setattr("os.getcwd", lambda: "/")
+
+        Raises :class:`AttributeError` if the attribute does not exist, unless
+        ``raising`` is set to False.
+
+        **Where to patch**
+
+        ``monkeypatch.setattr`` works by (temporarily) changing the object that a name points to with another one.
+        There can be many names pointing to any individual object, so for patching to work you must ensure
+        that you patch the name used by the system under test.
+
+        See the section :ref:`Where to patch <python:where-to-patch>` in the :mod:`unittest.mock`
+        docs for a complete explanation, which is meant for :func:`unittest.mock.patch` but
+        applies to ``monkeypatch.setattr`` as well.
+        """
+        __tracebackhide__ = True
+        import inspect
+
+        if isinstance(value, Notset):
+            if not isinstance(target, str):
+                raise TypeError(
+                    "use setattr(target, name, value) or "
+                    "setattr(target, value) with target being a dotted "
+                    "import string"
+                )
+            value = name
+            name, target = derive_importpath(target, raising)
+        else:
+            if not isinstance(name, str):
+                raise TypeError(
+                    "use setattr(target, name, value) with name being a string or "
+                    "setattr(target, value) with target being a dotted "
+                    "import string"
+                )
+
+        oldval = getattr(target, name, notset)
+        if raising and oldval is notset:
+            raise AttributeError(f"{target!r} has no attribute {name!r}")
+
+        # avoid class descriptors like staticmethod/classmethod
+        if inspect.isclass(target):
+            oldval = target.__dict__.get(name, notset)
+        self._setattr.append((target, name, oldval))
+        setattr(target, name, value)
+
+    def delattr(
+        self,
+        target: Union[object, str],
+        name: Union[str, Notset] = notset,
+        raising: bool = True,
+    ) -> None:
+        """Delete attribute ``name`` from ``target``.
+
+        If no ``name`` is specified and ``target`` is a string
+        it will be interpreted as a dotted import path with the
+        last part being the attribute name.
+
+        Raises AttributeError if the attribute does not exist, unless
+        ``raising`` is set to False.
+        """
+        __tracebackhide__ = True
+        import inspect
+
+        if isinstance(name, Notset):
+            if not isinstance(target, str):
+                raise TypeError(
+                    "use delattr(target, name) or "
+                    "delattr(target) with target being a dotted "
+                    "import string"
+                )
+            name, target = derive_importpath(target, raising)
+
+        if not hasattr(target, name):
+            if raising:
+                raise AttributeError(name)
+        else:
+            oldval = getattr(target, name, notset)
+            # Avoid class descriptors like staticmethod/classmethod.
+            if inspect.isclass(target):
+                oldval = target.__dict__.get(name, notset)
+            self._setattr.append((target, name, oldval))
+            delattr(target, name)
+
+    def setitem(self, dic: Mapping[K, V], name: K, value: V) -> None:
+        """Set dictionary entry ``name`` to value."""
+        self._setitem.append((dic, name, dic.get(name, notset)))
+        # Not all Mapping types support indexing, but MutableMapping doesn't support TypedDict
+        dic[name] = value  # type: ignore[index]
+
+    def delitem(self, dic: Mapping[K, V], name: K, raising: bool = True) -> None:
+        """Delete ``name`` from dict.
+
+        Raises ``KeyError`` if it doesn't exist, unless ``raising`` is set to
+        False.
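Editor's aside (not part of the patch): a sketch of the dictionary and environment helpers above. It assumes `APP_MODE` is not already set in the environment.

```python
import os

from _pytest.monkeypatch import MonkeyPatch

mp = MonkeyPatch()
settings = {"timeout": 5}
mp.setitem(settings, "timeout", 60)
mp.setenv("APP_MODE", "test")  # assumes APP_MODE was previously unset
assert settings["timeout"] == 60 and os.environ["APP_MODE"] == "test"
mp.undo()  # restores the dict entry and removes the environment variable
assert settings["timeout"] == 5 and "APP_MODE" not in os.environ
```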
+        """
+        if name not in dic:
+            if raising:
+                raise KeyError(name)
+        else:
+            self._setitem.append((dic, name, dic.get(name, notset)))
+            # Not all Mapping types support indexing, but MutableMapping doesn't support TypedDict
+            del dic[name]  # type: ignore[attr-defined]
+
+    def setenv(self, name: str, value: str, prepend: Optional[str] = None) -> None:
+        """Set environment variable ``name`` to ``value``.
+
+        If ``prepend`` is a character, read the current environment variable
+        value and prepend the ``value`` adjoined with the ``prepend``
+        character.
+        """
+        if not isinstance(value, str):
+            warnings.warn(  # type: ignore[unreachable]
+                PytestWarning(
+                    "Value of environment variable {name} type should be str, but got "
+                    "{value!r} (type: {type}); converted to str implicitly".format(
+                        name=name, value=value, type=type(value).__name__
+                    )
+                ),
+                stacklevel=2,
+            )
+            value = str(value)
+        if prepend and name in os.environ:
+            value = value + prepend + os.environ[name]
+        self.setitem(os.environ, name, value)
+
+    def delenv(self, name: str, raising: bool = True) -> None:
+        """Delete ``name`` from the environment.
+
+        Raises ``KeyError`` if it does not exist, unless ``raising`` is set to
+        False.
+        """
+        environ: MutableMapping[str, str] = os.environ
+        self.delitem(environ, name, raising=raising)
+
+    def syspath_prepend(self, path) -> None:
+        """Prepend ``path`` to ``sys.path`` list of import locations."""
+
+        if self._savesyspath is None:
+            self._savesyspath = sys.path[:]
+        sys.path.insert(0, str(path))
+
+        # https://github.com/pypa/setuptools/blob/d8b901bc/docs/pkg_resources.txt#L162-L171
+        # this is only needed when pkg_resources was already loaded by the namespace package
+        if "pkg_resources" in sys.modules:
+            from pkg_resources import fixup_namespace_packages
+
+            fixup_namespace_packages(str(path))
+
+        # A call to syspathinsert() usually means that the caller wants to
+        # import some dynamically created files, thus with python3 we
+        # invalidate its import caches.
+        # This is especially important when any namespace package is in use,
+        # since then the mtime based FileFinder cache (that gets created in
+        # this case already) gets not invalidated when writing the new files
+        # quickly afterwards.
+        from importlib import invalidate_caches
+
+        invalidate_caches()
+
+    def chdir(self, path: Union[str, "os.PathLike[str]"]) -> None:
+        """Change the current working directory to the specified path.
+
+        :param path:
+            The path to change into.
+        """
+        if self._cwd is None:
+            self._cwd = os.getcwd()
+        os.chdir(path)
+
+    def undo(self) -> None:
+        """Undo previous changes.
+
+        This call consumes the undo stack. Calling it a second time has no
+        effect unless you do more monkeypatching after the undo call.
+
+        There is generally no need to call `undo()`, since it is
+        called automatically during tear-down.
+
+        .. note::
+            The same `monkeypatch` fixture is used across a
+            single test function invocation. If `monkeypatch` is used both by
+            the test function itself and one of the test fixtures,
+            calling `undo()` will undo all of the changes made in
+            both functions.
+
+            Prefer to use :meth:`context() <pytest.MonkeyPatch.context>` instead.
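Editor's aside (not part of the patch): a sketch of the `context()` pattern that the docstring above recommends over calling `undo()` manually.

```python
import functools

from _pytest.monkeypatch import MonkeyPatch

with MonkeyPatch.context() as mp:
    mp.setattr(functools, "partial", 3)
    assert functools.partial == 3
# Leaving the block calls undo(), so the original attribute is restored.
assert callable(functools.partial)
```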
+ """ + for obj, name, value in reversed(self._setattr): + if value is not notset: + setattr(obj, name, value) + else: + delattr(obj, name) + self._setattr[:] = [] + for dictionary, key, value in reversed(self._setitem): + if value is notset: + try: + # Not all Mapping types support indexing, but MutableMapping doesn't support TypedDict + del dictionary[key] # type: ignore[attr-defined] + except KeyError: + pass # Was already deleted, so we have the desired state. + else: + # Not all Mapping types support indexing, but MutableMapping doesn't support TypedDict + dictionary[key] = value # type: ignore[index] + self._setitem[:] = [] + if self._savesyspath is not None: + sys.path[:] = self._savesyspath + self._savesyspath = None + + if self._cwd is not None: + os.chdir(self._cwd) + self._cwd = None diff --git a/venv/lib/python3.11/site-packages/_pytest/nodes.py b/venv/lib/python3.11/site-packages/_pytest/nodes.py new file mode 100644 index 0000000..dbd6b0a --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/nodes.py @@ -0,0 +1,778 @@ +import os +import warnings +from inspect import signature +from pathlib import Path +from typing import Any +from typing import Callable +from typing import cast +from typing import Iterable +from typing import Iterator +from typing import List +from typing import MutableMapping +from typing import Optional +from typing import overload +from typing import Set +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +import _pytest._code +from _pytest._code import getfslineno +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import TerminalRepr +from _pytest._code.code import Traceback +from _pytest.compat import cached_property +from _pytest.compat import LEGACY_PATH +from _pytest.config import Config +from _pytest.config import ConftestImportFailure +from _pytest.deprecated import FSCOLLECTOR_GETHOOKPROXY_ISINITPATH +from _pytest.deprecated import NODE_CTOR_FSPATH_ARG +from _pytest.mark.structures import Mark +from _pytest.mark.structures import MarkDecorator +from _pytest.mark.structures import NodeKeywords +from _pytest.outcomes import fail +from _pytest.pathlib import absolutepath +from _pytest.pathlib import commonpath +from _pytest.stash import Stash +from _pytest.warning_types import PytestWarning + +if TYPE_CHECKING: + # Imported here due to circular import. + from _pytest.main import Session + from _pytest._code.code import _TracebackStyle + + +SEP = "/" + +tracebackcutdir = Path(_pytest.__file__).parent + + +def iterparentnodeids(nodeid: str) -> Iterator[str]: + """Return the parent node IDs of a given node ID, inclusive. + + For the node ID + + "testing/code/test_excinfo.py::TestFormattedExcinfo::test_repr_source" + + the result would be + + "" + "testing" + "testing/code" + "testing/code/test_excinfo.py" + "testing/code/test_excinfo.py::TestFormattedExcinfo" + "testing/code/test_excinfo.py::TestFormattedExcinfo::test_repr_source" + + Note that / components are only considered until the first ::. + """ + pos = 0 + first_colons: Optional[int] = nodeid.find("::") + if first_colons == -1: + first_colons = None + # The root Session node - always present. + yield "" + # Eagerly consume SEP parts until first colons. + while True: + at = nodeid.find(SEP, pos, first_colons) + if at == -1: + break + if at > 0: + yield nodeid[:at] + pos = at + len(SEP) + # Eagerly consume :: parts. 
+ while True: + at = nodeid.find("::", pos) + if at == -1: + break + if at > 0: + yield nodeid[:at] + pos = at + len("::") + # The node ID itself. + if nodeid: + yield nodeid + + +def _check_path(path: Path, fspath: LEGACY_PATH) -> None: + if Path(fspath) != path: + raise ValueError( + f"Path({fspath!r}) != {path!r}\n" + "if both path and fspath are given they need to be equal" + ) + + +def _imply_path( + node_type: Type["Node"], + path: Optional[Path], + fspath: Optional[LEGACY_PATH], +) -> Path: + if fspath is not None: + warnings.warn( + NODE_CTOR_FSPATH_ARG.format( + node_type_name=node_type.__name__, + ), + stacklevel=6, + ) + if path is not None: + if fspath is not None: + _check_path(path, fspath) + return path + else: + assert fspath is not None + return Path(fspath) + + +_NodeType = TypeVar("_NodeType", bound="Node") + + +class NodeMeta(type): + def __call__(self, *k, **kw): + msg = ( + "Direct construction of {name} has been deprecated, please use {name}.from_parent.\n" + "See " + "https://docs.pytest.org/en/stable/deprecations.html#node-construction-changed-to-node-from-parent" + " for more details." + ).format(name=f"{self.__module__}.{self.__name__}") + fail(msg, pytrace=False) + + def _create(self, *k, **kw): + try: + return super().__call__(*k, **kw) + except TypeError: + sig = signature(getattr(self, "__init__")) + known_kw = {k: v for k, v in kw.items() if k in sig.parameters} + from .warning_types import PytestDeprecationWarning + + warnings.warn( + PytestDeprecationWarning( + f"{self} is not using a cooperative constructor and only takes {set(known_kw)}.\n" + "See https://docs.pytest.org/en/stable/deprecations.html" + "#constructors-of-custom-pytest-node-subclasses-should-take-kwargs " + "for more details." + ) + ) + + return super().__call__(*k, **known_kw) + + +class Node(metaclass=NodeMeta): + """Base class for Collector and Item, the components of the test + collection tree. + + Collector subclasses have children; Items are leaf nodes. + """ + + # Implemented in the legacypath plugin. + #: A ``LEGACY_PATH`` copy of the :attr:`path` attribute. Intended for usage + #: for methods not migrated to ``pathlib.Path`` yet, such as + #: :meth:`Item.reportinfo`. Will be deprecated in a future release, prefer + #: using :attr:`path` instead. + fspath: LEGACY_PATH + + # Use __slots__ to make attribute access faster. + # Note that __dict__ is still available. + __slots__ = ( + "name", + "parent", + "config", + "session", + "path", + "_nodeid", + "_store", + "__dict__", + ) + + def __init__( + self, + name: str, + parent: "Optional[Node]" = None, + config: Optional[Config] = None, + session: "Optional[Session]" = None, + fspath: Optional[LEGACY_PATH] = None, + path: Optional[Path] = None, + nodeid: Optional[str] = None, + ) -> None: + #: A unique name within the scope of the parent node. + self.name: str = name + + #: The parent collector node. + self.parent = parent + + if config: + #: The pytest config object. + self.config: Config = config + else: + if not parent: + raise TypeError("config or parent must be provided") + self.config = parent.config + + if session: + #: The pytest session this node is part of. + self.session: Session = session + else: + if not parent: + raise TypeError("session or parent must be provided") + self.session = parent.session + + if path is None and fspath is None: + path = getattr(parent, "path", None) + #: Filesystem path where this node was collected from (can be None). 
+        self.path: Path = _imply_path(type(self), path, fspath=fspath)
+
+        # The explicit annotation is to avoid publicly exposing NodeKeywords.
+        #: Keywords/markers collected from all scopes.
+        self.keywords: MutableMapping[str, Any] = NodeKeywords(self)
+
+        #: The marker objects belonging to this node.
+        self.own_markers: List[Mark] = []
+
+        #: Allow adding of extra keywords to use for matching.
+        self.extra_keyword_matches: Set[str] = set()
+
+        if nodeid is not None:
+            assert "::()" not in nodeid
+            self._nodeid = nodeid
+        else:
+            if not self.parent:
+                raise TypeError("nodeid or parent must be provided")
+            self._nodeid = self.parent.nodeid + "::" + self.name
+
+        #: A place where plugins can store information on the node for their
+        #: own use.
+        self.stash: Stash = Stash()
+        # Deprecated alias. Was never public. Can be removed in a few releases.
+        self._store = self.stash
+
+    @classmethod
+    def from_parent(cls, parent: "Node", **kw):
+        """Public constructor for Nodes.
+
+        This indirection was introduced to enable removing
+        the fragile logic from the node constructors.
+
+        Subclasses can use ``super().from_parent(...)`` when overriding the
+        construction.
+
+        :param parent: The parent node of this Node.
+        """
+        if "config" in kw:
+            raise TypeError("config is not a valid argument for from_parent")
+        if "session" in kw:
+            raise TypeError("session is not a valid argument for from_parent")
+        return cls._create(parent=parent, **kw)
+
+    @property
+    def ihook(self):
+        """fspath-sensitive hook proxy used to call pytest hooks."""
+        return self.session.gethookproxy(self.path)
+
+    def __repr__(self) -> str:
+        return "<{} {}>".format(self.__class__.__name__, getattr(self, "name", None))
+
+    def warn(self, warning: Warning) -> None:
+        """Issue a warning for this Node.
+
+        Warnings will be displayed after the test session, unless explicitly suppressed.
+
+        :param Warning warning:
+            The warning instance to issue.
+
+        :raises ValueError: If ``warning`` instance is not a subclass of Warning.
+
+        Example usage:
+
+        .. code-block:: python
+
+            node.warn(PytestWarning("some message"))
+            node.warn(UserWarning("some message"))
+
+        .. versionchanged:: 6.2
+            Any subclass of :class:`Warning` is now accepted, rather than only
+            :class:`PytestWarning <pytest.PytestWarning>` subclasses.
+        """
+        # Enforce type checks here to avoid getting a generic type error later.
+        if not isinstance(warning, Warning):
+            raise ValueError(
+                "warning must be an instance of Warning or subclass, got {!r}".format(
+                    warning
+                )
+            )
+        path, lineno = get_fslocation_from_item(self)
+        assert lineno is not None
+        warnings.warn_explicit(
+            warning,
+            category=None,
+            filename=str(path),
+            lineno=lineno + 1,
+        )
+
+    # Methods for ordering nodes.
+
+    @property
+    def nodeid(self) -> str:
+        """A ::-separated string denoting its collection tree address."""
+        return self._nodeid
+
+    def __hash__(self) -> int:
+        return hash(self._nodeid)
+
+    def setup(self) -> None:
+        pass
+
+    def teardown(self) -> None:
+        pass
+
+    def listchain(self) -> List["Node"]:
+        """Return list of all parent collectors up to self, starting from
+        the root of collection tree.
+
+        :returns: The nodes.
+        """
+        chain = []
+        item: Optional[Node] = self
+        while item is not None:
+            chain.append(item)
+            item = item.parent
+        chain.reverse()
+        return chain
+
+    def add_marker(
+        self, marker: Union[str, MarkDecorator], append: bool = True
+    ) -> None:
+        """Dynamically add a marker object to the node.
+
+        :param marker:
+            The marker.
+ :param append: + Whether to append the marker, or prepend it. + """ + from _pytest.mark import MARK_GEN + + if isinstance(marker, MarkDecorator): + marker_ = marker + elif isinstance(marker, str): + marker_ = getattr(MARK_GEN, marker) + else: + raise ValueError("is not a string or pytest.mark.* Marker") + self.keywords[marker_.name] = marker_ + if append: + self.own_markers.append(marker_.mark) + else: + self.own_markers.insert(0, marker_.mark) + + def iter_markers(self, name: Optional[str] = None) -> Iterator[Mark]: + """Iterate over all markers of the node. + + :param name: If given, filter the results by the name attribute. + :returns: An iterator of the markers of the node. + """ + return (x[1] for x in self.iter_markers_with_node(name=name)) + + def iter_markers_with_node( + self, name: Optional[str] = None + ) -> Iterator[Tuple["Node", Mark]]: + """Iterate over all markers of the node. + + :param name: If given, filter the results by the name attribute. + :returns: An iterator of (node, mark) tuples. + """ + for node in reversed(self.listchain()): + for mark in node.own_markers: + if name is None or getattr(mark, "name", None) == name: + yield node, mark + + @overload + def get_closest_marker(self, name: str) -> Optional[Mark]: + ... + + @overload + def get_closest_marker(self, name: str, default: Mark) -> Mark: + ... + + def get_closest_marker( + self, name: str, default: Optional[Mark] = None + ) -> Optional[Mark]: + """Return the first marker matching the name, from closest (for + example function) to farther level (for example module level). + + :param default: Fallback return value if no marker was found. + :param name: Name to filter by. + """ + return next(self.iter_markers(name=name), default) + + def listextrakeywords(self) -> Set[str]: + """Return a set of all extra keywords in self and any parents.""" + extra_keywords: Set[str] = set() + for item in self.listchain(): + extra_keywords.update(item.extra_keyword_matches) + return extra_keywords + + def listnames(self) -> List[str]: + return [x.name for x in self.listchain()] + + def addfinalizer(self, fin: Callable[[], object]) -> None: + """Register a function to be called without arguments when this node is + finalized. + + This method can only be called when this node is active + in a setup chain, for example during self.setup(). + """ + self.session._setupstate.addfinalizer(fin, self) + + def getparent(self, cls: Type[_NodeType]) -> Optional[_NodeType]: + """Get the next parent node (including self) which is an instance of + the given class. + + :param cls: The node class to search for. + :returns: The node, if found. 
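+
+        Example (a sketch; ``item`` stands for any collected node):
+
+        .. code-block:: python
+
+            import pytest
+
+            module = item.getparent(pytest.Module)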
+ """ + current: Optional[Node] = self + while current and not isinstance(current, cls): + current = current.parent + assert current is None or isinstance(current, cls) + return current + + def _traceback_filter(self, excinfo: ExceptionInfo[BaseException]) -> Traceback: + return excinfo.traceback + + def _repr_failure_py( + self, + excinfo: ExceptionInfo[BaseException], + style: "Optional[_TracebackStyle]" = None, + ) -> TerminalRepr: + from _pytest.fixtures import FixtureLookupError + + if isinstance(excinfo.value, ConftestImportFailure): + excinfo = ExceptionInfo.from_exc_info(excinfo.value.excinfo) + if isinstance(excinfo.value, fail.Exception): + if not excinfo.value.pytrace: + style = "value" + if isinstance(excinfo.value, FixtureLookupError): + return excinfo.value.formatrepr() + + tbfilter: Union[bool, Callable[[ExceptionInfo[BaseException]], Traceback]] + if self.config.getoption("fulltrace", False): + style = "long" + tbfilter = False + else: + tbfilter = self._traceback_filter + if style == "auto": + style = "long" + # XXX should excinfo.getrepr record all data and toterminal() process it? + if style is None: + if self.config.getoption("tbstyle", "auto") == "short": + style = "short" + else: + style = "long" + + if self.config.getoption("verbose", 0) > 1: + truncate_locals = False + else: + truncate_locals = True + + # excinfo.getrepr() formats paths relative to the CWD if `abspath` is False. + # It is possible for a fixture/test to change the CWD while this code runs, which + # would then result in the user seeing confusing paths in the failure message. + # To fix this, if the CWD changed, always display the full absolute path. + # It will be better to just always display paths relative to invocation_dir, but + # this requires a lot of plumbing (#6428). + try: + abspath = Path(os.getcwd()) != self.config.invocation_params.dir + except OSError: + abspath = True + + return excinfo.getrepr( + funcargs=True, + abspath=abspath, + showlocals=self.config.getoption("showlocals", False), + style=style, + tbfilter=tbfilter, + truncate_locals=truncate_locals, + ) + + def repr_failure( + self, + excinfo: ExceptionInfo[BaseException], + style: "Optional[_TracebackStyle]" = None, + ) -> Union[str, TerminalRepr]: + """Return a representation of a collection or test failure. + + .. seealso:: :ref:`non-python tests` + + :param excinfo: Exception information for the failure. + """ + return self._repr_failure_py(excinfo, style) + + +def get_fslocation_from_item(node: "Node") -> Tuple[Union[str, Path], Optional[int]]: + """Try to extract the actual location from a node, depending on available attributes: + + * "location": a pair (path, lineno) + * "obj": a Python object that the node wraps. + * "fspath": just a path + + :rtype: A tuple of (str|Path, int) with filename and 0-based line number. + """ + # See Item.location. 
+ location: Optional[Tuple[str, Optional[int], str]] = getattr(node, "location", None) + if location is not None: + return location[:2] + obj = getattr(node, "obj", None) + if obj is not None: + return getfslineno(obj) + return getattr(node, "fspath", "unknown location"), -1 + + +class Collector(Node): + """Collector instances create children through collect() and thus + iteratively build a tree.""" + + class CollectError(Exception): + """An error during collection, contains a custom message.""" + + def collect(self) -> Iterable[Union["Item", "Collector"]]: + """Return a list of children (items and collectors) for this + collection node.""" + raise NotImplementedError("abstract") + + # TODO: This omits the style= parameter which breaks Liskov Substitution. + def repr_failure( # type: ignore[override] + self, excinfo: ExceptionInfo[BaseException] + ) -> Union[str, TerminalRepr]: + """Return a representation of a collection failure. + + :param excinfo: Exception information for the failure. + """ + if isinstance(excinfo.value, self.CollectError) and not self.config.getoption( + "fulltrace", False + ): + exc = excinfo.value + return str(exc.args[0]) + + # Respect explicit tbstyle option, but default to "short" + # (_repr_failure_py uses "long" with "fulltrace" option always). + tbstyle = self.config.getoption("tbstyle", "auto") + if tbstyle == "auto": + tbstyle = "short" + + return self._repr_failure_py(excinfo, style=tbstyle) + + def _traceback_filter(self, excinfo: ExceptionInfo[BaseException]) -> Traceback: + if hasattr(self, "path"): + traceback = excinfo.traceback + ntraceback = traceback.cut(path=self.path) + if ntraceback == traceback: + ntraceback = ntraceback.cut(excludepath=tracebackcutdir) + return excinfo.traceback.filter(excinfo) + return excinfo.traceback + + +def _check_initialpaths_for_relpath(session: "Session", path: Path) -> Optional[str]: + for initial_path in session._initialpaths: + if commonpath(path, initial_path) == initial_path: + rel = str(path.relative_to(initial_path)) + return "" if rel == "." 
else rel + return None + + +class FSCollector(Collector): + def __init__( + self, + fspath: Optional[LEGACY_PATH] = None, + path_or_parent: Optional[Union[Path, Node]] = None, + path: Optional[Path] = None, + name: Optional[str] = None, + parent: Optional[Node] = None, + config: Optional[Config] = None, + session: Optional["Session"] = None, + nodeid: Optional[str] = None, + ) -> None: + if path_or_parent: + if isinstance(path_or_parent, Node): + assert parent is None + parent = cast(FSCollector, path_or_parent) + elif isinstance(path_or_parent, Path): + assert path is None + path = path_or_parent + + path = _imply_path(type(self), path, fspath=fspath) + if name is None: + name = path.name + if parent is not None and parent.path != path: + try: + rel = path.relative_to(parent.path) + except ValueError: + pass + else: + name = str(rel) + name = name.replace(os.sep, SEP) + self.path = path + + if session is None: + assert parent is not None + session = parent.session + + if nodeid is None: + try: + nodeid = str(self.path.relative_to(session.config.rootpath)) + except ValueError: + nodeid = _check_initialpaths_for_relpath(session, path) + + if nodeid and os.sep != SEP: + nodeid = nodeid.replace(os.sep, SEP) + + super().__init__( + name=name, + parent=parent, + config=config, + session=session, + nodeid=nodeid, + path=path, + ) + + @classmethod + def from_parent( + cls, + parent, + *, + fspath: Optional[LEGACY_PATH] = None, + path: Optional[Path] = None, + **kw, + ): + """The public constructor.""" + return super().from_parent(parent=parent, fspath=fspath, path=path, **kw) + + def gethookproxy(self, fspath: "os.PathLike[str]"): + warnings.warn(FSCOLLECTOR_GETHOOKPROXY_ISINITPATH, stacklevel=2) + return self.session.gethookproxy(fspath) + + def isinitpath(self, path: Union[str, "os.PathLike[str]"]) -> bool: + warnings.warn(FSCOLLECTOR_GETHOOKPROXY_ISINITPATH, stacklevel=2) + return self.session.isinitpath(path) + + +class File(FSCollector): + """Base class for collecting tests from a file. + + :ref:`non-python tests`. + """ + + +class Item(Node): + """A basic test invocation item. + + Note that for a single function there might be multiple test invocation items. + """ + + nextitem = None + + def __init__( + self, + name, + parent=None, + config: Optional[Config] = None, + session: Optional["Session"] = None, + nodeid: Optional[str] = None, + **kw, + ) -> None: + # The first two arguments are intentionally passed positionally, + # to keep plugins who define a node type which inherits from + # (pytest.Item, pytest.File) working (see issue #8435). + # They can be made kwargs when the deprecation above is done. + super().__init__( + name, + parent, + config=config, + session=session, + nodeid=nodeid, + **kw, + ) + self._report_sections: List[Tuple[str, str, str]] = [] + + #: A list of tuples (name, value) that holds user defined properties + #: for this test. + self.user_properties: List[Tuple[str, object]] = [] + + self._check_item_and_collector_diamond_inheritance() + + def _check_item_and_collector_diamond_inheritance(self) -> None: + """ + Check if the current type inherits from both File and Collector + at the same time, emitting a warning accordingly (#8447). + """ + cls = type(self) + + # We inject an attribute in the type to avoid issuing this warning + # for the same class more than once, which is not helpful. + # It is a hack, but was deemed acceptable in order to avoid + # flooding the user in the common case. 
+ attr_name = "_pytest_diamond_inheritance_warning_shown" + if getattr(cls, attr_name, False): + return + setattr(cls, attr_name, True) + + problems = ", ".join( + base.__name__ for base in cls.__bases__ if issubclass(base, Collector) + ) + if problems: + warnings.warn( + f"{cls.__name__} is an Item subclass and should not be a collector, " + f"however its bases {problems} are collectors.\n" + "Please split the Collectors and the Item into separate node types.\n" + "Pytest Doc example: https://docs.pytest.org/en/latest/example/nonpython.html\n" + "example pull request on a plugin: https://github.com/asmeurer/pytest-flakes/pull/40/", + PytestWarning, + ) + + def runtest(self) -> None: + """Run the test case for this item. + + Must be implemented by subclasses. + + .. seealso:: :ref:`non-python tests` + """ + raise NotImplementedError("runtest must be implemented by Item subclass") + + def add_report_section(self, when: str, key: str, content: str) -> None: + """Add a new report section, similar to what's done internally to add + stdout and stderr captured output:: + + item.add_report_section("call", "stdout", "report section contents") + + :param str when: + One of the possible capture states, ``"setup"``, ``"call"``, ``"teardown"``. + :param str key: + Name of the section, can be customized at will. Pytest uses ``"stdout"`` and + ``"stderr"`` internally. + :param str content: + The full contents as a string. + """ + if content: + self._report_sections.append((when, key, content)) + + def reportinfo(self) -> Tuple[Union["os.PathLike[str]", str], Optional[int], str]: + """Get location information for this item for test reports. + + Returns a tuple with three elements: + + - The path of the test (default ``self.path``) + - The 0-based line number of the test (default ``None``) + - A name of the test to be shown (default ``""``) + + .. seealso:: :ref:`non-python tests` + """ + return self.path, None, "" + + @cached_property + def location(self) -> Tuple[str, Optional[int], str]: + """ + Returns a tuple of ``(relfspath, lineno, testname)`` for this item + where ``relfspath`` is file path relative to ``config.rootpath`` + and lineno is a 0-based line number. + """ + location = self.reportinfo() + path = absolutepath(os.fspath(location[0])) + relfspath = self.session._node_location_to_relpath(path) + assert type(location[2]) is str + return (relfspath, location[1], location[2]) diff --git a/venv/lib/python3.11/site-packages/_pytest/nose.py b/venv/lib/python3.11/site-packages/_pytest/nose.py new file mode 100644 index 0000000..273bd04 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/nose.py @@ -0,0 +1,50 @@ +"""Run testsuites written for nose.""" +import warnings + +from _pytest.config import hookimpl +from _pytest.deprecated import NOSE_SUPPORT +from _pytest.fixtures import getfixturemarker +from _pytest.nodes import Item +from _pytest.python import Function +from _pytest.unittest import TestCaseFunction + + +@hookimpl(trylast=True) +def pytest_runtest_setup(item: Item) -> None: + if not isinstance(item, Function): + return + # Don't do nose style setup/teardown on direct unittest style classes. 
+ if isinstance(item, TestCaseFunction): + return + + # Capture the narrowed type of item for the teardown closure, + # see https://github.com/python/mypy/issues/2608 + func = item + + call_optional(func.obj, "setup", func.nodeid) + func.addfinalizer(lambda: call_optional(func.obj, "teardown", func.nodeid)) + + # NOTE: Module- and class-level fixtures are handled in python.py + # with `pluginmanager.has_plugin("nose")` checks. + # It would have been nicer to implement them outside of core, but + # it's not straightforward. + + +def call_optional(obj: object, name: str, nodeid: str) -> bool: + method = getattr(obj, name, None) + if method is None: + return False + is_fixture = getfixturemarker(method) is not None + if is_fixture: + return False + if not callable(method): + return False + # Warn about deprecation of this plugin. + method_name = getattr(method, "__name__", str(method)) + warnings.warn( + NOSE_SUPPORT.format(nodeid=nodeid, method=method_name, stage=name), stacklevel=2 + ) + # If there are any problems allow the exception to raise rather than + # silently ignoring it. + method() + return True diff --git a/venv/lib/python3.11/site-packages/_pytest/outcomes.py b/venv/lib/python3.11/site-packages/_pytest/outcomes.py new file mode 100644 index 0000000..1be97dd --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/outcomes.py @@ -0,0 +1,311 @@ +"""Exception classes and constants handling test outcomes as well as +functions creating them.""" +import sys +import warnings +from typing import Any +from typing import Callable +from typing import cast +from typing import NoReturn +from typing import Optional +from typing import Type +from typing import TypeVar + +from _pytest.deprecated import KEYWORD_MSG_ARG + +TYPE_CHECKING = False # Avoid circular import through compat. + +if TYPE_CHECKING: + from typing_extensions import Protocol +else: + # typing.Protocol is only available starting from Python 3.8. It is also + # available from typing_extensions, but we don't want a runtime dependency + # on that. So use a dummy runtime implementation. + from typing import Generic + + Protocol = Generic + + +class OutcomeException(BaseException): + """OutcomeException and its subclass instances indicate and contain info + about test and collection outcomes.""" + + def __init__(self, msg: Optional[str] = None, pytrace: bool = True) -> None: + if msg is not None and not isinstance(msg, str): + error_msg = ( # type: ignore[unreachable] + "{} expected string as 'msg' parameter, got '{}' instead.\n" + "Perhaps you meant to use a mark?" + ) + raise TypeError(error_msg.format(type(self).__name__, type(msg).__name__)) + super().__init__(msg) + self.msg = msg + self.pytrace = pytrace + + def __repr__(self) -> str: + if self.msg is not None: + return self.msg + return f"<{self.__class__.__name__} instance>" + + __str__ = __repr__ + + +TEST_OUTCOME = (OutcomeException, Exception) + + +class Skipped(OutcomeException): + # XXX hackish: on 3k we fake to live in the builtins + # in order to have Skipped exception printing shorter/nicer + __module__ = "builtins" + + def __init__( + self, + msg: Optional[str] = None, + pytrace: bool = True, + allow_module_level: bool = False, + *, + _use_item_location: bool = False, + ) -> None: + super().__init__(msg=msg, pytrace=pytrace) + self.allow_module_level = allow_module_level + # If true, the skip location is reported as the item's location, + # instead of the place that raises the exception/calls skip(). 
+ self._use_item_location = _use_item_location + + +class Failed(OutcomeException): + """Raised from an explicit call to pytest.fail().""" + + __module__ = "builtins" + + +class Exit(Exception): + """Raised for immediate program exits (no tracebacks/summaries).""" + + def __init__( + self, msg: str = "unknown reason", returncode: Optional[int] = None + ) -> None: + self.msg = msg + self.returncode = returncode + super().__init__(msg) + + +# Elaborate hack to work around https://github.com/python/mypy/issues/2087. +# Ideally would just be `exit.Exception = Exit` etc. + +_F = TypeVar("_F", bound=Callable[..., object]) +_ET = TypeVar("_ET", bound=Type[BaseException]) + + +class _WithException(Protocol[_F, _ET]): + Exception: _ET + __call__: _F + + +def _with_exception(exception_type: _ET) -> Callable[[_F], _WithException[_F, _ET]]: + def decorate(func: _F) -> _WithException[_F, _ET]: + func_with_exception = cast(_WithException[_F, _ET], func) + func_with_exception.Exception = exception_type + return func_with_exception + + return decorate + + +# Exposed helper methods. + + +@_with_exception(Exit) +def exit( + reason: str = "", returncode: Optional[int] = None, *, msg: Optional[str] = None +) -> NoReturn: + """Exit testing process. + + :param reason: + The message to show as the reason for exiting pytest. reason has a default value + only because `msg` is deprecated. + + :param returncode: + Return code to be used when exiting pytest. + + :param msg: + Same as ``reason``, but deprecated. Will be removed in a future version, use ``reason`` instead. + """ + __tracebackhide__ = True + from _pytest.config import UsageError + + if reason and msg: + raise UsageError( + "cannot pass reason and msg to exit(), `msg` is deprecated, use `reason`." + ) + if not reason: + if msg is None: + raise UsageError("exit() requires a reason argument") + warnings.warn(KEYWORD_MSG_ARG.format(func="exit"), stacklevel=2) + reason = msg + raise Exit(reason, returncode) + + +@_with_exception(Skipped) +def skip( + reason: str = "", *, allow_module_level: bool = False, msg: Optional[str] = None +) -> NoReturn: + """Skip an executing test with the given message. + + This function should be called only during testing (setup, call or teardown) or + during collection by using the ``allow_module_level`` flag. This function can + be called in doctests as well. + + :param reason: + The message to show the user as reason for the skip. + + :param allow_module_level: + Allows this function to be called at module level. + Raising the skip exception at module level will stop + the execution of the module and prevent the collection of all tests in the module, + even those defined before the `skip` call. + + Defaults to False. + + :param msg: + Same as ``reason``, but deprecated. Will be removed in a future version, use ``reason`` instead. + + .. note:: + It is better to use the :ref:`pytest.mark.skipif ref` marker when + possible to declare a test to be skipped under certain conditions + like mismatching platforms or dependencies. + Similarly, use the ``# doctest: +SKIP`` directive (see :py:data:`doctest.SKIP`) + to skip a doctest statically. + """ + __tracebackhide__ = True + reason = _resolve_msg_to_reason("skip", reason, msg) + raise Skipped(msg=reason, allow_module_level=allow_module_level) + + +@_with_exception(Failed) +def fail(reason: str = "", pytrace: bool = True, msg: Optional[str] = None) -> NoReturn: + """Explicitly fail an executing test with the given message. 
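+
+    For example (a sketch; ``response`` is a hypothetical fixture):
+
+    .. code-block:: python
+
+        def test_response(response):
+            if response.status_code != 200:
+                pytest.fail(f"unexpected status: {response.status_code}")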
+ + :param reason: + The message to show the user as reason for the failure. + + :param pytrace: + If False, msg represents the full failure information and no + python traceback will be reported. + + :param msg: + Same as ``reason``, but deprecated. Will be removed in a future version, use ``reason`` instead. + """ + __tracebackhide__ = True + reason = _resolve_msg_to_reason("fail", reason, msg) + raise Failed(msg=reason, pytrace=pytrace) + + +def _resolve_msg_to_reason( + func_name: str, reason: str, msg: Optional[str] = None +) -> str: + """ + Handles converting the deprecated msg parameter if provided into + reason, raising a deprecation warning. This function will be removed + when the optional msg argument is removed from here in future. + + :param str func_name: + The name of the offending function, this is formatted into the deprecation message. + + :param str reason: + The reason= passed into either pytest.fail() or pytest.skip() + + :param str msg: + The msg= passed into either pytest.fail() or pytest.skip(). This will + be converted into reason if it is provided to allow pytest.skip(msg=) or + pytest.fail(msg=) to continue working in the interim period. + + :returns: + The value to use as reason. + + """ + __tracebackhide__ = True + if msg is not None: + if reason: + from pytest import UsageError + + raise UsageError( + f"Passing both ``reason`` and ``msg`` to pytest.{func_name}(...) is not permitted." + ) + warnings.warn(KEYWORD_MSG_ARG.format(func=func_name), stacklevel=3) + reason = msg + return reason + + +class XFailed(Failed): + """Raised from an explicit call to pytest.xfail().""" + + +@_with_exception(XFailed) +def xfail(reason: str = "") -> NoReturn: + """Imperatively xfail an executing test or setup function with the given reason. + + This function should be called only during testing (setup, call or teardown). + + :param reason: + The message to show the user as reason for the xfail. + + .. note:: + It is better to use the :ref:`pytest.mark.xfail ref` marker when + possible to declare a test to be xfailed under certain conditions + like known bugs or missing features. + """ + __tracebackhide__ = True + raise XFailed(reason) + + +def importorskip( + modname: str, minversion: Optional[str] = None, reason: Optional[str] = None +) -> Any: + """Import and return the requested module ``modname``, or skip the + current test if the module cannot be imported. + + :param modname: + The name of the module to import. + :param minversion: + If given, the imported module's ``__version__`` attribute must be at + least this minimal version, otherwise the test is still skipped. + :param reason: + If given, this reason is shown as the message when the module cannot + be imported. + + :returns: + The imported module. This should be assigned to its canonical name. + + Example:: + + docutils = pytest.importorskip("docutils") + """ + import warnings + + __tracebackhide__ = True + compile(modname, "", "eval") # to catch syntaxerrors + + with warnings.catch_warnings(): + # Make sure to ignore ImportWarnings that might happen because + # of existing directories with the same name we're trying to + # import but without a __init__.py file. 
+ warnings.simplefilter("ignore") + try: + __import__(modname) + except ImportError as exc: + if reason is None: + reason = f"could not import {modname!r}: {exc}" + raise Skipped(reason, allow_module_level=True) from None + mod = sys.modules[modname] + if minversion is None: + return mod + verattr = getattr(mod, "__version__", None) + if minversion is not None: + # Imported lazily to improve start-up time. + from packaging.version import Version + + if verattr is None or Version(verattr) < Version(minversion): + raise Skipped( + "module %r has __version__ %r, required is: %r" + % (modname, verattr, minversion), + allow_module_level=True, + ) + return mod diff --git a/venv/lib/python3.11/site-packages/_pytest/pastebin.py b/venv/lib/python3.11/site-packages/_pytest/pastebin.py new file mode 100644 index 0000000..22c7a62 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/pastebin.py @@ -0,0 +1,110 @@ +"""Submit failure or test session information to a pastebin service.""" +import tempfile +from io import StringIO +from typing import IO +from typing import Union + +import pytest +from _pytest.config import Config +from _pytest.config import create_terminal_writer +from _pytest.config.argparsing import Parser +from _pytest.stash import StashKey +from _pytest.terminal import TerminalReporter + + +pastebinfile_key = StashKey[IO[bytes]]() + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("terminal reporting") + group._addoption( + "--pastebin", + metavar="mode", + action="store", + dest="pastebin", + default=None, + choices=["failed", "all"], + help="Send failed|all info to bpaste.net pastebin service", + ) + + +@pytest.hookimpl(trylast=True) +def pytest_configure(config: Config) -> None: + if config.option.pastebin == "all": + tr = config.pluginmanager.getplugin("terminalreporter") + # If no terminal reporter plugin is present, nothing we can do here; + # this can happen when this function executes in a worker node + # when using pytest-xdist, for example. + if tr is not None: + # pastebin file will be UTF-8 encoded binary file. + config.stash[pastebinfile_key] = tempfile.TemporaryFile("w+b") + oldwrite = tr._tw.write + + def tee_write(s, **kwargs): + oldwrite(s, **kwargs) + if isinstance(s, str): + s = s.encode("utf-8") + config.stash[pastebinfile_key].write(s) + + tr._tw.write = tee_write + + +def pytest_unconfigure(config: Config) -> None: + if pastebinfile_key in config.stash: + pastebinfile = config.stash[pastebinfile_key] + # Get terminal contents and delete file. + pastebinfile.seek(0) + sessionlog = pastebinfile.read() + pastebinfile.close() + del config.stash[pastebinfile_key] + # Undo our patching in the terminal reporter. + tr = config.pluginmanager.getplugin("terminalreporter") + del tr._tw.__dict__["write"] + # Write summary. + tr.write_sep("=", "Sending information to Paste Service") + pastebinurl = create_new_paste(sessionlog) + tr.write_line("pastebin session-log: %s\n" % pastebinurl) + + +def create_new_paste(contents: Union[str, bytes]) -> str: + """Create a new paste using the bpaste.net service. + + :contents: Paste contents string. + :returns: URL to the pasted contents, or an error message. 
+ """ + import re + from urllib.request import urlopen + from urllib.parse import urlencode + + params = {"code": contents, "lexer": "text", "expiry": "1week"} + url = "https://bpa.st" + try: + response: str = ( + urlopen(url, data=urlencode(params).encode("ascii")).read().decode("utf-8") + ) + except OSError as exc_info: # urllib errors + return "bad response: %s" % exc_info + m = re.search(r'href="/raw/(\w+)"', response) + if m: + return f"{url}/show/{m.group(1)}" + else: + return "bad response: invalid format ('" + response + "')" + + +def pytest_terminal_summary(terminalreporter: TerminalReporter) -> None: + if terminalreporter.config.option.pastebin != "failed": + return + if "failed" in terminalreporter.stats: + terminalreporter.write_sep("=", "Sending information to Paste Service") + for rep in terminalreporter.stats["failed"]: + try: + msg = rep.longrepr.reprtraceback.reprentries[-1].reprfileloc + except AttributeError: + msg = terminalreporter._getfailureheadline(rep) + file = StringIO() + tw = create_terminal_writer(terminalreporter.config, file) + rep.toterminal(tw) + s = file.getvalue() + assert len(s) + pastebinurl = create_new_paste(s) + terminalreporter.write_line(f"{msg} --> {pastebinurl}") diff --git a/venv/lib/python3.11/site-packages/_pytest/pathlib.py b/venv/lib/python3.11/site-packages/_pytest/pathlib.py new file mode 100644 index 0000000..70383e4 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/pathlib.py @@ -0,0 +1,775 @@ +import atexit +import contextlib +import fnmatch +import importlib.util +import itertools +import os +import shutil +import sys +import types +import uuid +import warnings +from enum import Enum +from errno import EBADF +from errno import ELOOP +from errno import ENOENT +from errno import ENOTDIR +from functools import partial +from os.path import expanduser +from os.path import expandvars +from os.path import isabs +from os.path import sep +from pathlib import Path +from pathlib import PurePath +from posixpath import sep as posix_sep +from types import ModuleType +from typing import Callable +from typing import Dict +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Optional +from typing import Set +from typing import Tuple +from typing import Type +from typing import TypeVar +from typing import Union + +from _pytest.compat import assert_never +from _pytest.outcomes import skip +from _pytest.warning_types import PytestWarning + +LOCK_TIMEOUT = 60 * 60 * 24 * 3 + + +_AnyPurePath = TypeVar("_AnyPurePath", bound=PurePath) + +# The following function, variables and comments were +# copied from cpython 3.9 Lib/pathlib.py file. + +# EBADF - guard against macOS `stat` throwing EBADF +_IGNORED_ERRORS = (ENOENT, ENOTDIR, EBADF, ELOOP) + +_IGNORED_WINERRORS = ( + 21, # ERROR_NOT_READY - drive exists but is not accessible + 1921, # ERROR_CANT_RESOLVE_FILENAME - fix for broken symlink pointing to itself +) + + +def _ignore_error(exception): + return ( + getattr(exception, "errno", None) in _IGNORED_ERRORS + or getattr(exception, "winerror", None) in _IGNORED_WINERRORS + ) + + +def get_lock_path(path: _AnyPurePath) -> _AnyPurePath: + return path.joinpath(".lock") + + +def on_rm_rf_error( + func, + path: str, + excinfo: Union[ + BaseException, + Tuple[Type[BaseException], BaseException, Optional[types.TracebackType]], + ], + *, + start_path: Path, +) -> bool: + """Handle known read-only errors during rmtree. + + The returned value is used only by our own tests. 
+ """ + if isinstance(excinfo, BaseException): + exc = excinfo + else: + exc = excinfo[1] + + # Another process removed the file in the middle of the "rm_rf" (xdist for example). + # More context: https://github.com/pytest-dev/pytest/issues/5974#issuecomment-543799018 + if isinstance(exc, FileNotFoundError): + return False + + if not isinstance(exc, PermissionError): + warnings.warn( + PytestWarning(f"(rm_rf) error removing {path}\n{type(exc)}: {exc}") + ) + return False + + if func not in (os.rmdir, os.remove, os.unlink): + if func not in (os.open,): + warnings.warn( + PytestWarning( + "(rm_rf) unknown function {} when removing {}:\n{}: {}".format( + func, path, type(exc), exc + ) + ) + ) + return False + + # Chmod + retry. + import stat + + def chmod_rw(p: str) -> None: + mode = os.stat(p).st_mode + os.chmod(p, mode | stat.S_IRUSR | stat.S_IWUSR) + + # For files, we need to recursively go upwards in the directories to + # ensure they all are also writable. + p = Path(path) + if p.is_file(): + for parent in p.parents: + chmod_rw(str(parent)) + # Stop when we reach the original path passed to rm_rf. + if parent == start_path: + break + chmod_rw(str(path)) + + func(path) + return True + + +def ensure_extended_length_path(path: Path) -> Path: + """Get the extended-length version of a path (Windows). + + On Windows, by default, the maximum length of a path (MAX_PATH) is 260 + characters, and operations on paths longer than that fail. But it is possible + to overcome this by converting the path to "extended-length" form before + performing the operation: + https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file#maximum-path-length-limitation + + On Windows, this function returns the extended-length absolute version of path. + On other platforms it returns path unchanged. + """ + if sys.platform.startswith("win32"): + path = path.resolve() + path = Path(get_extended_length_path_str(str(path))) + return path + + +def get_extended_length_path_str(path: str) -> str: + """Convert a path to a Windows extended length path.""" + long_path_prefix = "\\\\?\\" + unc_long_path_prefix = "\\\\?\\UNC\\" + if path.startswith((long_path_prefix, unc_long_path_prefix)): + return path + # UNC + if path.startswith("\\\\"): + return unc_long_path_prefix + path[2:] + return long_path_prefix + path + + +def rm_rf(path: Path) -> None: + """Remove the path contents recursively, even if some elements + are read-only.""" + path = ensure_extended_length_path(path) + onerror = partial(on_rm_rf_error, start_path=path) + if sys.version_info >= (3, 12): + shutil.rmtree(str(path), onexc=onerror) + else: + shutil.rmtree(str(path), onerror=onerror) + + +def find_prefixed(root: Path, prefix: str) -> Iterator[Path]: + """Find all elements in root that begin with the prefix, case insensitive.""" + l_prefix = prefix.lower() + for x in root.iterdir(): + if x.name.lower().startswith(l_prefix): + yield x + + +def extract_suffixes(iter: Iterable[PurePath], prefix: str) -> Iterator[str]: + """Return the parts of the paths following the prefix. + + :param iter: Iterator over path names. + :param prefix: Expected prefix of the path names. 
+ """ + p_len = len(prefix) + for p in iter: + yield p.name[p_len:] + + +def find_suffixes(root: Path, prefix: str) -> Iterator[str]: + """Combine find_prefixes and extract_suffixes.""" + return extract_suffixes(find_prefixed(root, prefix), prefix) + + +def parse_num(maybe_num) -> int: + """Parse number path suffixes, returns -1 on error.""" + try: + return int(maybe_num) + except ValueError: + return -1 + + +def _force_symlink( + root: Path, target: Union[str, PurePath], link_to: Union[str, Path] +) -> None: + """Helper to create the current symlink. + + It's full of race conditions that are reasonably OK to ignore + for the context of best effort linking to the latest test run. + + The presumption being that in case of much parallelism + the inaccuracy is going to be acceptable. + """ + current_symlink = root.joinpath(target) + try: + current_symlink.unlink() + except OSError: + pass + try: + current_symlink.symlink_to(link_to) + except Exception: + pass + + +def make_numbered_dir(root: Path, prefix: str, mode: int = 0o700) -> Path: + """Create a directory with an increased number as suffix for the given prefix.""" + for i in range(10): + # try up to 10 times to create the folder + max_existing = max(map(parse_num, find_suffixes(root, prefix)), default=-1) + new_number = max_existing + 1 + new_path = root.joinpath(f"{prefix}{new_number}") + try: + new_path.mkdir(mode=mode) + except Exception: + pass + else: + _force_symlink(root, prefix + "current", new_path) + return new_path + else: + raise OSError( + "could not create numbered dir with prefix " + "{prefix} in {root} after 10 tries".format(prefix=prefix, root=root) + ) + + +def create_cleanup_lock(p: Path) -> Path: + """Create a lock to prevent premature folder cleanup.""" + lock_path = get_lock_path(p) + try: + fd = os.open(str(lock_path), os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o644) + except FileExistsError as e: + raise OSError(f"cannot create lockfile in {p}") from e + else: + pid = os.getpid() + spid = str(pid).encode() + os.write(fd, spid) + os.close(fd) + if not lock_path.is_file(): + raise OSError("lock path got renamed after successful creation") + return lock_path + + +def register_cleanup_lock_removal(lock_path: Path, register=atexit.register): + """Register a cleanup function for removing a lock, by default on atexit.""" + pid = os.getpid() + + def cleanup_on_exit(lock_path: Path = lock_path, original_pid: int = pid) -> None: + current_pid = os.getpid() + if current_pid != original_pid: + # fork + return + try: + lock_path.unlink() + except OSError: + pass + + return register(cleanup_on_exit) + + +def maybe_delete_a_numbered_dir(path: Path) -> None: + """Remove a numbered directory if its lock can be obtained and it does + not seem to be in use.""" + path = ensure_extended_length_path(path) + lock_path = None + try: + lock_path = create_cleanup_lock(path) + parent = path.parent + + garbage = parent.joinpath(f"garbage-{uuid.uuid4()}") + path.rename(garbage) + rm_rf(garbage) + except OSError: + # known races: + # * other process did a cleanup at the same time + # * deletable folder was found + # * process cwd (Windows) + return + finally: + # If we created the lock, ensure we remove it even if we failed + # to properly remove the numbered dir. 
+ if lock_path is not None: + try: + lock_path.unlink() + except OSError: + pass + + +def ensure_deletable(path: Path, consider_lock_dead_if_created_before: float) -> bool: + """Check if `path` is deletable based on whether the lock file is expired.""" + if path.is_symlink(): + return False + lock = get_lock_path(path) + try: + if not lock.is_file(): + return True + except OSError: + # we might not have access to the lock file at all, in this case assume + # we don't have access to the entire directory (#7491). + return False + try: + lock_time = lock.stat().st_mtime + except Exception: + return False + else: + if lock_time < consider_lock_dead_if_created_before: + # We want to ignore any errors while trying to remove the lock such as: + # - PermissionDenied, like the file permissions have changed since the lock creation; + # - FileNotFoundError, in case another pytest process got here first; + # and any other cause of failure. + with contextlib.suppress(OSError): + lock.unlink() + return True + return False + + +def try_cleanup(path: Path, consider_lock_dead_if_created_before: float) -> None: + """Try to cleanup a folder if we can ensure it's deletable.""" + if ensure_deletable(path, consider_lock_dead_if_created_before): + maybe_delete_a_numbered_dir(path) + + +def cleanup_candidates(root: Path, prefix: str, keep: int) -> Iterator[Path]: + """List candidates for numbered directories to be removed - follows py.path.""" + max_existing = max(map(parse_num, find_suffixes(root, prefix)), default=-1) + max_delete = max_existing - keep + paths = find_prefixed(root, prefix) + paths, paths2 = itertools.tee(paths) + numbers = map(parse_num, extract_suffixes(paths2, prefix)) + for path, number in zip(paths, numbers): + if number <= max_delete: + yield path + + +def cleanup_dead_symlinks(root: Path): + for left_dir in root.iterdir(): + if left_dir.is_symlink(): + if not left_dir.resolve().exists(): + left_dir.unlink() + + +def cleanup_numbered_dir( + root: Path, prefix: str, keep: int, consider_lock_dead_if_created_before: float +) -> None: + """Cleanup for lock driven numbered directories.""" + if not root.exists(): + return + for path in cleanup_candidates(root, prefix, keep): + try_cleanup(path, consider_lock_dead_if_created_before) + for path in root.glob("garbage-*"): + try_cleanup(path, consider_lock_dead_if_created_before) + + cleanup_dead_symlinks(root) + + +def make_numbered_dir_with_cleanup( + root: Path, + prefix: str, + keep: int, + lock_timeout: float, + mode: int, +) -> Path: + """Create a numbered dir with a cleanup lock and remove old ones.""" + e = None + for i in range(10): + try: + p = make_numbered_dir(root, prefix, mode) + # Only lock the current dir when keep is not 0 + if keep != 0: + lock_path = create_cleanup_lock(p) + register_cleanup_lock_removal(lock_path) + except Exception as exc: + e = exc + else: + consider_lock_dead_if_created_before = p.stat().st_mtime - lock_timeout + # Register a cleanup for program exit + atexit.register( + cleanup_numbered_dir, + root, + prefix, + keep, + consider_lock_dead_if_created_before, + ) + return p + assert e is not None + raise e + + +def resolve_from_str(input: str, rootpath: Path) -> Path: + input = expanduser(input) + input = expandvars(input) + if isabs(input): + return Path(input) + else: + return rootpath.joinpath(input) + + +def fnmatch_ex(pattern: str, path: Union[str, "os.PathLike[str]"]) -> bool: + """A port of FNMatcher from py.path.common which works with PurePath() instances. 
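+
+    A quick sketch of the behavior described below (pattern first, then path;
+    both arguments are hypothetical):
+
+    .. code-block:: python
+
+        assert fnmatch_ex("tests/**/doc/test*.py", "tests/foo/bar/doc/test_foo.py")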
+
+    The difference between this algorithm and PurePath.match() is that the
+    latter matches "**" glob expressions for each part of the path, while
+    this algorithm uses the whole path instead.
+
+    For example:
+    "tests/foo/bar/doc/test_foo.py" matches pattern "tests/**/doc/test*.py"
+    with this algorithm, but not with PurePath.match().
+
+    This algorithm was ported to keep backward-compatibility with existing
+    settings which assume paths match according to this logic.
+
+    References:
+    * https://bugs.python.org/issue29249
+    * https://bugs.python.org/issue34731
+    """
+    path = PurePath(path)
+    iswin32 = sys.platform.startswith("win")
+
+    if iswin32 and sep not in pattern and posix_sep in pattern:
+        # Running on Windows, the pattern has no Windows path separators,
+        # and the pattern has one or more Posix path separators. Replace
+        # the Posix path separators with the Windows path separator.
+        pattern = pattern.replace(posix_sep, sep)
+
+    if sep not in pattern:
+        name = path.name
+    else:
+        name = str(path)
+        if path.is_absolute() and not os.path.isabs(pattern):
+            pattern = f"*{os.sep}{pattern}"
+    return fnmatch.fnmatch(name, pattern)
+
+
+def parts(s: str) -> Set[str]:
+    parts = s.split(sep)
+    return {sep.join(parts[: i + 1]) or sep for i in range(len(parts))}
+
+
+def symlink_or_skip(src, dst, **kwargs):
+    """Make a symlink, or skip the test in case symlinks are not supported."""
+    try:
+        os.symlink(str(src), str(dst), **kwargs)
+    except OSError as e:
+        skip(f"symlinks not supported: {e}")
+
+
+class ImportMode(Enum):
+    """Possible values for `mode` parameter of `import_path`."""
+
+    prepend = "prepend"
+    append = "append"
+    importlib = "importlib"
+
+
+class ImportPathMismatchError(ImportError):
+    """Raised on import_path() if there is a mismatch of __file__'s.
+
+    This can happen when `import_path` is called multiple times with different filenames that have
+    the same basename but reside in packages
+    (for example "/tests1/test_foo.py" and "/tests2/test_foo.py").
+    """
+
+
+def import_path(
+    p: Union[str, "os.PathLike[str]"],
+    *,
+    mode: Union[str, ImportMode] = ImportMode.prepend,
+    root: Path,
+) -> ModuleType:
+    """Import and return a module from the given path, which can be a file (a module) or
+    a directory (a package).
+
+    The import mechanism used is controlled by the `mode` parameter:
+
+    * `mode == ImportMode.prepend`: the directory containing the module (or package, taking
+      `__init__.py` files into account) will be put at the *start* of `sys.path` before
+      being imported with `importlib.import_module`.
+
+    * `mode == ImportMode.append`: same as `prepend`, but the directory will be appended
+      to the end of `sys.path`, if not already in `sys.path`.
+
+    * `mode == ImportMode.importlib`: uses more fine control mechanisms provided by `importlib`
+      to import the module, which avoids having to muck with `sys.path` at all. It effectively
+      allows having same-named test modules in different places.
+
+    :param root:
+        Used as an anchor when mode == ImportMode.importlib to obtain
+        a unique name for the module being imported so it can safely be stored
+        into ``sys.modules``.
+
+    :raises ImportPathMismatchError:
+        If, after importing, the module's ``__file__`` differs from the given
+        ``path``. Only raised in `prepend` and `append` modes.
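+
+    Example (a sketch; the test file path is hypothetical):
+
+    .. code-block:: python
+
+        mod = import_path(
+            Path("tests/test_foo.py"), mode=ImportMode.importlib, root=Path.cwd()
+        )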
+ """ + mode = ImportMode(mode) + + path = Path(p) + + if not path.exists(): + raise ImportError(path) + + if mode is ImportMode.importlib: + module_name = module_name_from_path(path, root) + + for meta_importer in sys.meta_path: + spec = meta_importer.find_spec(module_name, [str(path.parent)]) + if spec is not None: + break + else: + spec = importlib.util.spec_from_file_location(module_name, str(path)) + + if spec is None: + raise ImportError(f"Can't find module {module_name} at location {path}") + mod = importlib.util.module_from_spec(spec) + sys.modules[module_name] = mod + spec.loader.exec_module(mod) # type: ignore[union-attr] + insert_missing_modules(sys.modules, module_name) + return mod + + pkg_path = resolve_package_path(path) + if pkg_path is not None: + pkg_root = pkg_path.parent + names = list(path.with_suffix("").relative_to(pkg_root).parts) + if names[-1] == "__init__": + names.pop() + module_name = ".".join(names) + else: + pkg_root = path.parent + module_name = path.stem + + # Change sys.path permanently: restoring it at the end of this function would cause surprising + # problems because of delayed imports: for example, a conftest.py file imported by this function + # might have local imports, which would fail at runtime if we restored sys.path. + if mode is ImportMode.append: + if str(pkg_root) not in sys.path: + sys.path.append(str(pkg_root)) + elif mode is ImportMode.prepend: + if str(pkg_root) != sys.path[0]: + sys.path.insert(0, str(pkg_root)) + else: + assert_never(mode) + + importlib.import_module(module_name) + + mod = sys.modules[module_name] + if path.name == "__init__.py": + return mod + + ignore = os.environ.get("PY_IGNORE_IMPORTMISMATCH", "") + if ignore != "1": + module_file = mod.__file__ + if module_file is None: + raise ImportPathMismatchError(module_name, module_file, path) + + if module_file.endswith((".pyc", ".pyo")): + module_file = module_file[:-1] + if module_file.endswith(os.sep + "__init__.py"): + module_file = module_file[: -(len(os.sep + "__init__.py"))] + + try: + is_same = _is_same(str(path), module_file) + except FileNotFoundError: + is_same = False + + if not is_same: + raise ImportPathMismatchError(module_name, module_file, path) + + return mod + + +# Implement a special _is_same function on Windows which returns True if the two filenames +# compare equal, to circumvent os.path.samefile returning False for mounts in UNC (#7678). +if sys.platform.startswith("win"): + + def _is_same(f1: str, f2: str) -> bool: + return Path(f1) == Path(f2) or os.path.samefile(f1, f2) + +else: + + def _is_same(f1: str, f2: str) -> bool: + return os.path.samefile(f1, f2) + + +def module_name_from_path(path: Path, root: Path) -> str: + """ + Return a dotted module name based on the given path, anchored on root. + + For example: path="projects/src/tests/test_foo.py" and root="/projects", the + resulting module name will be "src.tests.test_foo". + """ + path = path.with_suffix("") + try: + relative_path = path.relative_to(root) + except ValueError: + # If we can't get a relative path to root, use the full path, except + # for the first part ("d:\\" or "/" depending on the platform, for example). + path_parts = path.parts[1:] + else: + # Use the parts for the relative path to the root path. + path_parts = relative_path.parts + + return ".".join(path_parts) + + +def insert_missing_modules(modules: Dict[str, ModuleType], module_name: str) -> None: + """ + Used by ``import_path`` to create intermediate modules when using mode=importlib. 
+ + When we want to import a module as "src.tests.test_foo" for example, we need + to create empty modules "src" and "src.tests" after inserting "src.tests.test_foo", + otherwise "src.tests.test_foo" is not importable by ``__import__``. + """ + module_parts = module_name.split(".") + while module_name: + if module_name not in modules: + try: + # If sys.meta_path is empty, calling import_module will issue + # a warning and raise ModuleNotFoundError. To avoid the + # warning, we check sys.meta_path explicitly and raise the error + # ourselves to fall back to creating a dummy module. + if not sys.meta_path: + raise ModuleNotFoundError + importlib.import_module(module_name) + except ModuleNotFoundError: + module = ModuleType( + module_name, + doc="Empty module created by pytest's importmode=importlib.", + ) + modules[module_name] = module + module_parts.pop(-1) + module_name = ".".join(module_parts) + + +def resolve_package_path(path: Path) -> Optional[Path]: + """Return the Python package path by looking for the last + directory upwards which still contains an __init__.py. + + Returns None if it can not be determined. + """ + result = None + for parent in itertools.chain((path,), path.parents): + if parent.is_dir(): + if not parent.joinpath("__init__.py").is_file(): + break + if not parent.name.isidentifier(): + break + result = parent + return result + + +def scandir(path: Union[str, "os.PathLike[str]"]) -> List["os.DirEntry[str]"]: + """Scan a directory recursively, in breadth-first order. + + The returned entries are sorted. + """ + entries = [] + with os.scandir(path) as s: + # Skip entries with symlink loops and other brokenness, so the caller + # doesn't have to deal with it. + for entry in s: + try: + entry.is_file() + except OSError as err: + if _ignore_error(err): + continue + raise + entries.append(entry) + entries.sort(key=lambda entry: entry.name) + return entries + + +def visit( + path: Union[str, "os.PathLike[str]"], recurse: Callable[["os.DirEntry[str]"], bool] +) -> Iterator["os.DirEntry[str]"]: + """Walk a directory recursively, in breadth-first order. + + The `recurse` predicate determines whether a directory is recursed. + + Entries at each directory level are sorted. + """ + entries = scandir(path) + yield from entries + for entry in entries: + if entry.is_dir() and recurse(entry): + yield from visit(entry.path, recurse) + + +def absolutepath(path: Union[Path, str]) -> Path: + """Convert a path to an absolute path using os.path.abspath. + + Prefer this over Path.resolve() (see #6523). + Prefer this over Path.absolute() (not public, doesn't normalize). + """ + return Path(os.path.abspath(str(path))) + + +def commonpath(path1: Path, path2: Path) -> Optional[Path]: + """Return the common part shared with the other path, or None if there is + no common part. + + If one path is relative and one is absolute, returns None. + """ + try: + return Path(os.path.commonpath((str(path1), str(path2)))) + except ValueError: + return None + + +def bestrelpath(directory: Path, dest: Path) -> str: + """Return a string which is a relative path from directory to dest such + that directory/bestrelpath == dest. + + The paths must be either both absolute or both relative. + + If no such path can be determined, returns dest. + """ + assert isinstance(directory, Path) + assert isinstance(dest, Path) + if dest == directory: + return os.curdir + # Find the longest common directory. + base = commonpath(directory, dest) + # Can be the case on Windows for two absolute paths on different drives. 
+    # Can be the case for two relative paths without common prefix.
+    # Can be the case for a relative path and an absolute path.
+    if not base:
+        return str(dest)
+    reldirectory = directory.relative_to(base)
+    reldest = dest.relative_to(base)
+    return os.path.join(
+        # Back from directory to base.
+        *([os.pardir] * len(reldirectory.parts)),
+        # Forward from base to dest.
+        *reldest.parts,
+    )
+
+
+# Originates from py.path.local.copy(), with significant trims and adjustments.
+# TODO(py38): Replace with shutil.copytree(..., symlinks=True, dirs_exist_ok=True)
+def copytree(source: Path, target: Path) -> None:
+    """Recursively copy a source directory to target."""
+    assert source.is_dir()
+    for entry in visit(source, recurse=lambda entry: not entry.is_symlink()):
+        x = Path(entry)
+        relpath = x.relative_to(source)
+        newx = target / relpath
+        newx.parent.mkdir(exist_ok=True)
+        if x.is_symlink():
+            newx.symlink_to(os.readlink(x))
+        elif x.is_file():
+            shutil.copyfile(x, newx)
+        elif x.is_dir():
+            newx.mkdir(exist_ok=True)
diff --git a/venv/lib/python3.11/site-packages/_pytest/py.typed b/venv/lib/python3.11/site-packages/_pytest/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/venv/lib/python3.11/site-packages/_pytest/pytester.py b/venv/lib/python3.11/site-packages/_pytest/pytester.py
new file mode 100644
index 0000000..3df52eb
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/_pytest/pytester.py
@@ -0,0 +1,1789 @@
+"""(Disabled by default) support for testing pytest and pytest plugins.
+
+PYTEST_DONT_REWRITE
+"""
+import collections.abc
+import contextlib
+import gc
+import importlib
+import locale
+import os
+import platform
+import re
+import shutil
+import subprocess
+import sys
+import traceback
+from fnmatch import fnmatch
+from io import StringIO
+from pathlib import Path
+from typing import Any
+from typing import Callable
+from typing import Dict
+from typing import Generator
+from typing import IO
+from typing import Iterable
+from typing import List
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import TextIO
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import Union
+from weakref import WeakKeyDictionary
+
+from iniconfig import IniConfig
+from iniconfig import SectionWrapper
+
+from _pytest import timing
+from _pytest._code import Source
+from _pytest.capture import _get_multicapture
+from _pytest.compat import final
+from _pytest.compat import NOTSET
+from _pytest.compat import NotSetType
+from _pytest.config import _PluggyPlugin
+from _pytest.config import Config
+from _pytest.config import ExitCode
+from _pytest.config import hookimpl
+from _pytest.config import main
+from _pytest.config import PytestPluginManager
+from _pytest.config.argparsing import Parser
+from _pytest.deprecated import check_ispytest
+from _pytest.fixtures import fixture
+from _pytest.fixtures import FixtureRequest
+from _pytest.main import Session
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.nodes import Collector
+from _pytest.nodes import Item
+from _pytest.outcomes import fail
+from _pytest.outcomes import importorskip
+from _pytest.outcomes import skip
+from _pytest.pathlib import bestrelpath
+from _pytest.pathlib import copytree
+from _pytest.pathlib import make_numbered_dir
+from _pytest.reports import CollectReport
+from _pytest.reports import TestReport
+from _pytest.tmpdir import TempPathFactory
+from _pytest.warning_types import PytestWarning
+
+
+if
TYPE_CHECKING: + from typing_extensions import Final + from typing_extensions import Literal + + import pexpect + + +pytest_plugins = ["pytester_assertions"] + + +IGNORE_PAM = [ # filenames added when obtaining details about the current user + "/var/lib/sss/mc/passwd" +] + + +def pytest_addoption(parser: Parser) -> None: + parser.addoption( + "--lsof", + action="store_true", + dest="lsof", + default=False, + help="Run FD checks if lsof is available", + ) + + parser.addoption( + "--runpytest", + default="inprocess", + dest="runpytest", + choices=("inprocess", "subprocess"), + help=( + "Run pytest sub runs in tests using an 'inprocess' " + "or 'subprocess' (python -m main) method" + ), + ) + + parser.addini( + "pytester_example_dir", help="Directory to take the pytester example files from" + ) + + +def pytest_configure(config: Config) -> None: + if config.getvalue("lsof"): + checker = LsofFdLeakChecker() + if checker.matching_platform(): + config.pluginmanager.register(checker) + + config.addinivalue_line( + "markers", + "pytester_example_path(*path_segments): join the given path " + "segments to `pytester_example_dir` for this test.", + ) + + +class LsofFdLeakChecker: + def get_open_files(self) -> List[Tuple[str, str]]: + out = subprocess.run( + ("lsof", "-Ffn0", "-p", str(os.getpid())), + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + check=True, + text=True, + encoding=locale.getpreferredencoding(False), + ).stdout + + def isopen(line: str) -> bool: + return line.startswith("f") and ( + "deleted" not in line + and "mem" not in line + and "txt" not in line + and "cwd" not in line + ) + + open_files = [] + + for line in out.split("\n"): + if isopen(line): + fields = line.split("\0") + fd = fields[0][1:] + filename = fields[1][1:] + if filename in IGNORE_PAM: + continue + if filename.startswith("/"): + open_files.append((fd, filename)) + + return open_files + + def matching_platform(self) -> bool: + try: + subprocess.run(("lsof", "-v"), check=True) + except (OSError, subprocess.CalledProcessError): + return False + else: + return True + + @hookimpl(hookwrapper=True, tryfirst=True) + def pytest_runtest_protocol(self, item: Item) -> Generator[None, None, None]: + lines1 = self.get_open_files() + yield + if hasattr(sys, "pypy_version_info"): + gc.collect() + lines2 = self.get_open_files() + + new_fds = {t[0] for t in lines2} - {t[0] for t in lines1} + leaked_files = [t for t in lines2 if t[0] in new_fds] + if leaked_files: + error = [ + "***** %s FD leakage detected" % len(leaked_files), + *(str(f) for f in leaked_files), + "*** Before:", + *(str(f) for f in lines1), + "*** After:", + *(str(f) for f in lines2), + "***** %s FD leakage detected" % len(leaked_files), + "*** function %s:%s: %s " % item.location, + "See issue #2366", + ] + item.warn(PytestWarning("\n".join(error))) + + +# used at least by pytest-xdist plugin + + +@fixture +def _pytest(request: FixtureRequest) -> "PytestArg": + """Return a helper which offers a gethookrecorder(hook) method which + returns a HookRecorder instance which helps to make assertions about called + hooks.""" + return PytestArg(request) + + +class PytestArg: + def __init__(self, request: FixtureRequest) -> None: + self._request = request + + def gethookrecorder(self, hook) -> "HookRecorder": + hookrecorder = HookRecorder(hook._pm) + self._request.addfinalizer(hookrecorder.finish_recording) + return hookrecorder + + +def get_public_names(values: Iterable[str]) -> List[str]: + """Only return names from iterator values without a leading underscore.""" + 
return [x for x in values if x[0] != "_"]
+
+
+@final
+class RecordedHookCall:
+    """A recorded call to a hook.
+
+    The arguments to the hook call are set as attributes.
+    For example:
+
+    .. code-block:: python
+
+        calls = hook_recorder.getcalls("pytest_runtest_setup")
+        # Suppose pytest_runtest_setup was called once with `item=an_item`.
+        assert calls[0].item is an_item
+    """
+
+    def __init__(self, name: str, kwargs) -> None:
+        self.__dict__.update(kwargs)
+        self._name = name
+
+    def __repr__(self) -> str:
+        d = self.__dict__.copy()
+        del d["_name"]
+        return f"<RecordedHookCall {self._name!r}(**{d!r})>"
+
+    if TYPE_CHECKING:
+        # The class has undetermined attributes, this tells mypy about it.
+        def __getattr__(self, key: str):
+            ...
+
+
+@final
+class HookRecorder:
+    """Record all hooks called in a plugin manager.
+
+    Hook recorders are created by :class:`Pytester`.
+
+    This wraps all the hook calls in the plugin manager, recording each call
+    before propagating the normal calls.
+    """
+
+    def __init__(
+        self, pluginmanager: PytestPluginManager, *, _ispytest: bool = False
+    ) -> None:
+        check_ispytest(_ispytest)
+
+        self._pluginmanager = pluginmanager
+        self.calls: List[RecordedHookCall] = []
+        self.ret: Optional[Union[int, ExitCode]] = None
+
+        def before(hook_name: str, hook_impls, kwargs) -> None:
+            self.calls.append(RecordedHookCall(hook_name, kwargs))
+
+        def after(outcome, hook_name: str, hook_impls, kwargs) -> None:
+            pass
+
+        self._undo_wrapping = pluginmanager.add_hookcall_monitoring(before, after)
+
+    def finish_recording(self) -> None:
+        self._undo_wrapping()
+
+    def getcalls(self, names: Union[str, Iterable[str]]) -> List[RecordedHookCall]:
+        """Get all recorded calls to hooks with the given names (or name)."""
+        if isinstance(names, str):
+            names = names.split()
+        return [call for call in self.calls if call._name in names]
+
+    def assert_contains(self, entries: Sequence[Tuple[str, str]]) -> None:
+        __tracebackhide__ = True
+        i = 0
+        entries = list(entries)
+        backlocals = sys._getframe(1).f_locals
+        while entries:
+            name, check = entries.pop(0)
+            for ind, call in enumerate(self.calls[i:]):
+                if call._name == name:
+                    print("NAMEMATCH", name, call)
+                    if eval(check, backlocals, call.__dict__):
+                        print("CHECKERMATCH", repr(check), "->", call)
+                    else:
+                        print("NOCHECKERMATCH", repr(check), "-", call)
+                        continue
+                    i += ind + 1
+                    break
+                print("NONAMEMATCH", name, "with", call)
+            else:
+                fail(f"could not find {name!r} check {check!r}")
+
+    def popcall(self, name: str) -> RecordedHookCall:
+        __tracebackhide__ = True
+        for i, call in enumerate(self.calls):
+            if call._name == name:
+                del self.calls[i]
+                return call
+        lines = [f"could not find call {name!r}, in:"]
+        lines.extend(["  %s" % x for x in self.calls])
+        fail("\n".join(lines))
+
+    def getcall(self, name: str) -> RecordedHookCall:
+        values = self.getcalls(name)
+        assert len(values) == 1, (name, values)
+        return values[0]
+
+    # functionality for test reports
+
+    @overload
+    def getreports(
+        self,
+        names: "Literal['pytest_collectreport']",
+    ) -> Sequence[CollectReport]:
+        ...
+
+    @overload
+    def getreports(
+        self,
+        names: "Literal['pytest_runtest_logreport']",
+    ) -> Sequence[TestReport]:
+        ...
+
+    @overload
+    def getreports(
+        self,
+        names: Union[str, Iterable[str]] = (
+            "pytest_collectreport",
+            "pytest_runtest_logreport",
+        ),
+    ) -> Sequence[Union[CollectReport, TestReport]]:
+        ...
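+
+    # A minimal usage sketch for the recorder API above (assuming a test that
+    # uses the ``pytester`` fixture; the hook names and assertions here are
+    # illustrative, not part of this diff):
+    #
+    #     reprec = pytester.inline_run("-q")
+    #     call = reprec.getcall("pytest_collection_finish")
+    #     reports = reprec.getreports("pytest_runtest_logreport")
+    #     assert any(r.passed and r.when == "call" for r in reports)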
+ + def getreports( + self, + names: Union[str, Iterable[str]] = ( + "pytest_collectreport", + "pytest_runtest_logreport", + ), + ) -> Sequence[Union[CollectReport, TestReport]]: + return [x.report for x in self.getcalls(names)] + + def matchreport( + self, + inamepart: str = "", + names: Union[str, Iterable[str]] = ( + "pytest_runtest_logreport", + "pytest_collectreport", + ), + when: Optional[str] = None, + ) -> Union[CollectReport, TestReport]: + """Return a testreport whose dotted import path matches.""" + values = [] + for rep in self.getreports(names=names): + if not when and rep.when != "call" and rep.passed: + # setup/teardown passing reports - let's ignore those + continue + if when and rep.when != when: + continue + if not inamepart or inamepart in rep.nodeid.split("::"): + values.append(rep) + if not values: + raise ValueError( + "could not find test report matching %r: " + "no test reports at all!" % (inamepart,) + ) + if len(values) > 1: + raise ValueError( + "found 2 or more testreports matching {!r}: {}".format( + inamepart, values + ) + ) + return values[0] + + @overload + def getfailures( + self, + names: "Literal['pytest_collectreport']", + ) -> Sequence[CollectReport]: + ... + + @overload + def getfailures( + self, + names: "Literal['pytest_runtest_logreport']", + ) -> Sequence[TestReport]: + ... + + @overload + def getfailures( + self, + names: Union[str, Iterable[str]] = ( + "pytest_collectreport", + "pytest_runtest_logreport", + ), + ) -> Sequence[Union[CollectReport, TestReport]]: + ... + + def getfailures( + self, + names: Union[str, Iterable[str]] = ( + "pytest_collectreport", + "pytest_runtest_logreport", + ), + ) -> Sequence[Union[CollectReport, TestReport]]: + return [rep for rep in self.getreports(names) if rep.failed] + + def getfailedcollections(self) -> Sequence[CollectReport]: + return self.getfailures("pytest_collectreport") + + def listoutcomes( + self, + ) -> Tuple[ + Sequence[TestReport], + Sequence[Union[CollectReport, TestReport]], + Sequence[Union[CollectReport, TestReport]], + ]: + passed = [] + skipped = [] + failed = [] + for rep in self.getreports( + ("pytest_collectreport", "pytest_runtest_logreport") + ): + if rep.passed: + if rep.when == "call": + assert isinstance(rep, TestReport) + passed.append(rep) + elif rep.skipped: + skipped.append(rep) + else: + assert rep.failed, f"Unexpected outcome: {rep!r}" + failed.append(rep) + return passed, skipped, failed + + def countoutcomes(self) -> List[int]: + return [len(x) for x in self.listoutcomes()] + + def assertoutcome(self, passed: int = 0, skipped: int = 0, failed: int = 0) -> None: + __tracebackhide__ = True + from _pytest.pytester_assertions import assertoutcome + + outcomes = self.listoutcomes() + assertoutcome( + outcomes, + passed=passed, + skipped=skipped, + failed=failed, + ) + + def clear(self) -> None: + self.calls[:] = [] + + +@fixture +def linecomp() -> "LineComp": + """A :class: `LineComp` instance for checking that an input linearly + contains a sequence of strings.""" + return LineComp() + + +@fixture(name="LineMatcher") +def LineMatcher_fixture(request: FixtureRequest) -> Type["LineMatcher"]: + """A reference to the :class: `LineMatcher`. + + This is instantiable with a list of lines (without their trailing newlines). + This is useful for testing large texts, such as the output of commands. 
+ """ + return LineMatcher + + +@fixture +def pytester( + request: FixtureRequest, tmp_path_factory: TempPathFactory, monkeypatch: MonkeyPatch +) -> "Pytester": + """ + Facilities to write tests/configuration files, execute pytest in isolation, and match + against expected output, perfect for black-box testing of pytest plugins. + + It attempts to isolate the test run from external factors as much as possible, modifying + the current working directory to ``path`` and environment variables during initialization. + + It is particularly useful for testing plugins. It is similar to the :fixture:`tmp_path` + fixture but provides methods which aid in testing pytest itself. + """ + return Pytester(request, tmp_path_factory, monkeypatch, _ispytest=True) + + +@fixture +def _sys_snapshot() -> Generator[None, None, None]: + snappaths = SysPathsSnapshot() + snapmods = SysModulesSnapshot() + yield + snapmods.restore() + snappaths.restore() + + +@fixture +def _config_for_test() -> Generator[Config, None, None]: + from _pytest.config import get_config + + config = get_config() + yield config + config._ensure_unconfigure() # cleanup, e.g. capman closing tmpfiles. + + +# Regex to match the session duration string in the summary: "74.34s". +rex_session_duration = re.compile(r"\d+\.\d\ds") +# Regex to match all the counts and phrases in the summary line: "34 passed, 111 skipped". +rex_outcome = re.compile(r"(\d+) (\w+)") + + +@final +class RunResult: + """The result of running a command from :class:`~pytest.Pytester`.""" + + def __init__( + self, + ret: Union[int, ExitCode], + outlines: List[str], + errlines: List[str], + duration: float, + ) -> None: + try: + self.ret: Union[int, ExitCode] = ExitCode(ret) + """The return value.""" + except ValueError: + self.ret = ret + self.outlines = outlines + """List of lines captured from stdout.""" + self.errlines = errlines + """List of lines captured from stderr.""" + self.stdout = LineMatcher(outlines) + """:class:`~pytest.LineMatcher` of stdout. + + Use e.g. :func:`str(stdout) ` to reconstruct stdout, or the commonly used + :func:`stdout.fnmatch_lines() ` method. + """ + self.stderr = LineMatcher(errlines) + """:class:`~pytest.LineMatcher` of stderr.""" + self.duration = duration + """Duration in seconds.""" + + def __repr__(self) -> str: + return ( + "" + % (self.ret, len(self.stdout.lines), len(self.stderr.lines), self.duration) + ) + + def parseoutcomes(self) -> Dict[str, int]: + """Return a dictionary of outcome noun -> count from parsing the terminal + output that the test process produced. + + The returned nouns will always be in plural form:: + + ======= 1 failed, 1 passed, 1 warning, 1 error in 0.13s ==== + + Will return ``{"failed": 1, "passed": 1, "warnings": 1, "errors": 1}``. + """ + return self.parse_summary_nouns(self.outlines) + + @classmethod + def parse_summary_nouns(cls, lines) -> Dict[str, int]: + """Extract the nouns from a pytest terminal summary line. + + It always returns the plural noun for consistency:: + + ======= 1 failed, 1 passed, 1 warning, 1 error in 0.13s ==== + + Will return ``{"failed": 1, "passed": 1, "warnings": 1, "errors": 1}``. 
+ """ + for line in reversed(lines): + if rex_session_duration.search(line): + outcomes = rex_outcome.findall(line) + ret = {noun: int(count) for (count, noun) in outcomes} + break + else: + raise ValueError("Pytest terminal summary report not found") + + to_plural = { + "warning": "warnings", + "error": "errors", + } + return {to_plural.get(k, k): v for k, v in ret.items()} + + def assert_outcomes( + self, + passed: int = 0, + skipped: int = 0, + failed: int = 0, + errors: int = 0, + xpassed: int = 0, + xfailed: int = 0, + warnings: Optional[int] = None, + deselected: Optional[int] = None, + ) -> None: + """ + Assert that the specified outcomes appear with the respective + numbers (0 means it didn't occur) in the text output from a test run. + + ``warnings`` and ``deselected`` are only checked if not None. + """ + __tracebackhide__ = True + from _pytest.pytester_assertions import assert_outcomes + + outcomes = self.parseoutcomes() + assert_outcomes( + outcomes, + passed=passed, + skipped=skipped, + failed=failed, + errors=errors, + xpassed=xpassed, + xfailed=xfailed, + warnings=warnings, + deselected=deselected, + ) + + +class CwdSnapshot: + def __init__(self) -> None: + self.__saved = os.getcwd() + + def restore(self) -> None: + os.chdir(self.__saved) + + +class SysModulesSnapshot: + def __init__(self, preserve: Optional[Callable[[str], bool]] = None) -> None: + self.__preserve = preserve + self.__saved = dict(sys.modules) + + def restore(self) -> None: + if self.__preserve: + self.__saved.update( + (k, m) for k, m in sys.modules.items() if self.__preserve(k) + ) + sys.modules.clear() + sys.modules.update(self.__saved) + + +class SysPathsSnapshot: + def __init__(self) -> None: + self.__saved = list(sys.path), list(sys.meta_path) + + def restore(self) -> None: + sys.path[:], sys.meta_path[:] = self.__saved + + +@final +class Pytester: + """ + Facilities to write tests/configuration files, execute pytest in isolation, and match + against expected output, perfect for black-box testing of pytest plugins. + + It attempts to isolate the test run from external factors as much as possible, modifying + the current working directory to :attr:`path` and environment variables during initialization. + """ + + __test__ = False + + CLOSE_STDIN: "Final" = NOTSET + + class TimeoutExpired(Exception): + pass + + def __init__( + self, + request: FixtureRequest, + tmp_path_factory: TempPathFactory, + monkeypatch: MonkeyPatch, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self._request = request + self._mod_collections: WeakKeyDictionary[ + Collector, List[Union[Item, Collector]] + ] = WeakKeyDictionary() + if request.function: + name: str = request.function.__name__ + else: + name = request.node.name + self._name = name + self._path: Path = tmp_path_factory.mktemp(name, numbered=True) + #: A list of plugins to use with :py:meth:`parseconfig` and + #: :py:meth:`runpytest`. Initially this is an empty list but plugins can + #: be added to the list. The type of items to add to the list depends on + #: the method using them so refer to them for details. 
+        self.plugins: List[Union[str, _PluggyPlugin]] = []
+        self._cwd_snapshot = CwdSnapshot()
+        self._sys_path_snapshot = SysPathsSnapshot()
+        self._sys_modules_snapshot = self.__take_sys_modules_snapshot()
+        self.chdir()
+        self._request.addfinalizer(self._finalize)
+        self._method = self._request.config.getoption("--runpytest")
+        self._test_tmproot = tmp_path_factory.mktemp(f"tmp-{name}", numbered=True)
+
+        self._monkeypatch = mp = monkeypatch
+        mp.setenv("PYTEST_DEBUG_TEMPROOT", str(self._test_tmproot))
+        # Ensure no unexpected caching via tox.
+        mp.delenv("TOX_ENV_DIR", raising=False)
+        # Discard outer pytest options.
+        mp.delenv("PYTEST_ADDOPTS", raising=False)
+        # Ensure no user config is used.
+        tmphome = str(self.path)
+        mp.setenv("HOME", tmphome)
+        mp.setenv("USERPROFILE", tmphome)
+        # Do not use colors for inner runs by default.
+        mp.setenv("PY_COLORS", "0")
+
+    @property
+    def path(self) -> Path:
+        """Temporary directory path used to create files/run tests from, etc."""
+        return self._path
+
+    def __repr__(self) -> str:
+        return f"<Pytester {self.path!r}>"
+
+    def _finalize(self) -> None:
+        """
+        Clean up global state artifacts.
+
+        Some methods modify the global interpreter state and this tries to
+        clean this up. It does not remove the temporary directory however so
+        it can be looked at after the test run has finished.
+        """
+        self._sys_modules_snapshot.restore()
+        self._sys_path_snapshot.restore()
+        self._cwd_snapshot.restore()
+
+    def __take_sys_modules_snapshot(self) -> SysModulesSnapshot:
+        # Some zope modules used by twisted-related tests keep internal state
+        # and can't be deleted; we had some trouble in the past with
+        # `zope.interface` for example.
+        #
+        # Preserve readline due to https://bugs.python.org/issue41033.
+        # pexpect issues a SIGWINCH.
+        def preserve_module(name):
+            return name.startswith(("zope", "readline"))
+
+        return SysModulesSnapshot(preserve=preserve_module)
+
+    def make_hook_recorder(self, pluginmanager: PytestPluginManager) -> HookRecorder:
+        """Create a new :class:`HookRecorder` for a :class:`PytestPluginManager`."""
+        pluginmanager.reprec = reprec = HookRecorder(pluginmanager, _ispytest=True)
+        self._request.addfinalizer(reprec.finish_recording)
+        return reprec
+
+    def chdir(self) -> None:
+        """Cd into the temporary directory.
+
+        This is done automatically upon instantiation.
+        """
+        os.chdir(self.path)
+
+    def _makefile(
+        self,
+        ext: str,
+        lines: Sequence[Union[Any, bytes]],
+        files: Dict[str, str],
+        encoding: str = "utf-8",
+    ) -> Path:
+        items = list(files.items())
+
+        if ext and not ext.startswith("."):
+            raise ValueError(
+                f"pytester.makefile expects a file extension, try .{ext} instead of {ext}"
+            )
+
+        def to_text(s: Union[Any, bytes]) -> str:
+            return s.decode(encoding) if isinstance(s, bytes) else str(s)
+
+        if lines:
+            source = "\n".join(to_text(x) for x in lines)
+            basename = self._name
+            items.insert(0, (basename, source))
+
+        ret = None
+        for basename, value in items:
+            p = self.path.joinpath(basename).with_suffix(ext)
+            p.parent.mkdir(parents=True, exist_ok=True)
+            source_ = Source(value)
+            source = "\n".join(to_text(line) for line in source_.lines)
+            p.write_text(source.strip(), encoding=encoding)
+            if ret is None:
+                ret = p
+        assert ret is not None
+        return ret
+
+    def makefile(self, ext: str, *args: str, **kwargs: str) -> Path:
+        r"""Create new text file(s) in the test directory.
+
+        :param ext:
+            The extension the file(s) should use, including the dot, e.g. `.py`.
+        :param args:
+            All args are treated as strings and joined using newlines.
+            The result is written as contents to the file. The name of the
+            file is based on the test function requesting this fixture.
+        :param kwargs:
+            Each keyword is the name of a file, while the value of it will
+            be written as contents of the file.
+        :returns:
+            The first created file.
+
+        Examples:
+
+        .. code-block:: python
+
+            pytester.makefile(".txt", "line1", "line2")
+
+            pytester.makefile(".ini", pytest="[pytest]\naddopts=-rs\n")
+
+        To create binary files, use :meth:`pathlib.Path.write_bytes` directly:
+
+        .. code-block:: python
+
+            filename = pytester.path.joinpath("foo.bin")
+            filename.write_bytes(b"...")
+        """
+        return self._makefile(ext, args, kwargs)
+
+    def makeconftest(self, source: str) -> Path:
+        """Write a conftest.py file.
+
+        :param source: The contents.
+        :returns: The conftest.py file.
+        """
+        return self.makepyfile(conftest=source)
+
+    def makeini(self, source: str) -> Path:
+        """Write a tox.ini file.
+
+        :param source: The contents.
+        :returns: The tox.ini file.
+        """
+        return self.makefile(".ini", tox=source)
+
+    def getinicfg(self, source: str) -> SectionWrapper:
+        """Return the pytest section from the tox.ini config file."""
+        p = self.makeini(source)
+        return IniConfig(str(p))["pytest"]
+
+    def makepyprojecttoml(self, source: str) -> Path:
+        """Write a pyproject.toml file.
+
+        :param source: The contents.
+        :returns: The pyproject.toml file.
+
+        .. versionadded:: 6.0
+        """
+        return self.makefile(".toml", pyproject=source)
+
+    def makepyfile(self, *args, **kwargs) -> Path:
+        r"""Shortcut for .makefile() with a .py extension.
+
+        Defaults to the test name with a '.py' extension, e.g. test_foobar.py, overwriting
+        existing files.
+
+        Examples:
+
+        .. code-block:: python
+
+            def test_something(pytester):
+                # Initial file is created test_something.py.
+                pytester.makepyfile("foobar")
+                # To create multiple files, pass kwargs accordingly.
+                pytester.makepyfile(custom="foobar")
+                # At this point, both 'test_something.py' & 'custom.py' exist in the test directory.
+
+        """
+        return self._makefile(".py", args, kwargs)
+
+    def maketxtfile(self, *args, **kwargs) -> Path:
+        r"""Shortcut for .makefile() with a .txt extension.
+
+        Defaults to the test name with a '.txt' extension, e.g. test_foobar.txt, overwriting
+        existing files.
+
+        Examples:
+
+        .. code-block:: python
+
+            def test_something(pytester):
+                # Initial file is created test_something.txt.
+                pytester.maketxtfile("foobar")
+                # To create multiple files, pass kwargs accordingly.
+                pytester.maketxtfile(custom="foobar")
+                # At this point, both 'test_something.txt' & 'custom.txt' exist in the test directory.
+
+        """
+        return self._makefile(".txt", args, kwargs)
+
+    def syspathinsert(
+        self, path: Optional[Union[str, "os.PathLike[str]"]] = None
+    ) -> None:
+        """Prepend a directory to sys.path, defaults to :attr:`path`.
+
+        This is undone automatically when this object dies at the end of each
+        test.
+
+        :param path:
+            The path.
+        """
+        if path is None:
+            path = self.path
+
+        self._monkeypatch.syspath_prepend(str(path))
+
+    def mkdir(self, name: Union[str, "os.PathLike[str]"]) -> Path:
+        """Create a new (sub)directory.
+
+        :param name:
+            The name of the directory, relative to the pytester path.
+        :returns:
+            The created directory.
+        """
+        p = self.path / name
+        p.mkdir()
+        return p
+
+    def mkpydir(self, name: Union[str, "os.PathLike[str]"]) -> Path:
+        """Create a new python package.
+
+        This creates a (sub)directory with an empty ``__init__.py`` file so it
+        gets recognised as a Python package.
+ """ + p = self.path / name + p.mkdir() + p.joinpath("__init__.py").touch() + return p + + def copy_example(self, name: Optional[str] = None) -> Path: + """Copy file from project's directory into the testdir. + + :param name: + The name of the file to copy. + :return: + Path to the copied directory (inside ``self.path``). + """ + example_dir_ = self._request.config.getini("pytester_example_dir") + if example_dir_ is None: + raise ValueError("pytester_example_dir is unset, can't copy examples") + example_dir: Path = self._request.config.rootpath / example_dir_ + + for extra_element in self._request.node.iter_markers("pytester_example_path"): + assert extra_element.args + example_dir = example_dir.joinpath(*extra_element.args) + + if name is None: + func_name = self._name + maybe_dir = example_dir / func_name + maybe_file = example_dir / (func_name + ".py") + + if maybe_dir.is_dir(): + example_path = maybe_dir + elif maybe_file.is_file(): + example_path = maybe_file + else: + raise LookupError( + f"{func_name} can't be found as module or package in {example_dir}" + ) + else: + example_path = example_dir.joinpath(name) + + if example_path.is_dir() and not example_path.joinpath("__init__.py").is_file(): + copytree(example_path, self.path) + return self.path + elif example_path.is_file(): + result = self.path.joinpath(example_path.name) + shutil.copy(example_path, result) + return result + else: + raise LookupError( + f'example "{example_path}" is not found as a file or directory' + ) + + def getnode( + self, config: Config, arg: Union[str, "os.PathLike[str]"] + ) -> Union[Collector, Item]: + """Get the collection node of a file. + + :param config: + A pytest config. + See :py:meth:`parseconfig` and :py:meth:`parseconfigure` for creating it. + :param arg: + Path to the file. + :returns: + The node. + """ + session = Session.from_config(config) + assert "::" not in str(arg) + p = Path(os.path.abspath(arg)) + config.hook.pytest_sessionstart(session=session) + res = session.perform_collect([str(p)], genitems=False)[0] + config.hook.pytest_sessionfinish(session=session, exitstatus=ExitCode.OK) + return res + + def getpathnode( + self, path: Union[str, "os.PathLike[str]"] + ) -> Union[Collector, Item]: + """Return the collection node of a file. + + This is like :py:meth:`getnode` but uses :py:meth:`parseconfigure` to + create the (configured) pytest Config instance. + + :param path: + Path to the file. + :returns: + The node. + """ + path = Path(path) + config = self.parseconfigure(path) + session = Session.from_config(config) + x = bestrelpath(session.path, path) + config.hook.pytest_sessionstart(session=session) + res = session.perform_collect([x], genitems=False)[0] + config.hook.pytest_sessionfinish(session=session, exitstatus=ExitCode.OK) + return res + + def genitems(self, colitems: Sequence[Union[Item, Collector]]) -> List[Item]: + """Generate all test items from a collection node. + + This recurses into the collection node and returns a list of all the + test items contained within. + + :param colitems: + The collection nodes. + :returns: + The collected items. + """ + session = colitems[0].session + result: List[Item] = [] + for colitem in colitems: + result.extend(session.genitems(colitem)) + return result + + def runitem(self, source: str) -> Any: + """Run the "test_func" Item. + + The calling test instance (class containing the test method) must + provide a ``.getrunner()`` method which should return a runner which + can run the test protocol for a single item, e.g. 
+ :py:func:`_pytest.runner.runtestprotocol`. + """ + # used from runner functional tests + item = self.getitem(source) + # the test class where we are called from wants to provide the runner + testclassinstance = self._request.instance + runner = testclassinstance.getrunner() + return runner(item) + + def inline_runsource(self, source: str, *cmdlineargs) -> HookRecorder: + """Run a test module in process using ``pytest.main()``. + + This run writes "source" into a temporary file and runs + ``pytest.main()`` on it, returning a :py:class:`HookRecorder` instance + for the result. + + :param source: The source code of the test module. + :param cmdlineargs: Any extra command line arguments to use. + """ + p = self.makepyfile(source) + values = list(cmdlineargs) + [p] + return self.inline_run(*values) + + def inline_genitems(self, *args) -> Tuple[List[Item], HookRecorder]: + """Run ``pytest.main(['--collectonly'])`` in-process. + + Runs the :py:func:`pytest.main` function to run all of pytest inside + the test process itself like :py:meth:`inline_run`, but returns a + tuple of the collected items and a :py:class:`HookRecorder` instance. + """ + rec = self.inline_run("--collect-only", *args) + items = [x.item for x in rec.getcalls("pytest_itemcollected")] + return items, rec + + def inline_run( + self, + *args: Union[str, "os.PathLike[str]"], + plugins=(), + no_reraise_ctrlc: bool = False, + ) -> HookRecorder: + """Run ``pytest.main()`` in-process, returning a HookRecorder. + + Runs the :py:func:`pytest.main` function to run all of pytest inside + the test process itself. This means it can return a + :py:class:`HookRecorder` instance which gives more detailed results + from that run than can be done by matching stdout/stderr from + :py:meth:`runpytest`. + + :param args: + Command line arguments to pass to :py:func:`pytest.main`. + :param plugins: + Extra plugin instances the ``pytest.main()`` instance should use. + :param no_reraise_ctrlc: + Typically we reraise keyboard interrupts from the child run. If + True, the KeyboardInterrupt exception is captured. + """ + # (maybe a cpython bug?) the importlib cache sometimes isn't updated + # properly between file creation and inline_run (especially if imports + # are interspersed with file creation) + importlib.invalidate_caches() + + plugins = list(plugins) + finalizers = [] + try: + # Any sys.module or sys.path changes done while running pytest + # inline should be reverted after the test run completes to avoid + # clashing with later inline tests run within the same pytest test, + # e.g. just because they use matching test module names. + finalizers.append(self.__take_sys_modules_snapshot().restore) + finalizers.append(SysPathsSnapshot().restore) + + # Important note: + # - our tests should not leave any other references/registrations + # laying around other than possibly loaded test modules + # referenced from sys.modules, as nothing will clean those up + # automatically + + rec = [] + + class Collect: + def pytest_configure(x, config: Config) -> None: + rec.append(self.make_hook_recorder(config.pluginmanager)) + + plugins.append(Collect()) + ret = main([str(x) for x in args], plugins=plugins) + if len(rec) == 1: + reprec = rec.pop() + else: + + class reprec: # type: ignore + pass + + reprec.ret = ret + + # Typically we reraise keyboard interrupts from the child run + # because it's our user requesting interruption of the testing. 
+ if ret == ExitCode.INTERRUPTED and not no_reraise_ctrlc: + calls = reprec.getcalls("pytest_keyboard_interrupt") + if calls and calls[-1].excinfo.type == KeyboardInterrupt: + raise KeyboardInterrupt() + return reprec + finally: + for finalizer in finalizers: + finalizer() + + def runpytest_inprocess( + self, *args: Union[str, "os.PathLike[str]"], **kwargs: Any + ) -> RunResult: + """Return result of running pytest in-process, providing a similar + interface to what self.runpytest() provides.""" + syspathinsert = kwargs.pop("syspathinsert", False) + + if syspathinsert: + self.syspathinsert() + now = timing.time() + capture = _get_multicapture("sys") + capture.start_capturing() + try: + try: + reprec = self.inline_run(*args, **kwargs) + except SystemExit as e: + ret = e.args[0] + try: + ret = ExitCode(e.args[0]) + except ValueError: + pass + + class reprec: # type: ignore + ret = ret + + except Exception: + traceback.print_exc() + + class reprec: # type: ignore + ret = ExitCode(3) + + finally: + out, err = capture.readouterr() + capture.stop_capturing() + sys.stdout.write(out) + sys.stderr.write(err) + + assert reprec.ret is not None + res = RunResult( + reprec.ret, out.splitlines(), err.splitlines(), timing.time() - now + ) + res.reprec = reprec # type: ignore + return res + + def runpytest( + self, *args: Union[str, "os.PathLike[str]"], **kwargs: Any + ) -> RunResult: + """Run pytest inline or in a subprocess, depending on the command line + option "--runpytest" and return a :py:class:`~pytest.RunResult`.""" + new_args = self._ensure_basetemp(args) + if self._method == "inprocess": + return self.runpytest_inprocess(*new_args, **kwargs) + elif self._method == "subprocess": + return self.runpytest_subprocess(*new_args, **kwargs) + raise RuntimeError(f"Unrecognized runpytest option: {self._method}") + + def _ensure_basetemp( + self, args: Sequence[Union[str, "os.PathLike[str]"]] + ) -> List[Union[str, "os.PathLike[str]"]]: + new_args = list(args) + for x in new_args: + if str(x).startswith("--basetemp"): + break + else: + new_args.append("--basetemp=%s" % self.path.parent.joinpath("basetemp")) + return new_args + + def parseconfig(self, *args: Union[str, "os.PathLike[str]"]) -> Config: + """Return a new pytest :class:`pytest.Config` instance from given + commandline args. + + This invokes the pytest bootstrapping code in _pytest.config to create a + new :py:class:`pytest.PytestPluginManager` and call the + :hook:`pytest_cmdline_parse` hook to create a new :class:`pytest.Config` + instance. + + If :attr:`plugins` has been populated they should be plugin modules + to be registered with the plugin manager. + """ + import _pytest.config + + new_args = self._ensure_basetemp(args) + new_args = [str(x) for x in new_args] + + config = _pytest.config._prepareconfig(new_args, self.plugins) # type: ignore[arg-type] + # we don't know what the test will do with this half-setup config + # object and thus we make sure it gets unconfigured properly in any + # case (otherwise capturing could still be active, for example) + self._request.addfinalizer(config._ensure_unconfigure) + return config + + def parseconfigure(self, *args: Union[str, "os.PathLike[str]"]) -> Config: + """Return a new pytest configured Config instance. + + Returns a new :py:class:`pytest.Config` instance like + :py:meth:`parseconfig`, but also calls the :hook:`pytest_configure` + hook. 
+ """ + config = self.parseconfig(*args) + config._do_configure() + return config + + def getitem( + self, source: Union[str, "os.PathLike[str]"], funcname: str = "test_func" + ) -> Item: + """Return the test item for a test function. + + Writes the source to a python file and runs pytest's collection on + the resulting module, returning the test item for the requested + function name. + + :param source: + The module source. + :param funcname: + The name of the test function for which to return a test item. + :returns: + The test item. + """ + items = self.getitems(source) + for item in items: + if item.name == funcname: + return item + assert 0, "{!r} item not found in module:\n{}\nitems: {}".format( + funcname, source, items + ) + + def getitems(self, source: Union[str, "os.PathLike[str]"]) -> List[Item]: + """Return all test items collected from the module. + + Writes the source to a Python file and runs pytest's collection on + the resulting module, returning all test items contained within. + """ + modcol = self.getmodulecol(source) + return self.genitems([modcol]) + + def getmodulecol( + self, + source: Union[str, "os.PathLike[str]"], + configargs=(), + *, + withinit: bool = False, + ): + """Return the module collection node for ``source``. + + Writes ``source`` to a file using :py:meth:`makepyfile` and then + runs the pytest collection on it, returning the collection node for the + test module. + + :param source: + The source code of the module to collect. + + :param configargs: + Any extra arguments to pass to :py:meth:`parseconfigure`. + + :param withinit: + Whether to also write an ``__init__.py`` file to the same + directory to ensure it is a package. + """ + if isinstance(source, os.PathLike): + path = self.path.joinpath(source) + assert not withinit, "not supported for paths" + else: + kw = {self._name: str(source)} + path = self.makepyfile(**kw) + if withinit: + self.makepyfile(__init__="#") + self.config = config = self.parseconfigure(path, *configargs) + return self.getnode(config, path) + + def collect_by_name( + self, modcol: Collector, name: str + ) -> Optional[Union[Item, Collector]]: + """Return the collection node for name from the module collection. + + Searches a module collection node for a collection node matching the + given name. + + :param modcol: A module collection node; see :py:meth:`getmodulecol`. + :param name: The name of the node to return. + """ + if modcol not in self._mod_collections: + self._mod_collections[modcol] = list(modcol.collect()) + for colitem in self._mod_collections[modcol]: + if colitem.name == name: + return colitem + return None + + def popen( + self, + cmdargs: Sequence[Union[str, "os.PathLike[str]"]], + stdout: Union[int, TextIO] = subprocess.PIPE, + stderr: Union[int, TextIO] = subprocess.PIPE, + stdin: Union[NotSetType, bytes, IO[Any], int] = CLOSE_STDIN, + **kw, + ): + """Invoke :py:class:`subprocess.Popen`. + + Calls :py:class:`subprocess.Popen` making sure the current working + directory is in ``PYTHONPATH``. + + You probably want to use :py:meth:`run` instead. 
+ """ + env = os.environ.copy() + env["PYTHONPATH"] = os.pathsep.join( + filter(None, [os.getcwd(), env.get("PYTHONPATH", "")]) + ) + kw["env"] = env + + if stdin is self.CLOSE_STDIN: + kw["stdin"] = subprocess.PIPE + elif isinstance(stdin, bytes): + kw["stdin"] = subprocess.PIPE + else: + kw["stdin"] = stdin + + popen = subprocess.Popen(cmdargs, stdout=stdout, stderr=stderr, **kw) + if stdin is self.CLOSE_STDIN: + assert popen.stdin is not None + popen.stdin.close() + elif isinstance(stdin, bytes): + assert popen.stdin is not None + popen.stdin.write(stdin) + + return popen + + def run( + self, + *cmdargs: Union[str, "os.PathLike[str]"], + timeout: Optional[float] = None, + stdin: Union[NotSetType, bytes, IO[Any], int] = CLOSE_STDIN, + ) -> RunResult: + """Run a command with arguments. + + Run a process using :py:class:`subprocess.Popen` saving the stdout and + stderr. + + :param cmdargs: + The sequence of arguments to pass to :py:class:`subprocess.Popen`, + with path-like objects being converted to :py:class:`str` + automatically. + :param timeout: + The period in seconds after which to timeout and raise + :py:class:`Pytester.TimeoutExpired`. + :param stdin: + Optional standard input. + + - If it is :py:attr:`CLOSE_STDIN` (Default), then this method calls + :py:class:`subprocess.Popen` with ``stdin=subprocess.PIPE``, and + the standard input is closed immediately after the new command is + started. + + - If it is of type :py:class:`bytes`, these bytes are sent to the + standard input of the command. + + - Otherwise, it is passed through to :py:class:`subprocess.Popen`. + For further information in this case, consult the document of the + ``stdin`` parameter in :py:class:`subprocess.Popen`. + :returns: + The result. + """ + __tracebackhide__ = True + + cmdargs = tuple(os.fspath(arg) for arg in cmdargs) + p1 = self.path.joinpath("stdout") + p2 = self.path.joinpath("stderr") + print("running:", *cmdargs) + print(" in:", Path.cwd()) + + with p1.open("w", encoding="utf8") as f1, p2.open("w", encoding="utf8") as f2: + now = timing.time() + popen = self.popen( + cmdargs, + stdin=stdin, + stdout=f1, + stderr=f2, + close_fds=(sys.platform != "win32"), + ) + if popen.stdin is not None: + popen.stdin.close() + + def handle_timeout() -> None: + __tracebackhide__ = True + + timeout_message = ( + "{seconds} second timeout expired running:" + " {command}".format(seconds=timeout, command=cmdargs) + ) + + popen.kill() + popen.wait() + raise self.TimeoutExpired(timeout_message) + + if timeout is None: + ret = popen.wait() + else: + try: + ret = popen.wait(timeout) + except subprocess.TimeoutExpired: + handle_timeout() + + with p1.open(encoding="utf8") as f1, p2.open(encoding="utf8") as f2: + out = f1.read().splitlines() + err = f2.read().splitlines() + + self._dump_lines(out, sys.stdout) + self._dump_lines(err, sys.stderr) + + with contextlib.suppress(ValueError): + ret = ExitCode(ret) + return RunResult(ret, out, err, timing.time() - now) + + def _dump_lines(self, lines, fp): + try: + for line in lines: + print(line, file=fp) + except UnicodeEncodeError: + print(f"couldn't print to {fp} because of encoding") + + def _getpytestargs(self) -> Tuple[str, ...]: + return sys.executable, "-mpytest" + + def runpython(self, script: "os.PathLike[str]") -> RunResult: + """Run a python script using sys.executable as interpreter.""" + return self.run(sys.executable, script) + + def runpython_c(self, command: str) -> RunResult: + """Run ``python -c "command"``.""" + return self.run(sys.executable, "-c", command) + + 
def runpytest_subprocess(
+        self, *args: Union[str, "os.PathLike[str]"], timeout: Optional[float] = None
+    ) -> RunResult:
+        """Run pytest as a subprocess with given arguments.
+
+        Any plugins added to the :py:attr:`plugins` list will be added using the
+        ``-p`` command line option. Additionally ``--basetemp`` is used to put
+        any temporary files and directories in a numbered directory prefixed
+        with "runpytest-" to not conflict with the normal numbered pytest
+        location for temporary files and directories.
+
+        :param args:
+            The sequence of arguments to pass to the pytest subprocess.
+        :param timeout:
+            The period in seconds after which to timeout and raise
+            :py:class:`Pytester.TimeoutExpired`.
+        :returns:
+            The result.
+        """
+        __tracebackhide__ = True
+        p = make_numbered_dir(root=self.path, prefix="runpytest-", mode=0o700)
+        args = ("--basetemp=%s" % p,) + args
+        plugins = [x for x in self.plugins if isinstance(x, str)]
+        if plugins:
+            args = ("-p", plugins[0]) + args
+        args = self._getpytestargs() + args
+        return self.run(*args, timeout=timeout)
+
+    def spawn_pytest(
+        self, string: str, expect_timeout: float = 10.0
+    ) -> "pexpect.spawn":
+        """Run pytest using pexpect.
+
+        This makes sure to use the right pytest and sets up the temporary
+        directory locations.
+
+        The pexpect child is returned.
+        """
+        basetemp = self.path / "temp-pexpect"
+        basetemp.mkdir(mode=0o700)
+        invoke = " ".join(map(str, self._getpytestargs()))
+        cmd = f"{invoke} --basetemp={basetemp} {string}"
+        return self.spawn(cmd, expect_timeout=expect_timeout)
+
+    def spawn(self, cmd: str, expect_timeout: float = 10.0) -> "pexpect.spawn":
+        """Run a command using pexpect.
+
+        The pexpect child is returned.
+        """
+        pexpect = importorskip("pexpect", "3.0")
+        if hasattr(sys, "pypy_version_info") and "64" in platform.machine():
+            skip("pypy-64 bit not supported")
+        if not hasattr(pexpect, "spawn"):
+            skip("pexpect.spawn not available")
+        logfile = self.path.joinpath("spawn.out").open("wb")
+
+        child = pexpect.spawn(cmd, logfile=logfile, timeout=expect_timeout)
+        self._request.addfinalizer(logfile.close)
+        return child
+
+
+class LineComp:
+    def __init__(self) -> None:
+        self.stringio = StringIO()
+        """:class:`python:io.StringIO()` instance used for input."""
+
+    def assert_contains_lines(self, lines2: Sequence[str]) -> None:
+        """Assert that ``lines2`` are contained (linearly) in :attr:`stringio`'s value.
+
+        Lines are matched using :func:`LineMatcher.fnmatch_lines <pytest.LineMatcher.fnmatch_lines>`.
+        """
+        __tracebackhide__ = True
+        val = self.stringio.getvalue()
+        self.stringio.truncate(0)
+        self.stringio.seek(0)
+        lines1 = val.split("\n")
+        LineMatcher(lines1).fnmatch_lines(lines2)
+
+
+class LineMatcher:
+    """Flexible matching of text.
+
+    This is a convenience class to test large texts like the output of
+    commands.
+
+    The constructor takes a list of lines without their trailing newlines, i.e.
+    ``text.splitlines()``.
+    """
+
+    def __init__(self, lines: List[str]) -> None:
+        self.lines = lines
+        self._log_output: List[str] = []
+
+    def __str__(self) -> str:
+        """Return the entire original text.
+
+        .. versionadded:: 6.2
+            You can use :meth:`str` in older versions.
+ """ + return "\n".join(self.lines) + + def _getlines(self, lines2: Union[str, Sequence[str], Source]) -> Sequence[str]: + if isinstance(lines2, str): + lines2 = Source(lines2) + if isinstance(lines2, Source): + lines2 = lines2.strip().lines + return lines2 + + def fnmatch_lines_random(self, lines2: Sequence[str]) -> None: + """Check lines exist in the output in any order (using :func:`python:fnmatch.fnmatch`).""" + __tracebackhide__ = True + self._match_lines_random(lines2, fnmatch) + + def re_match_lines_random(self, lines2: Sequence[str]) -> None: + """Check lines exist in the output in any order (using :func:`python:re.match`).""" + __tracebackhide__ = True + self._match_lines_random(lines2, lambda name, pat: bool(re.match(pat, name))) + + def _match_lines_random( + self, lines2: Sequence[str], match_func: Callable[[str, str], bool] + ) -> None: + __tracebackhide__ = True + lines2 = self._getlines(lines2) + for line in lines2: + for x in self.lines: + if line == x or match_func(x, line): + self._log("matched: ", repr(line)) + break + else: + msg = "line %r not found in output" % line + self._log(msg) + self._fail(msg) + + def get_lines_after(self, fnline: str) -> Sequence[str]: + """Return all lines following the given line in the text. + + The given line can contain glob wildcards. + """ + for i, line in enumerate(self.lines): + if fnline == line or fnmatch(line, fnline): + return self.lines[i + 1 :] + raise ValueError("line %r not found in output" % fnline) + + def _log(self, *args) -> None: + self._log_output.append(" ".join(str(x) for x in args)) + + @property + def _log_text(self) -> str: + return "\n".join(self._log_output) + + def fnmatch_lines( + self, lines2: Sequence[str], *, consecutive: bool = False + ) -> None: + """Check lines exist in the output (using :func:`python:fnmatch.fnmatch`). + + The argument is a list of lines which have to match and can use glob + wildcards. If they do not match a pytest.fail() is called. The + matches and non-matches are also shown as part of the error message. + + :param lines2: String patterns to match. + :param consecutive: Match lines consecutively? + """ + __tracebackhide__ = True + self._match_lines(lines2, fnmatch, "fnmatch", consecutive=consecutive) + + def re_match_lines( + self, lines2: Sequence[str], *, consecutive: bool = False + ) -> None: + """Check lines exist in the output (using :func:`python:re.match`). + + The argument is a list of lines which have to match using ``re.match``. + If they do not match a pytest.fail() is called. + + The matches and non-matches are also shown as part of the error message. + + :param lines2: string patterns to match. + :param consecutive: match lines consecutively? + """ + __tracebackhide__ = True + self._match_lines( + lines2, + lambda name, pat: bool(re.match(pat, name)), + "re.match", + consecutive=consecutive, + ) + + def _match_lines( + self, + lines2: Sequence[str], + match_func: Callable[[str, str], bool], + match_nickname: str, + *, + consecutive: bool = False, + ) -> None: + """Underlying implementation of ``fnmatch_lines`` and ``re_match_lines``. + + :param Sequence[str] lines2: + List of string patterns to match. The actual format depends on + ``match_func``. + :param match_func: + A callable ``match_func(line, pattern)`` where line is the + captured line from stdout/stderr and pattern is the matching + pattern. + :param str match_nickname: + The nickname for the match function that will be logged to stdout + when a match occurs. + :param consecutive: + Match lines consecutively? 
+ """ + if not isinstance(lines2, collections.abc.Sequence): + raise TypeError(f"invalid type for lines2: {type(lines2).__name__}") + lines2 = self._getlines(lines2) + lines1 = self.lines[:] + extralines = [] + __tracebackhide__ = True + wnick = len(match_nickname) + 1 + started = False + for line in lines2: + nomatchprinted = False + while lines1: + nextline = lines1.pop(0) + if line == nextline: + self._log("exact match:", repr(line)) + started = True + break + elif match_func(nextline, line): + self._log("%s:" % match_nickname, repr(line)) + self._log( + "{:>{width}}".format("with:", width=wnick), repr(nextline) + ) + started = True + break + else: + if consecutive and started: + msg = f"no consecutive match: {line!r}" + self._log(msg) + self._log( + "{:>{width}}".format("with:", width=wnick), repr(nextline) + ) + self._fail(msg) + if not nomatchprinted: + self._log( + "{:>{width}}".format("nomatch:", width=wnick), repr(line) + ) + nomatchprinted = True + self._log("{:>{width}}".format("and:", width=wnick), repr(nextline)) + extralines.append(nextline) + else: + msg = f"remains unmatched: {line!r}" + self._log(msg) + self._fail(msg) + self._log_output = [] + + def no_fnmatch_line(self, pat: str) -> None: + """Ensure captured lines do not match the given pattern, using ``fnmatch.fnmatch``. + + :param str pat: The pattern to match lines. + """ + __tracebackhide__ = True + self._no_match_line(pat, fnmatch, "fnmatch") + + def no_re_match_line(self, pat: str) -> None: + """Ensure captured lines do not match the given pattern, using ``re.match``. + + :param str pat: The regular expression to match lines. + """ + __tracebackhide__ = True + self._no_match_line( + pat, lambda name, pat: bool(re.match(pat, name)), "re.match" + ) + + def _no_match_line( + self, pat: str, match_func: Callable[[str, str], bool], match_nickname: str + ) -> None: + """Ensure captured lines does not have a the given pattern, using ``fnmatch.fnmatch``. + + :param str pat: The pattern to match lines. + """ + __tracebackhide__ = True + nomatch_printed = False + wnick = len(match_nickname) + 1 + for line in self.lines: + if match_func(line, pat): + msg = f"{match_nickname}: {pat!r}" + self._log(msg) + self._log("{:>{width}}".format("with:", width=wnick), repr(line)) + self._fail(msg) + else: + if not nomatch_printed: + self._log("{:>{width}}".format("nomatch:", width=wnick), repr(pat)) + nomatch_printed = True + self._log("{:>{width}}".format("and:", width=wnick), repr(line)) + self._log_output = [] + + def _fail(self, msg: str) -> None: + __tracebackhide__ = True + log_text = self._log_text + self._log_output = [] + fail(log_text) + + def str(self) -> str: + """Return the entire original text.""" + return str(self) diff --git a/venv/lib/python3.11/site-packages/_pytest/pytester_assertions.py b/venv/lib/python3.11/site-packages/_pytest/pytester_assertions.py new file mode 100644 index 0000000..657e4db --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/pytester_assertions.py @@ -0,0 +1,75 @@ +"""Helper plugin for pytester; should not be loaded on its own.""" +# This plugin contains assertions used by pytester. pytester cannot +# contain them itself, since it is imported by the `pytest` module, +# hence cannot be subject to assertion rewriting, which requires a +# module to not be already imported. 
+from typing import Dict +from typing import Optional +from typing import Sequence +from typing import Tuple +from typing import Union + +from _pytest.reports import CollectReport +from _pytest.reports import TestReport + + +def assertoutcome( + outcomes: Tuple[ + Sequence[TestReport], + Sequence[Union[CollectReport, TestReport]], + Sequence[Union[CollectReport, TestReport]], + ], + passed: int = 0, + skipped: int = 0, + failed: int = 0, +) -> None: + __tracebackhide__ = True + + realpassed, realskipped, realfailed = outcomes + obtained = { + "passed": len(realpassed), + "skipped": len(realskipped), + "failed": len(realfailed), + } + expected = {"passed": passed, "skipped": skipped, "failed": failed} + assert obtained == expected, outcomes + + +def assert_outcomes( + outcomes: Dict[str, int], + passed: int = 0, + skipped: int = 0, + failed: int = 0, + errors: int = 0, + xpassed: int = 0, + xfailed: int = 0, + warnings: Optional[int] = None, + deselected: Optional[int] = None, +) -> None: + """Assert that the specified outcomes appear with the respective + numbers (0 means it didn't occur) in the text output from a test run.""" + __tracebackhide__ = True + + obtained = { + "passed": outcomes.get("passed", 0), + "skipped": outcomes.get("skipped", 0), + "failed": outcomes.get("failed", 0), + "errors": outcomes.get("errors", 0), + "xpassed": outcomes.get("xpassed", 0), + "xfailed": outcomes.get("xfailed", 0), + } + expected = { + "passed": passed, + "skipped": skipped, + "failed": failed, + "errors": errors, + "xpassed": xpassed, + "xfailed": xfailed, + } + if warnings is not None: + obtained["warnings"] = outcomes.get("warnings", 0) + expected["warnings"] = warnings + if deselected is not None: + obtained["deselected"] = outcomes.get("deselected", 0) + expected["deselected"] = deselected + assert obtained == expected diff --git a/venv/lib/python3.11/site-packages/_pytest/python.py b/venv/lib/python3.11/site-packages/_pytest/python.py new file mode 100644 index 0000000..ad847c8 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/python.py @@ -0,0 +1,1841 @@ +"""Python test discovery, setup and run of test functions.""" +import dataclasses +import enum +import fnmatch +import inspect +import itertools +import os +import sys +import types +import warnings +from collections import Counter +from collections import defaultdict +from functools import partial +from pathlib import Path +from typing import Any +from typing import Callable +from typing import Dict +from typing import Generator +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Mapping +from typing import Optional +from typing import Pattern +from typing import Sequence +from typing import Set +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union + +import _pytest +from _pytest import fixtures +from _pytest import nodes +from _pytest._code import filter_traceback +from _pytest._code import getfslineno +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import TerminalRepr +from _pytest._code.code import Traceback +from _pytest._io import TerminalWriter +from _pytest._io.saferepr import saferepr +from _pytest.compat import ascii_escaped +from _pytest.compat import assert_never +from _pytest.compat import final +from _pytest.compat import get_default_arg_names +from _pytest.compat import get_real_func +from _pytest.compat import getimfunc +from _pytest.compat import getlocation +from _pytest.compat import is_async_function +from 
_pytest.compat import is_generator +from _pytest.compat import LEGACY_PATH +from _pytest.compat import NOTSET +from _pytest.compat import safe_getattr +from _pytest.compat import safe_isclass +from _pytest.compat import STRING_TYPES +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.deprecated import INSTANCE_COLLECTOR +from _pytest.deprecated import NOSE_SUPPORT_METHOD +from _pytest.fixtures import FuncFixtureInfo +from _pytest.main import Session +from _pytest.mark import MARK_GEN +from _pytest.mark import ParameterSet +from _pytest.mark.structures import get_unpacked_marks +from _pytest.mark.structures import Mark +from _pytest.mark.structures import MarkDecorator +from _pytest.mark.structures import normalize_mark_list +from _pytest.outcomes import fail +from _pytest.outcomes import skip +from _pytest.pathlib import bestrelpath +from _pytest.pathlib import fnmatch_ex +from _pytest.pathlib import import_path +from _pytest.pathlib import ImportPathMismatchError +from _pytest.pathlib import parts +from _pytest.pathlib import visit +from _pytest.scope import Scope +from _pytest.warning_types import PytestCollectionWarning +from _pytest.warning_types import PytestReturnNotNoneWarning +from _pytest.warning_types import PytestUnhandledCoroutineWarning + +if TYPE_CHECKING: + from typing_extensions import Literal + + from _pytest.scope import _ScopeName + + +_PYTEST_DIR = Path(_pytest.__file__).parent + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group.addoption( + "--fixtures", + "--funcargs", + action="store_true", + dest="showfixtures", + default=False, + help="Show available fixtures, sorted by plugin appearance " + "(fixtures with leading '_' are only shown with '-v')", + ) + group.addoption( + "--fixtures-per-test", + action="store_true", + dest="show_fixtures_per_test", + default=False, + help="Show fixtures per test", + ) + parser.addini( + "python_files", + type="args", + # NOTE: default is also used in AssertionRewritingHook. + default=["test_*.py", "*_test.py"], + help="Glob-style file patterns for Python test module discovery", + ) + parser.addini( + "python_classes", + type="args", + default=["Test"], + help="Prefixes or glob names for Python test class discovery", + ) + parser.addini( + "python_functions", + type="args", + default=["test"], + help="Prefixes or glob names for Python test function and method discovery", + ) + parser.addini( + "disable_test_id_escaping_and_forfeit_all_rights_to_community_support", + type="bool", + default=False, + help="Disable string escape non-ASCII characters, might cause unwanted " + "side effects(use at your own risk)", + ) + + +def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]: + if config.option.showfixtures: + showfixtures(config) + return 0 + if config.option.show_fixtures_per_test: + show_fixtures_per_test(config) + return 0 + return None + + +def pytest_generate_tests(metafunc: "Metafunc") -> None: + for marker in metafunc.definition.iter_markers(name="parametrize"): + metafunc.parametrize(*marker.args, **marker.kwargs, _param_mark=marker) + + +def pytest_configure(config: Config) -> None: + config.addinivalue_line( + "markers", + "parametrize(argnames, argvalues): call a test function multiple " + "times passing in different arguments in turn. 
argvalues generally " + "needs to be a list of values if argnames specifies only one name " + "or a list of tuples of values if argnames specifies multiple names. " + "Example: @parametrize('arg1', [1,2]) would lead to two calls of the " + "decorated test function, one with arg1=1 and another with arg1=2." + "see https://docs.pytest.org/en/stable/how-to/parametrize.html for more info " + "and examples.", + ) + config.addinivalue_line( + "markers", + "usefixtures(fixturename1, fixturename2, ...): mark tests as needing " + "all of the specified fixtures. see " + "https://docs.pytest.org/en/stable/explanation/fixtures.html#usefixtures ", + ) + + +def async_warn_and_skip(nodeid: str) -> None: + msg = "async def functions are not natively supported and have been skipped.\n" + msg += ( + "You need to install a suitable plugin for your async framework, for example:\n" + ) + msg += " - anyio\n" + msg += " - pytest-asyncio\n" + msg += " - pytest-tornasync\n" + msg += " - pytest-trio\n" + msg += " - pytest-twisted" + warnings.warn(PytestUnhandledCoroutineWarning(msg.format(nodeid))) + skip(reason="async def function and no async plugin installed (see warnings)") + + +@hookimpl(trylast=True) +def pytest_pyfunc_call(pyfuncitem: "Function") -> Optional[object]: + testfunction = pyfuncitem.obj + if is_async_function(testfunction): + async_warn_and_skip(pyfuncitem.nodeid) + funcargs = pyfuncitem.funcargs + testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames} + result = testfunction(**testargs) + if hasattr(result, "__await__") or hasattr(result, "__aiter__"): + async_warn_and_skip(pyfuncitem.nodeid) + elif result is not None: + warnings.warn( + PytestReturnNotNoneWarning( + f"Expected None, but {pyfuncitem.nodeid} returned {result!r}, which will be an error in a " + "future version of pytest. Did you mean to use `assert` instead of `return`?" + ) + ) + return True + + +def pytest_collect_file(file_path: Path, parent: nodes.Collector) -> Optional["Module"]: + if file_path.suffix == ".py": + if not parent.session.isinitpath(file_path): + if not path_matches_patterns( + file_path, parent.config.getini("python_files") + ["__init__.py"] + ): + return None + ihook = parent.session.gethookproxy(file_path) + module: Module = ihook.pytest_pycollect_makemodule( + module_path=file_path, parent=parent + ) + return module + return None + + +def path_matches_patterns(path: Path, patterns: Iterable[str]) -> bool: + """Return whether path matches any of the patterns in the list of globs given.""" + return any(fnmatch_ex(pattern, path) for pattern in patterns) + + +def pytest_pycollect_makemodule(module_path: Path, parent) -> "Module": + if module_path.name == "__init__.py": + pkg: Package = Package.from_parent(parent, path=module_path) + return pkg + mod: Module = Module.from_parent(parent, path=module_path) + return mod + + +@hookimpl(trylast=True) +def pytest_pycollect_makeitem( + collector: Union["Module", "Class"], name: str, obj: object +) -> Union[None, nodes.Item, nodes.Collector, List[Union[nodes.Item, nodes.Collector]]]: + assert isinstance(collector, (Class, Module)), type(collector) + # Nothing was collected elsewhere, let's do it here. + if safe_isclass(obj): + if collector.istestclass(obj, name): + klass: Class = Class.from_parent(collector, name=name, obj=obj) + return klass + elif collector.istestfunction(obj, name): + # mock seems to store unbound methods (issue473), normalize it. 
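+        # For illustration: a module attribute such as
+        # ``test_client = SessionFactory()`` (hypothetical) is callable and
+        # matches the "test" prefix, but is not a plain function, so it ends
+        # up in the PytestCollectionWarning branch below rather than being
+        # collected.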
+ obj = getattr(obj, "__func__", obj) + # We need to try and unwrap the function if it's a functools.partial + # or a functools.wrapped. + # We mustn't if it's been wrapped with mock.patch (python 2 only). + if not (inspect.isfunction(obj) or inspect.isfunction(get_real_func(obj))): + filename, lineno = getfslineno(obj) + warnings.warn_explicit( + message=PytestCollectionWarning( + "cannot collect %r because it is not a function." % name + ), + category=None, + filename=str(filename), + lineno=lineno + 1, + ) + elif getattr(obj, "__test__", True): + if is_generator(obj): + res: Function = Function.from_parent(collector, name=name) + reason = "yield tests were removed in pytest 4.0 - {name} will be ignored".format( + name=name + ) + res.add_marker(MARK_GEN.xfail(run=False, reason=reason)) + res.warn(PytestCollectionWarning(reason)) + return res + else: + return list(collector._genfunctions(name, obj)) + return None + + +class PyobjMixin(nodes.Node): + """this mix-in inherits from Node to carry over the typing information + + as its intended to always mix in before a node + its position in the mro is unaffected""" + + _ALLOW_MARKERS = True + + @property + def module(self): + """Python module object this node was collected from (can be None).""" + node = self.getparent(Module) + return node.obj if node is not None else None + + @property + def cls(self): + """Python class object this node was collected from (can be None).""" + node = self.getparent(Class) + return node.obj if node is not None else None + + @property + def instance(self): + """Python instance object the function is bound to. + + Returns None if not a test method, e.g. for a standalone test function, + a staticmethod, a class or a module. + """ + node = self.getparent(Function) + return getattr(node.obj, "__self__", None) if node is not None else None + + @property + def obj(self): + """Underlying Python object.""" + obj = getattr(self, "_obj", None) + if obj is None: + self._obj = obj = self._getobj() + # XXX evil hack + # used to avoid Function marker duplication + if self._ALLOW_MARKERS: + self.own_markers.extend(get_unpacked_marks(self.obj)) + # This assumes that `obj` is called before there is a chance + # to add custom keys to `self.keywords`, so no fear of overriding. + self.keywords.update((mark.name, mark) for mark in self.own_markers) + return obj + + @obj.setter + def obj(self, value): + self._obj = value + + def _getobj(self): + """Get the underlying Python object. May be overwritten by subclasses.""" + # TODO: Improve the type of `parent` such that assert/ignore aren't needed. + assert self.parent is not None + obj = self.parent.obj # type: ignore[attr-defined] + return getattr(obj, self.name) + + def getmodpath(self, stopatmodule: bool = True, includemodule: bool = False) -> str: + """Return Python path relative to the containing module.""" + chain = self.listchain() + chain.reverse() + parts = [] + for node in chain: + name = node.name + if isinstance(node, Module): + name = os.path.splitext(name)[0] + if stopatmodule: + if includemodule: + parts.append(name) + break + parts.append(name) + parts.reverse() + return ".".join(parts) + + def reportinfo(self) -> Tuple[Union["os.PathLike[str]", str], Optional[int], str]: + # XXX caching? 
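+        # reportinfo() is what item.location and the "path:lineno" shown in
+        # reports derive from; the nose branch below maps a compiled .pyc
+        # path back to its .py source.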
+ obj = self.obj + compat_co_firstlineno = getattr(obj, "compat_co_firstlineno", None) + if isinstance(compat_co_firstlineno, int): + # nose compatibility + file_path = sys.modules[obj.__module__].__file__ + assert file_path is not None + if file_path.endswith(".pyc"): + file_path = file_path[:-1] + path: Union["os.PathLike[str]", str] = file_path + lineno = compat_co_firstlineno + else: + path, lineno = getfslineno(obj) + modpath = self.getmodpath() + assert isinstance(lineno, int) + return path, lineno, modpath + + +# As an optimization, these builtin attribute names are pre-ignored when +# iterating over an object during collection -- the pytest_pycollect_makeitem +# hook is not called for them. +# fmt: off +class _EmptyClass: pass # noqa: E701 +IGNORED_ATTRIBUTES = frozenset.union( # noqa: E305 + frozenset(), + # Module. + dir(types.ModuleType("empty_module")), + # Some extra module attributes the above doesn't catch. + {"__builtins__", "__file__", "__cached__"}, + # Class. + dir(_EmptyClass), + # Instance. + dir(_EmptyClass()), +) +del _EmptyClass +# fmt: on + + +class PyCollector(PyobjMixin, nodes.Collector): + def funcnamefilter(self, name: str) -> bool: + return self._matches_prefix_or_glob_option("python_functions", name) + + def isnosetest(self, obj: object) -> bool: + """Look for the __test__ attribute, which is applied by the + @nose.tools.istest decorator. + """ + # We explicitly check for "is True" here to not mistakenly treat + # classes with a custom __getattr__ returning something truthy (like a + # function) as test classes. + return safe_getattr(obj, "__test__", False) is True + + def classnamefilter(self, name: str) -> bool: + return self._matches_prefix_or_glob_option("python_classes", name) + + def istestfunction(self, obj: object, name: str) -> bool: + if self.funcnamefilter(name) or self.isnosetest(obj): + if isinstance(obj, (staticmethod, classmethod)): + # staticmethods and classmethods need to be unwrapped. + obj = safe_getattr(obj, "__func__", False) + return callable(obj) and fixtures.getfixturemarker(obj) is None + else: + return False + + def istestclass(self, obj: object, name: str) -> bool: + return self.classnamefilter(name) or self.isnosetest(obj) + + def _matches_prefix_or_glob_option(self, option_name: str, name: str) -> bool: + """Check if the given name matches the prefix or glob-pattern defined + in ini configuration.""" + for option in self.config.getini(option_name): + if name.startswith(option): + return True + # Check that name looks like a glob-string before calling fnmatch + # because this is called for every name in each collected module, + # and fnmatch is somewhat expensive to call. + elif ("*" in option or "?" in option or "[" in option) and fnmatch.fnmatch( + name, option + ): + return True + return False + + def collect(self) -> Iterable[Union[nodes.Item, nodes.Collector]]: + if not getattr(self.obj, "__test__", True): + return [] + + # Avoid random getattrs and peek in the __dict__ instead. + dicts = [getattr(self.obj, "__dict__", {})] + if isinstance(self.obj, type): + for basecls in self.obj.__mro__: + dicts.append(basecls.__dict__) + + # In each class, nodes should be definition ordered. + # __dict__ is definition ordered. + seen: Set[str] = set() + dict_values: List[List[Union[nodes.Item, nodes.Collector]]] = [] + ihook = self.ihook + for dic in dicts: + values: List[Union[nodes.Item, nodes.Collector]] = [] + # Note: seems like the dict can change during iteration - + # be careful not to remove the list() without consideration. 
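+            # Ordering sketch: with ``class TestB(TestA)`` (hypothetical),
+            # TestA's tests come out before TestB's own, because each
+            # __dict__ is scanned in definition order here and dict_values
+            # is reversed below so base classes precede subclasses.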
+ for name, obj in list(dic.items()): + if name in IGNORED_ATTRIBUTES: + continue + if name in seen: + continue + seen.add(name) + res = ihook.pytest_pycollect_makeitem( + collector=self, name=name, obj=obj + ) + if res is None: + continue + elif isinstance(res, list): + values.extend(res) + else: + values.append(res) + dict_values.append(values) + + # Between classes in the class hierarchy, reverse-MRO order -- nodes + # inherited from base classes should come before subclasses. + result = [] + for values in reversed(dict_values): + result.extend(values) + return result + + def _genfunctions(self, name: str, funcobj) -> Iterator["Function"]: + modulecol = self.getparent(Module) + assert modulecol is not None + module = modulecol.obj + clscol = self.getparent(Class) + cls = clscol and clscol.obj or None + + definition = FunctionDefinition.from_parent(self, name=name, callobj=funcobj) + fixtureinfo = definition._fixtureinfo + + # pytest_generate_tests impls call metafunc.parametrize() which fills + # metafunc._calls, the outcome of the hook. + metafunc = Metafunc( + definition=definition, + fixtureinfo=fixtureinfo, + config=self.config, + cls=cls, + module=module, + _ispytest=True, + ) + methods = [] + if hasattr(module, "pytest_generate_tests"): + methods.append(module.pytest_generate_tests) + if cls is not None and hasattr(cls, "pytest_generate_tests"): + methods.append(cls().pytest_generate_tests) + self.ihook.pytest_generate_tests.call_extra(methods, dict(metafunc=metafunc)) + + if not metafunc._calls: + yield Function.from_parent(self, name=name, fixtureinfo=fixtureinfo) + else: + # Add funcargs() as fixturedefs to fixtureinfo.arg2fixturedefs. + fm = self.session._fixturemanager + fixtures.add_funcarg_pseudo_fixture_def(self, metafunc, fm) + + # Add_funcarg_pseudo_fixture_def may have shadowed some fixtures + # with direct parametrization, so make sure we update what the + # function really needs. + fixtureinfo.prune_dependency_tree() + + for callspec in metafunc._calls: + subname = f"{name}[{callspec.id}]" + yield Function.from_parent( + self, + name=subname, + callspec=callspec, + fixtureinfo=fixtureinfo, + keywords={callspec.id: True}, + originalname=name, + ) + + +class Module(nodes.File, PyCollector): + """Collector for test classes and functions.""" + + def _getobj(self): + return self._importtestmodule() + + def collect(self) -> Iterable[Union[nodes.Item, nodes.Collector]]: + self._inject_setup_module_fixture() + self._inject_setup_function_fixture() + self.session._fixturemanager.parsefactories(self) + return super().collect() + + def _inject_setup_module_fixture(self) -> None: + """Inject a hidden autouse, module scoped fixture into the collected module object + that invokes setUpModule/tearDownModule if either or both are available. + + Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with + other fixtures (#517). + """ + has_nose = self.config.pluginmanager.has_plugin("nose") + setup_module = _get_first_non_fixture_func( + self.obj, ("setUpModule", "setup_module") + ) + if setup_module is None and has_nose: + # The name "setup" is too common - only treat as fixture if callable. 
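+            # A bare module-level ``def setup(): ...`` counts as a nose-style
+            # hook, but a data attribute that merely shares the name (say
+            # ``setup = {"mode": "fast"}``, illustrative) must not - hence
+            # the callable() check below.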
+ setup_module = _get_first_non_fixture_func(self.obj, ("setup",)) + if not callable(setup_module): + setup_module = None + teardown_module = _get_first_non_fixture_func( + self.obj, ("tearDownModule", "teardown_module") + ) + if teardown_module is None and has_nose: + teardown_module = _get_first_non_fixture_func(self.obj, ("teardown",)) + # Same as "setup" above - only treat as fixture if callable. + if not callable(teardown_module): + teardown_module = None + + if setup_module is None and teardown_module is None: + return + + @fixtures.fixture( + autouse=True, + scope="module", + # Use a unique name to speed up lookup. + name=f"_xunit_setup_module_fixture_{self.obj.__name__}", + ) + def xunit_setup_module_fixture(request) -> Generator[None, None, None]: + if setup_module is not None: + _call_with_optional_argument(setup_module, request.module) + yield + if teardown_module is not None: + _call_with_optional_argument(teardown_module, request.module) + + self.obj.__pytest_setup_module = xunit_setup_module_fixture + + def _inject_setup_function_fixture(self) -> None: + """Inject a hidden autouse, function scoped fixture into the collected module object + that invokes setup_function/teardown_function if either or both are available. + + Using a fixture to invoke these methods ensures we play nicely and unsurprisingly with + other fixtures (#517). + """ + setup_function = _get_first_non_fixture_func(self.obj, ("setup_function",)) + teardown_function = _get_first_non_fixture_func( + self.obj, ("teardown_function",) + ) + if setup_function is None and teardown_function is None: + return + + @fixtures.fixture( + autouse=True, + scope="function", + # Use a unique name to speed up lookup. + name=f"_xunit_setup_function_fixture_{self.obj.__name__}", + ) + def xunit_setup_function_fixture(request) -> Generator[None, None, None]: + if request.instance is not None: + # in this case we are bound to an instance, so we need to let + # setup_method handle this + yield + return + if setup_function is not None: + _call_with_optional_argument(setup_function, request.function) + yield + if teardown_function is not None: + _call_with_optional_argument(teardown_function, request.function) + + self.obj.__pytest_setup_function = xunit_setup_function_fixture + + def _importtestmodule(self): + # We assume we are only called once per module.
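+        # --import-mode is "prepend" (default), "append" or "importlib";
+        # it controls how import_path() below makes the module importable
+        # (sys.path insertion vs. an importlib-based import).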
+ importmode = self.config.getoption("--import-mode") + try: + mod = import_path(self.path, mode=importmode, root=self.config.rootpath) + except SyntaxError as e: + raise self.CollectError( + ExceptionInfo.from_current().getrepr(style="short") + ) from e + except ImportPathMismatchError as e: + raise self.CollectError( + "import file mismatch:\n" + "imported module %r has this __file__ attribute:\n" + " %s\n" + "which is not the same as the test file we want to collect:\n" + " %s\n" + "HINT: remove __pycache__ / .pyc files and/or use a " + "unique basename for your test file modules" % e.args + ) from e + except ImportError as e: + exc_info = ExceptionInfo.from_current() + if self.config.getoption("verbose") < 2: + exc_info.traceback = exc_info.traceback.filter(filter_traceback) + exc_repr = ( + exc_info.getrepr(style="short") + if exc_info.traceback + else exc_info.exconly() + ) + formatted_tb = str(exc_repr) + raise self.CollectError( + "ImportError while importing test module '{path}'.\n" + "Hint: make sure your test modules/packages have valid Python names.\n" + "Traceback:\n" + "{traceback}".format(path=self.path, traceback=formatted_tb) + ) from e + except skip.Exception as e: + if e.allow_module_level: + raise + raise self.CollectError( + "Using pytest.skip outside of a test will skip the entire module. " + "If that's your intention, pass `allow_module_level=True`. " + "If you want to skip a specific test or an entire class, " + "use the @pytest.mark.skip or @pytest.mark.skipif decorators." + ) from e + self.config.pluginmanager.consider_module(mod) + return mod + + +class Package(Module): + def __init__( + self, + fspath: Optional[LEGACY_PATH], + parent: nodes.Collector, + # NOTE: following args are unused: + config=None, + session=None, + nodeid=None, + path: Optional[Path] = None, + ) -> None: + # NOTE: Could be just the following, but kept as-is for compat. + # nodes.FSCollector.__init__(self, fspath, parent=parent) + session = parent.session + nodes.FSCollector.__init__( + self, + fspath=fspath, + path=path, + parent=parent, + config=config, + session=session, + nodeid=nodeid, + ) + self.name = self.path.parent.name + + def setup(self) -> None: + # Not using fixtures to call setup_module here because autouse fixtures + # from packages are not called automatically (#4085). 
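+        # Runs once per package: a setUpModule/setup_module defined in the
+        # package's __init__.py is called here, and the matching teardown
+        # is registered as a finalizer below.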
+ setup_module = _get_first_non_fixture_func( + self.obj, ("setUpModule", "setup_module") + ) + if setup_module is not None: + _call_with_optional_argument(setup_module, self.obj) + + teardown_module = _get_first_non_fixture_func( + self.obj, ("tearDownModule", "teardown_module") + ) + if teardown_module is not None: + func = partial(_call_with_optional_argument, teardown_module, self.obj) + self.addfinalizer(func) + + def _recurse(self, direntry: "os.DirEntry[str]") -> bool: + if direntry.name == "__pycache__": + return False + fspath = Path(direntry.path) + ihook = self.session.gethookproxy(fspath.parent) + if ihook.pytest_ignore_collect(collection_path=fspath, config=self.config): + return False + return True + + def _collectfile( + self, fspath: Path, handle_dupes: bool = True + ) -> Sequence[nodes.Collector]: + assert ( + fspath.is_file() + ), "{!r} is not a file (isdir={!r}, exists={!r}, islink={!r})".format( + fspath, fspath.is_dir(), fspath.exists(), fspath.is_symlink() + ) + ihook = self.session.gethookproxy(fspath) + if not self.session.isinitpath(fspath): + if ihook.pytest_ignore_collect(collection_path=fspath, config=self.config): + return () + + if handle_dupes: + keepduplicates = self.config.getoption("keepduplicates") + if not keepduplicates: + duplicate_paths = self.config.pluginmanager._duplicatepaths + if fspath in duplicate_paths: + return () + else: + duplicate_paths.add(fspath) + + return ihook.pytest_collect_file(file_path=fspath, parent=self) # type: ignore[no-any-return] + + def collect(self) -> Iterable[Union[nodes.Item, nodes.Collector]]: + this_path = self.path.parent + + # Always collect the __init__ first. + if path_matches_patterns(self.path, self.config.getini("python_files")): + yield Module.from_parent(self, path=self.path) + + pkg_prefixes: Set[Path] = set() + for direntry in visit(str(this_path), recurse=self._recurse): + path = Path(direntry.path) + + # We will visit our own __init__.py file, in which case we skip it. + if direntry.is_file(): + if direntry.name == "__init__.py" and path.parent == this_path: + continue + + parts_ = parts(direntry.path) + if any( + str(pkg_prefix) in parts_ and pkg_prefix / "__init__.py" != path + for pkg_prefix in pkg_prefixes + ): + continue + + if direntry.is_file(): + yield from self._collectfile(path) + elif not direntry.is_dir(): + # Broken symlink or invalid/missing file. + continue + elif path.joinpath("__init__.py").is_file(): + pkg_prefixes.add(path) + + +def _call_with_optional_argument(func, arg) -> None: + """Call the given function with the given argument if func accepts one argument, otherwise + calls func without arguments.""" + arg_count = func.__code__.co_argcount + if inspect.ismethod(func): + arg_count -= 1 + if arg_count: + func(arg) + else: + func() + + +def _get_first_non_fixture_func(obj: object, names: Iterable[str]) -> Optional[object]: + """Return the attribute from the given object to be used as a setup/teardown + xunit-style function, but only if not marked as a fixture to avoid calling it twice. 
+ """ + for name in names: + meth: Optional[object] = getattr(obj, name, None) + if meth is not None and fixtures.getfixturemarker(meth) is None: + return meth + return None + + +class Class(PyCollector): + """Collector for test methods.""" + + @classmethod + def from_parent(cls, parent, *, name, obj=None, **kw): + """The public constructor.""" + return super().from_parent(name=name, parent=parent, **kw) + + def newinstance(self): + return self.obj() + + def collect(self) -> Iterable[Union[nodes.Item, nodes.Collector]]: + if not safe_getattr(self.obj, "__test__", True): + return [] + if hasinit(self.obj): + assert self.parent is not None + self.warn( + PytestCollectionWarning( + "cannot collect test class %r because it has a " + "__init__ constructor (from: %s)" + % (self.obj.__name__, self.parent.nodeid) + ) + ) + return [] + elif hasnew(self.obj): + assert self.parent is not None + self.warn( + PytestCollectionWarning( + "cannot collect test class %r because it has a " + "__new__ constructor (from: %s)" + % (self.obj.__name__, self.parent.nodeid) + ) + ) + return [] + + self._inject_setup_class_fixture() + self._inject_setup_method_fixture() + + self.session._fixturemanager.parsefactories(self.newinstance(), self.nodeid) + + return super().collect() + + def _inject_setup_class_fixture(self) -> None: + """Inject a hidden autouse, class scoped fixture into the collected class object + that invokes setup_class/teardown_class if either or both are available. + + Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with + other fixtures (#517). + """ + setup_class = _get_first_non_fixture_func(self.obj, ("setup_class",)) + teardown_class = _get_first_non_fixture_func(self.obj, ("teardown_class",)) + if setup_class is None and teardown_class is None: + return + + @fixtures.fixture( + autouse=True, + scope="class", + # Use a unique name to speed up lookup. + name=f"_xunit_setup_class_fixture_{self.obj.__qualname__}", + ) + def xunit_setup_class_fixture(cls) -> Generator[None, None, None]: + if setup_class is not None: + func = getimfunc(setup_class) + _call_with_optional_argument(func, self.obj) + yield + if teardown_class is not None: + func = getimfunc(teardown_class) + _call_with_optional_argument(func, self.obj) + + self.obj.__pytest_setup_class = xunit_setup_class_fixture + + def _inject_setup_method_fixture(self) -> None: + """Inject a hidden autouse, function scoped fixture into the collected class object + that invokes setup_method/teardown_method if either or both are available. + + Using a fixture to invoke these methods ensures we play nicely and unsurprisingly with + other fixtures (#517). + """ + has_nose = self.config.pluginmanager.has_plugin("nose") + setup_name = "setup_method" + setup_method = _get_first_non_fixture_func(self.obj, (setup_name,)) + emit_nose_setup_warning = False + if setup_method is None and has_nose: + setup_name = "setup" + emit_nose_setup_warning = True + setup_method = _get_first_non_fixture_func(self.obj, (setup_name,)) + teardown_name = "teardown_method" + teardown_method = _get_first_non_fixture_func(self.obj, (teardown_name,)) + emit_nose_teardown_warning = False + if teardown_method is None and has_nose: + teardown_name = "teardown" + emit_nose_teardown_warning = True + teardown_method = _get_first_non_fixture_func(self.obj, (teardown_name,)) + if setup_method is None and teardown_method is None: + return + + @fixtures.fixture( + autouse=True, + scope="function", + # Use a unique name to speed up lookup. 
+ name=f"_xunit_setup_method_fixture_{self.obj.__qualname__}", + ) + def xunit_setup_method_fixture(self, request) -> Generator[None, None, None]: + method = request.function + if setup_method is not None: + func = getattr(self, setup_name) + _call_with_optional_argument(func, method) + if emit_nose_setup_warning: + warnings.warn( + NOSE_SUPPORT_METHOD.format( + nodeid=request.node.nodeid, method="setup" + ), + stacklevel=2, + ) + yield + if teardown_method is not None: + func = getattr(self, teardown_name) + _call_with_optional_argument(func, method) + if emit_nose_teardown_warning: + warnings.warn( + NOSE_SUPPORT_METHOD.format( + nodeid=request.node.nodeid, method="teardown" + ), + stacklevel=2, + ) + + self.obj.__pytest_setup_method = xunit_setup_method_fixture + + +class InstanceDummy: + """Instance used to be a node type between Class and Function. It has been + removed in pytest 7.0. Some plugins exist which reference `pytest.Instance` + only to ignore it; this dummy class keeps them working. This will be removed + in pytest 8.""" + + +def __getattr__(name: str) -> object: + if name == "Instance": + warnings.warn(INSTANCE_COLLECTOR, 2) + return InstanceDummy + raise AttributeError(f"module {__name__} has no attribute {name}") + + +def hasinit(obj: object) -> bool: + init: object = getattr(obj, "__init__", None) + if init: + return init != object.__init__ + return False + + +def hasnew(obj: object) -> bool: + new: object = getattr(obj, "__new__", None) + if new: + return new != object.__new__ + return False + + +@final +@dataclasses.dataclass(frozen=True) +class IdMaker: + """Make IDs for a parametrization.""" + + __slots__ = ( + "argnames", + "parametersets", + "idfn", + "ids", + "config", + "nodeid", + "func_name", + ) + + # The argnames of the parametrization. + argnames: Sequence[str] + # The ParameterSets of the parametrization. + parametersets: Sequence[ParameterSet] + # Optionally, a user-provided callable to make IDs for parameters in a + # ParameterSet. + idfn: Optional[Callable[[Any], Optional[object]]] + # Optionally, explicit IDs for ParameterSets by index. + ids: Optional[Sequence[Optional[object]]] + # Optionally, the pytest config. + # Used for controlling ASCII escaping, and for calling the + # :hook:`pytest_make_parametrize_id` hook. + config: Optional[Config] + # Optionally, the ID of the node being parametrized. + # Used only for clearer error messages. + nodeid: Optional[str] + # Optionally, the ID of the function being parametrized. + # Used only for clearer error messages. + func_name: Optional[str] + + def make_unique_parameterset_ids(self) -> List[str]: + """Make a unique identifier for each ParameterSet, that may be used to + identify the parametrization in a node ID. + + Format is -...-[counter], where prm_x_token is + - user-provided id, if given + - else an id derived from the value, applicable for certain types + - else + The counter suffix is appended only in case a string wouldn't be unique + otherwise. + """ + resolved_ids = list(self._resolve_ids()) + # All IDs must be unique! + if len(resolved_ids) != len(set(resolved_ids)): + # Record the number of occurrences of each ID. + id_counts = Counter(resolved_ids) + # Map the ID to its next suffix. + id_suffixes: Dict[str, int] = defaultdict(int) + # Suffix non-unique IDs to make them unique. 
+ for index, id in enumerate(resolved_ids): + if id_counts[id] > 1: + resolved_ids[index] = f"{id}{id_suffixes[id]}" + id_suffixes[id] += 1 + return resolved_ids + + def _resolve_ids(self) -> Iterable[str]: + """Resolve IDs for all ParameterSets (may contain duplicates).""" + for idx, parameterset in enumerate(self.parametersets): + if parameterset.id is not None: + # ID provided directly - pytest.param(..., id="...") + yield parameterset.id + elif self.ids and idx < len(self.ids) and self.ids[idx] is not None: + # ID provided in the IDs list - parametrize(..., ids=[...]). + yield self._idval_from_value_required(self.ids[idx], idx) + else: + # ID not provided - generate it. + yield "-".join( + self._idval(val, argname, idx) + for val, argname in zip(parameterset.values, self.argnames) + ) + + def _idval(self, val: object, argname: str, idx: int) -> str: + """Make an ID for a parameter in a ParameterSet.""" + idval = self._idval_from_function(val, argname, idx) + if idval is not None: + return idval + idval = self._idval_from_hook(val, argname) + if idval is not None: + return idval + idval = self._idval_from_value(val) + if idval is not None: + return idval + return self._idval_from_argname(argname, idx) + + def _idval_from_function( + self, val: object, argname: str, idx: int + ) -> Optional[str]: + """Try to make an ID for a parameter in a ParameterSet using the + user-provided id callable, if given.""" + if self.idfn is None: + return None + try: + id = self.idfn(val) + except Exception as e: + prefix = f"{self.nodeid}: " if self.nodeid is not None else "" + msg = "error raised while trying to determine id of parameter '{}' at position {}" + msg = prefix + msg.format(argname, idx) + raise ValueError(msg) from e + if id is None: + return None + return self._idval_from_value(id) + + def _idval_from_hook(self, val: object, argname: str) -> Optional[str]: + """Try to make an ID for a parameter in a ParameterSet by calling the + :hook:`pytest_make_parametrize_id` hook.""" + if self.config: + id: Optional[str] = self.config.hook.pytest_make_parametrize_id( + config=self.config, val=val, argname=argname + ) + return id + return None + + def _idval_from_value(self, val: object) -> Optional[str]: + """Try to make an ID for a parameter in a ParameterSet from its value, + if the value type is supported.""" + if isinstance(val, STRING_TYPES): + return _ascii_escaped_by_config(val, self.config) + elif val is None or isinstance(val, (float, int, bool, complex)): + return str(val) + elif isinstance(val, Pattern): + return ascii_escaped(val.pattern) + elif val is NOTSET: + # Fallback to default. Note that NOTSET is an enum.Enum. + pass + elif isinstance(val, enum.Enum): + return str(val) + elif isinstance(getattr(val, "__name__", None), str): + # Name of a class, function, module, etc. + name: str = getattr(val, "__name__") + return name + return None + + def _idval_from_value_required(self, val: object, idx: int) -> str: + """Like _idval_from_value(), but fails if the type is not supported.""" + id = self._idval_from_value(val) + if id is not None: + return id + + # Fail. + if self.func_name is not None: + prefix = f"In {self.func_name}: " + elif self.nodeid is not None: + prefix = f"In {self.nodeid}: " + else: + prefix = "" + msg = ( + f"{prefix}ids contains unsupported value {saferepr(val)} (type: {type(val)!r}) at index {idx}. " + "Supported types are: str, bytes, int, float, complex, bool, enum, regex or anything with a __name__." 
+ ) + fail(msg, pytrace=False) + + @staticmethod + def _idval_from_argname(argname: str, idx: int) -> str: + """Make an ID for a parameter in a ParameterSet from the argument name + and the index of the ParameterSet.""" + return str(argname) + str(idx) + + +@final +@dataclasses.dataclass(frozen=True) +class CallSpec2: + """A planned parameterized invocation of a test function. + + Calculated during collection for a given test function's Metafunc. + Once collection is over, each callspec is turned into a single Item + and stored in item.callspec. + """ + + # arg name -> arg value which will be passed to the parametrized test + # function (direct parameterization). + funcargs: Dict[str, object] = dataclasses.field(default_factory=dict) + # arg name -> arg value which will be passed to a fixture of the same name + # (indirect parametrization). + params: Dict[str, object] = dataclasses.field(default_factory=dict) + # arg name -> arg index. + indices: Dict[str, int] = dataclasses.field(default_factory=dict) + # Used for sorting parametrized resources. + _arg2scope: Dict[str, Scope] = dataclasses.field(default_factory=dict) + # Parts which will be added to the item's name in `[..]` separated by "-". + _idlist: List[str] = dataclasses.field(default_factory=list) + # Marks which will be applied to the item. + marks: List[Mark] = dataclasses.field(default_factory=list) + + def setmulti( + self, + *, + valtypes: Mapping[str, "Literal['params', 'funcargs']"], + argnames: Iterable[str], + valset: Iterable[object], + id: str, + marks: Iterable[Union[Mark, MarkDecorator]], + scope: Scope, + param_index: int, + ) -> "CallSpec2": + funcargs = self.funcargs.copy() + params = self.params.copy() + indices = self.indices.copy() + arg2scope = self._arg2scope.copy() + for arg, val in zip(argnames, valset): + if arg in params or arg in funcargs: + raise ValueError(f"duplicate {arg!r}") + valtype_for_arg = valtypes[arg] + if valtype_for_arg == "params": + params[arg] = val + elif valtype_for_arg == "funcargs": + funcargs[arg] = val + else: + assert_never(valtype_for_arg) + indices[arg] = param_index + arg2scope[arg] = scope + return CallSpec2( + funcargs=funcargs, + params=params, + indices=indices, + _arg2scope=arg2scope, + _idlist=[*self._idlist, id], + marks=[*self.marks, *normalize_mark_list(marks)], + ) + + def getparam(self, name: str) -> object: + try: + return self.params[name] + except KeyError as e: + raise ValueError(name) from e + + @property + def id(self) -> str: + return "-".join(self._idlist) + + +@final +class Metafunc: + """Objects passed to the :hook:`pytest_generate_tests` hook. + + They help to inspect a test function and to generate tests according to + test configuration or values specified in the class or module where a + test function is defined. + """ + + def __init__( + self, + definition: "FunctionDefinition", + fixtureinfo: fixtures.FuncFixtureInfo, + config: Config, + cls=None, + module=None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + + #: Access to the underlying :class:`_pytest.python.FunctionDefinition`. + self.definition = definition + + #: Access to the :class:`pytest.Config` object for the test session. + self.config = config + + #: The module object where the test function is defined in. + self.module = module + + #: Underlying Python test function. + self.function = definition.obj + + #: Set of fixture names required by the test function. 
+ self.fixturenames = fixtureinfo.names_closure + + #: Class object where the test function is defined in or ``None``. + self.cls = cls + + self._arg2fixturedefs = fixtureinfo.name2fixturedefs + + # Result of parametrize(). + self._calls: List[CallSpec2] = [] + + def parametrize( + self, + argnames: Union[str, Sequence[str]], + argvalues: Iterable[Union[ParameterSet, Sequence[object], object]], + indirect: Union[bool, Sequence[str]] = False, + ids: Optional[ + Union[Iterable[Optional[object]], Callable[[Any], Optional[object]]] + ] = None, + scope: "Optional[_ScopeName]" = None, + *, + _param_mark: Optional[Mark] = None, + ) -> None: + """Add new invocations to the underlying test function using the list + of argvalues for the given argnames. Parametrization is performed + during the collection phase. If you need to setup expensive resources + see about setting indirect to do it rather than at test setup time. + + Can be called multiple times, in which case each call parametrizes all + previous parametrizations, e.g. + + :: + + unparametrized: t + parametrize ["x", "y"]: t[x], t[y] + parametrize [1, 2]: t[x-1], t[x-2], t[y-1], t[y-2] + + :param argnames: + A comma-separated string denoting one or more argument names, or + a list/tuple of argument strings. + + :param argvalues: + The list of argvalues determines how often a test is invoked with + different argument values. + + If only one argname was specified argvalues is a list of values. + If N argnames were specified, argvalues must be a list of + N-tuples, where each tuple-element specifies a value for its + respective argname. + + :param indirect: + A list of arguments' names (subset of argnames) or a boolean. + If True the list contains all names from the argnames. Each + argvalue corresponding to an argname in this list will + be passed as request.param to its respective argname fixture + function so that it can perform more expensive setups during the + setup phase of a test rather than at collection time. + + :param ids: + Sequence of (or generator for) ids for ``argvalues``, + or a callable to return part of the id for each argvalue. + + With sequences (and generators like ``itertools.count()``) the + returned ids should be of type ``string``, ``int``, ``float``, + ``bool``, or ``None``. + They are mapped to the corresponding index in ``argvalues``. + ``None`` means to use the auto-generated id. + + If it is a callable it will be called for each entry in + ``argvalues``, and the return value is used as part of the + auto-generated id for the whole set (where parts are joined with + dashes ("-")). + This is useful to provide more specific ids for certain items, e.g. + dates. Returning ``None`` will use an auto-generated id. + + If no ids are provided they will be generated automatically from + the argvalues. + + :param scope: + If specified it denotes the scope of the parameters. + The scope is used for grouping tests by parameter instances. + It will also override any fixture-function defined scope, allowing + to set a dynamic scope using test context or configuration. 
+ """ + argnames, parametersets = ParameterSet._for_parametrize( + argnames, + argvalues, + self.function, + self.config, + nodeid=self.definition.nodeid, + ) + del argvalues + + if "request" in argnames: + fail( + "'request' is a reserved name and cannot be used in @pytest.mark.parametrize", + pytrace=False, + ) + + if scope is not None: + scope_ = Scope.from_user( + scope, descr=f"parametrize() call in {self.function.__name__}" + ) + else: + scope_ = _find_parametrized_scope(argnames, self._arg2fixturedefs, indirect) + + self._validate_if_using_arg_names(argnames, indirect) + + arg_values_types = self._resolve_arg_value_types(argnames, indirect) + + # Use any already (possibly) generated ids with parametrize Marks. + if _param_mark and _param_mark._param_ids_from: + generated_ids = _param_mark._param_ids_from._param_ids_generated + if generated_ids is not None: + ids = generated_ids + + ids = self._resolve_parameter_set_ids( + argnames, ids, parametersets, nodeid=self.definition.nodeid + ) + + # Store used (possibly generated) ids with parametrize Marks. + if _param_mark and _param_mark._param_ids_from and generated_ids is None: + object.__setattr__(_param_mark._param_ids_from, "_param_ids_generated", ids) + + # Create the new calls: if we are parametrize() multiple times (by applying the decorator + # more than once) then we accumulate those calls generating the cartesian product + # of all calls. + newcalls = [] + for callspec in self._calls or [CallSpec2()]: + for param_index, (param_id, param_set) in enumerate( + zip(ids, parametersets) + ): + newcallspec = callspec.setmulti( + valtypes=arg_values_types, + argnames=argnames, + valset=param_set.values, + id=param_id, + marks=param_set.marks, + scope=scope_, + param_index=param_index, + ) + newcalls.append(newcallspec) + self._calls = newcalls + + def _resolve_parameter_set_ids( + self, + argnames: Sequence[str], + ids: Optional[ + Union[Iterable[Optional[object]], Callable[[Any], Optional[object]]] + ], + parametersets: Sequence[ParameterSet], + nodeid: str, + ) -> List[str]: + """Resolve the actual ids for the given parameter sets. + + :param argnames: + Argument names passed to ``parametrize()``. + :param ids: + The `ids` parameter of the ``parametrize()`` call (see docs). + :param parametersets: + The parameter sets, each containing a set of values corresponding + to ``argnames``. + :param nodeid str: + The nodeid of the definition item that generated this + parametrization. + :returns: + List with ids for each parameter set given. 
+ """ + if ids is None: + idfn = None + ids_ = None + elif callable(ids): + idfn = ids + ids_ = None + else: + idfn = None + ids_ = self._validate_ids(ids, parametersets, self.function.__name__) + id_maker = IdMaker( + argnames, + parametersets, + idfn, + ids_, + self.config, + nodeid=nodeid, + func_name=self.function.__name__, + ) + return id_maker.make_unique_parameterset_ids() + + def _validate_ids( + self, + ids: Iterable[Optional[object]], + parametersets: Sequence[ParameterSet], + func_name: str, + ) -> List[Optional[object]]: + try: + num_ids = len(ids) # type: ignore[arg-type] + except TypeError: + try: + iter(ids) + except TypeError as e: + raise TypeError("ids must be a callable or an iterable") from e + num_ids = len(parametersets) + + # num_ids == 0 is a special case: https://github.com/pytest-dev/pytest/issues/1849 + if num_ids != len(parametersets) and num_ids != 0: + msg = "In {}: {} parameter sets specified, with different number of ids: {}" + fail(msg.format(func_name, len(parametersets), num_ids), pytrace=False) + + return list(itertools.islice(ids, num_ids)) + + def _resolve_arg_value_types( + self, + argnames: Sequence[str], + indirect: Union[bool, Sequence[str]], + ) -> Dict[str, "Literal['params', 'funcargs']"]: + """Resolve if each parametrized argument must be considered a + parameter to a fixture or a "funcarg" to the function, based on the + ``indirect`` parameter of the parametrized() call. + + :param List[str] argnames: List of argument names passed to ``parametrize()``. + :param indirect: Same as the ``indirect`` parameter of ``parametrize()``. + :rtype: Dict[str, str] + A dict mapping each arg name to either: + * "params" if the argname should be the parameter of a fixture of the same name. + * "funcargs" if the argname should be a parameter to the parametrized test function. + """ + if isinstance(indirect, bool): + valtypes: Dict[str, Literal["params", "funcargs"]] = dict.fromkeys( + argnames, "params" if indirect else "funcargs" + ) + elif isinstance(indirect, Sequence): + valtypes = dict.fromkeys(argnames, "funcargs") + for arg in indirect: + if arg not in argnames: + fail( + "In {}: indirect fixture '{}' doesn't exist".format( + self.function.__name__, arg + ), + pytrace=False, + ) + valtypes[arg] = "params" + else: + fail( + "In {func}: expected Sequence or boolean for indirect, got {type}".format( + type=type(indirect).__name__, func=self.function.__name__ + ), + pytrace=False, + ) + return valtypes + + def _validate_if_using_arg_names( + self, + argnames: Sequence[str], + indirect: Union[bool, Sequence[str]], + ) -> None: + """Check if all argnames are being used, by default values, or directly/indirectly. + + :param List[str] argnames: List of argument names passed to ``parametrize()``. + :param indirect: Same as the ``indirect`` parameter of ``parametrize()``. + :raises ValueError: If validation fails. 
+ """ + default_arg_names = set(get_default_arg_names(self.function)) + func_name = self.function.__name__ + for arg in argnames: + if arg not in self.fixturenames: + if arg in default_arg_names: + fail( + "In {}: function already takes an argument '{}' with a default value".format( + func_name, arg + ), + pytrace=False, + ) + else: + if isinstance(indirect, Sequence): + name = "fixture" if arg in indirect else "argument" + else: + name = "fixture" if indirect else "argument" + fail( + f"In {func_name}: function uses no {name} '{arg}'", + pytrace=False, + ) + + +def _find_parametrized_scope( + argnames: Sequence[str], + arg2fixturedefs: Mapping[str, Sequence[fixtures.FixtureDef[object]]], + indirect: Union[bool, Sequence[str]], +) -> Scope: + """Find the most appropriate scope for a parametrized call based on its arguments. + + When there's at least one direct argument, always use "function" scope. + + When a test function is parametrized and all its arguments are indirect + (e.g. fixtures), return the most narrow scope based on the fixtures used. + + Related to issue #1832, based on code posted by @Kingdread. + """ + if isinstance(indirect, Sequence): + all_arguments_are_fixtures = len(indirect) == len(argnames) + else: + all_arguments_are_fixtures = bool(indirect) + + if all_arguments_are_fixtures: + fixturedefs = arg2fixturedefs or {} + used_scopes = [ + fixturedef[0]._scope + for name, fixturedef in fixturedefs.items() + if name in argnames + ] + # Takes the most narrow scope from used fixtures. + return min(used_scopes, default=Scope.Function) + + return Scope.Function + + +def _ascii_escaped_by_config(val: Union[str, bytes], config: Optional[Config]) -> str: + if config is None: + escape_option = False + else: + escape_option = config.getini( + "disable_test_id_escaping_and_forfeit_all_rights_to_community_support" + ) + # TODO: If escaping is turned off and the user passes bytes, + # will return a bytes. For now we ignore this but the + # code *probably* doesn't handle this case. + return val if escape_option else ascii_escaped(val) # type: ignore + + +def _pretty_fixture_path(func) -> str: + cwd = Path.cwd() + loc = Path(getlocation(func, str(cwd))) + prefix = Path("...", "_pytest") + try: + return str(prefix / loc.relative_to(_PYTEST_DIR)) + except ValueError: + return bestrelpath(cwd, loc) + + +def show_fixtures_per_test(config): + from _pytest.main import wrap_session + + return wrap_session(config, _show_fixtures_per_test) + + +def _show_fixtures_per_test(config: Config, session: Session) -> None: + import _pytest.config + + session.perform_collect() + curdir = Path.cwd() + tw = _pytest.config.create_terminal_writer(config) + verbose = config.getvalue("verbose") + + def get_best_relpath(func) -> str: + loc = getlocation(func, str(curdir)) + return bestrelpath(curdir, Path(loc)) + + def write_fixture(fixture_def: fixtures.FixtureDef[object]) -> None: + argname = fixture_def.argname + if verbose <= 0 and argname.startswith("_"): + return + prettypath = _pretty_fixture_path(fixture_def.func) + tw.write(f"{argname}", green=True) + tw.write(f" -- {prettypath}", yellow=True) + tw.write("\n") + fixture_doc = inspect.getdoc(fixture_def.func) + if fixture_doc: + write_docstring( + tw, fixture_doc.split("\n\n")[0] if verbose <= 0 else fixture_doc + ) + else: + tw.line(" no docstring available", red=True) + + def write_item(item: nodes.Item) -> None: + # Not all items have _fixtureinfo attribute. 
+ info: Optional[FuncFixtureInfo] = getattr(item, "_fixtureinfo", None) + if info is None or not info.name2fixturedefs: + # This test item does not use any fixtures. + return + tw.line() + tw.sep("-", f"fixtures used by {item.name}") + # TODO: Fix this type ignore. + tw.sep("-", f"({get_best_relpath(item.function)})") # type: ignore[attr-defined] + # dict key not used in loop but needed for sorting. + for _, fixturedefs in sorted(info.name2fixturedefs.items()): + assert fixturedefs is not None + if not fixturedefs: + continue + # Last item is expected to be the one used by the test item. + write_fixture(fixturedefs[-1]) + + for session_item in session.items: + write_item(session_item) + + +def showfixtures(config: Config) -> Union[int, ExitCode]: + from _pytest.main import wrap_session + + return wrap_session(config, _showfixtures_main) + + +def _showfixtures_main(config: Config, session: Session) -> None: + import _pytest.config + + session.perform_collect() + curdir = Path.cwd() + tw = _pytest.config.create_terminal_writer(config) + verbose = config.getvalue("verbose") + + fm = session._fixturemanager + + available = [] + seen: Set[Tuple[str, str]] = set() + + for argname, fixturedefs in fm._arg2fixturedefs.items(): + assert fixturedefs is not None + if not fixturedefs: + continue + for fixturedef in fixturedefs: + loc = getlocation(fixturedef.func, str(curdir)) + if (fixturedef.argname, loc) in seen: + continue + seen.add((fixturedef.argname, loc)) + available.append( + ( + len(fixturedef.baseid), + fixturedef.func.__module__, + _pretty_fixture_path(fixturedef.func), + fixturedef.argname, + fixturedef, + ) + ) + + available.sort() + currentmodule = None + for baseid, module, prettypath, argname, fixturedef in available: + if currentmodule != module: + if not module.startswith("_pytest."): + tw.line() + tw.sep("-", f"fixtures defined from {module}") + currentmodule = module + if verbose <= 0 and argname.startswith("_"): + continue + tw.write(f"{argname}", green=True) + if fixturedef.scope != "function": + tw.write(" [%s scope]" % fixturedef.scope, cyan=True) + tw.write(f" -- {prettypath}", yellow=True) + tw.write("\n") + doc = inspect.getdoc(fixturedef.func) + if doc: + write_docstring(tw, doc.split("\n\n")[0] if verbose <= 0 else doc) + else: + tw.line(" no docstring available", red=True) + tw.line() + + +def write_docstring(tw: TerminalWriter, doc: str, indent: str = " ") -> None: + for line in doc.split("\n"): + tw.line(indent + line) + + +class Function(PyobjMixin, nodes.Item): + """An Item responsible for setting up and executing a Python test function. + + :param name: + The full function name, including any decorations like those + added by parametrization (``my_func[my_param]``). + :param parent: + The parent Node. + :param config: + The pytest Config object. + :param callspec: + If given, this is function has been parametrized and the callspec contains + meta information about the parametrization. + :param callobj: + If given, the object which will be called when the Function is invoked, + otherwise the callobj will be obtained from ``parent`` using ``originalname``. + :param keywords: + Keywords bound to the function object for "-k" matching. + :param session: + The pytest Session object. + :param fixtureinfo: + Fixture information already resolved at this fixture node.. + :param originalname: + The attribute name to use for accessing the underlying function object. + Defaults to ``name``. 
Set this if name is different from the original name, + for example when it contains decorations like those added by parametrization + (``my_func[my_param]``). + """ + + # Disable since functions handle it themselves. + _ALLOW_MARKERS = False + + def __init__( + self, + name: str, + parent, + config: Optional[Config] = None, + callspec: Optional[CallSpec2] = None, + callobj=NOTSET, + keywords: Optional[Mapping[str, Any]] = None, + session: Optional[Session] = None, + fixtureinfo: Optional[FuncFixtureInfo] = None, + originalname: Optional[str] = None, + ) -> None: + super().__init__(name, parent, config=config, session=session) + + if callobj is not NOTSET: + self.obj = callobj + + #: Original function name, without any decorations (for example + #: parametrization adds a ``"[...]"`` suffix to function names), used to access + #: the underlying function object from ``parent`` (in case ``callobj`` is not given + #: explicitly). + #: + #: .. versionadded:: 3.0 + self.originalname = originalname or name + + # Note: when FunctionDefinition is introduced, we should change ``originalname`` + # to a readonly property that returns FunctionDefinition.name. + + self.own_markers.extend(get_unpacked_marks(self.obj)) + if callspec: + self.callspec = callspec + self.own_markers.extend(callspec.marks) + + # todo: this is a hell of a hack + # https://github.com/pytest-dev/pytest/issues/4569 + # Note: the order of the updates is important here; indicates what + # takes priority (ctor argument over function attributes over markers). + # Take own_markers only; NodeKeywords handles parent traversal on its own. + self.keywords.update((mark.name, mark) for mark in self.own_markers) + self.keywords.update(self.obj.__dict__) + if keywords: + self.keywords.update(keywords) + + if fixtureinfo is None: + fixtureinfo = self.session._fixturemanager.getfixtureinfo( + self, self.obj, self.cls, funcargs=True + ) + self._fixtureinfo: FuncFixtureInfo = fixtureinfo + self.fixturenames = fixtureinfo.names_closure + self._initrequest() + + @classmethod + def from_parent(cls, parent, **kw): # todo: determine sound type limitations + """The public constructor.""" + return super().from_parent(parent=parent, **kw) + + def _initrequest(self) -> None: + self.funcargs: Dict[str, object] = {} + self._request = fixtures.FixtureRequest(self, _ispytest=True) + + @property + def function(self): + """Underlying python 'function' object.""" + return getimfunc(self.obj) + + def _getobj(self): + assert self.parent is not None + if isinstance(self.parent, Class): + # Each Function gets a fresh class instance. 
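+            # State set on self in one test therefore never leaks into the
+            # next, e.g. (illustrative):
+            #
+            #     class TestCounter:
+            #         def test_a(self):
+            #             self.n = 1
+            #         def test_b(self):
+            #             assert not hasattr(self, "n")  # fresh instance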
+ parent_obj = self.parent.newinstance() + else: + parent_obj = self.parent.obj # type: ignore[attr-defined] + return getattr(parent_obj, self.originalname) + + @property + def _pyfuncitem(self): + """(compatonly) for code expecting pytest-2.2 style request objects.""" + return self + + def runtest(self) -> None: + """Execute the underlying test function.""" + self.ihook.pytest_pyfunc_call(pyfuncitem=self) + + def setup(self) -> None: + self._request._fillfixtures() + + def _traceback_filter(self, excinfo: ExceptionInfo[BaseException]) -> Traceback: + if hasattr(self, "_obj") and not self.config.getoption("fulltrace", False): + code = _pytest._code.Code.from_function(get_real_func(self.obj)) + path, firstlineno = code.path, code.firstlineno + traceback = excinfo.traceback + ntraceback = traceback.cut(path=path, firstlineno=firstlineno) + if ntraceback == traceback: + ntraceback = ntraceback.cut(path=path) + if ntraceback == traceback: + ntraceback = ntraceback.filter(filter_traceback) + if not ntraceback: + ntraceback = traceback + ntraceback = ntraceback.filter(excinfo) + + # issue364: mark all but first and last frames to + # only show a single-line message for each frame. + if self.config.getoption("tbstyle", "auto") == "auto": + if len(ntraceback) > 2: + ntraceback = Traceback( + entry + if i == 0 or i == len(ntraceback) - 1 + else entry.with_repr_style("short") + for i, entry in enumerate(ntraceback) + ) + + return ntraceback + return excinfo.traceback + + # TODO: Type ignored -- breaks Liskov Substitution. + def repr_failure( # type: ignore[override] + self, + excinfo: ExceptionInfo[BaseException], + ) -> Union[str, TerminalRepr]: + style = self.config.getoption("tbstyle", "auto") + if style == "auto": + style = "long" + return self._repr_failure_py(excinfo, style=style) + + +class FunctionDefinition(Function): + """ + This class is a stopgap solution until we evolve to have actual function definition nodes + and manage to get rid of ``metafunc``.
+ """ + + def runtest(self) -> None: + raise RuntimeError("function definitions are not supposed to be run as tests") + + setup = runtest diff --git a/venv/lib/python3.11/site-packages/_pytest/python_api.py b/venv/lib/python3.11/site-packages/_pytest/python_api.py new file mode 100644 index 0000000..4213bd0 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/python_api.py @@ -0,0 +1,995 @@ +import math +import pprint +from collections.abc import Collection +from collections.abc import Sized +from decimal import Decimal +from numbers import Complex +from types import TracebackType +from typing import Any +from typing import Callable +from typing import cast +from typing import ContextManager +from typing import List +from typing import Mapping +from typing import Optional +from typing import Pattern +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +if TYPE_CHECKING: + from numpy import ndarray + + +import _pytest._code +from _pytest.compat import final +from _pytest.compat import STRING_TYPES +from _pytest.compat import overload +from _pytest.outcomes import fail + + +def _non_numeric_type_error(value, at: Optional[str]) -> TypeError: + at_str = f" at {at}" if at else "" + return TypeError( + "cannot make approximate comparisons to non-numeric values: {!r} {}".format( + value, at_str + ) + ) + + +def _compare_approx( + full_object: object, + message_data: Sequence[Tuple[str, str, str]], + number_of_elements: int, + different_ids: Sequence[object], + max_abs_diff: float, + max_rel_diff: float, +) -> List[str]: + message_list = list(message_data) + message_list.insert(0, ("Index", "Obtained", "Expected")) + max_sizes = [0, 0, 0] + for index, obtained, expected in message_list: + max_sizes[0] = max(max_sizes[0], len(index)) + max_sizes[1] = max(max_sizes[1], len(obtained)) + max_sizes[2] = max(max_sizes[2], len(expected)) + explanation = [ + f"comparison failed. Mismatched elements: {len(different_ids)} / {number_of_elements}:", + f"Max absolute difference: {max_abs_diff}", + f"Max relative difference: {max_rel_diff}", + ] + [ + f"{indexes:<{max_sizes[0]}} | {obtained:<{max_sizes[1]}} | {expected:<{max_sizes[2]}}" + for indexes, obtained, expected in message_list + ] + return explanation + + +# builtin pytest.approx helper + + +class ApproxBase: + """Provide shared utilities for making approximate comparisons between + numbers or sequences of numbers.""" + + # Tell numpy to use our `__eq__` operator instead of its. + __array_ufunc__ = None + __array_priority__ = 100 + + def __init__(self, expected, rel=None, abs=None, nan_ok: bool = False) -> None: + __tracebackhide__ = True + self.expected = expected + self.abs = abs + self.rel = rel + self.nan_ok = nan_ok + self._check_type() + + def __repr__(self) -> str: + raise NotImplementedError + + def _repr_compare(self, other_side: Any) -> List[str]: + return [ + "comparison failed", + f"Obtained: {other_side}", + f"Expected: {self}", + ] + + def __eq__(self, actual) -> bool: + return all( + a == self._approx_scalar(x) for a, x in self._yield_comparisons(actual) + ) + + def __bool__(self): + __tracebackhide__ = True + raise AssertionError( + "approx() is not supported in a boolean context.\nDid you mean: `assert a == approx(b)`?" + ) + + # Ignore type because of https://github.com/python/mypy/issues/4266. 
+ __hash__ = None # type: ignore + + def __ne__(self, actual) -> bool: + return not (actual == self) + + def _approx_scalar(self, x) -> "ApproxScalar": + if isinstance(x, Decimal): + return ApproxDecimal(x, rel=self.rel, abs=self.abs, nan_ok=self.nan_ok) + return ApproxScalar(x, rel=self.rel, abs=self.abs, nan_ok=self.nan_ok) + + def _yield_comparisons(self, actual): + """Yield all the pairs of numbers to be compared. + + This is used to implement the `__eq__` method. + """ + raise NotImplementedError + + def _check_type(self) -> None: + """Raise a TypeError if the expected value is not a valid type.""" + # This is only a concern if the expected value is a sequence. In every + # other case, the approx() function ensures that the expected value has + # a numeric type. For this reason, the default is to do nothing. The + # classes that deal with sequences should reimplement this method to + # raise if there are any non-numeric elements in the sequence. + + +def _recursive_sequence_map(f, x): + """Recursively map a function over a sequence of arbitrary depth""" + if isinstance(x, (list, tuple)): + seq_type = type(x) + return seq_type(_recursive_sequence_map(f, xi) for xi in x) + else: + return f(x) + + +class ApproxNumpy(ApproxBase): + """Perform approximate comparisons where the expected value is numpy array.""" + + def __repr__(self) -> str: + list_scalars = _recursive_sequence_map( + self._approx_scalar, self.expected.tolist() + ) + return f"approx({list_scalars!r})" + + def _repr_compare(self, other_side: "ndarray") -> List[str]: + import itertools + import math + + def get_value_from_nested_list( + nested_list: List[Any], nd_index: Tuple[Any, ...] + ) -> Any: + """ + Helper function to get the value out of a nested list, given an n-dimensional index. + This mimics numpy's indexing, but for raw nested python lists. + """ + value: Any = nested_list + for i in nd_index: + value = value[i] + return value + + np_array_shape = self.expected.shape + approx_side_as_seq = _recursive_sequence_map( + self._approx_scalar, self.expected.tolist() + ) + + if np_array_shape != other_side.shape: + return [ + "Impossible to compare arrays with different shapes.", + f"Shapes: {np_array_shape} and {other_side.shape}", + ] + + number_of_elements = self.expected.size + max_abs_diff = -math.inf + max_rel_diff = -math.inf + different_ids = [] + for index in itertools.product(*(range(i) for i in np_array_shape)): + approx_value = get_value_from_nested_list(approx_side_as_seq, index) + other_value = get_value_from_nested_list(other_side, index) + if approx_value != other_value: + abs_diff = abs(approx_value.expected - other_value) + max_abs_diff = max(max_abs_diff, abs_diff) + if other_value == 0.0: + max_rel_diff = math.inf + else: + max_rel_diff = max(max_rel_diff, abs_diff / abs(other_value)) + different_ids.append(index) + + message_data = [ + ( + str(index), + str(get_value_from_nested_list(other_side, index)), + str(get_value_from_nested_list(approx_side_as_seq, index)), + ) + for index in different_ids + ] + return _compare_approx( + self.expected, + message_data, + number_of_elements, + different_ids, + max_abs_diff, + max_rel_diff, + ) + + def __eq__(self, actual) -> bool: + import numpy as np + + # self.expected is supposed to always be an array here. 
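+        # ``actual`` may be any array-like (list, tuple, another ndarray) or a
+        # plain scalar; anything ``np.asarray()`` accepts can be compared.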
+ + if not np.isscalar(actual): + try: + actual = np.asarray(actual) + except Exception as e: + raise TypeError(f"cannot compare '{actual}' to numpy.ndarray") from e + + if not np.isscalar(actual) and actual.shape != self.expected.shape: + return False + + return super().__eq__(actual) + + def _yield_comparisons(self, actual): + import numpy as np + + # `actual` can either be a numpy array or a scalar, it is treated in + # `__eq__` before being passed to `ApproxBase.__eq__`, which is the + # only method that calls this one. + + if np.isscalar(actual): + for i in np.ndindex(self.expected.shape): + yield actual, self.expected[i].item() + else: + for i in np.ndindex(self.expected.shape): + yield actual[i].item(), self.expected[i].item() + + +class ApproxMapping(ApproxBase): + """Perform approximate comparisons where the expected value is a mapping + with numeric values (the keys can be anything).""" + + def __repr__(self) -> str: + return "approx({!r})".format( + {k: self._approx_scalar(v) for k, v in self.expected.items()} + ) + + def _repr_compare(self, other_side: Mapping[object, float]) -> List[str]: + import math + + approx_side_as_map = { + k: self._approx_scalar(v) for k, v in self.expected.items() + } + + number_of_elements = len(approx_side_as_map) + max_abs_diff = -math.inf + max_rel_diff = -math.inf + different_ids = [] + for (approx_key, approx_value), other_value in zip( + approx_side_as_map.items(), other_side.values() + ): + if approx_value != other_value: + max_abs_diff = max( + max_abs_diff, abs(approx_value.expected - other_value) + ) + if approx_value.expected == 0.0: + max_rel_diff = math.inf + else: + max_rel_diff = max( + max_rel_diff, + abs( + (approx_value.expected - other_value) + / approx_value.expected + ), + ) + different_ids.append(approx_key) + + message_data = [ + (str(key), str(other_side[key]), str(approx_side_as_map[key])) + for key in different_ids + ] + + return _compare_approx( + self.expected, + message_data, + number_of_elements, + different_ids, + max_abs_diff, + max_rel_diff, + ) + + def __eq__(self, actual) -> bool: + try: + if set(actual.keys()) != set(self.expected.keys()): + return False + except AttributeError: + return False + + return super().__eq__(actual) + + def _yield_comparisons(self, actual): + for k in self.expected.keys(): + yield actual[k], self.expected[k] + + def _check_type(self) -> None: + __tracebackhide__ = True + for key, value in self.expected.items(): + if isinstance(value, type(self.expected)): + msg = "pytest.approx() does not support nested dictionaries: key={!r} value={!r}\n full mapping={}" + raise TypeError(msg.format(key, value, pprint.pformat(self.expected))) + + +class ApproxSequenceLike(ApproxBase): + """Perform approximate comparisons where the expected value is a sequence of numbers.""" + + def __repr__(self) -> str: + seq_type = type(self.expected) + if seq_type not in (tuple, list): + seq_type = list + return "approx({!r})".format( + seq_type(self._approx_scalar(x) for x in self.expected) + ) + + def _repr_compare(self, other_side: Sequence[float]) -> List[str]: + import math + + if len(self.expected) != len(other_side): + return [ + "Impossible to compare lists with different sizes.", + f"Lengths: {len(self.expected)} and {len(other_side)}", + ] + + approx_side_as_map = _recursive_sequence_map(self._approx_scalar, self.expected) + + number_of_elements = len(approx_side_as_map) + max_abs_diff = -math.inf + max_rel_diff = -math.inf + different_ids = [] + for i, (approx_value, other_value) in enumerate( + 
zip(approx_side_as_map, other_side) + ): + if approx_value != other_value: + abs_diff = abs(approx_value.expected - other_value) + max_abs_diff = max(max_abs_diff, abs_diff) + if other_value == 0.0: + max_rel_diff = math.inf + else: + max_rel_diff = max(max_rel_diff, abs_diff / abs(other_value)) + different_ids.append(i) + + message_data = [ + (str(i), str(other_side[i]), str(approx_side_as_map[i])) + for i in different_ids + ] + + return _compare_approx( + self.expected, + message_data, + number_of_elements, + different_ids, + max_abs_diff, + max_rel_diff, + ) + + def __eq__(self, actual) -> bool: + try: + if len(actual) != len(self.expected): + return False + except TypeError: + return False + return super().__eq__(actual) + + def _yield_comparisons(self, actual): + return zip(actual, self.expected) + + def _check_type(self) -> None: + __tracebackhide__ = True + for index, x in enumerate(self.expected): + if isinstance(x, type(self.expected)): + msg = "pytest.approx() does not support nested data structures: {!r} at index {}\n full sequence: {}" + raise TypeError(msg.format(x, index, pprint.pformat(self.expected))) + + +class ApproxScalar(ApproxBase): + """Perform approximate comparisons where the expected value is a single number.""" + + # Using Real should be better than this Union, but not possible yet: + # https://github.com/python/typeshed/pull/3108 + DEFAULT_ABSOLUTE_TOLERANCE: Union[float, Decimal] = 1e-12 + DEFAULT_RELATIVE_TOLERANCE: Union[float, Decimal] = 1e-6 + + def __repr__(self) -> str: + """Return a string communicating both the expected value and the + tolerance for the comparison being made. + + For example, ``1.0 ± 1e-6``, ``(3+4j) ± 5e-6 ∠ ±180°``. + """ + # Don't show a tolerance for values that aren't compared using + # tolerances, i.e. non-numerics and infinities. Need to call abs to + # handle complex numbers, e.g. (inf + 1j). + if (not isinstance(self.expected, (Complex, Decimal))) or math.isinf( + abs(self.expected) # type: ignore[arg-type] + ): + return str(self.expected) + + # If a sensible tolerance can't be calculated, self.tolerance will + # raise a ValueError. In this case, display '???'. + try: + vetted_tolerance = f"{self.tolerance:.1e}" + if ( + isinstance(self.expected, Complex) + and self.expected.imag + and not math.isinf(self.tolerance) + ): + vetted_tolerance += " ∠ ±180°" + except ValueError: + vetted_tolerance = "???" + + return f"{self.expected} ± {vetted_tolerance}" + + def __eq__(self, actual) -> bool: + """Return whether the given value is equal to the expected value + within the pre-specified tolerance.""" + asarray = _as_numpy_array(actual) + if asarray is not None: + # Call ``__eq__()`` manually to prevent infinite-recursion with + # numpy<1.13. See #3748. + return all(self.__eq__(a) for a in asarray.flat) + + # Short-circuit exact equality. + if actual == self.expected: + return True + + # If either type is non-numeric, fall back to strict equality. + # NB: we need Complex, rather than just Number, to ensure that __abs__, + # __sub__, and __float__ are defined. + if not ( + isinstance(self.expected, (Complex, Decimal)) + and isinstance(actual, (Complex, Decimal)) + ): + return False + + # Allow the user to control whether NaNs are considered equal to each + # other or not. The abs() calls are for compatibility with complex + # numbers. 
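+        # For example, ``approx(math.nan) == math.nan`` is False by default,
+        # while ``approx(math.nan, nan_ok=True) == math.nan`` is True.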
+ if math.isnan(abs(self.expected)): # type: ignore[arg-type] + return self.nan_ok and math.isnan(abs(actual)) # type: ignore[arg-type] + + # Infinity shouldn't be approximately equal to anything but itself, but + # if there's a relative tolerance, it will be infinite and infinity + # will seem approximately equal to everything. The equal-to-itself + # case would have been short circuited above, so here we can just + # return false if the expected value is infinite. The abs() call is + # for compatibility with complex numbers. + if math.isinf(abs(self.expected)): # type: ignore[arg-type] + return False + + # Return true if the two numbers are within the tolerance. + result: bool = abs(self.expected - actual) <= self.tolerance + return result + + # Ignore type because of https://github.com/python/mypy/issues/4266. + __hash__ = None # type: ignore + + @property + def tolerance(self): + """Return the tolerance for the comparison. + + This could be either an absolute tolerance or a relative tolerance, + depending on what the user specified or which would be larger. + """ + + def set_default(x, default): + return x if x is not None else default + + # Figure out what the absolute tolerance should be. ``self.abs`` is + # either None or a value specified by the user. + absolute_tolerance = set_default(self.abs, self.DEFAULT_ABSOLUTE_TOLERANCE) + + if absolute_tolerance < 0: + raise ValueError( + f"absolute tolerance can't be negative: {absolute_tolerance}" + ) + if math.isnan(absolute_tolerance): + raise ValueError("absolute tolerance can't be NaN.") + + # If the user specified an absolute tolerance but not a relative one, + # just return the absolute tolerance. + if self.rel is None: + if self.abs is not None: + return absolute_tolerance + + # Figure out what the relative tolerance should be. ``self.rel`` is + # either None or a value specified by the user. This is done after + # we've made sure the user didn't ask for an absolute tolerance only, + # because we don't want to raise errors about the relative tolerance if + # we aren't even going to use it. + relative_tolerance = set_default( + self.rel, self.DEFAULT_RELATIVE_TOLERANCE + ) * abs(self.expected) + + if relative_tolerance < 0: + raise ValueError( + f"relative tolerance can't be negative: {relative_tolerance}" + ) + if math.isnan(relative_tolerance): + raise ValueError("relative tolerance can't be NaN.") + + # Return the larger of the relative and absolute tolerances. + return max(relative_tolerance, absolute_tolerance) + + +class ApproxDecimal(ApproxScalar): + """Perform approximate comparisons where the expected value is a Decimal.""" + + DEFAULT_ABSOLUTE_TOLERANCE = Decimal("1e-12") + DEFAULT_RELATIVE_TOLERANCE = Decimal("1e-6") + + +def approx(expected, rel=None, abs=None, nan_ok: bool = False) -> ApproxBase: + """Assert that two numbers (or two ordered sequences of numbers) are equal to each other + within some tolerance. + + Due to the :doc:`python:tutorial/floatingpoint`, numbers that we + would intuitively expect to be equal are not always so:: + + >>> 0.1 + 0.2 == 0.3 + False + + This problem is commonly encountered when writing tests, e.g. when making + sure that floating-point values are what you expect them to be. One way to + deal with this problem is to assert that two floating-point numbers are + equal to within some appropriate tolerance:: + + >>> abs((0.1 + 0.2) - 0.3) < 1e-6 + True + + However, comparisons like this are tedious to write and difficult to + understand. 
Furthermore, absolute comparisons like the one above are + usually discouraged because there's no tolerance that works well for all + situations. ``1e-6`` is good for numbers around ``1``, but too small for + very big numbers and too big for very small ones. It's better to express + the tolerance as a fraction of the expected value, but relative comparisons + like that are even more difficult to write correctly and concisely. + + The ``approx`` class performs floating-point comparisons using a syntax + that's as intuitive as possible:: + + >>> from pytest import approx + >>> 0.1 + 0.2 == approx(0.3) + True + + The same syntax also works for ordered sequences of numbers:: + + >>> (0.1 + 0.2, 0.2 + 0.4) == approx((0.3, 0.6)) + True + + ``numpy`` arrays:: + + >>> import numpy as np # doctest: +SKIP + >>> np.array([0.1, 0.2]) + np.array([0.2, 0.4]) == approx(np.array([0.3, 0.6])) # doctest: +SKIP + True + + And for a ``numpy`` array against a scalar:: + + >>> import numpy as np # doctest: +SKIP + >>> np.array([0.1, 0.2]) + np.array([0.2, 0.1]) == approx(0.3) # doctest: +SKIP + True + + Only ordered sequences are supported, because ``approx`` needs + to infer the relative position of the sequences without ambiguity. This means + ``sets`` and other unordered sequences are not supported. + + Finally, dictionary *values* can also be compared:: + + >>> {'a': 0.1 + 0.2, 'b': 0.2 + 0.4} == approx({'a': 0.3, 'b': 0.6}) + True + + The comparison will be true if both mappings have the same keys and their + respective values match the expected tolerances. + + **Tolerances** + + By default, ``approx`` considers numbers within a relative tolerance of + ``1e-6`` (i.e. one part in a million) of its expected value to be equal. + This treatment would lead to surprising results if the expected value was + ``0.0``, because nothing but ``0.0`` itself is relatively close to ``0.0``. + To handle this case less surprisingly, ``approx`` also considers numbers + within an absolute tolerance of ``1e-12`` of its expected value to be + equal. Infinity and NaN are special cases. Infinity is only considered + equal to itself, regardless of the relative tolerance. NaN is not + considered equal to anything by default, but you can make it be equal to + itself by setting the ``nan_ok`` argument to True. (This is meant to + facilitate comparing arrays that use NaN to mean "no data".) + + Both the relative and absolute tolerances can be changed by passing + arguments to the ``approx`` constructor:: + + >>> 1.0001 == approx(1) + False + >>> 1.0001 == approx(1, rel=1e-3) + True + >>> 1.0001 == approx(1, abs=1e-3) + True + + If you specify ``abs`` but not ``rel``, the comparison will not consider + the relative tolerance at all. In other words, two numbers that are within + the default relative tolerance of ``1e-6`` will still be considered unequal + if they exceed the specified absolute tolerance. If you specify both + ``abs`` and ``rel``, the numbers will be considered equal if either + tolerance is met:: + + >>> 1 + 1e-8 == approx(1) + True + >>> 1 + 1e-8 == approx(1, abs=1e-12) + False + >>> 1 + 1e-8 == approx(1, rel=1e-6, abs=1e-12) + True + + You can also use ``approx`` to compare nonnumeric types, or dicts and + sequences containing nonnumeric types, in which case it falls back to + strict equality. 
This can be useful for comparing dicts and sequences that
+    can contain optional values::
+
+        >>> {"required": 1.0000005, "optional": None} == approx({"required": 1, "optional": None})
+        True
+        >>> [None, 1.0000005] == approx([None,1])
+        True
+        >>> ["foo", 1.0000005] == approx([None,1])
+        False
+
+    If you're thinking about using ``approx``, then you might want to know how
+    it compares to other good ways of comparing floating-point numbers.  All of
+    these algorithms are based on relative and absolute tolerances and should
+    agree for the most part, but they do have meaningful differences:
+
+    - ``math.isclose(a, b, rel_tol=1e-9, abs_tol=0.0)``: True if the relative
+      tolerance is met w.r.t. either ``a`` or ``b`` or if the absolute
+      tolerance is met. Because the relative tolerance is calculated w.r.t.
+      both ``a`` and ``b``, this test is symmetric (i.e. neither ``a`` nor
+      ``b`` is a "reference value"). You have to specify an absolute tolerance
+      if you want to compare to ``0.0`` because there is no tolerance by
+      default. More information: :py:func:`math.isclose`.
+
+    - ``numpy.isclose(a, b, rtol=1e-5, atol=1e-8)``: True if the difference
+      between ``a`` and ``b`` is less than the sum of the relative tolerance
+      w.r.t. ``b`` and the absolute tolerance. Because the relative tolerance
+      is only calculated w.r.t. ``b``, this test is asymmetric and you can
+      think of ``b`` as the reference value. Support for comparing sequences
+      is provided by :py:func:`numpy.allclose`. More information:
+      :std:doc:`numpy:reference/generated/numpy.isclose`.
+
+    - ``unittest.TestCase.assertAlmostEqual(a, b)``: True if ``a`` and ``b``
+      are within an absolute tolerance of ``1e-7``. No relative tolerance is
+      considered, so this function is not appropriate for very large or very
+      small numbers. Also, it's only available in subclasses of ``unittest.TestCase``
+      and it's ugly because it doesn't follow PEP8. More information:
+      :py:meth:`unittest.TestCase.assertAlmostEqual`.
+
+    - ``a == pytest.approx(b, rel=1e-6, abs=1e-12)``: True if the relative
+      tolerance is met w.r.t. ``b`` or if the absolute tolerance is met.
+      Because the relative tolerance is only calculated w.r.t. ``b``, this test
+      is asymmetric and you can think of ``b`` as the reference value. In the
+      special case that you explicitly specify an absolute tolerance but not a
+      relative tolerance, only the absolute tolerance is considered.
+
+    .. note::
+
+        ``approx`` can handle numpy arrays, but we recommend the
+        specialised test helpers in :std:doc:`numpy:reference/routines.testing`
+        if you need support for comparisons, NaNs, or ULP-based tolerances.
+
+        To match strings using regex, you can use
+        `Matches <https://github.com/asottile/re-assert>`_
+        from the
+        `re_assert package <https://github.com/asottile/re-assert>`_.
+
+    .. warning::
+
+       .. versionchanged:: 3.2
+
+       In order to avoid inconsistent behavior, :py:exc:`TypeError` is
+       raised for ``>``, ``>=``, ``<`` and ``<=`` comparisons.
+       The example below illustrates the problem::
+
+           assert approx(0.1) > 0.1 + 1e-10  # calls approx(0.1).__gt__(0.1 + 1e-10)
+           assert 0.1 + 1e-10 > approx(0.1)  # calls approx(0.1).__lt__(0.1 + 1e-10)
+
+       In the second example one expects ``approx(0.1).__le__(0.1 + 1e-10)``
+       to be called. But instead, ``approx(0.1).__lt__(0.1 + 1e-10)`` is used for
+       the comparison. This is because the call hierarchy of rich comparisons
+       follows a fixed behavior. More information: :py:meth:`object.__ge__`
+
+    .. versionchanged:: 3.7.1
+       ``approx`` raises ``TypeError`` when it encounters a dict value or
+       sequence element of nonnumeric type.
+
+    .. versionchanged:: 6.1.0
+       ``approx`` falls back to strict equality for nonnumeric types instead
+       of raising ``TypeError``.
+    """
+
+    # Delegate the comparison to a class that knows how to deal with the type
+    # of the expected value (e.g. int, float, list, dict, numpy.array, etc).
+    #
+    # The primary responsibility of these classes is to implement ``__eq__()``
+    # and ``__repr__()``.  The former is used to actually check if some
+    # "actual" value is equivalent to the given expected value within the
+    # allowed tolerance.  The latter is used to show the user the expected
+    # value and tolerance, in the case that a test failed.
+    #
+    # The actual logic for making approximate comparisons can be found in
+    # ApproxScalar, which is used to compare individual numbers.  All of the
+    # other Approx classes eventually delegate to this class.  The ApproxBase
+    # class provides some convenient methods and overloads, but isn't really
+    # essential.
+
+    __tracebackhide__ = True
+
+    if isinstance(expected, Decimal):
+        cls: Type[ApproxBase] = ApproxDecimal
+    elif isinstance(expected, Mapping):
+        cls = ApproxMapping
+    elif _is_numpy_array(expected):
+        expected = _as_numpy_array(expected)
+        cls = ApproxNumpy
+    elif (
+        hasattr(expected, "__getitem__")
+        and isinstance(expected, Sized)
+        # Type ignored because the error is wrong -- not unreachable.
+        and not isinstance(expected, STRING_TYPES)  # type: ignore[unreachable]
+    ):
+        cls = ApproxSequenceLike
+    elif (
+        isinstance(expected, Collection)
+        # Type ignored because the error is wrong -- not unreachable.
+        and not isinstance(expected, STRING_TYPES)  # type: ignore[unreachable]
+    ):
+        msg = f"pytest.approx() only supports ordered sequences, but got: {repr(expected)}"
+        raise TypeError(msg)
+    else:
+        cls = ApproxScalar
+
+    return cls(expected, rel, abs, nan_ok)
+
+
+def _is_numpy_array(obj: object) -> bool:
+    """
+    Return true if the given object is implicitly convertible to ndarray,
+    and numpy is already imported.
+    """
+    return _as_numpy_array(obj) is not None
+
+
+def _as_numpy_array(obj: object) -> Optional["ndarray"]:
+    """
+    Return an ndarray if the given object is implicitly convertible to ndarray,
+    and numpy is already imported, otherwise None.
+    """
+    import sys
+
+    np: Any = sys.modules.get("numpy")
+    if np is not None:
+        # avoid infinite recursion on numpy scalars, which have __array__
+        if np.isscalar(obj):
+            return None
+        elif isinstance(obj, np.ndarray):
+            return obj
+        elif hasattr(obj, "__array__") or hasattr(obj, "__array_interface__"):
+            return np.asarray(obj)
+    return None
+
+
+# builtin pytest.raises helper
+
+E = TypeVar("E", bound=BaseException)
+
+
+@overload
+def raises(
+    expected_exception: Union[Type[E], Tuple[Type[E], ...]],
+    *,
+    match: Optional[Union[str, Pattern[str]]] = ...,
+) -> "RaisesContext[E]":
+    ...
+
+
+@overload
+def raises(  # noqa: F811
+    expected_exception: Union[Type[E], Tuple[Type[E], ...]],
+    func: Callable[..., Any],
+    *args: Any,
+    **kwargs: Any,
+) -> _pytest._code.ExceptionInfo[E]:
+    ...
+
+
+def raises(  # noqa: F811
+    expected_exception: Union[Type[E], Tuple[Type[E], ...]], *args: Any, **kwargs: Any
+) -> Union["RaisesContext[E]", _pytest._code.ExceptionInfo[E]]:
+    r"""Assert that a code block/function call raises an exception.
+
+    :param typing.Type[E] | typing.Tuple[typing.Type[E], ...] expected_exception:
+        The expected exception type, or a tuple if multiple possible
+        exception types are expected.
+    :kwparam str | typing.Pattern[str] | None match:
+        If specified, a string containing a regular expression,
+        or a regular expression object, that is tested against the string
+        representation of the exception using :func:`re.search`.
+
+        To match a literal string that may contain :ref:`special characters
+        <python:re-syntax>`, the pattern can first be escaped with :func:`re.escape`.
+
+        (This is only used when :py:func:`pytest.raises` is used as a context manager,
+        and passed through to the function otherwise.
+        When using :py:func:`pytest.raises` as a function, you can use:
+        ``pytest.raises(Exc, func, match="passed on").match("my pattern")``.)
+
+    .. currentmodule:: _pytest._code
+
+    Use ``pytest.raises`` as a context manager, which will capture the exception of the given
+    type::
+
+        >>> import pytest
+        >>> with pytest.raises(ZeroDivisionError):
+        ...    1/0
+
+    If the code block does not raise the expected exception (``ZeroDivisionError`` in the example
+    above), or no exception at all, the check will fail instead.
+
+    You can also use the keyword argument ``match`` to assert that the
+    exception matches a text or regex::
+
+        >>> with pytest.raises(ValueError, match='must be 0 or None'):
+        ...     raise ValueError("value must be 0 or None")
+
+        >>> with pytest.raises(ValueError, match=r'must be \d+$'):
+        ...     raise ValueError("value must be 42")
+
+    The context manager produces an :class:`ExceptionInfo` object which can be used to inspect the
+    details of the captured exception::
+
+        >>> with pytest.raises(ValueError) as exc_info:
+        ...     raise ValueError("value must be 42")
+        >>> assert exc_info.type is ValueError
+        >>> assert exc_info.value.args[0] == "value must be 42"
+
+    .. note::
+
+       When using ``pytest.raises`` as a context manager, it's worthwhile to
+       note that normal context manager rules apply and that the exception
+       raised *must* be the final line in the scope of the context manager.
+       Lines of code after that, within the scope of the context manager will
+       not be executed. For example::
+
+           >>> value = 15
+           >>> with pytest.raises(ValueError) as exc_info:
+           ...     if value > 10:
+           ...         raise ValueError("value must be <= 10")
+           ...     assert exc_info.type is ValueError  # this will not execute
+
+       Instead, the following approach must be taken (note the difference in
+       scope)::
+
+           >>> with pytest.raises(ValueError) as exc_info:
+           ...     if value > 10:
+           ...         raise ValueError("value must be <= 10")
+           ...
+           >>> assert exc_info.type is ValueError
+
+    **Using with** ``pytest.mark.parametrize``
+
+    When using :ref:`pytest.mark.parametrize ref`
+    it is possible to parametrize tests such that
+    some runs raise an exception and others do not.
+
+    See :ref:`parametrizing_conditional_raising` for an example.
+
+    **Legacy form**
+
+    It is possible to specify a callable by passing a to-be-called lambda::
+
+        >>> raises(ZeroDivisionError, lambda: 1/0)
+        <ExceptionInfo ...>
+
+    or you can specify an arbitrary callable with arguments::
+
+        >>> def f(x): return 1/x
+        ...
+        >>> raises(ZeroDivisionError, f, 0)
+        <ExceptionInfo ...>
+        >>> raises(ZeroDivisionError, f, x=0)
+        <ExceptionInfo ...>
+
+    The form above is fully supported but discouraged for new code because the
+    context manager form is regarded as more readable and less error-prone.
+
+    .. note::
+        Similar to caught exception objects in Python, explicitly clearing
+        local references to returned ``ExceptionInfo`` objects can
+        help the Python interpreter speed up its garbage collection.
+
+        Clearing those references breaks a reference cycle
+        (``ExceptionInfo`` --> caught exception --> frame stack raising
+        the exception --> current frame stack --> local variables -->
+        ``ExceptionInfo``) which makes Python keep all objects referenced
+        from that cycle (including all local variables in the current
+        frame) alive until the next cyclic garbage collection run.
+        More detailed information can be found in the official Python
+        documentation for :ref:`the try statement <python:try>`.
+    """
+    __tracebackhide__ = True
+
+    if not expected_exception:
+        raise ValueError(
+            f"Expected an exception type or a tuple of exception types, but got `{expected_exception!r}`. "
+            f"Raising exceptions is already understood as failing the test, so you don't need "
+            f"any special code to say 'this should never raise an exception'."
+        )
+    if isinstance(expected_exception, type):
+        expected_exceptions: Tuple[Type[E], ...] = (expected_exception,)
+    else:
+        expected_exceptions = expected_exception
+    for exc in expected_exceptions:
+        if not isinstance(exc, type) or not issubclass(exc, BaseException):
+            msg = "expected exception must be a BaseException type, not {}"  # type: ignore[unreachable]
+            not_a = exc.__name__ if isinstance(exc, type) else type(exc).__name__
+            raise TypeError(msg.format(not_a))
+
+    message = f"DID NOT RAISE {expected_exception}"
+
+    if not args:
+        match: Optional[Union[str, Pattern[str]]] = kwargs.pop("match", None)
+        if kwargs:
+            msg = "Unexpected keyword arguments passed to pytest.raises: "
+            msg += ", ".join(sorted(kwargs))
+            msg += "\nUse context-manager form instead?"
+            raise TypeError(msg)
+        return RaisesContext(expected_exception, message, match)
+    else:
+        func = args[0]
+        if not callable(func):
+            raise TypeError(f"{func!r} object (type: {type(func)}) must be callable")
+        try:
+            func(*args[1:], **kwargs)
+        except expected_exception as e:
+            return _pytest._code.ExceptionInfo.from_exception(e)
+        fail(message)
+
+
+# This doesn't work with mypy for now. Use fail.Exception instead.
+raises.Exception = fail.Exception  # type: ignore
+
+
+@final
+class RaisesContext(ContextManager[_pytest._code.ExceptionInfo[E]]):
+    def __init__(
+        self,
+        expected_exception: Union[Type[E], Tuple[Type[E], ...]],
+        message: str,
+        match_expr: Optional[Union[str, Pattern[str]]] = None,
+    ) -> None:
+        self.expected_exception = expected_exception
+        self.message = message
+        self.match_expr = match_expr
+        self.excinfo: Optional[_pytest._code.ExceptionInfo[E]] = None
+
+    def __enter__(self) -> _pytest._code.ExceptionInfo[E]:
+        self.excinfo = _pytest._code.ExceptionInfo.for_later()
+        return self.excinfo
+
+    def __exit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> bool:
+        __tracebackhide__ = True
+        if exc_type is None:
+            fail(self.message)
+        assert self.excinfo is not None
+        if not issubclass(exc_type, self.expected_exception):
+            return False
+        # Cast to narrow the exception type now that it's verified.
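+        # Returning False above (for a non-matching exception) lets that
+        # exception propagate instead of being swallowed by the context manager.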
+ exc_info = cast(Tuple[Type[E], E, TracebackType], (exc_type, exc_val, exc_tb)) + self.excinfo.fill_unfilled(exc_info) + if self.match_expr is not None: + self.excinfo.match(self.match_expr) + return True diff --git a/venv/lib/python3.11/site-packages/_pytest/python_path.py b/venv/lib/python3.11/site-packages/_pytest/python_path.py new file mode 100644 index 0000000..cceabbc --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/python_path.py @@ -0,0 +1,24 @@ +import sys + +import pytest +from pytest import Config +from pytest import Parser + + +def pytest_addoption(parser: Parser) -> None: + parser.addini("pythonpath", type="paths", help="Add paths to sys.path", default=[]) + + +@pytest.hookimpl(tryfirst=True) +def pytest_load_initial_conftests(early_config: Config) -> None: + # `pythonpath = a b` will set `sys.path` to `[a, b, x, y, z, ...]` + for path in reversed(early_config.getini("pythonpath")): + sys.path.insert(0, str(path)) + + +@pytest.hookimpl(trylast=True) +def pytest_unconfigure(config: Config) -> None: + for path in config.getini("pythonpath"): + path_str = str(path) + if path_str in sys.path: + sys.path.remove(path_str) diff --git a/venv/lib/python3.11/site-packages/_pytest/recwarn.py b/venv/lib/python3.11/site-packages/_pytest/recwarn.py new file mode 100644 index 0000000..d76ea02 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/recwarn.py @@ -0,0 +1,313 @@ +"""Record warnings during test function execution.""" +import re +import warnings +from pprint import pformat +from types import TracebackType +from typing import Any +from typing import Callable +from typing import Generator +from typing import Iterator +from typing import List +from typing import Optional +from typing import Pattern +from typing import Tuple +from typing import Type +from typing import TypeVar +from typing import Union + +from _pytest.compat import final +from _pytest.compat import overload +from _pytest.deprecated import check_ispytest +from _pytest.deprecated import WARNS_NONE_ARG +from _pytest.fixtures import fixture +from _pytest.outcomes import fail + + +T = TypeVar("T") + + +@fixture +def recwarn() -> Generator["WarningsRecorder", None, None]: + """Return a :class:`WarningsRecorder` instance that records all warnings emitted by test functions. + + See https://docs.pytest.org/en/latest/how-to/capture-warnings.html for information + on warning categories. + """ + wrec = WarningsRecorder(_ispytest=True) + with wrec: + warnings.simplefilter("default") + yield wrec + + +@overload +def deprecated_call( + *, match: Optional[Union[str, Pattern[str]]] = ... +) -> "WarningsRecorder": + ... + + +@overload +def deprecated_call( # noqa: F811 + func: Callable[..., T], *args: Any, **kwargs: Any +) -> T: + ... + + +def deprecated_call( # noqa: F811 + func: Optional[Callable[..., Any]] = None, *args: Any, **kwargs: Any +) -> Union["WarningsRecorder", Any]: + """Assert that code produces a ``DeprecationWarning`` or ``PendingDeprecationWarning``. + + This function can be used as a context manager:: + + >>> import warnings + >>> def api_call_v2(): + ... warnings.warn('use v3 of this api', DeprecationWarning) + ... return 200 + + >>> import pytest + >>> with pytest.deprecated_call(): + ... assert api_call_v2() == 200 + + It can also be used by passing a function and ``*args`` and ``**kwargs``, + in which case it will ensure calling ``func(*args, **kwargs)`` produces one of + the warnings types above. The return value is the return value of the function. 
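+
+    For example, reusing ``api_call_v2`` from the snippet above::
+
+        >>> pytest.deprecated_call(api_call_v2)
+        200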
+ + In the context manager form you may use the keyword argument ``match`` to assert + that the warning matches a text or regex. + + The context manager produces a list of :class:`warnings.WarningMessage` objects, + one for each warning raised. + """ + __tracebackhide__ = True + if func is not None: + args = (func,) + args + return warns((DeprecationWarning, PendingDeprecationWarning), *args, **kwargs) + + +@overload +def warns( + expected_warning: Union[Type[Warning], Tuple[Type[Warning], ...]] = ..., + *, + match: Optional[Union[str, Pattern[str]]] = ..., +) -> "WarningsChecker": + ... + + +@overload +def warns( # noqa: F811 + expected_warning: Union[Type[Warning], Tuple[Type[Warning], ...]], + func: Callable[..., T], + *args: Any, + **kwargs: Any, +) -> T: + ... + + +def warns( # noqa: F811 + expected_warning: Union[Type[Warning], Tuple[Type[Warning], ...]] = Warning, + *args: Any, + match: Optional[Union[str, Pattern[str]]] = None, + **kwargs: Any, +) -> Union["WarningsChecker", Any]: + r"""Assert that code raises a particular class of warning. + + Specifically, the parameter ``expected_warning`` can be a warning class or sequence + of warning classes, and the code inside the ``with`` block must issue at least one + warning of that class or classes. + + This helper produces a list of :class:`warnings.WarningMessage` objects, one for + each warning raised (regardless of whether it is an ``expected_warning`` or not). + + This function can be used as a context manager, which will capture all the raised + warnings inside it:: + + >>> import pytest + >>> with pytest.warns(RuntimeWarning): + ... warnings.warn("my warning", RuntimeWarning) + + In the context manager form you may use the keyword argument ``match`` to assert + that the warning matches a text or regex:: + + >>> with pytest.warns(UserWarning, match='must be 0 or None'): + ... warnings.warn("value must be 0 or None", UserWarning) + + >>> with pytest.warns(UserWarning, match=r'must be \d+$'): + ... warnings.warn("value must be 42", UserWarning) + + >>> with pytest.warns(UserWarning, match=r'must be \d+$'): + ... warnings.warn("this is not here", UserWarning) + Traceback (most recent call last): + ... + Failed: DID NOT WARN. No warnings of type ...UserWarning... were emitted... + + **Using with** ``pytest.mark.parametrize`` + + When using :ref:`pytest.mark.parametrize ref` it is possible to parametrize tests + such that some runs raise a warning and others do not. + + This could be achieved in the same way as with exceptions, see + :ref:`parametrizing_conditional_raising` for an example. + + """ + __tracebackhide__ = True + if not args: + if kwargs: + argnames = ", ".join(sorted(kwargs)) + raise TypeError( + f"Unexpected keyword arguments passed to pytest.warns: {argnames}" + "\nUse context-manager form instead?" + ) + return WarningsChecker(expected_warning, match_expr=match, _ispytest=True) + else: + func = args[0] + if not callable(func): + raise TypeError(f"{func!r} object (type: {type(func)}) must be callable") + with WarningsChecker(expected_warning, _ispytest=True): + return func(*args[1:], **kwargs) + + +class WarningsRecorder(warnings.catch_warnings): # type:ignore[type-arg] + """A context manager to record raised warnings. + + Each recorded warning is an instance of :class:`warnings.WarningMessage`. + + Adapted from `warnings.catch_warnings`. + + .. note:: + ``DeprecationWarning`` and ``PendingDeprecationWarning`` are treated + differently; see :ref:`ensuring_function_triggers`. 
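+
+    A minimal usage sketch via the ``recwarn`` fixture defined above (the test
+    and message names here are illustrative)::
+
+        import warnings
+
+        def test_hello(recwarn):
+            warnings.warn("hello", UserWarning)
+            assert len(recwarn) == 1
+            w = recwarn.pop(UserWarning)
+            assert issubclass(w.category, UserWarning)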
+ + """ + + def __init__(self, *, _ispytest: bool = False) -> None: + check_ispytest(_ispytest) + # Type ignored due to the way typeshed handles warnings.catch_warnings. + super().__init__(record=True) # type: ignore[call-arg] + self._entered = False + self._list: List[warnings.WarningMessage] = [] + + @property + def list(self) -> List["warnings.WarningMessage"]: + """The list of recorded warnings.""" + return self._list + + def __getitem__(self, i: int) -> "warnings.WarningMessage": + """Get a recorded warning by index.""" + return self._list[i] + + def __iter__(self) -> Iterator["warnings.WarningMessage"]: + """Iterate through the recorded warnings.""" + return iter(self._list) + + def __len__(self) -> int: + """The number of recorded warnings.""" + return len(self._list) + + def pop(self, cls: Type[Warning] = Warning) -> "warnings.WarningMessage": + """Pop the first recorded warning, raise exception if not exists.""" + for i, w in enumerate(self._list): + if issubclass(w.category, cls): + return self._list.pop(i) + __tracebackhide__ = True + raise AssertionError(f"{cls!r} not found in warning list") + + def clear(self) -> None: + """Clear the list of recorded warnings.""" + self._list[:] = [] + + # Type ignored because it doesn't exactly warnings.catch_warnings.__enter__ + # -- it returns a List but we only emulate one. + def __enter__(self) -> "WarningsRecorder": # type: ignore + if self._entered: + __tracebackhide__ = True + raise RuntimeError(f"Cannot enter {self!r} twice") + _list = super().__enter__() + # record=True means it's None. + assert _list is not None + self._list = _list + warnings.simplefilter("always") + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + if not self._entered: + __tracebackhide__ = True + raise RuntimeError(f"Cannot exit {self!r} without entering first") + + super().__exit__(exc_type, exc_val, exc_tb) + + # Built-in catch_warnings does not reset entered state so we do it + # manually here for this context manager to become reusable. 
+ self._entered = False + + +@final +class WarningsChecker(WarningsRecorder): + def __init__( + self, + expected_warning: Optional[ + Union[Type[Warning], Tuple[Type[Warning], ...]] + ] = Warning, + match_expr: Optional[Union[str, Pattern[str]]] = None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + super().__init__(_ispytest=True) + + msg = "exceptions must be derived from Warning, not %s" + if expected_warning is None: + warnings.warn(WARNS_NONE_ARG, stacklevel=4) + expected_warning_tup = None + elif isinstance(expected_warning, tuple): + for exc in expected_warning: + if not issubclass(exc, Warning): + raise TypeError(msg % type(exc)) + expected_warning_tup = expected_warning + elif issubclass(expected_warning, Warning): + expected_warning_tup = (expected_warning,) + else: + raise TypeError(msg % type(expected_warning)) + + self.expected_warning = expected_warning_tup + self.match_expr = match_expr + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + super().__exit__(exc_type, exc_val, exc_tb) + + __tracebackhide__ = True + + def found_str(): + return pformat([record.message for record in self], indent=2) + + # only check if we're not currently handling an exception + if exc_type is None and exc_val is None and exc_tb is None: + if self.expected_warning is not None: + if not any(issubclass(r.category, self.expected_warning) for r in self): + __tracebackhide__ = True + fail( + f"DID NOT WARN. No warnings of type {self.expected_warning} were emitted.\n" + f"The list of emitted warnings is: {found_str()}." + ) + elif self.match_expr is not None: + for r in self: + if issubclass(r.category, self.expected_warning): + if re.compile(self.match_expr).search(str(r.message)): + break + else: + fail( + f"""\ +DID NOT WARN. No warnings of type {self.expected_warning} matching the regex were emitted. 
+ Regex: {self.match_expr} + Emitted warnings: {found_str()}""" + ) diff --git a/venv/lib/python3.11/site-packages/_pytest/reports.py b/venv/lib/python3.11/site-packages/_pytest/reports.py new file mode 100644 index 0000000..74e8794 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/reports.py @@ -0,0 +1,622 @@ +import dataclasses +import os +from io import StringIO +from pprint import pprint +from typing import Any +from typing import cast +from typing import Dict +from typing import Iterable +from typing import Iterator +from typing import List +from typing import Mapping +from typing import NoReturn +from typing import Optional +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from _pytest._code.code import ExceptionChainRepr +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import ExceptionRepr +from _pytest._code.code import ReprEntry +from _pytest._code.code import ReprEntryNative +from _pytest._code.code import ReprExceptionInfo +from _pytest._code.code import ReprFileLocation +from _pytest._code.code import ReprFuncArgs +from _pytest._code.code import ReprLocals +from _pytest._code.code import ReprTraceback +from _pytest._code.code import TerminalRepr +from _pytest._io import TerminalWriter +from _pytest.compat import final +from _pytest.config import Config +from _pytest.nodes import Collector +from _pytest.nodes import Item +from _pytest.outcomes import skip + +if TYPE_CHECKING: + from typing_extensions import Literal + + from _pytest.runner import CallInfo + + +def getworkerinfoline(node): + try: + return node._workerinfocache + except AttributeError: + d = node.workerinfo + ver = "%s.%s.%s" % d["version_info"][:3] + node._workerinfocache = s = "[{}] {} -- Python {} {}".format( + d["id"], d["sysplatform"], ver, d["executable"] + ) + return s + + +_R = TypeVar("_R", bound="BaseReport") + + +class BaseReport: + when: Optional[str] + location: Optional[Tuple[str, Optional[int], str]] + longrepr: Union[ + None, ExceptionInfo[BaseException], Tuple[str, int, str], str, TerminalRepr + ] + sections: List[Tuple[str, str]] + nodeid: str + outcome: "Literal['passed', 'failed', 'skipped']" + + def __init__(self, **kw: Any) -> None: + self.__dict__.update(kw) + + if TYPE_CHECKING: + # Can have arbitrary fields given to __init__(). + def __getattr__(self, key: str) -> Any: + ... + + def toterminal(self, out: TerminalWriter) -> None: + if hasattr(self, "node"): + worker_info = getworkerinfoline(self.node) + if worker_info: + out.line(worker_info) + + longrepr = self.longrepr + if longrepr is None: + return + + if hasattr(longrepr, "toterminal"): + longrepr_terminal = cast(TerminalRepr, longrepr) + longrepr_terminal.toterminal(out) + else: + try: + s = str(longrepr) + except UnicodeEncodeError: + s = "" + out.line(s) + + def get_sections(self, prefix: str) -> Iterator[Tuple[str, str]]: + for name, content in self.sections: + if name.startswith(prefix): + yield prefix, content + + @property + def longreprtext(self) -> str: + """Read-only property that returns the full string representation of + ``longrepr``. + + .. versionadded:: 3.0 + """ + file = StringIO() + tw = TerminalWriter(file) + tw.hasmarkup = False + self.toterminal(tw) + exc = file.getvalue() + return exc.strip() + + @property + def caplog(self) -> str: + """Return captured log lines, if log capturing is enabled. + + .. 
versionadded:: 3.5 + """ + return "\n".join( + content for (prefix, content) in self.get_sections("Captured log") + ) + + @property + def capstdout(self) -> str: + """Return captured text from stdout, if capturing is enabled. + + .. versionadded:: 3.0 + """ + return "".join( + content for (prefix, content) in self.get_sections("Captured stdout") + ) + + @property + def capstderr(self) -> str: + """Return captured text from stderr, if capturing is enabled. + + .. versionadded:: 3.0 + """ + return "".join( + content for (prefix, content) in self.get_sections("Captured stderr") + ) + + @property + def passed(self) -> bool: + """Whether the outcome is passed.""" + return self.outcome == "passed" + + @property + def failed(self) -> bool: + """Whether the outcome is failed.""" + return self.outcome == "failed" + + @property + def skipped(self) -> bool: + """Whether the outcome is skipped.""" + return self.outcome == "skipped" + + @property + def fspath(self) -> str: + """The path portion of the reported node, as a string.""" + return self.nodeid.split("::")[0] + + @property + def count_towards_summary(self) -> bool: + """**Experimental** Whether this report should be counted towards the + totals shown at the end of the test session: "1 passed, 1 failure, etc". + + .. note:: + + This function is considered **experimental**, so beware that it is subject to changes + even in patch releases. + """ + return True + + @property + def head_line(self) -> Optional[str]: + """**Experimental** The head line shown with longrepr output for this + report, more commonly during traceback representation during + failures:: + + ________ Test.foo ________ + + + In the example above, the head_line is "Test.foo". + + .. note:: + + This function is considered **experimental**, so beware that it is subject to changes + even in patch releases. + """ + if self.location is not None: + fspath, lineno, domain = self.location + return domain + return None + + def _get_verbose_word(self, config: Config): + _category, _short, verbose = config.hook.pytest_report_teststatus( + report=self, config=config + ) + return verbose + + def _to_json(self) -> Dict[str, Any]: + """Return the contents of this report as a dict of builtin entries, + suitable for serialization. + + This was originally the serialize_report() function from xdist (ca03269). + + Experimental method. + """ + return _report_to_json(self) + + @classmethod + def _from_json(cls: Type[_R], reportdict: Dict[str, object]) -> _R: + """Create either a TestReport or CollectReport, depending on the calling class. + + It is the callers responsibility to know which class to pass here. + + This was originally the serialize_report() function from xdist (ca03269). + + Experimental method. + """ + kwargs = _report_kwargs_from_json(reportdict) + return cls(**kwargs) + + +def _report_unserialization_failure( + type_name: str, report_class: Type[BaseReport], reportdict +) -> NoReturn: + url = "https://github.com/pytest-dev/pytest/issues" + stream = StringIO() + pprint("-" * 100, stream=stream) + pprint("INTERNALERROR: Unknown entry type returned: %s" % type_name, stream=stream) + pprint("report_name: %s" % report_class, stream=stream) + pprint(reportdict, stream=stream) + pprint("Please report this bug at %s" % url, stream=stream) + pprint("-" * 100, stream=stream) + raise RuntimeError(stream.getvalue()) + + +@final +class TestReport(BaseReport): + """Basic test report object (also used for setup and teardown calls if + they fail). + + Reports can contain arbitrary extra attributes. 
+ """ + + __test__ = False + + def __init__( + self, + nodeid: str, + location: Tuple[str, Optional[int], str], + keywords: Mapping[str, Any], + outcome: "Literal['passed', 'failed', 'skipped']", + longrepr: Union[ + None, ExceptionInfo[BaseException], Tuple[str, int, str], str, TerminalRepr + ], + when: "Literal['setup', 'call', 'teardown']", + sections: Iterable[Tuple[str, str]] = (), + duration: float = 0, + start: float = 0, + stop: float = 0, + user_properties: Optional[Iterable[Tuple[str, object]]] = None, + **extra, + ) -> None: + #: Normalized collection nodeid. + self.nodeid = nodeid + + #: A (filesystempath, lineno, domaininfo) tuple indicating the + #: actual location of a test item - it might be different from the + #: collected one e.g. if a method is inherited from a different module. + #: The filesystempath may be relative to ``config.rootdir``. + #: The line number is 0-based. + self.location: Tuple[str, Optional[int], str] = location + + #: A name -> value dictionary containing all keywords and + #: markers associated with a test invocation. + self.keywords: Mapping[str, Any] = keywords + + #: Test outcome, always one of "passed", "failed", "skipped". + self.outcome = outcome + + #: None or a failure representation. + self.longrepr = longrepr + + #: One of 'setup', 'call', 'teardown' to indicate runtest phase. + self.when = when + + #: User properties is a list of tuples (name, value) that holds user + #: defined properties of the test. + self.user_properties = list(user_properties or []) + + #: Tuples of str ``(heading, content)`` with extra information + #: for the test report. Used by pytest to add text captured + #: from ``stdout``, ``stderr``, and intercepted logging events. May + #: be used by other plugins to add arbitrary information to reports. + self.sections = list(sections) + + #: Time it took to run just the test. + self.duration: float = duration + + #: The system time when the call started, in seconds since the epoch. + self.start: float = start + #: The system time when the call ended, in seconds since the epoch. + self.stop: float = stop + + self.__dict__.update(extra) + + def __repr__(self) -> str: + return "<{} {!r} when={!r} outcome={!r}>".format( + self.__class__.__name__, self.nodeid, self.when, self.outcome + ) + + @classmethod + def from_item_and_call(cls, item: Item, call: "CallInfo[None]") -> "TestReport": + """Create and fill a TestReport with standard item and call info. + + :param item: The item. + :param call: The call info. + """ + when = call.when + # Remove "collect" from the Literal type -- only for collection calls. + assert when != "collect" + duration = call.duration + start = call.start + stop = call.stop + keywords = {x: 1 for x in item.keywords} + excinfo = call.excinfo + sections = [] + if not call.excinfo: + outcome: Literal["passed", "failed", "skipped"] = "passed" + longrepr: Union[ + None, + ExceptionInfo[BaseException], + Tuple[str, int, str], + str, + TerminalRepr, + ] = None + else: + if not isinstance(excinfo, ExceptionInfo): + outcome = "failed" + longrepr = excinfo + elif isinstance(excinfo.value, skip.Exception): + outcome = "skipped" + r = excinfo._getreprcrash() + assert ( + r is not None + ), "There should always be a traceback entry for skipping a test." 
+                if excinfo.value._use_item_location:
+                    path, line = item.reportinfo()[:2]
+                    assert line is not None
+                    longrepr = os.fspath(path), line + 1, r.message
+                else:
+                    longrepr = (str(r.path), r.lineno, r.message)
+            else:
+                outcome = "failed"
+                if call.when == "call":
+                    longrepr = item.repr_failure(excinfo)
+                else:  # exception in setup or teardown
+                    longrepr = item._repr_failure_py(
+                        excinfo, style=item.config.getoption("tbstyle", "auto")
+                    )
+        for rwhen, key, content in item._report_sections:
+            sections.append((f"Captured {key} {rwhen}", content))
+        return cls(
+            item.nodeid,
+            item.location,
+            keywords,
+            outcome,
+            longrepr,
+            when,
+            sections,
+            duration,
+            start,
+            stop,
+            user_properties=item.user_properties,
+        )
+
+
+@final
+class CollectReport(BaseReport):
+    """Collection report object.
+
+    Reports can contain arbitrary extra attributes.
+    """
+
+    when = "collect"
+
+    def __init__(
+        self,
+        nodeid: str,
+        outcome: "Literal['passed', 'failed', 'skipped']",
+        longrepr: Union[
+            None, ExceptionInfo[BaseException], Tuple[str, int, str], str, TerminalRepr
+        ],
+        result: Optional[List[Union[Item, Collector]]],
+        sections: Iterable[Tuple[str, str]] = (),
+        **extra,
+    ) -> None:
+        #: Normalized collection nodeid.
+        self.nodeid = nodeid
+
+        #: Test outcome, always one of "passed", "failed", "skipped".
+        self.outcome = outcome
+
+        #: None or a failure representation.
+        self.longrepr = longrepr
+
+        #: The collected items and collection nodes.
+        self.result = result or []
+
+        #: Tuples of str ``(heading, content)`` with extra information
+        #: for the test report. Used by pytest to add text captured
+        #: from ``stdout``, ``stderr``, and intercepted logging events. May
+        #: be used by other plugins to add arbitrary information to reports.
+        self.sections = list(sections)
+
+        self.__dict__.update(extra)
+
+    @property
+    def location(  # type:ignore[override]
+        self,
+    ) -> Optional[Tuple[str, Optional[int], str]]:
+        return (self.fspath, None, self.fspath)
+
+    def __repr__(self) -> str:
+        return "<CollectReport {!r} lenresult={} outcome={!r}>".format(
+            self.nodeid, len(self.result), self.outcome
+        )
+
+
+class CollectErrorRepr(TerminalRepr):
+    def __init__(self, msg: str) -> None:
+        self.longrepr = msg
+
+    def toterminal(self, out: TerminalWriter) -> None:
+        out.line(self.longrepr, red=True)
+
+
+def pytest_report_to_serializable(
+    report: Union[CollectReport, TestReport]
+) -> Optional[Dict[str, Any]]:
+    if isinstance(report, (TestReport, CollectReport)):
+        data = report._to_json()
+        data["$report_type"] = report.__class__.__name__
+        return data
+    # TODO: Check if this is actually reachable.
+    return None  # type: ignore[unreachable]
+
+
+def pytest_report_from_serializable(
+    data: Dict[str, Any],
+) -> Optional[Union[CollectReport, TestReport]]:
+    if "$report_type" in data:
+        if data["$report_type"] == "TestReport":
+            return TestReport._from_json(data)
+        elif data["$report_type"] == "CollectReport":
+            return CollectReport._from_json(data)
+        assert False, "Unknown report_type unserialize data: {}".format(
+            data["$report_type"]
+        )
+    return None
+
+
+def _report_to_json(report: BaseReport) -> Dict[str, Any]:
+    """Return the contents of this report as a dict of builtin entries,
+    suitable for serialization.
+
+    This was originally the serialize_report() function from xdist (ca03269).
+ """ + + def serialize_repr_entry( + entry: Union[ReprEntry, ReprEntryNative] + ) -> Dict[str, Any]: + data = dataclasses.asdict(entry) + for key, value in data.items(): + if hasattr(value, "__dict__"): + data[key] = dataclasses.asdict(value) + entry_data = {"type": type(entry).__name__, "data": data} + return entry_data + + def serialize_repr_traceback(reprtraceback: ReprTraceback) -> Dict[str, Any]: + result = dataclasses.asdict(reprtraceback) + result["reprentries"] = [ + serialize_repr_entry(x) for x in reprtraceback.reprentries + ] + return result + + def serialize_repr_crash( + reprcrash: Optional[ReprFileLocation], + ) -> Optional[Dict[str, Any]]: + if reprcrash is not None: + return dataclasses.asdict(reprcrash) + else: + return None + + def serialize_exception_longrepr(rep: BaseReport) -> Dict[str, Any]: + assert rep.longrepr is not None + # TODO: Investigate whether the duck typing is really necessary here. + longrepr = cast(ExceptionRepr, rep.longrepr) + result: Dict[str, Any] = { + "reprcrash": serialize_repr_crash(longrepr.reprcrash), + "reprtraceback": serialize_repr_traceback(longrepr.reprtraceback), + "sections": longrepr.sections, + } + if isinstance(longrepr, ExceptionChainRepr): + result["chain"] = [] + for repr_traceback, repr_crash, description in longrepr.chain: + result["chain"].append( + ( + serialize_repr_traceback(repr_traceback), + serialize_repr_crash(repr_crash), + description, + ) + ) + else: + result["chain"] = None + return result + + d = report.__dict__.copy() + if hasattr(report.longrepr, "toterminal"): + if hasattr(report.longrepr, "reprtraceback") and hasattr( + report.longrepr, "reprcrash" + ): + d["longrepr"] = serialize_exception_longrepr(report) + else: + d["longrepr"] = str(report.longrepr) + else: + d["longrepr"] = report.longrepr + for name in d: + if isinstance(d[name], os.PathLike): + d[name] = os.fspath(d[name]) + elif name == "result": + d[name] = None # for now + return d + + +def _report_kwargs_from_json(reportdict: Dict[str, Any]) -> Dict[str, Any]: + """Return **kwargs that can be used to construct a TestReport or + CollectReport instance. + + This was originally the serialize_report() function from xdist (ca03269). 
+ """ + + def deserialize_repr_entry(entry_data): + data = entry_data["data"] + entry_type = entry_data["type"] + if entry_type == "ReprEntry": + reprfuncargs = None + reprfileloc = None + reprlocals = None + if data["reprfuncargs"]: + reprfuncargs = ReprFuncArgs(**data["reprfuncargs"]) + if data["reprfileloc"]: + reprfileloc = ReprFileLocation(**data["reprfileloc"]) + if data["reprlocals"]: + reprlocals = ReprLocals(data["reprlocals"]["lines"]) + + reprentry: Union[ReprEntry, ReprEntryNative] = ReprEntry( + lines=data["lines"], + reprfuncargs=reprfuncargs, + reprlocals=reprlocals, + reprfileloc=reprfileloc, + style=data["style"], + ) + elif entry_type == "ReprEntryNative": + reprentry = ReprEntryNative(data["lines"]) + else: + _report_unserialization_failure(entry_type, TestReport, reportdict) + return reprentry + + def deserialize_repr_traceback(repr_traceback_dict): + repr_traceback_dict["reprentries"] = [ + deserialize_repr_entry(x) for x in repr_traceback_dict["reprentries"] + ] + return ReprTraceback(**repr_traceback_dict) + + def deserialize_repr_crash(repr_crash_dict: Optional[Dict[str, Any]]): + if repr_crash_dict is not None: + return ReprFileLocation(**repr_crash_dict) + else: + return None + + if ( + reportdict["longrepr"] + and "reprcrash" in reportdict["longrepr"] + and "reprtraceback" in reportdict["longrepr"] + ): + reprtraceback = deserialize_repr_traceback( + reportdict["longrepr"]["reprtraceback"] + ) + reprcrash = deserialize_repr_crash(reportdict["longrepr"]["reprcrash"]) + if reportdict["longrepr"]["chain"]: + chain = [] + for repr_traceback_data, repr_crash_data, description in reportdict[ + "longrepr" + ]["chain"]: + chain.append( + ( + deserialize_repr_traceback(repr_traceback_data), + deserialize_repr_crash(repr_crash_data), + description, + ) + ) + exception_info: Union[ + ExceptionChainRepr, ReprExceptionInfo + ] = ExceptionChainRepr(chain) + else: + exception_info = ReprExceptionInfo( + reprtraceback=reprtraceback, + reprcrash=reprcrash, + ) + + for section in reportdict["longrepr"]["sections"]: + exception_info.addsection(*section) + reportdict["longrepr"] = exception_info + + return reportdict diff --git a/venv/lib/python3.11/site-packages/_pytest/runner.py b/venv/lib/python3.11/site-packages/_pytest/runner.py new file mode 100644 index 0000000..f861c05 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/runner.py @@ -0,0 +1,551 @@ +"""Basic collect and runtest protocol implementations.""" +import bdb +import dataclasses +import os +import sys +from typing import Callable +from typing import cast +from typing import Dict +from typing import Generic +from typing import List +from typing import Optional +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from .reports import BaseReport +from .reports import CollectErrorRepr +from .reports import CollectReport +from .reports import TestReport +from _pytest import timing +from _pytest._code.code import ExceptionChainRepr +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import TerminalRepr +from _pytest.compat import final +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.nodes import Collector +from _pytest.nodes import Item +from _pytest.nodes import Node +from _pytest.outcomes import Exit +from _pytest.outcomes import OutcomeException +from _pytest.outcomes import Skipped +from _pytest.outcomes import TEST_OUTCOME + +if 
sys.version_info[:2] < (3, 11): + from exceptiongroup import BaseExceptionGroup + +if TYPE_CHECKING: + from typing_extensions import Literal + + from _pytest.main import Session + from _pytest.terminal import TerminalReporter + +# +# pytest plugin hooks. + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("terminal reporting", "Reporting", after="general") + group.addoption( + "--durations", + action="store", + type=int, + default=None, + metavar="N", + help="Show N slowest setup/test durations (N=0 for all)", + ) + group.addoption( + "--durations-min", + action="store", + type=float, + default=0.005, + metavar="N", + help="Minimal duration in seconds for inclusion in slowest list. " + "Default: 0.005.", + ) + + +def pytest_terminal_summary(terminalreporter: "TerminalReporter") -> None: + durations = terminalreporter.config.option.durations + durations_min = terminalreporter.config.option.durations_min + verbose = terminalreporter.config.getvalue("verbose") + if durations is None: + return + tr = terminalreporter + dlist = [] + for replist in tr.stats.values(): + for rep in replist: + if hasattr(rep, "duration"): + dlist.append(rep) + if not dlist: + return + dlist.sort(key=lambda x: x.duration, reverse=True) # type: ignore[no-any-return] + if not durations: + tr.write_sep("=", "slowest durations") + else: + tr.write_sep("=", "slowest %s durations" % durations) + dlist = dlist[:durations] + + for i, rep in enumerate(dlist): + if verbose < 2 and rep.duration < durations_min: + tr.write_line("") + tr.write_line( + "(%s durations < %gs hidden. Use -vv to show these durations.)" + % (len(dlist) - i, durations_min) + ) + break + tr.write_line(f"{rep.duration:02.2f}s {rep.when:<8} {rep.nodeid}") + + +def pytest_sessionstart(session: "Session") -> None: + session._setupstate = SetupState() + + +def pytest_sessionfinish(session: "Session") -> None: + session._setupstate.teardown_exact(None) + + +def pytest_runtest_protocol(item: Item, nextitem: Optional[Item]) -> bool: + ihook = item.ihook + ihook.pytest_runtest_logstart(nodeid=item.nodeid, location=item.location) + runtestprotocol(item, nextitem=nextitem) + ihook.pytest_runtest_logfinish(nodeid=item.nodeid, location=item.location) + return True + + +def runtestprotocol( + item: Item, log: bool = True, nextitem: Optional[Item] = None +) -> List[TestReport]: + hasrequest = hasattr(item, "_request") + if hasrequest and not item._request: # type: ignore[attr-defined] + # This only happens if the item is re-run, as is done by + # pytest-rerunfailures. + item._initrequest() # type: ignore[attr-defined] + rep = call_and_report(item, "setup", log) + reports = [rep] + if rep.passed: + if item.config.getoption("setupshow", False): + show_test_item(item) + if not item.config.getoption("setuponly", False): + reports.append(call_and_report(item, "call", log)) + reports.append(call_and_report(item, "teardown", log, nextitem=nextitem)) + # After all teardown hooks have been called + # want funcargs and request info to go away. 
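The `--durations` report implemented above can be exercised with a throwaway module; the file and test names below are arbitrary:

```python
# test_slow.py -- run with: pytest test_slow.py --durations=3 --durations-min=0.01
import time


def test_fast():
    pass  # below --durations-min, hidden unless -vv is given


def test_slow():
    time.sleep(0.05)  # shows up in the "slowest durations" section
```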
+ if hasrequest: + item._request = False # type: ignore[attr-defined] + item.funcargs = None # type: ignore[attr-defined] + return reports + + +def show_test_item(item: Item) -> None: + """Show test function, parameters and the fixtures of the test item.""" + tw = item.config.get_terminal_writer() + tw.line() + tw.write(" " * 8) + tw.write(item.nodeid) + used_fixtures = sorted(getattr(item, "fixturenames", [])) + if used_fixtures: + tw.write(" (fixtures used: {})".format(", ".join(used_fixtures))) + tw.flush() + + +def pytest_runtest_setup(item: Item) -> None: + _update_current_test_var(item, "setup") + item.session._setupstate.setup(item) + + +def pytest_runtest_call(item: Item) -> None: + _update_current_test_var(item, "call") + try: + del sys.last_type + del sys.last_value + del sys.last_traceback + except AttributeError: + pass + try: + item.runtest() + except Exception as e: + # Store trace info to allow postmortem debugging + sys.last_type = type(e) + sys.last_value = e + assert e.__traceback__ is not None + # Skip *this* frame + sys.last_traceback = e.__traceback__.tb_next + raise e + + +def pytest_runtest_teardown(item: Item, nextitem: Optional[Item]) -> None: + _update_current_test_var(item, "teardown") + item.session._setupstate.teardown_exact(nextitem) + _update_current_test_var(item, None) + + +def _update_current_test_var( + item: Item, when: Optional["Literal['setup', 'call', 'teardown']"] +) -> None: + """Update :envvar:`PYTEST_CURRENT_TEST` to reflect the current item and stage. + + If ``when`` is None, delete ``PYTEST_CURRENT_TEST`` from the environment. + """ + var_name = "PYTEST_CURRENT_TEST" + if when: + value = f"{item.nodeid} ({when})" + # don't allow null bytes on environment variables (see #2644, #2957) + value = value.replace("\x00", "(null)") + os.environ[var_name] = value + else: + os.environ.pop(var_name) + + +def pytest_report_teststatus(report: BaseReport) -> Optional[Tuple[str, str, str]]: + if report.when in ("setup", "teardown"): + if report.failed: + # category, shortletter, verbose-word + return "error", "E", "ERROR" + elif report.skipped: + return "skipped", "s", "SKIPPED" + else: + return "", "", "" + return None + + +# +# Implementation + + +def call_and_report( + item: Item, when: "Literal['setup', 'call', 'teardown']", log: bool = True, **kwds +) -> TestReport: + call = call_runtest_hook(item, when, **kwds) + hook = item.ihook + report: TestReport = hook.pytest_runtest_makereport(item=item, call=call) + if log: + hook.pytest_runtest_logreport(report=report) + if check_interactive_exception(call, report): + hook.pytest_exception_interact(node=item, call=call, report=report) + return report + + +def check_interactive_exception(call: "CallInfo[object]", report: BaseReport) -> bool: + """Check whether the call raised an exception that should be reported as + interactive.""" + if call.excinfo is None: + # Didn't raise. + return False + if hasattr(report, "wasxfail"): + # Exception was expected. + return False + if isinstance(call.excinfo.value, (Skipped, bdb.BdbQuit)): + # Special control flow exception. 
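Because `_update_current_test_var()` above keeps `PYTEST_CURRENT_TEST` in sync with the current item and phase, an external watchdog can tell where a hanging run is stuck. A small sketch; the thread and names are illustrative, not part of pytest:

```python
# Poll PYTEST_CURRENT_TEST from a daemon thread, e.g. started in conftest.py.
import os
import threading


def _watch(stop: threading.Event, interval: float = 5.0) -> None:
    while not stop.wait(interval):
        # Value looks like "tests/test_mod.py::test_x (call)".
        current = os.environ.get("PYTEST_CURRENT_TEST", "<no test running>")
        print(f"[watchdog] {current}")


stop_event = threading.Event()
threading.Thread(target=_watch, args=(stop_event,), daemon=True).start()
```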
+ return False + return True + + +def call_runtest_hook( + item: Item, when: "Literal['setup', 'call', 'teardown']", **kwds +) -> "CallInfo[None]": + if when == "setup": + ihook: Callable[..., None] = item.ihook.pytest_runtest_setup + elif when == "call": + ihook = item.ihook.pytest_runtest_call + elif when == "teardown": + ihook = item.ihook.pytest_runtest_teardown + else: + assert False, f"Unhandled runtest hook case: {when}" + reraise: Tuple[Type[BaseException], ...] = (Exit,) + if not item.config.getoption("usepdb", False): + reraise += (KeyboardInterrupt,) + return CallInfo.from_call( + lambda: ihook(item=item, **kwds), when=when, reraise=reraise + ) + + +TResult = TypeVar("TResult", covariant=True) + + +@final +@dataclasses.dataclass +class CallInfo(Generic[TResult]): + """Result/Exception info of a function invocation.""" + + _result: Optional[TResult] + #: The captured exception of the call, if it raised. + excinfo: Optional[ExceptionInfo[BaseException]] + #: The system time when the call started, in seconds since the epoch. + start: float + #: The system time when the call ended, in seconds since the epoch. + stop: float + #: The call duration, in seconds. + duration: float + #: The context of invocation: "collect", "setup", "call" or "teardown". + when: "Literal['collect', 'setup', 'call', 'teardown']" + + def __init__( + self, + result: Optional[TResult], + excinfo: Optional[ExceptionInfo[BaseException]], + start: float, + stop: float, + duration: float, + when: "Literal['collect', 'setup', 'call', 'teardown']", + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self._result = result + self.excinfo = excinfo + self.start = start + self.stop = stop + self.duration = duration + self.when = when + + @property + def result(self) -> TResult: + """The return value of the call, if it didn't raise. + + Can only be accessed if excinfo is None. + """ + if self.excinfo is not None: + raise AttributeError(f"{self!r} has no valid result") + # The cast is safe because an exception wasn't raised, hence + # _result has the expected function return type (which may be + # None, that's why a cast and not an assert). + return cast(TResult, self._result) + + @classmethod + def from_call( + cls, + func: "Callable[[], TResult]", + when: "Literal['collect', 'setup', 'call', 'teardown']", + reraise: Optional[ + Union[Type[BaseException], Tuple[Type[BaseException], ...]] + ] = None, + ) -> "CallInfo[TResult]": + """Call func, wrapping the result in a CallInfo. + + :param func: + The function to call. Called without arguments. + :param when: + The phase in which the function is called. + :param reraise: + Exception or exceptions that shall propagate if raised by the + function, instead of being wrapped in the CallInfo. 
+ """ + excinfo = None + start = timing.time() + precise_start = timing.perf_counter() + try: + result: Optional[TResult] = func() + except BaseException: + excinfo = ExceptionInfo.from_current() + if reraise is not None and isinstance(excinfo.value, reraise): + raise + result = None + # use the perf counter + precise_stop = timing.perf_counter() + duration = precise_stop - precise_start + stop = timing.time() + return cls( + start=start, + stop=stop, + duration=duration, + when=when, + result=result, + excinfo=excinfo, + _ispytest=True, + ) + + def __repr__(self) -> str: + if self.excinfo is None: + return f"" + return f"" + + +def pytest_runtest_makereport(item: Item, call: CallInfo[None]) -> TestReport: + return TestReport.from_item_and_call(item, call) + + +def pytest_make_collect_report(collector: Collector) -> CollectReport: + call = CallInfo.from_call(lambda: list(collector.collect()), "collect") + longrepr: Union[None, Tuple[str, int, str], str, TerminalRepr] = None + if not call.excinfo: + outcome: Literal["passed", "skipped", "failed"] = "passed" + else: + skip_exceptions = [Skipped] + unittest = sys.modules.get("unittest") + if unittest is not None: + # Type ignored because unittest is loaded dynamically. + skip_exceptions.append(unittest.SkipTest) # type: ignore + if isinstance(call.excinfo.value, tuple(skip_exceptions)): + outcome = "skipped" + r_ = collector._repr_failure_py(call.excinfo, "line") + assert isinstance(r_, ExceptionChainRepr), repr(r_) + r = r_.reprcrash + assert r + longrepr = (str(r.path), r.lineno, r.message) + else: + outcome = "failed" + errorinfo = collector.repr_failure(call.excinfo) + if not hasattr(errorinfo, "toterminal"): + assert isinstance(errorinfo, str) + errorinfo = CollectErrorRepr(errorinfo) + longrepr = errorinfo + result = call.result if not call.excinfo else None + rep = CollectReport(collector.nodeid, outcome, longrepr, result) + rep.call = call # type: ignore # see collect_one_node + return rep + + +class SetupState: + """Shared state for setting up/tearing down test items or collectors + in a session. + + Suppose we have a collection tree as follows: + + + + + + + + The SetupState maintains a stack. The stack starts out empty: + + [] + + During the setup phase of item1, setup(item1) is called. What it does + is: + + push session to stack, run session.setup() + push mod1 to stack, run mod1.setup() + push item1 to stack, run item1.setup() + + The stack is: + + [session, mod1, item1] + + While the stack is in this shape, it is allowed to add finalizers to + each of session, mod1, item1 using addfinalizer(). + + During the teardown phase of item1, teardown_exact(item2) is called, + where item2 is the next item to item1. What it does is: + + pop item1 from stack, run its teardowns + pop mod1 from stack, run its teardowns + + mod1 was popped because it ended its purpose with item1. The stack is: + + [session] + + During the setup phase of item2, setup(item2) is called. What it does + is: + + push mod2 to stack, run mod2.setup() + push item2 to stack, run item2.setup() + + Stack: + + [session, mod2, item2] + + During the teardown phase of item2, teardown_exact(None) is called, + because item2 is the last item. What it does is: + + pop item2 from stack, run its teardowns + pop mod2 from stack, run its teardowns + pop session from stack, run its teardowns + + Stack: + + [] + + The end! + """ + + def __init__(self) -> None: + # The stack is in the dict insertion order. + self.stack: Dict[ + Node, + Tuple[ + # Node's finalizers. 
+ List[Callable[[], object]], + # Node's exception, if its setup raised. + Optional[Union[OutcomeException, Exception]], + ], + ] = {} + + def setup(self, item: Item) -> None: + """Setup objects along the collector chain to the item.""" + needed_collectors = item.listchain() + + # If a collector fails its setup, fail its entire subtree of items. + # The setup is not retried for each item - the same exception is used. + for col, (finalizers, exc) in self.stack.items(): + assert col in needed_collectors, "previous item was not torn down properly" + if exc: + raise exc + + for col in needed_collectors[len(self.stack) :]: + assert col not in self.stack + # Push onto the stack. + self.stack[col] = ([col.teardown], None) + try: + col.setup() + except TEST_OUTCOME as exc: + self.stack[col] = (self.stack[col][0], exc) + raise exc + + def addfinalizer(self, finalizer: Callable[[], object], node: Node) -> None: + """Attach a finalizer to the given node. + + The node must be currently active in the stack. + """ + assert node and not isinstance(node, tuple) + assert callable(finalizer) + assert node in self.stack, (node, self.stack) + self.stack[node][0].append(finalizer) + + def teardown_exact(self, nextitem: Optional[Item]) -> None: + """Teardown the current stack up until reaching nodes that nextitem + also descends from. + + When nextitem is None (meaning we're at the last item), the entire + stack is torn down. + """ + needed_collectors = nextitem and nextitem.listchain() or [] + exceptions: List[BaseException] = [] + while self.stack: + if list(self.stack.keys()) == needed_collectors[: len(self.stack)]: + break + node, (finalizers, _) = self.stack.popitem() + these_exceptions = [] + while finalizers: + fin = finalizers.pop() + try: + fin() + except TEST_OUTCOME as e: + these_exceptions.append(e) + + if len(these_exceptions) == 1: + exceptions.extend(these_exceptions) + elif these_exceptions: + msg = f"errors while tearing down {node!r}" + exceptions.append(BaseExceptionGroup(msg, these_exceptions[::-1])) + + if len(exceptions) == 1: + raise exceptions[0] + elif exceptions: + raise BaseExceptionGroup("errors during test teardown", exceptions[::-1]) + if nextitem is None: + assert not self.stack + + +def collect_one_node(collector: Collector) -> CollectReport: + ihook = collector.ihook + ihook.pytest_collectstart(collector=collector) + rep: CollectReport = ihook.pytest_make_collect_report(collector=collector) + call = rep.__dict__.pop("call", None) + if call and check_interactive_exception(call, rep): + ihook.pytest_exception_interact(node=collector, call=call, report=rep) + return rep diff --git a/venv/lib/python3.11/site-packages/_pytest/scope.py b/venv/lib/python3.11/site-packages/_pytest/scope.py new file mode 100644 index 0000000..7a746fb --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/scope.py @@ -0,0 +1,91 @@ +""" +Scope definition and related utilities. + +Those are defined here, instead of in the 'fixtures' module because +their use is spread across many other pytest modules, and centralizing it in 'fixtures' +would cause circular references. + +Also this makes the module light to import, as it should. +""" +from enum import Enum +from functools import total_ordering +from typing import Optional +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing_extensions import Literal + + _ScopeName = Literal["session", "package", "module", "class", "function"] + + +@total_ordering +class Scope(Enum): + """ + Represents one of the possible fixture scopes in pytest. 
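The finalizer stack that `SetupState` manages above is what `request.addfinalizer()` feeds into; finalizers pop LIFO during teardown. A small sketch with arbitrary names:

```python
# test_finalizers.py -- finalizers registered later run earlier (LIFO),
# matching the finalizers.pop() loop in SetupState.teardown_exact() above.
import pytest

order = []


@pytest.fixture
def tracked(request):
    order.append("setup")
    request.addfinalizer(lambda: order.append("first-registered"))   # runs last
    request.addfinalizer(lambda: order.append("second-registered"))  # runs first
    return object()


def test_uses_fixture(tracked):
    assert order == ["setup"]
```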
+ + Scopes are ordered from lower to higher, that is: + + ->>> higher ->>> + + Function < Class < Module < Package < Session + + <<<- lower <<<- + """ + + # Scopes need to be listed from lower to higher. + Function: "_ScopeName" = "function" + Class: "_ScopeName" = "class" + Module: "_ScopeName" = "module" + Package: "_ScopeName" = "package" + Session: "_ScopeName" = "session" + + def next_lower(self) -> "Scope": + """Return the next lower scope.""" + index = _SCOPE_INDICES[self] + if index == 0: + raise ValueError(f"{self} is the lower-most scope") + return _ALL_SCOPES[index - 1] + + def next_higher(self) -> "Scope": + """Return the next higher scope.""" + index = _SCOPE_INDICES[self] + if index == len(_SCOPE_INDICES) - 1: + raise ValueError(f"{self} is the upper-most scope") + return _ALL_SCOPES[index + 1] + + def __lt__(self, other: "Scope") -> bool: + self_index = _SCOPE_INDICES[self] + other_index = _SCOPE_INDICES[other] + return self_index < other_index + + @classmethod + def from_user( + cls, scope_name: "_ScopeName", descr: str, where: Optional[str] = None + ) -> "Scope": + """ + Given a scope name from the user, return the equivalent Scope enum. Should be used + whenever we want to convert a user provided scope name to its enum object. + + If the scope name is invalid, construct a user friendly message and call pytest.fail. + """ + from _pytest.outcomes import fail + + try: + # Holding this reference is necessary for mypy at the moment. + scope = Scope(scope_name) + except ValueError: + fail( + "{} {}got an unexpected scope value '{}'".format( + descr, f"from {where} " if where else "", scope_name + ), + pytrace=False, + ) + return scope + + +_ALL_SCOPES = list(Scope) +_SCOPE_INDICES = {scope: index for index, scope in enumerate(_ALL_SCOPES)} + + +# Ordered list of scopes which can contain many tests (in practice all except Function). +HIGH_SCOPES = [x for x in Scope if x is not Scope.Function] diff --git a/venv/lib/python3.11/site-packages/_pytest/setuponly.py b/venv/lib/python3.11/site-packages/_pytest/setuponly.py new file mode 100644 index 0000000..583590d --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/setuponly.py @@ -0,0 +1,97 @@ +from typing import Generator +from typing import Optional +from typing import Union + +import pytest +from _pytest._io.saferepr import saferepr +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config.argparsing import Parser +from _pytest.fixtures import FixtureDef +from _pytest.fixtures import SubRequest +from _pytest.scope import Scope + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("debugconfig") + group.addoption( + "--setuponly", + "--setup-only", + action="store_true", + help="Only setup fixtures, do not execute tests", + ) + group.addoption( + "--setupshow", + "--setup-show", + action="store_true", + help="Show setup of fixtures while executing tests", + ) + + +@pytest.hookimpl(hookwrapper=True) +def pytest_fixture_setup( + fixturedef: FixtureDef[object], request: SubRequest +) -> Generator[None, None, None]: + yield + if request.config.option.setupshow: + if hasattr(request, "param"): + # Save the fixture parameter so ._show_fixture_action() can + # display it now and during the teardown (in .finish()). 
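The ordering that `@total_ordering` gives the `Scope` enum above can be checked directly; internal API, shown for illustration only:

```python
from _pytest.scope import HIGH_SCOPES, Scope

assert Scope.Function < Scope.Class < Scope.Module < Scope.Package < Scope.Session
assert Scope.Module.next_lower() is Scope.Class
assert Scope.Module.next_higher() is Scope.Package
assert Scope("class") is Scope.Class  # enum lookup by the user-facing name
assert Scope.Function not in HIGH_SCOPES  # only Function is excluded
```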
+ if fixturedef.ids: + if callable(fixturedef.ids): + param = fixturedef.ids(request.param) + else: + param = fixturedef.ids[request.param_index] + else: + param = request.param + fixturedef.cached_param = param # type: ignore[attr-defined] + _show_fixture_action(fixturedef, "SETUP") + + +def pytest_fixture_post_finalizer(fixturedef: FixtureDef[object]) -> None: + if fixturedef.cached_result is not None: + config = fixturedef._fixturemanager.config + if config.option.setupshow: + _show_fixture_action(fixturedef, "TEARDOWN") + if hasattr(fixturedef, "cached_param"): + del fixturedef.cached_param # type: ignore[attr-defined] + + +def _show_fixture_action(fixturedef: FixtureDef[object], msg: str) -> None: + config = fixturedef._fixturemanager.config + capman = config.pluginmanager.getplugin("capturemanager") + if capman: + capman.suspend_global_capture() + + tw = config.get_terminal_writer() + tw.line() + # Use smaller indentation the higher the scope: Session = 0, Package = 1, etc. + scope_indent = list(reversed(Scope)).index(fixturedef._scope) + tw.write(" " * 2 * scope_indent) + tw.write( + "{step} {scope} {fixture}".format( + step=msg.ljust(8), # align the output to TEARDOWN + scope=fixturedef.scope[0].upper(), + fixture=fixturedef.argname, + ) + ) + + if msg == "SETUP": + deps = sorted(arg for arg in fixturedef.argnames if arg != "request") + if deps: + tw.write(" (fixtures used: {})".format(", ".join(deps))) + + if hasattr(fixturedef, "cached_param"): + tw.write(f"[{saferepr(fixturedef.cached_param, maxsize=42)}]") # type: ignore[attr-defined] + + tw.flush() + + if capman: + capman.resume_global_capture() + + +@pytest.hookimpl(tryfirst=True) +def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]: + if config.option.setuponly: + config.option.setupshow = True + return None diff --git a/venv/lib/python3.11/site-packages/_pytest/setupplan.py b/venv/lib/python3.11/site-packages/_pytest/setupplan.py new file mode 100644 index 0000000..1a4ebdd --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/setupplan.py @@ -0,0 +1,40 @@ +from typing import Optional +from typing import Union + +import pytest +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config.argparsing import Parser +from _pytest.fixtures import FixtureDef +from _pytest.fixtures import SubRequest + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("debugconfig") + group.addoption( + "--setupplan", + "--setup-plan", + action="store_true", + help="Show what fixtures and tests would be executed but " + "don't execute anything", + ) + + +@pytest.hookimpl(tryfirst=True) +def pytest_fixture_setup( + fixturedef: FixtureDef[object], request: SubRequest +) -> Optional[object]: + # Will return a dummy fixture if the setuponly option is provided. 
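The `_show_fixture_action()` output above, including the bracketed `cached_param` for parametrized fixtures, can be seen with any parametrized fixture; file and fixture names below are arbitrary:

```python
# test_setup_show.py -- run with: pytest test_setup_show.py --setup-show
import pytest


@pytest.fixture(params=["sqlite", "postgres"])
def db(request):
    # --setup-show prints e.g.:  SETUP    F db['sqlite']
    yield request.param


def test_db(db):
    assert db in ("sqlite", "postgres")
```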
+ if request.config.option.setupplan: + my_cache_key = fixturedef.cache_key(request) + fixturedef.cached_result = (None, my_cache_key, None) + return fixturedef.cached_result + return None + + +@pytest.hookimpl(tryfirst=True) +def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]: + if config.option.setupplan: + config.option.setuponly = True + config.option.setupshow = True + return None diff --git a/venv/lib/python3.11/site-packages/_pytest/skipping.py b/venv/lib/python3.11/site-packages/_pytest/skipping.py new file mode 100644 index 0000000..26ce737 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/skipping.py @@ -0,0 +1,297 @@ +"""Support for skip/xfail functions and markers.""" +import dataclasses +import os +import platform +import sys +import traceback +from collections.abc import Mapping +from typing import Generator +from typing import Optional +from typing import Tuple +from typing import Type + +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.mark.structures import Mark +from _pytest.nodes import Item +from _pytest.outcomes import fail +from _pytest.outcomes import skip +from _pytest.outcomes import xfail +from _pytest.reports import BaseReport +from _pytest.runner import CallInfo +from _pytest.stash import StashKey + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group.addoption( + "--runxfail", + action="store_true", + dest="runxfail", + default=False, + help="Report the results of xfail tests as if they were not marked", + ) + + parser.addini( + "xfail_strict", + "Default for the strict parameter of xfail " + "markers when not given explicitly (default: False)", + default=False, + type="bool", + ) + + +def pytest_configure(config: Config) -> None: + if config.option.runxfail: + # yay a hack + import pytest + + old = pytest.xfail + config.add_cleanup(lambda: setattr(pytest, "xfail", old)) + + def nop(*args, **kwargs): + pass + + nop.Exception = xfail.Exception # type: ignore[attr-defined] + setattr(pytest, "xfail", nop) + + config.addinivalue_line( + "markers", + "skip(reason=None): skip the given test function with an optional reason. " + 'Example: skip(reason="no way of currently testing this") skips the ' + "test.", + ) + config.addinivalue_line( + "markers", + "skipif(condition, ..., *, reason=...): " + "skip the given test function if any of the conditions evaluate to True. " + "Example: skipif(sys.platform == 'win32') skips the test if we are on the win32 platform. " + "See https://docs.pytest.org/en/stable/reference/reference.html#pytest-mark-skipif", + ) + config.addinivalue_line( + "markers", + "xfail(condition, ..., *, reason=..., run=True, raises=None, strict=xfail_strict): " + "mark the test function as an expected failure if any of the conditions " + "evaluate to True. Optionally specify a reason for better reporting " + "and run=False if you don't even want to execute the test function. " + "If only specific exception(s) are expected, you can list them in " + "raises, and if the test fails in other ways, it will be reported as " + "a true failure. See https://docs.pytest.org/en/stable/reference/reference.html#pytest-mark-xfail", + ) + + +def evaluate_condition(item: Item, mark: Mark, condition: object) -> Tuple[bool, str]: + """Evaluate a single skipif/xfail condition. + + If an old-style string condition is given, it is eval()'d, otherwise the + condition is bool()'d. 
If this fails, an appropriately formatted pytest.fail + is raised. + + Returns (result, reason). The reason is only relevant if the result is True. + """ + # String condition. + if isinstance(condition, str): + globals_ = { + "os": os, + "sys": sys, + "platform": platform, + "config": item.config, + } + for dictionary in reversed( + item.ihook.pytest_markeval_namespace(config=item.config) + ): + if not isinstance(dictionary, Mapping): + raise ValueError( + "pytest_markeval_namespace() needs to return a dict, got {!r}".format( + dictionary + ) + ) + globals_.update(dictionary) + if hasattr(item, "obj"): + globals_.update(item.obj.__globals__) # type: ignore[attr-defined] + try: + filename = f"<{mark.name} condition>" + condition_code = compile(condition, filename, "eval") + result = eval(condition_code, globals_) + except SyntaxError as exc: + msglines = [ + "Error evaluating %r condition" % mark.name, + " " + condition, + " " + " " * (exc.offset or 0) + "^", + "SyntaxError: invalid syntax", + ] + fail("\n".join(msglines), pytrace=False) + except Exception as exc: + msglines = [ + "Error evaluating %r condition" % mark.name, + " " + condition, + *traceback.format_exception_only(type(exc), exc), + ] + fail("\n".join(msglines), pytrace=False) + + # Boolean condition. + else: + try: + result = bool(condition) + except Exception as exc: + msglines = [ + "Error evaluating %r condition as a boolean" % mark.name, + *traceback.format_exception_only(type(exc), exc), + ] + fail("\n".join(msglines), pytrace=False) + + reason = mark.kwargs.get("reason", None) + if reason is None: + if isinstance(condition, str): + reason = "condition: " + condition + else: + # XXX better be checked at collection time + msg = ( + "Error evaluating %r: " % mark.name + + "you need to specify reason=STRING when using booleans as conditions." + ) + fail(msg, pytrace=False) + + return result, reason + + +@dataclasses.dataclass(frozen=True) +class Skip: + """The result of evaluate_skip_marks().""" + + reason: str = "unconditional skip" + + +def evaluate_skip_marks(item: Item) -> Optional[Skip]: + """Evaluate skip and skipif marks on item, returning Skip if triggered.""" + for mark in item.iter_markers(name="skipif"): + if "condition" not in mark.kwargs: + conditions = mark.args + else: + conditions = (mark.kwargs["condition"],) + + # Unconditional. + if not conditions: + reason = mark.kwargs.get("reason", "") + return Skip(reason) + + # If any of the conditions are true. + for condition in conditions: + result, reason = evaluate_condition(item, mark, condition) + if result: + return Skip(reason) + + for mark in item.iter_markers(name="skip"): + try: + return Skip(*mark.args, **mark.kwargs) + except TypeError as e: + raise TypeError(str(e) + " - maybe you meant pytest.mark.skipif?") from None + + return None + + +@dataclasses.dataclass(frozen=True) +class Xfail: + """The result of evaluate_xfail_marks().""" + + __slots__ = ("reason", "run", "strict", "raises") + + reason: str + run: bool + strict: bool + raises: Optional[Tuple[Type[BaseException], ...]] + + +def evaluate_xfail_marks(item: Item) -> Optional[Xfail]: + """Evaluate xfail marks on item, returning Xfail if triggered.""" + for mark in item.iter_markers(name="xfail"): + run = mark.kwargs.get("run", True) + strict = mark.kwargs.get("strict", item.config.getini("xfail_strict")) + raises = mark.kwargs.get("raises", None) + if "condition" not in mark.kwargs: + conditions = mark.args + else: + conditions = (mark.kwargs["condition"],) + + # Unconditional. 
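The three mark styles that `evaluate_condition()` and `evaluate_skip_marks()` above accept, in a throwaway module:

```python
# test_skips.py -- arbitrary file name, standard pytest.mark usage.
import sys

import pytest


@pytest.mark.skip(reason="unconditional skip")
def test_always_skipped():
    ...


# Boolean condition: reason= is required, as enforced above.
@pytest.mark.skipif(sys.platform == "win32", reason="POSIX-only behavior")
def test_posix_only():
    ...


# Old-style string condition: eval()'d with os/sys/platform/config in scope;
# the condition text itself becomes the skip reason.
@pytest.mark.skipif("sys.version_info < (3, 8)")
def test_needs_py38():
    ...
```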
+ if not conditions: + reason = mark.kwargs.get("reason", "") + return Xfail(reason, run, strict, raises) + + # If any of the conditions are true. + for condition in conditions: + result, reason = evaluate_condition(item, mark, condition) + if result: + return Xfail(reason, run, strict, raises) + + return None + + +# Saves the xfail mark evaluation. Can be refreshed during call if None. +xfailed_key = StashKey[Optional[Xfail]]() + + +@hookimpl(tryfirst=True) +def pytest_runtest_setup(item: Item) -> None: + skipped = evaluate_skip_marks(item) + if skipped: + raise skip.Exception(skipped.reason, _use_item_location=True) + + item.stash[xfailed_key] = xfailed = evaluate_xfail_marks(item) + if xfailed and not item.config.option.runxfail and not xfailed.run: + xfail("[NOTRUN] " + xfailed.reason) + + +@hookimpl(hookwrapper=True) +def pytest_runtest_call(item: Item) -> Generator[None, None, None]: + xfailed = item.stash.get(xfailed_key, None) + if xfailed is None: + item.stash[xfailed_key] = xfailed = evaluate_xfail_marks(item) + + if xfailed and not item.config.option.runxfail and not xfailed.run: + xfail("[NOTRUN] " + xfailed.reason) + + yield + + # The test run may have added an xfail mark dynamically. + xfailed = item.stash.get(xfailed_key, None) + if xfailed is None: + item.stash[xfailed_key] = xfailed = evaluate_xfail_marks(item) + + +@hookimpl(hookwrapper=True) +def pytest_runtest_makereport(item: Item, call: CallInfo[None]): + outcome = yield + rep = outcome.get_result() + xfailed = item.stash.get(xfailed_key, None) + if item.config.option.runxfail: + pass # don't interfere + elif call.excinfo and isinstance(call.excinfo.value, xfail.Exception): + assert call.excinfo.value.msg is not None + rep.wasxfail = "reason: " + call.excinfo.value.msg + rep.outcome = "skipped" + elif not rep.skipped and xfailed: + if call.excinfo: + raises = xfailed.raises + if raises is not None and not isinstance(call.excinfo.value, raises): + rep.outcome = "failed" + else: + rep.outcome = "skipped" + rep.wasxfail = xfailed.reason + elif call.when == "call": + if xfailed.strict: + rep.outcome = "failed" + rep.longrepr = "[XPASS(strict)] " + xfailed.reason + else: + rep.outcome = "passed" + rep.wasxfail = xfailed.reason + + +def pytest_report_teststatus(report: BaseReport) -> Optional[Tuple[str, str, str]]: + if hasattr(report, "wasxfail"): + if report.skipped: + return "xfailed", "x", "XFAIL" + elif report.passed: + return "xpassed", "X", "XPASS" + return None diff --git a/venv/lib/python3.11/site-packages/_pytest/stash.py b/venv/lib/python3.11/site-packages/_pytest/stash.py new file mode 100644 index 0000000..e61d75b --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/stash.py @@ -0,0 +1,112 @@ +from typing import Any +from typing import cast +from typing import Dict +from typing import Generic +from typing import TypeVar +from typing import Union + + +__all__ = ["Stash", "StashKey"] + + +T = TypeVar("T") +D = TypeVar("D") + + +class StashKey(Generic[T]): + """``StashKey`` is an object used as a key to a :class:`Stash`. + + A ``StashKey`` is associated with the type ``T`` of the value of the key. + + A ``StashKey`` is unique and cannot conflict with another key. + """ + + __slots__ = () + + +class Stash: + r"""``Stash`` is a type-safe heterogeneous mutable mapping that + allows keys and value types to be defined separately from + where it (the ``Stash``) is created. 
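Correspondingly, the xfail handling implemented by the hooks above produces three distinct outcomes worth distinguishing; a sketch with arbitrary names:

```python
# test_xfails.py
import pytest


@pytest.mark.xfail(reason="known bug")
def test_expected_failure():
    assert 1 + 1 == 3  # fails -> reported as XFAIL


@pytest.mark.xfail(reason="should be fixed now", strict=True)
def test_strict_xpass():
    assert True  # passes -> reported as FAILED with "[XPASS(strict)]"


@pytest.mark.xfail(raises=ZeroDivisionError, reason="div by zero expected")
def test_wrong_exception():
    raise ValueError("anything else")  # wrong type -> a true failure
```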
+ + Usually you will be given an object which has a ``Stash``, for example + :class:`~pytest.Config` or a :class:`~_pytest.nodes.Node`: + + .. code-block:: python + + stash: Stash = some_object.stash + + If a module or plugin wants to store data in this ``Stash``, it creates + :class:`StashKey`\s for its keys (at the module level): + + .. code-block:: python + + # At the top-level of the module + some_str_key = StashKey[str]() + some_bool_key = StashKey[bool]() + + To store information: + + .. code-block:: python + + # Value type must match the key. + stash[some_str_key] = "value" + stash[some_bool_key] = True + + To retrieve the information: + + .. code-block:: python + + # The static type of some_str is str. + some_str = stash[some_str_key] + # The static type of some_bool is bool. + some_bool = stash[some_bool_key] + """ + + __slots__ = ("_storage",) + + def __init__(self) -> None: + self._storage: Dict[StashKey[Any], object] = {} + + def __setitem__(self, key: StashKey[T], value: T) -> None: + """Set a value for key.""" + self._storage[key] = value + + def __getitem__(self, key: StashKey[T]) -> T: + """Get the value for key. + + Raises ``KeyError`` if the key wasn't set before. + """ + return cast(T, self._storage[key]) + + def get(self, key: StashKey[T], default: D) -> Union[T, D]: + """Get the value for key, or return default if the key wasn't set + before.""" + try: + return self[key] + except KeyError: + return default + + def setdefault(self, key: StashKey[T], default: T) -> T: + """Return the value of key if already set, otherwise set the value + of key to default and return default.""" + try: + return self[key] + except KeyError: + self[key] = default + return default + + def __delitem__(self, key: StashKey[T]) -> None: + """Delete the value for key. + + Raises ``KeyError`` if the key wasn't set before. + """ + del self._storage[key] + + def __contains__(self, key: StashKey[T]) -> bool: + """Return whether key was set.""" + return key in self._storage + + def __len__(self) -> int: + """Return how many items exist in the stash.""" + return len(self._storage) diff --git a/venv/lib/python3.11/site-packages/_pytest/stepwise.py b/venv/lib/python3.11/site-packages/_pytest/stepwise.py new file mode 100644 index 0000000..74ad9db --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/stepwise.py @@ -0,0 +1,130 @@ +from typing import List +from typing import Optional +from typing import TYPE_CHECKING + +import pytest +from _pytest import nodes +from _pytest.config import Config +from _pytest.config.argparsing import Parser +from _pytest.main import Session +from _pytest.reports import TestReport + +if TYPE_CHECKING: + from _pytest.cacheprovider import Cache + +STEPWISE_CACHE_DIR = "cache/stepwise" + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group.addoption( + "--sw", + "--stepwise", + action="store_true", + default=False, + dest="stepwise", + help="Exit on test failure and continue from last failing test next time", + ) + group.addoption( + "--sw-skip", + "--stepwise-skip", + action="store_true", + default=False, + dest="stepwise_skip", + help="Ignore the first failing test but stop on the next failing test. " + "Implicitly enables --stepwise.", + ) + + +@pytest.hookimpl +def pytest_configure(config: Config) -> None: + if config.option.stepwise_skip: + # allow --stepwise-skip to work on it's own merits. 
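The `Stash`/`StashKey` pattern documented above is public API via `pytest.StashKey` and the `.stash` attribute on `Config` and nodes. A short plugin-style sketch (the hook bodies and key name are illustrative):

```python
# conftest.py -- record per-item state in the item's stash.
import pytest

# Module-level key; the type parameter documents and type-checks the value.
call_failed_key = pytest.StashKey[bool]()


@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item, call):
    outcome = yield
    report = outcome.get_result()
    if report.when == "call":
        item.stash[call_failed_key] = report.failed


def pytest_runtest_teardown(item, nextitem):
    # .get() with a default mirrors dict.get(), as implemented above.
    if item.stash.get(call_failed_key, False):
        print(f"{item.nodeid} failed in its call phase")
```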
+ config.option.stepwise = True + if config.getoption("stepwise"): + config.pluginmanager.register(StepwisePlugin(config), "stepwiseplugin") + + +def pytest_sessionfinish(session: Session) -> None: + if not session.config.getoption("stepwise"): + assert session.config.cache is not None + if hasattr(session.config, "workerinput"): + # Do not update cache if this process is a xdist worker to prevent + # race conditions (#10641). + return + # Clear the list of failing tests if the plugin is not active. + session.config.cache.set(STEPWISE_CACHE_DIR, []) + + +class StepwisePlugin: + def __init__(self, config: Config) -> None: + self.config = config + self.session: Optional[Session] = None + self.report_status = "" + assert config.cache is not None + self.cache: Cache = config.cache + self.lastfailed: Optional[str] = self.cache.get(STEPWISE_CACHE_DIR, None) + self.skip: bool = config.getoption("stepwise_skip") + + def pytest_sessionstart(self, session: Session) -> None: + self.session = session + + def pytest_collection_modifyitems( + self, config: Config, items: List[nodes.Item] + ) -> None: + if not self.lastfailed: + self.report_status = "no previously failed tests, not skipping." + return + + # check all item nodes until we find a match on last failed + failed_index = None + for index, item in enumerate(items): + if item.nodeid == self.lastfailed: + failed_index = index + break + + # If the previously failed test was not found among the test items, + # do not skip any tests. + if failed_index is None: + self.report_status = "previously failed test not found, not skipping." + else: + self.report_status = f"skipping {failed_index} already passed items." + deselected = items[:failed_index] + del items[:failed_index] + config.hook.pytest_deselected(items=deselected) + + def pytest_runtest_logreport(self, report: TestReport) -> None: + if report.failed: + if self.skip: + # Remove test from the failed ones (if it exists) and unset the skip option + # to make sure the following tests will not be skipped. + if report.nodeid == self.lastfailed: + self.lastfailed = None + + self.skip = False + else: + # Mark test as the last failing and interrupt the test session. + self.lastfailed = report.nodeid + assert self.session is not None + self.session.shouldstop = ( + "Test failed, continuing from this test next run." + ) + + else: + # If the test was actually run and did pass. + if report.when == "call": + # Remove test from the failed ones, if exists. + if report.nodeid == self.lastfailed: + self.lastfailed = None + + def pytest_report_collectionfinish(self) -> Optional[str]: + if self.config.getoption("verbose") >= 0 and self.report_status: + return f"stepwise: {self.report_status}" + return None + + def pytest_sessionfinish(self) -> None: + if hasattr(self.config, "workerinput"): + # Do not update cache if this process is a xdist worker to prevent + # race conditions (#10641). + return + self.cache.set(STEPWISE_CACHE_DIR, self.lastfailed) diff --git a/venv/lib/python3.11/site-packages/_pytest/terminal.py b/venv/lib/python3.11/site-packages/_pytest/terminal.py new file mode 100644 index 0000000..b0cdb58 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/terminal.py @@ -0,0 +1,1481 @@ +"""Terminal reporting of the full testing process. + +This is a good source for looking at the various reporting hooks. 
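The `STEPWISE_CACHE_DIR` entry that `StepwisePlugin` maintains above lives in pytest's regular cache, so it can be read like any other cached value; this peeks at a private key and is a sketch only:

```python
# conftest.py -- show where a --stepwise run would resume from.
def pytest_sessionstart(session):
    last = session.config.cache.get("cache/stepwise", None)
    if last:
        print(f"stepwise would resume from: {last}")
```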
+""" +import argparse +import dataclasses +import datetime +import inspect +import platform +import sys +import textwrap +import warnings +from collections import Counter +from functools import partial +from pathlib import Path +from typing import Any +from typing import Callable +from typing import cast +from typing import ClassVar +from typing import Dict +from typing import Generator +from typing import List +from typing import Mapping +from typing import NamedTuple +from typing import Optional +from typing import Sequence +from typing import Set +from typing import TextIO +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union + +import pluggy + +import _pytest._version +from _pytest import nodes +from _pytest import timing +from _pytest._code import ExceptionInfo +from _pytest._code.code import ExceptionRepr +from _pytest._io import TerminalWriter +from _pytest._io.wcwidth import wcswidth +from _pytest.assertion.util import running_on_ci +from _pytest.compat import final +from _pytest.config import _PluggyPlugin +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.nodes import Item +from _pytest.nodes import Node +from _pytest.pathlib import absolutepath +from _pytest.pathlib import bestrelpath +from _pytest.reports import BaseReport +from _pytest.reports import CollectReport +from _pytest.reports import TestReport + +if TYPE_CHECKING: + from typing_extensions import Literal + + from _pytest.main import Session + + +REPORT_COLLECTING_RESOLUTION = 0.5 + +KNOWN_TYPES = ( + "failed", + "passed", + "skipped", + "deselected", + "xfailed", + "xpassed", + "warnings", + "error", +) + +_REPORTCHARS_DEFAULT = "fE" + + +class MoreQuietAction(argparse.Action): + """A modified copy of the argparse count action which counts down and updates + the legacy quiet attribute at the same time. + + Used to unify verbosity handling. + """ + + def __init__( + self, + option_strings: Sequence[str], + dest: str, + default: object = None, + required: bool = False, + help: Optional[str] = None, + ) -> None: + super().__init__( + option_strings=option_strings, + dest=dest, + nargs=0, + default=default, + required=required, + help=help, + ) + + def __call__( + self, + parser: argparse.ArgumentParser, + namespace: argparse.Namespace, + values: Union[str, Sequence[object], None], + option_string: Optional[str] = None, + ) -> None: + new_count = getattr(namespace, self.dest, 0) - 1 + setattr(namespace, self.dest, new_count) + # todo Deprecate config.quiet + namespace.quiet = getattr(namespace, "quiet", 0) + 1 + + +class TestShortLogReport(NamedTuple): + """Used to store the test status result category, shortletter and verbose word. + For example ``"rerun", "R", ("RERUN", {"yellow": True})``. + + :ivar category: + The class of result, for example ``“passed”``, ``“skipped”``, ``“error”``, or the empty string. + + :ivar letter: + The short letter shown as testing progresses, for example ``"."``, ``"s"``, ``"E"``, or the empty string. + + :ivar word: + Verbose word is shown as testing progresses in verbose mode, for example ``"PASSED"``, ``"SKIPPED"``, + ``"ERROR"``, or the empty string. 
+ """ + + category: str + letter: str + word: Union[str, Tuple[str, Mapping[str, bool]]] + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("terminal reporting", "Reporting", after="general") + group._addoption( + "-v", + "--verbose", + action="count", + default=0, + dest="verbose", + help="Increase verbosity", + ) + group._addoption( + "--no-header", + action="store_true", + default=False, + dest="no_header", + help="Disable header", + ) + group._addoption( + "--no-summary", + action="store_true", + default=False, + dest="no_summary", + help="Disable summary", + ) + group._addoption( + "-q", + "--quiet", + action=MoreQuietAction, + default=0, + dest="verbose", + help="Decrease verbosity", + ) + group._addoption( + "--verbosity", + dest="verbose", + type=int, + default=0, + help="Set verbosity. Default: 0.", + ) + group._addoption( + "-r", + action="store", + dest="reportchars", + default=_REPORTCHARS_DEFAULT, + metavar="chars", + help="Show extra test summary info as specified by chars: (f)ailed, " + "(E)rror, (s)kipped, (x)failed, (X)passed, " + "(p)assed, (P)assed with output, (a)ll except passed (p/P), or (A)ll. " + "(w)arnings are enabled by default (see --disable-warnings), " + "'N' can be used to reset the list. (default: 'fE').", + ) + group._addoption( + "--disable-warnings", + "--disable-pytest-warnings", + default=False, + dest="disable_warnings", + action="store_true", + help="Disable warnings summary", + ) + group._addoption( + "-l", + "--showlocals", + action="store_true", + dest="showlocals", + default=False, + help="Show locals in tracebacks (disabled by default)", + ) + group._addoption( + "--no-showlocals", + action="store_false", + dest="showlocals", + help="Hide locals in tracebacks (negate --showlocals passed through addopts)", + ) + group._addoption( + "--tb", + metavar="style", + action="store", + dest="tbstyle", + default="auto", + choices=["auto", "long", "short", "no", "line", "native"], + help="Traceback print mode (auto/long/short/line/native/no)", + ) + group._addoption( + "--show-capture", + action="store", + dest="showcapture", + choices=["no", "stdout", "stderr", "log", "all"], + default="all", + help="Controls how captured stdout/stderr/log is shown on failed tests. " + "Default: all.", + ) + group._addoption( + "--fulltrace", + "--full-trace", + action="store_true", + default=False, + help="Don't cut any tracebacks (default is to cut)", + ) + group._addoption( + "--color", + metavar="color", + action="store", + dest="color", + default="auto", + choices=["yes", "no", "auto"], + help="Color terminal output (yes/no/auto)", + ) + group._addoption( + "--code-highlight", + default="yes", + choices=["yes", "no"], + help="Whether code should be highlighted (only if --color is also enabled). 
" + "Default: yes.", + ) + + parser.addini( + "console_output_style", + help='Console output: "classic", or with additional progress information ' + '("progress" (percentage) | "count" | "progress-even-when-capture-no" (forces ' + "progress even when capture=no)", + default="progress", + ) + + +def pytest_configure(config: Config) -> None: + reporter = TerminalReporter(config, sys.stdout) + config.pluginmanager.register(reporter, "terminalreporter") + if config.option.debug or config.option.traceconfig: + + def mywriter(tags, args): + msg = " ".join(map(str, args)) + reporter.write_line("[traceconfig] " + msg) + + config.trace.root.setprocessor("pytest:config", mywriter) + + +def getreportopt(config: Config) -> str: + reportchars: str = config.option.reportchars + + old_aliases = {"F", "S"} + reportopts = "" + for char in reportchars: + if char in old_aliases: + char = char.lower() + if char == "a": + reportopts = "sxXEf" + elif char == "A": + reportopts = "PpsxXEf" + elif char == "N": + reportopts = "" + elif char not in reportopts: + reportopts += char + + if not config.option.disable_warnings and "w" not in reportopts: + reportopts = "w" + reportopts + elif config.option.disable_warnings and "w" in reportopts: + reportopts = reportopts.replace("w", "") + + return reportopts + + +@hookimpl(trylast=True) # after _pytest.runner +def pytest_report_teststatus(report: BaseReport) -> Tuple[str, str, str]: + letter = "F" + if report.passed: + letter = "." + elif report.skipped: + letter = "s" + + outcome: str = report.outcome + if report.when in ("collect", "setup", "teardown") and outcome == "failed": + outcome = "error" + letter = "E" + + return outcome, letter, outcome.upper() + + +@dataclasses.dataclass +class WarningReport: + """Simple structure to hold warnings information captured by ``pytest_warning_recorded``. + + :ivar str message: + User friendly message about the warning. + :ivar str|None nodeid: + nodeid that generated the warning (see ``get_location``). + :ivar tuple fslocation: + File system location of the source of the warning (see ``get_location``). 
+ """ + + message: str + nodeid: Optional[str] = None + fslocation: Optional[Tuple[str, int]] = None + + count_towards_summary: ClassVar = True + + def get_location(self, config: Config) -> Optional[str]: + """Return the more user-friendly information about the location of a warning, or None.""" + if self.nodeid: + return self.nodeid + if self.fslocation: + filename, linenum = self.fslocation + relpath = bestrelpath(config.invocation_params.dir, absolutepath(filename)) + return f"{relpath}:{linenum}" + return None + + +@final +class TerminalReporter: + def __init__(self, config: Config, file: Optional[TextIO] = None) -> None: + import _pytest.config + + self.config = config + self._numcollected = 0 + self._session: Optional[Session] = None + self._showfspath: Optional[bool] = None + + self.stats: Dict[str, List[Any]] = {} + self._main_color: Optional[str] = None + self._known_types: Optional[List[str]] = None + self.startpath = config.invocation_params.dir + if file is None: + file = sys.stdout + self._tw = _pytest.config.create_terminal_writer(config, file) + self._screen_width = self._tw.fullwidth + self.currentfspath: Union[None, Path, str, int] = None + self.reportchars = getreportopt(config) + self.hasmarkup = self._tw.hasmarkup + self.isatty = file.isatty() + self._progress_nodeids_reported: Set[str] = set() + self._show_progress_info = self._determine_show_progress_info() + self._collect_report_last_write: Optional[float] = None + self._already_displayed_warnings: Optional[int] = None + self._keyboardinterrupt_memo: Optional[ExceptionRepr] = None + + def _determine_show_progress_info(self) -> "Literal['progress', 'count', False]": + """Return whether we should display progress information based on the current config.""" + # do not show progress if we are not capturing output (#3038) unless explicitly + # overridden by progress-even-when-capture-no + if ( + self.config.getoption("capture", "no") == "no" + and self.config.getini("console_output_style") + != "progress-even-when-capture-no" + ): + return False + # do not show progress if we are showing fixture setup/teardown + if self.config.getoption("setupshow", False): + return False + cfg: str = self.config.getini("console_output_style") + if cfg == "progress" or cfg == "progress-even-when-capture-no": + return "progress" + elif cfg == "count": + return "count" + else: + return False + + @property + def verbosity(self) -> int: + verbosity: int = self.config.option.verbose + return verbosity + + @property + def showheader(self) -> bool: + return self.verbosity >= 0 + + @property + def no_header(self) -> bool: + return bool(self.config.option.no_header) + + @property + def no_summary(self) -> bool: + return bool(self.config.option.no_summary) + + @property + def showfspath(self) -> bool: + if self._showfspath is None: + return self.verbosity >= 0 + return self._showfspath + + @showfspath.setter + def showfspath(self, value: Optional[bool]) -> None: + self._showfspath = value + + @property + def showlongtestinfo(self) -> bool: + return self.verbosity > 0 + + def hasopt(self, char: str) -> bool: + char = {"xfailed": "x", "skipped": "s"}.get(char, char) + return char in self.reportchars + + def write_fspath_result(self, nodeid: str, res, **markup: bool) -> None: + fspath = self.config.rootpath / nodeid.split("::")[0] + if self.currentfspath is None or fspath != self.currentfspath: + if self.currentfspath is not None and self._show_progress_info: + self._write_progress_information_filling_space() + self.currentfspath = fspath + relfspath 
= bestrelpath(self.startpath, fspath) + self._tw.line() + self._tw.write(relfspath + " ") + self._tw.write(res, flush=True, **markup) + + def write_ensure_prefix(self, prefix: str, extra: str = "", **kwargs) -> None: + if self.currentfspath != prefix: + self._tw.line() + self.currentfspath = prefix + self._tw.write(prefix) + if extra: + self._tw.write(extra, **kwargs) + self.currentfspath = -2 + + def ensure_newline(self) -> None: + if self.currentfspath: + self._tw.line() + self.currentfspath = None + + def wrap_write( + self, + content: str, + *, + flush: bool = False, + margin: int = 8, + line_sep: str = "\n", + **markup: bool, + ) -> None: + """Wrap message with margin for progress info.""" + width_of_current_line = self._tw.width_of_current_line + wrapped = line_sep.join( + textwrap.wrap( + " " * width_of_current_line + content, + width=self._screen_width - margin, + drop_whitespace=True, + replace_whitespace=False, + ), + ) + wrapped = wrapped[width_of_current_line:] + self._tw.write(wrapped, flush=flush, **markup) + + def write(self, content: str, *, flush: bool = False, **markup: bool) -> None: + self._tw.write(content, flush=flush, **markup) + + def flush(self) -> None: + self._tw.flush() + + def write_line(self, line: Union[str, bytes], **markup: bool) -> None: + if not isinstance(line, str): + line = str(line, errors="replace") + self.ensure_newline() + self._tw.line(line, **markup) + + def rewrite(self, line: str, **markup: bool) -> None: + """Rewinds the terminal cursor to the beginning and writes the given line. + + :param erase: + If True, will also add spaces until the full terminal width to ensure + previous lines are properly erased. + + The rest of the keyword arguments are markup instructions. + """ + erase = markup.pop("erase", False) + if erase: + fill_count = self._tw.fullwidth - len(line) - 1 + fill = " " * fill_count + else: + fill = "" + line = str(line) + self._tw.write("\r" + line + fill, **markup) + + def write_sep( + self, + sep: str, + title: Optional[str] = None, + fullwidth: Optional[int] = None, + **markup: bool, + ) -> None: + self.ensure_newline() + self._tw.sep(sep, title, fullwidth, **markup) + + def section(self, title: str, sep: str = "=", **kw: bool) -> None: + self._tw.sep(sep, title, **kw) + + def line(self, msg: str, **kw: bool) -> None: + self._tw.line(msg, **kw) + + def _add_stats(self, category: str, items: Sequence[Any]) -> None: + set_main_color = category not in self.stats + self.stats.setdefault(category, []).extend(items) + if set_main_color: + self._set_main_color() + + def pytest_internalerror(self, excrepr: ExceptionRepr) -> bool: + for line in str(excrepr).split("\n"): + self.write_line("INTERNALERROR> " + line) + return True + + def pytest_warning_recorded( + self, + warning_message: warnings.WarningMessage, + nodeid: str, + ) -> None: + from _pytest.warnings import warning_record_to_str + + fslocation = warning_message.filename, warning_message.lineno + message = warning_record_to_str(warning_message) + + warning_report = WarningReport( + fslocation=fslocation, message=message, nodeid=nodeid + ) + self._add_stats("warnings", [warning_report]) + + def pytest_plugin_registered(self, plugin: _PluggyPlugin) -> None: + if self.config.option.traceconfig: + msg = f"PLUGIN registered: {plugin}" + # XXX This event may happen during setup/teardown time + # which unfortunately captures our output here + # which garbles our output if we use self.write_line. 
+ self.write_line(msg) + + def pytest_deselected(self, items: Sequence[Item]) -> None: + self._add_stats("deselected", items) + + def pytest_runtest_logstart( + self, nodeid: str, location: Tuple[str, Optional[int], str] + ) -> None: + # Ensure that the path is printed before the + # 1st test of a module starts running. + if self.showlongtestinfo: + line = self._locationline(nodeid, *location) + self.write_ensure_prefix(line, "") + self.flush() + elif self.showfspath: + self.write_fspath_result(nodeid, "") + self.flush() + + def pytest_runtest_logreport(self, report: TestReport) -> None: + self._tests_ran = True + rep = report + + res = TestShortLogReport( + *self.config.hook.pytest_report_teststatus(report=rep, config=self.config) + ) + category, letter, word = res.category, res.letter, res.word + if not isinstance(word, tuple): + markup = None + else: + word, markup = word + self._add_stats(category, [rep]) + if not letter and not word: + # Probably passed setup/teardown. + return + running_xdist = hasattr(rep, "node") + if markup is None: + was_xfail = hasattr(report, "wasxfail") + if rep.passed and not was_xfail: + markup = {"green": True} + elif rep.passed and was_xfail: + markup = {"yellow": True} + elif rep.failed: + markup = {"red": True} + elif rep.skipped: + markup = {"yellow": True} + else: + markup = {} + if self.verbosity <= 0: + self._tw.write(letter, **markup) + else: + self._progress_nodeids_reported.add(rep.nodeid) + line = self._locationline(rep.nodeid, *rep.location) + if not running_xdist: + self.write_ensure_prefix(line, word, **markup) + if rep.skipped or hasattr(report, "wasxfail"): + reason = _get_raw_skip_reason(rep) + if self.config.option.verbose < 2: + available_width = ( + (self._tw.fullwidth - self._tw.width_of_current_line) + - len(" [100%]") + - 1 + ) + formatted_reason = _format_trimmed( + " ({})", reason, available_width + ) + else: + formatted_reason = f" ({reason})" + + if reason and formatted_reason is not None: + self.wrap_write(formatted_reason) + if self._show_progress_info: + self._write_progress_information_filling_space() + else: + self.ensure_newline() + self._tw.write("[%s]" % rep.node.gateway.id) + if self._show_progress_info: + self._tw.write( + self._get_progress_information_message() + " ", cyan=True + ) + else: + self._tw.write(" ") + self._tw.write(word, **markup) + self._tw.write(" " + line) + self.currentfspath = -2 + self.flush() + + @property + def _is_last_item(self) -> bool: + assert self._session is not None + return len(self._progress_nodeids_reported) == self._session.testscollected + + def pytest_runtest_logfinish(self, nodeid: str) -> None: + assert self._session + if self.verbosity <= 0 and self._show_progress_info: + if self._show_progress_info == "count": + num_tests = self._session.testscollected + progress_length = len(f" [{num_tests}/{num_tests}]") + else: + progress_length = len(" [100%]") + + self._progress_nodeids_reported.add(nodeid) + + if self._is_last_item: + self._write_progress_information_filling_space() + else: + main_color, _ = self._get_main_color() + w = self._width_of_current_line + past_edge = w + progress_length + 1 >= self._screen_width + if past_edge: + msg = self._get_progress_information_message() + self._tw.write(msg + "\n", **{main_color: True}) + + def _get_progress_information_message(self) -> str: + assert self._session + collected = self._session.testscollected + if self._show_progress_info == "count": + if collected: + progress = self._progress_nodeids_reported + counter_format = 
f"{{:{len(str(collected))}d}}" + format_string = f" [{counter_format}/{{}}]" + return format_string.format(len(progress), collected) + return f" [ {collected} / {collected} ]" + else: + if collected: + return " [{:3d}%]".format( + len(self._progress_nodeids_reported) * 100 // collected + ) + return " [100%]" + + def _write_progress_information_filling_space(self) -> None: + color, _ = self._get_main_color() + msg = self._get_progress_information_message() + w = self._width_of_current_line + fill = self._tw.fullwidth - w - 1 + self.write(msg.rjust(fill), flush=True, **{color: True}) + + @property + def _width_of_current_line(self) -> int: + """Return the width of the current line.""" + return self._tw.width_of_current_line + + def pytest_collection(self) -> None: + if self.isatty: + if self.config.option.verbose >= 0: + self.write("collecting ... ", flush=True, bold=True) + self._collect_report_last_write = timing.time() + elif self.config.option.verbose >= 1: + self.write("collecting ... ", flush=True, bold=True) + + def pytest_collectreport(self, report: CollectReport) -> None: + if report.failed: + self._add_stats("error", [report]) + elif report.skipped: + self._add_stats("skipped", [report]) + items = [x for x in report.result if isinstance(x, Item)] + self._numcollected += len(items) + if self.isatty: + self.report_collect() + + def report_collect(self, final: bool = False) -> None: + if self.config.option.verbose < 0: + return + + if not final: + # Only write "collecting" report every 0.5s. + t = timing.time() + if ( + self._collect_report_last_write is not None + and self._collect_report_last_write > t - REPORT_COLLECTING_RESOLUTION + ): + return + self._collect_report_last_write = t + + errors = len(self.stats.get("error", [])) + skipped = len(self.stats.get("skipped", [])) + deselected = len(self.stats.get("deselected", [])) + selected = self._numcollected - deselected + line = "collected " if final else "collecting " + line += ( + str(self._numcollected) + " item" + ("" if self._numcollected == 1 else "s") + ) + if errors: + line += " / %d error%s" % (errors, "s" if errors != 1 else "") + if deselected: + line += " / %d deselected" % deselected + if skipped: + line += " / %d skipped" % skipped + if self._numcollected > selected: + line += " / %d selected" % selected + if self.isatty: + self.rewrite(line, bold=True, erase=True) + if final: + self.write("\n") + else: + self.write_line(line) + + @hookimpl(trylast=True) + def pytest_sessionstart(self, session: "Session") -> None: + self._session = session + self._sessionstarttime = timing.time() + if not self.showheader: + return + self.write_sep("=", "test session starts", bold=True) + verinfo = platform.python_version() + if not self.no_header: + msg = f"platform {sys.platform} -- Python {verinfo}" + pypy_version_info = getattr(sys, "pypy_version_info", None) + if pypy_version_info: + verinfo = ".".join(map(str, pypy_version_info[:3])) + msg += f"[pypy-{verinfo}-{pypy_version_info[3]}]" + msg += ", pytest-{}, pluggy-{}".format( + _pytest._version.version, pluggy.__version__ + ) + if ( + self.verbosity > 0 + or self.config.option.debug + or getattr(self.config.option, "pastebin", None) + ): + msg += " -- " + str(sys.executable) + self.write_line(msg) + lines = self.config.hook.pytest_report_header( + config=self.config, start_path=self.startpath + ) + self._write_report_lines_from_hooks(lines) + + def _write_report_lines_from_hooks( + self, lines: Sequence[Union[str, Sequence[str]]] + ) -> None: + for line_or_lines in 
reversed(lines): + if isinstance(line_or_lines, str): + self.write_line(line_or_lines) + else: + for line in line_or_lines: + self.write_line(line) + + def pytest_report_header(self, config: Config) -> List[str]: + result = [f"rootdir: {config.rootpath}"] + + if config.inipath: + result.append("configfile: " + bestrelpath(config.rootpath, config.inipath)) + + if config.args_source == Config.ArgsSource.TESTPATHS: + testpaths: List[str] = config.getini("testpaths") + result.append("testpaths: {}".format(", ".join(testpaths))) + + plugininfo = config.pluginmanager.list_plugin_distinfo() + if plugininfo: + result.append("plugins: %s" % ", ".join(_plugin_nameversions(plugininfo))) + return result + + def pytest_collection_finish(self, session: "Session") -> None: + self.report_collect(True) + + lines = self.config.hook.pytest_report_collectionfinish( + config=self.config, + start_path=self.startpath, + items=session.items, + ) + self._write_report_lines_from_hooks(lines) + + if self.config.getoption("collectonly"): + if session.items: + if self.config.option.verbose > -1: + self._tw.line("") + self._printcollecteditems(session.items) + + failed = self.stats.get("failed") + if failed: + self._tw.sep("!", "collection failures") + for rep in failed: + rep.toterminal(self._tw) + + def _printcollecteditems(self, items: Sequence[Item]) -> None: + if self.config.option.verbose < 0: + if self.config.option.verbose < -1: + counts = Counter(item.nodeid.split("::", 1)[0] for item in items) + for name, count in sorted(counts.items()): + self._tw.line("%s: %d" % (name, count)) + else: + for item in items: + self._tw.line(item.nodeid) + return + stack: List[Node] = [] + indent = "" + for item in items: + needed_collectors = item.listchain()[1:] # strip root node + while stack: + if stack == needed_collectors[: len(stack)]: + break + stack.pop() + for col in needed_collectors[len(stack) :]: + stack.append(col) + indent = (len(stack) - 1) * " " + self._tw.line(f"{indent}{col}") + if self.config.option.verbose >= 1: + obj = getattr(col, "obj", None) + doc = inspect.getdoc(obj) if obj else None + if doc: + for line in doc.splitlines(): + self._tw.line("{}{}".format(indent + " ", line)) + + @hookimpl(hookwrapper=True) + def pytest_sessionfinish( + self, session: "Session", exitstatus: Union[int, ExitCode] + ): + outcome = yield + outcome.get_result() + self._tw.line("") + summary_exit_codes = ( + ExitCode.OK, + ExitCode.TESTS_FAILED, + ExitCode.INTERRUPTED, + ExitCode.USAGE_ERROR, + ExitCode.NO_TESTS_COLLECTED, + ) + if exitstatus in summary_exit_codes and not self.no_summary: + self.config.hook.pytest_terminal_summary( + terminalreporter=self, exitstatus=exitstatus, config=self.config + ) + if session.shouldfail: + self.write_sep("!", str(session.shouldfail), red=True) + if exitstatus == ExitCode.INTERRUPTED: + self._report_keyboardinterrupt() + self._keyboardinterrupt_memo = None + elif session.shouldstop: + self.write_sep("!", str(session.shouldstop), red=True) + self.summary_stats() + + @hookimpl(hookwrapper=True) + def pytest_terminal_summary(self) -> Generator[None, None, None]: + self.summary_errors() + self.summary_failures() + self.summary_warnings() + self.summary_passes() + yield + self.short_test_summary() + # Display any extra warnings from teardown here (if any). 
+ self.summary_warnings() + + def pytest_keyboard_interrupt(self, excinfo: ExceptionInfo[BaseException]) -> None: + self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True) + + def pytest_unconfigure(self) -> None: + if self._keyboardinterrupt_memo is not None: + self._report_keyboardinterrupt() + + def _report_keyboardinterrupt(self) -> None: + excrepr = self._keyboardinterrupt_memo + assert excrepr is not None + assert excrepr.reprcrash is not None + msg = excrepr.reprcrash.message + self.write_sep("!", msg) + if "KeyboardInterrupt" in msg: + if self.config.option.fulltrace: + excrepr.toterminal(self._tw) + else: + excrepr.reprcrash.toterminal(self._tw) + self._tw.line( + "(to show a full traceback on KeyboardInterrupt use --full-trace)", + yellow=True, + ) + + def _locationline( + self, nodeid: str, fspath: str, lineno: Optional[int], domain: str + ) -> str: + def mkrel(nodeid: str) -> str: + line = self.config.cwd_relative_nodeid(nodeid) + if domain and line.endswith(domain): + line = line[: -len(domain)] + values = domain.split("[") + values[0] = values[0].replace(".", "::") # don't replace '.' in params + line += "[".join(values) + return line + + # collect_fspath comes from testid which has a "/"-normalized path. + if fspath: + res = mkrel(nodeid) + if self.verbosity >= 2 and nodeid.split("::")[0] != fspath.replace( + "\\", nodes.SEP + ): + res += " <- " + bestrelpath(self.startpath, Path(fspath)) + else: + res = "[location]" + return res + " " + + def _getfailureheadline(self, rep): + head_line = rep.head_line + if head_line: + return head_line + return "test session" # XXX? + + def _getcrashline(self, rep): + try: + return str(rep.longrepr.reprcrash) + except AttributeError: + try: + return str(rep.longrepr)[:50] + except AttributeError: + return "" + + # + # Summaries for sessionfinish. 
+ # + def getreports(self, name: str): + return [x for x in self.stats.get(name, ()) if not hasattr(x, "_pdbshown")] + + def summary_warnings(self) -> None: + if self.hasopt("w"): + all_warnings: Optional[List[WarningReport]] = self.stats.get("warnings") + if not all_warnings: + return + + final = self._already_displayed_warnings is not None + if final: + warning_reports = all_warnings[self._already_displayed_warnings :] + else: + warning_reports = all_warnings + self._already_displayed_warnings = len(warning_reports) + if not warning_reports: + return + + reports_grouped_by_message: Dict[str, List[WarningReport]] = {} + for wr in warning_reports: + reports_grouped_by_message.setdefault(wr.message, []).append(wr) + + def collapsed_location_report(reports: List[WarningReport]) -> str: + locations = [] + for w in reports: + location = w.get_location(self.config) + if location: + locations.append(location) + + if len(locations) < 10: + return "\n".join(map(str, locations)) + + counts_by_filename = Counter( + str(loc).split("::", 1)[0] for loc in locations + ) + return "\n".join( + "{}: {} warning{}".format(k, v, "s" if v > 1 else "") + for k, v in counts_by_filename.items() + ) + + title = "warnings summary (final)" if final else "warnings summary" + self.write_sep("=", title, yellow=True, bold=False) + for message, message_reports in reports_grouped_by_message.items(): + maybe_location = collapsed_location_report(message_reports) + if maybe_location: + self._tw.line(maybe_location) + lines = message.splitlines() + indented = "\n".join(" " + x for x in lines) + message = indented.rstrip() + else: + message = message.rstrip() + self._tw.line(message) + self._tw.line() + self._tw.line( + "-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html" + ) + + def summary_passes(self) -> None: + if self.config.option.tbstyle != "no": + if self.hasopt("P"): + reports: List[TestReport] = self.getreports("passed") + if not reports: + return + self.write_sep("=", "PASSES") + for rep in reports: + if rep.sections: + msg = self._getfailureheadline(rep) + self.write_sep("_", msg, green=True, bold=True) + self._outrep_summary(rep) + self._handle_teardown_sections(rep.nodeid) + + def _get_teardown_reports(self, nodeid: str) -> List[TestReport]: + reports = self.getreports("") + return [ + report + for report in reports + if report.when == "teardown" and report.nodeid == nodeid + ] + + def _handle_teardown_sections(self, nodeid: str) -> None: + for report in self._get_teardown_reports(nodeid): + self.print_teardown_sections(report) + + def print_teardown_sections(self, rep: TestReport) -> None: + showcapture = self.config.option.showcapture + if showcapture == "no": + return + for secname, content in rep.sections: + if showcapture != "all" and showcapture not in secname: + continue + if "teardown" in secname: + self._tw.sep("-", secname) + if content[-1:] == "\n": + content = content[:-1] + self._tw.line(content) + + def summary_failures(self) -> None: + if self.config.option.tbstyle != "no": + reports: List[BaseReport] = self.getreports("failed") + if not reports: + return + self.write_sep("=", "FAILURES") + if self.config.option.tbstyle == "line": + for rep in reports: + line = self._getcrashline(rep) + self.write_line(line) + else: + for rep in reports: + msg = self._getfailureheadline(rep) + self.write_sep("_", msg, red=True, bold=True) + self._outrep_summary(rep) + self._handle_teardown_sections(rep.nodeid) + + def summary_errors(self) -> None: + if self.config.option.tbstyle != "no": + 
reports: List[BaseReport] = self.getreports("error") + if not reports: + return + self.write_sep("=", "ERRORS") + for rep in self.stats["error"]: + msg = self._getfailureheadline(rep) + if rep.when == "collect": + msg = "ERROR collecting " + msg + else: + msg = f"ERROR at {rep.when} of {msg}" + self.write_sep("_", msg, red=True, bold=True) + self._outrep_summary(rep) + + def _outrep_summary(self, rep: BaseReport) -> None: + rep.toterminal(self._tw) + showcapture = self.config.option.showcapture + if showcapture == "no": + return + for secname, content in rep.sections: + if showcapture != "all" and showcapture not in secname: + continue + self._tw.sep("-", secname) + if content[-1:] == "\n": + content = content[:-1] + self._tw.line(content) + + def summary_stats(self) -> None: + if self.verbosity < -1: + return + + session_duration = timing.time() - self._sessionstarttime + (parts, main_color) = self.build_summary_stats_line() + line_parts = [] + + display_sep = self.verbosity >= 0 + if display_sep: + fullwidth = self._tw.fullwidth + for text, markup in parts: + with_markup = self._tw.markup(text, **markup) + if display_sep: + fullwidth += len(with_markup) - len(text) + line_parts.append(with_markup) + msg = ", ".join(line_parts) + + main_markup = {main_color: True} + duration = f" in {format_session_duration(session_duration)}" + duration_with_markup = self._tw.markup(duration, **main_markup) + if display_sep: + fullwidth += len(duration_with_markup) - len(duration) + msg += duration_with_markup + + if display_sep: + markup_for_end_sep = self._tw.markup("", **main_markup) + if markup_for_end_sep.endswith("\x1b[0m"): + markup_for_end_sep = markup_for_end_sep[:-4] + fullwidth += len(markup_for_end_sep) + msg += markup_for_end_sep + + if display_sep: + self.write_sep("=", msg, fullwidth=fullwidth, **main_markup) + else: + self.write_line(msg, **main_markup) + + def short_test_summary(self) -> None: + if not self.reportchars: + return + + def show_simple(lines: List[str], *, stat: str) -> None: + failed = self.stats.get(stat, []) + if not failed: + return + config = self.config + for rep in failed: + color = _color_for_type.get(stat, _color_for_type_default) + line = _get_line_with_reprcrash_message( + config, rep, self._tw, {color: True} + ) + lines.append(line) + + def show_xfailed(lines: List[str]) -> None: + xfailed = self.stats.get("xfailed", []) + for rep in xfailed: + verbose_word = rep._get_verbose_word(self.config) + markup_word = self._tw.markup( + verbose_word, **{_color_for_type["warnings"]: True} + ) + nodeid = _get_node_id_with_markup(self._tw, self.config, rep) + line = f"{markup_word} {nodeid}" + reason = rep.wasxfail + if reason: + line += " - " + str(reason) + + lines.append(line) + + def show_xpassed(lines: List[str]) -> None: + xpassed = self.stats.get("xpassed", []) + for rep in xpassed: + verbose_word = rep._get_verbose_word(self.config) + markup_word = self._tw.markup( + verbose_word, **{_color_for_type["warnings"]: True} + ) + nodeid = _get_node_id_with_markup(self._tw, self.config, rep) + reason = rep.wasxfail + lines.append(f"{markup_word} {nodeid} {reason}") + + def show_skipped(lines: List[str]) -> None: + skipped: List[CollectReport] = self.stats.get("skipped", []) + fskips = _folded_skips(self.startpath, skipped) if skipped else [] + if not fskips: + return + verbose_word = skipped[0]._get_verbose_word(self.config) + markup_word = self._tw.markup( + verbose_word, **{_color_for_type["warnings"]: True} + ) + prefix = "Skipped: " + for num, fspath, lineno, reason in 
fskips: + if reason.startswith(prefix): + reason = reason[len(prefix) :] + if lineno is not None: + lines.append( + "%s [%d] %s:%d: %s" % (markup_word, num, fspath, lineno, reason) + ) + else: + lines.append("%s [%d] %s: %s" % (markup_word, num, fspath, reason)) + + REPORTCHAR_ACTIONS: Mapping[str, Callable[[List[str]], None]] = { + "x": show_xfailed, + "X": show_xpassed, + "f": partial(show_simple, stat="failed"), + "s": show_skipped, + "p": partial(show_simple, stat="passed"), + "E": partial(show_simple, stat="error"), + } + + lines: List[str] = [] + for char in self.reportchars: + action = REPORTCHAR_ACTIONS.get(char) + if action: # skipping e.g. "P" (passed with output) here. + action(lines) + + if lines: + self.write_sep("=", "short test summary info", cyan=True, bold=True) + for line in lines: + self.write_line(line) + + def _get_main_color(self) -> Tuple[str, List[str]]: + if self._main_color is None or self._known_types is None or self._is_last_item: + self._set_main_color() + assert self._main_color + assert self._known_types + return self._main_color, self._known_types + + def _determine_main_color(self, unknown_type_seen: bool) -> str: + stats = self.stats + if "failed" in stats or "error" in stats: + main_color = "red" + elif "warnings" in stats or "xpassed" in stats or unknown_type_seen: + main_color = "yellow" + elif "passed" in stats or not self._is_last_item: + main_color = "green" + else: + main_color = "yellow" + return main_color + + def _set_main_color(self) -> None: + unknown_types: List[str] = [] + for found_type in self.stats.keys(): + if found_type: # setup/teardown reports have an empty key, ignore them + if found_type not in KNOWN_TYPES and found_type not in unknown_types: + unknown_types.append(found_type) + self._known_types = list(KNOWN_TYPES) + unknown_types + self._main_color = self._determine_main_color(bool(unknown_types)) + + def build_summary_stats_line(self) -> Tuple[List[Tuple[str, Dict[str, bool]]], str]: + """ + Build the parts used in the last summary stats line. + + The summary stats line is the line shown at the end, "=== 12 passed, 2 errors in Xs===". + + This function builds a list of the "parts" that make up for the text in that line, in + the example above it would be: + + [ + ("12 passed", {"green": True}), + ("2 errors", {"red": True} + ] + + That last dict for each line is a "markup dictionary", used by TerminalWriter to + color output. + + The final color of the line is also determined by this function, and is the second + element of the returned tuple. 
+ """ + if self.config.getoption("collectonly"): + return self._build_collect_only_summary_stats_line() + else: + return self._build_normal_summary_stats_line() + + def _get_reports_to_display(self, key: str) -> List[Any]: + """Get test/collection reports for the given status key, such as `passed` or `error`.""" + reports = self.stats.get(key, []) + return [x for x in reports if getattr(x, "count_towards_summary", True)] + + def _build_normal_summary_stats_line( + self, + ) -> Tuple[List[Tuple[str, Dict[str, bool]]], str]: + main_color, known_types = self._get_main_color() + parts = [] + + for key in known_types: + reports = self._get_reports_to_display(key) + if reports: + count = len(reports) + color = _color_for_type.get(key, _color_for_type_default) + markup = {color: True, "bold": color == main_color} + parts.append(("%d %s" % pluralize(count, key), markup)) + + if not parts: + parts = [("no tests ran", {_color_for_type_default: True})] + + return parts, main_color + + def _build_collect_only_summary_stats_line( + self, + ) -> Tuple[List[Tuple[str, Dict[str, bool]]], str]: + deselected = len(self._get_reports_to_display("deselected")) + errors = len(self._get_reports_to_display("error")) + + if self._numcollected == 0: + parts = [("no tests collected", {"yellow": True})] + main_color = "yellow" + + elif deselected == 0: + main_color = "green" + collected_output = "%d %s collected" % pluralize(self._numcollected, "test") + parts = [(collected_output, {main_color: True})] + else: + all_tests_were_deselected = self._numcollected == deselected + if all_tests_were_deselected: + main_color = "yellow" + collected_output = f"no tests collected ({deselected} deselected)" + else: + main_color = "green" + selected = self._numcollected - deselected + collected_output = f"{selected}/{self._numcollected} tests collected ({deselected} deselected)" + + parts = [(collected_output, {main_color: True})] + + if errors: + main_color = _color_for_type["error"] + parts += [("%d %s" % pluralize(errors, "error"), {main_color: True})] + + return parts, main_color + + +def _get_node_id_with_markup(tw: TerminalWriter, config: Config, rep: BaseReport): + nodeid = config.cwd_relative_nodeid(rep.nodeid) + path, *parts = nodeid.split("::") + if parts: + parts_markup = tw.markup("::".join(parts), bold=True) + return path + "::" + parts_markup + else: + return path + + +def _format_trimmed(format: str, msg: str, available_width: int) -> Optional[str]: + """Format msg into format, ellipsizing it if doesn't fit in available_width. + + Returns None if even the ellipsis can't fit. + """ + # Only use the first line. + i = msg.find("\n") + if i != -1: + msg = msg[:i] + + ellipsis = "..." + format_width = wcswidth(format.format("")) + if format_width + len(ellipsis) > available_width: + return None + + if format_width + wcswidth(msg) > available_width: + available_width -= len(ellipsis) + msg = msg[:available_width] + while format_width + wcswidth(msg) > available_width: + msg = msg[:-1] + msg += ellipsis + + return format.format(msg) + + +def _get_line_with_reprcrash_message( + config: Config, rep: BaseReport, tw: TerminalWriter, word_markup: Dict[str, bool] +) -> str: + """Get summary line for a report, trying to add reprcrash message.""" + verbose_word = rep._get_verbose_word(config) + word = tw.markup(verbose_word, **word_markup) + node = _get_node_id_with_markup(tw, config, rep) + + line = f"{word} {node}" + line_width = wcswidth(line) + + try: + # Type ignored intentionally -- possible AttributeError expected. 
+ msg = rep.longrepr.reprcrash.message # type: ignore[union-attr] + except AttributeError: + pass + else: + if not running_on_ci(): + available_width = tw.fullwidth - line_width + msg = _format_trimmed(" - {}", msg, available_width) + else: + msg = f" - {msg}" + if msg is not None: + line += msg + + return line + + +def _folded_skips( + startpath: Path, + skipped: Sequence[CollectReport], +) -> List[Tuple[int, str, Optional[int], str]]: + d: Dict[Tuple[str, Optional[int], str], List[CollectReport]] = {} + for event in skipped: + assert event.longrepr is not None + assert isinstance(event.longrepr, tuple), (event, event.longrepr) + assert len(event.longrepr) == 3, (event, event.longrepr) + fspath, lineno, reason = event.longrepr + # For consistency, report all fspaths in relative form. + fspath = bestrelpath(startpath, Path(fspath)) + keywords = getattr(event, "keywords", {}) + # Folding reports with global pytestmark variable. + # This is a workaround, because for now we cannot identify the scope of a skip marker + # TODO: Revisit after marks scope would be fixed. + if ( + event.when == "setup" + and "skip" in keywords + and "pytestmark" not in keywords + ): + key: Tuple[str, Optional[int], str] = (fspath, None, reason) + else: + key = (fspath, lineno, reason) + d.setdefault(key, []).append(event) + values: List[Tuple[int, str, Optional[int], str]] = [] + for key, events in d.items(): + values.append((len(events), *key)) + return values + + +_color_for_type = { + "failed": "red", + "error": "red", + "warnings": "yellow", + "passed": "green", +} +_color_for_type_default = "yellow" + + +def pluralize(count: int, noun: str) -> Tuple[int, str]: + # No need to pluralize words such as `failed` or `passed`. + if noun not in ["error", "warnings", "test"]: + return count, noun + + # The `warnings` key is plural. To avoid API breakage, we keep it that way but + # set it to singular here so we can determine plurality in the same way as we do + # for `error`. + noun = noun.replace("warnings", "warning") + + return count, noun + "s" if count != 1 else noun + + +def _plugin_nameversions(plugininfo) -> List[str]: + values: List[str] = [] + for plugin, dist in plugininfo: + # Gets us name and version! + name = "{dist.project_name}-{dist.version}".format(dist=dist) + # Questionable convenience, but it keeps things short. + if name.startswith("pytest-"): + name = name[7:] + # We decided to print python package names they can have more than one plugin. + if name not in values: + values.append(name) + return values + + +def format_session_duration(seconds: float) -> str: + """Format the given seconds in a human readable manner to show in the final summary.""" + if seconds < 60: + return f"{seconds:.2f}s" + else: + dt = datetime.timedelta(seconds=int(seconds)) + return f"{seconds:.2f}s ({dt})" + + +def _get_raw_skip_reason(report: TestReport) -> str: + """Get the reason string of a skip/xfail/xpass test report. + + The string is just the part given by the user. 
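Note: `pytest_report_header`, invoked by the reporter above, is also a public hook that a project conftest.py can implement to append lines under the session header. A minimal sketch (the file name and returned text are illustrative, not part of the diff above):

    # conftest.py
    def pytest_report_header(config):
        # Each returned string becomes one extra line under "test session starts".
        return [f"invocation dir: {config.invocation_params.dir}"]
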
+ """ + if hasattr(report, "wasxfail"): + reason = cast(str, report.wasxfail) + if reason.startswith("reason: "): + reason = reason[len("reason: ") :] + return reason + else: + assert report.skipped + assert isinstance(report.longrepr, tuple) + _, _, reason = report.longrepr + if reason.startswith("Skipped: "): + reason = reason[len("Skipped: ") :] + elif reason == "Skipped": + reason = "" + return reason diff --git a/venv/lib/python3.11/site-packages/_pytest/threadexception.py b/venv/lib/python3.11/site-packages/_pytest/threadexception.py new file mode 100644 index 0000000..43341e7 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/threadexception.py @@ -0,0 +1,88 @@ +import threading +import traceback +import warnings +from types import TracebackType +from typing import Any +from typing import Callable +from typing import Generator +from typing import Optional +from typing import Type + +import pytest + + +# Copied from cpython/Lib/test/support/threading_helper.py, with modifications. +class catch_threading_exception: + """Context manager catching threading.Thread exception using + threading.excepthook. + + Storing exc_value using a custom hook can create a reference cycle. The + reference cycle is broken explicitly when the context manager exits. + + Storing thread using a custom hook can resurrect it if it is set to an + object which is being finalized. Exiting the context manager clears the + stored object. + + Usage: + with threading_helper.catch_threading_exception() as cm: + # code spawning a thread which raises an exception + ... + # check the thread exception: use cm.args + ... + # cm.args attribute no longer exists at this point + # (to break a reference cycle) + """ + + def __init__(self) -> None: + self.args: Optional["threading.ExceptHookArgs"] = None + self._old_hook: Optional[Callable[["threading.ExceptHookArgs"], Any]] = None + + def _hook(self, args: "threading.ExceptHookArgs") -> None: + self.args = args + + def __enter__(self) -> "catch_threading_exception": + self._old_hook = threading.excepthook + threading.excepthook = self._hook + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + assert self._old_hook is not None + threading.excepthook = self._old_hook + self._old_hook = None + del self.args + + +def thread_exception_runtest_hook() -> Generator[None, None, None]: + with catch_threading_exception() as cm: + yield + if cm.args: + thread_name = "" if cm.args.thread is None else cm.args.thread.name + msg = f"Exception in thread {thread_name}\n\n" + msg += "".join( + traceback.format_exception( + cm.args.exc_type, + cm.args.exc_value, + cm.args.exc_traceback, + ) + ) + warnings.warn(pytest.PytestUnhandledThreadExceptionWarning(msg)) + + +@pytest.hookimpl(hookwrapper=True, trylast=True) +def pytest_runtest_setup() -> Generator[None, None, None]: + yield from thread_exception_runtest_hook() + + +@pytest.hookimpl(hookwrapper=True, tryfirst=True) +def pytest_runtest_call() -> Generator[None, None, None]: + yield from thread_exception_runtest_hook() + + +@pytest.hookimpl(hookwrapper=True, tryfirst=True) +def pytest_runtest_teardown() -> Generator[None, None, None]: + yield from thread_exception_runtest_hook() diff --git a/venv/lib/python3.11/site-packages/_pytest/timing.py b/venv/lib/python3.11/site-packages/_pytest/timing.py new file mode 100644 index 0000000..925163a --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/timing.py @@ -0,0 
diff --git a/venv/lib/python3.11/site-packages/_pytest/timing.py b/venv/lib/python3.11/site-packages/_pytest/timing.py
new file mode 100644
index 0000000..925163a
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/_pytest/timing.py
@@ -0,0 +1,12 @@
+"""Indirection for time functions.
+
+We intentionally grab some "time" functions internally to avoid tests mocking "time" to affect
+pytest runtime information (issue #185).
+
+Fixture "mock_timing" also interacts with this module for pytest's own tests.
+"""
+from time import perf_counter
+from time import sleep
+from time import time
+
+__all__ = ["perf_counter", "sleep", "time"]
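Note: because this module binds `time`, `sleep`, and `perf_counter` at import time, a test that monkeypatches the `time` module does not distort pytest's own duration bookkeeping. A minimal sketch (test name is illustrative):

    import time

    def test_frozen_clock(monkeypatch):
        monkeypatch.setattr(time, "time", lambda: 0.0)
        assert time.time() == 0.0  # pytest's reported session duration stays real
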
diff --git a/venv/lib/python3.11/site-packages/_pytest/tmpdir.py b/venv/lib/python3.11/site-packages/_pytest/tmpdir.py
new file mode 100644
index 0000000..3cc2bac
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/_pytest/tmpdir.py
@@ -0,0 +1,324 @@
+"""Support for providing temporary directories to test functions."""
+import dataclasses
+import os
+import re
+import tempfile
+from pathlib import Path
+from shutil import rmtree
+from typing import Any
+from typing import Dict
+from typing import Generator
+from typing import Optional
+from typing import TYPE_CHECKING
+from typing import Union
+
+from _pytest.nodes import Item
+from _pytest.reports import CollectReport
+from _pytest.stash import StashKey
+
+if TYPE_CHECKING:
+    from typing_extensions import Literal
+
+    RetentionType = Literal["all", "failed", "none"]
+
+
+from _pytest.config.argparsing import Parser
+
+from .pathlib import LOCK_TIMEOUT
+from .pathlib import make_numbered_dir
+from .pathlib import make_numbered_dir_with_cleanup
+from .pathlib import rm_rf
+from .pathlib import cleanup_dead_symlinks
+from _pytest.compat import final, get_user_id
+from _pytest.config import Config
+from _pytest.config import ExitCode
+from _pytest.config import hookimpl
+from _pytest.deprecated import check_ispytest
+from _pytest.fixtures import fixture
+from _pytest.fixtures import FixtureRequest
+from _pytest.monkeypatch import MonkeyPatch
+
+tmppath_result_key = StashKey[Dict[str, bool]]()
+
+
+@final
+@dataclasses.dataclass
+class TempPathFactory:
+    """Factory for temporary directories under the common base temp directory.
+
+    The base directory can be configured using the ``--basetemp`` option.
+    """
+
+    _given_basetemp: Optional[Path]
+    # pluggy TagTracerSub, not currently exposed, so Any.
+    _trace: Any
+    _basetemp: Optional[Path]
+    _retention_count: int
+    _retention_policy: "RetentionType"
+
+    def __init__(
+        self,
+        given_basetemp: Optional[Path],
+        retention_count: int,
+        retention_policy: "RetentionType",
+        trace,
+        basetemp: Optional[Path] = None,
+        *,
+        _ispytest: bool = False,
+    ) -> None:
+        check_ispytest(_ispytest)
+        if given_basetemp is None:
+            self._given_basetemp = None
+        else:
+            # Use os.path.abspath() to get absolute path instead of resolve() as it
+            # does not work the same in all platforms (see #4427).
+            # Path.absolute() exists, but it is not public (see https://bugs.python.org/issue25012).
+            self._given_basetemp = Path(os.path.abspath(str(given_basetemp)))
+        self._trace = trace
+        self._retention_count = retention_count
+        self._retention_policy = retention_policy
+        self._basetemp = basetemp
+
+    @classmethod
+    def from_config(
+        cls,
+        config: Config,
+        *,
+        _ispytest: bool = False,
+    ) -> "TempPathFactory":
+        """Create a factory according to pytest configuration.
+
+        :meta private:
+        """
+        check_ispytest(_ispytest)
+        count = int(config.getini("tmp_path_retention_count"))
+        if count < 0:
+            raise ValueError(
+                f"tmp_path_retention_count must be >= 0. Current input: {count}."
+            )
+
+        policy = config.getini("tmp_path_retention_policy")
+        if policy not in ("all", "failed", "none"):
+            raise ValueError(
+                f"tmp_path_retention_policy must be either all, failed, none. Current input: {policy}."
+            )
+
+        return cls(
+            given_basetemp=config.option.basetemp,
+            trace=config.trace.get("tmpdir"),
+            retention_count=count,
+            retention_policy=policy,
+            _ispytest=True,
+        )
+
+    def _ensure_relative_to_basetemp(self, basename: str) -> str:
+        basename = os.path.normpath(basename)
+        if (self.getbasetemp() / basename).resolve().parent != self.getbasetemp():
+            raise ValueError(f"{basename} is not a normalized and relative path")
+        return basename
+
+    def mktemp(self, basename: str, numbered: bool = True) -> Path:
+        """Create a new temporary directory managed by the factory.
+
+        :param basename:
+            Directory base name, must be a relative path.
+
+        :param numbered:
+            If ``True``, ensure the directory is unique by adding a numbered
+            suffix greater than any existing one: ``basename="foo-"`` and ``numbered=True``
+            means that this function will create directories named ``"foo-0"``,
+            ``"foo-1"``, ``"foo-2"`` and so on.
+
+        :returns:
+            The path to the new directory.
+        """
+        basename = self._ensure_relative_to_basetemp(basename)
+        if not numbered:
+            p = self.getbasetemp().joinpath(basename)
+            p.mkdir(mode=0o700)
+        else:
+            p = make_numbered_dir(root=self.getbasetemp(), prefix=basename, mode=0o700)
+        self._trace("mktemp", p)
+        return p
+
+    def getbasetemp(self) -> Path:
+        """Return the base temporary directory, creating it if needed.
+
+        :returns:
+            The base temporary directory.
+        """
+        if self._basetemp is not None:
+            return self._basetemp
+
+        if self._given_basetemp is not None:
+            basetemp = self._given_basetemp
+            if basetemp.exists():
+                rm_rf(basetemp)
+            basetemp.mkdir(mode=0o700)
+            basetemp = basetemp.resolve()
+        else:
+            from_env = os.environ.get("PYTEST_DEBUG_TEMPROOT")
+            temproot = Path(from_env or tempfile.gettempdir()).resolve()
+            user = get_user() or "unknown"
+            # use a sub-directory in the temproot to speed-up
+            # make_numbered_dir() call
+            rootdir = temproot.joinpath(f"pytest-of-{user}")
+            try:
+                rootdir.mkdir(mode=0o700, exist_ok=True)
+            except OSError:
+                # getuser() likely returned illegal characters for the platform, use unknown back off mechanism
+                rootdir = temproot.joinpath("pytest-of-unknown")
+                rootdir.mkdir(mode=0o700, exist_ok=True)
+            # Because we use exist_ok=True with a predictable name, make sure
+            # we are the owners, to prevent any funny business (on unix, where
+            # temproot is usually shared).
+            # Also, to keep things private, fixup any world-readable temp
+            # rootdir's permissions. Historically 0o755 was used, so we can't
+            # just error out on this, at least for a while.
+            uid = get_user_id()
+            if uid is not None:
+                rootdir_stat = rootdir.stat()
+                if rootdir_stat.st_uid != uid:
+                    raise OSError(
+                        f"The temporary directory {rootdir} is not owned by the current user. "
+                        "Fix this and try again."
+                    )
+                if (rootdir_stat.st_mode & 0o077) != 0:
+                    os.chmod(rootdir, rootdir_stat.st_mode & ~0o077)
+            keep = self._retention_count
+            if self._retention_policy == "none":
+                keep = 0
+            basetemp = make_numbered_dir_with_cleanup(
+                prefix="pytest-",
+                root=rootdir,
+                keep=keep,
+                lock_timeout=LOCK_TIMEOUT,
+                mode=0o700,
+            )
+        assert basetemp is not None, basetemp
+        self._basetemp = basetemp
+        self._trace("new basetemp", basetemp)
+        return basetemp
+
+
+def get_user() -> Optional[str]:
+    """Return the current user name, or None if getuser() does not work
+    in the current environment (see #1010)."""
+    try:
+        # In some exotic environments, getpass may not be importable.
+        import getpass
+
+        return getpass.getuser()
+    except (ImportError, KeyError):
+        return None
+
+
+def pytest_configure(config: Config) -> None:
+    """Create a TempPathFactory and attach it to the config object.
+
+    This is to comply with existing plugins which expect the handler to be
+    available at pytest_configure time, but ideally should be moved entirely
+    to the tmp_path_factory session fixture.
+    """
+    mp = MonkeyPatch()
+    config.add_cleanup(mp.undo)
+    _tmp_path_factory = TempPathFactory.from_config(config, _ispytest=True)
+    mp.setattr(config, "_tmp_path_factory", _tmp_path_factory, raising=False)
+
+
+def pytest_addoption(parser: Parser) -> None:
+    parser.addini(
+        "tmp_path_retention_count",
+        help="How many sessions should we keep the `tmp_path` directories, according to `tmp_path_retention_policy`.",
+        default=3,
+    )
+
+    parser.addini(
+        "tmp_path_retention_policy",
+        help="Controls which directories created by the `tmp_path` fixture are kept around, based on test outcome. "
+        "(all/failed/none)",
+        default="all",
+    )
+
+
+@fixture(scope="session")
+def tmp_path_factory(request: FixtureRequest) -> TempPathFactory:
+    """Return a :class:`pytest.TempPathFactory` instance for the test session."""
+    # Set dynamically by pytest_configure() above.
+    return request.config._tmp_path_factory  # type: ignore
+
+
+def _mk_tmp(request: FixtureRequest, factory: TempPathFactory) -> Path:
+    name = request.node.name
+    name = re.sub(r"[\W]", "_", name)
+    MAXVAL = 30
+    name = name[:MAXVAL]
+    return factory.mktemp(name, numbered=True)
+
+
+@fixture
+def tmp_path(
+    request: FixtureRequest, tmp_path_factory: TempPathFactory
+) -> Generator[Path, None, None]:
+    """Return a temporary directory path object which is unique to each test
+    function invocation, created as a sub directory of the base temporary
+    directory.
+
+    By default, a new base temporary directory is created each test session,
+    and old bases are removed after 3 sessions, to aid in debugging.
+    This behavior can be configured with :confval:`tmp_path_retention_count` and
+    :confval:`tmp_path_retention_policy`.
+    If ``--basetemp`` is used then it is cleared each session. See :ref:`base
+    temporary directory`.
+
+    The returned object is a :class:`pathlib.Path` object.
+    """
+
+    path = _mk_tmp(request, tmp_path_factory)
+    yield path
+
+    # Remove the tmpdir if the policy is "failed" and the test passed.
+    tmp_path_factory: TempPathFactory = request.session.config._tmp_path_factory  # type: ignore
+    policy = tmp_path_factory._retention_policy
+    result_dict = request.node.stash[tmppath_result_key]
+
+    if policy == "failed" and result_dict.get("call", True):
+        # We do a "best effort" to remove files, but it might not be possible due to some leaked resource,
+        # permissions, etc, in which case we ignore it.
+        rmtree(path, ignore_errors=True)
+
+    del request.node.stash[tmppath_result_key]
+
+
+def pytest_sessionfinish(session, exitstatus: Union[int, ExitCode]):
+    """After each session, remove base directory if all the tests passed,
+    the policy is "failed", and the basetemp is not specified by a user.
+    """
+    tmp_path_factory: TempPathFactory = session.config._tmp_path_factory
+    basetemp = tmp_path_factory._basetemp
+    if basetemp is None:
+        return
+
+    policy = tmp_path_factory._retention_policy
+    if (
+        exitstatus == 0
+        and policy == "failed"
+        and tmp_path_factory._given_basetemp is None
+    ):
+        if basetemp.is_dir():
+            # We do a "best effort" to remove files, but it might not be possible due to some leaked resource,
+            # permissions, etc, in which case we ignore it.
+            rmtree(basetemp, ignore_errors=True)
+
+    # Remove dead symlinks.
+    if basetemp.is_dir():
+        cleanup_dead_symlinks(basetemp)
+
+
+@hookimpl(tryfirst=True, hookwrapper=True)
+def pytest_runtest_makereport(item: Item, call):
+    outcome = yield
+    result: CollectReport = outcome.get_result()
+
+    empty: Dict[str, bool] = {}
+    item.stash.setdefault(tmppath_result_key, empty)[result.when] = result.passed
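Note: `tmp_path` and `tmp_path_factory`, defined above, are the public entry points of this module. A minimal usage sketch (file name, test names, and the "data-" prefix are illustrative):

    # test_tmp.py
    def test_write(tmp_path):
        f = tmp_path / "hello.txt"
        f.write_text("hi")
        assert f.read_text() == "hi"

    def test_shared_dir(tmp_path_factory):
        # Numbered suffix: data-0, data-1, ... under pytest-of-<user>/pytest-N/
        d = tmp_path_factory.mktemp("data-")
        assert d.is_dir()
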
diff --git a/venv/lib/python3.11/site-packages/_pytest/unittest.py b/venv/lib/python3.11/site-packages/_pytest/unittest.py
new file mode 100644
index 0000000..d42a12a
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/_pytest/unittest.py
@@ -0,0 +1,421 @@
+"""Discover and run std-library "unittest" style tests."""
+import sys
+import traceback
+import types
+from typing import Any
+from typing import Callable
+from typing import Generator
+from typing import Iterable
+from typing import List
+from typing import Optional
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import Union
+
+import _pytest._code
+import pytest
+from _pytest.compat import getimfunc
+from _pytest.compat import is_async_function
+from _pytest.config import hookimpl
+from _pytest.fixtures import FixtureRequest
+from _pytest.nodes import Collector
+from _pytest.nodes import Item
+from _pytest.outcomes import exit
+from _pytest.outcomes import fail
+from _pytest.outcomes import skip
+from _pytest.outcomes import xfail
+from _pytest.python import Class
+from _pytest.python import Function
+from _pytest.python import Module
+from _pytest.runner import CallInfo
+from _pytest.scope import Scope
+
+if TYPE_CHECKING:
+    import unittest
+    import twisted.trial.unittest
+
+    _SysExcInfoType = Union[
+        Tuple[Type[BaseException], BaseException, types.TracebackType],
+        Tuple[None, None, None],
+    ]
+
+
+def pytest_pycollect_makeitem(
+    collector: Union[Module, Class], name: str, obj: object
+) -> Optional["UnitTestCase"]:
+    # Has unittest been imported and is obj a subclass of its TestCase?
+    try:
+        ut = sys.modules["unittest"]
+        # Type ignored because `ut` is an opaque module.
+        if not issubclass(obj, ut.TestCase):  # type: ignore
+            return None
+    except Exception:
+        return None
+    # Yes, so let's collect it.
+    item: UnitTestCase = UnitTestCase.from_parent(collector, name=name, obj=obj)
+    return item
+
+
+class UnitTestCase(Class):
+    # Marker for fixturemanger.getfixtureinfo()
+    # to declare that our children do not support funcargs.
+    nofuncargs = True
+
+    def collect(self) -> Iterable[Union[Item, Collector]]:
+        from unittest import TestLoader
+
+        cls = self.obj
+        if not getattr(cls, "__test__", True):
+            return
+
+        skipped = _is_skipped(cls)
+        if not skipped:
+            self._inject_setup_teardown_fixtures(cls)
+            self._inject_setup_class_fixture()
+
+        self.session._fixturemanager.parsefactories(self, unittest=True)
+        loader = TestLoader()
+        foundsomething = False
+        for name in loader.getTestCaseNames(self.obj):
+            x = getattr(self.obj, name)
+            if not getattr(x, "__test__", True):
+                continue
+            funcobj = getimfunc(x)
+            yield TestCaseFunction.from_parent(self, name=name, callobj=funcobj)
+            foundsomething = True
+
+        if not foundsomething:
+            runtest = getattr(self.obj, "runTest", None)
+            if runtest is not None:
+                ut = sys.modules.get("twisted.trial.unittest", None)
+                # Type ignored because `ut` is an opaque module.
+                if ut is None or runtest != ut.TestCase.runTest:  # type: ignore
+                    yield TestCaseFunction.from_parent(self, name="runTest")
+
+    def _inject_setup_teardown_fixtures(self, cls: type) -> None:
+        """Injects a hidden auto-use fixture to invoke setUpClass/setup_method and corresponding
+        teardown functions (#517)."""
+        class_fixture = _make_xunit_fixture(
+            cls,
+            "setUpClass",
+            "tearDownClass",
+            "doClassCleanups",
+            scope=Scope.Class,
+            pass_self=False,
+        )
+        if class_fixture:
+            cls.__pytest_class_setup = class_fixture  # type: ignore[attr-defined]
+
+        method_fixture = _make_xunit_fixture(
+            cls,
+            "setup_method",
+            "teardown_method",
+            None,
+            scope=Scope.Function,
+            pass_self=True,
+        )
+        if method_fixture:
+            cls.__pytest_method_setup = method_fixture  # type: ignore[attr-defined]
+
+
+def _make_xunit_fixture(
+    obj: type,
+    setup_name: str,
+    teardown_name: str,
+    cleanup_name: Optional[str],
+    scope: Scope,
+    pass_self: bool,
+):
+    setup = getattr(obj, setup_name, None)
+    teardown = getattr(obj, teardown_name, None)
+    if setup is None and teardown is None:
+        return None
+
+    if cleanup_name:
+        cleanup = getattr(obj, cleanup_name, lambda *args: None)
+    else:
+
+        def cleanup(*args):
+            pass
+
+    @pytest.fixture(
+        scope=scope.value,
+        autouse=True,
+        # Use a unique name to speed up lookup.
+        name=f"_unittest_{setup_name}_fixture_{obj.__qualname__}",
+    )
+    def fixture(self, request: FixtureRequest) -> Generator[None, None, None]:
+        if _is_skipped(self):
+            reason = self.__unittest_skip_why__
+            raise pytest.skip.Exception(reason, _use_item_location=True)
+        if setup is not None:
+            try:
+                if pass_self:
+                    setup(self, request.function)
+                else:
+                    setup()
+            # unittest does not call the cleanup function for every BaseException, so we
+            # follow this here.
+            except Exception:
+                if pass_self:
+                    cleanup(self)
+                else:
+                    cleanup()
+
+                raise
+        yield
+        try:
+            if teardown is not None:
+                if pass_self:
+                    teardown(self, request.function)
+                else:
+                    teardown()
+        finally:
+            if pass_self:
+                cleanup(self)
+            else:
+                cleanup()
+
+    return fixture
+
+
+class TestCaseFunction(Function):
+    nofuncargs = True
+    _excinfo: Optional[List[_pytest._code.ExceptionInfo[BaseException]]] = None
+    _testcase: Optional["unittest.TestCase"] = None
+
+    def _getobj(self):
+        assert self.parent is not None
+        # Unlike a regular Function in a Class, where `item.obj` returns
+        # a *bound* method (attached to an instance), TestCaseFunction's
+        # `obj` returns an *unbound* method (not attached to an instance).
+        # This inconsistency is probably not desirable, but needs some
+        # consideration before changing.
+        return getattr(self.parent.obj, self.originalname)  # type: ignore[attr-defined]
+
+    def setup(self) -> None:
+        # A bound method to be called during teardown() if set (see 'runtest()').
+        self._explicit_tearDown: Optional[Callable[[], None]] = None
+        assert self.parent is not None
+        self._testcase = self.parent.obj(self.name)  # type: ignore[attr-defined]
+        self._obj = getattr(self._testcase, self.name)
+        if hasattr(self, "_request"):
+            self._request._fillfixtures()
+
+    def teardown(self) -> None:
+        if self._explicit_tearDown is not None:
+            self._explicit_tearDown()
+            self._explicit_tearDown = None
+        self._testcase = None
+        self._obj = None
+
+    def startTest(self, testcase: "unittest.TestCase") -> None:
+        pass
+
+    def _addexcinfo(self, rawexcinfo: "_SysExcInfoType") -> None:
+        # Unwrap potential exception info (see twisted trial support below).
+        rawexcinfo = getattr(rawexcinfo, "_rawexcinfo", rawexcinfo)
+        try:
+            excinfo = _pytest._code.ExceptionInfo[BaseException].from_exc_info(rawexcinfo)  # type: ignore[arg-type]
+            # Invoke the attributes to trigger storing the traceback
+            # trial causes some issue there.
+            excinfo.value
+            excinfo.traceback
+        except TypeError:
+            try:
+                try:
+                    values = traceback.format_exception(*rawexcinfo)
+                    values.insert(
+                        0,
+                        "NOTE: Incompatible Exception Representation, "
+                        "displaying natively:\n\n",
+                    )
+                    fail("".join(values), pytrace=False)
+                except (fail.Exception, KeyboardInterrupt):
+                    raise
+                except BaseException:
+                    fail(
+                        "ERROR: Unknown Incompatible Exception "
+                        "representation:\n%r" % (rawexcinfo,),
+                        pytrace=False,
+                    )
+            except KeyboardInterrupt:
+                raise
+            except fail.Exception:
+                excinfo = _pytest._code.ExceptionInfo.from_current()
+        self.__dict__.setdefault("_excinfo", []).append(excinfo)
+
+    def addError(
+        self, testcase: "unittest.TestCase", rawexcinfo: "_SysExcInfoType"
+    ) -> None:
+        try:
+            if isinstance(rawexcinfo[1], exit.Exception):
+                exit(rawexcinfo[1].msg)
+        except TypeError:
+            pass
+        self._addexcinfo(rawexcinfo)
+
+    def addFailure(
+        self, testcase: "unittest.TestCase", rawexcinfo: "_SysExcInfoType"
+    ) -> None:
+        self._addexcinfo(rawexcinfo)
+
+    def addSkip(self, testcase: "unittest.TestCase", reason: str) -> None:
+        try:
+            raise pytest.skip.Exception(reason, _use_item_location=True)
+        except skip.Exception:
+            self._addexcinfo(sys.exc_info())
+
+    def addExpectedFailure(
+        self,
+        testcase: "unittest.TestCase",
+        rawexcinfo: "_SysExcInfoType",
+        reason: str = "",
+    ) -> None:
+        try:
+            xfail(str(reason))
+        except xfail.Exception:
+            self._addexcinfo(sys.exc_info())
+
+    def addUnexpectedSuccess(
+        self,
+        testcase: "unittest.TestCase",
+        reason: Optional["twisted.trial.unittest.Todo"] = None,
+    ) -> None:
+        msg = "Unexpected success"
+        if reason:
+            msg += f": {reason.reason}"
+        # Preserve unittest behaviour - fail the test. Explicitly not an XPASS.
+        try:
+            fail(msg, pytrace=False)
+        except fail.Exception:
+            self._addexcinfo(sys.exc_info())
+
+    def addSuccess(self, testcase: "unittest.TestCase") -> None:
+        pass
+
+    def stopTest(self, testcase: "unittest.TestCase") -> None:
+        pass
+
+    def addDuration(self, testcase: "unittest.TestCase", elapsed: float) -> None:
+        pass
+
+    def runtest(self) -> None:
+        from _pytest.debugging import maybe_wrap_pytest_function_for_tracing
+
+        assert self._testcase is not None
+
+        maybe_wrap_pytest_function_for_tracing(self)
+
+        # Let the unittest framework handle async functions.
+        if is_async_function(self.obj):
+            # Type ignored because self acts as the TestResult, but is not actually one.
+            self._testcase(result=self)  # type: ignore[arg-type]
+        else:
+            # When --pdb is given, we want to postpone calling tearDown() otherwise
+            # when entering the pdb prompt, tearDown() would have probably cleaned up
+            # instance variables, which makes it difficult to debug.
+            # Arguably we could always postpone tearDown(), but this changes the moment where the
+            # TestCase instance interacts with the results object, so better to only do it
+            # when absolutely needed.
+            # We need to consider if the test itself is skipped, or the whole class.
+            assert isinstance(self.parent, UnitTestCase)
+            skipped = _is_skipped(self.obj) or _is_skipped(self.parent.obj)
+            if self.config.getoption("usepdb") and not skipped:
+                self._explicit_tearDown = self._testcase.tearDown
+                setattr(self._testcase, "tearDown", lambda *args: None)
+
+            # We need to update the actual bound method with self.obj, because
+            # wrap_pytest_function_for_tracing replaces self.obj by a wrapper.
+            setattr(self._testcase, self.name, self.obj)
+            try:
+                self._testcase(result=self)  # type: ignore[arg-type]
+            finally:
+                delattr(self._testcase, self.name)
+
+    def _traceback_filter(
+        self, excinfo: _pytest._code.ExceptionInfo[BaseException]
+    ) -> _pytest._code.Traceback:
+        traceback = super()._traceback_filter(excinfo)
+        ntraceback = traceback.filter(
+            lambda x: not x.frame.f_globals.get("__unittest"),
+        )
+        if not ntraceback:
+            ntraceback = traceback
+        return ntraceback
+
+
+@hookimpl(tryfirst=True)
+def pytest_runtest_makereport(item: Item, call: CallInfo[None]) -> None:
+    if isinstance(item, TestCaseFunction):
+        if item._excinfo:
+            call.excinfo = item._excinfo.pop(0)
+            try:
+                del call.result
+            except AttributeError:
+                pass
+
+    # Convert unittest.SkipTest to pytest.skip.
+    # This is actually only needed for nose, which reuses unittest.SkipTest for
+    # its own nose.SkipTest. For unittest TestCases, SkipTest is already
+    # handled internally, and doesn't reach here.
+    unittest = sys.modules.get("unittest")
+    if (
+        unittest
+        and call.excinfo
+        and isinstance(call.excinfo.value, unittest.SkipTest)  # type: ignore[attr-defined]
+    ):
+        excinfo = call.excinfo
+        call2 = CallInfo[None].from_call(
+            lambda: pytest.skip(str(excinfo.value)), call.when
+        )
+        call.excinfo = call2.excinfo
+
+
+# Twisted trial support.
+
+
+@hookimpl(hookwrapper=True)
+def pytest_runtest_protocol(item: Item) -> Generator[None, None, None]:
+    if isinstance(item, TestCaseFunction) and "twisted.trial.unittest" in sys.modules:
+        ut: Any = sys.modules["twisted.python.failure"]
+        Failure__init__ = ut.Failure.__init__
+        check_testcase_implements_trial_reporter()
+
+        def excstore(
+            self, exc_value=None, exc_type=None, exc_tb=None, captureVars=None
+        ):
+            if exc_value is None:
+                self._rawexcinfo = sys.exc_info()
+            else:
+                if exc_type is None:
+                    exc_type = type(exc_value)
+                self._rawexcinfo = (exc_type, exc_value, exc_tb)
+            try:
+                Failure__init__(
+                    self, exc_value, exc_type, exc_tb, captureVars=captureVars
+                )
+            except TypeError:
+                Failure__init__(self, exc_value, exc_type, exc_tb)
+
+        ut.Failure.__init__ = excstore
+        yield
+        ut.Failure.__init__ = Failure__init__
+    else:
+        yield
+
+
+def check_testcase_implements_trial_reporter(done: List[int] = []) -> None:
+    if done:
+        return
+    from zope.interface import classImplements
+    from twisted.trial.itrial import IReporter
+
+    classImplements(TestCaseFunction, IReporter)
+    done.append(1)
+
+
+def _is_skipped(obj) -> bool:
+    """Return True if the given object has been marked with @unittest.skip."""
+    return bool(getattr(obj, "__unittest_skip__", False))
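Note: the plugin above is what lets plain `unittest.TestCase` classes run under pytest without modification. A minimal sketch of a test case it would collect (file, class, and test names are illustrative):

    # test_legacy.py
    import unittest

    class TestLegacy(unittest.TestCase):
        def setUp(self):
            self.value = 41

        def test_increment(self):
            self.assertEqual(self.value + 1, 42)
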
+ self.unraisable = unraisable + + def __enter__(self) -> "catch_unraisable_exception": + self._old_hook = sys.unraisablehook + sys.unraisablehook = self._hook + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + assert self._old_hook is not None + sys.unraisablehook = self._old_hook + self._old_hook = None + del self.unraisable + + +def unraisable_exception_runtest_hook() -> Generator[None, None, None]: + with catch_unraisable_exception() as cm: + yield + if cm.unraisable: + if cm.unraisable.err_msg is not None: + err_msg = cm.unraisable.err_msg + else: + err_msg = "Exception ignored in" + msg = f"{err_msg}: {cm.unraisable.object!r}\n\n" + msg += "".join( + traceback.format_exception( + cm.unraisable.exc_type, + cm.unraisable.exc_value, + cm.unraisable.exc_traceback, + ) + ) + warnings.warn(pytest.PytestUnraisableExceptionWarning(msg)) + + +@pytest.hookimpl(hookwrapper=True, tryfirst=True) +def pytest_runtest_setup() -> Generator[None, None, None]: + yield from unraisable_exception_runtest_hook() + + +@pytest.hookimpl(hookwrapper=True, tryfirst=True) +def pytest_runtest_call() -> Generator[None, None, None]: + yield from unraisable_exception_runtest_hook() + + +@pytest.hookimpl(hookwrapper=True, tryfirst=True) +def pytest_runtest_teardown() -> Generator[None, None, None]: + yield from unraisable_exception_runtest_hook() diff --git a/venv/lib/python3.11/site-packages/_pytest/warning_types.py b/venv/lib/python3.11/site-packages/_pytest/warning_types.py new file mode 100644 index 0000000..bd5f418 --- /dev/null +++ b/venv/lib/python3.11/site-packages/_pytest/warning_types.py @@ -0,0 +1,170 @@ +import dataclasses +import inspect +import warnings +from types import FunctionType +from typing import Any +from typing import Generic +from typing import Type +from typing import TypeVar + +from _pytest.compat import final + + +class PytestWarning(UserWarning): + """Base class for all warnings emitted by pytest.""" + + __module__ = "pytest" + + +@final +class PytestAssertRewriteWarning(PytestWarning): + """Warning emitted by the pytest assert rewrite module.""" + + __module__ = "pytest" + + +@final +class PytestCacheWarning(PytestWarning): + """Warning emitted by the cache plugin in various situations.""" + + __module__ = "pytest" + + +@final +class PytestConfigWarning(PytestWarning): + """Warning emitted for configuration issues.""" + + __module__ = "pytest" + + +@final +class PytestCollectionWarning(PytestWarning): + """Warning emitted when pytest is not able to collect a file or symbol in a module.""" + + __module__ = "pytest" + + +class PytestDeprecationWarning(PytestWarning, DeprecationWarning): + """Warning class for features that will be removed in a future version.""" + + __module__ = "pytest" + + +class PytestRemovedIn8Warning(PytestDeprecationWarning): + """Warning class for features that will be removed in pytest 8.""" + + __module__ = "pytest" + + +class PytestReturnNotNoneWarning(PytestRemovedIn8Warning): + """Warning emitted when a test function is returning value other than None.""" + + __module__ = "pytest" + + +@final +class PytestExperimentalApiWarning(PytestWarning, FutureWarning): + """Warning category used to denote experiments in pytest. + + Use sparingly as the API might change or even be removed completely in a + future version. 
+    """
+
+    __module__ = "pytest"
+
+    @classmethod
+    def simple(cls, apiname: str) -> "PytestExperimentalApiWarning":
+        return cls(
+            "{apiname} is an experimental api that may change over time".format(
+                apiname=apiname
+            )
+        )
+
+
+@final
+class PytestUnhandledCoroutineWarning(PytestReturnNotNoneWarning):
+    """Warning emitted for an unhandled coroutine.
+
+    A coroutine was encountered when collecting test functions, but was not
+    handled by any async-aware plugin.
+    Coroutine test functions are not natively supported.
+    """
+
+    __module__ = "pytest"
+
+
+@final
+class PytestUnknownMarkWarning(PytestWarning):
+    """Warning emitted on use of unknown markers.
+
+    See :ref:`mark` for details.
+    """
+
+    __module__ = "pytest"
+
+
+@final
+class PytestUnraisableExceptionWarning(PytestWarning):
+    """An unraisable exception was reported.
+
+    Unraisable exceptions are exceptions raised in :meth:`__del__ <object.__del__>`
+    implementations and similar situations when the exception cannot be raised
+    as normal.
+    """
+
+    __module__ = "pytest"
+
+
+@final
+class PytestUnhandledThreadExceptionWarning(PytestWarning):
+    """An unhandled exception occurred in a :class:`~threading.Thread`.
+
+    Such exceptions don't propagate normally.
+    """
+
+    __module__ = "pytest"
+
+
+_W = TypeVar("_W", bound=PytestWarning)
+
+
+@final
+@dataclasses.dataclass
+class UnformattedWarning(Generic[_W]):
+    """A warning meant to be formatted during runtime.
+
+    This is used to hold warnings that need to format their message at runtime,
+    as opposed to a direct message.
+    """
+
+    category: Type["_W"]
+    template: str
+
+    def format(self, **kwargs: Any) -> _W:
+        """Return an instance of the warning category, formatted with given kwargs."""
+        return self.category(self.template.format(**kwargs))
+
+
+def warn_explicit_for(method: FunctionType, message: PytestWarning) -> None:
+    """
+    Issue the warning ``message`` for the definition of the given ``method``.
+
+    This helps to log warnings for functions defined prior to finding an issue
+    with them (like hook wrappers being marked in a legacy mechanism).
+    """
+    lineno = method.__code__.co_firstlineno
+    filename = inspect.getfile(method)
+    module = method.__module__
+    mod_globals = method.__globals__
+    try:
+        warnings.warn_explicit(
+            message,
+            type(message),
+            filename=filename,
+            module=module,
+            registry=mod_globals.setdefault("__warningregistry__", {}),
+            lineno=lineno,
+        )
+    except Warning as w:
+        # If warnings are errors (e.g. -Werror), location information gets lost, so we add it to the message.
+        raise type(w)(f"{w}\n at {filename}:{lineno}") from None
diff --git a/venv/lib/python3.11/site-packages/_pytest/warnings.py b/venv/lib/python3.11/site-packages/_pytest/warnings.py
new file mode 100644
index 0000000..4aaa944
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/_pytest/warnings.py
@@ -0,0 +1,148 @@
+import sys
+import warnings
+from contextlib import contextmanager
+from typing import Generator
+from typing import Optional
+from typing import TYPE_CHECKING
+
+import pytest
+from _pytest.config import apply_warning_filters
+from _pytest.config import Config
+from _pytest.config import parse_warning_filter
+from _pytest.main import Session
+from _pytest.nodes import Item
+from _pytest.terminal import TerminalReporter
+
+if TYPE_CHECKING:
+    from typing_extensions import Literal
+
+
+def pytest_configure(config: Config) -> None:
+    config.addinivalue_line(
+        "markers",
+        "filterwarnings(warning): add a warning filter to the given test. "
+        "see https://docs.pytest.org/en/stable/how-to/capture-warnings.html#pytest-mark-filterwarnings ",
+    )
+
+
+@contextmanager
+def catch_warnings_for_item(
+    config: Config,
+    ihook,
+    when: "Literal['config', 'collect', 'runtest']",
+    item: Optional[Item],
+) -> Generator[None, None, None]:
+    """Context manager that catches warnings generated in the contained execution block.
+
+    ``item`` can be None if we are not in the context of an item execution.
+
+    Each warning captured triggers the ``pytest_warning_recorded`` hook.
+    """
+    config_filters = config.getini("filterwarnings")
+    cmdline_filters = config.known_args_namespace.pythonwarnings or []
+    with warnings.catch_warnings(record=True) as log:
+        # mypy can't infer that record=True means log is not None; help it.
+        assert log is not None
+
+        if not sys.warnoptions:
+            # If user is not explicitly configuring warning filters, show deprecation warnings by default (#2908).
+            warnings.filterwarnings("always", category=DeprecationWarning)
+            warnings.filterwarnings("always", category=PendingDeprecationWarning)
+
+        apply_warning_filters(config_filters, cmdline_filters)
+
+        # apply filters from "filterwarnings" marks
+        nodeid = "" if item is None else item.nodeid
+        if item is not None:
+            for mark in item.iter_markers(name="filterwarnings"):
+                for arg in mark.args:
+                    warnings.filterwarnings(*parse_warning_filter(arg, escape=False))
+
+        yield
+
+        for warning_message in log:
+            ihook.pytest_warning_recorded.call_historic(
+                kwargs=dict(
+                    warning_message=warning_message,
+                    nodeid=nodeid,
+                    when=when,
+                    location=None,
+                )
+            )
+
+
+def warning_record_to_str(warning_message: warnings.WarningMessage) -> str:
+    """Convert a warnings.WarningMessage to a string."""
+    warn_msg = warning_message.message
+    msg = warnings.formatwarning(
+        str(warn_msg),
+        warning_message.category,
+        warning_message.filename,
+        warning_message.lineno,
+        warning_message.line,
+    )
+    if warning_message.source is not None:
+        try:
+            import tracemalloc
+        except ImportError:
+            pass
+        else:
+            tb = tracemalloc.get_object_traceback(warning_message.source)
+            if tb is not None:
+                formatted_tb = "\n".join(tb.format())
+                # Use a leading new line to better separate the (large) output
+                # from the traceback to the previous warning text.
+                msg += f"\nObject allocated at:\n{formatted_tb}"
+            else:
+                # No need for a leading new line.
+                url = "https://docs.pytest.org/en/stable/how-to/capture-warnings.html#resource-warnings"
+                msg += "Enable tracemalloc to get traceback where the object was allocated.\n"
+                msg += f"See {url} for more info."
+ return msg + + +@pytest.hookimpl(hookwrapper=True, tryfirst=True) +def pytest_runtest_protocol(item: Item) -> Generator[None, None, None]: + with catch_warnings_for_item( + config=item.config, ihook=item.ihook, when="runtest", item=item + ): + yield + + +@pytest.hookimpl(hookwrapper=True, tryfirst=True) +def pytest_collection(session: Session) -> Generator[None, None, None]: + config = session.config + with catch_warnings_for_item( + config=config, ihook=config.hook, when="collect", item=None + ): + yield + + +@pytest.hookimpl(hookwrapper=True) +def pytest_terminal_summary( + terminalreporter: TerminalReporter, +) -> Generator[None, None, None]: + config = terminalreporter.config + with catch_warnings_for_item( + config=config, ihook=config.hook, when="config", item=None + ): + yield + + +@pytest.hookimpl(hookwrapper=True) +def pytest_sessionfinish(session: Session) -> Generator[None, None, None]: + config = session.config + with catch_warnings_for_item( + config=config, ihook=config.hook, when="config", item=None + ): + yield + + +@pytest.hookimpl(hookwrapper=True) +def pytest_load_initial_conftests( + early_config: "Config", +) -> Generator[None, None, None]: + with catch_warnings_for_item( + config=early_config, ihook=early_config.hook, when="config", item=None + ): + yield diff --git a/venv/lib/python3.11/site-packages/annotated_types-0.7.0.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/annotated_types-0.7.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.11/site-packages/annotated_types-0.7.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.11/site-packages/annotated_types-0.7.0.dist-info/METADATA b/venv/lib/python3.11/site-packages/annotated_types-0.7.0.dist-info/METADATA new file mode 100644 index 0000000..3ac05cf --- /dev/null +++ b/venv/lib/python3.11/site-packages/annotated_types-0.7.0.dist-info/METADATA @@ -0,0 +1,295 @@ +Metadata-Version: 2.3 +Name: annotated-types +Version: 0.7.0 +Summary: Reusable constraint types to use with typing.Annotated +Project-URL: Homepage, https://github.com/annotated-types/annotated-types +Project-URL: Source, https://github.com/annotated-types/annotated-types +Project-URL: Changelog, https://github.com/annotated-types/annotated-types/releases +Author-email: Adrian Garcia Badaracco <1755071+adriangb@users.noreply.github.com>, Samuel Colvin , Zac Hatfield-Dodds +License-File: LICENSE +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Console +Classifier: Environment :: MacOS X +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Information Technology +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: Unix +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Typing :: Typed +Requires-Python: >=3.8 +Requires-Dist: typing-extensions>=4.0.0; python_version < '3.9' +Description-Content-Type: text/markdown + +# annotated-types + 
+[![CI](https://github.com/annotated-types/annotated-types/workflows/CI/badge.svg?event=push)](https://github.com/annotated-types/annotated-types/actions?query=event%3Apush+branch%3Amain+workflow%3ACI)
+[![pypi](https://img.shields.io/pypi/v/annotated-types.svg)](https://pypi.python.org/pypi/annotated-types)
+[![versions](https://img.shields.io/pypi/pyversions/annotated-types.svg)](https://github.com/annotated-types/annotated-types)
+[![license](https://img.shields.io/github/license/annotated-types/annotated-types.svg)](https://github.com/annotated-types/annotated-types/blob/main/LICENSE)
+
+[PEP-593](https://peps.python.org/pep-0593/) added `typing.Annotated` as a way of
+adding context-specific metadata to existing types, and specifies that
+`Annotated[T, x]` _should_ be treated as `T` by any tool or library without special
+logic for `x`.
+
+This package provides metadata objects which can be used to represent common
+constraints such as upper and lower bounds on scalar values and collection sizes,
+a `Predicate` marker for runtime checks, and
+descriptions of how we intend these metadata to be interpreted. In some cases,
+we also note alternative representations which do not require this package.
+
+## Install
+
+```bash
+pip install annotated-types
+```
+
+## Examples
+
+```python
+from typing import Annotated
+from annotated_types import Gt, Len, Predicate
+
+class MyClass:
+    age: Annotated[int, Gt(18)]                         # Valid: 19, 20, ...
+                                                        # Invalid: 17, 18, "19", 19.0, ...
+    factors: list[Annotated[int, Predicate(is_prime)]]  # Valid: 2, 3, 5, 7, 11, ...
+                                                        # Invalid: 4, 8, -2, 5.0, "prime", ...
+
+    my_list: Annotated[list[int], Len(0, 10)]           # Valid: [], [10, 20, 30, 40, 50]
+                                                        # Invalid: (1, 2), ["abc"], [0] * 20
+```
+
+## Documentation
+
+_While `annotated-types` avoids runtime checks for performance, users should not
+construct invalid combinations such as `MultipleOf("non-numeric")` or `Annotated[int, Len(3)]`.
+Downstream implementors may choose to raise an error, emit a warning, silently ignore
+a metadata item, etc., if the metadata objects described below are used with an
+incompatible type - or for any other reason!_
+
+### Gt, Ge, Lt, Le
+
+Express inclusive and/or exclusive bounds on orderable values - which may be numbers,
+dates, times, strings, sets, etc. Note that the boundary value need not be of the
+same type that was annotated, so long as they can be compared: `Annotated[int, Gt(1.5)]`
+is fine, for example, and implies that the value is an integer x such that `x > 1.5`.
+
+We suggest that implementors may also interpret `functools.partial(operator.lt, 1.5)`
+as being equivalent to `Gt(1.5)` (note that `partial` binds `1.5` as the left-hand
+operand, so `operator.lt(1.5, value)` is exactly `value > 1.5`), for users who wish
+to avoid a runtime dependency on the `annotated-types` package.
+
+To be explicit, these types have the following meanings:
+
+* `Gt(x)` - value must be "Greater Than" `x` - equivalent to exclusive minimum
+* `Ge(x)` - value must be "Greater than or Equal" to `x` - equivalent to inclusive minimum
+* `Lt(x)` - value must be "Less Than" `x` - equivalent to exclusive maximum
+* `Le(x)` - value must be "Less than or Equal" to `x` - equivalent to inclusive maximum
+
+### Interval
+
+`Interval(gt, ge, lt, le)` allows you to specify an upper and lower bound with a single
+metadata object. `None` attributes should be ignored, and non-`None` attributes
+treated as per the single bounds above.
+
+### MultipleOf
+
+`MultipleOf(multiple_of=x)` might be interpreted in two ways:
+
+1. Python semantics, implying `value % multiple_of == 0`, or
+2. [JSONschema semantics](https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.2.1),
+   where `int(value / multiple_of) == value / multiple_of`.
+
+We encourage users to be aware of these two common interpretations and their
+distinct behaviours, especially since very large or non-integer numbers make
+it easy to cause silent data corruption due to floating-point imprecision.
+
+We encourage libraries to carefully document which interpretation they implement.
+
+### MinLen, MaxLen, Len
+
+`Len()` implies that `min_length <= len(value) <= max_length` - lower and upper bounds are inclusive.
+
+As well as `Len()` which can optionally include upper and lower bounds, we also
+provide `MinLen(x)` and `MaxLen(y)` which are equivalent to `Len(min_length=x)`
+and `Len(max_length=y)` respectively.
+
+`Len`, `MinLen`, and `MaxLen` may be used with any type which supports `len(value)`.
+
+Examples of usage:
+
+* `Annotated[list, MaxLen(10)]` (or `Annotated[list, Len(max_length=10)]`) - list must have a length of 10 or less
+* `Annotated[str, MaxLen(10)]` - string must have a length of 10 or less
+* `Annotated[list, MinLen(3)]` (or `Annotated[list, Len(min_length=3)]`) - list must have a length of 3 or more
+* `Annotated[list, Len(4, 6)]` - list must have a length of 4, 5, or 6
+* `Annotated[list, Len(8, 8)]` - list must have a length of exactly 8
+
+#### Changed in v0.4.0
+
+* `min_inclusive` has been renamed to `min_length`, no change in meaning
+* `max_exclusive` has been renamed to `max_length`, upper bound is now **inclusive** instead of **exclusive**
+* The recommendation that slices are interpreted as `Len` has been removed due to ambiguity and different semantic
+  meaning of the upper bound in slices vs. `Len`
+
+See [issue #23](https://github.com/annotated-types/annotated-types/issues/23) for discussion.
+
+### Timezone
+
+`Timezone` can be used with a `datetime` or a `time` to express which timezones
+are allowed. `Annotated[datetime, Timezone(None)]` must be a naive datetime.
+`Timezone[...]` ([literal ellipsis](https://docs.python.org/3/library/constants.html#Ellipsis))
+expresses that any timezone-aware datetime is allowed. You may also pass a specific
+timezone string or [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects)
+object such as `Timezone(timezone.utc)` or `Timezone("Africa/Abidjan")` to express that you only
+allow a specific timezone, though we note that this is often a symptom of fragile design.
+
+#### Changed in v0.x.x
+
+* `Timezone` accepts [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects) objects instead of
+  `timezone`, extending compatibility to [`zoneinfo`](https://docs.python.org/3/library/zoneinfo.html) and third party libraries.
+
+### Unit
+
+`Unit(unit: str)` expresses that the annotated numeric value is the magnitude of
+a quantity with the specified unit. For example, `Annotated[float, Unit("m/s")]`
+would be a float representing a velocity in meters per second.
+
+Please note that `annotated_types` itself makes no attempt to parse or validate
+the unit string in any way. That is left entirely to downstream libraries,
+such as [`pint`](https://pint.readthedocs.io) or
+[`astropy.units`](https://docs.astropy.org/en/stable/units/).
+
+An example of how a library might use this metadata:
+
+```python
+from annotated_types import Unit
+from typing import Annotated, TypeVar, Callable, Any, get_origin, get_args
+
+# given a type annotated with a unit:
+Meters = Annotated[float, Unit("m")]
+
+
+# you can cast the annotation to a specific unit type with any
+# callable that accepts a string and returns the desired type
+T = TypeVar("T")
+def cast_unit(tp: Any, unit_cls: Callable[[str], T]) -> T | None:
+    if get_origin(tp) is Annotated:
+        for arg in get_args(tp):
+            if isinstance(arg, Unit):
+                return unit_cls(arg.unit)
+    return None
+
+
+# using `pint`
+import pint
+pint_unit = cast_unit(Meters, pint.Unit)
+
+
+# using `astropy.units`
+import astropy.units as u
+astropy_unit = cast_unit(Meters, u.Unit)
+```
+
+### Predicate
+
+`Predicate(func: Callable)` expresses that `func(value)` is truthy for valid values.
+Users should prefer the statically inspectable metadata above, but if you need
+the full power and flexibility of arbitrary runtime predicates... here it is.
+
+For some common constraints, we provide generic types:
+
+* `IsLower = Annotated[T, Predicate(str.islower)]`
+* `IsUpper = Annotated[T, Predicate(str.isupper)]`
+* `IsDigit = Annotated[T, Predicate(str.isdigit)]`
+* `IsFinite = Annotated[T, Predicate(math.isfinite)]`
+* `IsNotFinite = Annotated[T, Predicate(Not(math.isfinite))]`
+* `IsNan = Annotated[T, Predicate(math.isnan)]`
+* `IsNotNan = Annotated[T, Predicate(Not(math.isnan))]`
+* `IsInfinite = Annotated[T, Predicate(math.isinf)]`
+* `IsNotInfinite = Annotated[T, Predicate(Not(math.isinf))]`
+
+so that you can write e.g. `x: IsFinite[float] = 2.0` instead of the longer
+(but exactly equivalent) `x: Annotated[float, Predicate(math.isfinite)] = 2.0`.
+
+Some libraries might have special logic to handle known or understandable predicates,
+for example by checking for `str.isdigit` and using its presence to both call custom
+logic to enforce digit-only strings, and customise some generated external schema.
+Users are therefore encouraged to avoid indirection like `lambda s: s.lower()`, in
+favor of introspectable methods such as `str.lower` or `re.compile("pattern").search`.
+
+To enable basic negation of commonly used predicates like `math.isnan` without
+introducing indirection that makes it impossible for implementers to introspect
+the predicate, we provide a `Not` wrapper that simply negates the predicate in an
+introspectable manner. Several of the predicates listed above are created in this
+manner.
+
+We do not specify what behaviour should be expected for predicates that raise
+an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
+skip invalid constraints, or statically raise an error; or it might try calling it
+and then propagate or discard the resulting
+`TypeError: descriptor 'isdigit' for 'str' objects doesn't apply to a 'int' object`
+exception. We encourage libraries to document the behaviour they choose.
+
+### Doc
+
+`doc()` can be used to add documentation information in `Annotated`, for function and method parameters, variables, class attributes, return types, and any place where `Annotated` can be used.
+
+It expects a value that can be statically analyzed, as the main use case is for static analysis, editors, documentation generators, and similar tools.
+
+It returns a `DocInfo` class with a single attribute `documentation` containing the value passed to `doc()`.
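+
+For example, a small sketch of attaching and reading back documentation (the
+`greet` function is invented for this illustration):
+
+```python
+from typing import Annotated, get_args
+
+from annotated_types import DocInfo, doc
+
+
+def greet(name: Annotated[str, doc("The user's display name")]) -> str:
+    return f"Hello, {name}!"
+
+
+# a tool could recover the documentation from the annotation's metadata:
+hint = greet.__annotations__["name"]
+doc_info = next(a for a in get_args(hint) if isinstance(a, DocInfo))
+assert doc_info.documentation == "The user's display name"
+```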
+
+This is the early adopter's alternative form of the [`typing-doc` proposal](https://github.com/tiangolo/fastapi/blob/typing-doc/typing_doc.md).
+
+### Integrating downstream types with `GroupedMetadata`
+
+Implementers may choose to provide a convenience wrapper that groups multiple pieces of metadata.
+This can help reduce verbosity and cognitive overhead for users.
+For example, an implementer like Pydantic might provide a `Field` or `Meta` type that accepts keyword arguments and transforms these into low-level metadata:
+
+```python
+from dataclasses import dataclass
+from typing import Iterator
+from annotated_types import GroupedMetadata, Ge
+
+
+@dataclass
+class Description:
+    # hypothetical non-constraint metadata, defined here only so the example is
+    # self-contained; it is not part of annotated-types
+    text: str
+
+
+@dataclass
+class Field(GroupedMetadata):
+    ge: int | None = None
+    description: str | None = None
+
+    def __iter__(self) -> Iterator[object]:
+        # Iterating over a GroupedMetadata object should yield annotated-types
+        # constraint metadata objects which describe it as fully as possible,
+        # and may include other unknown objects too.
+        if self.ge is not None:
+            yield Ge(self.ge)
+        if self.description is not None:
+            yield Description(self.description)
+```
+
+Libraries consuming annotated-types constraints should check for `GroupedMetadata` and unpack it by iterating over the object and treating the results as if they had been "unpacked" in the `Annotated` type. The same logic should be applied to the [PEP 646 `Unpack` type](https://peps.python.org/pep-0646/), so that `Annotated[T, Field(...)]`, `Annotated[T, Unpack[Field(...)]]` and `Annotated[T, *Field(...)]` are all treated consistently.
+
+Libraries consuming annotated-types should also ignore any metadata they do not recognize that came from unpacking a `GroupedMetadata`, just like they ignore unrecognized metadata in `Annotated` itself.
+
+Our own `annotated_types.Interval` class is a `GroupedMetadata` which unpacks itself into `Gt`, `Lt`, etc., so this is not an abstract concern. Similarly, `annotated_types.Len` is a `GroupedMetadata` which unpacks itself into `MinLen` (optionally) and `MaxLen`.
+
+### Consuming metadata
+
+We intend to not be prescriptive as to _how_ the metadata and constraints are used, but as an example of how one might parse constraints from type annotations see our [implementation in `test_main.py`](https://github.com/annotated-types/annotated-types/blob/f59cf6d1b5255a0fe359b93896759a180bec30ae/tests/test_main.py#L94-L103) and the minimal sketch in the appendix at the end of this README.
+
+It is up to the implementer to determine how this metadata is used.
+You could use the metadata for runtime type checking, for generating schemas or to generate example data, amongst other use cases.
+
+## Design & History
+
+This package was designed at the PyCon 2022 sprints by the maintainers of Pydantic
+and Hypothesis, with the goal of making it as easy as possible for end-users to
+provide more informative annotations for use by runtime libraries.
+
+It is deliberately minimal, and following PEP-593 allows considerable downstream
+discretion in what (if anything!) they choose to support. Nonetheless, we expect
+that staying simple and covering _only_ the most common use-cases will give users
+and maintainers the best experience we can. If you'd like more constraints for your
+types - follow our lead, by defining them and documenting them downstream!
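+
+## Appendix: a minimal consumer
+
+As a simplified illustration of consuming metadata, here is a sketch of a
+runtime checker. The `check` helper is ours, invented for this appendix rather
+than part of the package, and it deliberately handles only a handful of the
+metadata types described above:
+
+```python
+import math
+from typing import Annotated, get_args, get_origin
+
+import annotated_types as at
+
+
+def check(tp, value) -> bool:
+    """Return True if `value` satisfies the constraints in `Annotated[T, ...]`."""
+    if get_origin(tp) is not Annotated:
+        return True
+    _, *metadata = get_args(tp)
+    for meta in metadata:
+        if isinstance(meta, at.GroupedMetadata):  # e.g. Interval, Len
+            metadata.extend(meta)  # unpack and keep checking
+        elif isinstance(meta, at.Gt) and not value > meta.gt:
+            return False
+        elif isinstance(meta, at.Ge) and not value >= meta.ge:
+            return False
+        elif isinstance(meta, at.Le) and not value <= meta.le:
+            return False
+        elif isinstance(meta, at.MinLen) and len(value) < meta.min_length:
+            return False
+        elif isinstance(meta, at.Predicate) and not meta.func(value):
+            return False
+        # anything unrecognized is deliberately ignored
+    return True
+
+
+assert check(Annotated[int, at.Interval(gt=0, le=10)], 5)
+assert not check(Annotated[float, at.Predicate(math.isfinite)], math.inf)
+```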
diff --git a/venv/lib/python3.11/site-packages/annotated_types-0.7.0.dist-info/RECORD b/venv/lib/python3.11/site-packages/annotated_types-0.7.0.dist-info/RECORD new file mode 100644 index 0000000..568bd52 --- /dev/null +++ b/venv/lib/python3.11/site-packages/annotated_types-0.7.0.dist-info/RECORD @@ -0,0 +1,10 @@ +annotated_types-0.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +annotated_types-0.7.0.dist-info/METADATA,sha256=7ltqxksJJ0wCYFGBNIQCWTlWQGeAH0hRFdnK3CB895E,15046 +annotated_types-0.7.0.dist-info/RECORD,, +annotated_types-0.7.0.dist-info/WHEEL,sha256=zEMcRr9Kr03x1ozGwg5v9NQBKn3kndp6LSoSlVg-jhU,87 +annotated_types-0.7.0.dist-info/licenses/LICENSE,sha256=_hBJiEsaDZNCkB6I4H8ykl0ksxIdmXK2poBfuYJLCV0,1083 +annotated_types/__init__.py,sha256=RynLsRKUEGI0KimXydlD1fZEfEzWwDo0Uon3zOKhG1Q,13819 +annotated_types/__pycache__/__init__.cpython-311.pyc,, +annotated_types/__pycache__/test_cases.cpython-311.pyc,, +annotated_types/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +annotated_types/test_cases.py,sha256=zHFX6EpcMbGJ8FzBYDbO56bPwx_DYIVSKbZM-4B3_lg,6421 diff --git a/venv/lib/python3.11/site-packages/annotated_types-0.7.0.dist-info/WHEEL b/venv/lib/python3.11/site-packages/annotated_types-0.7.0.dist-info/WHEEL new file mode 100644 index 0000000..516596c --- /dev/null +++ b/venv/lib/python3.11/site-packages/annotated_types-0.7.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.24.2 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.11/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE b/venv/lib/python3.11/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000..d99323a --- /dev/null +++ b/venv/lib/python3.11/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2022 the contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/venv/lib/python3.11/site-packages/annotated_types/__init__.py b/venv/lib/python3.11/site-packages/annotated_types/__init__.py new file mode 100644 index 0000000..74e0dee --- /dev/null +++ b/venv/lib/python3.11/site-packages/annotated_types/__init__.py @@ -0,0 +1,432 @@ +import math +import sys +import types +from dataclasses import dataclass +from datetime import tzinfo +from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, SupportsFloat, SupportsIndex, TypeVar, Union + +if sys.version_info < (3, 8): + from typing_extensions import Protocol, runtime_checkable +else: + from typing import Protocol, runtime_checkable + +if sys.version_info < (3, 9): + from typing_extensions import Annotated, Literal +else: + from typing import Annotated, Literal + +if sys.version_info < (3, 10): + EllipsisType = type(Ellipsis) + KW_ONLY = {} + SLOTS = {} +else: + from types import EllipsisType + + KW_ONLY = {"kw_only": True} + SLOTS = {"slots": True} + + +__all__ = ( + 'BaseMetadata', + 'GroupedMetadata', + 'Gt', + 'Ge', + 'Lt', + 'Le', + 'Interval', + 'MultipleOf', + 'MinLen', + 'MaxLen', + 'Len', + 'Timezone', + 'Predicate', + 'LowerCase', + 'UpperCase', + 'IsDigits', + 'IsFinite', + 'IsNotFinite', + 'IsNan', + 'IsNotNan', + 'IsInfinite', + 'IsNotInfinite', + 'doc', + 'DocInfo', + '__version__', +) + +__version__ = '0.7.0' + + +T = TypeVar('T') + + +# arguments that start with __ are considered +# positional only +# see https://peps.python.org/pep-0484/#positional-only-arguments + + +class SupportsGt(Protocol): + def __gt__(self: T, __other: T) -> bool: + ... + + +class SupportsGe(Protocol): + def __ge__(self: T, __other: T) -> bool: + ... + + +class SupportsLt(Protocol): + def __lt__(self: T, __other: T) -> bool: + ... + + +class SupportsLe(Protocol): + def __le__(self: T, __other: T) -> bool: + ... + + +class SupportsMod(Protocol): + def __mod__(self: T, __other: T) -> T: + ... + + +class SupportsDiv(Protocol): + def __div__(self: T, __other: T) -> T: + ... + + +class BaseMetadata: + """Base class for all metadata. + + This exists mainly so that implementers + can do `isinstance(..., BaseMetadata)` while traversing field annotations. + """ + + __slots__ = () + + +@dataclass(frozen=True, **SLOTS) +class Gt(BaseMetadata): + """Gt(gt=x) implies that the value must be greater than x. + + It can be used with any type that supports the ``>`` operator, + including numbers, dates and times, strings, sets, and so on. + """ + + gt: SupportsGt + + +@dataclass(frozen=True, **SLOTS) +class Ge(BaseMetadata): + """Ge(ge=x) implies that the value must be greater than or equal to x. + + It can be used with any type that supports the ``>=`` operator, + including numbers, dates and times, strings, sets, and so on. + """ + + ge: SupportsGe + + +@dataclass(frozen=True, **SLOTS) +class Lt(BaseMetadata): + """Lt(lt=x) implies that the value must be less than x. + + It can be used with any type that supports the ``<`` operator, + including numbers, dates and times, strings, sets, and so on. + """ + + lt: SupportsLt + + +@dataclass(frozen=True, **SLOTS) +class Le(BaseMetadata): + """Le(le=x) implies that the value must be less than or equal to x. + + It can be used with any type that supports the ``<=`` operator, + including numbers, dates and times, strings, sets, and so on. + """ + + le: SupportsLe + + +@runtime_checkable +class GroupedMetadata(Protocol): + """A grouping of multiple objects, like typing.Unpack. + + `GroupedMetadata` on its own is not metadata and has no meaning. 
+    All of the constraints and metadata should be fully expressible
+    in terms of the `BaseMetadata`'s returned by `GroupedMetadata.__iter__()`.
+
+    Concrete implementations should override `GroupedMetadata.__iter__()`
+    to add their own metadata.
+    For example:
+
+    >>> @dataclass
+    >>> class Field(GroupedMetadata):
+    >>>     gt: float | None = None
+    >>>     description: str | None = None
+    ...
+    >>>     def __iter__(self) -> Iterable[object]:
+    >>>         if self.gt is not None:
+    >>>             yield Gt(self.gt)
+    >>>         if self.description is not None:
+    >>>             yield Description(self.description)
+
+    Also see the implementation of `Interval` below for an example.
+
+    Parsers should recognize this and unpack it so that it can be used
+    both with and without unpacking:
+
+    - `Annotated[int, Field(...)]` (parser must unpack Field)
+    - `Annotated[int, *Field(...)]` (PEP-646)
+    """  # noqa: trailing-whitespace
+
+    @property
+    def __is_annotated_types_grouped_metadata__(self) -> Literal[True]:
+        return True
+
+    def __iter__(self) -> Iterator[object]:
+        ...
+
+    if not TYPE_CHECKING:
+        __slots__ = ()  # allow subclasses to use slots
+
+        def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None:
+            # Basic ABC like functionality without the complexity of an ABC
+            super().__init_subclass__(*args, **kwargs)
+            if cls.__iter__ is GroupedMetadata.__iter__:
+                raise TypeError("Can't subclass GroupedMetadata without implementing __iter__")
+
+        def __iter__(self) -> Iterator[object]:  # noqa: F811
+            raise NotImplementedError  # more helpful than "None has no attribute..." type errors
+
+
+@dataclass(frozen=True, **KW_ONLY, **SLOTS)
+class Interval(GroupedMetadata):
+    """Interval can express inclusive or exclusive bounds with a single object.
+
+    It accepts keyword arguments ``gt``, ``ge``, ``lt``, and/or ``le``, which
+    are interpreted the same way as the single-bound constraints.
+    """
+
+    gt: Union[SupportsGt, None] = None
+    ge: Union[SupportsGe, None] = None
+    lt: Union[SupportsLt, None] = None
+    le: Union[SupportsLe, None] = None
+
+    def __iter__(self) -> Iterator[BaseMetadata]:
+        """Unpack an Interval into zero or more single-bounds."""
+        if self.gt is not None:
+            yield Gt(self.gt)
+        if self.ge is not None:
+            yield Ge(self.ge)
+        if self.lt is not None:
+            yield Lt(self.lt)
+        if self.le is not None:
+            yield Le(self.le)
+
+
+@dataclass(frozen=True, **SLOTS)
+class MultipleOf(BaseMetadata):
+    """MultipleOf(multiple_of=x) might be interpreted in two ways:
+
+    1. Python semantics, implying ``value % multiple_of == 0``, or
+    2. JSONschema semantics, where ``int(value / multiple_of) == value / multiple_of``
+
+    We encourage users to be aware of these two common interpretations,
+    and libraries to carefully document which they implement.
+    """
+
+    multiple_of: Union[SupportsDiv, SupportsMod]
+
+
+@dataclass(frozen=True, **SLOTS)
+class MinLen(BaseMetadata):
+    """
+    MinLen() implies minimum inclusive length,
+    e.g. ``len(value) >= min_length``.
+    """
+
+    min_length: Annotated[int, Ge(0)]
+
+
+@dataclass(frozen=True, **SLOTS)
+class MaxLen(BaseMetadata):
+    """
+    MaxLen() implies maximum inclusive length,
+    e.g. ``len(value) <= max_length``.
+    """
+
+    max_length: Annotated[int, Ge(0)]
+
+
+@dataclass(frozen=True, **SLOTS)
+class Len(GroupedMetadata):
+    """
+    Len() implies that ``min_length <= len(value) <= max_length``.
+
+    Upper bound may be omitted or ``None`` to indicate no upper length bound.
+    """
+
+    min_length: Annotated[int, Ge(0)] = 0
+    max_length: Optional[Annotated[int, Ge(0)]] = None
+
+    def __iter__(self) -> Iterator[BaseMetadata]:
+        """Unpack a Len into zero or more single-bounds."""
+        if self.min_length > 0:
+            yield MinLen(self.min_length)
+        if self.max_length is not None:
+            yield MaxLen(self.max_length)
+
+
+@dataclass(frozen=True, **SLOTS)
+class Timezone(BaseMetadata):
+    """Timezone(tz=...) requires a datetime to be aware (or ``tz=None``, naive).
+
+    ``Annotated[datetime, Timezone(None)]`` must be a naive datetime.
+    ``Timezone[...]`` (the ellipsis literal) expresses that the datetime must be
+    tz-aware but any timezone is allowed.
+
+    You may also pass a specific timezone string or tzinfo object such as
+    ``Timezone(timezone.utc)`` or ``Timezone("Africa/Abidjan")`` to express that
+    you only allow a specific timezone, though we note that this is often
+    a symptom of poor design.
+    """
+
+    tz: Union[str, tzinfo, EllipsisType, None]
+
+
+@dataclass(frozen=True, **SLOTS)
+class Unit(BaseMetadata):
+    """Indicates that the value is a physical quantity with the specified unit.
+
+    It is intended for usage with numeric types, where the value represents the
+    magnitude of the quantity. For example, ``distance: Annotated[float, Unit('m')]``
+    or ``speed: Annotated[float, Unit('m/s')]``.
+
+    Interpretation of the unit string is left to the discretion of the consumer.
+    It is suggested to follow conventions established by python libraries that work
+    with physical quantities, such as
+
+    - ``pint``: <https://pint.readthedocs.io>
+    - ``astropy.units``: <https://docs.astropy.org/en/stable/units/>
+
+    For indicating a quantity with a certain dimensionality but without a specific unit
+    it is recommended to use square brackets, e.g. `Annotated[float, Unit('[time]')]`.
+    Note, however, ``annotated_types`` itself makes no use of the unit string.
+    """
+
+    unit: str
+
+
+@dataclass(frozen=True, **SLOTS)
+class Predicate(BaseMetadata):
+    """``Predicate(func: Callable)`` implies `func(value)` is truthy for valid values.
+
+    Users should prefer statically inspectable metadata, but if you need the full
+    power and flexibility of arbitrary runtime predicates... here it is.
+
+    We provide a few predefined predicates for common string constraints:
+    ``IsLower = Predicate(str.islower)``, ``IsUpper = Predicate(str.isupper)``, and
+    ``IsDigits = Predicate(str.isdigit)``. Users are encouraged to use methods which
+    can be given special handling, and avoid indirection like ``lambda s: s.lower()``.
+
+    Some libraries might have special logic to handle certain predicates, e.g. by
+    checking for `str.isdigit` and using its presence to both call custom logic to
+    enforce digit-only strings, and customise some generated external schema.
+
+    We do not specify what behaviour should be expected for predicates that raise
+    an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
+    skip invalid constraints, or statically raise an error; or it might try calling it
+    and then propagate or discard the resulting exception.
+    """
+
+    func: Callable[[Any], bool]
+
+    def __repr__(self) -> str:
+        if getattr(self.func, "__name__", "<lambda>") == "<lambda>":
+            return f"{self.__class__.__name__}({self.func!r})"
+        if isinstance(self.func, (types.MethodType, types.BuiltinMethodType)) and (
+            namespace := getattr(self.func.__self__, "__name__", None)
+        ):
+            return f"{self.__class__.__name__}({namespace}.{self.func.__name__})"
+        if isinstance(self.func, type(str.isascii)):  # method descriptor
+            return f"{self.__class__.__name__}({self.func.__qualname__})"
+        return f"{self.__class__.__name__}({self.func.__name__})"
+
+
+@dataclass
+class Not:
+    func: Callable[[Any], bool]
+
+    def __call__(self, __v: Any) -> bool:
+        return not self.func(__v)
+
+
+_StrType = TypeVar("_StrType", bound=str)
+
+LowerCase = Annotated[_StrType, Predicate(str.islower)]
+"""
+Return True if the string is a lowercase string, False otherwise.
+
+A string is lowercase if all cased characters in the string are lowercase and there is at least one cased character in the string.
+"""  # noqa: E501
+UpperCase = Annotated[_StrType, Predicate(str.isupper)]
+"""
+Return True if the string is an uppercase string, False otherwise.
+
+A string is uppercase if all cased characters in the string are uppercase and there is at least one cased character in the string.
+"""  # noqa: E501
+IsDigit = Annotated[_StrType, Predicate(str.isdigit)]
+IsDigits = IsDigit  # type: ignore  # plural for backwards compatibility, see #63
+"""
+Return True if the string is a digit string, False otherwise.
+
+A string is a digit string if all characters in the string are digits and there is at least one character in the string.
+"""  # noqa: E501
+IsAscii = Annotated[_StrType, Predicate(str.isascii)]
+"""
+Return True if all characters in the string are ASCII, False otherwise.
+
+ASCII characters have code points in the range U+0000-U+007F. Empty string is ASCII too.
+"""
+
+_NumericType = TypeVar('_NumericType', bound=Union[SupportsFloat, SupportsIndex])
+IsFinite = Annotated[_NumericType, Predicate(math.isfinite)]
+"""Return True if x is neither an infinity nor a NaN, and False otherwise."""
+IsNotFinite = Annotated[_NumericType, Predicate(Not(math.isfinite))]
+"""Return True if x is one of infinity or NaN, and False otherwise"""
+IsNan = Annotated[_NumericType, Predicate(math.isnan)]
+"""Return True if x is a NaN (not a number), and False otherwise."""
+IsNotNan = Annotated[_NumericType, Predicate(Not(math.isnan))]
+"""Return True if x is anything but NaN (not a number), and False otherwise."""
+IsInfinite = Annotated[_NumericType, Predicate(math.isinf)]
+"""Return True if x is a positive or negative infinity, and False otherwise."""
+IsNotInfinite = Annotated[_NumericType, Predicate(Not(math.isinf))]
+"""Return True if x is neither a positive nor a negative infinity, and False otherwise."""
+
+try:
+    from typing_extensions import DocInfo, doc  # type: ignore [attr-defined]
+except ImportError:
+
+    @dataclass(frozen=True, **SLOTS)
+    class DocInfo:  # type: ignore [no-redef]
+        """
+        The return value of doc(), mainly to be used by tools that want to extract the
+        Annotated documentation at runtime.
+        """
+
+        documentation: str
+        """The documentation string passed to doc()."""
+
+    def doc(
+        documentation: str,
+    ) -> DocInfo:
+        """
+        Add documentation to a type annotation inside of Annotated.
+
+        For example:
+
+        >>> def hi(name: Annotated[int, doc("The name of the user")]) -> None: ...
+ """ + return DocInfo(documentation) diff --git a/venv/lib/python3.11/site-packages/annotated_types/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/annotated_types/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..142a287 Binary files /dev/null and b/venv/lib/python3.11/site-packages/annotated_types/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/annotated_types/__pycache__/test_cases.cpython-311.pyc b/venv/lib/python3.11/site-packages/annotated_types/__pycache__/test_cases.cpython-311.pyc new file mode 100644 index 0000000..15d417d Binary files /dev/null and b/venv/lib/python3.11/site-packages/annotated_types/__pycache__/test_cases.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/annotated_types/py.typed b/venv/lib/python3.11/site-packages/annotated_types/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.11/site-packages/annotated_types/test_cases.py b/venv/lib/python3.11/site-packages/annotated_types/test_cases.py new file mode 100644 index 0000000..d9164d6 --- /dev/null +++ b/venv/lib/python3.11/site-packages/annotated_types/test_cases.py @@ -0,0 +1,151 @@ +import math +import sys +from datetime import date, datetime, timedelta, timezone +from decimal import Decimal +from typing import Any, Dict, Iterable, Iterator, List, NamedTuple, Set, Tuple + +if sys.version_info < (3, 9): + from typing_extensions import Annotated +else: + from typing import Annotated + +import annotated_types as at + + +class Case(NamedTuple): + """ + A test case for `annotated_types`. + """ + + annotation: Any + valid_cases: Iterable[Any] + invalid_cases: Iterable[Any] + + +def cases() -> Iterable[Case]: + # Gt, Ge, Lt, Le + yield Case(Annotated[int, at.Gt(4)], (5, 6, 1000), (4, 0, -1)) + yield Case(Annotated[float, at.Gt(0.5)], (0.6, 0.7, 0.8, 0.9), (0.5, 0.0, -0.1)) + yield Case( + Annotated[datetime, at.Gt(datetime(2000, 1, 1))], + [datetime(2000, 1, 2), datetime(2000, 1, 3)], + [datetime(2000, 1, 1), datetime(1999, 12, 31)], + ) + yield Case( + Annotated[datetime, at.Gt(date(2000, 1, 1))], + [date(2000, 1, 2), date(2000, 1, 3)], + [date(2000, 1, 1), date(1999, 12, 31)], + ) + yield Case( + Annotated[datetime, at.Gt(Decimal('1.123'))], + [Decimal('1.1231'), Decimal('123')], + [Decimal('1.123'), Decimal('0')], + ) + + yield Case(Annotated[int, at.Ge(4)], (4, 5, 6, 1000, 4), (0, -1)) + yield Case(Annotated[float, at.Ge(0.5)], (0.5, 0.6, 0.7, 0.8, 0.9), (0.4, 0.0, -0.1)) + yield Case( + Annotated[datetime, at.Ge(datetime(2000, 1, 1))], + [datetime(2000, 1, 2), datetime(2000, 1, 3)], + [datetime(1998, 1, 1), datetime(1999, 12, 31)], + ) + + yield Case(Annotated[int, at.Lt(4)], (0, -1), (4, 5, 6, 1000, 4)) + yield Case(Annotated[float, at.Lt(0.5)], (0.4, 0.0, -0.1), (0.5, 0.6, 0.7, 0.8, 0.9)) + yield Case( + Annotated[datetime, at.Lt(datetime(2000, 1, 1))], + [datetime(1999, 12, 31), datetime(1999, 12, 31)], + [datetime(2000, 1, 2), datetime(2000, 1, 3)], + ) + + yield Case(Annotated[int, at.Le(4)], (4, 0, -1), (5, 6, 1000)) + yield Case(Annotated[float, at.Le(0.5)], (0.5, 0.0, -0.1), (0.6, 0.7, 0.8, 0.9)) + yield Case( + Annotated[datetime, at.Le(datetime(2000, 1, 1))], + [datetime(2000, 1, 1), datetime(1999, 12, 31)], + [datetime(2000, 1, 2), datetime(2000, 1, 3)], + ) + + # Interval + yield Case(Annotated[int, at.Interval(gt=4)], (5, 6, 1000), (4, 0, -1)) + yield Case(Annotated[int, at.Interval(gt=4, lt=10)], (5, 6), (4, 10, 1000, 0, -1)) + yield Case(Annotated[float, 
at.Interval(ge=0.5, le=1)], (0.5, 0.9, 1), (0.49, 1.1))
+    yield Case(
+        Annotated[datetime, at.Interval(gt=datetime(2000, 1, 1), le=datetime(2000, 1, 3))],
+        [datetime(2000, 1, 2), datetime(2000, 1, 3)],
+        [datetime(2000, 1, 1), datetime(2000, 1, 4)],
+    )
+
+    yield Case(Annotated[int, at.MultipleOf(multiple_of=3)], (0, 3, 9), (1, 2, 4))
+    yield Case(Annotated[float, at.MultipleOf(multiple_of=0.5)], (0, 0.5, 1, 1.5), (0.4, 1.1))
+
+    # lengths
+
+    yield Case(Annotated[str, at.MinLen(3)], ('123', '1234', 'x' * 10), ('', '1', '12'))
+    yield Case(Annotated[str, at.Len(3)], ('123', '1234', 'x' * 10), ('', '1', '12'))
+    yield Case(Annotated[List[int], at.MinLen(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2]))
+    yield Case(Annotated[List[int], at.Len(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2]))
+
+    yield Case(Annotated[str, at.MaxLen(4)], ('', '1234'), ('12345', 'x' * 10))
+    yield Case(Annotated[str, at.Len(0, 4)], ('', '1234'), ('12345', 'x' * 10))
+    yield Case(Annotated[List[str], at.MaxLen(4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10))
+    yield Case(Annotated[List[str], at.Len(0, 4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10))
+
+    yield Case(Annotated[str, at.Len(3, 5)], ('123', '12345'), ('', '1', '12', '123456', 'x' * 10))
+    yield Case(Annotated[str, at.Len(3, 3)], ('123',), ('12', '1234'))
+
+    yield Case(Annotated[Dict[int, int], at.Len(2, 3)], [{1: 1, 2: 2}], [{}, {1: 1}, {1: 1, 2: 2, 3: 3, 4: 4}])
+    yield Case(Annotated[Set[int], at.Len(2, 3)], ({1, 2}, {1, 2, 3}), (set(), {1}, {1, 2, 3, 4}))
+    yield Case(Annotated[Tuple[int, ...], at.Len(2, 3)], ((1, 2), (1, 2, 3)), ((), (1,), (1, 2, 3, 4)))
+
+    # Timezone
+
+    yield Case(
+        Annotated[datetime, at.Timezone(None)], [datetime(2000, 1, 1)], [datetime(2000, 1, 1, tzinfo=timezone.utc)]
+    )
+    yield Case(
+        Annotated[datetime, at.Timezone(...)], [datetime(2000, 1, 1, tzinfo=timezone.utc)], [datetime(2000, 1, 1)]
+    )
+    yield Case(
+        Annotated[datetime, at.Timezone(timezone.utc)],
+        [datetime(2000, 1, 1, tzinfo=timezone.utc)],
+        [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))],
+    )
+    yield Case(
+        Annotated[datetime, at.Timezone('Europe/London')],
+        [datetime(2000, 1, 1, tzinfo=timezone(timedelta(0), name='Europe/London'))],
+        [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))],
+    )
+
+    # Quantity
+
+    yield Case(Annotated[float, at.Unit(unit='m')], (5, 4.2), ('5m', '4.2m'))
+
+    # predicate types
+
+    yield Case(at.LowerCase[str], ['abc', 'foobar'], ['', 'A', 'Boom'])
+    yield Case(at.UpperCase[str], ['ABC', 'DEFO'], ['', 'a', 'abc', 'AbC'])
+    yield Case(at.IsDigit[str], ['123'], ['', 'ab', 'a1b2'])
+    yield Case(at.IsAscii[str], ['123', 'foo bar'], ['£100', '😊', 'whatever 👀'])
+
+    yield Case(Annotated[int, at.Predicate(lambda x: x % 2 == 0)], [0, 2, 4], [1, 3, 5])
+
+    yield Case(at.IsFinite[float], [1.23], [math.nan, math.inf, -math.inf])
+    yield Case(at.IsNotFinite[float], [math.nan, math.inf], [1.23])
+    yield Case(at.IsNan[float], [math.nan], [1.23, math.inf])
+    yield Case(at.IsNotNan[float], [1.23, math.inf], [math.nan])
+    yield Case(at.IsInfinite[float], [math.inf], [math.nan, 1.23])
+    yield Case(at.IsNotInfinite[float], [math.nan, 1.23], [math.inf])
+
+    # check stacked predicates
+    yield Case(at.IsInfinite[Annotated[float, at.Predicate(lambda x: x > 0)]], [math.inf], [-math.inf, 1.23, math.nan])
+
+    # doc
+    yield Case(Annotated[int, at.doc("A number")], [1, 2], [])
+
+    # custom GroupedMetadata
+    class MyCustomGroupedMetadata(at.GroupedMetadata):
+        def __iter__(self) -> Iterator[at.Predicate]:
+            yield at.Predicate(lambda x: float(x).is_integer())
+
+    yield Case(Annotated[float, MyCustomGroupedMetadata()], [0, 2.0], [0.01, 1.5])
diff --git a/venv/lib/python3.11/site-packages/anyio-4.9.0.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/anyio-4.9.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/anyio-4.9.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.11/site-packages/anyio-4.9.0.dist-info/LICENSE b/venv/lib/python3.11/site-packages/anyio-4.9.0.dist-info/LICENSE
new file mode 100644
index 0000000..104eebf
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/anyio-4.9.0.dist-info/LICENSE
@@ -0,0 +1,20 @@
+The MIT License (MIT)
+
+Copyright (c) 2018 Alex Grönholm
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/venv/lib/python3.11/site-packages/anyio-4.9.0.dist-info/METADATA b/venv/lib/python3.11/site-packages/anyio-4.9.0.dist-info/METADATA new file mode 100644 index 0000000..9d87e1d --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio-4.9.0.dist-info/METADATA @@ -0,0 +1,105 @@ +Metadata-Version: 2.2 +Name: anyio +Version: 4.9.0 +Summary: High level compatibility layer for multiple asynchronous event loop implementations +Author-email: Alex Grönholm +License: MIT +Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/ +Project-URL: Changelog, https://anyio.readthedocs.io/en/stable/versionhistory.html +Project-URL: Source code, https://github.com/agronholm/anyio +Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Framework :: AnyIO +Classifier: Typing :: Typed +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Requires-Python: >=3.9 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: exceptiongroup>=1.0.2; python_version < "3.11" +Requires-Dist: idna>=2.8 +Requires-Dist: sniffio>=1.1 +Requires-Dist: typing_extensions>=4.5; python_version < "3.13" +Provides-Extra: trio +Requires-Dist: trio>=0.26.1; extra == "trio" +Provides-Extra: test +Requires-Dist: anyio[trio]; extra == "test" +Requires-Dist: blockbuster>=1.5.23; extra == "test" +Requires-Dist: coverage[toml]>=7; extra == "test" +Requires-Dist: exceptiongroup>=1.2.0; extra == "test" +Requires-Dist: hypothesis>=4.0; extra == "test" +Requires-Dist: psutil>=5.9; extra == "test" +Requires-Dist: pytest>=7.0; extra == "test" +Requires-Dist: trustme; extra == "test" +Requires-Dist: truststore>=0.9.1; python_version >= "3.10" and extra == "test" +Requires-Dist: uvloop>=0.21; (platform_python_implementation == "CPython" and platform_system != "Windows" and python_version < "3.14") and extra == "test" +Provides-Extra: doc +Requires-Dist: packaging; extra == "doc" +Requires-Dist: Sphinx~=8.2; extra == "doc" +Requires-Dist: sphinx_rtd_theme; extra == "doc" +Requires-Dist: sphinx-autodoc-typehints>=1.2.0; extra == "doc" + +.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg + :target: https://github.com/agronholm/anyio/actions/workflows/test.yml + :alt: Build Status +.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master + :target: https://coveralls.io/github/agronholm/anyio?branch=master + :alt: Code Coverage +.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest + :target: https://anyio.readthedocs.io/en/latest/?badge=latest + :alt: Documentation +.. image:: https://badges.gitter.im/gitterHQ/gitter.svg + :target: https://gitter.im/python-trio/AnyIO + :alt: Gitter chat + +AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or +trio_. It implements trio-like `structured concurrency`_ (SC) on top of asyncio and works in harmony +with the native SC of trio itself. + +Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or +trio_. 
AnyIO can also be adopted into a library or application incrementally – bit by bit, no full +refactoring necessary. It will blend in with the native libraries of your chosen backend. + +Documentation +------------- + +View full documentation at: https://anyio.readthedocs.io/ + +Features +-------- + +AnyIO offers the following functionality: + +* Task groups (nurseries_ in trio terminology) +* High-level networking (TCP, UDP and UNIX sockets) + + * `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python + 3.8) + * async/await style UDP sockets (unlike asyncio where you still have to use Transports and + Protocols) + +* A versatile API for byte streams and object streams +* Inter-task synchronization and communication (locks, conditions, events, semaphores, object + streams) +* Worker threads +* Subprocesses +* Asynchronous file I/O (using worker threads) +* Signal handling + +AnyIO also comes with its own pytest_ plugin which also supports asynchronous fixtures. +It even works with the popular Hypothesis_ library. + +.. _asyncio: https://docs.python.org/3/library/asyncio.html +.. _trio: https://github.com/python-trio/trio +.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency +.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning +.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs +.. _pytest: https://docs.pytest.org/en/latest/ +.. _Hypothesis: https://hypothesis.works/ diff --git a/venv/lib/python3.11/site-packages/anyio-4.9.0.dist-info/RECORD b/venv/lib/python3.11/site-packages/anyio-4.9.0.dist-info/RECORD new file mode 100644 index 0000000..e209027 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio-4.9.0.dist-info/RECORD @@ -0,0 +1,88 @@ +anyio-4.9.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +anyio-4.9.0.dist-info/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081 +anyio-4.9.0.dist-info/METADATA,sha256=vvkWPXXTbrpTCFK7zdcYwQcSQhx6Q4qITM9t_PEQCrY,4682 +anyio-4.9.0.dist-info/RECORD,, +anyio-4.9.0.dist-info/WHEEL,sha256=52BFRY2Up02UkjOa29eZOS2VxUrpPORXg1pkohGGUS8,91 +anyio-4.9.0.dist-info/entry_points.txt,sha256=_d6Yu6uiaZmNe0CydowirE9Cmg7zUL2g08tQpoS3Qvc,39 +anyio-4.9.0.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6 +anyio/__init__.py,sha256=t8bZuNXa5ncwXBaNKbv48BDgZt48RT_zCEtrnPmjNU8,4993 +anyio/__pycache__/__init__.cpython-311.pyc,, +anyio/__pycache__/from_thread.cpython-311.pyc,, +anyio/__pycache__/lowlevel.cpython-311.pyc,, +anyio/__pycache__/pytest_plugin.cpython-311.pyc,, +anyio/__pycache__/to_interpreter.cpython-311.pyc,, +anyio/__pycache__/to_process.cpython-311.pyc,, +anyio/__pycache__/to_thread.cpython-311.pyc,, +anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/_backends/__pycache__/__init__.cpython-311.pyc,, +anyio/_backends/__pycache__/_asyncio.cpython-311.pyc,, +anyio/_backends/__pycache__/_trio.cpython-311.pyc,, +anyio/_backends/_asyncio.py,sha256=AT1oaTfCE-9YFxooMlvld2yDqY5U2A-ANMcBDh9eRfI,93455 +anyio/_backends/_trio.py,sha256=HVfDqRGQ7Xj3JfTcYdgzmC7pZEplqU4NOO5kxNNSZnk,40429 +anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/_core/__pycache__/__init__.cpython-311.pyc,, +anyio/_core/__pycache__/_asyncio_selector_thread.cpython-311.pyc,, +anyio/_core/__pycache__/_eventloop.cpython-311.pyc,, +anyio/_core/__pycache__/_exceptions.cpython-311.pyc,, +anyio/_core/__pycache__/_fileio.cpython-311.pyc,, 
+anyio/_core/__pycache__/_resources.cpython-311.pyc,, +anyio/_core/__pycache__/_signals.cpython-311.pyc,, +anyio/_core/__pycache__/_sockets.cpython-311.pyc,, +anyio/_core/__pycache__/_streams.cpython-311.pyc,, +anyio/_core/__pycache__/_subprocesses.cpython-311.pyc,, +anyio/_core/__pycache__/_synchronization.cpython-311.pyc,, +anyio/_core/__pycache__/_tasks.cpython-311.pyc,, +anyio/_core/__pycache__/_tempfile.cpython-311.pyc,, +anyio/_core/__pycache__/_testing.cpython-311.pyc,, +anyio/_core/__pycache__/_typedattr.cpython-311.pyc,, +anyio/_core/_asyncio_selector_thread.py,sha256=2PdxFM3cs02Kp6BSppbvmRT7q7asreTW5FgBxEsflBo,5626 +anyio/_core/_eventloop.py,sha256=t_tAwBFPjF8jrZGjlJ6bbYy6KA3bjsbZxV9mvh9t1i0,4695 +anyio/_core/_exceptions.py,sha256=RlPRlwastdmfDPoskdXNO6SI8_l3fclA2wtW6cokU9I,3503 +anyio/_core/_fileio.py,sha256=qFZhkLIz0cGXluvih_vcPUTucgq8UFVgsTCtYbijZIg,23340 +anyio/_core/_resources.py,sha256=NbmU5O5UX3xEyACnkmYX28Fmwdl-f-ny0tHym26e0w0,435 +anyio/_core/_signals.py,sha256=vulT1M1xdLYtAR-eY5TamIgaf1WTlOwOrMGwswlTTr8,905 +anyio/_core/_sockets.py,sha256=5Okc_UThGDEN9KCnsIhqWPRHBNuSy6b4NmG1i51TVF4,27150 +anyio/_core/_streams.py,sha256=OnaKgoDD-FcMSwLvkoAUGP51sG2ZdRvMpxt9q2w1gYA,1804 +anyio/_core/_subprocesses.py,sha256=EXm5igL7dj55iYkPlbYVAqtbqxJxjU-6OndSTIx9SRg,8047 +anyio/_core/_synchronization.py,sha256=DwUh8Tl6cG_UMVC_GyzPoC_U9BpfDfjMl9SINSxcZN4,20320 +anyio/_core/_tasks.py,sha256=f3CuWwo06cCZ6jaOv-JHFKWkgpgf2cvaF25Oh4augMA,4757 +anyio/_core/_tempfile.py,sha256=s-_ucacXbxBH5Bo5eo65lN0lPwZQd5B8yNN_9nARpCM,19696 +anyio/_core/_testing.py,sha256=YUGwA5cgFFbUTv4WFd7cv_BSVr4ryTtPp8owQA3JdWE,2118 +anyio/_core/_typedattr.py,sha256=P4ozZikn3-DbpoYcvyghS_FOYAgbmUxeoU8-L_07pZM,2508 +anyio/abc/__init__.py,sha256=c2OQbTCS_fQowviMXanLPh8m29ccwkXmpDr7uyNZYOo,2652 +anyio/abc/__pycache__/__init__.cpython-311.pyc,, +anyio/abc/__pycache__/_eventloop.cpython-311.pyc,, +anyio/abc/__pycache__/_resources.cpython-311.pyc,, +anyio/abc/__pycache__/_sockets.cpython-311.pyc,, +anyio/abc/__pycache__/_streams.cpython-311.pyc,, +anyio/abc/__pycache__/_subprocesses.cpython-311.pyc,, +anyio/abc/__pycache__/_tasks.cpython-311.pyc,, +anyio/abc/__pycache__/_testing.cpython-311.pyc,, +anyio/abc/_eventloop.py,sha256=UmL8DZCvQTgxzmyBZcGm9kWj9VQY8BMWueLh5S8yWN4,9682 +anyio/abc/_resources.py,sha256=DrYvkNN1hH6Uvv5_5uKySvDsnknGVDe8FCKfko0VtN8,783 +anyio/abc/_sockets.py,sha256=KhWtJxan8jpBXKwPaFeQzI4iRXdFaOIn0HXtDZnaO7U,6262 +anyio/abc/_streams.py,sha256=He_JpkAW2g5veOzcUq0XsRC2nId_i35L-d8cs7Uj1ZQ,6598 +anyio/abc/_subprocesses.py,sha256=cumAPJTktOQtw63IqG0lDpyZqu_l1EElvQHMiwJgL08,2067 +anyio/abc/_tasks.py,sha256=yJWbMwowvqjlAX4oJ3l9Is1w-zwynr2lX1Z02AWJqsY,3080 +anyio/abc/_testing.py,sha256=tBJUzkSfOXJw23fe8qSJ03kJlShOYjjaEyFB6k6MYT8,1821 +anyio/from_thread.py,sha256=MbXHZpgM9wgsRkbGhMNMomEGYj7Y_QYq6a5BZ3c5Ev8,17478 +anyio/lowlevel.py,sha256=nkgmW--SdxGVp0cmLUYazjkigveRm5HY7-gW8Bpp9oY,4169 +anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/pytest_plugin.py,sha256=qXNwk9Pa7hPQKWocgLl9qijqKGMkGzdH2wJa-jPkGUM,9375 +anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +anyio/streams/__pycache__/__init__.cpython-311.pyc,, +anyio/streams/__pycache__/buffered.cpython-311.pyc,, +anyio/streams/__pycache__/file.cpython-311.pyc,, +anyio/streams/__pycache__/memory.cpython-311.pyc,, +anyio/streams/__pycache__/stapled.cpython-311.pyc,, +anyio/streams/__pycache__/text.cpython-311.pyc,, +anyio/streams/__pycache__/tls.cpython-311.pyc,, 
+anyio/streams/buffered.py,sha256=UCldKC168YuLvT7n3HtNPnQ2iWAMSTYQWbZvzLwMwkM,4500 +anyio/streams/file.py,sha256=6uoTNb5KbMoj-6gS3_xrrL8uZN8Q4iIvOS1WtGyFfKw,4383 +anyio/streams/memory.py,sha256=o1OVVx0OooteTTe2GytJreum93Ucuw5s4cAsr3X0-Ag,10560 +anyio/streams/stapled.py,sha256=U09pCrmOw9kkNhe6tKopsm1QIMT1lFTFvtb-A7SIe4k,4302 +anyio/streams/text.py,sha256=6x8w8xlfCZKTUWQoJiMPoMhSSJFUBRKgoBNSBtbd9yg,5094 +anyio/streams/tls.py,sha256=HxzpVmUgo8SUSIBass_lvef1pAI1uRSrnysM3iEGzl4,13199 +anyio/to_interpreter.py,sha256=UhuNCIucCRN7ZtyJg35Mlamzs1JpgDvK4xnL4TDWrAo,6527 +anyio/to_process.py,sha256=ZvruelRM-HNmqDaql4sdNODg2QD_uSlwSCxnV4OhsfQ,9595 +anyio/to_thread.py,sha256=WM2JQ2MbVsd5D5CM08bQiTwzZIvpsGjfH1Fy247KoDQ,2396 diff --git a/venv/lib/python3.11/site-packages/anyio-4.9.0.dist-info/WHEEL b/venv/lib/python3.11/site-packages/anyio-4.9.0.dist-info/WHEEL new file mode 100644 index 0000000..9c3ae63 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio-4.9.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (76.0.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.11/site-packages/anyio-4.9.0.dist-info/entry_points.txt b/venv/lib/python3.11/site-packages/anyio-4.9.0.dist-info/entry_points.txt new file mode 100644 index 0000000..44dd9bd --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio-4.9.0.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[pytest11] +anyio = anyio.pytest_plugin diff --git a/venv/lib/python3.11/site-packages/anyio-4.9.0.dist-info/top_level.txt b/venv/lib/python3.11/site-packages/anyio-4.9.0.dist-info/top_level.txt new file mode 100644 index 0000000..c77c069 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio-4.9.0.dist-info/top_level.txt @@ -0,0 +1 @@ +anyio diff --git a/venv/lib/python3.11/site-packages/anyio/__init__.py b/venv/lib/python3.11/site-packages/anyio/__init__.py new file mode 100644 index 0000000..578cda6 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/__init__.py @@ -0,0 +1,85 @@ +from __future__ import annotations + +from ._core._eventloop import current_time as current_time +from ._core._eventloop import get_all_backends as get_all_backends +from ._core._eventloop import get_cancelled_exc_class as get_cancelled_exc_class +from ._core._eventloop import run as run +from ._core._eventloop import sleep as sleep +from ._core._eventloop import sleep_forever as sleep_forever +from ._core._eventloop import sleep_until as sleep_until +from ._core._exceptions import BrokenResourceError as BrokenResourceError +from ._core._exceptions import BrokenWorkerIntepreter as BrokenWorkerIntepreter +from ._core._exceptions import BrokenWorkerProcess as BrokenWorkerProcess +from ._core._exceptions import BusyResourceError as BusyResourceError +from ._core._exceptions import ClosedResourceError as ClosedResourceError +from ._core._exceptions import DelimiterNotFound as DelimiterNotFound +from ._core._exceptions import EndOfStream as EndOfStream +from ._core._exceptions import IncompleteRead as IncompleteRead +from ._core._exceptions import TypedAttributeLookupError as TypedAttributeLookupError +from ._core._exceptions import WouldBlock as WouldBlock +from ._core._fileio import AsyncFile as AsyncFile +from ._core._fileio import Path as Path +from ._core._fileio import open_file as open_file +from ._core._fileio import wrap_file as wrap_file +from ._core._resources import aclose_forcefully as aclose_forcefully +from ._core._signals import open_signal_receiver as open_signal_receiver +from ._core._sockets import 
connect_tcp as connect_tcp +from ._core._sockets import connect_unix as connect_unix +from ._core._sockets import create_connected_udp_socket as create_connected_udp_socket +from ._core._sockets import ( + create_connected_unix_datagram_socket as create_connected_unix_datagram_socket, +) +from ._core._sockets import create_tcp_listener as create_tcp_listener +from ._core._sockets import create_udp_socket as create_udp_socket +from ._core._sockets import create_unix_datagram_socket as create_unix_datagram_socket +from ._core._sockets import create_unix_listener as create_unix_listener +from ._core._sockets import getaddrinfo as getaddrinfo +from ._core._sockets import getnameinfo as getnameinfo +from ._core._sockets import wait_readable as wait_readable +from ._core._sockets import wait_socket_readable as wait_socket_readable +from ._core._sockets import wait_socket_writable as wait_socket_writable +from ._core._sockets import wait_writable as wait_writable +from ._core._streams import create_memory_object_stream as create_memory_object_stream +from ._core._subprocesses import open_process as open_process +from ._core._subprocesses import run_process as run_process +from ._core._synchronization import CapacityLimiter as CapacityLimiter +from ._core._synchronization import ( + CapacityLimiterStatistics as CapacityLimiterStatistics, +) +from ._core._synchronization import Condition as Condition +from ._core._synchronization import ConditionStatistics as ConditionStatistics +from ._core._synchronization import Event as Event +from ._core._synchronization import EventStatistics as EventStatistics +from ._core._synchronization import Lock as Lock +from ._core._synchronization import LockStatistics as LockStatistics +from ._core._synchronization import ResourceGuard as ResourceGuard +from ._core._synchronization import Semaphore as Semaphore +from ._core._synchronization import SemaphoreStatistics as SemaphoreStatistics +from ._core._tasks import TASK_STATUS_IGNORED as TASK_STATUS_IGNORED +from ._core._tasks import CancelScope as CancelScope +from ._core._tasks import create_task_group as create_task_group +from ._core._tasks import current_effective_deadline as current_effective_deadline +from ._core._tasks import fail_after as fail_after +from ._core._tasks import move_on_after as move_on_after +from ._core._tempfile import NamedTemporaryFile as NamedTemporaryFile +from ._core._tempfile import SpooledTemporaryFile as SpooledTemporaryFile +from ._core._tempfile import TemporaryDirectory as TemporaryDirectory +from ._core._tempfile import TemporaryFile as TemporaryFile +from ._core._tempfile import gettempdir as gettempdir +from ._core._tempfile import gettempdirb as gettempdirb +from ._core._tempfile import mkdtemp as mkdtemp +from ._core._tempfile import mkstemp as mkstemp +from ._core._testing import TaskInfo as TaskInfo +from ._core._testing import get_current_task as get_current_task +from ._core._testing import get_running_tasks as get_running_tasks +from ._core._testing import wait_all_tasks_blocked as wait_all_tasks_blocked +from ._core._typedattr import TypedAttributeProvider as TypedAttributeProvider +from ._core._typedattr import TypedAttributeSet as TypedAttributeSet +from ._core._typedattr import typed_attribute as typed_attribute + +# Re-export imports so they look like they live directly in this package +for __value in list(locals().values()): + if getattr(__value, "__module__", "").startswith("anyio."): + __value.__module__ = __name__ + +del __value diff --git 
a/venv/lib/python3.11/site-packages/anyio/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..bcaec7f Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/__pycache__/from_thread.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/__pycache__/from_thread.cpython-311.pyc new file mode 100644 index 0000000..28a032b Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/__pycache__/from_thread.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/__pycache__/lowlevel.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/__pycache__/lowlevel.cpython-311.pyc new file mode 100644 index 0000000..8ceb1c5 Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/__pycache__/lowlevel.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/__pycache__/pytest_plugin.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/__pycache__/pytest_plugin.cpython-311.pyc new file mode 100644 index 0000000..e82299e Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/__pycache__/pytest_plugin.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/__pycache__/to_interpreter.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/__pycache__/to_interpreter.cpython-311.pyc new file mode 100644 index 0000000..46cb3d6 Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/__pycache__/to_interpreter.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/__pycache__/to_process.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/__pycache__/to_process.cpython-311.pyc new file mode 100644 index 0000000..a7696a5 Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/__pycache__/to_process.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/__pycache__/to_thread.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/__pycache__/to_thread.cpython-311.pyc new file mode 100644 index 0000000..758eced Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/__pycache__/to_thread.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/_backends/__init__.py b/venv/lib/python3.11/site-packages/anyio/_backends/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.11/site-packages/anyio/_backends/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/_backends/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..c90978c Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/_backends/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-311.pyc new file mode 100644 index 0000000..e89910b Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/_backends/__pycache__/_trio.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/_backends/__pycache__/_trio.cpython-311.pyc new file mode 100644 index 0000000..5c31c0f Binary files /dev/null and 
b/venv/lib/python3.11/site-packages/anyio/_backends/__pycache__/_trio.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/_backends/_asyncio.py b/venv/lib/python3.11/site-packages/anyio/_backends/_asyncio.py new file mode 100644 index 0000000..ed91f40 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/_backends/_asyncio.py @@ -0,0 +1,2816 @@ +from __future__ import annotations + +import array +import asyncio +import concurrent.futures +import contextvars +import math +import os +import socket +import sys +import threading +import weakref +from asyncio import ( + AbstractEventLoop, + CancelledError, + all_tasks, + create_task, + current_task, + get_running_loop, + sleep, +) +from asyncio.base_events import _run_until_complete_cb # type: ignore[attr-defined] +from collections import OrderedDict, deque +from collections.abc import ( + AsyncGenerator, + AsyncIterator, + Awaitable, + Callable, + Collection, + Coroutine, + Iterable, + Sequence, +) +from concurrent.futures import Future +from contextlib import AbstractContextManager, suppress +from contextvars import Context, copy_context +from dataclasses import dataclass +from functools import partial, wraps +from inspect import ( + CORO_RUNNING, + CORO_SUSPENDED, + getcoroutinestate, + iscoroutine, +) +from io import IOBase +from os import PathLike +from queue import Queue +from signal import Signals +from socket import AddressFamily, SocketKind +from threading import Thread +from types import CodeType, TracebackType +from typing import ( + IO, + TYPE_CHECKING, + Any, + Optional, + TypeVar, + cast, +) +from weakref import WeakKeyDictionary + +import sniffio + +from .. import ( + CapacityLimiterStatistics, + EventStatistics, + LockStatistics, + TaskInfo, + abc, +) +from .._core._eventloop import claim_worker_thread, threadlocals +from .._core._exceptions import ( + BrokenResourceError, + BusyResourceError, + ClosedResourceError, + EndOfStream, + WouldBlock, + iterate_exceptions, +) +from .._core._sockets import convert_ipv6_sockaddr +from .._core._streams import create_memory_object_stream +from .._core._synchronization import ( + CapacityLimiter as BaseCapacityLimiter, +) +from .._core._synchronization import Event as BaseEvent +from .._core._synchronization import Lock as BaseLock +from .._core._synchronization import ( + ResourceGuard, + SemaphoreStatistics, +) +from .._core._synchronization import Semaphore as BaseSemaphore +from .._core._tasks import CancelScope as BaseCancelScope +from ..abc import ( + AsyncBackend, + IPSockAddrType, + SocketListener, + UDPPacketType, + UNIXDatagramPacketType, +) +from ..abc._eventloop import StrOrBytesPath +from ..lowlevel import RunVar +from ..streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream + +if TYPE_CHECKING: + from _typeshed import FileDescriptorLike +else: + FileDescriptorLike = object + +if sys.version_info >= (3, 10): + from typing import ParamSpec +else: + from typing_extensions import ParamSpec + +if sys.version_info >= (3, 11): + from asyncio import Runner + from typing import TypeVarTuple, Unpack +else: + import contextvars + import enum + import signal + from asyncio import coroutines, events, exceptions, tasks + + from exceptiongroup import BaseExceptionGroup + from typing_extensions import TypeVarTuple, Unpack + + class _State(enum.Enum): + CREATED = "created" + INITIALIZED = "initialized" + CLOSED = "closed" + + class Runner: + # Copied from CPython 3.11 + def __init__( + self, + *, + debug: bool | None = None, + loop_factory: 
Callable[[], AbstractEventLoop] | None = None, + ): + self._state = _State.CREATED + self._debug = debug + self._loop_factory = loop_factory + self._loop: AbstractEventLoop | None = None + self._context = None + self._interrupt_count = 0 + self._set_event_loop = False + + def __enter__(self) -> Runner: + self._lazy_init() + return self + + def __exit__( + self, + exc_type: type[BaseException], + exc_val: BaseException, + exc_tb: TracebackType, + ) -> None: + self.close() + + def close(self) -> None: + """Shutdown and close event loop.""" + if self._state is not _State.INITIALIZED: + return + try: + loop = self._loop + _cancel_all_tasks(loop) + loop.run_until_complete(loop.shutdown_asyncgens()) + if hasattr(loop, "shutdown_default_executor"): + loop.run_until_complete(loop.shutdown_default_executor()) + else: + loop.run_until_complete(_shutdown_default_executor(loop)) + finally: + if self._set_event_loop: + events.set_event_loop(None) + loop.close() + self._loop = None + self._state = _State.CLOSED + + def get_loop(self) -> AbstractEventLoop: + """Return embedded event loop.""" + self._lazy_init() + return self._loop + + def run(self, coro: Coroutine[T_Retval], *, context=None) -> T_Retval: + """Run a coroutine inside the embedded event loop.""" + if not coroutines.iscoroutine(coro): + raise ValueError(f"a coroutine was expected, got {coro!r}") + + if events._get_running_loop() is not None: + # fail fast with short traceback + raise RuntimeError( + "Runner.run() cannot be called from a running event loop" + ) + + self._lazy_init() + + if context is None: + context = self._context + task = context.run(self._loop.create_task, coro) + + if ( + threading.current_thread() is threading.main_thread() + and signal.getsignal(signal.SIGINT) is signal.default_int_handler + ): + sigint_handler = partial(self._on_sigint, main_task=task) + try: + signal.signal(signal.SIGINT, sigint_handler) + except ValueError: + # `signal.signal` may throw if `threading.main_thread` does + # not support signals (e.g. 
embedded interpreter with signals + # not registered - see gh-91880) + sigint_handler = None + else: + sigint_handler = None + + self._interrupt_count = 0 + try: + return self._loop.run_until_complete(task) + except exceptions.CancelledError: + if self._interrupt_count > 0: + uncancel = getattr(task, "uncancel", None) + if uncancel is not None and uncancel() == 0: + raise KeyboardInterrupt() + raise # CancelledError + finally: + if ( + sigint_handler is not None + and signal.getsignal(signal.SIGINT) is sigint_handler + ): + signal.signal(signal.SIGINT, signal.default_int_handler) + + def _lazy_init(self) -> None: + if self._state is _State.CLOSED: + raise RuntimeError("Runner is closed") + if self._state is _State.INITIALIZED: + return + if self._loop_factory is None: + self._loop = events.new_event_loop() + if not self._set_event_loop: + # Call set_event_loop only once to avoid calling + # attach_loop multiple times on child watchers + events.set_event_loop(self._loop) + self._set_event_loop = True + else: + self._loop = self._loop_factory() + if self._debug is not None: + self._loop.set_debug(self._debug) + self._context = contextvars.copy_context() + self._state = _State.INITIALIZED + + def _on_sigint(self, signum, frame, main_task: asyncio.Task) -> None: + self._interrupt_count += 1 + if self._interrupt_count == 1 and not main_task.done(): + main_task.cancel() + # wakeup loop if it is blocked by select() with long timeout + self._loop.call_soon_threadsafe(lambda: None) + return + raise KeyboardInterrupt() + + def _cancel_all_tasks(loop: AbstractEventLoop) -> None: + to_cancel = tasks.all_tasks(loop) + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + + loop.run_until_complete(tasks.gather(*to_cancel, return_exceptions=True)) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + loop.call_exception_handler( + { + "message": "unhandled exception during asyncio.run() shutdown", + "exception": task.exception(), + "task": task, + } + ) + + async def _shutdown_default_executor(loop: AbstractEventLoop) -> None: + """Schedule the shutdown of the default executor.""" + + def _do_shutdown(future: asyncio.futures.Future) -> None: + try: + loop._default_executor.shutdown(wait=True) # type: ignore[attr-defined] + loop.call_soon_threadsafe(future.set_result, None) + except Exception as ex: + loop.call_soon_threadsafe(future.set_exception, ex) + + loop._executor_shutdown_called = True + if loop._default_executor is None: + return + future = loop.create_future() + thread = threading.Thread(target=_do_shutdown, args=(future,)) + thread.start() + try: + await future + finally: + thread.join() + + +T_Retval = TypeVar("T_Retval") +T_contra = TypeVar("T_contra", contravariant=True) +PosArgsT = TypeVarTuple("PosArgsT") +P = ParamSpec("P") + +_root_task: RunVar[asyncio.Task | None] = RunVar("_root_task") + + +def find_root_task() -> asyncio.Task: + root_task = _root_task.get(None) + if root_task is not None and not root_task.done(): + return root_task + + # Look for a task that has been started via run_until_complete() + for task in all_tasks(): + if task._callbacks and not task.done(): + callbacks = [cb for cb, context in task._callbacks] + for cb in callbacks: + if ( + cb is _run_until_complete_cb + or getattr(cb, "__module__", None) == "uvloop.loop" + ): + _root_task.set(task) + return task + + # Look up the topmost task in the AnyIO task tree, if possible + task = cast(asyncio.Task, current_task()) + state = _task_states.get(task) + if 
state: + cancel_scope = state.cancel_scope + while cancel_scope and cancel_scope._parent_scope is not None: + cancel_scope = cancel_scope._parent_scope + + if cancel_scope is not None: + return cast(asyncio.Task, cancel_scope._host_task) + + return task + + +def get_callable_name(func: Callable) -> str: + module = getattr(func, "__module__", None) + qualname = getattr(func, "__qualname__", None) + return ".".join([x for x in (module, qualname) if x]) + + +# +# Event loop +# + +_run_vars: WeakKeyDictionary[asyncio.AbstractEventLoop, Any] = WeakKeyDictionary() + + +def _task_started(task: asyncio.Task) -> bool: + """Return ``True`` if the task has been started and has not finished.""" + # The task coro should never be None here, as we never add finished tasks to the + # task list + coro = task.get_coro() + assert coro is not None + try: + return getcoroutinestate(coro) in (CORO_RUNNING, CORO_SUSPENDED) + except AttributeError: + # task coro is async_genenerator_asend https://bugs.python.org/issue37771 + raise Exception(f"Cannot determine if task {task} has started or not") from None + + +# +# Timeouts and cancellation +# + + +def is_anyio_cancellation(exc: CancelledError) -> bool: + # Sometimes third party frameworks catch a CancelledError and raise a new one, so as + # a workaround we have to look at the previous ones in __context__ too for a + # matching cancel message + while True: + if ( + exc.args + and isinstance(exc.args[0], str) + and exc.args[0].startswith("Cancelled by cancel scope ") + ): + return True + + if isinstance(exc.__context__, CancelledError): + exc = exc.__context__ + continue + + return False + + +class CancelScope(BaseCancelScope): + def __new__( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + return object.__new__(cls) + + def __init__(self, deadline: float = math.inf, shield: bool = False): + self._deadline = deadline + self._shield = shield + self._parent_scope: CancelScope | None = None + self._child_scopes: set[CancelScope] = set() + self._cancel_called = False + self._cancelled_caught = False + self._active = False + self._timeout_handle: asyncio.TimerHandle | None = None + self._cancel_handle: asyncio.Handle | None = None + self._tasks: set[asyncio.Task] = set() + self._host_task: asyncio.Task | None = None + if sys.version_info >= (3, 11): + self._pending_uncancellations: int | None = 0 + else: + self._pending_uncancellations = None + + def __enter__(self) -> CancelScope: + if self._active: + raise RuntimeError( + "Each CancelScope may only be used for a single 'with' block" + ) + + self._host_task = host_task = cast(asyncio.Task, current_task()) + self._tasks.add(host_task) + try: + task_state = _task_states[host_task] + except KeyError: + task_state = TaskState(None, self) + _task_states[host_task] = task_state + else: + self._parent_scope = task_state.cancel_scope + task_state.cancel_scope = self + if self._parent_scope is not None: + # If using an eager task factory, the parent scope may not even contain + # the host task + self._parent_scope._child_scopes.add(self) + self._parent_scope._tasks.discard(host_task) + + self._timeout() + self._active = True + + # Start cancelling the host task if the scope was cancelled before entering + if self._cancel_called: + self._deliver_cancellation(self) + + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + del exc_tb + + if not self._active: + raise RuntimeError("This cancel 
scope is not active") + if current_task() is not self._host_task: + raise RuntimeError( + "Attempted to exit cancel scope in a different task than it was " + "entered in" + ) + + assert self._host_task is not None + host_task_state = _task_states.get(self._host_task) + if host_task_state is None or host_task_state.cancel_scope is not self: + raise RuntimeError( + "Attempted to exit a cancel scope that isn't the current tasks's " + "current cancel scope" + ) + + try: + self._active = False + if self._timeout_handle: + self._timeout_handle.cancel() + self._timeout_handle = None + + self._tasks.remove(self._host_task) + if self._parent_scope is not None: + self._parent_scope._child_scopes.remove(self) + self._parent_scope._tasks.add(self._host_task) + + host_task_state.cancel_scope = self._parent_scope + + # Restart the cancellation effort in the closest visible, cancelled parent + # scope if necessary + self._restart_cancellation_in_parent() + + # We only swallow the exception iff it was an AnyIO CancelledError, either + # directly as exc_val or inside an exception group and there are no cancelled + # parent cancel scopes visible to us here + if self._cancel_called and not self._parent_cancellation_is_visible_to_us: + # For each level-cancel() call made on the host task, call uncancel() + while self._pending_uncancellations: + self._host_task.uncancel() + self._pending_uncancellations -= 1 + + # Update cancelled_caught and check for exceptions we must not swallow + cannot_swallow_exc_val = False + if exc_val is not None: + for exc in iterate_exceptions(exc_val): + if isinstance(exc, CancelledError) and is_anyio_cancellation( + exc + ): + self._cancelled_caught = True + else: + cannot_swallow_exc_val = True + + return self._cancelled_caught and not cannot_swallow_exc_val + else: + if self._pending_uncancellations: + assert self._parent_scope is not None + assert self._parent_scope._pending_uncancellations is not None + self._parent_scope._pending_uncancellations += ( + self._pending_uncancellations + ) + self._pending_uncancellations = 0 + + return False + finally: + self._host_task = None + del exc_val + + @property + def _effectively_cancelled(self) -> bool: + cancel_scope: CancelScope | None = self + while cancel_scope is not None: + if cancel_scope._cancel_called: + return True + + if cancel_scope.shield: + return False + + cancel_scope = cancel_scope._parent_scope + + return False + + @property + def _parent_cancellation_is_visible_to_us(self) -> bool: + return ( + self._parent_scope is not None + and not self.shield + and self._parent_scope._effectively_cancelled + ) + + def _timeout(self) -> None: + if self._deadline != math.inf: + loop = get_running_loop() + if loop.time() >= self._deadline: + self.cancel() + else: + self._timeout_handle = loop.call_at(self._deadline, self._timeout) + + def _deliver_cancellation(self, origin: CancelScope) -> bool: + """ + Deliver cancellation to directly contained tasks and nested cancel scopes. + + Schedule another run at the end if we still have tasks eligible for + cancellation. 
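+        Tasks that have not started running yet cannot be cancelled right away,
+        which is why delivery is rescheduled with ``call_soon()`` for as long as
+        the scope still contains tasks.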
+ + :param origin: the cancel scope that originated the cancellation + :return: ``True`` if the delivery needs to be retried on the next cycle + + """ + should_retry = False + current = current_task() + for task in self._tasks: + should_retry = True + if task._must_cancel: # type: ignore[attr-defined] + continue + + # The task is eligible for cancellation if it has started + if task is not current and (task is self._host_task or _task_started(task)): + waiter = task._fut_waiter # type: ignore[attr-defined] + if not isinstance(waiter, asyncio.Future) or not waiter.done(): + task.cancel(f"Cancelled by cancel scope {id(origin):x}") + if ( + task is origin._host_task + and origin._pending_uncancellations is not None + ): + origin._pending_uncancellations += 1 + + # Deliver cancellation to child scopes that aren't shielded or running their own + # cancellation callbacks + for scope in self._child_scopes: + if not scope._shield and not scope.cancel_called: + should_retry = scope._deliver_cancellation(origin) or should_retry + + # Schedule another callback if there are still tasks left + if origin is self: + if should_retry: + self._cancel_handle = get_running_loop().call_soon( + self._deliver_cancellation, origin + ) + else: + self._cancel_handle = None + + return should_retry + + def _restart_cancellation_in_parent(self) -> None: + """ + Restart the cancellation effort in the closest directly cancelled parent scope. + + """ + scope = self._parent_scope + while scope is not None: + if scope._cancel_called: + if scope._cancel_handle is None: + scope._deliver_cancellation(scope) + + break + + # No point in looking beyond any shielded scope + if scope._shield: + break + + scope = scope._parent_scope + + def cancel(self) -> None: + if not self._cancel_called: + if self._timeout_handle: + self._timeout_handle.cancel() + self._timeout_handle = None + + self._cancel_called = True + if self._host_task is not None: + self._deliver_cancellation(self) + + @property + def deadline(self) -> float: + return self._deadline + + @deadline.setter + def deadline(self, value: float) -> None: + self._deadline = float(value) + if self._timeout_handle is not None: + self._timeout_handle.cancel() + self._timeout_handle = None + + if self._active and not self._cancel_called: + self._timeout() + + @property + def cancel_called(self) -> bool: + return self._cancel_called + + @property + def cancelled_caught(self) -> bool: + return self._cancelled_caught + + @property + def shield(self) -> bool: + return self._shield + + @shield.setter + def shield(self, value: bool) -> None: + if self._shield != value: + self._shield = value + if not value: + self._restart_cancellation_in_parent() + + +# +# Task states +# + + +class TaskState: + """ + Encapsulates auxiliary task information that cannot be added to the Task instance + itself because there are no guarantees about its implementation. 
+ """ + + __slots__ = "parent_id", "cancel_scope", "__weakref__" + + def __init__(self, parent_id: int | None, cancel_scope: CancelScope | None): + self.parent_id = parent_id + self.cancel_scope = cancel_scope + + +_task_states: WeakKeyDictionary[asyncio.Task, TaskState] = WeakKeyDictionary() + + +# +# Task groups +# + + +class _AsyncioTaskStatus(abc.TaskStatus): + def __init__(self, future: asyncio.Future, parent_id: int): + self._future = future + self._parent_id = parent_id + + def started(self, value: T_contra | None = None) -> None: + try: + self._future.set_result(value) + except asyncio.InvalidStateError: + if not self._future.cancelled(): + raise RuntimeError( + "called 'started' twice on the same task status" + ) from None + + task = cast(asyncio.Task, current_task()) + _task_states[task].parent_id = self._parent_id + + +if sys.version_info >= (3, 12): + _eager_task_factory_code: CodeType | None = asyncio.eager_task_factory.__code__ +else: + _eager_task_factory_code = None + + +class TaskGroup(abc.TaskGroup): + def __init__(self) -> None: + self.cancel_scope: CancelScope = CancelScope() + self._active = False + self._exceptions: list[BaseException] = [] + self._tasks: set[asyncio.Task] = set() + self._on_completed_fut: asyncio.Future[None] | None = None + + async def __aenter__(self) -> TaskGroup: + self.cancel_scope.__enter__() + self._active = True + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + try: + if exc_val is not None: + self.cancel_scope.cancel() + if not isinstance(exc_val, CancelledError): + self._exceptions.append(exc_val) + + loop = get_running_loop() + try: + if self._tasks: + with CancelScope() as wait_scope: + while self._tasks: + self._on_completed_fut = loop.create_future() + + try: + await self._on_completed_fut + except CancelledError as exc: + # Shield the scope against further cancellation attempts, + # as they're not productive (#695) + wait_scope.shield = True + self.cancel_scope.cancel() + + # Set exc_val from the cancellation exception if it was + # previously unset. However, we should not replace a native + # cancellation exception with one raise by a cancel scope. + if exc_val is None or ( + isinstance(exc_val, CancelledError) + and not is_anyio_cancellation(exc) + ): + exc_val = exc + + self._on_completed_fut = None + else: + # If there are no child tasks to wait on, run at least one checkpoint + # anyway + await AsyncIOBackend.cancel_shielded_checkpoint() + + self._active = False + if self._exceptions: + # The exception that got us here should already have been + # added to self._exceptions so it's ok to break exception + # chaining and avoid adding a "During handling of above..." + # for each nesting level. 
+ raise BaseExceptionGroup( + "unhandled errors in a TaskGroup", self._exceptions + ) from None + elif exc_val: + raise exc_val + except BaseException as exc: + if self.cancel_scope.__exit__(type(exc), exc, exc.__traceback__): + return True + + raise + + return self.cancel_scope.__exit__(exc_type, exc_val, exc_tb) + finally: + del exc_val, exc_tb, self._exceptions + + def _spawn( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + args: tuple[Unpack[PosArgsT]], + name: object, + task_status_future: asyncio.Future | None = None, + ) -> asyncio.Task: + def task_done(_task: asyncio.Task) -> None: + task_state = _task_states[_task] + assert task_state.cancel_scope is not None + assert _task in task_state.cancel_scope._tasks + task_state.cancel_scope._tasks.remove(_task) + self._tasks.remove(task) + del _task_states[_task] + + if self._on_completed_fut is not None and not self._tasks: + try: + self._on_completed_fut.set_result(None) + except asyncio.InvalidStateError: + pass + + try: + exc = _task.exception() + except CancelledError as e: + while isinstance(e.__context__, CancelledError): + e = e.__context__ + + exc = e + + if exc is not None: + # The future can only be in the cancelled state if the host task was + # cancelled, so return immediately instead of adding one more + # CancelledError to the exceptions list + if task_status_future is not None and task_status_future.cancelled(): + return + + if task_status_future is None or task_status_future.done(): + if not isinstance(exc, CancelledError): + self._exceptions.append(exc) + + if not self.cancel_scope._effectively_cancelled: + self.cancel_scope.cancel() + else: + task_status_future.set_exception(exc) + elif task_status_future is not None and not task_status_future.done(): + task_status_future.set_exception( + RuntimeError("Child exited without calling task_status.started()") + ) + + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." + ) + + kwargs = {} + if task_status_future: + parent_id = id(current_task()) + kwargs["task_status"] = _AsyncioTaskStatus( + task_status_future, id(self.cancel_scope._host_task) + ) + else: + parent_id = id(self.cancel_scope._host_task) + + coro = func(*args, **kwargs) + if not iscoroutine(coro): + prefix = f"{func.__module__}." 
if hasattr(func, "__module__") else "" + raise TypeError( + f"Expected {prefix}{func.__qualname__}() to return a coroutine, but " + f"the return value ({coro!r}) is not a coroutine object" + ) + + name = get_callable_name(func) if name is None else str(name) + loop = asyncio.get_running_loop() + if ( + (factory := loop.get_task_factory()) + and getattr(factory, "__code__", None) is _eager_task_factory_code + and (closure := getattr(factory, "__closure__", None)) + ): + custom_task_constructor = closure[0].cell_contents + task = custom_task_constructor(coro, loop=loop, name=name) + else: + task = create_task(coro, name=name) + + # Make the spawned task inherit the task group's cancel scope + _task_states[task] = TaskState( + parent_id=parent_id, cancel_scope=self.cancel_scope + ) + self.cancel_scope._tasks.add(task) + self._tasks.add(task) + task.add_done_callback(task_done) + return task + + def start_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> None: + self._spawn(func, args, name) + + async def start( + self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None + ) -> Any: + future: asyncio.Future = asyncio.Future() + task = self._spawn(func, args, name, future) + + # If the task raises an exception after sending a start value without a switch + # point between, the task group is cancelled and this method never proceeds to + # process the completed future. That's why we have to have a shielded cancel + # scope here. + try: + return await future + except CancelledError: + # Cancel the task and wait for it to exit before returning + task.cancel() + with CancelScope(shield=True), suppress(CancelledError): + await task + + raise + + +# +# Threads +# + +_Retval_Queue_Type = tuple[Optional[T_Retval], Optional[BaseException]] + + +class WorkerThread(Thread): + MAX_IDLE_TIME = 10 # seconds + + def __init__( + self, + root_task: asyncio.Task, + workers: set[WorkerThread], + idle_workers: deque[WorkerThread], + ): + super().__init__(name="AnyIO worker thread") + self.root_task = root_task + self.workers = workers + self.idle_workers = idle_workers + self.loop = root_task._loop + self.queue: Queue[ + tuple[Context, Callable, tuple, asyncio.Future, CancelScope] | None + ] = Queue(2) + self.idle_since = AsyncIOBackend.current_time() + self.stopping = False + + def _report_result( + self, future: asyncio.Future, result: Any, exc: BaseException | None + ) -> None: + self.idle_since = AsyncIOBackend.current_time() + if not self.stopping: + self.idle_workers.append(self) + + if not future.cancelled(): + if exc is not None: + if isinstance(exc, StopIteration): + new_exc = RuntimeError("coroutine raised StopIteration") + new_exc.__cause__ = exc + exc = new_exc + + future.set_exception(exc) + else: + future.set_result(result) + + def run(self) -> None: + with claim_worker_thread(AsyncIOBackend, self.loop): + while True: + item = self.queue.get() + if item is None: + # Shutdown command received + return + + context, func, args, future, cancel_scope = item + if not future.cancelled(): + result = None + exception: BaseException | None = None + threadlocals.current_cancel_scope = cancel_scope + try: + result = context.run(func, *args) + except BaseException as exc: + exception = exc + finally: + del threadlocals.current_cancel_scope + + if not self.loop.is_closed(): + self.loop.call_soon_threadsafe( + self._report_result, future, result, exception + ) + + del result, exception + + self.queue.task_done() + del 
item, context, func, args, future, cancel_scope + + def stop(self, f: asyncio.Task | None = None) -> None: + self.stopping = True + self.queue.put_nowait(None) + self.workers.discard(self) + try: + self.idle_workers.remove(self) + except ValueError: + pass + + +_threadpool_idle_workers: RunVar[deque[WorkerThread]] = RunVar( + "_threadpool_idle_workers" +) +_threadpool_workers: RunVar[set[WorkerThread]] = RunVar("_threadpool_workers") + + +class BlockingPortal(abc.BlockingPortal): + def __new__(cls) -> BlockingPortal: + return object.__new__(cls) + + def __init__(self) -> None: + super().__init__() + self._loop = get_running_loop() + + def _spawn_task_from_thread( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + name: object, + future: Future[T_Retval], + ) -> None: + AsyncIOBackend.run_sync_from_thread( + partial(self._task_group.start_soon, name=name), + (self._call_func, func, args, kwargs, future), + self._loop, + ) + + +# +# Subprocesses +# + + +@dataclass(eq=False) +class StreamReaderWrapper(abc.ByteReceiveStream): + _stream: asyncio.StreamReader + + async def receive(self, max_bytes: int = 65536) -> bytes: + data = await self._stream.read(max_bytes) + if data: + return data + else: + raise EndOfStream + + async def aclose(self) -> None: + self._stream.set_exception(ClosedResourceError()) + await AsyncIOBackend.checkpoint() + + +@dataclass(eq=False) +class StreamWriterWrapper(abc.ByteSendStream): + _stream: asyncio.StreamWriter + + async def send(self, item: bytes) -> None: + self._stream.write(item) + await self._stream.drain() + + async def aclose(self) -> None: + self._stream.close() + await AsyncIOBackend.checkpoint() + + +@dataclass(eq=False) +class Process(abc.Process): + _process: asyncio.subprocess.Process + _stdin: StreamWriterWrapper | None + _stdout: StreamReaderWrapper | None + _stderr: StreamReaderWrapper | None + + async def aclose(self) -> None: + with CancelScope(shield=True) as scope: + if self._stdin: + await self._stdin.aclose() + if self._stdout: + await self._stdout.aclose() + if self._stderr: + await self._stderr.aclose() + + scope.shield = False + try: + await self.wait() + except BaseException: + scope.shield = True + self.kill() + await self.wait() + raise + + async def wait(self) -> int: + return await self._process.wait() + + def terminate(self) -> None: + self._process.terminate() + + def kill(self) -> None: + self._process.kill() + + def send_signal(self, signal: int) -> None: + self._process.send_signal(signal) + + @property + def pid(self) -> int: + return self._process.pid + + @property + def returncode(self) -> int | None: + return self._process.returncode + + @property + def stdin(self) -> abc.ByteSendStream | None: + return self._stdin + + @property + def stdout(self) -> abc.ByteReceiveStream | None: + return self._stdout + + @property + def stderr(self) -> abc.ByteReceiveStream | None: + return self._stderr + + +def _forcibly_shutdown_process_pool_on_exit( + workers: set[Process], _task: object +) -> None: + """ + Forcibly shuts down worker processes belonging to this event loop.""" + child_watcher: asyncio.AbstractChildWatcher | None = None + if sys.version_info < (3, 12): + try: + child_watcher = asyncio.get_event_loop_policy().get_child_watcher() + except NotImplementedError: + pass + + # Close as much as possible (w/o async/await) to avoid warnings + for process in workers: + if process.returncode is None: + continue + + 
process._stdin._stream._transport.close() # type: ignore[union-attr] + process._stdout._stream._transport.close() # type: ignore[union-attr] + process._stderr._stream._transport.close() # type: ignore[union-attr] + process.kill() + if child_watcher: + child_watcher.remove_child_handler(process.pid) + + +async def _shutdown_process_pool_on_exit(workers: set[abc.Process]) -> None: + """ + Shuts down worker processes belonging to this event loop. + + NOTE: this only works when the event loop was started using asyncio.run() or + anyio.run(). + + """ + process: abc.Process + try: + await sleep(math.inf) + except asyncio.CancelledError: + for process in workers: + if process.returncode is None: + process.kill() + + for process in workers: + await process.aclose() + + +# +# Sockets and networking +# + + +class StreamProtocol(asyncio.Protocol): + read_queue: deque[bytes] + read_event: asyncio.Event + write_event: asyncio.Event + exception: Exception | None = None + is_at_eof: bool = False + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + self.read_queue = deque() + self.read_event = asyncio.Event() + self.write_event = asyncio.Event() + self.write_event.set() + cast(asyncio.Transport, transport).set_write_buffer_limits(0) + + def connection_lost(self, exc: Exception | None) -> None: + if exc: + self.exception = BrokenResourceError() + self.exception.__cause__ = exc + + self.read_event.set() + self.write_event.set() + + def data_received(self, data: bytes) -> None: + # ProactorEventloop sometimes sends bytearray instead of bytes + self.read_queue.append(bytes(data)) + self.read_event.set() + + def eof_received(self) -> bool | None: + self.is_at_eof = True + self.read_event.set() + return True + + def pause_writing(self) -> None: + self.write_event = asyncio.Event() + + def resume_writing(self) -> None: + self.write_event.set() + + +class DatagramProtocol(asyncio.DatagramProtocol): + read_queue: deque[tuple[bytes, IPSockAddrType]] + read_event: asyncio.Event + write_event: asyncio.Event + exception: Exception | None = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + self.read_queue = deque(maxlen=100) # arbitrary value + self.read_event = asyncio.Event() + self.write_event = asyncio.Event() + self.write_event.set() + + def connection_lost(self, exc: Exception | None) -> None: + self.read_event.set() + self.write_event.set() + + def datagram_received(self, data: bytes, addr: IPSockAddrType) -> None: + addr = convert_ipv6_sockaddr(addr) + self.read_queue.append((data, addr)) + self.read_event.set() + + def error_received(self, exc: Exception) -> None: + self.exception = exc + + def pause_writing(self) -> None: + self.write_event.clear() + + def resume_writing(self) -> None: + self.write_event.set() + + +class SocketStream(abc.SocketStream): + def __init__(self, transport: asyncio.Transport, protocol: StreamProtocol): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def receive(self, max_bytes: int = 65536) -> bytes: + with self._receive_guard: + if ( + not self._protocol.read_event.is_set() + and not self._transport.is_closing() + and not self._protocol.is_at_eof + ): + self._transport.resume_reading() + await self._protocol.read_event.wait() + self._transport.pause_reading() + else: + await 
AsyncIOBackend.checkpoint() + + try: + chunk = self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + elif self._protocol.exception: + raise self._protocol.exception from None + else: + raise EndOfStream from None + + if len(chunk) > max_bytes: + # Split the oversized chunk + chunk, leftover = chunk[:max_bytes], chunk[max_bytes:] + self._protocol.read_queue.appendleft(leftover) + + # If the read queue is empty, clear the flag so that the next call will + # block until data is available + if not self._protocol.read_queue: + self._protocol.read_event.clear() + + return chunk + + async def send(self, item: bytes) -> None: + with self._send_guard: + await AsyncIOBackend.checkpoint() + + if self._closed: + raise ClosedResourceError + elif self._protocol.exception is not None: + raise self._protocol.exception + + try: + self._transport.write(item) + except RuntimeError as exc: + if self._transport.is_closing(): + raise BrokenResourceError from exc + else: + raise + + await self._protocol.write_event.wait() + + async def send_eof(self) -> None: + try: + self._transport.write_eof() + except OSError: + pass + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + try: + self._transport.write_eof() + except OSError: + pass + + self._transport.close() + await sleep(0) + self._transport.abort() + + +class _RawSocketMixin: + _receive_future: asyncio.Future | None = None + _send_future: asyncio.Future | None = None + _closing = False + + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + def _wait_until_readable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: + def callback(f: object) -> None: + del self._receive_future + loop.remove_reader(self.__raw_socket) + + f = self._receive_future = asyncio.Future() + loop.add_reader(self.__raw_socket, f.set_result, None) + f.add_done_callback(callback) + return f + + def _wait_until_writable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: + def callback(f: object) -> None: + del self._send_future + loop.remove_writer(self.__raw_socket) + + f = self._send_future = asyncio.Future() + loop.add_writer(self.__raw_socket, f.set_result, None) + f.add_done_callback(callback) + return f + + async def aclose(self) -> None: + if not self._closing: + self._closing = True + if self.__raw_socket.fileno() != -1: + self.__raw_socket.close() + + if self._receive_future: + self._receive_future.set_result(None) + if self._send_future: + self._send_future.set_result(None) + + +class UNIXSocketStream(_RawSocketMixin, abc.UNIXSocketStream): + async def send_eof(self) -> None: + with self._send_guard: + self._raw_socket.shutdown(socket.SHUT_WR) + + async def receive(self, max_bytes: int = 65536) -> bytes: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + data = self._raw_socket.recv(max_bytes) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + if not data: + raise EndOfStream + + return data + + async def send(self, item: bytes) -> None: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._send_guard: + view = 
memoryview(item) + while view: + try: + bytes_sent = self._raw_socket.send(view) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + view = view[bytes_sent:] + + async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: + if not isinstance(msglen, int) or msglen < 0: + raise ValueError("msglen must be a non-negative integer") + if not isinstance(maxfds, int) or maxfds < 1: + raise ValueError("maxfds must be a positive integer") + + loop = get_running_loop() + fds = array.array("i") + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + message, ancdata, flags, addr = self._raw_socket.recvmsg( + msglen, socket.CMSG_LEN(maxfds * fds.itemsize) + ) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + if not message and not ancdata: + raise EndOfStream + + break + + for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: + raise RuntimeError( + f"Received unexpected ancillary data; message = {message!r}, " + f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}" + ) + + fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) + + return message, list(fds) + + async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: + if not message: + raise ValueError("message must not be empty") + if not fds: + raise ValueError("fds must not be empty") + + loop = get_running_loop() + filenos: list[int] = [] + for fd in fds: + if isinstance(fd, int): + filenos.append(fd) + elif isinstance(fd, IOBase): + filenos.append(fd.fileno()) + + fdarray = array.array("i", filenos) + await AsyncIOBackend.checkpoint() + with self._send_guard: + while True: + try: + # The ignore can be removed after mypy picks up + # https://github.com/python/typeshed/pull/5545 + self._raw_socket.sendmsg( + [message], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fdarray)] + ) + break + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + + +class TCPSocketListener(abc.SocketListener): + _accept_scope: CancelScope | None = None + _closed = False + + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = cast(asyncio.BaseEventLoop, get_running_loop()) + self._accept_guard = ResourceGuard("accepting connections from") + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + async def accept(self) -> abc.SocketStream: + if self._closed: + raise ClosedResourceError + + with self._accept_guard: + await AsyncIOBackend.checkpoint() + with CancelScope() as self._accept_scope: + try: + client_sock, _addr = await self._loop.sock_accept(self._raw_socket) + except asyncio.CancelledError: + # Workaround for https://bugs.python.org/issue41317 + try: + self._loop.remove_reader(self._raw_socket) + except (ValueError, NotImplementedError): + pass + + if self._closed: + raise ClosedResourceError from None + + raise + finally: + self._accept_scope = None + + client_sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + transport, protocol = await self._loop.connect_accepted_socket( + 
StreamProtocol, client_sock + ) + return SocketStream(transport, protocol) + + async def aclose(self) -> None: + if self._closed: + return + + self._closed = True + if self._accept_scope: + # Workaround for https://bugs.python.org/issue41317 + try: + self._loop.remove_reader(self._raw_socket) + except (ValueError, NotImplementedError): + pass + + self._accept_scope.cancel() + await sleep(0) + + self._raw_socket.close() + + +class UNIXSocketListener(abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = get_running_loop() + self._accept_guard = ResourceGuard("accepting connections from") + self._closed = False + + async def accept(self) -> abc.SocketStream: + await AsyncIOBackend.checkpoint() + with self._accept_guard: + while True: + try: + client_sock, _ = self.__raw_socket.accept() + client_sock.setblocking(False) + return UNIXSocketStream(client_sock) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + self._loop.add_reader(self.__raw_socket, f.set_result, None) + f.add_done_callback( + lambda _: self._loop.remove_reader(self.__raw_socket) + ) + await f + except OSError as exc: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + + async def aclose(self) -> None: + self._closed = True + self.__raw_socket.close() + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + +class UDPSocket(abc.UDPSocket): + def __init__( + self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol + ): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + self._transport.close() + + async def receive(self) -> tuple[bytes, IPSockAddrType]: + with self._receive_guard: + await AsyncIOBackend.checkpoint() + + # If the buffer is empty, ask for more data + if not self._protocol.read_queue and not self._transport.is_closing(): + self._protocol.read_event.clear() + await self._protocol.read_event.wait() + + try: + return self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from None + + async def send(self, item: UDPPacketType) -> None: + with self._send_guard: + await AsyncIOBackend.checkpoint() + await self._protocol.write_event.wait() + if self._closed: + raise ClosedResourceError + elif self._transport.is_closing(): + raise BrokenResourceError + else: + self._transport.sendto(*item) + + +class ConnectedUDPSocket(abc.ConnectedUDPSocket): + def __init__( + self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol + ): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def aclose(self) -> None: + if not self._transport.is_closing(): + self._closed = True + self._transport.close() + + async def receive(self) -> bytes: + with self._receive_guard: + await AsyncIOBackend.checkpoint() + + # If the buffer is empty, ask for more data + if not self._protocol.read_queue and 
not self._transport.is_closing(): + self._protocol.read_event.clear() + await self._protocol.read_event.wait() + + try: + packet = self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from None + + return packet[0] + + async def send(self, item: bytes) -> None: + with self._send_guard: + await AsyncIOBackend.checkpoint() + await self._protocol.write_event.wait() + if self._closed: + raise ClosedResourceError + elif self._transport.is_closing(): + raise BrokenResourceError + else: + self._transport.sendto(item) + + +class UNIXDatagramSocket(_RawSocketMixin, abc.UNIXDatagramSocket): + async def receive(self) -> UNIXDatagramPacketType: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + data = self._raw_socket.recvfrom(65536) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return data + + async def send(self, item: UNIXDatagramPacketType) -> None: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._send_guard: + while True: + try: + self._raw_socket.sendto(*item) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return + + +class ConnectedUNIXDatagramSocket(_RawSocketMixin, abc.ConnectedUNIXDatagramSocket): + async def receive(self) -> bytes: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + data = self._raw_socket.recv(65536) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return data + + async def send(self, item: bytes) -> None: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._send_guard: + while True: + try: + self._raw_socket.send(item) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return + + +_read_events: RunVar[dict[int, asyncio.Event]] = RunVar("read_events") +_write_events: RunVar[dict[int, asyncio.Event]] = RunVar("write_events") + + +# +# Synchronization +# + + +class Event(BaseEvent): + def __new__(cls) -> Event: + return object.__new__(cls) + + def __init__(self) -> None: + self._event = asyncio.Event() + + def set(self) -> None: + self._event.set() + + def is_set(self) -> bool: + return self._event.is_set() + + async def wait(self) -> None: + if self.is_set(): + await AsyncIOBackend.checkpoint() + else: + await self._event.wait() + + def statistics(self) -> EventStatistics: + return EventStatistics(len(self._event._waiters)) + + +class Lock(BaseLock): + def __new__(cls, *, fast_acquire: bool = False) -> Lock: + return object.__new__(cls) + + def __init__(self, *, fast_acquire: bool = False) -> None: + self._fast_acquire = fast_acquire + self._owner_task: asyncio.Task | None = None + self._waiters: deque[tuple[asyncio.Task, asyncio.Future]] = deque() + + async def acquire(self) -> None: + task = cast(asyncio.Task, current_task()) + if self._owner_task is None and not self._waiters: + await 
AsyncIOBackend.checkpoint_if_cancelled() + self._owner_task = task + + # Unless on the "fast path", yield control of the event loop so that other + # tasks can run too + if not self._fast_acquire: + try: + await AsyncIOBackend.cancel_shielded_checkpoint() + except CancelledError: + self.release() + raise + + return + + if self._owner_task == task: + raise RuntimeError("Attempted to acquire an already held Lock") + + fut: asyncio.Future[None] = asyncio.Future() + item = task, fut + self._waiters.append(item) + try: + await fut + except CancelledError: + self._waiters.remove(item) + if self._owner_task is task: + self.release() + + raise + + self._waiters.remove(item) + + def acquire_nowait(self) -> None: + task = cast(asyncio.Task, current_task()) + if self._owner_task is None and not self._waiters: + self._owner_task = task + return + + if self._owner_task is task: + raise RuntimeError("Attempted to acquire an already held Lock") + + raise WouldBlock + + def locked(self) -> bool: + return self._owner_task is not None + + def release(self) -> None: + if self._owner_task != current_task(): + raise RuntimeError("The current task is not holding this lock") + + for task, fut in self._waiters: + if not fut.cancelled(): + self._owner_task = task + fut.set_result(None) + return + + self._owner_task = None + + def statistics(self) -> LockStatistics: + task_info = AsyncIOTaskInfo(self._owner_task) if self._owner_task else None + return LockStatistics(self.locked(), task_info, len(self._waiters)) + + +class Semaphore(BaseSemaphore): + def __new__( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> Semaphore: + return object.__new__(cls) + + def __init__( + self, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ): + super().__init__(initial_value, max_value=max_value) + self._value = initial_value + self._max_value = max_value + self._fast_acquire = fast_acquire + self._waiters: deque[asyncio.Future[None]] = deque() + + async def acquire(self) -> None: + if self._value > 0 and not self._waiters: + await AsyncIOBackend.checkpoint_if_cancelled() + self._value -= 1 + + # Unless on the "fast path", yield control of the event loop so that other + # tasks can run too + if not self._fast_acquire: + try: + await AsyncIOBackend.cancel_shielded_checkpoint() + except CancelledError: + self.release() + raise + + return + + fut: asyncio.Future[None] = asyncio.Future() + self._waiters.append(fut) + try: + await fut + except CancelledError: + try: + self._waiters.remove(fut) + except ValueError: + self.release() + + raise + + def acquire_nowait(self) -> None: + if self._value == 0: + raise WouldBlock + + self._value -= 1 + + def release(self) -> None: + if self._max_value is not None and self._value == self._max_value: + raise ValueError("semaphore released too many times") + + for fut in self._waiters: + if not fut.cancelled(): + fut.set_result(None) + self._waiters.remove(fut) + return + + self._value += 1 + + @property + def value(self) -> int: + return self._value + + @property + def max_value(self) -> int | None: + return self._max_value + + def statistics(self) -> SemaphoreStatistics: + return SemaphoreStatistics(len(self._waiters)) + + +class CapacityLimiter(BaseCapacityLimiter): + _total_tokens: float = 0 + + def __new__(cls, total_tokens: float) -> CapacityLimiter: + return object.__new__(cls) + + def __init__(self, total_tokens: float): + self._borrowers: set[Any] = set() + self._wait_queue: OrderedDict[Any, 
asyncio.Event] = OrderedDict() + self.total_tokens = total_tokens + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + @property + def total_tokens(self) -> float: + return self._total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + if not isinstance(value, int) and not math.isinf(value): + raise TypeError("total_tokens must be an int or math.inf") + if value < 1: + raise ValueError("total_tokens must be >= 1") + + waiters_to_notify = max(value - self._total_tokens, 0) + self._total_tokens = value + + # Notify waiting tasks that they have acquired the limiter + while self._wait_queue and waiters_to_notify: + event = self._wait_queue.popitem(last=False)[1] + event.set() + waiters_to_notify -= 1 + + @property + def borrowed_tokens(self) -> int: + return len(self._borrowers) + + @property + def available_tokens(self) -> float: + return self._total_tokens - len(self._borrowers) + + def acquire_nowait(self) -> None: + self.acquire_on_behalf_of_nowait(current_task()) + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + if borrower in self._borrowers: + raise RuntimeError( + "this borrower is already holding one of this CapacityLimiter's tokens" + ) + + if self._wait_queue or len(self._borrowers) >= self._total_tokens: + raise WouldBlock + + self._borrowers.add(borrower) + + async def acquire(self) -> None: + return await self.acquire_on_behalf_of(current_task()) + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await AsyncIOBackend.checkpoint_if_cancelled() + try: + self.acquire_on_behalf_of_nowait(borrower) + except WouldBlock: + event = asyncio.Event() + self._wait_queue[borrower] = event + try: + await event.wait() + except BaseException: + self._wait_queue.pop(borrower, None) + raise + + self._borrowers.add(borrower) + else: + try: + await AsyncIOBackend.cancel_shielded_checkpoint() + except BaseException: + self.release() + raise + + def release(self) -> None: + self.release_on_behalf_of(current_task()) + + def release_on_behalf_of(self, borrower: object) -> None: + try: + self._borrowers.remove(borrower) + except KeyError: + raise RuntimeError( + "this borrower isn't holding any of this CapacityLimiter's tokens" + ) from None + + # Notify the next task in line if this limiter has free capacity now + if self._wait_queue and len(self._borrowers) < self._total_tokens: + event = self._wait_queue.popitem(last=False)[1] + event.set() + + def statistics(self) -> CapacityLimiterStatistics: + return CapacityLimiterStatistics( + self.borrowed_tokens, + self.total_tokens, + tuple(self._borrowers), + len(self._wait_queue), + ) + + +_default_thread_limiter: RunVar[CapacityLimiter] = RunVar("_default_thread_limiter") + + +# +# Operating system signals +# + + +class _SignalReceiver: + def __init__(self, signals: tuple[Signals, ...]): + self._signals = signals + self._loop = get_running_loop() + self._signal_queue: deque[Signals] = deque() + self._future: asyncio.Future = asyncio.Future() + self._handled_signals: set[Signals] = set() + + def _deliver(self, signum: Signals) -> None: + self._signal_queue.append(signum) + if not self._future.done(): + self._future.set_result(None) + + def __enter__(self) -> _SignalReceiver: + for sig in set(self._signals): + self._loop.add_signal_handler(sig, self._deliver, sig) + self._handled_signals.add(sig) + + return 
self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + for sig in self._handled_signals: + self._loop.remove_signal_handler(sig) + + def __aiter__(self) -> _SignalReceiver: + return self + + async def __anext__(self) -> Signals: + await AsyncIOBackend.checkpoint() + if not self._signal_queue: + self._future = asyncio.Future() + await self._future + + return self._signal_queue.popleft() + + +# +# Testing and debugging +# + + +class AsyncIOTaskInfo(TaskInfo): + def __init__(self, task: asyncio.Task): + task_state = _task_states.get(task) + if task_state is None: + parent_id = None + else: + parent_id = task_state.parent_id + + coro = task.get_coro() + assert coro is not None, "created TaskInfo from a completed Task" + super().__init__(id(task), parent_id, task.get_name(), coro) + self._task = weakref.ref(task) + + def has_pending_cancellation(self) -> bool: + if not (task := self._task()): + # If the task isn't around anymore, it won't have a pending cancellation + return False + + if task._must_cancel: # type: ignore[attr-defined] + return True + elif ( + isinstance(task._fut_waiter, asyncio.Future) # type: ignore[attr-defined] + and task._fut_waiter.cancelled() # type: ignore[attr-defined] + ): + return True + + if task_state := _task_states.get(task): + if cancel_scope := task_state.cancel_scope: + return cancel_scope._effectively_cancelled + + return False + + +class TestRunner(abc.TestRunner): + _send_stream: MemoryObjectSendStream[tuple[Awaitable[Any], asyncio.Future[Any]]] + + def __init__( + self, + *, + debug: bool | None = None, + use_uvloop: bool = False, + loop_factory: Callable[[], AbstractEventLoop] | None = None, + ) -> None: + if use_uvloop and loop_factory is None: + import uvloop + + loop_factory = uvloop.new_event_loop + + self._runner = Runner(debug=debug, loop_factory=loop_factory) + self._exceptions: list[BaseException] = [] + self._runner_task: asyncio.Task | None = None + + def __enter__(self) -> TestRunner: + self._runner.__enter__() + self.get_loop().set_exception_handler(self._exception_handler) + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self._runner.__exit__(exc_type, exc_val, exc_tb) + + def get_loop(self) -> AbstractEventLoop: + return self._runner.get_loop() + + def _exception_handler( + self, loop: asyncio.AbstractEventLoop, context: dict[str, Any] + ) -> None: + if isinstance(context.get("exception"), Exception): + self._exceptions.append(context["exception"]) + else: + loop.default_exception_handler(context) + + def _raise_async_exceptions(self) -> None: + # Re-raise any exceptions raised in asynchronous callbacks + if self._exceptions: + exceptions, self._exceptions = self._exceptions, [] + if len(exceptions) == 1: + raise exceptions[0] + elif exceptions: + raise BaseExceptionGroup( + "Multiple exceptions occurred in asynchronous callbacks", exceptions + ) + + async def _run_tests_and_fixtures( + self, + receive_stream: MemoryObjectReceiveStream[ + tuple[Awaitable[T_Retval], asyncio.Future[T_Retval]] + ], + ) -> None: + from _pytest.outcomes import OutcomeException + + with receive_stream, self._send_stream: + async for coro, future in receive_stream: + try: + retval = await coro + except CancelledError as exc: + if not future.cancelled(): + future.cancel(*exc.args) + + raise + except BaseException as exc: + if not future.cancelled(): + 
future.set_exception(exc) + + if not isinstance(exc, (Exception, OutcomeException)): + raise + else: + if not future.cancelled(): + future.set_result(retval) + + async def _call_in_runner_task( + self, + func: Callable[P, Awaitable[T_Retval]], + *args: P.args, + **kwargs: P.kwargs, + ) -> T_Retval: + if not self._runner_task: + self._send_stream, receive_stream = create_memory_object_stream[ + tuple[Awaitable[Any], asyncio.Future] + ](1) + self._runner_task = self.get_loop().create_task( + self._run_tests_and_fixtures(receive_stream) + ) + + coro = func(*args, **kwargs) + future: asyncio.Future[T_Retval] = self.get_loop().create_future() + self._send_stream.send_nowait((coro, future)) + return await future + + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], + kwargs: dict[str, Any], + ) -> Iterable[T_Retval]: + asyncgen = fixture_func(**kwargs) + fixturevalue: T_Retval = self.get_loop().run_until_complete( + self._call_in_runner_task(asyncgen.asend, None) + ) + self._raise_async_exceptions() + + yield fixturevalue + + try: + self.get_loop().run_until_complete( + self._call_in_runner_task(asyncgen.asend, None) + ) + except StopAsyncIteration: + self._raise_async_exceptions() + else: + self.get_loop().run_until_complete(asyncgen.aclose()) + raise RuntimeError("Async generator fixture did not stop") + + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], + kwargs: dict[str, Any], + ) -> T_Retval: + retval = self.get_loop().run_until_complete( + self._call_in_runner_task(fixture_func, **kwargs) + ) + self._raise_async_exceptions() + return retval + + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] + ) -> None: + try: + self.get_loop().run_until_complete( + self._call_in_runner_task(test_func, **kwargs) + ) + except Exception as exc: + self._exceptions.append(exc) + + self._raise_async_exceptions() + + +class AsyncIOBackend(AsyncBackend): + @classmethod + def run( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + options: dict[str, Any], + ) -> T_Retval: + @wraps(func) + async def wrapper() -> T_Retval: + task = cast(asyncio.Task, current_task()) + task.set_name(get_callable_name(func)) + _task_states[task] = TaskState(None, None) + + try: + return await func(*args) + finally: + del _task_states[task] + + debug = options.get("debug", None) + loop_factory = options.get("loop_factory", None) + if loop_factory is None and options.get("use_uvloop", False): + import uvloop + + loop_factory = uvloop.new_event_loop + + with Runner(debug=debug, loop_factory=loop_factory) as runner: + return runner.run(wrapper()) + + @classmethod + def current_token(cls) -> object: + return get_running_loop() + + @classmethod + def current_time(cls) -> float: + return get_running_loop().time() + + @classmethod + def cancelled_exception_class(cls) -> type[BaseException]: + return CancelledError + + @classmethod + async def checkpoint(cls) -> None: + await sleep(0) + + @classmethod + async def checkpoint_if_cancelled(cls) -> None: + task = current_task() + if task is None: + return + + try: + cancel_scope = _task_states[task].cancel_scope + except KeyError: + return + + while cancel_scope: + if cancel_scope.cancel_called: + await sleep(0) + elif cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + @classmethod + async def cancel_shielded_checkpoint(cls) -> None: + with 
CancelScope(shield=True): + await sleep(0) + + @classmethod + async def sleep(cls, delay: float) -> None: + await sleep(delay) + + @classmethod + def create_cancel_scope( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + return CancelScope(deadline=deadline, shield=shield) + + @classmethod + def current_effective_deadline(cls) -> float: + if (task := current_task()) is None: + return math.inf + + try: + cancel_scope = _task_states[task].cancel_scope + except KeyError: + return math.inf + + deadline = math.inf + while cancel_scope: + deadline = min(deadline, cancel_scope.deadline) + if cancel_scope._cancel_called: + deadline = -math.inf + break + elif cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + return deadline + + @classmethod + def create_task_group(cls) -> abc.TaskGroup: + return TaskGroup() + + @classmethod + def create_event(cls) -> abc.Event: + return Event() + + @classmethod + def create_lock(cls, *, fast_acquire: bool) -> abc.Lock: + return Lock(fast_acquire=fast_acquire) + + @classmethod + def create_semaphore( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> abc.Semaphore: + return Semaphore(initial_value, max_value=max_value, fast_acquire=fast_acquire) + + @classmethod + def create_capacity_limiter(cls, total_tokens: float) -> abc.CapacityLimiter: + return CapacityLimiter(total_tokens) + + @classmethod + async def run_sync_in_worker_thread( # type: ignore[return] + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + abandon_on_cancel: bool = False, + limiter: abc.CapacityLimiter | None = None, + ) -> T_Retval: + await cls.checkpoint() + + # If this is the first run in this event loop thread, set up the necessary + # variables + try: + idle_workers = _threadpool_idle_workers.get() + workers = _threadpool_workers.get() + except LookupError: + idle_workers = deque() + workers = set() + _threadpool_idle_workers.set(idle_workers) + _threadpool_workers.set(workers) + + async with limiter or cls.current_default_thread_limiter(): + with CancelScope(shield=not abandon_on_cancel) as scope: + future = asyncio.Future[T_Retval]() + root_task = find_root_task() + if not idle_workers: + worker = WorkerThread(root_task, workers, idle_workers) + worker.start() + workers.add(worker) + root_task.add_done_callback( + worker.stop, context=contextvars.Context() + ) + else: + worker = idle_workers.pop() + + # Prune any other workers that have been idle for MAX_IDLE_TIME + # seconds or longer + now = cls.current_time() + while idle_workers: + if ( + now - idle_workers[0].idle_since + < WorkerThread.MAX_IDLE_TIME + ): + break + + expired_worker = idle_workers.popleft() + expired_worker.root_task.remove_done_callback( + expired_worker.stop + ) + expired_worker.stop() + + context = copy_context() + context.run(sniffio.current_async_library_cvar.set, None) + if abandon_on_cancel or scope._parent_scope is None: + worker_scope = scope + else: + worker_scope = scope._parent_scope + + worker.queue.put_nowait((context, func, args, future, worker_scope)) + return await future + + @classmethod + def check_cancelled(cls) -> None: + scope: CancelScope | None = threadlocals.current_cancel_scope + while scope is not None: + if scope.cancel_called: + raise CancelledError(f"Cancelled by cancel scope {id(scope):x}") + + if scope.shield: + return + + scope = scope._parent_scope + + @classmethod + def run_async_from_thread( + cls, + func: 
Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + async def task_wrapper(scope: CancelScope) -> T_Retval: + __tracebackhide__ = True + task = cast(asyncio.Task, current_task()) + _task_states[task] = TaskState(None, scope) + scope._tasks.add(task) + try: + return await func(*args) + except CancelledError as exc: + raise concurrent.futures.CancelledError(str(exc)) from None + finally: + scope._tasks.discard(task) + + loop = cast(AbstractEventLoop, token) + context = copy_context() + context.run(sniffio.current_async_library_cvar.set, "asyncio") + wrapper = task_wrapper(threadlocals.current_cancel_scope) + f: concurrent.futures.Future[T_Retval] = context.run( + asyncio.run_coroutine_threadsafe, wrapper, loop + ) + return f.result() + + @classmethod + def run_sync_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + @wraps(func) + def wrapper() -> None: + try: + sniffio.current_async_library_cvar.set("asyncio") + f.set_result(func(*args)) + except BaseException as exc: + f.set_exception(exc) + if not isinstance(exc, Exception): + raise + + f: concurrent.futures.Future[T_Retval] = Future() + loop = cast(AbstractEventLoop, token) + loop.call_soon_threadsafe(wrapper) + return f.result() + + @classmethod + def create_blocking_portal(cls) -> abc.BlockingPortal: + return BlockingPortal() + + @classmethod + async def open_process( + cls, + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + **kwargs: Any, + ) -> Process: + await cls.checkpoint() + if isinstance(command, PathLike): + command = os.fspath(command) + + if isinstance(command, (str, bytes)): + process = await asyncio.create_subprocess_shell( + command, + stdin=stdin, + stdout=stdout, + stderr=stderr, + **kwargs, + ) + else: + process = await asyncio.create_subprocess_exec( + *command, + stdin=stdin, + stdout=stdout, + stderr=stderr, + **kwargs, + ) + + stdin_stream = StreamWriterWrapper(process.stdin) if process.stdin else None + stdout_stream = StreamReaderWrapper(process.stdout) if process.stdout else None + stderr_stream = StreamReaderWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + @classmethod + def setup_process_pool_exit_at_shutdown(cls, workers: set[abc.Process]) -> None: + create_task( + _shutdown_process_pool_on_exit(workers), + name="AnyIO process pool shutdown task", + ) + find_root_task().add_done_callback( + partial(_forcibly_shutdown_process_pool_on_exit, workers) # type:ignore[arg-type] + ) + + @classmethod + async def connect_tcp( + cls, host: str, port: int, local_address: IPSockAddrType | None = None + ) -> abc.SocketStream: + transport, protocol = cast( + tuple[asyncio.Transport, StreamProtocol], + await get_running_loop().create_connection( + StreamProtocol, host, port, local_addr=local_address + ), + ) + transport.pause_reading() + return SocketStream(transport, protocol) + + @classmethod + async def connect_unix(cls, path: str | bytes) -> abc.UNIXSocketStream: + await cls.checkpoint() + loop = get_running_loop() + raw_socket = socket.socket(socket.AF_UNIX) + raw_socket.setblocking(False) + while True: + try: + raw_socket.connect(path) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + loop.add_writer(raw_socket, f.set_result, None) + f.add_done_callback(lambda _: 
loop.remove_writer(raw_socket)) + await f + except BaseException: + raw_socket.close() + raise + else: + return UNIXSocketStream(raw_socket) + + @classmethod + def create_tcp_listener(cls, sock: socket.socket) -> SocketListener: + return TCPSocketListener(sock) + + @classmethod + def create_unix_listener(cls, sock: socket.socket) -> SocketListener: + return UNIXSocketListener(sock) + + @classmethod + async def create_udp_socket( + cls, + family: AddressFamily, + local_address: IPSockAddrType | None, + remote_address: IPSockAddrType | None, + reuse_port: bool, + ) -> UDPSocket | ConnectedUDPSocket: + transport, protocol = await get_running_loop().create_datagram_endpoint( + DatagramProtocol, + local_addr=local_address, + remote_addr=remote_address, + family=family, + reuse_port=reuse_port, + ) + if protocol.exception: + transport.close() + raise protocol.exception + + if not remote_address: + return UDPSocket(transport, protocol) + else: + return ConnectedUDPSocket(transport, protocol) + + @classmethod + async def create_unix_datagram_socket( # type: ignore[override] + cls, raw_socket: socket.socket, remote_path: str | bytes | None + ) -> abc.UNIXDatagramSocket | abc.ConnectedUNIXDatagramSocket: + await cls.checkpoint() + loop = get_running_loop() + + if remote_path: + while True: + try: + raw_socket.connect(remote_path) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + loop.add_writer(raw_socket, f.set_result, None) + f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) + await f + except BaseException: + raw_socket.close() + raise + else: + return ConnectedUNIXDatagramSocket(raw_socket) + else: + return UNIXDatagramSocket(raw_socket) + + @classmethod + async def getaddrinfo( + cls, + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, + ) -> Sequence[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes], + ] + ]: + return await get_running_loop().getaddrinfo( + host, port, family=family, type=type, proto=proto, flags=flags + ) + + @classmethod + async def getnameinfo( + cls, sockaddr: IPSockAddrType, flags: int = 0 + ) -> tuple[str, str]: + return await get_running_loop().getnameinfo(sockaddr, flags) + + @classmethod + async def wait_readable(cls, obj: FileDescriptorLike) -> None: + await cls.checkpoint() + try: + read_events = _read_events.get() + except LookupError: + read_events = {} + _read_events.set(read_events) + + if not isinstance(obj, int): + obj = obj.fileno() + + if read_events.get(obj): + raise BusyResourceError("reading from") + + loop = get_running_loop() + event = asyncio.Event() + try: + loop.add_reader(obj, event.set) + except NotImplementedError: + from anyio._core._asyncio_selector_thread import get_selector + + selector = get_selector() + selector.add_reader(obj, event.set) + remove_reader = selector.remove_reader + else: + remove_reader = loop.remove_reader + + read_events[obj] = event + try: + await event.wait() + finally: + remove_reader(obj) + del read_events[obj] + + @classmethod + async def wait_writable(cls, obj: FileDescriptorLike) -> None: + await cls.checkpoint() + try: + write_events = _write_events.get() + except LookupError: + write_events = {} + _write_events.set(write_events) + + if not isinstance(obj, int): + obj = obj.fileno() + + if write_events.get(obj): + raise BusyResourceError("writing to") + + loop = get_running_loop() + event = asyncio.Event() + 
try: + loop.add_writer(obj, event.set) + except NotImplementedError: + from anyio._core._asyncio_selector_thread import get_selector + + selector = get_selector() + selector.add_writer(obj, event.set) + remove_writer = selector.remove_writer + else: + remove_writer = loop.remove_writer + + write_events[obj] = event + try: + await event.wait() + finally: + del write_events[obj] + remove_writer(obj) + + @classmethod + def current_default_thread_limiter(cls) -> CapacityLimiter: + try: + return _default_thread_limiter.get() + except LookupError: + limiter = CapacityLimiter(40) + _default_thread_limiter.set(limiter) + return limiter + + @classmethod + def open_signal_receiver( + cls, *signals: Signals + ) -> AbstractContextManager[AsyncIterator[Signals]]: + return _SignalReceiver(signals) + + @classmethod + def get_current_task(cls) -> TaskInfo: + return AsyncIOTaskInfo(current_task()) # type: ignore[arg-type] + + @classmethod + def get_running_tasks(cls) -> Sequence[TaskInfo]: + return [AsyncIOTaskInfo(task) for task in all_tasks() if not task.done()] + + @classmethod + async def wait_all_tasks_blocked(cls) -> None: + await cls.checkpoint() + this_task = current_task() + while True: + for task in all_tasks(): + if task is this_task: + continue + + waiter = task._fut_waiter # type: ignore[attr-defined] + if waiter is None or waiter.done(): + await sleep(0.1) + break + else: + return + + @classmethod + def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: + return TestRunner(**options) + + +backend_class = AsyncIOBackend diff --git a/venv/lib/python3.11/site-packages/anyio/_backends/_trio.py b/venv/lib/python3.11/site-packages/anyio/_backends/_trio.py new file mode 100644 index 0000000..b80cc04 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/_backends/_trio.py @@ -0,0 +1,1334 @@ +from __future__ import annotations + +import array +import math +import os +import socket +import sys +import types +import weakref +from collections.abc import ( + AsyncGenerator, + AsyncIterator, + Awaitable, + Callable, + Collection, + Coroutine, + Iterable, + Sequence, +) +from concurrent.futures import Future +from contextlib import AbstractContextManager +from dataclasses import dataclass +from functools import partial +from io import IOBase +from os import PathLike +from signal import Signals +from socket import AddressFamily, SocketKind +from types import TracebackType +from typing import ( + IO, + TYPE_CHECKING, + Any, + Generic, + NoReturn, + TypeVar, + cast, + overload, +) + +import trio.from_thread +import trio.lowlevel +from outcome import Error, Outcome, Value +from trio.lowlevel import ( + current_root_task, + current_task, + wait_readable, + wait_writable, +) +from trio.socket import SocketType as TrioSocketType +from trio.to_thread import run_sync + +from .. 
import ( + CapacityLimiterStatistics, + EventStatistics, + LockStatistics, + TaskInfo, + WouldBlock, + abc, +) +from .._core._eventloop import claim_worker_thread +from .._core._exceptions import ( + BrokenResourceError, + BusyResourceError, + ClosedResourceError, + EndOfStream, +) +from .._core._sockets import convert_ipv6_sockaddr +from .._core._streams import create_memory_object_stream +from .._core._synchronization import ( + CapacityLimiter as BaseCapacityLimiter, +) +from .._core._synchronization import Event as BaseEvent +from .._core._synchronization import Lock as BaseLock +from .._core._synchronization import ( + ResourceGuard, + SemaphoreStatistics, +) +from .._core._synchronization import Semaphore as BaseSemaphore +from .._core._tasks import CancelScope as BaseCancelScope +from ..abc import IPSockAddrType, UDPPacketType, UNIXDatagramPacketType +from ..abc._eventloop import AsyncBackend, StrOrBytesPath +from ..streams.memory import MemoryObjectSendStream + +if TYPE_CHECKING: + from _typeshed import HasFileno + +if sys.version_info >= (3, 10): + from typing import ParamSpec +else: + from typing_extensions import ParamSpec + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from exceptiongroup import BaseExceptionGroup + from typing_extensions import TypeVarTuple, Unpack + +T = TypeVar("T") +T_Retval = TypeVar("T_Retval") +T_SockAddr = TypeVar("T_SockAddr", str, IPSockAddrType) +PosArgsT = TypeVarTuple("PosArgsT") +P = ParamSpec("P") + + +# +# Event loop +# + +RunVar = trio.lowlevel.RunVar + + +# +# Timeouts and cancellation +# + + +class CancelScope(BaseCancelScope): + def __new__( + cls, original: trio.CancelScope | None = None, **kwargs: object + ) -> CancelScope: + return object.__new__(cls) + + def __init__(self, original: trio.CancelScope | None = None, **kwargs: Any) -> None: + self.__original = original or trio.CancelScope(**kwargs) + + def __enter__(self) -> CancelScope: + self.__original.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + return self.__original.__exit__(exc_type, exc_val, exc_tb) + + def cancel(self) -> None: + self.__original.cancel() + + @property + def deadline(self) -> float: + return self.__original.deadline + + @deadline.setter + def deadline(self, value: float) -> None: + self.__original.deadline = value + + @property + def cancel_called(self) -> bool: + return self.__original.cancel_called + + @property + def cancelled_caught(self) -> bool: + return self.__original.cancelled_caught + + @property + def shield(self) -> bool: + return self.__original.shield + + @shield.setter + def shield(self, value: bool) -> None: + self.__original.shield = value + + +# +# Task groups +# + + +class TaskGroup(abc.TaskGroup): + def __init__(self) -> None: + self._active = False + self._nursery_manager = trio.open_nursery(strict_exception_groups=True) + self.cancel_scope = None # type: ignore[assignment] + + async def __aenter__(self) -> TaskGroup: + self._active = True + self._nursery = await self._nursery_manager.__aenter__() + self.cancel_scope = CancelScope(self._nursery.cancel_scope) + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + try: + # trio.Nursery.__exit__ returns bool; .open_nursery has wrong type + return await self._nursery_manager.__aexit__(exc_type, exc_val, exc_tb) # type: 
ignore[return-value] + except BaseExceptionGroup as exc: + if not exc.split(trio.Cancelled)[1]: + raise trio.Cancelled._create() from exc + + raise + finally: + del exc_val, exc_tb + self._active = False + + def start_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> None: + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." + ) + + self._nursery.start_soon(func, *args, name=name) + + async def start( + self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None + ) -> Any: + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." + ) + + return await self._nursery.start(func, *args, name=name) + + +# +# Threads +# + + +class BlockingPortal(abc.BlockingPortal): + def __new__(cls) -> BlockingPortal: + return object.__new__(cls) + + def __init__(self) -> None: + super().__init__() + self._token = trio.lowlevel.current_trio_token() + + def _spawn_task_from_thread( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + name: object, + future: Future[T_Retval], + ) -> None: + trio.from_thread.run_sync( + partial(self._task_group.start_soon, name=name), + self._call_func, + func, + args, + kwargs, + future, + trio_token=self._token, + ) + + +# +# Subprocesses +# + + +@dataclass(eq=False) +class ReceiveStreamWrapper(abc.ByteReceiveStream): + _stream: trio.abc.ReceiveStream + + async def receive(self, max_bytes: int | None = None) -> bytes: + try: + data = await self._stream.receive_some(max_bytes) + except trio.ClosedResourceError as exc: + raise ClosedResourceError from exc.__cause__ + except trio.BrokenResourceError as exc: + raise BrokenResourceError from exc.__cause__ + + if data: + return data + else: + raise EndOfStream + + async def aclose(self) -> None: + await self._stream.aclose() + + +@dataclass(eq=False) +class SendStreamWrapper(abc.ByteSendStream): + _stream: trio.abc.SendStream + + async def send(self, item: bytes) -> None: + try: + await self._stream.send_all(item) + except trio.ClosedResourceError as exc: + raise ClosedResourceError from exc.__cause__ + except trio.BrokenResourceError as exc: + raise BrokenResourceError from exc.__cause__ + + async def aclose(self) -> None: + await self._stream.aclose() + + +@dataclass(eq=False) +class Process(abc.Process): + _process: trio.Process + _stdin: abc.ByteSendStream | None + _stdout: abc.ByteReceiveStream | None + _stderr: abc.ByteReceiveStream | None + + async def aclose(self) -> None: + with CancelScope(shield=True): + if self._stdin: + await self._stdin.aclose() + if self._stdout: + await self._stdout.aclose() + if self._stderr: + await self._stderr.aclose() + + try: + await self.wait() + except BaseException: + self.kill() + with CancelScope(shield=True): + await self.wait() + raise + + async def wait(self) -> int: + return await self._process.wait() + + def terminate(self) -> None: + self._process.terminate() + + def kill(self) -> None: + self._process.kill() + + def send_signal(self, signal: Signals) -> None: + self._process.send_signal(signal) + + @property + def pid(self) -> int: + return self._process.pid + + @property + def returncode(self) -> int | None: + return self._process.returncode + + @property + def stdin(self) -> abc.ByteSendStream | None: + return self._stdin + + @property + def stdout(self) -> abc.ByteReceiveStream | None: + return 
self._stdout + + @property + def stderr(self) -> abc.ByteReceiveStream | None: + return self._stderr + + +class _ProcessPoolShutdownInstrument(trio.abc.Instrument): + def after_run(self) -> None: + super().after_run() + + +current_default_worker_process_limiter: trio.lowlevel.RunVar = RunVar( + "current_default_worker_process_limiter" +) + + +async def _shutdown_process_pool(workers: set[abc.Process]) -> None: + try: + await trio.sleep(math.inf) + except trio.Cancelled: + for process in workers: + if process.returncode is None: + process.kill() + + with CancelScope(shield=True): + for process in workers: + await process.aclose() + + +# +# Sockets and networking +# + + +class _TrioSocketMixin(Generic[T_SockAddr]): + def __init__(self, trio_socket: TrioSocketType) -> None: + self._trio_socket = trio_socket + self._closed = False + + def _check_closed(self) -> None: + if self._closed: + raise ClosedResourceError + if self._trio_socket.fileno() < 0: + raise BrokenResourceError + + @property + def _raw_socket(self) -> socket.socket: + return self._trio_socket._sock # type: ignore[attr-defined] + + async def aclose(self) -> None: + if self._trio_socket.fileno() >= 0: + self._closed = True + self._trio_socket.close() + + def _convert_socket_error(self, exc: BaseException) -> NoReturn: + if isinstance(exc, trio.ClosedResourceError): + raise ClosedResourceError from exc + elif self._trio_socket.fileno() < 0 and self._closed: + raise ClosedResourceError from None + elif isinstance(exc, OSError): + raise BrokenResourceError from exc + else: + raise exc + + +class SocketStream(_TrioSocketMixin, abc.SocketStream): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self, max_bytes: int = 65536) -> bytes: + with self._receive_guard: + try: + data = await self._trio_socket.recv(max_bytes) + except BaseException as exc: + self._convert_socket_error(exc) + + if data: + return data + else: + raise EndOfStream + + async def send(self, item: bytes) -> None: + with self._send_guard: + view = memoryview(item) + while view: + try: + bytes_sent = await self._trio_socket.send(view) + except BaseException as exc: + self._convert_socket_error(exc) + + view = view[bytes_sent:] + + async def send_eof(self) -> None: + self._trio_socket.shutdown(socket.SHUT_WR) + + +class UNIXSocketStream(SocketStream, abc.UNIXSocketStream): + async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: + if not isinstance(msglen, int) or msglen < 0: + raise ValueError("msglen must be a non-negative integer") + if not isinstance(maxfds, int) or maxfds < 1: + raise ValueError("maxfds must be a positive integer") + + fds = array.array("i") + await trio.lowlevel.checkpoint() + with self._receive_guard: + while True: + try: + message, ancdata, flags, addr = await self._trio_socket.recvmsg( + msglen, socket.CMSG_LEN(maxfds * fds.itemsize) + ) + except BaseException as exc: + self._convert_socket_error(exc) + else: + if not message and not ancdata: + raise EndOfStream + + break + + for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: + raise RuntimeError( + f"Received unexpected ancillary data; message = {message!r}, " + f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}" + ) + + fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) + + return message, list(fds) + + 
async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: + if not message: + raise ValueError("message must not be empty") + if not fds: + raise ValueError("fds must not be empty") + + filenos: list[int] = [] + for fd in fds: + if isinstance(fd, int): + filenos.append(fd) + elif isinstance(fd, IOBase): + filenos.append(fd.fileno()) + + fdarray = array.array("i", filenos) + await trio.lowlevel.checkpoint() + with self._send_guard: + while True: + try: + await self._trio_socket.sendmsg( + [message], + [ + ( + socket.SOL_SOCKET, + socket.SCM_RIGHTS, + fdarray, + ) + ], + ) + break + except BaseException as exc: + self._convert_socket_error(exc) + + +class TCPSocketListener(_TrioSocketMixin, abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + super().__init__(trio.socket.from_stdlib_socket(raw_socket)) + self._accept_guard = ResourceGuard("accepting connections from") + + async def accept(self) -> SocketStream: + with self._accept_guard: + try: + trio_socket, _addr = await self._trio_socket.accept() + except BaseException as exc: + self._convert_socket_error(exc) + + trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + return SocketStream(trio_socket) + + +class UNIXSocketListener(_TrioSocketMixin, abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + super().__init__(trio.socket.from_stdlib_socket(raw_socket)) + self._accept_guard = ResourceGuard("accepting connections from") + + async def accept(self) -> UNIXSocketStream: + with self._accept_guard: + try: + trio_socket, _addr = await self._trio_socket.accept() + except BaseException as exc: + self._convert_socket_error(exc) + + return UNIXSocketStream(trio_socket) + + +class UDPSocket(_TrioSocketMixin[IPSockAddrType], abc.UDPSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> tuple[bytes, IPSockAddrType]: + with self._receive_guard: + try: + data, addr = await self._trio_socket.recvfrom(65536) + return data, convert_ipv6_sockaddr(addr) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: UDPPacketType) -> None: + with self._send_guard: + try: + await self._trio_socket.sendto(*item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class ConnectedUDPSocket(_TrioSocketMixin[IPSockAddrType], abc.ConnectedUDPSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> bytes: + with self._receive_guard: + try: + return await self._trio_socket.recv(65536) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: bytes) -> None: + with self._send_guard: + try: + await self._trio_socket.send(item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class UNIXDatagramSocket(_TrioSocketMixin[str], abc.UNIXDatagramSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> UNIXDatagramPacketType: + with self._receive_guard: + try: + data, addr = await self._trio_socket.recvfrom(65536) + return data, addr + except BaseException as exc: + 
self._convert_socket_error(exc) + + async def send(self, item: UNIXDatagramPacketType) -> None: + with self._send_guard: + try: + await self._trio_socket.sendto(*item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class ConnectedUNIXDatagramSocket( + _TrioSocketMixin[str], abc.ConnectedUNIXDatagramSocket +): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> bytes: + with self._receive_guard: + try: + return await self._trio_socket.recv(65536) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: bytes) -> None: + with self._send_guard: + try: + await self._trio_socket.send(item) + except BaseException as exc: + self._convert_socket_error(exc) + + +# +# Synchronization +# + + +class Event(BaseEvent): + def __new__(cls) -> Event: + return object.__new__(cls) + + def __init__(self) -> None: + self.__original = trio.Event() + + def is_set(self) -> bool: + return self.__original.is_set() + + async def wait(self) -> None: + return await self.__original.wait() + + def statistics(self) -> EventStatistics: + orig_statistics = self.__original.statistics() + return EventStatistics(tasks_waiting=orig_statistics.tasks_waiting) + + def set(self) -> None: + self.__original.set() + + +class Lock(BaseLock): + def __new__(cls, *, fast_acquire: bool = False) -> Lock: + return object.__new__(cls) + + def __init__(self, *, fast_acquire: bool = False) -> None: + self._fast_acquire = fast_acquire + self.__original = trio.Lock() + + @staticmethod + def _convert_runtime_error_msg(exc: RuntimeError) -> None: + if exc.args == ("attempt to re-acquire an already held Lock",): + exc.args = ("Attempted to acquire an already held Lock",) + + async def acquire(self) -> None: + if not self._fast_acquire: + try: + await self.__original.acquire() + except RuntimeError as exc: + self._convert_runtime_error_msg(exc) + raise + + return + + # This is the "fast path" where we don't let other tasks run + await trio.lowlevel.checkpoint_if_cancelled() + try: + self.__original.acquire_nowait() + except trio.WouldBlock: + await self.__original._lot.park() + except RuntimeError as exc: + self._convert_runtime_error_msg(exc) + raise + + def acquire_nowait(self) -> None: + try: + self.__original.acquire_nowait() + except trio.WouldBlock: + raise WouldBlock from None + except RuntimeError as exc: + self._convert_runtime_error_msg(exc) + raise + + def locked(self) -> bool: + return self.__original.locked() + + def release(self) -> None: + self.__original.release() + + def statistics(self) -> LockStatistics: + orig_statistics = self.__original.statistics() + owner = TrioTaskInfo(orig_statistics.owner) if orig_statistics.owner else None + return LockStatistics( + orig_statistics.locked, owner, orig_statistics.tasks_waiting + ) + + +class Semaphore(BaseSemaphore): + def __new__( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> Semaphore: + return object.__new__(cls) + + def __init__( + self, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> None: + super().__init__(initial_value, max_value=max_value, fast_acquire=fast_acquire) + self.__original = trio.Semaphore(initial_value, max_value=max_value) + + async def acquire(self) -> None: + if not self._fast_acquire: + await self.__original.acquire() + return + + 
# This is the "fast path" where we don't let other tasks run + await trio.lowlevel.checkpoint_if_cancelled() + try: + self.__original.acquire_nowait() + except trio.WouldBlock: + await self.__original._lot.park() + + def acquire_nowait(self) -> None: + try: + self.__original.acquire_nowait() + except trio.WouldBlock: + raise WouldBlock from None + + @property + def max_value(self) -> int | None: + return self.__original.max_value + + @property + def value(self) -> int: + return self.__original.value + + def release(self) -> None: + self.__original.release() + + def statistics(self) -> SemaphoreStatistics: + orig_statistics = self.__original.statistics() + return SemaphoreStatistics(orig_statistics.tasks_waiting) + + +class CapacityLimiter(BaseCapacityLimiter): + def __new__( + cls, + total_tokens: float | None = None, + *, + original: trio.CapacityLimiter | None = None, + ) -> CapacityLimiter: + return object.__new__(cls) + + def __init__( + self, + total_tokens: float | None = None, + *, + original: trio.CapacityLimiter | None = None, + ) -> None: + if original is not None: + self.__original = original + else: + assert total_tokens is not None + self.__original = trio.CapacityLimiter(total_tokens) + + async def __aenter__(self) -> None: + return await self.__original.__aenter__() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + await self.__original.__aexit__(exc_type, exc_val, exc_tb) + + @property + def total_tokens(self) -> float: + return self.__original.total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + self.__original.total_tokens = value + + @property + def borrowed_tokens(self) -> int: + return self.__original.borrowed_tokens + + @property + def available_tokens(self) -> float: + return self.__original.available_tokens + + def acquire_nowait(self) -> None: + self.__original.acquire_nowait() + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + self.__original.acquire_on_behalf_of_nowait(borrower) + + async def acquire(self) -> None: + await self.__original.acquire() + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await self.__original.acquire_on_behalf_of(borrower) + + def release(self) -> None: + return self.__original.release() + + def release_on_behalf_of(self, borrower: object) -> None: + return self.__original.release_on_behalf_of(borrower) + + def statistics(self) -> CapacityLimiterStatistics: + orig = self.__original.statistics() + return CapacityLimiterStatistics( + borrowed_tokens=orig.borrowed_tokens, + total_tokens=orig.total_tokens, + borrowers=tuple(orig.borrowers), + tasks_waiting=orig.tasks_waiting, + ) + + +_capacity_limiter_wrapper: trio.lowlevel.RunVar = RunVar("_capacity_limiter_wrapper") + + +# +# Signal handling +# + + +class _SignalReceiver: + _iterator: AsyncIterator[int] + + def __init__(self, signals: tuple[Signals, ...]): + self._signals = signals + + def __enter__(self) -> _SignalReceiver: + self._cm = trio.open_signal_receiver(*self._signals) + self._iterator = self._cm.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + return self._cm.__exit__(exc_type, exc_val, exc_tb) + + def __aiter__(self) -> _SignalReceiver: + return self + + async def __anext__(self) -> Signals: + signum = await self._iterator.__anext__() + return Signals(signum) + + +# +# 
Testing and debugging +# + + +class TestRunner(abc.TestRunner): + def __init__(self, **options: Any) -> None: + from queue import Queue + + self._call_queue: Queue[Callable[[], object]] = Queue() + self._send_stream: MemoryObjectSendStream | None = None + self._options = options + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ) -> None: + if self._send_stream: + self._send_stream.close() + while self._send_stream is not None: + self._call_queue.get()() + + async def _run_tests_and_fixtures(self) -> None: + self._send_stream, receive_stream = create_memory_object_stream(1) + with receive_stream: + async for coro, outcome_holder in receive_stream: + try: + retval = await coro + except BaseException as exc: + outcome_holder.append(Error(exc)) + else: + outcome_holder.append(Value(retval)) + + def _main_task_finished(self, outcome: object) -> None: + self._send_stream = None + + def _call_in_runner_task( + self, + func: Callable[P, Awaitable[T_Retval]], + *args: P.args, + **kwargs: P.kwargs, + ) -> T_Retval: + if self._send_stream is None: + trio.lowlevel.start_guest_run( + self._run_tests_and_fixtures, + run_sync_soon_threadsafe=self._call_queue.put, + done_callback=self._main_task_finished, + **self._options, + ) + while self._send_stream is None: + self._call_queue.get()() + + outcome_holder: list[Outcome] = [] + self._send_stream.send_nowait((func(*args, **kwargs), outcome_holder)) + while not outcome_holder: + self._call_queue.get()() + + return outcome_holder[0].unwrap() + + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], + kwargs: dict[str, Any], + ) -> Iterable[T_Retval]: + asyncgen = fixture_func(**kwargs) + fixturevalue: T_Retval = self._call_in_runner_task(asyncgen.asend, None) + + yield fixturevalue + + try: + self._call_in_runner_task(asyncgen.asend, None) + except StopAsyncIteration: + pass + else: + self._call_in_runner_task(asyncgen.aclose) + raise RuntimeError("Async generator fixture did not stop") + + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], + kwargs: dict[str, Any], + ) -> T_Retval: + return self._call_in_runner_task(fixture_func, **kwargs) + + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] + ) -> None: + self._call_in_runner_task(test_func, **kwargs) + + +class TrioTaskInfo(TaskInfo): + def __init__(self, task: trio.lowlevel.Task): + parent_id = None + if task.parent_nursery and task.parent_nursery.parent_task: + parent_id = id(task.parent_nursery.parent_task) + + super().__init__(id(task), parent_id, task.name, task.coro) + self._task = weakref.proxy(task) + + def has_pending_cancellation(self) -> bool: + try: + return self._task._cancel_status.effectively_cancelled + except ReferenceError: + # If the task is no longer around, it surely doesn't have a cancellation + # pending + return False + + +class TrioBackend(AsyncBackend): + @classmethod + def run( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + options: dict[str, Any], + ) -> T_Retval: + return trio.run(func, *args) + + @classmethod + def current_token(cls) -> object: + return trio.lowlevel.current_trio_token() + + @classmethod + def current_time(cls) -> float: + return trio.current_time() + + @classmethod + def cancelled_exception_class(cls) -> type[BaseException]: + return trio.Cancelled + + @classmethod + 
async def checkpoint(cls) -> None: + await trio.lowlevel.checkpoint() + + @classmethod + async def checkpoint_if_cancelled(cls) -> None: + await trio.lowlevel.checkpoint_if_cancelled() + + @classmethod + async def cancel_shielded_checkpoint(cls) -> None: + await trio.lowlevel.cancel_shielded_checkpoint() + + @classmethod + async def sleep(cls, delay: float) -> None: + await trio.sleep(delay) + + @classmethod + def create_cancel_scope( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> abc.CancelScope: + return CancelScope(deadline=deadline, shield=shield) + + @classmethod + def current_effective_deadline(cls) -> float: + return trio.current_effective_deadline() + + @classmethod + def create_task_group(cls) -> abc.TaskGroup: + return TaskGroup() + + @classmethod + def create_event(cls) -> abc.Event: + return Event() + + @classmethod + def create_lock(cls, *, fast_acquire: bool) -> Lock: + return Lock(fast_acquire=fast_acquire) + + @classmethod + def create_semaphore( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> abc.Semaphore: + return Semaphore(initial_value, max_value=max_value, fast_acquire=fast_acquire) + + @classmethod + def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter: + return CapacityLimiter(total_tokens) + + @classmethod + async def run_sync_in_worker_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + abandon_on_cancel: bool = False, + limiter: abc.CapacityLimiter | None = None, + ) -> T_Retval: + def wrapper() -> T_Retval: + with claim_worker_thread(TrioBackend, token): + return func(*args) + + token = TrioBackend.current_token() + return await run_sync( + wrapper, + abandon_on_cancel=abandon_on_cancel, + limiter=cast(trio.CapacityLimiter, limiter), + ) + + @classmethod + def check_cancelled(cls) -> None: + trio.from_thread.check_cancelled() + + @classmethod + def run_async_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + return trio.from_thread.run(func, *args) + + @classmethod + def run_sync_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + return trio.from_thread.run_sync(func, *args) + + @classmethod + def create_blocking_portal(cls) -> abc.BlockingPortal: + return BlockingPortal() + + @classmethod + async def open_process( + cls, + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + **kwargs: Any, + ) -> Process: + def convert_item(item: StrOrBytesPath) -> str: + str_or_bytes = os.fspath(item) + if isinstance(str_or_bytes, str): + return str_or_bytes + else: + return os.fsdecode(str_or_bytes) + + if isinstance(command, (str, bytes, PathLike)): + process = await trio.lowlevel.open_process( + convert_item(command), + stdin=stdin, + stdout=stdout, + stderr=stderr, + shell=True, + **kwargs, + ) + else: + process = await trio.lowlevel.open_process( + [convert_item(item) for item in command], + stdin=stdin, + stdout=stdout, + stderr=stderr, + shell=False, + **kwargs, + ) + + stdin_stream = SendStreamWrapper(process.stdin) if process.stdin else None + stdout_stream = ReceiveStreamWrapper(process.stdout) if process.stdout else None + stderr_stream = ReceiveStreamWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, 
stdout_stream, stderr_stream) + + @classmethod + def setup_process_pool_exit_at_shutdown(cls, workers: set[abc.Process]) -> None: + trio.lowlevel.spawn_system_task(_shutdown_process_pool, workers) + + @classmethod + async def connect_tcp( + cls, host: str, port: int, local_address: IPSockAddrType | None = None + ) -> SocketStream: + family = socket.AF_INET6 if ":" in host else socket.AF_INET + trio_socket = trio.socket.socket(family) + trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + if local_address: + await trio_socket.bind(local_address) + + try: + await trio_socket.connect((host, port)) + except BaseException: + trio_socket.close() + raise + + return SocketStream(trio_socket) + + @classmethod + async def connect_unix(cls, path: str | bytes) -> abc.UNIXSocketStream: + trio_socket = trio.socket.socket(socket.AF_UNIX) + try: + await trio_socket.connect(path) + except BaseException: + trio_socket.close() + raise + + return UNIXSocketStream(trio_socket) + + @classmethod + def create_tcp_listener(cls, sock: socket.socket) -> abc.SocketListener: + return TCPSocketListener(sock) + + @classmethod + def create_unix_listener(cls, sock: socket.socket) -> abc.SocketListener: + return UNIXSocketListener(sock) + + @classmethod + async def create_udp_socket( + cls, + family: socket.AddressFamily, + local_address: IPSockAddrType | None, + remote_address: IPSockAddrType | None, + reuse_port: bool, + ) -> UDPSocket | ConnectedUDPSocket: + trio_socket = trio.socket.socket(family=family, type=socket.SOCK_DGRAM) + + if reuse_port: + trio_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + + if local_address: + await trio_socket.bind(local_address) + + if remote_address: + await trio_socket.connect(remote_address) + return ConnectedUDPSocket(trio_socket) + else: + return UDPSocket(trio_socket) + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket.socket, remote_path: None + ) -> abc.UNIXDatagramSocket: ... + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket.socket, remote_path: str | bytes + ) -> abc.ConnectedUNIXDatagramSocket: ... 
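Reviewer aside: applications never call these TrioBackend classmethods directly; anyio's public functions look up the backend class via get_async_backend() and dispatch to it. A minimal sketch of selecting this backend, assuming trio is installed (the coroutine body is illustrative only):

import anyio

async def main() -> None:
    await anyio.sleep(0.1)            # dispatches to TrioBackend.sleep()
    print(anyio.get_current_task())   # dispatches to TrioBackend.get_current_task()

anyio.run(main, backend="trio")       # loads this module and uses backend_class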
+ + @classmethod + async def create_unix_datagram_socket( + cls, raw_socket: socket.socket, remote_path: str | bytes | None + ) -> abc.UNIXDatagramSocket | abc.ConnectedUNIXDatagramSocket: + trio_socket = trio.socket.from_stdlib_socket(raw_socket) + + if remote_path: + await trio_socket.connect(remote_path) + return ConnectedUNIXDatagramSocket(trio_socket) + else: + return UNIXDatagramSocket(trio_socket) + + @classmethod + async def getaddrinfo( + cls, + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, + ) -> Sequence[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes], + ] + ]: + return await trio.socket.getaddrinfo(host, port, family, type, proto, flags) + + @classmethod + async def getnameinfo( + cls, sockaddr: IPSockAddrType, flags: int = 0 + ) -> tuple[str, str]: + return await trio.socket.getnameinfo(sockaddr, flags) + + @classmethod + async def wait_readable(cls, obj: HasFileno | int) -> None: + try: + await wait_readable(obj) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + raise BusyResourceError("reading from") from None + + @classmethod + async def wait_writable(cls, obj: HasFileno | int) -> None: + try: + await wait_writable(obj) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + raise BusyResourceError("writing to") from None + + @classmethod + def current_default_thread_limiter(cls) -> CapacityLimiter: + try: + return _capacity_limiter_wrapper.get() + except LookupError: + limiter = CapacityLimiter( + original=trio.to_thread.current_default_thread_limiter() + ) + _capacity_limiter_wrapper.set(limiter) + return limiter + + @classmethod + def open_signal_receiver( + cls, *signals: Signals + ) -> AbstractContextManager[AsyncIterator[Signals]]: + return _SignalReceiver(signals) + + @classmethod + def get_current_task(cls) -> TaskInfo: + task = current_task() + return TrioTaskInfo(task) + + @classmethod + def get_running_tasks(cls) -> Sequence[TaskInfo]: + root_task = current_root_task() + assert root_task + task_infos = [TrioTaskInfo(root_task)] + nurseries = root_task.child_nurseries + while nurseries: + new_nurseries: list[trio.Nursery] = [] + for nursery in nurseries: + for task in nursery.child_tasks: + task_infos.append(TrioTaskInfo(task)) + new_nurseries.extend(task.child_nurseries) + + nurseries = new_nurseries + + return task_infos + + @classmethod + async def wait_all_tasks_blocked(cls) -> None: + from trio.testing import wait_all_tasks_blocked + + await wait_all_tasks_blocked() + + @classmethod + def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: + return TestRunner(**options) + + +backend_class = TrioBackend diff --git a/venv/lib/python3.11/site-packages/anyio/_core/__init__.py b/venv/lib/python3.11/site-packages/anyio/_core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.11/site-packages/anyio/_core/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/_core/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..a13c16e Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/_core/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/venv/lib/python3.11/site-packages/anyio/_core/__pycache__/_tasks.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/_core/__pycache__/_tasks.cpython-311.pyc new file mode 100644 index 0000000..0ed0695 Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/_core/__pycache__/_tasks.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/_core/__pycache__/_tempfile.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/_core/__pycache__/_tempfile.cpython-311.pyc new file mode 100644 index 0000000..03c187d Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/_core/__pycache__/_tempfile.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/_core/__pycache__/_testing.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/_core/__pycache__/_testing.cpython-311.pyc new file mode 100644 index 0000000..9655be7 Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/_core/__pycache__/_testing.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/_core/__pycache__/_typedattr.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/_core/__pycache__/_typedattr.cpython-311.pyc new file mode 100644 index 0000000..f53bb55 Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/_core/__pycache__/_typedattr.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/_core/_asyncio_selector_thread.py b/venv/lib/python3.11/site-packages/anyio/_core/_asyncio_selector_thread.py new file mode 100644 index 0000000..9f35bae --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/_core/_asyncio_selector_thread.py @@ -0,0 +1,167 @@ +from __future__ import annotations + +import asyncio +import socket +import threading +from collections.abc import Callable +from selectors import EVENT_READ, EVENT_WRITE, DefaultSelector +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from _typeshed import FileDescriptorLike + +_selector_lock = threading.Lock() +_selector: Selector | None = None + + +class Selector: + def __init__(self) -> None: + self._thread = threading.Thread(target=self.run, name="AnyIO socket selector") + self._selector = DefaultSelector() + self._send, self._receive = socket.socketpair() + self._send.setblocking(False) + self._receive.setblocking(False) + # This somewhat reduces the amount of memory wasted queueing up data + # for wakeups. With these settings, maximum number of 1-byte sends + # before getting BlockingIOError: + # Linux 4.8: 6 + # macOS (darwin 15.5): 1 + # Windows 10: 525347 + # Windows you're weird. (And on Windows setting SNDBUF to 0 makes send + # blocking, even on non-blocking sockets, so don't do that.) + self._receive.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 1) + self._send.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1) + # On Windows this is a TCP socket so this might matter. On other + # platforms this fails b/c AF_UNIX sockets aren't actually TCP. 
+ try: + self._send.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + except OSError: + pass + + self._selector.register(self._receive, EVENT_READ) + self._closed = False + + def start(self) -> None: + self._thread.start() + threading._register_atexit(self._stop) # type: ignore[attr-defined] + + def _stop(self) -> None: + global _selector + self._closed = True + self._notify_self() + self._send.close() + self._thread.join() + self._selector.unregister(self._receive) + self._receive.close() + self._selector.close() + _selector = None + assert not self._selector.get_map(), ( + "selector still has registered file descriptors after shutdown" + ) + + def _notify_self(self) -> None: + try: + self._send.send(b"\x00") + except BlockingIOError: + pass + + def add_reader(self, fd: FileDescriptorLike, callback: Callable[[], Any]) -> None: + loop = asyncio.get_running_loop() + try: + key = self._selector.get_key(fd) + except KeyError: + self._selector.register(fd, EVENT_READ, {EVENT_READ: (loop, callback)}) + else: + if EVENT_READ in key.data: + raise ValueError( + "this file descriptor is already registered for reading" + ) + + key.data[EVENT_READ] = loop, callback + self._selector.modify(fd, key.events | EVENT_READ, key.data) + + self._notify_self() + + def add_writer(self, fd: FileDescriptorLike, callback: Callable[[], Any]) -> None: + loop = asyncio.get_running_loop() + try: + key = self._selector.get_key(fd) + except KeyError: + self._selector.register(fd, EVENT_WRITE, {EVENT_WRITE: (loop, callback)}) + else: + if EVENT_WRITE in key.data: + raise ValueError( + "this file descriptor is already registered for writing" + ) + + key.data[EVENT_WRITE] = loop, callback + self._selector.modify(fd, key.events | EVENT_WRITE, key.data) + + self._notify_self() + + def remove_reader(self, fd: FileDescriptorLike) -> bool: + try: + key = self._selector.get_key(fd) + except KeyError: + return False + + if new_events := key.events ^ EVENT_READ: + del key.data[EVENT_READ] + self._selector.modify(fd, new_events, key.data) + else: + self._selector.unregister(fd) + + return True + + def remove_writer(self, fd: FileDescriptorLike) -> bool: + try: + key = self._selector.get_key(fd) + except KeyError: + return False + + if new_events := key.events ^ EVENT_WRITE: + del key.data[EVENT_WRITE] + self._selector.modify(fd, new_events, key.data) + else: + self._selector.unregister(fd) + + return True + + def run(self) -> None: + while not self._closed: + for key, events in self._selector.select(): + if key.fileobj is self._receive: + try: + while self._receive.recv(4096): + pass + except BlockingIOError: + pass + + continue + + if events & EVENT_READ: + loop, callback = key.data[EVENT_READ] + self.remove_reader(key.fd) + try: + loop.call_soon_threadsafe(callback) + except RuntimeError: + pass # the loop was already closed + + if events & EVENT_WRITE: + loop, callback = key.data[EVENT_WRITE] + self.remove_writer(key.fd) + try: + loop.call_soon_threadsafe(callback) + except RuntimeError: + pass # the loop was already closed + + +def get_selector() -> Selector: + global _selector + + with _selector_lock: + if _selector is None: + _selector = Selector() + _selector.start() + + return _selector diff --git a/venv/lib/python3.11/site-packages/anyio/_core/_eventloop.py b/venv/lib/python3.11/site-packages/anyio/_core/_eventloop.py new file mode 100644 index 0000000..6dcb458 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/_core/_eventloop.py @@ -0,0 +1,166 @@ +from __future__ import annotations + +import math +import 
sys +import threading +from collections.abc import Awaitable, Callable, Generator +from contextlib import contextmanager +from importlib import import_module +from typing import TYPE_CHECKING, Any, TypeVar + +import sniffio + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +if TYPE_CHECKING: + from ..abc import AsyncBackend + +# This must be updated when new backends are introduced +BACKENDS = "asyncio", "trio" + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + +threadlocals = threading.local() +loaded_backends: dict[str, type[AsyncBackend]] = {} + + +def run( + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + *args: Unpack[PosArgsT], + backend: str = "asyncio", + backend_options: dict[str, Any] | None = None, +) -> T_Retval: + """ + Run the given coroutine function in an asynchronous event loop. + + The current thread must not be already running an event loop. + + :param func: a coroutine function + :param args: positional arguments to ``func`` + :param backend: name of the asynchronous event loop implementation – currently + either ``asyncio`` or ``trio`` + :param backend_options: keyword arguments to call the backend ``run()`` + implementation with (documented :ref:`here <backend options>`) + :return: the return value of the coroutine function + :raises RuntimeError: if an asynchronous event loop is already running in this + thread + :raises LookupError: if the named backend is not found + + """ + try: + asynclib_name = sniffio.current_async_library() + except sniffio.AsyncLibraryNotFoundError: + pass + else: + raise RuntimeError(f"Already running {asynclib_name} in this thread") + + try: + async_backend = get_async_backend(backend) + except ImportError as exc: + raise LookupError(f"No such backend: {backend}") from exc + + token = None + if sniffio.current_async_library_cvar.get(None) is None: + # Since we're in control of the event loop, we can cache the name of the async + # library + token = sniffio.current_async_library_cvar.set(backend) + + try: + backend_options = backend_options or {} + return async_backend.run(func, args, {}, backend_options) + finally: + if token: + sniffio.current_async_library_cvar.reset(token) + + +async def sleep(delay: float) -> None: + """ + Pause the current task for the specified duration. + + :param delay: the duration, in seconds + + """ + return await get_async_backend().sleep(delay) + + +async def sleep_forever() -> None: + """ + Pause the current task until it's cancelled. + + This is a shortcut for ``sleep(math.inf)``. + + .. versionadded:: 3.1 + + """ + await sleep(math.inf) + + +async def sleep_until(deadline: float) -> None: + """ + Pause the current task until the given time. + + :param deadline: the absolute time to wake up at (according to the internal + monotonic clock of the event loop) + + .. versionadded:: 3.1 + + """ + now = current_time() + await sleep(max(deadline - now, 0)) + + +def current_time() -> float: + """ + Return the current value of the event loop's internal clock. 
+ + :return: the clock value (seconds) + + """ + return get_async_backend().current_time() + + +def get_all_backends() -> tuple[str, ...]: + """Return a tuple of the names of all built-in backends.""" + return BACKENDS + + +def get_cancelled_exc_class() -> type[BaseException]: + """Return the current async library's cancellation exception class.""" + return get_async_backend().cancelled_exception_class() + + +# +# Private API +# + + +@contextmanager +def claim_worker_thread( + backend_class: type[AsyncBackend], token: object +) -> Generator[Any, None, None]: + threadlocals.current_async_backend = backend_class + threadlocals.current_token = token + try: + yield + finally: + del threadlocals.current_async_backend + del threadlocals.current_token + + +def get_async_backend(asynclib_name: str | None = None) -> type[AsyncBackend]: + if asynclib_name is None: + asynclib_name = sniffio.current_async_library() + + # We use our own dict instead of sys.modules to get the already imported back-end + # class because the appropriate modules in sys.modules could potentially be only + # partially initialized + try: + return loaded_backends[asynclib_name] + except KeyError: + module = import_module(f"anyio._backends._{asynclib_name}") + loaded_backends[asynclib_name] = module.backend_class + return module.backend_class diff --git a/venv/lib/python3.11/site-packages/anyio/_core/_exceptions.py b/venv/lib/python3.11/site-packages/anyio/_core/_exceptions.py new file mode 100644 index 0000000..16b9448 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/_core/_exceptions.py @@ -0,0 +1,126 @@ +from __future__ import annotations + +import sys +from collections.abc import Generator +from textwrap import dedent +from typing import Any + +if sys.version_info < (3, 11): + from exceptiongroup import BaseExceptionGroup + + +class BrokenResourceError(Exception): + """ + Raised when trying to use a resource that has been rendered unusable due to external + causes (e.g. a send stream whose peer has disconnected). + """ + + +class BrokenWorkerProcess(Exception): + """ + Raised by :meth:`~anyio.to_process.run_sync` if the worker process terminates abruptly or + otherwise misbehaves. + """ + + +class BrokenWorkerIntepreter(Exception): + """ + Raised by :meth:`~anyio.to_interpreter.run_sync` if an unexpected exception is + raised in the subinterpreter. + """ + + def __init__(self, excinfo: Any): + # This was adapted from concurrent.futures.interpreter.ExecutionFailed + msg = excinfo.formatted + if not msg: + if excinfo.type and excinfo.msg: + msg = f"{excinfo.type.__name__}: {excinfo.msg}" + else: + msg = excinfo.type.__name__ or excinfo.msg + + super().__init__(msg) + self.excinfo = excinfo + + def __str__(self) -> str: + try: + formatted = self.excinfo.errdisplay + except Exception: + return super().__str__() + else: + return dedent( + f""" + {super().__str__()} + + Uncaught in the interpreter: + + {formatted} + """.strip() + ) + + +class BusyResourceError(Exception): + """ + Raised when two tasks are trying to read from or write to the same resource + concurrently. + """ + + def __init__(self, action: str): + super().__init__(f"Another task is already {action} this resource") + + +class ClosedResourceError(Exception): + """Raised when trying to use a resource that has been closed.""" + + +class DelimiterNotFound(Exception): + """ + Raised during + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the + maximum number of bytes has been read without the delimiter being found. 
+ """ + + def __init__(self, max_bytes: int) -> None: + super().__init__( + f"The delimiter was not found among the first {max_bytes} bytes" + ) + + +class EndOfStream(Exception): + """ + Raised when trying to read from a stream that has been closed from the other end. + """ + + +class IncompleteRead(Exception): + """ + Raised during + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the + connection is closed before the requested amount of bytes has been read. + """ + + def __init__(self) -> None: + super().__init__( + "The stream was closed before the read operation could be completed" + ) + + +class TypedAttributeLookupError(LookupError): + """ + Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute + is not found and no default value has been given. + """ + + +class WouldBlock(Exception): + """Raised by ``X_nowait`` functions if ``X()`` would block.""" + + +def iterate_exceptions( + exception: BaseException, +) -> Generator[BaseException, None, None]: + if isinstance(exception, BaseExceptionGroup): + for exc in exception.exceptions: + yield from iterate_exceptions(exc) + else: + yield exception diff --git a/venv/lib/python3.11/site-packages/anyio/_core/_fileio.py b/venv/lib/python3.11/site-packages/anyio/_core/_fileio.py new file mode 100644 index 0000000..a0d6198 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/_core/_fileio.py @@ -0,0 +1,742 @@ +from __future__ import annotations + +import os +import pathlib +import sys +from collections.abc import ( + AsyncIterator, + Callable, + Iterable, + Iterator, + Sequence, +) +from dataclasses import dataclass +from functools import partial +from os import PathLike +from typing import ( + IO, + TYPE_CHECKING, + Any, + AnyStr, + ClassVar, + Final, + Generic, + overload, +) + +from .. import to_thread +from ..abc import AsyncResource + +if TYPE_CHECKING: + from types import ModuleType + + from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer +else: + ReadableBuffer = OpenBinaryMode = OpenTextMode = WriteableBuffer = object + + +class AsyncFile(AsyncResource, Generic[AnyStr]): + """ + An asynchronous file object. + + This class wraps a standard file object and provides async friendly versions of the + following blocking methods (where available on the original file object): + + * read + * read1 + * readline + * readlines + * readinto + * readinto1 + * write + * writelines + * truncate + * seek + * tell + * flush + + All other methods are directly passed through. + + This class supports the asynchronous context manager protocol which closes the + underlying file at the end of the context block. + + This class also supports asynchronous iteration:: + + async with await open_file(...) 
as f: + async for line in f: + print(line) + """ + + def __init__(self, fp: IO[AnyStr]) -> None: + self._fp: Any = fp + + def __getattr__(self, name: str) -> object: + return getattr(self._fp, name) + + @property + def wrapped(self) -> IO[AnyStr]: + """The wrapped file object.""" + return self._fp + + async def __aiter__(self) -> AsyncIterator[AnyStr]: + while True: + line = await self.readline() + if line: + yield line + else: + break + + async def aclose(self) -> None: + return await to_thread.run_sync(self._fp.close) + + async def read(self, size: int = -1) -> AnyStr: + return await to_thread.run_sync(self._fp.read, size) + + async def read1(self: AsyncFile[bytes], size: int = -1) -> bytes: + return await to_thread.run_sync(self._fp.read1, size) + + async def readline(self) -> AnyStr: + return await to_thread.run_sync(self._fp.readline) + + async def readlines(self) -> list[AnyStr]: + return await to_thread.run_sync(self._fp.readlines) + + async def readinto(self: AsyncFile[bytes], b: WriteableBuffer) -> int: + return await to_thread.run_sync(self._fp.readinto, b) + + async def readinto1(self: AsyncFile[bytes], b: WriteableBuffer) -> int: + return await to_thread.run_sync(self._fp.readinto1, b) + + @overload + async def write(self: AsyncFile[bytes], b: ReadableBuffer) -> int: ... + + @overload + async def write(self: AsyncFile[str], b: str) -> int: ... + + async def write(self, b: ReadableBuffer | str) -> int: + return await to_thread.run_sync(self._fp.write, b) + + @overload + async def writelines( + self: AsyncFile[bytes], lines: Iterable[ReadableBuffer] + ) -> None: ... + + @overload + async def writelines(self: AsyncFile[str], lines: Iterable[str]) -> None: ... + + async def writelines(self, lines: Iterable[ReadableBuffer] | Iterable[str]) -> None: + return await to_thread.run_sync(self._fp.writelines, lines) + + async def truncate(self, size: int | None = None) -> int: + return await to_thread.run_sync(self._fp.truncate, size) + + async def seek(self, offset: int, whence: int | None = os.SEEK_SET) -> int: + return await to_thread.run_sync(self._fp.seek, offset, whence) + + async def tell(self) -> int: + return await to_thread.run_sync(self._fp.tell) + + async def flush(self) -> None: + return await to_thread.run_sync(self._fp.flush) + + +@overload +async def open_file( + file: str | PathLike[str] | int, + mode: OpenBinaryMode, + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + closefd: bool = ..., + opener: Callable[[str, int], int] | None = ..., +) -> AsyncFile[bytes]: ... + + +@overload +async def open_file( + file: str | PathLike[str] | int, + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + closefd: bool = ..., + opener: Callable[[str, int], int] | None = ..., +) -> AsyncFile[str]: ... + + +async def open_file( + file: str | PathLike[str] | int, + mode: str = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + closefd: bool = True, + opener: Callable[[str, int], int] | None = None, +) -> AsyncFile[Any]: + """ + Open a file asynchronously. + + The arguments are exactly the same as for the builtin :func:`open`. 
+ + :return: an asynchronous file object + + """ + fp = await to_thread.run_sync( + open, file, mode, buffering, encoding, errors, newline, closefd, opener + ) + return AsyncFile(fp) + + +def wrap_file(file: IO[AnyStr]) -> AsyncFile[AnyStr]: + """ + Wrap an existing file as an asynchronous file. + + :param file: an existing file-like object + :return: an asynchronous file object + + """ + return AsyncFile(file) + + +@dataclass(eq=False) +class _PathIterator(AsyncIterator["Path"]): + iterator: Iterator[PathLike[str]] + + async def __anext__(self) -> Path: + nextval = await to_thread.run_sync( + next, self.iterator, None, abandon_on_cancel=True + ) + if nextval is None: + raise StopAsyncIteration from None + + return Path(nextval) + + +class Path: + """ + An asynchronous version of :class:`pathlib.Path`. + + This class cannot be substituted for :class:`pathlib.Path` or + :class:`pathlib.PurePath`, but it is compatible with the :class:`os.PathLike` + interface. + + It implements the Python 3.10 version of :class:`pathlib.Path` interface, except for + the deprecated :meth:`~pathlib.Path.link_to` method. + + Some methods may be unavailable or have limited functionality, based on the Python + version: + + * :meth:`~pathlib.Path.copy` (available on Python 3.14 or later) + * :meth:`~pathlib.Path.copy_into` (available on Python 3.14 or later) + * :meth:`~pathlib.Path.from_uri` (available on Python 3.13 or later) + * :meth:`~pathlib.PurePath.full_match` (available on Python 3.13 or later) + * :attr:`~pathlib.Path.info` (available on Python 3.14 or later) + * :meth:`~pathlib.Path.is_junction` (available on Python 3.12 or later) + * :meth:`~pathlib.PurePath.match` (the ``case_sensitive`` parameter is only + available on Python 3.13 or later) + * :meth:`~pathlib.Path.move` (available on Python 3.14 or later) + * :meth:`~pathlib.Path.move_into` (available on Python 3.14 or later) + * :meth:`~pathlib.PurePath.relative_to` (the ``walk_up`` parameter is only available + on Python 3.12 or later) + * :meth:`~pathlib.Path.walk` (available on Python 3.12 or later) + + Any methods that do disk I/O need to be awaited on. 
These methods are: + + * :meth:`~pathlib.Path.absolute` + * :meth:`~pathlib.Path.chmod` + * :meth:`~pathlib.Path.cwd` + * :meth:`~pathlib.Path.exists` + * :meth:`~pathlib.Path.expanduser` + * :meth:`~pathlib.Path.group` + * :meth:`~pathlib.Path.hardlink_to` + * :meth:`~pathlib.Path.home` + * :meth:`~pathlib.Path.is_block_device` + * :meth:`~pathlib.Path.is_char_device` + * :meth:`~pathlib.Path.is_dir` + * :meth:`~pathlib.Path.is_fifo` + * :meth:`~pathlib.Path.is_file` + * :meth:`~pathlib.Path.is_junction` + * :meth:`~pathlib.Path.is_mount` + * :meth:`~pathlib.Path.is_socket` + * :meth:`~pathlib.Path.is_symlink` + * :meth:`~pathlib.Path.lchmod` + * :meth:`~pathlib.Path.lstat` + * :meth:`~pathlib.Path.mkdir` + * :meth:`~pathlib.Path.open` + * :meth:`~pathlib.Path.owner` + * :meth:`~pathlib.Path.read_bytes` + * :meth:`~pathlib.Path.read_text` + * :meth:`~pathlib.Path.readlink` + * :meth:`~pathlib.Path.rename` + * :meth:`~pathlib.Path.replace` + * :meth:`~pathlib.Path.resolve` + * :meth:`~pathlib.Path.rmdir` + * :meth:`~pathlib.Path.samefile` + * :meth:`~pathlib.Path.stat` + * :meth:`~pathlib.Path.symlink_to` + * :meth:`~pathlib.Path.touch` + * :meth:`~pathlib.Path.unlink` + * :meth:`~pathlib.Path.walk` + * :meth:`~pathlib.Path.write_bytes` + * :meth:`~pathlib.Path.write_text` + + Additionally, the following methods return an async iterator yielding + :class:`~.Path` objects: + + * :meth:`~pathlib.Path.glob` + * :meth:`~pathlib.Path.iterdir` + * :meth:`~pathlib.Path.rglob` + """ + + __slots__ = "_path", "__weakref__" + + __weakref__: Any + + def __init__(self, *args: str | PathLike[str]) -> None: + self._path: Final[pathlib.Path] = pathlib.Path(*args) + + def __fspath__(self) -> str: + return self._path.__fspath__() + + def __str__(self) -> str: + return self._path.__str__() + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.as_posix()!r})" + + def __bytes__(self) -> bytes: + return self._path.__bytes__() + + def __hash__(self) -> int: + return self._path.__hash__() + + def __eq__(self, other: object) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__eq__(target) + + def __lt__(self, other: pathlib.PurePath | Path) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__lt__(target) + + def __le__(self, other: pathlib.PurePath | Path) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__le__(target) + + def __gt__(self, other: pathlib.PurePath | Path) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__gt__(target) + + def __ge__(self, other: pathlib.PurePath | Path) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__ge__(target) + + def __truediv__(self, other: str | PathLike[str]) -> Path: + return Path(self._path / other) + + def __rtruediv__(self, other: str | PathLike[str]) -> Path: + return Path(other) / self + + @property + def parts(self) -> tuple[str, ...]: + return self._path.parts + + @property + def drive(self) -> str: + return self._path.drive + + @property + def root(self) -> str: + return self._path.root + + @property + def anchor(self) -> str: + return self._path.anchor + + @property + def parents(self) -> Sequence[Path]: + return tuple(Path(p) for p in self._path.parents) + + @property + def parent(self) -> Path: + return Path(self._path.parent) + + @property + def name(self) -> str: + return self._path.name + + @property + def suffix(self) -> str: + return 
self._path.suffix + + @property + def suffixes(self) -> list[str]: + return self._path.suffixes + + @property + def stem(self) -> str: + return self._path.stem + + async def absolute(self) -> Path: + path = await to_thread.run_sync(self._path.absolute) + return Path(path) + + def as_posix(self) -> str: + return self._path.as_posix() + + def as_uri(self) -> str: + return self._path.as_uri() + + if sys.version_info >= (3, 13): + parser: ClassVar[ModuleType] = pathlib.Path.parser + + @classmethod + def from_uri(cls, uri: str) -> Path: + return Path(pathlib.Path.from_uri(uri)) + + def full_match( + self, path_pattern: str, *, case_sensitive: bool | None = None + ) -> bool: + return self._path.full_match(path_pattern, case_sensitive=case_sensitive) + + def match( + self, path_pattern: str, *, case_sensitive: bool | None = None + ) -> bool: + return self._path.match(path_pattern, case_sensitive=case_sensitive) + else: + + def match(self, path_pattern: str) -> bool: + return self._path.match(path_pattern) + + if sys.version_info >= (3, 14): + + @property + def info(self) -> Any: # TODO: add return type annotation when Typeshed gets it + return self._path.info + + async def copy( + self, + target: str | os.PathLike[str], + *, + follow_symlinks: bool = True, + dirs_exist_ok: bool = False, + preserve_metadata: bool = False, + ) -> Path: + func = partial( + self._path.copy, + follow_symlinks=follow_symlinks, + dirs_exist_ok=dirs_exist_ok, + preserve_metadata=preserve_metadata, + ) + return Path(await to_thread.run_sync(func, target)) + + async def copy_into( + self, + target_dir: str | os.PathLike[str], + *, + follow_symlinks: bool = True, + dirs_exist_ok: bool = False, + preserve_metadata: bool = False, + ) -> Path: + func = partial( + self._path.copy_into, + follow_symlinks=follow_symlinks, + dirs_exist_ok=dirs_exist_ok, + preserve_metadata=preserve_metadata, + ) + return Path(await to_thread.run_sync(func, target_dir)) + + async def move(self, target: str | os.PathLike[str]) -> Path: + # Upstream does not handle anyio.Path properly as a PathLike + target = pathlib.Path(target) + return Path(await to_thread.run_sync(self._path.move, target)) + + async def move_into( + self, + target_dir: str | os.PathLike[str], + ) -> Path: + return Path(await to_thread.run_sync(self._path.move_into, target_dir)) + + def is_relative_to(self, other: str | PathLike[str]) -> bool: + try: + self.relative_to(other) + return True + except ValueError: + return False + + async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: + func = partial(os.chmod, follow_symlinks=follow_symlinks) + return await to_thread.run_sync(func, self._path, mode) + + @classmethod + async def cwd(cls) -> Path: + path = await to_thread.run_sync(pathlib.Path.cwd) + return cls(path) + + async def exists(self) -> bool: + return await to_thread.run_sync(self._path.exists, abandon_on_cancel=True) + + async def expanduser(self) -> Path: + return Path( + await to_thread.run_sync(self._path.expanduser, abandon_on_cancel=True) + ) + + def glob(self, pattern: str) -> AsyncIterator[Path]: + gen = self._path.glob(pattern) + return _PathIterator(gen) + + async def group(self) -> str: + return await to_thread.run_sync(self._path.group, abandon_on_cancel=True) + + async def hardlink_to( + self, target: str | bytes | PathLike[str] | PathLike[bytes] + ) -> None: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(os.link, target, self) + + @classmethod + async def home(cls) -> Path: + home_path = await 
to_thread.run_sync(pathlib.Path.home) + return cls(home_path) + + def is_absolute(self) -> bool: + return self._path.is_absolute() + + async def is_block_device(self) -> bool: + return await to_thread.run_sync( + self._path.is_block_device, abandon_on_cancel=True + ) + + async def is_char_device(self) -> bool: + return await to_thread.run_sync( + self._path.is_char_device, abandon_on_cancel=True + ) + + async def is_dir(self) -> bool: + return await to_thread.run_sync(self._path.is_dir, abandon_on_cancel=True) + + async def is_fifo(self) -> bool: + return await to_thread.run_sync(self._path.is_fifo, abandon_on_cancel=True) + + async def is_file(self) -> bool: + return await to_thread.run_sync(self._path.is_file, abandon_on_cancel=True) + + if sys.version_info >= (3, 12): + + async def is_junction(self) -> bool: + return await to_thread.run_sync(self._path.is_junction) + + async def is_mount(self) -> bool: + return await to_thread.run_sync( + os.path.ismount, self._path, abandon_on_cancel=True + ) + + def is_reserved(self) -> bool: + return self._path.is_reserved() + + async def is_socket(self) -> bool: + return await to_thread.run_sync(self._path.is_socket, abandon_on_cancel=True) + + async def is_symlink(self) -> bool: + return await to_thread.run_sync(self._path.is_symlink, abandon_on_cancel=True) + + async def iterdir(self) -> AsyncIterator[Path]: + gen = ( + self._path.iterdir() + if sys.version_info < (3, 13) + else await to_thread.run_sync(self._path.iterdir, abandon_on_cancel=True) + ) + async for path in _PathIterator(gen): + yield path + + def joinpath(self, *args: str | PathLike[str]) -> Path: + return Path(self._path.joinpath(*args)) + + async def lchmod(self, mode: int) -> None: + await to_thread.run_sync(self._path.lchmod, mode) + + async def lstat(self) -> os.stat_result: + return await to_thread.run_sync(self._path.lstat, abandon_on_cancel=True) + + async def mkdir( + self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False + ) -> None: + await to_thread.run_sync(self._path.mkdir, mode, parents, exist_ok) + + @overload + async def open( + self, + mode: OpenBinaryMode, + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + ) -> AsyncFile[bytes]: ... + + @overload + async def open( + self, + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + ) -> AsyncFile[str]: ... 
+ + async def open( + self, + mode: str = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> AsyncFile[Any]: + fp = await to_thread.run_sync( + self._path.open, mode, buffering, encoding, errors, newline + ) + return AsyncFile(fp) + + async def owner(self) -> str: + return await to_thread.run_sync(self._path.owner, abandon_on_cancel=True) + + async def read_bytes(self) -> bytes: + return await to_thread.run_sync(self._path.read_bytes) + + async def read_text( + self, encoding: str | None = None, errors: str | None = None + ) -> str: + return await to_thread.run_sync(self._path.read_text, encoding, errors) + + if sys.version_info >= (3, 12): + + def relative_to( + self, *other: str | PathLike[str], walk_up: bool = False + ) -> Path: + return Path(self._path.relative_to(*other, walk_up=walk_up)) + + else: + + def relative_to(self, *other: str | PathLike[str]) -> Path: + return Path(self._path.relative_to(*other)) + + async def readlink(self) -> Path: + target = await to_thread.run_sync(os.readlink, self._path) + return Path(target) + + async def rename(self, target: str | pathlib.PurePath | Path) -> Path: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.rename, target) + return Path(target) + + async def replace(self, target: str | pathlib.PurePath | Path) -> Path: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.replace, target) + return Path(target) + + async def resolve(self, strict: bool = False) -> Path: + func = partial(self._path.resolve, strict=strict) + return Path(await to_thread.run_sync(func, abandon_on_cancel=True)) + + def rglob(self, pattern: str) -> AsyncIterator[Path]: + gen = self._path.rglob(pattern) + return _PathIterator(gen) + + async def rmdir(self) -> None: + await to_thread.run_sync(self._path.rmdir) + + async def samefile(self, other_path: str | PathLike[str]) -> bool: + if isinstance(other_path, Path): + other_path = other_path._path + + return await to_thread.run_sync( + self._path.samefile, other_path, abandon_on_cancel=True + ) + + async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result: + func = partial(os.stat, follow_symlinks=follow_symlinks) + return await to_thread.run_sync(func, self._path, abandon_on_cancel=True) + + async def symlink_to( + self, + target: str | bytes | PathLike[str] | PathLike[bytes], + target_is_directory: bool = False, + ) -> None: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.symlink_to, target, target_is_directory) + + async def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None: + await to_thread.run_sync(self._path.touch, mode, exist_ok) + + async def unlink(self, missing_ok: bool = False) -> None: + try: + await to_thread.run_sync(self._path.unlink) + except FileNotFoundError: + if not missing_ok: + raise + + if sys.version_info >= (3, 12): + + async def walk( + self, + top_down: bool = True, + on_error: Callable[[OSError], object] | None = None, + follow_symlinks: bool = False, + ) -> AsyncIterator[tuple[Path, list[str], list[str]]]: + def get_next_value() -> tuple[pathlib.Path, list[str], list[str]] | None: + try: + return next(gen) + except StopIteration: + return None + + gen = self._path.walk(top_down, on_error, follow_symlinks) + while True: + value = await to_thread.run_sync(get_next_value) + if value is None: + return + + root, dirs, paths = value + yield Path(root), dirs, paths + + 
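Reviewer aside: a short usage sketch of this async Path wrapper (file name hypothetical). Each awaited method above offloads the corresponding pathlib call to a worker thread through to_thread.run_sync():

import anyio
from anyio import Path

async def main() -> None:
    path = Path("notes.txt")                  # hypothetical file
    await path.write_text("hello")            # pathlib write runs in a thread
    print(await path.read_text())
    async for child in Path(".").iterdir():   # async iteration over entries
        print(child)

anyio.run(main)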
def with_name(self, name: str) -> Path: + return Path(self._path.with_name(name)) + + def with_stem(self, stem: str) -> Path: + return Path(self._path.with_name(stem + self._path.suffix)) + + def with_suffix(self, suffix: str) -> Path: + return Path(self._path.with_suffix(suffix)) + + def with_segments(self, *pathsegments: str | PathLike[str]) -> Path: + return Path(*pathsegments) + + async def write_bytes(self, data: bytes) -> int: + return await to_thread.run_sync(self._path.write_bytes, data) + + async def write_text( + self, + data: str, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> int: + # Path.write_text() does not support the "newline" parameter before Python 3.10 + def sync_write_text() -> int: + with self._path.open( + "w", encoding=encoding, errors=errors, newline=newline + ) as fp: + return fp.write(data) + + return await to_thread.run_sync(sync_write_text) + + +PathLike.register(Path) diff --git a/venv/lib/python3.11/site-packages/anyio/_core/_resources.py b/venv/lib/python3.11/site-packages/anyio/_core/_resources.py new file mode 100644 index 0000000..b9a5344 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/_core/_resources.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +from ..abc import AsyncResource +from ._tasks import CancelScope + + +async def aclose_forcefully(resource: AsyncResource) -> None: + """ + Close an asynchronous resource in a cancelled scope. + + Doing this closes the resource without waiting on anything. + + :param resource: the resource to close + + """ + with CancelScope() as scope: + scope.cancel() + await resource.aclose() diff --git a/venv/lib/python3.11/site-packages/anyio/_core/_signals.py b/venv/lib/python3.11/site-packages/anyio/_core/_signals.py new file mode 100644 index 0000000..f3451d3 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/_core/_signals.py @@ -0,0 +1,27 @@ +from __future__ import annotations + +from collections.abc import AsyncIterator +from contextlib import AbstractContextManager +from signal import Signals + +from ._eventloop import get_async_backend + + +def open_signal_receiver( + *signals: Signals, +) -> AbstractContextManager[AsyncIterator[Signals]]: + """ + Start receiving operating system signals. + + :param signals: signals to receive (e.g. ``signal.SIGINT``) + :return: an asynchronous context manager for an asynchronous iterator which yields + signal numbers + + .. warning:: Windows does not support signals natively so it is best to avoid + relying on this in cross-platform applications. + + .. warning:: On asyncio, this permanently replaces any previous signal handler for + the given signals, as set via :meth:`~asyncio.loop.add_signal_handler`. + + """ + return get_async_backend().open_signal_receiver(*signals) diff --git a/venv/lib/python3.11/site-packages/anyio/_core/_sockets.py b/venv/lib/python3.11/site-packages/anyio/_core/_sockets.py new file mode 100644 index 0000000..054bcdd --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/_core/_sockets.py @@ -0,0 +1,792 @@ +from __future__ import annotations + +import errno +import os +import socket +import ssl +import stat +import sys +from collections.abc import Awaitable +from ipaddress import IPv6Address, ip_address +from os import PathLike, chmod +from socket import AddressFamily, SocketKind +from typing import TYPE_CHECKING, Any, Literal, cast, overload + +from .. 
import to_thread +from ..abc import ( + ConnectedUDPSocket, + ConnectedUNIXDatagramSocket, + IPAddressType, + IPSockAddrType, + SocketListener, + SocketStream, + UDPSocket, + UNIXDatagramSocket, + UNIXSocketStream, +) +from ..streams.stapled import MultiListener +from ..streams.tls import TLSStream +from ._eventloop import get_async_backend +from ._resources import aclose_forcefully +from ._synchronization import Event +from ._tasks import create_task_group, move_on_after + +if TYPE_CHECKING: + from _typeshed import FileDescriptorLike +else: + FileDescriptorLike = object + +if sys.version_info < (3, 11): + from exceptiongroup import ExceptionGroup + +if sys.version_info < (3, 13): + from typing_extensions import deprecated +else: + from warnings import deprecated + +IPPROTO_IPV6 = getattr(socket, "IPPROTO_IPV6", 41) # https://bugs.python.org/issue29515 + +AnyIPAddressFamily = Literal[ + AddressFamily.AF_UNSPEC, AddressFamily.AF_INET, AddressFamily.AF_INET6 +] +IPAddressFamily = Literal[AddressFamily.AF_INET, AddressFamily.AF_INET6] + + +# tls_hostname given +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + ssl_context: ssl.SSLContext | None = ..., + tls_standard_compatible: bool = ..., + tls_hostname: str, + happy_eyeballs_delay: float = ..., +) -> TLSStream: ... + + +# ssl_context given +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + ssl_context: ssl.SSLContext, + tls_standard_compatible: bool = ..., + tls_hostname: str | None = ..., + happy_eyeballs_delay: float = ..., +) -> TLSStream: ... + + +# tls=True +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + tls: Literal[True], + ssl_context: ssl.SSLContext | None = ..., + tls_standard_compatible: bool = ..., + tls_hostname: str | None = ..., + happy_eyeballs_delay: float = ..., +) -> TLSStream: ... + + +# tls=False +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + tls: Literal[False], + ssl_context: ssl.SSLContext | None = ..., + tls_standard_compatible: bool = ..., + tls_hostname: str | None = ..., + happy_eyeballs_delay: float = ..., +) -> SocketStream: ... + + +# No TLS arguments +@overload +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = ..., + happy_eyeballs_delay: float = ..., +) -> SocketStream: ... + + +async def connect_tcp( + remote_host: IPAddressType, + remote_port: int, + *, + local_host: IPAddressType | None = None, + tls: bool = False, + ssl_context: ssl.SSLContext | None = None, + tls_standard_compatible: bool = True, + tls_hostname: str | None = None, + happy_eyeballs_delay: float = 0.25, +) -> SocketStream | TLSStream: + """ + Connect to a host using the TCP protocol. + + This function implements the stateless version of the Happy Eyeballs algorithm (RFC + 6555). If ``remote_host`` is a host name that resolves to multiple IP addresses, + each one is tried until one connection attempt succeeds. If the first attempt does + not connect within 250 milliseconds, a second attempt is started using the next + address in the list, and so on. On IPv6 enabled systems, an IPv6 address (if + available) is tried first. 
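Reviewer aside on the Happy Eyeballs paragraph above: a minimal usage sketch (example.com is a stand-in host). Each resolved address gets its own connection attempt, staggered by happy_eyeballs_delay:

import anyio

async def main() -> None:
    stream = await anyio.connect_tcp("example.com", 80, happy_eyeballs_delay=0.25)
    async with stream:
        await stream.send(b"HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n")
        print(await stream.receive())

anyio.run(main)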
+ + When the connection has been established, a TLS handshake will be done if either + ``ssl_context`` or ``tls_hostname`` is not ``None``, or if ``tls`` is ``True``. + + :param remote_host: the IP address or host name to connect to + :param remote_port: port on the target host to connect to + :param local_host: the interface address or name to bind the socket to before + connecting + :param tls: ``True`` to do a TLS handshake with the connected stream and return a + :class:`~anyio.streams.tls.TLSStream` instead + :param ssl_context: the SSL context object to use (if omitted, a default context is + created) + :param tls_standard_compatible: If ``True``, performs the TLS shutdown handshake + before closing the stream and requires that the server does this as well. + Otherwise, :exc:`~ssl.SSLEOFError` may be raised during reads from the stream. + Some protocols, such as HTTP, require this option to be ``False``. + See :meth:`~ssl.SSLContext.wrap_socket` for details. + :param tls_hostname: host name to check the server certificate against (defaults to + the value of ``remote_host``) + :param happy_eyeballs_delay: delay (in seconds) before starting the next connection + attempt + :return: a socket stream object if no TLS handshake was done, otherwise a TLS stream + :raises OSError: if the connection attempt fails + + """ + # Placed here due to https://github.com/python/mypy/issues/7057 + connected_stream: SocketStream | None = None + + async def try_connect(remote_host: str, event: Event) -> None: + nonlocal connected_stream + try: + stream = await asynclib.connect_tcp(remote_host, remote_port, local_address) + except OSError as exc: + oserrors.append(exc) + return + else: + if connected_stream is None: + connected_stream = stream + tg.cancel_scope.cancel() + else: + await stream.aclose() + finally: + event.set() + + asynclib = get_async_backend() + local_address: IPSockAddrType | None = None + family = socket.AF_UNSPEC + if local_host: + gai_res = await getaddrinfo(str(local_host), None) + family, *_, local_address = gai_res[0] + + target_host = str(remote_host) + try: + addr_obj = ip_address(remote_host) + except ValueError: + addr_obj = None + + if addr_obj is not None: + if isinstance(addr_obj, IPv6Address): + target_addrs = [(socket.AF_INET6, addr_obj.compressed)] + else: + target_addrs = [(socket.AF_INET, addr_obj.compressed)] + else: + # getaddrinfo() will raise an exception if name resolution fails + gai_res = await getaddrinfo( + target_host, remote_port, family=family, type=socket.SOCK_STREAM + ) + + # Organize the list so that the first address is an IPv6 address (if available) + # and the second one is an IPv4 address. The rest can be in whatever order. 
+ v6_found = v4_found = False + target_addrs = [] + for af, *rest, sa in gai_res: + if af == socket.AF_INET6 and not v6_found: + v6_found = True + target_addrs.insert(0, (af, sa[0])) + elif af == socket.AF_INET and not v4_found and v6_found: + v4_found = True + target_addrs.insert(1, (af, sa[0])) + else: + target_addrs.append((af, sa[0])) + + oserrors: list[OSError] = [] + try: + async with create_task_group() as tg: + for i, (af, addr) in enumerate(target_addrs): + event = Event() + tg.start_soon(try_connect, addr, event) + with move_on_after(happy_eyeballs_delay): + await event.wait() + + if connected_stream is None: + cause = ( + oserrors[0] + if len(oserrors) == 1 + else ExceptionGroup("multiple connection attempts failed", oserrors) + ) + raise OSError("All connection attempts failed") from cause + finally: + oserrors.clear() + + if tls or tls_hostname or ssl_context: + try: + return await TLSStream.wrap( + connected_stream, + server_side=False, + hostname=tls_hostname or str(remote_host), + ssl_context=ssl_context, + standard_compatible=tls_standard_compatible, + ) + except BaseException: + await aclose_forcefully(connected_stream) + raise + + return connected_stream + + +async def connect_unix(path: str | bytes | PathLike[Any]) -> UNIXSocketStream: + """ + Connect to the given UNIX socket. + + Not available on Windows. + + :param path: path to the socket + :return: a socket stream object + + """ + path = os.fspath(path) + return await get_async_backend().connect_unix(path) + + +async def create_tcp_listener( + *, + local_host: IPAddressType | None = None, + local_port: int = 0, + family: AnyIPAddressFamily = socket.AddressFamily.AF_UNSPEC, + backlog: int = 65536, + reuse_port: bool = False, +) -> MultiListener[SocketStream]: + """ + Create a TCP socket listener. + + :param local_port: port number to listen on + :param local_host: IP address of the interface to listen on. If omitted, listen on + all IPv4 and IPv6 interfaces. To listen on all interfaces on a specific address + family, use ``0.0.0.0`` for IPv4 or ``::`` for IPv6. + :param family: address family (used if ``local_host`` was omitted) + :param backlog: maximum number of queued incoming connections (up to a maximum of + 2**16, or 65536) + :param reuse_port: ``True`` to allow multiple sockets to bind to the same + address/port (not supported on Windows) + :return: a list of listener objects + + """ + asynclib = get_async_backend() + backlog = min(backlog, 65536) + local_host = str(local_host) if local_host is not None else None + gai_res = await getaddrinfo( + local_host, + local_port, + family=family, + type=socket.SocketKind.SOCK_STREAM if sys.platform == "win32" else 0, + flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG, + ) + listeners: list[SocketListener] = [] + try: + # The set() is here to work around a glibc bug: + # https://sourceware.org/bugzilla/show_bug.cgi?id=14969 + sockaddr: tuple[str, int] | tuple[str, int, int, int] + for fam, kind, *_, sockaddr in sorted(set(gai_res)): + # Workaround for an uvloop bug where we don't get the correct scope ID for + # IPv6 link-local addresses when passing type=socket.SOCK_STREAM to + # getaddrinfo(): https://github.com/MagicStack/uvloop/issues/539 + if sys.platform != "win32" and kind is not SocketKind.SOCK_STREAM: + continue + + raw_socket = socket.socket(fam) + raw_socket.setblocking(False) + + # For Windows, enable exclusive address use. For others, enable address + # reuse. 
+ if sys.platform == "win32": + raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) + else: + raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + + if reuse_port: + raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + + # If only IPv6 was requested, disable dual stack operation + if fam == socket.AF_INET6: + raw_socket.setsockopt(IPPROTO_IPV6, socket.IPV6_V6ONLY, 1) + + # Workaround for #554 + if "%" in sockaddr[0]: + addr, scope_id = sockaddr[0].split("%", 1) + sockaddr = (addr, sockaddr[1], 0, int(scope_id)) + + raw_socket.bind(sockaddr) + raw_socket.listen(backlog) + listener = asynclib.create_tcp_listener(raw_socket) + listeners.append(listener) + except BaseException: + for listener in listeners: + await listener.aclose() + + raise + + return MultiListener(listeners) + + +async def create_unix_listener( + path: str | bytes | PathLike[Any], + *, + mode: int | None = None, + backlog: int = 65536, +) -> SocketListener: + """ + Create a UNIX socket listener. + + Not available on Windows. + + :param path: path of the socket + :param mode: permissions to set on the socket + :param backlog: maximum number of queued incoming connections (up to a maximum of + 2**16, or 65536) + :return: a listener object + + .. versionchanged:: 3.0 + If a socket already exists on the file system in the given path, it will be + removed first. + + """ + backlog = min(backlog, 65536) + raw_socket = await setup_unix_local_socket(path, mode, socket.SOCK_STREAM) + try: + raw_socket.listen(backlog) + return get_async_backend().create_unix_listener(raw_socket) + except BaseException: + raw_socket.close() + raise + + +async def create_udp_socket( + family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC, + *, + local_host: IPAddressType | None = None, + local_port: int = 0, + reuse_port: bool = False, +) -> UDPSocket: + """ + Create a UDP socket. + + If ``local_port`` has been given, the socket will be bound to this port on the local + machine, making this socket suitable for providing UDP based services. + + :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically + determined from ``local_host`` if omitted + :param local_host: IP address or host name of the local interface to bind to + :param local_port: local port to bind to + :param reuse_port: ``True`` to allow multiple sockets to bind to the same + address/port (not supported on Windows) + :return: a UDP socket + + """ + if family is AddressFamily.AF_UNSPEC and not local_host: + raise ValueError('Either "family" or "local_host" must be given') + + if local_host: + gai_res = await getaddrinfo( + str(local_host), + local_port, + family=family, + type=socket.SOCK_DGRAM, + flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG, + ) + family = cast(AnyIPAddressFamily, gai_res[0][0]) + local_address = gai_res[0][-1] + elif family is AddressFamily.AF_INET6: + local_address = ("::", 0) + else: + local_address = ("0.0.0.0", 0) + + sock = await get_async_backend().create_udp_socket( + family, local_address, None, reuse_port + ) + return cast(UDPSocket, sock) + + +async def create_connected_udp_socket( + remote_host: IPAddressType, + remote_port: int, + *, + family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC, + local_host: IPAddressType | None = None, + local_port: int = 0, + reuse_port: bool = False, +) -> ConnectedUDPSocket: + """ + Create a connected UDP socket. + + Connected UDP sockets can only communicate with the specified remote host/port, and + any packets sent from other sources are dropped. 
+
+    :param remote_host: remote host to set as the default target
+    :param remote_port: port on the remote host to set as the default target
+    :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically
+        determined from ``local_host`` or ``remote_host`` if omitted
+    :param local_host: IP address or host name of the local interface to bind to
+    :param local_port: local port to bind to
+    :param reuse_port: ``True`` to allow multiple sockets to bind to the same
+        address/port (not supported on Windows)
+    :return: a connected UDP socket
+
+    """
+    local_address = None
+    if local_host:
+        gai_res = await getaddrinfo(
+            str(local_host),
+            local_port,
+            family=family,
+            type=socket.SOCK_DGRAM,
+            flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
+        )
+        family = cast(AnyIPAddressFamily, gai_res[0][0])
+        local_address = gai_res[0][-1]
+
+    gai_res = await getaddrinfo(
+        str(remote_host), remote_port, family=family, type=socket.SOCK_DGRAM
+    )
+    family = cast(AnyIPAddressFamily, gai_res[0][0])
+    remote_address = gai_res[0][-1]
+
+    sock = await get_async_backend().create_udp_socket(
+        family, local_address, remote_address, reuse_port
+    )
+    return cast(ConnectedUDPSocket, sock)
+
+
+async def create_unix_datagram_socket(
+    *,
+    local_path: None | str | bytes | PathLike[Any] = None,
+    local_mode: int | None = None,
+) -> UNIXDatagramSocket:
+    """
+    Create a UNIX datagram socket.
+
+    Not available on Windows.
+
+    If ``local_path`` has been given, the socket will be bound to this path, making this
+    socket suitable for receiving datagrams from other processes. Other processes can
+    send datagrams to this socket only if ``local_path`` is set.
+
+    If a socket already exists on the file system in the ``local_path``, it will be
+    removed first.
+
+    :param local_path: the path to bind to
+    :param local_mode: permissions to set on the local socket
+    :return: a UNIX datagram socket
+
+    """
+    raw_socket = await setup_unix_local_socket(
+        local_path, local_mode, socket.SOCK_DGRAM
+    )
+    return await get_async_backend().create_unix_datagram_socket(raw_socket, None)
+
+
+async def create_connected_unix_datagram_socket(
+    remote_path: str | bytes | PathLike[Any],
+    *,
+    local_path: None | str | bytes | PathLike[Any] = None,
+    local_mode: int | None = None,
+) -> ConnectedUNIXDatagramSocket:
+    """
+    Create a connected UNIX datagram socket.
+
+    Connected datagram sockets can only communicate with the specified remote path.
+
+    If ``local_path`` has been given, the socket will be bound to this path, making
+    this socket suitable for receiving datagrams from other processes. Other processes
+    can send datagrams to this socket only if ``local_path`` is set.
+
+    If a socket already exists on the file system in the ``local_path``, it will be
+    removed first.
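+
+    A usage sketch (editor's addition; the socket paths are made up)::
+
+        async with await create_connected_unix_datagram_socket(
+            "/run/server.sock", local_path="/run/client.sock"
+        ) as client:
+            await client.send(b"ping")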
+
+    :param remote_path: the path to set as the default target
+    :param local_path: the path to bind to
+    :param local_mode: permissions to set on the local socket
+    :return: a connected UNIX datagram socket
+
+    """
+    remote_path = os.fspath(remote_path)
+    raw_socket = await setup_unix_local_socket(
+        local_path, local_mode, socket.SOCK_DGRAM
+    )
+    return await get_async_backend().create_unix_datagram_socket(
+        raw_socket, remote_path
+    )
+
+
+async def getaddrinfo(
+    host: bytes | str | None,
+    port: str | int | None,
+    *,
+    family: int | AddressFamily = 0,
+    type: int | SocketKind = 0,
+    proto: int = 0,
+    flags: int = 0,
+) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int]]]:
+    """
+    Look up a numeric IP address given a host name.
+
+    Internationalized domain names are translated according to the (non-transitional)
+    IDNA 2008 standard.
+
+    .. note:: 4-tuple IPv6 socket addresses are automatically converted to 2-tuples of
+        (host, port), unlike what :func:`socket.getaddrinfo` does.
+
+    :param host: host name
+    :param port: port number
+    :param family: socket family (``AF_INET``, ...)
+    :param type: socket type (``SOCK_STREAM``, ...)
+    :param proto: protocol number
+    :param flags: flags to pass to upstream ``getaddrinfo()``
+    :return: list of tuples containing (family, type, proto, canonname, sockaddr)
+
+    .. seealso:: :func:`socket.getaddrinfo`
+
+    """
+    # Handle unicode hostnames
+    if isinstance(host, str):
+        try:
+            encoded_host: bytes | None = host.encode("ascii")
+        except UnicodeEncodeError:
+            import idna
+
+            encoded_host = idna.encode(host, uts46=True)
+    else:
+        encoded_host = host
+
+    gai_res = await get_async_backend().getaddrinfo(
+        encoded_host, port, family=family, type=type, proto=proto, flags=flags
+    )
+    return [
+        (family, type, proto, canonname, convert_ipv6_sockaddr(sockaddr))
+        for family, type, proto, canonname, sockaddr in gai_res
+        # filter out IPv6 results when IPv6 is disabled
+        if not isinstance(sockaddr[0], int)
+    ]
+
+
+def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Awaitable[tuple[str, str]]:
+    """
+    Look up the host name of an IP address.
+
+    :param sockaddr: socket address (e.g. (ipaddress, port) for IPv4)
+    :param flags: flags to pass to upstream ``getnameinfo()``
+    :return: a tuple of (host name, service name)
+
+    .. seealso:: :func:`socket.getnameinfo`
+
+    """
+    return get_async_backend().getnameinfo(sockaddr, flags)
+
+
+@deprecated("This function is deprecated; use `wait_readable` instead")
+def wait_socket_readable(sock: socket.socket) -> Awaitable[None]:
+    """
+    .. deprecated:: 4.7.0
+        Use :func:`wait_readable` instead.
+
+    Wait until the given socket has data to be read.
+
+    .. warning:: Only use this on raw sockets that have not been wrapped by any higher
+        level constructs like socket streams!
+
+    :param sock: a socket object
+    :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the
+        socket to become readable
+    :raises ~anyio.BusyResourceError: if another task is already waiting for the socket
+        to become readable
+
+    """
+    return get_async_backend().wait_readable(sock.fileno())
+
+
+@deprecated("This function is deprecated; use `wait_writable` instead")
+def wait_socket_writable(sock: socket.socket) -> Awaitable[None]:
+    """
+    .. deprecated:: 4.7.0
+        Use :func:`wait_writable` instead.
+
+    Wait until the given socket can be written to.
+
+    This does **NOT** work on Windows when using the asyncio backend with a proactor
+    event loop (default on py3.8+).
+
+    ..
warning:: Only use this on raw sockets that have not been wrapped by any higher + level constructs like socket streams! + + :param sock: a socket object + :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the + socket to become writable + :raises ~anyio.BusyResourceError: if another task is already waiting for the socket + to become writable + + """ + return get_async_backend().wait_writable(sock.fileno()) + + +def wait_readable(obj: FileDescriptorLike) -> Awaitable[None]: + """ + Wait until the given object has data to be read. + + On Unix systems, ``obj`` must either be an integer file descriptor, or else an + object with a ``.fileno()`` method which returns an integer file descriptor. Any + kind of file descriptor can be passed, though the exact semantics will depend on + your kernel. For example, this probably won't do anything useful for on-disk files. + + On Windows systems, ``obj`` must either be an integer ``SOCKET`` handle, or else an + object with a ``.fileno()`` method which returns an integer ``SOCKET`` handle. File + descriptors aren't supported, and neither are handles that refer to anything besides + a ``SOCKET``. + + On backends where this functionality is not natively provided (asyncio + ``ProactorEventLoop`` on Windows), it is provided using a separate selector thread + which is set to shut down when the interpreter shuts down. + + .. warning:: Don't use this on raw sockets that have been wrapped by any higher + level constructs like socket streams! + + :param obj: an object with a ``.fileno()`` method or an integer handle + :raises ~anyio.ClosedResourceError: if the object was closed while waiting for the + object to become readable + :raises ~anyio.BusyResourceError: if another task is already waiting for the object + to become readable + + """ + return get_async_backend().wait_readable(obj) + + +def wait_writable(obj: FileDescriptorLike) -> Awaitable[None]: + """ + Wait until the given object can be written to. + + :param obj: an object with a ``.fileno()`` method or an integer handle + :raises ~anyio.ClosedResourceError: if the object was closed while waiting for the + object to become writable + :raises ~anyio.BusyResourceError: if another task is already waiting for the object + to become writable + + .. seealso:: See the documentation of :func:`wait_readable` for the definition of + ``obj`` and notes on backend compatibility. + + .. warning:: Don't use this on raw sockets that have been wrapped by any higher + level constructs like socket streams! + + """ + return get_async_backend().wait_writable(obj) + + +# +# Private API +# + + +def convert_ipv6_sockaddr( + sockaddr: tuple[str, int, int, int] | tuple[str, int], +) -> tuple[str, int]: + """ + Convert a 4-tuple IPv6 socket address to a 2-tuple (address, port) format. + + If the scope ID is nonzero, it is added to the address, separated with ``%``. + Otherwise the flow id and scope id are simply cut off from the tuple. + Any other kinds of socket addresses are returned as-is. 
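+
+    For example (editor's illustration)::
+
+        convert_ipv6_sockaddr(("fe80::1", 8080, 0, 3))  # -> ("fe80::1%3", 8080)
+        convert_ipv6_sockaddr(("fe80::1", 8080, 0, 0))  # -> ("fe80::1", 8080)
+        convert_ipv6_sockaddr(("127.0.0.1", 8080))      # -> ("127.0.0.1", 8080)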
+ + :param sockaddr: the result of :meth:`~socket.socket.getsockname` + :return: the converted socket address + + """ + # This is more complicated than it should be because of MyPy + if isinstance(sockaddr, tuple) and len(sockaddr) == 4: + host, port, flowinfo, scope_id = sockaddr + if scope_id: + # PyPy (as of v7.3.11) leaves the interface name in the result, so + # we discard it and only get the scope ID from the end + # (https://foss.heptapod.net/pypy/pypy/-/issues/3938) + host = host.split("%")[0] + + # Add scope_id to the address + return f"{host}%{scope_id}", port + else: + return host, port + else: + return sockaddr + + +async def setup_unix_local_socket( + path: None | str | bytes | PathLike[Any], + mode: int | None, + socktype: int, +) -> socket.socket: + """ + Create a UNIX local socket object, deleting the socket at the given path if it + exists. + + Not available on Windows. + + :param path: path of the socket + :param mode: permissions to set on the socket + :param socktype: socket.SOCK_STREAM or socket.SOCK_DGRAM + + """ + path_str: str | None + if path is not None: + path_str = os.fsdecode(path) + + # Linux abstract namespace sockets aren't backed by a concrete file so skip stat call + if not path_str.startswith("\0"): + # Copied from pathlib... + try: + stat_result = os.stat(path) + except OSError as e: + if e.errno not in ( + errno.ENOENT, + errno.ENOTDIR, + errno.EBADF, + errno.ELOOP, + ): + raise + else: + if stat.S_ISSOCK(stat_result.st_mode): + os.unlink(path) + else: + path_str = None + + raw_socket = socket.socket(socket.AF_UNIX, socktype) + raw_socket.setblocking(False) + + if path_str is not None: + try: + await to_thread.run_sync(raw_socket.bind, path_str, abandon_on_cancel=True) + if mode is not None: + await to_thread.run_sync(chmod, path_str, mode, abandon_on_cancel=True) + except BaseException: + raw_socket.close() + raise + + return raw_socket diff --git a/venv/lib/python3.11/site-packages/anyio/_core/_streams.py b/venv/lib/python3.11/site-packages/anyio/_core/_streams.py new file mode 100644 index 0000000..6a9814e --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/_core/_streams.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +import math +from typing import TypeVar +from warnings import warn + +from ..streams.memory import ( + MemoryObjectReceiveStream, + MemoryObjectSendStream, + MemoryObjectStreamState, +) + +T_Item = TypeVar("T_Item") + + +class create_memory_object_stream( + tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]], +): + """ + Create a memory object stream. + + The stream's item type can be annotated like + :func:`create_memory_object_stream[T_Item]`. + + :param max_buffer_size: number of items held in the buffer until ``send()`` starts + blocking + :param item_type: old way of marking the streams with the right generic type for + static typing (does nothing on AnyIO 4) + + .. deprecated:: 4.0 + Use ``create_memory_object_stream[YourItemType](...)`` instead. 
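+
+    A producer/consumer sketch (editor's addition; ``producer`` is a hypothetical
+    coroutine that sends items on the stream and then closes it)::
+
+        send_stream, receive_stream = create_memory_object_stream[str](10)
+        async with create_task_group() as tg:
+            tg.start_soon(producer, send_stream)
+            async with receive_stream:
+                async for item in receive_stream:
+                    print(item)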
+ :return: a tuple of (send stream, receive stream) + + """ + + def __new__( # type: ignore[misc] + cls, max_buffer_size: float = 0, item_type: object = None + ) -> tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]: + if max_buffer_size != math.inf and not isinstance(max_buffer_size, int): + raise ValueError("max_buffer_size must be either an integer or math.inf") + if max_buffer_size < 0: + raise ValueError("max_buffer_size cannot be negative") + if item_type is not None: + warn( + "The item_type argument has been deprecated in AnyIO 4.0. " + "Use create_memory_object_stream[YourItemType](...) instead.", + DeprecationWarning, + stacklevel=2, + ) + + state = MemoryObjectStreamState[T_Item](max_buffer_size) + return (MemoryObjectSendStream(state), MemoryObjectReceiveStream(state)) diff --git a/venv/lib/python3.11/site-packages/anyio/_core/_subprocesses.py b/venv/lib/python3.11/site-packages/anyio/_core/_subprocesses.py new file mode 100644 index 0000000..36d9b30 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/_core/_subprocesses.py @@ -0,0 +1,202 @@ +from __future__ import annotations + +import sys +from collections.abc import AsyncIterable, Iterable, Mapping, Sequence +from io import BytesIO +from os import PathLike +from subprocess import PIPE, CalledProcessError, CompletedProcess +from typing import IO, Any, Union, cast + +from ..abc import Process +from ._eventloop import get_async_backend +from ._tasks import create_task_group + +if sys.version_info >= (3, 10): + from typing import TypeAlias +else: + from typing_extensions import TypeAlias + +StrOrBytesPath: TypeAlias = Union[str, bytes, "PathLike[str]", "PathLike[bytes]"] + + +async def run_process( + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + input: bytes | None = None, + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = PIPE, + stderr: int | IO[Any] | None = PIPE, + check: bool = True, + cwd: StrOrBytesPath | None = None, + env: Mapping[str, str] | None = None, + startupinfo: Any = None, + creationflags: int = 0, + start_new_session: bool = False, + pass_fds: Sequence[int] = (), + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, +) -> CompletedProcess[bytes]: + """ + Run an external command in a subprocess and wait until it completes. + + .. 
seealso:: :func:`subprocess.run` + + :param command: either a string to pass to the shell, or an iterable of strings + containing the executable name or path and its arguments + :param input: bytes passed to the standard input of the subprocess + :param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + a file-like object, or `None`; ``input`` overrides this + :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + a file-like object, or `None` + :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, + :data:`subprocess.STDOUT`, a file-like object, or `None` + :param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the + process terminates with a return code other than 0 + :param cwd: If not ``None``, change the working directory to this before running the + command + :param env: if not ``None``, this mapping replaces the inherited environment + variables from the parent process + :param startupinfo: an instance of :class:`subprocess.STARTUPINFO` that can be used + to specify process startup parameters (Windows only) + :param creationflags: flags that can be used to control the creation of the + subprocess (see :class:`subprocess.Popen` for the specifics) + :param start_new_session: if ``true`` the setsid() system call will be made in the + child process prior to the execution of the subprocess. (POSIX only) + :param pass_fds: sequence of file descriptors to keep open between the parent and + child processes. (POSIX only) + :param user: effective user to run the process as (Python >= 3.9, POSIX only) + :param group: effective group to run the process as (Python >= 3.9, POSIX only) + :param extra_groups: supplementary groups to set in the subprocess (Python >= 3.9, + POSIX only) + :param umask: if not negative, this umask is applied in the child process before + running the given command (Python >= 3.9, POSIX only) + :return: an object representing the completed process + :raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process + exits with a nonzero return code + + """ + + async def drain_stream(stream: AsyncIterable[bytes], index: int) -> None: + buffer = BytesIO() + async for chunk in stream: + buffer.write(chunk) + + stream_contents[index] = buffer.getvalue() + + if stdin is not None and input is not None: + raise ValueError("only one of stdin and input is allowed") + + async with await open_process( + command, + stdin=PIPE if input else stdin, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + startupinfo=startupinfo, + creationflags=creationflags, + start_new_session=start_new_session, + pass_fds=pass_fds, + user=user, + group=group, + extra_groups=extra_groups, + umask=umask, + ) as process: + stream_contents: list[bytes | None] = [None, None] + async with create_task_group() as tg: + if process.stdout: + tg.start_soon(drain_stream, process.stdout, 0) + + if process.stderr: + tg.start_soon(drain_stream, process.stderr, 1) + + if process.stdin and input: + await process.stdin.send(input) + await process.stdin.aclose() + + await process.wait() + + output, errors = stream_contents + if check and process.returncode != 0: + raise CalledProcessError(cast(int, process.returncode), command, output, errors) + + return CompletedProcess(command, cast(int, process.returncode), output, errors) + + +async def open_process( + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + stdin: int | IO[Any] | None = PIPE, + stdout: int | IO[Any] | None = PIPE, + stderr: int | IO[Any] | None = 
PIPE,
+    cwd: StrOrBytesPath | None = None,
+    env: Mapping[str, str] | None = None,
+    startupinfo: Any = None,
+    creationflags: int = 0,
+    start_new_session: bool = False,
+    pass_fds: Sequence[int] = (),
+    user: str | int | None = None,
+    group: str | int | None = None,
+    extra_groups: Iterable[str | int] | None = None,
+    umask: int = -1,
+) -> Process:
+    """
+    Start an external command in a subprocess.
+
+    .. seealso:: :class:`subprocess.Popen`
+
+    :param command: either a string to pass to the shell, or an iterable of strings
+        containing the executable name or path and its arguments
+    :param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, a
+        file-like object, or ``None``
+    :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
+        a file-like object, or ``None``
+    :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
+        :data:`subprocess.STDOUT`, a file-like object, or ``None``
+    :param cwd: If not ``None``, the working directory is changed before executing
+    :param env: If env is not ``None``, it must be a mapping that defines the
+        environment variables for the new process
+    :param creationflags: flags that can be used to control the creation of the
+        subprocess (see :class:`subprocess.Popen` for the specifics)
+    :param startupinfo: an instance of :class:`subprocess.STARTUPINFO` that can be used
+        to specify process startup parameters (Windows only)
+    :param start_new_session: if ``True`` the setsid() system call will be made in the
+        child process prior to the execution of the subprocess. (POSIX only)
+    :param pass_fds: sequence of file descriptors to keep open between the parent and
+        child processes. (POSIX only)
+    :param user: effective user to run the process as (POSIX only)
+    :param group: effective group to run the process as (POSIX only)
+    :param extra_groups: supplementary groups to set in the subprocess (POSIX only)
+    :param umask: if not negative, this umask is applied in the child process before
+        running the given command (POSIX only)
+    :return: an asynchronous process object
+
+    """
+    kwargs: dict[str, Any] = {}
+    if user is not None:
+        kwargs["user"] = user
+
+    if group is not None:
+        kwargs["group"] = group
+
+    if extra_groups is not None:
+        # Pass the supplementary groups through (not the primary group)
+        kwargs["extra_groups"] = extra_groups
+
+    if umask >= 0:
+        kwargs["umask"] = umask
+
+    return await get_async_backend().open_process(
+        command,
+        stdin=stdin,
+        stdout=stdout,
+        stderr=stderr,
+        cwd=cwd,
+        env=env,
+        startupinfo=startupinfo,
+        creationflags=creationflags,
+        start_new_session=start_new_session,
+        pass_fds=pass_fds,
+        **kwargs,
+    )
diff --git a/venv/lib/python3.11/site-packages/anyio/_core/_synchronization.py b/venv/lib/python3.11/site-packages/anyio/_core/_synchronization.py
new file mode 100644
index 0000000..a633132
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/anyio/_core/_synchronization.py
@@ -0,0 +1,732 @@
+from __future__ import annotations
+
+import math
+from collections import deque
+from dataclasses import dataclass
+from types import TracebackType
+
+from sniffio import AsyncLibraryNotFoundError
+
+from ..lowlevel import checkpoint
+from ._eventloop import get_async_backend
+from ._exceptions import BusyResourceError
+from ._tasks import CancelScope
+from ._testing import TaskInfo, get_current_task
+
+
+@dataclass(frozen=True)
+class EventStatistics:
+    """
+    :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Event.wait`
+    """
+
+    tasks_waiting: int
+
+
+@dataclass(frozen=True)
+class CapacityLimiterStatistics:
+    """
+    :ivar int
borrowed_tokens: number of tokens currently borrowed by tasks + :ivar float total_tokens: total number of available tokens + :ivar tuple borrowers: tasks or other objects currently holding tokens borrowed from + this limiter + :ivar int tasks_waiting: number of tasks waiting on + :meth:`~.CapacityLimiter.acquire` or + :meth:`~.CapacityLimiter.acquire_on_behalf_of` + """ + + borrowed_tokens: int + total_tokens: float + borrowers: tuple[object, ...] + tasks_waiting: int + + +@dataclass(frozen=True) +class LockStatistics: + """ + :ivar bool locked: flag indicating if this lock is locked or not + :ivar ~anyio.TaskInfo owner: task currently holding the lock (or ``None`` if the + lock is not held by any task) + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Lock.acquire` + """ + + locked: bool + owner: TaskInfo | None + tasks_waiting: int + + +@dataclass(frozen=True) +class ConditionStatistics: + """ + :ivar int tasks_waiting: number of tasks blocked on :meth:`~.Condition.wait` + :ivar ~anyio.LockStatistics lock_statistics: statistics of the underlying + :class:`~.Lock` + """ + + tasks_waiting: int + lock_statistics: LockStatistics + + +@dataclass(frozen=True) +class SemaphoreStatistics: + """ + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Semaphore.acquire` + + """ + + tasks_waiting: int + + +class Event: + def __new__(cls) -> Event: + try: + return get_async_backend().create_event() + except AsyncLibraryNotFoundError: + return EventAdapter() + + def set(self) -> None: + """Set the flag, notifying all listeners.""" + raise NotImplementedError + + def is_set(self) -> bool: + """Return ``True`` if the flag is set, ``False`` if not.""" + raise NotImplementedError + + async def wait(self) -> None: + """ + Wait until the flag has been set. + + If the flag has already been set when this method is called, it returns + immediately. + + """ + raise NotImplementedError + + def statistics(self) -> EventStatistics: + """Return statistics about the current state of this event.""" + raise NotImplementedError + + +class EventAdapter(Event): + _internal_event: Event | None = None + _is_set: bool = False + + def __new__(cls) -> EventAdapter: + return object.__new__(cls) + + @property + def _event(self) -> Event: + if self._internal_event is None: + self._internal_event = get_async_backend().create_event() + if self._is_set: + self._internal_event.set() + + return self._internal_event + + def set(self) -> None: + if self._internal_event is None: + self._is_set = True + else: + self._event.set() + + def is_set(self) -> bool: + if self._internal_event is None: + return self._is_set + + return self._internal_event.is_set() + + async def wait(self) -> None: + await self._event.wait() + + def statistics(self) -> EventStatistics: + if self._internal_event is None: + return EventStatistics(tasks_waiting=0) + + return self._internal_event.statistics() + + +class Lock: + def __new__(cls, *, fast_acquire: bool = False) -> Lock: + try: + return get_async_backend().create_lock(fast_acquire=fast_acquire) + except AsyncLibraryNotFoundError: + return LockAdapter(fast_acquire=fast_acquire) + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + async def acquire(self) -> None: + """Acquire the lock.""" + raise NotImplementedError + + def acquire_nowait(self) -> None: + """ + Acquire the lock, without blocking. 
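+
+        Editor's sketch of the non-blocking pattern (assumes an existing
+        ``lock = Lock()``; ``WouldBlock`` is importable from ``anyio``)::
+
+            try:
+                lock.acquire_nowait()
+            except WouldBlock:
+                ...  # another task holds the lock
+            else:
+                try:
+                    ...  # critical section
+                finally:
+                    lock.release()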
+ + :raises ~anyio.WouldBlock: if the operation would block + + """ + raise NotImplementedError + + def release(self) -> None: + """Release the lock.""" + raise NotImplementedError + + def locked(self) -> bool: + """Return True if the lock is currently held.""" + raise NotImplementedError + + def statistics(self) -> LockStatistics: + """ + Return statistics about the current state of this lock. + + .. versionadded:: 3.0 + """ + raise NotImplementedError + + +class LockAdapter(Lock): + _internal_lock: Lock | None = None + + def __new__(cls, *, fast_acquire: bool = False) -> LockAdapter: + return object.__new__(cls) + + def __init__(self, *, fast_acquire: bool = False): + self._fast_acquire = fast_acquire + + @property + def _lock(self) -> Lock: + if self._internal_lock is None: + self._internal_lock = get_async_backend().create_lock( + fast_acquire=self._fast_acquire + ) + + return self._internal_lock + + async def __aenter__(self) -> None: + await self._lock.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + if self._internal_lock is not None: + self._internal_lock.release() + + async def acquire(self) -> None: + """Acquire the lock.""" + await self._lock.acquire() + + def acquire_nowait(self) -> None: + """ + Acquire the lock, without blocking. + + :raises ~anyio.WouldBlock: if the operation would block + + """ + self._lock.acquire_nowait() + + def release(self) -> None: + """Release the lock.""" + self._lock.release() + + def locked(self) -> bool: + """Return True if the lock is currently held.""" + return self._lock.locked() + + def statistics(self) -> LockStatistics: + """ + Return statistics about the current state of this lock. + + .. versionadded:: 3.0 + + """ + if self._internal_lock is None: + return LockStatistics(False, None, 0) + + return self._internal_lock.statistics() + + +class Condition: + _owner_task: TaskInfo | None = None + + def __init__(self, lock: Lock | None = None): + self._lock = lock or Lock() + self._waiters: deque[Event] = deque() + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + def _check_acquired(self) -> None: + if self._owner_task != get_current_task(): + raise RuntimeError("The current task is not holding the underlying lock") + + async def acquire(self) -> None: + """Acquire the underlying lock.""" + await self._lock.acquire() + self._owner_task = get_current_task() + + def acquire_nowait(self) -> None: + """ + Acquire the underlying lock, without blocking. 
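+
+        A broader wait/notify sketch for :class:`Condition` (editor's addition;
+        assumes two cooperating tasks)::
+
+            condition = Condition()
+
+            async def consumer() -> None:
+                async with condition:
+                    await condition.wait()  # lock released while waiting
+
+            # elsewhere, in another task:
+            async with condition:
+                condition.notify(1)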
+
+        :raises ~anyio.WouldBlock: if the operation would block
+
+        """
+        self._lock.acquire_nowait()
+        self._owner_task = get_current_task()
+
+    def release(self) -> None:
+        """Release the underlying lock."""
+        self._lock.release()
+
+    def locked(self) -> bool:
+        """Return True if the underlying lock is held."""
+        return self._lock.locked()
+
+    def notify(self, n: int = 1) -> None:
+        """Notify exactly n listeners."""
+        self._check_acquired()
+        for _ in range(n):
+            try:
+                event = self._waiters.popleft()
+            except IndexError:
+                break
+
+            event.set()
+
+    def notify_all(self) -> None:
+        """Notify all the listeners."""
+        self._check_acquired()
+        for event in self._waiters:
+            event.set()
+
+        self._waiters.clear()
+
+    async def wait(self) -> None:
+        """Wait for a notification."""
+        await checkpoint()
+        event = Event()
+        self._waiters.append(event)
+        self.release()
+        try:
+            await event.wait()
+        except BaseException:
+            if not event.is_set():
+                self._waiters.remove(event)
+
+            raise
+        finally:
+            with CancelScope(shield=True):
+                await self.acquire()
+
+    def statistics(self) -> ConditionStatistics:
+        """
+        Return statistics about the current state of this condition.
+
+        .. versionadded:: 3.0
+        """
+        return ConditionStatistics(len(self._waiters), self._lock.statistics())
+
+
+class Semaphore:
+    def __new__(
+        cls,
+        initial_value: int,
+        *,
+        max_value: int | None = None,
+        fast_acquire: bool = False,
+    ) -> Semaphore:
+        try:
+            return get_async_backend().create_semaphore(
+                initial_value, max_value=max_value, fast_acquire=fast_acquire
+            )
+        except AsyncLibraryNotFoundError:
+            return SemaphoreAdapter(initial_value, max_value=max_value)
+
+    def __init__(
+        self,
+        initial_value: int,
+        *,
+        max_value: int | None = None,
+        fast_acquire: bool = False,
+    ):
+        if not isinstance(initial_value, int):
+            raise TypeError("initial_value must be an integer")
+        if initial_value < 0:
+            raise ValueError("initial_value must be >= 0")
+        if max_value is not None:
+            if not isinstance(max_value, int):
+                raise TypeError("max_value must be an integer or None")
+            if max_value < initial_value:
+                raise ValueError(
+                    "max_value must be equal to or higher than initial_value"
+                )
+
+        self._fast_acquire = fast_acquire
+
+    async def __aenter__(self) -> Semaphore:
+        await self.acquire()
+        return self
+
+    async def __aexit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
+    ) -> None:
+        self.release()
+
+    async def acquire(self) -> None:
+        """Decrement the semaphore value, blocking if necessary."""
+        raise NotImplementedError
+
+    def acquire_nowait(self) -> None:
+        """
+        Decrement the semaphore value, without blocking.
+
+        :raises ~anyio.WouldBlock: if the operation would block
+
+        """
+        raise NotImplementedError
+
+    def release(self) -> None:
+        """Increment the semaphore value."""
+        raise NotImplementedError
+
+    @property
+    def value(self) -> int:
+        """The current value of the semaphore."""
+        raise NotImplementedError
+
+    @property
+    def max_value(self) -> int | None:
+        """The maximum value of the semaphore."""
+        raise NotImplementedError
+
+    def statistics(self) -> SemaphoreStatistics:
+        """
+        Return statistics about the current state of this semaphore.
+
+        ..
versionadded:: 3.0 + """ + raise NotImplementedError + + +class SemaphoreAdapter(Semaphore): + _internal_semaphore: Semaphore | None = None + + def __new__( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> SemaphoreAdapter: + return object.__new__(cls) + + def __init__( + self, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> None: + super().__init__(initial_value, max_value=max_value, fast_acquire=fast_acquire) + self._initial_value = initial_value + self._max_value = max_value + + @property + def _semaphore(self) -> Semaphore: + if self._internal_semaphore is None: + self._internal_semaphore = get_async_backend().create_semaphore( + self._initial_value, max_value=self._max_value + ) + + return self._internal_semaphore + + async def acquire(self) -> None: + await self._semaphore.acquire() + + def acquire_nowait(self) -> None: + self._semaphore.acquire_nowait() + + def release(self) -> None: + self._semaphore.release() + + @property + def value(self) -> int: + if self._internal_semaphore is None: + return self._initial_value + + return self._semaphore.value + + @property + def max_value(self) -> int | None: + return self._max_value + + def statistics(self) -> SemaphoreStatistics: + if self._internal_semaphore is None: + return SemaphoreStatistics(tasks_waiting=0) + + return self._semaphore.statistics() + + +class CapacityLimiter: + def __new__(cls, total_tokens: float) -> CapacityLimiter: + try: + return get_async_backend().create_capacity_limiter(total_tokens) + except AsyncLibraryNotFoundError: + return CapacityLimiterAdapter(total_tokens) + + async def __aenter__(self) -> None: + raise NotImplementedError + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + raise NotImplementedError + + @property + def total_tokens(self) -> float: + """ + The total number of tokens available for borrowing. + + This is a read-write property. If the total number of tokens is increased, the + proportionate number of tasks waiting on this limiter will be granted their + tokens. + + .. versionchanged:: 3.0 + The property is now writable. + + """ + raise NotImplementedError + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + raise NotImplementedError + + @property + def borrowed_tokens(self) -> int: + """The number of tokens that have currently been borrowed.""" + raise NotImplementedError + + @property + def available_tokens(self) -> float: + """The number of tokens currently available to be borrowed""" + raise NotImplementedError + + def acquire_nowait(self) -> None: + """ + Acquire a token for the current task without waiting for one to become + available. + + :raises ~anyio.WouldBlock: if there are no tokens available for borrowing + + """ + raise NotImplementedError + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + """ + Acquire a token without waiting for one to become available. + + :param borrower: the entity borrowing a token + :raises ~anyio.WouldBlock: if there are no tokens available for borrowing + + """ + raise NotImplementedError + + async def acquire(self) -> None: + """ + Acquire a token for the current task, waiting if necessary for one to become + available. + + """ + raise NotImplementedError + + async def acquire_on_behalf_of(self, borrower: object) -> None: + """ + Acquire a token, waiting if necessary for one to become available. 
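+
+        Editor's sketch of limiting concurrency with a :class:`CapacityLimiter`
+        (``process`` is a hypothetical coroutine)::
+
+            limiter = CapacityLimiter(4)
+
+            async def worker(item: object) -> None:
+                async with limiter:  # at most 4 workers run concurrently
+                    await process(item)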
+ + :param borrower: the entity borrowing a token + + """ + raise NotImplementedError + + def release(self) -> None: + """ + Release the token held by the current task. + + :raises RuntimeError: if the current task has not borrowed a token from this + limiter. + + """ + raise NotImplementedError + + def release_on_behalf_of(self, borrower: object) -> None: + """ + Release the token held by the given borrower. + + :raises RuntimeError: if the borrower has not borrowed a token from this + limiter. + + """ + raise NotImplementedError + + def statistics(self) -> CapacityLimiterStatistics: + """ + Return statistics about the current state of this limiter. + + .. versionadded:: 3.0 + + """ + raise NotImplementedError + + +class CapacityLimiterAdapter(CapacityLimiter): + _internal_limiter: CapacityLimiter | None = None + + def __new__(cls, total_tokens: float) -> CapacityLimiterAdapter: + return object.__new__(cls) + + def __init__(self, total_tokens: float) -> None: + self.total_tokens = total_tokens + + @property + def _limiter(self) -> CapacityLimiter: + if self._internal_limiter is None: + self._internal_limiter = get_async_backend().create_capacity_limiter( + self._total_tokens + ) + + return self._internal_limiter + + async def __aenter__(self) -> None: + await self._limiter.__aenter__() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + return await self._limiter.__aexit__(exc_type, exc_val, exc_tb) + + @property + def total_tokens(self) -> float: + if self._internal_limiter is None: + return self._total_tokens + + return self._internal_limiter.total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + if not isinstance(value, int) and value is not math.inf: + raise TypeError("total_tokens must be an int or math.inf") + elif value < 1: + raise ValueError("total_tokens must be >= 1") + + if self._internal_limiter is None: + self._total_tokens = value + return + + self._limiter.total_tokens = value + + @property + def borrowed_tokens(self) -> int: + if self._internal_limiter is None: + return 0 + + return self._internal_limiter.borrowed_tokens + + @property + def available_tokens(self) -> float: + if self._internal_limiter is None: + return self._total_tokens + + return self._internal_limiter.available_tokens + + def acquire_nowait(self) -> None: + self._limiter.acquire_nowait() + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + self._limiter.acquire_on_behalf_of_nowait(borrower) + + async def acquire(self) -> None: + await self._limiter.acquire() + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await self._limiter.acquire_on_behalf_of(borrower) + + def release(self) -> None: + self._limiter.release() + + def release_on_behalf_of(self, borrower: object) -> None: + self._limiter.release_on_behalf_of(borrower) + + def statistics(self) -> CapacityLimiterStatistics: + if self._internal_limiter is None: + return CapacityLimiterStatistics( + borrowed_tokens=0, + total_tokens=self.total_tokens, + borrowers=(), + tasks_waiting=0, + ) + + return self._internal_limiter.statistics() + + +class ResourceGuard: + """ + A context manager for ensuring that a resource is only used by a single task at a + time. + + Entering this context manager while the previous has not exited it yet will trigger + :exc:`BusyResourceError`. + + :param action: the action to guard against (visible in the :exc:`BusyResourceError` + when triggered, e.g. 
"Another task is already {action} this resource") + + .. versionadded:: 4.1 + """ + + __slots__ = "action", "_guarded" + + def __init__(self, action: str = "using"): + self.action: str = action + self._guarded = False + + def __enter__(self) -> None: + if self._guarded: + raise BusyResourceError(self.action) + + self._guarded = True + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self._guarded = False diff --git a/venv/lib/python3.11/site-packages/anyio/_core/_tasks.py b/venv/lib/python3.11/site-packages/anyio/_core/_tasks.py new file mode 100644 index 0000000..fe49015 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/_core/_tasks.py @@ -0,0 +1,158 @@ +from __future__ import annotations + +import math +from collections.abc import Generator +from contextlib import contextmanager +from types import TracebackType + +from ..abc._tasks import TaskGroup, TaskStatus +from ._eventloop import get_async_backend + + +class _IgnoredTaskStatus(TaskStatus[object]): + def started(self, value: object = None) -> None: + pass + + +TASK_STATUS_IGNORED = _IgnoredTaskStatus() + + +class CancelScope: + """ + Wraps a unit of work that can be made separately cancellable. + + :param deadline: The time (clock value) when this scope is cancelled automatically + :param shield: ``True`` to shield the cancel scope from external cancellation + """ + + def __new__( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + return get_async_backend().create_cancel_scope(shield=shield, deadline=deadline) + + def cancel(self) -> None: + """Cancel this scope immediately.""" + raise NotImplementedError + + @property + def deadline(self) -> float: + """ + The time (clock value) when this scope is cancelled automatically. + + Will be ``float('inf')`` if no timeout has been set. + + """ + raise NotImplementedError + + @deadline.setter + def deadline(self, value: float) -> None: + raise NotImplementedError + + @property + def cancel_called(self) -> bool: + """``True`` if :meth:`cancel` has been called.""" + raise NotImplementedError + + @property + def cancelled_caught(self) -> bool: + """ + ``True`` if this scope suppressed a cancellation exception it itself raised. + + This is typically used to check if any work was interrupted, or to see if the + scope was cancelled due to its deadline being reached. The value will, however, + only be ``True`` if the cancellation was triggered by the scope itself (and not + an outer scope). + + """ + raise NotImplementedError + + @property + def shield(self) -> bool: + """ + ``True`` if this scope is shielded from external cancellation. + + While a scope is shielded, it will not receive cancellations from outside. + + """ + raise NotImplementedError + + @shield.setter + def shield(self, value: bool) -> None: + raise NotImplementedError + + def __enter__(self) -> CancelScope: + raise NotImplementedError + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + raise NotImplementedError + + +@contextmanager +def fail_after( + delay: float | None, shield: bool = False +) -> Generator[CancelScope, None, None]: + """ + Create a context manager which raises a :class:`TimeoutError` if does not finish in + time. 
+ + :param delay: maximum allowed time (in seconds) before raising the exception, or + ``None`` to disable the timeout + :param shield: ``True`` to shield the cancel scope from external cancellation + :return: a context manager that yields a cancel scope + :rtype: :class:`~typing.ContextManager`\\[:class:`~anyio.CancelScope`\\] + + """ + current_time = get_async_backend().current_time + deadline = (current_time() + delay) if delay is not None else math.inf + with get_async_backend().create_cancel_scope( + deadline=deadline, shield=shield + ) as cancel_scope: + yield cancel_scope + + if cancel_scope.cancelled_caught and current_time() >= cancel_scope.deadline: + raise TimeoutError + + +def move_on_after(delay: float | None, shield: bool = False) -> CancelScope: + """ + Create a cancel scope with a deadline that expires after the given delay. + + :param delay: maximum allowed time (in seconds) before exiting the context block, or + ``None`` to disable the timeout + :param shield: ``True`` to shield the cancel scope from external cancellation + :return: a cancel scope + + """ + deadline = ( + (get_async_backend().current_time() + delay) if delay is not None else math.inf + ) + return get_async_backend().create_cancel_scope(deadline=deadline, shield=shield) + + +def current_effective_deadline() -> float: + """ + Return the nearest deadline among all the cancel scopes effective for the current + task. + + :return: a clock value from the event loop's internal clock (or ``float('inf')`` if + there is no deadline in effect, or ``float('-inf')`` if the current scope has + been cancelled) + :rtype: float + + """ + return get_async_backend().current_effective_deadline() + + +def create_task_group() -> TaskGroup: + """ + Create a task group. + + :return: a task group + + """ + return get_async_backend().create_task_group() diff --git a/venv/lib/python3.11/site-packages/anyio/_core/_tempfile.py b/venv/lib/python3.11/site-packages/anyio/_core/_tempfile.py new file mode 100644 index 0000000..26d70ec --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/_core/_tempfile.py @@ -0,0 +1,616 @@ +from __future__ import annotations + +import os +import sys +import tempfile +from collections.abc import Iterable +from io import BytesIO, TextIOWrapper +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + AnyStr, + Generic, + overload, +) + +from .. import to_thread +from .._core._fileio import AsyncFile +from ..lowlevel import checkpoint_if_cancelled + +if TYPE_CHECKING: + from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer + + +class TemporaryFile(Generic[AnyStr]): + """ + An asynchronous temporary file that is automatically created and cleaned up. + + This class provides an asynchronous context manager interface to a temporary file. + The file is created using Python's standard `tempfile.TemporaryFile` function in a + background thread, and is wrapped as an asynchronous file using `AsyncFile`. + + :param mode: The mode in which the file is opened. Defaults to "w+b". + :param buffering: The buffering policy (-1 means the default buffering). + :param encoding: The encoding used to decode or encode the file. Only applicable in + text mode. + :param newline: Controls how universal newlines mode works (only applicable in text + mode). + :param suffix: The suffix for the temporary file name. + :param prefix: The prefix for the temporary file name. + :param dir: The directory in which the temporary file is created. 
+ :param errors: The error handling scheme used for encoding/decoding errors. + """ + + _async_file: AsyncFile[AnyStr] + + @overload + def __init__( + self: TemporaryFile[bytes], + mode: OpenBinaryMode = ..., + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: str | None = ..., + prefix: str | None = ..., + dir: str | None = ..., + *, + errors: str | None = ..., + ): ... + @overload + def __init__( + self: TemporaryFile[str], + mode: OpenTextMode, + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: str | None = ..., + prefix: str | None = ..., + dir: str | None = ..., + *, + errors: str | None = ..., + ): ... + + def __init__( + self, + mode: OpenTextMode | OpenBinaryMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + *, + errors: str | None = None, + ) -> None: + self.mode = mode + self.buffering = buffering + self.encoding = encoding + self.newline = newline + self.suffix: str | None = suffix + self.prefix: str | None = prefix + self.dir: str | None = dir + self.errors = errors + + async def __aenter__(self) -> AsyncFile[AnyStr]: + fp = await to_thread.run_sync( + lambda: tempfile.TemporaryFile( + self.mode, + self.buffering, + self.encoding, + self.newline, + self.suffix, + self.prefix, + self.dir, + errors=self.errors, + ) + ) + self._async_file = AsyncFile(fp) + return self._async_file + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + await self._async_file.aclose() + + +class NamedTemporaryFile(Generic[AnyStr]): + """ + An asynchronous named temporary file that is automatically created and cleaned up. + + This class provides an asynchronous context manager for a temporary file with a + visible name in the file system. It uses Python's standard + :func:`~tempfile.NamedTemporaryFile` function and wraps the file object with + :class:`AsyncFile` for asynchronous operations. + + :param mode: The mode in which the file is opened. Defaults to "w+b". + :param buffering: The buffering policy (-1 means the default buffering). + :param encoding: The encoding used to decode or encode the file. Only applicable in + text mode. + :param newline: Controls how universal newlines mode works (only applicable in text + mode). + :param suffix: The suffix for the temporary file name. + :param prefix: The prefix for the temporary file name. + :param dir: The directory in which the temporary file is created. + :param delete: Whether to delete the file when it is closed. + :param errors: The error handling scheme used for encoding/decoding errors. + :param delete_on_close: (Python 3.12+) Whether to delete the file on close. + """ + + _async_file: AsyncFile[AnyStr] + + @overload + def __init__( + self: NamedTemporaryFile[bytes], + mode: OpenBinaryMode = ..., + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: str | None = ..., + prefix: str | None = ..., + dir: str | None = ..., + delete: bool = ..., + *, + errors: str | None = ..., + delete_on_close: bool = ..., + ): ... 
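+    # Editor's note: typical usage (illustrative sketch, not part of the vendored
+    # module):
+    #
+    #     async with NamedTemporaryFile[bytes](suffix=".log") as f:
+    #         await f.write(b"data")
+    #         await f.seek(0)
+    #         contents = await f.read()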
+ @overload + def __init__( + self: NamedTemporaryFile[str], + mode: OpenTextMode, + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: str | None = ..., + prefix: str | None = ..., + dir: str | None = ..., + delete: bool = ..., + *, + errors: str | None = ..., + delete_on_close: bool = ..., + ): ... + + def __init__( + self, + mode: OpenBinaryMode | OpenTextMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + delete: bool = True, + *, + errors: str | None = None, + delete_on_close: bool = True, + ) -> None: + self._params: dict[str, Any] = { + "mode": mode, + "buffering": buffering, + "encoding": encoding, + "newline": newline, + "suffix": suffix, + "prefix": prefix, + "dir": dir, + "delete": delete, + "errors": errors, + } + if sys.version_info >= (3, 12): + self._params["delete_on_close"] = delete_on_close + + async def __aenter__(self) -> AsyncFile[AnyStr]: + fp = await to_thread.run_sync( + lambda: tempfile.NamedTemporaryFile(**self._params) + ) + self._async_file = AsyncFile(fp) + return self._async_file + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + await self._async_file.aclose() + + +class SpooledTemporaryFile(AsyncFile[AnyStr]): + """ + An asynchronous spooled temporary file that starts in memory and is spooled to disk. + + This class provides an asynchronous interface to a spooled temporary file, much like + Python's standard :class:`~tempfile.SpooledTemporaryFile`. It supports asynchronous + write operations and provides a method to force a rollover to disk. + + :param max_size: Maximum size in bytes before the file is rolled over to disk. + :param mode: The mode in which the file is opened. Defaults to "w+b". + :param buffering: The buffering policy (-1 means the default buffering). + :param encoding: The encoding used to decode or encode the file (text mode only). + :param newline: Controls how universal newlines mode works (text mode only). + :param suffix: The suffix for the temporary file name. + :param prefix: The prefix for the temporary file name. + :param dir: The directory in which the temporary file is created. + :param errors: The error handling scheme used for encoding/decoding errors. + """ + + _rolled: bool = False + + @overload + def __init__( + self: SpooledTemporaryFile[bytes], + max_size: int = ..., + mode: OpenBinaryMode = ..., + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: str | None = ..., + prefix: str | None = ..., + dir: str | None = ..., + *, + errors: str | None = ..., + ): ... + @overload + def __init__( + self: SpooledTemporaryFile[str], + max_size: int = ..., + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: str | None = ..., + prefix: str | None = ..., + dir: str | None = ..., + *, + errors: str | None = ..., + ): ... 
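+    # Editor's note: a rough usage sketch (illustrative, not part of the vendored
+    # module). Data stays in memory until max_size is exceeded, after which it is
+    # rolled over to a real temporary file:
+    #
+    #     async with SpooledTemporaryFile[bytes](max_size=1024) as f:
+    #         await f.write(b"x" * 2048)  # exceeding max_size triggers rollover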
+
+    def __init__(
+        self,
+        max_size: int = 0,
+        mode: OpenBinaryMode | OpenTextMode = "w+b",
+        buffering: int = -1,
+        encoding: str | None = None,
+        newline: str | None = None,
+        suffix: str | None = None,
+        prefix: str | None = None,
+        dir: str | None = None,
+        *,
+        errors: str | None = None,
+    ) -> None:
+        self._tempfile_params: dict[str, Any] = {
+            "mode": mode,
+            "buffering": buffering,
+            "encoding": encoding,
+            "newline": newline,
+            "suffix": suffix,
+            "prefix": prefix,
+            "dir": dir,
+            "errors": errors,
+        }
+        self._max_size = max_size
+        if "b" in mode:
+            super().__init__(BytesIO())  # type: ignore[arg-type]
+        else:
+            super().__init__(
+                TextIOWrapper(  # type: ignore[arg-type]
+                    BytesIO(),
+                    encoding=encoding,
+                    errors=errors,
+                    newline=newline,
+                    write_through=True,
+                )
+            )
+
+    async def aclose(self) -> None:
+        if not self._rolled:
+            self._fp.close()
+            return
+
+        await super().aclose()
+
+    async def _check(self) -> None:
+        if self._rolled or self._fp.tell() < self._max_size:
+            return
+
+        await self.rollover()
+
+    async def rollover(self) -> None:
+        if self._rolled:
+            return
+
+        self._rolled = True
+        buffer = self._fp
+        buffer.seek(0)
+        self._fp = await to_thread.run_sync(
+            lambda: tempfile.TemporaryFile(**self._tempfile_params)
+        )
+        await self.write(buffer.read())
+        buffer.close()
+
+    @property
+    def closed(self) -> bool:
+        return self._fp.closed
+
+    async def read(self, size: int = -1) -> AnyStr:
+        if not self._rolled:
+            await checkpoint_if_cancelled()
+            return self._fp.read(size)
+
+        return await super().read(size)  # type: ignore[return-value]
+
+    async def read1(self: SpooledTemporaryFile[bytes], size: int = -1) -> bytes:
+        if not self._rolled:
+            await checkpoint_if_cancelled()
+            return self._fp.read1(size)
+
+        return await super().read1(size)
+
+    async def readline(self) -> AnyStr:
+        if not self._rolled:
+            await checkpoint_if_cancelled()
+            return self._fp.readline()
+
+        return await super().readline()  # type: ignore[return-value]
+
+    async def readlines(self) -> list[AnyStr]:
+        if not self._rolled:
+            await checkpoint_if_cancelled()
+            return self._fp.readlines()
+
+        return await super().readlines()  # type: ignore[return-value]
+
+    async def readinto(self: SpooledTemporaryFile[bytes], b: WriteableBuffer) -> int:
+        if not self._rolled:
+            await checkpoint_if_cancelled()
+            # Read directly from the in-memory buffer and return the byte count,
+            # instead of falling through and reading a second time below
+            return self._fp.readinto(b)
+
+        return await super().readinto(b)
+
+    async def readinto1(self: SpooledTemporaryFile[bytes], b: WriteableBuffer) -> int:
+        if not self._rolled:
+            await checkpoint_if_cancelled()
+            # Use readinto1() on the in-memory buffer and return the byte count
+            return self._fp.readinto1(b)
+
+        return await super().readinto1(b)
+
+    async def seek(self, offset: int, whence: int | None = os.SEEK_SET) -> int:
+        if not self._rolled:
+            await checkpoint_if_cancelled()
+            return self._fp.seek(offset, whence)
+
+        return await super().seek(offset, whence)
+
+    async def tell(self) -> int:
+        if not self._rolled:
+            await checkpoint_if_cancelled()
+            return self._fp.tell()
+
+        return await super().tell()
+
+    async def truncate(self, size: int | None = None) -> int:
+        if not self._rolled:
+            await checkpoint_if_cancelled()
+            return self._fp.truncate(size)
+
+        return await super().truncate(size)
+
+    @overload
+    async def write(self: SpooledTemporaryFile[bytes], b: ReadableBuffer) -> int: ...
+    @overload
+    async def write(self: SpooledTemporaryFile[str], b: str) -> int: ...
+
+    async def write(self, b: ReadableBuffer | str) -> int:
+        """
+        Asynchronously write data to the spooled temporary file.
+
+        If the file has not yet been rolled over, the data is written synchronously,
+        and a rollover is triggered if the size exceeds the maximum size.
+
+        :param b: The data to write.
+        :return: The number of bytes written.
+        :raises RuntimeError: If the underlying file is not initialized.
+
+        """
+        if not self._rolled:
+            await checkpoint_if_cancelled()
+            result = self._fp.write(b)
+            await self._check()
+            return result
+
+        return await super().write(b)  # type: ignore[misc]
+
+    @overload
+    async def writelines(
+        self: SpooledTemporaryFile[bytes], lines: Iterable[ReadableBuffer]
+    ) -> None: ...
+    @overload
+    async def writelines(
+        self: SpooledTemporaryFile[str], lines: Iterable[str]
+    ) -> None: ...
+
+    async def writelines(self, lines: Iterable[str] | Iterable[ReadableBuffer]) -> None:
+        """
+        Asynchronously write a list of lines to the spooled temporary file.
+
+        If the file has not yet been rolled over, the lines are written synchronously,
+        and a rollover is triggered if the size exceeds the maximum size.
+
+        :param lines: An iterable of lines to write.
+        :raises RuntimeError: If the underlying file is not initialized.
+
+        """
+        if not self._rolled:
+            await checkpoint_if_cancelled()
+            result = self._fp.writelines(lines)
+            await self._check()
+            return result
+
+        return await super().writelines(lines)  # type: ignore[misc]
+
+
+class TemporaryDirectory(Generic[AnyStr]):
+    """
+    An asynchronous temporary directory that is created and cleaned up automatically.
+
+    This class provides an asynchronous context manager for creating a temporary
+    directory. It wraps Python's standard :class:`~tempfile.TemporaryDirectory` to
+    perform directory creation and cleanup operations in a background thread.
+
+    :param suffix: Suffix to be added to the temporary directory name.
+    :param prefix: Prefix to be added to the temporary directory name.
+    :param dir: The parent directory where the temporary directory is created.
+    :param ignore_cleanup_errors: Whether to ignore errors during cleanup
+        (Python 3.10+).
+    :param delete: Whether to delete the directory upon closing (Python 3.12+).
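+
+    Usage sketch (editor's addition)::
+
+        async with TemporaryDirectory() as path:
+            ...  # ``path`` is a str; the directory is removed on exit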
+ """ + + def __init__( + self, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: AnyStr | None = None, + *, + ignore_cleanup_errors: bool = False, + delete: bool = True, + ) -> None: + self.suffix: AnyStr | None = suffix + self.prefix: AnyStr | None = prefix + self.dir: AnyStr | None = dir + self.ignore_cleanup_errors = ignore_cleanup_errors + self.delete = delete + + self._tempdir: tempfile.TemporaryDirectory | None = None + + async def __aenter__(self) -> str: + params: dict[str, Any] = { + "suffix": self.suffix, + "prefix": self.prefix, + "dir": self.dir, + } + if sys.version_info >= (3, 10): + params["ignore_cleanup_errors"] = self.ignore_cleanup_errors + + if sys.version_info >= (3, 12): + params["delete"] = self.delete + + self._tempdir = await to_thread.run_sync( + lambda: tempfile.TemporaryDirectory(**params) + ) + return await to_thread.run_sync(self._tempdir.__enter__) + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + if self._tempdir is not None: + await to_thread.run_sync( + self._tempdir.__exit__, exc_type, exc_value, traceback + ) + + async def cleanup(self) -> None: + if self._tempdir is not None: + await to_thread.run_sync(self._tempdir.cleanup) + + +@overload +async def mkstemp( + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + text: bool = False, +) -> tuple[int, str]: ... + + +@overload +async def mkstemp( + suffix: bytes | None = None, + prefix: bytes | None = None, + dir: bytes | None = None, + text: bool = False, +) -> tuple[int, bytes]: ... + + +async def mkstemp( + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: AnyStr | None = None, + text: bool = False, +) -> tuple[int, str | bytes]: + """ + Asynchronously create a temporary file and return an OS-level handle and the file + name. + + This function wraps `tempfile.mkstemp` and executes it in a background thread. + + :param suffix: Suffix to be added to the file name. + :param prefix: Prefix to be added to the file name. + :param dir: Directory in which the temporary file is created. + :param text: Whether the file is opened in text mode. + :return: A tuple containing the file descriptor and the file name. + + """ + return await to_thread.run_sync(tempfile.mkstemp, suffix, prefix, dir, text) + + +@overload +async def mkdtemp( + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, +) -> str: ... + + +@overload +async def mkdtemp( + suffix: bytes | None = None, + prefix: bytes | None = None, + dir: bytes | None = None, +) -> bytes: ... + + +async def mkdtemp( + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: AnyStr | None = None, +) -> str | bytes: + """ + Asynchronously create a temporary directory and return its path. + + This function wraps `tempfile.mkdtemp` and executes it in a background thread. + + :param suffix: Suffix to be added to the directory name. + :param prefix: Prefix to be added to the directory name. + :param dir: Parent directory where the temporary directory is created. + :return: The path of the created temporary directory. + + """ + return await to_thread.run_sync(tempfile.mkdtemp, suffix, prefix, dir) + + +async def gettempdir() -> str: + """ + Asynchronously return the name of the directory used for temporary files. + + This function wraps `tempfile.gettempdir` and executes it in a background thread. + + :return: The path of the temporary directory as a string. 
+ + """ + return await to_thread.run_sync(tempfile.gettempdir) + + +async def gettempdirb() -> bytes: + """ + Asynchronously return the name of the directory used for temporary files in bytes. + + This function wraps `tempfile.gettempdirb` and executes it in a background thread. + + :return: The path of the temporary directory as bytes. + + """ + return await to_thread.run_sync(tempfile.gettempdirb) diff --git a/venv/lib/python3.11/site-packages/anyio/_core/_testing.py b/venv/lib/python3.11/site-packages/anyio/_core/_testing.py new file mode 100644 index 0000000..9e28b22 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/_core/_testing.py @@ -0,0 +1,78 @@ +from __future__ import annotations + +from collections.abc import Awaitable, Generator +from typing import Any, cast + +from ._eventloop import get_async_backend + + +class TaskInfo: + """ + Represents an asynchronous task. + + :ivar int id: the unique identifier of the task + :ivar parent_id: the identifier of the parent task, if any + :vartype parent_id: Optional[int] + :ivar str name: the description of the task (if any) + :ivar ~collections.abc.Coroutine coro: the coroutine object of the task + """ + + __slots__ = "_name", "id", "parent_id", "name", "coro" + + def __init__( + self, + id: int, + parent_id: int | None, + name: str | None, + coro: Generator[Any, Any, Any] | Awaitable[Any], + ): + func = get_current_task + self._name = f"{func.__module__}.{func.__qualname__}" + self.id: int = id + self.parent_id: int | None = parent_id + self.name: str | None = name + self.coro: Generator[Any, Any, Any] | Awaitable[Any] = coro + + def __eq__(self, other: object) -> bool: + if isinstance(other, TaskInfo): + return self.id == other.id + + return NotImplemented + + def __hash__(self) -> int: + return hash(self.id) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}(id={self.id!r}, name={self.name!r})" + + def has_pending_cancellation(self) -> bool: + """ + Return ``True`` if the task has a cancellation pending, ``False`` otherwise. + + """ + return False + + +def get_current_task() -> TaskInfo: + """ + Return the current task. + + :return: a representation of the current task + + """ + return get_async_backend().get_current_task() + + +def get_running_tasks() -> list[TaskInfo]: + """ + Return a list of running tasks in the current event loop. + + :return: a list of task info objects + + """ + return cast("list[TaskInfo]", get_async_backend().get_running_tasks()) + + +async def wait_all_tasks_blocked() -> None: + """Wait until all other tasks are waiting for something.""" + await get_async_backend().wait_all_tasks_blocked() diff --git a/venv/lib/python3.11/site-packages/anyio/_core/_typedattr.py b/venv/lib/python3.11/site-packages/anyio/_core/_typedattr.py new file mode 100644 index 0000000..f358a44 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/_core/_typedattr.py @@ -0,0 +1,81 @@ +from __future__ import annotations + +from collections.abc import Callable, Mapping +from typing import Any, TypeVar, final, overload + +from ._exceptions import TypedAttributeLookupError + +T_Attr = TypeVar("T_Attr") +T_Default = TypeVar("T_Default") +undefined = object() + + +def typed_attribute() -> Any: + """Return a unique object, used to mark typed attributes.""" + return object() + + +class TypedAttributeSet: + """ + Superclass for typed attribute collections. + + Checks that every public attribute of every subclass has a type annotation. 
+ """ + + def __init_subclass__(cls) -> None: + annotations: dict[str, Any] = getattr(cls, "__annotations__", {}) + for attrname in dir(cls): + if not attrname.startswith("_") and attrname not in annotations: + raise TypeError( + f"Attribute {attrname!r} is missing its type annotation" + ) + + super().__init_subclass__() + + +class TypedAttributeProvider: + """Base class for classes that wish to provide typed extra attributes.""" + + @property + def extra_attributes(self) -> Mapping[T_Attr, Callable[[], T_Attr]]: + """ + A mapping of the extra attributes to callables that return the corresponding + values. + + If the provider wraps another provider, the attributes from that wrapper should + also be included in the returned mapping (but the wrapper may override the + callables from the wrapped instance). + + """ + return {} + + @overload + def extra(self, attribute: T_Attr) -> T_Attr: ... + + @overload + def extra(self, attribute: T_Attr, default: T_Default) -> T_Attr | T_Default: ... + + @final + def extra(self, attribute: Any, default: object = undefined) -> object: + """ + extra(attribute, default=undefined) + + Return the value of the given typed extra attribute. + + :param attribute: the attribute (member of a :class:`~TypedAttributeSet`) to + look for + :param default: the value that should be returned if no value is found for the + attribute + :raises ~anyio.TypedAttributeLookupError: if the search failed and no default + value was given + + """ + try: + getter = self.extra_attributes[attribute] + except KeyError: + if default is undefined: + raise TypedAttributeLookupError("Attribute not found") from None + else: + return default + + return getter() diff --git a/venv/lib/python3.11/site-packages/anyio/abc/__init__.py b/venv/lib/python3.11/site-packages/anyio/abc/__init__.py new file mode 100644 index 0000000..3d3b61c --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/abc/__init__.py @@ -0,0 +1,55 @@ +from __future__ import annotations + +from ._eventloop import AsyncBackend as AsyncBackend +from ._resources import AsyncResource as AsyncResource +from ._sockets import ConnectedUDPSocket as ConnectedUDPSocket +from ._sockets import ConnectedUNIXDatagramSocket as ConnectedUNIXDatagramSocket +from ._sockets import IPAddressType as IPAddressType +from ._sockets import IPSockAddrType as IPSockAddrType +from ._sockets import SocketAttribute as SocketAttribute +from ._sockets import SocketListener as SocketListener +from ._sockets import SocketStream as SocketStream +from ._sockets import UDPPacketType as UDPPacketType +from ._sockets import UDPSocket as UDPSocket +from ._sockets import UNIXDatagramPacketType as UNIXDatagramPacketType +from ._sockets import UNIXDatagramSocket as UNIXDatagramSocket +from ._sockets import UNIXSocketStream as UNIXSocketStream +from ._streams import AnyByteReceiveStream as AnyByteReceiveStream +from ._streams import AnyByteSendStream as AnyByteSendStream +from ._streams import AnyByteStream as AnyByteStream +from ._streams import AnyUnreliableByteReceiveStream as AnyUnreliableByteReceiveStream +from ._streams import AnyUnreliableByteSendStream as AnyUnreliableByteSendStream +from ._streams import AnyUnreliableByteStream as AnyUnreliableByteStream +from ._streams import ByteReceiveStream as ByteReceiveStream +from ._streams import ByteSendStream as ByteSendStream +from ._streams import ByteStream as ByteStream +from ._streams import Listener as Listener +from ._streams import ObjectReceiveStream as ObjectReceiveStream +from ._streams import 
ObjectSendStream as ObjectSendStream +from ._streams import ObjectStream as ObjectStream +from ._streams import UnreliableObjectReceiveStream as UnreliableObjectReceiveStream +from ._streams import UnreliableObjectSendStream as UnreliableObjectSendStream +from ._streams import UnreliableObjectStream as UnreliableObjectStream +from ._subprocesses import Process as Process +from ._tasks import TaskGroup as TaskGroup +from ._tasks import TaskStatus as TaskStatus +from ._testing import TestRunner as TestRunner + +# Re-exported here, for backwards compatibility +# isort: off +from .._core._synchronization import ( + CapacityLimiter as CapacityLimiter, + Condition as Condition, + Event as Event, + Lock as Lock, + Semaphore as Semaphore, +) +from .._core._tasks import CancelScope as CancelScope +from ..from_thread import BlockingPortal as BlockingPortal + +# Re-export imports so they look like they live directly in this package +for __value in list(locals().values()): + if getattr(__value, "__module__", "").startswith("anyio.abc."): + __value.__module__ = __name__ + +del __value diff --git a/venv/lib/python3.11/site-packages/anyio/abc/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/abc/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..fa5a284 Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/abc/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/abc/__pycache__/_eventloop.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/abc/__pycache__/_eventloop.cpython-311.pyc new file mode 100644 index 0000000..47d1d6c Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/abc/__pycache__/_eventloop.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/abc/__pycache__/_resources.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/abc/__pycache__/_resources.cpython-311.pyc new file mode 100644 index 0000000..808c8cf Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/abc/__pycache__/_resources.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/abc/__pycache__/_sockets.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/abc/__pycache__/_sockets.cpython-311.pyc new file mode 100644 index 0000000..8004f46 Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/abc/__pycache__/_sockets.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/abc/__pycache__/_streams.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/abc/__pycache__/_streams.cpython-311.pyc new file mode 100644 index 0000000..215eb5c Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/abc/__pycache__/_streams.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-311.pyc new file mode 100644 index 0000000..ea6845a Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/abc/__pycache__/_tasks.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/abc/__pycache__/_tasks.cpython-311.pyc new file mode 100644 index 0000000..5521b0c Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/abc/__pycache__/_tasks.cpython-311.pyc differ diff --git 
a/venv/lib/python3.11/site-packages/anyio/abc/__pycache__/_testing.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/abc/__pycache__/_testing.cpython-311.pyc
new file mode 100644
index 0000000..a2564a6
Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/abc/__pycache__/_testing.cpython-311.pyc differ
diff --git a/venv/lib/python3.11/site-packages/anyio/abc/_eventloop.py b/venv/lib/python3.11/site-packages/anyio/abc/_eventloop.py
new file mode 100644
index 0000000..4cfce83
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/anyio/abc/_eventloop.py
@@ -0,0 +1,376 @@
+from __future__ import annotations
+
+import math
+import sys
+from abc import ABCMeta, abstractmethod
+from collections.abc import AsyncIterator, Awaitable, Callable, Sequence
+from contextlib import AbstractContextManager
+from os import PathLike
+from signal import Signals
+from socket import AddressFamily, SocketKind, socket
+from typing import (
+    IO,
+    TYPE_CHECKING,
+    Any,
+    TypeVar,
+    Union,
+    overload,
+)
+
+if sys.version_info >= (3, 11):
+    from typing import TypeVarTuple, Unpack
+else:
+    from typing_extensions import TypeVarTuple, Unpack
+
+if sys.version_info >= (3, 10):
+    from typing import TypeAlias
+else:
+    from typing_extensions import TypeAlias
+
+if TYPE_CHECKING:
+    from _typeshed import HasFileno
+
+    from .._core._synchronization import CapacityLimiter, Event, Lock, Semaphore
+    from .._core._tasks import CancelScope
+    from .._core._testing import TaskInfo
+    from ..from_thread import BlockingPortal
+    from ._sockets import (
+        ConnectedUDPSocket,
+        ConnectedUNIXDatagramSocket,
+        IPSockAddrType,
+        SocketListener,
+        SocketStream,
+        UDPSocket,
+        UNIXDatagramSocket,
+        UNIXSocketStream,
+    )
+    from ._subprocesses import Process
+    from ._tasks import TaskGroup
+    from ._testing import TestRunner
+
+T_Retval = TypeVar("T_Retval")
+PosArgsT = TypeVarTuple("PosArgsT")
+StrOrBytesPath: TypeAlias = Union[str, bytes, "PathLike[str]", "PathLike[bytes]"]
+
+
+class AsyncBackend(metaclass=ABCMeta):
+    @classmethod
+    @abstractmethod
+    def run(
+        cls,
+        func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]],
+        args: tuple[Unpack[PosArgsT]],
+        kwargs: dict[str, Any],
+        options: dict[str, Any],
+    ) -> T_Retval:
+        """
+        Run the given coroutine function in an asynchronous event loop.
+
+        The current thread must not be already running an event loop.
+
+        :param func: a coroutine function
+        :param args: positional arguments to ``func``
+        :param kwargs: keyword arguments to ``func``
+        :param options: keyword arguments to call the backend ``run()`` implementation
+            with
+        :return: the return value of the coroutine function
+        """
+
+    @classmethod
+    @abstractmethod
+    def current_token(cls) -> object:
+        """
+        :return: a backend-specific token object identifying the running event loop
+        """
+
+    @classmethod
+    @abstractmethod
+    def current_time(cls) -> float:
+        """
+        Return the current value of the event loop's internal clock.
+
+        :return: the clock value (seconds)
+        """
+
+    @classmethod
+    @abstractmethod
+    def cancelled_exception_class(cls) -> type[BaseException]:
+        """Return the exception class that is raised in a task if it's cancelled."""
+
+    @classmethod
+    @abstractmethod
+    async def checkpoint(cls) -> None:
+        """
+        Check if the task has been cancelled, and allow rescheduling of other tasks.
+
+        This is effectively the same as running :meth:`checkpoint_if_cancelled` and then
+        :meth:`cancel_shielded_checkpoint`.
+        """
+
+    @classmethod
+    async def checkpoint_if_cancelled(cls) -> None:
+        """
+        Check if the current task has been cancelled.
+ + This will check if the task has been cancelled, but will not allow other tasks + to be scheduled if not. + + """ + if cls.current_effective_deadline() == -math.inf: + await cls.checkpoint() + + @classmethod + async def cancel_shielded_checkpoint(cls) -> None: + """ + Allow the rescheduling of other tasks. + + This will give other tasks the opportunity to run, but without checking if the + current task group has been cancelled, unlike with :meth:`checkpoint`. + + """ + with cls.create_cancel_scope(shield=True): + await cls.sleep(0) + + @classmethod + @abstractmethod + async def sleep(cls, delay: float) -> None: + """ + Pause the current task for the specified duration. + + :param delay: the duration, in seconds + """ + + @classmethod + @abstractmethod + def create_cancel_scope( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + pass + + @classmethod + @abstractmethod + def current_effective_deadline(cls) -> float: + """ + Return the nearest deadline among all the cancel scopes effective for the + current task. + + :return: + - a clock value from the event loop's internal clock + - ``inf`` if there is no deadline in effect + - ``-inf`` if the current scope has been cancelled + :rtype: float + """ + + @classmethod + @abstractmethod + def create_task_group(cls) -> TaskGroup: + pass + + @classmethod + @abstractmethod + def create_event(cls) -> Event: + pass + + @classmethod + @abstractmethod + def create_lock(cls, *, fast_acquire: bool) -> Lock: + pass + + @classmethod + @abstractmethod + def create_semaphore( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> Semaphore: + pass + + @classmethod + @abstractmethod + def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter: + pass + + @classmethod + @abstractmethod + async def run_sync_in_worker_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + abandon_on_cancel: bool = False, + limiter: CapacityLimiter | None = None, + ) -> T_Retval: + pass + + @classmethod + @abstractmethod + def check_cancelled(cls) -> None: + pass + + @classmethod + @abstractmethod + def run_async_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + pass + + @classmethod + @abstractmethod + def run_sync_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + pass + + @classmethod + @abstractmethod + def create_blocking_portal(cls) -> BlockingPortal: + pass + + @classmethod + @abstractmethod + async def open_process( + cls, + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + **kwargs: Any, + ) -> Process: + pass + + @classmethod + @abstractmethod + def setup_process_pool_exit_at_shutdown(cls, workers: set[Process]) -> None: + pass + + @classmethod + @abstractmethod + async def connect_tcp( + cls, host: str, port: int, local_address: IPSockAddrType | None = None + ) -> SocketStream: + pass + + @classmethod + @abstractmethod + async def connect_unix(cls, path: str | bytes) -> UNIXSocketStream: + pass + + @classmethod + @abstractmethod + def create_tcp_listener(cls, sock: socket) -> SocketListener: + pass + + @classmethod + @abstractmethod + def create_unix_listener(cls, sock: socket) -> SocketListener: + pass + + @classmethod + @abstractmethod + async def 
create_udp_socket( + cls, + family: AddressFamily, + local_address: IPSockAddrType | None, + remote_address: IPSockAddrType | None, + reuse_port: bool, + ) -> UDPSocket | ConnectedUDPSocket: + pass + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket, remote_path: None + ) -> UNIXDatagramSocket: ... + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket, remote_path: str | bytes + ) -> ConnectedUNIXDatagramSocket: ... + + @classmethod + @abstractmethod + async def create_unix_datagram_socket( + cls, raw_socket: socket, remote_path: str | bytes | None + ) -> UNIXDatagramSocket | ConnectedUNIXDatagramSocket: + pass + + @classmethod + @abstractmethod + async def getaddrinfo( + cls, + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, + ) -> Sequence[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes], + ] + ]: + pass + + @classmethod + @abstractmethod + async def getnameinfo( + cls, sockaddr: IPSockAddrType, flags: int = 0 + ) -> tuple[str, str]: + pass + + @classmethod + @abstractmethod + async def wait_readable(cls, obj: HasFileno | int) -> None: + pass + + @classmethod + @abstractmethod + async def wait_writable(cls, obj: HasFileno | int) -> None: + pass + + @classmethod + @abstractmethod + def current_default_thread_limiter(cls) -> CapacityLimiter: + pass + + @classmethod + @abstractmethod + def open_signal_receiver( + cls, *signals: Signals + ) -> AbstractContextManager[AsyncIterator[Signals]]: + pass + + @classmethod + @abstractmethod + def get_current_task(cls) -> TaskInfo: + pass + + @classmethod + @abstractmethod + def get_running_tasks(cls) -> Sequence[TaskInfo]: + pass + + @classmethod + @abstractmethod + async def wait_all_tasks_blocked(cls) -> None: + pass + + @classmethod + @abstractmethod + def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: + pass diff --git a/venv/lib/python3.11/site-packages/anyio/abc/_resources.py b/venv/lib/python3.11/site-packages/anyio/abc/_resources.py new file mode 100644 index 0000000..10df115 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/abc/_resources.py @@ -0,0 +1,33 @@ +from __future__ import annotations + +from abc import ABCMeta, abstractmethod +from types import TracebackType +from typing import TypeVar + +T = TypeVar("T") + + +class AsyncResource(metaclass=ABCMeta): + """ + Abstract base class for all closeable asynchronous resources. + + Works as an asynchronous context manager which returns the instance itself on enter, + and calls :meth:`aclose` on exit. 
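+
+    A usage sketch (``resource`` stands for any concrete ``AsyncResource``)::
+
+        async with resource:
+            ...
+        # aclose() has been awaited at this point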
+ """ + + __slots__ = () + + async def __aenter__(self: T) -> T: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + await self.aclose() + + @abstractmethod + async def aclose(self) -> None: + """Close the resource.""" diff --git a/venv/lib/python3.11/site-packages/anyio/abc/_sockets.py b/venv/lib/python3.11/site-packages/anyio/abc/_sockets.py new file mode 100644 index 0000000..1c6a450 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/abc/_sockets.py @@ -0,0 +1,194 @@ +from __future__ import annotations + +import socket +from abc import abstractmethod +from collections.abc import Callable, Collection, Mapping +from contextlib import AsyncExitStack +from io import IOBase +from ipaddress import IPv4Address, IPv6Address +from socket import AddressFamily +from types import TracebackType +from typing import Any, TypeVar, Union + +from .._core._typedattr import ( + TypedAttributeProvider, + TypedAttributeSet, + typed_attribute, +) +from ._streams import ByteStream, Listener, UnreliableObjectStream +from ._tasks import TaskGroup + +IPAddressType = Union[str, IPv4Address, IPv6Address] +IPSockAddrType = tuple[str, int] +SockAddrType = Union[IPSockAddrType, str] +UDPPacketType = tuple[bytes, IPSockAddrType] +UNIXDatagramPacketType = tuple[bytes, str] +T_Retval = TypeVar("T_Retval") + + +class _NullAsyncContextManager: + async def __aenter__(self) -> None: + pass + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + return None + + +class SocketAttribute(TypedAttributeSet): + #: the address family of the underlying socket + family: AddressFamily = typed_attribute() + #: the local socket address of the underlying socket + local_address: SockAddrType = typed_attribute() + #: for IP addresses, the local port the underlying socket is bound to + local_port: int = typed_attribute() + #: the underlying stdlib socket object + raw_socket: socket.socket = typed_attribute() + #: the remote address the underlying socket is connected to + remote_address: SockAddrType = typed_attribute() + #: for IP addresses, the remote port the underlying socket is connected to + remote_port: int = typed_attribute() + + +class _SocketProvider(TypedAttributeProvider): + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + from .._core._sockets import convert_ipv6_sockaddr as convert + + attributes: dict[Any, Callable[[], Any]] = { + SocketAttribute.family: lambda: self._raw_socket.family, + SocketAttribute.local_address: lambda: convert( + self._raw_socket.getsockname() + ), + SocketAttribute.raw_socket: lambda: self._raw_socket, + } + try: + peername: tuple[str, int] | None = convert(self._raw_socket.getpeername()) + except OSError: + peername = None + + # Provide the remote address for connected sockets + if peername is not None: + attributes[SocketAttribute.remote_address] = lambda: peername + + # Provide local and remote ports for IP based sockets + if self._raw_socket.family in (AddressFamily.AF_INET, AddressFamily.AF_INET6): + attributes[SocketAttribute.local_port] = ( + lambda: self._raw_socket.getsockname()[1] + ) + if peername is not None: + remote_port = peername[1] + attributes[SocketAttribute.remote_port] = lambda: remote_port + + return attributes + + @property + @abstractmethod + def _raw_socket(self) -> socket.socket: + pass + + +class SocketStream(ByteStream, 
_SocketProvider):
+    """
+    Transports bytes over a socket.
+
+    Supports all relevant extra attributes from :class:`~SocketAttribute`.
+    """
+
+
+class UNIXSocketStream(SocketStream):
+    @abstractmethod
+    async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None:
+        """
+        Send file descriptors along with a message to the peer.
+
+        :param message: a non-empty bytestring
+        :param fds: a collection of files (either numeric file descriptors or open
+            file or socket objects)
+        """
+
+    @abstractmethod
+    async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]:
+        """
+        Receive file descriptors along with a message from the peer.
+
+        :param msglen: length of the message to expect from the peer
+        :param maxfds: maximum number of file descriptors to expect from the peer
+        :return: a tuple of (message, file descriptors)
+        """
+
+
+class SocketListener(Listener[SocketStream], _SocketProvider):
+    """
+    Listens to incoming socket connections.
+
+    Supports all relevant extra attributes from :class:`~SocketAttribute`.
+    """
+
+    @abstractmethod
+    async def accept(self) -> SocketStream:
+        """Accept an incoming connection."""
+
+    async def serve(
+        self,
+        handler: Callable[[SocketStream], Any],
+        task_group: TaskGroup | None = None,
+    ) -> None:
+        from .. import create_task_group
+
+        async with AsyncExitStack() as stack:
+            if task_group is None:
+                task_group = await stack.enter_async_context(create_task_group())
+
+            while True:
+                stream = await self.accept()
+                task_group.start_soon(handler, stream)
+
+
+class UDPSocket(UnreliableObjectStream[UDPPacketType], _SocketProvider):
+    """
+    Represents an unconnected UDP socket.
+
+    Supports all relevant extra attributes from :class:`~SocketAttribute`.
+    """
+
+    async def sendto(self, data: bytes, host: str, port: int) -> None:
+        """
+        Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, (host, port))).
+
+        """
+        return await self.send((data, (host, port)))
+
+
+class ConnectedUDPSocket(UnreliableObjectStream[bytes], _SocketProvider):
+    """
+    Represents a connected UDP socket.
+
+    Supports all relevant extra attributes from :class:`~SocketAttribute`.
+    """
+
+
+class UNIXDatagramSocket(
+    UnreliableObjectStream[UNIXDatagramPacketType], _SocketProvider
+):
+    """
+    Represents an unconnected Unix datagram socket.
+
+    Supports all relevant extra attributes from :class:`~SocketAttribute`.
+    """
+
+    async def sendto(self, data: bytes, path: str) -> None:
+        """Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, path))."""
+        return await self.send((data, path))
+
+
+class ConnectedUNIXDatagramSocket(UnreliableObjectStream[bytes], _SocketProvider):
+    """
+    Represents a connected Unix datagram socket.
+
+    Supports all relevant extra attributes from :class:`~SocketAttribute`.
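+
+    A receive-loop sketch (``sock`` stands for a connected socket instance)::
+
+        async for datagram in sock:
+            ...  # each ``datagram`` is a ``bytes`` object from the peer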
+ """ diff --git a/venv/lib/python3.11/site-packages/anyio/abc/_streams.py b/venv/lib/python3.11/site-packages/anyio/abc/_streams.py new file mode 100644 index 0000000..f11d97b --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/abc/_streams.py @@ -0,0 +1,203 @@ +from __future__ import annotations + +from abc import abstractmethod +from collections.abc import Callable +from typing import Any, Generic, TypeVar, Union + +from .._core._exceptions import EndOfStream +from .._core._typedattr import TypedAttributeProvider +from ._resources import AsyncResource +from ._tasks import TaskGroup + +T_Item = TypeVar("T_Item") +T_co = TypeVar("T_co", covariant=True) +T_contra = TypeVar("T_contra", contravariant=True) + + +class UnreliableObjectReceiveStream( + Generic[T_co], AsyncResource, TypedAttributeProvider +): + """ + An interface for receiving objects. + + This interface makes no guarantees that the received messages arrive in the order in + which they were sent, or that no messages are missed. + + Asynchronously iterating over objects of this type will yield objects matching the + given type parameter. + """ + + def __aiter__(self) -> UnreliableObjectReceiveStream[T_co]: + return self + + async def __anext__(self) -> T_co: + try: + return await self.receive() + except EndOfStream: + raise StopAsyncIteration + + @abstractmethod + async def receive(self) -> T_co: + """ + Receive the next item. + + :raises ~anyio.ClosedResourceError: if the receive stream has been explicitly + closed + :raises ~anyio.EndOfStream: if this stream has been closed from the other end + :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable + due to external causes + """ + + +class UnreliableObjectSendStream( + Generic[T_contra], AsyncResource, TypedAttributeProvider +): + """ + An interface for sending objects. + + This interface makes no guarantees that the messages sent will reach the + recipient(s) in the same order in which they were sent, or at all. + """ + + @abstractmethod + async def send(self, item: T_contra) -> None: + """ + Send an item to the peer(s). + + :param item: the item to send + :raises ~anyio.ClosedResourceError: if the send stream has been explicitly + closed + :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable + due to external causes + """ + + +class UnreliableObjectStream( + UnreliableObjectReceiveStream[T_Item], UnreliableObjectSendStream[T_Item] +): + """ + A bidirectional message stream which does not guarantee the order or reliability of + message delivery. + """ + + +class ObjectReceiveStream(UnreliableObjectReceiveStream[T_co]): + """ + A receive message stream which guarantees that messages are received in the same + order in which they were sent, and that no messages are missed. + """ + + +class ObjectSendStream(UnreliableObjectSendStream[T_contra]): + """ + A send message stream which guarantees that messages are delivered in the same order + in which they were sent, without missing any messages in the middle. + """ + + +class ObjectStream( + ObjectReceiveStream[T_Item], + ObjectSendStream[T_Item], + UnreliableObjectStream[T_Item], +): + """ + A bidirectional message stream which guarantees the order and reliability of message + delivery. + """ + + @abstractmethod + async def send_eof(self) -> None: + """ + Send an end-of-file indication to the peer. + + You should not try to send any further data to this stream after calling this + method. This method is idempotent (does nothing on successive calls). 
+ """ + + +class ByteReceiveStream(AsyncResource, TypedAttributeProvider): + """ + An interface for receiving bytes from a single peer. + + Iterating this byte stream will yield a byte string of arbitrary length, but no more + than 65536 bytes. + """ + + def __aiter__(self) -> ByteReceiveStream: + return self + + async def __anext__(self) -> bytes: + try: + return await self.receive() + except EndOfStream: + raise StopAsyncIteration + + @abstractmethod + async def receive(self, max_bytes: int = 65536) -> bytes: + """ + Receive at most ``max_bytes`` bytes from the peer. + + .. note:: Implementers of this interface should not return an empty + :class:`bytes` object, and users should ignore them. + + :param max_bytes: maximum number of bytes to receive + :return: the received bytes + :raises ~anyio.EndOfStream: if this stream has been closed from the other end + """ + + +class ByteSendStream(AsyncResource, TypedAttributeProvider): + """An interface for sending bytes to a single peer.""" + + @abstractmethod + async def send(self, item: bytes) -> None: + """ + Send the given bytes to the peer. + + :param item: the bytes to send + """ + + +class ByteStream(ByteReceiveStream, ByteSendStream): + """A bidirectional byte stream.""" + + @abstractmethod + async def send_eof(self) -> None: + """ + Send an end-of-file indication to the peer. + + You should not try to send any further data to this stream after calling this + method. This method is idempotent (does nothing on successive calls). + """ + + +#: Type alias for all unreliable bytes-oriented receive streams. +AnyUnreliableByteReceiveStream = Union[ + UnreliableObjectReceiveStream[bytes], ByteReceiveStream +] +#: Type alias for all unreliable bytes-oriented send streams. +AnyUnreliableByteSendStream = Union[UnreliableObjectSendStream[bytes], ByteSendStream] +#: Type alias for all unreliable bytes-oriented streams. +AnyUnreliableByteStream = Union[UnreliableObjectStream[bytes], ByteStream] +#: Type alias for all bytes-oriented receive streams. +AnyByteReceiveStream = Union[ObjectReceiveStream[bytes], ByteReceiveStream] +#: Type alias for all bytes-oriented send streams. +AnyByteSendStream = Union[ObjectSendStream[bytes], ByteSendStream] +#: Type alias for all bytes-oriented streams. +AnyByteStream = Union[ObjectStream[bytes], ByteStream] + + +class Listener(Generic[T_co], AsyncResource, TypedAttributeProvider): + """An interface for objects that let you accept incoming connections.""" + + @abstractmethod + async def serve( + self, handler: Callable[[T_co], Any], task_group: TaskGroup | None = None + ) -> None: + """ + Accept incoming connections as they come in and start tasks to handle them. 
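+
+        A sketch using :func:`anyio.create_tcp_listener` (``handle_connection``
+        is a hypothetical handler)::
+
+            listener = await anyio.create_tcp_listener(local_port=8000)
+            await listener.serve(handle_connection)  # runs until cancelled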
+ + :param handler: a callable that will be used to handle each accepted connection + :param task_group: the task group that will be used to start tasks for handling + each accepted connection (if omitted, an ad-hoc task group will be created) + """ diff --git a/venv/lib/python3.11/site-packages/anyio/abc/_subprocesses.py b/venv/lib/python3.11/site-packages/anyio/abc/_subprocesses.py new file mode 100644 index 0000000..ce0564c --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/abc/_subprocesses.py @@ -0,0 +1,79 @@ +from __future__ import annotations + +from abc import abstractmethod +from signal import Signals + +from ._resources import AsyncResource +from ._streams import ByteReceiveStream, ByteSendStream + + +class Process(AsyncResource): + """An asynchronous version of :class:`subprocess.Popen`.""" + + @abstractmethod + async def wait(self) -> int: + """ + Wait until the process exits. + + :return: the exit code of the process + """ + + @abstractmethod + def terminate(self) -> None: + """ + Terminates the process, gracefully if possible. + + On Windows, this calls ``TerminateProcess()``. + On POSIX systems, this sends ``SIGTERM`` to the process. + + .. seealso:: :meth:`subprocess.Popen.terminate` + """ + + @abstractmethod + def kill(self) -> None: + """ + Kills the process. + + On Windows, this calls ``TerminateProcess()``. + On POSIX systems, this sends ``SIGKILL`` to the process. + + .. seealso:: :meth:`subprocess.Popen.kill` + """ + + @abstractmethod + def send_signal(self, signal: Signals) -> None: + """ + Send a signal to the subprocess. + + .. seealso:: :meth:`subprocess.Popen.send_signal` + + :param signal: the signal number (e.g. :data:`signal.SIGHUP`) + """ + + @property + @abstractmethod + def pid(self) -> int: + """The process ID of the process.""" + + @property + @abstractmethod + def returncode(self) -> int | None: + """ + The return code of the process. If the process has not yet terminated, this will + be ``None``. + """ + + @property + @abstractmethod + def stdin(self) -> ByteSendStream | None: + """The stream for the standard input of the process.""" + + @property + @abstractmethod + def stdout(self) -> ByteReceiveStream | None: + """The stream for the standard output of the process.""" + + @property + @abstractmethod + def stderr(self) -> ByteReceiveStream | None: + """The stream for the standard error output of the process.""" diff --git a/venv/lib/python3.11/site-packages/anyio/abc/_tasks.py b/venv/lib/python3.11/site-packages/anyio/abc/_tasks.py new file mode 100644 index 0000000..f6e5c40 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/abc/_tasks.py @@ -0,0 +1,101 @@ +from __future__ import annotations + +import sys +from abc import ABCMeta, abstractmethod +from collections.abc import Awaitable, Callable +from types import TracebackType +from typing import TYPE_CHECKING, Any, Protocol, TypeVar, overload + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +if TYPE_CHECKING: + from .._core._tasks import CancelScope + +T_Retval = TypeVar("T_Retval") +T_contra = TypeVar("T_contra", contravariant=True) +PosArgsT = TypeVarTuple("PosArgsT") + + +class TaskStatus(Protocol[T_contra]): + @overload + def started(self: TaskStatus[None]) -> None: ... + + @overload + def started(self, value: T_contra) -> None: ... + + def started(self, value: T_contra | None = None) -> None: + """ + Signal that the task has started. 
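+
+        A sketch of a task started via :meth:`TaskGroup.start` (the port number
+        is arbitrary)::
+
+            async def server(*, task_status: TaskStatus[int]) -> None:
+                task_status.started(8000)  # hands 8000 back to the starter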
+ + :param value: object passed back to the starter of the task + """ + + +class TaskGroup(metaclass=ABCMeta): + """ + Groups several asynchronous tasks together. + + :ivar cancel_scope: the cancel scope inherited by all child tasks + :vartype cancel_scope: CancelScope + + .. note:: On asyncio, support for eager task factories is considered to be + **experimental**. In particular, they don't follow the usual semantics of new + tasks being scheduled on the next iteration of the event loop, and may thus + cause unexpected behavior in code that wasn't written with such semantics in + mind. + """ + + cancel_scope: CancelScope + + @abstractmethod + def start_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> None: + """ + Start a new task in this task group. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + + .. versionadded:: 3.0 + """ + + @abstractmethod + async def start( + self, + func: Callable[..., Awaitable[Any]], + *args: object, + name: object = None, + ) -> Any: + """ + Start a new task and wait until it signals for readiness. + + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + :return: the value passed to ``task_status.started()`` + :raises RuntimeError: if the task finishes without calling + ``task_status.started()`` + + .. versionadded:: 3.0 + """ + + @abstractmethod + async def __aenter__(self) -> TaskGroup: + """Enter the task group context and allow starting new tasks.""" + + @abstractmethod + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + """Exit the task group context waiting for all tasks to finish.""" diff --git a/venv/lib/python3.11/site-packages/anyio/abc/_testing.py b/venv/lib/python3.11/site-packages/anyio/abc/_testing.py new file mode 100644 index 0000000..7c50ed7 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/abc/_testing.py @@ -0,0 +1,65 @@ +from __future__ import annotations + +import types +from abc import ABCMeta, abstractmethod +from collections.abc import AsyncGenerator, Callable, Coroutine, Iterable +from typing import Any, TypeVar + +_T = TypeVar("_T") + + +class TestRunner(metaclass=ABCMeta): + """ + Encapsulates a running event loop. Every call made through this object will use the + same event loop. + """ + + def __enter__(self) -> TestRunner: + return self + + @abstractmethod + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ) -> bool | None: ... + + @abstractmethod + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., AsyncGenerator[_T, Any]], + kwargs: dict[str, Any], + ) -> Iterable[_T]: + """ + Run an async generator fixture. + + :param fixture_func: the fixture function + :param kwargs: keyword arguments to call the fixture function with + :return: an iterator yielding the value yielded from the async generator + """ + + @abstractmethod + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, _T]], + kwargs: dict[str, Any], + ) -> _T: + """ + Run an async fixture. 
+ + :param fixture_func: the fixture function + :param kwargs: keyword arguments to call the fixture function with + :return: the return value of the fixture function + """ + + @abstractmethod + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] + ) -> None: + """ + Run an async test function. + + :param test_func: the test function + :param kwargs: keyword arguments to call the test function with + """ diff --git a/venv/lib/python3.11/site-packages/anyio/from_thread.py b/venv/lib/python3.11/site-packages/anyio/from_thread.py new file mode 100644 index 0000000..6179097 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/from_thread.py @@ -0,0 +1,527 @@ +from __future__ import annotations + +import sys +from collections.abc import Awaitable, Callable, Generator +from concurrent.futures import Future +from contextlib import ( + AbstractAsyncContextManager, + AbstractContextManager, + contextmanager, +) +from dataclasses import dataclass, field +from inspect import isawaitable +from threading import Lock, Thread, get_ident +from types import TracebackType +from typing import ( + Any, + Generic, + TypeVar, + cast, + overload, +) + +from ._core import _eventloop +from ._core._eventloop import get_async_backend, get_cancelled_exc_class, threadlocals +from ._core._synchronization import Event +from ._core._tasks import CancelScope, create_task_group +from .abc import AsyncBackend +from .abc._tasks import TaskStatus + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +T_Retval = TypeVar("T_Retval") +T_co = TypeVar("T_co", covariant=True) +PosArgsT = TypeVarTuple("PosArgsT") + + +def run( + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], *args: Unpack[PosArgsT] +) -> T_Retval: + """ + Call a coroutine function from a worker thread. + + :param func: a coroutine function + :param args: positional arguments for the callable + :return: the return value of the coroutine function + + """ + try: + async_backend = threadlocals.current_async_backend + token = threadlocals.current_token + except AttributeError: + raise RuntimeError( + "This function can only be run from an AnyIO worker thread" + ) from None + + return async_backend.run_async_from_thread(func, args, token=token) + + +def run_sync( + func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] +) -> T_Retval: + """ + Call a function in the event loop thread from a worker thread. 
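+
+    For example, from inside an AnyIO worker thread (sketch)::
+
+        result = run_sync(print, "hello from the event loop thread")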
+ + :param func: a callable + :param args: positional arguments for the callable + :return: the return value of the callable + + """ + try: + async_backend = threadlocals.current_async_backend + token = threadlocals.current_token + except AttributeError: + raise RuntimeError( + "This function can only be run from an AnyIO worker thread" + ) from None + + return async_backend.run_sync_from_thread(func, args, token=token) + + +class _BlockingAsyncContextManager(Generic[T_co], AbstractContextManager): + _enter_future: Future[T_co] + _exit_future: Future[bool | None] + _exit_event: Event + _exit_exc_info: tuple[ + type[BaseException] | None, BaseException | None, TracebackType | None + ] = (None, None, None) + + def __init__( + self, async_cm: AbstractAsyncContextManager[T_co], portal: BlockingPortal + ): + self._async_cm = async_cm + self._portal = portal + + async def run_async_cm(self) -> bool | None: + try: + self._exit_event = Event() + value = await self._async_cm.__aenter__() + except BaseException as exc: + self._enter_future.set_exception(exc) + raise + else: + self._enter_future.set_result(value) + + try: + # Wait for the sync context manager to exit. + # This next statement can raise `get_cancelled_exc_class()` if + # something went wrong in a task group in this async context + # manager. + await self._exit_event.wait() + finally: + # In case of cancellation, it could be that we end up here before + # `_BlockingAsyncContextManager.__exit__` is called, and an + # `_exit_exc_info` has been set. + result = await self._async_cm.__aexit__(*self._exit_exc_info) + return result + + def __enter__(self) -> T_co: + self._enter_future = Future() + self._exit_future = self._portal.start_task_soon(self.run_async_cm) + return self._enter_future.result() + + def __exit__( + self, + __exc_type: type[BaseException] | None, + __exc_value: BaseException | None, + __traceback: TracebackType | None, + ) -> bool | None: + self._exit_exc_info = __exc_type, __exc_value, __traceback + self._portal.call(self._exit_event.set) + return self._exit_future.result() + + +class _BlockingPortalTaskStatus(TaskStatus): + def __init__(self, future: Future): + self._future = future + + def started(self, value: object = None) -> None: + self._future.set_result(value) + + +class BlockingPortal: + """An object that lets external threads run code in an asynchronous event loop.""" + + def __new__(cls) -> BlockingPortal: + return get_async_backend().create_blocking_portal() + + def __init__(self) -> None: + self._event_loop_thread_id: int | None = get_ident() + self._stop_event = Event() + self._task_group = create_task_group() + self._cancelled_exc_class = get_cancelled_exc_class() + + async def __aenter__(self) -> BlockingPortal: + await self._task_group.__aenter__() + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + await self.stop() + return await self._task_group.__aexit__(exc_type, exc_val, exc_tb) + + def _check_running(self) -> None: + if self._event_loop_thread_id is None: + raise RuntimeError("This portal is not running") + if self._event_loop_thread_id == get_ident(): + raise RuntimeError( + "This method cannot be called from the event loop thread" + ) + + async def sleep_until_stopped(self) -> None: + """Sleep until :meth:`stop` is called.""" + await self._stop_event.wait() + + async def stop(self, cancel_remaining: bool = False) -> None: + """ + Signal the portal to shut down. 
+ + This marks the portal as no longer accepting new calls and exits from + :meth:`sleep_until_stopped`. + + :param cancel_remaining: ``True`` to cancel all the remaining tasks, ``False`` + to let them finish before returning + + """ + self._event_loop_thread_id = None + self._stop_event.set() + if cancel_remaining: + self._task_group.cancel_scope.cancel() + + async def _call_func( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + future: Future[T_Retval], + ) -> None: + def callback(f: Future[T_Retval]) -> None: + if f.cancelled() and self._event_loop_thread_id not in ( + None, + get_ident(), + ): + self.call(scope.cancel) + + try: + retval_or_awaitable = func(*args, **kwargs) + if isawaitable(retval_or_awaitable): + with CancelScope() as scope: + if future.cancelled(): + scope.cancel() + else: + future.add_done_callback(callback) + + retval = await retval_or_awaitable + else: + retval = retval_or_awaitable + except self._cancelled_exc_class: + future.cancel() + future.set_running_or_notify_cancel() + except BaseException as exc: + if not future.cancelled(): + future.set_exception(exc) + + # Let base exceptions fall through + if not isinstance(exc, Exception): + raise + else: + if not future.cancelled(): + future.set_result(retval) + finally: + scope = None # type: ignore[assignment] + + def _spawn_task_from_thread( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + name: object, + future: Future[T_Retval], + ) -> None: + """ + Spawn a new task using the given callable. + + Implementers must ensure that the future is resolved when the task finishes. + + :param func: a callable + :param args: positional arguments to be passed to the callable + :param kwargs: keyword arguments to be passed to the callable + :param name: name of the task (will be coerced to a string if not ``None``) + :param future: a future that will resolve to the return value of the callable, + or the exception raised during its execution + + """ + raise NotImplementedError + + @overload + def call( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + *args: Unpack[PosArgsT], + ) -> T_Retval: ... + + @overload + def call( + self, func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] + ) -> T_Retval: ... + + def call( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + *args: Unpack[PosArgsT], + ) -> T_Retval: + """ + Call the given function in the event loop thread. + + If the callable returns a coroutine object, it is awaited on. + + :param func: any callable + :raises RuntimeError: if the portal is not running or if this method is called + from within the event loop thread + + """ + return cast(T_Retval, self.start_task_soon(func, *args).result()) + + @overload + def start_task_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> Future[T_Retval]: ... + + @overload + def start_task_soon( + self, + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + name: object = None, + ) -> Future[T_Retval]: ... + + def start_task_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], + *args: Unpack[PosArgsT], + name: object = None, + ) -> Future[T_Retval]: + """ + Start a task in the portal's task group. 
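+
+        For example (sketch; ``portal`` is a running portal and ``anyio.sleep``
+        the stock sleep function)::
+
+            future = portal.start_task_soon(anyio.sleep, 1)
+            future.result()  # blocks the calling thread until the task finishes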
+ + The task will be run inside a cancel scope which can be cancelled by cancelling + the returned future. + + :param func: the target function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a future that resolves with the return value of the callable if the + task completes successfully, or with the exception raised in the task + :raises RuntimeError: if the portal is not running or if this method is called + from within the event loop thread + :rtype: concurrent.futures.Future[T_Retval] + + .. versionadded:: 3.0 + + """ + self._check_running() + f: Future[T_Retval] = Future() + self._spawn_task_from_thread(func, args, {}, name, f) + return f + + def start_task( + self, + func: Callable[..., Awaitable[T_Retval]], + *args: object, + name: object = None, + ) -> tuple[Future[T_Retval], Any]: + """ + Start a task in the portal's task group and wait until it signals for readiness. + + This method works the same way as :meth:`.abc.TaskGroup.start`. + + :param func: the target function + :param args: positional arguments passed to ``func`` + :param name: name of the task (will be coerced to a string if not ``None``) + :return: a tuple of (future, task_status_value) where the ``task_status_value`` + is the value passed to ``task_status.started()`` from within the target + function + :rtype: tuple[concurrent.futures.Future[T_Retval], Any] + + .. versionadded:: 3.0 + + """ + + def task_done(future: Future[T_Retval]) -> None: + if not task_status_future.done(): + if future.cancelled(): + task_status_future.cancel() + elif future.exception(): + task_status_future.set_exception(future.exception()) + else: + exc = RuntimeError( + "Task exited without calling task_status.started()" + ) + task_status_future.set_exception(exc) + + self._check_running() + task_status_future: Future = Future() + task_status = _BlockingPortalTaskStatus(task_status_future) + f: Future = Future() + f.add_done_callback(task_done) + self._spawn_task_from_thread(func, args, {"task_status": task_status}, name, f) + return f, task_status_future.result() + + def wrap_async_context_manager( + self, cm: AbstractAsyncContextManager[T_co] + ) -> AbstractContextManager[T_co]: + """ + Wrap an async context manager as a synchronous context manager via this portal. + + Spawns a task that will call both ``__aenter__()`` and ``__aexit__()``, stopping + in the middle until the synchronous context manager exits. + + :param cm: an asynchronous context manager + :return: a synchronous context manager + + .. versionadded:: 2.1 + + """ + return _BlockingAsyncContextManager(cm, self) + + +@dataclass +class BlockingPortalProvider: + """ + A manager for a blocking portal. Used as a context manager. The first thread to + enter this context manager causes a blocking portal to be started with the specific + parameters, and the last thread to exit causes the portal to be shut down. Thus, + there will be exactly one blocking portal running in this context as long as at + least one thread has entered this context manager. + + The parameters are the same as for :func:`~anyio.run`. + + :param backend: name of the backend + :param backend_options: backend options + + .. 
versionadded:: 4.4 + """ + + backend: str = "asyncio" + backend_options: dict[str, Any] | None = None + _lock: Lock = field(init=False, default_factory=Lock) + _leases: int = field(init=False, default=0) + _portal: BlockingPortal = field(init=False) + _portal_cm: AbstractContextManager[BlockingPortal] | None = field( + init=False, default=None + ) + + def __enter__(self) -> BlockingPortal: + with self._lock: + if self._portal_cm is None: + self._portal_cm = start_blocking_portal( + self.backend, self.backend_options + ) + self._portal = self._portal_cm.__enter__() + + self._leases += 1 + return self._portal + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + portal_cm: AbstractContextManager[BlockingPortal] | None = None + with self._lock: + assert self._portal_cm + assert self._leases > 0 + self._leases -= 1 + if not self._leases: + portal_cm = self._portal_cm + self._portal_cm = None + del self._portal + + if portal_cm: + portal_cm.__exit__(None, None, None) + + +@contextmanager +def start_blocking_portal( + backend: str = "asyncio", backend_options: dict[str, Any] | None = None +) -> Generator[BlockingPortal, Any, None]: + """ + Start a new event loop in a new thread and run a blocking portal in its main task. + + The parameters are the same as for :func:`~anyio.run`. + + :param backend: name of the backend + :param backend_options: backend options + :return: a context manager that yields a blocking portal + + .. versionchanged:: 3.0 + Usage as a context manager is now required. + + """ + + async def run_portal() -> None: + async with BlockingPortal() as portal_: + future.set_result(portal_) + await portal_.sleep_until_stopped() + + def run_blocking_portal() -> None: + if future.set_running_or_notify_cancel(): + try: + _eventloop.run( + run_portal, backend=backend, backend_options=backend_options + ) + except BaseException as exc: + if not future.done(): + future.set_exception(exc) + + future: Future[BlockingPortal] = Future() + thread = Thread(target=run_blocking_portal, daemon=True) + thread.start() + try: + cancel_remaining_tasks = False + portal = future.result() + try: + yield portal + except BaseException: + cancel_remaining_tasks = True + raise + finally: + try: + portal.call(portal.stop, cancel_remaining_tasks) + except RuntimeError: + pass + finally: + thread.join() + + +def check_cancelled() -> None: + """ + Check if the cancel scope of the host task's running the current worker thread has + been cancelled. + + If the host task's current cancel scope has indeed been cancelled, the + backend-specific cancellation exception will be raised. 
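+
+    A polling sketch for a long-running blocking loop (``do_one_step`` is
+    hypothetical)::
+
+        def blocking_work() -> None:
+            while True:
+                check_cancelled()  # re-raises the host task's cancellation here
+                do_one_step()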
+ + :raises RuntimeError: if the current thread was not spawned by + :func:`.to_thread.run_sync` + + """ + try: + async_backend: AsyncBackend = threadlocals.current_async_backend + except AttributeError: + raise RuntimeError( + "This function can only be run from an AnyIO worker thread" + ) from None + + async_backend.check_cancelled() diff --git a/venv/lib/python3.11/site-packages/anyio/lowlevel.py b/venv/lib/python3.11/site-packages/anyio/lowlevel.py new file mode 100644 index 0000000..14c7668 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/lowlevel.py @@ -0,0 +1,161 @@ +from __future__ import annotations + +import enum +from dataclasses import dataclass +from typing import Any, Generic, Literal, TypeVar, overload +from weakref import WeakKeyDictionary + +from ._core._eventloop import get_async_backend + +T = TypeVar("T") +D = TypeVar("D") + + +async def checkpoint() -> None: + """ + Check for cancellation and allow the scheduler to switch to another task. + + Equivalent to (but more efficient than):: + + await checkpoint_if_cancelled() + await cancel_shielded_checkpoint() + + + .. versionadded:: 3.0 + + """ + await get_async_backend().checkpoint() + + +async def checkpoint_if_cancelled() -> None: + """ + Enter a checkpoint if the enclosing cancel scope has been cancelled. + + This does not allow the scheduler to switch to a different task. + + .. versionadded:: 3.0 + + """ + await get_async_backend().checkpoint_if_cancelled() + + +async def cancel_shielded_checkpoint() -> None: + """ + Allow the scheduler to switch to another task but without checking for cancellation. + + Equivalent to (but potentially more efficient than):: + + with CancelScope(shield=True): + await checkpoint() + + + .. versionadded:: 3.0 + + """ + await get_async_backend().cancel_shielded_checkpoint() + + +def current_token() -> object: + """ + Return a backend specific token object that can be used to get back to the event + loop. + + """ + return get_async_backend().current_token() + + +_run_vars: WeakKeyDictionary[Any, dict[str, Any]] = WeakKeyDictionary() +_token_wrappers: dict[Any, _TokenWrapper] = {} + + +@dataclass(frozen=True) +class _TokenWrapper: + __slots__ = "_token", "__weakref__" + _token: object + + +class _NoValueSet(enum.Enum): + NO_VALUE_SET = enum.auto() + + +class RunvarToken(Generic[T]): + __slots__ = "_var", "_value", "_redeemed" + + def __init__(self, var: RunVar[T], value: T | Literal[_NoValueSet.NO_VALUE_SET]): + self._var = var + self._value: T | Literal[_NoValueSet.NO_VALUE_SET] = value + self._redeemed = False + + +class RunVar(Generic[T]): + """ + Like a :class:`~contextvars.ContextVar`, except scoped to the running event loop. + """ + + __slots__ = "_name", "_default" + + NO_VALUE_SET: Literal[_NoValueSet.NO_VALUE_SET] = _NoValueSet.NO_VALUE_SET + + _token_wrappers: set[_TokenWrapper] = set() + + def __init__( + self, name: str, default: T | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET + ): + self._name = name + self._default = default + + @property + def _current_vars(self) -> dict[str, T]: + token = current_token() + try: + return _run_vars[token] + except KeyError: + run_vars = _run_vars[token] = {} + return run_vars + + @overload + def get(self, default: D) -> T | D: ... + + @overload + def get(self) -> T: ... 
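+     # A sketch of typical usage (illustration only): a RunVar serves as a
+     # per-event-loop cache, here for a hypothetical connection cache:
+     #
+     #     _connections = RunVar[dict]("_connections")
+     #
+     #     def get_connections() -> dict:
+     #         try:
+     #             return _connections.get()
+     #         except LookupError:
+     #             cache: dict = {}
+     #             _connections.set(cache)
+     #             return cache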
+ + def get( + self, default: D | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET + ) -> T | D: + try: + return self._current_vars[self._name] + except KeyError: + if default is not RunVar.NO_VALUE_SET: + return default + elif self._default is not RunVar.NO_VALUE_SET: + return self._default + + raise LookupError( + f'Run variable "{self._name}" has no value and no default set' + ) + + def set(self, value: T) -> RunvarToken[T]: + current_vars = self._current_vars + token = RunvarToken(self, current_vars.get(self._name, RunVar.NO_VALUE_SET)) + current_vars[self._name] = value + return token + + def reset(self, token: RunvarToken[T]) -> None: + if token._var is not self: + raise ValueError("This token does not belong to this RunVar") + + if token._redeemed: + raise ValueError("This token has already been used") + + if token._value is _NoValueSet.NO_VALUE_SET: + try: + del self._current_vars[self._name] + except KeyError: + pass + else: + self._current_vars[self._name] = token._value + + token._redeemed = True + + def __repr__(self) -> str: + return f"<RunVar name={self._name!r}>" diff --git a/venv/lib/python3.11/site-packages/anyio/py.typed b/venv/lib/python3.11/site-packages/anyio/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.11/site-packages/anyio/pytest_plugin.py b/venv/lib/python3.11/site-packages/anyio/pytest_plugin.py new file mode 100644 index 0000000..21e4ab2 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/pytest_plugin.py @@ -0,0 +1,272 @@ +from __future__ import annotations + +import socket +import sys +from collections.abc import Callable, Generator, Iterator +from contextlib import ExitStack, contextmanager +from inspect import isasyncgenfunction, iscoroutinefunction, ismethod +from typing import Any, cast + +import pytest +import sniffio +from _pytest.fixtures import SubRequest +from _pytest.outcomes import Exit + +from ._core._eventloop import get_all_backends, get_async_backend +from ._core._exceptions import iterate_exceptions +from .abc import TestRunner + +if sys.version_info < (3, 11): + from exceptiongroup import ExceptionGroup + +_current_runner: TestRunner | None = None +_runner_stack: ExitStack | None = None +_runner_leases = 0 + + +def extract_backend_and_options(backend: object) -> tuple[str, dict[str, Any]]: + if isinstance(backend, str): + return backend, {} + elif isinstance(backend, tuple) and len(backend) == 2: + if isinstance(backend[0], str) and isinstance(backend[1], dict): + return cast(tuple[str, dict[str, Any]], backend) + + raise TypeError("anyio_backend must be either a string or tuple of (string, dict)") + + +@contextmanager +def get_runner( + backend_name: str, backend_options: dict[str, Any] +) -> Iterator[TestRunner]: + global _current_runner, _runner_leases, _runner_stack + if _current_runner is None: + asynclib = get_async_backend(backend_name) + _runner_stack = ExitStack() + if sniffio.current_async_library_cvar.get(None) is None: + # Since we're in control of the event loop, we can cache the name of the + # async library + token = sniffio.current_async_library_cvar.set(backend_name) + _runner_stack.callback(sniffio.current_async_library_cvar.reset, token) + + backend_options = backend_options or {} + _current_runner = _runner_stack.enter_context( + asynclib.create_test_runner(backend_options) + ) + + _runner_leases += 1 + try: + yield _current_runner + finally: + _runner_leases -= 1 + if not _runner_leases: + assert _runner_stack is not None + _runner_stack.close() + _runner_stack = _current_runner = None + + +def 
pytest_configure(config: Any) -> None: + config.addinivalue_line( + "markers", + "anyio: mark the (coroutine function) test to be run asynchronously via anyio.", + ) + + +@pytest.hookimpl(hookwrapper=True) +def pytest_fixture_setup(fixturedef: Any, request: Any) -> Generator[Any]: + def wrapper( + *args: Any, anyio_backend: Any, request: SubRequest, **kwargs: Any + ) -> Any: + # Rebind any fixture methods to the request instance + if ( + request.instance + and ismethod(func) + and type(func.__self__) is type(request.instance) + ): + local_func = func.__func__.__get__(request.instance) + else: + local_func = func + + backend_name, backend_options = extract_backend_and_options(anyio_backend) + if has_backend_arg: + kwargs["anyio_backend"] = anyio_backend + + if has_request_arg: + kwargs["request"] = request + + with get_runner(backend_name, backend_options) as runner: + if isasyncgenfunction(local_func): + yield from runner.run_asyncgen_fixture(local_func, kwargs) + else: + yield runner.run_fixture(local_func, kwargs) + + # Only apply this to coroutine functions and async generator functions in requests + # that involve the anyio_backend fixture + func = fixturedef.func + if isasyncgenfunction(func) or iscoroutinefunction(func): + if "anyio_backend" in request.fixturenames: + fixturedef.func = wrapper + original_argname = fixturedef.argnames + + if not (has_backend_arg := "anyio_backend" in fixturedef.argnames): + fixturedef.argnames += ("anyio_backend",) + + if not (has_request_arg := "request" in fixturedef.argnames): + fixturedef.argnames += ("request",) + + try: + return (yield) + finally: + fixturedef.func = func + fixturedef.argnames = original_argname + + return (yield) + + +@pytest.hookimpl(tryfirst=True) +def pytest_pycollect_makeitem(collector: Any, name: Any, obj: Any) -> None: + if collector.istestfunction(obj, name): + inner_func = obj.hypothesis.inner_test if hasattr(obj, "hypothesis") else obj + if iscoroutinefunction(inner_func): + marker = collector.get_closest_marker("anyio") + own_markers = getattr(obj, "pytestmark", ()) + if marker or any(marker.name == "anyio" for marker in own_markers): + pytest.mark.usefixtures("anyio_backend")(obj) + + +@pytest.hookimpl(tryfirst=True) +def pytest_pyfunc_call(pyfuncitem: Any) -> bool | None: + def run_with_hypothesis(**kwargs: Any) -> None: + with get_runner(backend_name, backend_options) as runner: + runner.run_test(original_func, kwargs) + + backend = pyfuncitem.funcargs.get("anyio_backend") + if backend: + backend_name, backend_options = extract_backend_and_options(backend) + + if hasattr(pyfuncitem.obj, "hypothesis"): + # Wrap the inner test function unless it's already wrapped + original_func = pyfuncitem.obj.hypothesis.inner_test + if original_func.__qualname__ != run_with_hypothesis.__qualname__: + if iscoroutinefunction(original_func): + pyfuncitem.obj.hypothesis.inner_test = run_with_hypothesis + + return None + + if iscoroutinefunction(pyfuncitem.obj): + funcargs = pyfuncitem.funcargs + testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames} + with get_runner(backend_name, backend_options) as runner: + try: + runner.run_test(pyfuncitem.obj, testargs) + except ExceptionGroup as excgrp: + for exc in iterate_exceptions(excgrp): + if isinstance(exc, (Exit, KeyboardInterrupt, SystemExit)): + raise exc from excgrp + + raise + + return True + + return None + + +@pytest.fixture(scope="module", params=get_all_backends()) +def anyio_backend(request: Any) -> Any: + return request.param + + +@pytest.fixture +def 
anyio_backend_name(anyio_backend: Any) -> str: + if isinstance(anyio_backend, str): + return anyio_backend + else: + return anyio_backend[0] + + +@pytest.fixture +def anyio_backend_options(anyio_backend: Any) -> dict[str, Any]: + if isinstance(anyio_backend, str): + return {} + else: + return anyio_backend[1] + + +class FreePortFactory: + """ + Manages port generation based on specified socket kind, ensuring no duplicate + ports are generated. + + This class provides functionality for generating available free ports on the + system. It is initialized with a specific socket kind and can generate ports + for given address families while avoiding reuse of previously generated ports. + + Users should not instantiate this class directly, but use the + ``free_tcp_port_factory`` and ``free_udp_port_factory`` fixtures instead. For simple + use cases, ``free_tcp_port`` and ``free_udp_port`` can be used instead. + """ + + def __init__(self, kind: socket.SocketKind) -> None: + self._kind = kind + self._generated = set[int]() + + @property + def kind(self) -> socket.SocketKind: + """ + The type of socket connection (e.g., :data:`~socket.SOCK_STREAM` or + :data:`~socket.SOCK_DGRAM`) used to bind for checking port availability + + """ + return self._kind + + def __call__(self, family: socket.AddressFamily | None = None) -> int: + """ + Return an unbound port for the given address family. + + :param family: if omitted, both IPv4 and IPv6 addresses will be tried + :return: a port number + + """ + if family is not None: + families = [family] + else: + families = [socket.AF_INET] + if socket.has_ipv6: + families.append(socket.AF_INET6) + + while True: + port = 0 + with ExitStack() as stack: + for family in families: + sock = stack.enter_context(socket.socket(family, self._kind)) + addr = "::1" if family == socket.AF_INET6 else "127.0.0.1" + try: + sock.bind((addr, port)) + except OSError: + break + + if not port: + port = sock.getsockname()[1] + else: + if port not in self._generated: + self._generated.add(port) + return port + + +@pytest.fixture(scope="session") +def free_tcp_port_factory() -> FreePortFactory: + return FreePortFactory(socket.SOCK_STREAM) + + +@pytest.fixture(scope="session") +def free_udp_port_factory() -> FreePortFactory: + return FreePortFactory(socket.SOCK_DGRAM) + + +@pytest.fixture +def free_tcp_port(free_tcp_port_factory: Callable[[], int]) -> int: + return free_tcp_port_factory() + + +@pytest.fixture +def free_udp_port(free_udp_port_factory: Callable[[], int]) -> int: + return free_udp_port_factory() diff --git a/venv/lib/python3.11/site-packages/anyio/streams/__init__.py b/venv/lib/python3.11/site-packages/anyio/streams/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.11/site-packages/anyio/streams/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/streams/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..c0846a3 Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/streams/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/streams/__pycache__/buffered.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/streams/__pycache__/buffered.cpython-311.pyc new file mode 100644 index 0000000..2d45995 Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/streams/__pycache__/buffered.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/streams/__pycache__/file.cpython-311.pyc 
b/venv/lib/python3.11/site-packages/anyio/streams/__pycache__/file.cpython-311.pyc new file mode 100644 index 0000000..a62bc41 Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/streams/__pycache__/file.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/streams/__pycache__/memory.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/streams/__pycache__/memory.cpython-311.pyc new file mode 100644 index 0000000..1e9a682 Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/streams/__pycache__/memory.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/streams/__pycache__/stapled.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/streams/__pycache__/stapled.cpython-311.pyc new file mode 100644 index 0000000..a764d27 Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/streams/__pycache__/stapled.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/streams/__pycache__/text.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/streams/__pycache__/text.cpython-311.pyc new file mode 100644 index 0000000..783325a Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/streams/__pycache__/text.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/streams/__pycache__/tls.cpython-311.pyc b/venv/lib/python3.11/site-packages/anyio/streams/__pycache__/tls.cpython-311.pyc new file mode 100644 index 0000000..c311129 Binary files /dev/null and b/venv/lib/python3.11/site-packages/anyio/streams/__pycache__/tls.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/anyio/streams/buffered.py b/venv/lib/python3.11/site-packages/anyio/streams/buffered.py new file mode 100644 index 0000000..f5d5e83 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/streams/buffered.py @@ -0,0 +1,119 @@ +from __future__ import annotations + +from collections.abc import Callable, Mapping +from dataclasses import dataclass, field +from typing import Any + +from .. import ClosedResourceError, DelimiterNotFound, EndOfStream, IncompleteRead +from ..abc import AnyByteReceiveStream, ByteReceiveStream + + +@dataclass(eq=False) +class BufferedByteReceiveStream(ByteReceiveStream): + """ + Wraps any bytes-based receive stream and uses a buffer to provide sophisticated + receiving capabilities in the form of a byte stream. 
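+
+     A minimal usage sketch, assuming ``raw_stream`` is any bytes-based receive
+     stream obtained elsewhere::
+
+         buffered = BufferedByteReceiveStream(raw_stream)
+         header = await buffered.receive_exactly(4)
+         line = await buffered.receive_until(b"\n", max_bytes=1024)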
+ """ + + receive_stream: AnyByteReceiveStream + _buffer: bytearray = field(init=False, default_factory=bytearray) + _closed: bool = field(init=False, default=False) + + async def aclose(self) -> None: + await self.receive_stream.aclose() + self._closed = True + + @property + def buffer(self) -> bytes: + """The bytes currently in the buffer.""" + return bytes(self._buffer) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.receive_stream.extra_attributes + + async def receive(self, max_bytes: int = 65536) -> bytes: + if self._closed: + raise ClosedResourceError + + if self._buffer: + chunk = bytes(self._buffer[:max_bytes]) + del self._buffer[:max_bytes] + return chunk + elif isinstance(self.receive_stream, ByteReceiveStream): + return await self.receive_stream.receive(max_bytes) + else: + # With a bytes-oriented object stream, we need to handle any surplus bytes + # we get from the receive() call + chunk = await self.receive_stream.receive() + if len(chunk) > max_bytes: + # Save the surplus bytes in the buffer + self._buffer.extend(chunk[max_bytes:]) + return chunk[:max_bytes] + else: + return chunk + + async def receive_exactly(self, nbytes: int) -> bytes: + """ + Read exactly the given amount of bytes from the stream. + + :param nbytes: the number of bytes to read + :return: the bytes read + :raises ~anyio.IncompleteRead: if the stream was closed before the requested + amount of bytes could be read from the stream + + """ + while True: + remaining = nbytes - len(self._buffer) + if remaining <= 0: + retval = self._buffer[:nbytes] + del self._buffer[:nbytes] + return bytes(retval) + + try: + if isinstance(self.receive_stream, ByteReceiveStream): + chunk = await self.receive_stream.receive(remaining) + else: + chunk = await self.receive_stream.receive() + except EndOfStream as exc: + raise IncompleteRead from exc + + self._buffer.extend(chunk) + + async def receive_until(self, delimiter: bytes, max_bytes: int) -> bytes: + """ + Read from the stream until the delimiter is found or max_bytes have been read. 
+ + :param delimiter: the marker to look for in the stream + :param max_bytes: maximum number of bytes that will be read before raising + :exc:`~anyio.DelimiterNotFound` + :return: the bytes read (not including the delimiter) + :raises ~anyio.IncompleteRead: if the stream was closed before the delimiter + was found + :raises ~anyio.DelimiterNotFound: if the delimiter is not found within the + bytes read up to the maximum allowed + + """ + delimiter_size = len(delimiter) + offset = 0 + while True: + # Check if the delimiter can be found in the current buffer + index = self._buffer.find(delimiter, offset) + if index >= 0: + found = self._buffer[:index] + del self._buffer[: index + len(delimiter) :] + return bytes(found) + + # Check if the buffer is already at or over the limit + if len(self._buffer) >= max_bytes: + raise DelimiterNotFound(max_bytes) + + # Read more data into the buffer from the socket + try: + data = await self.receive_stream.receive() + except EndOfStream as exc: + raise IncompleteRead from exc + + # Move the offset forward and add the new data to the buffer + offset = max(len(self._buffer) - delimiter_size + 1, 0) + self._buffer.extend(data) diff --git a/venv/lib/python3.11/site-packages/anyio/streams/file.py b/venv/lib/python3.11/site-packages/anyio/streams/file.py new file mode 100644 index 0000000..f492464 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/streams/file.py @@ -0,0 +1,148 @@ +from __future__ import annotations + +from collections.abc import Callable, Mapping +from io import SEEK_SET, UnsupportedOperation +from os import PathLike +from pathlib import Path +from typing import Any, BinaryIO, cast + +from .. import ( + BrokenResourceError, + ClosedResourceError, + EndOfStream, + TypedAttributeSet, + to_thread, + typed_attribute, +) +from ..abc import ByteReceiveStream, ByteSendStream + + +class FileStreamAttribute(TypedAttributeSet): + #: the open file descriptor + file: BinaryIO = typed_attribute() + #: the path of the file on the file system, if available (file must be a real file) + path: Path = typed_attribute() + #: the file number, if available (file must be a real file or a TTY) + fileno: int = typed_attribute() + + +class _BaseFileStream: + def __init__(self, file: BinaryIO): + self._file = file + + async def aclose(self) -> None: + await to_thread.run_sync(self._file.close) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + attributes: dict[Any, Callable[[], Any]] = { + FileStreamAttribute.file: lambda: self._file, + } + + if hasattr(self._file, "name"): + attributes[FileStreamAttribute.path] = lambda: Path(self._file.name) + + try: + self._file.fileno() + except UnsupportedOperation: + pass + else: + attributes[FileStreamAttribute.fileno] = lambda: self._file.fileno() + + return attributes + + +class FileReadStream(_BaseFileStream, ByteReceiveStream): + """ + A byte stream that reads from a file in the file system. + + :param file: a file that has been opened for reading in binary mode + + .. versionadded:: 3.0 + """ + + @classmethod + async def from_path(cls, path: str | PathLike[str]) -> FileReadStream: + """ + Create a file read stream by opening the given file. 
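+
+         A short sketch (the path is a placeholder)::
+
+             async with await FileReadStream.from_path("/tmp/input.bin") as stream:
+                 async for chunk in stream:
+                     ...  # each chunk is at most 65536 bytes
+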
+ + :param path: path of the file to read from + + """ + file = await to_thread.run_sync(Path(path).open, "rb") + return cls(cast(BinaryIO, file)) + + async def receive(self, max_bytes: int = 65536) -> bytes: + try: + data = await to_thread.run_sync(self._file.read, max_bytes) + except ValueError: + raise ClosedResourceError from None + except OSError as exc: + raise BrokenResourceError from exc + + if data: + return data + else: + raise EndOfStream + + async def seek(self, position: int, whence: int = SEEK_SET) -> int: + """ + Seek the file to the given position. + + .. seealso:: :meth:`io.IOBase.seek` + + .. note:: Not all file descriptors are seekable. + + :param position: position to seek the file to + :param whence: controls how ``position`` is interpreted + :return: the new absolute position + :raises OSError: if the file is not seekable + + """ + return await to_thread.run_sync(self._file.seek, position, whence) + + async def tell(self) -> int: + """ + Return the current stream position. + + .. note:: Not all file descriptors are seekable. + + :return: the current absolute position + :raises OSError: if the file is not seekable + + """ + return await to_thread.run_sync(self._file.tell) + + +class FileWriteStream(_BaseFileStream, ByteSendStream): + """ + A byte stream that writes to a file in the file system. + + :param file: a file that has been opened for writing in binary mode + + .. versionadded:: 3.0 + """ + + @classmethod + async def from_path( + cls, path: str | PathLike[str], append: bool = False + ) -> FileWriteStream: + """ + Create a file write stream by opening the given file for writing. + + :param path: path of the file to write to + :param append: if ``True``, open the file for appending; if ``False``, any + existing file at the given path will be truncated + + """ + mode = "ab" if append else "wb" + file = await to_thread.run_sync(Path(path).open, mode) + return cls(cast(BinaryIO, file)) + + async def send(self, item: bytes) -> None: + try: + await to_thread.run_sync(self._file.write, item) + except ValueError: + raise ClosedResourceError from None + except OSError as exc: + raise BrokenResourceError from exc diff --git a/venv/lib/python3.11/site-packages/anyio/streams/memory.py b/venv/lib/python3.11/site-packages/anyio/streams/memory.py new file mode 100644 index 0000000..83bf1d9 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/streams/memory.py @@ -0,0 +1,317 @@ +from __future__ import annotations + +import warnings +from collections import OrderedDict, deque +from dataclasses import dataclass, field +from types import TracebackType +from typing import Generic, NamedTuple, TypeVar + +from .. 
import ( + BrokenResourceError, + ClosedResourceError, + EndOfStream, + WouldBlock, +) +from .._core._testing import TaskInfo, get_current_task +from ..abc import Event, ObjectReceiveStream, ObjectSendStream +from ..lowlevel import checkpoint + +T_Item = TypeVar("T_Item") +T_co = TypeVar("T_co", covariant=True) +T_contra = TypeVar("T_contra", contravariant=True) + + +class MemoryObjectStreamStatistics(NamedTuple): + current_buffer_used: int #: number of items stored in the buffer + #: maximum number of items that can be stored on this stream (or :data:`math.inf`) + max_buffer_size: float + open_send_streams: int #: number of unclosed clones of the send stream + open_receive_streams: int #: number of unclosed clones of the receive stream + #: number of tasks blocked on :meth:`MemoryObjectSendStream.send` + tasks_waiting_send: int + #: number of tasks blocked on :meth:`MemoryObjectReceiveStream.receive` + tasks_waiting_receive: int + + +@dataclass(eq=False) +class MemoryObjectItemReceiver(Generic[T_Item]): + task_info: TaskInfo = field(init=False, default_factory=get_current_task) + item: T_Item = field(init=False) + + def __repr__(self) -> str: + # When item is not defined, we get following error with default __repr__: + # AttributeError: 'MemoryObjectItemReceiver' object has no attribute 'item' + item = getattr(self, "item", None) + return f"{self.__class__.__name__}(task_info={self.task_info}, item={item!r})" + + +@dataclass(eq=False) +class MemoryObjectStreamState(Generic[T_Item]): + max_buffer_size: float = field() + buffer: deque[T_Item] = field(init=False, default_factory=deque) + open_send_channels: int = field(init=False, default=0) + open_receive_channels: int = field(init=False, default=0) + waiting_receivers: OrderedDict[Event, MemoryObjectItemReceiver[T_Item]] = field( + init=False, default_factory=OrderedDict + ) + waiting_senders: OrderedDict[Event, T_Item] = field( + init=False, default_factory=OrderedDict + ) + + def statistics(self) -> MemoryObjectStreamStatistics: + return MemoryObjectStreamStatistics( + len(self.buffer), + self.max_buffer_size, + self.open_send_channels, + self.open_receive_channels, + len(self.waiting_senders), + len(self.waiting_receivers), + ) + + +@dataclass(eq=False) +class MemoryObjectReceiveStream(Generic[T_co], ObjectReceiveStream[T_co]): + _state: MemoryObjectStreamState[T_co] + _closed: bool = field(init=False, default=False) + + def __post_init__(self) -> None: + self._state.open_receive_channels += 1 + + def receive_nowait(self) -> T_co: + """ + Receive the next item if it can be done without waiting. 
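+
+         A minimal sketch of a non-blocking poll (the stream pair comes from
+         :func:`~anyio.create_memory_object_stream`)::
+
+             from anyio import WouldBlock, create_memory_object_stream
+
+             send_stream, receive_stream = create_memory_object_stream[int](10)
+             try:
+                 item = receive_stream.receive_nowait()
+             except WouldBlock:
+                 item = None  # nothing buffered and no sender waiting
+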
+ + :return: the received item + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.EndOfStream: if the buffer is empty and this stream has been + closed from the sending end + :raises ~anyio.WouldBlock: if there are no items in the buffer and no tasks + waiting to send + + """ + if self._closed: + raise ClosedResourceError + + if self._state.waiting_senders: + # Get the item from the next sender + send_event, item = self._state.waiting_senders.popitem(last=False) + self._state.buffer.append(item) + send_event.set() + + if self._state.buffer: + return self._state.buffer.popleft() + elif not self._state.open_send_channels: + raise EndOfStream + + raise WouldBlock + + async def receive(self) -> T_co: + await checkpoint() + try: + return self.receive_nowait() + except WouldBlock: + # Add ourselves in the queue + receive_event = Event() + receiver = MemoryObjectItemReceiver[T_co]() + self._state.waiting_receivers[receive_event] = receiver + + try: + await receive_event.wait() + finally: + self._state.waiting_receivers.pop(receive_event, None) + + try: + return receiver.item + except AttributeError: + raise EndOfStream from None + + def clone(self) -> MemoryObjectReceiveStream[T_co]: + """ + Create a clone of this receive stream. + + Each clone can be closed separately. Only when all clones have been closed will + the receiving end of the memory stream be considered closed by the sending ends. + + :return: the cloned stream + + """ + if self._closed: + raise ClosedResourceError + + return MemoryObjectReceiveStream(_state=self._state) + + def close(self) -> None: + """ + Close the stream. + + This works the exact same way as :meth:`aclose`, but is provided as a special + case for the benefit of synchronous callbacks. + + """ + if not self._closed: + self._closed = True + self._state.open_receive_channels -= 1 + if self._state.open_receive_channels == 0: + send_events = list(self._state.waiting_senders.keys()) + for event in send_events: + event.set() + + async def aclose(self) -> None: + self.close() + + def statistics(self) -> MemoryObjectStreamStatistics: + """ + Return statistics about the current state of this stream. + + .. versionadded:: 3.0 + """ + return self._state.statistics() + + def __enter__(self) -> MemoryObjectReceiveStream[T_co]: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() + + def __del__(self) -> None: + if not self._closed: + warnings.warn( + f"Unclosed <{self.__class__.__name__} at {id(self):x}>", + ResourceWarning, + source=self, + ) + + +@dataclass(eq=False) +class MemoryObjectSendStream(Generic[T_contra], ObjectSendStream[T_contra]): + _state: MemoryObjectStreamState[T_contra] + _closed: bool = field(init=False, default=False) + + def __post_init__(self) -> None: + self._state.open_send_channels += 1 + + def send_nowait(self, item: T_contra) -> None: + """ + Send an item immediately if it can be done without waiting. 
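+
+         A minimal sketch of an opportunistic send that drops the item when the
+         buffer is full::
+
+             from anyio import WouldBlock, create_memory_object_stream
+
+             send_stream, receive_stream = create_memory_object_stream[str](1)
+             send_stream.send_nowait("first")  # fills the single buffer slot
+             try:
+                 send_stream.send_nowait("second")
+             except WouldBlock:
+                 pass  # buffer full and no receiver waiting
+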
+ + :param item: the item to send + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.BrokenResourceError: if the stream has been closed from the + receiving end + :raises ~anyio.WouldBlock: if the buffer is full and there are no tasks waiting + to receive + + """ + if self._closed: + raise ClosedResourceError + if not self._state.open_receive_channels: + raise BrokenResourceError + + while self._state.waiting_receivers: + receive_event, receiver = self._state.waiting_receivers.popitem(last=False) + if not receiver.task_info.has_pending_cancellation(): + receiver.item = item + receive_event.set() + return + + if len(self._state.buffer) < self._state.max_buffer_size: + self._state.buffer.append(item) + else: + raise WouldBlock + + async def send(self, item: T_contra) -> None: + """ + Send an item to the stream. + + If the buffer is full, this method blocks until there is again room in the + buffer or the item can be sent directly to a receiver. + + :param item: the item to send + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.BrokenResourceError: if the stream has been closed from the + receiving end + + """ + await checkpoint() + try: + self.send_nowait(item) + except WouldBlock: + # Wait until there's someone on the receiving end + send_event = Event() + self._state.waiting_senders[send_event] = item + try: + await send_event.wait() + except BaseException: + self._state.waiting_senders.pop(send_event, None) + raise + + if send_event in self._state.waiting_senders: + del self._state.waiting_senders[send_event] + raise BrokenResourceError from None + + def clone(self) -> MemoryObjectSendStream[T_contra]: + """ + Create a clone of this send stream. + + Each clone can be closed separately. Only when all clones have been closed will + the sending end of the memory stream be considered closed by the receiving ends. + + :return: the cloned stream + + """ + if self._closed: + raise ClosedResourceError + + return MemoryObjectSendStream(_state=self._state) + + def close(self) -> None: + """ + Close the stream. + + This works the exact same way as :meth:`aclose`, but is provided as a special + case for the benefit of synchronous callbacks. + + """ + if not self._closed: + self._closed = True + self._state.open_send_channels -= 1 + if self._state.open_send_channels == 0: + receive_events = list(self._state.waiting_receivers.keys()) + self._state.waiting_receivers.clear() + for event in receive_events: + event.set() + + async def aclose(self) -> None: + self.close() + + def statistics(self) -> MemoryObjectStreamStatistics: + """ + Return statistics about the current state of this stream. + + .. 
versionadded:: 3.0 + """ + return self._state.statistics() + + def __enter__(self) -> MemoryObjectSendStream[T_contra]: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() + + def __del__(self) -> None: + if not self._closed: + warnings.warn( + f"Unclosed <{self.__class__.__name__} at {id(self):x}>", + ResourceWarning, + source=self, + ) diff --git a/venv/lib/python3.11/site-packages/anyio/streams/stapled.py b/venv/lib/python3.11/site-packages/anyio/streams/stapled.py new file mode 100644 index 0000000..80f64a2 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/streams/stapled.py @@ -0,0 +1,141 @@ +from __future__ import annotations + +from collections.abc import Callable, Mapping, Sequence +from dataclasses import dataclass +from typing import Any, Generic, TypeVar + +from ..abc import ( + ByteReceiveStream, + ByteSendStream, + ByteStream, + Listener, + ObjectReceiveStream, + ObjectSendStream, + ObjectStream, + TaskGroup, +) + +T_Item = TypeVar("T_Item") +T_Stream = TypeVar("T_Stream") + + +@dataclass(eq=False) +class StapledByteStream(ByteStream): + """ + Combines two byte streams into a single, bidirectional byte stream. + + Extra attributes will be provided from both streams, with the receive stream + providing the values in case of a conflict. + + :param ByteSendStream send_stream: the sending byte stream + :param ByteReceiveStream receive_stream: the receiving byte stream + """ + + send_stream: ByteSendStream + receive_stream: ByteReceiveStream + + async def receive(self, max_bytes: int = 65536) -> bytes: + return await self.receive_stream.receive(max_bytes) + + async def send(self, item: bytes) -> None: + await self.send_stream.send(item) + + async def send_eof(self) -> None: + await self.send_stream.aclose() + + async def aclose(self) -> None: + await self.send_stream.aclose() + await self.receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.send_stream.extra_attributes, + **self.receive_stream.extra_attributes, + } + + +@dataclass(eq=False) +class StapledObjectStream(Generic[T_Item], ObjectStream[T_Item]): + """ + Combines two object streams into a single, bidirectional object stream. + + Extra attributes will be provided from both streams, with the receive stream + providing the values in case of a conflict. + + :param ObjectSendStream send_stream: the sending object stream + :param ObjectReceiveStream receive_stream: the receiving object stream + """ + + send_stream: ObjectSendStream[T_Item] + receive_stream: ObjectReceiveStream[T_Item] + + async def receive(self) -> T_Item: + return await self.receive_stream.receive() + + async def send(self, item: T_Item) -> None: + await self.send_stream.send(item) + + async def send_eof(self) -> None: + await self.send_stream.aclose() + + async def aclose(self) -> None: + await self.send_stream.aclose() + await self.receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.send_stream.extra_attributes, + **self.receive_stream.extra_attributes, + } + + +@dataclass(eq=False) +class MultiListener(Generic[T_Stream], Listener[T_Stream]): + """ + Combines multiple listeners into one, serving connections from all of them at once. + + Any MultiListeners in the given collection of listeners will have their listeners + moved into this one. 
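+
+     For illustration, serving one handler on both an IPv4 and an IPv6 listener
+     (``handle`` is a hypothetical connection handler)::
+
+         from anyio import create_tcp_listener
+
+         listener_v4 = await create_tcp_listener(local_host="127.0.0.1", local_port=8000)
+         listener_v6 = await create_tcp_listener(local_host="::1", local_port=8000)
+         await MultiListener([listener_v4, listener_v6]).serve(handle)
+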
+ + Extra attributes are provided from each listener, with each successive listener + overriding any conflicting attributes from the previous one. + + :param listeners: listeners to serve + :type listeners: Sequence[Listener[T_Stream]] + """ + + listeners: Sequence[Listener[T_Stream]] + + def __post_init__(self) -> None: + listeners: list[Listener[T_Stream]] = [] + for listener in self.listeners: + if isinstance(listener, MultiListener): + listeners.extend(listener.listeners) + del listener.listeners[:] # type: ignore[attr-defined] + else: + listeners.append(listener) + + self.listeners = listeners + + async def serve( + self, handler: Callable[[T_Stream], Any], task_group: TaskGroup | None = None + ) -> None: + from .. import create_task_group + + async with create_task_group() as tg: + for listener in self.listeners: + tg.start_soon(listener.serve, handler, task_group) + + async def aclose(self) -> None: + for listener in self.listeners: + await listener.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + attributes: dict = {} + for listener in self.listeners: + attributes.update(listener.extra_attributes) + + return attributes diff --git a/venv/lib/python3.11/site-packages/anyio/streams/text.py b/venv/lib/python3.11/site-packages/anyio/streams/text.py new file mode 100644 index 0000000..f1a1127 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/streams/text.py @@ -0,0 +1,147 @@ +from __future__ import annotations + +import codecs +from collections.abc import Callable, Mapping +from dataclasses import InitVar, dataclass, field +from typing import Any + +from ..abc import ( + AnyByteReceiveStream, + AnyByteSendStream, + AnyByteStream, + ObjectReceiveStream, + ObjectSendStream, + ObjectStream, +) + + +@dataclass(eq=False) +class TextReceiveStream(ObjectReceiveStream[str]): + """ + Stream wrapper that decodes bytes to strings using the given encoding. + + Decoding is done using :class:`~codecs.IncrementalDecoder` which returns any + completely received unicode characters as soon as they come in. + + :param transport_stream: any bytes-based receive stream + :param encoding: character encoding to use for decoding bytes to strings (defaults + to ``utf-8``) + :param errors: handling scheme for decoding errors (defaults to ``strict``; see the + `codecs module documentation`_ for a comprehensive list of options) + + .. _codecs module documentation: + https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteReceiveStream + encoding: InitVar[str] = "utf-8" + errors: InitVar[str] = "strict" + _decoder: codecs.IncrementalDecoder = field(init=False) + + def __post_init__(self, encoding: str, errors: str) -> None: + decoder_class = codecs.getincrementaldecoder(encoding) + self._decoder = decoder_class(errors=errors) + + async def receive(self) -> str: + while True: + chunk = await self.transport_stream.receive() + decoded = self._decoder.decode(chunk) + if decoded: + return decoded + + async def aclose(self) -> None: + await self.transport_stream.aclose() + self._decoder.reset() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.transport_stream.extra_attributes + + +@dataclass(eq=False) +class TextSendStream(ObjectSendStream[str]): + """ + Sends strings to the wrapped stream as bytes using the given encoding. 
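+
+     A short sketch, assuming ``byte_stream`` is any bytes-oriented send stream::
+
+         text_out = TextSendStream(byte_stream, encoding="utf-8")
+         await text_out.send("hello, world\n")
+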
+ + :param AnyByteSendStream transport_stream: any bytes-based send stream + :param str encoding: character encoding to use for encoding strings to bytes + (defaults to ``utf-8``) + :param str errors: handling scheme for encoding errors (defaults to ``strict``; see + the `codecs module documentation`_ for a comprehensive list of options) + + .. _codecs module documentation: + https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteSendStream + encoding: InitVar[str] = "utf-8" + errors: str = "strict" + _encoder: Callable[..., tuple[bytes, int]] = field(init=False) + + def __post_init__(self, encoding: str) -> None: + self._encoder = codecs.getencoder(encoding) + + async def send(self, item: str) -> None: + encoded = self._encoder(item, self.errors)[0] + await self.transport_stream.send(encoded) + + async def aclose(self) -> None: + await self.transport_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.transport_stream.extra_attributes + + +@dataclass(eq=False) +class TextStream(ObjectStream[str]): + """ + A bidirectional stream that decodes bytes to strings on receive and encodes strings + to bytes on send. + + Extra attributes will be provided from both streams, with the receive stream + providing the values in case of a conflict. + + :param AnyByteStream transport_stream: any bytes-based stream + :param str encoding: character encoding to use for encoding/decoding strings to/from + bytes (defaults to ``utf-8``) + :param str errors: handling scheme for encoding errors (defaults to ``strict``; see + the `codecs module documentation`_ for a comprehensive list of options) + + .. _codecs module documentation: + https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteStream + encoding: InitVar[str] = "utf-8" + errors: InitVar[str] = "strict" + _receive_stream: TextReceiveStream = field(init=False) + _send_stream: TextSendStream = field(init=False) + + def __post_init__(self, encoding: str, errors: str) -> None: + self._receive_stream = TextReceiveStream( + self.transport_stream, encoding=encoding, errors=errors + ) + self._send_stream = TextSendStream( + self.transport_stream, encoding=encoding, errors=errors + ) + + async def receive(self) -> str: + return await self._receive_stream.receive() + + async def send(self, item: str) -> None: + await self._send_stream.send(item) + + async def send_eof(self) -> None: + await self.transport_stream.send_eof() + + async def aclose(self) -> None: + await self._send_stream.aclose() + await self._receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self._send_stream.extra_attributes, + **self._receive_stream.extra_attributes, + } diff --git a/venv/lib/python3.11/site-packages/anyio/streams/tls.py b/venv/lib/python3.11/site-packages/anyio/streams/tls.py new file mode 100644 index 0000000..70a41cc --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/streams/tls.py @@ -0,0 +1,352 @@ +from __future__ import annotations + +import logging +import re +import ssl +import sys +from collections.abc import Callable, Mapping +from dataclasses import dataclass +from functools import wraps +from typing import Any, TypeVar + +from .. 
import ( + BrokenResourceError, + EndOfStream, + aclose_forcefully, + get_cancelled_exc_class, + to_thread, +) +from .._core._typedattr import TypedAttributeSet, typed_attribute +from ..abc import AnyByteStream, ByteStream, Listener, TaskGroup + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") +_PCTRTT = tuple[tuple[str, str], ...] +_PCTRTTT = tuple[_PCTRTT, ...] + + +class TLSAttribute(TypedAttributeSet): + """Contains Transport Layer Security related attributes.""" + + #: the selected ALPN protocol + alpn_protocol: str | None = typed_attribute() + #: the channel binding for type ``tls-unique`` + channel_binding_tls_unique: bytes = typed_attribute() + #: the selected cipher + cipher: tuple[str, str, int] = typed_attribute() + #: the peer certificate in dictionary form (see :meth:`ssl.SSLSocket.getpeercert` + # for more information) + peer_certificate: None | (dict[str, str | _PCTRTTT | _PCTRTT]) = typed_attribute() + #: the peer certificate in binary form + peer_certificate_binary: bytes | None = typed_attribute() + #: ``True`` if this is the server side of the connection + server_side: bool = typed_attribute() + #: ciphers shared by the client during the TLS handshake (``None`` if this is the + #: client side) + shared_ciphers: list[tuple[str, str, int]] | None = typed_attribute() + #: the :class:`~ssl.SSLObject` used for encryption + ssl_object: ssl.SSLObject = typed_attribute() + #: ``True`` if this stream does (and expects) a closing TLS handshake when the + #: stream is being closed + standard_compatible: bool = typed_attribute() + #: the TLS protocol version (e.g. ``TLSv1.2``) + tls_version: str = typed_attribute() + + +@dataclass(eq=False) +class TLSStream(ByteStream): + """ + A stream wrapper that encrypts all sent data and decrypts received data. + + This class has no public initializer; use :meth:`wrap` instead. + All extra attributes from :class:`~TLSAttribute` are supported. + + :var AnyByteStream transport_stream: the wrapped stream + + """ + + transport_stream: AnyByteStream + standard_compatible: bool + _ssl_object: ssl.SSLObject + _read_bio: ssl.MemoryBIO + _write_bio: ssl.MemoryBIO + + @classmethod + async def wrap( + cls, + transport_stream: AnyByteStream, + *, + server_side: bool | None = None, + hostname: str | None = None, + ssl_context: ssl.SSLContext | None = None, + standard_compatible: bool = True, + ) -> TLSStream: + """ + Wrap an existing stream with Transport Layer Security. + + This performs a TLS handshake with the peer. + + :param transport_stream: a bytes-transporting stream to wrap + :param server_side: ``True`` if this is the server side of the connection, + ``False`` if this is the client side (if omitted, will be set to ``False`` + if ``hostname`` has been provided, ``True`` otherwise). Used only to create + a default context when an explicit context has not been provided. 
+ :param hostname: host name of the peer (if host name checking is desired) + :param ssl_context: the SSLContext object to use (if not provided, a secure + default will be created) + :param standard_compatible: if ``False``, skip the closing handshake when + closing the connection, and don't raise an exception if the peer does the + same + :raises ~ssl.SSLError: if the TLS handshake fails + + """ + if server_side is None: + server_side = not hostname + + if not ssl_context: + purpose = ( + ssl.Purpose.CLIENT_AUTH if server_side else ssl.Purpose.SERVER_AUTH + ) + ssl_context = ssl.create_default_context(purpose) + + # Re-enable detection of unexpected EOFs if it was disabled by Python + if hasattr(ssl, "OP_IGNORE_UNEXPECTED_EOF"): + ssl_context.options &= ~ssl.OP_IGNORE_UNEXPECTED_EOF + + bio_in = ssl.MemoryBIO() + bio_out = ssl.MemoryBIO() + + # External SSLContext implementations may do blocking I/O in wrap_bio(), + # but the standard library implementation won't + if type(ssl_context) is ssl.SSLContext: + ssl_object = ssl_context.wrap_bio( + bio_in, bio_out, server_side=server_side, server_hostname=hostname + ) + else: + ssl_object = await to_thread.run_sync( + ssl_context.wrap_bio, + bio_in, + bio_out, + server_side, + hostname, + None, + ) + + wrapper = cls( + transport_stream=transport_stream, + standard_compatible=standard_compatible, + _ssl_object=ssl_object, + _read_bio=bio_in, + _write_bio=bio_out, + ) + await wrapper._call_sslobject_method(ssl_object.do_handshake) + return wrapper + + async def _call_sslobject_method( + self, func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] + ) -> T_Retval: + while True: + try: + result = func(*args) + except ssl.SSLWantReadError: + try: + # Flush any pending writes first + if self._write_bio.pending: + await self.transport_stream.send(self._write_bio.read()) + + data = await self.transport_stream.receive() + except EndOfStream: + self._read_bio.write_eof() + except OSError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + raise BrokenResourceError from exc + else: + self._read_bio.write(data) + except ssl.SSLWantWriteError: + await self.transport_stream.send(self._write_bio.read()) + except ssl.SSLSyscallError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + raise BrokenResourceError from exc + except ssl.SSLError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + if isinstance(exc, ssl.SSLEOFError) or ( + exc.strerror and "UNEXPECTED_EOF_WHILE_READING" in exc.strerror + ): + if self.standard_compatible: + raise BrokenResourceError from exc + else: + raise EndOfStream from None + + raise + else: + # Flush any pending writes first + if self._write_bio.pending: + await self.transport_stream.send(self._write_bio.read()) + + return result + + async def unwrap(self) -> tuple[AnyByteStream, bytes]: + """ + Does the TLS closing handshake. 
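+
+         A client-side sketch ending with the closing handshake (host, port and
+         payload are placeholders)::
+
+             from anyio import connect_tcp
+
+             tcp_stream = await connect_tcp("example.org", 443)
+             tls_stream = await TLSStream.wrap(tcp_stream, hostname="example.org")
+             await tls_stream.send(b"ping")
+             transport, leftover = await tls_stream.unwrap()
+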
+ + :return: a tuple of (wrapped byte stream, bytes left in the read buffer) + + """ + await self._call_sslobject_method(self._ssl_object.unwrap) + self._read_bio.write_eof() + self._write_bio.write_eof() + return self.transport_stream, self._read_bio.read() + + async def aclose(self) -> None: + if self.standard_compatible: + try: + await self.unwrap() + except BaseException: + await aclose_forcefully(self.transport_stream) + raise + + await self.transport_stream.aclose() + + async def receive(self, max_bytes: int = 65536) -> bytes: + data = await self._call_sslobject_method(self._ssl_object.read, max_bytes) + if not data: + raise EndOfStream + + return data + + async def send(self, item: bytes) -> None: + await self._call_sslobject_method(self._ssl_object.write, item) + + async def send_eof(self) -> None: + tls_version = self.extra(TLSAttribute.tls_version) + match = re.match(r"TLSv(\d+)(?:\.(\d+))?", tls_version) + if match: + major, minor = int(match.group(1)), int(match.group(2) or 0) + if (major, minor) < (1, 3): + raise NotImplementedError( + f"send_eof() requires at least TLSv1.3; current " + f"session uses {tls_version}" + ) + + raise NotImplementedError( + "send_eof() has not yet been implemented for TLS streams" + ) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.transport_stream.extra_attributes, + TLSAttribute.alpn_protocol: self._ssl_object.selected_alpn_protocol, + TLSAttribute.channel_binding_tls_unique: ( + self._ssl_object.get_channel_binding + ), + TLSAttribute.cipher: self._ssl_object.cipher, + TLSAttribute.peer_certificate: lambda: self._ssl_object.getpeercert(False), + TLSAttribute.peer_certificate_binary: lambda: self._ssl_object.getpeercert( + True + ), + TLSAttribute.server_side: lambda: self._ssl_object.server_side, + TLSAttribute.shared_ciphers: lambda: self._ssl_object.shared_ciphers() + if self._ssl_object.server_side + else None, + TLSAttribute.standard_compatible: lambda: self.standard_compatible, + TLSAttribute.ssl_object: lambda: self._ssl_object, + TLSAttribute.tls_version: self._ssl_object.version, + } + + +@dataclass(eq=False) +class TLSListener(Listener[TLSStream]): + """ + A convenience listener that wraps another listener and auto-negotiates a TLS session + on every accepted connection. + + If the TLS handshake times out or raises an exception, + :meth:`handle_handshake_error` is called to do whatever post-mortem processing is + deemed necessary. + + Supports only the :attr:`~TLSAttribute.standard_compatible` extra attribute. + + :param Listener listener: the listener to wrap + :param ssl_context: the SSL context object + :param standard_compatible: a flag passed through to :meth:`TLSStream.wrap` + :param handshake_timeout: time limit for the TLS handshake + (passed to :func:`~anyio.fail_after`) + """ + + listener: Listener[Any] + ssl_context: ssl.SSLContext + standard_compatible: bool = True + handshake_timeout: float = 30 + + @staticmethod + async def handle_handshake_error(exc: BaseException, stream: AnyByteStream) -> None: + """ + Handle an exception raised during the TLS handshake. + + This method does 3 things: + + #. Forcefully closes the original stream + #. Logs the exception (unless it was a cancellation exception) using the + ``anyio.streams.tls`` logger + #. 
Reraises the exception if it was a base exception or a cancellation exception + + :param exc: the exception + :param stream: the original stream + + """ + await aclose_forcefully(stream) + + # Log all except cancellation exceptions + if not isinstance(exc, get_cancelled_exc_class()): + # CPython (as of 3.11.5) returns incorrect `sys.exc_info()` here when using + # any asyncio implementation, so we explicitly pass the exception to log + # (https://github.com/python/cpython/issues/108668). Trio does not have this + # issue because it works around the CPython bug. + logging.getLogger(__name__).exception( + "Error during TLS handshake", exc_info=exc + ) + + # Only reraise base exceptions and cancellation exceptions + if not isinstance(exc, Exception) or isinstance(exc, get_cancelled_exc_class()): + raise + + async def serve( + self, + handler: Callable[[TLSStream], Any], + task_group: TaskGroup | None = None, + ) -> None: + @wraps(handler) + async def handler_wrapper(stream: AnyByteStream) -> None: + from .. import fail_after + + try: + with fail_after(self.handshake_timeout): + wrapped_stream = await TLSStream.wrap( + stream, + ssl_context=self.ssl_context, + standard_compatible=self.standard_compatible, + ) + except BaseException as exc: + await self.handle_handshake_error(exc, stream) + else: + await handler(wrapped_stream) + + await self.listener.serve(handler_wrapper, task_group) + + async def aclose(self) -> None: + await self.listener.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + TLSAttribute.standard_compatible: lambda: self.standard_compatible, + } diff --git a/venv/lib/python3.11/site-packages/anyio/to_interpreter.py b/venv/lib/python3.11/site-packages/anyio/to_interpreter.py new file mode 100644 index 0000000..8a2e993 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/to_interpreter.py @@ -0,0 +1,218 @@ +from __future__ import annotations + +import atexit +import os +import pickle +import sys +from collections import deque +from collections.abc import Callable +from textwrap import dedent +from typing import Any, Final, TypeVar + +from . 
import current_time, to_thread +from ._core._exceptions import BrokenWorkerIntepreter +from ._core._synchronization import CapacityLimiter +from .lowlevel import RunVar + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +UNBOUND: Final = 2 # I have no clue how this works, but it was used in the stdlib +FMT_UNPICKLED: Final = 0 +FMT_PICKLED: Final = 1 +DEFAULT_CPU_COUNT: Final = 8 # this is just an arbitrarily selected value +MAX_WORKER_IDLE_TIME = ( + 30 # seconds a subinterpreter can be idle before becoming eligible for pruning +) + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + +_idle_workers = RunVar[deque["Worker"]]("_available_workers") +_default_interpreter_limiter = RunVar[CapacityLimiter]("_default_interpreter_limiter") + + +class Worker: + _run_func = compile( + dedent(""" + import _interpqueues as queues + import _interpreters as interpreters + from pickle import loads, dumps, HIGHEST_PROTOCOL + + item = queues.get(queue_id)[0] + try: + func, args = loads(item) + retval = func(*args) + except BaseException as exc: + is_exception = True + retval = exc + else: + is_exception = False + + try: + queues.put(queue_id, (retval, is_exception), FMT_UNPICKLED, UNBOUND) + except interpreters.NotShareableError: + retval = dumps(retval, HIGHEST_PROTOCOL) + queues.put(queue_id, (retval, is_exception), FMT_PICKLED, UNBOUND) + """), + "", + "exec", + ) + + last_used: float = 0 + + _initialized: bool = False + _interpreter_id: int + _queue_id: int + + def initialize(self) -> None: + import _interpqueues as queues + import _interpreters as interpreters + + self._interpreter_id = interpreters.create() + self._queue_id = queues.create(2, FMT_UNPICKLED, UNBOUND) + self._initialized = True + interpreters.set___main___attrs( + self._interpreter_id, + { + "queue_id": self._queue_id, + "FMT_PICKLED": FMT_PICKLED, + "FMT_UNPICKLED": FMT_UNPICKLED, + "UNBOUND": UNBOUND, + }, + ) + + def destroy(self) -> None: + import _interpqueues as queues + import _interpreters as interpreters + + if self._initialized: + interpreters.destroy(self._interpreter_id) + queues.destroy(self._queue_id) + + def _call( + self, + func: Callable[..., T_Retval], + args: tuple[Any], + ) -> tuple[Any, bool]: + import _interpqueues as queues + import _interpreters as interpreters + + if not self._initialized: + self.initialize() + + payload = pickle.dumps((func, args), pickle.HIGHEST_PROTOCOL) + queues.put(self._queue_id, payload, FMT_PICKLED, UNBOUND) + + res: Any + is_exception: bool + if exc_info := interpreters.exec(self._interpreter_id, self._run_func): + raise BrokenWorkerIntepreter(exc_info) + + (res, is_exception), fmt = queues.get(self._queue_id)[:2] + if fmt == FMT_PICKLED: + res = pickle.loads(res) + + return res, is_exception + + async def call( + self, + func: Callable[..., T_Retval], + args: tuple[Any], + limiter: CapacityLimiter, + ) -> T_Retval: + result, is_exception = await to_thread.run_sync( + self._call, + func, + args, + limiter=limiter, + ) + if is_exception: + raise result + + return result + + +def _stop_workers(workers: deque[Worker]) -> None: + for worker in workers: + worker.destroy() + + workers.clear() + + +async def run_sync( + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + limiter: CapacityLimiter | None = None, +) -> T_Retval: + """ + Call the given function with the given arguments in a subinterpreter. 
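+
+     A minimal sketch (``hash_data`` stands for a module-level, picklable,
+     CPU-bound function)::
+
+         import hashlib
+
+         def hash_data(data: bytes) -> str:
+             return hashlib.sha256(data).hexdigest()
+
+         digest = await run_sync(hash_data, b"payload")
+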
+ + If the task waiting for its completion is cancelled, the call will still run + its course, but its return value (or any raised exception) will be ignored. + + .. warning:: This feature is **experimental**. The upstream interpreter API has not + yet been finalized or thoroughly tested, so don't rely on this for anything + mission critical. + + :param func: a callable + :param args: positional arguments for the callable + :param limiter: capacity limiter to use to limit the total number of subinterpreters + running (if omitted, the default limiter is used) + :return: the result of the call + :raises BrokenWorkerIntepreter: if there's an internal error in a subinterpreter + + """ + if sys.version_info <= (3, 13): + raise RuntimeError("subinterpreters require at least Python 3.13") + + if limiter is None: + limiter = current_default_interpreter_limiter() + + try: + idle_workers = _idle_workers.get() + except LookupError: + idle_workers = deque() + _idle_workers.set(idle_workers) + atexit.register(_stop_workers, idle_workers) + + async with limiter: + try: + worker = idle_workers.pop() + except IndexError: + worker = Worker() + + try: + return await worker.call(func, args, limiter) + finally: + # Prune workers that have been idle for too long + now = current_time() + while idle_workers: + if now - idle_workers[0].last_used <= MAX_WORKER_IDLE_TIME: + break + + await to_thread.run_sync(idle_workers.popleft().destroy, limiter=limiter) + + worker.last_used = current_time() + idle_workers.append(worker) + + +def current_default_interpreter_limiter() -> CapacityLimiter: + """ + Return the capacity limiter that is used by default to limit the number of + concurrently running subinterpreters. + + Defaults to the number of CPU cores.
+ + :return: a capacity limiter object + + """ + try: + return _default_interpreter_limiter.get() + except LookupError: + limiter = CapacityLimiter(os.cpu_count() or DEFAULT_CPU_COUNT) + _default_interpreter_limiter.set(limiter) + return limiter diff --git a/venv/lib/python3.11/site-packages/anyio/to_process.py b/venv/lib/python3.11/site-packages/anyio/to_process.py new file mode 100644 index 0000000..495de2a --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/to_process.py @@ -0,0 +1,258 @@ +from __future__ import annotations + +import os +import pickle +import subprocess +import sys +from collections import deque +from collections.abc import Callable +from importlib.util import module_from_spec, spec_from_file_location +from typing import TypeVar, cast + +from ._core._eventloop import current_time, get_async_backend, get_cancelled_exc_class +from ._core._exceptions import BrokenWorkerProcess +from ._core._subprocesses import open_process +from ._core._synchronization import CapacityLimiter +from ._core._tasks import CancelScope, fail_after +from .abc import ByteReceiveStream, ByteSendStream, Process +from .lowlevel import RunVar, checkpoint_if_cancelled +from .streams.buffered import BufferedByteReceiveStream + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +WORKER_MAX_IDLE_TIME = 300 # 5 minutes + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + +_process_pool_workers: RunVar[set[Process]] = RunVar("_process_pool_workers") +_process_pool_idle_workers: RunVar[deque[tuple[Process, float]]] = RunVar( + "_process_pool_idle_workers" +) +_default_process_limiter: RunVar[CapacityLimiter] = RunVar("_default_process_limiter") + + +async def run_sync( # type: ignore[return] + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + cancellable: bool = False, + limiter: CapacityLimiter | None = None, +) -> T_Retval: + """ + Call the given function with the given arguments in a worker process. + + If the ``cancellable`` option is enabled and the task waiting for its completion is + cancelled, the worker process running it will be abruptly terminated using SIGKILL + (or ``TerminateProcess()`` on Windows). + + :param func: a callable + :param args: positional arguments for the callable + :param cancellable: ``True`` to allow cancellation of the operation while it's + running + :param limiter: capacity limiter to use to limit the total number of processes + running (if omitted, the default limiter is used) + :return: an awaitable that yields the return value of the function.
+ + """ + + async def send_raw_command(pickled_cmd: bytes) -> object: + try: + await stdin.send(pickled_cmd) + response = await buffered.receive_until(b"\n", 50) + status, length = response.split(b" ") + if status not in (b"RETURN", b"EXCEPTION"): + raise RuntimeError( + f"Worker process returned unexpected response: {response!r}" + ) + + pickled_response = await buffered.receive_exactly(int(length)) + except BaseException as exc: + workers.discard(process) + try: + process.kill() + with CancelScope(shield=True): + await process.aclose() + except ProcessLookupError: + pass + + if isinstance(exc, get_cancelled_exc_class()): + raise + else: + raise BrokenWorkerProcess from exc + + retval = pickle.loads(pickled_response) + if status == b"EXCEPTION": + assert isinstance(retval, BaseException) + raise retval + else: + return retval + + # First pickle the request before trying to reserve a worker process + await checkpoint_if_cancelled() + request = pickle.dumps(("run", func, args), protocol=pickle.HIGHEST_PROTOCOL) + + # If this is the first run in this event loop thread, set up the necessary variables + try: + workers = _process_pool_workers.get() + idle_workers = _process_pool_idle_workers.get() + except LookupError: + workers = set() + idle_workers = deque() + _process_pool_workers.set(workers) + _process_pool_idle_workers.set(idle_workers) + get_async_backend().setup_process_pool_exit_at_shutdown(workers) + + async with limiter or current_default_process_limiter(): + # Pop processes from the pool (starting from the most recently used) until we + # find one that hasn't exited yet + process: Process + while idle_workers: + process, idle_since = idle_workers.pop() + if process.returncode is None: + stdin = cast(ByteSendStream, process.stdin) + buffered = BufferedByteReceiveStream( + cast(ByteReceiveStream, process.stdout) + ) + + # Prune any other workers that have been idle for WORKER_MAX_IDLE_TIME + # seconds or longer + now = current_time() + killed_processes: list[Process] = [] + while idle_workers: + if now - idle_workers[0][1] < WORKER_MAX_IDLE_TIME: + break + + process_to_kill, idle_since = idle_workers.popleft() + process_to_kill.kill() + workers.remove(process_to_kill) + killed_processes.append(process_to_kill) + + with CancelScope(shield=True): + for killed_process in killed_processes: + await killed_process.aclose() + + break + + workers.remove(process) + else: + command = [sys.executable, "-u", "-m", __name__] + process = await open_process( + command, stdin=subprocess.PIPE, stdout=subprocess.PIPE + ) + try: + stdin = cast(ByteSendStream, process.stdin) + buffered = BufferedByteReceiveStream( + cast(ByteReceiveStream, process.stdout) + ) + with fail_after(20): + message = await buffered.receive(6) + + if message != b"READY\n": + raise BrokenWorkerProcess( + f"Worker process returned unexpected response: {message!r}" + ) + + main_module_path = getattr(sys.modules["__main__"], "__file__", None) + pickled = pickle.dumps( + ("init", sys.path, main_module_path), + protocol=pickle.HIGHEST_PROTOCOL, + ) + await send_raw_command(pickled) + except (BrokenWorkerProcess, get_cancelled_exc_class()): + raise + except BaseException as exc: + process.kill() + raise BrokenWorkerProcess( + "Error during worker process initialization" + ) from exc + + workers.add(process) + + with CancelScope(shield=not cancellable): + try: + return cast(T_Retval, await send_raw_command(request)) + finally: + if process in workers: + idle_workers.append((process, current_time())) + + +def 
current_default_process_limiter() -> CapacityLimiter: + """ + Return the capacity limiter that is used by default to limit the number of worker + processes. + + :return: a capacity limiter object + + """ + try: + return _default_process_limiter.get() + except LookupError: + limiter = CapacityLimiter(os.cpu_count() or 2) + _default_process_limiter.set(limiter) + return limiter + + +def process_worker() -> None: + # Redirect standard streams to os.devnull so that user code won't interfere with the + # parent-worker communication + stdin = sys.stdin + stdout = sys.stdout + sys.stdin = open(os.devnull) + sys.stdout = open(os.devnull, "w") + + stdout.buffer.write(b"READY\n") + while True: + retval = exception = None + try: + command, *args = pickle.load(stdin.buffer) + except EOFError: + return + except BaseException as exc: + exception = exc + else: + if command == "run": + func, args = args + try: + retval = func(*args) + except BaseException as exc: + exception = exc + elif command == "init": + main_module_path: str | None + sys.path, main_module_path = args + del sys.modules["__main__"] + if main_module_path and os.path.isfile(main_module_path): + # Load the parent's main module but as __mp_main__ instead of + # __main__ (like multiprocessing does) to avoid infinite recursion + try: + spec = spec_from_file_location("__mp_main__", main_module_path) + if spec and spec.loader: + main = module_from_spec(spec) + spec.loader.exec_module(main) + sys.modules["__main__"] = main + except BaseException as exc: + exception = exc + try: + if exception is not None: + status = b"EXCEPTION" + pickled = pickle.dumps(exception, pickle.HIGHEST_PROTOCOL) + else: + status = b"RETURN" + pickled = pickle.dumps(retval, pickle.HIGHEST_PROTOCOL) + except BaseException as exc: + exception = exc + status = b"EXCEPTION" + pickled = pickle.dumps(exc, pickle.HIGHEST_PROTOCOL) + + stdout.buffer.write(b"%s %d\n" % (status, len(pickled))) + stdout.buffer.write(pickled) + + # Respect SIGTERM + if isinstance(exception, SystemExit): + raise exception + + +if __name__ == "__main__": + process_worker() diff --git a/venv/lib/python3.11/site-packages/anyio/to_thread.py b/venv/lib/python3.11/site-packages/anyio/to_thread.py new file mode 100644 index 0000000..5070516 --- /dev/null +++ b/venv/lib/python3.11/site-packages/anyio/to_thread.py @@ -0,0 +1,69 @@ +from __future__ import annotations + +import sys +from collections.abc import Callable +from typing import TypeVar +from warnings import warn + +from ._core._eventloop import get_async_backend +from .abc import CapacityLimiter + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + + +async def run_sync( + func: Callable[[Unpack[PosArgsT]], T_Retval], + *args: Unpack[PosArgsT], + abandon_on_cancel: bool = False, + cancellable: bool | None = None, + limiter: CapacityLimiter | None = None, +) -> T_Retval: + """ + Call the given function with the given arguments in a worker thread. + + If the ``cancellable`` option is enabled and the task waiting for its completion is + cancelled, the thread will still run its course but its return value (or any raised + exception) will be ignored. 
+ + :param func: a callable + :param args: positional arguments for the callable + :param abandon_on_cancel: ``True`` to abandon the thread (leaving it to run + unchecked on its own) if the host task is cancelled, ``False`` to ignore + cancellations in the host task until the operation has completed in the worker + thread + :param cancellable: deprecated alias of ``abandon_on_cancel``; will override + ``abandon_on_cancel`` if both parameters are passed + :param limiter: capacity limiter to use to limit the total number of threads running + (if omitted, the default limiter is used) + :return: an awaitable that yields the return value of the function. + + """ + if cancellable is not None: + abandon_on_cancel = cancellable + warn( + "The `cancellable=` keyword argument to `anyio.to_thread.run_sync` is " + "deprecated since AnyIO 4.1.0; use `abandon_on_cancel=` instead", + DeprecationWarning, + stacklevel=2, + ) + + return await get_async_backend().run_sync_in_worker_thread( + func, args, abandon_on_cancel=abandon_on_cancel, limiter=limiter + ) + + +def current_default_thread_limiter() -> CapacityLimiter: + """ + Return the capacity limiter that is used by default to limit the number of + concurrent threads. + + :return: a capacity limiter object + + """ + return get_async_backend().current_default_thread_limiter() diff --git a/venv/lib/python3.11/site-packages/certifi-2025.4.26.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/certifi-2025.4.26.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.11/site-packages/certifi-2025.4.26.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.11/site-packages/certifi-2025.4.26.dist-info/METADATA b/venv/lib/python3.11/site-packages/certifi-2025.4.26.dist-info/METADATA new file mode 100644 index 0000000..bba2b69 --- /dev/null +++ b/venv/lib/python3.11/site-packages/certifi-2025.4.26.dist-info/METADATA @@ -0,0 +1,78 @@ +Metadata-Version: 2.4 +Name: certifi +Version: 2025.4.26 +Summary: Python package for providing Mozilla's CA Bundle. +Home-page: https://github.com/certifi/python-certifi +Author: Kenneth Reitz +Author-email: me@kennethreitz.com +License: MPL-2.0 +Project-URL: Source, https://github.com/certifi/python-certifi +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) +Classifier: Natural Language :: English +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Requires-Python: >=3.6 +License-File: LICENSE +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: home-page +Dynamic: license +Dynamic: license-file +Dynamic: project-url +Dynamic: requires-python +Dynamic: summary + +Certifi: Python SSL Certificates +================================ + +Certifi provides Mozilla's carefully curated collection of Root Certificates for +validating the trustworthiness of SSL certificates while verifying the identity +of TLS hosts.
It has been extracted from the `Requests`_ project. + +Installation +------------ + +``certifi`` is available on PyPI. Simply install it with ``pip``:: + + $ pip install certifi + +Usage +----- + +To reference the installed certificate authority (CA) bundle, you can use the +built-in function:: + + >>> import certifi + + >>> certifi.where() + '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' + +Or from the command line:: + + $ python -m certifi + /usr/local/lib/python3.7/site-packages/certifi/cacert.pem + +Enjoy! + +.. _`Requests`: https://requests.readthedocs.io/en/master/ + +Addition/Removal of Certificates +-------------------------------- + +Certifi does not support any addition/removal or other modification of the +CA trust store content. This project is intended to provide a reliable and +highly portable root of trust to python deployments. Look to upstream projects +for methods to use alternate trust. diff --git a/venv/lib/python3.11/site-packages/certifi-2025.4.26.dist-info/RECORD b/venv/lib/python3.11/site-packages/certifi-2025.4.26.dist-info/RECORD new file mode 100644 index 0000000..d5d2720 --- /dev/null +++ b/venv/lib/python3.11/site-packages/certifi-2025.4.26.dist-info/RECORD @@ -0,0 +1,14 @@ +certifi-2025.4.26.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +certifi-2025.4.26.dist-info/METADATA,sha256=Q1SDFkY5LOQAJmDltZz2wU3VTv1Kh5X-rjGI4KiPHNM,2473 +certifi-2025.4.26.dist-info/RECORD,, +certifi-2025.4.26.dist-info/WHEEL,sha256=SmOxYU7pzNKBqASvQJ7DjX3XGUF92lrGhMb3R6_iiqI,91 +certifi-2025.4.26.dist-info/licenses/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989 +certifi-2025.4.26.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 +certifi/__init__.py,sha256=9pyWUGr6sbAlksfOHo0BTV0Gxljjh4IK1kXAjHgjL4I,94 +certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 +certifi/__pycache__/__init__.cpython-311.pyc,, +certifi/__pycache__/__main__.cpython-311.pyc,, +certifi/__pycache__/core.cpython-311.pyc,, +certifi/cacert.pem,sha256=K3sQJvGKKX4hSBicoMvn0-f578NvcjHMwoIKQE_rVZY,283771 +certifi/core.py,sha256=qRDDFyXVJwTB_EmoGppaXU_R9qCZvhl-EzxPMuV3nTA,4426 +certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.11/site-packages/certifi-2025.4.26.dist-info/WHEEL b/venv/lib/python3.11/site-packages/certifi-2025.4.26.dist-info/WHEEL new file mode 100644 index 0000000..8acb955 --- /dev/null +++ b/venv/lib/python3.11/site-packages/certifi-2025.4.26.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (79.0.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.11/site-packages/certifi-2025.4.26.dist-info/licenses/LICENSE b/venv/lib/python3.11/site-packages/certifi-2025.4.26.dist-info/licenses/LICENSE new file mode 100644 index 0000000..62b076c --- /dev/null +++ b/venv/lib/python3.11/site-packages/certifi-2025.4.26.dist-info/licenses/LICENSE @@ -0,0 +1,20 @@ +This package contains a modified version of ca-bundle.crt: + +ca-bundle.crt -- Bundle of CA Root Certificates + +This is a bundle of X.509 certificates of public Certificate Authorities +(CA). These were automatically extracted from Mozilla's root certificates +file (certdata.txt). 
This file can be found in the mozilla source tree: +https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt +It contains the certificates in PEM format and therefore +can be directly used with curl / libcurl / php_curl, or with +an Apache+mod_ssl webserver for SSL client authentication. +Just configure this file as the SSLCACertificateFile.# + +***** BEGIN LICENSE BLOCK ***** +This Source Code Form is subject to the terms of the Mozilla Public License, +v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. + +***** END LICENSE BLOCK ***** +@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/venv/lib/python3.11/site-packages/certifi-2025.4.26.dist-info/top_level.txt b/venv/lib/python3.11/site-packages/certifi-2025.4.26.dist-info/top_level.txt new file mode 100644 index 0000000..963eac5 --- /dev/null +++ b/venv/lib/python3.11/site-packages/certifi-2025.4.26.dist-info/top_level.txt @@ -0,0 +1 @@ +certifi diff --git a/venv/lib/python3.11/site-packages/certifi/__init__.py b/venv/lib/python3.11/site-packages/certifi/__init__.py new file mode 100644 index 0000000..bf83fa9 --- /dev/null +++ b/venv/lib/python3.11/site-packages/certifi/__init__.py @@ -0,0 +1,4 @@ +from .core import contents, where + +__all__ = ["contents", "where"] +__version__ = "2025.04.26" diff --git a/venv/lib/python3.11/site-packages/certifi/__main__.py b/venv/lib/python3.11/site-packages/certifi/__main__.py new file mode 100644 index 0000000..8945b5d --- /dev/null +++ b/venv/lib/python3.11/site-packages/certifi/__main__.py @@ -0,0 +1,12 @@ +import argparse + +from certifi import contents, where + +parser = argparse.ArgumentParser() +parser.add_argument("-c", "--contents", action="store_true") +args = parser.parse_args() + +if args.contents: + print(contents()) +else: + print(where()) diff --git a/venv/lib/python3.11/site-packages/certifi/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/certifi/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..3ab5989 Binary files /dev/null and b/venv/lib/python3.11/site-packages/certifi/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/certifi/__pycache__/__main__.cpython-311.pyc b/venv/lib/python3.11/site-packages/certifi/__pycache__/__main__.cpython-311.pyc new file mode 100644 index 0000000..cb43aae Binary files /dev/null and b/venv/lib/python3.11/site-packages/certifi/__pycache__/__main__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/certifi/__pycache__/core.cpython-311.pyc b/venv/lib/python3.11/site-packages/certifi/__pycache__/core.cpython-311.pyc new file mode 100644 index 0000000..ea7d6bf Binary files /dev/null and b/venv/lib/python3.11/site-packages/certifi/__pycache__/core.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/certifi/cacert.pem b/venv/lib/python3.11/site-packages/certifi/cacert.pem new file mode 100644 index 0000000..b1d0cfd --- /dev/null +++ b/venv/lib/python3.11/site-packages/certifi/cacert.pem @@ -0,0 +1,4676 @@ + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
+# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua +AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2" +# Serial: 1289 +# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b +# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 +# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa +GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg +Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J +WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB +rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp ++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 +ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i +Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz +PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og +/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH +oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI +yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud 
+EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 +A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL +MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f +BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn +g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl +fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K +WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha +B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc +hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR +TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD +mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z +ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y +4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza +8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3" +# Serial: 1478 +# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf +# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 +# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM +V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB +4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr +H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd +8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv +vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT +mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe +btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc +T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt +WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ +c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A +4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD +VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG +CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 +aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu +dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw +czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G +A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg +Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 +7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem +d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd ++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B +4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN +t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x +DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 +k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s 
+zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j +Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT +mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK +4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm +NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR 
+TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Label: "SwissSign Gold CA - G2" +# Serial: 13492815561806991280 +# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 +# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 +# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln +biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF +MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT +d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 +76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ +bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c +6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE +emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd 
+MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt +MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y +MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y +FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi +aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM +gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB +qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 +lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn +8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 +45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO +UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 +O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC +bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv +GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a +77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC +hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 +92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp +Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w +ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt +Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +# Issuer: CN=SecureTrust CA O=SecureTrust Corporation +# Subject: CN=SecureTrust CA O=SecureTrust Corporation +# Label: "SecureTrust CA" +# Serial: 17199774589125277788362757014266862032 +# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 +# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 +# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz +MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv +cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz +Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO +0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao +wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj +7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS +8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT +BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg +JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 +6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ +3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm +D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS +CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +# Issuer: CN=Secure Global CA O=SecureTrust Corporation +# Subject: CN=Secure Global CA O=SecureTrust Corporation +# Label: "Secure Global CA" +# Serial: 9751836167731051554232119481456978597 +# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de +# SHA1 Fingerprint: 
3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b +# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx +MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg +Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ +iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa +/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ +jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI +HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 +sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w +gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw +KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG +AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L +URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO +H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm +I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY +iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 +Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ +DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END 
CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT +IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy +ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: CN=Certigna O=Dhimyotis +# Subject: CN=Certigna O=Dhimyotis +# Label: "Certigna" +# Serial: 18364802974209362175 +# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff +# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 +# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X +DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ +BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 +QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny +gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw +zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q +130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 +JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw +ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj +AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG +9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h +bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc +fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu +HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w +t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Subject: O=Chunghwa Telecom Co., Ltd. 
OU=ePKI Root Certification Authority +# Label: "ePKI Root Certification Authority" +# Serial: 28956088682735189655030529057352760477 +# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 +# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 +# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw +IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL +SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH +SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh +ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X +DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 +TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ +fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA +sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU +WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS +nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH +dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip +NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC +AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF +MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB +uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl +PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP +JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ +gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 +j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 +5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB +o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS +/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z +Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE +W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D +hNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +# Issuer: O=certSIGN OU=certSIGN ROOT CA +# Subject: O=certSIGN OU=certSIGN ROOT CA +# Label: "certSIGN ROOT CA" +# Serial: 35210227249154 +# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 +# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b +# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT +AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD +QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP +MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do +0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ +UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d +RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ +OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv 
+JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C +AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O +BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ +LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY +MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ +44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I +Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw +i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN +9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" +# Serial: 80544274841616 +# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 +# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 +# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG +EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 +MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl +cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR +dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB +pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM +b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm +aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz +IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT +lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz +AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 +VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG +ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 +BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG +AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M +U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh +bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C ++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F +uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 +XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. 
+# Label: "Microsec e-Szigno Root CA 2009" +# Serial: 14014712776195784473 +# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 +# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e +# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD +VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 +ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G +CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y +OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx +FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp +Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP +kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc +cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U +fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 +N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC +xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 ++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM +Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG +SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h +mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk +ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c +2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t +HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + 
+# Issuer: CN=Izenpe.com O=IZENPE S.A. +# Subject: CN=Izenpe.com O=IZENPE S.A. +# Label: "Izenpe.com" +# Serial: 917563065490389241595536686991402621 +# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 +# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 +# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 +MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 +ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD +VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j +b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq +scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO +xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H +LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX +uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD +yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ +JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q +rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN +BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L +hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB +QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ +HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu +Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg +QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB +BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA +A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb +laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 +awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo +JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw +LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT +VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk +LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb +UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ +QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ +naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls +QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking 
O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf +qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Network CA" +# Serial: 279744 +# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 +# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e +# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM +MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D +ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU +cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 +WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg +Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw +IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH +UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM +TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU +BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM +kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x +AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV +HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y +sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL +I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 +J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY +VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA +# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Label: "TWCA Root Certification Authority" +# Serial: 1 +# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 +# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 +# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES +MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU +V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz +WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO +LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE +AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH +K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX +RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z +rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx +3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq +hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC +MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls +XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D +lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn +aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ +YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 +# Label: "Security Communication RootCA2" +# Serial: 0 +# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 +# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 +# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl +MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe +U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX +DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy +dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj +YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV +OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr +zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM +VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ +hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO +ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw +awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs +OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 +DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF +coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc +okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 +t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy +1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ +SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 +-----END CERTIFICATE----- + +# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Label: "Actalis Authentication Root CA" +# Serial: 6271844772424770508 +# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 +# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac +# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 +-----BEGIN CERTIFICATE----- +MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE +BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w +MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 +IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC +SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 +ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv +UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX +4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 +KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ +gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb +rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ +51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F +be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe +KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F +v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn +fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 +jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz +ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt +ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL +e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz +WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V +SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j +pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX +X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok +fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R +K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU +ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU +LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT +LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 2 Root CA" +# Serial: 2 +# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 +# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 +# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr +6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV +L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 +1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx +MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ +QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB +arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr +Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi +FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS +P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN +9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz +uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h +9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s +A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t +OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo ++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 +KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 +DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us +H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ +I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 +5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h +3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz +Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 3 Root CA" +# Serial: 2 +# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec +# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 +# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d +-----BEGIN CERTIFICATE----- 
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y +ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E +N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 +tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX +0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c +/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X +KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY +zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS +O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D +34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP +K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv +Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj +QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV +cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS +IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 +HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa +O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv +033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u +dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE +kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 +3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD +u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq +4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 3" +# Serial: 1 +# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef +# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 +# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN +8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ +RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 +hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 +ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM +EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 +A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy +WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ 
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 +6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT +91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml +e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p +TpPDpFQUWw== +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 2009" +# Serial: 623603 +# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f +# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 +# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 +-----BEGIN CERTIFICATE----- +MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha +ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM +HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 +UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 +tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R +ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM +lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp +/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G +A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G +A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj +dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy +MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl +cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js +L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL +BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni +acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 +o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K +zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 +PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y +Johw1+qRzT65ysCQblrGXnRl11z+o+I= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 EV 2009" +# Serial: 623604 +# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 +# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 +# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw +NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV +BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn +ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 +3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z +qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR +p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 +HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw 
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea +HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw +Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh +c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E +RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt +dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku +Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp +3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 +nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF +CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na +xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX +KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 +-----END CERTIFICATE----- + +# Issuer: CN=CA Disig Root R2 O=Disig a.s. +# Subject: CN=CA Disig Root R2 O=Disig a.s. +# Label: "CA Disig Root R2" +# Serial: 10572350602393338211 +# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 +# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 +# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 +-----BEGIN CERTIFICATE----- +MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV +BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu +MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy +MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx +EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw +ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe +NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH +PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I +x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe +QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR +yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO +QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 +H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ +QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD +i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs +nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 +rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud +DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI +hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM +tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf +GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb +lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka ++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal +TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i +nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 +gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr +G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os +zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x +L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL +-----END CERTIFICATE----- + +# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Label: "ACCVRAIZ1" +# Serial: 6828503384748696800 +# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 +# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 +# SHA256 Fingerprint: 
9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 +-----BEGIN CERTIFICATE----- +MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE +AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw +CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ +BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND +VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb +qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY +HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo +G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA +lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr +IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ +0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH +k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 +4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO +m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa +cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl +uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI +KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls +ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG +AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 +VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT +VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG +CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA +cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA +QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA +7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA +cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA +QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA +czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu +aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt +aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud +DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF +BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp +D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU +JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m +AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD +vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms +tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH +7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h +I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA +h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF +d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H +pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA Global Root CA" +# Serial: 3262 +# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 +# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 +# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx +EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT 
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 +NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT +B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF +10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz +0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh +MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH +zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc +46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 +yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi +laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP +oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA +BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE +qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm +4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL +1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn +LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF +H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo +RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ +nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh +15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW +6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW +nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j +wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz +aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy +KwbQBM0= +-----END CERTIFICATE----- + +# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Label: "TeliaSonera Root CA v1" +# Serial: 199041966741090107964904287217786801558 +# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c +# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 +# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw +NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv +b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD +VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F +VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 +7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X +Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ +/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs +81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm +dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe +Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu +sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 +pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs +slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ +arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD +VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG +9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl +dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx 
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj +TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed +Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 +Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI +OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 +vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW +t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn +HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx +SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 2" +# Serial: 1 +# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a +# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 +# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd +AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC +FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi +1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq +jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ +wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ +WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy +NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC +uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw +IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 +g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP +BSeOE6Fuwg== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot 2011 O=Atos +# Subject: CN=Atos TrustedRoot 2011 O=Atos +# Label: "Atos TrustedRoot 2011" +# Serial: 6643877497813316402 +# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 +# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 +# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE +AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG +EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM +FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC +REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp +Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM +VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ +SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ 
+4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L +cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi +eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG +A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 +DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j +vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP +DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc +maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D +lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv +KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 1 G3" +# Serial: 687049649626669250736271037606554624078720034195 +# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab +# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 +# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 +MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV +wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe +rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 +68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh +4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp +UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o +abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc +3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G +KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt +hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO +Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt +zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD +ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC +MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 +cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN +qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 +YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv +b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 +8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k +NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj +ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp +q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt +nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2 G3" +# Serial: 390156079458959257446133169266079962026824725800 +# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 +# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 +# SHA256 Fingerprint: 
8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 +MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf +qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW +n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym +c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ +O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 +o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j +IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq +IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz +8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh +vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l +7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG +cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD +ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 +AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC +roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga +W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n +lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE ++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV +csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd +dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg +KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM +HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 +WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3 G3" +# Serial: 268090761170461462463995952157327242137089239581 +# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 +# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d +# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 +MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR +/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu +FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR +U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c +ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR +FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k +A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw +eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl +sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp +VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q 
+A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ +ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD +ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI +FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv +oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg +u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP +0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf +3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl +8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ +DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN +PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ +ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G2" +# Serial: 15385348160840213938643033620894905419 +# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d +# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f +# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 +-----BEGIN CERTIFICATE----- +MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA +n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc +biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp +EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA +bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu +YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW +BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI +QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I +0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni +lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 +B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv +ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +IhNzbM8m9Yop5w== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G3" +# Serial: 15459312981008553731928384953135426796 +# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb +# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 +# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg 
+RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf +Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q +RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD +AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY +JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv +6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G2" +# Serial: 4293743540046975378534879503202253541 +# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 +# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 +# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G3" +# Serial: 7089244469030293291760083333884364146 +# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca +# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e +# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe +Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw +EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x +IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG +fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO 
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd +BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx +AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ +oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 +sycX +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Trusted Root G4" +# Serial: 7451500558977370777930084869016614236 +# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 +# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 +# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg +RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y +ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If +xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV +ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO +DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ +jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ +CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi +EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM +fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY +uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK +chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t +9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 +SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd ++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc +fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa +sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N +cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N +0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie +4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI +r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 +/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm +gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ +-----END CERTIFICATE----- + +# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Label: "COMODO RSA Certification Authority" +# Serial: 101909084537582093308941363524873193117 +# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 +# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 +# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 +-----BEGIN CERTIFICATE----- +MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB +hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G 
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV +BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT +EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR +6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X +pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC +9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV +/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf +Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z ++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w +qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah +SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC +u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf +Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq +crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB +/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl +wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM +4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV +2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna +FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ +CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK +boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke +jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL +S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb +QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl +0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB +NVOFBkpdn627G190 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Label: "USERTrust RSA Certification Authority" +# Serial: 2645093764781058787591871645665788717 +# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 +# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e +# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB +iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl +cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV +BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw +MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV +BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B +3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY +tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ +Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 +VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT +79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 +c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT 
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l +c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee +UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE +Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G +A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF +Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO +VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 +ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs +8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR +iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze +Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ +XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ +qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB +VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB +L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG +jjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Label: "USERTrust ECC Certification Authority" +# Serial: 123013823720199481456569720443997572134 +# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 +# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 +# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl +eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT +JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT +Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg +VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo +I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G +A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB +zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW +RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Label: "GlobalSign ECC Root CA - R5" +# Serial: 32785792099990507226680698011560947931244 +# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 +# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa +# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc 
+8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke +hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI +KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg +515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO +xwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Label: "IdenTrust Commercial Root CA 1" +# Serial: 13298821034946342390520003877796839426 +# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 +# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 +# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu +VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw +MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw +JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT +3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU ++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp +S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 +bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi +T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL +vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK +Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK +dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT +c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv +l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N +iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD +ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt +LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 +nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 ++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK +W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT +AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq +l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG +4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ +mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A +7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Label: "IdenTrust Public Sector Root CA 1" +# Serial: 13298821034946342390521976156843933698 +# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba +# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd +# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu +VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN 
+MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 +MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 +ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy +RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS +bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF +/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R +3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw +EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy +9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V +GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ +2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV +WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD +W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN +AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj +t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV +DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 +TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G +lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW +mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df +WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 ++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ +tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA +GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv +8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - G2" +# Serial: 1246989352 +# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 +# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 +# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 +-----BEGIN CERTIFICATE----- +MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 +cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs +IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz +dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy +NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu +dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt +dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 +aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T +RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN +cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW +wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 +U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 +jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN +BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ +jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ +Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v +1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R +nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH +VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - EC1" +# Serial: 51543124481930649114116133369 +# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc +# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 +# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 +-----BEGIN CERTIFICATE----- +MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG +A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 +d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu +dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq +RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy +MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD +VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 +L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g +Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD +ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi +A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt +ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH +Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O +BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC +R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX +hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G +-----END CERTIFICATE----- + +# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority +# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority +# Label: "CFCA EV ROOT" +# Serial: 407555286 +# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 +# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 +# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD +TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y +aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx +MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j +aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP +T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 +sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL +TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 +/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp +7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz +EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt +hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP +a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot +aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg +TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV +PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv +cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL +tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd +BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB +ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT +ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL +jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS +ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe +/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GB CA" +# Serial: 157768595616588414422159278966750757568 +# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d +# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed +# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 +-----BEGIN CERTIFICATE----- +MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt +MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg +Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i +YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x +CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG +b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh +bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 +HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx +WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX +1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk +u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P +99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r +M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB +BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh +cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 +gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO +ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf +aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic +Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= +-----END CERTIFICATE----- + +# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
+# Label: "SZAFIR ROOT CA2" +# Serial: 357043034767186914217277344587386743377558296292 +# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 +# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de +# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 +ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw +NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L +cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg +Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN +QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT +3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw +3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 +3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 +BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN +XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF +AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw +8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG +nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP +oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy +d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg +LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA 2" +# Serial: 44979900017204383099463764357512596969 +# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 +# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 +# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB +gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu +QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG +A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz +OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ +VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 +b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA +DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn +0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB +OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE +fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E +Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m +o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i +sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW +OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez +Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS +adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n +3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ +F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf +CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 +XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm +djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ +WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb +AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq +P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko +b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj +XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P +5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi +DrW5viSP +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 +AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq +NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw +TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Label: "AC RAIZ FNMT-RCM" +# Serial: 485876308206448804701554682760554759 +# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d +# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 +# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx +CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ +WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ +BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG +Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ +yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf +BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz +WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF +tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z +374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC +IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL +mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 +wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS +MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 +ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet +UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H +YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 +LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 +RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM +LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf +77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N +JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm +fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp +6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp +1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B +9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok +RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv +uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 1 O=Amazon +# Subject: CN=Amazon Root CA 1 O=Amazon +# Label: "Amazon Root CA 1" +# Serial: 143266978916655856878034712317230054538369994 +# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 +# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 +# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM +9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw +IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 +VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L +93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm +jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA +A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI +U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs +N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv +o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU +5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy +rqXRfboQnoZsG4q5WTP468SQvvG5 +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 2 O=Amazon +# Subject: CN=Amazon Root CA 2 O=Amazon +# Label: "Amazon Root CA 2" +# Serial: 143266982885963551818349160658925006970653239 +# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 +# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a +# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK +gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ +W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg +1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K +8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r +2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me +z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR +8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj +mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz +7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 ++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI +0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm +UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 +LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY ++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS +k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl +7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm +btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl +urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ +fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 +n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE +76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H +9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT +4PsJYGw= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 3 O=Amazon +# Subject: CN=Amazon Root CA 3 O=Amazon +# Label: "Amazon Root CA 3" +# Serial: 143266986699090766294700635381230934788665930 +# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 +# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e +# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 +-----BEGIN CERTIFICATE----- +MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl +ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr +ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr +BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM +YyRIHN8wfdVoOw== +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 4 O=Amazon +# Subject: CN=Amazon Root CA 4 O=Amazon +# Label: "Amazon Root CA 4" +# Serial: 143266989758080763974105200630763877849284878 +# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd +# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be +# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 +-----BEGIN CERTIFICATE----- +MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi +9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk +M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB +MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw +CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW +1KyLa2tJElMzrdfkviT8tQp21KW8EA== +-----END CERTIFICATE----- + +# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" +# Serial: 1 +# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 +# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca +# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 +-----BEGIN CERTIFICATE----- +MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx +GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp +bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w +KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 +BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy +dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG +EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll +IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU +QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT +TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg +LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 +a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr +LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr 
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X +YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ +iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f +AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH +V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh +AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf +IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 +lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c +8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf +lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= +-----END CERTIFICATE----- + +# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Label: "GDCA TrustAUTH R5 ROOT" +# Serial: 9009899650740120186 +# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 +# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 +# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 +-----BEGIN CERTIFICATE----- +MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE +BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ +IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 +MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV +BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w +HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj +Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj +TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u +KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj +qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm +MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 +ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP +zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk +L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC +jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA +HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC +AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg +p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm +DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 +COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry +L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf +JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg +IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io +2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV +09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ +XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq +T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe +MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Label: "SSL.com Root Certification Authority RSA" +# Serial: 8875640296558310041 +# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 +# SHA1 Fingerprint: 
b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb +# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 +-----BEGIN CERTIFICATE----- +MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE +BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK +DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz +OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv +bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R +xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX +qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC +C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 +6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh +/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF +YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E +JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc +US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 +ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm ++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi +M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G +A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV +cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc +Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs +PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ +q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 +cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr +a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I +H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y +K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu +nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf +oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY +Ic2wBlX7Jz9TkHCpBB5XJ7k= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com Root Certification Authority ECC" +# Serial: 8495723813297216424 +# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e +# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a +# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 +-----BEGIN CERTIFICATE----- +MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz +WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 +b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS +b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI +7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg +CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud 
+EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD +VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T +kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ +gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority RSA R2" +# Serial: 6248227494352943350 +# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 +# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a +# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c +-----BEGIN CERTIFICATE----- +MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV +BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE +CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy +MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G +A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD +DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq +M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf +OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa +4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 +HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR +aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA +b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ +Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV +PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO +pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu +UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY +MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV +HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 +9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW +s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 +Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg +cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM +79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz +/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt +ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm +Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK +QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ +w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi +S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 +mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority ECC" +# Serial: 3182246526754555285 +# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 +# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d +# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 +-----BEGIN CERTIFICATE----- +MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC 
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx +NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv +bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA +VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku +WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX +5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ +ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg +h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Label: "GlobalSign Root CA - R6" +# Serial: 1417766617973444989252670301619537 +# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae +# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 +# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg +MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx +MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET +MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI +xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k +ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD +aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw +LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw +1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX +k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 +SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h +bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n +WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY +rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce +MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu +bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN +nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt +Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 +55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj +vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf +cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz +oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp +nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs +pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v +JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R +8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 +5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE 
WISeKey Global Root GC CA" +# Serial: 44084345621038548146064804565436152554 +# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 +# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 +# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d +-----BEGIN CERTIFICATE----- +MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw +CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 +bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg +Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ +BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu +ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS +b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni +eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W +p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T +rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV +57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg +Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 +-----END CERTIFICATE----- + +# Issuer: CN=UCA Global G2 Root O=UniTrust +# Subject: CN=UCA Global G2 Root O=UniTrust +# Label: "UCA Global G2 Root" +# Serial: 124779693093741543919145257850076631279 +# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 +# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a +# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH +bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x +CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds +b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr +b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 +kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm +VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R +VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc +C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj +tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY +D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv +j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl +NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 +iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP +O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV +ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj +L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 +1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl +1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU +b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV +PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj +y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb +EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg +DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI ++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy 
+YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX +UB+K+wb1whnw0A== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Extended Validation Root O=UniTrust +# Subject: CN=UCA Extended Validation Root O=UniTrust +# Label: "UCA Extended Validation Root" +# Serial: 106100277556486529736699587978573607008 +# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 +# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a +# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF +eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx +MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV +BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog +D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS +sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop +O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk +sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi +c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj +VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz +KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ +TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G +sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs +1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD +fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T +AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN +l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR +ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ +VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 +c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp +4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s +t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj +2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO +vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C +xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx +cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM +fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax +-----END CERTIFICATE----- + +# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Label: "Certigna Root CA" +# Serial: 269714418870597844693661054334862075617 +# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 +# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 +# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 +-----BEGIN CERTIFICATE----- +MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw +WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw +MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x +MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD +VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX +BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw +ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO 
+ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M +CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu +I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm +TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh +C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf +ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz +IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT +Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k +JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 +hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB +GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of +1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov +L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo +dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr +aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq +hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L +6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG +HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 +0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB +lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi +o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 +gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v +faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 +Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh +jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw +3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign Root CA - G1" +# Serial: 235931866688319308814040 +# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac +# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c +# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 +-----BEGIN CERTIFICATE----- +MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD +VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU +ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH +MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO +MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv +Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz +f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO +8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq +d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM +tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt +Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB +o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x +PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM +wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d +GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH +6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby 
+RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx +iN66zB+Afko= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign ECC Root CA - G3" +# Serial: 287880440101571086945156 +# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 +# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 +# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b +-----BEGIN CERTIFICATE----- +MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG +EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo +bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g +RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ +TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s +b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 +WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS +fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB +zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq +hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB +CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD ++JbNR6iC8hZVdyR+EhCVBCyj +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Label: "emSign Root CA - C1" +# Serial: 825510296613316004955058 +# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 +# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 +# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f +-----BEGIN CERTIFICATE----- +MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG +A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg +SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v +dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ +BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ +HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH +3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH +GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c +xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 +aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq +TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 +/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 +kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG +YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT ++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo +WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Label: "emSign ECC Root CA - C3" +# Serial: 582948710642506000014504 +# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 +# SHA1 Fingerprint: 
b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 +# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 +-----BEGIN CERTIFICATE----- +MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG +EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx +IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND +IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci +MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti +sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O +BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB +Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c +3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J +0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Label: "Hongkong Post Root CA 3" +# Serial: 46170865288971385588281144162979347873371282084 +# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 +# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 +# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 +-----BEGIN CERTIFICATE----- +MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL +BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ +SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n +a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 +NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT +CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u +Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO +dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI +VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV +9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY +2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY +vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt +bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb +x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ +l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK +TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj +Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e +i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw +DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG +7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk +MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr +gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk +GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS +3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm +Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ +l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c +JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP +L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa 
+LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG +mpv0 +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft ECC Root Certificate Authority 2017" +# Serial: 136839042543790627607696632466672567020 +# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 +# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 +# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 +-----BEGIN CERTIFICATE----- +MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD +VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw +MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV +UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy +b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR +ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb +hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 +FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV +L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB +iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft RSA Root Certificate Authority 2017" +# Serial: 40975477897264996090493496164228220339 +# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 +# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 +# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 +-----BEGIN CERTIFICATE----- +MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl +MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw +NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 +IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG +EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N +aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ +Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 +ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 +HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm +gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ +jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc +aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG +YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 +W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K +UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH ++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q +W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC +LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC 
+gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 +tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh +SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 +TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 +pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR +xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp +GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 +dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN +AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB +RA+GsCyRxj3qrg+E +-----END CERTIFICATE----- + +# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Label: "e-Szigno Root CA 2017" +# Serial: 411379200276854331539784714 +# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 +# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 +# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 +-----BEGIN CERTIFICATE----- +MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV +BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk +LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv +b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ +BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg +THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v +IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv +xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H +Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB +eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo +jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ ++efcMQ== +-----END CERTIFICATE----- + +# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Label: "certSIGN Root CA G2" +# Serial: 313609486401300475190 +# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 +# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 +# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV +BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g +Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ +BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ +R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF +dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw +vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ +uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp +n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs +cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW +xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P +rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF +DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx +DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy +LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C +eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ 
+d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq +kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC +b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl +qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 +OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c +NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk +ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO +pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj +03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk +PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE +1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX +QRBdJ3NghVdJIgc= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global Certification Authority" +# Serial: 1846098327275375458322922162 +# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e +# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 +# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 +-----BEGIN CERTIFICATE----- +MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw +CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x +ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 +c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx +OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI +SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI +b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn +swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu +7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 +1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW +80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP +JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l +RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw +hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 +coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc +BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n +twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud +DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W +0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe +uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q +lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB +aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE +sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT +MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe +qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh +VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 +h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 +EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK +yeC2nOnOcXHebD8WpHk= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. 
+# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P256 Certification Authority" +# Serial: 4151900041497450638097112925 +# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 +# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf +# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 +-----BEGIN CERTIFICATE----- +MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG +SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN +FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w +DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw +CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh +DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P384 Certification Authority" +# Serial: 2704997926503831671788816187 +# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 +# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 +# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 +-----BEGIN CERTIFICATE----- +MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB +BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ +j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF +1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G +A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 +AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC +MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu +Sw== +-----END CERTIFICATE----- + +# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. +# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
+# Label: "NAVER Global Root Certification Authority" +# Serial: 9013692873798656336226253319739695165984492813 +# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b +# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 +# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 +-----BEGIN CERTIFICATE----- +MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM +BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG +T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx +CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD +b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA +iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH +38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE +HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz +kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP +szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq +vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf +nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG +YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo +0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a +CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K +AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I +36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB +Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN +qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj +cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm ++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL +hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe +lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 +p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 +piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR +LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX +5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO +dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul +9XXeifdy +-----END CERTIFICATE----- + +# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS" +# Serial: 131542671362353147877283741781055151509 +# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb +# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a +# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb +-----BEGIN CERTIFICATE----- +MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw +CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw +FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S +Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5 +MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL +DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS +QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH 
+sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK +Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu +SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC +MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy +v+c= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Label: "GlobalSign Root R46" +# Serial: 1552617688466950547958867513931858518042577 +# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef +# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90 +# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA +MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD +VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy +MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt +c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ +OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG +vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud +316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo +0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE +y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF +zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE ++cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN +I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs +x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa +ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC +4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4 +7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg +JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti +2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk +pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF +FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt +rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk +ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5 +u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP +4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6 +N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3 +vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6 +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Label: "GlobalSign Root E46" +# Serial: 1552617690338932563915843282459653771421763 +# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f +# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84 +# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58 +-----BEGIN CERTIFICATE----- +MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx +CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD +ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw +MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex 
+HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq +R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd +yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ +7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8 ++RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= +-----END CERTIFICATE----- + +# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Label: "GLOBALTRUST 2020" +# Serial: 109160994242082918454945253 +# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8 +# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2 +# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a +-----BEGIN CERTIFICATE----- +MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG +A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw +FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx +MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u +aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq +hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b +RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z +YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3 +QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw +yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+ +BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ +SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH +r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0 +4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me +dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw +q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2 +nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu +H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA +VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC +XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd +6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf ++I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi +kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7 +wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB +TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C +MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn +4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I +aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy +qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg== +-----END CERTIFICATE----- + +# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Label: "ANF Secure Server Root CA" +# Serial: 996390341000653745 +# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96 +# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74 +# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99 +-----BEGIN CERTIFICATE----- 
+MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV +BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk +YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV +BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN +MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF +UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD +VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v +dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj +cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q +yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH +2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX +H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL +zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR +p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz +W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/ +SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn +LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3 +n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B +u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj +o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC +AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L +9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej +rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK +pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0 +vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq +OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ +/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9 +2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI ++PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2 +MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo +tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw= +-----END CERTIFICATE----- + +# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. 
OU=Certum Certification Authority +# Label: "Certum EC-384 CA" +# Serial: 160250656287871593594747141429395092468 +# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1 +# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed +# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6 +-----BEGIN CERTIFICATE----- +MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw +CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw +JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT +EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0 +WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT +LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX +BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE +KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm +Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8 +EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J +UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn +nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k= +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Root CA" +# Serial: 40870380103424195783807378461123655149 +# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29 +# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5 +# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd +-----BEGIN CERTIFICATE----- +MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6 +MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu +MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV +BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw +MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg +U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo +b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ +n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q +p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq +NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF +8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3 +HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa +mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi +7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF +ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P +qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ +v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6 +Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1 +vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD +ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4 +WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo +zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR +5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ +GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf 
+5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq +0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D +P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM +qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP +0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf +E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb +-----END CERTIFICATE----- + +# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Label: "TunTrust Root CA" +# Serial: 108534058042236574382096126452369648152337120275 +# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4 +# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb +# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL +BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg +Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv +b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG +EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u +IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ +n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd +2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF +VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ +GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF +li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU +r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2 +eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb +MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg +jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB +7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW +5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE +ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0 +90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z +xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu +QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4 +FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH +22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP +xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn +dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5 +Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b +nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ +CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH +u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj +d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS RSA Root CA 2021" +# Serial: 76817823531813593706434026085292783742 +# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91 +# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d +# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d +-----BEGIN 
CERTIFICATE----- +MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs +MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg +Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL +MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl +YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv +b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l +mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE +4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv +a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M +pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw +Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b +LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY +AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB +AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq +E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr +W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ +CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE +AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU +X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3 +f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja +H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP +JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P +zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt +jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0 +/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT +BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79 +aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW +xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU +63ZTGI0RmLo= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS ECC Root CA 2021" +# Serial: 137515985548005187474074462014555733966 +# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0 +# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48 +# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01 +-----BEGIN CERTIFICATE----- +MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw +CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh +cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v +dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG +A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj +aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg +Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7 +KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y +STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD +AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw +SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN +nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: 
CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 1977337328857672817 +# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3 +# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe +# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1 +MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc +tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd +IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j +b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC +AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw +ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m +iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF +Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ +hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P +Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE +EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV +1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t +CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR +5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw +f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9 +ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK +GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV +-----END CERTIFICATE----- + +# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. 
+# Label: "vTrus ECC Root CA" +# Serial: 630369271402956006249506845124680065938238527194 +# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85 +# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1 +# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3 +-----BEGIN CERTIFICATE----- +MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw +RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY +BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz +MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u +LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0 +v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd +e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw +V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA +AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG +GJTO +-----END CERTIFICATE----- + +# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Label: "vTrus Root CA" +# Serial: 387574501246983434957692974888460947164905180485 +# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc +# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7 +# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87 +-----BEGIN CERTIFICATE----- +MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL +BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x +FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx +MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s +THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD +ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc +IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU +AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+ +GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9 +8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH +flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt +J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim +0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN +pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ +UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW +OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB +AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet +8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd +nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j +bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM +Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv +TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS +S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr +I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9 +b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB +UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P +Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven +sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s= +-----END CERTIFICATE----- + +# 
Issuer: CN=ISRG Root X2 O=Internet Security Research Group +# Subject: CN=ISRG Root X2 O=Internet Security Research Group +# Label: "ISRG Root X2" +# Serial: 87493402998870891108772069816698636114 +# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5 +# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af +# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70 +-----BEGIN CERTIFICATE----- +MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw +CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg +R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00 +MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT +ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw +EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW ++1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9 +ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI +zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW +tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1 +/q4AaOeMSQ+2b1tbFfLn +-----END CERTIFICATE----- + +# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Label: "HiPKI Root CA - G1" +# Serial: 60966262342023497858655262305426234976 +# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3 +# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60 +# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc +-----BEGIN CERTIFICATE----- +MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa +Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3 +YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw +qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv +Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6 +lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz +Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ +KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK +FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj +HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr +y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ +/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM +a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6 +fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG +SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi +7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc +SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza +ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc +XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg +iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho +L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF +Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr 
+kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+ +vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU +YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 159662223612894884239637590694 +# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc +# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28 +# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2 +-----BEGIN CERTIFICATE----- +MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD +VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw +MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g +UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT +BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx +uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV +HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/ ++wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147 +bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R1 O=Google Trust Services LLC +# Subject: CN=GTS Root R1 O=Google Trust Services LLC +# Label: "GTS Root R1" +# Serial: 159662320309726417404178440727 +# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40 +# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a +# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo +27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w +Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw +TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl +qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH +szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8 +Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk +MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92 +wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p +aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN +VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb +C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe +QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy +h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4 +7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J +ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef +MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/ 
+Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT +6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ +0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm +2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb +bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R2 O=Google Trust Services LLC +# Subject: CN=GTS Root R2 O=Google Trust Services LLC +# Label: "GTS Root R2" +# Serial: 159662449406622349769042896298 +# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc +# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94 +# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8 +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt +nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY +6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu +MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k +RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg +f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV ++3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo +dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW +Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa +G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq +gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H +vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8 +0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC +B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u +NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg +yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev +HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6 +xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR +TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg +JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV +7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl +6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R3 O=Google Trust Services LLC +# Subject: CN=GTS Root R3 O=Google Trust Services LLC +# Label: "GTS Root R3" +# Serial: 159662495401136852707857743206 +# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73 +# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46 +# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48 +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi 
+AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G +jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2 +4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7 +VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm +ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R4 O=Google Trust Services LLC +# Subject: CN=GTS Root R4 O=Google Trust Services LLC +# Label: "GTS Root R4" +# Serial: 159662532700760215368942768210 +# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8 +# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47 +# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi +QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR +HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D +9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8 +p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD +-----END CERTIFICATE----- + +# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj +# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj +# Label: "Telia Root CA v2" +# Serial: 7288924052977061235122729490515358 +# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48 +# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd +# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx +CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE +AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1 +NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ +MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP +ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq +AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9 +vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9 +lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD +n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT +7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o +6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC +TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6 +WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R +DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI +pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj +YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy +rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw +AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ +8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi +0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM 
+A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS +SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K +TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF +6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er +3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt +Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT +VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW +ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA +rBPuUBQemMc= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST BR Root CA 1 2020" +# Serial: 165870826978392376648679885835942448534 +# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed +# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67 +# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44 +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5 +NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS +zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0 +QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/ +VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW +wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV +dWNbFJWcHwHP2NVypw87 +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST EV Root CA 1 2020" +# Serial: 126288379621884218666039612629459926992 +# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e +# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07 +# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5 +NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC +/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD +wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3 +OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA 
+y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb +gfM0agPnIjhQW+0ZT0MW +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS ECC P384 Root G5" +# Serial: 13129116028163249804115411775095713523 +# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed +# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee +# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05 +-----BEGIN CERTIFICATE----- +MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp +Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2 +MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ +bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS +7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp +0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS +B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49 +BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ +LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4 +DXZDjC5Ty3zfDBeWUA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS RSA4096 Root G5" +# Serial: 11930366277458970227240571539258396554 +# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1 +# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35 +# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75 +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN +MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT +HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN +NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs +IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+ +ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0 +2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp +wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM +pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD +nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po +sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx +Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd +Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX +KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe +XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL +tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv +TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN +AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw +GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H +PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF +O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ +REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik +AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv 
+/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+ +p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw +MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF +qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK +ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+ +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root R1 O=Certainly +# Subject: CN=Certainly Root R1 O=Certainly +# Label: "Certainly Root R1" +# Serial: 188833316161142517227353805653483829216 +# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12 +# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af +# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw +PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy +dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9 +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0 +YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2 +1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT +vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed +aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0 +1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5 +r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5 +cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ +wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ +6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA +2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH +Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR +eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB +/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u +d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr +PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d +8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi +1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd +rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di +taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7 +lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj +yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn +Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy +yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n +wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6 +OV+KmalBWQewLK8= +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root E1 O=Certainly +# Subject: CN=Certainly Root E1 O=Certainly +# Label: "Certainly Root E1" +# Serial: 8168531406727139161245376702891150584 +# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9 +# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b +# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2 +-----BEGIN CERTIFICATE----- +MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw +CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu +bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ +BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s +eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK 
++IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2 +QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4 +hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm +ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG +BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR +-----END CERTIFICATE----- + +# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. +# Label: "Security Communication ECC RootCA1" +# Serial: 15446673492073852651 +# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86 +# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41 +# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11 +-----BEGIN CERTIFICATE----- +MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT +AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD +VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx +NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT +HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5 +IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl +dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK +ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu +9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O +be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k= +-----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA1" +# Serial: 113562791157148395269083148143378328608 +# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90 +# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a +# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU +MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI +T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz +MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF +SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh +bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z +xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ +spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5 +58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR +at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll +5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq +nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK +V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/ +pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO +z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn +jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+ +WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF +7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE +AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4 
+YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli +awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u ++2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88 +X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN +SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo +P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI ++pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz +znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9 +eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2 +YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy +r/6zcCwupvI= +-----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA2" +# Serial: 58605626836079930195615843123109055211 +# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c +# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6 +# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82 +-----BEGIN CERTIFICATE----- +MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw +CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ +VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy +MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ +TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS +b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B +IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+ ++kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK +sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA +94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B +43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Label: "Sectigo Public Server Authentication Root E46" +# Serial: 88989738453351742415770396670917916916 +# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01 +# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a +# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83 +-----BEGIN CERTIFICATE----- +MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw +CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T +ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN +MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG +A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT +ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC +WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+ +6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B +Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa +qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q +4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# 
Label: "Sectigo Public Server Authentication Root R46" +# Serial: 156256931880233212765902055439220583700 +# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5 +# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38 +# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06 +-----BEGIN CERTIFICATE----- +MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf +MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD +Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw +HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY +MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp +YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa +ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz +SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf +iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X +ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3 +IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS +VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE +SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu ++Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt +8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L +HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt +zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P +AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c +mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ +YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52 +gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA +Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB +JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX +DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui +TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5 +dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65 +LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp +0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY +QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS RSA Root CA 2022" +# Serial: 148535279242832292258835760425842727825 +# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da +# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca +# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed +-----BEGIN CERTIFICATE----- +MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO +MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD +DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX +DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw +b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC +AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP +L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY +t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins +S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3 
+PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO +L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3 +R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w +dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS ++YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS +d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG +AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f +gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j +BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z +NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt +hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM +QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf +R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ +DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW +P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy +lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq +bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w +AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q +r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji +Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU +98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS ECC Root CA 2022" +# Serial: 26605119622390491762507526719404364228 +# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5 +# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39 +# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43 +-----BEGIN CERTIFICATE----- +MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT +U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2 +MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh +dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm +acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN +SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME +GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW +uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp +15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN +b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA ECC TLS 2021" +# Serial: 81873346711060652204712539181482831616 +# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8 +# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd +# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8 +-----BEGIN CERTIFICATE----- +MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w +LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w +CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0 +MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF 
+Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI +zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X +tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4 +AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2 +KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD +aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu +CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo +9H1/IISpQuQo +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Subject: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA RSA TLS 2021" +# Serial: 111436099570196163832749341232207667876 +# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2 +# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48 +# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f +-----BEGIN CERTIFICATE----- +MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM +MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx +MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00 +MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD +QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN +BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z +4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv +Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ +kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs +GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln +nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh +3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD +0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy +geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8 +ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB +c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI +pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB +DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS +4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs +o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ +qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw +xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM +rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4 +AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR +0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY +o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5 +dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE +oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ== +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia Global Root CA G3" +# Serial: 576386314500428537169965010905813481816650257167 +# MD5 Fingerprint: 30:42:1b:b7:bb:81:75:35:e4:16:4f:53:d2:94:de:04 +# SHA1 Fingerprint: 63:cf:b6:c1:27:2b:56:e4:88:8e:1c:23:9a:b6:2e:81:47:24:c3:c7 +# SHA256 Fingerprint: e0:d3:22:6a:eb:11:63:c2:e4:8f:f9:be:3b:50:b4:c6:43:1b:e7:bb:1e:ac:c5:c3:6b:5d:5e:c5:09:03:9a:08 +-----BEGIN CERTIFICATE----- +MIIFpTCCA42gAwIBAgIUZPYOZXdhaqs7tOqFhLuxibhxkw8wDQYJKoZIhvcNAQEM +BQAwWjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dp +ZXMsIEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHMzAe +Fw0yMTA1MjAwMjEwMTlaFw00NjA1MTkwMjEwMTlaMFoxCzAJBgNVBAYTAkNOMSUw +IwYDVQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtU +cnVzdEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4IC +DwAwggIKAoICAQDAMYJhkuSUGwoqZdC+BqmHO1ES6nBBruL7dOoKjbmzTNyPtxNS +T1QY4SxzlZHFZjtqz6xjbYdT8PfxObegQ2OwxANdV6nnRM7EoYNl9lA+sX4WuDqK +AtCWHwDNBSHvBm3dIZwZQ0WhxeiAysKtQGIXBsaqvPPW5vxQfmZCHzyLpnl5hkA1 +nyDvP+uLRx+PjsXUjrYsyUQE49RDdT/VP68czH5GX6zfZBCK70bwkPAPLfSIC7Ep +qq+FqklYqL9joDiR5rPmd2jE+SoZhLsO4fWvieylL1AgdB4SQXMeJNnKziyhWTXA +yB1GJ2Faj/lN03J5Zh6fFZAhLf3ti1ZwA0pJPn9pMRJpxx5cynoTi+jm9WAPzJMs +hH/x/Gr8m0ed262IPfN2dTPXS6TIi/n1Q1hPy8gDVI+lhXgEGvNz8teHHUGf59gX +zhqcD0r83ERoVGjiQTz+LISGNzzNPy+i2+f3VANfWdP3kXjHi3dqFuVJhZBFcnAv +kV34PmVACxmZySYgWmjBNb9Pp1Hx2BErW+Canig7CjoKH8GB5S7wprlppYiU5msT +f9FkPz2ccEblooV7WIQn3MSAPmeamseaMQ4w7OYXQJXZRe0Blqq/DPNL0WP3E1jA +uPP6Z92bfW1K/zJMtSU7/xxnD4UiWQWRkUF3gdCFTIcQcf+eQxuulXUtgQIDAQAB +o2MwYTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEDk5PIj7zjKsK5Xf/Ih +MBY027ySMB0GA1UdDgQWBBRA5OTyI+84yrCuV3/yITAWNNu8kjAOBgNVHQ8BAf8E +BAMCAQYwDQYJKoZIhvcNAQEMBQADggIBACY7UeFNOPMyGLS0XuFlXsSUT9SnYaP4 +wM8zAQLpw6o1D/GUE3d3NZ4tVlFEbuHGLige/9rsR82XRBf34EzC4Xx8MnpmyFq2 +XFNFV1pF1AWZLy4jVe5jaN/TG3inEpQGAHUNcoTpLrxaatXeL1nHo+zSh2bbt1S1 +JKv0Q3jbSwTEb93mPmY+KfJLaHEih6D4sTNjduMNhXJEIlU/HHzp/LgV6FL6qj6j +ITk1dImmasI5+njPtqzn59ZW/yOSLlALqbUHM/Q4X6RJpstlcHboCoWASzY9M/eV +VHUl2qzEc4Jl6VL1XP04lQJqaTDFHApXB64ipCz5xUG3uOyfT0gA+QEEVcys+TIx +xHWVBqB/0Y0n3bOppHKH/lmLmnp0Ft0WpWIp6zqW3IunaFnT63eROfjXy9mPX1on +AX1daBli2MjN9LdyR75bl87yraKZk62Uy5P2EgmVtqvXO9A/EcswFi55gORngS1d +7XB4tmBZrOFdRWOPyN9yaFvqHbgB8X7754qz41SgOAngPN5C8sLtLpvzHzW2Ntjj +gKGLzZlkD8Kqq7HK9W+eQ42EVJmzbsASZthwEPEGNTNDqJwuuhQxzhB/HIbjj9LV ++Hfsm6vxL2PZQl/gZ4FkkfGXL/xuJvYz+NO1+MRiqzFRJQJ6+N1rZdVtTTDIZbpo +FGWsJwt0ivKH +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia Global Root CA G4" +# Serial: 451799571007117016466790293371524403291602933463 +# MD5 Fingerprint: 54:dd:b2:d7:5f:d8:3e:ed:7c:e0:0b:2e:cc:ed:eb:eb +# SHA1 Fingerprint: 57:73:a5:61:5d:80:b2:e6:ac:38:82:fc:68:07:31:ac:9f:b5:92:5a +# SHA256 Fingerprint: be:4b:56:cb:50:56:c0:13:6a:52:6d:f4:44:50:8d:aa:36:a0:b5:4f:42:e4:ac:38:f7:2a:f4:70:e4:79:65:4c +-----BEGIN CERTIFICATE----- +MIICVTCCAdygAwIBAgIUTyNkuI6XY57GU4HBdk7LKnQV1tcwCgYIKoZIzj0EAwMw +WjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs +IEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHNDAeFw0y +MTA1MjAwMjEwMjJaFw00NjA1MTkwMjEwMjJaMFoxCzAJBgNVBAYTAkNOMSUwIwYD +VQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtUcnVz +dEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATx +s8045CVD5d4ZCbuBeaIVXxVjAd7Cq92zphtnS4CDr5nLrBfbK5bKfFJV4hrhPVbw +LxYI+hW8m7tH5j/uqOFMjPXTNvk4XatwmkcN4oFBButJ+bAp3TPsUKV/eSm4IJij +YzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUpbtKl86zK3+kMd6Xg1mD +pm9xy94wHQYDVR0OBBYEFKW7SpfOsyt/pDHel4NZg6ZvccveMA4GA1UdDwEB/wQE +AwIBBjAKBggqhkjOPQQDAwNnADBkAjBe8usGzEkxn0AAbbd+NvBNEU/zy4k6LHiR +UKNbwMp1JvK/kF0LgoxgKJ/GcJpo5PECMFxYDlZ2z1jD1xCMuo6u47xkdUfFVZDj +/bpV6wfEU6s3qe4hsiFbYI89MvHVI5TWWA== +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust ECC Root-01 O=CommScope +# Subject: CN=CommScope Public Trust ECC Root-01 O=CommScope +# Label: "CommScope Public Trust ECC Root-01" +# Serial: 385011430473757362783587124273108818652468453534 +# MD5 Fingerprint: 3a:40:a7:fc:03:8c:9c:38:79:2f:3a:a2:6c:b6:0a:16 +# SHA1 Fingerprint: 07:86:c0:d8:dd:8e:c0:80:98:06:98:d0:58:7a:ef:de:a6:cc:a2:5d +# SHA256 Fingerprint: 11:43:7c:da:7b:b4:5e:41:36:5f:45:b3:9a:38:98:6b:0d:e0:0d:ef:34:8e:0c:7b:b0:87:36:33:80:0b:c3:8b +-----BEGIN CERTIFICATE----- +MIICHTCCAaOgAwIBAgIUQ3CCd89NXTTxyq4yLzf39H91oJ4wCgYIKoZIzj0EAwMw +TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t +bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMTAeFw0yMTA0MjgxNzM1NDNa +Fw00NjA0MjgxNzM1NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv +cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDEw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAARLNumuV16ocNfQj3Rid8NeeqrltqLxeP0C +flfdkXmcbLlSiFS8LwS+uM32ENEp7LXQoMPwiXAZu1FlxUOcw5tjnSCDPgYLpkJE +hRGnSjot6dZoL0hOUysHP029uax3OVejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSOB2LAUN3GGQYARnQE9/OufXVNMDAKBggq +hkjOPQQDAwNoADBlAjEAnDPfQeMjqEI2Jpc1XHvr20v4qotzVRVcrHgpD7oh2MSg +2NED3W3ROT3Ek2DS43KyAjB8xX6I01D1HiXo+k515liWpDVfG2XqYZpwI7UNo5uS +Um9poIyNStDuiw7LR47QjRE= +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust ECC Root-02 O=CommScope +# Subject: CN=CommScope Public Trust ECC Root-02 O=CommScope +# Label: "CommScope Public Trust ECC Root-02" +# Serial: 234015080301808452132356021271193974922492992893 +# MD5 Fingerprint: 59:b0:44:d5:65:4d:b8:5c:55:19:92:02:b6:d1:94:b2 +# SHA1 Fingerprint: 3c:3f:ef:57:0f:fe:65:93:86:9e:a0:fe:b0:f6:ed:8e:d1:13:c7:e5 +# SHA256 Fingerprint: 2f:fb:7f:81:3b:bb:b3:c8:9a:b4:e8:16:2d:0f:16:d7:15:09:a8:30:cc:9d:73:c2:62:e5:14:08:75:d1:ad:4a +-----BEGIN CERTIFICATE----- +MIICHDCCAaOgAwIBAgIUKP2ZYEFHpgE6yhR7H+/5aAiDXX0wCgYIKoZIzj0EAwMw +TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t +bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMjAeFw0yMTA0MjgxNzQ0NTRa +Fw00NjA0MjgxNzQ0NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv +cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDIw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAR4MIHoYx7l63FRD/cHB8o5mXxO1Q/MMDAL +j2aTPs+9xYa9+bG3tD60B8jzljHz7aRP+KNOjSkVWLjVb3/ubCK1sK9IRQq9qEmU 
+v4RDsNuESgMjGWdqb8FuvAY5N9GIIvejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTmGHX/72DehKT1RsfeSlXjMjZ59TAKBggq +hkjOPQQDAwNnADBkAjAmc0l6tqvmSfR9Uj/UQQSugEODZXW5hYA4O9Zv5JOGq4/n +ich/m35rChJVYaoR4HkCMHfoMXGsPHED1oQmHhS48zs73u1Z/GtMMH9ZzkXpc2AV +mkzw5l4lIhVtwodZ0LKOag== +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust RSA Root-01 O=CommScope +# Subject: CN=CommScope Public Trust RSA Root-01 O=CommScope +# Label: "CommScope Public Trust RSA Root-01" +# Serial: 354030733275608256394402989253558293562031411421 +# MD5 Fingerprint: 0e:b4:15:bc:87:63:5d:5d:02:73:d4:26:38:68:73:d8 +# SHA1 Fingerprint: 6d:0a:5f:f7:b4:23:06:b4:85:b3:b7:97:64:fc:ac:75:f5:33:f2:93 +# SHA256 Fingerprint: 02:bd:f9:6e:2a:45:dd:9b:f1:8f:c7:e1:db:df:21:a0:37:9b:a3:c9:c2:61:03:44:cf:d8:d6:06:fe:c1:ed:81 +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIUPgNJgXUWdDGOTKvVxZAplsU5EN0wDQYJKoZIhvcNAQEL +BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi +Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMTAeFw0yMTA0MjgxNjQ1 +NTRaFw00NjA0MjgxNjQ1NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t +U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt +MDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCwSGWjDR1C45FtnYSk +YZYSwu3D2iM0GXb26v1VWvZVAVMP8syMl0+5UMuzAURWlv2bKOx7dAvnQmtVzslh +suitQDy6uUEKBU8bJoWPQ7VAtYXR1HHcg0Hz9kXHgKKEUJdGzqAMxGBWBB0HW0al +DrJLpA6lfO741GIDuZNqihS4cPgugkY4Iw50x2tBt9Apo52AsH53k2NC+zSDO3Oj +WiE260f6GBfZumbCk6SP/F2krfxQapWsvCQz0b2If4b19bJzKo98rwjyGpg/qYFl +P8GMicWWMJoKz/TUyDTtnS+8jTiGU+6Xn6myY5QXjQ/cZip8UlF1y5mO6D1cv547 +KI2DAg+pn3LiLCuz3GaXAEDQpFSOm117RTYm1nJD68/A6g3czhLmfTifBSeolz7p +UcZsBSjBAg/pGG3svZwG1KdJ9FQFa2ww8esD1eo9anbCyxooSU1/ZOD6K9pzg4H/ +kQO9lLvkuI6cMmPNn7togbGEW682v3fuHX/3SZtS7NJ3Wn2RnU3COS3kuoL4b/JO +Hg9O5j9ZpSPcPYeoKFgo0fEbNttPxP/hjFtyjMcmAyejOQoBqsCyMWCDIqFPEgkB +Ea801M/XrmLTBQe0MXXgDW1XT2mH+VepuhX2yFJtocucH+X8eKg1mp9BFM6ltM6U +CBwJrVbl2rZJmkrqYxhTnCwuwwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUN12mmnQywsL5x6YVEFm45P3luG0wDQYJ +KoZIhvcNAQELBQADggIBAK+nz97/4L1CjU3lIpbfaOp9TSp90K09FlxD533Ahuh6 +NWPxzIHIxgvoLlI1pKZJkGNRrDSsBTtXAOnTYtPZKdVUvhwQkZyybf5Z/Xn36lbQ +nmhUQo8mUuJM3y+Xpi/SB5io82BdS5pYV4jvguX6r2yBS5KPQJqTRlnLX3gWsWc+ +QgvfKNmwrZggvkN80V4aCRckjXtdlemrwWCrWxhkgPut4AZ9HcpZuPN4KWfGVh2v +trV0KnahP/t1MJ+UXjulYPPLXAziDslg+MkfFoom3ecnf+slpoq9uC02EJqxWE2a +aE9gVOX2RhOOiKy8IUISrcZKiX2bwdgt6ZYD9KJ0DLwAHb/WNyVntHKLr4W96ioD +j8z7PEQkguIBpQtZtjSNMgsSDesnwv1B10A8ckYpwIzqug/xBpMu95yo9GA+o/E4 +Xo4TwbM6l4c/ksp4qRyv0LAbJh6+cOx69TOY6lz/KwsETkPdY34Op054A5U+1C0w +lREQKC6/oAI+/15Z0wUOlV9TRe9rh9VIzRamloPh37MG88EU26fsHItdkJANclHn +YfkUyq+Dj7+vsQpZXdxc1+SWrVtgHdqul7I52Qb1dgAT+GhMIbA1xNxVssnBQVoc +icCMb3SgazNNtQEo/a2tiRc7ppqEvOuM6sRxJKi6KfkIsidWNTJf6jn7MZrVGczw +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust RSA Root-02 O=CommScope +# Subject: CN=CommScope Public Trust RSA Root-02 O=CommScope +# Label: "CommScope Public Trust RSA Root-02" +# Serial: 480062499834624527752716769107743131258796508494 +# MD5 Fingerprint: e1:29:f9:62:7b:76:e2:96:6d:f3:d4:d7:0f:ae:1f:aa +# SHA1 Fingerprint: ea:b0:e2:52:1b:89:93:4c:11:68:f2:d8:9a:ac:22:4c:a3:8a:57:ae +# SHA256 Fingerprint: ff:e9:43:d7:93:42:4b:4f:7c:44:0c:1c:3d:64:8d:53:63:f3:4b:82:dc:87:aa:7a:9f:11:8f:c5:de:e1:01:f1 +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIUVBa/O345lXGN0aoApYYNK496BU4wDQYJKoZIhvcNAQEL +BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi +Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMjAeFw0yMTA0MjgxNzE2 
+NDNaFw00NjA0MjgxNzE2NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t +U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt +MDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDh+g77aAASyE3VrCLE +NQE7xVTlWXZjpX/rwcRqmL0yjReA61260WI9JSMZNRTpf4mnG2I81lDnNJUDMrG0 +kyI9p+Kx7eZ7Ti6Hmw0zdQreqjXnfuU2mKKuJZ6VszKWpCtYHu8//mI0SFHRtI1C +rWDaSWqVcN3SAOLMV2MCe5bdSZdbkk6V0/nLKR8YSvgBKtJjCW4k6YnS5cciTNxz +hkcAqg2Ijq6FfUrpuzNPDlJwnZXjfG2WWy09X6GDRl224yW4fKcZgBzqZUPckXk2 +LHR88mcGyYnJ27/aaL8j7dxrrSiDeS/sOKUNNwFnJ5rpM9kzXzehxfCrPfp4sOcs +n/Y+n2Dg70jpkEUeBVF4GiwSLFworA2iI540jwXmojPOEXcT1A6kHkIfhs1w/tku +FT0du7jyU1fbzMZ0KZwYszZ1OC4PVKH4kh+Jlk+71O6d6Ts2QrUKOyrUZHk2EOH5 +kQMreyBUzQ0ZGshBMjTRsJnhkB4BQDa1t/qp5Xd1pCKBXbCL5CcSD1SIxtuFdOa3 +wNemKfrb3vOTlycEVS8KbzfFPROvCgCpLIscgSjX74Yxqa7ybrjKaixUR9gqiC6v +wQcQeKwRoi9C8DfF8rhW3Q5iLc4tVn5V8qdE9isy9COoR+jUKgF4z2rDN6ieZdIs +5fq6M8EGRPbmz6UNp2YINIos8wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUR9DnsSL/nSz12Vdgs7GxcJXvYXowDQYJ +KoZIhvcNAQELBQADggIBAIZpsU0v6Z9PIpNojuQhmaPORVMbc0RTAIFhzTHjCLqB +KCh6krm2qMhDnscTJk3C2OVVnJJdUNjCK9v+5qiXz1I6JMNlZFxHMaNlNRPDk7n3 ++VGXu6TwYofF1gbTl4MgqX67tiHCpQ2EAOHyJxCDut0DgdXdaMNmEMjRdrSzbyme +APnCKfWxkxlSaRosTKCL4BWaMS/TiJVZbuXEs1DIFAhKm4sTg7GkcrI7djNB3Nyq +pgdvHSQSn8h2vS/ZjvQs7rfSOBAkNlEv41xdgSGn2rtO/+YHqP65DSdsu3BaVXoT +6fEqSWnHX4dXTEN5bTpl6TBcQe7rd6VzEojov32u5cSoHw2OHG1QAk8mGEPej1WF +sQs3BWDJVTkSBKEqz3EWnzZRSb9wO55nnPt7eck5HHisd5FUmrh1CoFSl+NmYWvt +PjgelmFV4ZFUjO2MJB+ByRCac5krFk5yAD9UG/iNuovnFNa2RU9g7Jauwy8CTl2d +lklyALKrdVwPaFsdZcJfMw8eD/A7hvWwTruc9+olBdytoptLFwG+Qt81IR2tq670 +v64fG9PiO/yzcnMcmyiQiRM9HcEARwmWmjgb3bHPDcK0RPOWlc4yOo80nOAXx17O +rg3bhzjlP1v9mxnhMUF6cKojawHhRUzNlM47ni3niAIi9G7oyOzWPPO5std3eqx7 +-----END CERTIFICATE----- + +# Issuer: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH +# Subject: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH +# Label: "Telekom Security TLS ECC Root 2020" +# Serial: 72082518505882327255703894282316633856 +# MD5 Fingerprint: c1:ab:fe:6a:10:2c:03:8d:bc:1c:22:32:c0:85:a7:fd +# SHA1 Fingerprint: c0:f8:96:c5:a9:3b:01:06:21:07:da:18:42:48:bc:e9:9d:88:d5:ec +# SHA256 Fingerprint: 57:8a:f4:de:d0:85:3f:4e:59:98:db:4a:ea:f9:cb:ea:8d:94:5f:60:b6:20:a3:8d:1a:3c:13:b2:bc:7b:a8:e1 +-----BEGIN CERTIFICATE----- +MIICQjCCAcmgAwIBAgIQNjqWjMlcsljN0AFdxeVXADAKBggqhkjOPQQDAzBjMQsw +CQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0eSBH +bWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBFQ0MgUm9vdCAyMDIw +MB4XDTIwMDgyNTA3NDgyMFoXDTQ1MDgyNTIzNTk1OVowYzELMAkGA1UEBhMCREUx +JzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkGA1UE +AwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgRUNDIFJvb3QgMjAyMDB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABM6//leov9Wq9xCazbzREaK9Z0LMkOsVGJDZos0MKiXrPk/O +tdKPD/M12kOLAoC+b1EkHQ9rK8qfwm9QMuU3ILYg/4gND21Ju9sGpIeQkpT0CdDP +f8iAC8GXs7s1J8nCG6NCMEAwHQYDVR0OBBYEFONyzG6VmUex5rNhTNHLq+O6zd6f +MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2cA +MGQCMHVSi7ekEE+uShCLsoRbQuHmKjYC2qBuGT8lv9pZMo7k+5Dck2TOrbRBR2Di +z6fLHgIwN0GMZt9Ba9aDAEH9L1r3ULRn0SyocddDypwnJJGDSA3PzfdUga/sf+Rn +27iQ7t0l +-----END CERTIFICATE----- + +# Issuer: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH +# Subject: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH +# Label: "Telekom Security TLS RSA Root 2023" +# Serial: 44676229530606711399881795178081572759 +# MD5 Fingerprint: bf:5b:eb:54:40:cd:48:71:c4:20:8d:7d:de:0a:42:f2 +# SHA1 Fingerprint: 54:d3:ac:b3:bd:57:56:f6:85:9d:ce:e5:c3:21:e2:d4:ad:83:d0:93 +# 
SHA256 Fingerprint: ef:c6:5c:ad:bb:59:ad:b6:ef:e8:4d:a2:23:11:b3:56:24:b7:1b:3b:1e:a0:da:8b:66:55:17:4e:c8:97:86:46 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIQIZxULej27HF3+k7ow3BXlzANBgkqhkiG9w0BAQwFADBj +MQswCQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0 +eSBHbWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBSU0EgUm9vdCAy +MDIzMB4XDTIzMDMyODEyMTY0NVoXDTQ4MDMyNzIzNTk1OVowYzELMAkGA1UEBhMC +REUxJzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkG +A1UEAwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgUlNBIFJvb3QgMjAyMzCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAO01oYGA88tKaVvC+1GDrib94W7zgRJ9 +cUD/h3VCKSHtgVIs3xLBGYSJwb3FKNXVS2xE1kzbB5ZKVXrKNoIENqil/Cf2SfHV +cp6R+SPWcHu79ZvB7JPPGeplfohwoHP89v+1VmLhc2o0mD6CuKyVU/QBoCcHcqMA +U6DksquDOFczJZSfvkgdmOGjup5czQRxUX11eKvzWarE4GC+j4NSuHUaQTXtvPM6 +Y+mpFEXX5lLRbtLevOP1Czvm4MS9Q2QTps70mDdsipWol8hHD/BeEIvnHRz+sTug +BTNoBUGCwQMrAcjnj02r6LX2zWtEtefdi+zqJbQAIldNsLGyMcEWzv/9FIS3R/qy +8XDe24tsNlikfLMR0cN3f1+2JeANxdKz+bi4d9s3cXFH42AYTyS2dTd4uaNir73J +co4vzLuu2+QVUhkHM/tqty1LkCiCc/4YizWN26cEar7qwU02OxY2kTLvtkCJkUPg +8qKrBC7m8kwOFjQgrIfBLX7JZkcXFBGk8/ehJImr2BrIoVyxo/eMbcgByU/J7MT8 +rFEz0ciD0cmfHdRHNCk+y7AO+oMLKFjlKdw/fKifybYKu6boRhYPluV75Gp6SG12 +mAWl3G0eQh5C2hrgUve1g8Aae3g1LDj1H/1Joy7SWWO/gLCMk3PLNaaZlSJhZQNg ++y+TS/qanIA7AgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUtqeX +gj10hZv3PJ+TmpV5dVKMbUcwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS2 +p5eCPXSFm/c8n5OalXl1UoxtRzANBgkqhkiG9w0BAQwFAAOCAgEAqMxhpr51nhVQ +pGv7qHBFfLp+sVr8WyP6Cnf4mHGCDG3gXkaqk/QeoMPhk9tLrbKmXauw1GLLXrtm +9S3ul0A8Yute1hTWjOKWi0FpkzXmuZlrYrShF2Y0pmtjxrlO8iLpWA1WQdH6DErw +M807u20hOq6OcrXDSvvpfeWxm4bu4uB9tPcy/SKE8YXJN3nptT+/XOR0so8RYgDd +GGah2XsjX/GO1WfoVNpbOms2b/mBsTNHM3dA+VKq3dSDz4V4mZqTuXNnQkYRIer+ +CqkbGmVps4+uFrb2S1ayLfmlyOw7YqPta9BO1UAJpB+Y1zqlklkg5LB9zVtzaL1t +xKITDmcZuI1CfmwMmm6gJC3VRRvcxAIU/oVbZZfKTpBQCHpCNfnqwmbU+AGuHrS+ +w6jv/naaoqYfRvaE7fzbzsQCzndILIyy7MMAo+wsVRjBfhnu4S/yrYObnqsZ38aK +L4x35bcF7DvB7L6Gs4a8wPfc5+pbrrLMtTWGS9DiP7bY+A4A7l3j941Y/8+LN+lj +X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q +ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm +dTdmQRCsu/WU48IxK63nI1bMNSWSs1A= +-----END CERTIFICATE----- + +# Issuer: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA +# Subject: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA +# Label: "FIRMAPROFESIONAL CA ROOT-A WEB" +# Serial: 65916896770016886708751106294915943533 +# MD5 Fingerprint: 82:b2:ad:45:00:82:b0:66:63:f8:5f:c3:67:4e:ce:a3 +# SHA1 Fingerprint: a8:31:11:74:a6:14:15:0d:ca:77:dd:0e:e4:0c:5d:58:fc:a0:72:a5 +# SHA256 Fingerprint: be:f2:56:da:f2:6e:9c:69:bd:ec:16:02:35:97:98:f3:ca:f7:18:21:a0:3e:01:82:57:c5:3c:65:61:7f:3d:4a +-----BEGIN CERTIFICATE----- +MIICejCCAgCgAwIBAgIQMZch7a+JQn81QYehZ1ZMbTAKBggqhkjOPQQDAzBuMQsw +CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE +YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB +IFJPT1QtQSBXRUIwHhcNMjIwNDA2MDkwMTM2WhcNNDcwMzMxMDkwMTM2WjBuMQsw +CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE +YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB +IFJPT1QtQSBXRUIwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARHU+osEaR3xyrq89Zf +e9MEkVz6iMYiuYMQYneEMy3pA4jU4DP37XcsSmDq5G+tbbT4TIqk5B/K6k84Si6C +cyvHZpsKjECcfIr28jlgst7L7Ljkb+qbXbdTkBgyVcUgt5SjYzBhMA8GA1UdEwEB +/wQFMAMBAf8wHwYDVR0jBBgwFoAUk+FDY1w8ndYn81LsF7Kpryz3dvgwHQYDVR0O +BBYEFJPhQ2NcPJ3WJ/NS7Beyqa8s93b4MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjO +PQQDAwNoADBlAjAdfKR7w4l1M+E7qUW/Runpod3JIha3RxEL2Jq68cgLcFBTApFw 
+hVmpHqTm6iMxoAACMQD94vizrxa5HnPEluPBMBnYfubDl94cT7iJLzPrSA8Z94dG +XSaQpYXFuXqUPoeovQA= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA CYBER Root CA" +# Serial: 85076849864375384482682434040119489222 +# MD5 Fingerprint: 0b:33:a0:97:52:95:d4:a9:fd:bb:db:6e:a3:55:5b:51 +# SHA1 Fingerprint: f6:b1:1c:1a:83:38:e9:7b:db:b3:a8:c8:33:24:e0:2d:9c:7f:26:66 +# SHA256 Fingerprint: 3f:63:bb:28:14:be:17:4e:c8:b6:43:9c:f0:8d:6d:56:f0:b7:c4:05:88:3a:56:48:a3:34:42:4d:6b:3e:c5:58 +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIQQAE0jMIAAAAAAAAAATzyxjANBgkqhkiG9w0BAQwFADBQ +MQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FOLUNBMRAwDgYDVQQLEwdSb290 +IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3QgQ0EwHhcNMjIxMTIyMDY1NDI5 +WhcNNDcxMTIyMTU1OTU5WjBQMQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FO +LUNBMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3Qg +Q0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDG+Moe2Qkgfh1sTs6P +40czRJzHyWmqOlt47nDSkvgEs1JSHWdyKKHfi12VCv7qze33Kc7wb3+szT3vsxxF +avcokPFhV8UMxKNQXd7UtcsZyoC5dc4pztKFIuwCY8xEMCDa6pFbVuYdHNWdZsc/ +34bKS1PE2Y2yHer43CdTo0fhYcx9tbD47nORxc5zb87uEB8aBs/pJ2DFTxnk684i +JkXXYJndzk834H/nY62wuFm40AZoNWDTNq5xQwTxaWV4fPMf88oon1oglWa0zbfu +j3ikRRjpJi+NmykosaS3Om251Bw4ckVYsV7r8Cibt4LK/c/WMw+f+5eesRycnupf +Xtuq3VTpMCEobY5583WSjCb+3MX2w7DfRFlDo7YDKPYIMKoNM+HvnKkHIuNZW0CP +2oi3aQiotyMuRAlZN1vH4xfyIutuOVLF3lSnmMlLIJXcRolftBL5hSmO68gnFSDA +S9TMfAxsNAwmmyYxpjyn9tnQS6Jk/zuZQXLB4HCX8SS7K8R0IrGsayIyJNN4KsDA +oS/xUgXJP+92ZuJF2A09rZXIx4kmyA+upwMu+8Ff+iDhcK2wZSA3M2Cw1a/XDBzC +kHDXShi8fgGwsOsVHkQGzaRP6AzRwyAQ4VRlnrZR0Bp2a0JaWHY06rc3Ga4udfmW +5cFZ95RXKSWNOkyrTZpB0F8mAwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBSdhWEUfMFib5do5E83QOGt4A1WNzAd +BgNVHQ4EFgQUnYVhFHzBYm+XaORPN0DhreANVjcwDQYJKoZIhvcNAQEMBQADggIB +AGSPesRiDrWIzLjHhg6hShbNcAu3p4ULs3a2D6f/CIsLJc+o1IN1KriWiLb73y0t +tGlTITVX1olNc79pj3CjYcya2x6a4CD4bLubIp1dhDGaLIrdaqHXKGnK/nZVekZn +68xDiBaiA9a5F/gZbG0jAn/xX9AKKSM70aoK7akXJlQKTcKlTfjF/biBzysseKNn +TKkHmvPfXvt89YnNdJdhEGoHK4Fa0o635yDRIG4kqIQnoVesqlVYL9zZyvpoBJ7t +RCT5dEA7IzOrg1oYJkK2bVS1FmAwbLGg+LhBoF1JSdJlBTrq/p1hvIbZv97Tujqx +f36SNI7JAG7cmL3c7IAFrQI932XtCwP39xaEBDG6k5TY8hL4iuO/Qq+n1M0RFxbI +Qh0UqEL20kCGoE8jypZFVmAGzbdVAaYBlGX+bgUJurSkquLvWL69J1bY73NxW0Qz +8ppy6rBePm6pUlvscG21h483XjyMnM7k8M4MZ0HMzvaAq07MTFb1wWFZk7Q+ptq4 +NxKfKjLji7gh7MMrZQzvIt6IKTtM1/r+t+FHvpw+PoP7UV31aPcuIYXcv/Fa4nzX +xeSDwWrruoBa3lwtcHb4yOWHh8qgnaHlIhInD0Q9HWzq1MKLL295q39QpsQZp6F6 +t5b5wR9iWqJDB0BeJsas7a5wFsWqynKKTbDPAYsDP27X +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. +# Subject: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. 
+# Label: "SecureSign Root CA12" +# Serial: 587887345431707215246142177076162061960426065942 +# MD5 Fingerprint: c6:89:ca:64:42:9b:62:08:49:0b:1e:7f:e9:07:3d:e8 +# SHA1 Fingerprint: 7a:22:1e:3d:de:1b:06:ac:9e:c8:47:70:16:8e:3c:e5:f7:6b:06:f4 +# SHA256 Fingerprint: 3f:03:4b:b5:70:4d:44:b2:d0:85:45:a0:20:57:de:93:eb:f3:90:5f:ce:72:1a:cb:c7:30:c0:6d:da:ee:90:4e +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUZvnHwa/swlG07VOX5uaCwysckBYwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u +LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExMjAeFw0yMDA0MDgw +NTM2NDZaFw00MDA0MDgwNTM2NDZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD +eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS +b290IENBMTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6OcE3emhF +KxS06+QT61d1I02PJC0W6K6OyX2kVzsqdiUzg2zqMoqUm048luT9Ub+ZyZN+v/mt +p7JIKwccJ/VMvHASd6SFVLX9kHrko+RRWAPNEHl57muTH2SOa2SroxPjcf59q5zd +J1M3s6oYwlkm7Fsf0uZlfO+TvdhYXAvA42VvPMfKWeP+bl+sg779XSVOKik71gur +FzJ4pOE+lEa+Ym6b3kaosRbnhW70CEBFEaCeVESE99g2zvVQR9wsMJvuwPWW0v4J +hscGWa5Pro4RmHvzC1KqYiaqId+OJTN5lxZJjfU+1UefNzFJM3IFTQy2VYzxV4+K +h9GtxRESOaCtAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBRXNPN0zwRL1SXm8UC2LEzZLemgrTANBgkqhkiG9w0BAQsF +AAOCAQEAPrvbFxbS8hQBICw4g0utvsqFepq2m2um4fylOqyttCg6r9cBg0krY6Ld +mmQOmFxv3Y67ilQiLUoT865AQ9tPkbeGGuwAtEGBpE/6aouIs3YIcipJQMPTw4WJ +mBClnW8Zt7vPemVV2zfrPIpyMpcemik+rY3moxtt9XUa5rBouVui7mlHJzWhhpmA +8zNL4WukJsPvdFlseqJkth5Ew1DgDzk9qTPxpfPSvWKErI4cqc1avTc7bgoitPQV +55FYxTpE05Uo2cBl6XLK0A+9H7MV2anjpEcJnuDLN/v9vZfVvhgaaaI5gdka9at/ +yOPiZwud9AzqVN/Ssq+xIvEg37xEHA== +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. +# Subject: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. +# Label: "SecureSign Root CA14" +# Serial: 575790784512929437950770173562378038616896959179 +# MD5 Fingerprint: 71:0d:72:fa:92:19:65:5e:89:04:ac:16:33:f0:bc:d5 +# SHA1 Fingerprint: dd:50:c0:f7:79:b3:64:2e:74:a2:b8:9d:9f:d3:40:dd:bb:f0:f2:4f +# SHA256 Fingerprint: 4b:00:9c:10:34:49:4f:9a:b5:6b:ba:3b:a1:d6:27:31:fc:4d:20:d8:95:5a:dc:ec:10:a9:25:60:72:61:e3:38 +-----BEGIN CERTIFICATE----- +MIIFcjCCA1qgAwIBAgIUZNtaDCBO6Ncpd8hQJ6JaJ90t8sswDQYJKoZIhvcNAQEM +BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u +LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNDAeFw0yMDA0MDgw +NzA2MTlaFw00NTA0MDgwNzA2MTlaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD +eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS +b290IENBMTQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDF0nqh1oq/ +FjHQmNE6lPxauG4iwWL3pwon71D2LrGeaBLwbCRjOfHw3xDG3rdSINVSW0KZnvOg +vlIfX8xnbacuUKLBl422+JX1sLrcneC+y9/3OPJH9aaakpUqYllQC6KxNedlsmGy +6pJxaeQp8E+BgQQ8sqVb1MWoWWd7VRxJq3qdwudzTe/NCcLEVxLbAQ4jeQkHO6Lo +/IrPj8BGJJw4J+CDnRugv3gVEOuGTgpa/d/aLIJ+7sr2KeH6caH3iGicnPCNvg9J +kdjqOvn90Ghx2+m1K06Ckm9mH+Dw3EzsytHqunQG+bOEkJTRX45zGRBdAuVwpcAQ +0BB8b8VYSbSwbprafZX1zNoCr7gsfXmPvkPx+SgojQlD+Ajda8iLLCSxjVIHvXib +y8posqTdDEx5YMaZ0ZPxMBoH064iwurO8YQJzOAUbn8/ftKChazcqRZOhaBgy/ac +18izju3Gm5h1DVXoX+WViwKkrkMpKBGk5hIwAUt1ax5mnXkvpXYvHUC0bcl9eQjs +0Wq2XSqypWa9a4X0dFbD9ed1Uigspf9mR6XU/v6eVL9lfgHWMI+lNpyiUBzuOIAB +SMbHdPTGrMNASRZhdCyvjG817XsYAFs2PJxQDcqSMxDxJklt33UkN4Ii1+iW/RVL +ApY+B3KVfqs9TC7XyvDf4Fg/LS8EmjijAQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUBpOjCl4oaTeqYR3r6/wtbyPk +86AwDQYJKoZIhvcNAQEMBQADggIBAJaAcgkGfpzMkwQWu6A6jZJOtxEaCnFxEM0E +rX+lRVAQZk5KQaID2RFPeje5S+LGjzJmdSX7684/AykmjbgWHfYfM25I5uj4V7Ib +ed87hwriZLoAymzvftAj63iP/2SbNDefNWWipAA9EiOWWF3KY4fGoweITedpdopT 
+zfFP7ELyk+OZpDc8h7hi2/DsHzc/N19DzFGdtfCXwreFamgLRB7lUe6TzktuhsHS +DCRZNhqfLJGP4xjblJUK7ZGqDpncllPjYYPGFrojutzdfhrGe0K22VoF3Jpf1d+4 +2kd92jjbrDnVHmtsKheMYc2xbXIBw8MgAGJoFjHVdqqGuw6qnsb58Nn4DSEC5MUo +FlkRudlpcyqSeLiSV5sI8jrlL5WwWLdrIBRtFO8KvH7YVdiI2i/6GaX7i+B/OfVy +K4XELKzvGUWSTLNhB9xNH27SgRNcmvMSZ4PPmz+Ln52kuaiWA3rF7iDeM9ovnhp6 +dB7h7sxaOgTdsxoEqBRjrLdHEoOabPXm6RUVkRqEGQ6UROcSjiVbgGcZ3GOTEAtl +Lor6CZpO2oYofaphNdgOpygau1LgePhsumywbrmHXumZNTfxPWQrqaA0k89jL9WB +365jJ6UeTo3cKXhZ+PmhIIynJkBugnLNeLLIjzwec+fBH7/PzqUqm9tEZDKgu39c +JRNItX+S +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. +# Subject: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. +# Label: "SecureSign Root CA15" +# Serial: 126083514594751269499665114766174399806381178503 +# MD5 Fingerprint: 13:30:fc:c4:62:a6:a9:de:b5:c1:68:af:b5:d2:31:47 +# SHA1 Fingerprint: cb:ba:83:c8:c1:5a:5d:f1:f9:73:6f:ca:d7:ef:28:13:06:4a:07:7d +# SHA256 Fingerprint: e7:78:f0:f0:95:fe:84:37:29:cd:1a:00:82:17:9e:53:14:a9:c2:91:44:28:05:e1:fb:1d:8f:b6:b8:88:6c:3a +-----BEGIN CERTIFICATE----- +MIICIzCCAamgAwIBAgIUFhXHw9hJp75pDIqI7fBw+d23PocwCgYIKoZIzj0EAwMw +UTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28uLCBM +dGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNTAeFw0yMDA0MDgwODMy +NTZaFw00NTA0MDgwODMyNTZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpDeWJl +cnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBSb290 +IENBMTUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQLUHSNZDKZmbPSYAi4Io5GdCx4 +wCtELW1fHcmuS1Iggz24FG1Th2CeX2yF2wYUleDHKP+dX+Sq8bOLbe1PL0vJSpSR +ZHX+AezB2Ot6lHhWGENfa4HL9rzatAy2KZMIaY+jQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT +9DAKBggqhkjOPQQDAwNoADBlAjEA2S6Jfl5OpBEHvVnCB96rMjhTKkZEBhd6zlHp +4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6 +bkU6iYAZezKYVWOr62Nuk22rGwlgMU4= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH +# Subject: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH +# Label: "D-TRUST BR Root CA 2 2023" +# Serial: 153168538924886464690566649552453098598 +# MD5 Fingerprint: e1:09:ed:d3:60:d4:56:1b:47:1f:b7:0c:5f:1b:5f:85 +# SHA1 Fingerprint: 2d:b0:70:ee:71:94:af:69:68:17:db:79:ce:58:9f:a0:6b:96:f7:87 +# SHA256 Fingerprint: 05:52:e6:f8:3f:df:65:e8:fa:96:70:e6:66:df:28:a4:e2:13:40:b5:10:cb:e5:25:66:f9:7c:4f:b9:4b:2b:d1 +-----BEGIN CERTIFICATE----- +MIIFqTCCA5GgAwIBAgIQczswBEhb2U14LnNLyaHcZjANBgkqhkiG9w0BAQ0FADBI +MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE +LVRSVVNUIEJSIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA4NTYzMVoXDTM4MDUw +OTA4NTYzMFowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi +MCAGA1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAK7/CVmRgApKaOYkP7in5Mg6CjoWzckjYaCTcfKr +i3OPoGdlYNJUa2NRb0kz4HIHE304zQaSBylSa053bATTlfrdTIzZXcFhfUvnKLNE +gXtRr90zsWh81k5M/itoucpmacTsXld/9w3HnDY25QdgrMBM6ghs7wZ8T1soegj8 +k12b9py0i4a6Ibn08OhZWiihNIQaJZG2tY/vsvmA+vk9PBFy2OMvhnbFeSzBqZCT +Rphny4NqoFAjpzv2gTng7fC5v2Xx2Mt6++9zA84A9H3X4F07ZrjcjrqDy4d2A/wl +2ecjbwb9Z/Pg/4S8R7+1FhhGaRTMBffb00msa8yr5LULQyReS2tNZ9/WtT5PeB+U +cSTq3nD88ZP+npNa5JRal1QMNXtfbO4AHyTsA7oC9Xb0n9Sa7YUsOCIvx9gvdhFP +/Wxc6PWOJ4d/GUohR5AdeY0cW/jPSoXk7bNbjb7EZChdQcRurDhaTyN0dKkSw/bS +uREVMweR2Ds3OmMwBtHFIjYoYiMQ4EbMl6zWK11kJNXuHA7e+whadSr2Y23OC0K+ +0bpwHJwh5Q8xaRfX/Aq03u2AnMuStIv13lmiWAmlY0cL4UEyNEHZmrHZqLAbWt4N +DfTisl01gLmB1IRpkQLLddCNxbU9CZEJjxShFHR5PtbJFR2kWVki3PaKRT08EtY+ +XTIvAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUZ5Dw1t61 
+GNVGKX5cq/ieCLxklRAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG +OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfYnJfcm9vdF9jYV8y +XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQA097N3U9swFrktpSHxQCF16+tI +FoE9c+CeJyrrd6kTpGoKWloUMz1oH4Guaf2Mn2VsNELZLdB/eBaxOqwjMa1ef67n +riv6uvw8l5VAk1/DLQOj7aRvU9f6QA4w9QAgLABMjDu0ox+2v5Eyq6+SmNMW5tTR +VFxDWy6u71cqqLRvpO8NVhTaIasgdp4D/Ca4nj8+AybmTNudX0KEPUUDAxxZiMrc +LmEkWqTqJwtzEr5SswrPMhfiHocaFpVIbVrg0M8JkiZmkdijYQ6qgYF/6FKC0ULn +4B0Y+qSFNueG4A3rvNTJ1jxD8V1Jbn6Bm2m1iWKPiFLY1/4nwSPFyysCu7Ff/vtD +hQNGvl3GyiEm/9cCnnRK3PgTFbGBVzbLZVzRHTF36SXDw7IyN9XxmAnkbWOACKsG +koHU6XCPpz+y7YaMgmo1yEJagtFSGkUPFaUA8JR7ZSdXOUPPfH/mvTWze/EZTN46 +ls/pdu4D58JDUjxqgejBWoC9EV2Ta/vH5mQ/u2kc6d0li690yVRAysuTEwrt+2aS +Ecr1wPrYg1UDfNPFIkZ1cGt5SAYqgpq/5usWDiJFAbzdNpQ0qTUmiteXue4Icr80 +knCDgKs4qllo3UCkGJCy89UDyibK79XH4I9TjvAA46jtn/mtd+ArY0+ew+43u3gJ +hJ65bvspmZDogNOfJA== +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH +# Subject: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH +# Label: "D-TRUST EV Root CA 2 2023" +# Serial: 139766439402180512324132425437959641711 +# MD5 Fingerprint: 96:b4:78:09:f0:09:cb:77:eb:bb:1b:4d:6f:36:bc:b6 +# SHA1 Fingerprint: a5:5b:d8:47:6c:8f:19:f7:4c:f4:6d:6b:b6:c2:79:82:22:df:54:8b +# SHA256 Fingerprint: 8e:82:21:b2:e7:d4:00:78:36:a1:67:2f:0d:cc:29:9c:33:bc:07:d3:16:f1:32:fa:1a:20:6d:58:71:50:f1:ce +-----BEGIN CERTIFICATE----- +MIIFqTCCA5GgAwIBAgIQaSYJfoBLTKCnjHhiU19abzANBgkqhkiG9w0BAQ0FADBI +MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE +LVRSVVNUIEVWIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA5MTAzM1oXDTM4MDUw +OTA5MTAzMlowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi +MCAGA1UEAxMZRC1UUlVTVCBFViBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBANiOo4mAC7JXUtypU0w3uX9jFxPvp1sjW2l1sJkK +F8GLxNuo4MwxusLyzV3pt/gdr2rElYfXR8mV2IIEUD2BCP/kPbOx1sWy/YgJ25yE +7CUXFId/MHibaljJtnMoPDT3mfd/06b4HEV8rSyMlD/YZxBTfiLNTiVR8CUkNRFe +EMbsh2aJgWi6zCudR3Mfvc2RpHJqnKIbGKBv7FD0fUDCqDDPvXPIEysQEx6Lmqg6 +lHPTGGkKSv/BAQP/eX+1SH977ugpbzZMlWGG2Pmic4ruri+W7mjNPU0oQvlFKzIb +RlUWaqZLKfm7lVa/Rh3sHZMdwGWyH6FDrlaeoLGPaxK3YG14C8qKXO0elg6DpkiV +jTujIcSuWMYAsoS0I6SWhjW42J7YrDRJmGOVxcttSEfi8i4YHtAxq9107PncjLgc +jmgjutDzUNzPZY9zOjLHfP7KgiJPvo5iR2blzYfi6NUPGJ/lBHJLRjwQ8kTCZFZx +TnXonMkmdMV9WdEKWw9t/p51HBjGGjp82A0EzM23RWV6sY+4roRIPrN6TagD4uJ+ +ARZZaBhDM7DS3LAaQzXupdqpRlyuhoFBAUp0JuyfBr/CBTdkdXgpaP3F9ev+R/nk +hbDhezGdpn9yo7nELC7MmVcOIQxFAZRl62UJxmMiCzNJkkg8/M3OsD6Onov4/knF +NXJHAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUqvyREBuH +kV8Wub9PS5FeAByxMoAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG +OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfZXZfcm9vdF9jYV8y +XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQCTy6UfmRHsmg1fLBWTxj++EI14 +QvBukEdHjqOSMo1wj/Zbjb6JzkcBahsgIIlbyIIQbODnmaprxiqgYzWRaoUlrRc4 +pZt+UPJ26oUFKidBK7GB0aL2QHWpDsvxVUjY7NHss+jOFKE17MJeNRqrphYBBo7q +3C+jisosketSjl8MmxfPy3MHGcRqwnNU73xDUmPBEcrCRbH0O1P1aa4846XerOhU +t7KR/aypH/KH5BfGSah82ApB9PI+53c0BFLd6IHyTS9URZ0V4U/M5d40VxDJI3IX +cI1QcB9WbMy5/zpaT2N6w25lBx2Eof+pDGOJbbJAiDnXH3dotfyc1dZnaVuodNv8 +ifYbMvekJKZ2t0dT741Jj6m2g1qllpBFYfXeA08mD6iL8AOWsKwV0HFaanuU5nCT +2vFp4LJiTZ6P/4mdm13NRemUAiKN4DV/6PEEeXFsVIP4M7kFMhtYVRFP0OUnR3Hs +7dpn1mKmS00PaaLJvOwiS5THaJQXfuKOKD62xur1NGyfN4gHONuGcfrNlUhDbqNP +gofXNJhuS5N5YHVpD/Aa1VP6IQzCP+k/HxiMkl14p3ZnGbuy6n/pcAlWVqOwDAst +Nl7F6cTVg8uGF5csbBNvh1qvSaYd2804BC5f4ko1Di1L+KIkBI3Y4WNeApI02phh +XBxvWHZks/wCuPWdCg== +-----END CERTIFICATE----- diff --git a/venv/lib/python3.11/site-packages/certifi/core.py b/venv/lib/python3.11/site-packages/certifi/core.py new 
file mode 100644 index 0000000..91f538b --- /dev/null +++ b/venv/lib/python3.11/site-packages/certifi/core.py @@ -0,0 +1,114 @@ +""" +certifi.py +~~~~~~~~~~ + +This module returns the installation location of cacert.pem or its contents. +""" +import sys +import atexit + +def exit_cacert_ctx() -> None: + _CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr] + + +if sys.version_info >= (3, 11): + + from importlib.resources import as_file, files + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the file + # in cases where we're inside of a zipimport situation until someone + # actually calls where(), but we don't want to re-extract the file + # on every call of where(), so we'll do it once then store it in a + # global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you to + # manage the cleanup of this file, so it doesn't actually return a + # path, it returns a context manager that will give you the path + # when you enter it and will do any cleanup when you leave it. In + # the common case of not needing a temporary file, it will just + # return the file system location and the __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem")) + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + atexit.register(exit_cacert_ctx) + + return _CACERT_PATH + + def contents() -> str: + return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii") + +elif sys.version_info >= (3, 7): + + from importlib.resources import path as get_path, read_text + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the + # file in cases where we're inside of a zipimport situation until + # someone actually calls where(), but we don't want to re-extract + # the file on every call of where(), so we'll do it once then store + # it in a global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you + # to manage the cleanup of this file, so it doesn't actually + # return a path, it returns a context manager that will give + # you the path when you enter it and will do any cleanup when + # you leave it. In the common case of not needing a temporary + # file, it will just return the file system location and the + # __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. 
+ _CACERT_CTX = get_path("certifi", "cacert.pem") + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + atexit.register(exit_cacert_ctx) + + return _CACERT_PATH + + def contents() -> str: + return read_text("certifi", "cacert.pem", encoding="ascii") + +else: + import os + import types + from typing import Union + + Package = Union[types.ModuleType, str] + Resource = Union[str, "os.PathLike"] + + # This fallback will work for Python versions prior to 3.7 that lack the + # importlib.resources module but relies on the existing `where` function + # so won't address issues with environments like PyOxidizer that don't set + # __file__ on modules. + def read_text( + package: Package, + resource: Resource, + encoding: str = 'utf-8', + errors: str = 'strict' + ) -> str: + with open(where(), encoding=encoding) as data: + return data.read() + + # If we don't have importlib.resources, then we will just do the old logic + # of assuming we're on the filesystem and munge the path directly. + def where() -> str: + f = os.path.dirname(__file__) + + return os.path.join(f, "cacert.pem") + + def contents() -> str: + return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/venv/lib/python3.11/site-packages/certifi/py.typed b/venv/lib/python3.11/site-packages/certifi/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/LICENSE b/venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/LICENSE new file mode 100644 index 0000000..9725772 --- /dev/null +++ b/venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2025 TAHRI Ahmed R. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/METADATA b/venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/METADATA new file mode 100644 index 0000000..58f3667 --- /dev/null +++ b/venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/METADATA @@ -0,0 +1,721 @@ +Metadata-Version: 2.1 +Name: charset-normalizer +Version: 3.4.1 +Summary: The Real First Universal Charset Detector. 
Open, modern and actively maintained alternative to Chardet. +Author-email: "Ahmed R. TAHRI" +Maintainer-email: "Ahmed R. TAHRI" +License: MIT +Project-URL: Changelog, https://github.com/jawah/charset_normalizer/blob/master/CHANGELOG.md +Project-URL: Documentation, https://charset-normalizer.readthedocs.io/ +Project-URL: Code, https://github.com/jawah/charset_normalizer +Project-URL: Issue tracker, https://github.com/jawah/charset_normalizer/issues +Keywords: encoding,charset,charset-detector,detector,normalization,unicode,chardet,detect +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Text Processing :: Linguistic +Classifier: Topic :: Utilities +Classifier: Typing :: Typed +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +License-File: LICENSE +Provides-Extra: unicode-backport + +

# Charset Detection, for Everyone 👋
+
+The Real First Universal Charset Detector
+
+> A library that helps you read text from an unknown charset encoding. Motivated by `chardet`,
+> I'm trying to resolve the issue by taking a new approach.
+> All IANA character set names for which the Python core library provides codecs are supported.
+

+>>>>> 👉 Try Me Online Now, Then Adopt Me 👈 <<<<<
+

+This project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.
+
+| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |
+|---|:---:|:---:|:---:|
+| `Fast` | ❌ | ✅ | ✅ |
+| `Universal**` | ❌ | ✅ | ❌ |
+| `Reliable` **without** distinguishable standards | ❌ | ✅ | ✅ |
+| `Reliable` **with** distinguishable standards | ✅ | ✅ | ✅ |
+| `License` | LGPL-2.1 _restrictive_ | MIT | MPL-1.1 _restrictive_ |
+| `Native Python` | ✅ | ✅ | ❌ |
+| `Detect spoken language` | ❌ | ✅ | N/A |
+| `UnicodeDecodeError Safety` | ❌ | ✅ | ❌ |
+| `Whl Size (min)` | 193.6 kB | 42 kB | ~200 kB |
+| `Supported Encoding` | 33 | 🎉 [99](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |
+


+*\*\* : they clearly rely on encoding-specific code, even if it covers most of the encodings in common use*
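+
+To make the comparison above concrete before the numbers below, here is a minimal sketch of the chardet-style drop-in call this package exposes (the byte payload is a hypothetical sample, not taken from this README):
+
+```python
+from charset_normalizer import detect
+
+# Hypothetical payload: raw bytes in an unknown (here, cp1252) encoding.
+payload = "Bonjour, où est la gare ?".encode("cp1252")
+
+# detect() mirrors chardet's API and returns a dict with
+# 'encoding', 'language' and 'confidence' keys.
+result = detect(payload)
+print(result["encoding"], result["confidence"])
+```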
+
+## ⚡ Performance
+
+This package offers better performance than its counterpart Chardet. Here are some numbers.
+
+| Package | Accuracy | Mean per file (ms) | File per sec (est) |
+|---|:---:|:---:|:---:|
+| [chardet](https://github.com/chardet/chardet) | 86 % | 63 ms | 16 file/sec |
+| charset-normalizer | **98 %** | **10 ms** | 100 file/sec |
+
+| Package | 99th percentile | 95th percentile | 50th percentile |
+|---|:---:|:---:|:---:|
+| [chardet](https://github.com/chardet/chardet) | 265 ms | 71 ms | 7 ms |
+| charset-normalizer | 100 ms | 50 ms | 5 ms |
+
+_updated as of December 2024 using CPython 3.12_
+
+Chardet's performance on larger files (1MB+) is very poor. Expect a huge difference on large payloads.
+
+> Stats are generated using 400+ files with default parameters. For details on the files used, see the GHA workflows.
+> And yes, these results might change at any time. The dataset can be updated to include more files.
+> The actual delays depend heavily on your CPU capabilities. The factors should remain the same.
+> Keep in mind that the stats are generous and that Chardet's accuracy versus ours is measured using Chardet's
+> initial capability (i.e. its supported encodings). Challenge them if you want.
+
+## ✨ Installation
+
+Using pip:
+
+```sh
+pip install charset-normalizer -U
+```
+
+## 🚀 Basic Usage
+
+### CLI
+This package comes with a CLI.
+
+```
+usage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]
+                  file [file ...]
+
+The Real First Universal Charset Detector. Discover originating encoding used
+on text file. Normalize text to unicode.
+
+positional arguments:
+  files                 File(s) to be analysed
+
+optional arguments:
+  -h, --help            show this help message and exit
+  -v, --verbose         Display complementary information about file if any.
+                        Stdout will contain logs about the detection process.
+  -a, --with-alternative
+                        Output complementary possibilities if any. Top-level
+                        JSON WILL be a list.
+  -n, --normalize       Permit to normalize input file. If not set, program
+                        does not write anything.
+  -m, --minimal         Only output the charset detected to STDOUT. Disabling
+                        JSON output.
+  -r, --replace         Replace file when trying to normalize it instead of
+                        creating a new one.
+  -f, --force           Replace file without asking if you are sure, use this
+                        flag with caution.
+  -t THRESHOLD, --threshold THRESHOLD
+                        Define a custom maximum amount of chaos allowed in
+                        decoded content. 0. <= chaos <= 1.
+  --version             Show version information and exit.
+```
+
+```bash
+normalizer ./data/sample.1.fr.srt
+```
+
+or
+
+```bash
+python -m charset_normalizer ./data/sample.1.fr.srt
+```
+
+🎉 Since version 1.4.0 the CLI produces an easily usable stdout result in JSON format.
+
+```json
+{
+    "path": "/home/default/projects/charset_normalizer/data/sample.1.fr.srt",
+    "encoding": "cp1252",
+    "encoding_aliases": [
+        "1252",
+        "windows_1252"
+    ],
+    "alternative_encodings": [
+        "cp1254",
+        "cp1256",
+        "cp1258",
+        "iso8859_14",
+        "iso8859_15",
+        "iso8859_16",
+        "iso8859_3",
+        "iso8859_9",
+        "latin_1",
+        "mbcs"
+    ],
+    "language": "French",
+    "alphabets": [
+        "Basic Latin",
+        "Latin-1 Supplement"
+    ],
+    "has_sig_or_bom": false,
+    "chaos": 0.149,
+    "coherence": 97.152,
+    "unicode_path": null,
+    "is_preferred": true
+}
+```
+
+### Python
+*Just print out normalized text*
+```python
+from charset_normalizer import from_path
+
+results = from_path('./my_subtitle.srt')
+
+print(str(results.best()))
+```
+
+*Upgrade your code without effort*
+```python
+from charset_normalizer import detect
+```
+
+The above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) backward-compatible result possible.
+
+See the docs for advanced usage: [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/)
+
+## 😇 Why
+
+When I started using Chardet, I noticed that it did not meet my expectations, and I wanted to propose a
+reliable alternative using a completely different method. Also! I never back down on a good challenge!
+
+I **don't care** about the **originating charset** encoding, because **two different tables** can
+produce **two identical rendered strings.**
+What I want is to get readable text, the best I can.
+
+In a way, **I'm brute forcing text decoding.** How cool is that? 😎
+
+Don't confuse the **ftfy** package with charset-normalizer or chardet. ftfy's goal is to repair Unicode strings, whereas charset-normalizer converts a raw file in an unknown encoding to Unicode.
+
+## 🍰 How
+
+ - Discard all charset encoding tables that could not fit the binary content.
+ - Measure the noise, or the mess, once opened (by chunks) with a corresponding charset encoding.
+ - Extract the matches with the lowest mess detected.
+ - Additionally, we measure coherence / probe for a language.
+
+**Wait a minute**, what is noise/mess and coherence according to **YOU?**
+
+*Noise:* I opened hundreds of text files, **written by humans**, with the wrong encoding table. **I observed**, then
+**I established** some ground rules about **what is obvious** when **it seems like** a mess (aka. defining noise in rendered text).
+ I know that my interpretation of what is noise is probably incomplete; feel free to contribute in order to
+ improve or rewrite it.
+
+*Coherence:* For each language on earth, we have computed ranked letter-appearance occurrences (the best we can). So I thought
+that intel is worth something here. I use those records against decoded text to check whether I can detect intelligent design.
+
+## ⚡ Known limitations
+
+ - Language detection is unreliable when text contains two or more languages sharing identical letters. (e.g. HTML (English tags) + Turkish content (sharing Latin characters))
+ - Every charset detector heavily depends on sufficient content. In common cases, do not bother running detection on very tiny content.
+
+## ⚠️ About Python EOLs
+
+**If you are running:**
+
+- Python >=2.7,<3.5: Unsupported
+- Python 3.5: charset-normalizer < 2.1
+- Python 3.6: charset-normalizer < 3.1
+- Python 3.7: charset-normalizer < 4.0
+
+Upgrade your Python interpreter as soon as possible.
+
+## 👤 Contributing
+
+Contributions, issues and feature requests are very much welcome.
+Feel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute. + +## 📝 License + +Copyright © [Ahmed TAHRI @Ousret](https://github.com/Ousret).
+This project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed. + +Characters frequencies used in this project © 2012 [Denny Vrandečić](http://simia.net/letters/) + +## 💼 For Enterprise + +Professional support for charset-normalizer is available as part of the [Tidelift +Subscription][1]. Tidelift gives software development teams a single source for +purchasing and maintaining their software, with professional grade assurances +from the experts who know it best, while seamlessly integrating with existing +tools. + +[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme + +[![OpenSSF Best Practices](https://www.bestpractices.dev/projects/7297/badge)](https://www.bestpractices.dev/projects/7297) + +# Changelog +All notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). + +## [3.4.1](https://github.com/Ousret/charset_normalizer/compare/3.4.0...3.4.1) (2024-12-24) + +### Changed +- Project metadata are now stored using `pyproject.toml` instead of `setup.cfg` using setuptools as the build backend. +- Enforce annotation delayed loading for a simpler and consistent types in the project. +- Optional mypyc compilation upgraded to version 1.14 for Python >= 3.8 + +### Added +- pre-commit configuration. +- noxfile. + +### Removed +- `build-requirements.txt` as per using `pyproject.toml` native build configuration. +- `bin/integration.py` and `bin/serve.py` in favor of downstream integration test (see noxfile). +- `setup.cfg` in favor of `pyproject.toml` metadata configuration. +- Unused `utils.range_scan` function. + +### Fixed +- Converting content to Unicode bytes may insert `utf_8` instead of preferred `utf-8`. (#572) +- Deprecation warning "'count' is passed as positional argument" when converting to Unicode bytes on Python 3.13+ + +## [3.4.0](https://github.com/Ousret/charset_normalizer/compare/3.3.2...3.4.0) (2024-10-08) + +### Added +- Argument `--no-preemptive` in the CLI to prevent the detector to search for hints. +- Support for Python 3.13 (#512) + +### Fixed +- Relax the TypeError exception thrown when trying to compare a CharsetMatch with anything else than a CharsetMatch. +- Improved the general reliability of the detector based on user feedbacks. (#520) (#509) (#498) (#407) (#537) +- Declared charset in content (preemptive detection) not changed when converting to utf-8 bytes. (#381) + +## [3.3.2](https://github.com/Ousret/charset_normalizer/compare/3.3.1...3.3.2) (2023-10-31) + +### Fixed +- Unintentional memory usage regression when using large payload that match several encoding (#376) +- Regression on some detection case showcased in the documentation (#371) + +### Added +- Noise (md) probe that identify malformed arabic representation due to the presence of letters in isolated form (credit to my wife) + +## [3.3.1](https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1) (2023-10-22) + +### Changed +- Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8 +- Improved the general detection reliability based on reports from the community + +## [3.3.0](https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0) (2023-09-30) + +### Added +- Allow to execute the CLI (e.g. 
normalizer) through `python -m charset_normalizer.cli` or `python -m charset_normalizer` +- Support for 9 forgotten encoding that are supported by Python but unlisted in `encoding.aliases` as they have no alias (#323) + +### Removed +- (internal) Redundant utils.is_ascii function and unused function is_private_use_only +- (internal) charset_normalizer.assets is moved inside charset_normalizer.constant + +### Changed +- (internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection +- Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8 + +### Fixed +- Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in \_\_lt\_\_ (#350) + +## [3.2.0](https://github.com/Ousret/charset_normalizer/compare/3.1.0...3.2.0) (2023-06-07) + +### Changed +- Typehint for function `from_path` no longer enforce `PathLike` as its first argument +- Minor improvement over the global detection reliability + +### Added +- Introduce function `is_binary` that relies on main capabilities, and optimized to detect binaries +- Propagate `enable_fallback` argument throughout `from_bytes`, `from_path`, and `from_fp` that allow a deeper control over the detection (default True) +- Explicit support for Python 3.12 + +### Fixed +- Edge case detection failure where a file would contain 'very-long' camel cased word (Issue #289) + +## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06) + +### Added +- Argument `should_rename_legacy` for legacy function `detect` and disregard any new arguments without errors (PR #262) + +### Removed +- Support for Python 3.6 (PR #260) + +### Changed +- Optional speedup provided by mypy/c 1.0.1 + +## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18) + +### Fixed +- Multi-bytes cutter/chunk generator did not always cut correctly (PR #233) + +### Changed +- Speedup provided by mypy/c 0.990 on Python >= 3.7 + +## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20) + +### Added +- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results +- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES +- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio +- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl) + +### Changed +- Build with static metadata using 'build' frontend +- Make the language detection stricter +- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1 + +### Fixed +- CLI with opt --normalize fail when using full path for files +- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it +- Sphinx warnings when generating the documentation + +### Removed +- Coherence detector no longer return 'Simple English' instead return 'English' +- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese' +- Breaking: Method `first()` and `best()` from CharsetMatch +- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII) +- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches +- 
Breaking: Top-level function `normalize` +- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch +- Support for the backport `unicodedata2` + +## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18) + +### Added +- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results +- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES +- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio + +### Changed +- Build with static metadata using 'build' frontend +- Make the language detection stricter + +### Fixed +- CLI with opt --normalize fail when using full path for files +- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it + +### Removed +- Coherence detector no longer return 'Simple English' instead return 'English' +- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese' + +## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21) + +### Added +- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl) + +### Removed +- Breaking: Method `first()` and `best()` from CharsetMatch +- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII) + +### Fixed +- Sphinx warnings when generating the documentation + +## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15) + +### Changed +- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1 + +### Removed +- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches +- Breaking: Top-level function `normalize` +- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch +- Support for the backport `unicodedata2` + +## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19) + +### Deprecated +- Function `normalize` scheduled for removal in 3.0 + +### Changed +- Removed useless call to decode in fn is_unprintable (#206) + +### Fixed +- Third-party library (i18n xgettext) crashing not recognizing utf_8 (PEP 263) with underscore from [@aleksandernovikov](https://github.com/aleksandernovikov) (#204) + +## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19) + +### Added +- Output the Unicode table version when running the CLI with `--version` (PR #194) + +### Changed +- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175) +- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183) + +### Fixed +- Workaround potential bug in cpython with Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space (PR #175) +- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181) + +### Removed +- Support for Python 3.5 (PR #192) + +### Deprecated +- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194) + +## 
[2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12) + +### Fixed +- ASCII miss-detection on rare cases (PR #170) + +## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30) + +### Added +- Explicit support for Python 3.11 (PR #164) + +### Changed +- The logging behavior have been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165) + +## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04) + +### Fixed +- Fallback match entries might lead to UnicodeDecodeError for large bytes sequence (PR #154) + +### Changed +- Skipping the language-detection (CD) on ASCII (PR #155) + +## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03) + +### Changed +- Moderating the logging impact (since 2.0.8) for specific environments (PR #147) + +### Fixed +- Wrong logging level applied when setting kwarg `explain` to True (PR #146) + +## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24) +### Changed +- Improvement over Vietnamese detection (PR #126) +- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124) +- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122) +- call sum() without an intermediary list following PEP 289 recommendations from [@adbar](https://github.com/adbar) (PR #129) +- Code style as refactored by Sourcery-AI (PR #131) +- Minor adjustment on the MD around european words (PR #133) +- Remove and replace SRTs from assets / tests (PR #139) +- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135) +- Setting kwarg `explain` to True will add provisionally (bounded to function lifespan) a specific stream handler (PR #135) + +### Fixed +- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137) +- Avoid using too insignificant chunk (PR #137) + +### Added +- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135) +- Add `CHANGELOG.md` entries, format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141) + +## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11) +### Added +- Add support for Kazakh (Cyrillic) language detection (PR #109) + +### Changed +- Further, improve inferring the language from a given single-byte code page (PR #112) +- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116) +- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113) +- Various detection improvement (MD+CD) (PR #117) + +### Removed +- Remove redundant logging entry about detected language(s) (PR #115) + +### Fixed +- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102) + +## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18) +### Fixed +- Unforeseen regression with the loss of the backward-compatibility with some older minor of Python 3.5.x (PR #100) +- Fix CLI crash when using --minimal output in certain cases (PR #103) + +### Changed +- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101) + +## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14) +### Changed +- The project now comply with: flake8, mypy, 
isort and black to ensure a better overall quality (PR #81) +- The BC-support with v1.x was improved, the old staticmethods are restored (PR #82) +- The Unicode detection is slightly improved (PR #93) +- Add syntax sugar \_\_bool\_\_ for results CharsetMatches list-container (PR #91) + +### Removed +- The project no longer raise warning on tiny content given for detection, will be simply logged as warning instead (PR #92) + +### Fixed +- In some rare case, the chunks extractor could cut in the middle of a multi-byte character and could mislead the mess detection (PR #95) +- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96) +- The MANIFEST.in was not exhaustive (PR #78) + +## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30) +### Fixed +- The CLI no longer raise an unexpected exception when no encoding has been found (PR #70) +- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68) +- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72) +- Submatch factoring could be wrong in rare edge cases (PR #72) +- Multiple files given to the CLI were ignored when publishing results to STDOUT. (After the first path) (PR #72) +- Fix line endings from CRLF to LF for certain project files (PR #67) + +### Changed +- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76) +- Allow fallback on specified encoding if any (PR #71) + +## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16) +### Changed +- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results. Especially ASCII. (PR #63) +- According to the community wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64) + +## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15) +### Fixed +- Empty/Too small JSON payload miss-detection fixed. Report from [@tseaver](https://github.com/tseaver) (PR #59) + +### Changed +- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57) + +## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13) +### Fixed +- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). (PR #55) +- Using explain=False permanently disable the verbose output in the current runtime (PR #47) +- One log entry (language target preemptive) was not show in logs when using explain=True (PR #47) +- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52) + +### Changed +- Public function normalize default args values were not aligned with from_bytes (PR #53) + +### Added +- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47) + +## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02) +### Changed +- 4x to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet. +- Accent has been made on UTF-8 detection, should perform rather instantaneous. +- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible. 
+- The detection mechanism has been slightly improved, now Turkish content is detected correctly (most of the time) +- The program has been rewritten to ease the readability and maintainability. (+Using static typing)+ +- utf_7 detection has been reinstated. + +### Removed +- This package no longer require anything when used with Python 3.5 (Dropped cached_property) +- Removed support for these languages: Catalan, Esperanto, Kazakh, Baque, Volapük, Azeri, Galician, Nynorsk, Macedonian, and Serbocroatian. +- The exception hook on UnicodeDecodeError has been removed. + +### Deprecated +- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0 + +### Fixed +- The CLI output used the relative path of the file(s). Should be absolute. + +## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28) +### Fixed +- Logger configuration/usage no longer conflict with others (PR #44) + +## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21) +### Removed +- Using standard logging instead of using the package loguru. +- Dropping nose test framework in favor of the maintained pytest. +- Choose to not use dragonmapper package to help with gibberish Chinese/CJK text. +- Require cached_property only for Python 3.5 due to constraint. Dropping for every other interpreter version. +- Stop support for UTF-7 that does not contain a SIG. +- Dropping PrettyTable, replaced with pure JSON output in CLI. + +### Fixed +- BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present. Due to the sub-match factoring process. +- Not searching properly for the BOM when trying utf32/16 parent codec. + +### Changed +- Improving the package final size by compressing frequencies.json. +- Huge improvement over the larges payload. + +### Added +- CLI now produces JSON consumable output. +- Return ASCII if given sequences fit. Given reasonable confidence. + +## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13) + +### Fixed +- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40) + +## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12) + +### Fixed +- Empty given payload for detection may cause an exception if trying to access the `alphabets` property. (PR #39) + +## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12) + +### Fixed +- The legacy detect function should return UTF-8-SIG if sig is present in the payload. (PR #38) + +## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09) + +### Changed +- Amend the previous release to allow prettytable 2.0 (PR #35) + +## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08) + +### Fixed +- Fix error while using the package with a python pre-release interpreter (PR #33) + +### Changed +- Dependencies refactoring, constraints revised. + +### Added +- Add python 3.9 and 3.10 to the supported interpreters + +MIT License + +Copyright (c) 2025 TAHRI Ahmed R. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/RECORD b/venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/RECORD new file mode 100644 index 0000000..7815def --- /dev/null +++ b/venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/RECORD @@ -0,0 +1,35 @@ +../../../bin/normalizer,sha256=A6ou4L8woatRHrqhnRxGUEqi8iip4KIlrH0iTFmr3AU,252 +charset_normalizer-3.4.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +charset_normalizer-3.4.1.dist-info/LICENSE,sha256=bQ1Bv-FwrGx9wkjJpj4lTQ-0WmDVCoJX0K-SxuJJuIc,1071 +charset_normalizer-3.4.1.dist-info/METADATA,sha256=JbyHzhmqZh_ugEn1Y7TY7CDYZA9FoU6BP25hrCNDf50,35313 +charset_normalizer-3.4.1.dist-info/RECORD,, +charset_normalizer-3.4.1.dist-info/WHEEL,sha256=9BFfIe-Zq441iQ0ehutX65O5faGDpmB1Uw3WaQGk4f0,151 +charset_normalizer-3.4.1.dist-info/entry_points.txt,sha256=8C-Y3iXIfyXQ83Tpir2B8t-XLJYpxF5xbb38d_js-h4,65 +charset_normalizer-3.4.1.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19 +charset_normalizer/__init__.py,sha256=OKRxRv2Zhnqk00tqkN0c1BtJjm165fWXLydE52IKuHc,1590 +charset_normalizer/__main__.py,sha256=yzYxMR-IhKRHYwcSlavEv8oGdwxsR89mr2X09qXGdps,109 +charset_normalizer/__pycache__/__init__.cpython-311.pyc,, +charset_normalizer/__pycache__/__main__.cpython-311.pyc,, +charset_normalizer/__pycache__/api.cpython-311.pyc,, +charset_normalizer/__pycache__/cd.cpython-311.pyc,, +charset_normalizer/__pycache__/constant.cpython-311.pyc,, +charset_normalizer/__pycache__/legacy.cpython-311.pyc,, +charset_normalizer/__pycache__/md.cpython-311.pyc,, +charset_normalizer/__pycache__/models.cpython-311.pyc,, +charset_normalizer/__pycache__/utils.cpython-311.pyc,, +charset_normalizer/__pycache__/version.cpython-311.pyc,, +charset_normalizer/api.py,sha256=qBRz8mJ_R5E713R6TOyqHEdnmyxbEDnCSHvx32ubDGg,22617 +charset_normalizer/cd.py,sha256=WKTo1HDb-H9HfCDc3Bfwq5jzS25Ziy9SE2a74SgTq88,12522 +charset_normalizer/cli/__init__.py,sha256=D8I86lFk2-py45JvqxniTirSj_sFyE6sjaY_0-G1shc,136 +charset_normalizer/cli/__main__.py,sha256=VGC9klOoi6_R2z8rmyrc936kv7u2A1udjjHtlmNPDTM,10410 +charset_normalizer/cli/__pycache__/__init__.cpython-311.pyc,, +charset_normalizer/cli/__pycache__/__main__.cpython-311.pyc,, +charset_normalizer/constant.py,sha256=4VuTcZNLew1j_8ixA-Rt_VVqNWD4pwgHOHMCMlr0964,40477 +charset_normalizer/legacy.py,sha256=yhNXsPHkBfqPXKRb-sPXNj3Bscp9-mFGcYOkJ62tg9c,2328 
+charset_normalizer/md.cpython-311-x86_64-linux-gnu.so,sha256=Y7QSLD5QLoSFAWys0-tL7R6QB7oi5864zM6zr7RWek4,16064
+charset_normalizer/md.py,sha256=iyXXQGWl54nnLQLueMWTmUtlivO0-rTBgVkmJxIIAGU,20036
+charset_normalizer/md__mypyc.cpython-311-x86_64-linux-gnu.so,sha256=XhkMX4lYOzNR-a68sp24uNetCg-njAJCHj14aJM6dY8,272624
+charset_normalizer/models.py,sha256=lKXhOnIPtiakbK3i__J9wpOfzx3JDTKj7Dn3Rg0VaRI,12394
+charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+charset_normalizer/utils.py,sha256=T5UHo8AS7NVMmgruWoZyqEf0WrZVcQpgUNetRoborSk,12002
+charset_normalizer/version.py,sha256=Ambcj3O8FfvdLfDLc8dkaxZx97O1IM_R4_aKGD_TDdE,115
diff --git a/venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/WHEEL b/venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/WHEEL
new file mode 100644
index 0000000..8b03636
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: setuptools (75.6.0)
+Root-Is-Purelib: false
+Tag: cp311-cp311-manylinux_2_17_x86_64
+Tag: cp311-cp311-manylinux2014_x86_64
+
diff --git a/venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/entry_points.txt b/venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/entry_points.txt
new file mode 100644
index 0000000..ec92012
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/entry_points.txt
@@ -0,0 +1,2 @@
+[console_scripts]
+normalizer = charset_normalizer:cli.cli_detect
diff --git a/venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/top_level.txt b/venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/top_level.txt
new file mode 100644
index 0000000..66958f0
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/charset_normalizer-3.4.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+charset_normalizer
diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/__init__.py b/venv/lib/python3.11/site-packages/charset_normalizer/__init__.py
new file mode 100644
index 0000000..0d3a379
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/charset_normalizer/__init__.py
@@ -0,0 +1,48 @@
+"""
+Charset-Normalizer
+~~~~~~~~~~~~~~~~~~
+The Real First Universal Charset Detector.
+A library that helps you read text from an unknown charset encoding.
+Motivated by chardet, this package tries to resolve the issue by taking a new approach.
+All IANA character set names for which the Python core library provides codecs are supported.
+
+Basic usage:
+    >>> from charset_normalizer import from_bytes
+    >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8'))
+    >>> best_guess = results.best()
+    >>> str(best_guess)
+    'Bсеки човек има право на образование. Oбразованието!'
+
+Other methods and usages are available - see the full documentation
+at .
+:copyright: (c) 2021 by Ahmed TAHRI
+:license: MIT, see LICENSE for more details.
+""" + +from __future__ import annotations + +import logging + +from .api import from_bytes, from_fp, from_path, is_binary +from .legacy import detect +from .models import CharsetMatch, CharsetMatches +from .utils import set_logging_handler +from .version import VERSION, __version__ + +__all__ = ( + "from_fp", + "from_path", + "from_bytes", + "is_binary", + "detect", + "CharsetMatch", + "CharsetMatches", + "__version__", + "VERSION", + "set_logging_handler", +) + +# Attach a NullHandler to the top level logger by default +# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library + +logging.getLogger("charset_normalizer").addHandler(logging.NullHandler()) diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/__main__.py b/venv/lib/python3.11/site-packages/charset_normalizer/__main__.py new file mode 100644 index 0000000..e0e76f7 --- /dev/null +++ b/venv/lib/python3.11/site-packages/charset_normalizer/__main__.py @@ -0,0 +1,6 @@ +from __future__ import annotations + +from .cli import cli_detect + +if __name__ == "__main__": + cli_detect() diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..1fee4e0 Binary files /dev/null and b/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/__main__.cpython-311.pyc b/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/__main__.cpython-311.pyc new file mode 100644 index 0000000..a055c00 Binary files /dev/null and b/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/__main__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/api.cpython-311.pyc b/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/api.cpython-311.pyc new file mode 100644 index 0000000..249c1fa Binary files /dev/null and b/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/api.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/cd.cpython-311.pyc b/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/cd.cpython-311.pyc new file mode 100644 index 0000000..9d668f5 Binary files /dev/null and b/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/cd.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/constant.cpython-311.pyc b/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/constant.cpython-311.pyc new file mode 100644 index 0000000..b5395bd Binary files /dev/null and b/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/constant.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/legacy.cpython-311.pyc b/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/legacy.cpython-311.pyc new file mode 100644 index 0000000..c71d4de Binary files /dev/null and b/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/legacy.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/md.cpython-311.pyc b/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/md.cpython-311.pyc new file mode 100644 index 0000000..c923972 Binary files /dev/null and 
b/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/md.cpython-311.pyc differ
diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/models.cpython-311.pyc b/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/models.cpython-311.pyc
new file mode 100644
index 0000000..ba31d00
Binary files /dev/null and b/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/models.cpython-311.pyc differ
diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/utils.cpython-311.pyc b/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/utils.cpython-311.pyc
new file mode 100644
index 0000000..29bec3e
Binary files /dev/null and b/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/utils.cpython-311.pyc differ
diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/version.cpython-311.pyc b/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/version.cpython-311.pyc
new file mode 100644
index 0000000..f67c4a6
Binary files /dev/null and b/venv/lib/python3.11/site-packages/charset_normalizer/__pycache__/version.cpython-311.pyc differ
diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/api.py b/venv/lib/python3.11/site-packages/charset_normalizer/api.py
new file mode 100644
index 0000000..2c8c061
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/charset_normalizer/api.py
@@ -0,0 +1,668 @@
+from __future__ import annotations
+
+import logging
+from os import PathLike
+from typing import BinaryIO
+
+from .cd import (
+    coherence_ratio,
+    encoding_languages,
+    mb_encoding_languages,
+    merge_coherence_ratios,
+)
+from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE, TRACE
+from .md import mess_ratio
+from .models import CharsetMatch, CharsetMatches
+from .utils import (
+    any_specified_encoding,
+    cut_sequence_chunks,
+    iana_name,
+    identify_sig_or_bom,
+    is_cp_similar,
+    is_multi_byte_encoding,
+    should_strip_sig_or_bom,
+)
+
+logger = logging.getLogger("charset_normalizer")
+explain_handler = logging.StreamHandler()
+explain_handler.setFormatter(
+    logging.Formatter("%(asctime)s | %(levelname)s | %(message)s")
+)
+
+
+def from_bytes(
+    sequences: bytes | bytearray,
+    steps: int = 5,
+    chunk_size: int = 512,
+    threshold: float = 0.2,
+    cp_isolation: list[str] | None = None,
+    cp_exclusion: list[str] | None = None,
+    preemptive_behaviour: bool = True,
+    explain: bool = False,
+    language_threshold: float = 0.1,
+    enable_fallback: bool = True,
+) -> CharsetMatches:
+    """
+    Given a raw bytes sequence, return the best possible charsets usable to render str objects.
+    If there are no results, it is a strong indicator that the source is binary/not text.
+    By default, the process will extract 5 blocks of 512 bytes each to assess the mess and coherence of a given
+    sequence, and will give up on a particular code page after 20% of measured mess. Those criteria are customizable
+    at will.
+
+    The preemptive behavior DOES NOT replace the traditional detection workflow; it prioritizes a particular code page
+    but never takes it for granted. It can improve performance.
+
+    You may want to focus your attention on some code pages and/or exclude others; use cp_isolation and cp_exclusion
+    for that purpose.
+
+    This function will strip the SIG in the payload/sequence every time except on UTF-16, UTF-32.
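+
+    A minimal usage sketch (illustrative only; the payload is hypothetical and the
+    detected encoding and scores vary with the input):
+
+        >>> from charset_normalizer import from_bytes
+        >>> results = from_bytes("Bсеки човек има право на образование.".encode("utf_8"))
+        >>> results.best().encoding  # doctest: +SKIP
+        'utf_8'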
+ By default the library does not setup any handler other than the NullHandler, if you choose to set the 'explain' + toggle to True it will alter the logger configuration to add a StreamHandler that is suitable for debugging. + Custom logging format and handler can be set manually. + """ + + if not isinstance(sequences, (bytearray, bytes)): + raise TypeError( + "Expected object of type bytes or bytearray, got: {}".format( + type(sequences) + ) + ) + + if explain: + previous_logger_level: int = logger.level + logger.addHandler(explain_handler) + logger.setLevel(TRACE) + + length: int = len(sequences) + + if length == 0: + logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.") + if explain: # Defensive: ensure exit path clean handler + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level or logging.WARNING) + return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")]) + + if cp_isolation is not None: + logger.log( + TRACE, + "cp_isolation is set. use this flag for debugging purpose. " + "limited list of encoding allowed : %s.", + ", ".join(cp_isolation), + ) + cp_isolation = [iana_name(cp, False) for cp in cp_isolation] + else: + cp_isolation = [] + + if cp_exclusion is not None: + logger.log( + TRACE, + "cp_exclusion is set. use this flag for debugging purpose. " + "limited list of encoding excluded : %s.", + ", ".join(cp_exclusion), + ) + cp_exclusion = [iana_name(cp, False) for cp in cp_exclusion] + else: + cp_exclusion = [] + + if length <= (chunk_size * steps): + logger.log( + TRACE, + "override steps (%i) and chunk_size (%i) as content does not fit (%i byte(s) given) parameters.", + steps, + chunk_size, + length, + ) + steps = 1 + chunk_size = length + + if steps > 1 and length / steps < chunk_size: + chunk_size = int(length / steps) + + is_too_small_sequence: bool = len(sequences) < TOO_SMALL_SEQUENCE + is_too_large_sequence: bool = len(sequences) >= TOO_BIG_SEQUENCE + + if is_too_small_sequence: + logger.log( + TRACE, + "Trying to detect encoding from a tiny portion of ({}) byte(s).".format( + length + ), + ) + elif is_too_large_sequence: + logger.log( + TRACE, + "Using lazy str decoding because the payload is quite large, ({}) byte(s).".format( + length + ), + ) + + prioritized_encodings: list[str] = [] + + specified_encoding: str | None = ( + any_specified_encoding(sequences) if preemptive_behaviour else None + ) + + if specified_encoding is not None: + prioritized_encodings.append(specified_encoding) + logger.log( + TRACE, + "Detected declarative mark in sequence. Priority +1 given for %s.", + specified_encoding, + ) + + tested: set[str] = set() + tested_but_hard_failure: list[str] = [] + tested_but_soft_failure: list[str] = [] + + fallback_ascii: CharsetMatch | None = None + fallback_u8: CharsetMatch | None = None + fallback_specified: CharsetMatch | None = None + + results: CharsetMatches = CharsetMatches() + + early_stop_results: CharsetMatches = CharsetMatches() + + sig_encoding, sig_payload = identify_sig_or_bom(sequences) + + if sig_encoding is not None: + prioritized_encodings.append(sig_encoding) + logger.log( + TRACE, + "Detected a SIG or BOM mark on first %i byte(s). 
Priority +1 given for %s.", + len(sig_payload), + sig_encoding, + ) + + prioritized_encodings.append("ascii") + + if "utf_8" not in prioritized_encodings: + prioritized_encodings.append("utf_8") + + for encoding_iana in prioritized_encodings + IANA_SUPPORTED: + if cp_isolation and encoding_iana not in cp_isolation: + continue + + if cp_exclusion and encoding_iana in cp_exclusion: + continue + + if encoding_iana in tested: + continue + + tested.add(encoding_iana) + + decoded_payload: str | None = None + bom_or_sig_available: bool = sig_encoding == encoding_iana + strip_sig_or_bom: bool = bom_or_sig_available and should_strip_sig_or_bom( + encoding_iana + ) + + if encoding_iana in {"utf_16", "utf_32"} and not bom_or_sig_available: + logger.log( + TRACE, + "Encoding %s won't be tested as-is because it require a BOM. Will try some sub-encoder LE/BE.", + encoding_iana, + ) + continue + if encoding_iana in {"utf_7"} and not bom_or_sig_available: + logger.log( + TRACE, + "Encoding %s won't be tested as-is because detection is unreliable without BOM/SIG.", + encoding_iana, + ) + continue + + try: + is_multi_byte_decoder: bool = is_multi_byte_encoding(encoding_iana) + except (ModuleNotFoundError, ImportError): + logger.log( + TRACE, + "Encoding %s does not provide an IncrementalDecoder", + encoding_iana, + ) + continue + + try: + if is_too_large_sequence and is_multi_byte_decoder is False: + str( + ( + sequences[: int(50e4)] + if strip_sig_or_bom is False + else sequences[len(sig_payload) : int(50e4)] + ), + encoding=encoding_iana, + ) + else: + decoded_payload = str( + ( + sequences + if strip_sig_or_bom is False + else sequences[len(sig_payload) :] + ), + encoding=encoding_iana, + ) + except (UnicodeDecodeError, LookupError) as e: + if not isinstance(e, LookupError): + logger.log( + TRACE, + "Code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + tested_but_hard_failure.append(encoding_iana) + continue + + similar_soft_failure_test: bool = False + + for encoding_soft_failed in tested_but_soft_failure: + if is_cp_similar(encoding_iana, encoding_soft_failed): + similar_soft_failure_test = True + break + + if similar_soft_failure_test: + logger.log( + TRACE, + "%s is deemed too similar to code page %s and was consider unsuited already. 
Continuing!", + encoding_iana, + encoding_soft_failed, + ) + continue + + r_ = range( + 0 if not bom_or_sig_available else len(sig_payload), + length, + int(length / steps), + ) + + multi_byte_bonus: bool = ( + is_multi_byte_decoder + and decoded_payload is not None + and len(decoded_payload) < length + ) + + if multi_byte_bonus: + logger.log( + TRACE, + "Code page %s is a multi byte encoding table and it appear that at least one character " + "was encoded using n-bytes.", + encoding_iana, + ) + + max_chunk_gave_up: int = int(len(r_) / 4) + + max_chunk_gave_up = max(max_chunk_gave_up, 2) + early_stop_count: int = 0 + lazy_str_hard_failure = False + + md_chunks: list[str] = [] + md_ratios = [] + + try: + for chunk in cut_sequence_chunks( + sequences, + encoding_iana, + r_, + chunk_size, + bom_or_sig_available, + strip_sig_or_bom, + sig_payload, + is_multi_byte_decoder, + decoded_payload, + ): + md_chunks.append(chunk) + + md_ratios.append( + mess_ratio( + chunk, + threshold, + explain is True and 1 <= len(cp_isolation) <= 2, + ) + ) + + if md_ratios[-1] >= threshold: + early_stop_count += 1 + + if (early_stop_count >= max_chunk_gave_up) or ( + bom_or_sig_available and strip_sig_or_bom is False + ): + break + except ( + UnicodeDecodeError + ) as e: # Lazy str loading may have missed something there + logger.log( + TRACE, + "LazyStr Loading: After MD chunk decode, code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + early_stop_count = max_chunk_gave_up + lazy_str_hard_failure = True + + # We might want to check the sequence again with the whole content + # Only if initial MD tests passes + if ( + not lazy_str_hard_failure + and is_too_large_sequence + and not is_multi_byte_decoder + ): + try: + sequences[int(50e3) :].decode(encoding_iana, errors="strict") + except UnicodeDecodeError as e: + logger.log( + TRACE, + "LazyStr Loading: After final lookup, code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + tested_but_hard_failure.append(encoding_iana) + continue + + mean_mess_ratio: float = sum(md_ratios) / len(md_ratios) if md_ratios else 0.0 + if mean_mess_ratio >= threshold or early_stop_count >= max_chunk_gave_up: + tested_but_soft_failure.append(encoding_iana) + logger.log( + TRACE, + "%s was excluded because of initial chaos probing. Gave up %i time(s). " + "Computed mean chaos is %f %%.", + encoding_iana, + early_stop_count, + round(mean_mess_ratio * 100, ndigits=3), + ) + # Preparing those fallbacks in case we got nothing. + if ( + enable_fallback + and encoding_iana in ["ascii", "utf_8", specified_encoding] + and not lazy_str_hard_failure + ): + fallback_entry = CharsetMatch( + sequences, + encoding_iana, + threshold, + False, + [], + decoded_payload, + preemptive_declaration=specified_encoding, + ) + if encoding_iana == specified_encoding: + fallback_specified = fallback_entry + elif encoding_iana == "ascii": + fallback_ascii = fallback_entry + else: + fallback_u8 = fallback_entry + continue + + logger.log( + TRACE, + "%s passed initial chaos probing. 
Mean measured chaos is %f %%", + encoding_iana, + round(mean_mess_ratio * 100, ndigits=3), + ) + + if not is_multi_byte_decoder: + target_languages: list[str] = encoding_languages(encoding_iana) + else: + target_languages = mb_encoding_languages(encoding_iana) + + if target_languages: + logger.log( + TRACE, + "{} should target any language(s) of {}".format( + encoding_iana, str(target_languages) + ), + ) + + cd_ratios = [] + + # We shall skip the CD when its about ASCII + # Most of the time its not relevant to run "language-detection" on it. + if encoding_iana != "ascii": + for chunk in md_chunks: + chunk_languages = coherence_ratio( + chunk, + language_threshold, + ",".join(target_languages) if target_languages else None, + ) + + cd_ratios.append(chunk_languages) + + cd_ratios_merged = merge_coherence_ratios(cd_ratios) + + if cd_ratios_merged: + logger.log( + TRACE, + "We detected language {} using {}".format( + cd_ratios_merged, encoding_iana + ), + ) + + current_match = CharsetMatch( + sequences, + encoding_iana, + mean_mess_ratio, + bom_or_sig_available, + cd_ratios_merged, + ( + decoded_payload + if ( + is_too_large_sequence is False + or encoding_iana in [specified_encoding, "ascii", "utf_8"] + ) + else None + ), + preemptive_declaration=specified_encoding, + ) + + results.append(current_match) + + if ( + encoding_iana in [specified_encoding, "ascii", "utf_8"] + and mean_mess_ratio < 0.1 + ): + # If md says nothing to worry about, then... stop immediately! + if mean_mess_ratio == 0.0: + logger.debug( + "Encoding detection: %s is most likely the one.", + current_match.encoding, + ) + if explain: # Defensive: ensure exit path clean handler + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + return CharsetMatches([current_match]) + + early_stop_results.append(current_match) + + if ( + len(early_stop_results) + and (specified_encoding is None or specified_encoding in tested) + and "ascii" in tested + and "utf_8" in tested + ): + probable_result: CharsetMatch = early_stop_results.best() # type: ignore[assignment] + logger.debug( + "Encoding detection: %s is most likely the one.", + probable_result.encoding, + ) + if explain: # Defensive: ensure exit path clean handler + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + + return CharsetMatches([probable_result]) + + if encoding_iana == sig_encoding: + logger.debug( + "Encoding detection: %s is most likely the one as we detected a BOM or SIG within " + "the beginning of the sequence.", + encoding_iana, + ) + if explain: # Defensive: ensure exit path clean handler + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + return CharsetMatches([results[encoding_iana]]) + + if len(results) == 0: + if fallback_u8 or fallback_ascii or fallback_specified: + logger.log( + TRACE, + "Nothing got out of the detection process. 
Using ASCII/UTF-8/Specified fallback.",
+            )
+
+        if fallback_specified:
+            logger.debug(
+                "Encoding detection: %s will be used as a fallback match",
+                fallback_specified.encoding,
+            )
+            results.append(fallback_specified)
+        elif (
+            (fallback_u8 and fallback_ascii is None)
+            or (
+                fallback_u8
+                and fallback_ascii
+                and fallback_u8.fingerprint != fallback_ascii.fingerprint
+            )
+            or (fallback_u8 is not None)
+        ):
+            logger.debug("Encoding detection: utf_8 will be used as a fallback match")
+            results.append(fallback_u8)
+        elif fallback_ascii:
+            logger.debug("Encoding detection: ascii will be used as a fallback match")
+            results.append(fallback_ascii)
+
+    if results:
+        logger.debug(
+            "Encoding detection: Found %s as plausible (best-candidate) for content. With %i alternatives.",
+            results.best().encoding,  # type: ignore
+            len(results) - 1,
+        )
+    else:
+        logger.debug("Encoding detection: Unable to determine any suitable charset.")
+
+    if explain:
+        logger.removeHandler(explain_handler)
+        logger.setLevel(previous_logger_level)
+
+    return results
+
+
+def from_fp(
+    fp: BinaryIO,
+    steps: int = 5,
+    chunk_size: int = 512,
+    threshold: float = 0.20,
+    cp_isolation: list[str] | None = None,
+    cp_exclusion: list[str] | None = None,
+    preemptive_behaviour: bool = True,
+    explain: bool = False,
+    language_threshold: float = 0.1,
+    enable_fallback: bool = True,
+) -> CharsetMatches:
+    """
+    Same as the function from_bytes, but uses a file pointer that is already ready.
+    Will not close the file pointer.
+    """
+    return from_bytes(
+        fp.read(),
+        steps,
+        chunk_size,
+        threshold,
+        cp_isolation,
+        cp_exclusion,
+        preemptive_behaviour,
+        explain,
+        language_threshold,
+        enable_fallback,
+    )
+
+
+def from_path(
+    path: str | bytes | PathLike,  # type: ignore[type-arg]
+    steps: int = 5,
+    chunk_size: int = 512,
+    threshold: float = 0.20,
+    cp_isolation: list[str] | None = None,
+    cp_exclusion: list[str] | None = None,
+    preemptive_behaviour: bool = True,
+    explain: bool = False,
+    language_threshold: float = 0.1,
+    enable_fallback: bool = True,
+) -> CharsetMatches:
+    """
+    Same as the function from_bytes, with one extra step: opening and reading the given file path in binary mode.
+    Can raise IOError.
+    """
+    with open(path, "rb") as fp:
+        return from_fp(
+            fp,
+            steps,
+            chunk_size,
+            threshold,
+            cp_isolation,
+            cp_exclusion,
+            preemptive_behaviour,
+            explain,
+            language_threshold,
+            enable_fallback,
+        )
+
+
+def is_binary(
+    fp_or_path_or_payload: PathLike | str | BinaryIO | bytes,  # type: ignore[type-arg]
+    steps: int = 5,
+    chunk_size: int = 512,
+    threshold: float = 0.20,
+    cp_isolation: list[str] | None = None,
+    cp_exclusion: list[str] | None = None,
+    preemptive_behaviour: bool = True,
+    explain: bool = False,
+    language_threshold: float = 0.1,
+    enable_fallback: bool = False,
+) -> bool:
+    """
+    Detect whether the given input (file, bytes, or path) points to something binary, i.e. not text.
+    Based on the same main heuristic algorithms and default kwargs, with the sole exception that fallback matches
+    are disabled, to be stricter about content that is ASCII-compatible but unlikely to be text.
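+
+    A minimal sketch (illustrative; these hypothetical payloads were chosen so the
+    verdict is predictable, but real results depend on the heuristics above):
+
+        >>> from charset_normalizer import is_binary
+        >>> is_binary(b"\x00\x01\x02\x03" * 16)  # doctest: +SKIP
+        True
+        >>> is_binary(b"a plain ascii payload")  # doctest: +SKIP
+        False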
+ """ + if isinstance(fp_or_path_or_payload, (str, PathLike)): + guesses = from_path( + fp_or_path_or_payload, + steps=steps, + chunk_size=chunk_size, + threshold=threshold, + cp_isolation=cp_isolation, + cp_exclusion=cp_exclusion, + preemptive_behaviour=preemptive_behaviour, + explain=explain, + language_threshold=language_threshold, + enable_fallback=enable_fallback, + ) + elif isinstance( + fp_or_path_or_payload, + ( + bytes, + bytearray, + ), + ): + guesses = from_bytes( + fp_or_path_or_payload, + steps=steps, + chunk_size=chunk_size, + threshold=threshold, + cp_isolation=cp_isolation, + cp_exclusion=cp_exclusion, + preemptive_behaviour=preemptive_behaviour, + explain=explain, + language_threshold=language_threshold, + enable_fallback=enable_fallback, + ) + else: + guesses = from_fp( + fp_or_path_or_payload, + steps=steps, + chunk_size=chunk_size, + threshold=threshold, + cp_isolation=cp_isolation, + cp_exclusion=cp_exclusion, + preemptive_behaviour=preemptive_behaviour, + explain=explain, + language_threshold=language_threshold, + enable_fallback=enable_fallback, + ) + + return not guesses diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/cd.py b/venv/lib/python3.11/site-packages/charset_normalizer/cd.py new file mode 100644 index 0000000..71a3ed5 --- /dev/null +++ b/venv/lib/python3.11/site-packages/charset_normalizer/cd.py @@ -0,0 +1,395 @@ +from __future__ import annotations + +import importlib +from codecs import IncrementalDecoder +from collections import Counter +from functools import lru_cache +from typing import Counter as TypeCounter + +from .constant import ( + FREQUENCIES, + KO_NAMES, + LANGUAGE_SUPPORTED_COUNT, + TOO_SMALL_SEQUENCE, + ZH_NAMES, +) +from .md import is_suspiciously_successive_range +from .models import CoherenceMatches +from .utils import ( + is_accentuated, + is_latin, + is_multi_byte_encoding, + is_unicode_range_secondary, + unicode_range, +) + + +def encoding_unicode_range(iana_name: str) -> list[str]: + """ + Return associated unicode ranges in a single byte code page. + """ + if is_multi_byte_encoding(iana_name): + raise OSError("Function not supported on multi-byte code page") + + decoder = importlib.import_module(f"encodings.{iana_name}").IncrementalDecoder + + p: IncrementalDecoder = decoder(errors="ignore") + seen_ranges: dict[str, int] = {} + character_count: int = 0 + + for i in range(0x40, 0xFF): + chunk: str = p.decode(bytes([i])) + + if chunk: + character_range: str | None = unicode_range(chunk) + + if character_range is None: + continue + + if is_unicode_range_secondary(character_range) is False: + if character_range not in seen_ranges: + seen_ranges[character_range] = 0 + seen_ranges[character_range] += 1 + character_count += 1 + + return sorted( + [ + character_range + for character_range in seen_ranges + if seen_ranges[character_range] / character_count >= 0.15 + ] + ) + + +def unicode_range_languages(primary_range: str) -> list[str]: + """ + Return inferred languages used with a unicode range. + """ + languages: list[str] = [] + + for language, characters in FREQUENCIES.items(): + for character in characters: + if unicode_range(character) == primary_range: + languages.append(language) + break + + return languages + + +@lru_cache() +def encoding_languages(iana_name: str) -> list[str]: + """ + Single-byte encoding language association. Some code page are heavily linked to particular language(s). + This function does the correspondence. 
+ """ + unicode_ranges: list[str] = encoding_unicode_range(iana_name) + primary_range: str | None = None + + for specified_range in unicode_ranges: + if "Latin" not in specified_range: + primary_range = specified_range + break + + if primary_range is None: + return ["Latin Based"] + + return unicode_range_languages(primary_range) + + +@lru_cache() +def mb_encoding_languages(iana_name: str) -> list[str]: + """ + Multi-byte encoding language association. Some code page are heavily linked to particular language(s). + This function does the correspondence. + """ + if ( + iana_name.startswith("shift_") + or iana_name.startswith("iso2022_jp") + or iana_name.startswith("euc_j") + or iana_name == "cp932" + ): + return ["Japanese"] + if iana_name.startswith("gb") or iana_name in ZH_NAMES: + return ["Chinese"] + if iana_name.startswith("iso2022_kr") or iana_name in KO_NAMES: + return ["Korean"] + + return [] + + +@lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT) +def get_target_features(language: str) -> tuple[bool, bool]: + """ + Determine main aspects from a supported language if it contains accents and if is pure Latin. + """ + target_have_accents: bool = False + target_pure_latin: bool = True + + for character in FREQUENCIES[language]: + if not target_have_accents and is_accentuated(character): + target_have_accents = True + if target_pure_latin and is_latin(character) is False: + target_pure_latin = False + + return target_have_accents, target_pure_latin + + +def alphabet_languages( + characters: list[str], ignore_non_latin: bool = False +) -> list[str]: + """ + Return associated languages associated to given characters. + """ + languages: list[tuple[str, float]] = [] + + source_have_accents = any(is_accentuated(character) for character in characters) + + for language, language_characters in FREQUENCIES.items(): + target_have_accents, target_pure_latin = get_target_features(language) + + if ignore_non_latin and target_pure_latin is False: + continue + + if target_have_accents is False and source_have_accents: + continue + + character_count: int = len(language_characters) + + character_match_count: int = len( + [c for c in language_characters if c in characters] + ) + + ratio: float = character_match_count / character_count + + if ratio >= 0.2: + languages.append((language, ratio)) + + languages = sorted(languages, key=lambda x: x[1], reverse=True) + + return [compatible_language[0] for compatible_language in languages] + + +def characters_popularity_compare( + language: str, ordered_characters: list[str] +) -> float: + """ + Determine if a ordered characters list (by occurrence from most appearance to rarest) match a particular language. + The result is a ratio between 0. (absolutely no correspondence) and 1. (near perfect fit). + Beware that is function is not strict on the match in order to ease the detection. (Meaning close match is 1.) 
+ """ + if language not in FREQUENCIES: + raise ValueError(f"{language} not available") + + character_approved_count: int = 0 + FREQUENCIES_language_set = set(FREQUENCIES[language]) + + ordered_characters_count: int = len(ordered_characters) + target_language_characters_count: int = len(FREQUENCIES[language]) + + large_alphabet: bool = target_language_characters_count > 26 + + for character, character_rank in zip( + ordered_characters, range(0, ordered_characters_count) + ): + if character not in FREQUENCIES_language_set: + continue + + character_rank_in_language: int = FREQUENCIES[language].index(character) + expected_projection_ratio: float = ( + target_language_characters_count / ordered_characters_count + ) + character_rank_projection: int = int(character_rank * expected_projection_ratio) + + if ( + large_alphabet is False + and abs(character_rank_projection - character_rank_in_language) > 4 + ): + continue + + if ( + large_alphabet is True + and abs(character_rank_projection - character_rank_in_language) + < target_language_characters_count / 3 + ): + character_approved_count += 1 + continue + + characters_before_source: list[str] = FREQUENCIES[language][ + 0:character_rank_in_language + ] + characters_after_source: list[str] = FREQUENCIES[language][ + character_rank_in_language: + ] + characters_before: list[str] = ordered_characters[0:character_rank] + characters_after: list[str] = ordered_characters[character_rank:] + + before_match_count: int = len( + set(characters_before) & set(characters_before_source) + ) + + after_match_count: int = len( + set(characters_after) & set(characters_after_source) + ) + + if len(characters_before_source) == 0 and before_match_count <= 4: + character_approved_count += 1 + continue + + if len(characters_after_source) == 0 and after_match_count <= 4: + character_approved_count += 1 + continue + + if ( + before_match_count / len(characters_before_source) >= 0.4 + or after_match_count / len(characters_after_source) >= 0.4 + ): + character_approved_count += 1 + continue + + return character_approved_count / len(ordered_characters) + + +def alpha_unicode_split(decoded_sequence: str) -> list[str]: + """ + Given a decoded text sequence, return a list of str. Unicode range / alphabet separation. + Ex. a text containing English/Latin with a bit a Hebrew will return two items in the resulting list; + One containing the latin letters and the other hebrew. + """ + layers: dict[str, str] = {} + + for character in decoded_sequence: + if character.isalpha() is False: + continue + + character_range: str | None = unicode_range(character) + + if character_range is None: + continue + + layer_target_range: str | None = None + + for discovered_range in layers: + if ( + is_suspiciously_successive_range(discovered_range, character_range) + is False + ): + layer_target_range = discovered_range + break + + if layer_target_range is None: + layer_target_range = character_range + + if layer_target_range not in layers: + layers[layer_target_range] = character.lower() + continue + + layers[layer_target_range] += character.lower() + + return list(layers.values()) + + +def merge_coherence_ratios(results: list[CoherenceMatches]) -> CoherenceMatches: + """ + This function merge results previously given by the function coherence_ratio. + The return type is the same as coherence_ratio. 
+ """ + per_language_ratios: dict[str, list[float]] = {} + for result in results: + for sub_result in result: + language, ratio = sub_result + if language not in per_language_ratios: + per_language_ratios[language] = [ratio] + continue + per_language_ratios[language].append(ratio) + + merge = [ + ( + language, + round( + sum(per_language_ratios[language]) / len(per_language_ratios[language]), + 4, + ), + ) + for language in per_language_ratios + ] + + return sorted(merge, key=lambda x: x[1], reverse=True) + + +def filter_alt_coherence_matches(results: CoherenceMatches) -> CoherenceMatches: + """ + We shall NOT return "English—" in CoherenceMatches because it is an alternative + of "English". This function only keeps the best match and remove the em-dash in it. + """ + index_results: dict[str, list[float]] = dict() + + for result in results: + language, ratio = result + no_em_name: str = language.replace("—", "") + + if no_em_name not in index_results: + index_results[no_em_name] = [] + + index_results[no_em_name].append(ratio) + + if any(len(index_results[e]) > 1 for e in index_results): + filtered_results: CoherenceMatches = [] + + for language in index_results: + filtered_results.append((language, max(index_results[language]))) + + return filtered_results + + return results + + +@lru_cache(maxsize=2048) +def coherence_ratio( + decoded_sequence: str, threshold: float = 0.1, lg_inclusion: str | None = None +) -> CoherenceMatches: + """ + Detect ANY language that can be identified in given sequence. The sequence will be analysed by layers. + A layer = Character extraction by alphabets/ranges. + """ + + results: list[tuple[str, float]] = [] + ignore_non_latin: bool = False + + sufficient_match_count: int = 0 + + lg_inclusion_list = lg_inclusion.split(",") if lg_inclusion is not None else [] + if "Latin Based" in lg_inclusion_list: + ignore_non_latin = True + lg_inclusion_list.remove("Latin Based") + + for layer in alpha_unicode_split(decoded_sequence): + sequence_frequencies: TypeCounter[str] = Counter(layer) + most_common = sequence_frequencies.most_common() + + character_count: int = sum(o for c, o in most_common) + + if character_count <= TOO_SMALL_SEQUENCE: + continue + + popular_character_ordered: list[str] = [c for c, o in most_common] + + for language in lg_inclusion_list or alphabet_languages( + popular_character_ordered, ignore_non_latin + ): + ratio: float = characters_popularity_compare( + language, popular_character_ordered + ) + + if ratio < threshold: + continue + elif ratio >= 0.8: + sufficient_match_count += 1 + + results.append((language, round(ratio, 4))) + + if sufficient_match_count >= 3: + break + + return sorted( + filter_alt_coherence_matches(results), key=lambda x: x[1], reverse=True + ) diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/cli/__init__.py b/venv/lib/python3.11/site-packages/charset_normalizer/cli/__init__.py new file mode 100644 index 0000000..543a5a4 --- /dev/null +++ b/venv/lib/python3.11/site-packages/charset_normalizer/cli/__init__.py @@ -0,0 +1,8 @@ +from __future__ import annotations + +from .__main__ import cli_detect, query_yes_no + +__all__ = ( + "cli_detect", + "query_yes_no", +) diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/cli/__main__.py b/venv/lib/python3.11/site-packages/charset_normalizer/cli/__main__.py new file mode 100644 index 0000000..64a290f --- /dev/null +++ b/venv/lib/python3.11/site-packages/charset_normalizer/cli/__main__.py @@ -0,0 +1,321 @@ +from __future__ import annotations + +import 
argparse +import sys +from json import dumps +from os.path import abspath, basename, dirname, join, realpath +from platform import python_version +from unicodedata import unidata_version + +import charset_normalizer.md as md_module +from charset_normalizer import from_fp +from charset_normalizer.models import CliDetectionResult +from charset_normalizer.version import __version__ + + +def query_yes_no(question: str, default: str = "yes") -> bool: + """Ask a yes/no question via input() and return their answer. + + "question" is a string that is presented to the user. + "default" is the presumed answer if the user just hits . + It must be "yes" (the default), "no" or None (meaning + an answer is required of the user). + + The "answer" return value is True for "yes" or False for "no". + + Credit goes to (c) https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input + """ + valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False} + if default is None: + prompt = " [y/n] " + elif default == "yes": + prompt = " [Y/n] " + elif default == "no": + prompt = " [y/N] " + else: + raise ValueError("invalid default answer: '%s'" % default) + + while True: + sys.stdout.write(question + prompt) + choice = input().lower() + if default is not None and choice == "": + return valid[default] + elif choice in valid: + return valid[choice] + else: + sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n") + + +def cli_detect(argv: list[str] | None = None) -> int: + """ + CLI assistant using ARGV and ArgumentParser + :param argv: + :return: 0 if everything is fine, anything else equal trouble + """ + parser = argparse.ArgumentParser( + description="The Real First Universal Charset Detector. " + "Discover originating encoding used on text file. " + "Normalize text to unicode." + ) + + parser.add_argument( + "files", type=argparse.FileType("rb"), nargs="+", help="File(s) to be analysed" + ) + parser.add_argument( + "-v", + "--verbose", + action="store_true", + default=False, + dest="verbose", + help="Display complementary information about file if any. " + "Stdout will contain logs about the detection process.", + ) + parser.add_argument( + "-a", + "--with-alternative", + action="store_true", + default=False, + dest="alternatives", + help="Output complementary possibilities if any. Top-level JSON WILL be a list.", + ) + parser.add_argument( + "-n", + "--normalize", + action="store_true", + default=False, + dest="normalize", + help="Permit to normalize input file. If not set, program does not write anything.", + ) + parser.add_argument( + "-m", + "--minimal", + action="store_true", + default=False, + dest="minimal", + help="Only output the charset detected to STDOUT. Disabling JSON output.", + ) + parser.add_argument( + "-r", + "--replace", + action="store_true", + default=False, + dest="replace", + help="Replace file when trying to normalize it instead of creating a new one.", + ) + parser.add_argument( + "-f", + "--force", + action="store_true", + default=False, + dest="force", + help="Replace file without asking if you are sure, use this flag with caution.", + ) + parser.add_argument( + "-i", + "--no-preemptive", + action="store_true", + default=False, + dest="no_preemptive", + help="Disable looking at a charset declaration to hint the detector.", + ) + parser.add_argument( + "-t", + "--threshold", + action="store", + default=0.2, + type=float, + dest="threshold", + help="Define a custom maximum amount of noise allowed in decoded content. 0. 
<= noise <= 1.", + ) + parser.add_argument( + "--version", + action="version", + version="Charset-Normalizer {} - Python {} - Unicode {} - SpeedUp {}".format( + __version__, + python_version(), + unidata_version, + "OFF" if md_module.__file__.lower().endswith(".py") else "ON", + ), + help="Show version information and exit.", + ) + + args = parser.parse_args(argv) + + if args.replace is True and args.normalize is False: + if args.files: + for my_file in args.files: + my_file.close() + print("Use --replace in addition of --normalize only.", file=sys.stderr) + return 1 + + if args.force is True and args.replace is False: + if args.files: + for my_file in args.files: + my_file.close() + print("Use --force in addition of --replace only.", file=sys.stderr) + return 1 + + if args.threshold < 0.0 or args.threshold > 1.0: + if args.files: + for my_file in args.files: + my_file.close() + print("--threshold VALUE should be between 0. AND 1.", file=sys.stderr) + return 1 + + x_ = [] + + for my_file in args.files: + matches = from_fp( + my_file, + threshold=args.threshold, + explain=args.verbose, + preemptive_behaviour=args.no_preemptive is False, + ) + + best_guess = matches.best() + + if best_guess is None: + print( + 'Unable to identify originating encoding for "{}". {}'.format( + my_file.name, + ( + "Maybe try increasing maximum amount of chaos." + if args.threshold < 1.0 + else "" + ), + ), + file=sys.stderr, + ) + x_.append( + CliDetectionResult( + abspath(my_file.name), + None, + [], + [], + "Unknown", + [], + False, + 1.0, + 0.0, + None, + True, + ) + ) + else: + x_.append( + CliDetectionResult( + abspath(my_file.name), + best_guess.encoding, + best_guess.encoding_aliases, + [ + cp + for cp in best_guess.could_be_from_charset + if cp != best_guess.encoding + ], + best_guess.language, + best_guess.alphabets, + best_guess.bom, + best_guess.percent_chaos, + best_guess.percent_coherence, + None, + True, + ) + ) + + if len(matches) > 1 and args.alternatives: + for el in matches: + if el != best_guess: + x_.append( + CliDetectionResult( + abspath(my_file.name), + el.encoding, + el.encoding_aliases, + [ + cp + for cp in el.could_be_from_charset + if cp != el.encoding + ], + el.language, + el.alphabets, + el.bom, + el.percent_chaos, + el.percent_coherence, + None, + False, + ) + ) + + if args.normalize is True: + if best_guess.encoding.startswith("utf") is True: + print( + '"{}" file does not need to be normalized, as it already came from unicode.'.format( + my_file.name + ), + file=sys.stderr, + ) + if my_file.closed is False: + my_file.close() + continue + + dir_path = dirname(realpath(my_file.name)) + file_name = basename(realpath(my_file.name)) + + o_: list[str] = file_name.split(".") + + if args.replace is False: + o_.insert(-1, best_guess.encoding) + if my_file.closed is False: + my_file.close() + elif ( + args.force is False + and query_yes_no( + 'Are you sure to normalize "{}" by replacing it ?'.format( + my_file.name + ), + "no", + ) + is False + ): + if my_file.closed is False: + my_file.close() + continue + + try: + x_[0].unicode_path = join(dir_path, ".".join(o_)) + + with open(x_[0].unicode_path, "wb") as fp: + fp.write(best_guess.output()) + except OSError as e: + print(str(e), file=sys.stderr) + if my_file.closed is False: + my_file.close() + return 2 + + if my_file.closed is False: + my_file.close() + + if args.minimal is False: + print( + dumps( + [el.__dict__ for el in x_] if len(x_) > 1 else x_[0].__dict__, + ensure_ascii=True, + indent=4, + ) + ) + else: + for my_file in 
args.files: + print( + ", ".join( + [ + el.encoding or "undefined" + for el in x_ + if el.path == abspath(my_file.name) + ] + ) + ) + + return 0 + + +if __name__ == "__main__": + cli_detect() diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..62dec78 Binary files /dev/null and b/venv/lib/python3.11/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/cli/__pycache__/__main__.cpython-311.pyc b/venv/lib/python3.11/site-packages/charset_normalizer/cli/__pycache__/__main__.cpython-311.pyc new file mode 100644 index 0000000..7639e15 Binary files /dev/null and b/venv/lib/python3.11/site-packages/charset_normalizer/cli/__pycache__/__main__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/constant.py b/venv/lib/python3.11/site-packages/charset_normalizer/constant.py new file mode 100644 index 0000000..1fb9508 --- /dev/null +++ b/venv/lib/python3.11/site-packages/charset_normalizer/constant.py @@ -0,0 +1,1998 @@ +from __future__ import annotations + +from codecs import BOM_UTF8, BOM_UTF16_BE, BOM_UTF16_LE, BOM_UTF32_BE, BOM_UTF32_LE +from encodings.aliases import aliases +from re import IGNORECASE +from re import compile as re_compile + +# Contain for each eligible encoding a list of/item bytes SIG/BOM +ENCODING_MARKS: dict[str, bytes | list[bytes]] = { + "utf_8": BOM_UTF8, + "utf_7": [ + b"\x2b\x2f\x76\x38", + b"\x2b\x2f\x76\x39", + b"\x2b\x2f\x76\x2b", + b"\x2b\x2f\x76\x2f", + b"\x2b\x2f\x76\x38\x2d", + ], + "gb18030": b"\x84\x31\x95\x33", + "utf_32": [BOM_UTF32_BE, BOM_UTF32_LE], + "utf_16": [BOM_UTF16_BE, BOM_UTF16_LE], +} + +TOO_SMALL_SEQUENCE: int = 32 +TOO_BIG_SEQUENCE: int = int(10e6) + +UTF8_MAXIMAL_ALLOCATION: int = 1_112_064 + +# Up-to-date Unicode ucd/15.0.0 +UNICODE_RANGES_COMBINED: dict[str, range] = { + "Control character": range(32), + "Basic Latin": range(32, 128), + "Latin-1 Supplement": range(128, 256), + "Latin Extended-A": range(256, 384), + "Latin Extended-B": range(384, 592), + "IPA Extensions": range(592, 688), + "Spacing Modifier Letters": range(688, 768), + "Combining Diacritical Marks": range(768, 880), + "Greek and Coptic": range(880, 1024), + "Cyrillic": range(1024, 1280), + "Cyrillic Supplement": range(1280, 1328), + "Armenian": range(1328, 1424), + "Hebrew": range(1424, 1536), + "Arabic": range(1536, 1792), + "Syriac": range(1792, 1872), + "Arabic Supplement": range(1872, 1920), + "Thaana": range(1920, 1984), + "NKo": range(1984, 2048), + "Samaritan": range(2048, 2112), + "Mandaic": range(2112, 2144), + "Syriac Supplement": range(2144, 2160), + "Arabic Extended-B": range(2160, 2208), + "Arabic Extended-A": range(2208, 2304), + "Devanagari": range(2304, 2432), + "Bengali": range(2432, 2560), + "Gurmukhi": range(2560, 2688), + "Gujarati": range(2688, 2816), + "Oriya": range(2816, 2944), + "Tamil": range(2944, 3072), + "Telugu": range(3072, 3200), + "Kannada": range(3200, 3328), + "Malayalam": range(3328, 3456), + "Sinhala": range(3456, 3584), + "Thai": range(3584, 3712), + "Lao": range(3712, 3840), + "Tibetan": range(3840, 4096), + "Myanmar": range(4096, 4256), + "Georgian": range(4256, 4352), + "Hangul Jamo": range(4352, 4608), + "Ethiopic": range(4608, 4992), + "Ethiopic Supplement": range(4992, 5024), + "Cherokee": range(5024, 5120), + "Unified 
Canadian Aboriginal Syllabics": range(5120, 5760), + "Ogham": range(5760, 5792), + "Runic": range(5792, 5888), + "Tagalog": range(5888, 5920), + "Hanunoo": range(5920, 5952), + "Buhid": range(5952, 5984), + "Tagbanwa": range(5984, 6016), + "Khmer": range(6016, 6144), + "Mongolian": range(6144, 6320), + "Unified Canadian Aboriginal Syllabics Extended": range(6320, 6400), + "Limbu": range(6400, 6480), + "Tai Le": range(6480, 6528), + "New Tai Lue": range(6528, 6624), + "Khmer Symbols": range(6624, 6656), + "Buginese": range(6656, 6688), + "Tai Tham": range(6688, 6832), + "Combining Diacritical Marks Extended": range(6832, 6912), + "Balinese": range(6912, 7040), + "Sundanese": range(7040, 7104), + "Batak": range(7104, 7168), + "Lepcha": range(7168, 7248), + "Ol Chiki": range(7248, 7296), + "Cyrillic Extended-C": range(7296, 7312), + "Georgian Extended": range(7312, 7360), + "Sundanese Supplement": range(7360, 7376), + "Vedic Extensions": range(7376, 7424), + "Phonetic Extensions": range(7424, 7552), + "Phonetic Extensions Supplement": range(7552, 7616), + "Combining Diacritical Marks Supplement": range(7616, 7680), + "Latin Extended Additional": range(7680, 7936), + "Greek Extended": range(7936, 8192), + "General Punctuation": range(8192, 8304), + "Superscripts and Subscripts": range(8304, 8352), + "Currency Symbols": range(8352, 8400), + "Combining Diacritical Marks for Symbols": range(8400, 8448), + "Letterlike Symbols": range(8448, 8528), + "Number Forms": range(8528, 8592), + "Arrows": range(8592, 8704), + "Mathematical Operators": range(8704, 8960), + "Miscellaneous Technical": range(8960, 9216), + "Control Pictures": range(9216, 9280), + "Optical Character Recognition": range(9280, 9312), + "Enclosed Alphanumerics": range(9312, 9472), + "Box Drawing": range(9472, 9600), + "Block Elements": range(9600, 9632), + "Geometric Shapes": range(9632, 9728), + "Miscellaneous Symbols": range(9728, 9984), + "Dingbats": range(9984, 10176), + "Miscellaneous Mathematical Symbols-A": range(10176, 10224), + "Supplemental Arrows-A": range(10224, 10240), + "Braille Patterns": range(10240, 10496), + "Supplemental Arrows-B": range(10496, 10624), + "Miscellaneous Mathematical Symbols-B": range(10624, 10752), + "Supplemental Mathematical Operators": range(10752, 11008), + "Miscellaneous Symbols and Arrows": range(11008, 11264), + "Glagolitic": range(11264, 11360), + "Latin Extended-C": range(11360, 11392), + "Coptic": range(11392, 11520), + "Georgian Supplement": range(11520, 11568), + "Tifinagh": range(11568, 11648), + "Ethiopic Extended": range(11648, 11744), + "Cyrillic Extended-A": range(11744, 11776), + "Supplemental Punctuation": range(11776, 11904), + "CJK Radicals Supplement": range(11904, 12032), + "Kangxi Radicals": range(12032, 12256), + "Ideographic Description Characters": range(12272, 12288), + "CJK Symbols and Punctuation": range(12288, 12352), + "Hiragana": range(12352, 12448), + "Katakana": range(12448, 12544), + "Bopomofo": range(12544, 12592), + "Hangul Compatibility Jamo": range(12592, 12688), + "Kanbun": range(12688, 12704), + "Bopomofo Extended": range(12704, 12736), + "CJK Strokes": range(12736, 12784), + "Katakana Phonetic Extensions": range(12784, 12800), + "Enclosed CJK Letters and Months": range(12800, 13056), + "CJK Compatibility": range(13056, 13312), + "CJK Unified Ideographs Extension A": range(13312, 19904), + "Yijing Hexagram Symbols": range(19904, 19968), + "CJK Unified Ideographs": range(19968, 40960), + "Yi Syllables": range(40960, 42128), + "Yi Radicals": range(42128, 
42192), + "Lisu": range(42192, 42240), + "Vai": range(42240, 42560), + "Cyrillic Extended-B": range(42560, 42656), + "Bamum": range(42656, 42752), + "Modifier Tone Letters": range(42752, 42784), + "Latin Extended-D": range(42784, 43008), + "Syloti Nagri": range(43008, 43056), + "Common Indic Number Forms": range(43056, 43072), + "Phags-pa": range(43072, 43136), + "Saurashtra": range(43136, 43232), + "Devanagari Extended": range(43232, 43264), + "Kayah Li": range(43264, 43312), + "Rejang": range(43312, 43360), + "Hangul Jamo Extended-A": range(43360, 43392), + "Javanese": range(43392, 43488), + "Myanmar Extended-B": range(43488, 43520), + "Cham": range(43520, 43616), + "Myanmar Extended-A": range(43616, 43648), + "Tai Viet": range(43648, 43744), + "Meetei Mayek Extensions": range(43744, 43776), + "Ethiopic Extended-A": range(43776, 43824), + "Latin Extended-E": range(43824, 43888), + "Cherokee Supplement": range(43888, 43968), + "Meetei Mayek": range(43968, 44032), + "Hangul Syllables": range(44032, 55216), + "Hangul Jamo Extended-B": range(55216, 55296), + "High Surrogates": range(55296, 56192), + "High Private Use Surrogates": range(56192, 56320), + "Low Surrogates": range(56320, 57344), + "Private Use Area": range(57344, 63744), + "CJK Compatibility Ideographs": range(63744, 64256), + "Alphabetic Presentation Forms": range(64256, 64336), + "Arabic Presentation Forms-A": range(64336, 65024), + "Variation Selectors": range(65024, 65040), + "Vertical Forms": range(65040, 65056), + "Combining Half Marks": range(65056, 65072), + "CJK Compatibility Forms": range(65072, 65104), + "Small Form Variants": range(65104, 65136), + "Arabic Presentation Forms-B": range(65136, 65280), + "Halfwidth and Fullwidth Forms": range(65280, 65520), + "Specials": range(65520, 65536), + "Linear B Syllabary": range(65536, 65664), + "Linear B Ideograms": range(65664, 65792), + "Aegean Numbers": range(65792, 65856), + "Ancient Greek Numbers": range(65856, 65936), + "Ancient Symbols": range(65936, 66000), + "Phaistos Disc": range(66000, 66048), + "Lycian": range(66176, 66208), + "Carian": range(66208, 66272), + "Coptic Epact Numbers": range(66272, 66304), + "Old Italic": range(66304, 66352), + "Gothic": range(66352, 66384), + "Old Permic": range(66384, 66432), + "Ugaritic": range(66432, 66464), + "Old Persian": range(66464, 66528), + "Deseret": range(66560, 66640), + "Shavian": range(66640, 66688), + "Osmanya": range(66688, 66736), + "Osage": range(66736, 66816), + "Elbasan": range(66816, 66864), + "Caucasian Albanian": range(66864, 66928), + "Vithkuqi": range(66928, 67008), + "Linear A": range(67072, 67456), + "Latin Extended-F": range(67456, 67520), + "Cypriot Syllabary": range(67584, 67648), + "Imperial Aramaic": range(67648, 67680), + "Palmyrene": range(67680, 67712), + "Nabataean": range(67712, 67760), + "Hatran": range(67808, 67840), + "Phoenician": range(67840, 67872), + "Lydian": range(67872, 67904), + "Meroitic Hieroglyphs": range(67968, 68000), + "Meroitic Cursive": range(68000, 68096), + "Kharoshthi": range(68096, 68192), + "Old South Arabian": range(68192, 68224), + "Old North Arabian": range(68224, 68256), + "Manichaean": range(68288, 68352), + "Avestan": range(68352, 68416), + "Inscriptional Parthian": range(68416, 68448), + "Inscriptional Pahlavi": range(68448, 68480), + "Psalter Pahlavi": range(68480, 68528), + "Old Turkic": range(68608, 68688), + "Old Hungarian": range(68736, 68864), + "Hanifi Rohingya": range(68864, 68928), + "Rumi Numeral Symbols": range(69216, 69248), + "Yezidi": range(69248, 
69312), + "Arabic Extended-C": range(69312, 69376), + "Old Sogdian": range(69376, 69424), + "Sogdian": range(69424, 69488), + "Old Uyghur": range(69488, 69552), + "Chorasmian": range(69552, 69600), + "Elymaic": range(69600, 69632), + "Brahmi": range(69632, 69760), + "Kaithi": range(69760, 69840), + "Sora Sompeng": range(69840, 69888), + "Chakma": range(69888, 69968), + "Mahajani": range(69968, 70016), + "Sharada": range(70016, 70112), + "Sinhala Archaic Numbers": range(70112, 70144), + "Khojki": range(70144, 70224), + "Multani": range(70272, 70320), + "Khudawadi": range(70320, 70400), + "Grantha": range(70400, 70528), + "Newa": range(70656, 70784), + "Tirhuta": range(70784, 70880), + "Siddham": range(71040, 71168), + "Modi": range(71168, 71264), + "Mongolian Supplement": range(71264, 71296), + "Takri": range(71296, 71376), + "Ahom": range(71424, 71504), + "Dogra": range(71680, 71760), + "Warang Citi": range(71840, 71936), + "Dives Akuru": range(71936, 72032), + "Nandinagari": range(72096, 72192), + "Zanabazar Square": range(72192, 72272), + "Soyombo": range(72272, 72368), + "Unified Canadian Aboriginal Syllabics Extended-A": range(72368, 72384), + "Pau Cin Hau": range(72384, 72448), + "Devanagari Extended-A": range(72448, 72544), + "Bhaiksuki": range(72704, 72816), + "Marchen": range(72816, 72896), + "Masaram Gondi": range(72960, 73056), + "Gunjala Gondi": range(73056, 73136), + "Makasar": range(73440, 73472), + "Kawi": range(73472, 73568), + "Lisu Supplement": range(73648, 73664), + "Tamil Supplement": range(73664, 73728), + "Cuneiform": range(73728, 74752), + "Cuneiform Numbers and Punctuation": range(74752, 74880), + "Early Dynastic Cuneiform": range(74880, 75088), + "Cypro-Minoan": range(77712, 77824), + "Egyptian Hieroglyphs": range(77824, 78896), + "Egyptian Hieroglyph Format Controls": range(78896, 78944), + "Anatolian Hieroglyphs": range(82944, 83584), + "Bamum Supplement": range(92160, 92736), + "Mro": range(92736, 92784), + "Tangsa": range(92784, 92880), + "Bassa Vah": range(92880, 92928), + "Pahawh Hmong": range(92928, 93072), + "Medefaidrin": range(93760, 93856), + "Miao": range(93952, 94112), + "Ideographic Symbols and Punctuation": range(94176, 94208), + "Tangut": range(94208, 100352), + "Tangut Components": range(100352, 101120), + "Khitan Small Script": range(101120, 101632), + "Tangut Supplement": range(101632, 101760), + "Kana Extended-B": range(110576, 110592), + "Kana Supplement": range(110592, 110848), + "Kana Extended-A": range(110848, 110896), + "Small Kana Extension": range(110896, 110960), + "Nushu": range(110960, 111360), + "Duployan": range(113664, 113824), + "Shorthand Format Controls": range(113824, 113840), + "Znamenny Musical Notation": range(118528, 118736), + "Byzantine Musical Symbols": range(118784, 119040), + "Musical Symbols": range(119040, 119296), + "Ancient Greek Musical Notation": range(119296, 119376), + "Kaktovik Numerals": range(119488, 119520), + "Mayan Numerals": range(119520, 119552), + "Tai Xuan Jing Symbols": range(119552, 119648), + "Counting Rod Numerals": range(119648, 119680), + "Mathematical Alphanumeric Symbols": range(119808, 120832), + "Sutton SignWriting": range(120832, 121520), + "Latin Extended-G": range(122624, 122880), + "Glagolitic Supplement": range(122880, 122928), + "Cyrillic Extended-D": range(122928, 123024), + "Nyiakeng Puachue Hmong": range(123136, 123216), + "Toto": range(123536, 123584), + "Wancho": range(123584, 123648), + "Nag Mundari": range(124112, 124160), + "Ethiopic Extended-B": range(124896, 124928), + "Mende 
Kikakui": range(124928, 125152), + "Adlam": range(125184, 125280), + "Indic Siyaq Numbers": range(126064, 126144), + "Ottoman Siyaq Numbers": range(126208, 126288), + "Arabic Mathematical Alphabetic Symbols": range(126464, 126720), + "Mahjong Tiles": range(126976, 127024), + "Domino Tiles": range(127024, 127136), + "Playing Cards": range(127136, 127232), + "Enclosed Alphanumeric Supplement": range(127232, 127488), + "Enclosed Ideographic Supplement": range(127488, 127744), + "Miscellaneous Symbols and Pictographs": range(127744, 128512), + "Emoticons range(Emoji)": range(128512, 128592), + "Ornamental Dingbats": range(128592, 128640), + "Transport and Map Symbols": range(128640, 128768), + "Alchemical Symbols": range(128768, 128896), + "Geometric Shapes Extended": range(128896, 129024), + "Supplemental Arrows-C": range(129024, 129280), + "Supplemental Symbols and Pictographs": range(129280, 129536), + "Chess Symbols": range(129536, 129648), + "Symbols and Pictographs Extended-A": range(129648, 129792), + "Symbols for Legacy Computing": range(129792, 130048), + "CJK Unified Ideographs Extension B": range(131072, 173792), + "CJK Unified Ideographs Extension C": range(173824, 177984), + "CJK Unified Ideographs Extension D": range(177984, 178208), + "CJK Unified Ideographs Extension E": range(178208, 183984), + "CJK Unified Ideographs Extension F": range(183984, 191472), + "CJK Compatibility Ideographs Supplement": range(194560, 195104), + "CJK Unified Ideographs Extension G": range(196608, 201552), + "CJK Unified Ideographs Extension H": range(201552, 205744), + "Tags": range(917504, 917632), + "Variation Selectors Supplement": range(917760, 918000), + "Supplementary Private Use Area-A": range(983040, 1048576), + "Supplementary Private Use Area-B": range(1048576, 1114112), +} + + +UNICODE_SECONDARY_RANGE_KEYWORD: list[str] = [ + "Supplement", + "Extended", + "Extensions", + "Modifier", + "Marks", + "Punctuation", + "Symbols", + "Forms", + "Operators", + "Miscellaneous", + "Drawing", + "Block", + "Shapes", + "Supplemental", + "Tags", +] + +RE_POSSIBLE_ENCODING_INDICATION = re_compile( + r"(?:(?:encoding)|(?:charset)|(?:coding))(?:[\:= ]{1,10})(?:[\"\']?)([a-zA-Z0-9\-_]+)(?:[\"\']?)", + IGNORECASE, +) + +IANA_NO_ALIASES = [ + "cp720", + "cp737", + "cp856", + "cp874", + "cp875", + "cp1006", + "koi8_r", + "koi8_t", + "koi8_u", +] + +IANA_SUPPORTED: list[str] = sorted( + filter( + lambda x: x.endswith("_codec") is False + and x not in {"rot_13", "tactis", "mbcs"}, + list(set(aliases.values())) + IANA_NO_ALIASES, + ) +) + +IANA_SUPPORTED_COUNT: int = len(IANA_SUPPORTED) + +# pre-computed code page that are similar using the function cp_similarity. 
+IANA_SUPPORTED_SIMILAR: dict[str, list[str]] = { + "cp037": ["cp1026", "cp1140", "cp273", "cp500"], + "cp1026": ["cp037", "cp1140", "cp273", "cp500"], + "cp1125": ["cp866"], + "cp1140": ["cp037", "cp1026", "cp273", "cp500"], + "cp1250": ["iso8859_2"], + "cp1251": ["kz1048", "ptcp154"], + "cp1252": ["iso8859_15", "iso8859_9", "latin_1"], + "cp1253": ["iso8859_7"], + "cp1254": ["iso8859_15", "iso8859_9", "latin_1"], + "cp1257": ["iso8859_13"], + "cp273": ["cp037", "cp1026", "cp1140", "cp500"], + "cp437": ["cp850", "cp858", "cp860", "cp861", "cp862", "cp863", "cp865"], + "cp500": ["cp037", "cp1026", "cp1140", "cp273"], + "cp850": ["cp437", "cp857", "cp858", "cp865"], + "cp857": ["cp850", "cp858", "cp865"], + "cp858": ["cp437", "cp850", "cp857", "cp865"], + "cp860": ["cp437", "cp861", "cp862", "cp863", "cp865"], + "cp861": ["cp437", "cp860", "cp862", "cp863", "cp865"], + "cp862": ["cp437", "cp860", "cp861", "cp863", "cp865"], + "cp863": ["cp437", "cp860", "cp861", "cp862", "cp865"], + "cp865": ["cp437", "cp850", "cp857", "cp858", "cp860", "cp861", "cp862", "cp863"], + "cp866": ["cp1125"], + "iso8859_10": ["iso8859_14", "iso8859_15", "iso8859_4", "iso8859_9", "latin_1"], + "iso8859_11": ["tis_620"], + "iso8859_13": ["cp1257"], + "iso8859_14": [ + "iso8859_10", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_9", + "latin_1", + ], + "iso8859_15": [ + "cp1252", + "cp1254", + "iso8859_10", + "iso8859_14", + "iso8859_16", + "iso8859_3", + "iso8859_9", + "latin_1", + ], + "iso8859_16": [ + "iso8859_14", + "iso8859_15", + "iso8859_2", + "iso8859_3", + "iso8859_9", + "latin_1", + ], + "iso8859_2": ["cp1250", "iso8859_16", "iso8859_4"], + "iso8859_3": ["iso8859_14", "iso8859_15", "iso8859_16", "iso8859_9", "latin_1"], + "iso8859_4": ["iso8859_10", "iso8859_2", "iso8859_9", "latin_1"], + "iso8859_7": ["cp1253"], + "iso8859_9": [ + "cp1252", + "cp1254", + "cp1258", + "iso8859_10", + "iso8859_14", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_4", + "latin_1", + ], + "kz1048": ["cp1251", "ptcp154"], + "latin_1": [ + "cp1252", + "cp1254", + "cp1258", + "iso8859_10", + "iso8859_14", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_4", + "iso8859_9", + ], + "mac_iceland": ["mac_roman", "mac_turkish"], + "mac_roman": ["mac_iceland", "mac_turkish"], + "mac_turkish": ["mac_iceland", "mac_roman"], + "ptcp154": ["cp1251", "kz1048"], + "tis_620": ["iso8859_11"], +} + + +CHARDET_CORRESPONDENCE: dict[str, str] = { + "iso2022_kr": "ISO-2022-KR", + "iso2022_jp": "ISO-2022-JP", + "euc_kr": "EUC-KR", + "tis_620": "TIS-620", + "utf_32": "UTF-32", + "euc_jp": "EUC-JP", + "koi8_r": "KOI8-R", + "iso8859_1": "ISO-8859-1", + "iso8859_2": "ISO-8859-2", + "iso8859_5": "ISO-8859-5", + "iso8859_6": "ISO-8859-6", + "iso8859_7": "ISO-8859-7", + "iso8859_8": "ISO-8859-8", + "utf_16": "UTF-16", + "cp855": "IBM855", + "mac_cyrillic": "MacCyrillic", + "gb2312": "GB2312", + "gb18030": "GB18030", + "cp932": "CP932", + "cp866": "IBM866", + "utf_8": "utf-8", + "utf_8_sig": "UTF-8-SIG", + "shift_jis": "SHIFT_JIS", + "big5": "Big5", + "cp1250": "windows-1250", + "cp1251": "windows-1251", + "cp1252": "Windows-1252", + "cp1253": "windows-1253", + "cp1255": "windows-1255", + "cp1256": "windows-1256", + "cp1254": "Windows-1254", + "cp949": "CP949", +} + + +COMMON_SAFE_ASCII_CHARACTERS: set[str] = { + "<", + ">", + "=", + ":", + "/", + "&", + ";", + "{", + "}", + "[", + "]", + ",", + "|", + '"', + "-", + "(", + ")", +} + + +KO_NAMES: set[str] = {"johab", "cp949", "euc_kr"} +ZH_NAMES: set[str] = {"big5", "cp950", 
"big5hkscs", "hz"} + +# Logging LEVEL below DEBUG +TRACE: int = 5 + + +# Language label that contain the em dash "—" +# character are to be considered alternative seq to origin +FREQUENCIES: dict[str, list[str]] = { + "English": [ + "e", + "a", + "t", + "i", + "o", + "n", + "s", + "r", + "h", + "l", + "d", + "c", + "u", + "m", + "f", + "p", + "g", + "w", + "y", + "b", + "v", + "k", + "x", + "j", + "z", + "q", + ], + "English—": [ + "e", + "a", + "t", + "i", + "o", + "n", + "s", + "r", + "h", + "l", + "d", + "c", + "m", + "u", + "f", + "p", + "g", + "w", + "b", + "y", + "v", + "k", + "j", + "x", + "z", + "q", + ], + "German": [ + "e", + "n", + "i", + "r", + "s", + "t", + "a", + "d", + "h", + "u", + "l", + "g", + "o", + "c", + "m", + "b", + "f", + "k", + "w", + "z", + "p", + "v", + "ü", + "ä", + "ö", + "j", + ], + "French": [ + "e", + "a", + "s", + "n", + "i", + "t", + "r", + "l", + "u", + "o", + "d", + "c", + "p", + "m", + "é", + "v", + "g", + "f", + "b", + "h", + "q", + "à", + "x", + "è", + "y", + "j", + ], + "Dutch": [ + "e", + "n", + "a", + "i", + "r", + "t", + "o", + "d", + "s", + "l", + "g", + "h", + "v", + "m", + "u", + "k", + "c", + "p", + "b", + "w", + "j", + "z", + "f", + "y", + "x", + "ë", + ], + "Italian": [ + "e", + "i", + "a", + "o", + "n", + "l", + "t", + "r", + "s", + "c", + "d", + "u", + "p", + "m", + "g", + "v", + "f", + "b", + "z", + "h", + "q", + "è", + "à", + "k", + "y", + "ò", + ], + "Polish": [ + "a", + "i", + "o", + "e", + "n", + "r", + "z", + "w", + "s", + "c", + "t", + "k", + "y", + "d", + "p", + "m", + "u", + "l", + "j", + "ł", + "g", + "b", + "h", + "ą", + "ę", + "ó", + ], + "Spanish": [ + "e", + "a", + "o", + "n", + "s", + "r", + "i", + "l", + "d", + "t", + "c", + "u", + "m", + "p", + "b", + "g", + "v", + "f", + "y", + "ó", + "h", + "q", + "í", + "j", + "z", + "á", + ], + "Russian": [ + "о", + "а", + "е", + "и", + "н", + "с", + "т", + "р", + "в", + "л", + "к", + "м", + "д", + "п", + "у", + "г", + "я", + "ы", + "з", + "б", + "й", + "ь", + "ч", + "х", + "ж", + "ц", + ], + # Jap-Kanji + "Japanese": [ + "人", + "一", + "大", + "亅", + "丁", + "丨", + "竹", + "笑", + "口", + "日", + "今", + "二", + "彳", + "行", + "十", + "土", + "丶", + "寸", + "寺", + "時", + "乙", + "丿", + "乂", + "气", + "気", + "冂", + "巾", + "亠", + "市", + "目", + "儿", + "見", + "八", + "小", + "凵", + "県", + "月", + "彐", + "門", + "間", + "木", + "東", + "山", + "出", + "本", + "中", + "刀", + "分", + "耳", + "又", + "取", + "最", + "言", + "田", + "心", + "思", + "刂", + "前", + "京", + "尹", + "事", + "生", + "厶", + "云", + "会", + "未", + "来", + "白", + "冫", + "楽", + "灬", + "馬", + "尸", + "尺", + "駅", + "明", + "耂", + "者", + "了", + "阝", + "都", + "高", + "卜", + "占", + "厂", + "广", + "店", + "子", + "申", + "奄", + "亻", + "俺", + "上", + "方", + "冖", + "学", + "衣", + "艮", + "食", + "自", + ], + # Jap-Katakana + "Japanese—": [ + "ー", + "ン", + "ス", + "・", + "ル", + "ト", + "リ", + "イ", + "ア", + "ラ", + "ッ", + "ク", + "ド", + "シ", + "レ", + "ジ", + "タ", + "フ", + "ロ", + "カ", + "テ", + "マ", + "ィ", + "グ", + "バ", + "ム", + "プ", + "オ", + "コ", + "デ", + "ニ", + "ウ", + "メ", + "サ", + "ビ", + "ナ", + "ブ", + "ャ", + "エ", + "ュ", + "チ", + "キ", + "ズ", + "ダ", + "パ", + "ミ", + "ェ", + "ョ", + "ハ", + "セ", + "ベ", + "ガ", + "モ", + "ツ", + "ネ", + "ボ", + "ソ", + "ノ", + "ァ", + "ヴ", + "ワ", + "ポ", + "ペ", + "ピ", + "ケ", + "ゴ", + "ギ", + "ザ", + "ホ", + "ゲ", + "ォ", + "ヤ", + "ヒ", + "ユ", + "ヨ", + "ヘ", + "ゼ", + "ヌ", + "ゥ", + "ゾ", + "ヶ", + "ヂ", + "ヲ", + "ヅ", + "ヵ", + "ヱ", + "ヰ", + "ヮ", + "ヽ", + "゠", + "ヾ", + "ヷ", + "ヿ", + "ヸ", + "ヹ", + "ヺ", + ], + # Jap-Hiragana + "Japanese——": [ + "の", + "に", + "る", + "た", + "と", + "は", + 
"し", + "い", + "を", + "で", + "て", + "が", + "な", + "れ", + "か", + "ら", + "さ", + "っ", + "り", + "す", + "あ", + "も", + "こ", + "ま", + "う", + "く", + "よ", + "き", + "ん", + "め", + "お", + "け", + "そ", + "つ", + "だ", + "や", + "え", + "ど", + "わ", + "ち", + "み", + "せ", + "じ", + "ば", + "へ", + "び", + "ず", + "ろ", + "ほ", + "げ", + "む", + "べ", + "ひ", + "ょ", + "ゆ", + "ぶ", + "ご", + "ゃ", + "ね", + "ふ", + "ぐ", + "ぎ", + "ぼ", + "ゅ", + "づ", + "ざ", + "ぞ", + "ぬ", + "ぜ", + "ぱ", + "ぽ", + "ぷ", + "ぴ", + "ぃ", + "ぁ", + "ぇ", + "ぺ", + "ゞ", + "ぢ", + "ぉ", + "ぅ", + "ゐ", + "ゝ", + "ゑ", + "゛", + "゜", + "ゎ", + "ゔ", + "゚", + "ゟ", + "゙", + "ゕ", + "ゖ", + ], + "Portuguese": [ + "a", + "e", + "o", + "s", + "i", + "r", + "d", + "n", + "t", + "m", + "u", + "c", + "l", + "p", + "g", + "v", + "b", + "f", + "h", + "ã", + "q", + "é", + "ç", + "á", + "z", + "í", + ], + "Swedish": [ + "e", + "a", + "n", + "r", + "t", + "s", + "i", + "l", + "d", + "o", + "m", + "k", + "g", + "v", + "h", + "f", + "u", + "p", + "ä", + "c", + "b", + "ö", + "å", + "y", + "j", + "x", + ], + "Chinese": [ + "的", + "一", + "是", + "不", + "了", + "在", + "人", + "有", + "我", + "他", + "这", + "个", + "们", + "中", + "来", + "上", + "大", + "为", + "和", + "国", + "地", + "到", + "以", + "说", + "时", + "要", + "就", + "出", + "会", + "可", + "也", + "你", + "对", + "生", + "能", + "而", + "子", + "那", + "得", + "于", + "着", + "下", + "自", + "之", + "年", + "过", + "发", + "后", + "作", + "里", + "用", + "道", + "行", + "所", + "然", + "家", + "种", + "事", + "成", + "方", + "多", + "经", + "么", + "去", + "法", + "学", + "如", + "都", + "同", + "现", + "当", + "没", + "动", + "面", + "起", + "看", + "定", + "天", + "分", + "还", + "进", + "好", + "小", + "部", + "其", + "些", + "主", + "样", + "理", + "心", + "她", + "本", + "前", + "开", + "但", + "因", + "只", + "从", + "想", + "实", + ], + "Ukrainian": [ + "о", + "а", + "н", + "і", + "и", + "р", + "в", + "т", + "е", + "с", + "к", + "л", + "у", + "д", + "м", + "п", + "з", + "я", + "ь", + "б", + "г", + "й", + "ч", + "х", + "ц", + "ї", + ], + "Norwegian": [ + "e", + "r", + "n", + "t", + "a", + "s", + "i", + "o", + "l", + "d", + "g", + "k", + "m", + "v", + "f", + "p", + "u", + "b", + "h", + "å", + "y", + "j", + "ø", + "c", + "æ", + "w", + ], + "Finnish": [ + "a", + "i", + "n", + "t", + "e", + "s", + "l", + "o", + "u", + "k", + "ä", + "m", + "r", + "v", + "j", + "h", + "p", + "y", + "d", + "ö", + "g", + "c", + "b", + "f", + "w", + "z", + ], + "Vietnamese": [ + "n", + "h", + "t", + "i", + "c", + "g", + "a", + "o", + "u", + "m", + "l", + "r", + "à", + "đ", + "s", + "e", + "v", + "p", + "b", + "y", + "ư", + "d", + "á", + "k", + "ộ", + "ế", + ], + "Czech": [ + "o", + "e", + "a", + "n", + "t", + "s", + "i", + "l", + "v", + "r", + "k", + "d", + "u", + "m", + "p", + "í", + "c", + "h", + "z", + "á", + "y", + "j", + "b", + "ě", + "é", + "ř", + ], + "Hungarian": [ + "e", + "a", + "t", + "l", + "s", + "n", + "k", + "r", + "i", + "o", + "z", + "á", + "é", + "g", + "m", + "b", + "y", + "v", + "d", + "h", + "u", + "p", + "j", + "ö", + "f", + "c", + ], + "Korean": [ + "이", + "다", + "에", + "의", + "는", + "로", + "하", + "을", + "가", + "고", + "지", + "서", + "한", + "은", + "기", + "으", + "년", + "대", + "사", + "시", + "를", + "리", + "도", + "인", + "스", + "일", + ], + "Indonesian": [ + "a", + "n", + "e", + "i", + "r", + "t", + "u", + "s", + "d", + "k", + "m", + "l", + "g", + "p", + "b", + "o", + "h", + "y", + "j", + "c", + "w", + "f", + "v", + "z", + "x", + "q", + ], + "Turkish": [ + "a", + "e", + "i", + "n", + "r", + "l", + "ı", + "k", + "d", + "t", + "s", + "m", + "y", + "u", + "o", + "b", + "ü", + "ş", + "v", + "g", + "z", + "h", + "c", + "p", + "ç", 
+ "ğ", + ], + "Romanian": [ + "e", + "i", + "a", + "r", + "n", + "t", + "u", + "l", + "o", + "c", + "s", + "d", + "p", + "m", + "ă", + "f", + "v", + "î", + "g", + "b", + "ș", + "ț", + "z", + "h", + "â", + "j", + ], + "Farsi": [ + "ا", + "ی", + "ر", + "د", + "ن", + "ه", + "و", + "م", + "ت", + "ب", + "س", + "ل", + "ک", + "ش", + "ز", + "ف", + "گ", + "ع", + "خ", + "ق", + "ج", + "آ", + "پ", + "ح", + "ط", + "ص", + ], + "Arabic": [ + "ا", + "ل", + "ي", + "م", + "و", + "ن", + "ر", + "ت", + "ب", + "ة", + "ع", + "د", + "س", + "ف", + "ه", + "ك", + "ق", + "أ", + "ح", + "ج", + "ش", + "ط", + "ص", + "ى", + "خ", + "إ", + ], + "Danish": [ + "e", + "r", + "n", + "t", + "a", + "i", + "s", + "d", + "l", + "o", + "g", + "m", + "k", + "f", + "v", + "u", + "b", + "h", + "p", + "å", + "y", + "ø", + "æ", + "c", + "j", + "w", + ], + "Serbian": [ + "а", + "и", + "о", + "е", + "н", + "р", + "с", + "у", + "т", + "к", + "ј", + "в", + "д", + "м", + "п", + "л", + "г", + "з", + "б", + "a", + "i", + "e", + "o", + "n", + "ц", + "ш", + ], + "Lithuanian": [ + "i", + "a", + "s", + "o", + "r", + "e", + "t", + "n", + "u", + "k", + "m", + "l", + "p", + "v", + "d", + "j", + "g", + "ė", + "b", + "y", + "ų", + "š", + "ž", + "c", + "ą", + "į", + ], + "Slovene": [ + "e", + "a", + "i", + "o", + "n", + "r", + "s", + "l", + "t", + "j", + "v", + "k", + "d", + "p", + "m", + "u", + "z", + "b", + "g", + "h", + "č", + "c", + "š", + "ž", + "f", + "y", + ], + "Slovak": [ + "o", + "a", + "e", + "n", + "i", + "r", + "v", + "t", + "s", + "l", + "k", + "d", + "m", + "p", + "u", + "c", + "h", + "j", + "b", + "z", + "á", + "y", + "ý", + "í", + "č", + "é", + ], + "Hebrew": [ + "י", + "ו", + "ה", + "ל", + "ר", + "ב", + "ת", + "מ", + "א", + "ש", + "נ", + "ע", + "ם", + "ד", + "ק", + "ח", + "פ", + "ס", + "כ", + "ג", + "ט", + "צ", + "ן", + "ז", + "ך", + ], + "Bulgarian": [ + "а", + "и", + "о", + "е", + "н", + "т", + "р", + "с", + "в", + "л", + "к", + "д", + "п", + "м", + "з", + "г", + "я", + "ъ", + "у", + "б", + "ч", + "ц", + "й", + "ж", + "щ", + "х", + ], + "Croatian": [ + "a", + "i", + "o", + "e", + "n", + "r", + "j", + "s", + "t", + "u", + "k", + "l", + "v", + "d", + "m", + "p", + "g", + "z", + "b", + "c", + "č", + "h", + "š", + "ž", + "ć", + "f", + ], + "Hindi": [ + "क", + "र", + "स", + "न", + "त", + "म", + "ह", + "प", + "य", + "ल", + "व", + "ज", + "द", + "ग", + "ब", + "श", + "ट", + "अ", + "ए", + "थ", + "भ", + "ड", + "च", + "ध", + "ष", + "इ", + ], + "Estonian": [ + "a", + "i", + "e", + "s", + "t", + "l", + "u", + "n", + "o", + "k", + "r", + "d", + "m", + "v", + "g", + "p", + "j", + "h", + "ä", + "b", + "õ", + "ü", + "f", + "c", + "ö", + "y", + ], + "Thai": [ + "า", + "น", + "ร", + "อ", + "ก", + "เ", + "ง", + "ม", + "ย", + "ล", + "ว", + "ด", + "ท", + "ส", + "ต", + "ะ", + "ป", + "บ", + "ค", + "ห", + "แ", + "จ", + "พ", + "ช", + "ข", + "ใ", + ], + "Greek": [ + "α", + "τ", + "ο", + "ι", + "ε", + "ν", + "ρ", + "σ", + "κ", + "η", + "π", + "ς", + "υ", + "μ", + "λ", + "ί", + "ό", + "ά", + "γ", + "έ", + "δ", + "ή", + "ω", + "χ", + "θ", + "ύ", + ], + "Tamil": [ + "க", + "த", + "ப", + "ட", + "ர", + "ம", + "ல", + "ன", + "வ", + "ற", + "ய", + "ள", + "ச", + "ந", + "இ", + "ண", + "அ", + "ஆ", + "ழ", + "ங", + "எ", + "உ", + "ஒ", + "ஸ", + ], + "Kazakh": [ + "а", + "ы", + "е", + "н", + "т", + "р", + "л", + "і", + "д", + "с", + "м", + "қ", + "к", + "о", + "б", + "и", + "у", + "ғ", + "ж", + "ң", + "з", + "ш", + "й", + "п", + "г", + "ө", + ], +} + +LANGUAGE_SUPPORTED_COUNT: int = len(FREQUENCIES) diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/legacy.py 
b/venv/lib/python3.11/site-packages/charset_normalizer/legacy.py new file mode 100644 index 0000000..a2f5345 --- /dev/null +++ b/venv/lib/python3.11/site-packages/charset_normalizer/legacy.py @@ -0,0 +1,66 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any +from warnings import warn + +from .api import from_bytes +from .constant import CHARDET_CORRESPONDENCE + +# TODO: remove this check when dropping Python 3.7 support +if TYPE_CHECKING: + from typing_extensions import TypedDict + + class ResultDict(TypedDict): + encoding: str | None + language: str + confidence: float | None + + +def detect( + byte_str: bytes, should_rename_legacy: bool = False, **kwargs: Any +) -> ResultDict: + """ + chardet legacy method + Detect the encoding of the given byte string. It should be mostly backward-compatible. + Encoding names will match chardet's own spelling whenever possible (not for encoding names it does not support). + This function is deprecated and should only be used to migrate your project easily; consult the documentation for + further information. Not planned for removal. + + :param byte_str: The byte sequence to examine. + :param should_rename_legacy: Should we rename legacy encodings + to their more modern equivalents? + """ + if len(kwargs): + warn( + f"charset-normalizer disregards arguments '{','.join(list(kwargs.keys()))}' in legacy function detect()" + ) + + if not isinstance(byte_str, (bytearray, bytes)): + raise TypeError( # pragma: nocover + "Expected object of type bytes or bytearray, got: " "{}".format( + type(byte_str) + ) + ) + + if isinstance(byte_str, bytearray): + byte_str = bytes(byte_str) + + r = from_bytes(byte_str).best() + + encoding = r.encoding if r is not None else None + language = r.language if r is not None and r.language != "Unknown" else "" + confidence = 1.0 - r.chaos if r is not None else None + + # Note: CharsetNormalizer does not return 'UTF-8-SIG' as the sig gets stripped in the detection/normalization process + # but chardet does return 'utf-8-sig' and it is a valid codec name.
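The `r.bom` check that follows implements the note above. An illustrative sketch of the resulting chardet-style output, assuming the vendored package is importable (the language/confidence values depend on the detector):

```python
# Illustrative sketch, not part of the vendored diff.
from charset_normalizer import detect

result = detect("héllo".encode("utf-8-sig"))
# The BOM makes from_bytes() report utf_8 with r.bom set; the shim appends
# "_sig" and CHARDET_CORRESPONDENCE renames it to chardet's spelling.
print(result["encoding"])  # expected: "UTF-8-SIG"
```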
+ if r is not None and encoding == "utf_8" and r.bom: + encoding += "_sig" + + if should_rename_legacy is False and encoding in CHARDET_CORRESPONDENCE: + encoding = CHARDET_CORRESPONDENCE[encoding] + + return { + "encoding": encoding, + "language": language, + "confidence": confidence, + } diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/md.cpython-311-x86_64-linux-gnu.so b/venv/lib/python3.11/site-packages/charset_normalizer/md.cpython-311-x86_64-linux-gnu.so new file mode 100644 index 0000000..3824a42 Binary files /dev/null and b/venv/lib/python3.11/site-packages/charset_normalizer/md.cpython-311-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/md.py b/venv/lib/python3.11/site-packages/charset_normalizer/md.py new file mode 100644 index 0000000..9ed59a8 --- /dev/null +++ b/venv/lib/python3.11/site-packages/charset_normalizer/md.py @@ -0,0 +1,630 @@ +from __future__ import annotations + +from functools import lru_cache +from logging import getLogger + +from .constant import ( + COMMON_SAFE_ASCII_CHARACTERS, + TRACE, + UNICODE_SECONDARY_RANGE_KEYWORD, +) +from .utils import ( + is_accentuated, + is_arabic, + is_arabic_isolated_form, + is_case_variable, + is_cjk, + is_emoticon, + is_hangul, + is_hiragana, + is_katakana, + is_latin, + is_punctuation, + is_separator, + is_symbol, + is_thai, + is_unprintable, + remove_accent, + unicode_range, +) + + +class MessDetectorPlugin: + """ + Base abstract class used for mess detection plugins. + All detectors MUST extend and implement given methods. + """ + + def eligible(self, character: str) -> bool: + """ + Determine if given character should be fed in. + """ + raise NotImplementedError # pragma: nocover + + def feed(self, character: str) -> None: + """ + The main routine to be executed upon character. + Insert the logic in which the text would be considered chaotic. + """ + raise NotImplementedError # pragma: nocover + + def reset(self) -> None: # pragma: no cover + """ + Reset the plugin to its initial state. + """ + raise NotImplementedError + + @property + def ratio(self) -> float: + """ + Compute the chaos ratio based on what your feed() has seen. + Must NOT be lower than 0.; there is no upper restriction.
+ """ + raise NotImplementedError # pragma: nocover + + +class TooManySymbolOrPunctuationPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._punctuation_count: int = 0 + self._symbol_count: int = 0 + self._character_count: int = 0 + + self._last_printable_char: str | None = None + self._frenzy_symbol_in_word: bool = False + + def eligible(self, character: str) -> bool: + return character.isprintable() + + def feed(self, character: str) -> None: + self._character_count += 1 + + if ( + character != self._last_printable_char + and character not in COMMON_SAFE_ASCII_CHARACTERS + ): + if is_punctuation(character): + self._punctuation_count += 1 + elif ( + character.isdigit() is False + and is_symbol(character) + and is_emoticon(character) is False + ): + self._symbol_count += 2 + + self._last_printable_char = character + + def reset(self) -> None: # Abstract + self._punctuation_count = 0 + self._character_count = 0 + self._symbol_count = 0 + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + ratio_of_punctuation: float = ( + self._punctuation_count + self._symbol_count + ) / self._character_count + + return ratio_of_punctuation if ratio_of_punctuation >= 0.3 else 0.0 + + +class TooManyAccentuatedPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._character_count: int = 0 + self._accentuated_count: int = 0 + + def eligible(self, character: str) -> bool: + return character.isalpha() + + def feed(self, character: str) -> None: + self._character_count += 1 + + if is_accentuated(character): + self._accentuated_count += 1 + + def reset(self) -> None: # Abstract + self._character_count = 0 + self._accentuated_count = 0 + + @property + def ratio(self) -> float: + if self._character_count < 8: + return 0.0 + + ratio_of_accentuation: float = self._accentuated_count / self._character_count + return ratio_of_accentuation if ratio_of_accentuation >= 0.35 else 0.0 + + +class UnprintablePlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._unprintable_count: int = 0 + self._character_count: int = 0 + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + if is_unprintable(character): + self._unprintable_count += 1 + self._character_count += 1 + + def reset(self) -> None: # Abstract + self._unprintable_count = 0 + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + return (self._unprintable_count * 8) / self._character_count + + +class SuspiciousDuplicateAccentPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._successive_count: int = 0 + self._character_count: int = 0 + + self._last_latin_character: str | None = None + + def eligible(self, character: str) -> bool: + return character.isalpha() and is_latin(character) + + def feed(self, character: str) -> None: + self._character_count += 1 + if ( + self._last_latin_character is not None + and is_accentuated(character) + and is_accentuated(self._last_latin_character) + ): + if character.isupper() and self._last_latin_character.isupper(): + self._successive_count += 1 + # Worse if its the same char duplicated with different accent. 
+ if remove_accent(character) == remove_accent(self._last_latin_character): + self._successive_count += 1 + self._last_latin_character = character + + def reset(self) -> None: # Abstract + self._successive_count = 0 + self._character_count = 0 + self._last_latin_character = None + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + return (self._successive_count * 2) / self._character_count + + +class SuspiciousRange(MessDetectorPlugin): + def __init__(self) -> None: + self._suspicious_successive_range_count: int = 0 + self._character_count: int = 0 + self._last_printable_seen: str | None = None + + def eligible(self, character: str) -> bool: + return character.isprintable() + + def feed(self, character: str) -> None: + self._character_count += 1 + + if ( + character.isspace() + or is_punctuation(character) + or character in COMMON_SAFE_ASCII_CHARACTERS + ): + self._last_printable_seen = None + return + + if self._last_printable_seen is None: + self._last_printable_seen = character + return + + unicode_range_a: str | None = unicode_range(self._last_printable_seen) + unicode_range_b: str | None = unicode_range(character) + + if is_suspiciously_successive_range(unicode_range_a, unicode_range_b): + self._suspicious_successive_range_count += 1 + + self._last_printable_seen = character + + def reset(self) -> None: # Abstract + self._character_count = 0 + self._suspicious_successive_range_count = 0 + self._last_printable_seen = None + + @property + def ratio(self) -> float: + if self._character_count <= 13: + return 0.0 + + ratio_of_suspicious_range_usage: float = ( + self._suspicious_successive_range_count * 2 + ) / self._character_count + + return ratio_of_suspicious_range_usage + + +class SuperWeirdWordPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._word_count: int = 0 + self._bad_word_count: int = 0 + self._foreign_long_count: int = 0 + + self._is_current_word_bad: bool = False + self._foreign_long_watch: bool = False + + self._character_count: int = 0 + self._bad_character_count: int = 0 + + self._buffer: str = "" + self._buffer_accent_count: int = 0 + self._buffer_glyph_count: int = 0 + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + if character.isalpha(): + self._buffer += character + if is_accentuated(character): + self._buffer_accent_count += 1 + if ( + self._foreign_long_watch is False + and (is_latin(character) is False or is_accentuated(character)) + and is_cjk(character) is False + and is_hangul(character) is False + and is_katakana(character) is False + and is_hiragana(character) is False + and is_thai(character) is False + ): + self._foreign_long_watch = True + if ( + is_cjk(character) + or is_hangul(character) + or is_katakana(character) + or is_hiragana(character) + or is_thai(character) + ): + self._buffer_glyph_count += 1 + return + if not self._buffer: + return + if ( + character.isspace() or is_punctuation(character) or is_separator(character) + ) and self._buffer: + self._word_count += 1 + buffer_length: int = len(self._buffer) + + self._character_count += buffer_length + + if buffer_length >= 4: + if self._buffer_accent_count / buffer_length >= 0.5: + self._is_current_word_bad = True + # Word/Buffer ending with an upper case accentuated letter are so rare, + # that we will consider them all as suspicious. Same weight as foreign_long suspicious. 
+ elif ( + is_accentuated(self._buffer[-1]) + and self._buffer[-1].isupper() + and all(_.isupper() for _ in self._buffer) is False + ): + self._foreign_long_count += 1 + self._is_current_word_bad = True + elif self._buffer_glyph_count == 1: + self._is_current_word_bad = True + self._foreign_long_count += 1 + if buffer_length >= 24 and self._foreign_long_watch: + camel_case_dst = [ + i + for c, i in zip(self._buffer, range(0, buffer_length)) + if c.isupper() + ] + probable_camel_cased: bool = False + + if camel_case_dst and (len(camel_case_dst) / buffer_length <= 0.3): + probable_camel_cased = True + + if not probable_camel_cased: + self._foreign_long_count += 1 + self._is_current_word_bad = True + + if self._is_current_word_bad: + self._bad_word_count += 1 + self._bad_character_count += len(self._buffer) + self._is_current_word_bad = False + + self._foreign_long_watch = False + self._buffer = "" + self._buffer_accent_count = 0 + self._buffer_glyph_count = 0 + elif ( + character not in {"<", ">", "-", "=", "~", "|", "_"} + and character.isdigit() is False + and is_symbol(character) + ): + self._is_current_word_bad = True + self._buffer += character + + def reset(self) -> None: # Abstract + self._buffer = "" + self._is_current_word_bad = False + self._foreign_long_watch = False + self._bad_word_count = 0 + self._word_count = 0 + self._character_count = 0 + self._bad_character_count = 0 + self._foreign_long_count = 0 + + @property + def ratio(self) -> float: + if self._word_count <= 10 and self._foreign_long_count == 0: + return 0.0 + + return self._bad_character_count / self._character_count + + +class CjkInvalidStopPlugin(MessDetectorPlugin): + """ + GB(Chinese) based encoding often render the stop incorrectly when the content does not fit and + can be easily detected. Searching for the overuse of '丅' and '丄'. 
+ """ + + def __init__(self) -> None: + self._wrong_stop_count: int = 0 + self._cjk_character_count: int = 0 + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + if character in {"丅", "丄"}: + self._wrong_stop_count += 1 + return + if is_cjk(character): + self._cjk_character_count += 1 + + def reset(self) -> None: # Abstract + self._wrong_stop_count = 0 + self._cjk_character_count = 0 + + @property + def ratio(self) -> float: + if self._cjk_character_count < 16: + return 0.0 + return self._wrong_stop_count / self._cjk_character_count + + +class ArchaicUpperLowerPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._buf: bool = False + + self._character_count_since_last_sep: int = 0 + + self._successive_upper_lower_count: int = 0 + self._successive_upper_lower_count_final: int = 0 + + self._character_count: int = 0 + + self._last_alpha_seen: str | None = None + self._current_ascii_only: bool = True + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + is_concerned = character.isalpha() and is_case_variable(character) + chunk_sep = is_concerned is False + + if chunk_sep and self._character_count_since_last_sep > 0: + if ( + self._character_count_since_last_sep <= 64 + and character.isdigit() is False + and self._current_ascii_only is False + ): + self._successive_upper_lower_count_final += ( + self._successive_upper_lower_count + ) + + self._successive_upper_lower_count = 0 + self._character_count_since_last_sep = 0 + self._last_alpha_seen = None + self._buf = False + self._character_count += 1 + self._current_ascii_only = True + + return + + if self._current_ascii_only is True and character.isascii() is False: + self._current_ascii_only = False + + if self._last_alpha_seen is not None: + if (character.isupper() and self._last_alpha_seen.islower()) or ( + character.islower() and self._last_alpha_seen.isupper() + ): + if self._buf is True: + self._successive_upper_lower_count += 2 + self._buf = False + else: + self._buf = True + else: + self._buf = False + + self._character_count += 1 + self._character_count_since_last_sep += 1 + self._last_alpha_seen = character + + def reset(self) -> None: # Abstract + self._character_count = 0 + self._character_count_since_last_sep = 0 + self._successive_upper_lower_count = 0 + self._successive_upper_lower_count_final = 0 + self._last_alpha_seen = None + self._buf = False + self._current_ascii_only = True + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + return self._successive_upper_lower_count_final / self._character_count + + +class ArabicIsolatedFormPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._character_count: int = 0 + self._isolated_form_count: int = 0 + + def reset(self) -> None: # Abstract + self._character_count = 0 + self._isolated_form_count = 0 + + def eligible(self, character: str) -> bool: + return is_arabic(character) + + def feed(self, character: str) -> None: + self._character_count += 1 + + if is_arabic_isolated_form(character): + self._isolated_form_count += 1 + + @property + def ratio(self) -> float: + if self._character_count < 8: + return 0.0 + + isolated_form_usage: float = self._isolated_form_count / self._character_count + + return isolated_form_usage + + +@lru_cache(maxsize=1024) +def is_suspiciously_successive_range( + unicode_range_a: str | None, unicode_range_b: str | None +) -> bool: + """ + Determine if two Unicode range seen next to each other 
can be considered as suspicious. + """ + if unicode_range_a is None or unicode_range_b is None: + return True + + if unicode_range_a == unicode_range_b: + return False + + if "Latin" in unicode_range_a and "Latin" in unicode_range_b: + return False + + if "Emoticons" in unicode_range_a or "Emoticons" in unicode_range_b: + return False + + # Latin characters can be accompanied with a combining diacritical mark + # eg. Vietnamese. + if ("Latin" in unicode_range_a or "Latin" in unicode_range_b) and ( + "Combining" in unicode_range_a or "Combining" in unicode_range_b + ): + return False + + keywords_range_a, keywords_range_b = ( + unicode_range_a.split(" "), + unicode_range_b.split(" "), + ) + + for el in keywords_range_a: + if el in UNICODE_SECONDARY_RANGE_KEYWORD: + continue + if el in keywords_range_b: + return False + + # Japanese Exception + range_a_jp_chars, range_b_jp_chars = ( + unicode_range_a + in ( + "Hiragana", + "Katakana", + ), + unicode_range_b in ("Hiragana", "Katakana"), + ) + if (range_a_jp_chars or range_b_jp_chars) and ( + "CJK" in unicode_range_a or "CJK" in unicode_range_b + ): + return False + if range_a_jp_chars and range_b_jp_chars: + return False + + if "Hangul" in unicode_range_a or "Hangul" in unicode_range_b: + if "CJK" in unicode_range_a or "CJK" in unicode_range_b: + return False + if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin": + return False + + # Chinese/Japanese use dedicated range for punctuation and/or separators. + if ("CJK" in unicode_range_a or "CJK" in unicode_range_b) or ( + unicode_range_a in ["Katakana", "Hiragana"] + and unicode_range_b in ["Katakana", "Hiragana"] + ): + if "Punctuation" in unicode_range_a or "Punctuation" in unicode_range_b: + return False + if "Forms" in unicode_range_a or "Forms" in unicode_range_b: + return False + if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin": + return False + + return True + + +@lru_cache(maxsize=2048) +def mess_ratio( + decoded_sequence: str, maximum_threshold: float = 0.2, debug: bool = False +) -> float: + """ + Compute a mess ratio given a decoded bytes sequence. The maximum threshold does stop the computation earlier. + """ + + detectors: list[MessDetectorPlugin] = [ + md_class() for md_class in MessDetectorPlugin.__subclasses__() + ] + + length: int = len(decoded_sequence) + 1 + + mean_mess_ratio: float = 0.0 + + if length < 512: + intermediary_mean_mess_ratio_calc: int = 32 + elif length <= 1024: + intermediary_mean_mess_ratio_calc = 64 + else: + intermediary_mean_mess_ratio_calc = 128 + + for character, index in zip(decoded_sequence + "\n", range(length)): + for detector in detectors: + if detector.eligible(character): + detector.feed(character) + + if ( + index > 0 and index % intermediary_mean_mess_ratio_calc == 0 + ) or index == length - 1: + mean_mess_ratio = sum(dt.ratio for dt in detectors) + + if mean_mess_ratio >= maximum_threshold: + break + + if debug: + logger = getLogger("charset_normalizer") + + logger.log( + TRACE, + "Mess-detector extended-analysis start. 
" + f"intermediary_mean_mess_ratio_calc={intermediary_mean_mess_ratio_calc} mean_mess_ratio={mean_mess_ratio} " + f"maximum_threshold={maximum_threshold}", + ) + + if len(decoded_sequence) > 16: + logger.log(TRACE, f"Starting with: {decoded_sequence[:16]}") + logger.log(TRACE, f"Ending with: {decoded_sequence[-16::]}") + + for dt in detectors: + logger.log(TRACE, f"{dt.__class__}: {dt.ratio}") + + return round(mean_mess_ratio, 3) diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/md__mypyc.cpython-311-x86_64-linux-gnu.so b/venv/lib/python3.11/site-packages/charset_normalizer/md__mypyc.cpython-311-x86_64-linux-gnu.so new file mode 100644 index 0000000..864b9aa Binary files /dev/null and b/venv/lib/python3.11/site-packages/charset_normalizer/md__mypyc.cpython-311-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/models.py b/venv/lib/python3.11/site-packages/charset_normalizer/models.py new file mode 100644 index 0000000..1042758 --- /dev/null +++ b/venv/lib/python3.11/site-packages/charset_normalizer/models.py @@ -0,0 +1,360 @@ +from __future__ import annotations + +from encodings.aliases import aliases +from hashlib import sha256 +from json import dumps +from re import sub +from typing import Any, Iterator, List, Tuple + +from .constant import RE_POSSIBLE_ENCODING_INDICATION, TOO_BIG_SEQUENCE +from .utils import iana_name, is_multi_byte_encoding, unicode_range + + +class CharsetMatch: + def __init__( + self, + payload: bytes, + guessed_encoding: str, + mean_mess_ratio: float, + has_sig_or_bom: bool, + languages: CoherenceMatches, + decoded_payload: str | None = None, + preemptive_declaration: str | None = None, + ): + self._payload: bytes = payload + + self._encoding: str = guessed_encoding + self._mean_mess_ratio: float = mean_mess_ratio + self._languages: CoherenceMatches = languages + self._has_sig_or_bom: bool = has_sig_or_bom + self._unicode_ranges: list[str] | None = None + + self._leaves: list[CharsetMatch] = [] + self._mean_coherence_ratio: float = 0.0 + + self._output_payload: bytes | None = None + self._output_encoding: str | None = None + + self._string: str | None = decoded_payload + + self._preemptive_declaration: str | None = preemptive_declaration + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CharsetMatch): + if isinstance(other, str): + return iana_name(other) == self.encoding + return False + return self.encoding == other.encoding and self.fingerprint == other.fingerprint + + def __lt__(self, other: object) -> bool: + """ + Implemented to make sorted available upon CharsetMatches items. + """ + if not isinstance(other, CharsetMatch): + raise ValueError + + chaos_difference: float = abs(self.chaos - other.chaos) + coherence_difference: float = abs(self.coherence - other.coherence) + + # Below 1% difference --> Use Coherence + if chaos_difference < 0.01 and coherence_difference > 0.02: + return self.coherence > other.coherence + elif chaos_difference < 0.01 and coherence_difference <= 0.02: + # When having a difficult decision, use the result that decoded as many multi-byte as possible. + # preserve RAM usage! 
+ if len(self._payload) >= TOO_BIG_SEQUENCE: + return self.chaos < other.chaos + return self.multi_byte_usage > other.multi_byte_usage + + return self.chaos < other.chaos + + @property + def multi_byte_usage(self) -> float: + return 1.0 - (len(str(self)) / len(self.raw)) + + def __str__(self) -> str: + # Lazy Str Loading + if self._string is None: + self._string = str(self._payload, self._encoding, "strict") + return self._string + + def __repr__(self) -> str: + return f"<CharsetMatch '{self.encoding}' bytes({self.fingerprint})>" + + def add_submatch(self, other: CharsetMatch) -> None: + if not isinstance(other, CharsetMatch) or other == self: + raise ValueError( + "Unable to add instance <{}> as a submatch of a CharsetMatch".format( + other.__class__ + ) + ) + + other._string = None # Unload RAM usage; dirty trick. + self._leaves.append(other) + + @property + def encoding(self) -> str: + return self._encoding + + @property + def encoding_aliases(self) -> list[str]: + """ + Encodings are known by many names; using this could help when searching for IBM855 when it's listed as CP855. + """ + also_known_as: list[str] = [] + for u, p in aliases.items(): + if self.encoding == u: + also_known_as.append(p) + elif self.encoding == p: + also_known_as.append(u) + return also_known_as + + @property + def bom(self) -> bool: + return self._has_sig_or_bom + + @property + def byte_order_mark(self) -> bool: + return self._has_sig_or_bom + + @property + def languages(self) -> list[str]: + """ + Return the complete list of possible languages found in decoded sequence. + Usually not really useful. Returned list may be empty even if 'language' property returns something != 'Unknown'. + """ + return [e[0] for e in self._languages] + + @property + def language(self) -> str: + """ + Most probable language found in decoded sequence. If none were detected or inferred, the property will return + "Unknown". + """ + if not self._languages: + # Trying to infer the language based on the given encoding + # It's either English or we should not pronounce ourselves in certain cases. + if "ascii" in self.could_be_from_charset: + return "English" + + # done here to avoid a circular import + from charset_normalizer.cd import encoding_languages, mb_encoding_languages + + languages = ( + mb_encoding_languages(self.encoding) + if is_multi_byte_encoding(self.encoding) + else encoding_languages(self.encoding) + ) + + if len(languages) == 0 or "Latin Based" in languages: + return "Unknown" + + return languages[0] + + return self._languages[0][0] + + @property + def chaos(self) -> float: + return self._mean_mess_ratio + + @property + def coherence(self) -> float: + if not self._languages: + return 0.0 + return self._languages[0][1] + + @property + def percent_chaos(self) -> float: + return round(self.chaos * 100, ndigits=3) + + @property + def percent_coherence(self) -> float: + return round(self.coherence * 100, ndigits=3) + + @property + def raw(self) -> bytes: + """ + Original untouched bytes.
+ """ + return self._payload + + @property + def submatch(self) -> list[CharsetMatch]: + return self._leaves + + @property + def has_submatch(self) -> bool: + return len(self._leaves) > 0 + + @property + def alphabets(self) -> list[str]: + if self._unicode_ranges is not None: + return self._unicode_ranges + # list detected ranges + detected_ranges: list[str | None] = [unicode_range(char) for char in str(self)] + # filter and sort + self._unicode_ranges = sorted(list({r for r in detected_ranges if r})) + return self._unicode_ranges + + @property + def could_be_from_charset(self) -> list[str]: + """ + The complete list of encoding that output the exact SAME str result and therefore could be the originating + encoding. + This list does include the encoding available in property 'encoding'. + """ + return [self._encoding] + [m.encoding for m in self._leaves] + + def output(self, encoding: str = "utf_8") -> bytes: + """ + Method to get re-encoded bytes payload using given target encoding. Default to UTF-8. + Any errors will be simply ignored by the encoder NOT replaced. + """ + if self._output_encoding is None or self._output_encoding != encoding: + self._output_encoding = encoding + decoded_string = str(self) + if ( + self._preemptive_declaration is not None + and self._preemptive_declaration.lower() + not in ["utf-8", "utf8", "utf_8"] + ): + patched_header = sub( + RE_POSSIBLE_ENCODING_INDICATION, + lambda m: m.string[m.span()[0] : m.span()[1]].replace( + m.groups()[0], + iana_name(self._output_encoding).replace("_", "-"), # type: ignore[arg-type] + ), + decoded_string[:8192], + count=1, + ) + + decoded_string = patched_header + decoded_string[8192:] + + self._output_payload = decoded_string.encode(encoding, "replace") + + return self._output_payload # type: ignore + + @property + def fingerprint(self) -> str: + """ + Retrieve the unique SHA256 computed using the transformed (re-encoded) payload. Not the original one. + """ + return sha256(self.output()).hexdigest() + + +class CharsetMatches: + """ + Container with every CharsetMatch items ordered by default from most probable to the less one. + Act like a list(iterable) but does not implements all related methods. + """ + + def __init__(self, results: list[CharsetMatch] | None = None): + self._results: list[CharsetMatch] = sorted(results) if results else [] + + def __iter__(self) -> Iterator[CharsetMatch]: + yield from self._results + + def __getitem__(self, item: int | str) -> CharsetMatch: + """ + Retrieve a single item either by its position or encoding name (alias may be used here). + Raise KeyError upon invalid index or encoding not present in results. + """ + if isinstance(item, int): + return self._results[item] + if isinstance(item, str): + item = iana_name(item, False) + for result in self._results: + if item in result.could_be_from_charset: + return result + raise KeyError + + def __len__(self) -> int: + return len(self._results) + + def __bool__(self) -> bool: + return len(self._results) > 0 + + def append(self, item: CharsetMatch) -> None: + """ + Insert a single match. Will be inserted accordingly to preserve sort. + Can be inserted as a submatch. 
+ """ + if not isinstance(item, CharsetMatch): + raise ValueError( + "Cannot append instance '{}' to CharsetMatches".format( + str(item.__class__) + ) + ) + # We should disable the submatch factoring when the input file is too heavy (conserve RAM usage) + if len(item.raw) < TOO_BIG_SEQUENCE: + for match in self._results: + if match.fingerprint == item.fingerprint and match.chaos == item.chaos: + match.add_submatch(item) + return + self._results.append(item) + self._results = sorted(self._results) + + def best(self) -> CharsetMatch | None: + """ + Simply return the first match. Strict equivalent to matches[0]. + """ + if not self._results: + return None + return self._results[0] + + def first(self) -> CharsetMatch | None: + """ + Redundant method, call the method best(). Kept for BC reasons. + """ + return self.best() + + +CoherenceMatch = Tuple[str, float] +CoherenceMatches = List[CoherenceMatch] + + +class CliDetectionResult: + def __init__( + self, + path: str, + encoding: str | None, + encoding_aliases: list[str], + alternative_encodings: list[str], + language: str, + alphabets: list[str], + has_sig_or_bom: bool, + chaos: float, + coherence: float, + unicode_path: str | None, + is_preferred: bool, + ): + self.path: str = path + self.unicode_path: str | None = unicode_path + self.encoding: str | None = encoding + self.encoding_aliases: list[str] = encoding_aliases + self.alternative_encodings: list[str] = alternative_encodings + self.language: str = language + self.alphabets: list[str] = alphabets + self.has_sig_or_bom: bool = has_sig_or_bom + self.chaos: float = chaos + self.coherence: float = coherence + self.is_preferred: bool = is_preferred + + @property + def __dict__(self) -> dict[str, Any]: # type: ignore + return { + "path": self.path, + "encoding": self.encoding, + "encoding_aliases": self.encoding_aliases, + "alternative_encodings": self.alternative_encodings, + "language": self.language, + "alphabets": self.alphabets, + "has_sig_or_bom": self.has_sig_or_bom, + "chaos": self.chaos, + "coherence": self.coherence, + "unicode_path": self.unicode_path, + "is_preferred": self.is_preferred, + } + + def to_json(self) -> str: + return dumps(self.__dict__, ensure_ascii=True, indent=4) diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/py.typed b/venv/lib/python3.11/site-packages/charset_normalizer/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/utils.py b/venv/lib/python3.11/site-packages/charset_normalizer/utils.py new file mode 100644 index 0000000..0175e0a --- /dev/null +++ b/venv/lib/python3.11/site-packages/charset_normalizer/utils.py @@ -0,0 +1,408 @@ +from __future__ import annotations + +import importlib +import logging +import unicodedata +from codecs import IncrementalDecoder +from encodings.aliases import aliases +from functools import lru_cache +from re import findall +from typing import Generator + +from _multibytecodec import ( # type: ignore[import-not-found,import] + MultibyteIncrementalDecoder, +) + +from .constant import ( + ENCODING_MARKS, + IANA_SUPPORTED_SIMILAR, + RE_POSSIBLE_ENCODING_INDICATION, + UNICODE_RANGES_COMBINED, + UNICODE_SECONDARY_RANGE_KEYWORD, + UTF8_MAXIMAL_ALLOCATION, +) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_accentuated(character: str) -> bool: + try: + description: str = unicodedata.name(character) + except ValueError: # Defensive: unicode database outdated? 
+ return False + return ( + "WITH GRAVE" in description + or "WITH ACUTE" in description + or "WITH CEDILLA" in description + or "WITH DIAERESIS" in description + or "WITH CIRCUMFLEX" in description + or "WITH TILDE" in description + or "WITH MACRON" in description + or "WITH RING ABOVE" in description + ) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def remove_accent(character: str) -> str: + decomposed: str = unicodedata.decomposition(character) + if not decomposed: + return character + + codes: list[str] = decomposed.split(" ") + + return chr(int(codes[0], 16)) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def unicode_range(character: str) -> str | None: + """ + Retrieve the Unicode range official name from a single character. + """ + character_ord: int = ord(character) + + for range_name, ord_range in UNICODE_RANGES_COMBINED.items(): + if character_ord in ord_range: + return range_name + + return None + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_latin(character: str) -> bool: + try: + description: str = unicodedata.name(character) + except ValueError: # Defensive: unicode database outdated? + return False + return "LATIN" in description + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_punctuation(character: str) -> bool: + character_category: str = unicodedata.category(character) + + if "P" in character_category: + return True + + character_range: str | None = unicode_range(character) + + if character_range is None: + return False + + return "Punctuation" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_symbol(character: str) -> bool: + character_category: str = unicodedata.category(character) + + if "S" in character_category or "N" in character_category: + return True + + character_range: str | None = unicode_range(character) + + if character_range is None: + return False + + return "Forms" in character_range and character_category != "Lo" + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_emoticon(character: str) -> bool: + character_range: str | None = unicode_range(character) + + if character_range is None: + return False + + return "Emoticons" in character_range or "Pictographs" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_separator(character: str) -> bool: + if character.isspace() or character in {"|", "+", "<", ">"}: + return True + + character_category: str = unicodedata.category(character) + + return "Z" in character_category or character_category in {"Po", "Pd", "Pc"} + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_case_variable(character: str) -> bool: + return character.islower() != character.isupper() + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_cjk(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: # Defensive: unicode database outdated? + return False + + return "CJK" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_hiragana(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: # Defensive: unicode database outdated? + return False + + return "HIRAGANA" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_katakana(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: # Defensive: unicode database outdated? 
+ return False + + return "KATAKANA" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_hangul(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: # Defensive: unicode database outdated? + return False + + return "HANGUL" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_thai(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: # Defensive: unicode database outdated? + return False + + return "THAI" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_arabic(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: # Defensive: unicode database outdated? + return False + + return "ARABIC" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_arabic_isolated_form(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: # Defensive: unicode database outdated? + return False + + return "ARABIC" in character_name and "ISOLATED FORM" in character_name + + +@lru_cache(maxsize=len(UNICODE_RANGES_COMBINED)) +def is_unicode_range_secondary(range_name: str) -> bool: + return any(keyword in range_name for keyword in UNICODE_SECONDARY_RANGE_KEYWORD) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_unprintable(character: str) -> bool: + return ( + character.isspace() is False # includes \n \t \r \v + and character.isprintable() is False + and character != "\x1a" # Why? Its the ASCII substitute character. + and character != "\ufeff" # bug discovered in Python, + # Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space. + ) + + +def any_specified_encoding(sequence: bytes, search_zone: int = 8192) -> str | None: + """ + Extract using ASCII-only decoder any specified encoding in the first n-bytes. + """ + if not isinstance(sequence, bytes): + raise TypeError + + seq_len: int = len(sequence) + + results: list[str] = findall( + RE_POSSIBLE_ENCODING_INDICATION, + sequence[: min(seq_len, search_zone)].decode("ascii", errors="ignore"), + ) + + if len(results) == 0: + return None + + for specified_encoding in results: + specified_encoding = specified_encoding.lower().replace("-", "_") + + encoding_alias: str + encoding_iana: str + + for encoding_alias, encoding_iana in aliases.items(): + if encoding_alias == specified_encoding: + return encoding_iana + if encoding_iana == specified_encoding: + return encoding_iana + + return None + + +@lru_cache(maxsize=128) +def is_multi_byte_encoding(name: str) -> bool: + """ + Verify is a specific encoding is a multi byte one based on it IANA name + """ + return name in { + "utf_8", + "utf_8_sig", + "utf_16", + "utf_16_be", + "utf_16_le", + "utf_32", + "utf_32_le", + "utf_32_be", + "utf_7", + } or issubclass( + importlib.import_module(f"encodings.{name}").IncrementalDecoder, + MultibyteIncrementalDecoder, + ) + + +def identify_sig_or_bom(sequence: bytes) -> tuple[str | None, bytes]: + """ + Identify and extract SIG/BOM in given sequence. 
+ """ + + for iana_encoding in ENCODING_MARKS: + marks: bytes | list[bytes] = ENCODING_MARKS[iana_encoding] + + if isinstance(marks, bytes): + marks = [marks] + + for mark in marks: + if sequence.startswith(mark): + return iana_encoding, mark + + return None, b"" + + +def should_strip_sig_or_bom(iana_encoding: str) -> bool: + return iana_encoding not in {"utf_16", "utf_32"} + + +def iana_name(cp_name: str, strict: bool = True) -> str: + """Returns the Python normalized encoding name (Not the IANA official name).""" + cp_name = cp_name.lower().replace("-", "_") + + encoding_alias: str + encoding_iana: str + + for encoding_alias, encoding_iana in aliases.items(): + if cp_name in [encoding_alias, encoding_iana]: + return encoding_iana + + if strict: + raise ValueError(f"Unable to retrieve IANA for '{cp_name}'") + + return cp_name + + +def cp_similarity(iana_name_a: str, iana_name_b: str) -> float: + if is_multi_byte_encoding(iana_name_a) or is_multi_byte_encoding(iana_name_b): + return 0.0 + + decoder_a = importlib.import_module(f"encodings.{iana_name_a}").IncrementalDecoder + decoder_b = importlib.import_module(f"encodings.{iana_name_b}").IncrementalDecoder + + id_a: IncrementalDecoder = decoder_a(errors="ignore") + id_b: IncrementalDecoder = decoder_b(errors="ignore") + + character_match_count: int = 0 + + for i in range(255): + to_be_decoded: bytes = bytes([i]) + if id_a.decode(to_be_decoded) == id_b.decode(to_be_decoded): + character_match_count += 1 + + return character_match_count / 254 + + +def is_cp_similar(iana_name_a: str, iana_name_b: str) -> bool: + """ + Determine if two code page are at least 80% similar. IANA_SUPPORTED_SIMILAR dict was generated using + the function cp_similarity. + """ + return ( + iana_name_a in IANA_SUPPORTED_SIMILAR + and iana_name_b in IANA_SUPPORTED_SIMILAR[iana_name_a] + ) + + +def set_logging_handler( + name: str = "charset_normalizer", + level: int = logging.INFO, + format_string: str = "%(asctime)s | %(levelname)s | %(message)s", +) -> None: + logger = logging.getLogger(name) + logger.setLevel(level) + + handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter(format_string)) + logger.addHandler(handler) + + +def cut_sequence_chunks( + sequences: bytes, + encoding_iana: str, + offsets: range, + chunk_size: int, + bom_or_sig_available: bool, + strip_sig_or_bom: bool, + sig_payload: bytes, + is_multi_byte_decoder: bool, + decoded_payload: str | None = None, +) -> Generator[str, None, None]: + if decoded_payload and is_multi_byte_decoder is False: + for i in offsets: + chunk = decoded_payload[i : i + chunk_size] + if not chunk: + break + yield chunk + else: + for i in offsets: + chunk_end = i + chunk_size + if chunk_end > len(sequences) + 8: + continue + + cut_sequence = sequences[i : i + chunk_size] + + if bom_or_sig_available and strip_sig_or_bom is False: + cut_sequence = sig_payload + cut_sequence + + chunk = cut_sequence.decode( + encoding_iana, + errors="ignore" if is_multi_byte_decoder else "strict", + ) + + # multi-byte bad cutting detector and adjustment + # not the cleanest way to perform that fix but clever enough for now. 
+ if is_multi_byte_decoder and i > 0: + chunk_partial_size_chk: int = min(chunk_size, 16) + + if ( + decoded_payload + and chunk[:chunk_partial_size_chk] not in decoded_payload + ): + for j in range(i, i - 4, -1): + cut_sequence = sequences[j:chunk_end] + + if bom_or_sig_available and strip_sig_or_bom is False: + cut_sequence = sig_payload + cut_sequence + + chunk = cut_sequence.decode(encoding_iana, errors="ignore") + + if chunk[:chunk_partial_size_chk] in decoded_payload: + break + + yield chunk diff --git a/venv/lib/python3.11/site-packages/charset_normalizer/version.py b/venv/lib/python3.11/site-packages/charset_normalizer/version.py new file mode 100644 index 0000000..f85e892 --- /dev/null +++ b/venv/lib/python3.11/site-packages/charset_normalizer/version.py @@ -0,0 +1,8 @@ +""" +Expose version +""" + +from __future__ import annotations + +__version__ = "3.4.1" +VERSION = __version__.split(".") diff --git a/venv/lib/python3.11/site-packages/click-8.1.8.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/click-8.1.8.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.11/site-packages/click-8.1.8.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.11/site-packages/click-8.1.8.dist-info/LICENSE.txt b/venv/lib/python3.11/site-packages/click-8.1.8.dist-info/LICENSE.txt new file mode 100644 index 0000000..d12a849 --- /dev/null +++ b/venv/lib/python3.11/site-packages/click-8.1.8.dist-info/LICENSE.txt @@ -0,0 +1,28 @@ +Copyright 2014 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/venv/lib/python3.11/site-packages/click-8.1.8.dist-info/METADATA b/venv/lib/python3.11/site-packages/click-8.1.8.dist-info/METADATA new file mode 100644 index 0000000..366d1a7 --- /dev/null +++ b/venv/lib/python3.11/site-packages/click-8.1.8.dist-info/METADATA @@ -0,0 +1,74 @@ +Metadata-Version: 2.3 +Name: click +Version: 8.1.8 +Summary: Composable command line interface toolkit +Maintainer-email: Pallets +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Typing :: Typed +Requires-Dist: colorama; platform_system == 'Windows' +Requires-Dist: importlib-metadata; python_version < '3.8' +Project-URL: Changes, https://click.palletsprojects.com/changes/ +Project-URL: Chat, https://discord.gg/pallets +Project-URL: Documentation, https://click.palletsprojects.com/ +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Source, https://github.com/pallets/click/ + +# $ click_ + +Click is a Python package for creating beautiful command line interfaces +in a composable way with as little code as necessary. It's the "Command +Line Interface Creation Kit". It's highly configurable but comes with +sensible defaults out of the box. + +It aims to make the process of writing command line tools quick and fun +while also preventing any frustration caused by the inability to +implement an intended CLI API. + +Click in three points: + +- Arbitrary nesting of commands +- Automatic help page generation +- Supports lazy loading of subcommands at runtime + + +## A Simple Example + +```python +import click + +@click.command() +@click.option("--count", default=1, help="Number of greetings.") +@click.option("--name", prompt="Your name", help="The person to greet.") +def hello(count, name): + """Simple program that greets NAME for a total of COUNT times.""" + for _ in range(count): + click.echo(f"Hello, {name}!") + +if __name__ == '__main__': + hello() +``` + +``` +$ python hello.py --count=3 +Your name: Click +Hello, Click! +Hello, Click! +Hello, Click! +``` + + +## Donate + +The Pallets organization develops and supports Click and other popular +packages. In order to grow the community of contributors and users, and +allow the maintainers to devote more time to the projects, [please +donate today][]. 
+ +[please donate today]: https://palletsprojects.com/donate + diff --git a/venv/lib/python3.11/site-packages/click-8.1.8.dist-info/RECORD b/venv/lib/python3.11/site-packages/click-8.1.8.dist-info/RECORD new file mode 100644 index 0000000..a8eaba9 --- /dev/null +++ b/venv/lib/python3.11/site-packages/click-8.1.8.dist-info/RECORD @@ -0,0 +1,38 @@ +click-8.1.8.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +click-8.1.8.dist-info/LICENSE.txt,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475 +click-8.1.8.dist-info/METADATA,sha256=WJtQ6uGS2ybLfvUE4vC0XIhIBr4yFGwjrMBR2fiCQ-Q,2263 +click-8.1.8.dist-info/RECORD,, +click-8.1.8.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82 +click/__init__.py,sha256=j1DJeCbga4ribkv5uyvIAzI0oFN13fW9mevDKShFelo,3188 +click/__pycache__/__init__.cpython-311.pyc,, +click/__pycache__/_compat.cpython-311.pyc,, +click/__pycache__/_termui_impl.cpython-311.pyc,, +click/__pycache__/_textwrap.cpython-311.pyc,, +click/__pycache__/_winconsole.cpython-311.pyc,, +click/__pycache__/core.cpython-311.pyc,, +click/__pycache__/decorators.cpython-311.pyc,, +click/__pycache__/exceptions.cpython-311.pyc,, +click/__pycache__/formatting.cpython-311.pyc,, +click/__pycache__/globals.cpython-311.pyc,, +click/__pycache__/parser.cpython-311.pyc,, +click/__pycache__/shell_completion.cpython-311.pyc,, +click/__pycache__/termui.cpython-311.pyc,, +click/__pycache__/testing.cpython-311.pyc,, +click/__pycache__/types.cpython-311.pyc,, +click/__pycache__/utils.cpython-311.pyc,, +click/_compat.py,sha256=IGKh_J5QdfKELitnRfTGHneejWxoCw_NX9tfMbdcg3w,18730 +click/_termui_impl.py,sha256=a5z7I9gOFeMmu7Gb6_RPyQ8GPuVP1EeblixcWSPSQPk,24783 +click/_textwrap.py,sha256=10fQ64OcBUMuK7mFvh8363_uoOxPlRItZBmKzRJDgoY,1353 +click/_winconsole.py,sha256=5ju3jQkcZD0W27WEMGqmEP4y_crUVzPCqsX_FYb7BO0,7860 +click/core.py,sha256=Q1nEVdctZwvIPOlt4vfHko0TYnHCeE40UEEul8Wpyvs,114748 +click/decorators.py,sha256=7t6F-QWowtLh6F_6l-4YV4Y4yNTcqFQEu9i37zIz68s,18925 +click/exceptions.py,sha256=V7zDT6emqJ8iNl0kF1P5kpFmLMWQ1T1L7aNNKM4YR0w,9600 +click/formatting.py,sha256=Frf0-5W33-loyY_i9qrwXR8-STnW3m5gvyxLVUdyxyk,9706 +click/globals.py,sha256=cuJ6Bbo073lgEEmhjr394PeM-QFmXM-Ci-wmfsd7H5g,1954 +click/parser.py,sha256=h4sndcpF5OHrZQN8vD8IWb5OByvW7ABbhRToxovrqS8,19067 +click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +click/shell_completion.py,sha256=TR0dXEGcvWb9Eo3aaQEXGhnvNS3FF4H4QcuLnvAvYo4,18636 +click/termui.py,sha256=dLxiS70UOvIYBda_nEEZaPAFOVDVmRs1sEPMuLDowQo,28310 +click/testing.py,sha256=3RA8anCf7TZ8-5RAF5it2Te-aWXBAL5VLasQnMiC2ZQ,16282 +click/types.py,sha256=BD5Qqq4h-8kawBmOIzJlmq4xzThAf4wCvaOLZSBDNx0,36422 +click/utils.py,sha256=ce-IrO9ilII76LGkU354pOdHbepM8UftfNH7SfMU_28,20330 diff --git a/venv/lib/python3.11/site-packages/click-8.1.8.dist-info/WHEEL b/venv/lib/python3.11/site-packages/click-8.1.8.dist-info/WHEEL new file mode 100644 index 0000000..e3c6fee --- /dev/null +++ b/venv/lib/python3.11/site-packages/click-8.1.8.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.10.1 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.11/site-packages/click/__init__.py b/venv/lib/python3.11/site-packages/click/__init__.py new file mode 100644 index 0000000..2610d0e --- /dev/null +++ b/venv/lib/python3.11/site-packages/click/__init__.py @@ -0,0 +1,75 @@ +""" +Click is a simple Python module inspired by the stdlib optparse to make +writing command line scripts fun. 
Unlike other modules, it's based +around a simple API that does not come with too much magic and is +composable. +""" + +from .core import Argument as Argument +from .core import BaseCommand as BaseCommand +from .core import Command as Command +from .core import CommandCollection as CommandCollection +from .core import Context as Context +from .core import Group as Group +from .core import MultiCommand as MultiCommand +from .core import Option as Option +from .core import Parameter as Parameter +from .decorators import argument as argument +from .decorators import command as command +from .decorators import confirmation_option as confirmation_option +from .decorators import group as group +from .decorators import help_option as help_option +from .decorators import HelpOption as HelpOption +from .decorators import make_pass_decorator as make_pass_decorator +from .decorators import option as option +from .decorators import pass_context as pass_context +from .decorators import pass_obj as pass_obj +from .decorators import password_option as password_option +from .decorators import version_option as version_option +from .exceptions import Abort as Abort +from .exceptions import BadArgumentUsage as BadArgumentUsage +from .exceptions import BadOptionUsage as BadOptionUsage +from .exceptions import BadParameter as BadParameter +from .exceptions import ClickException as ClickException +from .exceptions import FileError as FileError +from .exceptions import MissingParameter as MissingParameter +from .exceptions import NoSuchOption as NoSuchOption +from .exceptions import UsageError as UsageError +from .formatting import HelpFormatter as HelpFormatter +from .formatting import wrap_text as wrap_text +from .globals import get_current_context as get_current_context +from .parser import OptionParser as OptionParser +from .termui import clear as clear +from .termui import confirm as confirm +from .termui import echo_via_pager as echo_via_pager +from .termui import edit as edit +from .termui import getchar as getchar +from .termui import launch as launch +from .termui import pause as pause +from .termui import progressbar as progressbar +from .termui import prompt as prompt +from .termui import secho as secho +from .termui import style as style +from .termui import unstyle as unstyle +from .types import BOOL as BOOL +from .types import Choice as Choice +from .types import DateTime as DateTime +from .types import File as File +from .types import FLOAT as FLOAT +from .types import FloatRange as FloatRange +from .types import INT as INT +from .types import IntRange as IntRange +from .types import ParamType as ParamType +from .types import Path as Path +from .types import STRING as STRING +from .types import Tuple as Tuple +from .types import UNPROCESSED as UNPROCESSED +from .types import UUID as UUID +from .utils import echo as echo +from .utils import format_filename as format_filename +from .utils import get_app_dir as get_app_dir +from .utils import get_binary_stream as get_binary_stream +from .utils import get_text_stream as get_text_stream +from .utils import open_file as open_file + +__version__ = "8.1.8" diff --git a/venv/lib/python3.11/site-packages/click/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/click/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..a21e0af Binary files /dev/null and b/venv/lib/python3.11/site-packages/click/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/venv/lib/python3.11/site-packages/click/__pycache__/_compat.cpython-311.pyc b/venv/lib/python3.11/site-packages/click/__pycache__/_compat.cpython-311.pyc new file mode 100644 index 0000000..2a46188 Binary files /dev/null and b/venv/lib/python3.11/site-packages/click/__pycache__/_compat.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/click/__pycache__/_termui_impl.cpython-311.pyc b/venv/lib/python3.11/site-packages/click/__pycache__/_termui_impl.cpython-311.pyc new file mode 100644 index 0000000..07b8733 Binary files /dev/null and b/venv/lib/python3.11/site-packages/click/__pycache__/_termui_impl.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/click/__pycache__/_textwrap.cpython-311.pyc b/venv/lib/python3.11/site-packages/click/__pycache__/_textwrap.cpython-311.pyc new file mode 100644 index 0000000..bc05c4f Binary files /dev/null and b/venv/lib/python3.11/site-packages/click/__pycache__/_textwrap.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/click/__pycache__/_winconsole.cpython-311.pyc b/venv/lib/python3.11/site-packages/click/__pycache__/_winconsole.cpython-311.pyc new file mode 100644 index 0000000..76dd3d7 Binary files /dev/null and b/venv/lib/python3.11/site-packages/click/__pycache__/_winconsole.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/click/__pycache__/core.cpython-311.pyc b/venv/lib/python3.11/site-packages/click/__pycache__/core.cpython-311.pyc new file mode 100644 index 0000000..935e303 Binary files /dev/null and b/venv/lib/python3.11/site-packages/click/__pycache__/core.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/click/__pycache__/decorators.cpython-311.pyc b/venv/lib/python3.11/site-packages/click/__pycache__/decorators.cpython-311.pyc new file mode 100644 index 0000000..893a62b Binary files /dev/null and b/venv/lib/python3.11/site-packages/click/__pycache__/decorators.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/click/__pycache__/exceptions.cpython-311.pyc b/venv/lib/python3.11/site-packages/click/__pycache__/exceptions.cpython-311.pyc new file mode 100644 index 0000000..8868be5 Binary files /dev/null and b/venv/lib/python3.11/site-packages/click/__pycache__/exceptions.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/click/__pycache__/formatting.cpython-311.pyc b/venv/lib/python3.11/site-packages/click/__pycache__/formatting.cpython-311.pyc new file mode 100644 index 0000000..6f3577e Binary files /dev/null and b/venv/lib/python3.11/site-packages/click/__pycache__/formatting.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/click/__pycache__/globals.cpython-311.pyc b/venv/lib/python3.11/site-packages/click/__pycache__/globals.cpython-311.pyc new file mode 100644 index 0000000..f65ca2d Binary files /dev/null and b/venv/lib/python3.11/site-packages/click/__pycache__/globals.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/click/__pycache__/parser.cpython-311.pyc b/venv/lib/python3.11/site-packages/click/__pycache__/parser.cpython-311.pyc new file mode 100644 index 0000000..4dae062 Binary files /dev/null and b/venv/lib/python3.11/site-packages/click/__pycache__/parser.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/click/__pycache__/shell_completion.cpython-311.pyc b/venv/lib/python3.11/site-packages/click/__pycache__/shell_completion.cpython-311.pyc new file mode 100644 index 0000000..726c509 Binary files /dev/null and 
b/venv/lib/python3.11/site-packages/click/__pycache__/shell_completion.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/click/__pycache__/termui.cpython-311.pyc b/venv/lib/python3.11/site-packages/click/__pycache__/termui.cpython-311.pyc new file mode 100644 index 0000000..25e5445 Binary files /dev/null and b/venv/lib/python3.11/site-packages/click/__pycache__/termui.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/click/__pycache__/testing.cpython-311.pyc b/venv/lib/python3.11/site-packages/click/__pycache__/testing.cpython-311.pyc new file mode 100644 index 0000000..e579a31 Binary files /dev/null and b/venv/lib/python3.11/site-packages/click/__pycache__/testing.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/click/__pycache__/types.cpython-311.pyc b/venv/lib/python3.11/site-packages/click/__pycache__/types.cpython-311.pyc new file mode 100644 index 0000000..c444209 Binary files /dev/null and b/venv/lib/python3.11/site-packages/click/__pycache__/types.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/click/__pycache__/utils.cpython-311.pyc b/venv/lib/python3.11/site-packages/click/__pycache__/utils.cpython-311.pyc new file mode 100644 index 0000000..093708d Binary files /dev/null and b/venv/lib/python3.11/site-packages/click/__pycache__/utils.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/click/_compat.py b/venv/lib/python3.11/site-packages/click/_compat.py new file mode 100644 index 0000000..9153d15 --- /dev/null +++ b/venv/lib/python3.11/site-packages/click/_compat.py @@ -0,0 +1,623 @@ +import codecs +import io +import os +import re +import sys +import typing as t +from weakref import WeakKeyDictionary + +CYGWIN = sys.platform.startswith("cygwin") +WIN = sys.platform.startswith("win") +auto_wrap_for_ansi: t.Optional[t.Callable[[t.TextIO], t.TextIO]] = None +_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]") + + +def _make_text_stream( + stream: t.BinaryIO, + encoding: t.Optional[str], + errors: t.Optional[str], + force_readable: bool = False, + force_writable: bool = False, +) -> t.TextIO: + if encoding is None: + encoding = get_best_encoding(stream) + if errors is None: + errors = "replace" + return _NonClosingTextIOWrapper( + stream, + encoding, + errors, + line_buffering=True, + force_readable=force_readable, + force_writable=force_writable, + ) + + +def is_ascii_encoding(encoding: str) -> bool: + """Checks if a given encoding is ascii.""" + try: + return codecs.lookup(encoding).name == "ascii" + except LookupError: + return False + + +def get_best_encoding(stream: t.IO[t.Any]) -> str: + """Returns the default stream encoding if not found.""" + rv = getattr(stream, "encoding", None) or sys.getdefaultencoding() + if is_ascii_encoding(rv): + return "utf-8" + return rv + + +class _NonClosingTextIOWrapper(io.TextIOWrapper): + def __init__( + self, + stream: t.BinaryIO, + encoding: t.Optional[str], + errors: t.Optional[str], + force_readable: bool = False, + force_writable: bool = False, + **extra: t.Any, + ) -> None: + self._stream = stream = t.cast( + t.BinaryIO, _FixupStream(stream, force_readable, force_writable) + ) + super().__init__(stream, encoding, errors, **extra) + + def __del__(self) -> None: + try: + self.detach() + except Exception: + pass + + def isatty(self) -> bool: + # https://bitbucket.org/pypy/pypy/issue/1803 + return self._stream.isatty() + + +class _FixupStream: + """The new io interface needs more from streams than streams + traditionally implement. 
As such, this fix-up code is necessary in
+    some circumstances.
+
+    The forcing of the readable and writable flags is there because some tools
+    put badly patched objects on sys (one such offender being certain versions
+    of Jupyter Notebook).
+    """
+
+    def __init__(
+        self,
+        stream: t.BinaryIO,
+        force_readable: bool = False,
+        force_writable: bool = False,
+    ):
+        self._stream = stream
+        self._force_readable = force_readable
+        self._force_writable = force_writable
+
+    def __getattr__(self, name: str) -> t.Any:
+        return getattr(self._stream, name)
+
+    def read1(self, size: int) -> bytes:
+        f = getattr(self._stream, "read1", None)
+
+        if f is not None:
+            return t.cast(bytes, f(size))
+
+        return self._stream.read(size)
+
+    def readable(self) -> bool:
+        if self._force_readable:
+            return True
+        x = getattr(self._stream, "readable", None)
+        if x is not None:
+            return t.cast(bool, x())
+        try:
+            self._stream.read(0)
+        except Exception:
+            return False
+        return True
+
+    def writable(self) -> bool:
+        if self._force_writable:
+            return True
+        x = getattr(self._stream, "writable", None)
+        if x is not None:
+            return t.cast(bool, x())
+        try:
+            self._stream.write("")  # type: ignore
+        except Exception:
+            try:
+                self._stream.write(b"")
+            except Exception:
+                return False
+        return True
+
+    def seekable(self) -> bool:
+        x = getattr(self._stream, "seekable", None)
+        if x is not None:
+            return t.cast(bool, x())
+        try:
+            self._stream.seek(self._stream.tell())
+        except Exception:
+            return False
+        return True
+
+
+def _is_binary_reader(stream: t.IO[t.Any], default: bool = False) -> bool:
+    try:
+        return isinstance(stream.read(0), bytes)
+    except Exception:
+        return default
+        # This happens in some cases where the stream was already
+        # closed. In this case, we assume the default.
+
+
+def _is_binary_writer(stream: t.IO[t.Any], default: bool = False) -> bool:
+    try:
+        stream.write(b"")
+    except Exception:
+        try:
+            stream.write("")
+            return False
+        except Exception:
+            pass
+        return default
+    return True
+
+
+def _find_binary_reader(stream: t.IO[t.Any]) -> t.Optional[t.BinaryIO]:
+    # We need to figure out if the given stream is already binary.
+    # This can happen because the official docs recommend detaching
+    # the streams to get binary streams. Some code might do this, so
+    # we need to deal with this case explicitly.
+    if _is_binary_reader(stream, False):
+        return t.cast(t.BinaryIO, stream)
+
+    buf = getattr(stream, "buffer", None)
+
+    # Same situation here; this time we assume that the buffer is
+    # actually binary in case it's closed.
+    if buf is not None and _is_binary_reader(buf, True):
+        return t.cast(t.BinaryIO, buf)
+
+    return None
+
+
+def _find_binary_writer(stream: t.IO[t.Any]) -> t.Optional[t.BinaryIO]:
+    # We need to figure out if the given stream is already binary.
+    # This can happen because the official docs recommend detaching
+    # the streams to get binary streams. Some code might do this, so
+    # we need to deal with this case explicitly.
+    if _is_binary_writer(stream, False):
+        return t.cast(t.BinaryIO, stream)
+
+    buf = getattr(stream, "buffer", None)
+
+    # Same situation here; this time we assume that the buffer is
+    # actually binary in case it's closed.
+    if buf is not None and _is_binary_writer(buf, True):
+        return t.cast(t.BinaryIO, buf)
+
+    return None
+
+
+def _stream_is_misconfigured(stream: t.TextIO) -> bool:
+    """A stream is misconfigured if its encoding is ASCII."""
+    # If the stream does not have an encoding set, we assume it's set
+    # to ASCII.
This appears to happen in certain unittest + # environments. It's not quite clear what the correct behavior is + # but this at least will force Click to recover somehow. + return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii") + + +def _is_compat_stream_attr(stream: t.TextIO, attr: str, value: t.Optional[str]) -> bool: + """A stream attribute is compatible if it is equal to the + desired value or the desired value is unset and the attribute + has a value. + """ + stream_value = getattr(stream, attr, None) + return stream_value == value or (value is None and stream_value is not None) + + +def _is_compatible_text_stream( + stream: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] +) -> bool: + """Check if a stream's encoding and errors attributes are + compatible with the desired values. + """ + return _is_compat_stream_attr( + stream, "encoding", encoding + ) and _is_compat_stream_attr(stream, "errors", errors) + + +def _force_correct_text_stream( + text_stream: t.IO[t.Any], + encoding: t.Optional[str], + errors: t.Optional[str], + is_binary: t.Callable[[t.IO[t.Any], bool], bool], + find_binary: t.Callable[[t.IO[t.Any]], t.Optional[t.BinaryIO]], + force_readable: bool = False, + force_writable: bool = False, +) -> t.TextIO: + if is_binary(text_stream, False): + binary_reader = t.cast(t.BinaryIO, text_stream) + else: + text_stream = t.cast(t.TextIO, text_stream) + # If the stream looks compatible, and won't default to a + # misconfigured ascii encoding, return it as-is. + if _is_compatible_text_stream(text_stream, encoding, errors) and not ( + encoding is None and _stream_is_misconfigured(text_stream) + ): + return text_stream + + # Otherwise, get the underlying binary reader. + possible_binary_reader = find_binary(text_stream) + + # If that's not possible, silently use the original reader + # and get mojibake instead of exceptions. + if possible_binary_reader is None: + return text_stream + + binary_reader = possible_binary_reader + + # Default errors to replace instead of strict in order to get + # something that works. + if errors is None: + errors = "replace" + + # Wrap the binary stream in a text stream with the correct + # encoding parameters. 
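+    # (Editor's note: the wrapper built below is non-closing; detaching or
+    # garbage-collecting it later will not close the caller's underlying
+    # binary buffer.)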
+ return _make_text_stream( + binary_reader, + encoding, + errors, + force_readable=force_readable, + force_writable=force_writable, + ) + + +def _force_correct_text_reader( + text_reader: t.IO[t.Any], + encoding: t.Optional[str], + errors: t.Optional[str], + force_readable: bool = False, +) -> t.TextIO: + return _force_correct_text_stream( + text_reader, + encoding, + errors, + _is_binary_reader, + _find_binary_reader, + force_readable=force_readable, + ) + + +def _force_correct_text_writer( + text_writer: t.IO[t.Any], + encoding: t.Optional[str], + errors: t.Optional[str], + force_writable: bool = False, +) -> t.TextIO: + return _force_correct_text_stream( + text_writer, + encoding, + errors, + _is_binary_writer, + _find_binary_writer, + force_writable=force_writable, + ) + + +def get_binary_stdin() -> t.BinaryIO: + reader = _find_binary_reader(sys.stdin) + if reader is None: + raise RuntimeError("Was not able to determine binary stream for sys.stdin.") + return reader + + +def get_binary_stdout() -> t.BinaryIO: + writer = _find_binary_writer(sys.stdout) + if writer is None: + raise RuntimeError("Was not able to determine binary stream for sys.stdout.") + return writer + + +def get_binary_stderr() -> t.BinaryIO: + writer = _find_binary_writer(sys.stderr) + if writer is None: + raise RuntimeError("Was not able to determine binary stream for sys.stderr.") + return writer + + +def get_text_stdin( + encoding: t.Optional[str] = None, errors: t.Optional[str] = None +) -> t.TextIO: + rv = _get_windows_console_stream(sys.stdin, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_reader(sys.stdin, encoding, errors, force_readable=True) + + +def get_text_stdout( + encoding: t.Optional[str] = None, errors: t.Optional[str] = None +) -> t.TextIO: + rv = _get_windows_console_stream(sys.stdout, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer(sys.stdout, encoding, errors, force_writable=True) + + +def get_text_stderr( + encoding: t.Optional[str] = None, errors: t.Optional[str] = None +) -> t.TextIO: + rv = _get_windows_console_stream(sys.stderr, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer(sys.stderr, encoding, errors, force_writable=True) + + +def _wrap_io_open( + file: t.Union[str, "os.PathLike[str]", int], + mode: str, + encoding: t.Optional[str], + errors: t.Optional[str], +) -> t.IO[t.Any]: + """Handles not passing ``encoding`` and ``errors`` in binary mode.""" + if "b" in mode: + return open(file, mode) + + return open(file, mode, encoding=encoding, errors=errors) + + +def open_stream( + filename: "t.Union[str, os.PathLike[str]]", + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + atomic: bool = False, +) -> t.Tuple[t.IO[t.Any], bool]: + binary = "b" in mode + filename = os.fspath(filename) + + # Standard streams first. These are simple because they ignore the + # atomic flag. Use fsdecode to handle Path("-"). + if os.fsdecode(filename) == "-": + if any(m in mode for m in ["w", "a", "x"]): + if binary: + return get_binary_stdout(), False + return get_text_stdout(encoding=encoding, errors=errors), False + if binary: + return get_binary_stdin(), False + return get_text_stdin(encoding=encoding, errors=errors), False + + # Non-atomic writes directly go out through the regular open functions. 
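+    # Illustrative call shapes (editor's sketch, not upstream documentation):
+    #   open_stream("-", "w")                    -> (wrapped stdout, should_close=False)
+    #   open_stream("out.txt", "w")              -> (regular file, should_close=True)
+    #   open_stream("out.txt", "w", atomic=True) -> temp file beside the target,
+    #                                               renamed over it on close.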
+ if not atomic: + return _wrap_io_open(filename, mode, encoding, errors), True + + # Some usability stuff for atomic writes + if "a" in mode: + raise ValueError( + "Appending to an existing file is not supported, because that" + " would involve an expensive `copy`-operation to a temporary" + " file. Open the file in normal `w`-mode and copy explicitly" + " if that's what you're after." + ) + if "x" in mode: + raise ValueError("Use the `overwrite`-parameter instead.") + if "w" not in mode: + raise ValueError("Atomic writes only make sense with `w`-mode.") + + # Atomic writes are more complicated. They work by opening a file + # as a proxy in the same folder and then using the fdopen + # functionality to wrap it in a Python file. Then we wrap it in an + # atomic file that moves the file over on close. + import errno + import random + + try: + perm: t.Optional[int] = os.stat(filename).st_mode + except OSError: + perm = None + + flags = os.O_RDWR | os.O_CREAT | os.O_EXCL + + if binary: + flags |= getattr(os, "O_BINARY", 0) + + while True: + tmp_filename = os.path.join( + os.path.dirname(filename), + f".__atomic-write{random.randrange(1 << 32):08x}", + ) + try: + fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm) + break + except OSError as e: + if e.errno == errno.EEXIST or ( + os.name == "nt" + and e.errno == errno.EACCES + and os.path.isdir(e.filename) + and os.access(e.filename, os.W_OK) + ): + continue + raise + + if perm is not None: + os.chmod(tmp_filename, perm) # in case perm includes bits in umask + + f = _wrap_io_open(fd, mode, encoding, errors) + af = _AtomicFile(f, tmp_filename, os.path.realpath(filename)) + return t.cast(t.IO[t.Any], af), True + + +class _AtomicFile: + def __init__(self, f: t.IO[t.Any], tmp_filename: str, real_filename: str) -> None: + self._f = f + self._tmp_filename = tmp_filename + self._real_filename = real_filename + self.closed = False + + @property + def name(self) -> str: + return self._real_filename + + def close(self, delete: bool = False) -> None: + if self.closed: + return + self._f.close() + os.replace(self._tmp_filename, self._real_filename) + self.closed = True + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._f, name) + + def __enter__(self) -> "_AtomicFile": + return self + + def __exit__(self, exc_type: t.Optional[t.Type[BaseException]], *_: t.Any) -> None: + self.close(delete=exc_type is not None) + + def __repr__(self) -> str: + return repr(self._f) + + +def strip_ansi(value: str) -> str: + return _ansi_re.sub("", value) + + +def _is_jupyter_kernel_output(stream: t.IO[t.Any]) -> bool: + while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)): + stream = stream._stream + + return stream.__class__.__module__.startswith("ipykernel.") + + +def should_strip_ansi( + stream: t.Optional[t.IO[t.Any]] = None, color: t.Optional[bool] = None +) -> bool: + if color is None: + if stream is None: + stream = sys.stdin + return not isatty(stream) and not _is_jupyter_kernel_output(stream) + return not color + + +# On Windows, wrap the output streams with colorama to support ANSI +# color codes. 
+# NOTE: double check is needed so mypy does not analyze this on Linux +if sys.platform.startswith("win") and WIN: + from ._winconsole import _get_windows_console_stream + + def _get_argv_encoding() -> str: + import locale + + return locale.getpreferredencoding() + + _ansi_stream_wrappers: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() + + def auto_wrap_for_ansi( + stream: t.TextIO, color: t.Optional[bool] = None + ) -> t.TextIO: + """Support ANSI color and style codes on Windows by wrapping a + stream with colorama. + """ + try: + cached = _ansi_stream_wrappers.get(stream) + except Exception: + cached = None + + if cached is not None: + return cached + + import colorama + + strip = should_strip_ansi(stream, color) + ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip) + rv = t.cast(t.TextIO, ansi_wrapper.stream) + _write = rv.write + + def _safe_write(s): + try: + return _write(s) + except BaseException: + ansi_wrapper.reset_all() + raise + + rv.write = _safe_write + + try: + _ansi_stream_wrappers[stream] = rv + except Exception: + pass + + return rv + +else: + + def _get_argv_encoding() -> str: + return getattr(sys.stdin, "encoding", None) or sys.getfilesystemencoding() + + def _get_windows_console_stream( + f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] + ) -> t.Optional[t.TextIO]: + return None + + +def term_len(x: str) -> int: + return len(strip_ansi(x)) + + +def isatty(stream: t.IO[t.Any]) -> bool: + try: + return stream.isatty() + except Exception: + return False + + +def _make_cached_stream_func( + src_func: t.Callable[[], t.Optional[t.TextIO]], + wrapper_func: t.Callable[[], t.TextIO], +) -> t.Callable[[], t.Optional[t.TextIO]]: + cache: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() + + def func() -> t.Optional[t.TextIO]: + stream = src_func() + + if stream is None: + return None + + try: + rv = cache.get(stream) + except Exception: + rv = None + if rv is not None: + return rv + rv = wrapper_func() + try: + cache[stream] = rv + except Exception: + pass + return rv + + return func + + +_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin) +_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout) +_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr) + + +binary_streams: t.Mapping[str, t.Callable[[], t.BinaryIO]] = { + "stdin": get_binary_stdin, + "stdout": get_binary_stdout, + "stderr": get_binary_stderr, +} + +text_streams: t.Mapping[ + str, t.Callable[[t.Optional[str], t.Optional[str]], t.TextIO] +] = { + "stdin": get_text_stdin, + "stdout": get_text_stdout, + "stderr": get_text_stderr, +} diff --git a/venv/lib/python3.11/site-packages/click/_termui_impl.py b/venv/lib/python3.11/site-packages/click/_termui_impl.py new file mode 100644 index 0000000..ad9f8f6 --- /dev/null +++ b/venv/lib/python3.11/site-packages/click/_termui_impl.py @@ -0,0 +1,788 @@ +""" +This module contains implementations for the termui module. To keep the +import time of Click down, some infrequently used functionality is +placed in this module and only imported as needed. 
+""" + +import contextlib +import math +import os +import sys +import time +import typing as t +from gettext import gettext as _ +from io import StringIO +from shutil import which +from types import TracebackType + +from ._compat import _default_text_stdout +from ._compat import CYGWIN +from ._compat import get_best_encoding +from ._compat import isatty +from ._compat import open_stream +from ._compat import strip_ansi +from ._compat import term_len +from ._compat import WIN +from .exceptions import ClickException +from .utils import echo + +V = t.TypeVar("V") + +if os.name == "nt": + BEFORE_BAR = "\r" + AFTER_BAR = "\n" +else: + BEFORE_BAR = "\r\033[?25l" + AFTER_BAR = "\033[?25h\n" + + +class ProgressBar(t.Generic[V]): + def __init__( + self, + iterable: t.Optional[t.Iterable[V]], + length: t.Optional[int] = None, + fill_char: str = "#", + empty_char: str = " ", + bar_template: str = "%(bar)s", + info_sep: str = " ", + show_eta: bool = True, + show_percent: t.Optional[bool] = None, + show_pos: bool = False, + item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None, + label: t.Optional[str] = None, + file: t.Optional[t.TextIO] = None, + color: t.Optional[bool] = None, + update_min_steps: int = 1, + width: int = 30, + ) -> None: + self.fill_char = fill_char + self.empty_char = empty_char + self.bar_template = bar_template + self.info_sep = info_sep + self.show_eta = show_eta + self.show_percent = show_percent + self.show_pos = show_pos + self.item_show_func = item_show_func + self.label: str = label or "" + + if file is None: + file = _default_text_stdout() + + # There are no standard streams attached to write to. For example, + # pythonw on Windows. + if file is None: + file = StringIO() + + self.file = file + self.color = color + self.update_min_steps = update_min_steps + self._completed_intervals = 0 + self.width: int = width + self.autowidth: bool = width == 0 + + if length is None: + from operator import length_hint + + length = length_hint(iterable, -1) + + if length == -1: + length = None + if iterable is None: + if length is None: + raise TypeError("iterable or length is required") + iterable = t.cast(t.Iterable[V], range(length)) + self.iter: t.Iterable[V] = iter(iterable) + self.length = length + self.pos = 0 + self.avg: t.List[float] = [] + self.last_eta: float + self.start: float + self.start = self.last_eta = time.time() + self.eta_known: bool = False + self.finished: bool = False + self.max_width: t.Optional[int] = None + self.entered: bool = False + self.current_item: t.Optional[V] = None + self.is_hidden: bool = not isatty(self.file) + self._last_line: t.Optional[str] = None + + def __enter__(self) -> "ProgressBar[V]": + self.entered = True + self.render_progress() + return self + + def __exit__( + self, + exc_type: t.Optional[t.Type[BaseException]], + exc_value: t.Optional[BaseException], + tb: t.Optional[TracebackType], + ) -> None: + self.render_finish() + + def __iter__(self) -> t.Iterator[V]: + if not self.entered: + raise RuntimeError("You need to use progress bars in a with block.") + self.render_progress() + return self.generator() + + def __next__(self) -> V: + # Iteration is defined in terms of a generator function, + # returned by iter(self); use that to define next(). This works + # because `self.iter` is an iterable consumed by that generator, + # so it is re-entry safe. Calling `next(self.generator())` + # twice works and does "what you want". 
+ return next(iter(self)) + + def render_finish(self) -> None: + if self.is_hidden: + return + self.file.write(AFTER_BAR) + self.file.flush() + + @property + def pct(self) -> float: + if self.finished: + return 1.0 + return min(self.pos / (float(self.length or 1) or 1), 1.0) + + @property + def time_per_iteration(self) -> float: + if not self.avg: + return 0.0 + return sum(self.avg) / float(len(self.avg)) + + @property + def eta(self) -> float: + if self.length is not None and not self.finished: + return self.time_per_iteration * (self.length - self.pos) + return 0.0 + + def format_eta(self) -> str: + if self.eta_known: + t = int(self.eta) + seconds = t % 60 + t //= 60 + minutes = t % 60 + t //= 60 + hours = t % 24 + t //= 24 + if t > 0: + return f"{t}d {hours:02}:{minutes:02}:{seconds:02}" + else: + return f"{hours:02}:{minutes:02}:{seconds:02}" + return "" + + def format_pos(self) -> str: + pos = str(self.pos) + if self.length is not None: + pos += f"/{self.length}" + return pos + + def format_pct(self) -> str: + return f"{int(self.pct * 100): 4}%"[1:] + + def format_bar(self) -> str: + if self.length is not None: + bar_length = int(self.pct * self.width) + bar = self.fill_char * bar_length + bar += self.empty_char * (self.width - bar_length) + elif self.finished: + bar = self.fill_char * self.width + else: + chars = list(self.empty_char * (self.width or 1)) + if self.time_per_iteration != 0: + chars[ + int( + (math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5) + * self.width + ) + ] = self.fill_char + bar = "".join(chars) + return bar + + def format_progress_line(self) -> str: + show_percent = self.show_percent + + info_bits = [] + if self.length is not None and show_percent is None: + show_percent = not self.show_pos + + if self.show_pos: + info_bits.append(self.format_pos()) + if show_percent: + info_bits.append(self.format_pct()) + if self.show_eta and self.eta_known and not self.finished: + info_bits.append(self.format_eta()) + if self.item_show_func is not None: + item_info = self.item_show_func(self.current_item) + if item_info is not None: + info_bits.append(item_info) + + return ( + self.bar_template + % { + "label": self.label, + "bar": self.format_bar(), + "info": self.info_sep.join(info_bits), + } + ).rstrip() + + def render_progress(self) -> None: + import shutil + + if self.is_hidden: + # Only output the label as it changes if the output is not a + # TTY. Use file=stderr if you expect to be piping stdout. + if self._last_line != self.label: + self._last_line = self.label + echo(self.label, file=self.file, color=self.color) + + return + + buf = [] + # Update width in case the terminal has been resized + if self.autowidth: + old_width = self.width + self.width = 0 + clutter_length = term_len(self.format_progress_line()) + new_width = max(0, shutil.get_terminal_size().columns - clutter_length) + if new_width < old_width: + buf.append(BEFORE_BAR) + buf.append(" " * self.max_width) # type: ignore + self.max_width = new_width + self.width = new_width + + clear_width = self.width + if self.max_width is not None: + clear_width = self.max_width + + buf.append(BEFORE_BAR) + line = self.format_progress_line() + line_len = term_len(line) + if self.max_width is None or self.max_width < line_len: + self.max_width = line_len + + buf.append(line) + buf.append(" " * (clear_width - line_len)) + line = "".join(buf) + # Render the line only if it changed. 
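+        # (Editor's note: skipping identical frames avoids cursor flicker and
+        # redundant terminal writes while the bar is idle.)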
+
+        if line != self._last_line:
+            self._last_line = line
+            echo(line, file=self.file, color=self.color, nl=False)
+            self.file.flush()
+
+    def make_step(self, n_steps: int) -> None:
+        self.pos += n_steps
+        if self.length is not None and self.pos >= self.length:
+            self.finished = True
+
+        if (time.time() - self.last_eta) < 1.0:
+            return
+
+        self.last_eta = time.time()
+
+        # self.avg is a rolling list of length <= 7 of steps where steps are
+        # defined as time elapsed divided by the total progress through
+        # self.length.
+        if self.pos:
+            step = (time.time() - self.start) / self.pos
+        else:
+            step = time.time() - self.start
+
+        self.avg = self.avg[-6:] + [step]
+
+        self.eta_known = self.length is not None
+
+    def update(self, n_steps: int, current_item: t.Optional[V] = None) -> None:
+        """Update the progress bar by advancing a specified number of
+        steps, and optionally set the ``current_item`` for this new
+        position.
+
+        :param n_steps: Number of steps to advance.
+        :param current_item: Optional item to set as ``current_item``
+            for the updated position.
+
+        .. versionchanged:: 8.0
+            Added the ``current_item`` optional parameter.
+
+        .. versionchanged:: 8.0
+            Only render when the number of steps meets the
+            ``update_min_steps`` threshold.
+        """
+        if current_item is not None:
+            self.current_item = current_item
+
+        self._completed_intervals += n_steps
+
+        if self._completed_intervals >= self.update_min_steps:
+            self.make_step(self._completed_intervals)
+            self.render_progress()
+            self._completed_intervals = 0
+
+    def finish(self) -> None:
+        self.eta_known = False
+        self.current_item = None
+        self.finished = True
+
+    def generator(self) -> t.Iterator[V]:
+        """Return a generator which yields the items added to the bar
+        during construction, and updates the progress bar *after* the
+        yielded block returns.
+        """
+        # WARNING: the iterator interface for `ProgressBar` relies on
+        # this and only works because this is a simple generator which
+        # doesn't create or manage additional state. If this function
+        # changes, the impact should be evaluated both against
+        # `iter(bar)` and `next(bar)`. `next()` in particular may call
+        # `self.generator()` repeatedly, and this must remain safe in
+        # order for that interface to work.
+        if not self.entered:
+            raise RuntimeError("You need to use progress bars in a with block.")
+
+        if self.is_hidden:
+            yield from self.iter
+        else:
+            for rv in self.iter:
+                self.current_item = rv
+
+                # This allows item_show_func to be updated before the
+                # item is processed. Only trigger at the beginning of
+                # the update interval.
+                if self._completed_intervals == 0:
+                    self.render_progress()
+
+                yield rv
+                self.update(1)
+
+        self.finish()
+        self.render_progress()
+
+
+def pager(generator: t.Iterable[str], color: t.Optional[bool] = None) -> None:
+    """Decide what method to use for paging through text."""
+    stdout = _default_text_stdout()
+
+    # There are no standard streams attached to write to. For example,
+    # pythonw on Windows.
+    if stdout is None:
+        stdout = StringIO()
+
+    if not isatty(sys.stdin) or not isatty(stdout):
+        return _nullpager(stdout, generator, color)
+    pager_cmd = (os.environ.get("PAGER", None) or "").strip()
+    if pager_cmd:
+        if WIN:
+            if _tempfilepager(generator, pager_cmd, color):
+                return
+        elif _pipepager(generator, pager_cmd, color):
+            return
+    if os.environ.get("TERM") in ("dumb", "emacs"):
+        return _nullpager(stdout, generator, color)
+    if (WIN or sys.platform.startswith("os2")) and _tempfilepager(
+        generator, "more", color
+    ):
+        return
+    if _pipepager(generator, "less", color):
+        return
+
+    import tempfile
+
+    fd, filename = tempfile.mkstemp()
+    os.close(fd)
+    try:
+        if _pipepager(generator, "more", color):
+            return
+        return _nullpager(stdout, generator, color)
+    finally:
+        os.unlink(filename)
+
+
+def _pipepager(generator: t.Iterable[str], cmd: str, color: t.Optional[bool]) -> bool:
+    """Page through text by feeding it to another program. Invoking a
+    pager through this might support colors.
+
+    Returns True if the command was found; False otherwise, in which case
+    another pager should be attempted.
+    """
+    cmd_absolute = which(cmd)
+    if cmd_absolute is None:
+        return False
+
+    import subprocess
+
+    env = dict(os.environ)
+
+    # If we're piping to less we might support colors, under the condition
+    # that the LESS flags permit raw ANSI sequences (-r or -R); if LESS is
+    # unset, -R is added automatically.
+    cmd_detail = cmd.rsplit("/", 1)[-1].split()
+    if color is None and cmd_detail[0] == "less":
+        less_flags = f"{os.environ.get('LESS', '')}{' '.join(cmd_detail[1:])}"
+        if not less_flags:
+            env["LESS"] = "-R"
+            color = True
+        elif "r" in less_flags or "R" in less_flags:
+            color = True
+
+    c = subprocess.Popen(
+        [cmd_absolute],
+        shell=True,
+        stdin=subprocess.PIPE,
+        env=env,
+        errors="replace",
+        text=True,
+    )
+    assert c.stdin is not None
+    try:
+        for text in generator:
+            if not color:
+                text = strip_ansi(text)
+
+            c.stdin.write(text)
+    except (OSError, KeyboardInterrupt):
+        pass
+    else:
+        c.stdin.close()
+
+    # Less doesn't respect ^C, but catches it for its own UI purposes (aborting
+    # search or other commands inside less).
+    #
+    # That means when the user hits ^C, the parent process (click) terminates,
+    # but less is still alive, paging the output and messing up the terminal.
+    #
+    # If the user wants to make the pager exit on ^C, they should set
+    # `LESS='-K'`. It's not our decision to make.
+    while True:
+        try:
+            c.wait()
+        except KeyboardInterrupt:
+            pass
+        else:
+            break
+
+    return True
+
+
+def _tempfilepager(
+    generator: t.Iterable[str],
+    cmd: str,
+    color: t.Optional[bool],
+) -> bool:
+    """Page through text by invoking a program on a temporary file.
+
+    Returns True if the command was found; False otherwise, in which case
+    another pager should be attempted.
+    """
+    # `which` is necessary for Windows; it is also recommended in the Popen docs.
+    cmd_absolute = which(cmd)
+    if cmd_absolute is None:
+        return False
+
+    import subprocess
+    import tempfile
+
+    fd, filename = tempfile.mkstemp()
+    # TODO: This never terminates if the passed generator never terminates.
+    text = "".join(generator)
+    if not color:
+        text = strip_ansi(text)
+    encoding = get_best_encoding(sys.stdout)
+    with open_stream(filename, "wb")[0] as f:
+        f.write(text.encode(encoding))
+    try:
+        subprocess.call([cmd_absolute, filename])
+    except OSError:
+        # Command not found
+        pass
+    finally:
+        os.close(fd)
+        os.unlink(filename)
+
+    return True
+
+
+def _nullpager(
+    stream: t.TextIO, generator: t.Iterable[str], color: t.Optional[bool]
+) -> None:
+    """Simply print unformatted text.
This is the ultimate fallback.""" + for text in generator: + if not color: + text = strip_ansi(text) + stream.write(text) + + +class Editor: + def __init__( + self, + editor: t.Optional[str] = None, + env: t.Optional[t.Mapping[str, str]] = None, + require_save: bool = True, + extension: str = ".txt", + ) -> None: + self.editor = editor + self.env = env + self.require_save = require_save + self.extension = extension + + def get_editor(self) -> str: + if self.editor is not None: + return self.editor + for key in "VISUAL", "EDITOR": + rv = os.environ.get(key) + if rv: + return rv + if WIN: + return "notepad" + for editor in "sensible-editor", "vim", "nano": + if which(editor) is not None: + return editor + return "vi" + + def edit_file(self, filename: str) -> None: + import subprocess + + editor = self.get_editor() + environ: t.Optional[t.Dict[str, str]] = None + + if self.env: + environ = os.environ.copy() + environ.update(self.env) + + try: + c = subprocess.Popen(f'{editor} "{filename}"', env=environ, shell=True) + exit_code = c.wait() + if exit_code != 0: + raise ClickException( + _("{editor}: Editing failed").format(editor=editor) + ) + except OSError as e: + raise ClickException( + _("{editor}: Editing failed: {e}").format(editor=editor, e=e) + ) from e + + def edit(self, text: t.Optional[t.AnyStr]) -> t.Optional[t.AnyStr]: + import tempfile + + if not text: + data = b"" + elif isinstance(text, (bytes, bytearray)): + data = text + else: + if text and not text.endswith("\n"): + text += "\n" + + if WIN: + data = text.replace("\n", "\r\n").encode("utf-8-sig") + else: + data = text.encode("utf-8") + + fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension) + f: t.BinaryIO + + try: + with os.fdopen(fd, "wb") as f: + f.write(data) + + # If the filesystem resolution is 1 second, like Mac OS + # 10.12 Extended, or 2 seconds, like FAT32, and the editor + # closes very fast, require_save can fail. Set the modified + # time to be 2 seconds in the past to work around this. + os.utime(name, (os.path.getatime(name), os.path.getmtime(name) - 2)) + # Depending on the resolution, the exact value might not be + # recorded, so get the new recorded value. 
+ timestamp = os.path.getmtime(name) + + self.edit_file(name) + + if self.require_save and os.path.getmtime(name) == timestamp: + return None + + with open(name, "rb") as f: + rv = f.read() + + if isinstance(text, (bytes, bytearray)): + return rv + + return rv.decode("utf-8-sig").replace("\r\n", "\n") # type: ignore + finally: + os.unlink(name) + + +def open_url(url: str, wait: bool = False, locate: bool = False) -> int: + import subprocess + + def _unquote_file(url: str) -> str: + from urllib.parse import unquote + + if url.startswith("file://"): + url = unquote(url[7:]) + + return url + + if sys.platform == "darwin": + args = ["open"] + if wait: + args.append("-W") + if locate: + args.append("-R") + args.append(_unquote_file(url)) + null = open("/dev/null", "w") + try: + return subprocess.Popen(args, stderr=null).wait() + finally: + null.close() + elif WIN: + if locate: + url = _unquote_file(url) + args = ["explorer", f"/select,{url}"] + else: + args = ["start"] + if wait: + args.append("/WAIT") + args.append("") + args.append(url) + try: + return subprocess.call(args) + except OSError: + # Command not found + return 127 + elif CYGWIN: + if locate: + url = _unquote_file(url) + args = ["cygstart", os.path.dirname(url)] + else: + args = ["cygstart"] + if wait: + args.append("-w") + args.append(url) + try: + return subprocess.call(args) + except OSError: + # Command not found + return 127 + + try: + if locate: + url = os.path.dirname(_unquote_file(url)) or "." + else: + url = _unquote_file(url) + c = subprocess.Popen(["xdg-open", url]) + if wait: + return c.wait() + return 0 + except OSError: + if url.startswith(("http://", "https://")) and not locate and not wait: + import webbrowser + + webbrowser.open(url) + return 0 + return 1 + + +def _translate_ch_to_exc(ch: str) -> t.Optional[BaseException]: + if ch == "\x03": + raise KeyboardInterrupt() + + if ch == "\x04" and not WIN: # Unix-like, Ctrl+D + raise EOFError() + + if ch == "\x1a" and WIN: # Windows, Ctrl+Z + raise EOFError() + + return None + + +if WIN: + import msvcrt + + @contextlib.contextmanager + def raw_terminal() -> t.Iterator[int]: + yield -1 + + def getchar(echo: bool) -> str: + # The function `getch` will return a bytes object corresponding to + # the pressed character. Since Windows 10 build 1803, it will also + # return \x00 when called a second time after pressing a regular key. + # + # `getwch` does not share this probably-bugged behavior. Moreover, it + # returns a Unicode object by default, which is what we want. + # + # Either of these functions will return \x00 or \xe0 to indicate + # a special key, and you need to call the same function again to get + # the "rest" of the code. The fun part is that \u00e0 is + # "latin small letter a with grave", so if you type that on a French + # keyboard, you _also_ get a \xe0. + # E.g., consider the Up arrow. This returns \xe0 and then \x48. The + # resulting Unicode string reads as "a with grave" + "capital H". + # This is indistinguishable from when the user actually types + # "a with grave" and then "capital H". + # + # When \xe0 is returned, we assume it's part of a special-key sequence + # and call `getwch` again, but that means that when the user types + # the \u00e0 character, `getchar` doesn't return until a second + # character is typed. + # The alternative is returning immediately, but that would mess up + # cross-platform handling of arrow keys and others that start with + # \xe0. 
Another option is using `getch`, but then we can't reliably + # read non-ASCII characters, because return values of `getch` are + # limited to the current 8-bit codepage. + # + # Anyway, Click doesn't claim to do this Right(tm), and using `getwch` + # is doing the right thing in more situations than with `getch`. + func: t.Callable[[], str] + + if echo: + func = msvcrt.getwche # type: ignore + else: + func = msvcrt.getwch # type: ignore + + rv = func() + + if rv in ("\x00", "\xe0"): + # \x00 and \xe0 are control characters that indicate special key, + # see above. + rv += func() + + _translate_ch_to_exc(rv) + return rv + +else: + import termios + import tty + + @contextlib.contextmanager + def raw_terminal() -> t.Iterator[int]: + f: t.Optional[t.TextIO] + fd: int + + if not isatty(sys.stdin): + f = open("/dev/tty") + fd = f.fileno() + else: + fd = sys.stdin.fileno() + f = None + + try: + old_settings = termios.tcgetattr(fd) + + try: + tty.setraw(fd) + yield fd + finally: + termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) + sys.stdout.flush() + + if f is not None: + f.close() + except termios.error: + pass + + def getchar(echo: bool) -> str: + with raw_terminal() as fd: + ch = os.read(fd, 32).decode(get_best_encoding(sys.stdin), "replace") + + if echo and isatty(sys.stdout): + sys.stdout.write(ch) + + _translate_ch_to_exc(ch) + return ch diff --git a/venv/lib/python3.11/site-packages/click/_textwrap.py b/venv/lib/python3.11/site-packages/click/_textwrap.py new file mode 100644 index 0000000..b47dcbd --- /dev/null +++ b/venv/lib/python3.11/site-packages/click/_textwrap.py @@ -0,0 +1,49 @@ +import textwrap +import typing as t +from contextlib import contextmanager + + +class TextWrapper(textwrap.TextWrapper): + def _handle_long_word( + self, + reversed_chunks: t.List[str], + cur_line: t.List[str], + cur_len: int, + width: int, + ) -> None: + space_left = max(width - cur_len, 1) + + if self.break_long_words: + last = reversed_chunks[-1] + cut = last[:space_left] + res = last[space_left:] + cur_line.append(cut) + reversed_chunks[-1] = res + elif not cur_line: + cur_line.append(reversed_chunks.pop()) + + @contextmanager + def extra_indent(self, indent: str) -> t.Iterator[None]: + old_initial_indent = self.initial_indent + old_subsequent_indent = self.subsequent_indent + self.initial_indent += indent + self.subsequent_indent += indent + + try: + yield + finally: + self.initial_indent = old_initial_indent + self.subsequent_indent = old_subsequent_indent + + def indent_only(self, text: str) -> str: + rv = [] + + for idx, line in enumerate(text.splitlines()): + indent = self.initial_indent + + if idx > 0: + indent = self.subsequent_indent + + rv.append(f"{indent}{line}") + + return "\n".join(rv) diff --git a/venv/lib/python3.11/site-packages/click/_winconsole.py b/venv/lib/python3.11/site-packages/click/_winconsole.py new file mode 100644 index 0000000..6b20df3 --- /dev/null +++ b/venv/lib/python3.11/site-packages/click/_winconsole.py @@ -0,0 +1,279 @@ +# This module is based on the excellent work by Adam Bartoš who +# provided a lot of what went into the implementation here in +# the discussion to issue1602 in the Python bug tracker. +# +# There are some general differences in regards to how this works +# compared to the original patches as we do not need to patch +# the entire interpreter but just work in our little world of +# echo and prompt. 
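+#
+# (Editor's note, descriptive only: the code below talks to the Windows
+# console through ReadConsoleW/WriteConsoleW, which operate on UTF-16-LE
+# code units, and wraps those raw handles as ordinary Python text streams.)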
+import io +import sys +import time +import typing as t +from ctypes import byref +from ctypes import c_char +from ctypes import c_char_p +from ctypes import c_int +from ctypes import c_ssize_t +from ctypes import c_ulong +from ctypes import c_void_p +from ctypes import POINTER +from ctypes import py_object +from ctypes import Structure +from ctypes.wintypes import DWORD +from ctypes.wintypes import HANDLE +from ctypes.wintypes import LPCWSTR +from ctypes.wintypes import LPWSTR + +from ._compat import _NonClosingTextIOWrapper + +assert sys.platform == "win32" +import msvcrt # noqa: E402 +from ctypes import windll # noqa: E402 +from ctypes import WINFUNCTYPE # noqa: E402 + +c_ssize_p = POINTER(c_ssize_t) + +kernel32 = windll.kernel32 +GetStdHandle = kernel32.GetStdHandle +ReadConsoleW = kernel32.ReadConsoleW +WriteConsoleW = kernel32.WriteConsoleW +GetConsoleMode = kernel32.GetConsoleMode +GetLastError = kernel32.GetLastError +GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32)) +CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( + ("CommandLineToArgvW", windll.shell32) +) +LocalFree = WINFUNCTYPE(c_void_p, c_void_p)(("LocalFree", windll.kernel32)) + +STDIN_HANDLE = GetStdHandle(-10) +STDOUT_HANDLE = GetStdHandle(-11) +STDERR_HANDLE = GetStdHandle(-12) + +PyBUF_SIMPLE = 0 +PyBUF_WRITABLE = 1 + +ERROR_SUCCESS = 0 +ERROR_NOT_ENOUGH_MEMORY = 8 +ERROR_OPERATION_ABORTED = 995 + +STDIN_FILENO = 0 +STDOUT_FILENO = 1 +STDERR_FILENO = 2 + +EOF = b"\x1a" +MAX_BYTES_WRITTEN = 32767 + +try: + from ctypes import pythonapi +except ImportError: + # On PyPy we cannot get buffers so our ability to operate here is + # severely limited. + get_buffer = None +else: + + class Py_buffer(Structure): + _fields_ = [ + ("buf", c_void_p), + ("obj", py_object), + ("len", c_ssize_t), + ("itemsize", c_ssize_t), + ("readonly", c_int), + ("ndim", c_int), + ("format", c_char_p), + ("shape", c_ssize_p), + ("strides", c_ssize_p), + ("suboffsets", c_ssize_p), + ("internal", c_void_p), + ] + + PyObject_GetBuffer = pythonapi.PyObject_GetBuffer + PyBuffer_Release = pythonapi.PyBuffer_Release + + def get_buffer(obj, writable=False): + buf = Py_buffer() + flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE + PyObject_GetBuffer(py_object(obj), byref(buf), flags) + + try: + buffer_type = c_char * buf.len + return buffer_type.from_address(buf.buf) + finally: + PyBuffer_Release(byref(buf)) + + +class _WindowsConsoleRawIOBase(io.RawIOBase): + def __init__(self, handle): + self.handle = handle + + def isatty(self): + super().isatty() + return True + + +class _WindowsConsoleReader(_WindowsConsoleRawIOBase): + def readable(self): + return True + + def readinto(self, b): + bytes_to_be_read = len(b) + if not bytes_to_be_read: + return 0 + elif bytes_to_be_read % 2: + raise ValueError( + "cannot read odd number of bytes from UTF-16-LE encoded console" + ) + + buffer = get_buffer(b, writable=True) + code_units_to_be_read = bytes_to_be_read // 2 + code_units_read = c_ulong() + + rv = ReadConsoleW( + HANDLE(self.handle), + buffer, + code_units_to_be_read, + byref(code_units_read), + None, + ) + if GetLastError() == ERROR_OPERATION_ABORTED: + # wait for KeyboardInterrupt + time.sleep(0.1) + if not rv: + raise OSError(f"Windows error: {GetLastError()}") + + if buffer[0] == EOF: + return 0 + return 2 * code_units_read.value + + +class _WindowsConsoleWriter(_WindowsConsoleRawIOBase): + def writable(self): + return True + + @staticmethod + def _get_error_message(errno): + if errno == ERROR_SUCCESS: + 
return "ERROR_SUCCESS" + elif errno == ERROR_NOT_ENOUGH_MEMORY: + return "ERROR_NOT_ENOUGH_MEMORY" + return f"Windows error {errno}" + + def write(self, b): + bytes_to_be_written = len(b) + buf = get_buffer(b) + code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2 + code_units_written = c_ulong() + + WriteConsoleW( + HANDLE(self.handle), + buf, + code_units_to_be_written, + byref(code_units_written), + None, + ) + bytes_written = 2 * code_units_written.value + + if bytes_written == 0 and bytes_to_be_written > 0: + raise OSError(self._get_error_message(GetLastError())) + return bytes_written + + +class ConsoleStream: + def __init__(self, text_stream: t.TextIO, byte_stream: t.BinaryIO) -> None: + self._text_stream = text_stream + self.buffer = byte_stream + + @property + def name(self) -> str: + return self.buffer.name + + def write(self, x: t.AnyStr) -> int: + if isinstance(x, str): + return self._text_stream.write(x) + try: + self.flush() + except Exception: + pass + return self.buffer.write(x) + + def writelines(self, lines: t.Iterable[t.AnyStr]) -> None: + for line in lines: + self.write(line) + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._text_stream, name) + + def isatty(self) -> bool: + return self.buffer.isatty() + + def __repr__(self): + return f"" + + +def _get_text_stdin(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +def _get_text_stdout(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +def _get_text_stderr(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +_stream_factories: t.Mapping[int, t.Callable[[t.BinaryIO], t.TextIO]] = { + 0: _get_text_stdin, + 1: _get_text_stdout, + 2: _get_text_stderr, +} + + +def _is_console(f: t.TextIO) -> bool: + if not hasattr(f, "fileno"): + return False + + try: + fileno = f.fileno() + except (OSError, io.UnsupportedOperation): + return False + + handle = msvcrt.get_osfhandle(fileno) + return bool(GetConsoleMode(handle, byref(DWORD()))) + + +def _get_windows_console_stream( + f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] +) -> t.Optional[t.TextIO]: + if ( + get_buffer is not None + and encoding in {"utf-16-le", None} + and errors in {"strict", None} + and _is_console(f) + ): + func = _stream_factories.get(f.fileno()) + if func is not None: + b = getattr(f, "buffer", None) + + if b is None: + return None + + return func(b) diff --git a/venv/lib/python3.11/site-packages/click/core.py b/venv/lib/python3.11/site-packages/click/core.py new file mode 100644 index 0000000..e630501 --- /dev/null +++ b/venv/lib/python3.11/site-packages/click/core.py @@ -0,0 +1,3047 @@ +import enum +import errno +import inspect +import os +import sys +import typing as t +from collections import abc +from contextlib import contextmanager +from contextlib import ExitStack +from functools import update_wrapper +from gettext import gettext as _ +from gettext import 
ngettext +from itertools import repeat +from types import TracebackType + +from . import types +from .exceptions import Abort +from .exceptions import BadParameter +from .exceptions import ClickException +from .exceptions import Exit +from .exceptions import MissingParameter +from .exceptions import UsageError +from .formatting import HelpFormatter +from .formatting import join_options +from .globals import pop_context +from .globals import push_context +from .parser import _flag_needs_value +from .parser import OptionParser +from .parser import split_opt +from .termui import confirm +from .termui import prompt +from .termui import style +from .utils import _detect_program_name +from .utils import _expand_args +from .utils import echo +from .utils import make_default_short_help +from .utils import make_str +from .utils import PacifyFlushWrapper + +if t.TYPE_CHECKING: + import typing_extensions as te + + from .decorators import HelpOption + from .shell_completion import CompletionItem + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) +V = t.TypeVar("V") + + +def _complete_visible_commands( + ctx: "Context", incomplete: str +) -> t.Iterator[t.Tuple[str, "Command"]]: + """List all the subcommands of a group that start with the + incomplete value and aren't hidden. + + :param ctx: Invocation context for the group. + :param incomplete: Value being completed. May be empty. + """ + multi = t.cast(MultiCommand, ctx.command) + + for name in multi.list_commands(ctx): + if name.startswith(incomplete): + command = multi.get_command(ctx, name) + + if command is not None and not command.hidden: + yield name, command + + +def _check_multicommand( + base_command: "MultiCommand", cmd_name: str, cmd: "Command", register: bool = False +) -> None: + if not base_command.chain or not isinstance(cmd, MultiCommand): + return + if register: + hint = ( + "It is not possible to add multi commands as children to" + " another multi command that is in chain mode." + ) + else: + hint = ( + "Found a multi command as subcommand to a multi command" + " that is in chain mode. This is not supported." + ) + raise RuntimeError( + f"{hint}. Command {base_command.name!r} is set to chain and" + f" {cmd_name!r} was added as a subcommand but it in itself is a" + f" multi command. ({cmd_name!r} is a {type(cmd).__name__}" + f" within a chained {type(base_command).__name__} named" + f" {base_command.name!r})." + ) + + +def batch(iterable: t.Iterable[V], batch_size: int) -> t.List[t.Tuple[V, ...]]: + return list(zip(*repeat(iter(iterable), batch_size))) + + +@contextmanager +def augment_usage_errors( + ctx: "Context", param: t.Optional["Parameter"] = None +) -> t.Iterator[None]: + """Context manager that attaches extra information to exceptions.""" + try: + yield + except BadParameter as e: + if e.ctx is None: + e.ctx = ctx + if param is not None and e.param is None: + e.param = param + raise + except UsageError as e: + if e.ctx is None: + e.ctx = ctx + raise + + +def iter_params_for_processing( + invocation_order: t.Sequence["Parameter"], + declaration_order: t.Sequence["Parameter"], +) -> t.List["Parameter"]: + """Returns all declared parameters in the order they should be processed. + + The declared parameters are re-shuffled depending on the order in which + they were invoked, as well as the eagerness of each parameters. + + The invocation order takes precedence over the declaration order. I.e. the + order in which the user provided them to the CLI is respected. 
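+
+    For example, with declared parameters ``[version, name]`` where only
+    ``version`` is eager, a command line of ``--name x --version`` is
+    processed as ``[version, name]``: eagerness is sorted first, then
+    invocation order.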
+ + This behavior and its effect on callback evaluation is detailed at: + https://click.palletsprojects.com/en/stable/advanced/#callback-evaluation-order + """ + + def sort_key(item: "Parameter") -> t.Tuple[bool, float]: + try: + idx: float = invocation_order.index(item) + except ValueError: + idx = float("inf") + + return not item.is_eager, idx + + return sorted(declaration_order, key=sort_key) + + +class ParameterSource(enum.Enum): + """This is an :class:`~enum.Enum` that indicates the source of a + parameter's value. + + Use :meth:`click.Context.get_parameter_source` to get the + source for a parameter by name. + + .. versionchanged:: 8.0 + Use :class:`~enum.Enum` and drop the ``validate`` method. + + .. versionchanged:: 8.0 + Added the ``PROMPT`` value. + """ + + COMMANDLINE = enum.auto() + """The value was provided by the command line args.""" + ENVIRONMENT = enum.auto() + """The value was provided with an environment variable.""" + DEFAULT = enum.auto() + """Used the default specified by the parameter.""" + DEFAULT_MAP = enum.auto() + """Used a default provided by :attr:`Context.default_map`.""" + PROMPT = enum.auto() + """Used a prompt to confirm a default or provide a value.""" + + +class Context: + """The context is a special internal object that holds state relevant + for the script execution at every single level. It's normally invisible + to commands unless they opt-in to getting access to it. + + The context is useful as it can pass internal objects around and can + control special execution features such as reading data from + environment variables. + + A context can be used as context manager in which case it will call + :meth:`close` on teardown. + + :param command: the command class for this context. + :param parent: the parent context. + :param info_name: the info name for this invocation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it is usually + the name of the script, for commands below it it's + the name of the script. + :param obj: an arbitrary object of user data. + :param auto_envvar_prefix: the prefix to use for automatic environment + variables. If this is `None` then reading + from environment variables is disabled. This + does not affect manually set environment + variables which are always read. + :param default_map: a dictionary (like object) with default values + for parameters. + :param terminal_width: the width of the terminal. The default is + inherit from parent context. If no context + defines the terminal width then auto + detection will be applied. + :param max_content_width: the maximum width for content rendered by + Click (this currently only affects help + pages). This defaults to 80 characters if + not overridden. In other words: even if the + terminal is larger than that, Click will not + format things wider than 80 characters by + default. In addition to that, formatters might + add some safety mapping on the right. + :param resilient_parsing: if this flag is enabled then Click will + parse without any interactivity or callback + invocation. Default values will also be + ignored. This is useful for implementing + things such as completion support. + :param allow_extra_args: if this is set to `True` then extra arguments + at the end will not raise an error and will be + kept on the context. The default is to inherit + from the command. + :param allow_interspersed_args: if this is set to `False` then options + and arguments cannot be mixed. The + default is to inherit from the command. 
+ :param ignore_unknown_options: instructs click to ignore options it does + not know and keeps them for later + processing. + :param help_option_names: optionally a list of strings that define how + the default help parameter is named. The + default is ``['--help']``. + :param token_normalize_func: an optional function that is used to + normalize tokens (options, choices, + etc.). This for instance can be used to + implement case insensitive behavior. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. This is only needed if ANSI + codes are used in texts that Click prints which is by + default not the case. This for instance would affect + help output. + :param show_default: Show the default value for commands. If this + value is not set, it defaults to the value from the parent + context. ``Command.show_default`` overrides this default for the + specific command. + + .. versionchanged:: 8.1 + The ``show_default`` parameter is overridden by + ``Command.show_default``, instead of the other way around. + + .. versionchanged:: 8.0 + The ``show_default`` parameter defaults to the value from the + parent context. + + .. versionchanged:: 7.1 + Added the ``show_default`` parameter. + + .. versionchanged:: 4.0 + Added the ``color``, ``ignore_unknown_options``, and + ``max_content_width`` parameters. + + .. versionchanged:: 3.0 + Added the ``allow_extra_args`` and ``allow_interspersed_args`` + parameters. + + .. versionchanged:: 2.0 + Added the ``resilient_parsing``, ``help_option_names``, and + ``token_normalize_func`` parameters. + """ + + #: The formatter class to create with :meth:`make_formatter`. + #: + #: .. versionadded:: 8.0 + formatter_class: t.Type["HelpFormatter"] = HelpFormatter + + def __init__( + self, + command: "Command", + parent: t.Optional["Context"] = None, + info_name: t.Optional[str] = None, + obj: t.Optional[t.Any] = None, + auto_envvar_prefix: t.Optional[str] = None, + default_map: t.Optional[t.MutableMapping[str, t.Any]] = None, + terminal_width: t.Optional[int] = None, + max_content_width: t.Optional[int] = None, + resilient_parsing: bool = False, + allow_extra_args: t.Optional[bool] = None, + allow_interspersed_args: t.Optional[bool] = None, + ignore_unknown_options: t.Optional[bool] = None, + help_option_names: t.Optional[t.List[str]] = None, + token_normalize_func: t.Optional[t.Callable[[str], str]] = None, + color: t.Optional[bool] = None, + show_default: t.Optional[bool] = None, + ) -> None: + #: the parent context or `None` if none exists. + self.parent = parent + #: the :class:`Command` for this context. + self.command = command + #: the descriptive information name + self.info_name = info_name + #: Map of parameter names to their parsed values. Parameters + #: with ``expose_value=False`` are not stored. + self.params: t.Dict[str, t.Any] = {} + #: the leftover arguments. + self.args: t.List[str] = [] + #: protected arguments. These are arguments that are prepended + #: to `args` when certain parsing scenarios are encountered but + #: must be never propagated to another arguments. This is used + #: to implement nested parsing. + self.protected_args: t.List[str] = [] + #: the collected prefixes of the command's options. + self._opt_prefixes: t.Set[str] = set(parent._opt_prefixes) if parent else set() + + if obj is None and parent is not None: + obj = parent.obj + + #: the user object stored. 
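+        #: If no object is passed in, it is inherited from the parent
+        #: context, which is what lets ``@click.pass_obj`` hand the same
+        #: object to nested subcommands.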
+ self.obj: t.Any = obj + self._meta: t.Dict[str, t.Any] = getattr(parent, "meta", {}) + + #: A dictionary (-like object) with defaults for parameters. + if ( + default_map is None + and info_name is not None + and parent is not None + and parent.default_map is not None + ): + default_map = parent.default_map.get(info_name) + + self.default_map: t.Optional[t.MutableMapping[str, t.Any]] = default_map + + #: This flag indicates if a subcommand is going to be executed. A + #: group callback can use this information to figure out if it's + #: being executed directly or because the execution flow passes + #: onwards to a subcommand. By default it's None, but it can be + #: the name of the subcommand to execute. + #: + #: If chaining is enabled this will be set to ``'*'`` in case + #: any commands are executed. It is however not possible to + #: figure out which ones. If you require this knowledge you + #: should use a :func:`result_callback`. + self.invoked_subcommand: t.Optional[str] = None + + if terminal_width is None and parent is not None: + terminal_width = parent.terminal_width + + #: The width of the terminal (None is autodetection). + self.terminal_width: t.Optional[int] = terminal_width + + if max_content_width is None and parent is not None: + max_content_width = parent.max_content_width + + #: The maximum width of formatted content (None implies a sensible + #: default which is 80 for most things). + self.max_content_width: t.Optional[int] = max_content_width + + if allow_extra_args is None: + allow_extra_args = command.allow_extra_args + + #: Indicates if the context allows extra args or if it should + #: fail on parsing. + #: + #: .. versionadded:: 3.0 + self.allow_extra_args = allow_extra_args + + if allow_interspersed_args is None: + allow_interspersed_args = command.allow_interspersed_args + + #: Indicates if the context allows mixing of arguments and + #: options or not. + #: + #: .. versionadded:: 3.0 + self.allow_interspersed_args: bool = allow_interspersed_args + + if ignore_unknown_options is None: + ignore_unknown_options = command.ignore_unknown_options + + #: Instructs click to ignore options that a command does not + #: understand and will store it on the context for later + #: processing. This is primarily useful for situations where you + #: want to call into external programs. Generally this pattern is + #: strongly discouraged because it's not possibly to losslessly + #: forward all arguments. + #: + #: .. versionadded:: 4.0 + self.ignore_unknown_options: bool = ignore_unknown_options + + if help_option_names is None: + if parent is not None: + help_option_names = parent.help_option_names + else: + help_option_names = ["--help"] + + #: The names for the help options. + self.help_option_names: t.List[str] = help_option_names + + if token_normalize_func is None and parent is not None: + token_normalize_func = parent.token_normalize_func + + #: An optional normalization function for tokens. This is + #: options, choices, commands etc. + self.token_normalize_func: t.Optional[t.Callable[[str], str]] = ( + token_normalize_func + ) + + #: Indicates if resilient parsing is enabled. In that case Click + #: will do its best to not cause any failures and default values + #: will be ignored. Useful for completion. + self.resilient_parsing: bool = resilient_parsing + + # If there is no envvar prefix yet, but the parent has one and + # the command on this level has a name, we can expand the envvar + # prefix automatically. 
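+        # For example, a parent prefix of MYTOOL and a subcommand named
+        # "serve" produce MYTOOL_SERVE.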
+ if auto_envvar_prefix is None: + if ( + parent is not None + and parent.auto_envvar_prefix is not None + and self.info_name is not None + ): + auto_envvar_prefix = ( + f"{parent.auto_envvar_prefix}_{self.info_name.upper()}" + ) + else: + auto_envvar_prefix = auto_envvar_prefix.upper() + + if auto_envvar_prefix is not None: + auto_envvar_prefix = auto_envvar_prefix.replace("-", "_") + + self.auto_envvar_prefix: t.Optional[str] = auto_envvar_prefix + + if color is None and parent is not None: + color = parent.color + + #: Controls if styling output is wanted or not. + self.color: t.Optional[bool] = color + + if show_default is None and parent is not None: + show_default = parent.show_default + + #: Show option default values when formatting help text. + self.show_default: t.Optional[bool] = show_default + + self._close_callbacks: t.List[t.Callable[[], t.Any]] = [] + self._depth = 0 + self._parameter_source: t.Dict[str, ParameterSource] = {} + self._exit_stack = ExitStack() + + def to_info_dict(self) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. This traverses the entire CLI + structure. + + .. code-block:: python + + with Context(cli) as ctx: + info = ctx.to_info_dict() + + .. versionadded:: 8.0 + """ + return { + "command": self.command.to_info_dict(self), + "info_name": self.info_name, + "allow_extra_args": self.allow_extra_args, + "allow_interspersed_args": self.allow_interspersed_args, + "ignore_unknown_options": self.ignore_unknown_options, + "auto_envvar_prefix": self.auto_envvar_prefix, + } + + def __enter__(self) -> "Context": + self._depth += 1 + push_context(self) + return self + + def __exit__( + self, + exc_type: t.Optional[t.Type[BaseException]], + exc_value: t.Optional[BaseException], + tb: t.Optional[TracebackType], + ) -> None: + self._depth -= 1 + if self._depth == 0: + self.close() + pop_context() + + @contextmanager + def scope(self, cleanup: bool = True) -> t.Iterator["Context"]: + """This helper method can be used with the context object to promote + it to the current thread local (see :func:`get_current_context`). + The default behavior of this is to invoke the cleanup functions which + can be disabled by setting `cleanup` to `False`. The cleanup + functions are typically used for things such as closing file handles. + + If the cleanup is intended the context object can also be directly + used as a context manager. + + Example usage:: + + with ctx.scope(): + assert get_current_context() is ctx + + This is equivalent:: + + with ctx: + assert get_current_context() is ctx + + .. versionadded:: 5.0 + + :param cleanup: controls if the cleanup functions should be run or + not. The default is to run these functions. In + some situations the context only wants to be + temporarily pushed in which case this can be disabled. + Nested pushes automatically defer the cleanup. + """ + if not cleanup: + self._depth += 1 + try: + with self as rv: + yield rv + finally: + if not cleanup: + self._depth -= 1 + + @property + def meta(self) -> t.Dict[str, t.Any]: + """This is a dictionary which is shared with all the contexts + that are nested. It exists so that click utilities can store some + state here if they need to. It is however the responsibility of + that code to manage this dictionary well. + + The keys are supposed to be unique dotted strings. For instance + module paths are a good choice for it. What is stored in there is + irrelevant for the operation of click. 
However what is important is + that code that places data here adheres to the general semantics of + the system. + + Example usage:: + + LANG_KEY = f'{__name__}.lang' + + def set_language(value): + ctx = get_current_context() + ctx.meta[LANG_KEY] = value + + def get_language(): + return get_current_context().meta.get(LANG_KEY, 'en_US') + + .. versionadded:: 5.0 + """ + return self._meta + + def make_formatter(self) -> HelpFormatter: + """Creates the :class:`~click.HelpFormatter` for the help and + usage output. + + To quickly customize the formatter class used without overriding + this method, set the :attr:`formatter_class` attribute. + + .. versionchanged:: 8.0 + Added the :attr:`formatter_class` attribute. + """ + return self.formatter_class( + width=self.terminal_width, max_width=self.max_content_width + ) + + def with_resource(self, context_manager: t.ContextManager[V]) -> V: + """Register a resource as if it were used in a ``with`` + statement. The resource will be cleaned up when the context is + popped. + + Uses :meth:`contextlib.ExitStack.enter_context`. It calls the + resource's ``__enter__()`` method and returns the result. When + the context is popped, it closes the stack, which calls the + resource's ``__exit__()`` method. + + To register a cleanup function for something that isn't a + context manager, use :meth:`call_on_close`. Or use something + from :mod:`contextlib` to turn it into a context manager first. + + .. code-block:: python + + @click.group() + @click.option("--name") + @click.pass_context + def cli(ctx): + ctx.obj = ctx.with_resource(connect_db(name)) + + :param context_manager: The context manager to enter. + :return: Whatever ``context_manager.__enter__()`` returns. + + .. versionadded:: 8.0 + """ + return self._exit_stack.enter_context(context_manager) + + def call_on_close(self, f: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]: + """Register a function to be called when the context tears down. + + This can be used to close resources opened during the script + execution. Resources that support Python's context manager + protocol which would be used in a ``with`` statement should be + registered with :meth:`with_resource` instead. + + :param f: The function to execute on teardown. + """ + return self._exit_stack.callback(f) + + def close(self) -> None: + """Invoke all close callbacks registered with + :meth:`call_on_close`, and exit all context managers entered + with :meth:`with_resource`. + """ + self._exit_stack.close() + # In case the context is reused, create a new exit stack. + self._exit_stack = ExitStack() + + @property + def command_path(self) -> str: + """The computed command path. This is used for the ``usage`` + information on the help page. It's automatically created by + combining the info names of the chain of contexts to the root. 
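+        For example, a ``db`` subcommand of a group invoked as ``cli``
+        reports the command path ``cli db``.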
+ """ + rv = "" + if self.info_name is not None: + rv = self.info_name + if self.parent is not None: + parent_command_path = [self.parent.command_path] + + if isinstance(self.parent.command, Command): + for param in self.parent.command.get_params(self): + parent_command_path.extend(param.get_usage_pieces(self)) + + rv = f"{' '.join(parent_command_path)} {rv}" + return rv.lstrip() + + def find_root(self) -> "Context": + """Finds the outermost context.""" + node = self + while node.parent is not None: + node = node.parent + return node + + def find_object(self, object_type: t.Type[V]) -> t.Optional[V]: + """Finds the closest object of a given type.""" + node: t.Optional[Context] = self + + while node is not None: + if isinstance(node.obj, object_type): + return node.obj + + node = node.parent + + return None + + def ensure_object(self, object_type: t.Type[V]) -> V: + """Like :meth:`find_object` but sets the innermost object to a + new instance of `object_type` if it does not exist. + """ + rv = self.find_object(object_type) + if rv is None: + self.obj = rv = object_type() + return rv + + @t.overload + def lookup_default( + self, name: str, call: "te.Literal[True]" = True + ) -> t.Optional[t.Any]: ... + + @t.overload + def lookup_default( + self, name: str, call: "te.Literal[False]" = ... + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: ... + + def lookup_default(self, name: str, call: bool = True) -> t.Optional[t.Any]: + """Get the default for a parameter from :attr:`default_map`. + + :param name: Name of the parameter. + :param call: If the default is a callable, call it. Disable to + return the callable instead. + + .. versionchanged:: 8.0 + Added the ``call`` parameter. + """ + if self.default_map is not None: + value = self.default_map.get(name) + + if call and callable(value): + return value() + + return value + + return None + + def fail(self, message: str) -> "te.NoReturn": + """Aborts the execution of the program with a specific error + message. + + :param message: the error message to fail with. + """ + raise UsageError(message, self) + + def abort(self) -> "te.NoReturn": + """Aborts the script.""" + raise Abort() + + def exit(self, code: int = 0) -> "te.NoReturn": + """Exits the application with a given exit code.""" + raise Exit(code) + + def get_usage(self) -> str: + """Helper method to get formatted usage string for the current + context and command. + """ + return self.command.get_usage(self) + + def get_help(self) -> str: + """Helper method to get formatted help page for the current + context and command. + """ + return self.command.get_help(self) + + def _make_sub_context(self, command: "Command") -> "Context": + """Create a new context of the same type as this context, but + for a new command. + + :meta private: + """ + return type(self)(command, info_name=command.name, parent=self) + + @t.overload + def invoke( + __self, + __callback: "t.Callable[..., V]", + *args: t.Any, + **kwargs: t.Any, + ) -> V: ... + + @t.overload + def invoke( + __self, + __callback: "Command", + *args: t.Any, + **kwargs: t.Any, + ) -> t.Any: ... + + def invoke( + __self, + __callback: t.Union["Command", "t.Callable[..., V]"], + *args: t.Any, + **kwargs: t.Any, + ) -> t.Union[t.Any, V]: + """Invokes a command callback in exactly the way it expects. There + are two ways to invoke this method: + + 1. the first argument can be a callback and all other arguments and + keyword arguments are forwarded directly to the function. + 2. the first argument is a click command object. 
In that case all + arguments are forwarded as well but proper click parameters + (options and click arguments) must be keyword arguments and Click + will fill in defaults. + + Note that before Click 3.2 keyword arguments were not properly filled + in against the intention of this code and no context was created. For + more information about this change and why it was done in a bugfix + release see :ref:`upgrade-to-3.2`. + + .. versionchanged:: 8.0 + All ``kwargs`` are tracked in :attr:`params` so they will be + passed if :meth:`forward` is called at multiple levels. + """ + if isinstance(__callback, Command): + other_cmd = __callback + + if other_cmd.callback is None: + raise TypeError( + "The given command does not have a callback that can be invoked." + ) + else: + __callback = t.cast("t.Callable[..., V]", other_cmd.callback) + + ctx = __self._make_sub_context(other_cmd) + + for param in other_cmd.params: + if param.name not in kwargs and param.expose_value: + kwargs[param.name] = param.type_cast_value( # type: ignore + ctx, param.get_default(ctx) + ) + + # Track all kwargs as params, so that forward() will pass + # them on in subsequent calls. + ctx.params.update(kwargs) + else: + ctx = __self + + with augment_usage_errors(__self): + with ctx: + return __callback(*args, **kwargs) + + def forward(__self, __cmd: "Command", *args: t.Any, **kwargs: t.Any) -> t.Any: + """Similar to :meth:`invoke` but fills in default keyword + arguments from the current context if the other command expects + it. This cannot invoke callbacks directly, only other commands. + + .. versionchanged:: 8.0 + All ``kwargs`` are tracked in :attr:`params` so they will be + passed if ``forward`` is called at multiple levels. + """ + # Can only forward to other commands, not direct callbacks. + if not isinstance(__cmd, Command): + raise TypeError("Callback is not a command.") + + for param in __self.params: + if param not in kwargs: + kwargs[param] = __self.params[param] + + return __self.invoke(__cmd, *args, **kwargs) + + def set_parameter_source(self, name: str, source: ParameterSource) -> None: + """Set the source of a parameter. This indicates the location + from which the value of the parameter was obtained. + + :param name: The name of the parameter. + :param source: A member of :class:`~click.core.ParameterSource`. + """ + self._parameter_source[name] = source + + def get_parameter_source(self, name: str) -> t.Optional[ParameterSource]: + """Get the source of a parameter. This indicates the location + from which the value of the parameter was obtained. + + This can be useful for determining when a user specified a value + on the command line that is the same as the default value. It + will be :attr:`~click.core.ParameterSource.DEFAULT` only if the + value was actually taken from the default. + + :param name: The name of the parameter. + :rtype: ParameterSource + + .. versionchanged:: 8.0 + Returns ``None`` if the parameter was not provided from any + source. + """ + return self._parameter_source.get(name) + + +class BaseCommand: + """The base command implements the minimal API contract of commands. + Most code will never use this as it does not implement a lot of useful + functionality but it can act as the direct subclass of alternative + parsing methods that do not depend on the Click parser. + + For instance, this can be used to bridge Click and other systems like + argparse or docopt. 
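+
+    Such a bridge would typically subclass this class and implement
+    :meth:`parse_args` and :meth:`invoke` on top of the foreign parser.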
+ + Because base commands do not implement a lot of the API that other + parts of Click take for granted, they are not supported for all + operations. For instance, they cannot be used with the decorators + usually and they have no built-in callback system. + + .. versionchanged:: 2.0 + Added the `context_settings` parameter. + + :param name: the name of the command to use unless a group overrides it. + :param context_settings: an optional dictionary with defaults that are + passed to the context object. + """ + + #: The context class to create with :meth:`make_context`. + #: + #: .. versionadded:: 8.0 + context_class: t.Type[Context] = Context + #: the default for the :attr:`Context.allow_extra_args` flag. + allow_extra_args = False + #: the default for the :attr:`Context.allow_interspersed_args` flag. + allow_interspersed_args = True + #: the default for the :attr:`Context.ignore_unknown_options` flag. + ignore_unknown_options = False + + def __init__( + self, + name: t.Optional[str], + context_settings: t.Optional[t.MutableMapping[str, t.Any]] = None, + ) -> None: + #: the name the command thinks it has. Upon registering a command + #: on a :class:`Group` the group will default the command name + #: with this information. You should instead use the + #: :class:`Context`\'s :attr:`~Context.info_name` attribute. + self.name = name + + if context_settings is None: + context_settings = {} + + #: an optional dictionary with defaults passed to the context. + self.context_settings: t.MutableMapping[str, t.Any] = context_settings + + def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. This traverses the entire structure + below this command. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + :param ctx: A :class:`Context` representing this command. + + .. versionadded:: 8.0 + """ + return {"name": self.name} + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.name}>" + + def get_usage(self, ctx: Context) -> str: + raise NotImplementedError("Base commands cannot get usage") + + def get_help(self, ctx: Context) -> str: + raise NotImplementedError("Base commands cannot get help") + + def make_context( + self, + info_name: t.Optional[str], + args: t.List[str], + parent: t.Optional[Context] = None, + **extra: t.Any, + ) -> Context: + """This function when given an info name and arguments will kick + off the parsing and create a new :class:`Context`. It does not + invoke the actual command callback though. + + To quickly customize the context class used without overriding + this method, set the :attr:`context_class` attribute. + + :param info_name: the info name for this invocation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it's usually + the name of the script, for commands below it's + the name of the command. + :param args: the arguments to parse as list of strings. + :param parent: the parent context if available. + :param extra: extra keyword arguments forwarded to the context + constructor. + + .. versionchanged:: 8.0 + Added the :attr:`context_class` attribute. 
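+
+        For example, ``cmd.make_context("tool", ["--verbose"])`` parses
+        ``--verbose`` against ``cmd``'s parameters and returns a ready
+        :class:`Context` without invoking the callback (``cmd`` being any
+        :class:`Command` that defines such a flag).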
+ """ + for key, value in self.context_settings.items(): + if key not in extra: + extra[key] = value + + ctx = self.context_class( + self, # type: ignore[arg-type] + info_name=info_name, + parent=parent, + **extra, + ) + + with ctx.scope(cleanup=False): + self.parse_args(ctx, args) + return ctx + + def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: + """Given a context and a list of arguments this creates the parser + and parses the arguments, then modifies the context as necessary. + This is automatically invoked by :meth:`make_context`. + """ + raise NotImplementedError("Base commands do not know how to parse arguments.") + + def invoke(self, ctx: Context) -> t.Any: + """Given a context, this invokes the command. The default + implementation is raising a not implemented error. + """ + raise NotImplementedError("Base commands are not invocable by default") + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. Looks + at the names of chained multi-commands. + + Any command could be part of a chained multi-command, so sibling + commands are valid at any point during command completion. Other + command classes will return more completions. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results: t.List[CompletionItem] = [] + + while ctx.parent is not None: + ctx = ctx.parent + + if isinstance(ctx.command, MultiCommand) and ctx.command.chain: + results.extend( + CompletionItem(name, help=command.get_short_help_str()) + for name, command in _complete_visible_commands(ctx, incomplete) + if name not in ctx.protected_args + ) + + return results + + @t.overload + def main( + self, + args: t.Optional[t.Sequence[str]] = None, + prog_name: t.Optional[str] = None, + complete_var: t.Optional[str] = None, + standalone_mode: "te.Literal[True]" = True, + **extra: t.Any, + ) -> "te.NoReturn": ... + + @t.overload + def main( + self, + args: t.Optional[t.Sequence[str]] = None, + prog_name: t.Optional[str] = None, + complete_var: t.Optional[str] = None, + standalone_mode: bool = ..., + **extra: t.Any, + ) -> t.Any: ... + + def main( + self, + args: t.Optional[t.Sequence[str]] = None, + prog_name: t.Optional[str] = None, + complete_var: t.Optional[str] = None, + standalone_mode: bool = True, + windows_expand_args: bool = True, + **extra: t.Any, + ) -> t.Any: + """This is the way to invoke a script with all the bells and + whistles as a command line application. This will always terminate + the application after a call. If this is not wanted, ``SystemExit`` + needs to be caught. + + This method is also available by directly calling the instance of + a :class:`Command`. + + :param args: the arguments that should be used for parsing. If not + provided, ``sys.argv[1:]`` is used. + :param prog_name: the program name that should be used. By default + the program name is constructed by taking the file + name from ``sys.argv[0]``. + :param complete_var: the environment variable that controls the + bash completion support. The default is + ``"__COMPLETE"`` with prog_name in + uppercase. + :param standalone_mode: the default behavior is to invoke the script + in standalone mode. Click will then + handle exceptions and convert them into + error messages and the function will never + return but shut down the interpreter. 
If + this is set to `False` they will be + propagated to the caller and the return + value of this function is the return value + of :meth:`invoke`. + :param windows_expand_args: Expand glob patterns, user dir, and + env vars in command line args on Windows. + :param extra: extra keyword arguments are forwarded to the context + constructor. See :class:`Context` for more information. + + .. versionchanged:: 8.0.1 + Added the ``windows_expand_args`` parameter to allow + disabling command line arg expansion on Windows. + + .. versionchanged:: 8.0 + When taking arguments from ``sys.argv`` on Windows, glob + patterns, user dir, and env vars are expanded. + + .. versionchanged:: 3.0 + Added the ``standalone_mode`` parameter. + """ + if args is None: + args = sys.argv[1:] + + if os.name == "nt" and windows_expand_args: + args = _expand_args(args) + else: + args = list(args) + + if prog_name is None: + prog_name = _detect_program_name() + + # Process shell completion requests and exit early. + self._main_shell_completion(extra, prog_name, complete_var) + + try: + try: + with self.make_context(prog_name, args, **extra) as ctx: + rv = self.invoke(ctx) + if not standalone_mode: + return rv + # it's not safe to `ctx.exit(rv)` here! + # note that `rv` may actually contain data like "1" which + # has obvious effects + # more subtle case: `rv=[None, None]` can come out of + # chained commands which all returned `None` -- so it's not + # even always obvious that `rv` indicates success/failure + # by its truthiness/falsiness + ctx.exit() + except (EOFError, KeyboardInterrupt) as e: + echo(file=sys.stderr) + raise Abort() from e + except ClickException as e: + if not standalone_mode: + raise + e.show() + sys.exit(e.exit_code) + except OSError as e: + if e.errno == errno.EPIPE: + sys.stdout = t.cast(t.TextIO, PacifyFlushWrapper(sys.stdout)) + sys.stderr = t.cast(t.TextIO, PacifyFlushWrapper(sys.stderr)) + sys.exit(1) + else: + raise + except Exit as e: + if standalone_mode: + sys.exit(e.exit_code) + else: + # in non-standalone mode, return the exit code + # note that this is only reached if `self.invoke` above raises + # an Exit explicitly -- thus bypassing the check there which + # would return its result + # the results of non-standalone execution may therefore be + # somewhat ambiguous: if there are codepaths which lead to + # `ctx.exit(1)` and to `return 1`, the caller won't be able to + # tell the difference between the two + return e.exit_code + except Abort: + if not standalone_mode: + raise + echo(_("Aborted!"), file=sys.stderr) + sys.exit(1) + + def _main_shell_completion( + self, + ctx_args: t.MutableMapping[str, t.Any], + prog_name: str, + complete_var: t.Optional[str] = None, + ) -> None: + """Check if the shell is asking for tab completion, process + that, then exit early. Called from :meth:`main` before the + program is invoked. + + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. Defaults to + ``_{PROG_NAME}_COMPLETE``. + + .. versionchanged:: 8.2.0 + Dots (``.``) in ``prog_name`` are replaced with underscores (``_``). 
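+
+        For example, ``prog_name="my.tool"`` is looked up via the
+        ``_MY_TOOL_COMPLETE`` environment variable.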
+ """ + if complete_var is None: + complete_name = prog_name.replace("-", "_").replace(".", "_") + complete_var = f"_{complete_name}_COMPLETE".upper() + + instruction = os.environ.get(complete_var) + + if not instruction: + return + + from .shell_completion import shell_complete + + rv = shell_complete(self, ctx_args, prog_name, complete_var, instruction) + sys.exit(rv) + + def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Any: + """Alias for :meth:`main`.""" + return self.main(*args, **kwargs) + + +class Command(BaseCommand): + """Commands are the basic building block of command line interfaces in + Click. A basic command handles command line parsing and might dispatch + more parsing to commands nested below it. + + :param name: the name of the command to use unless a group overrides it. + :param context_settings: an optional dictionary with defaults that are + passed to the context object. + :param callback: the callback to invoke. This is optional. + :param params: the parameters to register with this command. This can + be either :class:`Option` or :class:`Argument` objects. + :param help: the help string to use for this command. + :param epilog: like the help string but it's printed at the end of the + help page after everything else. + :param short_help: the short help to use for this command. This is + shown on the command listing of the parent command. + :param add_help_option: by default each command registers a ``--help`` + option. This can be disabled by this parameter. + :param no_args_is_help: this controls what happens if no arguments are + provided. This option is disabled by default. + If enabled this will add ``--help`` as argument + if no arguments are passed + :param hidden: hide this command from help outputs. + + :param deprecated: issues a message indicating that + the command is deprecated. + + .. versionchanged:: 8.1 + ``help``, ``epilog``, and ``short_help`` are stored unprocessed, + all formatting is done when outputting help text, not at init, + and is done even if not using the ``@command`` decorator. + + .. versionchanged:: 8.0 + Added a ``repr`` showing the command name. + + .. versionchanged:: 7.1 + Added the ``no_args_is_help`` parameter. + + .. versionchanged:: 2.0 + Added the ``context_settings`` parameter. + """ + + def __init__( + self, + name: t.Optional[str], + context_settings: t.Optional[t.MutableMapping[str, t.Any]] = None, + callback: t.Optional[t.Callable[..., t.Any]] = None, + params: t.Optional[t.List["Parameter"]] = None, + help: t.Optional[str] = None, + epilog: t.Optional[str] = None, + short_help: t.Optional[str] = None, + options_metavar: t.Optional[str] = "[OPTIONS]", + add_help_option: bool = True, + no_args_is_help: bool = False, + hidden: bool = False, + deprecated: bool = False, + ) -> None: + super().__init__(name, context_settings) + #: the callback to execute when the command fires. This might be + #: `None` in which case nothing happens. + self.callback = callback + #: the list of parameters for this command in the order they + #: should show up in the help page and execute. Eager parameters + #: will automatically be handled before non eager ones. 
+ self.params: t.List[Parameter] = params or [] + self.help = help + self.epilog = epilog + self.options_metavar = options_metavar + self.short_help = short_help + self.add_help_option = add_help_option + self._help_option: t.Optional[HelpOption] = None + self.no_args_is_help = no_args_is_help + self.hidden = hidden + self.deprecated = deprecated + + def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict(ctx) + info_dict.update( + params=[param.to_info_dict() for param in self.get_params(ctx)], + help=self.help, + epilog=self.epilog, + short_help=self.short_help, + hidden=self.hidden, + deprecated=self.deprecated, + ) + return info_dict + + def get_usage(self, ctx: Context) -> str: + """Formats the usage line into a string and returns it. + + Calls :meth:`format_usage` internally. + """ + formatter = ctx.make_formatter() + self.format_usage(ctx, formatter) + return formatter.getvalue().rstrip("\n") + + def get_params(self, ctx: Context) -> t.List["Parameter"]: + rv = self.params + help_option = self.get_help_option(ctx) + + if help_option is not None: + rv = [*rv, help_option] + + return rv + + def format_usage(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the usage line into the formatter. + + This is a low-level method called by :meth:`get_usage`. + """ + pieces = self.collect_usage_pieces(ctx) + formatter.write_usage(ctx.command_path, " ".join(pieces)) + + def collect_usage_pieces(self, ctx: Context) -> t.List[str]: + """Returns all the pieces that go into the usage line and returns + it as a list of strings. + """ + rv = [self.options_metavar] if self.options_metavar else [] + + for param in self.get_params(ctx): + rv.extend(param.get_usage_pieces(ctx)) + + return rv + + def get_help_option_names(self, ctx: Context) -> t.List[str]: + """Returns the names for the help option.""" + all_names = set(ctx.help_option_names) + for param in self.params: + all_names.difference_update(param.opts) + all_names.difference_update(param.secondary_opts) + return list(all_names) + + def get_help_option(self, ctx: Context) -> t.Optional["Option"]: + """Returns the help option object. + + Unless ``add_help_option`` is ``False``. + + .. versionchanged:: 8.1.8 + The help option is now cached to avoid creating it multiple times. + """ + help_options = self.get_help_option_names(ctx) + + if not help_options or not self.add_help_option: + return None + + # Cache the help option object in private _help_option attribute to + # avoid creating it multiple times. Not doing this will break the + # callback odering by iter_params_for_processing(), which relies on + # object comparison. + if self._help_option is None: + # Avoid circular import. + from .decorators import HelpOption + + self._help_option = HelpOption(help_options) + + return self._help_option + + def make_parser(self, ctx: Context) -> OptionParser: + """Creates the underlying option parser for this command.""" + parser = OptionParser(ctx) + for param in self.get_params(ctx): + param.add_to_parser(parser, ctx) + return parser + + def get_help(self, ctx: Context) -> str: + """Formats the help into a string and returns it. + + Calls :meth:`format_help` internally. + """ + formatter = ctx.make_formatter() + self.format_help(ctx, formatter) + return formatter.getvalue().rstrip("\n") + + def get_short_help_str(self, limit: int = 45) -> str: + """Gets short help for the command or makes it by shortening the + long help string. 
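+
+        An explicit ``short_help`` takes precedence; otherwise the first
+        part of ``help`` is condensed to at most ``limit`` characters.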
+ """ + if self.short_help: + text = inspect.cleandoc(self.short_help) + elif self.help: + text = make_default_short_help(self.help, limit) + else: + text = "" + + if self.deprecated: + text = _("(Deprecated) {text}").format(text=text) + + return text.strip() + + def format_help(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the help into the formatter if it exists. + + This is a low-level method called by :meth:`get_help`. + + This calls the following methods: + + - :meth:`format_usage` + - :meth:`format_help_text` + - :meth:`format_options` + - :meth:`format_epilog` + """ + self.format_usage(ctx, formatter) + self.format_help_text(ctx, formatter) + self.format_options(ctx, formatter) + self.format_epilog(ctx, formatter) + + def format_help_text(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the help text to the formatter if it exists.""" + if self.help is not None: + # truncate the help text to the first form feed + text = inspect.cleandoc(self.help).partition("\f")[0] + else: + text = "" + + if self.deprecated: + text = _("(Deprecated) {text}").format(text=text) + + if text: + formatter.write_paragraph() + + with formatter.indentation(): + formatter.write_text(text) + + def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes all the options into the formatter if they exist.""" + opts = [] + for param in self.get_params(ctx): + rv = param.get_help_record(ctx) + if rv is not None: + opts.append(rv) + + if opts: + with formatter.section(_("Options")): + formatter.write_dl(opts) + + def format_epilog(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the epilog into the formatter if it exists.""" + if self.epilog: + epilog = inspect.cleandoc(self.epilog) + formatter.write_paragraph() + + with formatter.indentation(): + formatter.write_text(epilog) + + def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: + if not args and self.no_args_is_help and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + parser = self.make_parser(ctx) + opts, args, param_order = parser.parse_args(args=args) + + for param in iter_params_for_processing(param_order, self.get_params(ctx)): + value, args = param.handle_parse_result(ctx, opts, args) + + if args and not ctx.allow_extra_args and not ctx.resilient_parsing: + ctx.fail( + ngettext( + "Got unexpected extra argument ({args})", + "Got unexpected extra arguments ({args})", + len(args), + ).format(args=" ".join(map(str, args))) + ) + + ctx.args = args + ctx._opt_prefixes.update(parser._opt_prefixes) + return args + + def invoke(self, ctx: Context) -> t.Any: + """Given a context, this invokes the attached callback (if it exists) + in the right way. + """ + if self.deprecated: + message = _( + "DeprecationWarning: The command {name!r} is deprecated." + ).format(name=self.name) + echo(style(message, fg="red"), err=True) + + if self.callback is not None: + return ctx.invoke(self.callback, **ctx.params) + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. Looks + at the names of options and chained multi-commands. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. 
versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results: t.List[CompletionItem] = [] + + if incomplete and not incomplete[0].isalnum(): + for param in self.get_params(ctx): + if ( + not isinstance(param, Option) + or param.hidden + or ( + not param.multiple + and ctx.get_parameter_source(param.name) # type: ignore + is ParameterSource.COMMANDLINE + ) + ): + continue + + results.extend( + CompletionItem(name, help=param.help) + for name in [*param.opts, *param.secondary_opts] + if name.startswith(incomplete) + ) + + results.extend(super().shell_complete(ctx, incomplete)) + return results + + +class MultiCommand(Command): + """A multi command is the basic implementation of a command that + dispatches to subcommands. The most common version is the + :class:`Group`. + + :param invoke_without_command: this controls how the multi command itself + is invoked. By default it's only invoked + if a subcommand is provided. + :param no_args_is_help: this controls what happens if no arguments are + provided. This option is enabled by default if + `invoke_without_command` is disabled or disabled + if it's enabled. If enabled this will add + ``--help`` as argument if no arguments are + passed. + :param subcommand_metavar: the string that is used in the documentation + to indicate the subcommand place. + :param chain: if this is set to `True` chaining of multiple subcommands + is enabled. This restricts the form of commands in that + they cannot have optional arguments but it allows + multiple commands to be chained together. + :param result_callback: The result callback to attach to this multi + command. This can be set or changed later with the + :meth:`result_callback` decorator. + :param attrs: Other command arguments described in :class:`Command`. + """ + + allow_extra_args = True + allow_interspersed_args = False + + def __init__( + self, + name: t.Optional[str] = None, + invoke_without_command: bool = False, + no_args_is_help: t.Optional[bool] = None, + subcommand_metavar: t.Optional[str] = None, + chain: bool = False, + result_callback: t.Optional[t.Callable[..., t.Any]] = None, + **attrs: t.Any, + ) -> None: + super().__init__(name, **attrs) + + if no_args_is_help is None: + no_args_is_help = not invoke_without_command + + self.no_args_is_help = no_args_is_help + self.invoke_without_command = invoke_without_command + + if subcommand_metavar is None: + if chain: + subcommand_metavar = "COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]..." + else: + subcommand_metavar = "COMMAND [ARGS]..." + + self.subcommand_metavar = subcommand_metavar + self.chain = chain + # The result callback that is stored. This can be set or + # overridden with the :func:`result_callback` decorator. + self._result_callback = result_callback + + if self.chain: + for param in self.params: + if isinstance(param, Argument) and not param.required: + raise RuntimeError( + "Multi commands in chain mode cannot have" + " optional arguments." 
+                    )
+
+    def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]:
+        info_dict = super().to_info_dict(ctx)
+        commands = {}
+
+        for name in self.list_commands(ctx):
+            command = self.get_command(ctx, name)
+
+            if command is None:
+                continue
+
+            sub_ctx = ctx._make_sub_context(command)
+
+            with sub_ctx.scope(cleanup=False):
+                commands[name] = command.to_info_dict(sub_ctx)
+
+        info_dict.update(commands=commands, chain=self.chain)
+        return info_dict
+
+    def collect_usage_pieces(self, ctx: Context) -> t.List[str]:
+        rv = super().collect_usage_pieces(ctx)
+        rv.append(self.subcommand_metavar)
+        return rv
+
+    def format_options(self, ctx: Context, formatter: HelpFormatter) -> None:
+        super().format_options(ctx, formatter)
+        self.format_commands(ctx, formatter)
+
+    def result_callback(self, replace: bool = False) -> t.Callable[[F], F]:
+        """Adds a result callback to the command. By default if a
+        result callback is already registered this will chain them but
+        this can be disabled with the `replace` parameter. The result
+        callback is invoked with the return value of the subcommand
+        (or the list of return values from all subcommands if chaining
+        is enabled) as well as the parameters as they would be passed
+        to the main callback.
+
+        Example::
+
+            @click.group()
+            @click.option('-i', '--input', default=23)
+            def cli(input):
+                return 42
+
+            @cli.result_callback()
+            def process_result(result, input):
+                return result + input
+
+        :param replace: if set to `True` an already existing result
+                        callback will be removed.
+
+        .. versionchanged:: 8.0
+            Renamed from ``resultcallback``.
+
+        .. versionadded:: 3.0
+        """
+
+        def decorator(f: F) -> F:
+            old_callback = self._result_callback
+
+            if old_callback is None or replace:
+                self._result_callback = f
+                return f
+
+            def function(__value, *args, **kwargs):  # type: ignore
+                inner = old_callback(__value, *args, **kwargs)
+                return f(inner, *args, **kwargs)
+
+            self._result_callback = rv = update_wrapper(t.cast(F, function), f)
+            return rv  # type: ignore[return-value]
+
+        return decorator
+
+    def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None:
+        """Extra format method for multi commands that adds all the
+        commands after the options.
+        """
+        commands = []
+        for subcommand in self.list_commands(ctx):
+            cmd = self.get_command(ctx, subcommand)
+            # What is this, the tool lied about a command.
Ignore it + if cmd is None: + continue + if cmd.hidden: + continue + + commands.append((subcommand, cmd)) + + # allow for 3 times the default spacing + if len(commands): + limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands) + + rows = [] + for subcommand, cmd in commands: + help = cmd.get_short_help_str(limit) + rows.append((subcommand, help)) + + if rows: + with formatter.section(_("Commands")): + formatter.write_dl(rows) + + def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: + if not args and self.no_args_is_help and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + rest = super().parse_args(ctx, args) + + if self.chain: + ctx.protected_args = rest + ctx.args = [] + elif rest: + ctx.protected_args, ctx.args = rest[:1], rest[1:] + + return ctx.args + + def invoke(self, ctx: Context) -> t.Any: + def _process_result(value: t.Any) -> t.Any: + if self._result_callback is not None: + value = ctx.invoke(self._result_callback, value, **ctx.params) + return value + + if not ctx.protected_args: + if self.invoke_without_command: + # No subcommand was invoked, so the result callback is + # invoked with the group return value for regular + # groups, or an empty list for chained groups. + with ctx: + rv = super().invoke(ctx) + return _process_result([] if self.chain else rv) + ctx.fail(_("Missing command.")) + + # Fetch args back out + args = [*ctx.protected_args, *ctx.args] + ctx.args = [] + ctx.protected_args = [] + + # If we're not in chain mode, we only allow the invocation of a + # single command but we also inform the current context about the + # name of the command to invoke. + if not self.chain: + # Make sure the context is entered so we do not clean up + # resources until the result processor has worked. + with ctx: + cmd_name, cmd, args = self.resolve_command(ctx, args) + assert cmd is not None + ctx.invoked_subcommand = cmd_name + super().invoke(ctx) + sub_ctx = cmd.make_context(cmd_name, args, parent=ctx) + with sub_ctx: + return _process_result(sub_ctx.command.invoke(sub_ctx)) + + # In chain mode we create the contexts step by step, but after the + # base command has been invoked. Because at that point we do not + # know the subcommands yet, the invoked subcommand attribute is + # set to ``*`` to inform the command that subcommands are executed + # but nothing else. + with ctx: + ctx.invoked_subcommand = "*" if args else None + super().invoke(ctx) + + # Otherwise we make every single context and invoke them in a + # chain. In that case the return value to the result processor + # is the list of all invoked subcommand's results. + contexts = [] + while args: + cmd_name, cmd, args = self.resolve_command(ctx, args) + assert cmd is not None + sub_ctx = cmd.make_context( + cmd_name, + args, + parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False, + ) + contexts.append(sub_ctx) + args, sub_ctx.args = sub_ctx.args, [] + + rv = [] + for sub_ctx in contexts: + with sub_ctx: + rv.append(sub_ctx.command.invoke(sub_ctx)) + return _process_result(rv) + + def resolve_command( + self, ctx: Context, args: t.List[str] + ) -> t.Tuple[t.Optional[str], t.Optional[Command], t.List[str]]: + cmd_name = make_str(args[0]) + original_cmd_name = cmd_name + + # Get the command + cmd = self.get_command(ctx, cmd_name) + + # If we can't find the command but there is a normalization + # function available, we try with that one. 
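+        # (Illustration: with ``token_normalize_func=str.lower``, an
+        # input of "SYNC" is retried below as "sync" before failing.)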
+ if cmd is None and ctx.token_normalize_func is not None: + cmd_name = ctx.token_normalize_func(cmd_name) + cmd = self.get_command(ctx, cmd_name) + + # If we don't find the command we want to show an error message + # to the user that it was not provided. However, there is + # something else we should do: if the first argument looks like + # an option we want to kick off parsing again for arguments to + # resolve things like --help which now should go to the main + # place. + if cmd is None and not ctx.resilient_parsing: + if split_opt(cmd_name)[0]: + self.parse_args(ctx, ctx.args) + ctx.fail(_("No such command {name!r}.").format(name=original_cmd_name)) + return cmd_name if cmd else None, cmd, args[1:] + + def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: + """Given a context and a command name, this returns a + :class:`Command` object if it exists or returns `None`. + """ + raise NotImplementedError + + def list_commands(self, ctx: Context) -> t.List[str]: + """Returns a list of subcommand names in the order they should + appear. + """ + return [] + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. Looks + at the names of options, subcommands, and chained + multi-commands. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results = [ + CompletionItem(name, help=command.get_short_help_str()) + for name, command in _complete_visible_commands(ctx, incomplete) + ] + results.extend(super().shell_complete(ctx, incomplete)) + return results + + +class Group(MultiCommand): + """A group allows a command to have subcommands attached. This is + the most common way to implement nesting in Click. + + :param name: The name of the group command. + :param commands: A dict mapping names to :class:`Command` objects. + Can also be a list of :class:`Command`, which will use + :attr:`Command.name` to create the dict. + :param attrs: Other command arguments described in + :class:`MultiCommand`, :class:`Command`, and + :class:`BaseCommand`. + + .. versionchanged:: 8.0 + The ``commands`` argument can be a list of command objects. + """ + + #: If set, this is used by the group's :meth:`command` decorator + #: as the default :class:`Command` class. This is useful to make all + #: subcommands use a custom command class. + #: + #: .. versionadded:: 8.0 + command_class: t.Optional[t.Type[Command]] = None + + #: If set, this is used by the group's :meth:`group` decorator + #: as the default :class:`Group` class. This is useful to make all + #: subgroups use a custom group class. + #: + #: If set to the special value :class:`type` (literally + #: ``group_class = type``), this group's class will be used as the + #: default class. This makes a custom group class continue to make + #: custom groups. + #: + #: .. 
versionadded:: 8.0 + group_class: t.Optional[t.Union[t.Type["Group"], t.Type[type]]] = None + # Literal[type] isn't valid, so use Type[type] + + def __init__( + self, + name: t.Optional[str] = None, + commands: t.Optional[ + t.Union[t.MutableMapping[str, Command], t.Sequence[Command]] + ] = None, + **attrs: t.Any, + ) -> None: + super().__init__(name, **attrs) + + if commands is None: + commands = {} + elif isinstance(commands, abc.Sequence): + commands = {c.name: c for c in commands if c.name is not None} + + #: The registered subcommands by their exported names. + self.commands: t.MutableMapping[str, Command] = commands + + def add_command(self, cmd: Command, name: t.Optional[str] = None) -> None: + """Registers another :class:`Command` with this group. If the name + is not provided, the name of the command is used. + """ + name = name or cmd.name + if name is None: + raise TypeError("Command has no name.") + _check_multicommand(self, name, cmd, register=True) + self.commands[name] = cmd + + @t.overload + def command(self, __func: t.Callable[..., t.Any]) -> Command: ... + + @t.overload + def command( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[t.Callable[..., t.Any]], Command]: ... + + def command( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], Command], Command]: + """A shortcut decorator for declaring and attaching a command to + the group. This takes the same arguments as :func:`command` and + immediately registers the created command with this group by + calling :meth:`add_command`. + + To customize the command class used, set the + :attr:`command_class` attribute. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.0 + Added the :attr:`command_class` attribute. + """ + from .decorators import command + + func: t.Optional[t.Callable[..., t.Any]] = None + + if args and callable(args[0]): + assert ( + len(args) == 1 and not kwargs + ), "Use 'command(**kwargs)(callable)' to provide arguments." + (func,) = args + args = () + + if self.command_class and kwargs.get("cls") is None: + kwargs["cls"] = self.command_class + + def decorator(f: t.Callable[..., t.Any]) -> Command: + cmd: Command = command(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + + if func is not None: + return decorator(func) + + return decorator + + @t.overload + def group(self, __func: t.Callable[..., t.Any]) -> "Group": ... + + @t.overload + def group( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[t.Callable[..., t.Any]], "Group"]: ... + + def group( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], "Group"], "Group"]: + """A shortcut decorator for declaring and attaching a group to + the group. This takes the same arguments as :func:`group` and + immediately registers the created group with this group by + calling :meth:`add_command`. + + To customize the group class used, set the :attr:`group_class` + attribute. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.0 + Added the :attr:`group_class` attribute. + """ + from .decorators import group + + func: t.Optional[t.Callable[..., t.Any]] = None + + if args and callable(args[0]): + assert ( + len(args) == 1 and not kwargs + ), "Use 'group(**kwargs)(callable)' to provide arguments." 
+ (func,) = args + args = () + + if self.group_class is not None and kwargs.get("cls") is None: + if self.group_class is type: + kwargs["cls"] = type(self) + else: + kwargs["cls"] = self.group_class + + def decorator(f: t.Callable[..., t.Any]) -> "Group": + cmd: Group = group(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + + if func is not None: + return decorator(func) + + return decorator + + def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: + return self.commands.get(cmd_name) + + def list_commands(self, ctx: Context) -> t.List[str]: + return sorted(self.commands) + + +class CommandCollection(MultiCommand): + """A command collection is a multi command that merges multiple multi + commands together into one. This is a straightforward implementation + that accepts a list of different multi commands as sources and + provides all the commands for each of them. + + See :class:`MultiCommand` and :class:`Command` for the description of + ``name`` and ``attrs``. + """ + + def __init__( + self, + name: t.Optional[str] = None, + sources: t.Optional[t.List[MultiCommand]] = None, + **attrs: t.Any, + ) -> None: + super().__init__(name, **attrs) + #: The list of registered multi commands. + self.sources: t.List[MultiCommand] = sources or [] + + def add_source(self, multi_cmd: MultiCommand) -> None: + """Adds a new multi command to the chain dispatcher.""" + self.sources.append(multi_cmd) + + def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: + for source in self.sources: + rv = source.get_command(ctx, cmd_name) + + if rv is not None: + if self.chain: + _check_multicommand(self, cmd_name, rv) + + return rv + + return None + + def list_commands(self, ctx: Context) -> t.List[str]: + rv: t.Set[str] = set() + + for source in self.sources: + rv.update(source.list_commands(ctx)) + + return sorted(rv) + + +def _check_iter(value: t.Any) -> t.Iterator[t.Any]: + """Check if the value is iterable but not a string. Raises a type + error, or return an iterator over the value. + """ + if isinstance(value, str): + raise TypeError + + return iter(value) + + +class Parameter: + r"""A parameter to a command comes in two versions: they are either + :class:`Option`\s or :class:`Argument`\s. Other subclasses are currently + not supported by design as some of the internals for parsing are + intentionally not finalized. + + Some settings are supported by both options and arguments. + + :param param_decls: the parameter declarations for this option or + argument. This is a list of flags or argument + names. + :param type: the type that should be used. Either a :class:`ParamType` + or a Python type. The latter is converted into the former + automatically if supported. + :param required: controls if this is optional or not. + :param default: the default value if omitted. This can also be a callable, + in which case it's invoked when the default is needed + without any arguments. + :param callback: A function to further process or validate the value + after type conversion. It is called as ``f(ctx, param, value)`` + and must return the value. It is called for all sources, + including prompts. + :param nargs: the number of arguments to match. If not ``1`` the return + value is a tuple instead of single value. The default for + nargs is ``1`` (except if the type is a tuple, then it's + the arity of the tuple). If ``nargs=-1``, all remaining + parameters are collected. + :param metavar: how the value is represented in the help page. 
+    :param expose_value: if this is `True` then the value is passed onwards
+                         to the command callback and stored on the context,
+                         otherwise it's skipped.
+    :param is_eager: eager values are processed before non eager ones. This
+                     should not be set for arguments or it will invert the
+                     order of processing.
+    :param envvar: a string or list of strings that are environment variables
+                   that should be checked.
+    :param shell_complete: A function that returns custom shell
+        completions. Used instead of the param's type completion if
+        given. Takes ``ctx, param, incomplete`` and must return a list
+        of :class:`~click.shell_completion.CompletionItem` or a list of
+        strings.
+
+    .. versionchanged:: 8.0
+        ``process_value`` validates required parameters and bounded
+        ``nargs``, and invokes the parameter callback before returning
+        the value. This allows the callback to validate prompts.
+        ``full_process_value`` is removed.
+
+    .. versionchanged:: 8.0
+        ``autocompletion`` is renamed to ``shell_complete`` and has new
+        semantics described above. The old name is deprecated and will
+        be removed in 8.1, until then it will be wrapped to match the
+        new requirements.
+
+    .. versionchanged:: 8.0
+        For ``multiple=True, nargs>1``, the default must be a list of
+        tuples.
+
+    .. versionchanged:: 8.0
+        Setting a default is no longer required for ``nargs>1``, it will
+        default to ``None``. ``multiple=True`` or ``nargs=-1`` will
+        default to ``()``.
+
+    .. versionchanged:: 7.1
+        Empty environment variables are ignored rather than taking the
+        empty string value. This makes it possible for scripts to clear
+        variables if they can't unset them.
+
+    .. versionchanged:: 2.0
+        Changed signature for parameter callback to also be passed the
+        parameter. The old callback format will still work, but it will
+        raise a warning to give you a chance to migrate the code more
+        easily.
+    """
+
+    param_type_name = "parameter"
+
+    def __init__(
+        self,
+        param_decls: t.Optional[t.Sequence[str]] = None,
+        type: t.Optional[t.Union[types.ParamType, t.Any]] = None,
+        required: bool = False,
+        default: t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]] = None,
+        callback: t.Optional[t.Callable[[Context, "Parameter", t.Any], t.Any]] = None,
+        nargs: t.Optional[int] = None,
+        multiple: bool = False,
+        metavar: t.Optional[str] = None,
+        expose_value: bool = True,
+        is_eager: bool = False,
+        envvar: t.Optional[t.Union[str, t.Sequence[str]]] = None,
+        shell_complete: t.Optional[
+            t.Callable[
+                [Context, "Parameter", str],
+                t.Union[t.List["CompletionItem"], t.List[str]],
+            ]
+        ] = None,
+    ) -> None:
+        self.name: t.Optional[str]
+        self.opts: t.List[str]
+        self.secondary_opts: t.List[str]
+        self.name, self.opts, self.secondary_opts = self._parse_decls(
+            param_decls or (), expose_value
+        )
+        self.type: types.ParamType = types.convert_type(type, default)
+
+        # Default nargs to what the type tells us if we have that
+        # information available.
+        if nargs is None:
+            if self.type.is_composite:
+                nargs = self.type.arity
+            else:
+                nargs = 1
+
+        self.required = required
+        self.callback = callback
+        self.nargs = nargs
+        self.multiple = multiple
+        self.expose_value = expose_value
+        self.default = default
+        self.is_eager = is_eager
+        self.metavar = metavar
+        self.envvar = envvar
+        self._custom_shell_complete = shell_complete
+
+        if __debug__:
+            if self.type.is_composite and nargs != self.type.arity:
+                raise ValueError(
+                    f"'nargs' must be {self.type.arity} (or None) for"
+                    f" type {self.type!r}, but it was {nargs}."
+ ) + + # Skip no default or callable default. + check_default = default if not callable(default) else None + + if check_default is not None: + if multiple: + try: + # Only check the first value against nargs. + check_default = next(_check_iter(check_default), None) + except TypeError: + raise ValueError( + "'default' must be a list when 'multiple' is true." + ) from None + + # Can be None for multiple with empty default. + if nargs != 1 and check_default is not None: + try: + _check_iter(check_default) + except TypeError: + if multiple: + message = ( + "'default' must be a list of lists when 'multiple' is" + " true and 'nargs' != 1." + ) + else: + message = "'default' must be a list when 'nargs' != 1." + + raise ValueError(message) from None + + if nargs > 1 and len(check_default) != nargs: + subject = "item length" if multiple else "length" + raise ValueError( + f"'default' {subject} must match nargs={nargs}." + ) + + def to_info_dict(self) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + .. versionadded:: 8.0 + """ + return { + "name": self.name, + "param_type_name": self.param_type_name, + "opts": self.opts, + "secondary_opts": self.secondary_opts, + "type": self.type.to_info_dict(), + "required": self.required, + "nargs": self.nargs, + "multiple": self.multiple, + "default": self.default, + "envvar": self.envvar, + } + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.name}>" + + def _parse_decls( + self, decls: t.Sequence[str], expose_value: bool + ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: + raise NotImplementedError() + + @property + def human_readable_name(self) -> str: + """Returns the human readable name of this parameter. This is the + same as the name for options, but the metavar for arguments. + """ + return self.name # type: ignore + + def make_metavar(self) -> str: + if self.metavar is not None: + return self.metavar + + metavar = self.type.get_metavar(self) + + if metavar is None: + metavar = self.type.name.upper() + + if self.nargs != 1: + metavar += "..." + + return metavar + + @t.overload + def get_default( + self, ctx: Context, call: "te.Literal[True]" = True + ) -> t.Optional[t.Any]: ... + + @t.overload + def get_default( + self, ctx: Context, call: bool = ... + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: ... + + def get_default( + self, ctx: Context, call: bool = True + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + """Get the default for the parameter. Tries + :meth:`Context.lookup_default` first, then the local default. + + :param ctx: Current context. + :param call: If the default is a callable, call it. Disable to + return the callable instead. + + .. versionchanged:: 8.0.2 + Type casting is no longer performed when getting a default. + + .. versionchanged:: 8.0.1 + Type casting can fail in resilient parsing mode. Invalid + defaults will not prevent showing help text. + + .. versionchanged:: 8.0 + Looks at ``ctx.default_map`` first. + + .. versionchanged:: 8.0 + Added the ``call`` parameter. 
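+
+        A minimal illustrative sketch (assumes an active ``ctx`` with no
+        ``default_map`` entry for this parameter)::
+
+            opt = Option(["--user"], default=lambda: "guest")
+            opt.get_default(ctx)              # -> "guest" (callable invoked)
+            opt.get_default(ctx, call=False)  # -> the callable itself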
+ """ + value = ctx.lookup_default(self.name, call=False) # type: ignore + + if value is None: + value = self.default + + if call and callable(value): + value = value() + + return value + + def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: + raise NotImplementedError() + + def consume_value( + self, ctx: Context, opts: t.Mapping[str, t.Any] + ) -> t.Tuple[t.Any, ParameterSource]: + value = opts.get(self.name) # type: ignore + source = ParameterSource.COMMANDLINE + + if value is None: + value = self.value_from_envvar(ctx) + source = ParameterSource.ENVIRONMENT + + if value is None: + value = ctx.lookup_default(self.name) # type: ignore + source = ParameterSource.DEFAULT_MAP + + if value is None: + value = self.get_default(ctx) + source = ParameterSource.DEFAULT + + return value, source + + def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any: + """Convert and validate a value against the option's + :attr:`type`, :attr:`multiple`, and :attr:`nargs`. + """ + if value is None: + return () if self.multiple or self.nargs == -1 else None + + def check_iter(value: t.Any) -> t.Iterator[t.Any]: + try: + return _check_iter(value) + except TypeError: + # This should only happen when passing in args manually, + # the parser should construct an iterable when parsing + # the command line. + raise BadParameter( + _("Value must be an iterable."), ctx=ctx, param=self + ) from None + + if self.nargs == 1 or self.type.is_composite: + + def convert(value: t.Any) -> t.Any: + return self.type(value, param=self, ctx=ctx) + + elif self.nargs == -1: + + def convert(value: t.Any) -> t.Any: # t.Tuple[t.Any, ...] + return tuple(self.type(x, self, ctx) for x in check_iter(value)) + + else: # nargs > 1 + + def convert(value: t.Any) -> t.Any: # t.Tuple[t.Any, ...] 
+ value = tuple(check_iter(value)) + + if len(value) != self.nargs: + raise BadParameter( + ngettext( + "Takes {nargs} values but 1 was given.", + "Takes {nargs} values but {len} were given.", + len(value), + ).format(nargs=self.nargs, len=len(value)), + ctx=ctx, + param=self, + ) + + return tuple(self.type(x, self, ctx) for x in value) + + if self.multiple: + return tuple(convert(x) for x in check_iter(value)) + + return convert(value) + + def value_is_missing(self, value: t.Any) -> bool: + if value is None: + return True + + if (self.nargs != 1 or self.multiple) and value == (): + return True + + return False + + def process_value(self, ctx: Context, value: t.Any) -> t.Any: + value = self.type_cast_value(ctx, value) + + if self.required and self.value_is_missing(value): + raise MissingParameter(ctx=ctx, param=self) + + if self.callback is not None: + value = self.callback(ctx, self, value) + + return value + + def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]: + if self.envvar is None: + return None + + if isinstance(self.envvar, str): + rv = os.environ.get(self.envvar) + + if rv: + return rv + else: + for envvar in self.envvar: + rv = os.environ.get(envvar) + + if rv: + return rv + + return None + + def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]: + rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx) + + if rv is not None and self.nargs != 1: + rv = self.type.split_envvar_value(rv) + + return rv + + def handle_parse_result( + self, ctx: Context, opts: t.Mapping[str, t.Any], args: t.List[str] + ) -> t.Tuple[t.Any, t.List[str]]: + with augment_usage_errors(ctx, param=self): + value, source = self.consume_value(ctx, opts) + ctx.set_parameter_source(self.name, source) # type: ignore + + try: + value = self.process_value(ctx, value) + except Exception: + if not ctx.resilient_parsing: + raise + + value = None + + if self.expose_value: + ctx.params[self.name] = value # type: ignore + + return value, args + + def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]: + pass + + def get_usage_pieces(self, ctx: Context) -> t.List[str]: + return [] + + def get_error_hint(self, ctx: Context) -> str: + """Get a stringified version of the param for use in error messages to + indicate which param caused the error. + """ + hint_list = self.opts or [self.human_readable_name] + return " / ".join(f"'{x}'" for x in hint_list) + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. If a + ``shell_complete`` function was given during init, it is used. + Otherwise, the :attr:`type` + :meth:`~click.types.ParamType.shell_complete` function is used. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + if self._custom_shell_complete is not None: + results = self._custom_shell_complete(ctx, self, incomplete) + + if results and isinstance(results[0], str): + from click.shell_completion import CompletionItem + + results = [CompletionItem(c) for c in results] + + return t.cast(t.List["CompletionItem"], results) + + return self.type.shell_complete(ctx, self, incomplete) + + +class Option(Parameter): + """Options are usually optional values on the command line and + have some extra features that arguments don't have. + + All other parameters are passed onwards to the parameter constructor. + + :param show_default: Show the default value for this option in its + help text. 
Values are not shown by default, unless + :attr:`Context.show_default` is ``True``. If this value is a + string, it shows that string in parentheses instead of the + actual value. This is particularly useful for dynamic options. + For single option boolean flags, the default remains hidden if + its value is ``False``. + :param show_envvar: Controls if an environment variable should be + shown on the help page. Normally, environment variables are not + shown. + :param prompt: If set to ``True`` or a non empty string then the + user will be prompted for input. If set to ``True`` the prompt + will be the option name capitalized. + :param confirmation_prompt: Prompt a second time to confirm the + value if it was prompted for. Can be set to a string instead of + ``True`` to customize the message. + :param prompt_required: If set to ``False``, the user will be + prompted for input only when the option was specified as a flag + without a value. + :param hide_input: If this is ``True`` then the input on the prompt + will be hidden from the user. This is useful for password input. + :param is_flag: forces this option to act as a flag. The default is + auto detection. + :param flag_value: which value should be used for this flag if it's + enabled. This is set to a boolean automatically if + the option string contains a slash to mark two options. + :param multiple: if this is set to `True` then the argument is accepted + multiple times and recorded. This is similar to ``nargs`` + in how it works but supports arbitrary number of + arguments. + :param count: this flag makes an option increment an integer. + :param allow_from_autoenv: if this is enabled then the value of this + parameter will be pulled from an environment + variable in case a prefix is defined on the + context. + :param help: the help string. + :param hidden: hide this option from help outputs. + :param attrs: Other command arguments described in :class:`Parameter`. + + .. versionchanged:: 8.1.0 + Help text indentation is cleaned here instead of only in the + ``@option`` decorator. + + .. versionchanged:: 8.1.0 + The ``show_default`` parameter overrides + ``Context.show_default``. + + .. versionchanged:: 8.1.0 + The default of a single option boolean flag is not shown if the + default value is ``False``. + + .. versionchanged:: 8.0.1 + ``type`` is detected from ``flag_value`` if given. 
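+
+    A short illustrative sketch (the command and option names are made
+    up for this example)::
+
+        @click.command()
+        @click.option("--count", default=1, show_default=True)
+        @click.option("--debug/--no-debug", default=False)
+        def cli(count: int, debug: bool) -> None:
+            click.echo(f"count={count}, debug={debug}")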
+ """ + + param_type_name = "option" + + def __init__( + self, + param_decls: t.Optional[t.Sequence[str]] = None, + show_default: t.Union[bool, str, None] = None, + prompt: t.Union[bool, str] = False, + confirmation_prompt: t.Union[bool, str] = False, + prompt_required: bool = True, + hide_input: bool = False, + is_flag: t.Optional[bool] = None, + flag_value: t.Optional[t.Any] = None, + multiple: bool = False, + count: bool = False, + allow_from_autoenv: bool = True, + type: t.Optional[t.Union[types.ParamType, t.Any]] = None, + help: t.Optional[str] = None, + hidden: bool = False, + show_choices: bool = True, + show_envvar: bool = False, + **attrs: t.Any, + ) -> None: + if help: + help = inspect.cleandoc(help) + + default_is_missing = "default" not in attrs + super().__init__(param_decls, type=type, multiple=multiple, **attrs) + + if prompt is True: + if self.name is None: + raise TypeError("'name' is required with 'prompt=True'.") + + prompt_text: t.Optional[str] = self.name.replace("_", " ").capitalize() + elif prompt is False: + prompt_text = None + else: + prompt_text = prompt + + self.prompt = prompt_text + self.confirmation_prompt = confirmation_prompt + self.prompt_required = prompt_required + self.hide_input = hide_input + self.hidden = hidden + + # If prompt is enabled but not required, then the option can be + # used as a flag to indicate using prompt or flag_value. + self._flag_needs_value = self.prompt is not None and not self.prompt_required + + if is_flag is None: + if flag_value is not None: + # Implicitly a flag because flag_value was set. + is_flag = True + elif self._flag_needs_value: + # Not a flag, but when used as a flag it shows a prompt. + is_flag = False + else: + # Implicitly a flag because flag options were given. + is_flag = bool(self.secondary_opts) + elif is_flag is False and not self._flag_needs_value: + # Not a flag, and prompt is not enabled, can be used as a + # flag if flag_value is set. + self._flag_needs_value = flag_value is not None + + self.default: t.Union[t.Any, t.Callable[[], t.Any]] + + if is_flag and default_is_missing and not self.required: + if multiple: + self.default = () + else: + self.default = False + + if flag_value is None: + flag_value = not self.default + + self.type: types.ParamType + if is_flag and type is None: + # Re-guess the type from the flag value instead of the + # default. + self.type = types.convert_type(None, flag_value) + + self.is_flag: bool = is_flag + self.is_bool_flag: bool = is_flag and isinstance(self.type, types.BoolParamType) + self.flag_value: t.Any = flag_value + + # Counting + self.count = count + if count: + if type is None: + self.type = types.IntRange(min=0) + if default_is_missing: + self.default = 0 + + self.allow_from_autoenv = allow_from_autoenv + self.help = help + self.show_default = show_default + self.show_choices = show_choices + self.show_envvar = show_envvar + + if __debug__: + if self.nargs == -1: + raise TypeError("nargs=-1 is not supported for options.") + + if self.prompt and self.is_flag and not self.is_bool_flag: + raise TypeError("'prompt' is not valid for non-boolean flag.") + + if not self.is_bool_flag and self.secondary_opts: + raise TypeError("Secondary flag is not valid for non-boolean flag.") + + if self.is_bool_flag and self.hide_input and self.prompt is not None: + raise TypeError( + "'prompt' with 'hide_input' is not valid for boolean flag." 
+ ) + + if self.count: + if self.multiple: + raise TypeError("'count' is not valid with 'multiple'.") + + if self.is_flag: + raise TypeError("'count' is not valid with 'is_flag'.") + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + help=self.help, + prompt=self.prompt, + is_flag=self.is_flag, + flag_value=self.flag_value, + count=self.count, + hidden=self.hidden, + ) + return info_dict + + def _parse_decls( + self, decls: t.Sequence[str], expose_value: bool + ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: + opts = [] + secondary_opts = [] + name = None + possible_names = [] + + for decl in decls: + if decl.isidentifier(): + if name is not None: + raise TypeError(f"Name '{name}' defined twice") + name = decl + else: + split_char = ";" if decl[:1] == "/" else "/" + if split_char in decl: + first, second = decl.split(split_char, 1) + first = first.rstrip() + if first: + possible_names.append(split_opt(first)) + opts.append(first) + second = second.lstrip() + if second: + secondary_opts.append(second.lstrip()) + if first == second: + raise ValueError( + f"Boolean option {decl!r} cannot use the" + " same flag for true/false." + ) + else: + possible_names.append(split_opt(decl)) + opts.append(decl) + + if name is None and possible_names: + possible_names.sort(key=lambda x: -len(x[0])) # group long options first + name = possible_names[0][1].replace("-", "_").lower() + if not name.isidentifier(): + name = None + + if name is None: + if not expose_value: + return None, opts, secondary_opts + raise TypeError( + f"Could not determine name for option with declarations {decls!r}" + ) + + if not opts and not secondary_opts: + raise TypeError( + f"No options defined but a name was passed ({name})." + " Did you mean to declare an argument instead? Did" + f" you mean to pass '--{name}'?" 
+ ) + + return name, opts, secondary_opts + + def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: + if self.multiple: + action = "append" + elif self.count: + action = "count" + else: + action = "store" + + if self.is_flag: + action = f"{action}_const" + + if self.is_bool_flag and self.secondary_opts: + parser.add_option( + obj=self, opts=self.opts, dest=self.name, action=action, const=True + ) + parser.add_option( + obj=self, + opts=self.secondary_opts, + dest=self.name, + action=action, + const=False, + ) + else: + parser.add_option( + obj=self, + opts=self.opts, + dest=self.name, + action=action, + const=self.flag_value, + ) + else: + parser.add_option( + obj=self, + opts=self.opts, + dest=self.name, + action=action, + nargs=self.nargs, + ) + + def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]: + if self.hidden: + return None + + any_prefix_is_slash = False + + def _write_opts(opts: t.Sequence[str]) -> str: + nonlocal any_prefix_is_slash + + rv, any_slashes = join_options(opts) + + if any_slashes: + any_prefix_is_slash = True + + if not self.is_flag and not self.count: + rv += f" {self.make_metavar()}" + + return rv + + rv = [_write_opts(self.opts)] + + if self.secondary_opts: + rv.append(_write_opts(self.secondary_opts)) + + help = self.help or "" + extra = [] + + if self.show_envvar: + envvar = self.envvar + + if envvar is None: + if ( + self.allow_from_autoenv + and ctx.auto_envvar_prefix is not None + and self.name is not None + ): + envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" + + if envvar is not None: + var_str = ( + envvar + if isinstance(envvar, str) + else ", ".join(str(d) for d in envvar) + ) + extra.append(_("env var: {var}").format(var=var_str)) + + # Temporarily enable resilient parsing to avoid type casting + # failing for the default. Might be possible to extend this to + # help formatting in general. + resilient = ctx.resilient_parsing + ctx.resilient_parsing = True + + try: + default_value = self.get_default(ctx, call=False) + finally: + ctx.resilient_parsing = resilient + + show_default = False + show_default_is_str = False + + if self.show_default is not None: + if isinstance(self.show_default, str): + show_default_is_str = show_default = True + else: + show_default = self.show_default + elif ctx.show_default is not None: + show_default = ctx.show_default + + if show_default_is_str or (show_default and (default_value is not None)): + if show_default_is_str: + default_string = f"({self.show_default})" + elif isinstance(default_value, (list, tuple)): + default_string = ", ".join(str(d) for d in default_value) + elif inspect.isfunction(default_value): + default_string = _("(dynamic)") + elif self.is_bool_flag and self.secondary_opts: + # For boolean flags that have distinct True/False opts, + # use the opt without prefix instead of the value. 
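+                # (E.g. a "--debug/--no-debug" flag whose default is
+                # False is rendered as "default: no-debug".)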
+ default_string = split_opt( + (self.opts if default_value else self.secondary_opts)[0] + )[1] + elif self.is_bool_flag and not self.secondary_opts and not default_value: + default_string = "" + elif default_value == "": + default_string = '""' + else: + default_string = str(default_value) + + if default_string: + extra.append(_("default: {default}").format(default=default_string)) + + if ( + isinstance(self.type, types._NumberRangeBase) + # skip count with default range type + and not (self.count and self.type.min == 0 and self.type.max is None) + ): + range_str = self.type._describe_range() + + if range_str: + extra.append(range_str) + + if self.required: + extra.append(_("required")) + + if extra: + extra_str = "; ".join(extra) + help = f"{help} [{extra_str}]" if help else f"[{extra_str}]" + + return ("; " if any_prefix_is_slash else " / ").join(rv), help + + @t.overload + def get_default( + self, ctx: Context, call: "te.Literal[True]" = True + ) -> t.Optional[t.Any]: ... + + @t.overload + def get_default( + self, ctx: Context, call: bool = ... + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: ... + + def get_default( + self, ctx: Context, call: bool = True + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + # If we're a non boolean flag our default is more complex because + # we need to look at all flags in the same group to figure out + # if we're the default one in which case we return the flag + # value as default. + if self.is_flag and not self.is_bool_flag: + for param in ctx.command.params: + if param.name == self.name and param.default: + return t.cast(Option, param).flag_value + + return None + + return super().get_default(ctx, call=call) + + def prompt_for_value(self, ctx: Context) -> t.Any: + """This is an alternative flow that can be activated in the full + value processing if a value does not exist. It will prompt the + user until a valid value exists and then returns the processed + value as result. + """ + assert self.prompt is not None + + # Calculate the default before prompting anything to be stable. + default = self.get_default(ctx) + + # If this is a prompt for a flag we need to handle this + # differently. + if self.is_bool_flag: + return confirm(self.prompt, default) + + return prompt( + self.prompt, + default=default, + type=self.type, + hide_input=self.hide_input, + show_choices=self.show_choices, + confirmation_prompt=self.confirmation_prompt, + value_proc=lambda x: self.process_value(ctx, x), + ) + + def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]: + rv = super().resolve_envvar_value(ctx) + + if rv is not None: + return rv + + if ( + self.allow_from_autoenv + and ctx.auto_envvar_prefix is not None + and self.name is not None + ): + envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" + rv = os.environ.get(envvar) + + if rv: + return rv + + return None + + def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]: + rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx) + + if rv is None: + return None + + value_depth = (self.nargs != 1) + bool(self.multiple) + + if value_depth > 0: + rv = self.type.split_envvar_value(rv) + + if self.multiple and self.nargs != 1: + rv = batch(rv, self.nargs) + + return rv + + def consume_value( + self, ctx: Context, opts: t.Mapping[str, "Parameter"] + ) -> t.Tuple[t.Any, ParameterSource]: + value, source = super().consume_value(ctx, opts) + + # The parser will emit a sentinel value if the option can be + # given as a flag without a value. 
This is different from None + # to distinguish from the flag not being given at all. + if value is _flag_needs_value: + if self.prompt is not None and not ctx.resilient_parsing: + value = self.prompt_for_value(ctx) + source = ParameterSource.PROMPT + else: + value = self.flag_value + source = ParameterSource.COMMANDLINE + + elif ( + self.multiple + and value is not None + and any(v is _flag_needs_value for v in value) + ): + value = [self.flag_value if v is _flag_needs_value else v for v in value] + source = ParameterSource.COMMANDLINE + + # The value wasn't set, or used the param's default, prompt if + # prompting is enabled. + elif ( + source in {None, ParameterSource.DEFAULT} + and self.prompt is not None + and (self.required or self.prompt_required) + and not ctx.resilient_parsing + ): + value = self.prompt_for_value(ctx) + source = ParameterSource.PROMPT + + return value, source + + +class Argument(Parameter): + """Arguments are positional parameters to a command. They generally + provide fewer features than options but can have infinite ``nargs`` + and are required by default. + + All parameters are passed onwards to the constructor of :class:`Parameter`. + """ + + param_type_name = "argument" + + def __init__( + self, + param_decls: t.Sequence[str], + required: t.Optional[bool] = None, + **attrs: t.Any, + ) -> None: + if required is None: + if attrs.get("default") is not None: + required = False + else: + required = attrs.get("nargs", 1) > 0 + + if "multiple" in attrs: + raise TypeError("__init__() got an unexpected keyword argument 'multiple'.") + + super().__init__(param_decls, required=required, **attrs) + + if __debug__: + if self.default is not None and self.nargs == -1: + raise TypeError("'default' is not supported for nargs=-1.") + + @property + def human_readable_name(self) -> str: + if self.metavar is not None: + return self.metavar + return self.name.upper() # type: ignore + + def make_metavar(self) -> str: + if self.metavar is not None: + return self.metavar + var = self.type.get_metavar(self) + if not var: + var = self.name.upper() # type: ignore + if not self.required: + var = f"[{var}]" + if self.nargs != 1: + var += "..." + return var + + def _parse_decls( + self, decls: t.Sequence[str], expose_value: bool + ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: + if not decls: + if not expose_value: + return None, [], [] + raise TypeError("Argument is marked as exposed, but does not have a name.") + if len(decls) == 1: + name = arg = decls[0] + name = name.replace("-", "_").lower() + else: + raise TypeError( + "Arguments take exactly one parameter declaration, got" + f" {len(decls)}." 
+ ) + return name, [arg], [] + + def get_usage_pieces(self, ctx: Context) -> t.List[str]: + return [self.make_metavar()] + + def get_error_hint(self, ctx: Context) -> str: + return f"'{self.make_metavar()}'" + + def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: + parser.add_argument(dest=self.name, nargs=self.nargs, obj=self) diff --git a/venv/lib/python3.11/site-packages/click/decorators.py b/venv/lib/python3.11/site-packages/click/decorators.py new file mode 100644 index 0000000..bcf8906 --- /dev/null +++ b/venv/lib/python3.11/site-packages/click/decorators.py @@ -0,0 +1,562 @@ +import inspect +import types +import typing as t +from functools import update_wrapper +from gettext import gettext as _ + +from .core import Argument +from .core import Command +from .core import Context +from .core import Group +from .core import Option +from .core import Parameter +from .globals import get_current_context +from .utils import echo + +if t.TYPE_CHECKING: + import typing_extensions as te + + P = te.ParamSpec("P") + +R = t.TypeVar("R") +T = t.TypeVar("T") +_AnyCallable = t.Callable[..., t.Any] +FC = t.TypeVar("FC", bound=t.Union[_AnyCallable, Command]) + + +def pass_context(f: "t.Callable[te.Concatenate[Context, P], R]") -> "t.Callable[P, R]": + """Marks a callback as wanting to receive the current context + object as first argument. + """ + + def new_func(*args: "P.args", **kwargs: "P.kwargs") -> "R": + return f(get_current_context(), *args, **kwargs) + + return update_wrapper(new_func, f) + + +def pass_obj(f: "t.Callable[te.Concatenate[t.Any, P], R]") -> "t.Callable[P, R]": + """Similar to :func:`pass_context`, but only pass the object on the + context onwards (:attr:`Context.obj`). This is useful if that object + represents the state of a nested system. + """ + + def new_func(*args: "P.args", **kwargs: "P.kwargs") -> "R": + return f(get_current_context().obj, *args, **kwargs) + + return update_wrapper(new_func, f) + + +def make_pass_decorator( + object_type: t.Type[T], ensure: bool = False +) -> t.Callable[["t.Callable[te.Concatenate[T, P], R]"], "t.Callable[P, R]"]: + """Given an object type this creates a decorator that will work + similar to :func:`pass_obj` but instead of passing the object of the + current context, it will find the innermost context of type + :func:`object_type`. + + This generates a decorator that works roughly like this:: + + from functools import update_wrapper + + def decorator(f): + @pass_context + def new_func(ctx, *args, **kwargs): + obj = ctx.find_object(object_type) + return ctx.invoke(f, obj, *args, **kwargs) + return update_wrapper(new_func, f) + return decorator + + :param object_type: the type of the object to pass. + :param ensure: if set to `True`, a new object will be created and + remembered on the context if it's not there yet. + """ + + def decorator(f: "t.Callable[te.Concatenate[T, P], R]") -> "t.Callable[P, R]": + def new_func(*args: "P.args", **kwargs: "P.kwargs") -> "R": + ctx = get_current_context() + + obj: t.Optional[T] + if ensure: + obj = ctx.ensure_object(object_type) + else: + obj = ctx.find_object(object_type) + + if obj is None: + raise RuntimeError( + "Managed to invoke callback without a context" + f" object of type {object_type.__name__!r}" + " existing." 
+ ) + + return ctx.invoke(f, obj, *args, **kwargs) + + return update_wrapper(new_func, f) + + return decorator + + +def pass_meta_key( + key: str, *, doc_description: t.Optional[str] = None +) -> "t.Callable[[t.Callable[te.Concatenate[t.Any, P], R]], t.Callable[P, R]]": + """Create a decorator that passes a key from + :attr:`click.Context.meta` as the first argument to the decorated + function. + + :param key: Key in ``Context.meta`` to pass. + :param doc_description: Description of the object being passed, + inserted into the decorator's docstring. Defaults to "the 'key' + key from Context.meta". + + .. versionadded:: 8.0 + """ + + def decorator(f: "t.Callable[te.Concatenate[t.Any, P], R]") -> "t.Callable[P, R]": + def new_func(*args: "P.args", **kwargs: "P.kwargs") -> R: + ctx = get_current_context() + obj = ctx.meta[key] + return ctx.invoke(f, obj, *args, **kwargs) + + return update_wrapper(new_func, f) + + if doc_description is None: + doc_description = f"the {key!r} key from :attr:`click.Context.meta`" + + decorator.__doc__ = ( + f"Decorator that passes {doc_description} as the first argument" + " to the decorated function." + ) + return decorator + + +CmdType = t.TypeVar("CmdType", bound=Command) + + +# variant: no call, directly as decorator for a function. +@t.overload +def command(name: _AnyCallable) -> Command: ... + + +# variant: with positional name and with positional or keyword cls argument: +# @command(namearg, CommandCls, ...) or @command(namearg, cls=CommandCls, ...) +@t.overload +def command( + name: t.Optional[str], + cls: t.Type[CmdType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], CmdType]: ... + + +# variant: name omitted, cls _must_ be a keyword argument, @command(cls=CommandCls, ...) +@t.overload +def command( + name: None = None, + *, + cls: t.Type[CmdType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], CmdType]: ... + + +# variant: with optional string name, no cls argument provided. +@t.overload +def command( + name: t.Optional[str] = ..., cls: None = None, **attrs: t.Any +) -> t.Callable[[_AnyCallable], Command]: ... + + +def command( + name: t.Union[t.Optional[str], _AnyCallable] = None, + cls: t.Optional[t.Type[CmdType]] = None, + **attrs: t.Any, +) -> t.Union[Command, t.Callable[[_AnyCallable], t.Union[Command, CmdType]]]: + r"""Creates a new :class:`Command` and uses the decorated function as + callback. This will also automatically attach all decorated + :func:`option`\s and :func:`argument`\s as parameters to the command. + + The name of the command defaults to the name of the function with + underscores replaced by dashes. If you want to change that, you can + pass the intended name as the first argument. + + All keyword arguments are forwarded to the underlying command class. + For the ``params`` argument, any decorated params are appended to + the end of the list. + + Once decorated the function turns into a :class:`Command` instance + that can be invoked as a command line utility or be attached to a + command :class:`Group`. + + :param name: the name of the command. This defaults to the function + name with underscores replaced by dashes. + :param cls: the command class to instantiate. This defaults to + :class:`Command`. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.1 + The ``params`` argument can be used. Decorated params are + appended to the end of the list. 
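+
+    A minimal illustrative sketch (``greet`` is a made-up name)::
+
+        @click.command("greet")
+        def greet() -> None:
+            click.echo("Hello!")
+
+        # ``greet`` is now a Command instance named "greet".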
+ """ + + func: t.Optional[t.Callable[[_AnyCallable], t.Any]] = None + + if callable(name): + func = name + name = None + assert cls is None, "Use 'command(cls=cls)(callable)' to specify a class." + assert not attrs, "Use 'command(**kwargs)(callable)' to provide arguments." + + if cls is None: + cls = t.cast(t.Type[CmdType], Command) + + def decorator(f: _AnyCallable) -> CmdType: + if isinstance(f, Command): + raise TypeError("Attempted to convert a callback into a command twice.") + + attr_params = attrs.pop("params", None) + params = attr_params if attr_params is not None else [] + + try: + decorator_params = f.__click_params__ # type: ignore + except AttributeError: + pass + else: + del f.__click_params__ # type: ignore + params.extend(reversed(decorator_params)) + + if attrs.get("help") is None: + attrs["help"] = f.__doc__ + + if t.TYPE_CHECKING: + assert cls is not None + assert not callable(name) + + cmd = cls( + name=name or f.__name__.lower().replace("_", "-"), + callback=f, + params=params, + **attrs, + ) + cmd.__doc__ = f.__doc__ + return cmd + + if func is not None: + return decorator(func) + + return decorator + + +GrpType = t.TypeVar("GrpType", bound=Group) + + +# variant: no call, directly as decorator for a function. +@t.overload +def group(name: _AnyCallable) -> Group: ... + + +# variant: with positional name and with positional or keyword cls argument: +# @group(namearg, GroupCls, ...) or @group(namearg, cls=GroupCls, ...) +@t.overload +def group( + name: t.Optional[str], + cls: t.Type[GrpType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], GrpType]: ... + + +# variant: name omitted, cls _must_ be a keyword argument, @group(cmd=GroupCls, ...) +@t.overload +def group( + name: None = None, + *, + cls: t.Type[GrpType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], GrpType]: ... + + +# variant: with optional string name, no cls argument provided. +@t.overload +def group( + name: t.Optional[str] = ..., cls: None = None, **attrs: t.Any +) -> t.Callable[[_AnyCallable], Group]: ... + + +def group( + name: t.Union[str, _AnyCallable, None] = None, + cls: t.Optional[t.Type[GrpType]] = None, + **attrs: t.Any, +) -> t.Union[Group, t.Callable[[_AnyCallable], t.Union[Group, GrpType]]]: + """Creates a new :class:`Group` with a function as callback. This + works otherwise the same as :func:`command` just that the `cls` + parameter is set to :class:`Group`. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + """ + if cls is None: + cls = t.cast(t.Type[GrpType], Group) + + if callable(name): + return command(cls=cls, **attrs)(name) + + return command(name, cls, **attrs) + + +def _param_memo(f: t.Callable[..., t.Any], param: Parameter) -> None: + if isinstance(f, Command): + f.params.append(param) + else: + if not hasattr(f, "__click_params__"): + f.__click_params__ = [] # type: ignore + + f.__click_params__.append(param) # type: ignore + + +def argument( + *param_decls: str, cls: t.Optional[t.Type[Argument]] = None, **attrs: t.Any +) -> t.Callable[[FC], FC]: + """Attaches an argument to the command. All positional arguments are + passed as parameter declarations to :class:`Argument`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Argument` instance manually + and attaching it to the :attr:`Command.params` list. + + For the default argument class, refer to :class:`Argument` and + :class:`Parameter` for descriptions of parameters. + + :param cls: the argument class to instantiate. 
This defaults to + :class:`Argument`. + :param param_decls: Passed as positional arguments to the constructor of + ``cls``. + :param attrs: Passed as keyword arguments to the constructor of ``cls``. + """ + if cls is None: + cls = Argument + + def decorator(f: FC) -> FC: + _param_memo(f, cls(param_decls, **attrs)) + return f + + return decorator + + +def option( + *param_decls: str, cls: t.Optional[t.Type[Option]] = None, **attrs: t.Any +) -> t.Callable[[FC], FC]: + """Attaches an option to the command. All positional arguments are + passed as parameter declarations to :class:`Option`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Option` instance manually + and attaching it to the :attr:`Command.params` list. + + For the default option class, refer to :class:`Option` and + :class:`Parameter` for descriptions of parameters. + + :param cls: the option class to instantiate. This defaults to + :class:`Option`. + :param param_decls: Passed as positional arguments to the constructor of + ``cls``. + :param attrs: Passed as keyword arguments to the constructor of ``cls``. + """ + if cls is None: + cls = Option + + def decorator(f: FC) -> FC: + _param_memo(f, cls(param_decls, **attrs)) + return f + + return decorator + + +def confirmation_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--yes`` option which shows a prompt before continuing if + not passed. If the prompt is declined, the program will exit. + + :param param_decls: One or more option names. Defaults to the single + value ``"--yes"``. + :param kwargs: Extra arguments are passed to :func:`option`. + """ + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value: + ctx.abort() + + if not param_decls: + param_decls = ("--yes",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("callback", callback) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("prompt", "Do you want to continue?") + kwargs.setdefault("help", "Confirm the action without prompting.") + return option(*param_decls, **kwargs) + + +def password_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--password`` option which prompts for a password, hiding + input and asking to enter the value again for confirmation. + + :param param_decls: One or more option names. Defaults to the single + value ``"--password"``. + :param kwargs: Extra arguments are passed to :func:`option`. + """ + if not param_decls: + param_decls = ("--password",) + + kwargs.setdefault("prompt", True) + kwargs.setdefault("confirmation_prompt", True) + kwargs.setdefault("hide_input", True) + return option(*param_decls, **kwargs) + + +def version_option( + version: t.Optional[str] = None, + *param_decls: str, + package_name: t.Optional[str] = None, + prog_name: t.Optional[str] = None, + message: t.Optional[str] = None, + **kwargs: t.Any, +) -> t.Callable[[FC], FC]: + """Add a ``--version`` option which immediately prints the version + number and exits the program. + + If ``version`` is not provided, Click will try to detect it using + :func:`importlib.metadata.version` to get the version for the + ``package_name``. On Python < 3.8, the ``importlib_metadata`` + backport must be installed. + + If ``package_name`` is not provided, Click will try to detect it by + inspecting the stack frames. This will be used to detect the + version, so it must match the name of the installed package. + + :param version: The version number to show. 
If not provided, Click + will try to detect it. + :param param_decls: One or more option names. Defaults to the single + value ``"--version"``. + :param package_name: The package name to detect the version from. If + not provided, Click will try to detect it. + :param prog_name: The name of the CLI to show in the message. If not + provided, it will be detected from the command. + :param message: The message to show. The values ``%(prog)s``, + ``%(package)s``, and ``%(version)s`` are available. Defaults to + ``"%(prog)s, version %(version)s"``. + :param kwargs: Extra arguments are passed to :func:`option`. + :raise RuntimeError: ``version`` could not be detected. + + .. versionchanged:: 8.0 + Add the ``package_name`` parameter, and the ``%(package)s`` + value for messages. + + .. versionchanged:: 8.0 + Use :mod:`importlib.metadata` instead of ``pkg_resources``. The + version is detected based on the package name, not the entry + point name. The Python package name must match the installed + package name, or be passed with ``package_name=``. + """ + if message is None: + message = _("%(prog)s, version %(version)s") + + if version is None and package_name is None: + frame = inspect.currentframe() + f_back = frame.f_back if frame is not None else None + f_globals = f_back.f_globals if f_back is not None else None + # break reference cycle + # https://docs.python.org/3/library/inspect.html#the-interpreter-stack + del frame + + if f_globals is not None: + package_name = f_globals.get("__name__") + + if package_name == "__main__": + package_name = f_globals.get("__package__") + + if package_name: + package_name = package_name.partition(".")[0] + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value or ctx.resilient_parsing: + return + + nonlocal prog_name + nonlocal version + + if prog_name is None: + prog_name = ctx.find_root().info_name + + if version is None and package_name is not None: + metadata: t.Optional[types.ModuleType] + + try: + from importlib import metadata + except ImportError: + # Python < 3.8 + import importlib_metadata as metadata # type: ignore + + try: + version = metadata.version(package_name) # type: ignore + except metadata.PackageNotFoundError: # type: ignore + raise RuntimeError( + f"{package_name!r} is not installed. Try passing" + " 'package_name' instead." + ) from None + + if version is None: + raise RuntimeError( + f"Could not determine the version for {package_name!r} automatically." + ) + + echo( + message % {"prog": prog_name, "package": package_name, "version": version}, + color=ctx.color, + ) + ctx.exit() + + if not param_decls: + param_decls = ("--version",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("is_eager", True) + kwargs.setdefault("help", _("Show the version and exit.")) + kwargs["callback"] = callback + return option(*param_decls, **kwargs) + + +class HelpOption(Option): + """Pre-configured ``--help`` option which immediately prints the help page + and exits the program. 
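+
+    A minimal usage sketch (the ``cli`` command and the ``-h`` alias are
+    illustrative): the :func:`help_option` decorator defined below
+    attaches this class to a command.
+
+    .. code-block:: python
+
+        import click
+
+        @click.command(add_help_option=False)
+        @click.help_option("-h", "--help")
+        def cli():
+            click.echo("Hello!")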
+ """ + + def __init__( + self, + param_decls: t.Optional[t.Sequence[str]] = None, + **kwargs: t.Any, + ) -> None: + if not param_decls: + param_decls = ("--help",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("is_eager", True) + kwargs.setdefault("help", _("Show this message and exit.")) + kwargs.setdefault("callback", self.show_help) + + super().__init__(param_decls, **kwargs) + + @staticmethod + def show_help(ctx: Context, param: Parameter, value: bool) -> None: + """Callback that print the help page on ```` and exits.""" + if value and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + +def help_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Decorator for the pre-configured ``--help`` option defined above. + + :param param_decls: One or more option names. Defaults to the single + value ``"--help"``. + :param kwargs: Extra arguments are passed to :func:`option`. + """ + kwargs.setdefault("cls", HelpOption) + return option(*param_decls, **kwargs) diff --git a/venv/lib/python3.11/site-packages/click/exceptions.py b/venv/lib/python3.11/site-packages/click/exceptions.py new file mode 100644 index 0000000..0b83151 --- /dev/null +++ b/venv/lib/python3.11/site-packages/click/exceptions.py @@ -0,0 +1,296 @@ +import typing as t +from gettext import gettext as _ +from gettext import ngettext + +from ._compat import get_text_stderr +from .globals import resolve_color_default +from .utils import echo +from .utils import format_filename + +if t.TYPE_CHECKING: + from .core import Command + from .core import Context + from .core import Parameter + + +def _join_param_hints( + param_hint: t.Optional[t.Union[t.Sequence[str], str]], +) -> t.Optional[str]: + if param_hint is not None and not isinstance(param_hint, str): + return " / ".join(repr(x) for x in param_hint) + + return param_hint + + +class ClickException(Exception): + """An exception that Click can handle and show to the user.""" + + #: The exit code for this exception. + exit_code = 1 + + def __init__(self, message: str) -> None: + super().__init__(message) + # The context will be removed by the time we print the message, so cache + # the color settings here to be used later on (in `show`) + self.show_color: t.Optional[bool] = resolve_color_default() + self.message = message + + def format_message(self) -> str: + return self.message + + def __str__(self) -> str: + return self.message + + def show(self, file: t.Optional[t.IO[t.Any]] = None) -> None: + if file is None: + file = get_text_stderr() + + echo( + _("Error: {message}").format(message=self.format_message()), + file=file, + color=self.show_color, + ) + + +class UsageError(ClickException): + """An internal exception that signals a usage error. This typically + aborts any further handling. + + :param message: the error message to display. + :param ctx: optionally the context that caused this error. Click will + fill in the context automatically in some situations. 
+ """ + + exit_code = 2 + + def __init__(self, message: str, ctx: t.Optional["Context"] = None) -> None: + super().__init__(message) + self.ctx = ctx + self.cmd: t.Optional[Command] = self.ctx.command if self.ctx else None + + def show(self, file: t.Optional[t.IO[t.Any]] = None) -> None: + if file is None: + file = get_text_stderr() + color = None + hint = "" + if ( + self.ctx is not None + and self.ctx.command.get_help_option(self.ctx) is not None + ): + hint = _("Try '{command} {option}' for help.").format( + command=self.ctx.command_path, option=self.ctx.help_option_names[0] + ) + hint = f"{hint}\n" + if self.ctx is not None: + color = self.ctx.color + echo(f"{self.ctx.get_usage()}\n{hint}", file=file, color=color) + echo( + _("Error: {message}").format(message=self.format_message()), + file=file, + color=color, + ) + + +class BadParameter(UsageError): + """An exception that formats out a standardized error message for a + bad parameter. This is useful when thrown from a callback or type as + Click will attach contextual information to it (for instance, which + parameter it is). + + .. versionadded:: 2.0 + + :param param: the parameter object that caused this error. This can + be left out, and Click will attach this info itself + if possible. + :param param_hint: a string that shows up as parameter name. This + can be used as alternative to `param` in cases + where custom validation should happen. If it is + a string it's used as such, if it's a list then + each item is quoted and separated. + """ + + def __init__( + self, + message: str, + ctx: t.Optional["Context"] = None, + param: t.Optional["Parameter"] = None, + param_hint: t.Optional[str] = None, + ) -> None: + super().__init__(message, ctx) + self.param = param + self.param_hint = param_hint + + def format_message(self) -> str: + if self.param_hint is not None: + param_hint = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) # type: ignore + else: + return _("Invalid value: {message}").format(message=self.message) + + return _("Invalid value for {param_hint}: {message}").format( + param_hint=_join_param_hints(param_hint), message=self.message + ) + + +class MissingParameter(BadParameter): + """Raised if click required an option or argument but it was not + provided when invoking the script. + + .. versionadded:: 4.0 + + :param param_type: a string that indicates the type of the parameter. + The default is to inherit the parameter type from + the given `param`. Valid values are ``'parameter'``, + ``'option'`` or ``'argument'``. + """ + + def __init__( + self, + message: t.Optional[str] = None, + ctx: t.Optional["Context"] = None, + param: t.Optional["Parameter"] = None, + param_hint: t.Optional[str] = None, + param_type: t.Optional[str] = None, + ) -> None: + super().__init__(message or "", ctx, param, param_hint) + self.param_type = param_type + + def format_message(self) -> str: + if self.param_hint is not None: + param_hint: t.Optional[str] = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) # type: ignore + else: + param_hint = None + + param_hint = _join_param_hints(param_hint) + param_hint = f" {param_hint}" if param_hint else "" + + param_type = self.param_type + if param_type is None and self.param is not None: + param_type = self.param.param_type_name + + msg = self.message + if self.param is not None: + msg_extra = self.param.type.get_missing_message(self.param) + if msg_extra: + if msg: + msg += f". 
{msg_extra}" + else: + msg = msg_extra + + msg = f" {msg}" if msg else "" + + # Translate param_type for known types. + if param_type == "argument": + missing = _("Missing argument") + elif param_type == "option": + missing = _("Missing option") + elif param_type == "parameter": + missing = _("Missing parameter") + else: + missing = _("Missing {param_type}").format(param_type=param_type) + + return f"{missing}{param_hint}.{msg}" + + def __str__(self) -> str: + if not self.message: + param_name = self.param.name if self.param else None + return _("Missing parameter: {param_name}").format(param_name=param_name) + else: + return self.message + + +class NoSuchOption(UsageError): + """Raised if click attempted to handle an option that does not + exist. + + .. versionadded:: 4.0 + """ + + def __init__( + self, + option_name: str, + message: t.Optional[str] = None, + possibilities: t.Optional[t.Sequence[str]] = None, + ctx: t.Optional["Context"] = None, + ) -> None: + if message is None: + message = _("No such option: {name}").format(name=option_name) + + super().__init__(message, ctx) + self.option_name = option_name + self.possibilities = possibilities + + def format_message(self) -> str: + if not self.possibilities: + return self.message + + possibility_str = ", ".join(sorted(self.possibilities)) + suggest = ngettext( + "Did you mean {possibility}?", + "(Possible options: {possibilities})", + len(self.possibilities), + ).format(possibility=possibility_str, possibilities=possibility_str) + return f"{self.message} {suggest}" + + +class BadOptionUsage(UsageError): + """Raised if an option is generally supplied but the use of the option + was incorrect. This is for instance raised if the number of arguments + for an option is not correct. + + .. versionadded:: 4.0 + + :param option_name: the name of the option being used incorrectly. + """ + + def __init__( + self, option_name: str, message: str, ctx: t.Optional["Context"] = None + ) -> None: + super().__init__(message, ctx) + self.option_name = option_name + + +class BadArgumentUsage(UsageError): + """Raised if an argument is generally supplied but the use of the argument + was incorrect. This is for instance raised if the number of values + for an argument is not correct. + + .. versionadded:: 6.0 + """ + + +class FileError(ClickException): + """Raised if a file cannot be opened.""" + + def __init__(self, filename: str, hint: t.Optional[str] = None) -> None: + if hint is None: + hint = _("unknown error") + + super().__init__(hint) + self.ui_filename: str = format_filename(filename) + self.filename = filename + + def format_message(self) -> str: + return _("Could not open file {filename!r}: {message}").format( + filename=self.ui_filename, message=self.message + ) + + +class Abort(RuntimeError): + """An internal signalling exception that signals Click to abort.""" + + +class Exit(RuntimeError): + """An exception that indicates that the application should exit with some + status code. + + :param code: the status code to exit with. 
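+
+    A minimal sketch, assuming an ordinary command: callbacks normally
+    reach this exception through :meth:`Context.exit` instead of raising
+    it directly.
+
+    .. code-block:: python
+
+        import click
+
+        @click.command()
+        @click.pass_context
+        def cli(ctx):
+            click.echo("done")
+            ctx.exit(0)  # raises Exit(0) under the hood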
+ """ + + __slots__ = ("exit_code",) + + def __init__(self, code: int = 0) -> None: + self.exit_code: int = code diff --git a/venv/lib/python3.11/site-packages/click/formatting.py b/venv/lib/python3.11/site-packages/click/formatting.py new file mode 100644 index 0000000..ddd2a2f --- /dev/null +++ b/venv/lib/python3.11/site-packages/click/formatting.py @@ -0,0 +1,301 @@ +import typing as t +from contextlib import contextmanager +from gettext import gettext as _ + +from ._compat import term_len +from .parser import split_opt + +# Can force a width. This is used by the test system +FORCED_WIDTH: t.Optional[int] = None + + +def measure_table(rows: t.Iterable[t.Tuple[str, str]]) -> t.Tuple[int, ...]: + widths: t.Dict[int, int] = {} + + for row in rows: + for idx, col in enumerate(row): + widths[idx] = max(widths.get(idx, 0), term_len(col)) + + return tuple(y for x, y in sorted(widths.items())) + + +def iter_rows( + rows: t.Iterable[t.Tuple[str, str]], col_count: int +) -> t.Iterator[t.Tuple[str, ...]]: + for row in rows: + yield row + ("",) * (col_count - len(row)) + + +def wrap_text( + text: str, + width: int = 78, + initial_indent: str = "", + subsequent_indent: str = "", + preserve_paragraphs: bool = False, +) -> str: + """A helper function that intelligently wraps text. By default, it + assumes that it operates on a single paragraph of text but if the + `preserve_paragraphs` parameter is provided it will intelligently + handle paragraphs (defined by two empty lines). + + If paragraphs are handled, a paragraph can be prefixed with an empty + line containing the ``\\b`` character (``\\x08``) to indicate that + no rewrapping should happen in that block. + + :param text: the text that should be rewrapped. + :param width: the maximum width for the text. + :param initial_indent: the initial indent that should be placed on the + first line as a string. + :param subsequent_indent: the indent string that should be placed on + each consecutive line. + :param preserve_paragraphs: if this flag is set then the wrapping will + intelligently handle paragraphs. + """ + from ._textwrap import TextWrapper + + text = text.expandtabs() + wrapper = TextWrapper( + width, + initial_indent=initial_indent, + subsequent_indent=subsequent_indent, + replace_whitespace=False, + ) + if not preserve_paragraphs: + return wrapper.fill(text) + + p: t.List[t.Tuple[int, bool, str]] = [] + buf: t.List[str] = [] + indent = None + + def _flush_par() -> None: + if not buf: + return + if buf[0].strip() == "\b": + p.append((indent or 0, True, "\n".join(buf[1:]))) + else: + p.append((indent or 0, False, " ".join(buf))) + del buf[:] + + for line in text.splitlines(): + if not line: + _flush_par() + indent = None + else: + if indent is None: + orig_len = term_len(line) + line = line.lstrip() + indent = orig_len - term_len(line) + buf.append(line) + _flush_par() + + rv = [] + for indent, raw, text in p: + with wrapper.extra_indent(" " * indent): + if raw: + rv.append(wrapper.indent_only(text)) + else: + rv.append(wrapper.fill(text)) + + return "\n\n".join(rv) + + +class HelpFormatter: + """This class helps with formatting text-based help pages. It's + usually just needed for very special internal cases, but it's also + exposed so that developers can write their own fancy outputs. + + At present, it always writes into memory. + + :param indent_increment: the additional increment for each level. + :param width: the width for the text. This defaults to the terminal + width clamped to a maximum of 78. 
+ """ + + def __init__( + self, + indent_increment: int = 2, + width: t.Optional[int] = None, + max_width: t.Optional[int] = None, + ) -> None: + import shutil + + self.indent_increment = indent_increment + if max_width is None: + max_width = 80 + if width is None: + width = FORCED_WIDTH + if width is None: + width = max(min(shutil.get_terminal_size().columns, max_width) - 2, 50) + self.width = width + self.current_indent = 0 + self.buffer: t.List[str] = [] + + def write(self, string: str) -> None: + """Writes a unicode string into the internal buffer.""" + self.buffer.append(string) + + def indent(self) -> None: + """Increases the indentation.""" + self.current_indent += self.indent_increment + + def dedent(self) -> None: + """Decreases the indentation.""" + self.current_indent -= self.indent_increment + + def write_usage( + self, prog: str, args: str = "", prefix: t.Optional[str] = None + ) -> None: + """Writes a usage line into the buffer. + + :param prog: the program name. + :param args: whitespace separated list of arguments. + :param prefix: The prefix for the first line. Defaults to + ``"Usage: "``. + """ + if prefix is None: + prefix = f"{_('Usage:')} " + + usage_prefix = f"{prefix:>{self.current_indent}}{prog} " + text_width = self.width - self.current_indent + + if text_width >= (term_len(usage_prefix) + 20): + # The arguments will fit to the right of the prefix. + indent = " " * term_len(usage_prefix) + self.write( + wrap_text( + args, + text_width, + initial_indent=usage_prefix, + subsequent_indent=indent, + ) + ) + else: + # The prefix is too long, put the arguments on the next line. + self.write(usage_prefix) + self.write("\n") + indent = " " * (max(self.current_indent, term_len(prefix)) + 4) + self.write( + wrap_text( + args, text_width, initial_indent=indent, subsequent_indent=indent + ) + ) + + self.write("\n") + + def write_heading(self, heading: str) -> None: + """Writes a heading into the buffer.""" + self.write(f"{'':>{self.current_indent}}{heading}:\n") + + def write_paragraph(self) -> None: + """Writes a paragraph into the buffer.""" + if self.buffer: + self.write("\n") + + def write_text(self, text: str) -> None: + """Writes re-indented text into the buffer. This rewraps and + preserves paragraphs. + """ + indent = " " * self.current_indent + self.write( + wrap_text( + text, + self.width, + initial_indent=indent, + subsequent_indent=indent, + preserve_paragraphs=True, + ) + ) + self.write("\n") + + def write_dl( + self, + rows: t.Sequence[t.Tuple[str, str]], + col_max: int = 30, + col_spacing: int = 2, + ) -> None: + """Writes a definition list into the buffer. This is how options + and commands are usually formatted. + + :param rows: a list of two item tuples for the terms and values. + :param col_max: the maximum width of the first column. + :param col_spacing: the number of spaces between the first and + second column. 
+ """ + rows = list(rows) + widths = measure_table(rows) + if len(widths) != 2: + raise TypeError("Expected two columns for definition list") + + first_col = min(widths[0], col_max) + col_spacing + + for first, second in iter_rows(rows, len(widths)): + self.write(f"{'':>{self.current_indent}}{first}") + if not second: + self.write("\n") + continue + if term_len(first) <= first_col - col_spacing: + self.write(" " * (first_col - term_len(first))) + else: + self.write("\n") + self.write(" " * (first_col + self.current_indent)) + + text_width = max(self.width - first_col - 2, 10) + wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True) + lines = wrapped_text.splitlines() + + if lines: + self.write(f"{lines[0]}\n") + + for line in lines[1:]: + self.write(f"{'':>{first_col + self.current_indent}}{line}\n") + else: + self.write("\n") + + @contextmanager + def section(self, name: str) -> t.Iterator[None]: + """Helpful context manager that writes a paragraph, a heading, + and the indents. + + :param name: the section name that is written as heading. + """ + self.write_paragraph() + self.write_heading(name) + self.indent() + try: + yield + finally: + self.dedent() + + @contextmanager + def indentation(self) -> t.Iterator[None]: + """A context manager that increases the indentation.""" + self.indent() + try: + yield + finally: + self.dedent() + + def getvalue(self) -> str: + """Returns the buffer contents.""" + return "".join(self.buffer) + + +def join_options(options: t.Sequence[str]) -> t.Tuple[str, bool]: + """Given a list of option strings this joins them in the most appropriate + way and returns them in the form ``(formatted_string, + any_prefix_is_slash)`` where the second item in the tuple is a flag that + indicates if any of the option prefixes was a slash. + """ + rv = [] + any_prefix_is_slash = False + + for opt in options: + prefix = split_opt(opt)[0] + + if prefix == "/": + any_prefix_is_slash = True + + rv.append((len(prefix), opt)) + + rv.sort(key=lambda x: x[0]) + return ", ".join(x[1] for x in rv), any_prefix_is_slash diff --git a/venv/lib/python3.11/site-packages/click/globals.py b/venv/lib/python3.11/site-packages/click/globals.py new file mode 100644 index 0000000..191e712 --- /dev/null +++ b/venv/lib/python3.11/site-packages/click/globals.py @@ -0,0 +1,67 @@ +import typing as t +from threading import local + +if t.TYPE_CHECKING: + import typing_extensions as te + + from .core import Context + +_local = local() + + +@t.overload +def get_current_context(silent: "te.Literal[False]" = False) -> "Context": ... + + +@t.overload +def get_current_context(silent: bool = ...) -> t.Optional["Context"]: ... + + +def get_current_context(silent: bool = False) -> t.Optional["Context"]: + """Returns the current click context. This can be used as a way to + access the current context object from anywhere. This is a more implicit + alternative to the :func:`pass_context` decorator. This function is + primarily useful for helpers such as :func:`echo` which might be + interested in changing its behavior based on the current context. + + To push the current context, :meth:`Context.scope` can be used. + + .. versionadded:: 5.0 + + :param silent: if set to `True` the return value is `None` if no context + is available. The default behavior is to raise a + :exc:`RuntimeError`. 
+ """ + try: + return t.cast("Context", _local.stack[-1]) + except (AttributeError, IndexError) as e: + if not silent: + raise RuntimeError("There is no active click context.") from e + + return None + + +def push_context(ctx: "Context") -> None: + """Pushes a new context to the current stack.""" + _local.__dict__.setdefault("stack", []).append(ctx) + + +def pop_context() -> None: + """Removes the top level from the stack.""" + _local.stack.pop() + + +def resolve_color_default(color: t.Optional[bool] = None) -> t.Optional[bool]: + """Internal helper to get the default value of the color flag. If a + value is passed it's returned unchanged, otherwise it's looked up from + the current context. + """ + if color is not None: + return color + + ctx = get_current_context(silent=True) + + if ctx is not None: + return ctx.color + + return None diff --git a/venv/lib/python3.11/site-packages/click/parser.py b/venv/lib/python3.11/site-packages/click/parser.py new file mode 100644 index 0000000..600b843 --- /dev/null +++ b/venv/lib/python3.11/site-packages/click/parser.py @@ -0,0 +1,531 @@ +""" +This module started out as largely a copy paste from the stdlib's +optparse module with the features removed that we do not need from +optparse because we implement them in Click on a higher level (for +instance type handling, help formatting and a lot more). + +The plan is to remove more and more from here over time. + +The reason this is a different module and not optparse from the stdlib +is that there are differences in 2.x and 3.x about the error messages +generated and optparse in the stdlib uses gettext for no good reason +and might cause us issues. + +Click uses parts of optparse written by Gregory P. Ward and maintained +by the Python Software Foundation. This is limited to code in parser.py. + +Copyright 2001-2006 Gregory P. Ward. All rights reserved. +Copyright 2002-2006 Python Software Foundation. All rights reserved. +""" + +# This code uses parts of optparse written by Gregory P. Ward and +# maintained by the Python Software Foundation. +# Copyright 2001-2006 Gregory P. Ward +# Copyright 2002-2006 Python Software Foundation +import typing as t +from collections import deque +from gettext import gettext as _ +from gettext import ngettext + +from .exceptions import BadArgumentUsage +from .exceptions import BadOptionUsage +from .exceptions import NoSuchOption +from .exceptions import UsageError + +if t.TYPE_CHECKING: + import typing_extensions as te + + from .core import Argument as CoreArgument + from .core import Context + from .core import Option as CoreOption + from .core import Parameter as CoreParameter + +V = t.TypeVar("V") + +# Sentinel value that indicates an option was passed as a flag without a +# value but is not a flag option. Option.consume_value uses this to +# prompt or use the flag_value. +_flag_needs_value = object() + + +def _unpack_args( + args: t.Sequence[str], nargs_spec: t.Sequence[int] +) -> t.Tuple[t.Sequence[t.Union[str, t.Sequence[t.Optional[str]], None]], t.List[str]]: + """Given an iterable of arguments and an iterable of nargs specifications, + it returns a tuple with all the unpacked arguments at the first index + and all remaining arguments as the second. + + The nargs specification is the number of arguments that should be consumed + or `-1` to indicate that this position should eat up all the remainders. + + Missing items are filled with `None`. 
+ """ + args = deque(args) + nargs_spec = deque(nargs_spec) + rv: t.List[t.Union[str, t.Tuple[t.Optional[str], ...], None]] = [] + spos: t.Optional[int] = None + + def _fetch(c: "te.Deque[V]") -> t.Optional[V]: + try: + if spos is None: + return c.popleft() + else: + return c.pop() + except IndexError: + return None + + while nargs_spec: + nargs = _fetch(nargs_spec) + + if nargs is None: + continue + + if nargs == 1: + rv.append(_fetch(args)) + elif nargs > 1: + x = [_fetch(args) for _ in range(nargs)] + + # If we're reversed, we're pulling in the arguments in reverse, + # so we need to turn them around. + if spos is not None: + x.reverse() + + rv.append(tuple(x)) + elif nargs < 0: + if spos is not None: + raise TypeError("Cannot have two nargs < 0") + + spos = len(rv) + rv.append(None) + + # spos is the position of the wildcard (star). If it's not `None`, + # we fill it with the remainder. + if spos is not None: + rv[spos] = tuple(args) + args = [] + rv[spos + 1 :] = reversed(rv[spos + 1 :]) + + return tuple(rv), list(args) + + +def split_opt(opt: str) -> t.Tuple[str, str]: + first = opt[:1] + if first.isalnum(): + return "", opt + if opt[1:2] == first: + return opt[:2], opt[2:] + return first, opt[1:] + + +def normalize_opt(opt: str, ctx: t.Optional["Context"]) -> str: + if ctx is None or ctx.token_normalize_func is None: + return opt + prefix, opt = split_opt(opt) + return f"{prefix}{ctx.token_normalize_func(opt)}" + + +def split_arg_string(string: str) -> t.List[str]: + """Split an argument string as with :func:`shlex.split`, but don't + fail if the string is incomplete. Ignores a missing closing quote or + incomplete escape sequence and uses the partial token as-is. + + .. code-block:: python + + split_arg_string("example 'my file") + ["example", "my file"] + + split_arg_string("example my\\") + ["example", "my"] + + :param string: String to split. + """ + import shlex + + lex = shlex.shlex(string, posix=True) + lex.whitespace_split = True + lex.commenters = "" + out = [] + + try: + for token in lex: + out.append(token) + except ValueError: + # Raised when end-of-string is reached in an invalid state. Use + # the partial token as-is. The quote or escape character is in + # lex.state, not lex.token. 
+ out.append(lex.token) + + return out + + +class Option: + def __init__( + self, + obj: "CoreOption", + opts: t.Sequence[str], + dest: t.Optional[str], + action: t.Optional[str] = None, + nargs: int = 1, + const: t.Optional[t.Any] = None, + ): + self._short_opts = [] + self._long_opts = [] + self.prefixes: t.Set[str] = set() + + for opt in opts: + prefix, value = split_opt(opt) + if not prefix: + raise ValueError(f"Invalid start character for option ({opt})") + self.prefixes.add(prefix[0]) + if len(prefix) == 1 and len(value) == 1: + self._short_opts.append(opt) + else: + self._long_opts.append(opt) + self.prefixes.add(prefix) + + if action is None: + action = "store" + + self.dest = dest + self.action = action + self.nargs = nargs + self.const = const + self.obj = obj + + @property + def takes_value(self) -> bool: + return self.action in ("store", "append") + + def process(self, value: t.Any, state: "ParsingState") -> None: + if self.action == "store": + state.opts[self.dest] = value # type: ignore + elif self.action == "store_const": + state.opts[self.dest] = self.const # type: ignore + elif self.action == "append": + state.opts.setdefault(self.dest, []).append(value) # type: ignore + elif self.action == "append_const": + state.opts.setdefault(self.dest, []).append(self.const) # type: ignore + elif self.action == "count": + state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 # type: ignore + else: + raise ValueError(f"unknown action '{self.action}'") + state.order.append(self.obj) + + +class Argument: + def __init__(self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1): + self.dest = dest + self.nargs = nargs + self.obj = obj + + def process( + self, + value: t.Union[t.Optional[str], t.Sequence[t.Optional[str]]], + state: "ParsingState", + ) -> None: + if self.nargs > 1: + assert value is not None + holes = sum(1 for x in value if x is None) + if holes == len(value): + value = None + elif holes != 0: + raise BadArgumentUsage( + _("Argument {name!r} takes {nargs} values.").format( + name=self.dest, nargs=self.nargs + ) + ) + + if self.nargs == -1 and self.obj.envvar is not None and value == (): + # Replace empty tuple with None so that a value from the + # environment may be tried. + value = None + + state.opts[self.dest] = value # type: ignore + state.order.append(self.obj) + + +class ParsingState: + def __init__(self, rargs: t.List[str]) -> None: + self.opts: t.Dict[str, t.Any] = {} + self.largs: t.List[str] = [] + self.rargs = rargs + self.order: t.List[CoreParameter] = [] + + +class OptionParser: + """The option parser is an internal class that is ultimately used to + parse options and arguments. It's modelled after optparse and brings + a similar but vastly simplified API. It should generally not be used + directly as the high level Click classes wrap it for you. + + It's not nearly as extensible as optparse or argparse as it does not + implement features that are implemented on a higher level (such as + types or defaults). + + :param ctx: optionally the :class:`~click.Context` where this parser + should go with. + """ + + def __init__(self, ctx: t.Optional["Context"] = None) -> None: + #: The :class:`~click.Context` for this parser. This might be + #: `None` for some advanced use cases. + self.ctx = ctx + #: This controls how the parser deals with interspersed arguments. + #: If this is set to `False`, the parser will stop on the first + #: non-option. Click uses this to implement nested subcommands + #: safely. 
+ self.allow_interspersed_args: bool = True + #: This tells the parser how to deal with unknown options. By + #: default it will error out (which is sensible), but there is a + #: second mode where it will ignore it and continue processing + #: after shifting all the unknown options into the resulting args. + self.ignore_unknown_options: bool = False + + if ctx is not None: + self.allow_interspersed_args = ctx.allow_interspersed_args + self.ignore_unknown_options = ctx.ignore_unknown_options + + self._short_opt: t.Dict[str, Option] = {} + self._long_opt: t.Dict[str, Option] = {} + self._opt_prefixes = {"-", "--"} + self._args: t.List[Argument] = [] + + def add_option( + self, + obj: "CoreOption", + opts: t.Sequence[str], + dest: t.Optional[str], + action: t.Optional[str] = None, + nargs: int = 1, + const: t.Optional[t.Any] = None, + ) -> None: + """Adds a new option named `dest` to the parser. The destination + is not inferred (unlike with optparse) and needs to be explicitly + provided. Action can be any of ``store``, ``store_const``, + ``append``, ``append_const`` or ``count``. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + opts = [normalize_opt(opt, self.ctx) for opt in opts] + option = Option(obj, opts, dest, action=action, nargs=nargs, const=const) + self._opt_prefixes.update(option.prefixes) + for opt in option._short_opts: + self._short_opt[opt] = option + for opt in option._long_opts: + self._long_opt[opt] = option + + def add_argument( + self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1 + ) -> None: + """Adds a positional argument named `dest` to the parser. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + self._args.append(Argument(obj, dest=dest, nargs=nargs)) + + def parse_args( + self, args: t.List[str] + ) -> t.Tuple[t.Dict[str, t.Any], t.List[str], t.List["CoreParameter"]]: + """Parses positional arguments and returns ``(values, args, order)`` + for the parsed options and arguments as well as the leftover + arguments if there are any. The order is a list of objects as they + appear on the command line. If arguments appear multiple times they + will be memorized multiple times as well. + """ + state = ParsingState(args) + try: + self._process_args_for_options(state) + self._process_args_for_args(state) + except UsageError: + if self.ctx is None or not self.ctx.resilient_parsing: + raise + return state.opts, state.largs, state.order + + def _process_args_for_args(self, state: ParsingState) -> None: + pargs, args = _unpack_args( + state.largs + state.rargs, [x.nargs for x in self._args] + ) + + for idx, arg in enumerate(self._args): + arg.process(pargs[idx], state) + + state.largs = args + state.rargs = [] + + def _process_args_for_options(self, state: ParsingState) -> None: + while state.rargs: + arg = state.rargs.pop(0) + arglen = len(arg) + # Double dashes always handled explicitly regardless of what + # prefixes are valid. + if arg == "--": + return + elif arg[:1] in self._opt_prefixes and arglen > 1: + self._process_opts(arg, state) + elif self.allow_interspersed_args: + state.largs.append(arg) + else: + state.rargs.insert(0, arg) + return + + # Say this is the original argument list: + # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] + # ^ + # (we are about to process arg(i)). 
+ # + # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of + # [arg0, ..., arg(i-1)] (any options and their arguments will have + # been removed from largs). + # + # The while loop will usually consume 1 or more arguments per pass. + # If it consumes 1 (eg. arg is an option that takes no arguments), + # then after _process_arg() is done the situation is: + # + # largs = subset of [arg0, ..., arg(i)] + # rargs = [arg(i+1), ..., arg(N-1)] + # + # If allow_interspersed_args is false, largs will always be + # *empty* -- still a subset of [arg0, ..., arg(i-1)], but + # not a very interesting subset! + + def _match_long_opt( + self, opt: str, explicit_value: t.Optional[str], state: ParsingState + ) -> None: + if opt not in self._long_opt: + from difflib import get_close_matches + + possibilities = get_close_matches(opt, self._long_opt) + raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx) + + option = self._long_opt[opt] + if option.takes_value: + # At this point it's safe to modify rargs by injecting the + # explicit value, because no exception is raised in this + # branch. This means that the inserted value will be fully + # consumed. + if explicit_value is not None: + state.rargs.insert(0, explicit_value) + + value = self._get_value_from_state(opt, option, state) + + elif explicit_value is not None: + raise BadOptionUsage( + opt, _("Option {name!r} does not take a value.").format(name=opt) + ) + + else: + value = None + + option.process(value, state) + + def _match_short_opt(self, arg: str, state: ParsingState) -> None: + stop = False + i = 1 + prefix = arg[0] + unknown_options = [] + + for ch in arg[1:]: + opt = normalize_opt(f"{prefix}{ch}", self.ctx) + option = self._short_opt.get(opt) + i += 1 + + if not option: + if self.ignore_unknown_options: + unknown_options.append(ch) + continue + raise NoSuchOption(opt, ctx=self.ctx) + if option.takes_value: + # Any characters left in arg? Pretend they're the + # next arg, and stop consuming characters of arg. + if i < len(arg): + state.rargs.insert(0, arg[i:]) + stop = True + + value = self._get_value_from_state(opt, option, state) + + else: + value = None + + option.process(value, state) + + if stop: + break + + # If we got any unknown options we recombine the string of the + # remaining options and re-attach the prefix, then report that + # to the state as new larg. This way there is basic combinatorics + # that can be achieved while still ignoring unknown arguments. + if self.ignore_unknown_options and unknown_options: + state.largs.append(f"{prefix}{''.join(unknown_options)}") + + def _get_value_from_state( + self, option_name: str, option: Option, state: ParsingState + ) -> t.Any: + nargs = option.nargs + + if len(state.rargs) < nargs: + if option.obj._flag_needs_value: + # Option allows omitting the value. + value = _flag_needs_value + else: + raise BadOptionUsage( + option_name, + ngettext( + "Option {name!r} requires an argument.", + "Option {name!r} requires {nargs} arguments.", + nargs, + ).format(name=option_name, nargs=nargs), + ) + elif nargs == 1: + next_rarg = state.rargs[0] + + if ( + option.obj._flag_needs_value + and isinstance(next_rarg, str) + and next_rarg[:1] in self._opt_prefixes + and len(next_rarg) > 1 + ): + # The next arg looks like the start of an option, don't + # use it as the value if omitting the value is allowed. 
+ value = _flag_needs_value + else: + value = state.rargs.pop(0) + else: + value = tuple(state.rargs[:nargs]) + del state.rargs[:nargs] + + return value + + def _process_opts(self, arg: str, state: ParsingState) -> None: + explicit_value = None + # Long option handling happens in two parts. The first part is + # supporting explicitly attached values. In any case, we will try + # to long match the option first. + if "=" in arg: + long_opt, explicit_value = arg.split("=", 1) + else: + long_opt = arg + norm_long_opt = normalize_opt(long_opt, self.ctx) + + # At this point we will match the (assumed) long option through + # the long option matching code. Note that this allows options + # like "-foo" to be matched as long options. + try: + self._match_long_opt(norm_long_opt, explicit_value, state) + except NoSuchOption: + # At this point the long option matching failed, and we need + # to try with short options. However there is a special rule + # which says, that if we have a two character options prefix + # (applies to "--foo" for instance), we do not dispatch to the + # short option code and will instead raise the no option + # error. + if arg[:2] not in self._opt_prefixes: + self._match_short_opt(arg, state) + return + + if not self.ignore_unknown_options: + raise + + state.largs.append(arg) diff --git a/venv/lib/python3.11/site-packages/click/py.typed b/venv/lib/python3.11/site-packages/click/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.11/site-packages/click/shell_completion.py b/venv/lib/python3.11/site-packages/click/shell_completion.py new file mode 100644 index 0000000..07d0f09 --- /dev/null +++ b/venv/lib/python3.11/site-packages/click/shell_completion.py @@ -0,0 +1,603 @@ +import os +import re +import typing as t +from gettext import gettext as _ + +from .core import Argument +from .core import BaseCommand +from .core import Context +from .core import MultiCommand +from .core import Option +from .core import Parameter +from .core import ParameterSource +from .parser import split_arg_string +from .utils import echo + + +def shell_complete( + cli: BaseCommand, + ctx_args: t.MutableMapping[str, t.Any], + prog_name: str, + complete_var: str, + instruction: str, +) -> int: + """Perform shell completion for the given CLI program. + + :param cli: Command being called. + :param ctx_args: Extra arguments to pass to + ``cli.make_context``. + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. + :param instruction: Value of ``complete_var`` with the completion + instruction and shell, in the form ``instruction_shell``. + :return: Status code to exit with. + """ + shell, _, instruction = instruction.partition("_") + comp_cls = get_completion_class(shell) + + if comp_cls is None: + return 1 + + comp = comp_cls(cli, ctx_args, prog_name, complete_var) + + if instruction == "source": + echo(comp.source()) + return 0 + + if instruction == "complete": + echo(comp.complete()) + return 0 + + return 1 + + +class CompletionItem: + """Represents a completion value and metadata about the value. The + default metadata is ``type`` to indicate special shell handling, + and ``help`` if a shell supports showing a help string next to the + value. + + Arbitrary parameters can be passed when creating the object, and + accessed using ``item.attr``. If an attribute wasn't passed, + accessing it returns ``None``. + + :param value: The completion suggestion. 
+ :param type: Tells the shell script to provide special completion + support for the type. Click uses ``"dir"`` and ``"file"``. + :param help: String shown next to the value if supported. + :param kwargs: Arbitrary metadata. The built-in implementations + don't use this, but custom type completions paired with custom + shell support could use it. + """ + + __slots__ = ("value", "type", "help", "_info") + + def __init__( + self, + value: t.Any, + type: str = "plain", + help: t.Optional[str] = None, + **kwargs: t.Any, + ) -> None: + self.value: t.Any = value + self.type: str = type + self.help: t.Optional[str] = help + self._info = kwargs + + def __getattr__(self, name: str) -> t.Any: + return self._info.get(name) + + +# Only Bash >= 4.4 has the nosort option. +_SOURCE_BASH = """\ +%(complete_func)s() { + local IFS=$'\\n' + local response + + response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD \ +%(complete_var)s=bash_complete $1) + + for completion in $response; do + IFS=',' read type value <<< "$completion" + + if [[ $type == 'dir' ]]; then + COMPREPLY=() + compopt -o dirnames + elif [[ $type == 'file' ]]; then + COMPREPLY=() + compopt -o default + elif [[ $type == 'plain' ]]; then + COMPREPLY+=($value) + fi + done + + return 0 +} + +%(complete_func)s_setup() { + complete -o nosort -F %(complete_func)s %(prog_name)s +} + +%(complete_func)s_setup; +""" + +_SOURCE_ZSH = """\ +#compdef %(prog_name)s + +%(complete_func)s() { + local -a completions + local -a completions_with_descriptions + local -a response + (( ! $+commands[%(prog_name)s] )) && return 1 + + response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) \ +%(complete_var)s=zsh_complete %(prog_name)s)}") + + for type key descr in ${response}; do + if [[ "$type" == "plain" ]]; then + if [[ "$descr" == "_" ]]; then + completions+=("$key") + else + completions_with_descriptions+=("$key":"$descr") + fi + elif [[ "$type" == "dir" ]]; then + _path_files -/ + elif [[ "$type" == "file" ]]; then + _path_files -f + fi + done + + if [ -n "$completions_with_descriptions" ]; then + _describe -V unsorted completions_with_descriptions -U + fi + + if [ -n "$completions" ]; then + compadd -U -V unsorted -a completions + fi +} + +if [[ $zsh_eval_context[-1] == loadautofunc ]]; then + # autoload from fpath, call function directly + %(complete_func)s "$@" +else + # eval/source/. command, register function for later + compdef %(complete_func)s %(prog_name)s +fi +""" + +_SOURCE_FISH = """\ +function %(complete_func)s; + set -l response (env %(complete_var)s=fish_complete COMP_WORDS=(commandline -cp) \ +COMP_CWORD=(commandline -t) %(prog_name)s); + + for completion in $response; + set -l metadata (string split "," $completion); + + if test $metadata[1] = "dir"; + __fish_complete_directories $metadata[2]; + else if test $metadata[1] = "file"; + __fish_complete_path $metadata[2]; + else if test $metadata[1] = "plain"; + echo $metadata[2]; + end; + end; +end; + +complete --no-files --command %(prog_name)s --arguments \ +"(%(complete_func)s)"; +""" + + +class ShellComplete: + """Base class for providing shell completion support. A subclass for + a given shell will override attributes and methods to implement the + completion instructions (``source`` and ``complete``). + + :param cli: Command being called. + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. + + .. 
versionadded:: 8.0 + """ + + name: t.ClassVar[str] + """Name to register the shell as with :func:`add_completion_class`. + This is used in completion instructions (``{name}_source`` and + ``{name}_complete``). + """ + + source_template: t.ClassVar[str] + """Completion script template formatted by :meth:`source`. This must + be provided by subclasses. + """ + + def __init__( + self, + cli: BaseCommand, + ctx_args: t.MutableMapping[str, t.Any], + prog_name: str, + complete_var: str, + ) -> None: + self.cli = cli + self.ctx_args = ctx_args + self.prog_name = prog_name + self.complete_var = complete_var + + @property + def func_name(self) -> str: + """The name of the shell function defined by the completion + script. + """ + safe_name = re.sub(r"\W*", "", self.prog_name.replace("-", "_"), flags=re.ASCII) + return f"_{safe_name}_completion" + + def source_vars(self) -> t.Dict[str, t.Any]: + """Vars for formatting :attr:`source_template`. + + By default this provides ``complete_func``, ``complete_var``, + and ``prog_name``. + """ + return { + "complete_func": self.func_name, + "complete_var": self.complete_var, + "prog_name": self.prog_name, + } + + def source(self) -> str: + """Produce the shell script that defines the completion + function. By default this ``%``-style formats + :attr:`source_template` with the dict returned by + :meth:`source_vars`. + """ + return self.source_template % self.source_vars() + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + """Use the env vars defined by the shell script to return a + tuple of ``args, incomplete``. This must be implemented by + subclasses. + """ + raise NotImplementedError + + def get_completions( + self, args: t.List[str], incomplete: str + ) -> t.List[CompletionItem]: + """Determine the context and last complete command or parameter + from the complete args. Call that object's ``shell_complete`` + method to get the completions for the incomplete value. + + :param args: List of complete args before the incomplete value. + :param incomplete: Value being completed. May be empty. + """ + ctx = _resolve_context(self.cli, self.ctx_args, self.prog_name, args) + obj, incomplete = _resolve_incomplete(ctx, args, incomplete) + return obj.shell_complete(ctx, incomplete) + + def format_completion(self, item: CompletionItem) -> str: + """Format a completion item into the form recognized by the + shell script. This must be implemented by subclasses. + + :param item: Completion item to format. + """ + raise NotImplementedError + + def complete(self) -> str: + """Produce the completion data to send back to the shell. + + By default this calls :meth:`get_completion_args`, gets the + completions, then calls :meth:`format_completion` for each + completion. 
+ """ + args, incomplete = self.get_completion_args() + completions = self.get_completions(args, incomplete) + out = [self.format_completion(item) for item in completions] + return "\n".join(out) + + +class BashComplete(ShellComplete): + """Shell completion for Bash.""" + + name = "bash" + source_template = _SOURCE_BASH + + @staticmethod + def _check_version() -> None: + import shutil + import subprocess + + bash_exe = shutil.which("bash") + + if bash_exe is None: + match = None + else: + output = subprocess.run( + [bash_exe, "--norc", "-c", 'echo "${BASH_VERSION}"'], + stdout=subprocess.PIPE, + ) + match = re.search(r"^(\d+)\.(\d+)\.\d+", output.stdout.decode()) + + if match is not None: + major, minor = match.groups() + + if major < "4" or major == "4" and minor < "4": + echo( + _( + "Shell completion is not supported for Bash" + " versions older than 4.4." + ), + err=True, + ) + else: + echo( + _("Couldn't detect Bash version, shell completion is not supported."), + err=True, + ) + + def source(self) -> str: + self._check_version() + return super().source() + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + cword = int(os.environ["COMP_CWORD"]) + args = cwords[1:cword] + + try: + incomplete = cwords[cword] + except IndexError: + incomplete = "" + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + return f"{item.type},{item.value}" + + +class ZshComplete(ShellComplete): + """Shell completion for Zsh.""" + + name = "zsh" + source_template = _SOURCE_ZSH + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + cword = int(os.environ["COMP_CWORD"]) + args = cwords[1:cword] + + try: + incomplete = cwords[cword] + except IndexError: + incomplete = "" + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + return f"{item.type}\n{item.value}\n{item.help if item.help else '_'}" + + +class FishComplete(ShellComplete): + """Shell completion for Fish.""" + + name = "fish" + source_template = _SOURCE_FISH + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + incomplete = os.environ["COMP_CWORD"] + args = cwords[1:] + + # Fish stores the partial word in both COMP_WORDS and + # COMP_CWORD, remove it from complete args. + if incomplete and args and args[-1] == incomplete: + args.pop() + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + if item.help: + return f"{item.type},{item.value}\t{item.help}" + + return f"{item.type},{item.value}" + + +ShellCompleteType = t.TypeVar("ShellCompleteType", bound=t.Type[ShellComplete]) + + +_available_shells: t.Dict[str, t.Type[ShellComplete]] = { + "bash": BashComplete, + "fish": FishComplete, + "zsh": ZshComplete, +} + + +def add_completion_class( + cls: ShellCompleteType, name: t.Optional[str] = None +) -> ShellCompleteType: + """Register a :class:`ShellComplete` subclass under the given name. + The name will be provided by the completion instruction environment + variable during completion. + + :param cls: The completion class that will handle completion for the + shell. + :param name: Name to register the class under. Defaults to the + class's ``name`` attribute. 
+ """ + if name is None: + name = cls.name + + _available_shells[name] = cls + + return cls + + +def get_completion_class(shell: str) -> t.Optional[t.Type[ShellComplete]]: + """Look up a registered :class:`ShellComplete` subclass by the name + provided by the completion instruction environment variable. If the + name isn't registered, returns ``None``. + + :param shell: Name the class is registered under. + """ + return _available_shells.get(shell) + + +def _is_incomplete_argument(ctx: Context, param: Parameter) -> bool: + """Determine if the given parameter is an argument that can still + accept values. + + :param ctx: Invocation context for the command represented by the + parsed complete args. + :param param: Argument object being checked. + """ + if not isinstance(param, Argument): + return False + + assert param.name is not None + # Will be None if expose_value is False. + value = ctx.params.get(param.name) + return ( + param.nargs == -1 + or ctx.get_parameter_source(param.name) is not ParameterSource.COMMANDLINE + or ( + param.nargs > 1 + and isinstance(value, (tuple, list)) + and len(value) < param.nargs + ) + ) + + +def _start_of_option(ctx: Context, value: str) -> bool: + """Check if the value looks like the start of an option.""" + if not value: + return False + + c = value[0] + return c in ctx._opt_prefixes + + +def _is_incomplete_option(ctx: Context, args: t.List[str], param: Parameter) -> bool: + """Determine if the given parameter is an option that needs a value. + + :param args: List of complete args before the incomplete value. + :param param: Option object being checked. + """ + if not isinstance(param, Option): + return False + + if param.is_flag or param.count: + return False + + last_option = None + + for index, arg in enumerate(reversed(args)): + if index + 1 > param.nargs: + break + + if _start_of_option(ctx, arg): + last_option = arg + + return last_option is not None and last_option in param.opts + + +def _resolve_context( + cli: BaseCommand, + ctx_args: t.MutableMapping[str, t.Any], + prog_name: str, + args: t.List[str], +) -> Context: + """Produce the context hierarchy starting with the command and + traversing the complete arguments. This only follows the commands, + it doesn't trigger input prompts or callbacks. + + :param cli: Command being called. + :param prog_name: Name of the executable in the shell. + :param args: List of complete args before the incomplete value. + """ + ctx_args["resilient_parsing"] = True + ctx = cli.make_context(prog_name, args.copy(), **ctx_args) + args = ctx.protected_args + ctx.args + + while args: + command = ctx.command + + if isinstance(command, MultiCommand): + if not command.chain: + name, cmd, args = command.resolve_command(ctx, args) + + if cmd is None: + return ctx + + ctx = cmd.make_context(name, args, parent=ctx, resilient_parsing=True) + args = ctx.protected_args + ctx.args + else: + sub_ctx = ctx + + while args: + name, cmd, args = command.resolve_command(ctx, args) + + if cmd is None: + return ctx + + sub_ctx = cmd.make_context( + name, + args, + parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False, + resilient_parsing=True, + ) + args = sub_ctx.args + + ctx = sub_ctx + args = [*sub_ctx.protected_args, *sub_ctx.args] + else: + break + + return ctx + + +def _resolve_incomplete( + ctx: Context, args: t.List[str], incomplete: str +) -> t.Tuple[t.Union[BaseCommand, Parameter], str]: + """Find the Click object that will handle the completion of the + incomplete value. 
Return the object and the incomplete value. + + :param ctx: Invocation context for the command represented by + the parsed complete args. + :param args: List of complete args before the incomplete value. + :param incomplete: Value being completed. May be empty. + """ + # Different shells treat an "=" between a long option name and + # value differently. Might keep the value joined, return the "=" + # as a separate item, or return the split name and value. Always + # split and discard the "=" to make completion easier. + if incomplete == "=": + incomplete = "" + elif "=" in incomplete and _start_of_option(ctx, incomplete): + name, _, incomplete = incomplete.partition("=") + args.append(name) + + # The "--" marker tells Click to stop treating values as options + # even if they start with the option character. If it hasn't been + # given and the incomplete arg looks like an option, the current + # command will provide option name completions. + if "--" not in args and _start_of_option(ctx, incomplete): + return ctx.command, incomplete + + params = ctx.command.get_params(ctx) + + # If the last complete arg is an option name with an incomplete + # value, the option will provide value completions. + for param in params: + if _is_incomplete_option(ctx, args, param): + return param, incomplete + + # It's not an option name or value. The first argument without a + # parsed value will provide value completions. + for param in params: + if _is_incomplete_argument(ctx, param): + return param, incomplete + + # There were no unparsed arguments, the command may be a group that + # will provide command name completions. + return ctx.command, incomplete diff --git a/venv/lib/python3.11/site-packages/click/termui.py b/venv/lib/python3.11/site-packages/click/termui.py new file mode 100644 index 0000000..c084f19 --- /dev/null +++ b/venv/lib/python3.11/site-packages/click/termui.py @@ -0,0 +1,784 @@ +import inspect +import io +import itertools +import sys +import typing as t +from gettext import gettext as _ + +from ._compat import isatty +from ._compat import strip_ansi +from .exceptions import Abort +from .exceptions import UsageError +from .globals import resolve_color_default +from .types import Choice +from .types import convert_type +from .types import ParamType +from .utils import echo +from .utils import LazyFile + +if t.TYPE_CHECKING: + from ._termui_impl import ProgressBar + +V = t.TypeVar("V") + +# The prompt functions to use. The doc tools currently override these +# functions to customize how they work. 
+visible_prompt_func: t.Callable[[str], str] = input + +_ansi_colors = { + "black": 30, + "red": 31, + "green": 32, + "yellow": 33, + "blue": 34, + "magenta": 35, + "cyan": 36, + "white": 37, + "reset": 39, + "bright_black": 90, + "bright_red": 91, + "bright_green": 92, + "bright_yellow": 93, + "bright_blue": 94, + "bright_magenta": 95, + "bright_cyan": 96, + "bright_white": 97, +} +_ansi_reset_all = "\033[0m" + + +def hidden_prompt_func(prompt: str) -> str: + import getpass + + return getpass.getpass(prompt) + + +def _build_prompt( + text: str, + suffix: str, + show_default: bool = False, + default: t.Optional[t.Any] = None, + show_choices: bool = True, + type: t.Optional[ParamType] = None, +) -> str: + prompt = text + if type is not None and show_choices and isinstance(type, Choice): + prompt += f" ({', '.join(map(str, type.choices))})" + if default is not None and show_default: + prompt = f"{prompt} [{_format_default(default)}]" + return f"{prompt}{suffix}" + + +def _format_default(default: t.Any) -> t.Any: + if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"): + return default.name + + return default + + +def prompt( + text: str, + default: t.Optional[t.Any] = None, + hide_input: bool = False, + confirmation_prompt: t.Union[bool, str] = False, + type: t.Optional[t.Union[ParamType, t.Any]] = None, + value_proc: t.Optional[t.Callable[[str], t.Any]] = None, + prompt_suffix: str = ": ", + show_default: bool = True, + err: bool = False, + show_choices: bool = True, +) -> t.Any: + """Prompts a user for input. This is a convenience function that can + be used to prompt a user for input later. + + If the user aborts the input by sending an interrupt signal, this + function will catch it and raise a :exc:`Abort` exception. + + :param text: the text to show for the prompt. + :param default: the default value to use if no input happens. If this + is not given it will prompt until it's aborted. + :param hide_input: if this is set to true then the input value will + be hidden. + :param confirmation_prompt: Prompt a second time to confirm the + value. Can be set to a string instead of ``True`` to customize + the message. + :param type: the type to use to check the value against. + :param value_proc: if this parameter is provided it's a function that + is invoked instead of the type conversion to + convert a value. + :param prompt_suffix: a suffix that should be added to the prompt. + :param show_default: shows or hides the default value in the prompt. + :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``, the same as with echo. + :param show_choices: Show or hide choices if the passed type is a Choice. + For example if type is a Choice of either day or week, + show_choices is true and text is "Group by" then the + prompt will be "Group by (day, week): ". + + .. versionadded:: 8.0 + ``confirmation_prompt`` can be a custom string. + + .. versionadded:: 7.0 + Added the ``show_choices`` parameter. + + .. versionadded:: 6.0 + Added unicode support for cmd.exe on Windows. + + .. versionadded:: 4.0 + Added the `err` parameter. + + """ + + def prompt_func(text: str) -> str: + f = hidden_prompt_func if hide_input else visible_prompt_func + try: + # Write the prompt separately so that we get nice + # coloring through colorama on Windows + echo(text.rstrip(" "), nl=False, err=err) + # Echo a space to stdout to work around an issue where + # readline causes backspace to clear the whole line. 
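A short sketch of typical `prompt()` usage as described in its docstring; the prompt texts and defaults are illustrative:

```python
import click

# Typed default: input is converted via INT and re-prompted on failure.
port = click.prompt("Port", default=8080, type=int)

# Hidden input asked twice; mismatched entries print an error and retry.
secret = click.prompt("Password", hide_input=True, confirmation_prompt=True)
```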
+ return f(" ") + except (KeyboardInterrupt, EOFError): + # getpass doesn't print a newline if the user aborts input with ^C. + # Allegedly this behavior is inherited from getpass(3). + # A doc bug has been filed at https://bugs.python.org/issue24711 + if hide_input: + echo(None, err=err) + raise Abort() from None + + if value_proc is None: + value_proc = convert_type(type, default) + + prompt = _build_prompt( + text, prompt_suffix, show_default, default, show_choices, type + ) + + if confirmation_prompt: + if confirmation_prompt is True: + confirmation_prompt = _("Repeat for confirmation") + + confirmation_prompt = _build_prompt(confirmation_prompt, prompt_suffix) + + while True: + while True: + value = prompt_func(prompt) + if value: + break + elif default is not None: + value = default + break + try: + result = value_proc(value) + except UsageError as e: + if hide_input: + echo(_("Error: The value you entered was invalid."), err=err) + else: + echo(_("Error: {e.message}").format(e=e), err=err) + continue + if not confirmation_prompt: + return result + while True: + value2 = prompt_func(confirmation_prompt) + is_empty = not value and not value2 + if value2 or is_empty: + break + if value == value2: + return result + echo(_("Error: The two entered values do not match."), err=err) + + +def confirm( + text: str, + default: t.Optional[bool] = False, + abort: bool = False, + prompt_suffix: str = ": ", + show_default: bool = True, + err: bool = False, +) -> bool: + """Prompts for confirmation (yes/no question). + + If the user aborts the input by sending a interrupt signal this + function will catch it and raise a :exc:`Abort` exception. + + :param text: the question to ask. + :param default: The default value to use when no input is given. If + ``None``, repeat until input is given. + :param abort: if this is set to `True` a negative answer aborts the + exception by raising :exc:`Abort`. + :param prompt_suffix: a suffix that should be added to the prompt. + :param show_default: shows or hides the default value in the prompt. + :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``, the same as with echo. + + .. versionchanged:: 8.0 + Repeat until input is given if ``default`` is ``None``. + + .. versionadded:: 4.0 + Added the ``err`` parameter. + """ + prompt = _build_prompt( + text, + prompt_suffix, + show_default, + "y/n" if default is None else ("Y/n" if default else "y/N"), + ) + + while True: + try: + # Write the prompt separately so that we get nice + # coloring through colorama on Windows + echo(prompt.rstrip(" "), nl=False, err=err) + # Echo a space to stdout to work around an issue where + # readline causes backspace to clear the whole line. + value = visible_prompt_func(" ").lower().strip() + except (KeyboardInterrupt, EOFError): + raise Abort() from None + if value in ("y", "yes"): + rv = True + elif value in ("n", "no"): + rv = False + elif default is not None and value == "": + rv = default + else: + echo(_("Error: invalid input"), err=err) + continue + break + if abort and not rv: + raise Abort() + return rv + + +def echo_via_pager( + text_or_generator: t.Union[t.Iterable[str], t.Callable[[], t.Iterable[str]], str], + color: t.Optional[bool] = None, +) -> None: + """This function takes a text and shows it via an environment specific + pager on stdout. + + .. versionchanged:: 3.0 + Added the `color` flag. + + :param text_or_generator: the text to page, or alternatively, a + generator emitting the text to page. 
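A minimal sketch of `confirm()` as documented above; the question is illustrative:

```python
import click

# default=False renders the suffix as "[y/N]"; abort=True turns a negative
# answer into click.Abort, which Click maps to exit status 1.
if click.confirm("Really drop the database?", default=False, abort=True):
    click.echo("Dropping...")
```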
+ :param color: controls if the pager supports ANSI colors or not. The + default is autodetection. + """ + color = resolve_color_default(color) + + if inspect.isgeneratorfunction(text_or_generator): + i = t.cast(t.Callable[[], t.Iterable[str]], text_or_generator)() + elif isinstance(text_or_generator, str): + i = [text_or_generator] + else: + i = iter(t.cast(t.Iterable[str], text_or_generator)) + + # convert every element of i to a text type if necessary + text_generator = (el if isinstance(el, str) else str(el) for el in i) + + from ._termui_impl import pager + + return pager(itertools.chain(text_generator, "\n"), color) + + +def progressbar( + iterable: t.Optional[t.Iterable[V]] = None, + length: t.Optional[int] = None, + label: t.Optional[str] = None, + show_eta: bool = True, + show_percent: t.Optional[bool] = None, + show_pos: bool = False, + item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None, + fill_char: str = "#", + empty_char: str = "-", + bar_template: str = "%(label)s [%(bar)s] %(info)s", + info_sep: str = " ", + width: int = 36, + file: t.Optional[t.TextIO] = None, + color: t.Optional[bool] = None, + update_min_steps: int = 1, +) -> "ProgressBar[V]": + """This function creates an iterable context manager that can be used + to iterate over something while showing a progress bar. It will + either iterate over the `iterable` or `length` items (that are counted + up). While iteration happens, this function will print a rendered + progress bar to the given `file` (defaults to stdout) and will attempt + to calculate remaining time and more. By default, this progress bar + will not be rendered if the file is not a terminal. + + The context manager creates the progress bar. When the context + manager is entered the progress bar is already created. With every + iteration over the progress bar, the iterable passed to the bar is + advanced and the bar is updated. When the context manager exits, + a newline is printed and the progress bar is finalized on screen. + + Note: The progress bar is currently designed for use cases where the + total progress can be expected to take at least several seconds. + Because of this, the ProgressBar class object won't display + progress that is considered too fast, and progress where the time + between steps is less than a second. + + No printing must happen or the progress bar will be unintentionally + destroyed. + + Example usage:: + + with progressbar(items) as bar: + for item in bar: + do_something_with(item) + + Alternatively, if no iterable is specified, one can manually update the + progress bar through the `update()` method instead of directly + iterating over the progress bar. The update method accepts the number + of steps to increment the bar with:: + + with progressbar(length=chunks.total_bytes) as bar: + for chunk in chunks: + process_chunk(chunk) + bar.update(chunks.bytes) + + The ``update()`` method also takes an optional value specifying the + ``current_item`` at the new position. This is useful when used + together with ``item_show_func`` to customize the output for each + manual step:: + + with click.progressbar( + length=total_size, + label='Unzipping archive', + item_show_func=lambda a: a.filename + ) as bar: + for archive in zip_file: + archive.extract() + bar.update(archive.size, archive) + + :param iterable: an iterable to iterate over. If not provided the length + is required. + :param length: the number of items to iterate over. 
By default the + progressbar will attempt to ask the iterator about its + length, which might or might not work. If an iterable is + also provided this parameter can be used to override the + length. If an iterable is not provided the progress bar + will iterate over a range of that length. + :param label: the label to show next to the progress bar. + :param show_eta: enables or disables the estimated time display. This is + automatically disabled if the length cannot be + determined. + :param show_percent: enables or disables the percentage display. The + default is `True` if the iterable has a length or + `False` if not. + :param show_pos: enables or disables the absolute position display. The + default is `False`. + :param item_show_func: A function called with the current item which + can return a string to show next to the progress bar. If the + function returns ``None`` nothing is shown. The current item can + be ``None``, such as when entering and exiting the bar. + :param fill_char: the character to use to show the filled part of the + progress bar. + :param empty_char: the character to use to show the non-filled part of + the progress bar. + :param bar_template: the format string to use as template for the bar. + The parameters in it are ``label`` for the label, + ``bar`` for the progress bar and ``info`` for the + info section. + :param info_sep: the separator between multiple info items (eta etc.) + :param width: the width of the progress bar in characters, 0 means full + terminal width + :param file: The file to write to. If this is not a terminal then + only the label is printed. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. This is only needed if ANSI + codes are included anywhere in the progress bar output + which is not the case by default. + :param update_min_steps: Render only when this many updates have + completed. This allows tuning for very fast iterators. + + .. versionchanged:: 8.0 + Output is shown even if execution time is less than 0.5 seconds. + + .. versionchanged:: 8.0 + ``item_show_func`` shows the current item, not the previous one. + + .. versionchanged:: 8.0 + Labels are echoed if the output is not a TTY. Reverts a change + in 7.0 that removed all output. + + .. versionadded:: 8.0 + Added the ``update_min_steps`` parameter. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. Added the ``update`` method to + the object. + + .. versionadded:: 2.0 + """ + from ._termui_impl import ProgressBar + + color = resolve_color_default(color) + return ProgressBar( + iterable=iterable, + length=length, + show_eta=show_eta, + show_percent=show_percent, + show_pos=show_pos, + item_show_func=item_show_func, + fill_char=fill_char, + empty_char=empty_char, + bar_template=bar_template, + info_sep=info_sep, + file=file, + label=label, + width=width, + color=color, + update_min_steps=update_min_steps, + ) + + +def clear() -> None: + """Clears the terminal screen. This will have the effect of clearing + the whole visible space of the terminal and moving the cursor to the + top left. This does not do anything if not connected to a terminal. + + .. 
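A compact sketch of both `progressbar()` styles the docstring describes; the loop bodies stand in for real work:

```python
import time
import click

# Iterable style: the bar advances as the wrapped iterable is consumed.
with click.progressbar(range(200), label="Processing") as bar:
    for item in bar:
        time.sleep(0.01)  # stand-in for real work

# Manual style: no iterable, advance explicitly via update().
with click.progressbar(length=1000, label="Copying") as bar:
    for _ in range(10):
        bar.update(100)  # 10 steps of 100 units each
```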
versionadded:: 2.0 + """ + if not isatty(sys.stdout): + return + + # ANSI escape \033[2J clears the screen, \033[1;1H moves the cursor + echo("\033[2J\033[1;1H", nl=False) + + +def _interpret_color( + color: t.Union[int, t.Tuple[int, int, int], str], offset: int = 0 +) -> str: + if isinstance(color, int): + return f"{38 + offset};5;{color:d}" + + if isinstance(color, (tuple, list)): + r, g, b = color + return f"{38 + offset};2;{r:d};{g:d};{b:d}" + + return str(_ansi_colors[color] + offset) + + +def style( + text: t.Any, + fg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None, + bg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None, + bold: t.Optional[bool] = None, + dim: t.Optional[bool] = None, + underline: t.Optional[bool] = None, + overline: t.Optional[bool] = None, + italic: t.Optional[bool] = None, + blink: t.Optional[bool] = None, + reverse: t.Optional[bool] = None, + strikethrough: t.Optional[bool] = None, + reset: bool = True, +) -> str: + """Styles a text with ANSI styles and returns the new string. By + default the styling is self contained which means that at the end + of the string a reset code is issued. This can be prevented by + passing ``reset=False``. + + Examples:: + + click.echo(click.style('Hello World!', fg='green')) + click.echo(click.style('ATTENTION!', blink=True)) + click.echo(click.style('Some things', reverse=True, fg='cyan')) + click.echo(click.style('More colors', fg=(255, 12, 128), bg=117)) + + Supported color names: + + * ``black`` (might be a gray) + * ``red`` + * ``green`` + * ``yellow`` (might be an orange) + * ``blue`` + * ``magenta`` + * ``cyan`` + * ``white`` (might be light gray) + * ``bright_black`` + * ``bright_red`` + * ``bright_green`` + * ``bright_yellow`` + * ``bright_blue`` + * ``bright_magenta`` + * ``bright_cyan`` + * ``bright_white`` + * ``reset`` (reset the color code only) + + If the terminal supports it, color may also be specified as: + + - An integer in the interval [0, 255]. The terminal must support + 8-bit/256-color mode. + - An RGB tuple of three integers in [0, 255]. The terminal must + support 24-bit/true-color mode. + + See https://en.wikipedia.org/wiki/ANSI_color and + https://gist.github.com/XVilka/8346728 for more information. + + :param text: the string to style with ansi codes. + :param fg: if provided this will become the foreground color. + :param bg: if provided this will become the background color. + :param bold: if provided this will enable or disable bold mode. + :param dim: if provided this will enable or disable dim mode. This is + badly supported. + :param underline: if provided this will enable or disable underline. + :param overline: if provided this will enable or disable overline. + :param italic: if provided this will enable or disable italic. + :param blink: if provided this will enable or disable blinking. + :param reverse: if provided this will enable or disable inverse + rendering (foreground becomes background and the + other way round). + :param strikethrough: if provided this will enable or disable + striking through text. + :param reset: by default a reset-all code is added at the end of the + string which means that styles do not carry over. This + can be disabled to compose styles. + + .. versionchanged:: 8.0 + A non-string ``message`` is converted to a string. + + .. versionchanged:: 8.0 + Added support for 256 and RGB color codes. + + .. versionchanged:: 8.0 + Added the ``strikethrough``, ``italic``, and ``overline`` + parameters. + + .. 
versionchanged:: 7.0 + Added support for bright colors. + + .. versionadded:: 2.0 + """ + if not isinstance(text, str): + text = str(text) + + bits = [] + + if fg: + try: + bits.append(f"\033[{_interpret_color(fg)}m") + except KeyError: + raise TypeError(f"Unknown color {fg!r}") from None + + if bg: + try: + bits.append(f"\033[{_interpret_color(bg, 10)}m") + except KeyError: + raise TypeError(f"Unknown color {bg!r}") from None + + if bold is not None: + bits.append(f"\033[{1 if bold else 22}m") + if dim is not None: + bits.append(f"\033[{2 if dim else 22}m") + if underline is not None: + bits.append(f"\033[{4 if underline else 24}m") + if overline is not None: + bits.append(f"\033[{53 if overline else 55}m") + if italic is not None: + bits.append(f"\033[{3 if italic else 23}m") + if blink is not None: + bits.append(f"\033[{5 if blink else 25}m") + if reverse is not None: + bits.append(f"\033[{7 if reverse else 27}m") + if strikethrough is not None: + bits.append(f"\033[{9 if strikethrough else 29}m") + bits.append(text) + if reset: + bits.append(_ansi_reset_all) + return "".join(bits) + + +def unstyle(text: str) -> str: + """Removes ANSI styling information from a string. Usually it's not + necessary to use this function as Click's echo function will + automatically remove styling if necessary. + + .. versionadded:: 2.0 + + :param text: the text to remove style information from. + """ + return strip_ansi(text) + + +def secho( + message: t.Optional[t.Any] = None, + file: t.Optional[t.IO[t.AnyStr]] = None, + nl: bool = True, + err: bool = False, + color: t.Optional[bool] = None, + **styles: t.Any, +) -> None: + """This function combines :func:`echo` and :func:`style` into one + call. As such the following two calls are the same:: + + click.secho('Hello World!', fg='green') + click.echo(click.style('Hello World!', fg='green')) + + All keyword arguments are forwarded to the underlying functions + depending on which one they go with. + + Non-string types will be converted to :class:`str`. However, + :class:`bytes` are passed directly to :meth:`echo` without applying + style. If you want to style bytes that represent text, call + :meth:`bytes.decode` first. + + .. versionchanged:: 8.0 + A non-string ``message`` is converted to a string. Bytes are + passed through without style applied. + + .. versionadded:: 2.0 + """ + if message is not None and not isinstance(message, (bytes, bytearray)): + message = style(message, **styles) + + return echo(message, file=file, nl=nl, err=err, color=color) + + +def edit( + text: t.Optional[t.AnyStr] = None, + editor: t.Optional[str] = None, + env: t.Optional[t.Mapping[str, str]] = None, + require_save: bool = True, + extension: str = ".txt", + filename: t.Optional[str] = None, +) -> t.Optional[t.AnyStr]: + r"""Edits the given text in the defined editor. If an editor is given + (should be the full path to the executable but the regular operating + system search path is used for finding the executable) it overrides + the detected editor. Optionally, some environment variables can be + used. If the editor is closed without changes, `None` is returned. In + case a file is edited directly the return value is always `None` and + `require_save` and `extension` are ignored. + + If the editor cannot be opened a :exc:`UsageError` is raised. + + Note for Windows: to simplify cross-platform usage, the newlines are + automatically converted from POSIX to Windows and vice versa. As such, + the message here will have ``\n`` as newline markers. 
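A few representative `style()`/`secho()` calls matching the color forms listed above:

```python
import click

click.echo(click.style("OK", fg="green", bold=True))
click.secho("WARNING", fg="yellow", err=True)   # style + echo in one call
click.secho("8-bit color", fg=196)              # 256-color index
click.secho("truecolor", fg=(255, 12, 128))     # RGB tuple
```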
+ + :param text: the text to edit. + :param editor: optionally the editor to use. Defaults to automatic + detection. + :param env: environment variables to forward to the editor. + :param require_save: if this is true, then not saving in the editor + will make the return value become `None`. + :param extension: the extension to tell the editor about. This defaults + to `.txt` but changing this might change syntax + highlighting. + :param filename: if provided it will edit this file instead of the + provided text contents. It will not use a temporary + file as an indirection in that case. + """ + from ._termui_impl import Editor + + ed = Editor(editor=editor, env=env, require_save=require_save, extension=extension) + + if filename is None: + return ed.edit(text) + + ed.edit_file(filename) + return None + + +def launch(url: str, wait: bool = False, locate: bool = False) -> int: + """This function launches the given URL (or filename) in the default + viewer application for this file type. If this is an executable, it + might launch the executable in a new session. The return value is + the exit code of the launched application. Usually, ``0`` indicates + success. + + Examples:: + + click.launch('https://click.palletsprojects.com/') + click.launch('/my/downloaded/file', locate=True) + + .. versionadded:: 2.0 + + :param url: URL or filename of the thing to launch. + :param wait: Wait for the program to exit before returning. This + only works if the launched program blocks. In particular, + ``xdg-open`` on Linux does not block. + :param locate: if this is set to `True` then instead of launching the + application associated with the URL it will attempt to + launch a file manager with the file located. This + might have weird effects if the URL does not point to + the filesystem. + """ + from ._termui_impl import open_url + + return open_url(url, wait=wait, locate=locate) + + +# If this is provided, getchar() calls into this instead. This is used +# for unittesting purposes. +_getchar: t.Optional[t.Callable[[bool], str]] = None + + +def getchar(echo: bool = False) -> str: + """Fetches a single character from the terminal and returns it. This + will always return a unicode character and under certain rare + circumstances this might return more than one character. The + situations which more than one character is returned is when for + whatever reason multiple characters end up in the terminal buffer or + standard input was not actually a terminal. + + Note that this will always read from the terminal, even if something + is piped into the standard input. + + Note for Windows: in rare cases when typing non-ASCII characters, this + function might wait for a second character and then return both at once. + This is because certain Unicode characters look like special-key markers. + + .. versionadded:: 2.0 + + :param echo: if set to `True`, the character read will also show up on + the terminal. The default is to not show it. + """ + global _getchar + + if _getchar is None: + from ._termui_impl import getchar as f + + _getchar = f + + return _getchar(echo) + + +def raw_terminal() -> t.ContextManager[int]: + from ._termui_impl import raw_terminal as f + + return f() + + +def pause(info: t.Optional[str] = None, err: bool = False) -> None: + """This command stops execution and waits for the user to press any + key to continue. This is similar to the Windows batch "pause" + command. If the program is not run through a terminal, this command + will instead do nothing. + + .. versionadded:: 2.0 + + .. 
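Minimal sketches of `edit()` and `launch()`; the seed text and file path are illustrative:

```python
import click

# Opens $VISUAL/$EDITOR on a temp file; returns None if the editor is
# closed without saving changes (require_save=True).
message = click.edit("# Describe your change above this line\n", extension=".md")

click.launch("https://click.palletsprojects.com/")  # default browser
click.launch("/tmp/report.pdf", locate=True)        # open containing folder
```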
versionadded:: 4.0 + Added the `err` parameter. + + :param info: The message to print before pausing. Defaults to + ``"Press any key to continue..."``. + :param err: if set to message goes to ``stderr`` instead of + ``stdout``, the same as with echo. + """ + if not isatty(sys.stdin) or not isatty(sys.stdout): + return + + if info is None: + info = _("Press any key to continue...") + + try: + if info: + echo(info, nl=False, err=err) + try: + getchar() + except (KeyboardInterrupt, EOFError): + pass + finally: + if info: + echo(err=err) diff --git a/venv/lib/python3.11/site-packages/click/testing.py b/venv/lib/python3.11/site-packages/click/testing.py new file mode 100644 index 0000000..772b215 --- /dev/null +++ b/venv/lib/python3.11/site-packages/click/testing.py @@ -0,0 +1,483 @@ +import contextlib +import io +import os +import shlex +import shutil +import sys +import tempfile +import typing as t +from types import TracebackType + +from . import _compat +from . import formatting +from . import termui +from . import utils +from ._compat import _find_binary_reader + +if t.TYPE_CHECKING: + from .core import BaseCommand + + +class EchoingStdin: + def __init__(self, input: t.BinaryIO, output: t.BinaryIO) -> None: + self._input = input + self._output = output + self._paused = False + + def __getattr__(self, x: str) -> t.Any: + return getattr(self._input, x) + + def _echo(self, rv: bytes) -> bytes: + if not self._paused: + self._output.write(rv) + + return rv + + def read(self, n: int = -1) -> bytes: + return self._echo(self._input.read(n)) + + def read1(self, n: int = -1) -> bytes: + return self._echo(self._input.read1(n)) # type: ignore + + def readline(self, n: int = -1) -> bytes: + return self._echo(self._input.readline(n)) + + def readlines(self) -> t.List[bytes]: + return [self._echo(x) for x in self._input.readlines()] + + def __iter__(self) -> t.Iterator[bytes]: + return iter(self._echo(x) for x in self._input) + + def __repr__(self) -> str: + return repr(self._input) + + +@contextlib.contextmanager +def _pause_echo(stream: t.Optional[EchoingStdin]) -> t.Iterator[None]: + if stream is None: + yield + else: + stream._paused = True + yield + stream._paused = False + + +class _NamedTextIOWrapper(io.TextIOWrapper): + def __init__( + self, buffer: t.BinaryIO, name: str, mode: str, **kwargs: t.Any + ) -> None: + super().__init__(buffer, **kwargs) + self._name = name + self._mode = mode + + @property + def name(self) -> str: + return self._name + + @property + def mode(self) -> str: + return self._mode + + +def make_input_stream( + input: t.Optional[t.Union[str, bytes, t.IO[t.Any]]], charset: str +) -> t.BinaryIO: + # Is already an input stream. + if hasattr(input, "read"): + rv = _find_binary_reader(t.cast(t.IO[t.Any], input)) + + if rv is not None: + return rv + + raise TypeError("Could not find binary reader for input stream.") + + if input is None: + input = b"" + elif isinstance(input, str): + input = input.encode(charset) + + return io.BytesIO(input) + + +class Result: + """Holds the captured result of an invoked CLI script.""" + + def __init__( + self, + runner: "CliRunner", + stdout_bytes: bytes, + stderr_bytes: t.Optional[bytes], + return_value: t.Any, + exit_code: int, + exception: t.Optional[BaseException], + exc_info: t.Optional[ + t.Tuple[t.Type[BaseException], BaseException, TracebackType] + ] = None, + ): + #: The runner that created the result + self.runner = runner + #: The standard output as bytes. 
+ self.stdout_bytes = stdout_bytes + #: The standard error as bytes, or None if not available + self.stderr_bytes = stderr_bytes + #: The value returned from the invoked command. + #: + #: .. versionadded:: 8.0 + self.return_value = return_value + #: The exit code as integer. + self.exit_code = exit_code + #: The exception that happened if one did. + self.exception = exception + #: The traceback + self.exc_info = exc_info + + @property + def output(self) -> str: + """The (standard) output as unicode string.""" + return self.stdout + + @property + def stdout(self) -> str: + """The standard output as unicode string.""" + return self.stdout_bytes.decode(self.runner.charset, "replace").replace( + "\r\n", "\n" + ) + + @property + def stderr(self) -> str: + """The standard error as unicode string.""" + if self.stderr_bytes is None: + raise ValueError("stderr not separately captured") + return self.stderr_bytes.decode(self.runner.charset, "replace").replace( + "\r\n", "\n" + ) + + def __repr__(self) -> str: + exc_str = repr(self.exception) if self.exception else "okay" + return f"<{type(self).__name__} {exc_str}>" + + +class CliRunner: + """The CLI runner provides functionality to invoke a Click command line + script for unittesting purposes in a isolated environment. This only + works in single-threaded systems without any concurrency as it changes the + global interpreter state. + + :param charset: the character set for the input and output data. + :param env: a dictionary with environment variables for overriding. + :param echo_stdin: if this is set to `True`, then reading from stdin writes + to stdout. This is useful for showing examples in + some circumstances. Note that regular prompts + will automatically echo the input. + :param mix_stderr: if this is set to `False`, then stdout and stderr are + preserved as independent streams. This is useful for + Unix-philosophy apps that have predictable stdout and + noisy stderr, such that each may be measured + independently + """ + + def __init__( + self, + charset: str = "utf-8", + env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, + echo_stdin: bool = False, + mix_stderr: bool = True, + ) -> None: + self.charset = charset + self.env: t.Mapping[str, t.Optional[str]] = env or {} + self.echo_stdin = echo_stdin + self.mix_stderr = mix_stderr + + def get_default_prog_name(self, cli: "BaseCommand") -> str: + """Given a command object it will return the default program name + for it. The default is the `name` attribute or ``"root"`` if not + set. + """ + return cli.name or "root" + + def make_env( + self, overrides: t.Optional[t.Mapping[str, t.Optional[str]]] = None + ) -> t.Mapping[str, t.Optional[str]]: + """Returns the environment overrides for invoking a script.""" + rv = dict(self.env) + if overrides: + rv.update(overrides) + return rv + + @contextlib.contextmanager + def isolation( + self, + input: t.Optional[t.Union[str, bytes, t.IO[t.Any]]] = None, + env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, + color: bool = False, + ) -> t.Iterator[t.Tuple[io.BytesIO, t.Optional[io.BytesIO]]]: + """A context manager that sets up the isolation for invoking of a + command line tool. This sets up stdin with the given input data + and `os.environ` with the overrides from the given dictionary. + This also rebinds some internals in Click to be mocked (like the + prompt functionality). + + This is automatically done in the :meth:`invoke` method. + + :param input: the input stream to put into sys.stdin. 
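A minimal `CliRunner` round-trip against a hypothetical command, showing how `Result` exposes the captured output:

```python
import click
from click.testing import CliRunner

@click.command()
@click.argument("name")
def hello(name):
    click.echo(f"Hello {name}!")

runner = CliRunner()
result = runner.invoke(hello, ["World"])
assert result.exit_code == 0
assert result.output == "Hello World!\n"
```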
+ :param env: the environment overrides as dictionary. + :param color: whether the output should contain color codes. The + application can still override this explicitly. + + .. versionchanged:: 8.0 + ``stderr`` is opened with ``errors="backslashreplace"`` + instead of the default ``"strict"``. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + """ + bytes_input = make_input_stream(input, self.charset) + echo_input = None + + old_stdin = sys.stdin + old_stdout = sys.stdout + old_stderr = sys.stderr + old_forced_width = formatting.FORCED_WIDTH + formatting.FORCED_WIDTH = 80 + + env = self.make_env(env) + + bytes_output = io.BytesIO() + + if self.echo_stdin: + bytes_input = echo_input = t.cast( + t.BinaryIO, EchoingStdin(bytes_input, bytes_output) + ) + + sys.stdin = text_input = _NamedTextIOWrapper( + bytes_input, encoding=self.charset, name="", mode="r" + ) + + if self.echo_stdin: + # Force unbuffered reads, otherwise TextIOWrapper reads a + # large chunk which is echoed early. + text_input._CHUNK_SIZE = 1 # type: ignore + + sys.stdout = _NamedTextIOWrapper( + bytes_output, encoding=self.charset, name="", mode="w" + ) + + bytes_error = None + if self.mix_stderr: + sys.stderr = sys.stdout + else: + bytes_error = io.BytesIO() + sys.stderr = _NamedTextIOWrapper( + bytes_error, + encoding=self.charset, + name="", + mode="w", + errors="backslashreplace", + ) + + @_pause_echo(echo_input) # type: ignore + def visible_input(prompt: t.Optional[str] = None) -> str: + sys.stdout.write(prompt or "") + val = text_input.readline().rstrip("\r\n") + sys.stdout.write(f"{val}\n") + sys.stdout.flush() + return val + + @_pause_echo(echo_input) # type: ignore + def hidden_input(prompt: t.Optional[str] = None) -> str: + sys.stdout.write(f"{prompt or ''}\n") + sys.stdout.flush() + return text_input.readline().rstrip("\r\n") + + @_pause_echo(echo_input) # type: ignore + def _getchar(echo: bool) -> str: + char = sys.stdin.read(1) + + if echo: + sys.stdout.write(char) + + sys.stdout.flush() + return char + + default_color = color + + def should_strip_ansi( + stream: t.Optional[t.IO[t.Any]] = None, color: t.Optional[bool] = None + ) -> bool: + if color is None: + return not default_color + return not color + + old_visible_prompt_func = termui.visible_prompt_func + old_hidden_prompt_func = termui.hidden_prompt_func + old__getchar_func = termui._getchar + old_should_strip_ansi = utils.should_strip_ansi # type: ignore + old__compat_should_strip_ansi = _compat.should_strip_ansi + termui.visible_prompt_func = visible_input + termui.hidden_prompt_func = hidden_input + termui._getchar = _getchar + utils.should_strip_ansi = should_strip_ansi # type: ignore + _compat.should_strip_ansi = should_strip_ansi + + old_env = {} + try: + for key, value in env.items(): + old_env[key] = os.environ.get(key) + if value is None: + try: + del os.environ[key] + except Exception: + pass + else: + os.environ[key] = value + yield (bytes_output, bytes_error) + finally: + for key, value in old_env.items(): + if value is None: + try: + del os.environ[key] + except Exception: + pass + else: + os.environ[key] = value + sys.stdout = old_stdout + sys.stderr = old_stderr + sys.stdin = old_stdin + termui.visible_prompt_func = old_visible_prompt_func + termui.hidden_prompt_func = old_hidden_prompt_func + termui._getchar = old__getchar_func + utils.should_strip_ansi = old_should_strip_ansi # type: ignore + _compat.should_strip_ansi = old__compat_should_strip_ansi + formatting.FORCED_WIDTH = old_forced_width + + def invoke( + self, + cli: 
"BaseCommand", + args: t.Optional[t.Union[str, t.Sequence[str]]] = None, + input: t.Optional[t.Union[str, bytes, t.IO[t.Any]]] = None, + env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, + catch_exceptions: bool = True, + color: bool = False, + **extra: t.Any, + ) -> Result: + """Invokes a command in an isolated environment. The arguments are + forwarded directly to the command line script, the `extra` keyword + arguments are passed to the :meth:`~clickpkg.Command.main` function of + the command. + + This returns a :class:`Result` object. + + :param cli: the command to invoke + :param args: the arguments to invoke. It may be given as an iterable + or a string. When given as string it will be interpreted + as a Unix shell command. More details at + :func:`shlex.split`. + :param input: the input data for `sys.stdin`. + :param env: the environment overrides. + :param catch_exceptions: Whether to catch any other exceptions than + ``SystemExit``. + :param extra: the keyword arguments to pass to :meth:`main`. + :param color: whether the output should contain color codes. The + application can still override this explicitly. + + .. versionchanged:: 8.0 + The result object has the ``return_value`` attribute with + the value returned from the invoked command. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + + .. versionchanged:: 3.0 + Added the ``catch_exceptions`` parameter. + + .. versionchanged:: 3.0 + The result object has the ``exc_info`` attribute with the + traceback if available. + """ + exc_info = None + with self.isolation(input=input, env=env, color=color) as outstreams: + return_value = None + exception: t.Optional[BaseException] = None + exit_code = 0 + + if isinstance(args, str): + args = shlex.split(args) + + try: + prog_name = extra.pop("prog_name") + except KeyError: + prog_name = self.get_default_prog_name(cli) + + try: + return_value = cli.main(args=args or (), prog_name=prog_name, **extra) + except SystemExit as e: + exc_info = sys.exc_info() + e_code = t.cast(t.Optional[t.Union[int, t.Any]], e.code) + + if e_code is None: + e_code = 0 + + if e_code != 0: + exception = e + + if not isinstance(e_code, int): + sys.stdout.write(str(e_code)) + sys.stdout.write("\n") + e_code = 1 + + exit_code = e_code + + except Exception as e: + if not catch_exceptions: + raise + exception = e + exit_code = 1 + exc_info = sys.exc_info() + finally: + sys.stdout.flush() + stdout = outstreams[0].getvalue() + if self.mix_stderr: + stderr = None + else: + stderr = outstreams[1].getvalue() # type: ignore + + return Result( + runner=self, + stdout_bytes=stdout, + stderr_bytes=stderr, + return_value=return_value, + exit_code=exit_code, + exception=exception, + exc_info=exc_info, # type: ignore + ) + + @contextlib.contextmanager + def isolated_filesystem( + self, temp_dir: t.Optional[t.Union[str, "os.PathLike[str]"]] = None + ) -> t.Iterator[str]: + """A context manager that creates a temporary directory and + changes the current working directory to it. This isolates tests + that affect the contents of the CWD to prevent them from + interfering with each other. + + :param temp_dir: Create the temporary directory under this + directory. If given, the created directory is not removed + when exiting. + + .. versionchanged:: 8.0 + Added the ``temp_dir`` parameter. 
+ """ + cwd = os.getcwd() + dt = tempfile.mkdtemp(dir=temp_dir) + os.chdir(dt) + + try: + yield dt + finally: + os.chdir(cwd) + + if temp_dir is None: + try: + shutil.rmtree(dt) + except OSError: + pass diff --git a/venv/lib/python3.11/site-packages/click/types.py b/venv/lib/python3.11/site-packages/click/types.py new file mode 100644 index 0000000..a70fd58 --- /dev/null +++ b/venv/lib/python3.11/site-packages/click/types.py @@ -0,0 +1,1093 @@ +import os +import stat +import sys +import typing as t +from datetime import datetime +from gettext import gettext as _ +from gettext import ngettext + +from ._compat import _get_argv_encoding +from ._compat import open_stream +from .exceptions import BadParameter +from .utils import format_filename +from .utils import LazyFile +from .utils import safecall + +if t.TYPE_CHECKING: + import typing_extensions as te + + from .core import Context + from .core import Parameter + from .shell_completion import CompletionItem + + +class ParamType: + """Represents the type of a parameter. Validates and converts values + from the command line or Python into the correct type. + + To implement a custom type, subclass and implement at least the + following: + + - The :attr:`name` class attribute must be set. + - Calling an instance of the type with ``None`` must return + ``None``. This is already implemented by default. + - :meth:`convert` must convert string values to the correct type. + - :meth:`convert` must accept values that are already the correct + type. + - It must be able to convert a value if the ``ctx`` and ``param`` + arguments are ``None``. This can occur when converting prompt + input. + """ + + is_composite: t.ClassVar[bool] = False + arity: t.ClassVar[int] = 1 + + #: the descriptive name of this type + name: str + + #: if a list of this type is expected and the value is pulled from a + #: string environment variable, this is what splits it up. `None` + #: means any whitespace. For all parameters the general rule is that + #: whitespace splits them up. The exception are paths and files which + #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on + #: Windows). + envvar_list_splitter: t.ClassVar[t.Optional[str]] = None + + def to_info_dict(self) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + .. versionadded:: 8.0 + """ + # The class name without the "ParamType" suffix. + param_type = type(self).__name__.partition("ParamType")[0] + param_type = param_type.partition("ParameterType")[0] + + # Custom subclasses might not remember to set a name. + if hasattr(self, "name"): + name = self.name + else: + name = param_type + + return {"param_type": param_type, "name": name} + + def __call__( + self, + value: t.Any, + param: t.Optional["Parameter"] = None, + ctx: t.Optional["Context"] = None, + ) -> t.Any: + if value is not None: + return self.convert(value, param, ctx) + + def get_metavar(self, param: "Parameter") -> t.Optional[str]: + """Returns the metavar default for this param if it provides one.""" + + def get_missing_message(self, param: "Parameter") -> t.Optional[str]: + """Optionally might return extra information about a missing + parameter. + + .. versionadded:: 2.0 + """ + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + """Convert the value to the correct type. 
This is not called if + the value is ``None`` (the missing value). + + This must accept string values from the command line, as well as + values that are already the correct type. It may also convert + other compatible types. + + The ``param`` and ``ctx`` arguments may be ``None`` in certain + situations, such as when converting prompt input. + + If the value cannot be converted, call :meth:`fail` with a + descriptive message. + + :param value: The value to convert. + :param param: The parameter that is using this type to convert + its value. May be ``None``. + :param ctx: The current context that arrived at this value. May + be ``None``. + """ + return value + + def split_envvar_value(self, rv: str) -> t.Sequence[str]: + """Given a value from an environment variable this splits it up + into small chunks depending on the defined envvar list splitter. + + If the splitter is set to `None`, which means that whitespace splits, + then leading and trailing whitespace is ignored. Otherwise, leading + and trailing splitters usually lead to empty items being included. + """ + return (rv or "").split(self.envvar_list_splitter) + + def fail( + self, + message: str, + param: t.Optional["Parameter"] = None, + ctx: t.Optional["Context"] = None, + ) -> "t.NoReturn": + """Helper method to fail with an invalid value message.""" + raise BadParameter(message, ctx=ctx, param=param) + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Return a list of + :class:`~click.shell_completion.CompletionItem` objects for the + incomplete value. Most types do not provide completions, but + some do, and this allows custom types to provide custom + completions as well. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. 
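A minimal custom type following the `convert()` contract above (accepts already-converted values, calls `fail()` on bad input); the type and option names are hypothetical:

```python
import click

class CommaList(click.ParamType):
    name = "comma-list"

    def convert(self, value, param, ctx):
        if isinstance(value, list):  # already the correct type
            return value
        try:
            return [part.strip() for part in value.split(",")]
        except AttributeError:
            self.fail(f"{value!r} is not a comma-separated string", param, ctx)

@click.command()
@click.option("--tags", type=CommaList(), default="a,b")
def tag(tags):
    click.echo(tags)
```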
versionadded:: 8.0 + """ + return [] + + +class CompositeParamType(ParamType): + is_composite = True + + @property + def arity(self) -> int: # type: ignore + raise NotImplementedError() + + +class FuncParamType(ParamType): + def __init__(self, func: t.Callable[[t.Any], t.Any]) -> None: + self.name: str = func.__name__ + self.func = func + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["func"] = self.func + return info_dict + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + try: + return self.func(value) + except ValueError: + try: + value = str(value) + except UnicodeError: + value = value.decode("utf-8", "replace") + + self.fail(value, param, ctx) + + +class UnprocessedParamType(ParamType): + name = "text" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + return value + + def __repr__(self) -> str: + return "UNPROCESSED" + + +class StringParamType(ParamType): + name = "text" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + if isinstance(value, bytes): + enc = _get_argv_encoding() + try: + value = value.decode(enc) + except UnicodeError: + fs_enc = sys.getfilesystemencoding() + if fs_enc != enc: + try: + value = value.decode(fs_enc) + except UnicodeError: + value = value.decode("utf-8", "replace") + else: + value = value.decode("utf-8", "replace") + return value + return str(value) + + def __repr__(self) -> str: + return "STRING" + + +class Choice(ParamType): + """The choice type allows a value to be checked against a fixed set + of supported values. All of these values have to be strings. + + You should only pass a list or tuple of choices. Other iterables + (like generators) may lead to surprising results. + + The resulting value will always be one of the originally passed choices + regardless of ``case_sensitive`` or any ``ctx.token_normalize_func`` + being specified. + + See :ref:`choice-opts` for an example. + + :param case_sensitive: Set to false to make choices case + insensitive. Defaults to true. + """ + + name = "choice" + + def __init__(self, choices: t.Sequence[str], case_sensitive: bool = True) -> None: + self.choices = choices + self.case_sensitive = case_sensitive + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["choices"] = self.choices + info_dict["case_sensitive"] = self.case_sensitive + return info_dict + + def get_metavar(self, param: "Parameter") -> str: + choices_str = "|".join(self.choices) + + # Use curly braces to indicate a required argument. + if param.required and param.param_type_name == "argument": + return f"{{{choices_str}}}" + + # Use square braces to indicate an option or optional argument. 
+ return f"[{choices_str}]" + + def get_missing_message(self, param: "Parameter") -> str: + return _("Choose from:\n\t{choices}").format(choices=",\n\t".join(self.choices)) + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + # Match through normalization and case sensitivity + # first do token_normalize_func, then lowercase + # preserve original `value` to produce an accurate message in + # `self.fail` + normed_value = value + normed_choices = {choice: choice for choice in self.choices} + + if ctx is not None and ctx.token_normalize_func is not None: + normed_value = ctx.token_normalize_func(value) + normed_choices = { + ctx.token_normalize_func(normed_choice): original + for normed_choice, original in normed_choices.items() + } + + if not self.case_sensitive: + normed_value = normed_value.casefold() + normed_choices = { + normed_choice.casefold(): original + for normed_choice, original in normed_choices.items() + } + + if normed_value in normed_choices: + return normed_choices[normed_value] + + choices_str = ", ".join(map(repr, self.choices)) + self.fail( + ngettext( + "{value!r} is not {choice}.", + "{value!r} is not one of {choices}.", + len(self.choices), + ).format(value=value, choice=choices_str, choices=choices_str), + param, + ctx, + ) + + def __repr__(self) -> str: + return f"Choice({list(self.choices)})" + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Complete choices that start with the incomplete value. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + str_choices = map(str, self.choices) + + if self.case_sensitive: + matched = (c for c in str_choices if c.startswith(incomplete)) + else: + incomplete = incomplete.lower() + matched = (c for c in str_choices if c.lower().startswith(incomplete)) + + return [CompletionItem(c) for c in matched] + + +class DateTime(ParamType): + """The DateTime type converts date strings into `datetime` objects. + + The format strings which are checked are configurable, but default to some + common (non-timezone aware) ISO 8601 formats. + + When specifying *DateTime* formats, you should only pass a list or a tuple. + Other iterables, like generators, may lead to surprising results. + + The format strings are processed using ``datetime.strptime``, and this + consequently defines the format strings which are allowed. + + Parsing is tried using each format, in order, and the first format which + parses successfully is used. + + :param formats: A list or tuple of date format strings, in the order in + which they should be tried. Defaults to + ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``, + ``'%Y-%m-%d %H:%M:%S'``. 
+ """ + + name = "datetime" + + def __init__(self, formats: t.Optional[t.Sequence[str]] = None): + self.formats: t.Sequence[str] = formats or [ + "%Y-%m-%d", + "%Y-%m-%dT%H:%M:%S", + "%Y-%m-%d %H:%M:%S", + ] + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["formats"] = self.formats + return info_dict + + def get_metavar(self, param: "Parameter") -> str: + return f"[{'|'.join(self.formats)}]" + + def _try_to_convert_date(self, value: t.Any, format: str) -> t.Optional[datetime]: + try: + return datetime.strptime(value, format) + except ValueError: + return None + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + if isinstance(value, datetime): + return value + + for format in self.formats: + converted = self._try_to_convert_date(value, format) + + if converted is not None: + return converted + + formats_str = ", ".join(map(repr, self.formats)) + self.fail( + ngettext( + "{value!r} does not match the format {format}.", + "{value!r} does not match the formats {formats}.", + len(self.formats), + ).format(value=value, format=formats_str, formats=formats_str), + param, + ctx, + ) + + def __repr__(self) -> str: + return "DateTime" + + +class _NumberParamTypeBase(ParamType): + _number_class: t.ClassVar[t.Type[t.Any]] + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + try: + return self._number_class(value) + except ValueError: + self.fail( + _("{value!r} is not a valid {number_type}.").format( + value=value, number_type=self.name + ), + param, + ctx, + ) + + +class _NumberRangeBase(_NumberParamTypeBase): + def __init__( + self, + min: t.Optional[float] = None, + max: t.Optional[float] = None, + min_open: bool = False, + max_open: bool = False, + clamp: bool = False, + ) -> None: + self.min = min + self.max = max + self.min_open = min_open + self.max_open = max_open + self.clamp = clamp + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + min=self.min, + max=self.max, + min_open=self.min_open, + max_open=self.max_open, + clamp=self.clamp, + ) + return info_dict + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + import operator + + rv = super().convert(value, param, ctx) + lt_min: bool = self.min is not None and ( + operator.le if self.min_open else operator.lt + )(rv, self.min) + gt_max: bool = self.max is not None and ( + operator.ge if self.max_open else operator.gt + )(rv, self.max) + + if self.clamp: + if lt_min: + return self._clamp(self.min, 1, self.min_open) # type: ignore + + if gt_max: + return self._clamp(self.max, -1, self.max_open) # type: ignore + + if lt_min or gt_max: + self.fail( + _("{value} is not in the range {range}.").format( + value=rv, range=self._describe_range() + ), + param, + ctx, + ) + + return rv + + def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float: + """Find the valid value to clamp to bound in the given + direction. + + :param bound: The boundary value. + :param dir: 1 or -1 indicating the direction to move. + :param open: If true, the range does not include the bound. 
+ """ + raise NotImplementedError + + def _describe_range(self) -> str: + """Describe the range for use in help text.""" + if self.min is None: + op = "<" if self.max_open else "<=" + return f"x{op}{self.max}" + + if self.max is None: + op = ">" if self.min_open else ">=" + return f"x{op}{self.min}" + + lop = "<" if self.min_open else "<=" + rop = "<" if self.max_open else "<=" + return f"{self.min}{lop}x{rop}{self.max}" + + def __repr__(self) -> str: + clamp = " clamped" if self.clamp else "" + return f"<{type(self).__name__} {self._describe_range()}{clamp}>" + + +class IntParamType(_NumberParamTypeBase): + name = "integer" + _number_class = int + + def __repr__(self) -> str: + return "INT" + + +class IntRange(_NumberRangeBase, IntParamType): + """Restrict an :data:`click.INT` value to a range of accepted + values. See :ref:`ranges`. + + If ``min`` or ``max`` are not passed, any value is accepted in that + direction. If ``min_open`` or ``max_open`` are enabled, the + corresponding boundary is not included in the range. + + If ``clamp`` is enabled, a value outside the range is clamped to the + boundary instead of failing. + + .. versionchanged:: 8.0 + Added the ``min_open`` and ``max_open`` parameters. + """ + + name = "integer range" + + def _clamp( # type: ignore + self, bound: int, dir: "te.Literal[1, -1]", open: bool + ) -> int: + if not open: + return bound + + return bound + dir + + +class FloatParamType(_NumberParamTypeBase): + name = "float" + _number_class = float + + def __repr__(self) -> str: + return "FLOAT" + + +class FloatRange(_NumberRangeBase, FloatParamType): + """Restrict a :data:`click.FLOAT` value to a range of accepted + values. See :ref:`ranges`. + + If ``min`` or ``max`` are not passed, any value is accepted in that + direction. If ``min_open`` or ``max_open`` are enabled, the + corresponding boundary is not included in the range. + + If ``clamp`` is enabled, a value outside the range is clamped to the + boundary instead of failing. This is not supported if either + boundary is marked ``open``. + + .. versionchanged:: 8.0 + Added the ``min_open`` and ``max_open`` parameters. + """ + + name = "float range" + + def __init__( + self, + min: t.Optional[float] = None, + max: t.Optional[float] = None, + min_open: bool = False, + max_open: bool = False, + clamp: bool = False, + ) -> None: + super().__init__( + min=min, max=max, min_open=min_open, max_open=max_open, clamp=clamp + ) + + if (min_open or max_open) and clamp: + raise TypeError("Clamping is not supported for open bounds.") + + def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float: + if not open: + return bound + + # Could use Python 3.9's math.nextafter here, but clamping an + # open float range doesn't seem to be particularly useful. It's + # left up to the user to write a callback to do it if needed. 
+ raise RuntimeError("Clamping is not supported for open bounds.") + + +class BoolParamType(ParamType): + name = "boolean" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + if value in {False, True}: + return bool(value) + + norm = value.strip().lower() + + if norm in {"1", "true", "t", "yes", "y", "on"}: + return True + + if norm in {"0", "false", "f", "no", "n", "off"}: + return False + + self.fail( + _("{value!r} is not a valid boolean.").format(value=value), param, ctx + ) + + def __repr__(self) -> str: + return "BOOL" + + +class UUIDParameterType(ParamType): + name = "uuid" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + import uuid + + if isinstance(value, uuid.UUID): + return value + + value = value.strip() + + try: + return uuid.UUID(value) + except ValueError: + self.fail( + _("{value!r} is not a valid UUID.").format(value=value), param, ctx + ) + + def __repr__(self) -> str: + return "UUID" + + +class File(ParamType): + """Declares a parameter to be a file for reading or writing. The file + is automatically closed once the context tears down (after the command + finished working). + + Files can be opened for reading or writing. The special value ``-`` + indicates stdin or stdout depending on the mode. + + By default, the file is opened for reading text data, but it can also be + opened in binary mode or for writing. The encoding parameter can be used + to force a specific encoding. + + The `lazy` flag controls if the file should be opened immediately or upon + first IO. The default is to be non-lazy for standard input and output + streams as well as files opened for reading, `lazy` otherwise. When opening a + file lazily for reading, it is still opened temporarily for validation, but + will not be held open until first IO. lazy is mainly useful when opening + for writing to avoid creating the file until it is needed. + + Files can also be opened atomically in which case all writes go into a + separate file in the same folder and upon completion the file will + be moved over to the original location. This is useful if a file + regularly read by other users is modified. + + See :ref:`file-args` for more information. + + .. versionchanged:: 2.0 + Added the ``atomic`` parameter. 
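A minimal `File` usage sketch; the command and argument names are hypothetical:

```python
import click

@click.command()
@click.argument("src", type=click.File("rb"))
@click.argument("dst", type=click.File("wb", atomic=True))
def copy(src, dst):
    # "-" for either argument maps to stdin/stdout; atomic=True writes to a
    # temporary file and moves it into place when the context tears down.
    dst.write(src.read())
```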
+ """ + + name = "filename" + envvar_list_splitter: t.ClassVar[str] = os.path.pathsep + + def __init__( + self, + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + lazy: t.Optional[bool] = None, + atomic: bool = False, + ) -> None: + self.mode = mode + self.encoding = encoding + self.errors = errors + self.lazy = lazy + self.atomic = atomic + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update(mode=self.mode, encoding=self.encoding) + return info_dict + + def resolve_lazy_flag(self, value: "t.Union[str, os.PathLike[str]]") -> bool: + if self.lazy is not None: + return self.lazy + if os.fspath(value) == "-": + return False + elif "w" in self.mode: + return True + return False + + def convert( + self, + value: t.Union[str, "os.PathLike[str]", t.IO[t.Any]], + param: t.Optional["Parameter"], + ctx: t.Optional["Context"], + ) -> t.IO[t.Any]: + if _is_file_like(value): + return value + + value = t.cast("t.Union[str, os.PathLike[str]]", value) + + try: + lazy = self.resolve_lazy_flag(value) + + if lazy: + lf = LazyFile( + value, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + + if ctx is not None: + ctx.call_on_close(lf.close_intelligently) + + return t.cast(t.IO[t.Any], lf) + + f, should_close = open_stream( + value, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + + # If a context is provided, we automatically close the file + # at the end of the context execution (or flush out). If a + # context does not exist, it's the caller's responsibility to + # properly close the file. This for instance happens when the + # type is used with prompts. + if ctx is not None: + if should_close: + ctx.call_on_close(safecall(f.close)) + else: + ctx.call_on_close(safecall(f.flush)) + + return f + except OSError as e: + self.fail(f"'{format_filename(value)}': {e.strerror}", param, ctx) + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Return a special completion marker that tells the completion + system to use the shell to provide file path completions. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + return [CompletionItem(incomplete, type="file")] + + +def _is_file_like(value: t.Any) -> "te.TypeGuard[t.IO[t.Any]]": + return hasattr(value, "read") or hasattr(value, "write") + + +class Path(ParamType): + """The ``Path`` type is similar to the :class:`File` type, but + returns the filename instead of an open file. Various checks can be + enabled to validate the type of file and permissions. + + :param exists: The file or directory needs to exist for the value to + be valid. If this is not set to ``True``, and the file does not + exist, then all further checks are silently skipped. + :param file_okay: Allow a file as a value. + :param dir_okay: Allow a directory as a value. + :param readable: if true, a readable check is performed. + :param writable: if true, a writable check is performed. + :param executable: if true, an executable check is performed. + :param resolve_path: Make the value absolute and resolve any + symlinks. A ``~`` is not expanded, as this is supposed to be + done by the shell only. + :param allow_dash: Allow a single dash as a value, which indicates + a standard stream (but does not open it). 
Use + :func:`~click.open_file` to handle opening this value. + :param path_type: Convert the incoming path value to this type. If + ``None``, keep Python's default, which is ``str``. Useful to + convert to :class:`pathlib.Path`. + + .. versionchanged:: 8.1 + Added the ``executable`` parameter. + + .. versionchanged:: 8.0 + Allow passing ``path_type=pathlib.Path``. + + .. versionchanged:: 6.0 + Added the ``allow_dash`` parameter. + """ + + envvar_list_splitter: t.ClassVar[str] = os.path.pathsep + + def __init__( + self, + exists: bool = False, + file_okay: bool = True, + dir_okay: bool = True, + writable: bool = False, + readable: bool = True, + resolve_path: bool = False, + allow_dash: bool = False, + path_type: t.Optional[t.Type[t.Any]] = None, + executable: bool = False, + ): + self.exists = exists + self.file_okay = file_okay + self.dir_okay = dir_okay + self.readable = readable + self.writable = writable + self.executable = executable + self.resolve_path = resolve_path + self.allow_dash = allow_dash + self.type = path_type + + if self.file_okay and not self.dir_okay: + self.name: str = _("file") + elif self.dir_okay and not self.file_okay: + self.name = _("directory") + else: + self.name = _("path") + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + exists=self.exists, + file_okay=self.file_okay, + dir_okay=self.dir_okay, + writable=self.writable, + readable=self.readable, + allow_dash=self.allow_dash, + ) + return info_dict + + def coerce_path_result( + self, value: "t.Union[str, os.PathLike[str]]" + ) -> "t.Union[str, bytes, os.PathLike[str]]": + if self.type is not None and not isinstance(value, self.type): + if self.type is str: + return os.fsdecode(value) + elif self.type is bytes: + return os.fsencode(value) + else: + return t.cast("os.PathLike[str]", self.type(value)) + + return value + + def convert( + self, + value: "t.Union[str, os.PathLike[str]]", + param: t.Optional["Parameter"], + ctx: t.Optional["Context"], + ) -> "t.Union[str, bytes, os.PathLike[str]]": + rv = value + + is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-") + + if not is_dash: + if self.resolve_path: + # os.path.realpath doesn't resolve symlinks on Windows + # until Python 3.8. Use pathlib for now. 
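+                # Illustrative note: a value such as "docs/../README.md"
+                # becomes an absolute path with ".." segments and symlinks
+                # resolved.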
+ import pathlib + + rv = os.fsdecode(pathlib.Path(rv).resolve()) + + try: + st = os.stat(rv) + except OSError: + if not self.exists: + return self.coerce_path_result(rv) + self.fail( + _("{name} {filename!r} does not exist.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if not self.file_okay and stat.S_ISREG(st.st_mode): + self.fail( + _("{name} {filename!r} is a file.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + if not self.dir_okay and stat.S_ISDIR(st.st_mode): + self.fail( + _("{name} {filename!r} is a directory.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if self.readable and not os.access(rv, os.R_OK): + self.fail( + _("{name} {filename!r} is not readable.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if self.writable and not os.access(rv, os.W_OK): + self.fail( + _("{name} {filename!r} is not writable.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if self.executable and not os.access(value, os.X_OK): + self.fail( + _("{name} {filename!r} is not executable.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + return self.coerce_path_result(rv) + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Return a special completion marker that tells the completion + system to use the shell to provide path completions for only + directories or any paths. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + type = "dir" if self.dir_okay and not self.file_okay else "file" + return [CompletionItem(incomplete, type=type)] + + +class Tuple(CompositeParamType): + """The default behavior of Click is to apply a type on a value directly. + This works well in most cases, except for when `nargs` is set to a fixed + count and different types should be used for different items. In this + case the :class:`Tuple` type can be used. This type can only be used + if `nargs` is set to a fixed number. + + For more information see :ref:`tuple-type`. + + This can be selected by using a Python tuple literal as a type. + + :param types: a list of types that should be used for the tuple items. 
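+
+    A minimal sketch (the option below is hypothetical, for illustration
+    only)::
+
+        @click.command()
+        @click.option("--item", type=(str, int))
+        def put(item):
+            name, number = item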
+ """ + + def __init__(self, types: t.Sequence[t.Union[t.Type[t.Any], ParamType]]) -> None: + self.types: t.Sequence[ParamType] = [convert_type(ty) for ty in types] + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["types"] = [t.to_info_dict() for t in self.types] + return info_dict + + @property + def name(self) -> str: # type: ignore + return f"<{' '.join(ty.name for ty in self.types)}>" + + @property + def arity(self) -> int: # type: ignore + return len(self.types) + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + len_type = len(self.types) + len_value = len(value) + + if len_value != len_type: + self.fail( + ngettext( + "{len_type} values are required, but {len_value} was given.", + "{len_type} values are required, but {len_value} were given.", + len_value, + ).format(len_type=len_type, len_value=len_value), + param=param, + ctx=ctx, + ) + + return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value)) + + +def convert_type(ty: t.Optional[t.Any], default: t.Optional[t.Any] = None) -> ParamType: + """Find the most appropriate :class:`ParamType` for the given Python + type. If the type isn't provided, it can be inferred from a default + value. + """ + guessed_type = False + + if ty is None and default is not None: + if isinstance(default, (tuple, list)): + # If the default is empty, ty will remain None and will + # return STRING. + if default: + item = default[0] + + # A tuple of tuples needs to detect the inner types. + # Can't call convert recursively because that would + # incorrectly unwind the tuple to a single type. + if isinstance(item, (tuple, list)): + ty = tuple(map(type, item)) + else: + ty = type(item) + else: + ty = type(default) + + guessed_type = True + + if isinstance(ty, tuple): + return Tuple(ty) + + if isinstance(ty, ParamType): + return ty + + if ty is str or ty is None: + return STRING + + if ty is int: + return INT + + if ty is float: + return FLOAT + + if ty is bool: + return BOOL + + if guessed_type: + return STRING + + if __debug__: + try: + if issubclass(ty, ParamType): + raise AssertionError( + f"Attempted to use an uninstantiated parameter type ({ty})." + ) + except TypeError: + # ty is an instance (correct), so issubclass fails. + pass + + return FuncParamType(ty) + + +#: A dummy parameter type that just does nothing. From a user's +#: perspective this appears to just be the same as `STRING` but +#: internally no string conversion takes place if the input was bytes. +#: This is usually useful when working with file paths as they can +#: appear in bytes and unicode. +#: +#: For path related uses the :class:`Path` type is a better choice but +#: there are situations where an unprocessed type is useful which is why +#: it is is provided. +#: +#: .. versionadded:: 4.0 +UNPROCESSED = UnprocessedParamType() + +#: A unicode string parameter type which is the implicit default. This +#: can also be selected by using ``str`` as type. +STRING = StringParamType() + +#: An integer parameter. This can also be selected by using ``int`` as +#: type. +INT = IntParamType() + +#: A floating point value parameter. This can also be selected by using +#: ``float`` as type. +FLOAT = FloatParamType() + +#: A boolean parameter. This is the default for boolean flags. This can +#: also be selected by using ``bool`` as a type. +BOOL = BoolParamType() + +#: A UUID parameter. 
+UUID = UUIDParameterType() diff --git a/venv/lib/python3.11/site-packages/click/utils.py b/venv/lib/python3.11/site-packages/click/utils.py new file mode 100644 index 0000000..836c6f2 --- /dev/null +++ b/venv/lib/python3.11/site-packages/click/utils.py @@ -0,0 +1,624 @@ +import os +import re +import sys +import typing as t +from functools import update_wrapper +from types import ModuleType +from types import TracebackType + +from ._compat import _default_text_stderr +from ._compat import _default_text_stdout +from ._compat import _find_binary_writer +from ._compat import auto_wrap_for_ansi +from ._compat import binary_streams +from ._compat import open_stream +from ._compat import should_strip_ansi +from ._compat import strip_ansi +from ._compat import text_streams +from ._compat import WIN +from .globals import resolve_color_default + +if t.TYPE_CHECKING: + import typing_extensions as te + + P = te.ParamSpec("P") + +R = t.TypeVar("R") + + +def _posixify(name: str) -> str: + return "-".join(name.split()).lower() + + +def safecall(func: "t.Callable[P, R]") -> "t.Callable[P, t.Optional[R]]": + """Wraps a function so that it swallows exceptions.""" + + def wrapper(*args: "P.args", **kwargs: "P.kwargs") -> t.Optional[R]: + try: + return func(*args, **kwargs) + except Exception: + pass + return None + + return update_wrapper(wrapper, func) + + +def make_str(value: t.Any) -> str: + """Converts a value into a valid string.""" + if isinstance(value, bytes): + try: + return value.decode(sys.getfilesystemencoding()) + except UnicodeError: + return value.decode("utf-8", "replace") + return str(value) + + +def make_default_short_help(help: str, max_length: int = 45) -> str: + """Returns a condensed version of help string.""" + # Consider only the first paragraph. + paragraph_end = help.find("\n\n") + + if paragraph_end != -1: + help = help[:paragraph_end] + + # Collapse newlines, tabs, and spaces. + words = help.split() + + if not words: + return "" + + # The first paragraph started with a "no rewrap" marker, ignore it. + if words[0] == "\b": + words = words[1:] + + total_length = 0 + last_index = len(words) - 1 + + for i, word in enumerate(words): + total_length += len(word) + (i > 0) + + if total_length > max_length: # too long, truncate + break + + if word[-1] == ".": # sentence end, truncate without "..." + return " ".join(words[: i + 1]) + + if total_length == max_length and i != last_index: + break # not at sentence end, truncate with "..." + else: + return " ".join(words) # no truncation needed + + # Account for the length of the suffix. + total_length += len("...") + + # remove words until the length is short enough + while i > 0: + total_length -= len(words[i]) + (i > 0) + + if total_length <= max_length: + break + + i -= 1 + + return " ".join(words[:i]) + "..." + + +class LazyFile: + """A lazy file works like a regular file but it does not fully open + the file but it does perform some basic checks early to see if the + filename parameter does make sense. This is useful for safely opening + files for writing. 
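+
+    An illustrative sketch (the file name is hypothetical)::
+
+        lf = LazyFile("report.txt", "w")
+        # Nothing is created yet; the file opens on first IO.
+        lf.write("done\n")
+        lf.close_intelligently()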
+ """ + + def __init__( + self, + filename: t.Union[str, "os.PathLike[str]"], + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + atomic: bool = False, + ): + self.name: str = os.fspath(filename) + self.mode = mode + self.encoding = encoding + self.errors = errors + self.atomic = atomic + self._f: t.Optional[t.IO[t.Any]] + self.should_close: bool + + if self.name == "-": + self._f, self.should_close = open_stream(filename, mode, encoding, errors) + else: + if "r" in mode: + # Open and close the file in case we're opening it for + # reading so that we can catch at least some errors in + # some cases early. + open(filename, mode).close() + self._f = None + self.should_close = True + + def __getattr__(self, name: str) -> t.Any: + return getattr(self.open(), name) + + def __repr__(self) -> str: + if self._f is not None: + return repr(self._f) + return f"" + + def open(self) -> t.IO[t.Any]: + """Opens the file if it's not yet open. This call might fail with + a :exc:`FileError`. Not handling this error will produce an error + that Click shows. + """ + if self._f is not None: + return self._f + try: + rv, self.should_close = open_stream( + self.name, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + except OSError as e: + from .exceptions import FileError + + raise FileError(self.name, hint=e.strerror) from e + self._f = rv + return rv + + def close(self) -> None: + """Closes the underlying file, no matter what.""" + if self._f is not None: + self._f.close() + + def close_intelligently(self) -> None: + """This function only closes the file if it was opened by the lazy + file wrapper. For instance this will never close stdin. + """ + if self.should_close: + self.close() + + def __enter__(self) -> "LazyFile": + return self + + def __exit__( + self, + exc_type: t.Optional[t.Type[BaseException]], + exc_value: t.Optional[BaseException], + tb: t.Optional[TracebackType], + ) -> None: + self.close_intelligently() + + def __iter__(self) -> t.Iterator[t.AnyStr]: + self.open() + return iter(self._f) # type: ignore + + +class KeepOpenFile: + def __init__(self, file: t.IO[t.Any]) -> None: + self._file: t.IO[t.Any] = file + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._file, name) + + def __enter__(self) -> "KeepOpenFile": + return self + + def __exit__( + self, + exc_type: t.Optional[t.Type[BaseException]], + exc_value: t.Optional[BaseException], + tb: t.Optional[TracebackType], + ) -> None: + pass + + def __repr__(self) -> str: + return repr(self._file) + + def __iter__(self) -> t.Iterator[t.AnyStr]: + return iter(self._file) + + +def echo( + message: t.Optional[t.Any] = None, + file: t.Optional[t.IO[t.Any]] = None, + nl: bool = True, + err: bool = False, + color: t.Optional[bool] = None, +) -> None: + """Print a message and newline to stdout or a file. This should be + used instead of :func:`print` because it provides better support + for different data, files, and environments. + + Compared to :func:`print`, this does the following: + + - Ensures that the output encoding is not misconfigured on Linux. + - Supports Unicode in the Windows console. + - Supports writing to binary outputs, and supports writing bytes + to text outputs. + - Supports colors and styles on Windows. + - Removes ANSI color and style codes if the output does not look + like an interactive terminal. + - Always flushes the output. + + :param message: The string or bytes to output. Other objects are + converted to strings. + :param file: The file to write to. 
Defaults to ``stdout``. + :param err: Write to ``stderr`` instead of ``stdout``. + :param nl: Print a newline after the message. Enabled by default. + :param color: Force showing or hiding colors and other styles. By + default Click will remove color if the output does not look like + an interactive terminal. + + .. versionchanged:: 6.0 + Support Unicode output on the Windows console. Click does not + modify ``sys.stdout``, so ``sys.stdout.write()`` and ``print()`` + will still not support Unicode. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + + .. versionadded:: 3.0 + Added the ``err`` parameter. + + .. versionchanged:: 2.0 + Support colors on Windows if colorama is installed. + """ + if file is None: + if err: + file = _default_text_stderr() + else: + file = _default_text_stdout() + + # There are no standard streams attached to write to. For example, + # pythonw on Windows. + if file is None: + return + + # Convert non bytes/text into the native string type. + if message is not None and not isinstance(message, (str, bytes, bytearray)): + out: t.Optional[t.Union[str, bytes]] = str(message) + else: + out = message + + if nl: + out = out or "" + if isinstance(out, str): + out += "\n" + else: + out += b"\n" + + if not out: + file.flush() + return + + # If there is a message and the value looks like bytes, we manually + # need to find the binary stream and write the message in there. + # This is done separately so that most stream types will work as you + # would expect. Eg: you can write to StringIO for other cases. + if isinstance(out, (bytes, bytearray)): + binary_file = _find_binary_writer(file) + + if binary_file is not None: + file.flush() + binary_file.write(out) + binary_file.flush() + return + + # ANSI style code support. For no message or bytes, nothing happens. + # When outputting to a file instead of a terminal, strip codes. + else: + color = resolve_color_default(color) + + if should_strip_ansi(file, color): + out = strip_ansi(out) + elif WIN: + if auto_wrap_for_ansi is not None: + file = auto_wrap_for_ansi(file, color) # type: ignore + elif not color: + out = strip_ansi(out) + + file.write(out) # type: ignore + file.flush() + + +def get_binary_stream(name: "te.Literal['stdin', 'stdout', 'stderr']") -> t.BinaryIO: + """Returns a system stream for byte processing. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + """ + opener = binary_streams.get(name) + if opener is None: + raise TypeError(f"Unknown standard stream '{name}'") + return opener() + + +def get_text_stream( + name: "te.Literal['stdin', 'stdout', 'stderr']", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", +) -> t.TextIO: + """Returns a system stream for text processing. This usually returns + a wrapped stream around a binary stream returned from + :func:`get_binary_stream` but it also can take shortcuts for already + correctly configured streams. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + :param encoding: overrides the detected default encoding. + :param errors: overrides the default error mode. 
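+
+    A short illustrative sketch::
+
+        stdin = get_text_stream("stdin", encoding="utf-8")
+        data = stdin.read()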
+ """ + opener = text_streams.get(name) + if opener is None: + raise TypeError(f"Unknown standard stream '{name}'") + return opener(encoding, errors) + + +def open_file( + filename: t.Union[str, "os.PathLike[str]"], + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + lazy: bool = False, + atomic: bool = False, +) -> t.IO[t.Any]: + """Open a file, with extra behavior to handle ``'-'`` to indicate + a standard stream, lazy open on write, and atomic write. Similar to + the behavior of the :class:`~click.File` param type. + + If ``'-'`` is given to open ``stdout`` or ``stdin``, the stream is + wrapped so that using it in a context manager will not close it. + This makes it possible to use the function without accidentally + closing a standard stream: + + .. code-block:: python + + with open_file(filename) as f: + ... + + :param filename: The name or Path of the file to open, or ``'-'`` for + ``stdin``/``stdout``. + :param mode: The mode in which to open the file. + :param encoding: The encoding to decode or encode a file opened in + text mode. + :param errors: The error handling mode. + :param lazy: Wait to open the file until it is accessed. For read + mode, the file is temporarily opened to raise access errors + early, then closed until it is read again. + :param atomic: Write to a temporary file and replace the given file + on close. + + .. versionadded:: 3.0 + """ + if lazy: + return t.cast( + t.IO[t.Any], LazyFile(filename, mode, encoding, errors, atomic=atomic) + ) + + f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic) + + if not should_close: + f = t.cast(t.IO[t.Any], KeepOpenFile(f)) + + return f + + +def format_filename( + filename: "t.Union[str, bytes, os.PathLike[str], os.PathLike[bytes]]", + shorten: bool = False, +) -> str: + """Format a filename as a string for display. Ensures the filename can be + displayed by replacing any invalid bytes or surrogate escapes in the name + with the replacement character ``�``. + + Invalid bytes or surrogate escapes will raise an error when written to a + stream with ``errors="strict"``. This will typically happen with ``stdout`` + when the locale is something like ``en_GB.UTF-8``. + + Many scenarios *are* safe to write surrogates though, due to PEP 538 and + PEP 540, including: + + - Writing to ``stderr``, which uses ``errors="backslashreplace"``. + - The system has ``LANG=C.UTF-8``, ``C``, or ``POSIX``. Python opens + stdout and stderr with ``errors="surrogateescape"``. + - None of ``LANG/LC_*`` are set. Python assumes ``LANG=C.UTF-8``. + - Python is started in UTF-8 mode with ``PYTHONUTF8=1`` or ``-X utf8``. + Python opens stdout and stderr with ``errors="surrogateescape"``. + + :param filename: formats a filename for UI display. This will also convert + the filename into unicode without failing. + :param shorten: this optionally shortens the filename to strip of the + path that leads up to it. + """ + if shorten: + filename = os.path.basename(filename) + else: + filename = os.fspath(filename) + + if isinstance(filename, bytes): + filename = filename.decode(sys.getfilesystemencoding(), "replace") + else: + filename = filename.encode("utf-8", "surrogateescape").decode( + "utf-8", "replace" + ) + + return filename + + +def get_app_dir(app_name: str, roaming: bool = True, force_posix: bool = False) -> str: + r"""Returns the config folder for the application. The default behavior + is to return whatever is most appropriate for the operating system. 
+ + To give you an idea, for an app called ``"Foo Bar"``, something like + the following folders could be returned: + + Mac OS X: + ``~/Library/Application Support/Foo Bar`` + Mac OS X (POSIX): + ``~/.foo-bar`` + Unix: + ``~/.config/foo-bar`` + Unix (POSIX): + ``~/.foo-bar`` + Windows (roaming): + ``C:\Users\<user>\AppData\Roaming\Foo Bar`` + Windows (not roaming): + ``C:\Users\<user>\AppData\Local\Foo Bar`` + + .. versionadded:: 2.0 + + :param app_name: the application name. This should be properly capitalized + and can contain whitespace. + :param roaming: controls if the folder should be roaming or not on Windows. + Has no effect otherwise. + :param force_posix: if this is set to `True` then on any POSIX system the + folder will be stored in the home folder with a leading + dot instead of the XDG config home or darwin's + application support folder. + """ + if WIN: + key = "APPDATA" if roaming else "LOCALAPPDATA" + folder = os.environ.get(key) + if folder is None: + folder = os.path.expanduser("~") + return os.path.join(folder, app_name) + if force_posix: + return os.path.join(os.path.expanduser(f"~/.{_posixify(app_name)}")) + if sys.platform == "darwin": + return os.path.join( + os.path.expanduser("~/Library/Application Support"), app_name + ) + return os.path.join( + os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")), + _posixify(app_name), + ) + + +class PacifyFlushWrapper: + """This wrapper is used to catch and suppress BrokenPipeErrors resulting + from ``.flush()`` being called on broken pipe during the shutdown/final-GC + of the Python interpreter. Notably ``.flush()`` is always called on + ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any + other cleanup code, and the case where the underlying file is not a broken + pipe, all calls and attributes are proxied. + """ + + def __init__(self, wrapped: t.IO[t.Any]) -> None: + self.wrapped = wrapped + + def flush(self) -> None: + try: + self.wrapped.flush() + except OSError as e: + import errno + + if e.errno != errno.EPIPE: + raise + + def __getattr__(self, attr: str) -> t.Any: + return getattr(self.wrapped, attr) + + +def _detect_program_name( + path: t.Optional[str] = None, _main: t.Optional[ModuleType] = None +) -> str: + """Determine the command used to run the program, for use in help + text. If a file or entry point was executed, the file name is + returned. If ``python -m`` was used to execute a module or package, + ``python -m name`` is returned. + + This doesn't try to be too precise, the goal is to give a concise + name for help text. Files are only shown as their name without the + path. ``python`` is only shown for modules, and the full path to + ``sys.executable`` is not shown. + + :param path: The Python file being executed. Python puts this in + ``sys.argv[0]``, which is used by default. + :param _main: The ``__main__`` module. This should only be passed + during internal testing. + + .. versionadded:: 8.0 + Based on command args detection in the Werkzeug reloader. + + :meta private: + """ + if _main is None: + _main = sys.modules["__main__"] + + if not path: + path = sys.argv[0] + + # The value of __package__ indicates how Python was called. It may + # not exist if a setuptools script is installed as an egg. It may be + # set incorrectly for entry points created with pip on Windows. + # It is set to "" inside a Shiv or PEX zipapp.
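+    # For example (illustrative): "python app.py" is reported as "app.py",
+    # while "python -m example.cli" is reported as "python -m example.cli".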
+ if getattr(_main, "__package__", None) in {None, ""} or ( + os.name == "nt" + and _main.__package__ == "" + and not os.path.exists(path) + and os.path.exists(f"{path}.exe") + ): + # Executed a file, like "python app.py". + return os.path.basename(path) + + # Executed a module, like "python -m example". + # Rewritten by Python from "-m script" to "/path/to/script.py". + # Need to look at main module to determine how it was executed. + py_module = t.cast(str, _main.__package__) + name = os.path.splitext(os.path.basename(path))[0] + + # A submodule like "example.cli". + if name != "__main__": + py_module = f"{py_module}.{name}" + + return f"python -m {py_module.lstrip('.')}" + + +def _expand_args( + args: t.Iterable[str], + *, + user: bool = True, + env: bool = True, + glob_recursive: bool = True, +) -> t.List[str]: + """Simulate Unix shell expansion with Python functions. + + See :func:`glob.glob`, :func:`os.path.expanduser`, and + :func:`os.path.expandvars`. + + This is intended for use on Windows, where the shell does not do any + expansion. It may not exactly match what a Unix shell would do. + + :param args: List of command line arguments to expand. + :param user: Expand user home directory. + :param env: Expand environment variables. + :param glob_recursive: ``**`` matches directories recursively. + + .. versionchanged:: 8.1 + Invalid glob patterns are treated as empty expansions rather + than raising an error. + + .. versionadded:: 8.0 + + :meta private: + """ + from glob import glob + + out = [] + + for arg in args: + if user: + arg = os.path.expanduser(arg) + + if env: + arg = os.path.expandvars(arg) + + try: + matches = glob(arg, recursive=glob_recursive) + except re.error: + matches = [] + + if not matches: + out.append(arg) + else: + out.extend(matches) + + return out diff --git a/venv/lib/python3.11/site-packages/distro-1.9.0.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/distro-1.9.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.11/site-packages/distro-1.9.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.11/site-packages/distro-1.9.0.dist-info/LICENSE b/venv/lib/python3.11/site-packages/distro-1.9.0.dist-info/LICENSE new file mode 100644 index 0000000..e06d208 --- /dev/null +++ b/venv/lib/python3.11/site-packages/distro-1.9.0.dist-info/LICENSE @@ -0,0 +1,202 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ diff --git a/venv/lib/python3.11/site-packages/distro-1.9.0.dist-info/METADATA b/venv/lib/python3.11/site-packages/distro-1.9.0.dist-info/METADATA new file mode 100644 index 0000000..9312e8e --- /dev/null +++ b/venv/lib/python3.11/site-packages/distro-1.9.0.dist-info/METADATA @@ -0,0 +1,184 @@ +Metadata-Version: 2.1 +Name: distro +Version: 1.9.0 +Summary: Distro - an OS platform information API +Home-page: https://github.com/python-distro/distro +Author: Nir Cohen +Author-email: nir36g@gmail.com +License: Apache License, Version 2.0 +Platform: All +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: POSIX :: BSD +Classifier: Operating System :: POSIX :: BSD :: FreeBSD +Classifier: Operating System :: POSIX :: BSD :: NetBSD +Classifier: Operating System :: POSIX :: BSD :: OpenBSD +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: System :: Operating System +Requires-Python: >=3.6 +Description-Content-Type: text/markdown +License-File: LICENSE + +Distro - an OS platform information API +======================================= + +[![CI Status](https://github.com/python-distro/distro/workflows/CI/badge.svg)](https://github.com/python-distro/distro/actions/workflows/ci.yaml) +[![PyPI version](http://img.shields.io/pypi/v/distro.svg)](https://pypi.python.org/pypi/distro) +[![Supported Python Versions](https://img.shields.io/pypi/pyversions/distro.svg)](https://img.shields.io/pypi/pyversions/distro.svg) +[![Code Coverage](https://codecov.io/github/python-distro/distro/coverage.svg?branch=master)](https://codecov.io/github/python-distro/distro?branch=master) +[![Is Wheel](https://img.shields.io/pypi/wheel/distro.svg?style=flat)](https://pypi.python.org/pypi/distro) +[![Latest Github Release](https://readthedocs.org/projects/distro/badge/?version=stable)](http://distro.readthedocs.io/en/latest/) + +`distro` provides information about the +OS distribution it runs on, such as a reliable machine-readable ID, or +version information. + +It is the recommended replacement for Python's original +[`platform.linux_distribution`](https://docs.python.org/3.7/library/platform.html#platform.linux_distribution) +function (removed in Python 3.8). It also provides much more functionality +which isn't necessarily Python bound, like a command-line interface. + +Distro currently supports Linux and BSD based systems but [Windows and OS X support](https://github.com/python-distro/distro/issues/177) is also planned. 
+ +For Python 2.6 support, see https://github.com/python-distro/distro/tree/python2.6-support + +## Installation + +Installation of the latest released version from PyPI: + +```shell +pip install distro +``` + +Installation of the latest development version: + +```shell +pip install https://github.com/python-distro/distro/archive/master.tar.gz +``` + +To use as a standalone script, download `distro.py` directly: + +```shell +curl -O https://raw.githubusercontent.com/python-distro/distro/master/src/distro/distro.py +python distro.py +``` + +``distro`` is safe to vendor within projects that do not wish to add +dependencies. + +```shell +cd myproject +curl -O https://raw.githubusercontent.com/python-distro/distro/master/src/distro/distro.py +``` + +## Usage + +```bash +$ distro +Name: Antergos Linux +Version: 2015.10 (ISO-Rolling) +Codename: ISO-Rolling + +$ distro -j +{ + "codename": "ISO-Rolling", + "id": "antergos", + "like": "arch", + "version": "16.9", + "version_parts": { + "build_number": "", + "major": "16", + "minor": "9" + } +} + + +$ python +>>> import distro +>>> distro.name(pretty=True) +'CentOS Linux 8' +>>> distro.id() +'centos' +>>> distro.version(best=True) +'8.4.2105' +``` + + +## Documentation + +On top of the aforementioned API, several more functions are available. For a complete description of the +API, see the [latest API documentation](http://distro.readthedocs.org/en/latest/). + +## Background + +An alternative implementation became necessary because Python 3.5 deprecated +this function, and Python 3.8 removed it altogether. Its predecessor function +[`platform.dist`](https://docs.python.org/3.7/library/platform.html#platform.dist) +was already deprecated since Python 2.6 and removed in Python 3.8. Still, there +are many cases in which access to that information is needed. See [Python issue +1322](https://bugs.python.org/issue1322) for more information. + +The `distro` package implements a robust and inclusive way of retrieving the +information about a distribution based on new standards and old methods, +namely from these data sources (from high to low precedence): + +* The os-release file `/etc/os-release` if present, with a fall-back on `/usr/lib/os-release` if needed. +* The output of the `lsb_release` command, if available. +* The distro release file (`/etc/*(-|_)(release|version)`), if present. +* The `uname` command for BSD based distributions. + + +## Python and Distribution Support + +`distro` is supported and tested on Python 3.6+ and PyPy and on any +distribution that provides one or more of the data sources covered. + +This package is tested with test data that mimics the exact behavior of the data sources of [a number of Linux distributions](https://github.com/python-distro/distro/tree/master/tests/resources/distros). + + +## Testing + +```shell +git clone git@github.com:python-distro/distro.git +cd distro +pip install tox +tox +``` + + +## Contributions + +Pull requests are always welcome to deal with specific distributions or just +for general merriment. + +See [CONTRIBUTIONS](https://github.com/python-distro/distro/blob/master/CONTRIBUTING.md) for contribution info.
+ +Reference implementations for supporting additional distributions and file +formats can be found here: + +* https://github.com/saltstack/salt/blob/develop/salt/grains/core.py#L1172 +* https://github.com/chef/ohai/blob/master/lib/ohai/plugins/linux/platform.rb +* https://github.com/ansible/ansible/blob/devel/lib/ansible/module_utils/facts/system/distribution.py +* https://github.com/puppetlabs/facter/blob/master/lib/src/facts/linux/os_linux.cc + +## Package manager distributions + +* https://src.fedoraproject.org/rpms/python-distro +* https://www.archlinux.org/packages/community/any/python-distro/ +* https://launchpad.net/ubuntu/+source/python-distro +* https://packages.debian.org/stable/python3-distro +* https://packages.gentoo.org/packages/dev-python/distro +* https://pkgs.org/download/python3-distro +* https://slackbuilds.org/repository/14.2/python/python-distro/ diff --git a/venv/lib/python3.11/site-packages/distro-1.9.0.dist-info/RECORD b/venv/lib/python3.11/site-packages/distro-1.9.0.dist-info/RECORD new file mode 100644 index 0000000..eda98f6 --- /dev/null +++ b/venv/lib/python3.11/site-packages/distro-1.9.0.dist-info/RECORD @@ -0,0 +1,15 @@ +../../../bin/distro,sha256=y8gWdMgf5MaypHgAv3GspQ6tSQoXxtjC_Pi9cCSmfUo,238 +distro-1.9.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +distro-1.9.0.dist-info/LICENSE,sha256=y16Ofl9KOYjhBjwULGDcLfdWBfTEZRXnduOspt-XbhQ,11325 +distro-1.9.0.dist-info/METADATA,sha256=MWMqst5VkRMQkbM5e9zfeXcYV52Fp1GG8Gg53QwJ6B0,6791 +distro-1.9.0.dist-info/RECORD,, +distro-1.9.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92 +distro-1.9.0.dist-info/entry_points.txt,sha256=3ObjqQMbh1xeQQwsWtgbfDNDMDD-EbggR1Oj_z8s9hc,46 +distro-1.9.0.dist-info/top_level.txt,sha256=ikde_V_XEdSBqaGd5tEriN_wzYHLgTX_zVtlsGLHvwQ,7 +distro/__init__.py,sha256=2fHjF-SfgPvjyNZ1iHh_wjqWdR_Yo5ODHwZC0jLBPhc,981 +distro/__main__.py,sha256=bu9d3TifoKciZFcqRBuygV3GSuThnVD_m2IK4cz96Vs,64 +distro/__pycache__/__init__.cpython-311.pyc,, +distro/__pycache__/__main__.cpython-311.pyc,, +distro/__pycache__/distro.cpython-311.pyc,, +distro/distro.py,sha256=XqbefacAhDT4zr_trnbA15eY8vdK4GTghgmvUGrEM_4,49430 +distro/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.11/site-packages/distro-1.9.0.dist-info/WHEEL b/venv/lib/python3.11/site-packages/distro-1.9.0.dist-info/WHEEL new file mode 100644 index 0000000..98c0d20 --- /dev/null +++ b/venv/lib/python3.11/site-packages/distro-1.9.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.42.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.11/site-packages/distro-1.9.0.dist-info/entry_points.txt b/venv/lib/python3.11/site-packages/distro-1.9.0.dist-info/entry_points.txt new file mode 100644 index 0000000..08d29c5 --- /dev/null +++ b/venv/lib/python3.11/site-packages/distro-1.9.0.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +distro = distro.distro:main diff --git a/venv/lib/python3.11/site-packages/distro-1.9.0.dist-info/top_level.txt b/venv/lib/python3.11/site-packages/distro-1.9.0.dist-info/top_level.txt new file mode 100644 index 0000000..0e09331 --- /dev/null +++ b/venv/lib/python3.11/site-packages/distro-1.9.0.dist-info/top_level.txt @@ -0,0 +1 @@ +distro diff --git a/venv/lib/python3.11/site-packages/distro/__init__.py b/venv/lib/python3.11/site-packages/distro/__init__.py new file mode 100644 index 0000000..7686fe8 --- /dev/null +++ b/venv/lib/python3.11/site-packages/distro/__init__.py @@ -0,0 
+1,54 @@ +from .distro import ( + NORMALIZED_DISTRO_ID, + NORMALIZED_LSB_ID, + NORMALIZED_OS_ID, + LinuxDistribution, + __version__, + build_number, + codename, + distro_release_attr, + distro_release_info, + id, + info, + like, + linux_distribution, + lsb_release_attr, + lsb_release_info, + major_version, + minor_version, + name, + os_release_attr, + os_release_info, + uname_attr, + uname_info, + version, + version_parts, +) + +__all__ = [ + "NORMALIZED_DISTRO_ID", + "NORMALIZED_LSB_ID", + "NORMALIZED_OS_ID", + "LinuxDistribution", + "build_number", + "codename", + "distro_release_attr", + "distro_release_info", + "id", + "info", + "like", + "linux_distribution", + "lsb_release_attr", + "lsb_release_info", + "major_version", + "minor_version", + "name", + "os_release_attr", + "os_release_info", + "uname_attr", + "uname_info", + "version", + "version_parts", +] + +__version__ = __version__ diff --git a/venv/lib/python3.11/site-packages/distro/__main__.py b/venv/lib/python3.11/site-packages/distro/__main__.py new file mode 100644 index 0000000..0c01d5b --- /dev/null +++ b/venv/lib/python3.11/site-packages/distro/__main__.py @@ -0,0 +1,4 @@ +from .distro import main + +if __name__ == "__main__": + main() diff --git a/venv/lib/python3.11/site-packages/distro/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/distro/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..f1fb3a4 Binary files /dev/null and b/venv/lib/python3.11/site-packages/distro/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/distro/__pycache__/__main__.cpython-311.pyc b/venv/lib/python3.11/site-packages/distro/__pycache__/__main__.cpython-311.pyc new file mode 100644 index 0000000..0e31b02 Binary files /dev/null and b/venv/lib/python3.11/site-packages/distro/__pycache__/__main__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/distro/__pycache__/distro.cpython-311.pyc b/venv/lib/python3.11/site-packages/distro/__pycache__/distro.cpython-311.pyc new file mode 100644 index 0000000..58f81d3 Binary files /dev/null and b/venv/lib/python3.11/site-packages/distro/__pycache__/distro.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/distro/distro.py b/venv/lib/python3.11/site-packages/distro/distro.py new file mode 100644 index 0000000..78ccdfa --- /dev/null +++ b/venv/lib/python3.11/site-packages/distro/distro.py @@ -0,0 +1,1403 @@ +#!/usr/bin/env python +# Copyright 2015-2021 Nir Cohen +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +The ``distro`` package (``distro`` stands for Linux Distribution) provides +information about the Linux distribution it runs on, such as a reliable +machine-readable distro ID, or version information. + +It is the recommended replacement for Python's original +:py:func:`platform.linux_distribution` function, but it provides much more +functionality. An alternative implementation became necessary because Python +3.5 deprecated this function, and Python 3.8 removed it altogether. 
+ Its +predecessor function :py:func:`platform.dist` was already deprecated since +Python 2.6 and removed in Python 3.8. Still, there are many cases in which +access to OS distribution information is needed. See `Python issue 1322 +<https://bugs.python.org/issue1322>`_ for more information. +""" + +import argparse +import json +import logging +import os +import re +import shlex +import subprocess +import sys +import warnings +from typing import ( + Any, + Callable, + Dict, + Iterable, + Optional, + Sequence, + TextIO, + Tuple, + Type, +) + +try: + from typing import TypedDict +except ImportError: + # Python 3.7 + TypedDict = dict + +__version__ = "1.9.0" + + +class VersionDict(TypedDict): + major: str + minor: str + build_number: str + + +class InfoDict(TypedDict): + id: str + version: str + version_parts: VersionDict + like: str + codename: str + + +_UNIXCONFDIR = os.environ.get("UNIXCONFDIR", "/etc") +_UNIXUSRLIBDIR = os.environ.get("UNIXUSRLIBDIR", "/usr/lib") +_OS_RELEASE_BASENAME = "os-release" + +#: Translation table for normalizing the "ID" attribute defined in os-release +#: files, for use by the :func:`distro.id` method. +#: +#: * Key: Value as defined in the os-release file, translated to lower case, +#: with blanks translated to underscores. +#: +#: * Value: Normalized value. +NORMALIZED_OS_ID = { + "ol": "oracle", # Oracle Linux + "opensuse-leap": "opensuse", # Newer versions of OpenSuSE report as opensuse-leap +} + +#: Translation table for normalizing the "Distributor ID" attribute returned by +#: the lsb_release command, for use by the :func:`distro.id` method. +#: +#: * Key: Value as returned by the lsb_release command, translated to lower +#: case, with blanks translated to underscores. +#: +#: * Value: Normalized value. +NORMALIZED_LSB_ID = { + "enterpriseenterpriseas": "oracle", # Oracle Enterprise Linux 4 + "enterpriseenterpriseserver": "oracle", # Oracle Linux 5 + "redhatenterpriseworkstation": "rhel", # RHEL 6, 7 Workstation + "redhatenterpriseserver": "rhel", # RHEL 6, 7 Server + "redhatenterprisecomputenode": "rhel", # RHEL 6 ComputeNode +} + +#: Translation table for normalizing the distro ID derived from the file name +#: of distro release files, for use by the :func:`distro.id` method. +#: +#: * Key: Value as derived from the file name of a distro release file, +#: translated to lower case, with blanks translated to underscores. +#: +#: * Value: Normalized value. +NORMALIZED_DISTRO_ID = { + "redhat": "rhel", # RHEL 6.x, 7.x +} + +# Pattern for content of distro release file (reversed) +_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile( + r"(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)" +) + +# Pattern for base file name of distro release file +_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$") + +# Base file names to be looked up for if _UNIXCONFDIR is not readable.
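+# (For example "centos-release" or "slackware-version"; any name listed
+# in _DISTRO_RELEASE_IGNORE_BASENAMES below is skipped.)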
+_DISTRO_RELEASE_BASENAMES = [ + "SuSE-release", + "altlinux-release", + "arch-release", + "base-release", + "centos-release", + "fedora-release", + "gentoo-release", + "mageia-release", + "mandrake-release", + "mandriva-release", + "mandrivalinux-release", + "manjaro-release", + "oracle-release", + "redhat-release", + "rocky-release", + "sl-release", + "slackware-version", +] + +# Base file names to be ignored when searching for distro release file +_DISTRO_RELEASE_IGNORE_BASENAMES = ( + "debian_version", + "lsb-release", + "oem-release", + _OS_RELEASE_BASENAME, + "system-release", + "plesk-release", + "iredmail-release", + "board-release", + "ec2_version", +) + + +def linux_distribution(full_distribution_name: bool = True) -> Tuple[str, str, str]: + """ + .. deprecated:: 1.6.0 + + :func:`distro.linux_distribution()` is deprecated. It should only be + used as a compatibility shim with Python's + :py:func:`platform.linux_distribution()`. Please use :func:`distro.id`, + :func:`distro.version` and :func:`distro.name` instead. + + Return information about the current OS distribution as a tuple + ``(id_name, version, codename)`` with items as follows: + + * ``id_name``: If *full_distribution_name* is false, the result of + :func:`distro.id`. Otherwise, the result of :func:`distro.name`. + + * ``version``: The result of :func:`distro.version`. + + * ``codename``: The extra item (usually in parentheses) after the + os-release version number, or the result of :func:`distro.codename`. + + The interface of this function is compatible with the original + :py:func:`platform.linux_distribution` function, supporting a subset of + its parameters. + + The data it returns may not exactly be the same, because it uses more data + sources than the original function, and that may lead to different data if + the OS distribution is not consistent across multiple data sources it + provides (there are indeed such distributions ...). + + Another reason for differences is the fact that the :func:`distro.id` + method normalizes the distro ID string to a reliable machine-readable value + for a number of popular OS distributions. + """ + warnings.warn( + "distro.linux_distribution() is deprecated. It should only be used as a " + "compatibility shim with Python's platform.linux_distribution(). Please use " + "distro.id(), distro.version() and distro.name() instead.", + DeprecationWarning, + stacklevel=2, + ) + return _distro.linux_distribution(full_distribution_name) + + +def id() -> str: + """ + Return the distro ID of the current distribution, as a + machine-readable string. + + For a number of OS distributions, the returned distro ID value is + *reliable*, in the sense that it is documented and that it does not change + across releases of the distribution. 
+
+    This package maintains the following reliable distro ID values:
+
+    ==============  =========================================
+    Distro ID       Distribution
+    ==============  =========================================
+    "ubuntu"        Ubuntu
+    "debian"        Debian
+    "rhel"          RedHat Enterprise Linux
+    "centos"        CentOS
+    "fedora"        Fedora
+    "sles"          SUSE Linux Enterprise Server
+    "opensuse"      openSUSE
+    "amzn"          Amazon Linux
+    "arch"          Arch Linux
+    "buildroot"     Buildroot
+    "cloudlinux"    CloudLinux OS
+    "exherbo"       Exherbo Linux
+    "gentoo"        Gentoo Linux
+    "ibm_powerkvm"  IBM PowerKVM
+    "kvmibm"        KVM for IBM z Systems
+    "linuxmint"     Linux Mint
+    "mageia"        Mageia
+    "mandriva"      Mandriva Linux
+    "parallels"     Parallels
+    "pidora"        Pidora
+    "raspbian"      Raspbian
+    "oracle"        Oracle Linux (and Oracle Enterprise Linux)
+    "scientific"    Scientific Linux
+    "slackware"     Slackware
+    "xenserver"     XenServer
+    "openbsd"       OpenBSD
+    "netbsd"        NetBSD
+    "freebsd"       FreeBSD
+    "midnightbsd"   MidnightBSD
+    "rocky"         Rocky Linux
+    "aix"           AIX
+    "guix"          Guix System
+    "altlinux"      ALT Linux
+    ==============  =========================================
+
+    If you have a need to get distros for reliable IDs added into this set,
+    or if you find that the :func:`distro.id` function returns a different
+    distro ID for one of the listed distros, please create an issue in the
+    `distro issue tracker`_.
+
+    **Lookup hierarchy and transformations:**
+
+    First, the ID is obtained from the following sources, in the specified
+    order. The first available and non-empty value is used:
+
+    * the value of the "ID" attribute of the os-release file,
+
+    * the value of the "Distributor ID" attribute returned by the lsb_release
+      command,
+
+    * the first part of the file name of the distro release file,
+
+    The so determined ID value then passes the following transformations,
+    before it is returned by this method:
+
+    * it is translated to lower case,
+
+    * blanks (which should not be there anyway) are translated to underscores,
+
+    * a normalization of the ID is performed, based upon
+      `normalization tables`_. The purpose of this normalization is to ensure
+      that the ID is as reliable as possible, even across incompatible changes
+      in the OS distributions. A common reason for an incompatible change is
+      the addition of an os-release file, or the addition of the lsb_release
+      command, with ID values that differ from what was previously determined
+      from the distro release file name.
+    """
+    return _distro.id()
+
+
+def name(pretty: bool = False) -> str:
+    """
+    Return the name of the current OS distribution, as a human-readable
+    string.
+
+    If *pretty* is false, the name is returned without version or codename.
+    (e.g. "CentOS Linux")
+
+    If *pretty* is true, the version and codename are appended.
+    (e.g. "CentOS Linux 7.1.1503 (Core)")
+
+    **Lookup hierarchy:**
+
+    The name is obtained from the following sources, in the specified order.
+    The first available and non-empty value is used:
+
+    * If *pretty* is false:
+
+      - the value of the "NAME" attribute of the os-release file,
+
+      - the value of the "Distributor ID" attribute returned by the
+        lsb_release command,
+
+      - the value of the "<name>" field of the distro release file.
+
+    * If *pretty* is true:
+
+      - the value of the "PRETTY_NAME" attribute of the os-release file,
+
+      - the value of the "Description" attribute returned by the lsb_release
+        command,
+
+      - the value of the "<name>" field of the distro release file, appended
+        with the value of the pretty version ("<version_id>" and "<codename>"
+        fields) of the distro release file, if available.
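+
+    A hypothetical example, reusing the CentOS values from above (actual
+    output depends on the system this runs on):
+
+    .. sourcecode:: python
+
+        >>> import distro
+        >>> distro.name()
+        'CentOS Linux'
+        >>> distro.name(pretty=True)
+        'CentOS Linux 7.1.1503 (Core)'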
+ """ + return _distro.name(pretty) + + +def version(pretty: bool = False, best: bool = False) -> str: + """ + Return the version of the current OS distribution, as a human-readable + string. + + If *pretty* is false, the version is returned without codename (e.g. + "7.0"). + + If *pretty* is true, the codename in parenthesis is appended, if the + codename is non-empty (e.g. "7.0 (Maipo)"). + + Some distributions provide version numbers with different precisions in + the different sources of distribution information. Examining the different + sources in a fixed priority order does not always yield the most precise + version (e.g. for Debian 8.2, or CentOS 7.1). + + Some other distributions may not provide this kind of information. In these + cases, an empty string would be returned. This behavior can be observed + with rolling releases distributions (e.g. Arch Linux). + + The *best* parameter can be used to control the approach for the returned + version: + + If *best* is false, the first non-empty version number in priority order of + the examined sources is returned. + + If *best* is true, the most precise version number out of all examined + sources is returned. + + **Lookup hierarchy:** + + In all cases, the version number is obtained from the following sources. + If *best* is false, this order represents the priority order: + + * the value of the "VERSION_ID" attribute of the os-release file, + * the value of the "Release" attribute returned by the lsb_release + command, + * the version number parsed from the "" field of the first line + of the distro release file, + * the version number parsed from the "PRETTY_NAME" attribute of the + os-release file, if it follows the format of the distro release files. + * the version number parsed from the "Description" attribute returned by + the lsb_release command, if it follows the format of the distro release + files. + """ + return _distro.version(pretty, best) + + +def version_parts(best: bool = False) -> Tuple[str, str, str]: + """ + Return the version of the current OS distribution as a tuple + ``(major, minor, build_number)`` with items as follows: + + * ``major``: The result of :func:`distro.major_version`. + + * ``minor``: The result of :func:`distro.minor_version`. + + * ``build_number``: The result of :func:`distro.build_number`. + + For a description of the *best* parameter, see the :func:`distro.version` + method. + """ + return _distro.version_parts(best) + + +def major_version(best: bool = False) -> str: + """ + Return the major version of the current OS distribution, as a string, + if provided. + Otherwise, the empty string is returned. The major version is the first + part of the dot-separated version string. + + For a description of the *best* parameter, see the :func:`distro.version` + method. + """ + return _distro.major_version(best) + + +def minor_version(best: bool = False) -> str: + """ + Return the minor version of the current OS distribution, as a string, + if provided. + Otherwise, the empty string is returned. The minor version is the second + part of the dot-separated version string. + + For a description of the *best* parameter, see the :func:`distro.version` + method. + """ + return _distro.minor_version(best) + + +def build_number(best: bool = False) -> str: + """ + Return the build number of the current OS distribution, as a string, + if provided. + Otherwise, the empty string is returned. The build number is the third part + of the dot-separated version string. 
+
+    For a description of the *best* parameter, see the :func:`distro.version`
+    method.
+    """
+    return _distro.build_number(best)
+
+
+def like() -> str:
+    """
+    Return a space-separated list of distro IDs of distributions that are
+    closely related to the current OS distribution in regards to packaging
+    and programming interfaces, for example distributions the current
+    distribution is a derivative from.
+
+    **Lookup hierarchy:**
+
+    This information item is only provided by the os-release file.
+    For details, see the description of the "ID_LIKE" attribute in the
+    `os-release man page
+    <https://www.freedesktop.org/software/systemd/man/os-release.html>`_.
+    """
+    return _distro.like()
+
+
+def codename() -> str:
+    """
+    Return the codename for the release of the current OS distribution,
+    as a string.
+
+    If the distribution does not have a codename, an empty string is returned.
+
+    Note that the returned codename is not always really a codename. For
+    example, openSUSE returns "x86_64". This function does not handle such
+    cases in any special way and just returns the string it finds, if any.
+
+    **Lookup hierarchy:**
+
+    * the codename within the "VERSION" attribute of the os-release file, if
+      provided,
+
+    * the value of the "Codename" attribute returned by the lsb_release
+      command,
+
+    * the value of the "<codename>" field of the distro release file.
+    """
+    return _distro.codename()
+
+
+def info(pretty: bool = False, best: bool = False) -> InfoDict:
+    """
+    Return certain machine-readable information items about the current OS
+    distribution in a dictionary, as shown in the following example:
+
+    .. sourcecode:: python
+
+        {
+            'id': 'rhel',
+            'version': '7.0',
+            'version_parts': {
+                'major': '7',
+                'minor': '0',
+                'build_number': ''
+            },
+            'like': 'fedora',
+            'codename': 'Maipo'
+        }
+
+    The dictionary structure and keys are always the same, regardless of which
+    information items are available in the underlying data sources. The values
+    for the various keys are as follows:
+
+    * ``id``: The result of :func:`distro.id`.
+
+    * ``version``: The result of :func:`distro.version`.
+
+    * ``version_parts -> major``: The result of :func:`distro.major_version`.
+
+    * ``version_parts -> minor``: The result of :func:`distro.minor_version`.
+
+    * ``version_parts -> build_number``: The result of
+      :func:`distro.build_number`.
+
+    * ``like``: The result of :func:`distro.like`.
+
+    * ``codename``: The result of :func:`distro.codename`.
+
+    For a description of the *pretty* and *best* parameters, see the
+    :func:`distro.version` method.
+    """
+    return _distro.info(pretty, best)
+
+
+def os_release_info() -> Dict[str, str]:
+    """
+    Return a dictionary containing key-value pairs for the information items
+    from the os-release file data source of the current OS distribution.
+
+    See `os-release file`_ for details about these information items.
+    """
+    return _distro.os_release_info()
+
+
+def lsb_release_info() -> Dict[str, str]:
+    """
+    Return a dictionary containing key-value pairs for the information items
+    from the lsb_release command data source of the current OS distribution.
+
+    See `lsb_release command output`_ for details about these information
+    items.
+    """
+    return _distro.lsb_release_info()
+
+
+def distro_release_info() -> Dict[str, str]:
+    """
+    Return a dictionary containing key-value pairs for the information items
+    from the distro release file data source of the current OS distribution.
+
+    See `distro release file`_ for details about these information items.
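+
+    A hypothetical example (which keys are present depends on the release
+    file that was found and parsed):
+
+    .. sourcecode:: python
+
+        >>> import distro
+        >>> distro.distro_release_info()
+        {'name': 'CentOS Linux', 'version_id': '7.1.1503',
+         'codename': 'Core', 'id': 'centos'}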
+ """ + return _distro.distro_release_info() + + +def uname_info() -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information items + from the distro release file data source of the current OS distribution. + """ + return _distro.uname_info() + + +def os_release_attr(attribute: str) -> str: + """ + Return a single named information item from the os-release file data source + of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + + See `os-release file`_ for details about these information items. + """ + return _distro.os_release_attr(attribute) + + +def lsb_release_attr(attribute: str) -> str: + """ + Return a single named information item from the lsb_release command output + data source of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + + See `lsb_release command output`_ for details about these information + items. + """ + return _distro.lsb_release_attr(attribute) + + +def distro_release_attr(attribute: str) -> str: + """ + Return a single named information item from the distro release file + data source of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + + See `distro release file`_ for details about these information items. + """ + return _distro.distro_release_attr(attribute) + + +def uname_attr(attribute: str) -> str: + """ + Return a single named information item from the distro release file + data source of the current OS distribution. + + Parameters: + + * ``attribute`` (string): Key of the information item. + + Returns: + + * (string): Value of the information item, if the item exists. + The empty string, if the item does not exist. + """ + return _distro.uname_attr(attribute) + + +try: + from functools import cached_property +except ImportError: + # Python < 3.8 + class cached_property: # type: ignore + """A version of @property which caches the value. On access, it calls the + underlying function and sets the value in `__dict__` so future accesses + will not re-call the property. + """ + + def __init__(self, f: Callable[[Any], Any]) -> None: + self._fname = f.__name__ + self._f = f + + def __get__(self, obj: Any, owner: Type[Any]) -> Any: + assert obj is not None, f"call {self._fname} on an instance" + ret = obj.__dict__[self._fname] = self._f(obj) + return ret + + +class LinuxDistribution: + """ + Provides information about a OS distribution. + + This package creates a private module-global instance of this class with + default initialization arguments, that is used by the + `consolidated accessor functions`_ and `single source accessor functions`_. + By using default initialization arguments, that module-global instance + returns data about the current OS distribution (i.e. the distro this + package runs on). + + Normally, it is not necessary to create additional instances of this class. 
+ However, in situations where control is needed over the exact data sources + that are used, instances of this class can be created with a specific + distro release file, or a specific os-release file, or without invoking the + lsb_release command. + """ + + def __init__( + self, + include_lsb: Optional[bool] = None, + os_release_file: str = "", + distro_release_file: str = "", + include_uname: Optional[bool] = None, + root_dir: Optional[str] = None, + include_oslevel: Optional[bool] = None, + ) -> None: + """ + The initialization method of this class gathers information from the + available data sources, and stores that in private instance attributes. + Subsequent access to the information items uses these private instance + attributes, so that the data sources are read only once. + + Parameters: + + * ``include_lsb`` (bool): Controls whether the + `lsb_release command output`_ is included as a data source. + + If the lsb_release command is not available in the program execution + path, the data source for the lsb_release command will be empty. + + * ``os_release_file`` (string): The path name of the + `os-release file`_ that is to be used as a data source. + + An empty string (the default) will cause the default path name to + be used (see `os-release file`_ for details). + + If the specified or defaulted os-release file does not exist, the + data source for the os-release file will be empty. + + * ``distro_release_file`` (string): The path name of the + `distro release file`_ that is to be used as a data source. + + An empty string (the default) will cause a default search algorithm + to be used (see `distro release file`_ for details). + + If the specified distro release file does not exist, or if no default + distro release file can be found, the data source for the distro + release file will be empty. + + * ``include_uname`` (bool): Controls whether uname command output is + included as a data source. If the uname command is not available in + the program execution path the data source for the uname command will + be empty. + + * ``root_dir`` (string): The absolute path to the root directory to use + to find distro-related information files. Note that ``include_*`` + parameters must not be enabled in combination with ``root_dir``. + + * ``include_oslevel`` (bool): Controls whether (AIX) oslevel command + output is included as a data source. If the oslevel command is not + available in the program execution path the data source will be + empty. + + Public instance attributes: + + * ``os_release_file`` (string): The path name of the + `os-release file`_ that is actually used as a data source. The + empty string if no distro release file is used as a data source. + + * ``distro_release_file`` (string): The path name of the + `distro release file`_ that is actually used as a data source. The + empty string if no distro release file is used as a data source. + + * ``include_lsb`` (bool): The result of the ``include_lsb`` parameter. + This controls whether the lsb information will be loaded. + + * ``include_uname`` (bool): The result of the ``include_uname`` + parameter. This controls whether the uname information will + be loaded. + + * ``include_oslevel`` (bool): The result of the ``include_oslevel`` + parameter. This controls whether (AIX) oslevel information will be + loaded. + + * ``root_dir`` (string): The result of the ``root_dir`` parameter. + The absolute path to the root directory to use to find distro-related + information files. 
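+
+        A usage sketch (the mount point is hypothetical). When ``root_dir``
+        is given, the subprocess-based data sources (lsb_release, uname,
+        oslevel) are disabled by default; enabling them alongside
+        ``root_dir`` raises :py:exc:`ValueError`, as described below:
+
+        .. sourcecode:: python
+
+            from distro import LinuxDistribution
+
+            # Inspect an offline root filesystem instead of the running
+            # system; os-release and distro release files are read from
+            # under /mnt/sysroot (hypothetical mount point).
+            dist = LinuxDistribution(root_dir="/mnt/sysroot")
+            print(dist.id(), dist.version(), dist.codename())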
+ + Raises: + + * :py:exc:`ValueError`: Initialization parameters combination is not + supported. + + * :py:exc:`OSError`: Some I/O issue with an os-release file or distro + release file. + + * :py:exc:`UnicodeError`: A data source has unexpected characters or + uses an unexpected encoding. + """ + self.root_dir = root_dir + self.etc_dir = os.path.join(root_dir, "etc") if root_dir else _UNIXCONFDIR + self.usr_lib_dir = ( + os.path.join(root_dir, "usr/lib") if root_dir else _UNIXUSRLIBDIR + ) + + if os_release_file: + self.os_release_file = os_release_file + else: + etc_dir_os_release_file = os.path.join(self.etc_dir, _OS_RELEASE_BASENAME) + usr_lib_os_release_file = os.path.join( + self.usr_lib_dir, _OS_RELEASE_BASENAME + ) + + # NOTE: The idea is to respect order **and** have it set + # at all times for API backwards compatibility. + if os.path.isfile(etc_dir_os_release_file) or not os.path.isfile( + usr_lib_os_release_file + ): + self.os_release_file = etc_dir_os_release_file + else: + self.os_release_file = usr_lib_os_release_file + + self.distro_release_file = distro_release_file or "" # updated later + + is_root_dir_defined = root_dir is not None + if is_root_dir_defined and (include_lsb or include_uname or include_oslevel): + raise ValueError( + "Including subprocess data sources from specific root_dir is disallowed" + " to prevent false information" + ) + self.include_lsb = ( + include_lsb if include_lsb is not None else not is_root_dir_defined + ) + self.include_uname = ( + include_uname if include_uname is not None else not is_root_dir_defined + ) + self.include_oslevel = ( + include_oslevel if include_oslevel is not None else not is_root_dir_defined + ) + + def __repr__(self) -> str: + """Return repr of all info""" + return ( + "LinuxDistribution(" + "os_release_file={self.os_release_file!r}, " + "distro_release_file={self.distro_release_file!r}, " + "include_lsb={self.include_lsb!r}, " + "include_uname={self.include_uname!r}, " + "include_oslevel={self.include_oslevel!r}, " + "root_dir={self.root_dir!r}, " + "_os_release_info={self._os_release_info!r}, " + "_lsb_release_info={self._lsb_release_info!r}, " + "_distro_release_info={self._distro_release_info!r}, " + "_uname_info={self._uname_info!r}, " + "_oslevel_info={self._oslevel_info!r})".format(self=self) + ) + + def linux_distribution( + self, full_distribution_name: bool = True + ) -> Tuple[str, str, str]: + """ + Return information about the OS distribution that is compatible + with Python's :func:`platform.linux_distribution`, supporting a subset + of its parameters. + + For details, see :func:`distro.linux_distribution`. + """ + return ( + self.name() if full_distribution_name else self.id(), + self.version(), + self._os_release_info.get("release_codename") or self.codename(), + ) + + def id(self) -> str: + """Return the distro ID of the OS distribution, as a string. + + For details, see :func:`distro.id`. 
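+
+        A sketch of the normalization step this method applies (the input
+        value is illustrative):
+
+        .. sourcecode:: python
+
+            >>> # lsb_release on Oracle Linux 5 reports
+            >>> # "EnterpriseEnterpriseServer"; NORMALIZED_LSB_ID maps the
+            >>> # lower-cased value to a stable, machine-readable ID.
+            >>> dist.id()
+            'oracle'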
+ """ + + def normalize(distro_id: str, table: Dict[str, str]) -> str: + distro_id = distro_id.lower().replace(" ", "_") + return table.get(distro_id, distro_id) + + distro_id = self.os_release_attr("id") + if distro_id: + return normalize(distro_id, NORMALIZED_OS_ID) + + distro_id = self.lsb_release_attr("distributor_id") + if distro_id: + return normalize(distro_id, NORMALIZED_LSB_ID) + + distro_id = self.distro_release_attr("id") + if distro_id: + return normalize(distro_id, NORMALIZED_DISTRO_ID) + + distro_id = self.uname_attr("id") + if distro_id: + return normalize(distro_id, NORMALIZED_DISTRO_ID) + + return "" + + def name(self, pretty: bool = False) -> str: + """ + Return the name of the OS distribution, as a string. + + For details, see :func:`distro.name`. + """ + name = ( + self.os_release_attr("name") + or self.lsb_release_attr("distributor_id") + or self.distro_release_attr("name") + or self.uname_attr("name") + ) + if pretty: + name = self.os_release_attr("pretty_name") or self.lsb_release_attr( + "description" + ) + if not name: + name = self.distro_release_attr("name") or self.uname_attr("name") + version = self.version(pretty=True) + if version: + name = f"{name} {version}" + return name or "" + + def version(self, pretty: bool = False, best: bool = False) -> str: + """ + Return the version of the OS distribution, as a string. + + For details, see :func:`distro.version`. + """ + versions = [ + self.os_release_attr("version_id"), + self.lsb_release_attr("release"), + self.distro_release_attr("version_id"), + self._parse_distro_release_content(self.os_release_attr("pretty_name")).get( + "version_id", "" + ), + self._parse_distro_release_content( + self.lsb_release_attr("description") + ).get("version_id", ""), + self.uname_attr("release"), + ] + if self.uname_attr("id").startswith("aix"): + # On AIX platforms, prefer oslevel command output. + versions.insert(0, self.oslevel_info()) + elif self.id() == "debian" or "debian" in self.like().split(): + # On Debian-like, add debian_version file content to candidates list. + versions.append(self._debian_version) + version = "" + if best: + # This algorithm uses the last version in priority order that has + # the best precision. If the versions are not in conflict, that + # does not matter; otherwise, using the last one instead of the + # first one might be considered a surprise. + for v in versions: + if v.count(".") > version.count(".") or version == "": + version = v + else: + for v in versions: + if v != "": + version = v + break + if pretty and version and self.codename(): + version = f"{version} ({self.codename()})" + return version + + def version_parts(self, best: bool = False) -> Tuple[str, str, str]: + """ + Return the version of the OS distribution, as a tuple of version + numbers. + + For details, see :func:`distro.version_parts`. + """ + version_str = self.version(best=best) + if version_str: + version_regex = re.compile(r"(\d+)\.?(\d+)?\.?(\d+)?") + matches = version_regex.match(version_str) + if matches: + major, minor, build_number = matches.groups() + return major, minor or "", build_number or "" + return "", "", "" + + def major_version(self, best: bool = False) -> str: + """ + Return the major version number of the current distribution. + + For details, see :func:`distro.major_version`. + """ + return self.version_parts(best)[0] + + def minor_version(self, best: bool = False) -> str: + """ + Return the minor version number of the current distribution. + + For details, see :func:`distro.minor_version`. 
+ """ + return self.version_parts(best)[1] + + def build_number(self, best: bool = False) -> str: + """ + Return the build number of the current distribution. + + For details, see :func:`distro.build_number`. + """ + return self.version_parts(best)[2] + + def like(self) -> str: + """ + Return the IDs of distributions that are like the OS distribution. + + For details, see :func:`distro.like`. + """ + return self.os_release_attr("id_like") or "" + + def codename(self) -> str: + """ + Return the codename of the OS distribution. + + For details, see :func:`distro.codename`. + """ + try: + # Handle os_release specially since distros might purposefully set + # this to empty string to have no codename + return self._os_release_info["codename"] + except KeyError: + return ( + self.lsb_release_attr("codename") + or self.distro_release_attr("codename") + or "" + ) + + def info(self, pretty: bool = False, best: bool = False) -> InfoDict: + """ + Return certain machine-readable information about the OS + distribution. + + For details, see :func:`distro.info`. + """ + return InfoDict( + id=self.id(), + version=self.version(pretty, best), + version_parts=VersionDict( + major=self.major_version(best), + minor=self.minor_version(best), + build_number=self.build_number(best), + ), + like=self.like(), + codename=self.codename(), + ) + + def os_release_info(self) -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information + items from the os-release file data source of the OS distribution. + + For details, see :func:`distro.os_release_info`. + """ + return self._os_release_info + + def lsb_release_info(self) -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information + items from the lsb_release command data source of the OS + distribution. + + For details, see :func:`distro.lsb_release_info`. + """ + return self._lsb_release_info + + def distro_release_info(self) -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information + items from the distro release file data source of the OS + distribution. + + For details, see :func:`distro.distro_release_info`. + """ + return self._distro_release_info + + def uname_info(self) -> Dict[str, str]: + """ + Return a dictionary containing key-value pairs for the information + items from the uname command data source of the OS distribution. + + For details, see :func:`distro.uname_info`. + """ + return self._uname_info + + def oslevel_info(self) -> str: + """ + Return AIX' oslevel command output. + """ + return self._oslevel_info + + def os_release_attr(self, attribute: str) -> str: + """ + Return a single named information item from the os-release file data + source of the OS distribution. + + For details, see :func:`distro.os_release_attr`. + """ + return self._os_release_info.get(attribute, "") + + def lsb_release_attr(self, attribute: str) -> str: + """ + Return a single named information item from the lsb_release command + output data source of the OS distribution. + + For details, see :func:`distro.lsb_release_attr`. + """ + return self._lsb_release_info.get(attribute, "") + + def distro_release_attr(self, attribute: str) -> str: + """ + Return a single named information item from the distro release file + data source of the OS distribution. + + For details, see :func:`distro.distro_release_attr`. 
+ """ + return self._distro_release_info.get(attribute, "") + + def uname_attr(self, attribute: str) -> str: + """ + Return a single named information item from the uname command + output data source of the OS distribution. + + For details, see :func:`distro.uname_attr`. + """ + return self._uname_info.get(attribute, "") + + @cached_property + def _os_release_info(self) -> Dict[str, str]: + """ + Get the information items from the specified os-release file. + + Returns: + A dictionary containing all information items. + """ + if os.path.isfile(self.os_release_file): + with open(self.os_release_file, encoding="utf-8") as release_file: + return self._parse_os_release_content(release_file) + return {} + + @staticmethod + def _parse_os_release_content(lines: TextIO) -> Dict[str, str]: + """ + Parse the lines of an os-release file. + + Parameters: + + * lines: Iterable through the lines in the os-release file. + Each line must be a unicode string or a UTF-8 encoded byte + string. + + Returns: + A dictionary containing all information items. + """ + props = {} + lexer = shlex.shlex(lines, posix=True) + lexer.whitespace_split = True + + tokens = list(lexer) + for token in tokens: + # At this point, all shell-like parsing has been done (i.e. + # comments processed, quotes and backslash escape sequences + # processed, multi-line values assembled, trailing newlines + # stripped, etc.), so the tokens are now either: + # * variable assignments: var=value + # * commands or their arguments (not allowed in os-release) + # Ignore any tokens that are not variable assignments + if "=" in token: + k, v = token.split("=", 1) + props[k.lower()] = v + + if "version" in props: + # extract release codename (if any) from version attribute + match = re.search(r"\((\D+)\)|,\s*(\D+)", props["version"]) + if match: + release_codename = match.group(1) or match.group(2) + props["codename"] = props["release_codename"] = release_codename + + if "version_codename" in props: + # os-release added a version_codename field. Use that in + # preference to anything else Note that some distros purposefully + # do not have code names. They should be setting + # version_codename="" + props["codename"] = props["version_codename"] + elif "ubuntu_codename" in props: + # Same as above but a non-standard field name used on older Ubuntus + props["codename"] = props["ubuntu_codename"] + + return props + + @cached_property + def _lsb_release_info(self) -> Dict[str, str]: + """ + Get the information items from the lsb_release command output. + + Returns: + A dictionary containing all information items. + """ + if not self.include_lsb: + return {} + try: + cmd = ("lsb_release", "-a") + stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL) + # Command not found or lsb_release returned error + except (OSError, subprocess.CalledProcessError): + return {} + content = self._to_str(stdout).splitlines() + return self._parse_lsb_release_content(content) + + @staticmethod + def _parse_lsb_release_content(lines: Iterable[str]) -> Dict[str, str]: + """ + Parse the output of the lsb_release command. + + Parameters: + + * lines: Iterable through the lines of the lsb_release output. + Each line must be a unicode string or a UTF-8 encoded byte + string. + + Returns: + A dictionary containing all information items. + """ + props = {} + for line in lines: + kv = line.strip("\n").split(":", 1) + if len(kv) != 2: + # Ignore lines without colon. 
+ continue + k, v = kv + props.update({k.replace(" ", "_").lower(): v.strip()}) + return props + + @cached_property + def _uname_info(self) -> Dict[str, str]: + if not self.include_uname: + return {} + try: + cmd = ("uname", "-rs") + stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL) + except OSError: + return {} + content = self._to_str(stdout).splitlines() + return self._parse_uname_content(content) + + @cached_property + def _oslevel_info(self) -> str: + if not self.include_oslevel: + return "" + try: + stdout = subprocess.check_output("oslevel", stderr=subprocess.DEVNULL) + except (OSError, subprocess.CalledProcessError): + return "" + return self._to_str(stdout).strip() + + @cached_property + def _debian_version(self) -> str: + try: + with open( + os.path.join(self.etc_dir, "debian_version"), encoding="ascii" + ) as fp: + return fp.readline().rstrip() + except FileNotFoundError: + return "" + + @staticmethod + def _parse_uname_content(lines: Sequence[str]) -> Dict[str, str]: + if not lines: + return {} + props = {} + match = re.search(r"^([^\s]+)\s+([\d\.]+)", lines[0].strip()) + if match: + name, version = match.groups() + + # This is to prevent the Linux kernel version from + # appearing as the 'best' version on otherwise + # identifiable distributions. + if name == "Linux": + return {} + props["id"] = name.lower() + props["name"] = name + props["release"] = version + return props + + @staticmethod + def _to_str(bytestring: bytes) -> str: + encoding = sys.getfilesystemencoding() + return bytestring.decode(encoding) + + @cached_property + def _distro_release_info(self) -> Dict[str, str]: + """ + Get the information items from the specified distro release file. + + Returns: + A dictionary containing all information items. + """ + if self.distro_release_file: + # If it was specified, we use it and parse what we can, even if + # its file name or content does not match the expected pattern. + distro_info = self._parse_distro_release_file(self.distro_release_file) + basename = os.path.basename(self.distro_release_file) + # The file name pattern for user-specified distro release files + # is somewhat more tolerant (compared to when searching for the + # file), because we want to use what was specified as best as + # possible. + match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) + else: + try: + basenames = [ + basename + for basename in os.listdir(self.etc_dir) + if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES + and os.path.isfile(os.path.join(self.etc_dir, basename)) + ] + # We sort for repeatability in cases where there are multiple + # distro specific files; e.g. CentOS, Oracle, Enterprise all + # containing `redhat-release` on top of their own. + basenames.sort() + except OSError: + # This may occur when /etc is not readable but we can't be + # sure about the *-release files. Check common entries of + # /etc for information. If they turn out to not be there the + # error is handled in `_parse_distro_release_file()`. + basenames = _DISTRO_RELEASE_BASENAMES + for basename in basenames: + match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) + if match is None: + continue + filepath = os.path.join(self.etc_dir, basename) + distro_info = self._parse_distro_release_file(filepath) + # The name is always present if the pattern matches. + if "name" not in distro_info: + continue + self.distro_release_file = filepath + break + else: # the loop didn't "break": no candidate. 
+ return {} + + if match is not None: + distro_info["id"] = match.group(1) + + # CloudLinux < 7: manually enrich info with proper id. + if "cloudlinux" in distro_info.get("name", "").lower(): + distro_info["id"] = "cloudlinux" + + return distro_info + + def _parse_distro_release_file(self, filepath: str) -> Dict[str, str]: + """ + Parse a distro release file. + + Parameters: + + * filepath: Path name of the distro release file. + + Returns: + A dictionary containing all information items. + """ + try: + with open(filepath, encoding="utf-8") as fp: + # Only parse the first line. For instance, on SLES there + # are multiple lines. We don't want them... + return self._parse_distro_release_content(fp.readline()) + except OSError: + # Ignore not being able to read a specific, seemingly version + # related file. + # See https://github.com/python-distro/distro/issues/162 + return {} + + @staticmethod + def _parse_distro_release_content(line: str) -> Dict[str, str]: + """ + Parse a line from a distro release file. + + Parameters: + * line: Line from the distro release file. Must be a unicode string + or a UTF-8 encoded byte string. + + Returns: + A dictionary containing all information items. + """ + matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(line.strip()[::-1]) + distro_info = {} + if matches: + # regexp ensures non-None + distro_info["name"] = matches.group(3)[::-1] + if matches.group(2): + distro_info["version_id"] = matches.group(2)[::-1] + if matches.group(1): + distro_info["codename"] = matches.group(1)[::-1] + elif line: + distro_info["name"] = line.strip() + return distro_info + + +_distro = LinuxDistribution() + + +def main() -> None: + logger = logging.getLogger(__name__) + logger.setLevel(logging.DEBUG) + logger.addHandler(logging.StreamHandler(sys.stdout)) + + parser = argparse.ArgumentParser(description="OS distro info tool") + parser.add_argument( + "--json", "-j", help="Output in machine readable format", action="store_true" + ) + + parser.add_argument( + "--root-dir", + "-r", + type=str, + dest="root_dir", + help="Path to the root filesystem directory (defaults to /)", + ) + + args = parser.parse_args() + + if args.root_dir: + dist = LinuxDistribution( + include_lsb=False, + include_uname=False, + include_oslevel=False, + root_dir=args.root_dir, + ) + else: + dist = _distro + + if args.json: + logger.info(json.dumps(dist.info(), indent=4, sort_keys=True)) + else: + logger.info("Name: %s", dist.name(pretty=True)) + distribution_version = dist.version(pretty=True) + logger.info("Version: %s", distribution_version) + distribution_codename = dist.codename() + logger.info("Codename: %s", distribution_codename) + + +if __name__ == "__main__": + main() diff --git a/venv/lib/python3.11/site-packages/distro/py.typed b/venv/lib/python3.11/site-packages/distro/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.11/site-packages/distutils-precedence.pth b/venv/lib/python3.11/site-packages/distutils-precedence.pth new file mode 100644 index 0000000..7f009fe --- /dev/null +++ b/venv/lib/python3.11/site-packages/distutils-precedence.pth @@ -0,0 +1 @@ +import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'local') == 'local'; enabled and __import__('_distutils_hack').add_shim(); diff --git a/venv/lib/python3.11/site-packages/dns/__init__.py b/venv/lib/python3.11/site-packages/dns/__init__.py new file mode 100644 index 0000000..a4249b9 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/__init__.py @@ -0,0 +1,70 @@ +# Copyright 
(C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009, 2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""dnspython DNS toolkit""" + +__all__ = [ + "asyncbackend", + "asyncquery", + "asyncresolver", + "dnssec", + "dnssecalgs", + "dnssectypes", + "e164", + "edns", + "entropy", + "exception", + "flags", + "immutable", + "inet", + "ipv4", + "ipv6", + "message", + "name", + "namedict", + "node", + "opcode", + "query", + "quic", + "rcode", + "rdata", + "rdataclass", + "rdataset", + "rdatatype", + "renderer", + "resolver", + "reversename", + "rrset", + "serial", + "set", + "tokenizer", + "transaction", + "tsig", + "tsigkeyring", + "ttl", + "rdtypes", + "update", + "version", + "versioned", + "wire", + "xfr", + "zone", + "zonetypes", + "zonefile", +] + +from dns.version import version as __version__ # noqa diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..db5128a Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/_asyncbackend.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/_asyncbackend.cpython-311.pyc new file mode 100644 index 0000000..8e53ac4 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/_asyncbackend.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/_asyncio_backend.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/_asyncio_backend.cpython-311.pyc new file mode 100644 index 0000000..1805e1b Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/_asyncio_backend.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/_ddr.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/_ddr.cpython-311.pyc new file mode 100644 index 0000000..150bb67 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/_ddr.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/_features.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/_features.cpython-311.pyc new file mode 100644 index 0000000..0d0e39a Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/_features.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/_immutable_ctx.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/_immutable_ctx.cpython-311.pyc new file mode 100644 index 0000000..ff1c690 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/_immutable_ctx.cpython-311.pyc differ diff 
--git a/venv/lib/python3.11/site-packages/dns/__pycache__/_trio_backend.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/_trio_backend.cpython-311.pyc new file mode 100644 index 0000000..e8d0835 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/_trio_backend.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/asyncbackend.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/asyncbackend.cpython-311.pyc new file mode 100644 index 0000000..9e53aad Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/asyncbackend.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/asyncquery.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/asyncquery.cpython-311.pyc new file mode 100644 index 0000000..c5524d8 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/asyncquery.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/asyncresolver.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/asyncresolver.cpython-311.pyc new file mode 100644 index 0000000..70ae30c Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/asyncresolver.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/dnssec.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/dnssec.cpython-311.pyc new file mode 100644 index 0000000..b54ec54 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/dnssec.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/dnssectypes.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/dnssectypes.cpython-311.pyc new file mode 100644 index 0000000..a8c9674 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/dnssectypes.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/e164.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/e164.cpython-311.pyc new file mode 100644 index 0000000..203493e Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/e164.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/edns.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/edns.cpython-311.pyc new file mode 100644 index 0000000..9149f31 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/edns.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/entropy.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/entropy.cpython-311.pyc new file mode 100644 index 0000000..45bc1b6 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/entropy.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/enum.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/enum.cpython-311.pyc new file mode 100644 index 0000000..9f0fa04 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/enum.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/exception.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/exception.cpython-311.pyc new file mode 100644 index 0000000..e052bbe Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/exception.cpython-311.pyc differ diff --git 
a/venv/lib/python3.11/site-packages/dns/__pycache__/flags.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/flags.cpython-311.pyc new file mode 100644 index 0000000..40d5451 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/flags.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/grange.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/grange.cpython-311.pyc new file mode 100644 index 0000000..eab69c6 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/grange.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/immutable.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/immutable.cpython-311.pyc new file mode 100644 index 0000000..21a6968 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/immutable.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/inet.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/inet.cpython-311.pyc new file mode 100644 index 0000000..6458d72 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/inet.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/ipv4.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/ipv4.cpython-311.pyc new file mode 100644 index 0000000..dc66df4 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/ipv4.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/ipv6.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/ipv6.cpython-311.pyc new file mode 100644 index 0000000..f39ccd2 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/ipv6.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/message.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/message.cpython-311.pyc new file mode 100644 index 0000000..66e92b3 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/message.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/name.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/name.cpython-311.pyc new file mode 100644 index 0000000..870006b Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/name.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/namedict.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/namedict.cpython-311.pyc new file mode 100644 index 0000000..6c85ab9 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/namedict.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/nameserver.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/nameserver.cpython-311.pyc new file mode 100644 index 0000000..62acd05 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/nameserver.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/node.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/node.cpython-311.pyc new file mode 100644 index 0000000..2668f0b Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/node.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/opcode.cpython-311.pyc 
b/venv/lib/python3.11/site-packages/dns/__pycache__/opcode.cpython-311.pyc new file mode 100644 index 0000000..80a4416 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/opcode.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/query.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/query.cpython-311.pyc new file mode 100644 index 0000000..5ccf192 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/query.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/rcode.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/rcode.cpython-311.pyc new file mode 100644 index 0000000..5d9b711 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/rcode.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/rdata.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/rdata.cpython-311.pyc new file mode 100644 index 0000000..63ed24e Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/rdata.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/rdataclass.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/rdataclass.cpython-311.pyc new file mode 100644 index 0000000..9611ce3 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/rdataclass.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/rdataset.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/rdataset.cpython-311.pyc new file mode 100644 index 0000000..f598fd3 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/rdataset.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/rdatatype.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/rdatatype.cpython-311.pyc new file mode 100644 index 0000000..f2a062c Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/rdatatype.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/renderer.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/renderer.cpython-311.pyc new file mode 100644 index 0000000..0c3afb1 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/renderer.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/resolver.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/resolver.cpython-311.pyc new file mode 100644 index 0000000..a4fe0de Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/resolver.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/reversename.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/reversename.cpython-311.pyc new file mode 100644 index 0000000..5e1bec5 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/reversename.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/rrset.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/rrset.cpython-311.pyc new file mode 100644 index 0000000..a16500c Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/rrset.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/serial.cpython-311.pyc 
b/venv/lib/python3.11/site-packages/dns/__pycache__/serial.cpython-311.pyc new file mode 100644 index 0000000..da317a8 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/serial.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/set.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/set.cpython-311.pyc new file mode 100644 index 0000000..4727eb4 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/set.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/tokenizer.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/tokenizer.cpython-311.pyc new file mode 100644 index 0000000..3bce049 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/tokenizer.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/transaction.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/transaction.cpython-311.pyc new file mode 100644 index 0000000..e32f84a Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/transaction.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/tsig.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/tsig.cpython-311.pyc new file mode 100644 index 0000000..1940688 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/tsig.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/tsigkeyring.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/tsigkeyring.cpython-311.pyc new file mode 100644 index 0000000..2f38115 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/tsigkeyring.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/ttl.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/ttl.cpython-311.pyc new file mode 100644 index 0000000..195ef4c Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/ttl.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/update.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/update.cpython-311.pyc new file mode 100644 index 0000000..0446dac Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/update.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/version.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/version.cpython-311.pyc new file mode 100644 index 0000000..6153d69 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/version.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/versioned.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/versioned.cpython-311.pyc new file mode 100644 index 0000000..5931612 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/versioned.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/win32util.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/win32util.cpython-311.pyc new file mode 100644 index 0000000..de2fb79 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/win32util.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/wire.cpython-311.pyc 
b/venv/lib/python3.11/site-packages/dns/__pycache__/wire.cpython-311.pyc new file mode 100644 index 0000000..bcfeb17 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/wire.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/xfr.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/xfr.cpython-311.pyc new file mode 100644 index 0000000..b233d1e Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/xfr.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/zone.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/zone.cpython-311.pyc new file mode 100644 index 0000000..bce07bc Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/zone.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/zonefile.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/zonefile.cpython-311.pyc new file mode 100644 index 0000000..87ae6cf Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/zonefile.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/__pycache__/zonetypes.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/__pycache__/zonetypes.cpython-311.pyc new file mode 100644 index 0000000..27432d4 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/__pycache__/zonetypes.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/_asyncbackend.py b/venv/lib/python3.11/site-packages/dns/_asyncbackend.py new file mode 100644 index 0000000..f6760fd --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/_asyncbackend.py @@ -0,0 +1,100 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# This is a nullcontext for both sync and async. 3.7 has a nullcontext, +# but it is only for sync use. + + +class NullContext: + def __init__(self, enter_result=None): + self.enter_result = enter_result + + def __enter__(self): + return self.enter_result + + def __exit__(self, exc_type, exc_value, traceback): + pass + + async def __aenter__(self): + return self.enter_result + + async def __aexit__(self, exc_type, exc_value, traceback): + pass + + +# These are declared here so backends can import them without creating +# circular dependencies with dns.asyncbackend. 
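The NullContext defined above is worth a small illustration: because it implements both the synchronous and asynchronous context-manager protocols, the query functions later in this diff can treat "caller supplied a socket" and "we created a socket" uniformly, without closing an object they do not own. A minimal sketch, assuming only the class just defined (the stand-in object is hypothetical):

import asyncio
from dns._asyncbackend import NullContext

async def use_maybe_provided(sock=None):
    # Wrap a caller-provided object so "async with" neither creates
    # nor closes anything; a real socket factory would go in the else arm.
    cm = NullContext(sock if sock is not None else "fresh-socket-stand-in")
    async with cm as s:
        return s

asyncio.run(use_maybe_provided())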
+ + +class Socket: # pragma: no cover + def __init__(self, family: int, type: int): + self.family = family + self.type = type + + async def close(self): + pass + + async def getpeername(self): + raise NotImplementedError + + async def getsockname(self): + raise NotImplementedError + + async def getpeercert(self, timeout): + raise NotImplementedError + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_value, traceback): + await self.close() + + +class DatagramSocket(Socket): # pragma: no cover + async def sendto(self, what, destination, timeout): + raise NotImplementedError + + async def recvfrom(self, size, timeout): + raise NotImplementedError + + +class StreamSocket(Socket): # pragma: no cover + async def sendall(self, what, timeout): + raise NotImplementedError + + async def recv(self, size, timeout): + raise NotImplementedError + + +class NullTransport: + async def connect_tcp(self, host, port, timeout, local_address): + raise NotImplementedError + + +class Backend: # pragma: no cover + def name(self): + return "unknown" + + async def make_socket( + self, + af, + socktype, + proto=0, + source=None, + destination=None, + timeout=None, + ssl_context=None, + server_hostname=None, + ): + raise NotImplementedError + + def datagram_connection_required(self): + return False + + async def sleep(self, interval): + raise NotImplementedError + + def get_transport_class(self): + raise NotImplementedError + + async def wait_for(self, awaitable, timeout): + raise NotImplementedError diff --git a/venv/lib/python3.11/site-packages/dns/_asyncio_backend.py b/venv/lib/python3.11/site-packages/dns/_asyncio_backend.py new file mode 100644 index 0000000..6ab168d --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/_asyncio_backend.py @@ -0,0 +1,275 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +"""asyncio library query support""" + +import asyncio +import socket +import sys + +import dns._asyncbackend +import dns._features +import dns.exception +import dns.inet + +_is_win32 = sys.platform == "win32" + + +def _get_running_loop(): + try: + return asyncio.get_running_loop() + except AttributeError: # pragma: no cover + return asyncio.get_event_loop() + + +class _DatagramProtocol: + def __init__(self): + self.transport = None + self.recvfrom = None + + def connection_made(self, transport): + self.transport = transport + + def datagram_received(self, data, addr): + if self.recvfrom and not self.recvfrom.done(): + self.recvfrom.set_result((data, addr)) + + def error_received(self, exc): # pragma: no cover + if self.recvfrom and not self.recvfrom.done(): + self.recvfrom.set_exception(exc) + + def connection_lost(self, exc): + if self.recvfrom and not self.recvfrom.done(): + if exc is None: + # EOF we triggered. Is there a better way to do this? 
+ try: + raise EOFError("EOF") + except EOFError as e: + self.recvfrom.set_exception(e) + else: + self.recvfrom.set_exception(exc) + + def close(self): + self.transport.close() + + +async def _maybe_wait_for(awaitable, timeout): + if timeout is not None: + try: + return await asyncio.wait_for(awaitable, timeout) + except asyncio.TimeoutError: + raise dns.exception.Timeout(timeout=timeout) + else: + return await awaitable + + +class DatagramSocket(dns._asyncbackend.DatagramSocket): + def __init__(self, family, transport, protocol): + super().__init__(family, socket.SOCK_DGRAM) + self.transport = transport + self.protocol = protocol + + async def sendto(self, what, destination, timeout): # pragma: no cover + # no timeout for asyncio sendto + self.transport.sendto(what, destination) + return len(what) + + async def recvfrom(self, size, timeout): + # ignore size as there's no way I know to tell protocol about it + done = _get_running_loop().create_future() + try: + assert self.protocol.recvfrom is None + self.protocol.recvfrom = done + await _maybe_wait_for(done, timeout) + return done.result() + finally: + self.protocol.recvfrom = None + + async def close(self): + self.protocol.close() + + async def getpeername(self): + return self.transport.get_extra_info("peername") + + async def getsockname(self): + return self.transport.get_extra_info("sockname") + + async def getpeercert(self, timeout): + raise NotImplementedError + + +class StreamSocket(dns._asyncbackend.StreamSocket): + def __init__(self, af, reader, writer): + super().__init__(af, socket.SOCK_STREAM) + self.reader = reader + self.writer = writer + + async def sendall(self, what, timeout): + self.writer.write(what) + return await _maybe_wait_for(self.writer.drain(), timeout) + + async def recv(self, size, timeout): + return await _maybe_wait_for(self.reader.read(size), timeout) + + async def close(self): + self.writer.close() + + async def getpeername(self): + return self.writer.get_extra_info("peername") + + async def getsockname(self): + return self.writer.get_extra_info("sockname") + + async def getpeercert(self, timeout): + return self.writer.get_extra_info("peercert") + + +if dns._features.have("doh"): + import anyio + import httpcore + import httpcore._backends.anyio + import httpx + + _CoreAsyncNetworkBackend = httpcore.AsyncNetworkBackend + _CoreAnyIOStream = httpcore._backends.anyio.AnyIOStream + + from dns.query import _compute_times, _expiration_for_this_attempt, _remaining + + class _NetworkBackend(_CoreAsyncNetworkBackend): + def __init__(self, resolver, local_port, bootstrap_address, family): + super().__init__() + self._local_port = local_port + self._resolver = resolver + self._bootstrap_address = bootstrap_address + self._family = family + if local_port != 0: + raise NotImplementedError( + "the asyncio transport for HTTPX cannot set the local port" + ) + + async def connect_tcp( + self, host, port, timeout, local_address, socket_options=None + ): # pylint: disable=signature-differs + addresses = [] + _, expiration = _compute_times(timeout) + if dns.inet.is_address(host): + addresses.append(host) + elif self._bootstrap_address is not None: + addresses.append(self._bootstrap_address) + else: + timeout = _remaining(expiration) + family = self._family + if local_address: + family = dns.inet.af_for_address(local_address) + answers = await self._resolver.resolve_name( + host, family=family, lifetime=timeout + ) + addresses = answers.addresses() + for address in addresses: + try: + attempt_expiration = 
_expiration_for_this_attempt(2.0, expiration) + timeout = _remaining(attempt_expiration) + with anyio.fail_after(timeout): + stream = await anyio.connect_tcp( + remote_host=address, + remote_port=port, + local_host=local_address, + ) + return _CoreAnyIOStream(stream) + except Exception: + pass + raise httpcore.ConnectError + + async def connect_unix_socket( + self, path, timeout, socket_options=None + ): # pylint: disable=signature-differs + raise NotImplementedError + + async def sleep(self, seconds): # pylint: disable=signature-differs + await anyio.sleep(seconds) + + class _HTTPTransport(httpx.AsyncHTTPTransport): + def __init__( + self, + *args, + local_port=0, + bootstrap_address=None, + resolver=None, + family=socket.AF_UNSPEC, + **kwargs, + ): + if resolver is None and bootstrap_address is None: + # pylint: disable=import-outside-toplevel,redefined-outer-name + import dns.asyncresolver + + resolver = dns.asyncresolver.Resolver() + super().__init__(*args, **kwargs) + self._pool._network_backend = _NetworkBackend( + resolver, local_port, bootstrap_address, family + ) + +else: + _HTTPTransport = dns._asyncbackend.NullTransport # type: ignore + + +class Backend(dns._asyncbackend.Backend): + def name(self): + return "asyncio" + + async def make_socket( + self, + af, + socktype, + proto=0, + source=None, + destination=None, + timeout=None, + ssl_context=None, + server_hostname=None, + ): + loop = _get_running_loop() + if socktype == socket.SOCK_DGRAM: + if _is_win32 and source is None: + # Win32 wants explicit binding before recvfrom(). This is the + # proper fix for [#637]. + source = (dns.inet.any_for_af(af), 0) + transport, protocol = await loop.create_datagram_endpoint( + _DatagramProtocol, + source, + family=af, + proto=proto, + remote_addr=destination, + ) + return DatagramSocket(af, transport, protocol) + elif socktype == socket.SOCK_STREAM: + if destination is None: + # This shouldn't happen, but we check to make code analysis software + # happier. + raise ValueError("destination required for stream sockets") + (r, w) = await _maybe_wait_for( + asyncio.open_connection( + destination[0], + destination[1], + ssl=ssl_context, + family=af, + proto=proto, + local_addr=source, + server_hostname=server_hostname, + ), + timeout, + ) + return StreamSocket(af, r, w) + raise NotImplementedError( + "unsupported socket " + f"type {socktype}" + ) # pragma: no cover + + async def sleep(self, interval): + await asyncio.sleep(interval) + + def datagram_connection_required(self): + return False + + def get_transport_class(self): + return _HTTPTransport + + async def wait_for(self, awaitable, timeout): + return await _maybe_wait_for(awaitable, timeout) diff --git a/venv/lib/python3.11/site-packages/dns/_ddr.py b/venv/lib/python3.11/site-packages/dns/_ddr.py new file mode 100644 index 0000000..bf5c11e --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/_ddr.py @@ -0,0 +1,154 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license +# +# Support for Discovery of Designated Resolvers + +import socket +import time +from urllib.parse import urlparse + +import dns.asyncbackend +import dns.inet +import dns.name +import dns.nameserver +import dns.query +import dns.rdtypes.svcbbase + +# The special name of the local resolver when using DDR +_local_resolver_name = dns.name.from_text("_dns.resolver.arpa") + + +# +# Processing is split up into I/O independent and I/O dependent parts to +# make supporting sync and async versions easy. 
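The split described above keeps the SVCB parsing free of I/O so the sync and async resolvers can share it; only the TLS certificate check differs between the two paths. In recent dnspython releases the usual entry point into this machinery is Resolver.try_ddr(), so a short, hedged sketch (assuming a configured system resolver and network access; the lifetime value is illustrative):

import dns.resolver

res = dns.resolver.Resolver()
try:
    # Query _dns.resolver.arpa for SVCB records and, where the TLS
    # check succeeds, upgrade res.nameservers to DoH/DoT/DoQ entries.
    res.try_ddr(lifetime=5.0)
except Exception:
    pass  # stay on plain UDP/TCP (Do53) if discovery fails
print(res.nameservers)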
+# + + +class _SVCBInfo: + def __init__(self, bootstrap_address, port, hostname, nameservers): + self.bootstrap_address = bootstrap_address + self.port = port + self.hostname = hostname + self.nameservers = nameservers + + def ddr_check_certificate(self, cert): + """Verify that the _SVCBInfo's address is in the cert's subjectAltName (SAN)""" + for name, value in cert["subjectAltName"]: + if name == "IP Address" and value == self.bootstrap_address: + return True + return False + + def make_tls_context(self): + ssl = dns.query.ssl + ctx = ssl.create_default_context() + ctx.minimum_version = ssl.TLSVersion.TLSv1_2 + return ctx + + def ddr_tls_check_sync(self, lifetime): + ctx = self.make_tls_context() + expiration = time.time() + lifetime + with socket.create_connection( + (self.bootstrap_address, self.port), lifetime + ) as s: + with ctx.wrap_socket(s, server_hostname=self.hostname) as ts: + ts.settimeout(dns.query._remaining(expiration)) + ts.do_handshake() + cert = ts.getpeercert() + return self.ddr_check_certificate(cert) + + async def ddr_tls_check_async(self, lifetime, backend=None): + if backend is None: + backend = dns.asyncbackend.get_default_backend() + ctx = self.make_tls_context() + expiration = time.time() + lifetime + async with await backend.make_socket( + dns.inet.af_for_address(self.bootstrap_address), + socket.SOCK_STREAM, + 0, + None, + (self.bootstrap_address, self.port), + lifetime, + ctx, + self.hostname, + ) as ts: + cert = await ts.getpeercert(dns.query._remaining(expiration)) + return self.ddr_check_certificate(cert) + + +def _extract_nameservers_from_svcb(answer): + bootstrap_address = answer.nameserver + if not dns.inet.is_address(bootstrap_address): + return [] + infos = [] + for rr in answer.rrset.processing_order(): + nameservers = [] + param = rr.params.get(dns.rdtypes.svcbbase.ParamKey.ALPN) + if param is None: + continue + alpns = set(param.ids) + host = rr.target.to_text(omit_final_dot=True) + port = None + param = rr.params.get(dns.rdtypes.svcbbase.ParamKey.PORT) + if param is not None: + port = param.port + # For now we ignore address hints and address resolution and always use the + # bootstrap address + if b"h2" in alpns: + param = rr.params.get(dns.rdtypes.svcbbase.ParamKey.DOHPATH) + if param is None or not param.value.endswith(b"{?dns}"): + continue + path = param.value[:-6].decode() + if not path.startswith("/"): + path = "/" + path + if port is None: + port = 443 + url = f"https://{host}:{port}{path}" + # check the URL + try: + urlparse(url) + nameservers.append(dns.nameserver.DoHNameserver(url, bootstrap_address)) + except Exception: + # continue processing other ALPN types + pass + if b"dot" in alpns: + if port is None: + port = 853 + nameservers.append( + dns.nameserver.DoTNameserver(bootstrap_address, port, host) + ) + if b"doq" in alpns: + if port is None: + port = 853 + nameservers.append( + dns.nameserver.DoQNameserver(bootstrap_address, port, True, host) + ) + if len(nameservers) > 0: + infos.append(_SVCBInfo(bootstrap_address, port, host, nameservers)) + return infos + + +def _get_nameservers_sync(answer, lifetime): + """Return a list of TLS-validated resolver nameservers extracted from an SVCB + answer.""" + nameservers = [] + infos = _extract_nameservers_from_svcb(answer) + for info in infos: + try: + if info.ddr_tls_check_sync(lifetime): + nameservers.extend(info.nameservers) + except Exception: + pass + return nameservers + + +async def _get_nameservers_async(answer, lifetime): + """Return a list of TLS-validated resolver nameservers 
extracted from an SVCB + answer.""" + nameservers = [] + infos = _extract_nameservers_from_svcb(answer) + for info in infos: + try: + if await info.ddr_tls_check_async(lifetime): + nameservers.extend(info.nameservers) + except Exception: + pass + return nameservers diff --git a/venv/lib/python3.11/site-packages/dns/_features.py b/venv/lib/python3.11/site-packages/dns/_features.py new file mode 100644 index 0000000..fa6d495 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/_features.py @@ -0,0 +1,95 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import importlib.metadata +import itertools +import string +from typing import Dict, List, Tuple + + +def _tuple_from_text(version: str) -> Tuple: + text_parts = version.split(".") + int_parts = [] + for text_part in text_parts: + digit_prefix = "".join( + itertools.takewhile(lambda x: x in string.digits, text_part) + ) + try: + int_parts.append(int(digit_prefix)) + except Exception: + break + return tuple(int_parts) + + +def _version_check( + requirement: str, +) -> bool: + """Is the requirement fulfilled? + + The requirement must be of the form + + package>=version + """ + package, minimum = requirement.split(">=") + try: + version = importlib.metadata.version(package) + # This shouldn't happen, but it apparently can. + if version is None: + return False + except Exception: + return False + t_version = _tuple_from_text(version) + t_minimum = _tuple_from_text(minimum) + if t_version < t_minimum: + return False + return True + + +_cache: Dict[str, bool] = {} + + +def have(feature: str) -> bool: + """Is *feature* available? + + This tests if all optional packages needed for the + feature are available and recent enough. + + Returns ``True`` if the feature is available, + and ``False`` if it is not or if metadata is + missing. + """ + value = _cache.get(feature) + if value is not None: + return value + requirements = _requirements.get(feature) + if requirements is None: + # we make a cache entry here for consistency not performance + _cache[feature] = False + return False + ok = True + for requirement in requirements: + if not _version_check(requirement): + ok = False + break + _cache[feature] = ok + return ok + + +def force(feature: str, enabled: bool) -> None: + """Force the status of *feature* to be *enabled*. + + This method is provided as a workaround for any cases + where importlib.metadata is ineffective, or for testing. + """ + _cache[feature] = enabled + + +_requirements: Dict[str, List[str]] = { + ### BEGIN generated requirements + "dnssec": ["cryptography>=43"], + "doh": ["httpcore>=1.0.0", "httpx>=0.26.0", "h2>=4.1.0"], + "doq": ["aioquic>=1.0.0"], + "idna": ["idna>=3.7"], + "trio": ["trio>=0.23"], + "wmi": ["wmi>=1.5.1"], + ### END generated requirements +} diff --git a/venv/lib/python3.11/site-packages/dns/_immutable_ctx.py b/venv/lib/python3.11/site-packages/dns/_immutable_ctx.py new file mode 100644 index 0000000..ae7a33b --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/_immutable_ctx.py @@ -0,0 +1,76 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# This implementation of the immutable decorator requires python >= +# 3.7, and is significantly more storage efficient when making classes +# with slots immutable. It's also faster. 
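dns._features, shown above, is the gate both async backends use before importing optional dependencies such as httpx. A small sketch of the two public helpers, with no assumptions beyond the module itself:

import dns._features

# True only if the "doh" requirements (httpcore, httpx, h2) are
# installed and at least the minimum versions listed above.
if dns._features.have("doh"):
    print("DNS-over-HTTPS support is available")

# Testing hook: override the cached answer regardless of what is installed.
dns._features.force("doh", False)
assert not dns._features.have("doh")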
+ +import contextvars +import inspect + +_in__init__ = contextvars.ContextVar("_immutable_in__init__", default=False) + + +class _Immutable: + """Immutable mixin class""" + + # We set slots to the empty list to say "we don't have any attributes". + # We do this so that if we're mixed in with a class with __slots__, we + # don't cause a __dict__ to be added which would waste space. + + __slots__ = () + + def __setattr__(self, name, value): + if _in__init__.get() is not self: + raise TypeError("object doesn't support attribute assignment") + else: + super().__setattr__(name, value) + + def __delattr__(self, name): + if _in__init__.get() is not self: + raise TypeError("object doesn't support attribute assignment") + else: + super().__delattr__(name) + + +def _immutable_init(f): + def nf(*args, **kwargs): + previous = _in__init__.set(args[0]) + try: + # call the actual __init__ + f(*args, **kwargs) + finally: + _in__init__.reset(previous) + + nf.__signature__ = inspect.signature(f) + return nf + + +def immutable(cls): + if _Immutable in cls.__mro__: + # Some ancestor already has the mixin, so just make sure we keep + # following the __init__ protocol. + cls.__init__ = _immutable_init(cls.__init__) + if hasattr(cls, "__setstate__"): + cls.__setstate__ = _immutable_init(cls.__setstate__) + ncls = cls + else: + # Mixin the Immutable class and follow the __init__ protocol. + class ncls(_Immutable, cls): + # We have to do the __slots__ declaration here too! + __slots__ = () + + @_immutable_init + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + if hasattr(cls, "__setstate__"): + + @_immutable_init + def __setstate__(self, *args, **kwargs): + super().__setstate__(*args, **kwargs) + + # make ncls have the same name and module as cls + ncls.__name__ = cls.__name__ + ncls.__qualname__ = cls.__qualname__ + ncls.__module__ = cls.__module__ + return ncls diff --git a/venv/lib/python3.11/site-packages/dns/_trio_backend.py b/venv/lib/python3.11/site-packages/dns/_trio_backend.py new file mode 100644 index 0000000..0ed904d --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/_trio_backend.py @@ -0,0 +1,253 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +"""trio async I/O library query support""" + +import socket + +import trio +import trio.socket # type: ignore + +import dns._asyncbackend +import dns._features +import dns.exception +import dns.inet + +if not dns._features.have("trio"): + raise ImportError("trio not found or too old") + + +def _maybe_timeout(timeout): + if timeout is not None: + return trio.move_on_after(timeout) + else: + return dns._asyncbackend.NullContext() + + +# for brevity +_lltuple = dns.inet.low_level_address_tuple + +# pylint: disable=redefined-outer-name + + +class DatagramSocket(dns._asyncbackend.DatagramSocket): + def __init__(self, sock): + super().__init__(sock.family, socket.SOCK_DGRAM) + self.socket = sock + + async def sendto(self, what, destination, timeout): + with _maybe_timeout(timeout): + if destination is None: + return await self.socket.send(what) + else: + return await self.socket.sendto(what, destination) + raise dns.exception.Timeout( + timeout=timeout + ) # pragma: no cover lgtm[py/unreachable-statement] + + async def recvfrom(self, size, timeout): + with _maybe_timeout(timeout): + return await self.socket.recvfrom(size) + raise dns.exception.Timeout(timeout=timeout) # lgtm[py/unreachable-statement] + + async def close(self): + self.socket.close() + + async def getpeername(self): + return 
self.socket.getpeername() + + async def getsockname(self): + return self.socket.getsockname() + + async def getpeercert(self, timeout): + raise NotImplementedError + + +class StreamSocket(dns._asyncbackend.StreamSocket): + def __init__(self, family, stream, tls=False): + super().__init__(family, socket.SOCK_STREAM) + self.stream = stream + self.tls = tls + + async def sendall(self, what, timeout): + with _maybe_timeout(timeout): + return await self.stream.send_all(what) + raise dns.exception.Timeout(timeout=timeout) # lgtm[py/unreachable-statement] + + async def recv(self, size, timeout): + with _maybe_timeout(timeout): + return await self.stream.receive_some(size) + raise dns.exception.Timeout(timeout=timeout) # lgtm[py/unreachable-statement] + + async def close(self): + await self.stream.aclose() + + async def getpeername(self): + if self.tls: + return self.stream.transport_stream.socket.getpeername() + else: + return self.stream.socket.getpeername() + + async def getsockname(self): + if self.tls: + return self.stream.transport_stream.socket.getsockname() + else: + return self.stream.socket.getsockname() + + async def getpeercert(self, timeout): + if self.tls: + with _maybe_timeout(timeout): + await self.stream.do_handshake() + return self.stream.getpeercert() + else: + raise NotImplementedError + + +if dns._features.have("doh"): + import httpcore + import httpcore._backends.trio + import httpx + + _CoreAsyncNetworkBackend = httpcore.AsyncNetworkBackend + _CoreTrioStream = httpcore._backends.trio.TrioStream + + from dns.query import _compute_times, _expiration_for_this_attempt, _remaining + + class _NetworkBackend(_CoreAsyncNetworkBackend): + def __init__(self, resolver, local_port, bootstrap_address, family): + super().__init__() + self._local_port = local_port + self._resolver = resolver + self._bootstrap_address = bootstrap_address + self._family = family + + async def connect_tcp( + self, host, port, timeout, local_address, socket_options=None + ): # pylint: disable=signature-differs + addresses = [] + _, expiration = _compute_times(timeout) + if dns.inet.is_address(host): + addresses.append(host) + elif self._bootstrap_address is not None: + addresses.append(self._bootstrap_address) + else: + timeout = _remaining(expiration) + family = self._family + if local_address: + family = dns.inet.af_for_address(local_address) + answers = await self._resolver.resolve_name( + host, family=family, lifetime=timeout + ) + addresses = answers.addresses() + for address in addresses: + try: + af = dns.inet.af_for_address(address) + if local_address is not None or self._local_port != 0: + source = (local_address, self._local_port) + else: + source = None + destination = (address, port) + attempt_expiration = _expiration_for_this_attempt(2.0, expiration) + timeout = _remaining(attempt_expiration) + sock = await Backend().make_socket( + af, socket.SOCK_STREAM, 0, source, destination, timeout + ) + return _CoreTrioStream(sock.stream) + except Exception: + continue + raise httpcore.ConnectError + + async def connect_unix_socket( + self, path, timeout, socket_options=None + ): # pylint: disable=signature-differs + raise NotImplementedError + + async def sleep(self, seconds): # pylint: disable=signature-differs + await trio.sleep(seconds) + + class _HTTPTransport(httpx.AsyncHTTPTransport): + def __init__( + self, + *args, + local_port=0, + bootstrap_address=None, + resolver=None, + family=socket.AF_UNSPEC, + **kwargs, + ): + if resolver is None and bootstrap_address is None: + # pylint: 
disable=import-outside-toplevel,redefined-outer-name + import dns.asyncresolver + + resolver = dns.asyncresolver.Resolver() + super().__init__(*args, **kwargs) + self._pool._network_backend = _NetworkBackend( + resolver, local_port, bootstrap_address, family + ) + +else: + _HTTPTransport = dns._asyncbackend.NullTransport # type: ignore + + +class Backend(dns._asyncbackend.Backend): + def name(self): + return "trio" + + async def make_socket( + self, + af, + socktype, + proto=0, + source=None, + destination=None, + timeout=None, + ssl_context=None, + server_hostname=None, + ): + s = trio.socket.socket(af, socktype, proto) + stream = None + try: + if source: + await s.bind(_lltuple(source, af)) + if socktype == socket.SOCK_STREAM or destination is not None: + connected = False + with _maybe_timeout(timeout): + await s.connect(_lltuple(destination, af)) + connected = True + if not connected: + raise dns.exception.Timeout( + timeout=timeout + ) # lgtm[py/unreachable-statement] + except Exception: # pragma: no cover + s.close() + raise + if socktype == socket.SOCK_DGRAM: + return DatagramSocket(s) + elif socktype == socket.SOCK_STREAM: + stream = trio.SocketStream(s) + tls = False + if ssl_context: + tls = True + try: + stream = trio.SSLStream( + stream, ssl_context, server_hostname=server_hostname + ) + except Exception: # pragma: no cover + await stream.aclose() + raise + return StreamSocket(af, stream, tls) + raise NotImplementedError( + "unsupported socket " + f"type {socktype}" + ) # pragma: no cover + + async def sleep(self, interval): + await trio.sleep(interval) + + def get_transport_class(self): + return _HTTPTransport + + async def wait_for(self, awaitable, timeout): + with _maybe_timeout(timeout): + return await awaitable + raise dns.exception.Timeout( + timeout=timeout + ) # pragma: no cover lgtm[py/unreachable-statement] diff --git a/venv/lib/python3.11/site-packages/dns/asyncbackend.py b/venv/lib/python3.11/site-packages/dns/asyncbackend.py new file mode 100644 index 0000000..0ec58b0 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/asyncbackend.py @@ -0,0 +1,101 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +from typing import Dict + +import dns.exception + +# pylint: disable=unused-import +from dns._asyncbackend import ( # noqa: F401 lgtm[py/unused-import] + Backend, + DatagramSocket, + Socket, + StreamSocket, +) + +# pylint: enable=unused-import + +_default_backend = None + +_backends: Dict[str, Backend] = {} + +# Allow sniffio import to be disabled for testing purposes +_no_sniffio = False + + +class AsyncLibraryNotFoundError(dns.exception.DNSException): + pass + + +def get_backend(name: str) -> Backend: + """Get the specified asynchronous backend. + + *name*, a ``str``, the name of the backend. Currently the "trio" + and "asyncio" backends are available. + + Raises NotImplementedError if an unknown backend name is specified. + """ + # pylint: disable=import-outside-toplevel,redefined-outer-name + backend = _backends.get(name) + if backend: + return backend + if name == "trio": + import dns._trio_backend + + backend = dns._trio_backend.Backend() + elif name == "asyncio": + import dns._asyncio_backend + + backend = dns._asyncio_backend.Backend() + else: + raise NotImplementedError(f"unimplemented async backend {name}") + _backends[name] = backend + return backend + + +def sniff() -> str: + """Attempt to determine the in-use asynchronous I/O library by using + the ``sniffio`` module if it is available. 
+ + Returns the name of the library, or raises AsyncLibraryNotFoundError + if the library cannot be determined. + """ + # pylint: disable=import-outside-toplevel + try: + if _no_sniffio: + raise ImportError + import sniffio + + try: + return sniffio.current_async_library() + except sniffio.AsyncLibraryNotFoundError: + raise AsyncLibraryNotFoundError("sniffio cannot determine async library") + except ImportError: + import asyncio + + try: + asyncio.get_running_loop() + return "asyncio" + except RuntimeError: + raise AsyncLibraryNotFoundError("no async library detected") + + +def get_default_backend() -> Backend: + """Get the default backend, initializing it if necessary.""" + if _default_backend: + return _default_backend + + return set_default_backend(sniff()) + + +def set_default_backend(name: str) -> Backend: + """Set the default backend. + + It's not normally necessary to call this method, as + ``get_default_backend()`` will initialize the backend + appropriately in many cases. If ``sniffio`` is not installed, or + in testing situations, this function allows the backend to be set + explicitly. + """ + global _default_backend + _default_backend = get_backend(name) + return _default_backend diff --git a/venv/lib/python3.11/site-packages/dns/asyncquery.py b/venv/lib/python3.11/site-packages/dns/asyncquery.py new file mode 100644 index 0000000..efad0fd --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/asyncquery.py @@ -0,0 +1,913 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
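Backend selection, defined just above, is normally automatic via sniff(), but it can be pinned explicitly, which is handy in tests or when sniffio is not installed. A minimal sketch; no event loop needs to be running:

import dns.asyncbackend

backend = dns.asyncbackend.set_default_backend("asyncio")
print(backend.name())  # -> "asyncio"

# Subsequent calls reuse the cached instance instead of sniffing.
assert dns.asyncbackend.get_default_backend() is backend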
+ +"""Talk to a DNS server.""" + +import base64 +import contextlib +import random +import socket +import struct +import time +import urllib.parse +from typing import Any, Dict, Optional, Tuple, Union, cast + +import dns.asyncbackend +import dns.exception +import dns.inet +import dns.message +import dns.name +import dns.quic +import dns.rcode +import dns.rdataclass +import dns.rdatatype +import dns.transaction +from dns._asyncbackend import NullContext +from dns.query import ( + BadResponse, + HTTPVersion, + NoDOH, + NoDOQ, + UDPMode, + _check_status, + _compute_times, + _make_dot_ssl_context, + _matches_destination, + _remaining, + have_doh, + ssl, +) + +if have_doh: + import httpx + +# for brevity +_lltuple = dns.inet.low_level_address_tuple + + +def _source_tuple(af, address, port): + # Make a high level source tuple, or return None if address and port + # are both None + if address or port: + if address is None: + if af == socket.AF_INET: + address = "0.0.0.0" + elif af == socket.AF_INET6: + address = "::" + else: + raise NotImplementedError(f"unknown address family {af}") + return (address, port) + else: + return None + + +def _timeout(expiration, now=None): + if expiration is not None: + if not now: + now = time.time() + return max(expiration - now, 0) + else: + return None + + +async def send_udp( + sock: dns.asyncbackend.DatagramSocket, + what: Union[dns.message.Message, bytes], + destination: Any, + expiration: Optional[float] = None, +) -> Tuple[int, float]: + """Send a DNS message to the specified UDP socket. + + *sock*, a ``dns.asyncbackend.DatagramSocket``. + + *what*, a ``bytes`` or ``dns.message.Message``, the message to send. + + *destination*, a destination tuple appropriate for the address family + of the socket, specifying where to send the query. + + *expiration*, a ``float`` or ``None``, the absolute time at which + a timeout exception should be raised. If ``None``, no timeout will + occur. The expiration value is meaningless for the asyncio backend, as + asyncio's transport sendto() never blocks. + + Returns an ``(int, float)`` tuple of bytes sent and the sent time. + """ + + if isinstance(what, dns.message.Message): + what = what.to_wire() + sent_time = time.time() + n = await sock.sendto(what, destination, _timeout(expiration, sent_time)) + return (n, sent_time) + + +async def receive_udp( + sock: dns.asyncbackend.DatagramSocket, + destination: Optional[Any] = None, + expiration: Optional[float] = None, + ignore_unexpected: bool = False, + one_rr_per_rrset: bool = False, + keyring: Optional[Dict[dns.name.Name, dns.tsig.Key]] = None, + request_mac: Optional[bytes] = b"", + ignore_trailing: bool = False, + raise_on_truncation: bool = False, + ignore_errors: bool = False, + query: Optional[dns.message.Message] = None, +) -> Any: + """Read a DNS message from a UDP socket. + + *sock*, a ``dns.asyncbackend.DatagramSocket``. + + See :py:func:`dns.query.receive_udp()` for the documentation of the other + parameters, and exceptions. + + Returns a ``(dns.message.Message, float, tuple)`` tuple of the received message, the + received time, and the address where the message arrived from. 
+ """ + + wire = b"" + while True: + (wire, from_address) = await sock.recvfrom(65535, _timeout(expiration)) + if not _matches_destination( + sock.family, from_address, destination, ignore_unexpected + ): + continue + received_time = time.time() + try: + r = dns.message.from_wire( + wire, + keyring=keyring, + request_mac=request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + raise_on_truncation=raise_on_truncation, + ) + except dns.message.Truncated as e: + # See the comment in query.py for details. + if ( + ignore_errors + and query is not None + and not query.is_response(e.message()) + ): + continue + else: + raise + except Exception: + if ignore_errors: + continue + else: + raise + if ignore_errors and query is not None and not query.is_response(r): + continue + return (r, received_time, from_address) + + +async def udp( + q: dns.message.Message, + where: str, + timeout: Optional[float] = None, + port: int = 53, + source: Optional[str] = None, + source_port: int = 0, + ignore_unexpected: bool = False, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + raise_on_truncation: bool = False, + sock: Optional[dns.asyncbackend.DatagramSocket] = None, + backend: Optional[dns.asyncbackend.Backend] = None, + ignore_errors: bool = False, +) -> dns.message.Message: + """Return the response obtained after sending a query via UDP. + + *sock*, a ``dns.asyncbackend.DatagramSocket``, or ``None``, + the socket to use for the query. If ``None``, the default, a + socket is created. Note that if a socket is provided, the + *source*, *source_port*, and *backend* are ignored. + + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, + the default, then dnspython will use the default backend. + + See :py:func:`dns.query.udp()` for the documentation of the other + parameters, exceptions, and return type of this method. + """ + wire = q.to_wire() + (begin_time, expiration) = _compute_times(timeout) + af = dns.inet.af_for_address(where) + destination = _lltuple((where, port), af) + if sock: + cm: contextlib.AbstractAsyncContextManager = NullContext(sock) + else: + if not backend: + backend = dns.asyncbackend.get_default_backend() + stuple = _source_tuple(af, source, source_port) + if backend.datagram_connection_required(): + dtuple = (where, port) + else: + dtuple = None + cm = await backend.make_socket(af, socket.SOCK_DGRAM, 0, stuple, dtuple) + async with cm as s: + await send_udp(s, wire, destination, expiration) + (r, received_time, _) = await receive_udp( + s, + destination, + expiration, + ignore_unexpected, + one_rr_per_rrset, + q.keyring, + q.mac, + ignore_trailing, + raise_on_truncation, + ignore_errors, + q, + ) + r.time = received_time - begin_time + # We don't need to check q.is_response() if we are in ignore_errors mode + # as receive_udp() will have checked it. 
+ if not (ignore_errors or q.is_response(r)): + raise BadResponse + return r + + +async def udp_with_fallback( + q: dns.message.Message, + where: str, + timeout: Optional[float] = None, + port: int = 53, + source: Optional[str] = None, + source_port: int = 0, + ignore_unexpected: bool = False, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + udp_sock: Optional[dns.asyncbackend.DatagramSocket] = None, + tcp_sock: Optional[dns.asyncbackend.StreamSocket] = None, + backend: Optional[dns.asyncbackend.Backend] = None, + ignore_errors: bool = False, +) -> Tuple[dns.message.Message, bool]: + """Return the response to the query, trying UDP first and falling back + to TCP if UDP results in a truncated response. + + *udp_sock*, a ``dns.asyncbackend.DatagramSocket``, or ``None``, + the socket to use for the UDP query. If ``None``, the default, a + socket is created. Note that if a socket is provided the *source*, + *source_port*, and *backend* are ignored for the UDP query. + + *tcp_sock*, a ``dns.asyncbackend.StreamSocket``, or ``None``, the + socket to use for the TCP query. If ``None``, the default, a + socket is created. Note that if a socket is provided *where*, + *source*, *source_port*, and *backend* are ignored for the TCP query. + + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, + the default, then dnspython will use the default backend. + + See :py:func:`dns.query.udp_with_fallback()` for the documentation + of the other parameters, exceptions, and return type of this + method. + """ + try: + response = await udp( + q, + where, + timeout, + port, + source, + source_port, + ignore_unexpected, + one_rr_per_rrset, + ignore_trailing, + True, + udp_sock, + backend, + ignore_errors, + ) + return (response, False) + except dns.message.Truncated: + response = await tcp( + q, + where, + timeout, + port, + source, + source_port, + one_rr_per_rrset, + ignore_trailing, + tcp_sock, + backend, + ) + return (response, True) + + +async def send_tcp( + sock: dns.asyncbackend.StreamSocket, + what: Union[dns.message.Message, bytes], + expiration: Optional[float] = None, +) -> Tuple[int, float]: + """Send a DNS message to the specified TCP socket. + + *sock*, a ``dns.asyncbackend.StreamSocket``. + + See :py:func:`dns.query.send_tcp()` for the documentation of the other + parameters, exceptions, and return type of this method. + """ + + if isinstance(what, dns.message.Message): + tcpmsg = what.to_wire(prepend_length=True) + else: + # copying the wire into tcpmsg is inefficient, but lets us + # avoid writev() or doing a short write that would get pushed + # onto the net + tcpmsg = len(what).to_bytes(2, "big") + what + sent_time = time.time() + await sock.sendall(tcpmsg, _timeout(expiration, sent_time)) + return (len(tcpmsg), sent_time) + + +async def _read_exactly(sock, count, expiration): + """Read the specified number of bytes from stream. Keep trying until we + either get the desired amount, or we hit EOF. + """ + s = b"" + while count > 0: + n = await sock.recv(count, _timeout(expiration)) + if n == b"": + raise EOFError("EOF") + count = count - len(n) + s = s + n + return s + + +async def receive_tcp( + sock: dns.asyncbackend.StreamSocket, + expiration: Optional[float] = None, + one_rr_per_rrset: bool = False, + keyring: Optional[Dict[dns.name.Name, dns.tsig.Key]] = None, + request_mac: Optional[bytes] = b"", + ignore_trailing: bool = False, +) -> Tuple[dns.message.Message, float]: + """Read a DNS message from a TCP socket. 
+ + *sock*, a ``dns.asyncbackend.StreamSocket``. + + See :py:func:`dns.query.receive_tcp()` for the documentation of the other + parameters, exceptions, and return type of this method. + """ + + ldata = await _read_exactly(sock, 2, expiration) + (l,) = struct.unpack("!H", ldata) + wire = await _read_exactly(sock, l, expiration) + received_time = time.time() + r = dns.message.from_wire( + wire, + keyring=keyring, + request_mac=request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + return (r, received_time) + + +async def tcp( + q: dns.message.Message, + where: str, + timeout: Optional[float] = None, + port: int = 53, + source: Optional[str] = None, + source_port: int = 0, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + sock: Optional[dns.asyncbackend.StreamSocket] = None, + backend: Optional[dns.asyncbackend.Backend] = None, +) -> dns.message.Message: + """Return the response obtained after sending a query via TCP. + + *sock*, a ``dns.asyncbackend.StreamSocket``, or ``None``, the + socket to use for the query. If ``None``, the default, a socket + is created. Note that if a socket is provided + *where*, *port*, *source*, *source_port*, and *backend* are ignored. + + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, + the default, then dnspython will use the default backend. + + See :py:func:`dns.query.tcp()` for the documentation of the other + parameters, exceptions, and return type of this method. + """ + + wire = q.to_wire() + (begin_time, expiration) = _compute_times(timeout) + if sock: + # Verify that the socket is connected, as if it's not connected, + # it's not writable, and the polling in send_tcp() will time out or + # hang forever. + await sock.getpeername() + cm: contextlib.AbstractAsyncContextManager = NullContext(sock) + else: + # These are simple (address, port) pairs, not family-dependent tuples + # you pass to low-level socket code. + af = dns.inet.af_for_address(where) + stuple = _source_tuple(af, source, source_port) + dtuple = (where, port) + if not backend: + backend = dns.asyncbackend.get_default_backend() + cm = await backend.make_socket( + af, socket.SOCK_STREAM, 0, stuple, dtuple, timeout + ) + async with cm as s: + await send_tcp(s, wire, expiration) + (r, received_time) = await receive_tcp( + s, expiration, one_rr_per_rrset, q.keyring, q.mac, ignore_trailing + ) + r.time = received_time - begin_time + if not q.is_response(r): + raise BadResponse + return r + + +async def tls( + q: dns.message.Message, + where: str, + timeout: Optional[float] = None, + port: int = 853, + source: Optional[str] = None, + source_port: int = 0, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + sock: Optional[dns.asyncbackend.StreamSocket] = None, + backend: Optional[dns.asyncbackend.Backend] = None, + ssl_context: Optional[ssl.SSLContext] = None, + server_hostname: Optional[str] = None, + verify: Union[bool, str] = True, +) -> dns.message.Message: + """Return the response obtained after sending a query via TLS. + + *sock*, an ``asyncbackend.StreamSocket``, or ``None``, the socket + to use for the query. If ``None``, the default, a socket is + created. Note that if a socket is provided, it must be a + connected SSL stream socket, and *where*, *port*, + *source*, *source_port*, *backend*, *ssl_context*, and *server_hostname* + are ignored. + + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, + the default, then dnspython will use the default backend.
+ + See :py:func:`dns.query.tls()` for the documentation of the other + parameters, exceptions, and return type of this method. + """ + (begin_time, expiration) = _compute_times(timeout) + if sock: + cm: contextlib.AbstractAsyncContextManager = NullContext(sock) + else: + if ssl_context is None: + ssl_context = _make_dot_ssl_context(server_hostname, verify) + af = dns.inet.af_for_address(where) + stuple = _source_tuple(af, source, source_port) + dtuple = (where, port) + if not backend: + backend = dns.asyncbackend.get_default_backend() + cm = await backend.make_socket( + af, + socket.SOCK_STREAM, + 0, + stuple, + dtuple, + timeout, + ssl_context, + server_hostname, + ) + async with cm as s: + timeout = _timeout(expiration) + response = await tcp( + q, + where, + timeout, + port, + source, + source_port, + one_rr_per_rrset, + ignore_trailing, + s, + backend, + ) + end_time = time.time() + response.time = end_time - begin_time + return response + + +def _maybe_get_resolver( + resolver: Optional["dns.asyncresolver.Resolver"], +) -> "dns.asyncresolver.Resolver": + # We need a separate method for this to avoid overriding the global + # variable "dns" with the as-yet undefined local variable "dns" + # in https(). + if resolver is None: + # pylint: disable=import-outside-toplevel,redefined-outer-name + import dns.asyncresolver + + resolver = dns.asyncresolver.Resolver() + return resolver + + +async def https( + q: dns.message.Message, + where: str, + timeout: Optional[float] = None, + port: int = 443, + source: Optional[str] = None, + source_port: int = 0, # pylint: disable=W0613 + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + client: Optional["httpx.AsyncClient"] = None, + path: str = "/dns-query", + post: bool = True, + verify: Union[bool, str] = True, + bootstrap_address: Optional[str] = None, + resolver: Optional["dns.asyncresolver.Resolver"] = None, + family: int = socket.AF_UNSPEC, + http_version: HTTPVersion = HTTPVersion.DEFAULT, +) -> dns.message.Message: + """Return the response obtained after sending a query via DNS-over-HTTPS. + + *client*, a ``httpx.AsyncClient``. If provided, the client to use for + the query. + + Unlike the other dnspython async functions, a backend cannot be provided + in this function because httpx always auto-detects the async backend. + + See :py:func:`dns.query.https()` for the documentation of the other + parameters, exceptions, and return type of this method. 
+ """ + + try: + af = dns.inet.af_for_address(where) + except ValueError: + af = None + if af is not None and dns.inet.is_address(where): + if af == socket.AF_INET: + url = f"https://{where}:{port}{path}" + elif af == socket.AF_INET6: + url = f"https://[{where}]:{port}{path}" + else: + url = where + + extensions = {} + if bootstrap_address is None: + # pylint: disable=possibly-used-before-assignment + parsed = urllib.parse.urlparse(url) + if parsed.hostname is None: + raise ValueError("no hostname in URL") + if dns.inet.is_address(parsed.hostname): + bootstrap_address = parsed.hostname + extensions["sni_hostname"] = parsed.hostname + if parsed.port is not None: + port = parsed.port + + if http_version == HTTPVersion.H3 or ( + http_version == HTTPVersion.DEFAULT and not have_doh + ): + if bootstrap_address is None: + resolver = _maybe_get_resolver(resolver) + assert parsed.hostname is not None # for mypy + answers = await resolver.resolve_name(parsed.hostname, family) + bootstrap_address = random.choice(list(answers.addresses())) + return await _http3( + q, + bootstrap_address, + url, + timeout, + port, + source, + source_port, + one_rr_per_rrset, + ignore_trailing, + verify=verify, + post=post, + ) + + if not have_doh: + raise NoDOH # pragma: no cover + # pylint: disable=possibly-used-before-assignment + if client and not isinstance(client, httpx.AsyncClient): + raise ValueError("session parameter must be an httpx.AsyncClient") + # pylint: enable=possibly-used-before-assignment + + wire = q.to_wire() + headers = {"accept": "application/dns-message"} + + h1 = http_version in (HTTPVersion.H1, HTTPVersion.DEFAULT) + h2 = http_version in (HTTPVersion.H2, HTTPVersion.DEFAULT) + + backend = dns.asyncbackend.get_default_backend() + + if source is None: + local_address = None + local_port = 0 + else: + local_address = source + local_port = source_port + + if client: + cm: contextlib.AbstractAsyncContextManager = NullContext(client) + else: + transport = backend.get_transport_class()( + local_address=local_address, + http1=h1, + http2=h2, + verify=verify, + local_port=local_port, + bootstrap_address=bootstrap_address, + resolver=resolver, + family=family, + ) + + cm = httpx.AsyncClient(http1=h1, http2=h2, verify=verify, transport=transport) + + async with cm as the_client: + # see https://tools.ietf.org/html/rfc8484#section-4.1.1 for DoH + # GET and POST examples + if post: + headers.update( + { + "content-type": "application/dns-message", + "content-length": str(len(wire)), + } + ) + response = await backend.wait_for( + the_client.post( + url, + headers=headers, + content=wire, + extensions=extensions, + ), + timeout, + ) + else: + wire = base64.urlsafe_b64encode(wire).rstrip(b"=") + twire = wire.decode() # httpx does a repr() if we give it bytes + response = await backend.wait_for( + the_client.get( + url, + headers=headers, + params={"dns": twire}, + extensions=extensions, + ), + timeout, + ) + + # see https://tools.ietf.org/html/rfc8484#section-4.2.1 for info about DoH + # status codes + if response.status_code < 200 or response.status_code > 299: + raise ValueError( + f"{where} responded with status code {response.status_code}" + f"\nResponse body: {response.content!r}" + ) + r = dns.message.from_wire( + response.content, + keyring=q.keyring, + request_mac=q.request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + r.time = response.elapsed.total_seconds() + if not q.is_response(r): + raise BadResponse + return r + + +async def _http3( + q: dns.message.Message, 
+ where: str, + url: str, + timeout: Optional[float] = None, + port: int = 853, + source: Optional[str] = None, + source_port: int = 0, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + verify: Union[bool, str] = True, + backend: Optional[dns.asyncbackend.Backend] = None, + hostname: Optional[str] = None, + post: bool = True, +) -> dns.message.Message: + if not dns.quic.have_quic: + raise NoDOH("DNS-over-HTTP3 is not available.") # pragma: no cover + + url_parts = urllib.parse.urlparse(url) + hostname = url_parts.hostname + if url_parts.port is not None: + port = url_parts.port + + q.id = 0 + wire = q.to_wire() + (cfactory, mfactory) = dns.quic.factories_for_backend(backend) + + async with cfactory() as context: + async with mfactory( + context, verify_mode=verify, server_name=hostname, h3=True + ) as the_manager: + the_connection = the_manager.connect(where, port, source, source_port) + (start, expiration) = _compute_times(timeout) + stream = await the_connection.make_stream(timeout) + async with stream: + # note that send_h3() does not need await + stream.send_h3(url, wire, post) + wire = await stream.receive(_remaining(expiration)) + _check_status(stream.headers(), where, wire) + finish = time.time() + r = dns.message.from_wire( + wire, + keyring=q.keyring, + request_mac=q.request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + r.time = max(finish - start, 0.0) + if not q.is_response(r): + raise BadResponse + return r + + +async def quic( + q: dns.message.Message, + where: str, + timeout: Optional[float] = None, + port: int = 853, + source: Optional[str] = None, + source_port: int = 0, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + connection: Optional[dns.quic.AsyncQuicConnection] = None, + verify: Union[bool, str] = True, + backend: Optional[dns.asyncbackend.Backend] = None, + hostname: Optional[str] = None, + server_hostname: Optional[str] = None, +) -> dns.message.Message: + """Return the response obtained after sending an asynchronous query via + DNS-over-QUIC. + + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, + the default, then dnspython will use the default backend. + + See :py:func:`dns.query.quic()` for the documentation of the other + parameters, exceptions, and return type of this method. 
+ """ + + if not dns.quic.have_quic: + raise NoDOQ("DNS-over-QUIC is not available.") # pragma: no cover + + if server_hostname is not None and hostname is None: + hostname = server_hostname + + q.id = 0 + wire = q.to_wire() + the_connection: dns.quic.AsyncQuicConnection + if connection: + cfactory = dns.quic.null_factory + mfactory = dns.quic.null_factory + the_connection = connection + else: + (cfactory, mfactory) = dns.quic.factories_for_backend(backend) + + async with cfactory() as context: + async with mfactory( + context, + verify_mode=verify, + server_name=server_hostname, + ) as the_manager: + if not connection: + the_connection = the_manager.connect(where, port, source, source_port) + (start, expiration) = _compute_times(timeout) + stream = await the_connection.make_stream(timeout) + async with stream: + await stream.send(wire, True) + wire = await stream.receive(_remaining(expiration)) + finish = time.time() + r = dns.message.from_wire( + wire, + keyring=q.keyring, + request_mac=q.request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + r.time = max(finish - start, 0.0) + if not q.is_response(r): + raise BadResponse + return r + + +async def _inbound_xfr( + txn_manager: dns.transaction.TransactionManager, + s: dns.asyncbackend.Socket, + query: dns.message.Message, + serial: Optional[int], + timeout: Optional[float], + expiration: float, +) -> Any: + """Given a socket, does the zone transfer.""" + rdtype = query.question[0].rdtype + is_ixfr = rdtype == dns.rdatatype.IXFR + origin = txn_manager.from_wire_origin() + wire = query.to_wire() + is_udp = s.type == socket.SOCK_DGRAM + if is_udp: + udp_sock = cast(dns.asyncbackend.DatagramSocket, s) + await udp_sock.sendto(wire, None, _timeout(expiration)) + else: + tcp_sock = cast(dns.asyncbackend.StreamSocket, s) + tcpmsg = struct.pack("!H", len(wire)) + wire + await tcp_sock.sendall(tcpmsg, expiration) + with dns.xfr.Inbound(txn_manager, rdtype, serial, is_udp) as inbound: + done = False + tsig_ctx = None + while not done: + (_, mexpiration) = _compute_times(timeout) + if mexpiration is None or ( + expiration is not None and mexpiration > expiration + ): + mexpiration = expiration + if is_udp: + timeout = _timeout(mexpiration) + (rwire, _) = await udp_sock.recvfrom(65535, timeout) + else: + ldata = await _read_exactly(tcp_sock, 2, mexpiration) + (l,) = struct.unpack("!H", ldata) + rwire = await _read_exactly(tcp_sock, l, mexpiration) + r = dns.message.from_wire( + rwire, + keyring=query.keyring, + request_mac=query.mac, + xfr=True, + origin=origin, + tsig_ctx=tsig_ctx, + multi=(not is_udp), + one_rr_per_rrset=is_ixfr, + ) + done = inbound.process_message(r) + yield r + tsig_ctx = r.tsig_ctx + if query.keyring and not r.had_tsig: + raise dns.exception.FormError("missing TSIG") + + +async def inbound_xfr( + where: str, + txn_manager: dns.transaction.TransactionManager, + query: Optional[dns.message.Message] = None, + port: int = 53, + timeout: Optional[float] = None, + lifetime: Optional[float] = None, + source: Optional[str] = None, + source_port: int = 0, + udp_mode: UDPMode = UDPMode.NEVER, + backend: Optional[dns.asyncbackend.Backend] = None, +) -> None: + """Conduct an inbound transfer and apply it via a transaction from the + txn_manager. + + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, + the default, then dnspython will use the default backend. 
+ + See :py:func:`dns.query.inbound_xfr()` for the documentation of + the other parameters, exceptions, and return type of this method. + """ + if query is None: + (query, serial) = dns.xfr.make_query(txn_manager) + else: + serial = dns.xfr.extract_serial_from_query(query) + af = dns.inet.af_for_address(where) + stuple = _source_tuple(af, source, source_port) + dtuple = (where, port) + if not backend: + backend = dns.asyncbackend.get_default_backend() + (_, expiration) = _compute_times(lifetime) + if query.question[0].rdtype == dns.rdatatype.IXFR and udp_mode != UDPMode.NEVER: + s = await backend.make_socket( + af, socket.SOCK_DGRAM, 0, stuple, dtuple, _timeout(expiration) + ) + async with s: + try: + async for _ in _inbound_xfr( + txn_manager, s, query, serial, timeout, expiration + ): + pass + return + except dns.xfr.UseTCP: + if udp_mode == UDPMode.ONLY: + raise + + s = await backend.make_socket( + af, socket.SOCK_STREAM, 0, stuple, dtuple, _timeout(expiration) + ) + async with s: + async for _ in _inbound_xfr(txn_manager, s, query, serial, timeout, expiration): + pass diff --git a/venv/lib/python3.11/site-packages/dns/asyncresolver.py b/venv/lib/python3.11/site-packages/dns/asyncresolver.py new file mode 100644 index 0000000..8f5e062 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/asyncresolver.py @@ -0,0 +1,475 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Asynchronous DNS stub resolver.""" + +import socket +import time +from typing import Any, Dict, List, Optional, Union + +import dns._ddr +import dns.asyncbackend +import dns.asyncquery +import dns.exception +import dns.name +import dns.query +import dns.rdataclass +import dns.rdatatype +import dns.resolver # lgtm[py/import-and-import-from] + +# import some resolver symbols for brevity +from dns.resolver import NXDOMAIN, NoAnswer, NoRootSOA, NotAbsolute + +# for indentation purposes below +_udp = dns.asyncquery.udp +_tcp = dns.asyncquery.tcp + + +class Resolver(dns.resolver.BaseResolver): + """Asynchronous DNS stub resolver.""" + + async def resolve( + self, + qname: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A, + rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, + tcp: bool = False, + source: Optional[str] = None, + raise_on_no_answer: bool = True, + source_port: int = 0, + lifetime: Optional[float] = None, + search: Optional[bool] = None, + backend: Optional[dns.asyncbackend.Backend] = None, + ) -> dns.resolver.Answer: + """Query nameservers asynchronously to find the answer to the question. + + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, + the default, then dnspython will use the default backend. 
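+
+ A short usage sketch (an illustration, not upstream documentation;
+ "example.org" is a placeholder):
+
+ import asyncio
+ import dns.asyncresolver
+
+ async def main():
+ res = dns.asyncresolver.Resolver()
+ answer = await res.resolve("example.org", "MX")
+ for rr in answer:
+ print(rr)
+
+ asyncio.run(main())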
+ + See :py:func:`dns.resolver.Resolver.resolve()` for the + documentation of the other parameters, exceptions, and return + type of this method. + """ + + resolution = dns.resolver._Resolution( + self, qname, rdtype, rdclass, tcp, raise_on_no_answer, search + ) + if not backend: + backend = dns.asyncbackend.get_default_backend() + start = time.time() + while True: + (request, answer) = resolution.next_request() + # Note we need to say "if answer is not None" and not just + # "if answer" because answer implements __len__, and python + # will call that. We want to return if we have an answer + # object, including in cases where its length is 0. + if answer is not None: + # cache hit! + return answer + assert request is not None # needed for type checking + done = False + while not done: + (nameserver, tcp, backoff) = resolution.next_nameserver() + if backoff: + await backend.sleep(backoff) + timeout = self._compute_timeout(start, lifetime, resolution.errors) + try: + response = await nameserver.async_query( + request, + timeout=timeout, + source=source, + source_port=source_port, + max_size=tcp, + backend=backend, + ) + except Exception as ex: + (_, done) = resolution.query_result(None, ex) + continue + (answer, done) = resolution.query_result(response, None) + # Note we need to say "if answer is not None" and not just + # "if answer" because answer implements __len__, and python + # will call that. We want to return if we have an answer + # object, including in cases where its length is 0. + if answer is not None: + return answer + + async def resolve_address( + self, ipaddr: str, *args: Any, **kwargs: Any + ) -> dns.resolver.Answer: + """Use an asynchronous resolver to run a reverse query for PTR + records. + + This utilizes the resolve() method to perform a PTR lookup on the + specified IP address. + + *ipaddr*, a ``str``, the IPv4 or IPv6 address you want to get + the PTR record for. + + All other arguments that can be passed to the resolve() function + except for rdtype and rdclass are also supported by this + function. + + """ + # We make a modified kwargs for type checking happiness, as otherwise + # we get a legit warning about possibly having rdtype and rdclass + # in the kwargs more than once. + modified_kwargs: Dict[str, Any] = {} + modified_kwargs.update(kwargs) + modified_kwargs["rdtype"] = dns.rdatatype.PTR + modified_kwargs["rdclass"] = dns.rdataclass.IN + return await self.resolve( + dns.reversename.from_address(ipaddr), *args, **modified_kwargs + ) + + async def resolve_name( + self, + name: Union[dns.name.Name, str], + family: int = socket.AF_UNSPEC, + **kwargs: Any, + ) -> dns.resolver.HostAnswers: + """Use an asynchronous resolver to query for address records. + + This utilizes the resolve() method to perform A and/or AAAA lookups on + the specified name. + + *qname*, a ``dns.name.Name`` or ``str``, the name to resolve. + + *family*, an ``int``, the address family. If socket.AF_UNSPEC + (the default), both A and AAAA records will be retrieved. + + All other arguments that can be passed to the resolve() function + except for rdtype and rdclass are also supported by this + function. + """ + # We make a modified kwargs for type checking happiness, as otherwise + # we get a legit warning about possibly having rdtype and rdclass + # in the kwargs more than once. 
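+ # The address family selects the lookups made below: AF_INET queries
+ # only A, AF_INET6 queries only AAAA, and AF_UNSPEC (the default)
+ # queries both and merges the results into a single HostAnswers.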
+ modified_kwargs: Dict[str, Any] = {}
+ modified_kwargs.update(kwargs)
+ modified_kwargs.pop("rdtype", None)
+ modified_kwargs["rdclass"] = dns.rdataclass.IN
+
+ if family == socket.AF_INET:
+ v4 = await self.resolve(name, dns.rdatatype.A, **modified_kwargs)
+ return dns.resolver.HostAnswers.make(v4=v4)
+ elif family == socket.AF_INET6:
+ v6 = await self.resolve(name, dns.rdatatype.AAAA, **modified_kwargs)
+ return dns.resolver.HostAnswers.make(v6=v6)
+ elif family != socket.AF_UNSPEC:
+ raise NotImplementedError(f"unknown address family {family}")
+
+ raise_on_no_answer = modified_kwargs.pop("raise_on_no_answer", True)
+ lifetime = modified_kwargs.pop("lifetime", None)
+ start = time.time()
+ v6 = await self.resolve(
+ name,
+ dns.rdatatype.AAAA,
+ raise_on_no_answer=False,
+ lifetime=self._compute_timeout(start, lifetime),
+ **modified_kwargs,
+ )
+ # Note that setting name ensures we query the same name
+ # for A as we did for AAAA. (This is just in case search lists
+ # are active by default in the resolver configuration and
+ # we might be talking to a server that says NXDOMAIN when it
+ # wants to say NOERROR no data.)
+ name = v6.qname
+ v4 = await self.resolve(
+ name,
+ dns.rdatatype.A,
+ raise_on_no_answer=False,
+ lifetime=self._compute_timeout(start, lifetime),
+ **modified_kwargs,
+ )
+ answers = dns.resolver.HostAnswers.make(
+ v6=v6, v4=v4, add_empty=not raise_on_no_answer
+ )
+ if not answers:
+ raise NoAnswer(response=v6.response)
+ return answers
+
+ # pylint: disable=redefined-outer-name
+
+ async def canonical_name(self, name: Union[dns.name.Name, str]) -> dns.name.Name:
+ """Determine the canonical name of *name*.
+
+ The canonical name is the name the resolver uses for queries
+ after all CNAME and DNAME renamings have been applied.
+
+ *name*, a ``dns.name.Name`` or ``str``, the query name.
+
+ This method can raise any exception that ``resolve()`` can
+ raise, other than ``dns.resolver.NoAnswer`` and
+ ``dns.resolver.NXDOMAIN``.
+
+ Returns a ``dns.name.Name``.
+ """
+ try:
+ answer = await self.resolve(name, raise_on_no_answer=False)
+ canonical_name = answer.canonical_name
+ except dns.resolver.NXDOMAIN as e:
+ canonical_name = e.canonical_name
+ return canonical_name
+
+ async def try_ddr(self, lifetime: float = 5.0) -> None:
+ """Try to update the resolver's nameservers using Discovery of Designated
+ Resolvers (DDR). If successful, the resolver will subsequently use
+ DNS-over-HTTPS or DNS-over-TLS for future queries.
+
+ *lifetime*, a float, is the maximum time to spend attempting DDR. The default
+ is 5 seconds.
+
+ If the SVCB query is successful and results in a non-empty list of nameservers,
+ then the resolver's nameservers are set to the returned servers in priority
+ order.
+
+ The current implementation does not use any address hints from the SVCB record,
+ nor does it resolve addresses for the SVCB target name, rather it assumes that
+ the bootstrap nameserver will always be one of the addresses and uses it.
+ A future revision to the code may offer fuller support. The code verifies that
+ the bootstrap nameserver is in the Subject Alternative Name field of the
+ TLS certificate.
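+
+ A short usage sketch (an illustration, not upstream documentation;
+ it assumes the configured resolver actually advertises DDR):
+
+ import asyncio
+ import dns.asyncresolver
+
+ async def main():
+ res = dns.asyncresolver.Resolver()
+ await res.try_ddr()
+ print(res.nameservers)
+
+ asyncio.run(main())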
+ """ + try: + expiration = time.time() + lifetime + answer = await self.resolve( + dns._ddr._local_resolver_name, "svcb", lifetime=lifetime + ) + timeout = dns.query._remaining(expiration) + nameservers = await dns._ddr._get_nameservers_async(answer, timeout) + if len(nameservers) > 0: + self.nameservers = nameservers + except Exception: + pass + + +default_resolver = None + + +def get_default_resolver() -> Resolver: + """Get the default asynchronous resolver, initializing it if necessary.""" + if default_resolver is None: + reset_default_resolver() + assert default_resolver is not None + return default_resolver + + +def reset_default_resolver() -> None: + """Re-initialize default asynchronous resolver. + + Note that the resolver configuration (i.e. /etc/resolv.conf on UNIX + systems) will be re-read immediately. + """ + + global default_resolver + default_resolver = Resolver() + + +async def resolve( + qname: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A, + rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, + tcp: bool = False, + source: Optional[str] = None, + raise_on_no_answer: bool = True, + source_port: int = 0, + lifetime: Optional[float] = None, + search: Optional[bool] = None, + backend: Optional[dns.asyncbackend.Backend] = None, +) -> dns.resolver.Answer: + """Query nameservers asynchronously to find the answer to the question. + + This is a convenience function that uses the default resolver + object to make the query. + + See :py:func:`dns.asyncresolver.Resolver.resolve` for more + information on the parameters. + """ + + return await get_default_resolver().resolve( + qname, + rdtype, + rdclass, + tcp, + source, + raise_on_no_answer, + source_port, + lifetime, + search, + backend, + ) + + +async def resolve_address( + ipaddr: str, *args: Any, **kwargs: Any +) -> dns.resolver.Answer: + """Use a resolver to run a reverse query for PTR records. + + See :py:func:`dns.asyncresolver.Resolver.resolve_address` for more + information on the parameters. + """ + + return await get_default_resolver().resolve_address(ipaddr, *args, **kwargs) + + +async def resolve_name( + name: Union[dns.name.Name, str], family: int = socket.AF_UNSPEC, **kwargs: Any +) -> dns.resolver.HostAnswers: + """Use a resolver to asynchronously query for address records. + + See :py:func:`dns.asyncresolver.Resolver.resolve_name` for more + information on the parameters. + """ + + return await get_default_resolver().resolve_name(name, family, **kwargs) + + +async def canonical_name(name: Union[dns.name.Name, str]) -> dns.name.Name: + """Determine the canonical name of *name*. + + See :py:func:`dns.resolver.Resolver.canonical_name` for more + information on the parameters and possible exceptions. + """ + + return await get_default_resolver().canonical_name(name) + + +async def try_ddr(timeout: float = 5.0) -> None: + """Try to update the default resolver's nameservers using Discovery of Designated + Resolvers (DDR). If successful, the resolver will subsequently use + DNS-over-HTTPS or DNS-over-TLS for future queries. + + See :py:func:`dns.resolver.Resolver.try_ddr` for more information. 
+ """ + return await get_default_resolver().try_ddr(timeout) + + +async def zone_for_name( + name: Union[dns.name.Name, str], + rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, + tcp: bool = False, + resolver: Optional[Resolver] = None, + backend: Optional[dns.asyncbackend.Backend] = None, +) -> dns.name.Name: + """Find the name of the zone which contains the specified name. + + See :py:func:`dns.resolver.Resolver.zone_for_name` for more + information on the parameters and possible exceptions. + """ + + if isinstance(name, str): + name = dns.name.from_text(name, dns.name.root) + if resolver is None: + resolver = get_default_resolver() + if not name.is_absolute(): + raise NotAbsolute(name) + while True: + try: + answer = await resolver.resolve( + name, dns.rdatatype.SOA, rdclass, tcp, backend=backend + ) + assert answer.rrset is not None + if answer.rrset.name == name: + return name + # otherwise we were CNAMEd or DNAMEd and need to look higher + except (NXDOMAIN, NoAnswer): + pass + try: + name = name.parent() + except dns.name.NoParent: # pragma: no cover + raise NoRootSOA + + +async def make_resolver_at( + where: Union[dns.name.Name, str], + port: int = 53, + family: int = socket.AF_UNSPEC, + resolver: Optional[Resolver] = None, +) -> Resolver: + """Make a stub resolver using the specified destination as the full resolver. + + *where*, a ``dns.name.Name`` or ``str`` the domain name or IP address of the + full resolver. + + *port*, an ``int``, the port to use. If not specified, the default is 53. + + *family*, an ``int``, the address family to use. This parameter is used if + *where* is not an address. The default is ``socket.AF_UNSPEC`` in which case + the first address returned by ``resolve_name()`` will be used, otherwise the + first address of the specified family will be used. + + *resolver*, a ``dns.asyncresolver.Resolver`` or ``None``, the resolver to use for + resolution of hostnames. If not specified, the default resolver will be used. + + Returns a ``dns.resolver.Resolver`` or raises an exception. + """ + if resolver is None: + resolver = get_default_resolver() + nameservers: List[Union[str, dns.nameserver.Nameserver]] = [] + if isinstance(where, str) and dns.inet.is_address(where): + nameservers.append(dns.nameserver.Do53Nameserver(where, port)) + else: + answers = await resolver.resolve_name(where, family) + for address in answers.addresses(): + nameservers.append(dns.nameserver.Do53Nameserver(address, port)) + res = dns.asyncresolver.Resolver(configure=False) + res.nameservers = nameservers + return res + + +async def resolve_at( + where: Union[dns.name.Name, str], + qname: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A, + rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, + tcp: bool = False, + source: Optional[str] = None, + raise_on_no_answer: bool = True, + source_port: int = 0, + lifetime: Optional[float] = None, + search: Optional[bool] = None, + backend: Optional[dns.asyncbackend.Backend] = None, + port: int = 53, + family: int = socket.AF_UNSPEC, + resolver: Optional[Resolver] = None, +) -> dns.resolver.Answer: + """Query nameservers to find the answer to the question. + + This is a convenience function that calls ``dns.asyncresolver.make_resolver_at()`` + to make a resolver, and then uses it to resolve the query. 
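+
+ A short usage sketch (an illustration, not upstream documentation;
+ "9.9.9.9" and "example.org" are placeholders):
+
+ import asyncio
+ import dns.asyncresolver
+
+ answer = asyncio.run(
+ dns.asyncresolver.resolve_at("9.9.9.9", "example.org", "AAAA")
+ )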
+ + See ``dns.asyncresolver.Resolver.resolve`` for more information on the resolution + parameters, and ``dns.asyncresolver.make_resolver_at`` for information about the + resolver parameters *where*, *port*, *family*, and *resolver*. + + If making more than one query, it is more efficient to call + ``dns.asyncresolver.make_resolver_at()`` and then use that resolver for the queries + instead of calling ``resolve_at()`` multiple times. + """ + res = await make_resolver_at(where, port, family, resolver) + return await res.resolve( + qname, + rdtype, + rdclass, + tcp, + source, + raise_on_no_answer, + source_port, + lifetime, + search, + backend, + ) diff --git a/venv/lib/python3.11/site-packages/dns/dnssec.py b/venv/lib/python3.11/site-packages/dns/dnssec.py new file mode 100644 index 0000000..b69d0a1 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/dnssec.py @@ -0,0 +1,1247 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Common DNSSEC-related functions and constants.""" + + +import base64 +import contextlib +import functools +import hashlib +import struct +import time +from datetime import datetime +from typing import Callable, Dict, List, Optional, Set, Tuple, Union, cast + +import dns._features +import dns.exception +import dns.name +import dns.node +import dns.rdata +import dns.rdataclass +import dns.rdataset +import dns.rdatatype +import dns.rrset +import dns.transaction +import dns.zone +from dns.dnssectypes import Algorithm, DSDigest, NSEC3Hash +from dns.exception import ( # pylint: disable=W0611 + AlgorithmKeyMismatch, + DeniedByPolicy, + UnsupportedAlgorithm, + ValidationFailure, +) +from dns.rdtypes.ANY.CDNSKEY import CDNSKEY +from dns.rdtypes.ANY.CDS import CDS +from dns.rdtypes.ANY.DNSKEY import DNSKEY +from dns.rdtypes.ANY.DS import DS +from dns.rdtypes.ANY.NSEC import NSEC, Bitmap +from dns.rdtypes.ANY.NSEC3PARAM import NSEC3PARAM +from dns.rdtypes.ANY.RRSIG import RRSIG, sigtime_to_posixtime +from dns.rdtypes.dnskeybase import Flag + +PublicKey = Union[ + "GenericPublicKey", + "rsa.RSAPublicKey", + "ec.EllipticCurvePublicKey", + "ed25519.Ed25519PublicKey", + "ed448.Ed448PublicKey", +] + +PrivateKey = Union[ + "GenericPrivateKey", + "rsa.RSAPrivateKey", + "ec.EllipticCurvePrivateKey", + "ed25519.Ed25519PrivateKey", + "ed448.Ed448PrivateKey", +] + +RRsetSigner = Callable[[dns.transaction.Transaction, dns.rrset.RRset], None] + + +def algorithm_from_text(text: str) -> Algorithm: + """Convert text into a DNSSEC algorithm value. + + *text*, a ``str``, the text to convert to into an algorithm value. + + Returns an ``int``. 
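+
+ A quick illustration (not from upstream):
+
+ >>> dns.dnssec.algorithm_from_text("RSASHA256")
+ <Algorithm.RSASHA256: 8>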
+ """ + + return Algorithm.from_text(text) + + +def algorithm_to_text(value: Union[Algorithm, int]) -> str: + """Convert a DNSSEC algorithm value to text + + *value*, a ``dns.dnssec.Algorithm``. + + Returns a ``str``, the name of a DNSSEC algorithm. + """ + + return Algorithm.to_text(value) + + +def to_timestamp(value: Union[datetime, str, float, int]) -> int: + """Convert various format to a timestamp""" + if isinstance(value, datetime): + return int(value.timestamp()) + elif isinstance(value, str): + return sigtime_to_posixtime(value) + elif isinstance(value, float): + return int(value) + elif isinstance(value, int): + return value + else: + raise TypeError("Unsupported timestamp type") + + +def key_id(key: Union[DNSKEY, CDNSKEY]) -> int: + """Return the key id (a 16-bit number) for the specified key. + + *key*, a ``dns.rdtypes.ANY.DNSKEY.DNSKEY`` + + Returns an ``int`` between 0 and 65535 + """ + + rdata = key.to_wire() + assert rdata is not None # for mypy + if key.algorithm == Algorithm.RSAMD5: + return (rdata[-3] << 8) + rdata[-2] + else: + total = 0 + for i in range(len(rdata) // 2): + total += (rdata[2 * i] << 8) + rdata[2 * i + 1] + if len(rdata) % 2 != 0: + total += rdata[len(rdata) - 1] << 8 + total += (total >> 16) & 0xFFFF + return total & 0xFFFF + + +class Policy: + def __init__(self): + pass + + def ok_to_sign(self, _: DNSKEY) -> bool: # pragma: no cover + return False + + def ok_to_validate(self, _: DNSKEY) -> bool: # pragma: no cover + return False + + def ok_to_create_ds(self, _: DSDigest) -> bool: # pragma: no cover + return False + + def ok_to_validate_ds(self, _: DSDigest) -> bool: # pragma: no cover + return False + + +class SimpleDeny(Policy): + def __init__(self, deny_sign, deny_validate, deny_create_ds, deny_validate_ds): + super().__init__() + self._deny_sign = deny_sign + self._deny_validate = deny_validate + self._deny_create_ds = deny_create_ds + self._deny_validate_ds = deny_validate_ds + + def ok_to_sign(self, key: DNSKEY) -> bool: + return key.algorithm not in self._deny_sign + + def ok_to_validate(self, key: DNSKEY) -> bool: + return key.algorithm not in self._deny_validate + + def ok_to_create_ds(self, algorithm: DSDigest) -> bool: + return algorithm not in self._deny_create_ds + + def ok_to_validate_ds(self, algorithm: DSDigest) -> bool: + return algorithm not in self._deny_validate_ds + + +rfc_8624_policy = SimpleDeny( + {Algorithm.RSAMD5, Algorithm.DSA, Algorithm.DSANSEC3SHA1, Algorithm.ECCGOST}, + {Algorithm.RSAMD5, Algorithm.DSA, Algorithm.DSANSEC3SHA1}, + {DSDigest.NULL, DSDigest.SHA1, DSDigest.GOST}, + {DSDigest.NULL}, +) + +allow_all_policy = SimpleDeny(set(), set(), set(), set()) + + +default_policy = rfc_8624_policy + + +def make_ds( + name: Union[dns.name.Name, str], + key: dns.rdata.Rdata, + algorithm: Union[DSDigest, str], + origin: Optional[dns.name.Name] = None, + policy: Optional[Policy] = None, + validating: bool = False, +) -> DS: + """Create a DS record for a DNSSEC key. + + *name*, a ``dns.name.Name`` or ``str``, the owner name of the DS record. + + *key*, a ``dns.rdtypes.ANY.DNSKEY.DNSKEY`` or ``dns.rdtypes.ANY.DNSKEY.CDNSKEY``, + the key the DS is about. + + *algorithm*, a ``str`` or ``int`` specifying the hash algorithm. + The currently supported hashes are "SHA1", "SHA256", and "SHA384". Case + does not matter for these strings. + + *origin*, a ``dns.name.Name`` or ``None``. If *key* is a relative name, + then it will be made absolute using the specified origin. + + *policy*, a ``dns.dnssec.Policy`` or ``None``. 
If ``None``, the default policy, + ``dns.dnssec.default_policy`` is used; this policy defaults to that of RFC 8624. + + *validating*, a ``bool``. If ``True``, then policy is checked in + validating mode, i.e. "Is it ok to validate using this digest algorithm?". + Otherwise the policy is checked in creating mode, i.e. "Is it ok to create a DS with + this digest algorithm?". + + Raises ``UnsupportedAlgorithm`` if the algorithm is unknown. + + Raises ``DeniedByPolicy`` if the algorithm is denied by policy. + + Returns a ``dns.rdtypes.ANY.DS.DS`` + """ + + if policy is None: + policy = default_policy + try: + if isinstance(algorithm, str): + algorithm = DSDigest[algorithm.upper()] + except Exception: + raise UnsupportedAlgorithm(f'unsupported algorithm "{algorithm}"') + if validating: + check = policy.ok_to_validate_ds + else: + check = policy.ok_to_create_ds + if not check(algorithm): + raise DeniedByPolicy + if not isinstance(key, (DNSKEY, CDNSKEY)): + raise ValueError("key is not a DNSKEY/CDNSKEY") + if algorithm == DSDigest.SHA1: + dshash = hashlib.sha1() + elif algorithm == DSDigest.SHA256: + dshash = hashlib.sha256() + elif algorithm == DSDigest.SHA384: + dshash = hashlib.sha384() + else: + raise UnsupportedAlgorithm(f'unsupported algorithm "{algorithm}"') + + if isinstance(name, str): + name = dns.name.from_text(name, origin) + wire = name.canonicalize().to_wire() + kwire = key.to_wire(origin=origin) + assert wire is not None and kwire is not None # for mypy + dshash.update(wire) + dshash.update(kwire) + digest = dshash.digest() + + dsrdata = struct.pack("!HBB", key_id(key), key.algorithm, algorithm) + digest + ds = dns.rdata.from_wire( + dns.rdataclass.IN, dns.rdatatype.DS, dsrdata, 0, len(dsrdata) + ) + return cast(DS, ds) + + +def make_cds( + name: Union[dns.name.Name, str], + key: dns.rdata.Rdata, + algorithm: Union[DSDigest, str], + origin: Optional[dns.name.Name] = None, +) -> CDS: + """Create a CDS record for a DNSSEC key. + + *name*, a ``dns.name.Name`` or ``str``, the owner name of the DS record. + + *key*, a ``dns.rdtypes.ANY.DNSKEY.DNSKEY`` or ``dns.rdtypes.ANY.DNSKEY.CDNSKEY``, + the key the DS is about. + + *algorithm*, a ``str`` or ``int`` specifying the hash algorithm. + The currently supported hashes are "SHA1", "SHA256", and "SHA384". Case + does not matter for these strings. + + *origin*, a ``dns.name.Name`` or ``None``. If *key* is a relative name, + then it will be made absolute using the specified origin. + + Raises ``UnsupportedAlgorithm`` if the algorithm is unknown. 
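+
+ A short usage sketch (an illustration, not upstream documentation;
+ ``dnskey`` is assumed to be a DNSKEY rdata for the zone):
+
+ cds = dns.dnssec.make_cds("example.", dnskey, "SHA256")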
+ + Returns a ``dns.rdtypes.ANY.DS.CDS`` + """ + + ds = make_ds(name, key, algorithm, origin) + return CDS( + rdclass=ds.rdclass, + rdtype=dns.rdatatype.CDS, + key_tag=ds.key_tag, + algorithm=ds.algorithm, + digest_type=ds.digest_type, + digest=ds.digest, + ) + + +def _find_candidate_keys( + keys: Dict[dns.name.Name, Union[dns.rdataset.Rdataset, dns.node.Node]], rrsig: RRSIG +) -> Optional[List[DNSKEY]]: + value = keys.get(rrsig.signer) + if isinstance(value, dns.node.Node): + rdataset = value.get_rdataset(dns.rdataclass.IN, dns.rdatatype.DNSKEY) + else: + rdataset = value + if rdataset is None: + return None + return [ + cast(DNSKEY, rd) + for rd in rdataset + if rd.algorithm == rrsig.algorithm + and key_id(rd) == rrsig.key_tag + and (rd.flags & Flag.ZONE) == Flag.ZONE # RFC 4034 2.1.1 + and rd.protocol == 3 # RFC 4034 2.1.2 + ] + + +def _get_rrname_rdataset( + rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], +) -> Tuple[dns.name.Name, dns.rdataset.Rdataset]: + if isinstance(rrset, tuple): + return rrset[0], rrset[1] + else: + return rrset.name, rrset + + +def _validate_signature(sig: bytes, data: bytes, key: DNSKEY) -> None: + # pylint: disable=possibly-used-before-assignment + public_cls = get_algorithm_cls_from_dnskey(key).public_cls + try: + public_key = public_cls.from_dnskey(key) + except ValueError: + raise ValidationFailure("invalid public key") + public_key.verify(sig, data) + + +def _validate_rrsig( + rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], + rrsig: RRSIG, + keys: Dict[dns.name.Name, Union[dns.node.Node, dns.rdataset.Rdataset]], + origin: Optional[dns.name.Name] = None, + now: Optional[float] = None, + policy: Optional[Policy] = None, +) -> None: + """Validate an RRset against a single signature rdata, throwing an + exception if validation is not successful. + + *rrset*, the RRset to validate. This can be a + ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) + tuple. + + *rrsig*, a ``dns.rdata.Rdata``, the signature to validate. + + *keys*, the key dictionary, used to find the DNSKEY associated + with a given name. The dictionary is keyed by a + ``dns.name.Name``, and has ``dns.node.Node`` or + ``dns.rdataset.Rdataset`` values. + + *origin*, a ``dns.name.Name`` or ``None``, the origin to use for relative + names. + + *now*, a ``float`` or ``None``, the time, in seconds since the epoch, to + use as the current time when validating. If ``None``, the actual current + time is used. + + *policy*, a ``dns.dnssec.Policy`` or ``None``. If ``None``, the default policy, + ``dns.dnssec.default_policy`` is used; this policy defaults to that of RFC 8624. + + Raises ``ValidationFailure`` if the signature is expired, not yet valid, + the public key is invalid, the algorithm is unknown, the verification + fails, etc. + + Raises ``UnsupportedAlgorithm`` if the algorithm is recognized by + dnspython but not implemented. 
+ """ + + if policy is None: + policy = default_policy + + candidate_keys = _find_candidate_keys(keys, rrsig) + if candidate_keys is None: + raise ValidationFailure("unknown key") + + if now is None: + now = time.time() + if rrsig.expiration < now: + raise ValidationFailure("expired") + if rrsig.inception > now: + raise ValidationFailure("not yet valid") + + data = _make_rrsig_signature_data(rrset, rrsig, origin) + + # pylint: disable=possibly-used-before-assignment + for candidate_key in candidate_keys: + if not policy.ok_to_validate(candidate_key): + continue + try: + _validate_signature(rrsig.signature, data, candidate_key) + return + except (InvalidSignature, ValidationFailure): + # this happens on an individual validation failure + continue + # nothing verified -- raise failure: + raise ValidationFailure("verify failure") + + +def _validate( + rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], + rrsigset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], + keys: Dict[dns.name.Name, Union[dns.node.Node, dns.rdataset.Rdataset]], + origin: Optional[dns.name.Name] = None, + now: Optional[float] = None, + policy: Optional[Policy] = None, +) -> None: + """Validate an RRset against a signature RRset, throwing an exception + if none of the signatures validate. + + *rrset*, the RRset to validate. This can be a + ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) + tuple. + + *rrsigset*, the signature RRset. This can be a + ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) + tuple. + + *keys*, the key dictionary, used to find the DNSKEY associated + with a given name. The dictionary is keyed by a + ``dns.name.Name``, and has ``dns.node.Node`` or + ``dns.rdataset.Rdataset`` values. + + *origin*, a ``dns.name.Name``, the origin to use for relative names; + defaults to None. + + *now*, an ``int`` or ``None``, the time, in seconds since the epoch, to + use as the current time when validating. If ``None``, the actual current + time is used. + + *policy*, a ``dns.dnssec.Policy`` or ``None``. If ``None``, the default policy, + ``dns.dnssec.default_policy`` is used; this policy defaults to that of RFC 8624. + + Raises ``ValidationFailure`` if the signature is expired, not yet valid, + the public key is invalid, the algorithm is unknown, the verification + fails, etc. 
+ """ + + if policy is None: + policy = default_policy + + if isinstance(origin, str): + origin = dns.name.from_text(origin, dns.name.root) + + if isinstance(rrset, tuple): + rrname = rrset[0] + else: + rrname = rrset.name + + if isinstance(rrsigset, tuple): + rrsigname = rrsigset[0] + rrsigrdataset = rrsigset[1] + else: + rrsigname = rrsigset.name + rrsigrdataset = rrsigset + + rrname = rrname.choose_relativity(origin) + rrsigname = rrsigname.choose_relativity(origin) + if rrname != rrsigname: + raise ValidationFailure("owner names do not match") + + for rrsig in rrsigrdataset: + if not isinstance(rrsig, RRSIG): + raise ValidationFailure("expected an RRSIG") + try: + _validate_rrsig(rrset, rrsig, keys, origin, now, policy) + return + except (ValidationFailure, UnsupportedAlgorithm): + pass + raise ValidationFailure("no RRSIGs validated") + + +def _sign( + rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], + private_key: PrivateKey, + signer: dns.name.Name, + dnskey: DNSKEY, + inception: Optional[Union[datetime, str, int, float]] = None, + expiration: Optional[Union[datetime, str, int, float]] = None, + lifetime: Optional[int] = None, + verify: bool = False, + policy: Optional[Policy] = None, + origin: Optional[dns.name.Name] = None, + deterministic: bool = True, +) -> RRSIG: + """Sign RRset using private key. + + *rrset*, the RRset to validate. This can be a + ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) + tuple. + + *private_key*, the private key to use for signing, a + ``cryptography.hazmat.primitives.asymmetric`` private key class applicable + for DNSSEC. + + *signer*, a ``dns.name.Name``, the Signer's name. + + *dnskey*, a ``DNSKEY`` matching ``private_key``. + + *inception*, a ``datetime``, ``str``, ``int``, ``float`` or ``None``, the + signature inception time. If ``None``, the current time is used. If a ``str``, the + format is "YYYYMMDDHHMMSS" or alternatively the number of seconds since the UNIX + epoch in text form; this is the same the RRSIG rdata's text form. + Values of type `int` or `float` are interpreted as seconds since the UNIX epoch. + + *expiration*, a ``datetime``, ``str``, ``int``, ``float`` or ``None``, the signature + expiration time. If ``None``, the expiration time will be the inception time plus + the value of the *lifetime* parameter. See the description of *inception* above + for how the various parameter types are interpreted. + + *lifetime*, an ``int`` or ``None``, the signature lifetime in seconds. This + parameter is only meaningful if *expiration* is ``None``. + + *verify*, a ``bool``. If set to ``True``, the signer will verify signatures + after they are created; the default is ``False``. + + *policy*, a ``dns.dnssec.Policy`` or ``None``. If ``None``, the default policy, + ``dns.dnssec.default_policy`` is used; this policy defaults to that of RFC 8624. + + *origin*, a ``dns.name.Name`` or ``None``. If ``None``, the default, then all + names in the rrset (including its owner name) must be absolute; otherwise the + specified origin will be used to make names absolute when signing. + + *deterministic*, a ``bool``. If ``True``, the default, use deterministic + (reproducible) signatures when supported by the algorithm used for signing. + Currently, this only affects ECDSA. + + Raises ``DeniedByPolicy`` if the signature is denied by policy. 
+ """ + + if policy is None: + policy = default_policy + if not policy.ok_to_sign(dnskey): + raise DeniedByPolicy + + if isinstance(rrset, tuple): + rdclass = rrset[1].rdclass + rdtype = rrset[1].rdtype + rrname = rrset[0] + original_ttl = rrset[1].ttl + else: + rdclass = rrset.rdclass + rdtype = rrset.rdtype + rrname = rrset.name + original_ttl = rrset.ttl + + if inception is not None: + rrsig_inception = to_timestamp(inception) + else: + rrsig_inception = int(time.time()) + + if expiration is not None: + rrsig_expiration = to_timestamp(expiration) + elif lifetime is not None: + rrsig_expiration = rrsig_inception + lifetime + else: + raise ValueError("expiration or lifetime must be specified") + + # Derelativize now because we need a correct labels length for the + # rrsig_template. + if origin is not None: + rrname = rrname.derelativize(origin) + labels = len(rrname) - 1 + + # Adjust labels appropriately for wildcards. + if rrname.is_wild(): + labels -= 1 + + rrsig_template = RRSIG( + rdclass=rdclass, + rdtype=dns.rdatatype.RRSIG, + type_covered=rdtype, + algorithm=dnskey.algorithm, + labels=labels, + original_ttl=original_ttl, + expiration=rrsig_expiration, + inception=rrsig_inception, + key_tag=key_id(dnskey), + signer=signer, + signature=b"", + ) + + data = dns.dnssec._make_rrsig_signature_data(rrset, rrsig_template, origin) + + # pylint: disable=possibly-used-before-assignment + if isinstance(private_key, GenericPrivateKey): + signing_key = private_key + else: + try: + private_cls = get_algorithm_cls_from_dnskey(dnskey) + signing_key = private_cls(key=private_key) + except UnsupportedAlgorithm: + raise TypeError("Unsupported key algorithm") + + signature = signing_key.sign(data, verify, deterministic) + + return cast(RRSIG, rrsig_template.replace(signature=signature)) + + +def _make_rrsig_signature_data( + rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], + rrsig: RRSIG, + origin: Optional[dns.name.Name] = None, +) -> bytes: + """Create signature rdata. + + *rrset*, the RRset to sign/validate. This can be a + ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) + tuple. + + *rrsig*, a ``dns.rdata.Rdata``, the signature to validate, or the + signature template used when signing. + + *origin*, a ``dns.name.Name`` or ``None``, the origin to use for relative + names. + + Raises ``UnsupportedAlgorithm`` if the algorithm is recognized by + dnspython but not implemented. + """ + + if isinstance(origin, str): + origin = dns.name.from_text(origin, dns.name.root) + + signer = rrsig.signer + if not signer.is_absolute(): + if origin is None: + raise ValidationFailure("relative RR name without an origin specified") + signer = signer.derelativize(origin) + + # For convenience, allow the rrset to be specified as a (name, + # rdataset) tuple as well as a proper rrset + rrname, rdataset = _get_rrname_rdataset(rrset) + + data = b"" + wire = rrsig.to_wire(origin=signer) + assert wire is not None # for mypy + data += wire[:18] + data += rrsig.signer.to_digestable(signer) + + # Derelativize the name before considering labels. 
+ if not rrname.is_absolute(): + if origin is None: + raise ValidationFailure("relative RR name without an origin specified") + rrname = rrname.derelativize(origin) + + name_len = len(rrname) + if rrname.is_wild() and rrsig.labels != name_len - 2: + raise ValidationFailure("wild owner name has wrong label length") + if name_len - 1 < rrsig.labels: + raise ValidationFailure("owner name longer than RRSIG labels") + elif rrsig.labels < name_len - 1: + suffix = rrname.split(rrsig.labels + 1)[1] + rrname = dns.name.from_text("*", suffix) + rrnamebuf = rrname.to_digestable() + rrfixed = struct.pack("!HHI", rdataset.rdtype, rdataset.rdclass, rrsig.original_ttl) + rdatas = [rdata.to_digestable(origin) for rdata in rdataset] + for rdata in sorted(rdatas): + data += rrnamebuf + data += rrfixed + rrlen = struct.pack("!H", len(rdata)) + data += rrlen + data += rdata + + return data + + +def _make_dnskey( + public_key: PublicKey, + algorithm: Union[int, str], + flags: int = Flag.ZONE, + protocol: int = 3, +) -> DNSKEY: + """Convert a public key to DNSKEY Rdata + + *public_key*, a ``PublicKey`` (``GenericPublicKey`` or + ``cryptography.hazmat.primitives.asymmetric``) to convert. + + *algorithm*, a ``str`` or ``int`` specifying the DNSKEY algorithm. + + *flags*: DNSKEY flags field as an integer. + + *protocol*: DNSKEY protocol field as an integer. + + Raises ``ValueError`` if the specified key algorithm parameters are not + unsupported, ``TypeError`` if the key type is unsupported, + `UnsupportedAlgorithm` if the algorithm is unknown and + `AlgorithmKeyMismatch` if the algorithm does not match the key type. + + Return DNSKEY ``Rdata``. + """ + + algorithm = Algorithm.make(algorithm) + + # pylint: disable=possibly-used-before-assignment + if isinstance(public_key, GenericPublicKey): + return public_key.to_dnskey(flags=flags, protocol=protocol) + else: + public_cls = get_algorithm_cls(algorithm).public_cls + return public_cls(key=public_key).to_dnskey(flags=flags, protocol=protocol) + + +def _make_cdnskey( + public_key: PublicKey, + algorithm: Union[int, str], + flags: int = Flag.ZONE, + protocol: int = 3, +) -> CDNSKEY: + """Convert a public key to CDNSKEY Rdata + + *public_key*, the public key to convert, a + ``cryptography.hazmat.primitives.asymmetric`` public key class applicable + for DNSSEC. + + *algorithm*, a ``str`` or ``int`` specifying the DNSKEY algorithm. + + *flags*: DNSKEY flags field as an integer. + + *protocol*: DNSKEY protocol field as an integer. + + Raises ``ValueError`` if the specified key algorithm parameters are not + unsupported, ``TypeError`` if the key type is unsupported, + `UnsupportedAlgorithm` if the algorithm is unknown and + `AlgorithmKeyMismatch` if the algorithm does not match the key type. + + Return CDNSKEY ``Rdata``. + """ + + dnskey = _make_dnskey(public_key, algorithm, flags, protocol) + + return CDNSKEY( + rdclass=dnskey.rdclass, + rdtype=dns.rdatatype.CDNSKEY, + flags=dnskey.flags, + protocol=dnskey.protocol, + algorithm=dnskey.algorithm, + key=dnskey.key, + ) + + +def nsec3_hash( + domain: Union[dns.name.Name, str], + salt: Optional[Union[str, bytes]], + iterations: int, + algorithm: Union[int, str], +) -> str: + """ + Calculate the NSEC3 hash, according to + https://tools.ietf.org/html/rfc5155#section-5 + + *domain*, a ``dns.name.Name`` or ``str``, the name to hash. + + *salt*, a ``str``, ``bytes``, or ``None``, the hash salt. If a + string, it is decoded as a hex string. + + *iterations*, an ``int``, the number of iterations. 
+ + *algorithm*, a ``str`` or ``int``, the hash algorithm. + The only defined algorithm is SHA1. + + Returns a ``str``, the encoded NSEC3 hash. + """ + + b32_conversion = str.maketrans( + "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567", "0123456789ABCDEFGHIJKLMNOPQRSTUV" + ) + + try: + if isinstance(algorithm, str): + algorithm = NSEC3Hash[algorithm.upper()] + except Exception: + raise ValueError("Wrong hash algorithm (only SHA1 is supported)") + + if algorithm != NSEC3Hash.SHA1: + raise ValueError("Wrong hash algorithm (only SHA1 is supported)") + + if salt is None: + salt_encoded = b"" + elif isinstance(salt, str): + if len(salt) % 2 == 0: + salt_encoded = bytes.fromhex(salt) + else: + raise ValueError("Invalid salt length") + else: + salt_encoded = salt + + if not isinstance(domain, dns.name.Name): + domain = dns.name.from_text(domain) + domain_encoded = domain.canonicalize().to_wire() + assert domain_encoded is not None + + digest = hashlib.sha1(domain_encoded + salt_encoded).digest() + for _ in range(iterations): + digest = hashlib.sha1(digest + salt_encoded).digest() + + output = base64.b32encode(digest).decode("utf-8") + output = output.translate(b32_conversion) + + return output + + +def make_ds_rdataset( + rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], + algorithms: Set[Union[DSDigest, str]], + origin: Optional[dns.name.Name] = None, +) -> dns.rdataset.Rdataset: + """Create a DS record from DNSKEY/CDNSKEY/CDS. + + *rrset*, the RRset to create DS Rdataset for. This can be a + ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) + tuple. + + *algorithms*, a set of ``str`` or ``int`` specifying the hash algorithms. + The currently supported hashes are "SHA1", "SHA256", and "SHA384". Case + does not matter for these strings. If the RRset is a CDS, only digest + algorithms matching algorithms are accepted. + + *origin*, a ``dns.name.Name`` or ``None``. If `key` is a relative name, + then it will be made absolute using the specified origin. + + Raises ``UnsupportedAlgorithm`` if any of the algorithms are unknown and + ``ValueError`` if the given RRset is not usable. + + Returns a ``dns.rdataset.Rdataset`` + """ + + rrname, rdataset = _get_rrname_rdataset(rrset) + + if rdataset.rdtype not in ( + dns.rdatatype.DNSKEY, + dns.rdatatype.CDNSKEY, + dns.rdatatype.CDS, + ): + raise ValueError("rrset not a DNSKEY/CDNSKEY/CDS") + + _algorithms = set() + for algorithm in algorithms: + try: + if isinstance(algorithm, str): + algorithm = DSDigest[algorithm.upper()] + except Exception: + raise UnsupportedAlgorithm(f'unsupported algorithm "{algorithm}"') + _algorithms.add(algorithm) + + if rdataset.rdtype == dns.rdatatype.CDS: + res = [] + for rdata in cds_rdataset_to_ds_rdataset(rdataset): + if rdata.digest_type in _algorithms: + res.append(rdata) + if len(res) == 0: + raise ValueError("no acceptable CDS rdata found") + return dns.rdataset.from_rdata_list(rdataset.ttl, res) + + res = [] + for algorithm in _algorithms: + res.extend(dnskey_rdataset_to_cds_rdataset(rrname, rdataset, algorithm, origin)) + return dns.rdataset.from_rdata_list(rdataset.ttl, res) + + +def cds_rdataset_to_ds_rdataset( + rdataset: dns.rdataset.Rdataset, +) -> dns.rdataset.Rdataset: + """Create a CDS record from DS. + + *rdataset*, a ``dns.rdataset.Rdataset``, to create DS Rdataset for. + + Raises ``ValueError`` if the rdataset is not CDS. 
+ + Returns a ``dns.rdataset.Rdataset`` + """ + + if rdataset.rdtype != dns.rdatatype.CDS: + raise ValueError("rdataset not a CDS") + res = [] + for rdata in rdataset: + res.append( + CDS( + rdclass=rdata.rdclass, + rdtype=dns.rdatatype.DS, + key_tag=rdata.key_tag, + algorithm=rdata.algorithm, + digest_type=rdata.digest_type, + digest=rdata.digest, + ) + ) + return dns.rdataset.from_rdata_list(rdataset.ttl, res) + + +def dnskey_rdataset_to_cds_rdataset( + name: Union[dns.name.Name, str], + rdataset: dns.rdataset.Rdataset, + algorithm: Union[DSDigest, str], + origin: Optional[dns.name.Name] = None, +) -> dns.rdataset.Rdataset: + """Create a CDS record from DNSKEY/CDNSKEY. + + *name*, a ``dns.name.Name`` or ``str``, the owner name of the CDS record. + + *rdataset*, a ``dns.rdataset.Rdataset``, to create DS Rdataset for. + + *algorithm*, a ``str`` or ``int`` specifying the hash algorithm. + The currently supported hashes are "SHA1", "SHA256", and "SHA384". Case + does not matter for these strings. + + *origin*, a ``dns.name.Name`` or ``None``. If `key` is a relative name, + then it will be made absolute using the specified origin. + + Raises ``UnsupportedAlgorithm`` if the algorithm is unknown or + ``ValueError`` if the rdataset is not DNSKEY/CDNSKEY. + + Returns a ``dns.rdataset.Rdataset`` + """ + + if rdataset.rdtype not in (dns.rdatatype.DNSKEY, dns.rdatatype.CDNSKEY): + raise ValueError("rdataset not a DNSKEY/CDNSKEY") + res = [] + for rdata in rdataset: + res.append(make_cds(name, rdata, algorithm, origin)) + return dns.rdataset.from_rdata_list(rdataset.ttl, res) + + +def dnskey_rdataset_to_cdnskey_rdataset( + rdataset: dns.rdataset.Rdataset, +) -> dns.rdataset.Rdataset: + """Create a CDNSKEY record from DNSKEY. + + *rdataset*, a ``dns.rdataset.Rdataset``, to create CDNSKEY Rdataset for. 
+
+ Returns a ``dns.rdataset.Rdataset``
+ """
+
+ if rdataset.rdtype != dns.rdatatype.DNSKEY:
+ raise ValueError("rdataset not a DNSKEY")
+ res = []
+ for rdata in rdataset:
+ res.append(
+ CDNSKEY(
+ rdclass=rdataset.rdclass,
+ rdtype=rdataset.rdtype,
+ flags=rdata.flags,
+ protocol=rdata.protocol,
+ algorithm=rdata.algorithm,
+ key=rdata.key,
+ )
+ )
+ return dns.rdataset.from_rdata_list(rdataset.ttl, res)
+
+
+ def default_rrset_signer(
+ txn: dns.transaction.Transaction,
+ rrset: dns.rrset.RRset,
+ signer: dns.name.Name,
+ ksks: List[Tuple[PrivateKey, DNSKEY]],
+ zsks: List[Tuple[PrivateKey, DNSKEY]],
+ inception: Optional[Union[datetime, str, int, float]] = None,
+ expiration: Optional[Union[datetime, str, int, float]] = None,
+ lifetime: Optional[int] = None,
+ policy: Optional[Policy] = None,
+ origin: Optional[dns.name.Name] = None,
+ deterministic: bool = True,
+ ) -> None:
+ """Default RRset signer"""
+
+ if rrset.rdtype in set(
+ [
+ dns.rdatatype.RdataType.DNSKEY,
+ dns.rdatatype.RdataType.CDS,
+ dns.rdatatype.RdataType.CDNSKEY,
+ ]
+ ):
+ keys = ksks
+ else:
+ keys = zsks
+
+ for private_key, dnskey in keys:
+ rrsig = dns.dnssec.sign(
+ rrset=rrset,
+ private_key=private_key,
+ dnskey=dnskey,
+ inception=inception,
+ expiration=expiration,
+ lifetime=lifetime,
+ signer=signer,
+ policy=policy,
+ origin=origin,
+ deterministic=deterministic,
+ )
+ txn.add(rrset.name, rrset.ttl, rrsig)
+
+
+ def sign_zone(
+ zone: dns.zone.Zone,
+ txn: Optional[dns.transaction.Transaction] = None,
+ keys: Optional[List[Tuple[PrivateKey, DNSKEY]]] = None,
+ add_dnskey: bool = True,
+ dnskey_ttl: Optional[int] = None,
+ inception: Optional[Union[datetime, str, int, float]] = None,
+ expiration: Optional[Union[datetime, str, int, float]] = None,
+ lifetime: Optional[int] = None,
+ nsec3: Optional[NSEC3PARAM] = None,
+ rrset_signer: Optional[RRsetSigner] = None,
+ policy: Optional[Policy] = None,
+ deterministic: bool = True,
+ ) -> None:
+ """Sign zone.
+
+ *zone*, a ``dns.zone.Zone``, the zone to sign.
+
+ *txn*, a ``dns.transaction.Transaction``, an optional transaction to use for
+ signing.
+
+ *keys*, a list of (``PrivateKey``, ``DNSKEY``) tuples, to use for signing. KSK/ZSK
+ roles are assigned automatically if the SEP flag is used, otherwise all RRsets are
+ signed by all keys.
+
+ *add_dnskey*, a ``bool``. If ``True``, the default, all specified DNSKEYs are
+ automatically added to the zone on signing.
+
+ *dnskey_ttl*, an ``int``, specifies the TTL for DNSKEY RRs. If not specified, the
+ TTL of the existing DNSKEY RRset is used, or the TTL of the SOA RRset.
+
+ *inception*, a ``datetime``, ``str``, ``int``, ``float`` or ``None``, the signature
+ inception time. If ``None``, the current time is used. If a ``str``, the format is
+ "YYYYMMDDHHMMSS" or alternatively the number of seconds since the UNIX epoch in text
+ form; this is the same as the RRSIG rdata's text form. Values of type `int` or
+ `float` are interpreted as seconds since the UNIX epoch.
+
+ *expiration*, a ``datetime``, ``str``, ``int``, ``float`` or ``None``, the signature
+ expiration time. If ``None``, the expiration time will be the inception time plus
+ the value of the *lifetime* parameter. See the description of *inception* above for
+ how the various parameter types are interpreted.
+
+ *lifetime*, an ``int`` or ``None``, the signature lifetime in seconds. This
+ parameter is only meaningful if *expiration* is ``None``.
+
+ *nsec3*, a ``NSEC3PARAM`` Rdata, configures signing using NSEC3. Not yet
+ implemented.
+
+ *rrset_signer*, a ``Callable``, an optional function for signing RRsets. The
+ function requires two arguments: transaction and RRset. If not specified,
+ ``dns.dnssec.default_rrset_signer`` will be used.
+
+ *deterministic*, a ``bool``. If ``True``, the default, use deterministic
+ (reproducible) signatures when supported by the algorithm used for signing.
+ Currently, this only affects ECDSA.
+
+ Returns ``None``.
+ """
+
+ ksks = []
+ zsks = []
+
+ # if we have both KSKs and ZSKs, split by SEP flag. if not, sign all
+ # records with all keys
+ if keys:
+ for key in keys:
+ if key[1].flags & Flag.SEP:
+ ksks.append(key)
+ else:
+ zsks.append(key)
+ if not ksks:
+ ksks = keys
+ if not zsks:
+ zsks = keys
+ else:
+ keys = []
+
+ if txn:
+ cm: contextlib.AbstractContextManager = contextlib.nullcontext(txn)
+ else:
+ cm = zone.writer()
+
+ if zone.origin is None:
+ raise ValueError("no zone origin")
+
+ with cm as _txn:
+ if add_dnskey:
+ if dnskey_ttl is None:
+ dnskey = _txn.get(zone.origin, dns.rdatatype.DNSKEY)
+ if dnskey:
+ dnskey_ttl = dnskey.ttl
+ else:
+ soa = _txn.get(zone.origin, dns.rdatatype.SOA)
+ dnskey_ttl = soa.ttl
+ for _, dnskey in keys:
+ _txn.add(zone.origin, dnskey_ttl, dnskey)
+
+ if nsec3:
+ raise NotImplementedError("Signing with NSEC3 not yet implemented")
+ else:
+ _rrset_signer = rrset_signer or functools.partial(
+ default_rrset_signer,
+ signer=zone.origin,
+ ksks=ksks,
+ zsks=zsks,
+ inception=inception,
+ expiration=expiration,
+ lifetime=lifetime,
+ policy=policy,
+ origin=zone.origin,
+ deterministic=deterministic,
+ )
+ return _sign_zone_nsec(zone, _txn, _rrset_signer)
+
+
+ def _sign_zone_nsec(
+ zone: dns.zone.Zone,
+ txn: dns.transaction.Transaction,
+ rrset_signer: Optional[RRsetSigner] = None,
+ ) -> None:
+ """NSEC zone signer"""
+
+ def _txn_add_nsec(
+ txn: dns.transaction.Transaction,
+ name: dns.name.Name,
+ next_secure: Optional[dns.name.Name],
+ rdclass: dns.rdataclass.RdataClass,
+ ttl: int,
+ rrset_signer: Optional[RRsetSigner] = None,
+ ) -> None:
+ """NSEC zone signer helper"""
+ mandatory_types = set(
+ [dns.rdatatype.RdataType.RRSIG, dns.rdatatype.RdataType.NSEC]
+ )
+ node = txn.get_node(name)
+ if node and next_secure:
+ types = (
+ set([rdataset.rdtype for rdataset in node.rdatasets]) | mandatory_types
+ )
+ windows = Bitmap.from_rdtypes(list(types))
+ rrset = dns.rrset.from_rdata(
+ name,
+ ttl,
+ NSEC(
+ rdclass=rdclass,
+ rdtype=dns.rdatatype.RdataType.NSEC,
+ next=next_secure,
+ windows=windows,
+ ),
+ )
+ txn.add(rrset)
+ if rrset_signer:
+ rrset_signer(txn, rrset)
+
+ rrsig_ttl = zone.get_soa().minimum
+ delegation = None
+ last_secure = None
+
+ for name in sorted(txn.iterate_names()):
+ if delegation and name.is_subdomain(delegation):
+ # names below delegations are not secure
+ continue
+ elif txn.get(name, dns.rdatatype.NS) and name != zone.origin:
+ # inside delegation
+ delegation = name
+ else:
+ # outside delegation
+ delegation = None
+
+ if rrset_signer:
+ node = txn.get_node(name)
+ if node:
+ for rdataset in node.rdatasets:
+ if rdataset.rdtype == dns.rdatatype.RRSIG:
+ # do not sign RRSIGs
+ continue
+ elif delegation and rdataset.rdtype != dns.rdatatype.DS:
+ # do not sign delegations except DS records
+ continue
+ else:
+ rrset = dns.rrset.from_rdata(name, rdataset.ttl, *rdataset)
+ rrset_signer(txn, rrset)
+
+ # We need "is not None" as the empty name is False because its length is 0.
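+ # Each iteration links the previous secure name to the current one,
+ # building the canonical NSEC chain; the final NSEC added after the
+ # loop wraps around from the last secure name back to the zone origin.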
+ if last_secure is not None: + _txn_add_nsec(txn, last_secure, name, zone.rdclass, rrsig_ttl, rrset_signer) + last_secure = name + + if last_secure: + _txn_add_nsec( + txn, last_secure, zone.origin, zone.rdclass, rrsig_ttl, rrset_signer + ) + + +def _need_pyca(*args, **kwargs): + raise ImportError( + "DNSSEC validation requires python cryptography" + ) # pragma: no cover + + +if dns._features.have("dnssec"): + from cryptography.exceptions import InvalidSignature + from cryptography.hazmat.primitives.asymmetric import dsa # pylint: disable=W0611 + from cryptography.hazmat.primitives.asymmetric import ec # pylint: disable=W0611 + from cryptography.hazmat.primitives.asymmetric import ed448 # pylint: disable=W0611 + from cryptography.hazmat.primitives.asymmetric import rsa # pylint: disable=W0611 + from cryptography.hazmat.primitives.asymmetric import ( # pylint: disable=W0611 + ed25519, + ) + + from dns.dnssecalgs import ( # pylint: disable=C0412 + get_algorithm_cls, + get_algorithm_cls_from_dnskey, + ) + from dns.dnssecalgs.base import GenericPrivateKey, GenericPublicKey + + validate = _validate # type: ignore + validate_rrsig = _validate_rrsig # type: ignore + sign = _sign + make_dnskey = _make_dnskey + make_cdnskey = _make_cdnskey + _have_pyca = True +else: # pragma: no cover + validate = _need_pyca + validate_rrsig = _need_pyca + sign = _need_pyca + make_dnskey = _need_pyca + make_cdnskey = _need_pyca + _have_pyca = False + +### BEGIN generated Algorithm constants + +RSAMD5 = Algorithm.RSAMD5 +DH = Algorithm.DH +DSA = Algorithm.DSA +ECC = Algorithm.ECC +RSASHA1 = Algorithm.RSASHA1 +DSANSEC3SHA1 = Algorithm.DSANSEC3SHA1 +RSASHA1NSEC3SHA1 = Algorithm.RSASHA1NSEC3SHA1 +RSASHA256 = Algorithm.RSASHA256 +RSASHA512 = Algorithm.RSASHA512 +ECCGOST = Algorithm.ECCGOST +ECDSAP256SHA256 = Algorithm.ECDSAP256SHA256 +ECDSAP384SHA384 = Algorithm.ECDSAP384SHA384 +ED25519 = Algorithm.ED25519 +ED448 = Algorithm.ED448 +INDIRECT = Algorithm.INDIRECT +PRIVATEDNS = Algorithm.PRIVATEDNS +PRIVATEOID = Algorithm.PRIVATEOID + +### END generated Algorithm constants diff --git a/venv/lib/python3.11/site-packages/dns/dnssecalgs/__init__.py b/venv/lib/python3.11/site-packages/dns/dnssecalgs/__init__.py new file mode 100644 index 0000000..602367e --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/dnssecalgs/__init__.py @@ -0,0 +1,121 @@ +from typing import Dict, Optional, Tuple, Type, Union + +import dns.name +from dns.dnssecalgs.base import GenericPrivateKey +from dns.dnssectypes import Algorithm +from dns.exception import UnsupportedAlgorithm +from dns.rdtypes.ANY.DNSKEY import DNSKEY + +if dns._features.have("dnssec"): + from dns.dnssecalgs.dsa import PrivateDSA, PrivateDSANSEC3SHA1 + from dns.dnssecalgs.ecdsa import PrivateECDSAP256SHA256, PrivateECDSAP384SHA384 + from dns.dnssecalgs.eddsa import PrivateED448, PrivateED25519 + from dns.dnssecalgs.rsa import ( + PrivateRSAMD5, + PrivateRSASHA1, + PrivateRSASHA1NSEC3SHA1, + PrivateRSASHA256, + PrivateRSASHA512, + ) + + _have_cryptography = True +else: + _have_cryptography = False + +AlgorithmPrefix = Optional[Union[bytes, dns.name.Name]] + +algorithms: Dict[Tuple[Algorithm, AlgorithmPrefix], Type[GenericPrivateKey]] = {} +if _have_cryptography: + # pylint: disable=possibly-used-before-assignment + algorithms.update( + { + (Algorithm.RSAMD5, None): PrivateRSAMD5, + (Algorithm.DSA, None): PrivateDSA, + (Algorithm.RSASHA1, None): PrivateRSASHA1, + (Algorithm.DSANSEC3SHA1, None): PrivateDSANSEC3SHA1, + (Algorithm.RSASHA1NSEC3SHA1, None): PrivateRSASHA1NSEC3SHA1, 
+            (Algorithm.RSASHA256, None): PrivateRSASHA256,
+            (Algorithm.RSASHA512, None): PrivateRSASHA512,
+            (Algorithm.ECDSAP256SHA256, None): PrivateECDSAP256SHA256,
+            (Algorithm.ECDSAP384SHA384, None): PrivateECDSAP384SHA384,
+            (Algorithm.ED25519, None): PrivateED25519,
+            (Algorithm.ED448, None): PrivateED448,
+        }
+    )
+
+
+def get_algorithm_cls(
+    algorithm: Union[int, str], prefix: AlgorithmPrefix = None
+) -> Type[GenericPrivateKey]:
+    """Get Private Key class from Algorithm.
+
+    *algorithm*, a ``str`` or ``int`` specifying the DNSKEY algorithm.
+
+    Raises ``UnsupportedAlgorithm`` if the algorithm is unknown.
+
+    Returns a ``dns.dnssecalgs.GenericPrivateKey``
+    """
+    algorithm = Algorithm.make(algorithm)
+    cls = algorithms.get((algorithm, prefix))
+    if cls:
+        return cls
+    raise UnsupportedAlgorithm(
+        f'algorithm "{Algorithm.to_text(algorithm)}" not supported by dnspython'
+    )
+
+
+def get_algorithm_cls_from_dnskey(dnskey: DNSKEY) -> Type[GenericPrivateKey]:
+    """Get Private Key class from DNSKEY.
+
+    *dnskey*, a ``DNSKEY`` to get Algorithm class for.
+
+    Raises ``UnsupportedAlgorithm`` if the algorithm is unknown.
+
+    Returns a ``dns.dnssecalgs.GenericPrivateKey``
+    """
+    prefix: AlgorithmPrefix = None
+    if dnskey.algorithm == Algorithm.PRIVATEDNS:
+        prefix, _ = dns.name.from_wire(dnskey.key, 0)
+    elif dnskey.algorithm == Algorithm.PRIVATEOID:
+        length = int(dnskey.key[0])
+        prefix = dnskey.key[0 : length + 1]
+    return get_algorithm_cls(dnskey.algorithm, prefix)
+
+
+def register_algorithm_cls(
+    algorithm: Union[int, str],
+    algorithm_cls: Type[GenericPrivateKey],
+    name: Optional[Union[dns.name.Name, str]] = None,
+    oid: Optional[bytes] = None,
+) -> None:
+    """Register Algorithm Private Key class.
+
+    *algorithm*, a ``str`` or ``int`` specifying the DNSKEY algorithm.
+
+    *algorithm_cls*: A `GenericPrivateKey` class.
+
+    *name*, an optional ``dns.name.Name`` or ``str``, for PRIVATEDNS algorithms.
+
+    *oid*: an optional BER-encoded `bytes` for PRIVATEOID algorithms.
+
+    Raises ``ValueError`` if a name or oid is specified incorrectly.
+ """ + if not issubclass(algorithm_cls, GenericPrivateKey): + raise TypeError("Invalid algorithm class") + algorithm = Algorithm.make(algorithm) + prefix: AlgorithmPrefix = None + if algorithm == Algorithm.PRIVATEDNS: + if name is None: + raise ValueError("Name required for PRIVATEDNS algorithms") + if isinstance(name, str): + name = dns.name.from_text(name) + prefix = name + elif algorithm == Algorithm.PRIVATEOID: + if oid is None: + raise ValueError("OID required for PRIVATEOID algorithms") + prefix = bytes([len(oid)]) + oid + elif name: + raise ValueError("Name only supported for PRIVATEDNS algorithm") + elif oid: + raise ValueError("OID only supported for PRIVATEOID algorithm") + algorithms[(algorithm, prefix)] = algorithm_cls diff --git a/venv/lib/python3.11/site-packages/dns/dnssecalgs/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/dnssecalgs/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..f3972ec Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/dnssecalgs/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/dnssecalgs/__pycache__/base.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/dnssecalgs/__pycache__/base.cpython-311.pyc new file mode 100644 index 0000000..d059de7 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/dnssecalgs/__pycache__/base.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/dnssecalgs/__pycache__/cryptography.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/dnssecalgs/__pycache__/cryptography.cpython-311.pyc new file mode 100644 index 0000000..b46ed4e Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/dnssecalgs/__pycache__/cryptography.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/dnssecalgs/__pycache__/dsa.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/dnssecalgs/__pycache__/dsa.cpython-311.pyc new file mode 100644 index 0000000..a93ad09 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/dnssecalgs/__pycache__/dsa.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/dnssecalgs/__pycache__/ecdsa.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/dnssecalgs/__pycache__/ecdsa.cpython-311.pyc new file mode 100644 index 0000000..994c895 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/dnssecalgs/__pycache__/ecdsa.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/dnssecalgs/__pycache__/eddsa.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/dnssecalgs/__pycache__/eddsa.cpython-311.pyc new file mode 100644 index 0000000..c26e54b Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/dnssecalgs/__pycache__/eddsa.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/dnssecalgs/__pycache__/rsa.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/dnssecalgs/__pycache__/rsa.cpython-311.pyc new file mode 100644 index 0000000..d64c6f3 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/dnssecalgs/__pycache__/rsa.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/dnssecalgs/base.py b/venv/lib/python3.11/site-packages/dns/dnssecalgs/base.py new file mode 100644 index 0000000..752ee48 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/dnssecalgs/base.py @@ -0,0 +1,89 @@ +from abc import ABC, abstractmethod # pylint: disable=no-name-in-module +from typing import Any, Optional, Type + +import 
dns.rdataclass
+import dns.rdatatype
+from dns.dnssectypes import Algorithm
+from dns.exception import AlgorithmKeyMismatch
+from dns.rdtypes.ANY.DNSKEY import DNSKEY
+from dns.rdtypes.dnskeybase import Flag
+
+
+class GenericPublicKey(ABC):
+    algorithm: Algorithm
+
+    @abstractmethod
+    def __init__(self, key: Any) -> None:
+        pass
+
+    @abstractmethod
+    def verify(self, signature: bytes, data: bytes) -> None:
+        """Verify signed DNSSEC data"""
+
+    @abstractmethod
+    def encode_key_bytes(self) -> bytes:
+        """Encode key as bytes for DNSKEY"""
+
+    @classmethod
+    def _ensure_algorithm_key_combination(cls, key: DNSKEY) -> None:
+        if key.algorithm != cls.algorithm:
+            raise AlgorithmKeyMismatch
+
+    def to_dnskey(self, flags: int = Flag.ZONE, protocol: int = 3) -> DNSKEY:
+        """Return public key as DNSKEY"""
+        return DNSKEY(
+            rdclass=dns.rdataclass.IN,
+            rdtype=dns.rdatatype.DNSKEY,
+            flags=flags,
+            protocol=protocol,
+            algorithm=self.algorithm,
+            key=self.encode_key_bytes(),
+        )
+
+    @classmethod
+    @abstractmethod
+    def from_dnskey(cls, key: DNSKEY) -> "GenericPublicKey":
+        """Create public key from DNSKEY"""
+
+    @classmethod
+    @abstractmethod
+    def from_pem(cls, public_pem: bytes) -> "GenericPublicKey":
+        """Create public key from PEM-encoded SubjectPublicKeyInfo as specified
+        in RFC 5280"""
+
+    @abstractmethod
+    def to_pem(self) -> bytes:
+        """Return public key as PEM-encoded SubjectPublicKeyInfo as specified
+        in RFC 5280"""
+
+
+class GenericPrivateKey(ABC):
+    public_cls: Type[GenericPublicKey]
+
+    @abstractmethod
+    def __init__(self, key: Any) -> None:
+        pass
+
+    @abstractmethod
+    def sign(
+        self,
+        data: bytes,
+        verify: bool = False,
+        deterministic: bool = True,
+    ) -> bytes:
+        """Sign DNSSEC data"""
+
+    @abstractmethod
+    def public_key(self) -> "GenericPublicKey":
+        """Return public key instance"""
+
+    @classmethod
+    @abstractmethod
+    def from_pem(
+        cls, private_pem: bytes, password: Optional[bytes] = None
+    ) -> "GenericPrivateKey":
+        """Create private key from PEM-encoded PKCS#8"""
+
+    @abstractmethod
+    def to_pem(self, password: Optional[bytes] = None) -> bytes:
+        """Return private key as PEM-encoded PKCS#8""" diff --git a/venv/lib/python3.11/site-packages/dns/dnssecalgs/cryptography.py b/venv/lib/python3.11/site-packages/dns/dnssecalgs/cryptography.py new file mode 100644 index 0000000..5a31a81 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/dnssecalgs/cryptography.py @@ -0,0 +1,68 @@
+from typing import Any, Optional, Type
+
+from cryptography.hazmat.primitives import serialization
+
+from dns.dnssecalgs.base import GenericPrivateKey, GenericPublicKey
+from dns.exception import AlgorithmKeyMismatch
+
+
+class CryptographyPublicKey(GenericPublicKey):
+    key: Any = None
+    key_cls: Any = None
+
+    def __init__(self, key: Any) -> None:  # pylint: disable=super-init-not-called
+        if self.key_cls is None:
+            raise TypeError("Undefined public key class")
+        if not isinstance(  # pylint: disable=isinstance-second-argument-not-valid-type
+            key, self.key_cls
+        ):
+            raise AlgorithmKeyMismatch
+        self.key = key
+
+    @classmethod
+    def from_pem(cls, public_pem: bytes) -> "GenericPublicKey":
+        key = serialization.load_pem_public_key(public_pem)
+        return cls(key=key)
+
+    def to_pem(self) -> bytes:
+        return self.key.public_bytes(
+            encoding=serialization.Encoding.PEM,
+            format=serialization.PublicFormat.SubjectPublicKeyInfo,
+        )
+
+
+class CryptographyPrivateKey(GenericPrivateKey):
+    key: Any = None
+    key_cls: Any = None
+    public_cls: Type[CryptographyPublicKey]
+
+    def __init__(self, key: Any) ->
None: # pylint: disable=super-init-not-called + if self.key_cls is None: + raise TypeError("Undefined private key class") + if not isinstance( # pylint: disable=isinstance-second-argument-not-valid-type + key, self.key_cls + ): + raise AlgorithmKeyMismatch + self.key = key + + def public_key(self) -> "CryptographyPublicKey": + return self.public_cls(key=self.key.public_key()) + + @classmethod + def from_pem( + cls, private_pem: bytes, password: Optional[bytes] = None + ) -> "GenericPrivateKey": + key = serialization.load_pem_private_key(private_pem, password=password) + return cls(key=key) + + def to_pem(self, password: Optional[bytes] = None) -> bytes: + encryption_algorithm: serialization.KeySerializationEncryption + if password: + encryption_algorithm = serialization.BestAvailableEncryption(password) + else: + encryption_algorithm = serialization.NoEncryption() + return self.key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=encryption_algorithm, + ) diff --git a/venv/lib/python3.11/site-packages/dns/dnssecalgs/dsa.py b/venv/lib/python3.11/site-packages/dns/dnssecalgs/dsa.py new file mode 100644 index 0000000..adca3de --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/dnssecalgs/dsa.py @@ -0,0 +1,106 @@ +import struct + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import dsa, utils + +from dns.dnssecalgs.cryptography import CryptographyPrivateKey, CryptographyPublicKey +from dns.dnssectypes import Algorithm +from dns.rdtypes.ANY.DNSKEY import DNSKEY + + +class PublicDSA(CryptographyPublicKey): + key: dsa.DSAPublicKey + key_cls = dsa.DSAPublicKey + algorithm = Algorithm.DSA + chosen_hash = hashes.SHA1() + + def verify(self, signature: bytes, data: bytes) -> None: + sig_r = signature[1:21] + sig_s = signature[21:] + sig = utils.encode_dss_signature( + int.from_bytes(sig_r, "big"), int.from_bytes(sig_s, "big") + ) + self.key.verify(sig, data, self.chosen_hash) + + def encode_key_bytes(self) -> bytes: + """Encode a public key per RFC 2536, section 2.""" + pn = self.key.public_numbers() + dsa_t = (self.key.key_size // 8 - 64) // 8 + if dsa_t > 8: + raise ValueError("unsupported DSA key size") + octets = 64 + dsa_t * 8 + res = struct.pack("!B", dsa_t) + res += pn.parameter_numbers.q.to_bytes(20, "big") + res += pn.parameter_numbers.p.to_bytes(octets, "big") + res += pn.parameter_numbers.g.to_bytes(octets, "big") + res += pn.y.to_bytes(octets, "big") + return res + + @classmethod + def from_dnskey(cls, key: DNSKEY) -> "PublicDSA": + cls._ensure_algorithm_key_combination(key) + keyptr = key.key + (t,) = struct.unpack("!B", keyptr[0:1]) + keyptr = keyptr[1:] + octets = 64 + t * 8 + dsa_q = keyptr[0:20] + keyptr = keyptr[20:] + dsa_p = keyptr[0:octets] + keyptr = keyptr[octets:] + dsa_g = keyptr[0:octets] + keyptr = keyptr[octets:] + dsa_y = keyptr[0:octets] + return cls( + key=dsa.DSAPublicNumbers( # type: ignore + int.from_bytes(dsa_y, "big"), + dsa.DSAParameterNumbers( + int.from_bytes(dsa_p, "big"), + int.from_bytes(dsa_q, "big"), + int.from_bytes(dsa_g, "big"), + ), + ).public_key(default_backend()), + ) + + +class PrivateDSA(CryptographyPrivateKey): + key: dsa.DSAPrivateKey + key_cls = dsa.DSAPrivateKey + public_cls = PublicDSA + + def sign( + self, + data: bytes, + verify: bool = False, + deterministic: bool = True, + ) -> bytes: + """Sign using a private key per RFC 2536, section 3.""" + 
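+        # Editorial note (not part of upstream dnspython): per RFC 2536 the
+        # wire-format signature is a one-octet T value followed by r and s
+        # as two 20-octet big-endian integers, which is exactly what the
+        # struct packing below assembles.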
public_dsa_key = self.key.public_key() + if public_dsa_key.key_size > 1024: + raise ValueError("DSA key size overflow") + der_signature = self.key.sign(data, self.public_cls.chosen_hash) + dsa_r, dsa_s = utils.decode_dss_signature(der_signature) + dsa_t = (public_dsa_key.key_size // 8 - 64) // 8 + octets = 20 + signature = ( + struct.pack("!B", dsa_t) + + int.to_bytes(dsa_r, length=octets, byteorder="big") + + int.to_bytes(dsa_s, length=octets, byteorder="big") + ) + if verify: + self.public_key().verify(signature, data) + return signature + + @classmethod + def generate(cls, key_size: int) -> "PrivateDSA": + return cls( + key=dsa.generate_private_key(key_size=key_size), + ) + + +class PublicDSANSEC3SHA1(PublicDSA): + algorithm = Algorithm.DSANSEC3SHA1 + + +class PrivateDSANSEC3SHA1(PrivateDSA): + public_cls = PublicDSANSEC3SHA1 diff --git a/venv/lib/python3.11/site-packages/dns/dnssecalgs/ecdsa.py b/venv/lib/python3.11/site-packages/dns/dnssecalgs/ecdsa.py new file mode 100644 index 0000000..86d5764 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/dnssecalgs/ecdsa.py @@ -0,0 +1,97 @@ +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ec, utils + +from dns.dnssecalgs.cryptography import CryptographyPrivateKey, CryptographyPublicKey +from dns.dnssectypes import Algorithm +from dns.rdtypes.ANY.DNSKEY import DNSKEY + + +class PublicECDSA(CryptographyPublicKey): + key: ec.EllipticCurvePublicKey + key_cls = ec.EllipticCurvePublicKey + algorithm: Algorithm + chosen_hash: hashes.HashAlgorithm + curve: ec.EllipticCurve + octets: int + + def verify(self, signature: bytes, data: bytes) -> None: + sig_r = signature[0 : self.octets] + sig_s = signature[self.octets :] + sig = utils.encode_dss_signature( + int.from_bytes(sig_r, "big"), int.from_bytes(sig_s, "big") + ) + self.key.verify(sig, data, ec.ECDSA(self.chosen_hash)) + + def encode_key_bytes(self) -> bytes: + """Encode a public key per RFC 6605, section 4.""" + pn = self.key.public_numbers() + return pn.x.to_bytes(self.octets, "big") + pn.y.to_bytes(self.octets, "big") + + @classmethod + def from_dnskey(cls, key: DNSKEY) -> "PublicECDSA": + cls._ensure_algorithm_key_combination(key) + ecdsa_x = key.key[0 : cls.octets] + ecdsa_y = key.key[cls.octets : cls.octets * 2] + return cls( + key=ec.EllipticCurvePublicNumbers( + curve=cls.curve, + x=int.from_bytes(ecdsa_x, "big"), + y=int.from_bytes(ecdsa_y, "big"), + ).public_key(default_backend()), + ) + + +class PrivateECDSA(CryptographyPrivateKey): + key: ec.EllipticCurvePrivateKey + key_cls = ec.EllipticCurvePrivateKey + public_cls = PublicECDSA + + def sign( + self, + data: bytes, + verify: bool = False, + deterministic: bool = True, + ) -> bytes: + """Sign using a private key per RFC 6605, section 4.""" + algorithm = ec.ECDSA( + self.public_cls.chosen_hash, deterministic_signing=deterministic + ) + der_signature = self.key.sign(data, algorithm) + dsa_r, dsa_s = utils.decode_dss_signature(der_signature) + signature = int.to_bytes( + dsa_r, length=self.public_cls.octets, byteorder="big" + ) + int.to_bytes(dsa_s, length=self.public_cls.octets, byteorder="big") + if verify: + self.public_key().verify(signature, data) + return signature + + @classmethod + def generate(cls) -> "PrivateECDSA": + return cls( + key=ec.generate_private_key( + curve=cls.public_cls.curve, backend=default_backend() + ), + ) + + +class PublicECDSAP256SHA256(PublicECDSA): + algorithm = Algorithm.ECDSAP256SHA256 
+ chosen_hash = hashes.SHA256() + curve = ec.SECP256R1() + octets = 32 + + +class PrivateECDSAP256SHA256(PrivateECDSA): + public_cls = PublicECDSAP256SHA256 + + +class PublicECDSAP384SHA384(PublicECDSA): + algorithm = Algorithm.ECDSAP384SHA384 + chosen_hash = hashes.SHA384() + curve = ec.SECP384R1() + octets = 48 + + +class PrivateECDSAP384SHA384(PrivateECDSA): + public_cls = PublicECDSAP384SHA384 diff --git a/venv/lib/python3.11/site-packages/dns/dnssecalgs/eddsa.py b/venv/lib/python3.11/site-packages/dns/dnssecalgs/eddsa.py new file mode 100644 index 0000000..604bcbf --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/dnssecalgs/eddsa.py @@ -0,0 +1,70 @@ +from typing import Type + +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import ed448, ed25519 + +from dns.dnssecalgs.cryptography import CryptographyPrivateKey, CryptographyPublicKey +from dns.dnssectypes import Algorithm +from dns.rdtypes.ANY.DNSKEY import DNSKEY + + +class PublicEDDSA(CryptographyPublicKey): + def verify(self, signature: bytes, data: bytes) -> None: + self.key.verify(signature, data) + + def encode_key_bytes(self) -> bytes: + """Encode a public key per RFC 8080, section 3.""" + return self.key.public_bytes( + encoding=serialization.Encoding.Raw, format=serialization.PublicFormat.Raw + ) + + @classmethod + def from_dnskey(cls, key: DNSKEY) -> "PublicEDDSA": + cls._ensure_algorithm_key_combination(key) + return cls( + key=cls.key_cls.from_public_bytes(key.key), + ) + + +class PrivateEDDSA(CryptographyPrivateKey): + public_cls: Type[PublicEDDSA] + + def sign( + self, + data: bytes, + verify: bool = False, + deterministic: bool = True, + ) -> bytes: + """Sign using a private key per RFC 8080, section 4.""" + signature = self.key.sign(data) + if verify: + self.public_key().verify(signature, data) + return signature + + @classmethod + def generate(cls) -> "PrivateEDDSA": + return cls(key=cls.key_cls.generate()) + + +class PublicED25519(PublicEDDSA): + key: ed25519.Ed25519PublicKey + key_cls = ed25519.Ed25519PublicKey + algorithm = Algorithm.ED25519 + + +class PrivateED25519(PrivateEDDSA): + key: ed25519.Ed25519PrivateKey + key_cls = ed25519.Ed25519PrivateKey + public_cls = PublicED25519 + + +class PublicED448(PublicEDDSA): + key: ed448.Ed448PublicKey + key_cls = ed448.Ed448PublicKey + algorithm = Algorithm.ED448 + + +class PrivateED448(PrivateEDDSA): + key: ed448.Ed448PrivateKey + key_cls = ed448.Ed448PrivateKey + public_cls = PublicED448 diff --git a/venv/lib/python3.11/site-packages/dns/dnssecalgs/rsa.py b/venv/lib/python3.11/site-packages/dns/dnssecalgs/rsa.py new file mode 100644 index 0000000..27537aa --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/dnssecalgs/rsa.py @@ -0,0 +1,124 @@ +import math +import struct + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import padding, rsa + +from dns.dnssecalgs.cryptography import CryptographyPrivateKey, CryptographyPublicKey +from dns.dnssectypes import Algorithm +from dns.rdtypes.ANY.DNSKEY import DNSKEY + + +class PublicRSA(CryptographyPublicKey): + key: rsa.RSAPublicKey + key_cls = rsa.RSAPublicKey + algorithm: Algorithm + chosen_hash: hashes.HashAlgorithm + + def verify(self, signature: bytes, data: bytes) -> None: + self.key.verify(signature, data, padding.PKCS1v15(), self.chosen_hash) + + def encode_key_bytes(self) -> bytes: + """Encode a public key per RFC 3110, section 2.""" + pn = 
self.key.public_numbers() + _exp_len = math.ceil(int.bit_length(pn.e) / 8) + exp = int.to_bytes(pn.e, length=_exp_len, byteorder="big") + if _exp_len > 255: + exp_header = b"\0" + struct.pack("!H", _exp_len) + else: + exp_header = struct.pack("!B", _exp_len) + if pn.n.bit_length() < 512 or pn.n.bit_length() > 4096: + raise ValueError("unsupported RSA key length") + return exp_header + exp + pn.n.to_bytes((pn.n.bit_length() + 7) // 8, "big") + + @classmethod + def from_dnskey(cls, key: DNSKEY) -> "PublicRSA": + cls._ensure_algorithm_key_combination(key) + keyptr = key.key + (bytes_,) = struct.unpack("!B", keyptr[0:1]) + keyptr = keyptr[1:] + if bytes_ == 0: + (bytes_,) = struct.unpack("!H", keyptr[0:2]) + keyptr = keyptr[2:] + rsa_e = keyptr[0:bytes_] + rsa_n = keyptr[bytes_:] + return cls( + key=rsa.RSAPublicNumbers( + int.from_bytes(rsa_e, "big"), int.from_bytes(rsa_n, "big") + ).public_key(default_backend()) + ) + + +class PrivateRSA(CryptographyPrivateKey): + key: rsa.RSAPrivateKey + key_cls = rsa.RSAPrivateKey + public_cls = PublicRSA + default_public_exponent = 65537 + + def sign( + self, + data: bytes, + verify: bool = False, + deterministic: bool = True, + ) -> bytes: + """Sign using a private key per RFC 3110, section 3.""" + signature = self.key.sign(data, padding.PKCS1v15(), self.public_cls.chosen_hash) + if verify: + self.public_key().verify(signature, data) + return signature + + @classmethod + def generate(cls, key_size: int) -> "PrivateRSA": + return cls( + key=rsa.generate_private_key( + public_exponent=cls.default_public_exponent, + key_size=key_size, + backend=default_backend(), + ) + ) + + +class PublicRSAMD5(PublicRSA): + algorithm = Algorithm.RSAMD5 + chosen_hash = hashes.MD5() + + +class PrivateRSAMD5(PrivateRSA): + public_cls = PublicRSAMD5 + + +class PublicRSASHA1(PublicRSA): + algorithm = Algorithm.RSASHA1 + chosen_hash = hashes.SHA1() + + +class PrivateRSASHA1(PrivateRSA): + public_cls = PublicRSASHA1 + + +class PublicRSASHA1NSEC3SHA1(PublicRSA): + algorithm = Algorithm.RSASHA1NSEC3SHA1 + chosen_hash = hashes.SHA1() + + +class PrivateRSASHA1NSEC3SHA1(PrivateRSA): + public_cls = PublicRSASHA1NSEC3SHA1 + + +class PublicRSASHA256(PublicRSA): + algorithm = Algorithm.RSASHA256 + chosen_hash = hashes.SHA256() + + +class PrivateRSASHA256(PrivateRSA): + public_cls = PublicRSASHA256 + + +class PublicRSASHA512(PublicRSA): + algorithm = Algorithm.RSASHA512 + chosen_hash = hashes.SHA512() + + +class PrivateRSASHA512(PrivateRSA): + public_cls = PublicRSASHA512 diff --git a/venv/lib/python3.11/site-packages/dns/dnssectypes.py b/venv/lib/python3.11/site-packages/dns/dnssectypes.py new file mode 100644 index 0000000..02131e0 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/dnssectypes.py @@ -0,0 +1,71 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Common DNSSEC-related types.""" + +# This is a separate file to avoid import circularity between dns.dnssec and +# the implementations of the DS and DNSKEY types. + +import dns.enum + + +class Algorithm(dns.enum.IntEnum): + RSAMD5 = 1 + DH = 2 + DSA = 3 + ECC = 4 + RSASHA1 = 5 + DSANSEC3SHA1 = 6 + RSASHA1NSEC3SHA1 = 7 + RSASHA256 = 8 + RSASHA512 = 10 + ECCGOST = 12 + ECDSAP256SHA256 = 13 + ECDSAP384SHA384 = 14 + ED25519 = 15 + ED448 = 16 + INDIRECT = 252 + PRIVATEDNS = 253 + PRIVATEOID = 254 + + @classmethod + def _maximum(cls): + return 255 + + +class DSDigest(dns.enum.IntEnum): + """DNSSEC Delegation Signer Digest Algorithm""" + + NULL = 0 + SHA1 = 1 + SHA256 = 2 + GOST = 3 + SHA384 = 4 + + @classmethod + def _maximum(cls): + return 255 + + +class NSEC3Hash(dns.enum.IntEnum): + """NSEC3 hash algorithm""" + + SHA1 = 1 + + @classmethod + def _maximum(cls): + return 255 diff --git a/venv/lib/python3.11/site-packages/dns/e164.py b/venv/lib/python3.11/site-packages/dns/e164.py new file mode 100644 index 0000000..453736d --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/e164.py @@ -0,0 +1,116 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS E.164 helpers.""" + +from typing import Iterable, Optional, Union + +import dns.exception +import dns.name +import dns.resolver + +#: The public E.164 domain. +public_enum_domain = dns.name.from_text("e164.arpa.") + + +def from_e164( + text: str, origin: Optional[dns.name.Name] = public_enum_domain +) -> dns.name.Name: + """Convert an E.164 number in textual form into a Name object whose + value is the ENUM domain name for that number. + + Non-digits in the text are ignored, i.e. "16505551212", + "+1.650.555.1212" and "1 (650) 555-1212" are all the same. + + *text*, a ``str``, is an E.164 number in textual form. + + *origin*, a ``dns.name.Name``, the domain in which the number + should be constructed. The default is ``e164.arpa.``. + + Returns a ``dns.name.Name``. + """ + + parts = [d for d in text if d.isdigit()] + parts.reverse() + return dns.name.from_text(".".join(parts), origin=origin) + + +def to_e164( + name: dns.name.Name, + origin: Optional[dns.name.Name] = public_enum_domain, + want_plus_prefix: bool = True, +) -> str: + """Convert an ENUM domain name into an E.164 number. 
+
+    Note that dnspython does not have any information about preferred
+    number formats within national numbering plans, so all numbers are
+    emitted as a simple string of digits, prefixed by a '+' (unless
+    *want_plus_prefix* is ``False``).
+
+    *name* is a ``dns.name.Name``, the ENUM domain name.
+
+    *origin* is a ``dns.name.Name``, a domain containing the ENUM
+    domain name. The name is relativized to this domain before being
+    converted to text. If ``None``, no relativization is done.
+
+    *want_plus_prefix* is a ``bool``. If True, add a '+' to the beginning of
+    the returned number.
+
+    Returns a ``str``.
+
+    """
+    if origin is not None:
+        name = name.relativize(origin)
+    dlabels = [d for d in name.labels if d.isdigit() and len(d) == 1]
+    if len(dlabels) != len(name.labels):
+        raise dns.exception.SyntaxError("non-digit labels in ENUM domain name")
+    dlabels.reverse()
+    text = b"".join(dlabels)
+    if want_plus_prefix:
+        text = b"+" + text
+    return text.decode()
+
+
+def query(
+    number: str,
+    domains: Iterable[Union[dns.name.Name, str]],
+    resolver: Optional[dns.resolver.Resolver] = None,
+) -> dns.resolver.Answer:
+    """Look for NAPTR RRs for the specified number in the specified domains.
+
+    e.g. query('16505551212', ['e164.dnspython.org.', 'e164.arpa.'])
+
+    *number*, a ``str``, is the number to look for.
+
+    *domains* is an iterable containing ``dns.name.Name`` values.
+
+    *resolver*, a ``dns.resolver.Resolver``, is the resolver to use. If
+    ``None``, the default resolver is used.
+    """
+
+    if resolver is None:
+        resolver = dns.resolver.get_default_resolver()
+    e_nx = dns.resolver.NXDOMAIN()
+    for domain in domains:
+        if isinstance(domain, str):
+            domain = dns.name.from_text(domain)
+        qname = dns.e164.from_e164(number, domain)
+        try:
+            return resolver.resolve(qname, "NAPTR")
+        except dns.resolver.NXDOMAIN as e:
+            e_nx += e
+    raise e_nx diff --git a/venv/lib/python3.11/site-packages/dns/edns.py b/venv/lib/python3.11/site-packages/dns/edns.py new file mode 100644 index 0000000..f7d9ff9 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/edns.py @@ -0,0 +1,572 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+# Copyright (C) 2009-2017 Nominum, Inc.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
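+# Editorial sketch (not part of upstream dnspython): a typical consumer of
+# the option classes below attaches them to a query via dns.message,
+# assuming the make_query() signature with use_edns/options keywords:
+#
+#     import dns.edns
+#     import dns.message
+#
+#     ecs = dns.edns.ECSOption("192.0.2.0", 24)  # /24 client subnet
+#     q = dns.message.make_query("example.com", "A", use_edns=0, options=[ecs])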
+ +"""EDNS Options""" + +import binascii +import math +import socket +import struct +from typing import Any, Dict, Optional, Union + +import dns.enum +import dns.inet +import dns.rdata +import dns.wire + + +class OptionType(dns.enum.IntEnum): + #: NSID + NSID = 3 + #: DAU + DAU = 5 + #: DHU + DHU = 6 + #: N3U + N3U = 7 + #: ECS (client-subnet) + ECS = 8 + #: EXPIRE + EXPIRE = 9 + #: COOKIE + COOKIE = 10 + #: KEEPALIVE + KEEPALIVE = 11 + #: PADDING + PADDING = 12 + #: CHAIN + CHAIN = 13 + #: EDE (extended-dns-error) + EDE = 15 + #: REPORTCHANNEL + REPORTCHANNEL = 18 + + @classmethod + def _maximum(cls): + return 65535 + + +class Option: + """Base class for all EDNS option types.""" + + def __init__(self, otype: Union[OptionType, str]): + """Initialize an option. + + *otype*, a ``dns.edns.OptionType``, is the option type. + """ + self.otype = OptionType.make(otype) + + def to_wire(self, file: Optional[Any] = None) -> Optional[bytes]: + """Convert an option to wire format. + + Returns a ``bytes`` or ``None``. + + """ + raise NotImplementedError # pragma: no cover + + def to_text(self) -> str: + raise NotImplementedError # pragma: no cover + + @classmethod + def from_wire_parser(cls, otype: OptionType, parser: "dns.wire.Parser") -> "Option": + """Build an EDNS option object from wire format. + + *otype*, a ``dns.edns.OptionType``, is the option type. + + *parser*, a ``dns.wire.Parser``, the parser, which should be + restructed to the option length. + + Returns a ``dns.edns.Option``. + """ + raise NotImplementedError # pragma: no cover + + def _cmp(self, other): + """Compare an EDNS option with another option of the same type. + + Returns < 0 if < *other*, 0 if == *other*, and > 0 if > *other*. + """ + wire = self.to_wire() + owire = other.to_wire() + if wire == owire: + return 0 + if wire > owire: + return 1 + return -1 + + def __eq__(self, other): + if not isinstance(other, Option): + return False + if self.otype != other.otype: + return False + return self._cmp(other) == 0 + + def __ne__(self, other): + if not isinstance(other, Option): + return True + if self.otype != other.otype: + return True + return self._cmp(other) != 0 + + def __lt__(self, other): + if not isinstance(other, Option) or self.otype != other.otype: + return NotImplemented + return self._cmp(other) < 0 + + def __le__(self, other): + if not isinstance(other, Option) or self.otype != other.otype: + return NotImplemented + return self._cmp(other) <= 0 + + def __ge__(self, other): + if not isinstance(other, Option) or self.otype != other.otype: + return NotImplemented + return self._cmp(other) >= 0 + + def __gt__(self, other): + if not isinstance(other, Option) or self.otype != other.otype: + return NotImplemented + return self._cmp(other) > 0 + + def __str__(self): + return self.to_text() + + +class GenericOption(Option): # lgtm[py/missing-equals] + """Generic Option Class + + This class is used for EDNS option types for which we have no better + implementation. 
+ """ + + def __init__(self, otype: Union[OptionType, str], data: Union[bytes, str]): + super().__init__(otype) + self.data = dns.rdata.Rdata._as_bytes(data, True) + + def to_wire(self, file: Optional[Any] = None) -> Optional[bytes]: + if file: + file.write(self.data) + return None + else: + return self.data + + def to_text(self) -> str: + return "Generic %d" % self.otype + + @classmethod + def from_wire_parser( + cls, otype: Union[OptionType, str], parser: "dns.wire.Parser" + ) -> Option: + return cls(otype, parser.get_remaining()) + + +class ECSOption(Option): # lgtm[py/missing-equals] + """EDNS Client Subnet (ECS, RFC7871)""" + + def __init__(self, address: str, srclen: Optional[int] = None, scopelen: int = 0): + """*address*, a ``str``, is the client address information. + + *srclen*, an ``int``, the source prefix length, which is the + leftmost number of bits of the address to be used for the + lookup. The default is 24 for IPv4 and 56 for IPv6. + + *scopelen*, an ``int``, the scope prefix length. This value + must be 0 in queries, and should be set in responses. + """ + + super().__init__(OptionType.ECS) + af = dns.inet.af_for_address(address) + + if af == socket.AF_INET6: + self.family = 2 + if srclen is None: + srclen = 56 + address = dns.rdata.Rdata._as_ipv6_address(address) + srclen = dns.rdata.Rdata._as_int(srclen, 0, 128) + scopelen = dns.rdata.Rdata._as_int(scopelen, 0, 128) + elif af == socket.AF_INET: + self.family = 1 + if srclen is None: + srclen = 24 + address = dns.rdata.Rdata._as_ipv4_address(address) + srclen = dns.rdata.Rdata._as_int(srclen, 0, 32) + scopelen = dns.rdata.Rdata._as_int(scopelen, 0, 32) + else: # pragma: no cover (this will never happen) + raise ValueError("Bad address family") + + assert srclen is not None + self.address = address + self.srclen = srclen + self.scopelen = scopelen + + addrdata = dns.inet.inet_pton(af, address) + nbytes = int(math.ceil(srclen / 8.0)) + + # Truncate to srclen and pad to the end of the last octet needed + # See RFC section 6 + self.addrdata = addrdata[:nbytes] + nbits = srclen % 8 + if nbits != 0: + last = struct.pack("B", ord(self.addrdata[-1:]) & (0xFF << (8 - nbits))) + self.addrdata = self.addrdata[:-1] + last + + def to_text(self) -> str: + return f"ECS {self.address}/{self.srclen} scope/{self.scopelen}" + + @staticmethod + def from_text(text: str) -> Option: + """Convert a string into a `dns.edns.ECSOption` + + *text*, a `str`, the text form of the option. + + Returns a `dns.edns.ECSOption`. 
+ + Examples: + + >>> import dns.edns + >>> + >>> # basic example + >>> dns.edns.ECSOption.from_text('1.2.3.4/24') + >>> + >>> # also understands scope + >>> dns.edns.ECSOption.from_text('1.2.3.4/24/32') + >>> + >>> # IPv6 + >>> dns.edns.ECSOption.from_text('2001:4b98::1/64/64') + >>> + >>> # it understands results from `dns.edns.ECSOption.to_text()` + >>> dns.edns.ECSOption.from_text('ECS 1.2.3.4/24/32') + """ + optional_prefix = "ECS" + tokens = text.split() + ecs_text = None + if len(tokens) == 1: + ecs_text = tokens[0] + elif len(tokens) == 2: + if tokens[0] != optional_prefix: + raise ValueError(f'could not parse ECS from "{text}"') + ecs_text = tokens[1] + else: + raise ValueError(f'could not parse ECS from "{text}"') + n_slashes = ecs_text.count("/") + if n_slashes == 1: + address, tsrclen = ecs_text.split("/") + tscope = "0" + elif n_slashes == 2: + address, tsrclen, tscope = ecs_text.split("/") + else: + raise ValueError(f'could not parse ECS from "{text}"') + try: + scope = int(tscope) + except ValueError: + raise ValueError("invalid scope " + f'"{tscope}": scope must be an integer') + try: + srclen = int(tsrclen) + except ValueError: + raise ValueError( + "invalid srclen " + f'"{tsrclen}": srclen must be an integer' + ) + return ECSOption(address, srclen, scope) + + def to_wire(self, file: Optional[Any] = None) -> Optional[bytes]: + value = ( + struct.pack("!HBB", self.family, self.srclen, self.scopelen) + self.addrdata + ) + if file: + file.write(value) + return None + else: + return value + + @classmethod + def from_wire_parser( + cls, otype: Union[OptionType, str], parser: "dns.wire.Parser" + ) -> Option: + family, src, scope = parser.get_struct("!HBB") + addrlen = int(math.ceil(src / 8.0)) + prefix = parser.get_bytes(addrlen) + if family == 1: + pad = 4 - addrlen + addr = dns.ipv4.inet_ntoa(prefix + b"\x00" * pad) + elif family == 2: + pad = 16 - addrlen + addr = dns.ipv6.inet_ntoa(prefix + b"\x00" * pad) + else: + raise ValueError("unsupported family") + + return cls(addr, src, scope) + + +class EDECode(dns.enum.IntEnum): + OTHER = 0 + UNSUPPORTED_DNSKEY_ALGORITHM = 1 + UNSUPPORTED_DS_DIGEST_TYPE = 2 + STALE_ANSWER = 3 + FORGED_ANSWER = 4 + DNSSEC_INDETERMINATE = 5 + DNSSEC_BOGUS = 6 + SIGNATURE_EXPIRED = 7 + SIGNATURE_NOT_YET_VALID = 8 + DNSKEY_MISSING = 9 + RRSIGS_MISSING = 10 + NO_ZONE_KEY_BIT_SET = 11 + NSEC_MISSING = 12 + CACHED_ERROR = 13 + NOT_READY = 14 + BLOCKED = 15 + CENSORED = 16 + FILTERED = 17 + PROHIBITED = 18 + STALE_NXDOMAIN_ANSWER = 19 + NOT_AUTHORITATIVE = 20 + NOT_SUPPORTED = 21 + NO_REACHABLE_AUTHORITY = 22 + NETWORK_ERROR = 23 + INVALID_DATA = 24 + + @classmethod + def _maximum(cls): + return 65535 + + +class EDEOption(Option): # lgtm[py/missing-equals] + """Extended DNS Error (EDE, RFC8914)""" + + _preserve_case = {"DNSKEY", "DS", "DNSSEC", "RRSIGs", "NSEC", "NXDOMAIN"} + + def __init__(self, code: Union[EDECode, str], text: Optional[str] = None): + """*code*, a ``dns.edns.EDECode`` or ``str``, the info code of the + extended error. + + *text*, a ``str`` or ``None``, specifying additional information about + the error. 
+ """ + + super().__init__(OptionType.EDE) + + self.code = EDECode.make(code) + if text is not None and not isinstance(text, str): + raise ValueError("text must be string or None") + self.text = text + + def to_text(self) -> str: + output = f"EDE {self.code}" + if self.code in EDECode: + desc = EDECode.to_text(self.code) + desc = " ".join( + word if word in self._preserve_case else word.title() + for word in desc.split("_") + ) + output += f" ({desc})" + if self.text is not None: + output += f": {self.text}" + return output + + def to_wire(self, file: Optional[Any] = None) -> Optional[bytes]: + value = struct.pack("!H", self.code) + if self.text is not None: + value += self.text.encode("utf8") + + if file: + file.write(value) + return None + else: + return value + + @classmethod + def from_wire_parser( + cls, otype: Union[OptionType, str], parser: "dns.wire.Parser" + ) -> Option: + code = EDECode.make(parser.get_uint16()) + text = parser.get_remaining() + + if text: + if text[-1] == 0: # text MAY be null-terminated + text = text[:-1] + btext = text.decode("utf8") + else: + btext = None + + return cls(code, btext) + + +class NSIDOption(Option): + def __init__(self, nsid: bytes): + super().__init__(OptionType.NSID) + self.nsid = nsid + + def to_wire(self, file: Any = None) -> Optional[bytes]: + if file: + file.write(self.nsid) + return None + else: + return self.nsid + + def to_text(self) -> str: + if all(c >= 0x20 and c <= 0x7E for c in self.nsid): + # All ASCII printable, so it's probably a string. + value = self.nsid.decode() + else: + value = binascii.hexlify(self.nsid).decode() + return f"NSID {value}" + + @classmethod + def from_wire_parser( + cls, otype: Union[OptionType, str], parser: dns.wire.Parser + ) -> Option: + return cls(parser.get_remaining()) + + +class CookieOption(Option): + def __init__(self, client: bytes, server: bytes): + super().__init__(dns.edns.OptionType.COOKIE) + self.client = client + self.server = server + if len(client) != 8: + raise ValueError("client cookie must be 8 bytes") + if len(server) != 0 and (len(server) < 8 or len(server) > 32): + raise ValueError("server cookie must be empty or between 8 and 32 bytes") + + def to_wire(self, file: Any = None) -> Optional[bytes]: + if file: + file.write(self.client) + if len(self.server) > 0: + file.write(self.server) + return None + else: + return self.client + self.server + + def to_text(self) -> str: + client = binascii.hexlify(self.client).decode() + if len(self.server) > 0: + server = binascii.hexlify(self.server).decode() + else: + server = "" + return f"COOKIE {client}{server}" + + @classmethod + def from_wire_parser( + cls, otype: Union[OptionType, str], parser: dns.wire.Parser + ) -> Option: + return cls(parser.get_bytes(8), parser.get_remaining()) + + +class ReportChannelOption(Option): + # RFC 9567 + def __init__(self, agent_domain: dns.name.Name): + super().__init__(OptionType.REPORTCHANNEL) + self.agent_domain = agent_domain + + def to_wire(self, file: Any = None) -> Optional[bytes]: + return self.agent_domain.to_wire(file) + + def to_text(self) -> str: + return "REPORTCHANNEL " + self.agent_domain.to_text() + + @classmethod + def from_wire_parser( + cls, otype: Union[OptionType, str], parser: dns.wire.Parser + ) -> Option: + return cls(parser.get_name()) + + +_type_to_class: Dict[OptionType, Any] = { + OptionType.ECS: ECSOption, + OptionType.EDE: EDEOption, + OptionType.NSID: NSIDOption, + OptionType.COOKIE: CookieOption, + OptionType.REPORTCHANNEL: ReportChannelOption, +} + + +def 
get_option_class(otype: OptionType) -> Any: + """Return the class for the specified option type. + + The GenericOption class is used if a more specific class is not + known. + """ + + cls = _type_to_class.get(otype) + if cls is None: + cls = GenericOption + return cls + + +def option_from_wire_parser( + otype: Union[OptionType, str], parser: "dns.wire.Parser" +) -> Option: + """Build an EDNS option object from wire format. + + *otype*, an ``int``, is the option type. + + *parser*, a ``dns.wire.Parser``, the parser, which should be + restricted to the option length. + + Returns an instance of a subclass of ``dns.edns.Option``. + """ + otype = OptionType.make(otype) + cls = get_option_class(otype) + return cls.from_wire_parser(otype, parser) + + +def option_from_wire( + otype: Union[OptionType, str], wire: bytes, current: int, olen: int +) -> Option: + """Build an EDNS option object from wire format. + + *otype*, an ``int``, is the option type. + + *wire*, a ``bytes``, is the wire-format message. + + *current*, an ``int``, is the offset in *wire* of the beginning + of the rdata. + + *olen*, an ``int``, is the length of the wire-format option data + + Returns an instance of a subclass of ``dns.edns.Option``. + """ + parser = dns.wire.Parser(wire, current) + with parser.restrict_to(olen): + return option_from_wire_parser(otype, parser) + + +def register_type(implementation: Any, otype: OptionType) -> None: + """Register the implementation of an option type. + + *implementation*, a ``class``, is a subclass of ``dns.edns.Option``. + + *otype*, an ``int``, is the option type. + """ + + _type_to_class[otype] = implementation + + +### BEGIN generated OptionType constants + +NSID = OptionType.NSID +DAU = OptionType.DAU +DHU = OptionType.DHU +N3U = OptionType.N3U +ECS = OptionType.ECS +EXPIRE = OptionType.EXPIRE +COOKIE = OptionType.COOKIE +KEEPALIVE = OptionType.KEEPALIVE +PADDING = OptionType.PADDING +CHAIN = OptionType.CHAIN +EDE = OptionType.EDE +REPORTCHANNEL = OptionType.REPORTCHANNEL + +### END generated OptionType constants diff --git a/venv/lib/python3.11/site-packages/dns/entropy.py b/venv/lib/python3.11/site-packages/dns/entropy.py new file mode 100644 index 0000000..4dcdc62 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/entropy.py @@ -0,0 +1,130 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2009-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import hashlib +import os +import random +import threading +import time +from typing import Any, Optional + + +class EntropyPool: + # This is an entropy pool for Python implementations that do not + # have a working SystemRandom. I'm not sure there are any, but + # leaving this code doesn't hurt anything as the library code + # is used if present. 
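+    # Editorial note (not part of upstream dnspython): the pool is a
+    # hash_len-sized byte buffer (20 bytes for SHA-1). _stir() XORs new
+    # entropy into the buffer one byte at a time, and random_8() below
+    # hashes the pool to produce output bytes, stirring the digest back
+    # in so repeated reads keep mutating the pool state.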
+ + def __init__(self, seed: Optional[bytes] = None): + self.pool_index = 0 + self.digest: Optional[bytearray] = None + self.next_byte = 0 + self.lock = threading.Lock() + self.hash = hashlib.sha1() + self.hash_len = 20 + self.pool = bytearray(b"\0" * self.hash_len) + if seed is not None: + self._stir(seed) + self.seeded = True + self.seed_pid = os.getpid() + else: + self.seeded = False + self.seed_pid = 0 + + def _stir(self, entropy: bytes) -> None: + for c in entropy: + if self.pool_index == self.hash_len: + self.pool_index = 0 + b = c & 0xFF + self.pool[self.pool_index] ^= b + self.pool_index += 1 + + def stir(self, entropy: bytes) -> None: + with self.lock: + self._stir(entropy) + + def _maybe_seed(self) -> None: + if not self.seeded or self.seed_pid != os.getpid(): + try: + seed = os.urandom(16) + except Exception: # pragma: no cover + try: + with open("/dev/urandom", "rb", 0) as r: + seed = r.read(16) + except Exception: + seed = str(time.time()).encode() + self.seeded = True + self.seed_pid = os.getpid() + self.digest = None + seed = bytearray(seed) + self._stir(seed) + + def random_8(self) -> int: + with self.lock: + self._maybe_seed() + if self.digest is None or self.next_byte == self.hash_len: + self.hash.update(bytes(self.pool)) + self.digest = bytearray(self.hash.digest()) + self._stir(self.digest) + self.next_byte = 0 + value = self.digest[self.next_byte] + self.next_byte += 1 + return value + + def random_16(self) -> int: + return self.random_8() * 256 + self.random_8() + + def random_32(self) -> int: + return self.random_16() * 65536 + self.random_16() + + def random_between(self, first: int, last: int) -> int: + size = last - first + 1 + if size > 4294967296: + raise ValueError("too big") + if size > 65536: + rand = self.random_32 + max = 4294967295 + elif size > 256: + rand = self.random_16 + max = 65535 + else: + rand = self.random_8 + max = 255 + return first + size * rand() // (max + 1) + + +pool = EntropyPool() + +system_random: Optional[Any] +try: + system_random = random.SystemRandom() +except Exception: # pragma: no cover + system_random = None + + +def random_16() -> int: + if system_random is not None: + return system_random.randrange(0, 65536) + else: + return pool.random_16() + + +def between(first: int, last: int) -> int: + if system_random is not None: + return system_random.randrange(first, last + 1) + else: + return pool.random_between(first, last) diff --git a/venv/lib/python3.11/site-packages/dns/enum.py b/venv/lib/python3.11/site-packages/dns/enum.py new file mode 100644 index 0000000..71461f1 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/enum.py @@ -0,0 +1,116 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
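+# Editorial sketch (not part of upstream dnspython): subclasses of the
+# IntEnum below (dns.rdatatype.RdataType, dns.rcode.Rcode, etc.) accept
+# either mnemonics or integers through make(), e.g., assuming dns.rdatatype:
+#
+#     import dns.rdatatype
+#
+#     assert dns.rdatatype.RdataType.make("A") == 1
+#     assert dns.rdatatype.RdataType.make(1) is dns.rdatatype.RdataType.A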
+
+import enum
+from typing import Type, TypeVar, Union
+
+TIntEnum = TypeVar("TIntEnum", bound="IntEnum")
+
+
+class IntEnum(enum.IntEnum):
+    @classmethod
+    def _missing_(cls, value):
+        cls._check_value(value)
+        val = int.__new__(cls, value)
+        val._name_ = cls._extra_to_text(value, None) or f"{cls._prefix()}{value}"
+        val._value_ = value
+        return val
+
+    @classmethod
+    def _check_value(cls, value):
+        max = cls._maximum()
+        if not isinstance(value, int):
+            raise TypeError
+        if value < 0 or value > max:
+            name = cls._short_name()
+            raise ValueError(f"{name} must be an int >= 0 and <= {max}")
+
+    @classmethod
+    def from_text(cls: Type[TIntEnum], text: str) -> TIntEnum:
+        text = text.upper()
+        try:
+            return cls[text]
+        except KeyError:
+            pass
+        value = cls._extra_from_text(text)
+        if value:
+            return value
+        prefix = cls._prefix()
+        if text.startswith(prefix) and text[len(prefix) :].isdigit():
+            value = int(text[len(prefix) :])
+            cls._check_value(value)
+            try:
+                return cls(value)
+            except ValueError:
+                return value
+        raise cls._unknown_exception_class()
+
+    @classmethod
+    def to_text(cls: Type[TIntEnum], value: int) -> str:
+        cls._check_value(value)
+        try:
+            text = cls(value).name
+        except ValueError:
+            text = None
+        text = cls._extra_to_text(value, text)
+        if text is None:
+            text = f"{cls._prefix()}{value}"
+        return text
+
+    @classmethod
+    def make(cls: Type[TIntEnum], value: Union[int, str]) -> TIntEnum:
+        """Convert text or a value into an enumerated type, if possible.
+
+        *value*, the ``int`` or ``str`` to convert.
+
+        Raises a class-specific exception if a ``str`` is provided that
+        cannot be converted.
+
+        Raises ``ValueError`` if the value is out of range.
+
+        Returns an enumeration from the calling class corresponding to the
+        value, if one is defined, or an ``int`` otherwise.
+        """
+
+        if isinstance(value, str):
+            return cls.from_text(value)
+        cls._check_value(value)
+        return cls(value)
+
+    @classmethod
+    def _maximum(cls):
+        raise NotImplementedError  # pragma: no cover
+
+    @classmethod
+    def _short_name(cls):
+        return cls.__name__.lower()
+
+    @classmethod
+    def _prefix(cls):
+        return ""
+
+    @classmethod
+    def _extra_from_text(cls, text):  # pylint: disable=W0613
+        return None
+
+    @classmethod
+    def _extra_to_text(cls, value, current_text):  # pylint: disable=W0613
+        return current_text
+
+    @classmethod
+    def _unknown_exception_class(cls):
+        return ValueError diff --git a/venv/lib/python3.11/site-packages/dns/exception.py b/venv/lib/python3.11/site-packages/dns/exception.py new file mode 100644 index 0000000..223f2d6 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/exception.py @@ -0,0 +1,169 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+# Copyright (C) 2003-2017 Nominum, Inc.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
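+# Editorial sketch (not part of upstream dnspython): the parametrized mode
+# described in DNSException's docstring below is what Timeout uses, e.g.:
+#
+#     import dns.exception
+#
+#     e = dns.exception.Timeout(timeout=2.5)
+#     assert str(e) == "The DNS operation timed out after 2.500 seconds"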
+ +"""Common DNS Exceptions. + +Dnspython modules may also define their own exceptions, which will +always be subclasses of ``DNSException``. +""" + + +from typing import Optional, Set + + +class DNSException(Exception): + """Abstract base class shared by all dnspython exceptions. + + It supports two basic modes of operation: + + a) Old/compatible mode is used if ``__init__`` was called with + empty *kwargs*. In compatible mode all *args* are passed + to the standard Python Exception class as before and all *args* are + printed by the standard ``__str__`` implementation. Class variable + ``msg`` (or doc string if ``msg`` is ``None``) is returned from ``str()`` + if *args* is empty. + + b) New/parametrized mode is used if ``__init__`` was called with + non-empty *kwargs*. + In the new mode *args* must be empty and all kwargs must match + those set in class variable ``supp_kwargs``. All kwargs are stored inside + ``self.kwargs`` and used in a new ``__str__`` implementation to construct + a formatted message based on the ``fmt`` class variable, a ``string``. + + In the simplest case it is enough to override the ``supp_kwargs`` + and ``fmt`` class variables to get nice parametrized messages. + """ + + msg: Optional[str] = None # non-parametrized message + supp_kwargs: Set[str] = set() # accepted parameters for _fmt_kwargs (sanity check) + fmt: Optional[str] = None # message parametrized with results from _fmt_kwargs + + def __init__(self, *args, **kwargs): + self._check_params(*args, **kwargs) + if kwargs: + # This call to a virtual method from __init__ is ok in our usage + self.kwargs = self._check_kwargs(**kwargs) # lgtm[py/init-calls-subclass] + self.msg = str(self) + else: + self.kwargs = dict() # defined but empty for old mode exceptions + if self.msg is None: + # doc string is better implicit message than empty string + self.msg = self.__doc__ + if args: + super().__init__(*args) + else: + super().__init__(self.msg) + + def _check_params(self, *args, **kwargs): + """Old exceptions supported only args and not kwargs. + + For sanity we do not allow to mix old and new behavior.""" + if args or kwargs: + assert bool(args) != bool( + kwargs + ), "keyword arguments are mutually exclusive with positional args" + + def _check_kwargs(self, **kwargs): + if kwargs: + assert ( + set(kwargs.keys()) == self.supp_kwargs + ), f"following set of keyword args is required: {self.supp_kwargs}" + return kwargs + + def _fmt_kwargs(self, **kwargs): + """Format kwargs before printing them. + + Resulting dictionary has to have keys necessary for str.format call + on fmt class variable. 
+        fmtargs = {}
+        for kw, data in kwargs.items():
+            if isinstance(data, (list, set)):
+                # convert list of <someobj> to list of str(<someobj>)
+                fmtargs[kw] = list(map(str, data))
+                if len(fmtargs[kw]) == 1:
+                    # remove list brackets [] from single-item lists
+                    fmtargs[kw] = fmtargs[kw].pop()
+            else:
+                fmtargs[kw] = data
+        return fmtargs
+
+    def __str__(self):
+        if self.kwargs and self.fmt:
+            # provide custom message constructed from keyword arguments
+            fmtargs = self._fmt_kwargs(**self.kwargs)
+            return self.fmt.format(**fmtargs)
+        else:
+            # print *args directly in the same way as old DNSException
+            return super().__str__()
+
+
+class FormError(DNSException):
+    """DNS message is malformed."""
+
+
+class SyntaxError(DNSException):
+    """Text input is malformed."""
+
+
+class UnexpectedEnd(SyntaxError):
+    """Text input ended unexpectedly."""
+
+
+class TooBig(DNSException):
+    """The DNS message is too big."""
+
+
+class Timeout(DNSException):
+    """The DNS operation timed out."""
+
+    supp_kwargs = {"timeout"}
+    fmt = "The DNS operation timed out after {timeout:.3f} seconds"
+
+    # We do this as otherwise mypy complains about unexpected keyword argument
+    # idna_exception
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+
+
+class UnsupportedAlgorithm(DNSException):
+    """The DNSSEC algorithm is not supported."""
+
+
+class AlgorithmKeyMismatch(UnsupportedAlgorithm):
+    """The DNSSEC algorithm is not supported for the given key type."""
+
+
+class ValidationFailure(DNSException):
+    """The DNSSEC signature is invalid."""
+
+
+class DeniedByPolicy(DNSException):
+    """Denied by DNSSEC policy."""
+
+
+class ExceptionWrapper:
+    def __init__(self, exception_class):
+        self.exception_class = exception_class
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if exc_type is not None and not isinstance(exc_val, self.exception_class):
+            raise self.exception_class(str(exc_val)) from exc_val
+        return False diff --git a/venv/lib/python3.11/site-packages/dns/flags.py b/venv/lib/python3.11/site-packages/dns/flags.py new file mode 100644 index 0000000..4c60be1 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/flags.py @@ -0,0 +1,123 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+# Copyright (C) 2001-2017 Nominum, Inc.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
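+# Editorial sketch (not part of upstream dnspython): the from_text/to_text
+# helpers defined below round-trip between bit masks and mnemonics, e.g.:
+#
+#     import dns.flags
+#
+#     f = dns.flags.from_text("QR RD RA")  # 0x8180
+#     assert dns.flags.to_text(f) == "QR RD RA"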
+ +"""DNS Message Flags.""" + +import enum +from typing import Any + +# Standard DNS flags + + +class Flag(enum.IntFlag): + #: Query Response + QR = 0x8000 + #: Authoritative Answer + AA = 0x0400 + #: Truncated Response + TC = 0x0200 + #: Recursion Desired + RD = 0x0100 + #: Recursion Available + RA = 0x0080 + #: Authentic Data + AD = 0x0020 + #: Checking Disabled + CD = 0x0010 + + +# EDNS flags + + +class EDNSFlag(enum.IntFlag): + #: DNSSEC answer OK + DO = 0x8000 + + +def _from_text(text: str, enum_class: Any) -> int: + flags = 0 + tokens = text.split() + for t in tokens: + flags |= enum_class[t.upper()] + return flags + + +def _to_text(flags: int, enum_class: Any) -> str: + text_flags = [] + for k, v in enum_class.__members__.items(): + if flags & v != 0: + text_flags.append(k) + return " ".join(text_flags) + + +def from_text(text: str) -> int: + """Convert a space-separated list of flag text values into a flags + value. + + Returns an ``int`` + """ + + return _from_text(text, Flag) + + +def to_text(flags: int) -> str: + """Convert a flags value into a space-separated list of flag text + values. + + Returns a ``str``. + """ + + return _to_text(flags, Flag) + + +def edns_from_text(text: str) -> int: + """Convert a space-separated list of EDNS flag text values into a EDNS + flags value. + + Returns an ``int`` + """ + + return _from_text(text, EDNSFlag) + + +def edns_to_text(flags: int) -> str: + """Convert an EDNS flags value into a space-separated list of EDNS flag + text values. + + Returns a ``str``. + """ + + return _to_text(flags, EDNSFlag) + + +### BEGIN generated Flag constants + +QR = Flag.QR +AA = Flag.AA +TC = Flag.TC +RD = Flag.RD +RA = Flag.RA +AD = Flag.AD +CD = Flag.CD + +### END generated Flag constants + +### BEGIN generated EDNSFlag constants + +DO = EDNSFlag.DO + +### END generated EDNSFlag constants diff --git a/venv/lib/python3.11/site-packages/dns/grange.py b/venv/lib/python3.11/site-packages/dns/grange.py new file mode 100644 index 0000000..a967ca4 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/grange.py @@ -0,0 +1,72 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2012-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS GENERATE range conversion.""" + +from typing import Tuple + +import dns + + +def from_text(text: str) -> Tuple[int, int, int]: + """Convert the text form of a range in a ``$GENERATE`` statement to an + integer. + + *text*, a ``str``, the textual range in ``$GENERATE`` form. + + Returns a tuple of three ``int`` values ``(start, stop, step)``. 
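+
+    For example (illustrative), ``from_text("1-4/2")`` would return
+    ``(1, 4, 2)``.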
+ """ + + start = -1 + stop = -1 + step = 1 + cur = "" + state = 0 + # state 0 1 2 + # x - y / z + + if text and text[0] == "-": + raise dns.exception.SyntaxError("Start cannot be a negative number") + + for c in text: + if c == "-" and state == 0: + start = int(cur) + cur = "" + state = 1 + elif c == "/": + stop = int(cur) + cur = "" + state = 2 + elif c.isdigit(): + cur += c + else: + raise dns.exception.SyntaxError(f"Could not parse {c}") + + if state == 0: + raise dns.exception.SyntaxError("no stop value specified") + elif state == 1: + stop = int(cur) + else: + assert state == 2 + step = int(cur) + + assert step >= 1 + assert start >= 0 + if start > stop: + raise dns.exception.SyntaxError("start must be <= stop") + + return (start, stop, step) diff --git a/venv/lib/python3.11/site-packages/dns/immutable.py b/venv/lib/python3.11/site-packages/dns/immutable.py new file mode 100644 index 0000000..36b0362 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/immutable.py @@ -0,0 +1,68 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import collections.abc +from typing import Any, Callable + +from dns._immutable_ctx import immutable + + +@immutable +class Dict(collections.abc.Mapping): # lgtm[py/missing-equals] + def __init__( + self, + dictionary: Any, + no_copy: bool = False, + map_factory: Callable[[], collections.abc.MutableMapping] = dict, + ): + """Make an immutable dictionary from the specified dictionary. + + If *no_copy* is `True`, then *dictionary* will be wrapped instead + of copied. Only set this if you are sure there will be no external + references to the dictionary. + """ + if no_copy and isinstance(dictionary, collections.abc.MutableMapping): + self._odict = dictionary + else: + self._odict = map_factory() + self._odict.update(dictionary) + self._hash = None + + def __getitem__(self, key): + return self._odict.__getitem__(key) + + def __hash__(self): # pylint: disable=invalid-hash-returned + if self._hash is None: + h = 0 + for key in sorted(self._odict.keys()): + h ^= hash(key) + object.__setattr__(self, "_hash", h) + # this does return an int, but pylint doesn't figure that out + return self._hash + + def __len__(self): + return len(self._odict) + + def __iter__(self): + return iter(self._odict) + + +def constify(o: Any) -> Any: + """ + Convert mutable types to immutable types. + """ + if isinstance(o, bytearray): + return bytes(o) + if isinstance(o, tuple): + try: + hash(o) + return o + except Exception: + return tuple(constify(elt) for elt in o) + if isinstance(o, list): + return tuple(constify(elt) for elt in o) + if isinstance(o, dict): + cdict = dict() + for k, v in o.items(): + cdict[k] = constify(v) + return Dict(cdict, True) + return o diff --git a/venv/lib/python3.11/site-packages/dns/inet.py b/venv/lib/python3.11/site-packages/dns/inet.py new file mode 100644 index 0000000..4a03f99 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/inet.py @@ -0,0 +1,197 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Generic Internet address helper functions.""" + +import socket +from typing import Any, Optional, Tuple + +import dns.ipv4 +import dns.ipv6 + +# We assume that AF_INET and AF_INET6 are always defined. We keep +# these here for the benefit of any old code (unlikely though that +# is!). +AF_INET = socket.AF_INET +AF_INET6 = socket.AF_INET6 + + +def inet_pton(family: int, text: str) -> bytes: + """Convert the textual form of a network address into its binary form. + + *family* is an ``int``, the address family. + + *text* is a ``str``, the textual address. + + Raises ``NotImplementedError`` if the address family specified is not + implemented. + + Returns a ``bytes``. + """ + + if family == AF_INET: + return dns.ipv4.inet_aton(text) + elif family == AF_INET6: + return dns.ipv6.inet_aton(text, True) + else: + raise NotImplementedError + + +def inet_ntop(family: int, address: bytes) -> str: + """Convert the binary form of a network address into its textual form. + + *family* is an ``int``, the address family. + + *address* is a ``bytes``, the network address in binary form. + + Raises ``NotImplementedError`` if the address family specified is not + implemented. + + Returns a ``str``. + """ + + if family == AF_INET: + return dns.ipv4.inet_ntoa(address) + elif family == AF_INET6: + return dns.ipv6.inet_ntoa(address) + else: + raise NotImplementedError + + +def af_for_address(text: str) -> int: + """Determine the address family of a textual-form network address. + + *text*, a ``str``, the textual address. + + Raises ``ValueError`` if the address family cannot be determined + from the input. + + Returns an ``int``. + """ + + try: + dns.ipv4.inet_aton(text) + return AF_INET + except Exception: + try: + dns.ipv6.inet_aton(text, True) + return AF_INET6 + except Exception: + raise ValueError + + +def is_multicast(text: str) -> bool: + """Is the textual-form network address a multicast address? + + *text*, a ``str``, the textual address. + + Raises ``ValueError`` if the address family cannot be determined + from the input. + + Returns a ``bool``. + """ + + try: + first = dns.ipv4.inet_aton(text)[0] + return first >= 224 and first <= 239 + except Exception: + try: + first = dns.ipv6.inet_aton(text, True)[0] + return first == 255 + except Exception: + raise ValueError + + +def is_address(text: str) -> bool: + """Is the specified string an IPv4 or IPv6 address? + + *text*, a ``str``, the textual address. + + Returns a ``bool``. + """ + + try: + dns.ipv4.inet_aton(text) + return True + except Exception: + try: + dns.ipv6.inet_aton(text, True) + return True + except Exception: + return False + + +def low_level_address_tuple( + high_tuple: Tuple[str, int], af: Optional[int] = None +) -> Any: + """Given a "high-level" address tuple, i.e. + an (address, port) return the appropriate "low-level" address tuple + suitable for use in socket calls. + + If an *af* other than ``None`` is provided, it is assumed the + address in the high-level tuple is valid and has that af. If af + is ``None``, then af_for_address will be called. 
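+
+    For example (illustrative), ``low_level_address_tuple(("::1", 53))``
+    would return ``("::1", 53, 0, 0)``.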
+ """ + address, port = high_tuple + if af is None: + af = af_for_address(address) + if af == AF_INET: + return (address, port) + elif af == AF_INET6: + i = address.find("%") + if i < 0: + # no scope, shortcut! + return (address, port, 0, 0) + # try to avoid getaddrinfo() + addrpart = address[:i] + scope = address[i + 1 :] + if scope.isdigit(): + return (addrpart, port, 0, int(scope)) + try: + return (addrpart, port, 0, socket.if_nametoindex(scope)) + except AttributeError: # pragma: no cover (we can't really test this) + ai_flags = socket.AI_NUMERICHOST + ((*_, tup), *_) = socket.getaddrinfo(address, port, flags=ai_flags) + return tup + else: + raise NotImplementedError(f"unknown address family {af}") + + +def any_for_af(af): + """Return the 'any' address for the specified address family.""" + if af == socket.AF_INET: + return "0.0.0.0" + elif af == socket.AF_INET6: + return "::" + raise NotImplementedError(f"unknown address family {af}") + + +def canonicalize(text: str) -> str: + """Verify that *address* is a valid text form IPv4 or IPv6 address and return its + canonical text form. IPv6 addresses with scopes are rejected. + + *text*, a ``str``, the address in textual form. + + Raises ``ValueError`` if the text is not valid. + """ + try: + return dns.ipv6.canonicalize(text) + except Exception: + try: + return dns.ipv4.canonicalize(text) + except Exception: + raise ValueError diff --git a/venv/lib/python3.11/site-packages/dns/ipv4.py b/venv/lib/python3.11/site-packages/dns/ipv4.py new file mode 100644 index 0000000..65ee69c --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/ipv4.py @@ -0,0 +1,77 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""IPv4 helper functions.""" + +import struct +from typing import Union + +import dns.exception + + +def inet_ntoa(address: bytes) -> str: + """Convert an IPv4 address in binary form to text form. + + *address*, a ``bytes``, the IPv4 address in binary form. + + Returns a ``str``. + """ + + if len(address) != 4: + raise dns.exception.SyntaxError + return "%u.%u.%u.%u" % (address[0], address[1], address[2], address[3]) + + +def inet_aton(text: Union[str, bytes]) -> bytes: + """Convert an IPv4 address in text form to binary form. + + *text*, a ``str`` or ``bytes``, the IPv4 address in textual form. + + Returns a ``bytes``. 
+ """ + + if not isinstance(text, bytes): + btext = text.encode() + else: + btext = text + parts = btext.split(b".") + if len(parts) != 4: + raise dns.exception.SyntaxError + for part in parts: + if not part.isdigit(): + raise dns.exception.SyntaxError + if len(part) > 1 and part[0] == ord("0"): + # No leading zeros + raise dns.exception.SyntaxError + try: + b = [int(part) for part in parts] + return struct.pack("BBBB", *b) + except Exception: + raise dns.exception.SyntaxError + + +def canonicalize(text: Union[str, bytes]) -> str: + """Verify that *address* is a valid text form IPv4 address and return its + canonical text form. + + *text*, a ``str`` or ``bytes``, the IPv4 address in textual form. + + Raises ``dns.exception.SyntaxError`` if the text is not valid. + """ + # Note that inet_aton() only accepts canonial form, but we still run through + # inet_ntoa() to ensure the output is a str. + return dns.ipv4.inet_ntoa(dns.ipv4.inet_aton(text)) diff --git a/venv/lib/python3.11/site-packages/dns/ipv6.py b/venv/lib/python3.11/site-packages/dns/ipv6.py new file mode 100644 index 0000000..4dd1d1c --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/ipv6.py @@ -0,0 +1,217 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""IPv6 helper functions.""" + +import binascii +import re +from typing import List, Union + +import dns.exception +import dns.ipv4 + +_leading_zero = re.compile(r"0+([0-9a-f]+)") + + +def inet_ntoa(address: bytes) -> str: + """Convert an IPv6 address in binary form to text form. + + *address*, a ``bytes``, the IPv6 address in binary form. + + Raises ``ValueError`` if the address isn't 16 bytes long. + Returns a ``str``. + """ + + if len(address) != 16: + raise ValueError("IPv6 addresses are 16 bytes long") + hex = binascii.hexlify(address) + chunks = [] + i = 0 + l = len(hex) + while i < l: + chunk = hex[i : i + 4].decode() + # strip leading zeros. 
we do this with an re instead of + # with lstrip() because lstrip() didn't support chars until + # python 2.2.2 + m = _leading_zero.match(chunk) + if m is not None: + chunk = m.group(1) + chunks.append(chunk) + i += 4 + # + # Compress the longest subsequence of 0-value chunks to :: + # + best_start = 0 + best_len = 0 + start = -1 + last_was_zero = False + for i in range(8): + if chunks[i] != "0": + if last_was_zero: + end = i + current_len = end - start + if current_len > best_len: + best_start = start + best_len = current_len + last_was_zero = False + elif not last_was_zero: + start = i + last_was_zero = True + if last_was_zero: + end = 8 + current_len = end - start + if current_len > best_len: + best_start = start + best_len = current_len + if best_len > 1: + if best_start == 0 and (best_len == 6 or best_len == 5 and chunks[5] == "ffff"): + # We have an embedded IPv4 address + if best_len == 6: + prefix = "::" + else: + prefix = "::ffff:" + thex = prefix + dns.ipv4.inet_ntoa(address[12:]) + else: + thex = ( + ":".join(chunks[:best_start]) + + "::" + + ":".join(chunks[best_start + best_len :]) + ) + else: + thex = ":".join(chunks) + return thex + + +_v4_ending = re.compile(rb"(.*):(\d+\.\d+\.\d+\.\d+)$") +_colon_colon_start = re.compile(rb"::.*") +_colon_colon_end = re.compile(rb".*::$") + + +def inet_aton(text: Union[str, bytes], ignore_scope: bool = False) -> bytes: + """Convert an IPv6 address in text form to binary form. + + *text*, a ``str`` or ``bytes``, the IPv6 address in textual form. + + *ignore_scope*, a ``bool``. If ``True``, a scope will be ignored. + If ``False``, the default, it is an error for a scope to be present. + + Returns a ``bytes``. + """ + + # + # Our aim here is not something fast; we just want something that works. + # + if not isinstance(text, bytes): + btext = text.encode() + else: + btext = text + + if ignore_scope: + parts = btext.split(b"%") + l = len(parts) + if l == 2: + btext = parts[0] + elif l > 2: + raise dns.exception.SyntaxError + + if btext == b"": + raise dns.exception.SyntaxError + elif btext.endswith(b":") and not btext.endswith(b"::"): + raise dns.exception.SyntaxError + elif btext.startswith(b":") and not btext.startswith(b"::"): + raise dns.exception.SyntaxError + elif btext == b"::": + btext = b"0::" + # + # Get rid of the icky dot-quad syntax if we have it. + # + m = _v4_ending.match(btext) + if m is not None: + b = dns.ipv4.inet_aton(m.group(2)) + btext = ( + f"{m.group(1).decode()}:{b[0]:02x}{b[1]:02x}:{b[2]:02x}{b[3]:02x}" + ).encode() + # + # Try to turn '::' into ':'; if no match try to + # turn '::' into ':' + # + m = _colon_colon_start.match(btext) + if m is not None: + btext = btext[1:] + else: + m = _colon_colon_end.match(btext) + if m is not None: + btext = btext[:-1] + # + # Now canonicalize into 8 chunks of 4 hex digits each + # + chunks = btext.split(b":") + l = len(chunks) + if l > 8: + raise dns.exception.SyntaxError + seen_empty = False + canonical: List[bytes] = [] + for c in chunks: + if c == b"": + if seen_empty: + raise dns.exception.SyntaxError + seen_empty = True + for _ in range(0, 8 - l + 1): + canonical.append(b"0000") + else: + lc = len(c) + if lc > 4: + raise dns.exception.SyntaxError + if lc != 4: + c = (b"0" * (4 - lc)) + c + canonical.append(c) + if l < 8 and not seen_empty: + raise dns.exception.SyntaxError + btext = b"".join(canonical) + + # + # Finally we can go to binary. 
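+    # (Illustrative: for an input of "::1", btext at this point is
+    # b"0000" * 7 + b"0001", i.e. 32 hex digits describing 16 bytes.)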
+ # + try: + return binascii.unhexlify(btext) + except (binascii.Error, TypeError): + raise dns.exception.SyntaxError + + +_mapped_prefix = b"\x00" * 10 + b"\xff\xff" + + +def is_mapped(address: bytes) -> bool: + """Is the specified address a mapped IPv4 address? + + *address*, a ``bytes`` is an IPv6 address in binary form. + + Returns a ``bool``. + """ + + return address.startswith(_mapped_prefix) + + +def canonicalize(text: Union[str, bytes]) -> str: + """Verify that *address* is a valid text form IPv6 address and return its + canonical text form. Addresses with scopes are rejected. + + *text*, a ``str`` or ``bytes``, the IPv6 address in textual form. + + Raises ``dns.exception.SyntaxError`` if the text is not valid. + """ + return dns.ipv6.inet_ntoa(dns.ipv6.inet_aton(text)) diff --git a/venv/lib/python3.11/site-packages/dns/message.py b/venv/lib/python3.11/site-packages/dns/message.py new file mode 100644 index 0000000..e978a0a --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/message.py @@ -0,0 +1,1933 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Messages""" + +import contextlib +import enum +import io +import time +from typing import Any, Dict, List, Optional, Tuple, Union, cast + +import dns.edns +import dns.entropy +import dns.enum +import dns.exception +import dns.flags +import dns.name +import dns.opcode +import dns.rcode +import dns.rdata +import dns.rdataclass +import dns.rdatatype +import dns.rdtypes.ANY.OPT +import dns.rdtypes.ANY.TSIG +import dns.renderer +import dns.rrset +import dns.tsig +import dns.ttl +import dns.wire + + +class ShortHeader(dns.exception.FormError): + """The DNS packet passed to from_wire() is too short.""" + + +class TrailingJunk(dns.exception.FormError): + """The DNS packet passed to from_wire() has extra junk at the end of it.""" + + +class UnknownHeaderField(dns.exception.DNSException): + """The header field name was not recognized when converting from text + into a message.""" + + +class BadEDNS(dns.exception.FormError): + """An OPT record occurred somewhere other than + the additional data section.""" + + +class BadTSIG(dns.exception.FormError): + """A TSIG record occurred somewhere other than the end of + the additional data section.""" + + +class UnknownTSIGKey(dns.exception.DNSException): + """A TSIG with an unknown key was received.""" + + +class Truncated(dns.exception.DNSException): + """The truncated flag is set.""" + + supp_kwargs = {"message"} + + # We do this as otherwise mypy complains about unexpected keyword argument + # idna_exception + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def message(self): + """As much of the message as could be processed. 
+
+        Returns a ``dns.message.Message``.
+        """
+        return self.kwargs["message"]
+
+
+class NotQueryResponse(dns.exception.DNSException):
+    """Message is not a response to a query."""
+
+
+class ChainTooLong(dns.exception.DNSException):
+    """The CNAME chain is too long."""
+
+
+class AnswerForNXDOMAIN(dns.exception.DNSException):
+    """The rcode is NXDOMAIN but an answer was found."""
+
+
+class NoPreviousName(dns.exception.SyntaxError):
+    """No previous name was known."""
+
+
+class MessageSection(dns.enum.IntEnum):
+    """Message sections"""
+
+    QUESTION = 0
+    ANSWER = 1
+    AUTHORITY = 2
+    ADDITIONAL = 3
+
+    @classmethod
+    def _maximum(cls):
+        return 3
+
+
+class MessageError:
+    def __init__(self, exception: Exception, offset: int):
+        self.exception = exception
+        self.offset = offset
+
+
+DEFAULT_EDNS_PAYLOAD = 1232
+MAX_CHAIN = 16
+
+IndexKeyType = Tuple[
+    int,
+    dns.name.Name,
+    dns.rdataclass.RdataClass,
+    dns.rdatatype.RdataType,
+    Optional[dns.rdatatype.RdataType],
+    Optional[dns.rdataclass.RdataClass],
+]
+IndexType = Dict[IndexKeyType, dns.rrset.RRset]
+SectionType = Union[int, str, List[dns.rrset.RRset]]
+
+
+class Message:
+    """A DNS message."""
+
+    _section_enum = MessageSection
+
+    def __init__(self, id: Optional[int] = None):
+        if id is None:
+            self.id = dns.entropy.random_16()
+        else:
+            self.id = id
+        self.flags = 0
+        self.sections: List[List[dns.rrset.RRset]] = [[], [], [], []]
+        self.opt: Optional[dns.rrset.RRset] = None
+        self.request_payload = 0
+        self.pad = 0
+        self.keyring: Any = None
+        self.tsig: Optional[dns.rrset.RRset] = None
+        self.request_mac = b""
+        self.xfr = False
+        self.origin: Optional[dns.name.Name] = None
+        self.tsig_ctx: Optional[Any] = None
+        self.index: IndexType = {}
+        self.errors: List[MessageError] = []
+        self.time = 0.0
+        self.wire: Optional[bytes] = None
+
+    @property
+    def question(self) -> List[dns.rrset.RRset]:
+        """The question section."""
+        return self.sections[0]
+
+    @question.setter
+    def question(self, v):
+        self.sections[0] = v
+
+    @property
+    def answer(self) -> List[dns.rrset.RRset]:
+        """The answer section."""
+        return self.sections[1]
+
+    @answer.setter
+    def answer(self, v):
+        self.sections[1] = v
+
+    @property
+    def authority(self) -> List[dns.rrset.RRset]:
+        """The authority section."""
+        return self.sections[2]
+
+    @authority.setter
+    def authority(self, v):
+        self.sections[2] = v
+
+    @property
+    def additional(self) -> List[dns.rrset.RRset]:
+        """The additional data section."""
+        return self.sections[3]
+
+    @additional.setter
+    def additional(self, v):
+        self.sections[3] = v
+
+    def __repr__(self):
+        return "<DNS message, ID " + repr(self.id) + ">"
+
+    def __str__(self):
+        return self.to_text()
+
+    def to_text(
+        self,
+        origin: Optional[dns.name.Name] = None,
+        relativize: bool = True,
+        **kw: Dict[str, Any],
+    ) -> str:
+        """Convert the message to text.
+
+        The *origin*, *relativize*, and any other keyword
+        arguments are passed to the RRset ``to_text()`` method.
+
+        Returns a ``str``.
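+
+        An abbreviated, illustrative rendering of a response might be::
+
+            id 1234
+            opcode QUERY
+            rcode NOERROR
+            flags QR RD RA
+            ;QUESTION
+            example. IN A
+            ;ANSWER
+            example. 300 IN A 93.184.216.34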
+ """ + + s = io.StringIO() + s.write("id %d\n" % self.id) + s.write(f"opcode {dns.opcode.to_text(self.opcode())}\n") + s.write(f"rcode {dns.rcode.to_text(self.rcode())}\n") + s.write(f"flags {dns.flags.to_text(self.flags)}\n") + if self.edns >= 0: + s.write(f"edns {self.edns}\n") + if self.ednsflags != 0: + s.write(f"eflags {dns.flags.edns_to_text(self.ednsflags)}\n") + s.write("payload %d\n" % self.payload) + for opt in self.options: + s.write(f"option {opt.to_text()}\n") + for name, which in self._section_enum.__members__.items(): + s.write(f";{name}\n") + for rrset in self.section_from_number(which): + s.write(rrset.to_text(origin, relativize, **kw)) + s.write("\n") + # + # We strip off the final \n so the caller can print the result without + # doing weird things to get around eccentricities in Python print + # formatting + # + return s.getvalue()[:-1] + + def __eq__(self, other): + """Two messages are equal if they have the same content in the + header, question, answer, and authority sections. + + Returns a ``bool``. + """ + + if not isinstance(other, Message): + return False + if self.id != other.id: + return False + if self.flags != other.flags: + return False + for i, section in enumerate(self.sections): + other_section = other.sections[i] + for n in section: + if n not in other_section: + return False + for n in other_section: + if n not in section: + return False + return True + + def __ne__(self, other): + return not self.__eq__(other) + + def is_response(self, other: "Message") -> bool: + """Is *other*, also a ``dns.message.Message``, a response to this + message? + + Returns a ``bool``. + """ + + if ( + other.flags & dns.flags.QR == 0 + or self.id != other.id + or dns.opcode.from_flags(self.flags) != dns.opcode.from_flags(other.flags) + ): + return False + if other.rcode() in { + dns.rcode.FORMERR, + dns.rcode.SERVFAIL, + dns.rcode.NOTIMP, + dns.rcode.REFUSED, + }: + # We don't check the question section in these cases if + # the other question section is empty, even though they + # still really ought to have a question section. + if len(other.question) == 0: + return True + if dns.opcode.is_update(self.flags): + # This is assuming the "sender doesn't include anything + # from the update", but we don't care to check the other + # case, which is that all the sections are returned and + # identical. + return True + for n in self.question: + if n not in other.question: + return False + for n in other.question: + if n not in self.question: + return False + return True + + def section_number(self, section: List[dns.rrset.RRset]) -> int: + """Return the "section number" of the specified section for use + in indexing. + + *section* is one of the section attributes of this message. + + Raises ``ValueError`` if the section isn't known. + + Returns an ``int``. + """ + + for i, our_section in enumerate(self.sections): + if section is our_section: + return self._section_enum(i) + raise ValueError("unknown section") + + def section_from_number(self, number: int) -> List[dns.rrset.RRset]: + """Return the section list associated with the specified section + number. + + *number* is a section number `int` or the text form of a section + name. + + Raises ``ValueError`` if the section isn't known. + + Returns a ``list``. 
+ """ + + section = self._section_enum.make(number) + return self.sections[section] + + def find_rrset( + self, + section: SectionType, + name: dns.name.Name, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + deleting: Optional[dns.rdataclass.RdataClass] = None, + create: bool = False, + force_unique: bool = False, + idna_codec: Optional[dns.name.IDNACodec] = None, + ) -> dns.rrset.RRset: + """Find the RRset with the given attributes in the specified section. + + *section*, an ``int`` section number, a ``str`` section name, or one of + the section attributes of this message. This specifies the + the section of the message to search. For example:: + + my_message.find_rrset(my_message.answer, name, rdclass, rdtype) + my_message.find_rrset(dns.message.ANSWER, name, rdclass, rdtype) + my_message.find_rrset("ANSWER", name, rdclass, rdtype) + + *name*, a ``dns.name.Name`` or ``str``, the name of the RRset. + + *rdclass*, an ``int`` or ``str``, the class of the RRset. + + *rdtype*, an ``int`` or ``str``, the type of the RRset. + + *covers*, an ``int`` or ``str``, the covers value of the RRset. + The default is ``dns.rdatatype.NONE``. + + *deleting*, an ``int``, ``str``, or ``None``, the deleting value of the + RRset. The default is ``None``. + + *create*, a ``bool``. If ``True``, create the RRset if it is not found. + The created RRset is appended to *section*. + + *force_unique*, a ``bool``. If ``True`` and *create* is also ``True``, + create a new RRset regardless of whether a matching RRset exists + already. The default is ``False``. This is useful when creating + DDNS Update messages, as order matters for them. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + Raises ``KeyError`` if the RRset was not found and create was + ``False``. + + Returns a ``dns.rrset.RRset object``. 
+ """ + + if isinstance(section, int): + section_number = section + section = self.section_from_number(section_number) + elif isinstance(section, str): + section_number = self._section_enum.from_text(section) + section = self.section_from_number(section_number) + else: + section_number = self.section_number(section) + if isinstance(name, str): + name = dns.name.from_text(name, idna_codec=idna_codec) + rdtype = dns.rdatatype.RdataType.make(rdtype) + rdclass = dns.rdataclass.RdataClass.make(rdclass) + covers = dns.rdatatype.RdataType.make(covers) + if deleting is not None: + deleting = dns.rdataclass.RdataClass.make(deleting) + key = (section_number, name, rdclass, rdtype, covers, deleting) + if not force_unique: + if self.index is not None: + rrset = self.index.get(key) + if rrset is not None: + return rrset + else: + for rrset in section: + if rrset.full_match(name, rdclass, rdtype, covers, deleting): + return rrset + if not create: + raise KeyError + rrset = dns.rrset.RRset(name, rdclass, rdtype, covers, deleting) + section.append(rrset) + if self.index is not None: + self.index[key] = rrset + return rrset + + def get_rrset( + self, + section: SectionType, + name: dns.name.Name, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + deleting: Optional[dns.rdataclass.RdataClass] = None, + create: bool = False, + force_unique: bool = False, + idna_codec: Optional[dns.name.IDNACodec] = None, + ) -> Optional[dns.rrset.RRset]: + """Get the RRset with the given attributes in the specified section. + + If the RRset is not found, None is returned. + + *section*, an ``int`` section number, a ``str`` section name, or one of + the section attributes of this message. This specifies the + the section of the message to search. For example:: + + my_message.get_rrset(my_message.answer, name, rdclass, rdtype) + my_message.get_rrset(dns.message.ANSWER, name, rdclass, rdtype) + my_message.get_rrset("ANSWER", name, rdclass, rdtype) + + *name*, a ``dns.name.Name`` or ``str``, the name of the RRset. + + *rdclass*, an ``int`` or ``str``, the class of the RRset. + + *rdtype*, an ``int`` or ``str``, the type of the RRset. + + *covers*, an ``int`` or ``str``, the covers value of the RRset. + The default is ``dns.rdatatype.NONE``. + + *deleting*, an ``int``, ``str``, or ``None``, the deleting value of the + RRset. The default is ``None``. + + *create*, a ``bool``. If ``True``, create the RRset if it is not found. + The created RRset is appended to *section*. + + *force_unique*, a ``bool``. If ``True`` and *create* is also ``True``, + create a new RRset regardless of whether a matching RRset exists + already. The default is ``False``. This is useful when creating + DDNS Update messages, as order matters for them. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + Returns a ``dns.rrset.RRset object`` or ``None``. + """ + + try: + rrset = self.find_rrset( + section, + name, + rdclass, + rdtype, + covers, + deleting, + create, + force_unique, + idna_codec, + ) + except KeyError: + rrset = None + return rrset + + def section_count(self, section: SectionType) -> int: + """Returns the number of records in the specified section. + + *section*, an ``int`` section number, a ``str`` section name, or one of + the section attributes of this message. This specifies the + the section of the message to count. 
For example:: + + my_message.section_count(my_message.answer) + my_message.section_count(dns.message.ANSWER) + my_message.section_count("ANSWER") + """ + + if isinstance(section, int): + section_number = section + section = self.section_from_number(section_number) + elif isinstance(section, str): + section_number = self._section_enum.from_text(section) + section = self.section_from_number(section_number) + else: + section_number = self.section_number(section) + count = sum(max(1, len(rrs)) for rrs in section) + if section_number == MessageSection.ADDITIONAL: + if self.opt is not None: + count += 1 + if self.tsig is not None: + count += 1 + return count + + def _compute_opt_reserve(self) -> int: + """Compute the size required for the OPT RR, padding excluded""" + if not self.opt: + return 0 + # 1 byte for the root name, 10 for the standard RR fields + size = 11 + # This would be more efficient if options had a size() method, but we won't + # worry about that for now. We also don't worry if there is an existing padding + # option, as it is unlikely and probably harmless, as the worst case is that we + # may add another, and this seems to be legal. + for option in self.opt[0].options: + wire = option.to_wire() + # We add 4 here to account for the option type and length + size += len(wire) + 4 + if self.pad: + # Padding will be added, so again add the option type and length. + size += 4 + return size + + def _compute_tsig_reserve(self) -> int: + """Compute the size required for the TSIG RR""" + # This would be more efficient if TSIGs had a size method, but we won't + # worry about for now. Also, we can't really cope with the potential + # compressibility of the TSIG owner name, so we estimate with the uncompressed + # size. We will disable compression when TSIG and padding are both is active + # so that the padding comes out right. + if not self.tsig: + return 0 + f = io.BytesIO() + self.tsig.to_wire(f) + return len(f.getvalue()) + + def to_wire( + self, + origin: Optional[dns.name.Name] = None, + max_size: int = 0, + multi: bool = False, + tsig_ctx: Optional[Any] = None, + prepend_length: bool = False, + prefer_truncation: bool = False, + **kw: Dict[str, Any], + ) -> bytes: + """Return a string containing the message in DNS compressed wire + format. + + Additional keyword arguments are passed to the RRset ``to_wire()`` + method. + + *origin*, a ``dns.name.Name`` or ``None``, the origin to be appended + to any relative names. If ``None``, and the message has an origin + attribute that is not ``None``, then it will be used. + + *max_size*, an ``int``, the maximum size of the wire format + output; default is 0, which means "the message's request + payload, if nonzero, or 65535". + + *multi*, a ``bool``, should be set to ``True`` if this message is + part of a multiple message sequence. + + *tsig_ctx*, a ``dns.tsig.HMACTSig`` or ``dns.tsig.GSSTSig`` object, the + ongoing TSIG context, used when signing zone transfers. + + *prepend_length*, a ``bool``, should be set to ``True`` if the caller + wants the message length prepended to the message itself. This is + useful for messages sent over TCP, TLS (DoT), or QUIC (DoQ). + + *prefer_truncation*, a ``bool``, should be set to ``True`` if the caller + wants the message to be truncated if it would otherwise exceed the + maximum length. If the truncation occurs before the additional section, + the TC bit will be set. + + Raises ``dns.exception.TooBig`` if *max_size* was exceeded. + + Returns a ``bytes``. 
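+
+        An illustrative call for a stream transport such as TCP::
+
+            wire = msg.to_wire(prepend_length=True)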
+ """ + + if origin is None and self.origin is not None: + origin = self.origin + if max_size == 0: + if self.request_payload != 0: + max_size = self.request_payload + else: + max_size = 65535 + if max_size < 512: + max_size = 512 + elif max_size > 65535: + max_size = 65535 + r = dns.renderer.Renderer(self.id, self.flags, max_size, origin) + opt_reserve = self._compute_opt_reserve() + r.reserve(opt_reserve) + tsig_reserve = self._compute_tsig_reserve() + r.reserve(tsig_reserve) + try: + for rrset in self.question: + r.add_question(rrset.name, rrset.rdtype, rrset.rdclass) + for rrset in self.answer: + r.add_rrset(dns.renderer.ANSWER, rrset, **kw) + for rrset in self.authority: + r.add_rrset(dns.renderer.AUTHORITY, rrset, **kw) + for rrset in self.additional: + r.add_rrset(dns.renderer.ADDITIONAL, rrset, **kw) + except dns.exception.TooBig: + if prefer_truncation: + if r.section < dns.renderer.ADDITIONAL: + r.flags |= dns.flags.TC + else: + raise + r.release_reserved() + if self.opt is not None: + r.add_opt(self.opt, self.pad, opt_reserve, tsig_reserve) + r.write_header() + if self.tsig is not None: + (new_tsig, ctx) = dns.tsig.sign( + r.get_wire(), + self.keyring, + self.tsig[0], + int(time.time()), + self.request_mac, + tsig_ctx, + multi, + ) + self.tsig.clear() + self.tsig.add(new_tsig) + r.add_rrset(dns.renderer.ADDITIONAL, self.tsig) + r.write_header() + if multi: + self.tsig_ctx = ctx + wire = r.get_wire() + self.wire = wire + if prepend_length: + wire = len(wire).to_bytes(2, "big") + wire + return wire + + @staticmethod + def _make_tsig( + keyname, algorithm, time_signed, fudge, mac, original_id, error, other + ): + tsig = dns.rdtypes.ANY.TSIG.TSIG( + dns.rdataclass.ANY, + dns.rdatatype.TSIG, + algorithm, + time_signed, + fudge, + mac, + original_id, + error, + other, + ) + return dns.rrset.from_rdata(keyname, 0, tsig) + + def use_tsig( + self, + keyring: Any, + keyname: Optional[Union[dns.name.Name, str]] = None, + fudge: int = 300, + original_id: Optional[int] = None, + tsig_error: int = 0, + other_data: bytes = b"", + algorithm: Union[dns.name.Name, str] = dns.tsig.default_algorithm, + ) -> None: + """When sending, a TSIG signature using the specified key + should be added. + + *key*, a ``dns.tsig.Key`` is the key to use. If a key is specified, + the *keyring* and *algorithm* fields are not used. + + *keyring*, a ``dict``, ``callable`` or ``dns.tsig.Key``, is either + the TSIG keyring or key to use. + + The format of a keyring dict is a mapping from TSIG key name, as + ``dns.name.Name`` to ``dns.tsig.Key`` or a TSIG secret, a ``bytes``. + If a ``dict`` *keyring* is specified but a *keyname* is not, the key + used will be the first key in the *keyring*. Note that the order of + keys in a dictionary is not defined, so applications should supply a + keyname when a ``dict`` keyring is used, unless they know the keyring + contains only one key. If a ``callable`` keyring is specified, the + callable will be called with the message and the keyname, and is + expected to return a key. + + *keyname*, a ``dns.name.Name``, ``str`` or ``None``, the name of + this TSIG key to use; defaults to ``None``. If *keyring* is a + ``dict``, the key must be defined in it. If *keyring* is a + ``dns.tsig.Key``, this is ignored. + + *fudge*, an ``int``, the TSIG time fudge. + + *original_id*, an ``int``, the TSIG original id. If ``None``, + the message's id is used. + + *tsig_error*, an ``int``, the TSIG error code. + + *other_data*, a ``bytes``, the TSIG other data. 
+ + *algorithm*, a ``dns.name.Name`` or ``str``, the TSIG algorithm to use. This is + only used if *keyring* is a ``dict``, and the key entry is a ``bytes``. + """ + + if isinstance(keyring, dns.tsig.Key): + key = keyring + keyname = key.name + elif callable(keyring): + key = keyring(self, keyname) + else: + if isinstance(keyname, str): + keyname = dns.name.from_text(keyname) + if keyname is None: + keyname = next(iter(keyring)) + key = keyring[keyname] + if isinstance(key, bytes): + key = dns.tsig.Key(keyname, key, algorithm) + self.keyring = key + if original_id is None: + original_id = self.id + self.tsig = self._make_tsig( + keyname, + self.keyring.algorithm, + 0, + fudge, + b"\x00" * dns.tsig.mac_sizes[self.keyring.algorithm], + original_id, + tsig_error, + other_data, + ) + + @property + def keyname(self) -> Optional[dns.name.Name]: + if self.tsig: + return self.tsig.name + else: + return None + + @property + def keyalgorithm(self) -> Optional[dns.name.Name]: + if self.tsig: + return self.tsig[0].algorithm + else: + return None + + @property + def mac(self) -> Optional[bytes]: + if self.tsig: + return self.tsig[0].mac + else: + return None + + @property + def tsig_error(self) -> Optional[int]: + if self.tsig: + return self.tsig[0].error + else: + return None + + @property + def had_tsig(self) -> bool: + return bool(self.tsig) + + @staticmethod + def _make_opt(flags=0, payload=DEFAULT_EDNS_PAYLOAD, options=None): + opt = dns.rdtypes.ANY.OPT.OPT(payload, dns.rdatatype.OPT, options or ()) + return dns.rrset.from_rdata(dns.name.root, int(flags), opt) + + def use_edns( + self, + edns: Optional[Union[int, bool]] = 0, + ednsflags: int = 0, + payload: int = DEFAULT_EDNS_PAYLOAD, + request_payload: Optional[int] = None, + options: Optional[List[dns.edns.Option]] = None, + pad: int = 0, + ) -> None: + """Configure EDNS behavior. + + *edns*, an ``int``, is the EDNS level to use. Specifying ``None``, ``False``, + or ``-1`` means "do not use EDNS", and in this case the other parameters are + ignored. Specifying ``True`` is equivalent to specifying 0, i.e. "use EDNS0". + + *ednsflags*, an ``int``, the EDNS flag values. + + *payload*, an ``int``, is the EDNS sender's payload field, which is the maximum + size of UDP datagram the sender can handle. I.e. how big a response to this + message can be. + + *request_payload*, an ``int``, is the EDNS payload size to use when sending this + message. If not specified, defaults to the value of *payload*. + + *options*, a list of ``dns.edns.Option`` objects or ``None``, the EDNS options. + + *pad*, a non-negative ``int``. If 0, the default, do not pad; otherwise add + padding bytes to make the message size a multiple of *pad*. Note that if + padding is non-zero, an EDNS PADDING option will always be added to the + message. 
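+
+        An illustrative call enabling EDNS0 with a 1232-byte payload::
+
+            msg.use_edns(0, payload=1232)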
+ """ + + if edns is None or edns is False: + edns = -1 + elif edns is True: + edns = 0 + if edns < 0: + self.opt = None + self.request_payload = 0 + else: + # make sure the EDNS version in ednsflags agrees with edns + ednsflags &= 0xFF00FFFF + ednsflags |= edns << 16 + if options is None: + options = [] + self.opt = self._make_opt(ednsflags, payload, options) + if request_payload is None: + request_payload = payload + self.request_payload = request_payload + if pad < 0: + raise ValueError("pad must be non-negative") + self.pad = pad + + @property + def edns(self) -> int: + if self.opt: + return (self.ednsflags & 0xFF0000) >> 16 + else: + return -1 + + @property + def ednsflags(self) -> int: + if self.opt: + return self.opt.ttl + else: + return 0 + + @ednsflags.setter + def ednsflags(self, v): + if self.opt: + self.opt.ttl = v + elif v: + self.opt = self._make_opt(v) + + @property + def payload(self) -> int: + if self.opt: + return self.opt[0].payload + else: + return 0 + + @property + def options(self) -> Tuple: + if self.opt: + return self.opt[0].options + else: + return () + + def want_dnssec(self, wanted: bool = True) -> None: + """Enable or disable 'DNSSEC desired' flag in requests. + + *wanted*, a ``bool``. If ``True``, then DNSSEC data is + desired in the response, EDNS is enabled if required, and then + the DO bit is set. If ``False``, the DO bit is cleared if + EDNS is enabled. + """ + + if wanted: + self.ednsflags |= dns.flags.DO + elif self.opt: + self.ednsflags &= ~int(dns.flags.DO) + + def rcode(self) -> dns.rcode.Rcode: + """Return the rcode. + + Returns a ``dns.rcode.Rcode``. + """ + return dns.rcode.from_flags(int(self.flags), int(self.ednsflags)) + + def set_rcode(self, rcode: dns.rcode.Rcode) -> None: + """Set the rcode. + + *rcode*, a ``dns.rcode.Rcode``, is the rcode to set. + """ + (value, evalue) = dns.rcode.to_flags(rcode) + self.flags &= 0xFFF0 + self.flags |= value + self.ednsflags &= 0x00FFFFFF + self.ednsflags |= evalue + + def opcode(self) -> dns.opcode.Opcode: + """Return the opcode. + + Returns a ``dns.opcode.Opcode``. + """ + return dns.opcode.from_flags(int(self.flags)) + + def set_opcode(self, opcode: dns.opcode.Opcode) -> None: + """Set the opcode. + + *opcode*, a ``dns.opcode.Opcode``, is the opcode to set. + """ + self.flags &= 0x87FF + self.flags |= dns.opcode.to_flags(opcode) + + def get_options(self, otype: dns.edns.OptionType) -> List[dns.edns.Option]: + """Return the list of options of the specified type.""" + return [option for option in self.options if option.otype == otype] + + def extended_errors(self) -> List[dns.edns.EDEOption]: + """Return the list of Extended DNS Error (EDE) options in the message""" + return cast(List[dns.edns.EDEOption], self.get_options(dns.edns.OptionType.EDE)) + + def _get_one_rr_per_rrset(self, value): + # What the caller picked is fine. 
+ return value + + # pylint: disable=unused-argument + + def _parse_rr_header(self, section, name, rdclass, rdtype): + return (rdclass, rdtype, None, False) + + # pylint: enable=unused-argument + + def _parse_special_rr_header(self, section, count, position, name, rdclass, rdtype): + if rdtype == dns.rdatatype.OPT: + if ( + section != MessageSection.ADDITIONAL + or self.opt + or name != dns.name.root + ): + raise BadEDNS + elif rdtype == dns.rdatatype.TSIG: + if ( + section != MessageSection.ADDITIONAL + or rdclass != dns.rdatatype.ANY + or position != count - 1 + ): + raise BadTSIG + return (rdclass, rdtype, None, False) + + +class ChainingResult: + """The result of a call to dns.message.QueryMessage.resolve_chaining(). + + The ``answer`` attribute is the answer RRSet, or ``None`` if it doesn't + exist. + + The ``canonical_name`` attribute is the canonical name after all + chaining has been applied (this is the same name as ``rrset.name`` in cases + where rrset is not ``None``). + + The ``minimum_ttl`` attribute is the minimum TTL, i.e. the TTL to + use if caching the data. It is the smallest of all the CNAME TTLs + and either the answer TTL if it exists or the SOA TTL and SOA + minimum values for negative answers. + + The ``cnames`` attribute is a list of all the CNAME RRSets followed to + get to the canonical name. + """ + + def __init__( + self, + canonical_name: dns.name.Name, + answer: Optional[dns.rrset.RRset], + minimum_ttl: int, + cnames: List[dns.rrset.RRset], + ): + self.canonical_name = canonical_name + self.answer = answer + self.minimum_ttl = minimum_ttl + self.cnames = cnames + + +class QueryMessage(Message): + def resolve_chaining(self) -> ChainingResult: + """Follow the CNAME chain in the response to determine the answer + RRset. + + Raises ``dns.message.NotQueryResponse`` if the message is not + a response. + + Raises ``dns.message.ChainTooLong`` if the CNAME chain is too long. + + Raises ``dns.message.AnswerForNXDOMAIN`` if the rcode is NXDOMAIN + but an answer was found. + + Raises ``dns.exception.FormError`` if the question count is not 1. + + Returns a ChainingResult object. + """ + if self.flags & dns.flags.QR == 0: + raise NotQueryResponse + if len(self.question) != 1: + raise dns.exception.FormError + question = self.question[0] + qname = question.name + min_ttl = dns.ttl.MAX_TTL + answer = None + count = 0 + cnames = [] + while count < MAX_CHAIN: + try: + answer = self.find_rrset( + self.answer, qname, question.rdclass, question.rdtype + ) + min_ttl = min(min_ttl, answer.ttl) + break + except KeyError: + if question.rdtype != dns.rdatatype.CNAME: + try: + crrset = self.find_rrset( + self.answer, qname, question.rdclass, dns.rdatatype.CNAME + ) + cnames.append(crrset) + min_ttl = min(min_ttl, crrset.ttl) + for rd in crrset: + qname = rd.target + break + count += 1 + continue + except KeyError: + # Exit the chaining loop + break + else: + # Exit the chaining loop + break + if count >= MAX_CHAIN: + raise ChainTooLong + if self.rcode() == dns.rcode.NXDOMAIN and answer is not None: + raise AnswerForNXDOMAIN + if answer is None: + # Further minimize the TTL with NCACHE. + auname = qname + while True: + # Look for an SOA RR whose owner name is a superdomain + # of qname. 
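+                # (e.g. for a qname of "a.b.example." this tries
+                # "a.b.example.", then "b.example.", then "example.",
+                # and so on)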
+ try: + srrset = self.find_rrset( + self.authority, auname, question.rdclass, dns.rdatatype.SOA + ) + min_ttl = min(min_ttl, srrset.ttl, srrset[0].minimum) + break + except KeyError: + try: + auname = auname.parent() + except dns.name.NoParent: + break + return ChainingResult(qname, answer, min_ttl, cnames) + + def canonical_name(self) -> dns.name.Name: + """Return the canonical name of the first name in the question + section. + + Raises ``dns.message.NotQueryResponse`` if the message is not + a response. + + Raises ``dns.message.ChainTooLong`` if the CNAME chain is too long. + + Raises ``dns.message.AnswerForNXDOMAIN`` if the rcode is NXDOMAIN + but an answer was found. + + Raises ``dns.exception.FormError`` if the question count is not 1. + """ + return self.resolve_chaining().canonical_name + + +def _maybe_import_update(): + # We avoid circular imports by doing this here. We do it in another + # function as doing it in _message_factory_from_opcode() makes "dns" + # a local symbol, and the first line fails :) + + # pylint: disable=redefined-outer-name,import-outside-toplevel,unused-import + import dns.update # noqa: F401 + + +def _message_factory_from_opcode(opcode): + if opcode == dns.opcode.QUERY: + return QueryMessage + elif opcode == dns.opcode.UPDATE: + _maybe_import_update() + return dns.update.UpdateMessage + else: + return Message + + +class _WireReader: + """Wire format reader. + + parser: the binary parser + message: The message object being built + initialize_message: Callback to set message parsing options + question_only: Are we only reading the question? + one_rr_per_rrset: Put each RR into its own RRset? + keyring: TSIG keyring + ignore_trailing: Ignore trailing junk at end of request? + multi: Is this message part of a multi-message sequence? + DNS dynamic updates. + continue_on_error: try to extract as much information as possible from + the message, accumulating MessageErrors in the *errors* attribute instead of + raising them. + """ + + def __init__( + self, + wire, + initialize_message, + question_only=False, + one_rr_per_rrset=False, + ignore_trailing=False, + keyring=None, + multi=False, + continue_on_error=False, + ): + self.parser = dns.wire.Parser(wire) + self.message = None + self.initialize_message = initialize_message + self.question_only = question_only + self.one_rr_per_rrset = one_rr_per_rrset + self.ignore_trailing = ignore_trailing + self.keyring = keyring + self.multi = multi + self.continue_on_error = continue_on_error + self.errors = [] + + def _get_question(self, section_number, qcount): + """Read the next *qcount* records from the wire data and add them to + the question section. + """ + assert self.message is not None + section = self.message.sections[section_number] + for _ in range(qcount): + qname = self.parser.get_name(self.message.origin) + (rdtype, rdclass) = self.parser.get_struct("!HH") + (rdclass, rdtype, _, _) = self.message._parse_rr_header( + section_number, qname, rdclass, rdtype + ) + self.message.find_rrset( + section, qname, rdclass, rdtype, create=True, force_unique=True + ) + + def _add_error(self, e): + self.errors.append(MessageError(e, self.parser.current)) + + def _get_section(self, section_number, count): + """Read the next I{count} records from the wire data and add them to + the specified section. 
+ + section_number: the section of the message to which to add records + count: the number of records to read + """ + assert self.message is not None + section = self.message.sections[section_number] + force_unique = self.one_rr_per_rrset + for i in range(count): + rr_start = self.parser.current + absolute_name = self.parser.get_name() + if self.message.origin is not None: + name = absolute_name.relativize(self.message.origin) + else: + name = absolute_name + (rdtype, rdclass, ttl, rdlen) = self.parser.get_struct("!HHIH") + if rdtype in (dns.rdatatype.OPT, dns.rdatatype.TSIG): + ( + rdclass, + rdtype, + deleting, + empty, + ) = self.message._parse_special_rr_header( + section_number, count, i, name, rdclass, rdtype + ) + else: + (rdclass, rdtype, deleting, empty) = self.message._parse_rr_header( + section_number, name, rdclass, rdtype + ) + rdata_start = self.parser.current + try: + if empty: + if rdlen > 0: + raise dns.exception.FormError + rd = None + covers = dns.rdatatype.NONE + else: + with self.parser.restrict_to(rdlen): + rd = dns.rdata.from_wire_parser( + rdclass, rdtype, self.parser, self.message.origin + ) + covers = rd.covers() + if self.message.xfr and rdtype == dns.rdatatype.SOA: + force_unique = True + if rdtype == dns.rdatatype.OPT: + self.message.opt = dns.rrset.from_rdata(name, ttl, rd) + elif rdtype == dns.rdatatype.TSIG: + if self.keyring is None or self.keyring is True: + raise UnknownTSIGKey("got signed message without keyring") + elif isinstance(self.keyring, dict): + key = self.keyring.get(absolute_name) + if isinstance(key, bytes): + key = dns.tsig.Key(absolute_name, key, rd.algorithm) + elif callable(self.keyring): + key = self.keyring(self.message, absolute_name) + else: + key = self.keyring + if key is None: + raise UnknownTSIGKey(f"key '{name}' unknown") + if key: + self.message.keyring = key + self.message.tsig_ctx = dns.tsig.validate( + self.parser.wire, + key, + absolute_name, + rd, + int(time.time()), + self.message.request_mac, + rr_start, + self.message.tsig_ctx, + self.multi, + ) + self.message.tsig = dns.rrset.from_rdata(absolute_name, 0, rd) + else: + rrset = self.message.find_rrset( + section, + name, + rdclass, + rdtype, + covers, + deleting, + True, + force_unique, + ) + if rd is not None: + if ttl > 0x7FFFFFFF: + ttl = 0 + rrset.add(rd, ttl) + except Exception as e: + if self.continue_on_error: + self._add_error(e) + self.parser.seek(rdata_start + rdlen) + else: + raise + + def read(self): + """Read a wire format DNS message and build a dns.message.Message + object.""" + + if self.parser.remaining() < 12: + raise ShortHeader + (id, flags, qcount, ancount, aucount, adcount) = self.parser.get_struct( + "!HHHHHH" + ) + factory = _message_factory_from_opcode(dns.opcode.from_flags(flags)) + self.message = factory(id=id) + self.message.flags = dns.flags.Flag(flags) + self.message.wire = self.parser.wire + self.initialize_message(self.message) + self.one_rr_per_rrset = self.message._get_one_rr_per_rrset( + self.one_rr_per_rrset + ) + try: + self._get_question(MessageSection.QUESTION, qcount) + if self.question_only: + return self.message + self._get_section(MessageSection.ANSWER, ancount) + self._get_section(MessageSection.AUTHORITY, aucount) + self._get_section(MessageSection.ADDITIONAL, adcount) + if not self.ignore_trailing and self.parser.remaining() != 0: + raise TrailingJunk + if self.multi and self.message.tsig_ctx and not self.message.had_tsig: + self.message.tsig_ctx.update(self.parser.wire) + except Exception as e: + if self.continue_on_error: + 
self._add_error(e) + else: + raise + return self.message + + +def from_wire( + wire: bytes, + keyring: Optional[Any] = None, + request_mac: Optional[bytes] = b"", + xfr: bool = False, + origin: Optional[dns.name.Name] = None, + tsig_ctx: Optional[Union[dns.tsig.HMACTSig, dns.tsig.GSSTSig]] = None, + multi: bool = False, + question_only: bool = False, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + raise_on_truncation: bool = False, + continue_on_error: bool = False, +) -> Message: + """Convert a DNS wire format message into a message object. + + *keyring*, a ``dns.tsig.Key``, ``dict``, ``bool``, or ``None``, the key or keyring + to use if the message is signed. If ``None`` or ``True``, then trying to decode + a message with a TSIG will fail as it cannot be validated. If ``False``, then + TSIG validation is disabled. + + *request_mac*, a ``bytes`` or ``None``. If the message is a response to a + TSIG-signed request, *request_mac* should be set to the MAC of that request. + + *xfr*, a ``bool``, should be set to ``True`` if this message is part of a zone + transfer. + + *origin*, a ``dns.name.Name`` or ``None``. If the message is part of a zone + transfer, *origin* should be the origin name of the zone. If not ``None``, names + will be relativized to the origin. + + *tsig_ctx*, a ``dns.tsig.HMACTSig`` or ``dns.tsig.GSSTSig`` object, the ongoing TSIG + context, used when validating zone transfers. + + *multi*, a ``bool``, should be set to ``True`` if this message is part of a multiple + message sequence. + + *question_only*, a ``bool``. If ``True``, read only up to the end of the question + section. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing junk at end of the + message. + + *raise_on_truncation*, a ``bool``. If ``True``, raise an exception if the TC bit is + set. + + *continue_on_error*, a ``bool``. If ``True``, try to continue parsing even if + errors occur. Erroneous rdata will be ignored. Errors will be accumulated as a + list of MessageError objects in the message's ``errors`` attribute. This option is + recommended only for DNS analysis tools, or for use in a server as part of an error + handling path. The default is ``False``. + + Raises ``dns.message.ShortHeader`` if the message is less than 12 octets long. + + Raises ``dns.message.TrailingJunk`` if there were octets in the message past the end + of the proper DNS message, and *ignore_trailing* is ``False``. + + Raises ``dns.message.BadEDNS`` if an OPT record was in the wrong section, or + occurred more than once. + + Raises ``dns.message.BadTSIG`` if a TSIG record was not the last record of the + additional data section. + + Raises ``dns.message.Truncated`` if the TC flag is set and *raise_on_truncation* is + ``True``. + + Returns a ``dns.message.Message``. 
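+
+    A minimal usage sketch (it assumes *wire* already holds a complete DNS
+    response read from a socket):
+
+        msg = dns.message.from_wire(wire)
+        for rrset in msg.answer:
+            print(rrset)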
+ """ + + # We permit None for request_mac solely for backwards compatibility + if request_mac is None: + request_mac = b"" + + def initialize_message(message): + message.request_mac = request_mac + message.xfr = xfr + message.origin = origin + message.tsig_ctx = tsig_ctx + + reader = _WireReader( + wire, + initialize_message, + question_only, + one_rr_per_rrset, + ignore_trailing, + keyring, + multi, + continue_on_error, + ) + try: + m = reader.read() + except dns.exception.FormError: + if ( + reader.message + and (reader.message.flags & dns.flags.TC) + and raise_on_truncation + ): + raise Truncated(message=reader.message) + else: + raise + # Reading a truncated message might not have any errors, so we + # have to do this check here too. + if m.flags & dns.flags.TC and raise_on_truncation: + raise Truncated(message=m) + if continue_on_error: + m.errors = reader.errors + + return m + + +class _TextReader: + """Text format reader. + + tok: the tokenizer. + message: The message object being built. + DNS dynamic updates. + last_name: The most recently read name when building a message object. + one_rr_per_rrset: Put each RR into its own RRset? + origin: The origin for relative names + relativize: relativize names? + relativize_to: the origin to relativize to. + """ + + def __init__( + self, + text, + idna_codec, + one_rr_per_rrset=False, + origin=None, + relativize=True, + relativize_to=None, + ): + self.message = None + self.tok = dns.tokenizer.Tokenizer(text, idna_codec=idna_codec) + self.last_name = None + self.one_rr_per_rrset = one_rr_per_rrset + self.origin = origin + self.relativize = relativize + self.relativize_to = relativize_to + self.id = None + self.edns = -1 + self.ednsflags = 0 + self.payload = DEFAULT_EDNS_PAYLOAD + self.rcode = None + self.opcode = dns.opcode.QUERY + self.flags = 0 + + def _header_line(self, _): + """Process one line from the text format header section.""" + + token = self.tok.get() + what = token.value + if what == "id": + self.id = self.tok.get_int() + elif what == "flags": + while True: + token = self.tok.get() + if not token.is_identifier(): + self.tok.unget(token) + break + self.flags = self.flags | dns.flags.from_text(token.value) + elif what == "edns": + self.edns = self.tok.get_int() + self.ednsflags = self.ednsflags | (self.edns << 16) + elif what == "eflags": + if self.edns < 0: + self.edns = 0 + while True: + token = self.tok.get() + if not token.is_identifier(): + self.tok.unget(token) + break + self.ednsflags = self.ednsflags | dns.flags.edns_from_text(token.value) + elif what == "payload": + self.payload = self.tok.get_int() + if self.edns < 0: + self.edns = 0 + elif what == "opcode": + text = self.tok.get_string() + self.opcode = dns.opcode.from_text(text) + self.flags = self.flags | dns.opcode.to_flags(self.opcode) + elif what == "rcode": + text = self.tok.get_string() + self.rcode = dns.rcode.from_text(text) + else: + raise UnknownHeaderField + self.tok.get_eol() + + def _question_line(self, section_number): + """Process one line from the text format question section.""" + + section = self.message.sections[section_number] + token = self.tok.get(want_leading=True) + if not token.is_whitespace(): + self.last_name = self.tok.as_name( + token, self.message.origin, self.relativize, self.relativize_to + ) + name = self.last_name + if name is None: + raise NoPreviousName + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + # Class + try: + rdclass = dns.rdataclass.from_text(token.value) + token = 
self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except dns.exception.SyntaxError: + raise dns.exception.SyntaxError + except Exception: + rdclass = dns.rdataclass.IN + # Type + rdtype = dns.rdatatype.from_text(token.value) + (rdclass, rdtype, _, _) = self.message._parse_rr_header( + section_number, name, rdclass, rdtype + ) + self.message.find_rrset( + section, name, rdclass, rdtype, create=True, force_unique=True + ) + self.tok.get_eol() + + def _rr_line(self, section_number): + """Process one line from the text format answer, authority, or + additional data sections. + """ + + section = self.message.sections[section_number] + # Name + token = self.tok.get(want_leading=True) + if not token.is_whitespace(): + self.last_name = self.tok.as_name( + token, self.message.origin, self.relativize, self.relativize_to + ) + name = self.last_name + if name is None: + raise NoPreviousName + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + # TTL + try: + ttl = int(token.value, 0) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except dns.exception.SyntaxError: + raise dns.exception.SyntaxError + except Exception: + ttl = 0 + # Class + try: + rdclass = dns.rdataclass.from_text(token.value) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except dns.exception.SyntaxError: + raise dns.exception.SyntaxError + except Exception: + rdclass = dns.rdataclass.IN + # Type + rdtype = dns.rdatatype.from_text(token.value) + (rdclass, rdtype, deleting, empty) = self.message._parse_rr_header( + section_number, name, rdclass, rdtype + ) + token = self.tok.get() + if empty and not token.is_eol_or_eof(): + raise dns.exception.SyntaxError + if not empty and token.is_eol_or_eof(): + raise dns.exception.UnexpectedEnd + if not token.is_eol_or_eof(): + self.tok.unget(token) + rd = dns.rdata.from_text( + rdclass, + rdtype, + self.tok, + self.message.origin, + self.relativize, + self.relativize_to, + ) + covers = rd.covers() + else: + rd = None + covers = dns.rdatatype.NONE + rrset = self.message.find_rrset( + section, + name, + rdclass, + rdtype, + covers, + deleting, + True, + self.one_rr_per_rrset, + ) + if rd is not None: + rrset.add(rd, ttl) + + def _make_message(self): + factory = _message_factory_from_opcode(self.opcode) + message = factory(id=self.id) + message.flags = self.flags + if self.edns >= 0: + message.use_edns(self.edns, self.ednsflags, self.payload) + if self.rcode: + message.set_rcode(self.rcode) + if self.origin: + message.origin = self.origin + return message + + def read(self): + """Read a text format DNS message and build a dns.message.Message + object.""" + + line_method = self._header_line + section_number = None + while 1: + token = self.tok.get(True, True) + if token.is_eol_or_eof(): + break + if token.is_comment(): + u = token.value.upper() + if u == "HEADER": + line_method = self._header_line + + if self.message: + message = self.message + else: + # If we don't have a message, create one with the current + # opcode, so that we know which section names to parse. + message = self._make_message() + try: + section_number = message._section_enum.from_text(u) + # We found a section name. If we don't have a message, + # use the one we just created. 
+ if not self.message: + self.message = message + self.one_rr_per_rrset = message._get_one_rr_per_rrset( + self.one_rr_per_rrset + ) + if section_number == MessageSection.QUESTION: + line_method = self._question_line + else: + line_method = self._rr_line + except Exception: + # It's just a comment. + pass + self.tok.get_eol() + continue + self.tok.unget(token) + line_method(section_number) + if not self.message: + self.message = self._make_message() + return self.message + + +def from_text( + text: str, + idna_codec: Optional[dns.name.IDNACodec] = None, + one_rr_per_rrset: bool = False, + origin: Optional[dns.name.Name] = None, + relativize: bool = True, + relativize_to: Optional[dns.name.Name] = None, +) -> Message: + """Convert the text format message into a message object. + + The reader stops after reading the first blank line in the input to + facilitate reading multiple messages from a single file with + ``dns.message.from_file()``. + + *text*, a ``str``, the text format message. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + *one_rr_per_rrset*, a ``bool``. If ``True``, then each RR is put + into its own rrset. The default is ``False``. + + *origin*, a ``dns.name.Name`` (or ``None``), the + origin to use for relative names. + + *relativize*, a ``bool``. If true, name will be relativized. + + *relativize_to*, a ``dns.name.Name`` (or ``None``), the origin to use + when relativizing names. If not set, the *origin* value will be used. + + Raises ``dns.message.UnknownHeaderField`` if a header is unknown. + + Raises ``dns.exception.SyntaxError`` if the text is badly formed. + + Returns a ``dns.message.Message object`` + """ + + # 'text' can also be a file, but we don't publish that fact + # since it's an implementation detail. The official file + # interface is from_file(). + + reader = _TextReader( + text, idna_codec, one_rr_per_rrset, origin, relativize, relativize_to + ) + return reader.read() + + +def from_file( + f: Any, + idna_codec: Optional[dns.name.IDNACodec] = None, + one_rr_per_rrset: bool = False, +) -> Message: + """Read the next text format message from the specified file. + + Message blocks are separated by a single blank line. + + *f*, a ``file`` or ``str``. If *f* is text, it is treated as the + pathname of a file to open. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + *one_rr_per_rrset*, a ``bool``. If ``True``, then each RR is put + into its own rrset. The default is ``False``. + + Raises ``dns.message.UnknownHeaderField`` if a header is unknown. + + Raises ``dns.exception.SyntaxError`` if the text is badly formed. 
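+
+    A usage sketch (the file name is illustrative):
+
+        msg = dns.message.from_file("saved-message.txt")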
+ + Returns a ``dns.message.Message object`` + """ + + if isinstance(f, str): + cm: contextlib.AbstractContextManager = open(f) + else: + cm = contextlib.nullcontext(f) + with cm as f: + return from_text(f, idna_codec, one_rr_per_rrset) + assert False # for mypy lgtm[py/unreachable-statement] + + +def make_query( + qname: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str], + rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, + use_edns: Optional[Union[int, bool]] = None, + want_dnssec: bool = False, + ednsflags: Optional[int] = None, + payload: Optional[int] = None, + request_payload: Optional[int] = None, + options: Optional[List[dns.edns.Option]] = None, + idna_codec: Optional[dns.name.IDNACodec] = None, + id: Optional[int] = None, + flags: int = dns.flags.RD, + pad: int = 0, +) -> QueryMessage: + """Make a query message. + + The query name, type, and class may all be specified either + as objects of the appropriate type, or as strings. + + The query will have a randomly chosen query id, and its DNS flags + will be set to dns.flags.RD. + + qname, a ``dns.name.Name`` or ``str``, the query name. + + *rdtype*, an ``int`` or ``str``, the desired rdata type. + + *rdclass*, an ``int`` or ``str``, the desired rdata class; the default + is class IN. + + *use_edns*, an ``int``, ``bool`` or ``None``. The EDNS level to use; the + default is ``None``. If ``None``, EDNS will be enabled only if other + parameters (*ednsflags*, *payload*, *request_payload*, or *options*) are + set. + See the description of dns.message.Message.use_edns() for the possible + values for use_edns and their meanings. + + *want_dnssec*, a ``bool``. If ``True``, DNSSEC data is desired. + + *ednsflags*, an ``int``, the EDNS flag values. + + *payload*, an ``int``, is the EDNS sender's payload field, which is the + maximum size of UDP datagram the sender can handle. I.e. how big + a response to this message can be. + + *request_payload*, an ``int``, is the EDNS payload size to use when + sending this message. If not specified, defaults to the value of + *payload*. + + *options*, a list of ``dns.edns.Option`` objects or ``None``, the EDNS + options. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + *id*, an ``int`` or ``None``, the desired query id. The default is + ``None``, which generates a random query id. + + *flags*, an ``int``, the desired query flags. The default is + ``dns.flags.RD``. + + *pad*, a non-negative ``int``. If 0, the default, do not pad; otherwise add + padding bytes to make the message size a multiple of *pad*. Note that if + padding is non-zero, an EDNS PADDING option will always be added to the + message. + + Returns a ``dns.message.QueryMessage`` + """ + + if isinstance(qname, str): + qname = dns.name.from_text(qname, idna_codec=idna_codec) + rdtype = dns.rdatatype.RdataType.make(rdtype) + rdclass = dns.rdataclass.RdataClass.make(rdclass) + m = QueryMessage(id=id) + m.flags = dns.flags.Flag(flags) + m.find_rrset(m.question, qname, rdclass, rdtype, create=True, force_unique=True) + # only pass keywords on to use_edns if they have been set to a + # non-None value. Setting a field will turn EDNS on if it hasn't + # been configured. 
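+    # For example, a call that sets only payload=4096 still turns EDNS on,
+    # because use_edns is promoted from None to 0 below.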
+ kwargs: Dict[str, Any] = {} + if ednsflags is not None: + kwargs["ednsflags"] = ednsflags + if payload is not None: + kwargs["payload"] = payload + if request_payload is not None: + kwargs["request_payload"] = request_payload + if options is not None: + kwargs["options"] = options + if kwargs and use_edns is None: + use_edns = 0 + kwargs["edns"] = use_edns + kwargs["pad"] = pad + m.use_edns(**kwargs) + m.want_dnssec(want_dnssec) + return m + + +class CopyMode(enum.Enum): + """ + How should sections be copied when making an update response? + """ + + NOTHING = 0 + QUESTION = 1 + EVERYTHING = 2 + + +def make_response( + query: Message, + recursion_available: bool = False, + our_payload: int = 8192, + fudge: int = 300, + tsig_error: int = 0, + pad: Optional[int] = None, + copy_mode: Optional[CopyMode] = None, +) -> Message: + """Make a message which is a response for the specified query. + The message returned is really a response skeleton; it has all of the infrastructure + required of a response, but none of the content. + + Response section(s) which are copied are shallow copies of the matching section(s) + in the query, so the query's RRsets should not be changed. + + *query*, a ``dns.message.Message``, the query to respond to. + + *recursion_available*, a ``bool``, should RA be set in the response? + + *our_payload*, an ``int``, the payload size to advertise in EDNS responses. + + *fudge*, an ``int``, the TSIG time fudge. + + *tsig_error*, an ``int``, the TSIG error. + + *pad*, a non-negative ``int`` or ``None``. If 0, the default, do not pad; otherwise + if not ``None`` add padding bytes to make the message size a multiple of *pad*. Note + that if padding is non-zero, an EDNS PADDING option will always be added to the + message. If ``None``, add padding following RFC 8467, namely if the request is + padded, pad the response to 468 otherwise do not pad. + + *copy_mode*, a ``dns.message.CopyMode`` or ``None``, determines how sections are + copied. The default, ``None`` copies sections according to the default for the + message's opcode, which is currently ``dns.message.CopyMode.QUESTION`` for all + opcodes. ``dns.message.CopyMode.QUESTION`` copies only the question section. + ``dns.message.CopyMode.EVERYTHING`` copies all sections other than OPT or TSIG + records, which are created appropriately if needed. ``dns.message.CopyMode.NOTHING`` + copies no sections; note that this mode is for server testing purposes and is + otherwise not recommended for use. In particular, ``dns.message.is_response()`` + will be ``False`` if you create a response this way and the rcode is not + ``FORMERR``, ``SERVFAIL``, ``NOTIMP``, or ``REFUSED``. + + Returns a ``dns.message.Message`` object whose specific class is appropriate for the + query. For example, if query is a ``dns.update.UpdateMessage``, the response will + be one too. 
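+
+    A minimal usage sketch (the qname is illustrative):
+
+        query = dns.message.make_query("example.com.", "A")
+        response = dns.message.make_response(query, recursion_available=True)
+        response.set_rcode(dns.rcode.NXDOMAIN)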
+ """ + + if query.flags & dns.flags.QR: + raise dns.exception.FormError("specified query message is not a query") + opcode = query.opcode() + factory = _message_factory_from_opcode(opcode) + response = factory(id=query.id) + response.flags = dns.flags.QR | (query.flags & dns.flags.RD) + if recursion_available: + response.flags |= dns.flags.RA + response.set_opcode(opcode) + if copy_mode is None: + copy_mode = CopyMode.QUESTION + if copy_mode != CopyMode.NOTHING: + response.question = list(query.question) + if copy_mode == CopyMode.EVERYTHING: + response.answer = list(query.answer) + response.authority = list(query.authority) + response.additional = list(query.additional) + if query.edns >= 0: + if pad is None: + # Set response padding per RFC 8467 + pad = 0 + for option in query.options: + if option.otype == dns.edns.OptionType.PADDING: + pad = 468 + response.use_edns(0, 0, our_payload, query.payload, pad=pad) + if query.had_tsig: + response.use_tsig( + query.keyring, + query.keyname, + fudge, + None, + tsig_error, + b"", + query.keyalgorithm, + ) + response.request_mac = query.mac + return response + + +### BEGIN generated MessageSection constants + +QUESTION = MessageSection.QUESTION +ANSWER = MessageSection.ANSWER +AUTHORITY = MessageSection.AUTHORITY +ADDITIONAL = MessageSection.ADDITIONAL + +### END generated MessageSection constants diff --git a/venv/lib/python3.11/site-packages/dns/name.py b/venv/lib/python3.11/site-packages/dns/name.py new file mode 100644 index 0000000..f79f0d0 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/name.py @@ -0,0 +1,1284 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Names. +""" + +import copy +import encodings.idna # type: ignore +import functools +import struct +from typing import Any, Callable, Dict, Iterable, Optional, Tuple, Union + +import dns._features +import dns.enum +import dns.exception +import dns.immutable +import dns.wire + +if dns._features.have("idna"): + import idna # type: ignore + + have_idna_2008 = True +else: # pragma: no cover + have_idna_2008 = False + +CompressType = Dict["Name", int] + + +class NameRelation(dns.enum.IntEnum): + """Name relation result from fullcompare().""" + + # This is an IntEnum for backwards compatibility in case anyone + # has hardwired the constants. + + #: The compared names have no relationship to each other. + NONE = 0 + #: the first name is a superdomain of the second. + SUPERDOMAIN = 1 + #: The first name is a subdomain of the second. + SUBDOMAIN = 2 + #: The compared names are equal. + EQUAL = 3 + #: The compared names have a common ancestor. 
+ COMMONANCESTOR = 4 + + @classmethod + def _maximum(cls): + return cls.COMMONANCESTOR # pragma: no cover + + @classmethod + def _short_name(cls): + return cls.__name__ # pragma: no cover + + +# Backwards compatibility +NAMERELN_NONE = NameRelation.NONE +NAMERELN_SUPERDOMAIN = NameRelation.SUPERDOMAIN +NAMERELN_SUBDOMAIN = NameRelation.SUBDOMAIN +NAMERELN_EQUAL = NameRelation.EQUAL +NAMERELN_COMMONANCESTOR = NameRelation.COMMONANCESTOR + + +class EmptyLabel(dns.exception.SyntaxError): + """A DNS label is empty.""" + + +class BadEscape(dns.exception.SyntaxError): + """An escaped code in a text format of DNS name is invalid.""" + + +class BadPointer(dns.exception.FormError): + """A DNS compression pointer points forward instead of backward.""" + + +class BadLabelType(dns.exception.FormError): + """The label type in DNS name wire format is unknown.""" + + +class NeedAbsoluteNameOrOrigin(dns.exception.DNSException): + """An attempt was made to convert a non-absolute name to + wire when there was also a non-absolute (or missing) origin.""" + + +class NameTooLong(dns.exception.FormError): + """A DNS name is > 255 octets long.""" + + +class LabelTooLong(dns.exception.SyntaxError): + """A DNS label is > 63 octets long.""" + + +class AbsoluteConcatenation(dns.exception.DNSException): + """An attempt was made to append anything other than the + empty name to an absolute DNS name.""" + + +class NoParent(dns.exception.DNSException): + """An attempt was made to get the parent of the root name + or the empty name.""" + + +class NoIDNA2008(dns.exception.DNSException): + """IDNA 2008 processing was requested but the idna module is not + available.""" + + +class IDNAException(dns.exception.DNSException): + """IDNA processing raised an exception.""" + + supp_kwargs = {"idna_exception"} + fmt = "IDNA processing exception: {idna_exception}" + + # We do this as otherwise mypy complains about unexpected keyword argument + # idna_exception + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +class NeedSubdomainOfOrigin(dns.exception.DNSException): + """An absolute name was provided that is not a subdomain of the specified origin.""" + + +_escaped = b'"().;\\@$' +_escaped_text = '"().;\\@$' + + +def _escapify(label: Union[bytes, str]) -> str: + """Escape the characters in label which need it. + @returns: the escaped string + @rtype: string""" + if isinstance(label, bytes): + # Ordinary DNS label mode. Escape special characters and values + # < 0x20 or > 0x7f. + text = "" + for c in label: + if c in _escaped: + text += "\\" + chr(c) + elif c > 0x20 and c < 0x7F: + text += chr(c) + else: + text += "\\%03d" % c + return text + + # Unicode label mode. Escape only special characters and values < 0x20 + text = "" + for uc in label: + if uc in _escaped_text: + text += "\\" + uc + elif uc <= "\x20": + text += "\\%03d" % ord(uc) + else: + text += uc + return text + + +class IDNACodec: + """Abstract base class for IDNA encoder/decoders.""" + + def __init__(self): + pass + + def is_idna(self, label: bytes) -> bool: + return label.lower().startswith(b"xn--") + + def encode(self, label: str) -> bytes: + raise NotImplementedError # pragma: no cover + + def decode(self, label: bytes) -> str: + # We do not apply any IDNA policy on decode. 
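+        # That is, any label carrying the "xn--" ACE prefix is
+        # punycode-decoded even if the result would not pass IDNA 2003
+        # or IDNA 2008 checks.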
+ if self.is_idna(label): + try: + slabel = label[4:].decode("punycode") + return _escapify(slabel) + except Exception as e: + raise IDNAException(idna_exception=e) + else: + return _escapify(label) + + +class IDNA2003Codec(IDNACodec): + """IDNA 2003 encoder/decoder.""" + + def __init__(self, strict_decode: bool = False): + """Initialize the IDNA 2003 encoder/decoder. + + *strict_decode* is a ``bool``. If `True`, then IDNA2003 checking + is done when decoding. This can cause failures if the name + was encoded with IDNA2008. The default is `False`. + """ + + super().__init__() + self.strict_decode = strict_decode + + def encode(self, label: str) -> bytes: + """Encode *label*.""" + + if label == "": + return b"" + try: + return encodings.idna.ToASCII(label) + except UnicodeError: + raise LabelTooLong + + def decode(self, label: bytes) -> str: + """Decode *label*.""" + if not self.strict_decode: + return super().decode(label) + if label == b"": + return "" + try: + return _escapify(encodings.idna.ToUnicode(label)) + except Exception as e: + raise IDNAException(idna_exception=e) + + +class IDNA2008Codec(IDNACodec): + """IDNA 2008 encoder/decoder.""" + + def __init__( + self, + uts_46: bool = False, + transitional: bool = False, + allow_pure_ascii: bool = False, + strict_decode: bool = False, + ): + """Initialize the IDNA 2008 encoder/decoder. + + *uts_46* is a ``bool``. If True, apply Unicode IDNA + compatibility processing as described in Unicode Technical + Standard #46 (https://unicode.org/reports/tr46/). + If False, do not apply the mapping. The default is False. + + *transitional* is a ``bool``: If True, use the + "transitional" mode described in Unicode Technical Standard + #46. The default is False. + + *allow_pure_ascii* is a ``bool``. If True, then a label which + consists of only ASCII characters is allowed. This is less + strict than regular IDNA 2008, but is also necessary for mixed + names, e.g. a name with starting with "_sip._tcp." and ending + in an IDN suffix which would otherwise be disallowed. The + default is False. + + *strict_decode* is a ``bool``: If True, then IDNA2008 checking + is done when decoding. This can cause failures if the name + was encoded with IDNA2003. The default is False. 
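+
+        For example, the ``IDNA_2008_Practical`` codec defined later in
+        this module is constructed as ``IDNA2008Codec(True, False, True,
+        False)``: UTS 46 mapping on, transitional processing off,
+        pure-ASCII labels allowed, and strict decoding off.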
+ """ + super().__init__() + self.uts_46 = uts_46 + self.transitional = transitional + self.allow_pure_ascii = allow_pure_ascii + self.strict_decode = strict_decode + + def encode(self, label: str) -> bytes: + if label == "": + return b"" + if self.allow_pure_ascii and is_all_ascii(label): + encoded = label.encode("ascii") + if len(encoded) > 63: + raise LabelTooLong + return encoded + if not have_idna_2008: + raise NoIDNA2008 + try: + if self.uts_46: + # pylint: disable=possibly-used-before-assignment + label = idna.uts46_remap(label, False, self.transitional) + return idna.alabel(label) + except idna.IDNAError as e: + if e.args[0] == "Label too long": + raise LabelTooLong + else: + raise IDNAException(idna_exception=e) + + def decode(self, label: bytes) -> str: + if not self.strict_decode: + return super().decode(label) + if label == b"": + return "" + if not have_idna_2008: + raise NoIDNA2008 + try: + ulabel = idna.ulabel(label) + if self.uts_46: + ulabel = idna.uts46_remap(ulabel, False, self.transitional) + return _escapify(ulabel) + except (idna.IDNAError, UnicodeError) as e: + raise IDNAException(idna_exception=e) + + +IDNA_2003_Practical = IDNA2003Codec(False) +IDNA_2003_Strict = IDNA2003Codec(True) +IDNA_2003 = IDNA_2003_Practical +IDNA_2008_Practical = IDNA2008Codec(True, False, True, False) +IDNA_2008_UTS_46 = IDNA2008Codec(True, False, False, False) +IDNA_2008_Strict = IDNA2008Codec(False, False, False, True) +IDNA_2008_Transitional = IDNA2008Codec(True, True, False, False) +IDNA_2008 = IDNA_2008_Practical + + +def _validate_labels(labels: Tuple[bytes, ...]) -> None: + """Check for empty labels in the middle of a label sequence, + labels that are too long, and for too many labels. + + Raises ``dns.name.NameTooLong`` if the name as a whole is too long. + + Raises ``dns.name.EmptyLabel`` if a label is empty (i.e. the root + label) and appears in a position other than the end of the label + sequence + + """ + + l = len(labels) + total = 0 + i = -1 + j = 0 + for label in labels: + ll = len(label) + total += ll + 1 + if ll > 63: + raise LabelTooLong + if i < 0 and label == b"": + i = j + j += 1 + if total > 255: + raise NameTooLong + if i >= 0 and i != l - 1: + raise EmptyLabel + + +def _maybe_convert_to_binary(label: Union[bytes, str]) -> bytes: + """If label is ``str``, convert it to ``bytes``. If it is already + ``bytes`` just return it. + + """ + + if isinstance(label, bytes): + return label + if isinstance(label, str): + return label.encode() + raise ValueError # pragma: no cover + + +@dns.immutable.immutable +class Name: + """A DNS name. + + The dns.name.Name class represents a DNS name as a tuple of + labels. Each label is a ``bytes`` in DNS wire format. Instances + of the class are immutable. + """ + + __slots__ = ["labels"] + + def __init__(self, labels: Iterable[Union[bytes, str]]): + """*labels* is any iterable whose values are ``str`` or ``bytes``.""" + + blabels = [_maybe_convert_to_binary(x) for x in labels] + self.labels = tuple(blabels) + _validate_labels(self.labels) + + def __copy__(self): + return Name(self.labels) + + def __deepcopy__(self, memo): + return Name(copy.deepcopy(self.labels, memo)) + + def __getstate__(self): + # Names can be pickled + return {"labels": self.labels} + + def __setstate__(self, state): + super().__setattr__("labels", state["labels"]) + _validate_labels(self.labels) + + def is_absolute(self) -> bool: + """Is the most significant label of this name the root label? + + Returns a ``bool``. 
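+
+        For example, ``dns.name.from_text("example.com.")`` is absolute,
+        while ``dns.name.from_text("example", origin=None)`` is not.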
+        """
+
+        return len(self.labels) > 0 and self.labels[-1] == b""
+
+    def is_wild(self) -> bool:
+        """Is this name wild? (I.e. Is the least significant label '*'?)
+
+        Returns a ``bool``.
+        """
+
+        return len(self.labels) > 0 and self.labels[0] == b"*"
+
+    def __hash__(self) -> int:
+        """Return a case-insensitive hash of the name.
+
+        Returns an ``int``.
+        """
+
+        h = 0
+        for label in self.labels:
+            for c in label.lower():
+                h += (h << 3) + c
+        return h
+
+    def fullcompare(self, other: "Name") -> Tuple[NameRelation, int, int]:
+        """Compare two names, returning a 3-tuple
+        ``(relation, order, nlabels)``.
+
+        *relation* describes the relationship between the names,
+        and is one of: ``dns.name.NameRelation.NONE``,
+        ``dns.name.NameRelation.SUPERDOMAIN``, ``dns.name.NameRelation.SUBDOMAIN``,
+        ``dns.name.NameRelation.EQUAL``, or ``dns.name.NameRelation.COMMONANCESTOR``.
+
+        *order* is < 0 if *self* < *other*, > 0 if *self* > *other*, and ==
+        0 if *self* == *other*. A relative name is always less than an
+        absolute name. If both names have the same relativity, then
+        the DNSSEC order relation is used to order them.
+
+        *nlabels* is the number of significant labels that the two names
+        have in common.
+
+        Here are some examples. Names ending in "." are absolute names,
+        those not ending in "." are relative names.
+
+        ============= ============= =========== ===== =======
+        self          other         relation    order nlabels
+        ============= ============= =========== ===== =======
+        www.example.  www.example.  equal       0     3
+        www.example.  example.      subdomain   > 0   2
+        example.      www.example.  superdomain < 0   2
+        example1.com. example2.com. common anc. < 0   2
+        example1      example2.     none        < 0   0
+        example1.     example2      none        > 0   0
+        ============= ============= =========== ===== =======
+        """
+
+        sabs = self.is_absolute()
+        oabs = other.is_absolute()
+        if sabs != oabs:
+            if sabs:
+                return (NameRelation.NONE, 1, 0)
+            else:
+                return (NameRelation.NONE, -1, 0)
+        l1 = len(self.labels)
+        l2 = len(other.labels)
+        ldiff = l1 - l2
+        if ldiff < 0:
+            l = l1
+        else:
+            l = l2
+
+        order = 0
+        nlabels = 0
+        namereln = NameRelation.NONE
+        while l > 0:
+            l -= 1
+            l1 -= 1
+            l2 -= 1
+            label1 = self.labels[l1].lower()
+            label2 = other.labels[l2].lower()
+            if label1 < label2:
+                order = -1
+                if nlabels > 0:
+                    namereln = NameRelation.COMMONANCESTOR
+                return (namereln, order, nlabels)
+            elif label1 > label2:
+                order = 1
+                if nlabels > 0:
+                    namereln = NameRelation.COMMONANCESTOR
+                return (namereln, order, nlabels)
+            nlabels += 1
+        order = ldiff
+        if ldiff < 0:
+            namereln = NameRelation.SUPERDOMAIN
+        elif ldiff > 0:
+            namereln = NameRelation.SUBDOMAIN
+        else:
+            namereln = NameRelation.EQUAL
+        return (namereln, order, nlabels)
+
+    def is_subdomain(self, other: "Name") -> bool:
+        """Is self a subdomain of other?
+
+        Note that the notion of subdomain includes equality, e.g.
+        "dnspython.org" is a subdomain of itself.
+
+        Returns a ``bool``.
+        """
+
+        (nr, _, _) = self.fullcompare(other)
+        if nr == NameRelation.SUBDOMAIN or nr == NameRelation.EQUAL:
+            return True
+        return False
+
+    def is_superdomain(self, other: "Name") -> bool:
+        """Is self a superdomain of other?
+
+        Note that the notion of superdomain includes equality, e.g.
+        "dnspython.org" is a superdomain of itself.
+
+        Returns a ``bool``.
+        """
+
+        (nr, _, _) = self.fullcompare(other)
+        if nr == NameRelation.SUPERDOMAIN or nr == NameRelation.EQUAL:
+            return True
+        return False
+
+    def canonicalize(self) -> "Name":
+        """Return a name which is equal to the current name, but is in
+        DNSSEC canonical form.
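+
+        For example, ``Name([b"WWW", b"DNSPython", b"org", b""])``
+        canonicalizes to ``Name([b"www", b"dnspython", b"org", b""])``.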
+        """
+
+        return Name([x.lower() for x in self.labels])
+
+    def __eq__(self, other):
+        if isinstance(other, Name):
+            return self.fullcompare(other)[1] == 0
+        else:
+            return False
+
+    def __ne__(self, other):
+        if isinstance(other, Name):
+            return self.fullcompare(other)[1] != 0
+        else:
+            return True
+
+    def __lt__(self, other):
+        if isinstance(other, Name):
+            return self.fullcompare(other)[1] < 0
+        else:
+            return NotImplemented
+
+    def __le__(self, other):
+        if isinstance(other, Name):
+            return self.fullcompare(other)[1] <= 0
+        else:
+            return NotImplemented
+
+    def __ge__(self, other):
+        if isinstance(other, Name):
+            return self.fullcompare(other)[1] >= 0
+        else:
+            return NotImplemented
+
+    def __gt__(self, other):
+        if isinstance(other, Name):
+            return self.fullcompare(other)[1] > 0
+        else:
+            return NotImplemented
+
+    def __repr__(self):
+        return "<DNS name " + self.__str__() + ">"
+
+    def __str__(self):
+        return self.to_text(False)
+
+    def to_text(self, omit_final_dot: bool = False) -> str:
+        """Convert name to DNS text format.
+
+        *omit_final_dot* is a ``bool``. If True, don't emit the final
+        dot (denoting the root label) for absolute names. The default
+        is False.
+
+        Returns a ``str``.
+        """
+
+        if len(self.labels) == 0:
+            return "@"
+        if len(self.labels) == 1 and self.labels[0] == b"":
+            return "."
+        if omit_final_dot and self.is_absolute():
+            l = self.labels[:-1]
+        else:
+            l = self.labels
+        s = ".".join(map(_escapify, l))
+        return s
+
+    def to_unicode(
+        self, omit_final_dot: bool = False, idna_codec: Optional[IDNACodec] = None
+    ) -> str:
+        """Convert name to Unicode text format.
+
+        IDN ACE labels are converted to Unicode.
+
+        *omit_final_dot* is a ``bool``. If True, don't emit the final
+        dot (denoting the root label) for absolute names. The default
+        is False.
+        *idna_codec* specifies the IDNA encoder/decoder. If None, the
+        dns.name.IDNA_2003_Practical encoder/decoder is used. The
+        IDNA_2003_Practical decoder does not impose any policy; it just
+        decodes punycode, so if you don't want checking for compliance,
+        you can use this decoder for IDNA2008 as well.
+
+        Returns a ``str``.
+        """
+
+        if len(self.labels) == 0:
+            return "@"
+        if len(self.labels) == 1 and self.labels[0] == b"":
+            return "."
+        if omit_final_dot and self.is_absolute():
+            l = self.labels[:-1]
+        else:
+            l = self.labels
+        if idna_codec is None:
+            idna_codec = IDNA_2003_Practical
+        return ".".join([idna_codec.decode(x) for x in l])
+
+    def to_digestable(self, origin: Optional["Name"] = None) -> bytes:
+        """Convert name to a format suitable for digesting in hashes.
+
+        The name is canonicalized and converted to uncompressed wire
+        format. All names in wire format are absolute. If the name
+        is a relative name, then an origin must be supplied.
+
+        *origin* is a ``dns.name.Name`` or ``None``. If the name is
+        relative and origin is not ``None``, then origin will be appended
+        to the name.
+
+        Raises ``dns.name.NeedAbsoluteNameOrOrigin`` if the name is
+        relative and no origin was provided.
+
+        Returns a ``bytes``.
+        """
+
+        digest = self.to_wire(origin=origin, canonicalize=True)
+        assert digest is not None
+        return digest
+
+    def to_wire(
+        self,
+        file: Optional[Any] = None,
+        compress: Optional[CompressType] = None,
+        origin: Optional["Name"] = None,
+        canonicalize: bool = False,
+    ) -> Optional[bytes]:
+        """Convert name to wire format, possibly compressing it.
+
+        *file* is the file where the name is emitted (typically an
+        io.BytesIO file). If ``None`` (the default), a ``bytes``
+        containing the wire name will be returned.
+ + *compress*, a ``dict``, is the compression table to use. If + ``None`` (the default), names will not be compressed. Note that + the compression code assumes that compression offset 0 is the + start of *file*, and thus compression will not be correct + if this is not the case. + + *origin* is a ``dns.name.Name`` or ``None``. If the name is + relative and origin is not ``None``, then *origin* will be appended + to it. + + *canonicalize*, a ``bool``, indicates whether the name should + be canonicalized; that is, converted to a format suitable for + digesting in hashes. + + Raises ``dns.name.NeedAbsoluteNameOrOrigin`` if the name is + relative and no origin was provided. + + Returns a ``bytes`` or ``None``. + """ + + if file is None: + out = bytearray() + for label in self.labels: + out.append(len(label)) + if canonicalize: + out += label.lower() + else: + out += label + if not self.is_absolute(): + if origin is None or not origin.is_absolute(): + raise NeedAbsoluteNameOrOrigin + for label in origin.labels: + out.append(len(label)) + if canonicalize: + out += label.lower() + else: + out += label + return bytes(out) + + labels: Iterable[bytes] + if not self.is_absolute(): + if origin is None or not origin.is_absolute(): + raise NeedAbsoluteNameOrOrigin + labels = list(self.labels) + labels.extend(list(origin.labels)) + else: + labels = self.labels + i = 0 + for label in labels: + n = Name(labels[i:]) + i += 1 + if compress is not None: + pos = compress.get(n) + else: + pos = None + if pos is not None: + value = 0xC000 + pos + s = struct.pack("!H", value) + file.write(s) + break + else: + if compress is not None and len(n) > 1: + pos = file.tell() + if pos <= 0x3FFF: + compress[n] = pos + l = len(label) + file.write(struct.pack("!B", l)) + if l > 0: + if canonicalize: + file.write(label.lower()) + else: + file.write(label) + return None + + def __len__(self) -> int: + """The length of the name (in labels). + + Returns an ``int``. + """ + + return len(self.labels) + + def __getitem__(self, index): + return self.labels[index] + + def __add__(self, other): + return self.concatenate(other) + + def __sub__(self, other): + return self.relativize(other) + + def split(self, depth: int) -> Tuple["Name", "Name"]: + """Split a name into a prefix and suffix names at the specified depth. + + *depth* is an ``int`` specifying the number of labels in the suffix + + Raises ``ValueError`` if *depth* was not >= 0 and <= the length of the + name. + + Returns the tuple ``(prefix, suffix)``. + """ + + l = len(self.labels) + if depth == 0: + return (self, dns.name.empty) + elif depth == l: + return (dns.name.empty, self) + elif depth < 0 or depth > l: + raise ValueError("depth must be >= 0 and <= the length of the name") + return (Name(self[:-depth]), Name(self[-depth:])) + + def concatenate(self, other: "Name") -> "Name": + """Return a new name which is the concatenation of self and other. + + Raises ``dns.name.AbsoluteConcatenation`` if the name is + absolute and *other* is not the empty name. + + Returns a ``dns.name.Name``. + """ + + if self.is_absolute() and len(other) > 0: + raise AbsoluteConcatenation + labels = list(self.labels) + labels.extend(list(other.labels)) + return Name(labels) + + def relativize(self, origin: "Name") -> "Name": + """If the name is a subdomain of *origin*, return a new name which is + the name relative to origin. Otherwise return the name. + + For example, relativizing ``www.dnspython.org.`` to origin + ``dnspython.org.`` returns the name ``www``. 
Relativizing ``example.`` + to origin ``dnspython.org.`` returns ``example.``. + + Returns a ``dns.name.Name``. + """ + + if origin is not None and self.is_subdomain(origin): + return Name(self[: -len(origin)]) + else: + return self + + def derelativize(self, origin: "Name") -> "Name": + """If the name is a relative name, return a new name which is the + concatenation of the name and origin. Otherwise return the name. + + For example, derelativizing ``www`` to origin ``dnspython.org.`` + returns the name ``www.dnspython.org.``. Derelativizing ``example.`` + to origin ``dnspython.org.`` returns ``example.``. + + Returns a ``dns.name.Name``. + """ + + if not self.is_absolute(): + return self.concatenate(origin) + else: + return self + + def choose_relativity( + self, origin: Optional["Name"] = None, relativize: bool = True + ) -> "Name": + """Return a name with the relativity desired by the caller. + + If *origin* is ``None``, then the name is returned. + Otherwise, if *relativize* is ``True`` the name is + relativized, and if *relativize* is ``False`` the name is + derelativized. + + Returns a ``dns.name.Name``. + """ + + if origin: + if relativize: + return self.relativize(origin) + else: + return self.derelativize(origin) + else: + return self + + def parent(self) -> "Name": + """Return the parent of the name. + + For example, the parent of ``www.dnspython.org.`` is ``dnspython.org``. + + Raises ``dns.name.NoParent`` if the name is either the root name or the + empty name, and thus has no parent. + + Returns a ``dns.name.Name``. + """ + + if self == root or self == empty: + raise NoParent + return Name(self.labels[1:]) + + def predecessor(self, origin: "Name", prefix_ok: bool = True) -> "Name": + """Return the maximal predecessor of *name* in the DNSSEC ordering in the zone + whose origin is *origin*, or return the longest name under *origin* if the + name is origin (i.e. wrap around to the longest name, which may still be + *origin* due to length considerations. + + The relativity of the name is preserved, so if this name is relative + then the method will return a relative name, and likewise if this name + is absolute then the predecessor will be absolute. + + *prefix_ok* indicates if prefixing labels is allowed, and + defaults to ``True``. Normally it is good to allow this, but if computing + a maximal predecessor at a zone cut point then ``False`` must be specified. + """ + return _handle_relativity_and_call( + _absolute_predecessor, self, origin, prefix_ok + ) + + def successor(self, origin: "Name", prefix_ok: bool = True) -> "Name": + """Return the minimal successor of *name* in the DNSSEC ordering in the zone + whose origin is *origin*, or return *origin* if the successor cannot be + computed due to name length limitations. + + Note that *origin* is returned in the "too long" cases because wrapping + around to the origin is how NSEC records express "end of the zone". + + The relativity of the name is preserved, so if this name is relative + then the method will return a relative name, and likewise if this name + is absolute then the successor will be absolute. + + *prefix_ok* indicates if prefixing a new minimal label is allowed, and + defaults to ``True``. Normally it is good to allow this, but if computing + a minimal successor at a zone cut point then ``False`` must be specified. + """ + return _handle_relativity_and_call(_absolute_successor, self, origin, prefix_ok) + + +#: The root name, '.' +root = Name([b""]) + +#: The empty name. 
+empty = Name([]) + + +def from_unicode( + text: str, origin: Optional[Name] = root, idna_codec: Optional[IDNACodec] = None +) -> Name: + """Convert unicode text into a Name object. + + Labels are encoded in IDN ACE form according to rules specified by + the IDNA codec. + + *text*, a ``str``, is the text to convert into a name. + + *origin*, a ``dns.name.Name``, specifies the origin to + append to non-absolute names. The default is the root name. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + Returns a ``dns.name.Name``. + """ + + if not isinstance(text, str): + raise ValueError("input to from_unicode() must be a unicode string") + if not (origin is None or isinstance(origin, Name)): + raise ValueError("origin must be a Name or None") + labels = [] + label = "" + escaping = False + edigits = 0 + total = 0 + if idna_codec is None: + idna_codec = IDNA_2003 + if text == "@": + text = "" + if text: + if text in [".", "\u3002", "\uff0e", "\uff61"]: + return Name([b""]) # no Unicode "u" on this constant! + for c in text: + if escaping: + if edigits == 0: + if c.isdigit(): + total = int(c) + edigits += 1 + else: + label += c + escaping = False + else: + if not c.isdigit(): + raise BadEscape + total *= 10 + total += int(c) + edigits += 1 + if edigits == 3: + escaping = False + label += chr(total) + elif c in [".", "\u3002", "\uff0e", "\uff61"]: + if len(label) == 0: + raise EmptyLabel + labels.append(idna_codec.encode(label)) + label = "" + elif c == "\\": + escaping = True + edigits = 0 + total = 0 + else: + label += c + if escaping: + raise BadEscape + if len(label) > 0: + labels.append(idna_codec.encode(label)) + else: + labels.append(b"") + + if (len(labels) == 0 or labels[-1] != b"") and origin is not None: + labels.extend(list(origin.labels)) + return Name(labels) + + +def is_all_ascii(text: str) -> bool: + for c in text: + if ord(c) > 0x7F: + return False + return True + + +def from_text( + text: Union[bytes, str], + origin: Optional[Name] = root, + idna_codec: Optional[IDNACodec] = None, +) -> Name: + """Convert text into a Name object. + + *text*, a ``bytes`` or ``str``, is the text to convert into a name. + + *origin*, a ``dns.name.Name``, specifies the origin to + append to non-absolute names. The default is the root name. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + Returns a ``dns.name.Name``. + """ + + if isinstance(text, str): + if not is_all_ascii(text): + # Some codepoint in the input text is > 127, so IDNA applies. + return from_unicode(text, origin, idna_codec) + # The input is all ASCII, so treat this like an ordinary non-IDNA + # domain name. Note that "all ASCII" is about the input text, + # not the codepoints in the domain name. E.g. if text has value + # + # r'\150\151\152\153\154\155\156\157\158\159' + # + # then it's still "all ASCII" even though the domain name has + # codepoints > 127. 
+ text = text.encode("ascii") + if not isinstance(text, bytes): + raise ValueError("input to from_text() must be a string") + if not (origin is None or isinstance(origin, Name)): + raise ValueError("origin must be a Name or None") + labels = [] + label = b"" + escaping = False + edigits = 0 + total = 0 + if text == b"@": + text = b"" + if text: + if text == b".": + return Name([b""]) + for c in text: + byte_ = struct.pack("!B", c) + if escaping: + if edigits == 0: + if byte_.isdigit(): + total = int(byte_) + edigits += 1 + else: + label += byte_ + escaping = False + else: + if not byte_.isdigit(): + raise BadEscape + total *= 10 + total += int(byte_) + edigits += 1 + if edigits == 3: + escaping = False + label += struct.pack("!B", total) + elif byte_ == b".": + if len(label) == 0: + raise EmptyLabel + labels.append(label) + label = b"" + elif byte_ == b"\\": + escaping = True + edigits = 0 + total = 0 + else: + label += byte_ + if escaping: + raise BadEscape + if len(label) > 0: + labels.append(label) + else: + labels.append(b"") + if (len(labels) == 0 or labels[-1] != b"") and origin is not None: + labels.extend(list(origin.labels)) + return Name(labels) + + +# we need 'dns.wire.Parser' quoted as dns.name and dns.wire depend on each other. + + +def from_wire_parser(parser: "dns.wire.Parser") -> Name: + """Convert possibly compressed wire format into a Name. + + *parser* is a dns.wire.Parser. + + Raises ``dns.name.BadPointer`` if a compression pointer did not + point backwards in the message. + + Raises ``dns.name.BadLabelType`` if an invalid label type was encountered. + + Returns a ``dns.name.Name`` + """ + + labels = [] + biggest_pointer = parser.current + with parser.restore_furthest(): + count = parser.get_uint8() + while count != 0: + if count < 64: + labels.append(parser.get_bytes(count)) + elif count >= 192: + current = (count & 0x3F) * 256 + parser.get_uint8() + if current >= biggest_pointer: + raise BadPointer + biggest_pointer = current + parser.seek(current) + else: + raise BadLabelType + count = parser.get_uint8() + labels.append(b"") + return Name(labels) + + +def from_wire(message: bytes, current: int) -> Tuple[Name, int]: + """Convert possibly compressed wire format into a Name. + + *message* is a ``bytes`` containing an entire DNS message in DNS + wire form. + + *current*, an ``int``, is the offset of the beginning of the name + from the start of the message + + Raises ``dns.name.BadPointer`` if a compression pointer did not + point backwards in the message. + + Raises ``dns.name.BadLabelType`` if an invalid label type was encountered. + + Returns a ``(dns.name.Name, int)`` tuple consisting of the name + that was read and the number of bytes of the wire format message + which were consumed reading it. 
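+
+    A minimal usage sketch (it assumes *message* is a complete DNS message
+    in wire form, with a name starting at offset 12, just past the header):
+
+        name, used = dns.name.from_wire(message, 12)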
+ """ + + if not isinstance(message, bytes): + raise ValueError("input to from_wire() must be a byte string") + parser = dns.wire.Parser(message, current) + name = from_wire_parser(parser) + return (name, parser.current - current) + + +# RFC 4471 Support + +_MINIMAL_OCTET = b"\x00" +_MINIMAL_OCTET_VALUE = ord(_MINIMAL_OCTET) +_SUCCESSOR_PREFIX = Name([_MINIMAL_OCTET]) +_MAXIMAL_OCTET = b"\xff" +_MAXIMAL_OCTET_VALUE = ord(_MAXIMAL_OCTET) +_AT_SIGN_VALUE = ord("@") +_LEFT_SQUARE_BRACKET_VALUE = ord("[") + + +def _wire_length(labels): + return functools.reduce(lambda v, x: v + len(x) + 1, labels, 0) + + +def _pad_to_max_name(name): + needed = 255 - _wire_length(name.labels) + new_labels = [] + while needed > 64: + new_labels.append(_MAXIMAL_OCTET * 63) + needed -= 64 + if needed >= 2: + new_labels.append(_MAXIMAL_OCTET * (needed - 1)) + # Note we're already maximal in the needed == 1 case as while we'd like + # to add one more byte as a new label, we can't, as adding a new non-empty + # label requires at least 2 bytes. + new_labels = list(reversed(new_labels)) + new_labels.extend(name.labels) + return Name(new_labels) + + +def _pad_to_max_label(label, suffix_labels): + length = len(label) + # We have to subtract one here to account for the length byte of label. + remaining = 255 - _wire_length(suffix_labels) - length - 1 + if remaining <= 0: + # Shouldn't happen! + return label + needed = min(63 - length, remaining) + return label + _MAXIMAL_OCTET * needed + + +def _absolute_predecessor(name: Name, origin: Name, prefix_ok: bool) -> Name: + # This is the RFC 4471 predecessor algorithm using the "absolute method" of section + # 3.1.1. + # + # Our caller must ensure that the name and origin are absolute, and that name is a + # subdomain of origin. + if name == origin: + return _pad_to_max_name(name) + least_significant_label = name[0] + if least_significant_label == _MINIMAL_OCTET: + return name.parent() + least_octet = least_significant_label[-1] + suffix_labels = name.labels[1:] + if least_octet == _MINIMAL_OCTET_VALUE: + new_labels = [least_significant_label[:-1]] + else: + octets = bytearray(least_significant_label) + octet = octets[-1] + if octet == _LEFT_SQUARE_BRACKET_VALUE: + octet = _AT_SIGN_VALUE + else: + octet -= 1 + octets[-1] = octet + least_significant_label = bytes(octets) + new_labels = [_pad_to_max_label(least_significant_label, suffix_labels)] + new_labels.extend(suffix_labels) + name = Name(new_labels) + if prefix_ok: + return _pad_to_max_name(name) + else: + return name + + +def _absolute_successor(name: Name, origin: Name, prefix_ok: bool) -> Name: + # This is the RFC 4471 successor algorithm using the "absolute method" of section + # 3.1.2. + # + # Our caller must ensure that the name and origin are absolute, and that name is a + # subdomain of origin. + if prefix_ok: + # Try prefixing \000 as new label + try: + return _SUCCESSOR_PREFIX.concatenate(name) + except NameTooLong: + pass + while name != origin: + # Try extending the least significant label. + least_significant_label = name[0] + if len(least_significant_label) < 63: + # We may be able to extend the least label with a minimal additional byte. + # This is only "may" because we could have a maximal length name even though + # the least significant label isn't maximally long. 
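+            # (For example, a name whose wire form is already 255 octets
+            # long cannot take another octet; the NameTooLong check below
+            # covers that case.)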
+                new_labels = [least_significant_label + _MINIMAL_OCTET]
+                new_labels.extend(name.labels[1:])
+                try:
+                    return dns.name.Name(new_labels)
+                except dns.name.NameTooLong:
+                    pass
+            # We can't extend the label either, so we'll try to increment the least
+            # significant non-maximal byte in it.
+            octets = bytearray(least_significant_label)
+            # We do this reversed iteration with an explicit indexing variable because
+            # if we find something to increment, we're going to want to truncate
+            # everything to the right of it.
+            for i in range(len(octets) - 1, -1, -1):
+                octet = octets[i]
+                if octet == _MAXIMAL_OCTET_VALUE:
+                    # We can't increment this, so keep looking.
+                    continue
+                # Finally, something we can increment. We have to apply a special rule
+                # for incrementing "@", sending it to "[", because RFC 4034 6.1 says
+                # that when comparing names, uppercase letters compare as if they were
+                # their lower-case equivalents. If we increment "@" to "A", then it
+                # would compare as "a", which is after "[", "\", "]", "^", "_", and
+                # "`", so we would have skipped the most minimal successor, namely "[".
+                if octet == _AT_SIGN_VALUE:
+                    octet = _LEFT_SQUARE_BRACKET_VALUE
+                else:
+                    octet += 1
+                octets[i] = octet
+                # We can now truncate all of the maximal values we skipped (if any)
+                new_labels = [bytes(octets[: i + 1])]
+                new_labels.extend(name.labels[1:])
+                # We haven't changed the length of the name, so the Name constructor
+                # will always work.
+                return Name(new_labels)
+            # We couldn't increment, so chop off the least significant label and try
+            # again.
+            name = name.parent()
+
+    # We couldn't increment at all, so return the origin, as wrapping around is the
+    # DNSSEC way.
+    return origin
+
+
+def _handle_relativity_and_call(
+    function: Callable[[Name, Name, bool], Name],
+    name: Name,
+    origin: Name,
+    prefix_ok: bool,
+) -> Name:
+    # Make "name" absolute if needed, ensure that the origin is absolute,
+    # call function(), and then relativize the result if needed.
+    if not origin.is_absolute():
+        raise NeedAbsoluteNameOrOrigin
+    relative = not name.is_absolute()
+    if relative:
+        name = name.derelativize(origin)
+    elif not name.is_subdomain(origin):
+        raise NeedSubdomainOfOrigin
+    result_name = function(name, origin, prefix_ok)
+    if relative:
+        result_name = result_name.relativize(origin)
+    return result_name
diff --git a/venv/lib/python3.11/site-packages/dns/namedict.py b/venv/lib/python3.11/site-packages/dns/namedict.py
new file mode 100644
index 0000000..ca8b197
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/dns/namedict.py
@@ -0,0 +1,109 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+# Copyright (C) 2003-2017 Nominum, Inc.
+# Copyright (C) 2016 Coresec Systems AB
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND CORESEC SYSTEMS AB DISCLAIMS ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL CORESEC +# SYSTEMS AB BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS name dictionary""" + +# pylint seems to be confused about this one! +from collections.abc import MutableMapping # pylint: disable=no-name-in-module + +import dns.name + + +class NameDict(MutableMapping): + """A dictionary whose keys are dns.name.Name objects. + + In addition to being like a regular Python dictionary, this + dictionary can also get the deepest match for a given key. + """ + + __slots__ = ["max_depth", "max_depth_items", "__store"] + + def __init__(self, *args, **kwargs): + super().__init__() + self.__store = dict() + #: the maximum depth of the keys that have ever been added + self.max_depth = 0 + #: the number of items of maximum depth + self.max_depth_items = 0 + self.update(dict(*args, **kwargs)) + + def __update_max_depth(self, key): + if len(key) == self.max_depth: + self.max_depth_items = self.max_depth_items + 1 + elif len(key) > self.max_depth: + self.max_depth = len(key) + self.max_depth_items = 1 + + def __getitem__(self, key): + return self.__store[key] + + def __setitem__(self, key, value): + if not isinstance(key, dns.name.Name): + raise ValueError("NameDict key must be a name") + self.__store[key] = value + self.__update_max_depth(key) + + def __delitem__(self, key): + self.__store.pop(key) + if len(key) == self.max_depth: + self.max_depth_items = self.max_depth_items - 1 + if self.max_depth_items == 0: + self.max_depth = 0 + for k in self.__store: + self.__update_max_depth(k) + + def __iter__(self): + return iter(self.__store) + + def __len__(self): + return len(self.__store) + + def has_key(self, key): + return key in self.__store + + def get_deepest_match(self, name): + """Find the deepest match to *name* in the dictionary. + + The deepest match is the longest name in the dictionary which is + a superdomain of *name*. Note that *superdomain* includes matching + *name* itself. + + *name*, a ``dns.name.Name``, the name to find. + + Returns a ``(key, value)`` where *key* is the deepest + ``dns.name.Name``, and *value* is the value associated with *key*. 
+ """ + + depth = len(name) + if depth > self.max_depth: + depth = self.max_depth + for i in range(-depth, 0): + n = dns.name.Name(name[i:]) + if n in self: + return (n, self[n]) + v = self[dns.name.empty] + return (dns.name.empty, v) diff --git a/venv/lib/python3.11/site-packages/dns/nameserver.py b/venv/lib/python3.11/site-packages/dns/nameserver.py new file mode 100644 index 0000000..b02a239 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/nameserver.py @@ -0,0 +1,363 @@ +from typing import Optional, Union +from urllib.parse import urlparse + +import dns.asyncbackend +import dns.asyncquery +import dns.inet +import dns.message +import dns.query + + +class Nameserver: + def __init__(self): + pass + + def __str__(self): + raise NotImplementedError + + def kind(self) -> str: + raise NotImplementedError + + def is_always_max_size(self) -> bool: + raise NotImplementedError + + def answer_nameserver(self) -> str: + raise NotImplementedError + + def answer_port(self) -> int: + raise NotImplementedError + + def query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: Optional[str], + source_port: int, + max_size: bool, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + raise NotImplementedError + + async def async_query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: Optional[str], + source_port: int, + max_size: bool, + backend: dns.asyncbackend.Backend, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + raise NotImplementedError + + +class AddressAndPortNameserver(Nameserver): + def __init__(self, address: str, port: int): + super().__init__() + self.address = address + self.port = port + + def kind(self) -> str: + raise NotImplementedError + + def is_always_max_size(self) -> bool: + return False + + def __str__(self): + ns_kind = self.kind() + return f"{ns_kind}:{self.address}@{self.port}" + + def answer_nameserver(self) -> str: + return self.address + + def answer_port(self) -> int: + return self.port + + +class Do53Nameserver(AddressAndPortNameserver): + def __init__(self, address: str, port: int = 53): + super().__init__(address, port) + + def kind(self): + return "Do53" + + def query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: Optional[str], + source_port: int, + max_size: bool, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + if max_size: + response = dns.query.tcp( + request, + self.address, + timeout=timeout, + port=self.port, + source=source, + source_port=source_port, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + else: + response = dns.query.udp( + request, + self.address, + timeout=timeout, + port=self.port, + source=source, + source_port=source_port, + raise_on_truncation=True, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ignore_errors=True, + ignore_unexpected=True, + ) + return response + + async def async_query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: Optional[str], + source_port: int, + max_size: bool, + backend: dns.asyncbackend.Backend, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + if max_size: + response = await dns.asyncquery.tcp( + request, + self.address, + timeout=timeout, + port=self.port, + source=source, + source_port=source_port, + backend=backend, + one_rr_per_rrset=one_rr_per_rrset, + 
ignore_trailing=ignore_trailing, + ) + else: + response = await dns.asyncquery.udp( + request, + self.address, + timeout=timeout, + port=self.port, + source=source, + source_port=source_port, + raise_on_truncation=True, + backend=backend, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ignore_errors=True, + ignore_unexpected=True, + ) + return response + + +class DoHNameserver(Nameserver): + def __init__( + self, + url: str, + bootstrap_address: Optional[str] = None, + verify: Union[bool, str] = True, + want_get: bool = False, + http_version: dns.query.HTTPVersion = dns.query.HTTPVersion.DEFAULT, + ): + super().__init__() + self.url = url + self.bootstrap_address = bootstrap_address + self.verify = verify + self.want_get = want_get + self.http_version = http_version + + def kind(self): + return "DoH" + + def is_always_max_size(self) -> bool: + return True + + def __str__(self): + return self.url + + def answer_nameserver(self) -> str: + return self.url + + def answer_port(self) -> int: + port = urlparse(self.url).port + if port is None: + port = 443 + return port + + def query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: Optional[str], + source_port: int, + max_size: bool = False, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + return dns.query.https( + request, + self.url, + timeout=timeout, + source=source, + source_port=source_port, + bootstrap_address=self.bootstrap_address, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + verify=self.verify, + post=(not self.want_get), + http_version=self.http_version, + ) + + async def async_query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: Optional[str], + source_port: int, + max_size: bool, + backend: dns.asyncbackend.Backend, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + return await dns.asyncquery.https( + request, + self.url, + timeout=timeout, + source=source, + source_port=source_port, + bootstrap_address=self.bootstrap_address, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + verify=self.verify, + post=(not self.want_get), + http_version=self.http_version, + ) + + +class DoTNameserver(AddressAndPortNameserver): + def __init__( + self, + address: str, + port: int = 853, + hostname: Optional[str] = None, + verify: Union[bool, str] = True, + ): + super().__init__(address, port) + self.hostname = hostname + self.verify = verify + + def kind(self): + return "DoT" + + def query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: Optional[str], + source_port: int, + max_size: bool = False, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + return dns.query.tls( + request, + self.address, + port=self.port, + timeout=timeout, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + server_hostname=self.hostname, + verify=self.verify, + ) + + async def async_query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: Optional[str], + source_port: int, + max_size: bool, + backend: dns.asyncbackend.Backend, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + return await dns.asyncquery.tls( + request, + self.address, + port=self.port, + timeout=timeout, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + server_hostname=self.hostname, + 
verify=self.verify, + ) + + +class DoQNameserver(AddressAndPortNameserver): + def __init__( + self, + address: str, + port: int = 853, + verify: Union[bool, str] = True, + server_hostname: Optional[str] = None, + ): + super().__init__(address, port) + self.verify = verify + self.server_hostname = server_hostname + + def kind(self): + return "DoQ" + + def query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: Optional[str], + source_port: int, + max_size: bool = False, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + return dns.query.quic( + request, + self.address, + port=self.port, + timeout=timeout, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + verify=self.verify, + server_hostname=self.server_hostname, + ) + + async def async_query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: Optional[str], + source_port: int, + max_size: bool, + backend: dns.asyncbackend.Backend, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + return await dns.asyncquery.quic( + request, + self.address, + port=self.port, + timeout=timeout, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + verify=self.verify, + server_hostname=self.server_hostname, + ) diff --git a/venv/lib/python3.11/site-packages/dns/node.py b/venv/lib/python3.11/site-packages/dns/node.py new file mode 100644 index 0000000..de85a82 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/node.py @@ -0,0 +1,359 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS nodes. A node is a set of rdatasets.""" + +import enum +import io +from typing import Any, Dict, Optional + +import dns.immutable +import dns.name +import dns.rdataclass +import dns.rdataset +import dns.rdatatype +import dns.renderer +import dns.rrset + +_cname_types = { + dns.rdatatype.CNAME, +} + +# "neutral" types can coexist with a CNAME and thus are not "other data" +_neutral_types = { + dns.rdatatype.NSEC, # RFC 4035 section 2.5 + dns.rdatatype.NSEC3, # This is not likely to happen, but not impossible! 
+    dns.rdatatype.KEY,  # RFC 4035 section 2.5, RFC 3007
+}
+
+
+def _matches_type_or_its_signature(rdtypes, rdtype, covers):
+    return rdtype in rdtypes or (rdtype == dns.rdatatype.RRSIG and covers in rdtypes)
+
+
+@enum.unique
+class NodeKind(enum.Enum):
+    """Rdatasets in nodes"""
+
+    REGULAR = 0  # a.k.a "other data"
+    NEUTRAL = 1
+    CNAME = 2
+
+    @classmethod
+    def classify(
+        cls, rdtype: dns.rdatatype.RdataType, covers: dns.rdatatype.RdataType
+    ) -> "NodeKind":
+        if _matches_type_or_its_signature(_cname_types, rdtype, covers):
+            return NodeKind.CNAME
+        elif _matches_type_or_its_signature(_neutral_types, rdtype, covers):
+            return NodeKind.NEUTRAL
+        else:
+            return NodeKind.REGULAR
+
+    @classmethod
+    def classify_rdataset(cls, rdataset: dns.rdataset.Rdataset) -> "NodeKind":
+        return cls.classify(rdataset.rdtype, rdataset.covers)
+
+
+class Node:
+    """A Node is a set of rdatasets.
+
+    A node is either a CNAME node or an "other data" node. A CNAME
+    node contains only CNAME, KEY, NSEC, and NSEC3 rdatasets along with their
+    covering RRSIG rdatasets. An "other data" node contains any
+    rdataset other than a CNAME or RRSIG(CNAME) rdataset. When
+    changes are made to a node, the CNAME or "other data" state is
+    always consistent with the update, i.e. the most recent change
+    wins. For example, if you have a node which contains a CNAME
+    rdataset, and then add an MX rdataset to it, then the CNAME
+    rdataset will be deleted. Likewise if you have a node containing
+    an MX rdataset and add a CNAME rdataset, the MX rdataset will be
+    deleted.
+    """
+
+    __slots__ = ["rdatasets"]
+
+    def __init__(self):
+        # the set of rdatasets, represented as a list.
+        self.rdatasets = []
+
+    def to_text(self, name: dns.name.Name, **kw: Dict[str, Any]) -> str:
+        """Convert a node to text format.
+
+        Each rdataset at the node is printed. Any keyword arguments
+        to this method are passed on to the rdataset's to_text() method.
+
+        *name*, a ``dns.name.Name``, the owner name of the
+        rdatasets.
+
+        Returns a ``str``.
+
+        """
+
+        s = io.StringIO()
+        for rds in self.rdatasets:
+            if len(rds) > 0:
+                s.write(rds.to_text(name, **kw))  # type: ignore[arg-type]
+                s.write("\n")
+        return s.getvalue()[:-1]
+
+    def __repr__(self):
+        return "<DNS node " + str(id(self)) + ">"
+
+    def __eq__(self, other):
+        #
+        # This is inefficient. Good thing we don't need to do it much.
+        #
+        for rd in self.rdatasets:
+            if rd not in other.rdatasets:
+                return False
+        for rd in other.rdatasets:
+            if rd not in self.rdatasets:
+                return False
+        return True
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __len__(self):
+        return len(self.rdatasets)
+
+    def __iter__(self):
+        return iter(self.rdatasets)
+
+    def _append_rdataset(self, rdataset):
+        """Append rdataset to the node with special handling for CNAME and
+        other data conditions.
+
+        Specifically, if the rdataset being appended has ``NodeKind.CNAME``,
+        then all rdatasets other than KEY, NSEC, NSEC3, and their covering
+        RRSIGs are deleted. If the rdataset being appended has
+        ``NodeKind.REGULAR`` then CNAME and RRSIG(CNAME) are deleted.
+        """
+        # Make having just one rdataset at the node fast.
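        # [Editor's annotation, not part of the diff] Sketch of the rule above
        # on a fresh Node n (IN class, create=True on each call):
        #   n.find_rdataset(IN, MX, create=True)    -> node holds {MX}
        #   n.find_rdataset(IN, CNAME, create=True) -> MX evicted, holds {CNAME}
        #   n.find_rdataset(IN, NSEC, create=True)  -> NEUTRAL, so {CNAME, NSEC}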
+ if len(self.rdatasets) > 0: + kind = NodeKind.classify_rdataset(rdataset) + if kind == NodeKind.CNAME: + self.rdatasets = [ + rds + for rds in self.rdatasets + if NodeKind.classify_rdataset(rds) != NodeKind.REGULAR + ] + elif kind == NodeKind.REGULAR: + self.rdatasets = [ + rds + for rds in self.rdatasets + if NodeKind.classify_rdataset(rds) != NodeKind.CNAME + ] + # Otherwise the rdataset is NodeKind.NEUTRAL and we do not need to + # edit self.rdatasets. + self.rdatasets.append(rdataset) + + def find_rdataset( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + create: bool = False, + ) -> dns.rdataset.Rdataset: + """Find an rdataset matching the specified properties in the + current node. + + *rdclass*, a ``dns.rdataclass.RdataClass``, the class of the rdataset. + + *rdtype*, a ``dns.rdatatype.RdataType``, the type of the rdataset. + + *covers*, a ``dns.rdatatype.RdataType``, the covered type. + Usually this value is ``dns.rdatatype.NONE``, but if the + rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, + then the covers value will be the rdata type the SIG/RRSIG + covers. The library treats the SIG and RRSIG types as if they + were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). + This makes RRSIGs much easier to work with than if RRSIGs + covering different rdata types were aggregated into a single + RRSIG rdataset. + + *create*, a ``bool``. If True, create the rdataset if it is not found. + + Raises ``KeyError`` if an rdataset of the desired type and class does + not exist and *create* is not ``True``. + + Returns a ``dns.rdataset.Rdataset``. + """ + + for rds in self.rdatasets: + if rds.match(rdclass, rdtype, covers): + return rds + if not create: + raise KeyError + rds = dns.rdataset.Rdataset(rdclass, rdtype, covers) + self._append_rdataset(rds) + return rds + + def get_rdataset( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + create: bool = False, + ) -> Optional[dns.rdataset.Rdataset]: + """Get an rdataset matching the specified properties in the + current node. + + None is returned if an rdataset of the specified type and + class does not exist and *create* is not ``True``. + + *rdclass*, an ``int``, the class of the rdataset. + + *rdtype*, an ``int``, the type of the rdataset. + + *covers*, an ``int``, the covered type. Usually this value is + dns.rdatatype.NONE, but if the rdtype is dns.rdatatype.SIG or + dns.rdatatype.RRSIG, then the covers value will be the rdata + type the SIG/RRSIG covers. The library treats the SIG and RRSIG + types as if they were a family of + types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). This makes RRSIGs much + easier to work with than if RRSIGs covering different rdata + types were aggregated into a single RRSIG rdataset. + + *create*, a ``bool``. If True, create the rdataset if it is not found. + + Returns a ``dns.rdataset.Rdataset`` or ``None``. + """ + + try: + rds = self.find_rdataset(rdclass, rdtype, covers, create) + except KeyError: + rds = None + return rds + + def delete_rdataset( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + ) -> None: + """Delete the rdataset matching the specified properties in the + current node. + + If a matching rdataset does not exist, it is not an error. + + *rdclass*, an ``int``, the class of the rdataset. 
+
+        *rdtype*, an ``int``, the type of the rdataset.
+
+        *covers*, an ``int``, the covered type.
+        """
+
+        rds = self.get_rdataset(rdclass, rdtype, covers)
+        if rds is not None:
+            self.rdatasets.remove(rds)
+
+    def replace_rdataset(self, replacement: dns.rdataset.Rdataset) -> None:
+        """Replace an rdataset.
+
+        It is not an error if there is no rdataset matching *replacement*.
+
+        Ownership of the *replacement* object is transferred to the node;
+        in other words, this method does not store a copy of *replacement*
+        at the node, it stores *replacement* itself.
+
+        *replacement*, a ``dns.rdataset.Rdataset``.
+
+        Raises ``ValueError`` if *replacement* is not a
+        ``dns.rdataset.Rdataset``.
+        """
+
+        if not isinstance(replacement, dns.rdataset.Rdataset):
+            raise ValueError("replacement is not an rdataset")
+        if isinstance(replacement, dns.rrset.RRset):
+            # RRsets are not good replacements as the match() method
+            # is not compatible.
+            replacement = replacement.to_rdataset()
+        self.delete_rdataset(
+            replacement.rdclass, replacement.rdtype, replacement.covers
+        )
+        self._append_rdataset(replacement)
+
+    def classify(self) -> NodeKind:
+        """Classify a node.
+
+        A node which contains a CNAME or RRSIG(CNAME) is a
+        ``NodeKind.CNAME`` node.
+
+        A node which contains only "neutral" types, i.e. types allowed to
+        co-exist with a CNAME, is a ``NodeKind.NEUTRAL`` node. The neutral
+        types are NSEC, NSEC3, KEY, and their associated RRSIGS. An empty node
+        is also considered neutral.
+
+        A node which contains some rdataset which is not a CNAME, RRSIG(CNAME),
+        or a neutral type is a ``NodeKind.REGULAR`` node. Regular nodes are
+        also commonly referred to as "other data".
+        """
+        for rdataset in self.rdatasets:
+            kind = NodeKind.classify(rdataset.rdtype, rdataset.covers)
+            if kind != NodeKind.NEUTRAL:
+                return kind
+        return NodeKind.NEUTRAL
+
+    def is_immutable(self) -> bool:
+        return False
+
+
+@dns.immutable.immutable
+class ImmutableNode(Node):
+    def __init__(self, node):
+        super().__init__()
+        self.rdatasets = tuple(
+            [dns.rdataset.ImmutableRdataset(rds) for rds in node.rdatasets]
+        )
+
+    def find_rdataset(
+        self,
+        rdclass: dns.rdataclass.RdataClass,
+        rdtype: dns.rdatatype.RdataType,
+        covers: dns.rdatatype.RdataType = dns.rdatatype.NONE,
+        create: bool = False,
+    ) -> dns.rdataset.Rdataset:
+        if create:
+            raise TypeError("immutable")
+        return super().find_rdataset(rdclass, rdtype, covers, False)
+
+    def get_rdataset(
+        self,
+        rdclass: dns.rdataclass.RdataClass,
+        rdtype: dns.rdatatype.RdataType,
+        covers: dns.rdatatype.RdataType = dns.rdatatype.NONE,
+        create: bool = False,
+    ) -> Optional[dns.rdataset.Rdataset]:
+        if create:
+            raise TypeError("immutable")
+        return super().get_rdataset(rdclass, rdtype, covers, False)
+
+    def delete_rdataset(
+        self,
+        rdclass: dns.rdataclass.RdataClass,
+        rdtype: dns.rdatatype.RdataType,
+        covers: dns.rdatatype.RdataType = dns.rdatatype.NONE,
+    ) -> None:
+        raise TypeError("immutable")
+
+    def replace_rdataset(self, replacement: dns.rdataset.Rdataset) -> None:
+        raise TypeError("immutable")
+
+    def is_immutable(self) -> bool:
+        return True
diff --git a/venv/lib/python3.11/site-packages/dns/opcode.py b/venv/lib/python3.11/site-packages/dns/opcode.py
new file mode 100644
index 0000000..78b43d2
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/dns/opcode.py
@@ -0,0 +1,117 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+# Copyright (C) 2001-2017 Nominum, Inc.
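# [Editor's annotation, not part of the diff] A minimal, runnable sketch of the
# Node/NodeKind behavior implemented in dns/node.py above; it assumes dnspython
# is installed and importable.
import dns.node
import dns.rdataclass
import dns.rdatatype

node = dns.node.Node()
node.find_rdataset(dns.rdataclass.IN, dns.rdatatype.MX, create=True)
assert node.classify() == dns.node.NodeKind.REGULAR
# Appending a CNAME evicts the "other data" (here, the MX rdataset).
node.find_rdataset(dns.rdataclass.IN, dns.rdatatype.CNAME, create=True)
assert node.classify() == dns.node.NodeKind.CNAME and len(node.rdatasets) == 1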
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+"""DNS Opcodes."""
+
+import dns.enum
+import dns.exception
+
+
+class Opcode(dns.enum.IntEnum):
+    #: Query
+    QUERY = 0
+    #: Inverse Query (historical)
+    IQUERY = 1
+    #: Server Status (unspecified and unimplemented anywhere)
+    STATUS = 2
+    #: Notify
+    NOTIFY = 4
+    #: Dynamic Update
+    UPDATE = 5
+
+    @classmethod
+    def _maximum(cls):
+        return 15
+
+    @classmethod
+    def _unknown_exception_class(cls):
+        return UnknownOpcode
+
+
+class UnknownOpcode(dns.exception.DNSException):
+    """A DNS opcode is unknown."""
+
+
+def from_text(text: str) -> Opcode:
+    """Convert text into an opcode.
+
+    *text*, a ``str``, the textual opcode
+
+    Raises ``dns.opcode.UnknownOpcode`` if the opcode is unknown.
+
+    Returns an ``int``.
+    """
+
+    return Opcode.from_text(text)
+
+
+def from_flags(flags: int) -> Opcode:
+    """Extract an opcode from DNS message flags.
+
+    *flags*, an ``int``, the DNS flags.
+
+    Returns an ``int``.
+    """
+
+    return Opcode((flags & 0x7800) >> 11)
+
+
+def to_flags(value: Opcode) -> int:
+    """Convert an opcode to a value suitable for ORing into DNS message
+    flags.
+
+    *value*, an ``int``, the DNS opcode value.
+
+    Returns an ``int``.
+    """
+
+    return (value << 11) & 0x7800
+
+
+def to_text(value: Opcode) -> str:
+    """Convert an opcode to text.
+
+    *value*, an ``int``, the opcode value.
+
+    Raises ``dns.opcode.UnknownOpcode`` if the opcode is unknown.
+
+    Returns a ``str``.
+    """
+
+    return Opcode.to_text(value)
+
+
+def is_update(flags: int) -> bool:
+    """Is the opcode in flags UPDATE?
+
+    *flags*, an ``int``, the DNS message flags.
+
+    Returns a ``bool``.
+    """
+
+    return from_flags(flags) == Opcode.UPDATE
+
+
+### BEGIN generated Opcode constants
+
+QUERY = Opcode.QUERY
+IQUERY = Opcode.IQUERY
+STATUS = Opcode.STATUS
+NOTIFY = Opcode.NOTIFY
+UPDATE = Opcode.UPDATE
+
+### END generated Opcode constants
diff --git a/venv/lib/python3.11/site-packages/dns/py.typed b/venv/lib/python3.11/site-packages/dns/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/venv/lib/python3.11/site-packages/dns/query.py b/venv/lib/python3.11/site-packages/dns/query.py
new file mode 100644
index 0000000..0d8a977
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/dns/query.py
@@ -0,0 +1,1665 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+# Copyright (C) 2003-2017 Nominum, Inc.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS.
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Talk to a DNS server.""" + +import base64 +import contextlib +import enum +import errno +import os +import os.path +import random +import selectors +import socket +import struct +import time +import urllib.parse +from typing import Any, Dict, Optional, Tuple, Union, cast + +import dns._features +import dns.exception +import dns.inet +import dns.message +import dns.name +import dns.quic +import dns.rcode +import dns.rdataclass +import dns.rdatatype +import dns.serial +import dns.transaction +import dns.tsig +import dns.xfr + + +def _remaining(expiration): + if expiration is None: + return None + timeout = expiration - time.time() + if timeout <= 0.0: + raise dns.exception.Timeout + return timeout + + +def _expiration_for_this_attempt(timeout, expiration): + if expiration is None: + return None + return min(time.time() + timeout, expiration) + + +_have_httpx = dns._features.have("doh") +if _have_httpx: + import httpcore._backends.sync + import httpx + + _CoreNetworkBackend = httpcore.NetworkBackend + _CoreSyncStream = httpcore._backends.sync.SyncStream + + class _NetworkBackend(_CoreNetworkBackend): + def __init__(self, resolver, local_port, bootstrap_address, family): + super().__init__() + self._local_port = local_port + self._resolver = resolver + self._bootstrap_address = bootstrap_address + self._family = family + + def connect_tcp( + self, host, port, timeout, local_address, socket_options=None + ): # pylint: disable=signature-differs + addresses = [] + _, expiration = _compute_times(timeout) + if dns.inet.is_address(host): + addresses.append(host) + elif self._bootstrap_address is not None: + addresses.append(self._bootstrap_address) + else: + timeout = _remaining(expiration) + family = self._family + if local_address: + family = dns.inet.af_for_address(local_address) + answers = self._resolver.resolve_name( + host, family=family, lifetime=timeout + ) + addresses = answers.addresses() + for address in addresses: + af = dns.inet.af_for_address(address) + if local_address is not None or self._local_port != 0: + source = dns.inet.low_level_address_tuple( + (local_address, self._local_port), af + ) + else: + source = None + sock = _make_socket(af, socket.SOCK_STREAM, source) + attempt_expiration = _expiration_for_this_attempt(2.0, expiration) + try: + _connect( + sock, + dns.inet.low_level_address_tuple((address, port), af), + attempt_expiration, + ) + return _CoreSyncStream(sock) + except Exception: + pass + raise httpcore.ConnectError + + def connect_unix_socket( + self, path, timeout, socket_options=None + ): # pylint: disable=signature-differs + raise NotImplementedError + + class _HTTPTransport(httpx.HTTPTransport): + def __init__( + self, + *args, + local_port=0, + bootstrap_address=None, + resolver=None, + family=socket.AF_UNSPEC, + **kwargs, + ): + if resolver is None and bootstrap_address is None: + # pylint: disable=import-outside-toplevel,redefined-outer-name + import dns.resolver + + resolver = dns.resolver.Resolver() + super().__init__(*args, **kwargs) + self._pool._network_backend = _NetworkBackend( + resolver, local_port, bootstrap_address, family + ) + +else: + + class _HTTPTransport: # type: ignore + def connect_tcp(self, host, port, timeout, 
local_address): + raise NotImplementedError + + +have_doh = _have_httpx + +try: + import ssl +except ImportError: # pragma: no cover + + class ssl: # type: ignore + CERT_NONE = 0 + + class WantReadException(Exception): + pass + + class WantWriteException(Exception): + pass + + class SSLContext: + pass + + class SSLSocket: + pass + + @classmethod + def create_default_context(cls, *args, **kwargs): + raise Exception("no ssl support") # pylint: disable=broad-exception-raised + + +# Function used to create a socket. Can be overridden if needed in special +# situations. +socket_factory = socket.socket + + +class UnexpectedSource(dns.exception.DNSException): + """A DNS query response came from an unexpected address or port.""" + + +class BadResponse(dns.exception.FormError): + """A DNS query response does not respond to the question asked.""" + + +class NoDOH(dns.exception.DNSException): + """DNS over HTTPS (DOH) was requested but the httpx module is not + available.""" + + +class NoDOQ(dns.exception.DNSException): + """DNS over QUIC (DOQ) was requested but the aioquic module is not + available.""" + + +# for backwards compatibility +TransferError = dns.xfr.TransferError + + +def _compute_times(timeout): + now = time.time() + if timeout is None: + return (now, None) + else: + return (now, now + timeout) + + +def _wait_for(fd, readable, writable, _, expiration): + # Use the selected selector class to wait for any of the specified + # events. An "expiration" absolute time is converted into a relative + # timeout. + # + # The unused parameter is 'error', which is always set when + # selecting for read or write, and we have no error-only selects. + + if readable and isinstance(fd, ssl.SSLSocket) and fd.pending() > 0: + return True + sel = selectors.DefaultSelector() + events = 0 + if readable: + events |= selectors.EVENT_READ + if writable: + events |= selectors.EVENT_WRITE + if events: + sel.register(fd, events) + if expiration is None: + timeout = None + else: + timeout = expiration - time.time() + if timeout <= 0.0: + raise dns.exception.Timeout + if not sel.select(timeout): + raise dns.exception.Timeout + + +def _wait_for_readable(s, expiration): + _wait_for(s, True, False, True, expiration) + + +def _wait_for_writable(s, expiration): + _wait_for(s, False, True, True, expiration) + + +def _addresses_equal(af, a1, a2): + # Convert the first value of the tuple, which is a textual format + # address into binary form, so that we are not confused by different + # textual representations of the same address + try: + n1 = dns.inet.inet_pton(af, a1[0]) + n2 = dns.inet.inet_pton(af, a2[0]) + except dns.exception.SyntaxError: + return False + return n1 == n2 and a1[1:] == a2[1:] + + +def _matches_destination(af, from_address, destination, ignore_unexpected): + # Check that from_address is appropriate for a response to a query + # sent to destination. + if not destination: + return True + if _addresses_equal(af, from_address, destination) or ( + dns.inet.is_multicast(destination[0]) and from_address[1:] == destination[1:] + ): + return True + elif ignore_unexpected: + return False + raise UnexpectedSource( + f"got a response from {from_address} instead of " f"{destination}" + ) + + +def _destination_and_source( + where, port, source, source_port, where_must_be_address=True +): + # Apply defaults and compute destination and source tuples + # suitable for use in connect(), sendto(), or bind(). 
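    # [Editor's annotation, not part of the diff] For example,
    # _destination_and_source("2001:db8::1", 53, None, 0) returns
    # (socket.AF_INET6, ("2001:db8::1", 53, 0, 0), None), while mixing an IPv4
    # source with an IPv6 destination raises ValueError.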
+    af = None
+    destination = None
+    try:
+        af = dns.inet.af_for_address(where)
+        destination = where
+    except Exception:
+        if where_must_be_address:
+            raise
+        # URLs are ok so eat the exception
+    if source:
+        saf = dns.inet.af_for_address(source)
+        if af:
+            # We know the destination af, so source had better agree!
+            if saf != af:
+                raise ValueError(
+                    "different address families for source and destination"
+                )
+        else:
+            # We didn't know the destination af, but we know the source,
+            # so that's our af.
+            af = saf
+    if source_port and not source:
+        # Caller has specified a source_port but not an address, so we
+        # need to return a source, and we need to use the appropriate
+        # wildcard address as the address.
+        try:
+            source = dns.inet.any_for_af(af)
+        except Exception:
+            # we catch this and raise ValueError for backwards compatibility
+            raise ValueError("source_port specified but address family is unknown")
+    # Convert high-level (address, port) tuples into low-level address
+    # tuples.
+    if destination:
+        destination = dns.inet.low_level_address_tuple((destination, port), af)
+    if source:
+        source = dns.inet.low_level_address_tuple((source, source_port), af)
+    return (af, destination, source)
+
+
+def _make_socket(af, type, source, ssl_context=None, server_hostname=None):
+    s = socket_factory(af, type)
+    try:
+        s.setblocking(False)
+        if source is not None:
+            s.bind(source)
+        if ssl_context:
+            # LGTM gets a false positive here, as our default context is OK
+            return ssl_context.wrap_socket(
+                s,
+                do_handshake_on_connect=False,  # lgtm[py/insecure-protocol]
+                server_hostname=server_hostname,
+            )
+        else:
+            return s
+    except Exception:
+        s.close()
+        raise
+
+
+def _maybe_get_resolver(
+    resolver: Optional["dns.resolver.Resolver"],
+) -> "dns.resolver.Resolver":
+    # We need a separate method for this to avoid overriding the global
+    # variable "dns" with the as-yet undefined local variable "dns"
+    # in https().
+    if resolver is None:
+        # pylint: disable=import-outside-toplevel,redefined-outer-name
+        import dns.resolver
+
+        resolver = dns.resolver.Resolver()
+    return resolver
+
+
+class HTTPVersion(enum.IntEnum):
+    """Which version of HTTP should be used?
+
+    DEFAULT will select the first version from the list [2, 1.1, 3] that
+    is available.
+    """
+
+    DEFAULT = 0
+    HTTP_1 = 1
+    H1 = 1
+    HTTP_2 = 2
+    H2 = 2
+    HTTP_3 = 3
+    H3 = 3
+
+
+def https(
+    q: dns.message.Message,
+    where: str,
+    timeout: Optional[float] = None,
+    port: int = 443,
+    source: Optional[str] = None,
+    source_port: int = 0,
+    one_rr_per_rrset: bool = False,
+    ignore_trailing: bool = False,
+    session: Optional[Any] = None,
+    path: str = "/dns-query",
+    post: bool = True,
+    bootstrap_address: Optional[str] = None,
+    verify: Union[bool, str] = True,
+    resolver: Optional["dns.resolver.Resolver"] = None,
+    family: int = socket.AF_UNSPEC,
+    http_version: HTTPVersion = HTTPVersion.DEFAULT,
+) -> dns.message.Message:
+    """Return the response obtained after sending a query via DNS-over-HTTPS.
+
+    *q*, a ``dns.message.Message``, the query to send.
+
+    *where*, a ``str``, the nameserver IP address or the full URL. If an IP address is
+    given, the URL will be constructed using the following schema:
+    https://<IP-address>:<port><path>.
+
+    *timeout*, a ``float`` or ``None``, the number of seconds to wait before the query
+    times out. If ``None``, the default, wait forever.
+
+    *port*, an ``int``, the port to send the query to. The default is 443.
+
+    *source*, a ``str`` containing an IPv4 or IPv6 address, specifying the source
+    address.
The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. The default is + 0. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing junk at end of the + received message. + + *session*, an ``httpx.Client``. If provided, the client session to use to send the + queries. + + *path*, a ``str``. If *where* is an IP address, then *path* will be used to + construct the URL to send the DNS query to. + + *post*, a ``bool``. If ``True``, the default, POST method will be used. + + *bootstrap_address*, a ``str``, the IP address to use to bypass resolution. + + *verify*, a ``bool`` or ``str``. If a ``True``, then TLS certificate verification + of the server is done using the default CA bundle; if ``False``, then no + verification is done; if a `str` then it specifies the path to a certificate file or + directory which will be used for verification. + + *resolver*, a ``dns.resolver.Resolver`` or ``None``, the resolver to use for + resolution of hostnames in URLs. If not specified, a new resolver with a default + configuration will be used; note this is *not* the default resolver as that resolver + might have been configured to use DoH causing a chicken-and-egg problem. This + parameter only has an effect if the HTTP library is httpx. + + *family*, an ``int``, the address family. If socket.AF_UNSPEC (the default), both A + and AAAA records will be retrieved. + + *http_version*, a ``dns.query.HTTPVersion``, indicating which HTTP version to use. + + Returns a ``dns.message.Message``. + """ + + (af, _, the_source) = _destination_and_source( + where, port, source, source_port, False + ) + if af is not None and dns.inet.is_address(where): + if af == socket.AF_INET: + url = f"https://{where}:{port}{path}" + elif af == socket.AF_INET6: + url = f"https://[{where}]:{port}{path}" + else: + url = where + + extensions = {} + if bootstrap_address is None: + # pylint: disable=possibly-used-before-assignment + parsed = urllib.parse.urlparse(url) + if parsed.hostname is None: + raise ValueError("no hostname in URL") + if dns.inet.is_address(parsed.hostname): + bootstrap_address = parsed.hostname + extensions["sni_hostname"] = parsed.hostname + if parsed.port is not None: + port = parsed.port + + if http_version == HTTPVersion.H3 or ( + http_version == HTTPVersion.DEFAULT and not have_doh + ): + if bootstrap_address is None: + resolver = _maybe_get_resolver(resolver) + assert parsed.hostname is not None # for mypy + answers = resolver.resolve_name(parsed.hostname, family) + bootstrap_address = random.choice(list(answers.addresses())) + return _http3( + q, + bootstrap_address, + url, + timeout, + port, + source, + source_port, + one_rr_per_rrset, + ignore_trailing, + verify=verify, + post=post, + ) + + if not have_doh: + raise NoDOH # pragma: no cover + if session and not isinstance(session, httpx.Client): + raise ValueError("session parameter must be an httpx.Client") + + wire = q.to_wire() + headers = {"accept": "application/dns-message"} + + h1 = http_version in (HTTPVersion.H1, HTTPVersion.DEFAULT) + h2 = http_version in (HTTPVersion.H2, HTTPVersion.DEFAULT) + + # set source port and source address + + if the_source is None: + local_address = None + local_port = 0 + else: + local_address = the_source[0] + local_port = the_source[1] + + if session: + cm: contextlib.AbstractContextManager = contextlib.nullcontext(session) + else: + transport = _HTTPTransport( + 
local_address=local_address, + http1=h1, + http2=h2, + verify=verify, + local_port=local_port, + bootstrap_address=bootstrap_address, + resolver=resolver, + family=family, + ) + + cm = httpx.Client(http1=h1, http2=h2, verify=verify, transport=transport) + with cm as session: + # see https://tools.ietf.org/html/rfc8484#section-4.1.1 for DoH + # GET and POST examples + if post: + headers.update( + { + "content-type": "application/dns-message", + "content-length": str(len(wire)), + } + ) + response = session.post( + url, + headers=headers, + content=wire, + timeout=timeout, + extensions=extensions, + ) + else: + wire = base64.urlsafe_b64encode(wire).rstrip(b"=") + twire = wire.decode() # httpx does a repr() if we give it bytes + response = session.get( + url, + headers=headers, + timeout=timeout, + params={"dns": twire}, + extensions=extensions, + ) + + # see https://tools.ietf.org/html/rfc8484#section-4.2.1 for info about DoH + # status codes + if response.status_code < 200 or response.status_code > 299: + raise ValueError( + f"{where} responded with status code {response.status_code}" + f"\nResponse body: {response.content}" + ) + r = dns.message.from_wire( + response.content, + keyring=q.keyring, + request_mac=q.request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + r.time = response.elapsed.total_seconds() + if not q.is_response(r): + raise BadResponse + return r + + +def _find_header(headers: dns.quic.Headers, name: bytes) -> bytes: + if headers is None: + raise KeyError + for header, value in headers: + if header == name: + return value + raise KeyError + + +def _check_status(headers: dns.quic.Headers, peer: str, wire: bytes) -> None: + value = _find_header(headers, b":status") + if value is None: + raise SyntaxError("no :status header in response") + status = int(value) + if status < 0: + raise SyntaxError("status is negative") + if status < 200 or status > 299: + error = "" + if len(wire) > 0: + try: + error = ": " + wire.decode() + except Exception: + pass + raise ValueError(f"{peer} responded with status code {status}{error}") + + +def _http3( + q: dns.message.Message, + where: str, + url: str, + timeout: Optional[float] = None, + port: int = 853, + source: Optional[str] = None, + source_port: int = 0, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + verify: Union[bool, str] = True, + hostname: Optional[str] = None, + post: bool = True, +) -> dns.message.Message: + if not dns.quic.have_quic: + raise NoDOH("DNS-over-HTTP3 is not available.") # pragma: no cover + + url_parts = urllib.parse.urlparse(url) + hostname = url_parts.hostname + if url_parts.port is not None: + port = url_parts.port + + q.id = 0 + wire = q.to_wire() + manager = dns.quic.SyncQuicManager( + verify_mode=verify, server_name=hostname, h3=True + ) + + with manager: + connection = manager.connect(where, port, source, source_port) + (start, expiration) = _compute_times(timeout) + with connection.make_stream(timeout) as stream: + stream.send_h3(url, wire, post) + wire = stream.receive(_remaining(expiration)) + _check_status(stream.headers(), where, wire) + finish = time.time() + r = dns.message.from_wire( + wire, + keyring=q.keyring, + request_mac=q.request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + r.time = max(finish - start, 0.0) + if not q.is_response(r): + raise BadResponse + return r + + +def _udp_recv(sock, max_size, expiration): + """Reads a datagram from the socket. 
+ A Timeout exception will be raised if the operation is not completed + by the expiration time. + """ + while True: + try: + return sock.recvfrom(max_size) + except BlockingIOError: + _wait_for_readable(sock, expiration) + + +def _udp_send(sock, data, destination, expiration): + """Sends the specified datagram to destination over the socket. + A Timeout exception will be raised if the operation is not completed + by the expiration time. + """ + while True: + try: + if destination: + return sock.sendto(data, destination) + else: + return sock.send(data) + except BlockingIOError: # pragma: no cover + _wait_for_writable(sock, expiration) + + +def send_udp( + sock: Any, + what: Union[dns.message.Message, bytes], + destination: Any, + expiration: Optional[float] = None, +) -> Tuple[int, float]: + """Send a DNS message to the specified UDP socket. + + *sock*, a ``socket``. + + *what*, a ``bytes`` or ``dns.message.Message``, the message to send. + + *destination*, a destination tuple appropriate for the address family + of the socket, specifying where to send the query. + + *expiration*, a ``float`` or ``None``, the absolute time at which + a timeout exception should be raised. If ``None``, no timeout will + occur. + + Returns an ``(int, float)`` tuple of bytes sent and the sent time. + """ + + if isinstance(what, dns.message.Message): + what = what.to_wire() + sent_time = time.time() + n = _udp_send(sock, what, destination, expiration) + return (n, sent_time) + + +def receive_udp( + sock: Any, + destination: Optional[Any] = None, + expiration: Optional[float] = None, + ignore_unexpected: bool = False, + one_rr_per_rrset: bool = False, + keyring: Optional[Dict[dns.name.Name, dns.tsig.Key]] = None, + request_mac: Optional[bytes] = b"", + ignore_trailing: bool = False, + raise_on_truncation: bool = False, + ignore_errors: bool = False, + query: Optional[dns.message.Message] = None, +) -> Any: + """Read a DNS message from a UDP socket. + + *sock*, a ``socket``. + + *destination*, a destination tuple appropriate for the address family + of the socket, specifying where the message is expected to arrive from. + When receiving a response, this would be where the associated query was + sent. + + *expiration*, a ``float`` or ``None``, the absolute time at which + a timeout exception should be raised. If ``None``, no timeout will + occur. + + *ignore_unexpected*, a ``bool``. If ``True``, ignore responses from + unexpected sources. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *keyring*, a ``dict``, the keyring to use for TSIG. + + *request_mac*, a ``bytes`` or ``None``, the MAC of the request (for TSIG). + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the received message. + + *raise_on_truncation*, a ``bool``. If ``True``, raise an exception if + the TC bit is set. + + Raises if the message is malformed, if network errors occur, of if + there is a timeout. + + If *destination* is not ``None``, returns a ``(dns.message.Message, float)`` + tuple of the received message and the received time. + + If *destination* is ``None``, returns a + ``(dns.message.Message, float, tuple)`` + tuple of the received message, the received time, and the address where + the message arrived from. + + *ignore_errors*, a ``bool``. If various format errors or response + mismatches occur, ignore them and keep listening for a valid response. + The default is ``False``. + + *query*, a ``dns.message.Message`` or ``None``. 
If not ``None`` and + *ignore_errors* is ``True``, check that the received message is a response + to this query, and if not keep listening for a valid response. + """ + + wire = b"" + while True: + (wire, from_address) = _udp_recv(sock, 65535, expiration) + if not _matches_destination( + sock.family, from_address, destination, ignore_unexpected + ): + continue + received_time = time.time() + try: + r = dns.message.from_wire( + wire, + keyring=keyring, + request_mac=request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + raise_on_truncation=raise_on_truncation, + ) + except dns.message.Truncated as e: + # If we got Truncated and not FORMERR, we at least got the header with TC + # set, and very likely the question section, so we'll re-raise if the + # message seems to be a response as we need to know when truncation happens. + # We need to check that it seems to be a response as we don't want a random + # injected message with TC set to cause us to bail out. + if ( + ignore_errors + and query is not None + and not query.is_response(e.message()) + ): + continue + else: + raise + except Exception: + if ignore_errors: + continue + else: + raise + if ignore_errors and query is not None and not query.is_response(r): + continue + if destination: + return (r, received_time) + else: + return (r, received_time, from_address) + + +def udp( + q: dns.message.Message, + where: str, + timeout: Optional[float] = None, + port: int = 53, + source: Optional[str] = None, + source_port: int = 0, + ignore_unexpected: bool = False, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + raise_on_truncation: bool = False, + sock: Optional[Any] = None, + ignore_errors: bool = False, +) -> dns.message.Message: + """Return the response obtained after sending a query via UDP. + + *q*, a ``dns.message.Message``, the query to send + + *where*, a ``str`` containing an IPv4 or IPv6 address, where + to send the message. + + *timeout*, a ``float`` or ``None``, the number of seconds to wait before the + query times out. If ``None``, the default, wait forever. + + *port*, an ``int``, the port send the message to. The default is 53. + + *source*, a ``str`` containing an IPv4 or IPv6 address, specifying + the source address. The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. + The default is 0. + + *ignore_unexpected*, a ``bool``. If ``True``, ignore responses from + unexpected sources. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the received message. + + *raise_on_truncation*, a ``bool``. If ``True``, raise an exception if + the TC bit is set. + + *sock*, a ``socket.socket``, or ``None``, the socket to use for the + query. If ``None``, the default, a socket is created. Note that + if a socket is provided, it must be a nonblocking datagram socket, + and the *source* and *source_port* are ignored. + + *ignore_errors*, a ``bool``. If various format errors or response + mismatches occur, ignore them and keep listening for a valid response. + The default is ``False``. + + Returns a ``dns.message.Message``. 
+ """ + + wire = q.to_wire() + (af, destination, source) = _destination_and_source( + where, port, source, source_port + ) + (begin_time, expiration) = _compute_times(timeout) + if sock: + cm: contextlib.AbstractContextManager = contextlib.nullcontext(sock) + else: + cm = _make_socket(af, socket.SOCK_DGRAM, source) + with cm as s: + send_udp(s, wire, destination, expiration) + (r, received_time) = receive_udp( + s, + destination, + expiration, + ignore_unexpected, + one_rr_per_rrset, + q.keyring, + q.mac, + ignore_trailing, + raise_on_truncation, + ignore_errors, + q, + ) + r.time = received_time - begin_time + # We don't need to check q.is_response() if we are in ignore_errors mode + # as receive_udp() will have checked it. + if not (ignore_errors or q.is_response(r)): + raise BadResponse + return r + assert ( + False # help mypy figure out we can't get here lgtm[py/unreachable-statement] + ) + + +def udp_with_fallback( + q: dns.message.Message, + where: str, + timeout: Optional[float] = None, + port: int = 53, + source: Optional[str] = None, + source_port: int = 0, + ignore_unexpected: bool = False, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + udp_sock: Optional[Any] = None, + tcp_sock: Optional[Any] = None, + ignore_errors: bool = False, +) -> Tuple[dns.message.Message, bool]: + """Return the response to the query, trying UDP first and falling back + to TCP if UDP results in a truncated response. + + *q*, a ``dns.message.Message``, the query to send + + *where*, a ``str`` containing an IPv4 or IPv6 address, where to send the message. + + *timeout*, a ``float`` or ``None``, the number of seconds to wait before the query + times out. If ``None``, the default, wait forever. + + *port*, an ``int``, the port send the message to. The default is 53. + + *source*, a ``str`` containing an IPv4 or IPv6 address, specifying the source + address. The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. The default is + 0. + + *ignore_unexpected*, a ``bool``. If ``True``, ignore responses from unexpected + sources. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing junk at end of the + received message. + + *udp_sock*, a ``socket.socket``, or ``None``, the socket to use for the UDP query. + If ``None``, the default, a socket is created. Note that if a socket is provided, + it must be a nonblocking datagram socket, and the *source* and *source_port* are + ignored for the UDP query. + + *tcp_sock*, a ``socket.socket``, or ``None``, the connected socket to use for the + TCP query. If ``None``, the default, a socket is created. Note that if a socket is + provided, it must be a nonblocking connected stream socket, and *where*, *source* + and *source_port* are ignored for the TCP query. + + *ignore_errors*, a ``bool``. If various format errors or response mismatches occur + while listening for UDP, ignore them and keep listening for a valid response. The + default is ``False``. + + Returns a (``dns.message.Message``, tcp) tuple where tcp is ``True`` if and only if + TCP was used. 
+ """ + try: + response = udp( + q, + where, + timeout, + port, + source, + source_port, + ignore_unexpected, + one_rr_per_rrset, + ignore_trailing, + True, + udp_sock, + ignore_errors, + ) + return (response, False) + except dns.message.Truncated: + response = tcp( + q, + where, + timeout, + port, + source, + source_port, + one_rr_per_rrset, + ignore_trailing, + tcp_sock, + ) + return (response, True) + + +def _net_read(sock, count, expiration): + """Read the specified number of bytes from sock. Keep trying until we + either get the desired amount, or we hit EOF. + A Timeout exception will be raised if the operation is not completed + by the expiration time. + """ + s = b"" + while count > 0: + try: + n = sock.recv(count) + if n == b"": + raise EOFError("EOF") + count -= len(n) + s += n + except (BlockingIOError, ssl.SSLWantReadError): + _wait_for_readable(sock, expiration) + except ssl.SSLWantWriteError: # pragma: no cover + _wait_for_writable(sock, expiration) + return s + + +def _net_write(sock, data, expiration): + """Write the specified data to the socket. + A Timeout exception will be raised if the operation is not completed + by the expiration time. + """ + current = 0 + l = len(data) + while current < l: + try: + current += sock.send(data[current:]) + except (BlockingIOError, ssl.SSLWantWriteError): + _wait_for_writable(sock, expiration) + except ssl.SSLWantReadError: # pragma: no cover + _wait_for_readable(sock, expiration) + + +def send_tcp( + sock: Any, + what: Union[dns.message.Message, bytes], + expiration: Optional[float] = None, +) -> Tuple[int, float]: + """Send a DNS message to the specified TCP socket. + + *sock*, a ``socket``. + + *what*, a ``bytes`` or ``dns.message.Message``, the message to send. + + *expiration*, a ``float`` or ``None``, the absolute time at which + a timeout exception should be raised. If ``None``, no timeout will + occur. + + Returns an ``(int, float)`` tuple of bytes sent and the sent time. + """ + + if isinstance(what, dns.message.Message): + tcpmsg = what.to_wire(prepend_length=True) + else: + # copying the wire into tcpmsg is inefficient, but lets us + # avoid writev() or doing a short write that would get pushed + # onto the net + tcpmsg = len(what).to_bytes(2, "big") + what + sent_time = time.time() + _net_write(sock, tcpmsg, expiration) + return (len(tcpmsg), sent_time) + + +def receive_tcp( + sock: Any, + expiration: Optional[float] = None, + one_rr_per_rrset: bool = False, + keyring: Optional[Dict[dns.name.Name, dns.tsig.Key]] = None, + request_mac: Optional[bytes] = b"", + ignore_trailing: bool = False, +) -> Tuple[dns.message.Message, float]: + """Read a DNS message from a TCP socket. + + *sock*, a ``socket``. + + *expiration*, a ``float`` or ``None``, the absolute time at which + a timeout exception should be raised. If ``None``, no timeout will + occur. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *keyring*, a ``dict``, the keyring to use for TSIG. + + *request_mac*, a ``bytes`` or ``None``, the MAC of the request (for TSIG). + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the received message. + + Raises if the message is malformed, if network errors occur, of if + there is a timeout. + + Returns a ``(dns.message.Message, float)`` tuple of the received message + and the received time. 
+ """ + + ldata = _net_read(sock, 2, expiration) + (l,) = struct.unpack("!H", ldata) + wire = _net_read(sock, l, expiration) + received_time = time.time() + r = dns.message.from_wire( + wire, + keyring=keyring, + request_mac=request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + return (r, received_time) + + +def _connect(s, address, expiration): + err = s.connect_ex(address) + if err == 0: + return + if err in (errno.EINPROGRESS, errno.EWOULDBLOCK, errno.EALREADY): + _wait_for_writable(s, expiration) + err = s.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR) + if err != 0: + raise OSError(err, os.strerror(err)) + + +def tcp( + q: dns.message.Message, + where: str, + timeout: Optional[float] = None, + port: int = 53, + source: Optional[str] = None, + source_port: int = 0, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + sock: Optional[Any] = None, +) -> dns.message.Message: + """Return the response obtained after sending a query via TCP. + + *q*, a ``dns.message.Message``, the query to send + + *where*, a ``str`` containing an IPv4 or IPv6 address, where + to send the message. + + *timeout*, a ``float`` or ``None``, the number of seconds to wait before the + query times out. If ``None``, the default, wait forever. + + *port*, an ``int``, the port send the message to. The default is 53. + + *source*, a ``str`` containing an IPv4 or IPv6 address, specifying + the source address. The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. + The default is 0. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the received message. + + *sock*, a ``socket.socket``, or ``None``, the connected socket to use for the + query. If ``None``, the default, a socket is created. Note that + if a socket is provided, it must be a nonblocking connected stream + socket, and *where*, *port*, *source* and *source_port* are ignored. + + Returns a ``dns.message.Message``. 
+ """ + + wire = q.to_wire() + (begin_time, expiration) = _compute_times(timeout) + if sock: + cm: contextlib.AbstractContextManager = contextlib.nullcontext(sock) + else: + (af, destination, source) = _destination_and_source( + where, port, source, source_port + ) + cm = _make_socket(af, socket.SOCK_STREAM, source) + with cm as s: + if not sock: + # pylint: disable=possibly-used-before-assignment + _connect(s, destination, expiration) + send_tcp(s, wire, expiration) + (r, received_time) = receive_tcp( + s, expiration, one_rr_per_rrset, q.keyring, q.mac, ignore_trailing + ) + r.time = received_time - begin_time + if not q.is_response(r): + raise BadResponse + return r + assert ( + False # help mypy figure out we can't get here lgtm[py/unreachable-statement] + ) + + +def _tls_handshake(s, expiration): + while True: + try: + s.do_handshake() + return + except ssl.SSLWantReadError: + _wait_for_readable(s, expiration) + except ssl.SSLWantWriteError: # pragma: no cover + _wait_for_writable(s, expiration) + + +def _make_dot_ssl_context( + server_hostname: Optional[str], verify: Union[bool, str] +) -> ssl.SSLContext: + cafile: Optional[str] = None + capath: Optional[str] = None + if isinstance(verify, str): + if os.path.isfile(verify): + cafile = verify + elif os.path.isdir(verify): + capath = verify + else: + raise ValueError("invalid verify string") + ssl_context = ssl.create_default_context(cafile=cafile, capath=capath) + ssl_context.minimum_version = ssl.TLSVersion.TLSv1_2 + if server_hostname is None: + ssl_context.check_hostname = False + ssl_context.set_alpn_protocols(["dot"]) + if verify is False: + ssl_context.verify_mode = ssl.CERT_NONE + return ssl_context + + +def tls( + q: dns.message.Message, + where: str, + timeout: Optional[float] = None, + port: int = 853, + source: Optional[str] = None, + source_port: int = 0, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + sock: Optional[ssl.SSLSocket] = None, + ssl_context: Optional[ssl.SSLContext] = None, + server_hostname: Optional[str] = None, + verify: Union[bool, str] = True, +) -> dns.message.Message: + """Return the response obtained after sending a query via TLS. + + *q*, a ``dns.message.Message``, the query to send + + *where*, a ``str`` containing an IPv4 or IPv6 address, where + to send the message. + + *timeout*, a ``float`` or ``None``, the number of seconds to wait before the + query times out. If ``None``, the default, wait forever. + + *port*, an ``int``, the port send the message to. The default is 853. + + *source*, a ``str`` containing an IPv4 or IPv6 address, specifying + the source address. The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. + The default is 0. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the received message. + + *sock*, an ``ssl.SSLSocket``, or ``None``, the socket to use for + the query. If ``None``, the default, a socket is created. Note + that if a socket is provided, it must be a nonblocking connected + SSL stream socket, and *where*, *port*, *source*, *source_port*, + and *ssl_context* are ignored. + + *ssl_context*, an ``ssl.SSLContext``, the context to use when establishing + a TLS connection. If ``None``, the default, creates one with the default + configuration. + + *server_hostname*, a ``str`` containing the server's hostname. 
The
+    default is ``None``, which means that no hostname is known, and if an
+    SSL context is created, hostname checking will be disabled.
+
+    *verify*, a ``bool`` or ``str``. If ``True``, then TLS certificate verification
+    of the server is done using the default CA bundle; if ``False``, then no
+    verification is done; if a ``str`` then it specifies the path to a certificate
+    file or directory which will be used for verification.
+
+    Returns a ``dns.message.Message``.
+
+    """
+
+    if sock:
+        #
+        # If a socket was provided, there's no special TLS handling needed.
+        #
+        return tcp(
+            q,
+            where,
+            timeout,
+            port,
+            source,
+            source_port,
+            one_rr_per_rrset,
+            ignore_trailing,
+            sock,
+        )
+
+    wire = q.to_wire()
+    (begin_time, expiration) = _compute_times(timeout)
+    (af, destination, source) = _destination_and_source(
+        where, port, source, source_port
+    )
+    if ssl_context is None and not sock:
+        ssl_context = _make_dot_ssl_context(server_hostname, verify)
+
+    with _make_socket(
+        af,
+        socket.SOCK_STREAM,
+        source,
+        ssl_context=ssl_context,
+        server_hostname=server_hostname,
+    ) as s:
+        _connect(s, destination, expiration)
+        _tls_handshake(s, expiration)
+        send_tcp(s, wire, expiration)
+        (r, received_time) = receive_tcp(
+            s, expiration, one_rr_per_rrset, q.keyring, q.mac, ignore_trailing
+        )
+        r.time = received_time - begin_time
+        if not q.is_response(r):
+            raise BadResponse
+        return r
+    assert (
+        False  # help mypy figure out we can't get here lgtm[py/unreachable-statement]
+    )
+
+
+def quic(
+    q: dns.message.Message,
+    where: str,
+    timeout: Optional[float] = None,
+    port: int = 853,
+    source: Optional[str] = None,
+    source_port: int = 0,
+    one_rr_per_rrset: bool = False,
+    ignore_trailing: bool = False,
+    connection: Optional[dns.quic.SyncQuicConnection] = None,
+    verify: Union[bool, str] = True,
+    hostname: Optional[str] = None,
+    server_hostname: Optional[str] = None,
+) -> dns.message.Message:
+    """Return the response obtained after sending a query via DNS-over-QUIC.
+
+    *q*, a ``dns.message.Message``, the query to send.
+
+    *where*, a ``str``, the nameserver IP address.
+
+    *timeout*, a ``float`` or ``None``, the number of seconds to wait before the query
+    times out. If ``None``, the default, wait forever.
+
+    *port*, an ``int``, the port to send the query to. The default is 853.
+
+    *source*, a ``str`` containing an IPv4 or IPv6 address, specifying the source
+    address. The default is the wildcard address.
+
+    *source_port*, an ``int``, the port from which to send the message. The default is
+    0.
+
+    *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own RRset.
+
+    *ignore_trailing*, a ``bool``. If ``True``, ignore trailing junk at end of the
+    received message.
+
+    *connection*, a ``dns.quic.SyncQuicConnection``. If provided, the connection to use
+    to send the query.
+
+    *verify*, a ``bool`` or ``str``. If ``True``, then TLS certificate verification
+    of the server is done using the default CA bundle; if ``False``, then no
+    verification is done; if a ``str`` then it specifies the path to a certificate
+    file or directory which will be used for verification.
+
+    *hostname*, a ``str`` containing the server's hostname or ``None``. The default is
+    ``None``, which means that no hostname is known, and if an SSL context is created,
+    hostname checking will be disabled.
+
+    *server_hostname*, a ``str`` or ``None``. This item is for backwards compatibility
+    only, and has the same meaning as *hostname*.
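+
+    A minimal usage sketch (editor's addition; requires the optional DoQ
+    dependencies (aioquic), and the address and hostname are placeholders)::
+
+        q = dns.message.make_query("example.com", dns.rdatatype.A)
+        r = dns.query.quic(q, "203.0.113.53", hostname="dns.example.net")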
+ + Returns a ``dns.message.Message``. + """ + + if not dns.quic.have_quic: + raise NoDOQ("DNS-over-QUIC is not available.") # pragma: no cover + + if server_hostname is not None and hostname is None: + hostname = server_hostname + + q.id = 0 + wire = q.to_wire() + the_connection: dns.quic.SyncQuicConnection + the_manager: dns.quic.SyncQuicManager + if connection: + manager: contextlib.AbstractContextManager = contextlib.nullcontext(None) + the_connection = connection + else: + manager = dns.quic.SyncQuicManager(verify_mode=verify, server_name=hostname) + the_manager = manager # for type checking happiness + + with manager: + if not connection: + the_connection = the_manager.connect(where, port, source, source_port) + (start, expiration) = _compute_times(timeout) + with the_connection.make_stream(timeout) as stream: + stream.send(wire, True) + wire = stream.receive(_remaining(expiration)) + finish = time.time() + r = dns.message.from_wire( + wire, + keyring=q.keyring, + request_mac=q.request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + r.time = max(finish - start, 0.0) + if not q.is_response(r): + raise BadResponse + return r + + +class UDPMode(enum.IntEnum): + """How should UDP be used in an IXFR from :py:func:`inbound_xfr()`? + + NEVER means "never use UDP; always use TCP" + TRY_FIRST means "try to use UDP but fall back to TCP if needed" + ONLY means "raise ``dns.xfr.UseTCP`` if trying UDP does not succeed" + """ + + NEVER = 0 + TRY_FIRST = 1 + ONLY = 2 + + +def _inbound_xfr( + txn_manager: dns.transaction.TransactionManager, + s: socket.socket, + query: dns.message.Message, + serial: Optional[int], + timeout: Optional[float], + expiration: float, +) -> Any: + """Given a socket, does the zone transfer.""" + rdtype = query.question[0].rdtype + is_ixfr = rdtype == dns.rdatatype.IXFR + origin = txn_manager.from_wire_origin() + wire = query.to_wire() + is_udp = s.type == socket.SOCK_DGRAM + if is_udp: + _udp_send(s, wire, None, expiration) + else: + tcpmsg = struct.pack("!H", len(wire)) + wire + _net_write(s, tcpmsg, expiration) + with dns.xfr.Inbound(txn_manager, rdtype, serial, is_udp) as inbound: + done = False + tsig_ctx = None + while not done: + (_, mexpiration) = _compute_times(timeout) + if mexpiration is None or ( + expiration is not None and mexpiration > expiration + ): + mexpiration = expiration + if is_udp: + (rwire, _) = _udp_recv(s, 65535, mexpiration) + else: + ldata = _net_read(s, 2, mexpiration) + (l,) = struct.unpack("!H", ldata) + rwire = _net_read(s, l, mexpiration) + r = dns.message.from_wire( + rwire, + keyring=query.keyring, + request_mac=query.mac, + xfr=True, + origin=origin, + tsig_ctx=tsig_ctx, + multi=(not is_udp), + one_rr_per_rrset=is_ixfr, + ) + done = inbound.process_message(r) + yield r + tsig_ctx = r.tsig_ctx + if query.keyring and not r.had_tsig: + raise dns.exception.FormError("missing TSIG") + + +def xfr( + where: str, + zone: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.AXFR, + rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, + timeout: Optional[float] = None, + port: int = 53, + keyring: Optional[Dict[dns.name.Name, dns.tsig.Key]] = None, + keyname: Optional[Union[dns.name.Name, str]] = None, + relativize: bool = True, + lifetime: Optional[float] = None, + source: Optional[str] = None, + source_port: int = 0, + serial: int = 0, + use_udp: bool = False, + keyalgorithm: Union[dns.name.Name, str] = dns.tsig.default_algorithm, +) -> Any: + """Return a 
generator for the responses to a zone transfer. + + *where*, a ``str`` containing an IPv4 or IPv6 address, where + to send the message. + + *zone*, a ``dns.name.Name`` or ``str``, the name of the zone to transfer. + + *rdtype*, an ``int`` or ``str``, the type of zone transfer. The + default is ``dns.rdatatype.AXFR``. ``dns.rdatatype.IXFR`` can be + used to do an incremental transfer instead. + + *rdclass*, an ``int`` or ``str``, the class of the zone transfer. + The default is ``dns.rdataclass.IN``. + + *timeout*, a ``float``, the number of seconds to wait for each + response message. If None, the default, wait forever. + + *port*, an ``int``, the port send the message to. The default is 53. + + *keyring*, a ``dict``, the keyring to use for TSIG. + + *keyname*, a ``dns.name.Name`` or ``str``, the name of the TSIG + key to use. + + *relativize*, a ``bool``. If ``True``, all names in the zone will be + relativized to the zone origin. It is essential that the + relativize setting matches the one specified to + ``dns.zone.from_xfr()`` if using this generator to make a zone. + + *lifetime*, a ``float``, the total number of seconds to spend + doing the transfer. If ``None``, the default, then there is no + limit on the time the transfer may take. + + *source*, a ``str`` containing an IPv4 or IPv6 address, specifying + the source address. The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. + The default is 0. + + *serial*, an ``int``, the SOA serial number to use as the base for + an IXFR diff sequence (only meaningful if *rdtype* is + ``dns.rdatatype.IXFR``). + + *use_udp*, a ``bool``. If ``True``, use UDP (only meaningful for IXFR). + + *keyalgorithm*, a ``dns.name.Name`` or ``str``, the TSIG algorithm to use. + + Raises on errors, and so does the generator. + + Returns a generator of ``dns.message.Message`` objects. + """ + + class DummyTransactionManager(dns.transaction.TransactionManager): + def __init__(self, origin, relativize): + self.info = (origin, relativize, dns.name.empty if relativize else origin) + + def origin_information(self): + return self.info + + def get_class(self) -> dns.rdataclass.RdataClass: + raise NotImplementedError # pragma: no cover + + def reader(self): + raise NotImplementedError # pragma: no cover + + def writer(self, replacement: bool = False) -> dns.transaction.Transaction: + class DummyTransaction: + def nop(self, *args, **kw): + pass + + def __getattr__(self, _): + return self.nop + + return cast(dns.transaction.Transaction, DummyTransaction()) + + if isinstance(zone, str): + zone = dns.name.from_text(zone) + rdtype = dns.rdatatype.RdataType.make(rdtype) + q = dns.message.make_query(zone, rdtype, rdclass) + if rdtype == dns.rdatatype.IXFR: + rrset = q.find_rrset( + q.authority, zone, dns.rdataclass.IN, dns.rdatatype.SOA, create=True + ) + soa = dns.rdata.from_text("IN", "SOA", ". . 
%u 0 0 0 0" % serial)
+        rrset.add(soa, 0)
+    if keyring is not None:
+        q.use_tsig(keyring, keyname, algorithm=keyalgorithm)
+    (af, destination, source) = _destination_and_source(
+        where, port, source, source_port
+    )
+    (_, expiration) = _compute_times(lifetime)
+    tm = DummyTransactionManager(zone, relativize)
+    if use_udp and rdtype != dns.rdatatype.IXFR:
+        raise ValueError("cannot do a UDP AXFR")
+    sock_type = socket.SOCK_DGRAM if use_udp else socket.SOCK_STREAM
+    with _make_socket(af, sock_type, source) as s:
+        _connect(s, destination, expiration)
+        yield from _inbound_xfr(tm, s, q, serial, timeout, expiration)
+
+
+def inbound_xfr(
+    where: str,
+    txn_manager: dns.transaction.TransactionManager,
+    query: Optional[dns.message.Message] = None,
+    port: int = 53,
+    timeout: Optional[float] = None,
+    lifetime: Optional[float] = None,
+    source: Optional[str] = None,
+    source_port: int = 0,
+    udp_mode: UDPMode = UDPMode.NEVER,
+) -> None:
+    """Conduct an inbound transfer and apply it via a transaction from the
+    txn_manager.
+
+    *where*, a ``str`` containing an IPv4 or IPv6 address, where
+    to send the message.
+
+    *txn_manager*, a ``dns.transaction.TransactionManager``, the txn_manager
+    for this transfer (typically a ``dns.zone.Zone``).
+
+    *query*, the query to send. If not supplied, a default query is
+    constructed using information from the *txn_manager*.
+
+    *port*, an ``int``, the port to send the message to. The default is 53.
+
+    *timeout*, a ``float``, the number of seconds to wait for each
+    response message. If None, the default, wait forever.
+
+    *lifetime*, a ``float``, the total number of seconds to spend
+    doing the transfer. If ``None``, the default, then there is no
+    limit on the time the transfer may take.
+
+    *source*, a ``str`` containing an IPv4 or IPv6 address, specifying
+    the source address. The default is the wildcard address.
+
+    *source_port*, an ``int``, the port from which to send the message.
+    The default is 0.
+
+    *udp_mode*, a ``dns.query.UDPMode``, determines how UDP is used
+    for IXFRs. The default is ``dns.query.UDPMode.NEVER``, i.e. only use
+    TCP. Other possibilities are ``dns.query.UDPMode.TRY_FIRST``, which
+    means "try UDP but fall back to TCP if needed", and
+    ``dns.query.UDPMode.ONLY``, which means "try UDP and raise
+    ``dns.xfr.UseTCP`` if it does not succeed."
+
+    Raises on errors.
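+
+    A minimal usage sketch (editor's addition; assumes the server at the
+    placeholder address allows transfers of example.com, and that a plain
+    ``dns.zone.Zone`` is acceptable as the transaction manager)::
+
+        zone = dns.zone.Zone("example.com")
+        dns.query.inbound_xfr("203.0.113.53", zone)
+        print(zone.get_rdataset("@", "SOA"))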
+ """ + if query is None: + (query, serial) = dns.xfr.make_query(txn_manager) + else: + serial = dns.xfr.extract_serial_from_query(query) + + (af, destination, source) = _destination_and_source( + where, port, source, source_port + ) + (_, expiration) = _compute_times(lifetime) + if query.question[0].rdtype == dns.rdatatype.IXFR and udp_mode != UDPMode.NEVER: + with _make_socket(af, socket.SOCK_DGRAM, source) as s: + _connect(s, destination, expiration) + try: + for _ in _inbound_xfr( + txn_manager, s, query, serial, timeout, expiration + ): + pass + return + except dns.xfr.UseTCP: + if udp_mode == UDPMode.ONLY: + raise + + with _make_socket(af, socket.SOCK_STREAM, source) as s: + _connect(s, destination, expiration) + for _ in _inbound_xfr(txn_manager, s, query, serial, timeout, expiration): + pass diff --git a/venv/lib/python3.11/site-packages/dns/quic/__init__.py b/venv/lib/python3.11/site-packages/dns/quic/__init__.py new file mode 100644 index 0000000..0750e72 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/quic/__init__.py @@ -0,0 +1,80 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +from typing import List, Tuple + +import dns._features +import dns.asyncbackend + +if dns._features.have("doq"): + import aioquic.quic.configuration # type: ignore + + from dns._asyncbackend import NullContext + from dns.quic._asyncio import ( + AsyncioQuicConnection, + AsyncioQuicManager, + AsyncioQuicStream, + ) + from dns.quic._common import AsyncQuicConnection, AsyncQuicManager + from dns.quic._sync import SyncQuicConnection, SyncQuicManager, SyncQuicStream + + have_quic = True + + def null_factory( + *args, # pylint: disable=unused-argument + **kwargs, # pylint: disable=unused-argument + ): + return NullContext(None) + + def _asyncio_manager_factory( + context, *args, **kwargs # pylint: disable=unused-argument + ): + return AsyncioQuicManager(*args, **kwargs) + + # We have a context factory and a manager factory as for trio we need to have + # a nursery. 
+ + _async_factories = {"asyncio": (null_factory, _asyncio_manager_factory)} + + if dns._features.have("trio"): + import trio + + from dns.quic._trio import ( # pylint: disable=ungrouped-imports + TrioQuicConnection, + TrioQuicManager, + TrioQuicStream, + ) + + def _trio_context_factory(): + return trio.open_nursery() + + def _trio_manager_factory(context, *args, **kwargs): + return TrioQuicManager(context, *args, **kwargs) + + _async_factories["trio"] = (_trio_context_factory, _trio_manager_factory) + + def factories_for_backend(backend=None): + if backend is None: + backend = dns.asyncbackend.get_default_backend() + return _async_factories[backend.name()] + +else: # pragma: no cover + have_quic = False + + from typing import Any + + class AsyncQuicStream: # type: ignore + pass + + class AsyncQuicConnection: # type: ignore + async def make_stream(self) -> Any: + raise NotImplementedError + + class SyncQuicStream: # type: ignore + pass + + class SyncQuicConnection: # type: ignore + def make_stream(self) -> Any: + raise NotImplementedError + + +Headers = List[Tuple[bytes, bytes]] diff --git a/venv/lib/python3.11/site-packages/dns/quic/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/quic/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..0a49033 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/quic/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/quic/__pycache__/_asyncio.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/quic/__pycache__/_asyncio.cpython-311.pyc new file mode 100644 index 0000000..df8c43f Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/quic/__pycache__/_asyncio.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/quic/__pycache__/_common.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/quic/__pycache__/_common.cpython-311.pyc new file mode 100644 index 0000000..6633a79 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/quic/__pycache__/_common.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/quic/__pycache__/_sync.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/quic/__pycache__/_sync.cpython-311.pyc new file mode 100644 index 0000000..b5ea4fa Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/quic/__pycache__/_sync.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/quic/__pycache__/_trio.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/quic/__pycache__/_trio.cpython-311.pyc new file mode 100644 index 0000000..922ef09 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/quic/__pycache__/_trio.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/quic/_asyncio.py b/venv/lib/python3.11/site-packages/dns/quic/_asyncio.py new file mode 100644 index 0000000..f87515d --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/quic/_asyncio.py @@ -0,0 +1,267 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import asyncio +import socket +import ssl +import struct +import time + +import aioquic.quic.configuration # type: ignore +import aioquic.quic.connection # type: ignore +import aioquic.quic.events # type: ignore + +import dns.asyncbackend +import dns.exception +import dns.inet +from dns.quic._common import ( + QUIC_MAX_DATAGRAM, + AsyncQuicConnection, + AsyncQuicManager, + BaseQuicStream, + UnexpectedEOF, +) + + +class AsyncioQuicStream(BaseQuicStream): + 
def __init__(self, connection, stream_id): + super().__init__(connection, stream_id) + self._wake_up = asyncio.Condition() + + async def _wait_for_wake_up(self): + async with self._wake_up: + await self._wake_up.wait() + + async def wait_for(self, amount, expiration): + while True: + timeout = self._timeout_from_expiration(expiration) + if self._buffer.have(amount): + return + self._expecting = amount + try: + await asyncio.wait_for(self._wait_for_wake_up(), timeout) + except TimeoutError: + raise dns.exception.Timeout + self._expecting = 0 + + async def wait_for_end(self, expiration): + while True: + timeout = self._timeout_from_expiration(expiration) + if self._buffer.seen_end(): + return + try: + await asyncio.wait_for(self._wait_for_wake_up(), timeout) + except TimeoutError: + raise dns.exception.Timeout + + async def receive(self, timeout=None): + expiration = self._expiration_from_timeout(timeout) + if self._connection.is_h3(): + await self.wait_for_end(expiration) + return self._buffer.get_all() + else: + await self.wait_for(2, expiration) + (size,) = struct.unpack("!H", self._buffer.get(2)) + await self.wait_for(size, expiration) + return self._buffer.get(size) + + async def send(self, datagram, is_end=False): + data = self._encapsulate(datagram) + await self._connection.write(self._stream_id, data, is_end) + + async def _add_input(self, data, is_end): + if self._common_add_input(data, is_end): + async with self._wake_up: + self._wake_up.notify() + + async def close(self): + self._close() + + # Streams are async context managers + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + await self.close() + async with self._wake_up: + self._wake_up.notify() + return False + + +class AsyncioQuicConnection(AsyncQuicConnection): + def __init__(self, connection, address, port, source, source_port, manager=None): + super().__init__(connection, address, port, source, source_port, manager) + self._socket = None + self._handshake_complete = asyncio.Event() + self._socket_created = asyncio.Event() + self._wake_timer = asyncio.Condition() + self._receiver_task = None + self._sender_task = None + self._wake_pending = False + + async def _receiver(self): + try: + af = dns.inet.af_for_address(self._address) + backend = dns.asyncbackend.get_backend("asyncio") + # Note that peer is a low-level address tuple, but make_socket() wants + # a high-level address tuple, so we convert. + self._socket = await backend.make_socket( + af, socket.SOCK_DGRAM, 0, self._source, (self._peer[0], self._peer[1]) + ) + self._socket_created.set() + async with self._socket: + while not self._done: + (datagram, address) = await self._socket.recvfrom( + QUIC_MAX_DATAGRAM, None + ) + if address[0] != self._peer[0] or address[1] != self._peer[1]: + continue + self._connection.receive_datagram(datagram, address, time.time()) + # Wake up the timer in case the sender is sleeping, as there may be + # stuff to send now. 
+ await self._wakeup() + except Exception: + pass + finally: + self._done = True + await self._wakeup() + self._handshake_complete.set() + + async def _wakeup(self): + self._wake_pending = True + async with self._wake_timer: + self._wake_timer.notify_all() + + async def _wait_for_wake_timer(self): + async with self._wake_timer: + if not self._wake_pending: + await self._wake_timer.wait() + self._wake_pending = False + + async def _sender(self): + await self._socket_created.wait() + while not self._done: + datagrams = self._connection.datagrams_to_send(time.time()) + for datagram, address in datagrams: + assert address == self._peer + await self._socket.sendto(datagram, self._peer, None) + (expiration, interval) = self._get_timer_values() + try: + await asyncio.wait_for(self._wait_for_wake_timer(), interval) + except Exception: + pass + self._handle_timer(expiration) + await self._handle_events() + + async def _handle_events(self): + count = 0 + while True: + event = self._connection.next_event() + if event is None: + return + if isinstance(event, aioquic.quic.events.StreamDataReceived): + if self.is_h3(): + h3_events = self._h3_conn.handle_event(event) + for h3_event in h3_events: + if isinstance(h3_event, aioquic.h3.events.HeadersReceived): + stream = self._streams.get(event.stream_id) + if stream: + if stream._headers is None: + stream._headers = h3_event.headers + elif stream._trailers is None: + stream._trailers = h3_event.headers + if h3_event.stream_ended: + await stream._add_input(b"", True) + elif isinstance(h3_event, aioquic.h3.events.DataReceived): + stream = self._streams.get(event.stream_id) + if stream: + await stream._add_input( + h3_event.data, h3_event.stream_ended + ) + else: + stream = self._streams.get(event.stream_id) + if stream: + await stream._add_input(event.data, event.end_stream) + elif isinstance(event, aioquic.quic.events.HandshakeCompleted): + self._handshake_complete.set() + elif isinstance(event, aioquic.quic.events.ConnectionTerminated): + self._done = True + self._receiver_task.cancel() + elif isinstance(event, aioquic.quic.events.StreamReset): + stream = self._streams.get(event.stream_id) + if stream: + await stream._add_input(b"", True) + + count += 1 + if count > 10: + # yield + count = 0 + await asyncio.sleep(0) + + async def write(self, stream, data, is_end=False): + self._connection.send_stream_data(stream, data, is_end) + await self._wakeup() + + def run(self): + if self._closed: + return + self._receiver_task = asyncio.Task(self._receiver()) + self._sender_task = asyncio.Task(self._sender()) + + async def make_stream(self, timeout=None): + try: + await asyncio.wait_for(self._handshake_complete.wait(), timeout) + except TimeoutError: + raise dns.exception.Timeout + if self._done: + raise UnexpectedEOF + stream_id = self._connection.get_next_available_stream_id(False) + stream = AsyncioQuicStream(self, stream_id) + self._streams[stream_id] = stream + return stream + + async def close(self): + if not self._closed: + self._manager.closed(self._peer[0], self._peer[1]) + self._closed = True + self._connection.close() + # sender might be blocked on this, so set it + self._socket_created.set() + await self._wakeup() + try: + await self._receiver_task + except asyncio.CancelledError: + pass + try: + await self._sender_task + except asyncio.CancelledError: + pass + await self._socket.close() + + +class AsyncioQuicManager(AsyncQuicManager): + def __init__( + self, conf=None, verify_mode=ssl.CERT_REQUIRED, server_name=None, h3=False + ): + 
super().__init__(conf, verify_mode, AsyncioQuicConnection, server_name, h3) + + def connect( + self, address, port=853, source=None, source_port=0, want_session_ticket=True + ): + (connection, start) = self._connect( + address, port, source, source_port, want_session_ticket + ) + if start: + connection.run() + return connection + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + # Copy the iterator into a list as exiting things will mutate the connections + # table. + connections = list(self._connections.values()) + for connection in connections: + await connection.close() + return False diff --git a/venv/lib/python3.11/site-packages/dns/quic/_common.py b/venv/lib/python3.11/site-packages/dns/quic/_common.py new file mode 100644 index 0000000..ce575b0 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/quic/_common.py @@ -0,0 +1,339 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import base64 +import copy +import functools +import socket +import struct +import time +import urllib +from typing import Any, Optional + +import aioquic.h3.connection # type: ignore +import aioquic.h3.events # type: ignore +import aioquic.quic.configuration # type: ignore +import aioquic.quic.connection # type: ignore + +import dns.inet + +QUIC_MAX_DATAGRAM = 2048 +MAX_SESSION_TICKETS = 8 +# If we hit the max sessions limit we will delete this many of the oldest connections. +# The value must be a integer > 0 and <= MAX_SESSION_TICKETS. +SESSIONS_TO_DELETE = MAX_SESSION_TICKETS // 4 + + +class UnexpectedEOF(Exception): + pass + + +class Buffer: + def __init__(self): + self._buffer = b"" + self._seen_end = False + + def put(self, data, is_end): + if self._seen_end: + return + self._buffer += data + if is_end: + self._seen_end = True + + def have(self, amount): + if len(self._buffer) >= amount: + return True + if self._seen_end: + raise UnexpectedEOF + return False + + def seen_end(self): + return self._seen_end + + def get(self, amount): + assert self.have(amount) + data = self._buffer[:amount] + self._buffer = self._buffer[amount:] + return data + + def get_all(self): + assert self.seen_end() + data = self._buffer + self._buffer = b"" + return data + + +class BaseQuicStream: + def __init__(self, connection, stream_id): + self._connection = connection + self._stream_id = stream_id + self._buffer = Buffer() + self._expecting = 0 + self._headers = None + self._trailers = None + + def id(self): + return self._stream_id + + def headers(self): + return self._headers + + def trailers(self): + return self._trailers + + def _expiration_from_timeout(self, timeout): + if timeout is not None: + expiration = time.time() + timeout + else: + expiration = None + return expiration + + def _timeout_from_expiration(self, expiration): + if expiration is not None: + timeout = max(expiration - time.time(), 0.0) + else: + timeout = None + return timeout + + # Subclass must implement receive() as sync / async and which returns a message + # or raises. + + # Subclass must implement send() as sync / async and which takes a message and + # an EOF indicator. 
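+
+    # Editor's note: for plain DoQ (non-H3) streams, _encapsulate() below frames
+    # each message with a two-byte big-endian length prefix, exactly as DNS over
+    # TCP does. A hypothetical 5-byte message b"\x12\x34\x00\x00\x01" goes on
+    # the wire as:
+    #
+    #     struct.pack("!H", 5) + b"\x12\x34\x00\x00\x01"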
+
+    def send_h3(self, url, datagram, post=True):
+        if not self._connection.is_h3():
+            raise SyntaxError("cannot send H3 to a non-H3 connection")
+        url_parts = urllib.parse.urlparse(url)
+        path = url_parts.path.encode()
+        if post:
+            method = b"POST"
+        else:
+            method = b"GET"
+            path += b"?dns=" + base64.urlsafe_b64encode(datagram).rstrip(b"=")
+        headers = [
+            (b":method", method),
+            (b":scheme", url_parts.scheme.encode()),
+            (b":authority", url_parts.netloc.encode()),
+            (b":path", path),
+            (b"accept", b"application/dns-message"),
+        ]
+        if post:
+            headers.extend(
+                [
+                    (b"content-type", b"application/dns-message"),
+                    (b"content-length", str(len(datagram)).encode()),
+                ]
+            )
+        self._connection.send_headers(self._stream_id, headers, not post)
+        if post:
+            self._connection.send_data(self._stream_id, datagram, True)
+
+    def _encapsulate(self, datagram):
+        if self._connection.is_h3():
+            return datagram
+        l = len(datagram)
+        return struct.pack("!H", l) + datagram
+
+    def _common_add_input(self, data, is_end):
+        self._buffer.put(data, is_end)
+        try:
+            return (
+                self._expecting > 0 and self._buffer.have(self._expecting)
+            ) or self._buffer.seen_end()
+        except UnexpectedEOF:
+            return True
+
+    def _close(self):
+        self._connection.close_stream(self._stream_id)
+        self._buffer.put(b"", True)  # send EOF in case we haven't seen it.
+
+
+class BaseQuicConnection:
+    def __init__(
+        self,
+        connection,
+        address,
+        port,
+        source=None,
+        source_port=0,
+        manager=None,
+    ):
+        self._done = False
+        self._connection = connection
+        self._address = address
+        self._port = port
+        self._closed = False
+        self._manager = manager
+        self._streams = {}
+        if manager.is_h3():
+            self._h3_conn = aioquic.h3.connection.H3Connection(connection, False)
+        else:
+            self._h3_conn = None
+        self._af = dns.inet.af_for_address(address)
+        self._peer = dns.inet.low_level_address_tuple((address, port))
+        if source is None and source_port != 0:
+            if self._af == socket.AF_INET:
+                source = "0.0.0.0"
+            elif self._af == socket.AF_INET6:
+                source = "::"
+            else:
+                raise NotImplementedError
+        if source:
+            self._source = (source, source_port)
+        else:
+            self._source = None
+
+    def is_h3(self):
+        return self._h3_conn is not None
+
+    def close_stream(self, stream_id):
+        del self._streams[stream_id]
+
+    def send_headers(self, stream_id, headers, is_end=False):
+        self._h3_conn.send_headers(stream_id, headers, is_end)
+
+    def send_data(self, stream_id, data, is_end=False):
+        self._h3_conn.send_data(stream_id, data, is_end)
+
+    def _get_timer_values(self, closed_is_special=True):
+        now = time.time()
+        expiration = self._connection.get_timer()
+        if expiration is None:
+            expiration = now + 3600  # arbitrary "big" value
+        interval = max(expiration - now, 0)
+        if self._closed and closed_is_special:
+            # lower sleep interval to avoid a race in the closing process
+            # which can lead to higher latency closing due to sleeping when
+            # we have events.
+ interval = min(interval, 0.05) + return (expiration, interval) + + def _handle_timer(self, expiration): + now = time.time() + if expiration <= now: + self._connection.handle_timer(now) + + +class AsyncQuicConnection(BaseQuicConnection): + async def make_stream(self, timeout: Optional[float] = None) -> Any: + pass + + +class BaseQuicManager: + def __init__( + self, conf, verify_mode, connection_factory, server_name=None, h3=False + ): + self._connections = {} + self._connection_factory = connection_factory + self._session_tickets = {} + self._tokens = {} + self._h3 = h3 + if conf is None: + verify_path = None + if isinstance(verify_mode, str): + verify_path = verify_mode + verify_mode = True + if h3: + alpn_protocols = ["h3"] + else: + alpn_protocols = ["doq", "doq-i03"] + conf = aioquic.quic.configuration.QuicConfiguration( + alpn_protocols=alpn_protocols, + verify_mode=verify_mode, + server_name=server_name, + ) + if verify_path is not None: + conf.load_verify_locations(verify_path) + self._conf = conf + + def _connect( + self, + address, + port=853, + source=None, + source_port=0, + want_session_ticket=True, + want_token=True, + ): + connection = self._connections.get((address, port)) + if connection is not None: + return (connection, False) + conf = self._conf + if want_session_ticket: + try: + session_ticket = self._session_tickets.pop((address, port)) + # We found a session ticket, so make a configuration that uses it. + conf = copy.copy(conf) + conf.session_ticket = session_ticket + except KeyError: + # No session ticket. + pass + # Whether or not we found a session ticket, we want a handler to save + # one. + session_ticket_handler = functools.partial( + self.save_session_ticket, address, port + ) + else: + session_ticket_handler = None + if want_token: + try: + token = self._tokens.pop((address, port)) + # We found a token, so make a configuration that uses it. + conf = copy.copy(conf) + conf.token = token + except KeyError: + # No token + pass + # Whether or not we found a token, we want a handler to save # one. + token_handler = functools.partial(self.save_token, address, port) + else: + token_handler = None + + qconn = aioquic.quic.connection.QuicConnection( + configuration=conf, + session_ticket_handler=session_ticket_handler, + token_handler=token_handler, + ) + lladdress = dns.inet.low_level_address_tuple((address, port)) + qconn.connect(lladdress, time.time()) + connection = self._connection_factory( + qconn, address, port, source, source_port, self + ) + self._connections[(address, port)] = connection + return (connection, True) + + def closed(self, address, port): + try: + del self._connections[(address, port)] + except KeyError: + pass + + def is_h3(self): + return self._h3 + + def save_session_ticket(self, address, port, ticket): + # We rely on dictionaries keys() being in insertion order here. We + # can't just popitem() as that would be LIFO which is the opposite of + # what we want. + l = len(self._session_tickets) + if l >= MAX_SESSION_TICKETS: + keys_to_delete = list(self._session_tickets.keys())[0:SESSIONS_TO_DELETE] + for key in keys_to_delete: + del self._session_tickets[key] + self._session_tickets[(address, port)] = ticket + + def save_token(self, address, port, token): + # We rely on dictionaries keys() being in insertion order here. We + # can't just popitem() as that would be LIFO which is the opposite of + # what we want. 
+ l = len(self._tokens) + if l >= MAX_SESSION_TICKETS: + keys_to_delete = list(self._tokens.keys())[0:SESSIONS_TO_DELETE] + for key in keys_to_delete: + del self._tokens[key] + self._tokens[(address, port)] = token + + +class AsyncQuicManager(BaseQuicManager): + def connect(self, address, port=853, source=None, source_port=0): + raise NotImplementedError diff --git a/venv/lib/python3.11/site-packages/dns/quic/_sync.py b/venv/lib/python3.11/site-packages/dns/quic/_sync.py new file mode 100644 index 0000000..473d1f4 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/quic/_sync.py @@ -0,0 +1,295 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import selectors +import socket +import ssl +import struct +import threading +import time + +import aioquic.quic.configuration # type: ignore +import aioquic.quic.connection # type: ignore +import aioquic.quic.events # type: ignore + +import dns.exception +import dns.inet +from dns.quic._common import ( + QUIC_MAX_DATAGRAM, + BaseQuicConnection, + BaseQuicManager, + BaseQuicStream, + UnexpectedEOF, +) + +# Function used to create a socket. Can be overridden if needed in special +# situations. +socket_factory = socket.socket + + +class SyncQuicStream(BaseQuicStream): + def __init__(self, connection, stream_id): + super().__init__(connection, stream_id) + self._wake_up = threading.Condition() + self._lock = threading.Lock() + + def wait_for(self, amount, expiration): + while True: + timeout = self._timeout_from_expiration(expiration) + with self._lock: + if self._buffer.have(amount): + return + self._expecting = amount + with self._wake_up: + if not self._wake_up.wait(timeout): + raise dns.exception.Timeout + self._expecting = 0 + + def wait_for_end(self, expiration): + while True: + timeout = self._timeout_from_expiration(expiration) + with self._lock: + if self._buffer.seen_end(): + return + with self._wake_up: + if not self._wake_up.wait(timeout): + raise dns.exception.Timeout + + def receive(self, timeout=None): + expiration = self._expiration_from_timeout(timeout) + if self._connection.is_h3(): + self.wait_for_end(expiration) + with self._lock: + return self._buffer.get_all() + else: + self.wait_for(2, expiration) + with self._lock: + (size,) = struct.unpack("!H", self._buffer.get(2)) + self.wait_for(size, expiration) + with self._lock: + return self._buffer.get(size) + + def send(self, datagram, is_end=False): + data = self._encapsulate(datagram) + self._connection.write(self._stream_id, data, is_end) + + def _add_input(self, data, is_end): + if self._common_add_input(data, is_end): + with self._wake_up: + self._wake_up.notify() + + def close(self): + with self._lock: + self._close() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + with self._wake_up: + self._wake_up.notify() + return False + + +class SyncQuicConnection(BaseQuicConnection): + def __init__(self, connection, address, port, source, source_port, manager): + super().__init__(connection, address, port, source, source_port, manager) + self._socket = socket_factory(self._af, socket.SOCK_DGRAM, 0) + if self._source is not None: + try: + self._socket.bind( + dns.inet.low_level_address_tuple(self._source, self._af) + ) + except Exception: + self._socket.close() + raise + self._socket.connect(self._peer) + (self._send_wakeup, self._receive_wakeup) = socket.socketpair() + self._receive_wakeup.setblocking(False) + self._socket.setblocking(False) + self._handshake_complete = threading.Event() + 
self._worker_thread = None + self._lock = threading.Lock() + + def _read(self): + count = 0 + while count < 10: + count += 1 + try: + datagram = self._socket.recv(QUIC_MAX_DATAGRAM) + except BlockingIOError: + return + with self._lock: + self._connection.receive_datagram(datagram, self._peer, time.time()) + + def _drain_wakeup(self): + while True: + try: + self._receive_wakeup.recv(32) + except BlockingIOError: + return + + def _worker(self): + try: + sel = selectors.DefaultSelector() + sel.register(self._socket, selectors.EVENT_READ, self._read) + sel.register(self._receive_wakeup, selectors.EVENT_READ, self._drain_wakeup) + while not self._done: + (expiration, interval) = self._get_timer_values(False) + items = sel.select(interval) + for key, _ in items: + key.data() + with self._lock: + self._handle_timer(expiration) + self._handle_events() + with self._lock: + datagrams = self._connection.datagrams_to_send(time.time()) + for datagram, _ in datagrams: + try: + self._socket.send(datagram) + except BlockingIOError: + # we let QUIC handle any lossage + pass + finally: + with self._lock: + self._done = True + self._socket.close() + # Ensure anyone waiting for this gets woken up. + self._handshake_complete.set() + + def _handle_events(self): + while True: + with self._lock: + event = self._connection.next_event() + if event is None: + return + if isinstance(event, aioquic.quic.events.StreamDataReceived): + if self.is_h3(): + h3_events = self._h3_conn.handle_event(event) + for h3_event in h3_events: + if isinstance(h3_event, aioquic.h3.events.HeadersReceived): + with self._lock: + stream = self._streams.get(event.stream_id) + if stream: + if stream._headers is None: + stream._headers = h3_event.headers + elif stream._trailers is None: + stream._trailers = h3_event.headers + if h3_event.stream_ended: + stream._add_input(b"", True) + elif isinstance(h3_event, aioquic.h3.events.DataReceived): + with self._lock: + stream = self._streams.get(event.stream_id) + if stream: + stream._add_input(h3_event.data, h3_event.stream_ended) + else: + with self._lock: + stream = self._streams.get(event.stream_id) + if stream: + stream._add_input(event.data, event.end_stream) + elif isinstance(event, aioquic.quic.events.HandshakeCompleted): + self._handshake_complete.set() + elif isinstance(event, aioquic.quic.events.ConnectionTerminated): + with self._lock: + self._done = True + elif isinstance(event, aioquic.quic.events.StreamReset): + with self._lock: + stream = self._streams.get(event.stream_id) + if stream: + stream._add_input(b"", True) + + def write(self, stream, data, is_end=False): + with self._lock: + self._connection.send_stream_data(stream, data, is_end) + self._send_wakeup.send(b"\x01") + + def send_headers(self, stream_id, headers, is_end=False): + with self._lock: + super().send_headers(stream_id, headers, is_end) + if is_end: + self._send_wakeup.send(b"\x01") + + def send_data(self, stream_id, data, is_end=False): + with self._lock: + super().send_data(stream_id, data, is_end) + if is_end: + self._send_wakeup.send(b"\x01") + + def run(self): + if self._closed: + return + self._worker_thread = threading.Thread(target=self._worker) + self._worker_thread.start() + + def make_stream(self, timeout=None): + if not self._handshake_complete.wait(timeout): + raise dns.exception.Timeout + with self._lock: + if self._done: + raise UnexpectedEOF + stream_id = self._connection.get_next_available_stream_id(False) + stream = SyncQuicStream(self, stream_id) + self._streams[stream_id] = stream + return stream + + 
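+    # Editor's note: connections are normally obtained from SyncQuicManager, not
+    # constructed directly. A hedged sketch (placeholder address and hostname;
+    # *wire* is a query message rendered with ``to_wire()``):
+    #
+    #     with SyncQuicManager(server_name="dns.example.net") as manager:
+    #         connection = manager.connect("203.0.113.53", 853)
+    #         with connection.make_stream(timeout=5) as stream:
+    #             stream.send(wire, True)
+    #             reply_wire = stream.receive(5)
+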
def close_stream(self, stream_id): + with self._lock: + super().close_stream(stream_id) + + def close(self): + with self._lock: + if self._closed: + return + self._manager.closed(self._peer[0], self._peer[1]) + self._closed = True + self._connection.close() + self._send_wakeup.send(b"\x01") + self._worker_thread.join() + + +class SyncQuicManager(BaseQuicManager): + def __init__( + self, conf=None, verify_mode=ssl.CERT_REQUIRED, server_name=None, h3=False + ): + super().__init__(conf, verify_mode, SyncQuicConnection, server_name, h3) + self._lock = threading.Lock() + + def connect( + self, + address, + port=853, + source=None, + source_port=0, + want_session_ticket=True, + want_token=True, + ): + with self._lock: + (connection, start) = self._connect( + address, port, source, source_port, want_session_ticket, want_token + ) + if start: + connection.run() + return connection + + def closed(self, address, port): + with self._lock: + super().closed(address, port) + + def save_session_ticket(self, address, port, ticket): + with self._lock: + super().save_session_ticket(address, port, ticket) + + def save_token(self, address, port, token): + with self._lock: + super().save_token(address, port, token) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + # Copy the iterator into a list as exiting things will mutate the connections + # table. + connections = list(self._connections.values()) + for connection in connections: + connection.close() + return False diff --git a/venv/lib/python3.11/site-packages/dns/quic/_trio.py b/venv/lib/python3.11/site-packages/dns/quic/_trio.py new file mode 100644 index 0000000..ae79f36 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/quic/_trio.py @@ -0,0 +1,246 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import socket +import ssl +import struct +import time + +import aioquic.quic.configuration # type: ignore +import aioquic.quic.connection # type: ignore +import aioquic.quic.events # type: ignore +import trio + +import dns.exception +import dns.inet +from dns._asyncbackend import NullContext +from dns.quic._common import ( + QUIC_MAX_DATAGRAM, + AsyncQuicConnection, + AsyncQuicManager, + BaseQuicStream, + UnexpectedEOF, +) + + +class TrioQuicStream(BaseQuicStream): + def __init__(self, connection, stream_id): + super().__init__(connection, stream_id) + self._wake_up = trio.Condition() + + async def wait_for(self, amount): + while True: + if self._buffer.have(amount): + return + self._expecting = amount + async with self._wake_up: + await self._wake_up.wait() + self._expecting = 0 + + async def wait_for_end(self): + while True: + if self._buffer.seen_end(): + return + async with self._wake_up: + await self._wake_up.wait() + + async def receive(self, timeout=None): + if timeout is None: + context = NullContext(None) + else: + context = trio.move_on_after(timeout) + with context: + if self._connection.is_h3(): + await self.wait_for_end() + return self._buffer.get_all() + else: + await self.wait_for(2) + (size,) = struct.unpack("!H", self._buffer.get(2)) + await self.wait_for(size) + return self._buffer.get(size) + raise dns.exception.Timeout + + async def send(self, datagram, is_end=False): + data = self._encapsulate(datagram) + await self._connection.write(self._stream_id, data, is_end) + + async def _add_input(self, data, is_end): + if self._common_add_input(data, is_end): + async with self._wake_up: + self._wake_up.notify() + + async def close(self): + self._close() + + # Streams 
are async context managers + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + await self.close() + async with self._wake_up: + self._wake_up.notify() + return False + + +class TrioQuicConnection(AsyncQuicConnection): + def __init__(self, connection, address, port, source, source_port, manager=None): + super().__init__(connection, address, port, source, source_port, manager) + self._socket = trio.socket.socket(self._af, socket.SOCK_DGRAM, 0) + self._handshake_complete = trio.Event() + self._run_done = trio.Event() + self._worker_scope = None + self._send_pending = False + + async def _worker(self): + try: + if self._source: + await self._socket.bind( + dns.inet.low_level_address_tuple(self._source, self._af) + ) + await self._socket.connect(self._peer) + while not self._done: + (expiration, interval) = self._get_timer_values(False) + if self._send_pending: + # Do not block forever if sends are pending. Even though we + # have a wake-up mechanism if we've already started the blocking + # read, the possibility of context switching in send means that + # more writes can happen while we have no wake up context, so + # we need self._send_pending to avoid (effectively) a "lost wakeup" + # race. + interval = 0.0 + with trio.CancelScope( + deadline=trio.current_time() + interval + ) as self._worker_scope: + datagram = await self._socket.recv(QUIC_MAX_DATAGRAM) + self._connection.receive_datagram(datagram, self._peer, time.time()) + self._worker_scope = None + self._handle_timer(expiration) + await self._handle_events() + # We clear this now, before sending anything, as sending can cause + # context switches that do more sends. We want to know if that + # happens so we don't block a long time on the recv() above. 
+ self._send_pending = False + datagrams = self._connection.datagrams_to_send(time.time()) + for datagram, _ in datagrams: + await self._socket.send(datagram) + finally: + self._done = True + self._socket.close() + self._handshake_complete.set() + + async def _handle_events(self): + count = 0 + while True: + event = self._connection.next_event() + if event is None: + return + if isinstance(event, aioquic.quic.events.StreamDataReceived): + if self.is_h3(): + h3_events = self._h3_conn.handle_event(event) + for h3_event in h3_events: + if isinstance(h3_event, aioquic.h3.events.HeadersReceived): + stream = self._streams.get(event.stream_id) + if stream: + if stream._headers is None: + stream._headers = h3_event.headers + elif stream._trailers is None: + stream._trailers = h3_event.headers + if h3_event.stream_ended: + await stream._add_input(b"", True) + elif isinstance(h3_event, aioquic.h3.events.DataReceived): + stream = self._streams.get(event.stream_id) + if stream: + await stream._add_input( + h3_event.data, h3_event.stream_ended + ) + else: + stream = self._streams.get(event.stream_id) + if stream: + await stream._add_input(event.data, event.end_stream) + elif isinstance(event, aioquic.quic.events.HandshakeCompleted): + self._handshake_complete.set() + elif isinstance(event, aioquic.quic.events.ConnectionTerminated): + self._done = True + self._socket.close() + elif isinstance(event, aioquic.quic.events.StreamReset): + stream = self._streams.get(event.stream_id) + if stream: + await stream._add_input(b"", True) + count += 1 + if count > 10: + # yield + count = 0 + await trio.sleep(0) + + async def write(self, stream, data, is_end=False): + self._connection.send_stream_data(stream, data, is_end) + self._send_pending = True + if self._worker_scope is not None: + self._worker_scope.cancel() + + async def run(self): + if self._closed: + return + async with trio.open_nursery() as nursery: + nursery.start_soon(self._worker) + self._run_done.set() + + async def make_stream(self, timeout=None): + if timeout is None: + context = NullContext(None) + else: + context = trio.move_on_after(timeout) + with context: + await self._handshake_complete.wait() + if self._done: + raise UnexpectedEOF + stream_id = self._connection.get_next_available_stream_id(False) + stream = TrioQuicStream(self, stream_id) + self._streams[stream_id] = stream + return stream + raise dns.exception.Timeout + + async def close(self): + if not self._closed: + self._manager.closed(self._peer[0], self._peer[1]) + self._closed = True + self._connection.close() + self._send_pending = True + if self._worker_scope is not None: + self._worker_scope.cancel() + await self._run_done.wait() + + +class TrioQuicManager(AsyncQuicManager): + def __init__( + self, + nursery, + conf=None, + verify_mode=ssl.CERT_REQUIRED, + server_name=None, + h3=False, + ): + super().__init__(conf, verify_mode, TrioQuicConnection, server_name, h3) + self._nursery = nursery + + def connect( + self, address, port=853, source=None, source_port=0, want_session_ticket=True + ): + (connection, start) = self._connect( + address, port, source, source_port, want_session_ticket + ) + if start: + self._nursery.start_soon(connection.run) + return connection + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + # Copy the iterator into a list as exiting things will mutate the connections + # table. 
+        connections = list(self._connections.values())
+        for connection in connections:
+            await connection.close()
+        return False
diff --git a/venv/lib/python3.11/site-packages/dns/rcode.py b/venv/lib/python3.11/site-packages/dns/rcode.py
new file mode 100644
index 0000000..8e6386f
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/dns/rcode.py
@@ -0,0 +1,168 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+# Copyright (C) 2001-2017 Nominum, Inc.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+"""DNS Result Codes."""
+
+from typing import Tuple
+
+import dns.enum
+import dns.exception
+
+
+class Rcode(dns.enum.IntEnum):
+    #: No error
+    NOERROR = 0
+    #: Format error
+    FORMERR = 1
+    #: Server failure
+    SERVFAIL = 2
+    #: Name does not exist ("Name Error" in RFC 1035 terminology).
+    NXDOMAIN = 3
+    #: Not implemented
+    NOTIMP = 4
+    #: Refused
+    REFUSED = 5
+    #: Name exists.
+    YXDOMAIN = 6
+    #: RRset exists.
+    YXRRSET = 7
+    #: RRset does not exist.
+    NXRRSET = 8
+    #: Not authoritative.
+    NOTAUTH = 9
+    #: Name not in zone.
+    NOTZONE = 10
+    #: DSO-TYPE Not Implemented
+    DSOTYPENI = 11
+    #: Bad EDNS version.
+    BADVERS = 16
+    #: TSIG Signature Failure
+    BADSIG = 16
+    #: Key not recognized.
+    BADKEY = 17
+    #: Signature out of time window.
+    BADTIME = 18
+    #: Bad TKEY Mode.
+    BADMODE = 19
+    #: Duplicate key name.
+    BADNAME = 20
+    #: Algorithm not supported.
+    BADALG = 21
+    #: Bad Truncation
+    BADTRUNC = 22
+    #: Bad/missing Server Cookie
+    BADCOOKIE = 23
+
+    @classmethod
+    def _maximum(cls):
+        return 4095
+
+    @classmethod
+    def _unknown_exception_class(cls):
+        return UnknownRcode
+
+
+class UnknownRcode(dns.exception.DNSException):
+    """A DNS rcode is unknown."""
+
+
+def from_text(text: str) -> Rcode:
+    """Convert text into an rcode.
+
+    *text*, a ``str``, the textual rcode or an integer in textual form.
+
+    Raises ``dns.rcode.UnknownRcode`` if the rcode mnemonic is unknown.
+
+    Returns a ``dns.rcode.Rcode``.
+    """
+
+    return Rcode.from_text(text)
+
+
+def from_flags(flags: int, ednsflags: int) -> Rcode:
+    """Return the rcode value encoded by flags and ednsflags.
+
+    *flags*, an ``int``, the DNS flags field.
+
+    *ednsflags*, an ``int``, the EDNS flags field.
+
+    Raises ``ValueError`` if rcode is < 0 or > 4095.
+
+    Returns a ``dns.rcode.Rcode``.
+    """
+
+    value = (flags & 0x000F) | ((ednsflags >> 20) & 0xFF0)
+    return Rcode.make(value)
+
+
+def to_flags(value: Rcode) -> Tuple[int, int]:
+    """Return a (flags, ednsflags) tuple which encodes the rcode.
+
+    *value*, a ``dns.rcode.Rcode``, the rcode.
+
+    Raises ``ValueError`` if rcode is < 0 or > 4095.
+
+    Returns an ``(int, int)`` tuple.
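+
+    Worked examples (editor's addition; the values follow from the masks in
+    the implementation: rcodes above 15 spill into the EDNS flags)::
+
+        >>> to_flags(Rcode.NXDOMAIN)
+        (3, 0)
+        >>> to_flags(Rcode.BADVERS)
+        (0, 16777216)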
+ """ + + if value < 0 or value > 4095: + raise ValueError("rcode must be >= 0 and <= 4095") + v = value & 0xF + ev = (value & 0xFF0) << 20 + return (v, ev) + + +def to_text(value: Rcode, tsig: bool = False) -> str: + """Convert rcode into text. + + *value*, a ``dns.rcode.Rcode``, the rcode. + + Raises ``ValueError`` if rcode is < 0 or > 4095. + + Returns a ``str``. + """ + + if tsig and value == Rcode.BADVERS: + return "BADSIG" + return Rcode.to_text(value) + + +### BEGIN generated Rcode constants + +NOERROR = Rcode.NOERROR +FORMERR = Rcode.FORMERR +SERVFAIL = Rcode.SERVFAIL +NXDOMAIN = Rcode.NXDOMAIN +NOTIMP = Rcode.NOTIMP +REFUSED = Rcode.REFUSED +YXDOMAIN = Rcode.YXDOMAIN +YXRRSET = Rcode.YXRRSET +NXRRSET = Rcode.NXRRSET +NOTAUTH = Rcode.NOTAUTH +NOTZONE = Rcode.NOTZONE +DSOTYPENI = Rcode.DSOTYPENI +BADVERS = Rcode.BADVERS +BADSIG = Rcode.BADSIG +BADKEY = Rcode.BADKEY +BADTIME = Rcode.BADTIME +BADMODE = Rcode.BADMODE +BADNAME = Rcode.BADNAME +BADALG = Rcode.BADALG +BADTRUNC = Rcode.BADTRUNC +BADCOOKIE = Rcode.BADCOOKIE + +### END generated Rcode constants diff --git a/venv/lib/python3.11/site-packages/dns/rdata.py b/venv/lib/python3.11/site-packages/dns/rdata.py new file mode 100644 index 0000000..8099c26 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdata.py @@ -0,0 +1,911 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS rdata.""" + +import base64 +import binascii +import inspect +import io +import itertools +import random +from importlib import import_module +from typing import Any, Dict, Optional, Tuple, Union + +import dns.exception +import dns.immutable +import dns.ipv4 +import dns.ipv6 +import dns.name +import dns.rdataclass +import dns.rdatatype +import dns.tokenizer +import dns.ttl +import dns.wire + +_chunksize = 32 + +# We currently allow comparisons for rdata with relative names for backwards +# compatibility, but in the future we will not, as these kinds of comparisons +# can lead to subtle bugs if code is not carefully written. +# +# This switch allows the future behavior to be turned on so code can be +# tested with it. +_allow_relative_comparisons = True + + +class NoRelativeRdataOrdering(dns.exception.DNSException): + """An attempt was made to do an ordered comparison of one or more + rdata with relative names. The only reliable way of sorting rdata + is to use non-relativized rdata. + + """ + + +def _wordbreak(data, chunksize=_chunksize, separator=b" "): + """Break a binary string into chunks of chunksize characters separated by + a space. 
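+
+    Illustrative example (added): ``_wordbreak(b"0123456789ab", 4)`` returns
+    ``"0123 4567 89ab"``.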
+ """ + + if not chunksize: + return data.decode() + return separator.join( + [data[i : i + chunksize] for i in range(0, len(data), chunksize)] + ).decode() + + +# pylint: disable=unused-argument + + +def _hexify(data, chunksize=_chunksize, separator=b" ", **kw): + """Convert a binary string into its hex encoding, broken up into chunks + of chunksize characters separated by a separator. + """ + + return _wordbreak(binascii.hexlify(data), chunksize, separator) + + +def _base64ify(data, chunksize=_chunksize, separator=b" ", **kw): + """Convert a binary string into its base64 encoding, broken up into chunks + of chunksize characters separated by a separator. + """ + + return _wordbreak(base64.b64encode(data), chunksize, separator) + + +# pylint: enable=unused-argument + +__escaped = b'"\\' + + +def _escapify(qstring): + """Escape the characters in a quoted string which need it.""" + + if isinstance(qstring, str): + qstring = qstring.encode() + if not isinstance(qstring, bytearray): + qstring = bytearray(qstring) + + text = "" + for c in qstring: + if c in __escaped: + text += "\\" + chr(c) + elif c >= 0x20 and c < 0x7F: + text += chr(c) + else: + text += "\\%03d" % c + return text + + +def _truncate_bitmap(what): + """Determine the index of greatest byte that isn't all zeros, and + return the bitmap that contains all the bytes less than that index. + """ + + for i in range(len(what) - 1, -1, -1): + if what[i] != 0: + return what[0 : i + 1] + return what[0:1] + + +# So we don't have to edit all the rdata classes... +_constify = dns.immutable.constify + + +@dns.immutable.immutable +class Rdata: + """Base class for all DNS rdata types.""" + + __slots__ = ["rdclass", "rdtype", "rdcomment"] + + def __init__(self, rdclass, rdtype): + """Initialize an rdata. + + *rdclass*, an ``int`` is the rdataclass of the Rdata. + + *rdtype*, an ``int`` is the rdatatype of the Rdata. + """ + + self.rdclass = self._as_rdataclass(rdclass) + self.rdtype = self._as_rdatatype(rdtype) + self.rdcomment = None + + def _get_all_slots(self): + return itertools.chain.from_iterable( + getattr(cls, "__slots__", []) for cls in self.__class__.__mro__ + ) + + def __getstate__(self): + # We used to try to do a tuple of all slots here, but it + # doesn't work as self._all_slots isn't available at + # __setstate__() time. Before that we tried to store a tuple + # of __slots__, but that didn't work as it didn't store the + # slots defined by ancestors. This older way didn't fail + # outright, but ended up with partially broken objects, e.g. + # if you unpickled an A RR it wouldn't have rdclass and rdtype + # attributes, and would compare badly. + state = {} + for slot in self._get_all_slots(): + state[slot] = getattr(self, slot) + return state + + def __setstate__(self, state): + for slot, val in state.items(): + object.__setattr__(self, slot, val) + if not hasattr(self, "rdcomment"): + # Pickled rdata from 2.0.x might not have a rdcomment, so add + # it if needed. + object.__setattr__(self, "rdcomment", None) + + def covers(self) -> dns.rdatatype.RdataType: + """Return the type a Rdata covers. + + DNS SIG/RRSIG rdatas apply to a specific type; this type is + returned by the covers() function. If the rdata type is not + SIG or RRSIG, dns.rdatatype.NONE is returned. This is useful when + creating rdatasets, allowing the rdataset to contain only RRSIGs + of a particular type, e.g. RRSIG(NS). + + Returns a ``dns.rdatatype.RdataType``. 
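+
+        Doctest-style sketch (added; assumes dnspython is importable):
+
+        >>> import dns.rdata, dns.rdatatype
+        >>> rd = dns.rdata.from_text("IN", "A", "10.0.0.1")
+        >>> rd.covers() == dns.rdatatype.NONE
+        True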
+        """
+
+        return dns.rdatatype.NONE
+
+    def extended_rdatatype(self) -> int:
+        """Return a 32-bit type value, the least significant 16 bits of
+        which are the ordinary DNS type, and the upper 16 bits of which are
+        the "covered" type, if any.
+
+        Returns an ``int``.
+        """
+
+        return self.covers() << 16 | self.rdtype
+
+    def to_text(
+        self,
+        origin: Optional[dns.name.Name] = None,
+        relativize: bool = True,
+        **kw: Dict[str, Any],
+    ) -> str:
+        """Convert an rdata to text format.
+
+        Returns a ``str``.
+        """
+
+        raise NotImplementedError  # pragma: no cover
+
+    def _to_wire(
+        self,
+        file: Optional[Any],
+        compress: Optional[dns.name.CompressType] = None,
+        origin: Optional[dns.name.Name] = None,
+        canonicalize: bool = False,
+    ) -> None:
+        raise NotImplementedError  # pragma: no cover
+
+    def to_wire(
+        self,
+        file: Optional[Any] = None,
+        compress: Optional[dns.name.CompressType] = None,
+        origin: Optional[dns.name.Name] = None,
+        canonicalize: bool = False,
+    ) -> Optional[bytes]:
+        """Convert an rdata to wire format.
+
+        Returns a ``bytes`` if no output file was specified, or ``None`` otherwise.
+        """
+
+        if file:
+            # We call _to_wire() and then return None explicitly instead of
+            # just returning the None from _to_wire(), as mypy's func-returns-value
+            # check unhelpfully errors out with 'error: "_to_wire" of "Rdata" does
+            # not return a value (it only ever returns None)'.
+            self._to_wire(file, compress, origin, canonicalize)
+            return None
+        else:
+            f = io.BytesIO()
+            self._to_wire(f, compress, origin, canonicalize)
+            return f.getvalue()
+
+    def to_generic(
+        self, origin: Optional[dns.name.Name] = None
+    ) -> "dns.rdata.GenericRdata":
+        """Creates a dns.rdata.GenericRdata equivalent of this rdata.
+
+        Returns a ``dns.rdata.GenericRdata``.
+        """
+        return dns.rdata.GenericRdata(
+            self.rdclass, self.rdtype, self.to_wire(origin=origin)
+        )
+
+    def to_digestable(self, origin: Optional[dns.name.Name] = None) -> bytes:
+        """Convert rdata to a format suitable for digesting in hashes. This
+        is also the DNSSEC canonical form.
+
+        Returns a ``bytes``.
+        """
+        wire = self.to_wire(origin=origin, canonicalize=True)
+        assert wire is not None  # for mypy
+        return wire
+
+    def __repr__(self):
+        covers = self.covers()
+        if covers == dns.rdatatype.NONE:
+            ctext = ""
+        else:
+            ctext = "(" + dns.rdatatype.to_text(covers) + ")"
+        return (
+            "<DNS "
+            + dns.rdataclass.to_text(self.rdclass)
+            + " "
+            + dns.rdatatype.to_text(self.rdtype)
+            + ctext
+            + " rdata: "
+            + str(self)
+            + ">"
+        )
+
+    def __str__(self):
+        return self.to_text()
+
+    def _cmp(self, other):
+        """Compare an rdata with another rdata of the same rdtype and
+        rdclass.
+
+        For rdata with only absolute names:
+            Return < 0 if self < other in the DNSSEC ordering, 0 if self
+            == other, and > 0 if self > other.
+        For rdata with at least one relative name:
+            The rdata sorts before any rdata with only absolute names.
+            When compared with another relative rdata, all names are
+            made absolute as if they were relative to the root, as the
+            proper origin is not available. While this creates a stable
+            ordering, it is NOT guaranteed to be the DNSSEC ordering.
+            In the future, all ordering comparisons for rdata with
+            relative names will be disallowed.
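+
+        (Added note: the DNSSEC ordering is a bytewise comparison of the
+        canonical wire forms returned by ``to_digestable()``; see RFC 4034,
+        section 6.3.)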
+ """ + try: + our = self.to_digestable() + our_relative = False + except dns.name.NeedAbsoluteNameOrOrigin: + if _allow_relative_comparisons: + our = self.to_digestable(dns.name.root) + our_relative = True + try: + their = other.to_digestable() + their_relative = False + except dns.name.NeedAbsoluteNameOrOrigin: + if _allow_relative_comparisons: + their = other.to_digestable(dns.name.root) + their_relative = True + if _allow_relative_comparisons: + if our_relative != their_relative: + # For the purpose of comparison, all rdata with at least one + # relative name is less than an rdata with only absolute names. + if our_relative: + return -1 + else: + return 1 + elif our_relative or their_relative: + raise NoRelativeRdataOrdering + if our == their: + return 0 + elif our > their: + return 1 + else: + return -1 + + def __eq__(self, other): + if not isinstance(other, Rdata): + return False + if self.rdclass != other.rdclass or self.rdtype != other.rdtype: + return False + our_relative = False + their_relative = False + try: + our = self.to_digestable() + except dns.name.NeedAbsoluteNameOrOrigin: + our = self.to_digestable(dns.name.root) + our_relative = True + try: + their = other.to_digestable() + except dns.name.NeedAbsoluteNameOrOrigin: + their = other.to_digestable(dns.name.root) + their_relative = True + if our_relative != their_relative: + return False + return our == their + + def __ne__(self, other): + if not isinstance(other, Rdata): + return True + if self.rdclass != other.rdclass or self.rdtype != other.rdtype: + return True + return not self.__eq__(other) + + def __lt__(self, other): + if ( + not isinstance(other, Rdata) + or self.rdclass != other.rdclass + or self.rdtype != other.rdtype + ): + return NotImplemented + return self._cmp(other) < 0 + + def __le__(self, other): + if ( + not isinstance(other, Rdata) + or self.rdclass != other.rdclass + or self.rdtype != other.rdtype + ): + return NotImplemented + return self._cmp(other) <= 0 + + def __ge__(self, other): + if ( + not isinstance(other, Rdata) + or self.rdclass != other.rdclass + or self.rdtype != other.rdtype + ): + return NotImplemented + return self._cmp(other) >= 0 + + def __gt__(self, other): + if ( + not isinstance(other, Rdata) + or self.rdclass != other.rdclass + or self.rdtype != other.rdtype + ): + return NotImplemented + return self._cmp(other) > 0 + + def __hash__(self): + return hash(self.to_digestable(dns.name.root)) + + @classmethod + def from_text( + cls, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + tok: dns.tokenizer.Tokenizer, + origin: Optional[dns.name.Name] = None, + relativize: bool = True, + relativize_to: Optional[dns.name.Name] = None, + ) -> "Rdata": + raise NotImplementedError # pragma: no cover + + @classmethod + def from_wire_parser( + cls, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + parser: dns.wire.Parser, + origin: Optional[dns.name.Name] = None, + ) -> "Rdata": + raise NotImplementedError # pragma: no cover + + def replace(self, **kwargs: Any) -> "Rdata": + """ + Create a new Rdata instance based on the instance replace was + invoked on. It is possible to pass different parameters to + override the corresponding properties of the base Rdata. + + Any field specific to the Rdata type can be replaced, but the + *rdtype* and *rdclass* fields cannot. + + Returns an instance of the same Rdata subclass as *self*. + """ + + # Get the constructor parameters. 
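+        # (Added example, not upstream: mx.replace(preference=10) returns a
+        # new MX rdata equal to mx except for its preference field.)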
+ parameters = inspect.signature(self.__init__).parameters # type: ignore + + # Ensure that all of the arguments correspond to valid fields. + # Don't allow rdclass or rdtype to be changed, though. + for key in kwargs: + if key == "rdcomment": + continue + if key not in parameters: + raise AttributeError( + f"'{self.__class__.__name__}' object has no attribute '{key}'" + ) + if key in ("rdclass", "rdtype"): + raise AttributeError( + f"Cannot overwrite '{self.__class__.__name__}' attribute '{key}'" + ) + + # Construct the parameter list. For each field, use the value in + # kwargs if present, and the current value otherwise. + args = (kwargs.get(key, getattr(self, key)) for key in parameters) + + # Create, validate, and return the new object. + rd = self.__class__(*args) + # The comment is not set in the constructor, so give it special + # handling. + rdcomment = kwargs.get("rdcomment", self.rdcomment) + if rdcomment is not None: + object.__setattr__(rd, "rdcomment", rdcomment) + return rd + + # Type checking and conversion helpers. These are class methods as + # they don't touch object state and may be useful to others. + + @classmethod + def _as_rdataclass(cls, value): + return dns.rdataclass.RdataClass.make(value) + + @classmethod + def _as_rdatatype(cls, value): + return dns.rdatatype.RdataType.make(value) + + @classmethod + def _as_bytes( + cls, + value: Any, + encode: bool = False, + max_length: Optional[int] = None, + empty_ok: bool = True, + ) -> bytes: + if encode and isinstance(value, str): + bvalue = value.encode() + elif isinstance(value, bytearray): + bvalue = bytes(value) + elif isinstance(value, bytes): + bvalue = value + else: + raise ValueError("not bytes") + if max_length is not None and len(bvalue) > max_length: + raise ValueError("too long") + if not empty_ok and len(bvalue) == 0: + raise ValueError("empty bytes not allowed") + return bvalue + + @classmethod + def _as_name(cls, value): + # Note that proper name conversion (e.g. with origin and IDNA + # awareness) is expected to be done via from_text. This is just + # a simple thing for people invoking the constructor directly. 
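+        # (Added note: e.g. _as_name("example.") returns a dns.name.Name;
+        # full conversion with origins and IDNA handling happens in
+        # from_text().)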
+ if isinstance(value, str): + return dns.name.from_text(value) + elif not isinstance(value, dns.name.Name): + raise ValueError("not a name") + return value + + @classmethod + def _as_uint8(cls, value): + if not isinstance(value, int): + raise ValueError("not an integer") + if value < 0 or value > 255: + raise ValueError("not a uint8") + return value + + @classmethod + def _as_uint16(cls, value): + if not isinstance(value, int): + raise ValueError("not an integer") + if value < 0 or value > 65535: + raise ValueError("not a uint16") + return value + + @classmethod + def _as_uint32(cls, value): + if not isinstance(value, int): + raise ValueError("not an integer") + if value < 0 or value > 4294967295: + raise ValueError("not a uint32") + return value + + @classmethod + def _as_uint48(cls, value): + if not isinstance(value, int): + raise ValueError("not an integer") + if value < 0 or value > 281474976710655: + raise ValueError("not a uint48") + return value + + @classmethod + def _as_int(cls, value, low=None, high=None): + if not isinstance(value, int): + raise ValueError("not an integer") + if low is not None and value < low: + raise ValueError("value too small") + if high is not None and value > high: + raise ValueError("value too large") + return value + + @classmethod + def _as_ipv4_address(cls, value): + if isinstance(value, str): + return dns.ipv4.canonicalize(value) + elif isinstance(value, bytes): + return dns.ipv4.inet_ntoa(value) + else: + raise ValueError("not an IPv4 address") + + @classmethod + def _as_ipv6_address(cls, value): + if isinstance(value, str): + return dns.ipv6.canonicalize(value) + elif isinstance(value, bytes): + return dns.ipv6.inet_ntoa(value) + else: + raise ValueError("not an IPv6 address") + + @classmethod + def _as_bool(cls, value): + if isinstance(value, bool): + return value + else: + raise ValueError("not a boolean") + + @classmethod + def _as_ttl(cls, value): + if isinstance(value, int): + return cls._as_int(value, 0, dns.ttl.MAX_TTL) + elif isinstance(value, str): + return dns.ttl.from_text(value) + else: + raise ValueError("not a TTL") + + @classmethod + def _as_tuple(cls, value, as_value): + try: + # For user convenience, if value is a singleton of the list + # element type, wrap it in a tuple. + return (as_value(value),) + except Exception: + # Otherwise, check each element of the iterable *value* + # against *as_value*. + return tuple(as_value(v) for v in value) + + # Processing order + + @classmethod + def _processing_order(cls, iterable): + items = list(iterable) + random.shuffle(items) + return items + + +@dns.immutable.immutable +class GenericRdata(Rdata): + """Generic Rdata Class + + This class is used for rdata types for which we have no better + implementation. It implements the DNS "unknown RRs" scheme. 
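+
+    Illustrative example (added): in this scheme (RFC 3597), the rdata of an
+    A record for 10.0.0.1 can be written as ``\# 4 0a000001``: a literal
+    ``\#``, the rdata length in octets, and the wire form in hexadecimal.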
+ """ + + __slots__ = ["data"] + + def __init__(self, rdclass, rdtype, data): + super().__init__(rdclass, rdtype) + self.data = data + + def to_text( + self, + origin: Optional[dns.name.Name] = None, + relativize: bool = True, + **kw: Dict[str, Any], + ) -> str: + return r"\# %d " % len(self.data) + _hexify(self.data, **kw) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + token = tok.get() + if not token.is_identifier() or token.value != r"\#": + raise dns.exception.SyntaxError(r"generic rdata does not start with \#") + length = tok.get_int() + hex = tok.concatenate_remaining_identifiers(True).encode() + data = binascii.unhexlify(hex) + if len(data) != length: + raise dns.exception.SyntaxError("generic rdata hex data has wrong length") + return cls(rdclass, rdtype, data) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(self.data) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + return cls(rdclass, rdtype, parser.get_remaining()) + + +_rdata_classes: Dict[Tuple[dns.rdataclass.RdataClass, dns.rdatatype.RdataType], Any] = ( + {} +) +_module_prefix = "dns.rdtypes" +_dynamic_load_allowed = True + + +def get_rdata_class(rdclass, rdtype, use_generic=True): + cls = _rdata_classes.get((rdclass, rdtype)) + if not cls: + cls = _rdata_classes.get((dns.rdatatype.ANY, rdtype)) + if not cls and _dynamic_load_allowed: + rdclass_text = dns.rdataclass.to_text(rdclass) + rdtype_text = dns.rdatatype.to_text(rdtype) + rdtype_text = rdtype_text.replace("-", "_") + try: + mod = import_module( + ".".join([_module_prefix, rdclass_text, rdtype_text]) + ) + cls = getattr(mod, rdtype_text) + _rdata_classes[(rdclass, rdtype)] = cls + except ImportError: + try: + mod = import_module(".".join([_module_prefix, "ANY", rdtype_text])) + cls = getattr(mod, rdtype_text) + _rdata_classes[(dns.rdataclass.ANY, rdtype)] = cls + _rdata_classes[(rdclass, rdtype)] = cls + except ImportError: + pass + if not cls and use_generic: + cls = GenericRdata + _rdata_classes[(rdclass, rdtype)] = cls + return cls + + +def load_all_types(disable_dynamic_load=True): + """Load all rdata types for which dnspython has a non-generic implementation. + + Normally dnspython loads DNS rdatatype implementations on demand, but in some + specialized cases loading all types at an application-controlled time is preferred. + + If *disable_dynamic_load*, a ``bool``, is ``True`` then dnspython will not attempt + to use its dynamic loading mechanism if an unknown type is subsequently encountered, + and will simply use the ``GenericRdata`` class. + """ + # Load class IN and ANY types. + for rdtype in dns.rdatatype.RdataType: + get_rdata_class(dns.rdataclass.IN, rdtype, False) + # Load the one non-ANY implementation we have in CH. Everything + # else in CH is an ANY type, and we'll discover those on demand but won't + # have to import anything. + get_rdata_class(dns.rdataclass.CH, dns.rdatatype.A, False) + if disable_dynamic_load: + # Now disable dynamic loading so any subsequent unknown type immediately becomes + # GenericRdata without a load attempt. 
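+        # (Added note: get_rdata_class() consults this module-level flag
+        # before calling import_module(), so clearing it takes effect for
+        # all later lookups.)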
+ global _dynamic_load_allowed + _dynamic_load_allowed = False + + +def from_text( + rdclass: Union[dns.rdataclass.RdataClass, str], + rdtype: Union[dns.rdatatype.RdataType, str], + tok: Union[dns.tokenizer.Tokenizer, str], + origin: Optional[dns.name.Name] = None, + relativize: bool = True, + relativize_to: Optional[dns.name.Name] = None, + idna_codec: Optional[dns.name.IDNACodec] = None, +) -> Rdata: + """Build an rdata object from text format. + + This function attempts to dynamically load a class which + implements the specified rdata class and type. If there is no + class-and-type-specific implementation, the GenericRdata class + is used. + + Once a class is chosen, its from_text() class method is called + with the parameters to this function. + + If *tok* is a ``str``, then a tokenizer is created and the string + is used as its input. + + *rdclass*, a ``dns.rdataclass.RdataClass`` or ``str``, the rdataclass. + + *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdatatype. + + *tok*, a ``dns.tokenizer.Tokenizer`` or a ``str``. + + *origin*, a ``dns.name.Name`` (or ``None``), the + origin to use for relative names. + + *relativize*, a ``bool``. If true, name will be relativized. + + *relativize_to*, a ``dns.name.Name`` (or ``None``), the origin to use + when relativizing names. If not set, the *origin* value will be used. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder to use if a tokenizer needs to be created. If + ``None``, the default IDNA 2003 encoder/decoder is used. If a + tokenizer is not created, then the codec associated with the tokenizer + is the one that is used. + + Returns an instance of the chosen Rdata subclass. + + """ + if isinstance(tok, str): + tok = dns.tokenizer.Tokenizer(tok, idna_codec=idna_codec) + rdclass = dns.rdataclass.RdataClass.make(rdclass) + rdtype = dns.rdatatype.RdataType.make(rdtype) + cls = get_rdata_class(rdclass, rdtype) + with dns.exception.ExceptionWrapper(dns.exception.SyntaxError): + rdata = None + if cls != GenericRdata: + # peek at first token + token = tok.get() + tok.unget(token) + if token.is_identifier() and token.value == r"\#": + # + # Known type using the generic syntax. Extract the + # wire form from the generic syntax, and then run + # from_wire on it. + # + grdata = GenericRdata.from_text( + rdclass, rdtype, tok, origin, relativize, relativize_to + ) + rdata = from_wire( + rdclass, rdtype, grdata.data, 0, len(grdata.data), origin + ) + # + # If this comparison isn't equal, then there must have been + # compressed names in the wire format, which is an error, + # there being no reasonable context to decompress with. + # + rwire = rdata.to_wire() + if rwire != grdata.data: + raise dns.exception.SyntaxError( + "compressed data in " + "generic syntax form " + "of known rdatatype" + ) + if rdata is None: + rdata = cls.from_text( + rdclass, rdtype, tok, origin, relativize, relativize_to + ) + token = tok.get_eol_as_token() + if token.comment is not None: + object.__setattr__(rdata, "rdcomment", token.comment) + return rdata + + +def from_wire_parser( + rdclass: Union[dns.rdataclass.RdataClass, str], + rdtype: Union[dns.rdatatype.RdataType, str], + parser: dns.wire.Parser, + origin: Optional[dns.name.Name] = None, +) -> Rdata: + """Build an rdata object from wire format + + This function attempts to dynamically load a class which + implements the specified rdata class and type. If there is no + class-and-type-specific implementation, the GenericRdata class + is used. 
+ + Once a class is chosen, its from_wire() class method is called + with the parameters to this function. + + *rdclass*, a ``dns.rdataclass.RdataClass`` or ``str``, the rdataclass. + + *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdatatype. + + *parser*, a ``dns.wire.Parser``, the parser, which should be + restricted to the rdata length. + + *origin*, a ``dns.name.Name`` (or ``None``). If not ``None``, + then names will be relativized to this origin. + + Returns an instance of the chosen Rdata subclass. + """ + + rdclass = dns.rdataclass.RdataClass.make(rdclass) + rdtype = dns.rdatatype.RdataType.make(rdtype) + cls = get_rdata_class(rdclass, rdtype) + with dns.exception.ExceptionWrapper(dns.exception.FormError): + return cls.from_wire_parser(rdclass, rdtype, parser, origin) + + +def from_wire( + rdclass: Union[dns.rdataclass.RdataClass, str], + rdtype: Union[dns.rdatatype.RdataType, str], + wire: bytes, + current: int, + rdlen: int, + origin: Optional[dns.name.Name] = None, +) -> Rdata: + """Build an rdata object from wire format + + This function attempts to dynamically load a class which + implements the specified rdata class and type. If there is no + class-and-type-specific implementation, the GenericRdata class + is used. + + Once a class is chosen, its from_wire() class method is called + with the parameters to this function. + + *rdclass*, an ``int``, the rdataclass. + + *rdtype*, an ``int``, the rdatatype. + + *wire*, a ``bytes``, the wire-format message. + + *current*, an ``int``, the offset in wire of the beginning of + the rdata. + + *rdlen*, an ``int``, the length of the wire-format rdata + + *origin*, a ``dns.name.Name`` (or ``None``). If not ``None``, + then names will be relativized to this origin. + + Returns an instance of the chosen Rdata subclass. + """ + parser = dns.wire.Parser(wire, current) + with parser.restrict_to(rdlen): + return from_wire_parser(rdclass, rdtype, parser, origin) + + +class RdatatypeExists(dns.exception.DNSException): + """DNS rdatatype already exists.""" + + supp_kwargs = {"rdclass", "rdtype"} + fmt = ( + "The rdata type with class {rdclass:d} and rdtype {rdtype:d} " + + "already exists." + ) + + +def register_type( + implementation: Any, + rdtype: int, + rdtype_text: str, + is_singleton: bool = False, + rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, +) -> None: + """Dynamically register a module to handle an rdatatype. + + *implementation*, a module implementing the type in the usual dnspython + way. + + *rdtype*, an ``int``, the rdatatype to register. + + *rdtype_text*, a ``str``, the textual form of the rdatatype. + + *is_singleton*, a ``bool``, indicating if the type is a singleton (i.e. + RRsets of the type can have only one member.) + + *rdclass*, the rdataclass of the type, or ``dns.rdataclass.ANY`` if + it applies to all classes. 
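+
+    Hypothetical usage sketch (added; ``myrrmodule`` stands for an assumed
+    module that implements the type in the usual dnspython way)::
+
+        dns.rdata.register_type(myrrmodule, 65280, "MYRR")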
+ """ + + rdtype = dns.rdatatype.RdataType.make(rdtype) + existing_cls = get_rdata_class(rdclass, rdtype) + if existing_cls != GenericRdata or dns.rdatatype.is_metatype(rdtype): + raise RdatatypeExists(rdclass=rdclass, rdtype=rdtype) + _rdata_classes[(rdclass, rdtype)] = getattr( + implementation, rdtype_text.replace("-", "_") + ) + dns.rdatatype.register_type(rdtype, rdtype_text, is_singleton) diff --git a/venv/lib/python3.11/site-packages/dns/rdataclass.py b/venv/lib/python3.11/site-packages/dns/rdataclass.py new file mode 100644 index 0000000..89b85a7 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdataclass.py @@ -0,0 +1,118 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Rdata Classes.""" + +import dns.enum +import dns.exception + + +class RdataClass(dns.enum.IntEnum): + """DNS Rdata Class""" + + RESERVED0 = 0 + IN = 1 + INTERNET = IN + CH = 3 + CHAOS = CH + HS = 4 + HESIOD = HS + NONE = 254 + ANY = 255 + + @classmethod + def _maximum(cls): + return 65535 + + @classmethod + def _short_name(cls): + return "class" + + @classmethod + def _prefix(cls): + return "CLASS" + + @classmethod + def _unknown_exception_class(cls): + return UnknownRdataclass + + +_metaclasses = {RdataClass.NONE, RdataClass.ANY} + + +class UnknownRdataclass(dns.exception.DNSException): + """A DNS class is unknown.""" + + +def from_text(text: str) -> RdataClass: + """Convert text into a DNS rdata class value. + + The input text can be a defined DNS RR class mnemonic or + instance of the DNS generic class syntax. + + For example, "IN" and "CLASS1" will both result in a value of 1. + + Raises ``dns.rdatatype.UnknownRdataclass`` if the class is unknown. + + Raises ``ValueError`` if the rdata class value is not >= 0 and <= 65535. + + Returns a ``dns.rdataclass.RdataClass``. + """ + + return RdataClass.from_text(text) + + +def to_text(value: RdataClass) -> str: + """Convert a DNS rdata class value to text. + + If the value has a known mnemonic, it will be used, otherwise the + DNS generic class syntax will be used. + + Raises ``ValueError`` if the rdata class value is not >= 0 and <= 65535. + + Returns a ``str``. + """ + + return RdataClass.to_text(value) + + +def is_metaclass(rdclass: RdataClass) -> bool: + """True if the specified class is a metaclass. + + The currently defined metaclasses are ANY and NONE. + + *rdclass* is a ``dns.rdataclass.RdataClass``. 
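+
+    Doctest-style example (added):
+
+    >>> is_metaclass(RdataClass.ANY)
+    True
+    >>> is_metaclass(RdataClass.IN)
+    False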
+ """ + + if rdclass in _metaclasses: + return True + return False + + +### BEGIN generated RdataClass constants + +RESERVED0 = RdataClass.RESERVED0 +IN = RdataClass.IN +INTERNET = RdataClass.INTERNET +CH = RdataClass.CH +CHAOS = RdataClass.CHAOS +HS = RdataClass.HS +HESIOD = RdataClass.HESIOD +NONE = RdataClass.NONE +ANY = RdataClass.ANY + +### END generated RdataClass constants diff --git a/venv/lib/python3.11/site-packages/dns/rdataset.py b/venv/lib/python3.11/site-packages/dns/rdataset.py new file mode 100644 index 0000000..39cab23 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdataset.py @@ -0,0 +1,512 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS rdatasets (an rdataset is a set of rdatas of a given type and class)""" + +import io +import random +import struct +from typing import Any, Collection, Dict, List, Optional, Union, cast + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdataclass +import dns.rdatatype +import dns.renderer +import dns.set +import dns.ttl + +# define SimpleSet here for backwards compatibility +SimpleSet = dns.set.Set + + +class DifferingCovers(dns.exception.DNSException): + """An attempt was made to add a DNS SIG/RRSIG whose covered type + is not the same as that of the other rdatas in the rdataset.""" + + +class IncompatibleTypes(dns.exception.DNSException): + """An attempt was made to add DNS RR data of an incompatible type.""" + + +class Rdataset(dns.set.Set): + """A DNS rdataset.""" + + __slots__ = ["rdclass", "rdtype", "covers", "ttl"] + + def __init__( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + ttl: int = 0, + ): + """Create a new rdataset of the specified class and type. + + *rdclass*, a ``dns.rdataclass.RdataClass``, the rdataclass. + + *rdtype*, an ``dns.rdatatype.RdataType``, the rdatatype. + + *covers*, an ``dns.rdatatype.RdataType``, the covered rdatatype. + + *ttl*, an ``int``, the TTL. + """ + + super().__init__() + self.rdclass = rdclass + self.rdtype: dns.rdatatype.RdataType = rdtype + self.covers: dns.rdatatype.RdataType = covers + self.ttl = ttl + + def _clone(self): + obj = super()._clone() + obj.rdclass = self.rdclass + obj.rdtype = self.rdtype + obj.covers = self.covers + obj.ttl = self.ttl + return obj + + def update_ttl(self, ttl: int) -> None: + """Perform TTL minimization. + + Set the TTL of the rdataset to be the lesser of the set's current + TTL or the specified TTL. If the set contains no rdatas, set the TTL + to the specified TTL. + + *ttl*, an ``int`` or ``str``. 
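+
+        For example (added note): on a set whose TTL is 3600,
+        ``update_ttl(300)`` lowers the TTL to 300, while ``update_ttl(7200)``
+        leaves it unchanged.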
+        """
+        ttl = dns.ttl.make(ttl)
+        if len(self) == 0:
+            self.ttl = ttl
+        elif ttl < self.ttl:
+            self.ttl = ttl
+
+    def add(  # pylint: disable=arguments-differ,arguments-renamed
+        self, rd: dns.rdata.Rdata, ttl: Optional[int] = None
+    ) -> None:
+        """Add the specified rdata to the rdataset.
+
+        If the optional *ttl* parameter is supplied, then
+        ``self.update_ttl(ttl)`` will be called prior to adding the rdata.
+
+        *rd*, a ``dns.rdata.Rdata``, the rdata
+
+        *ttl*, an ``int``, the TTL.
+
+        Raises ``dns.rdataset.IncompatibleTypes`` if the type and class
+        do not match the type and class of the rdataset.
+
+        Raises ``dns.rdataset.DifferingCovers`` if the type is a signature
+        type and the covered type does not match that of the rdataset.
+        """
+
+        #
+        # If we're adding a signature, do some special handling to
+        # check that the signature covers the same type as the
+        # other rdatas in this rdataset. If this is the first rdata
+        # in the set, initialize the covers field.
+        #
+        if self.rdclass != rd.rdclass or self.rdtype != rd.rdtype:
+            raise IncompatibleTypes
+        if ttl is not None:
+            self.update_ttl(ttl)
+        if self.rdtype == dns.rdatatype.RRSIG or self.rdtype == dns.rdatatype.SIG:
+            covers = rd.covers()
+            if len(self) == 0 and self.covers == dns.rdatatype.NONE:
+                self.covers = covers
+            elif self.covers != covers:
+                raise DifferingCovers
+        if dns.rdatatype.is_singleton(rd.rdtype) and len(self) > 0:
+            self.clear()
+        super().add(rd)
+
+    def union_update(self, other):
+        self.update_ttl(other.ttl)
+        super().union_update(other)
+
+    def intersection_update(self, other):
+        self.update_ttl(other.ttl)
+        super().intersection_update(other)
+
+    def update(self, other):
+        """Add all rdatas in other to self.
+
+        *other*, a ``dns.rdataset.Rdataset``, the rdataset from which
+        to update.
+        """
+
+        self.update_ttl(other.ttl)
+        super().update(other)
+
+    def _rdata_repr(self):
+        def maybe_truncate(s):
+            if len(s) > 100:
+                return s[:100] + "..."
+            return s
+
+        return "[" + ", ".join(f"<{maybe_truncate(str(rr))}>" for rr in self) + "]"
+
+    def __repr__(self):
+        if self.covers == 0:
+            ctext = ""
+        else:
+            ctext = "(" + dns.rdatatype.to_text(self.covers) + ")"
+        return (
+            "<DNS "
+            + dns.rdataclass.to_text(self.rdclass)
+            + " "
+            + dns.rdatatype.to_text(self.rdtype)
+            + ctext
+            + " rdataset: "
+            + self._rdata_repr()
+            + ">"
+        )
+
+    def __str__(self):
+        return self.to_text()
+
+    def __eq__(self, other):
+        if not isinstance(other, Rdataset):
+            return False
+        if (
+            self.rdclass != other.rdclass
+            or self.rdtype != other.rdtype
+            or self.covers != other.covers
+        ):
+            return False
+        return super().__eq__(other)
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def to_text(
+        self,
+        name: Optional[dns.name.Name] = None,
+        origin: Optional[dns.name.Name] = None,
+        relativize: bool = True,
+        override_rdclass: Optional[dns.rdataclass.RdataClass] = None,
+        want_comments: bool = False,
+        **kw: Dict[str, Any],
+    ) -> str:
+        """Convert the rdataset into DNS zone file format.
+
+        See ``dns.name.Name.choose_relativity`` for more information
+        on how *origin* and *relativize* determine the way names
+        are emitted.
+
+        Any additional keyword arguments are passed on to the rdata
+        ``to_text()`` method.
+
+        *name*, a ``dns.name.Name``. If name is not ``None``, emit RRs with
+        *name* as the owner name.
+
+        *origin*, a ``dns.name.Name`` or ``None``, the origin for relative
+        names.
+
+        *relativize*, a ``bool``. If ``True``, names will be relativized
+        to *origin*.
+
+        *override_rdclass*, a ``dns.rdataclass.RdataClass`` or ``None``.
+        If not ``None``, use this class instead of the Rdataset's class.
+
+        *want_comments*, a ``bool``.
If ``True``, emit comments for rdata + which have them. The default is ``False``. + """ + + if name is not None: + name = name.choose_relativity(origin, relativize) + ntext = str(name) + pad = " " + else: + ntext = "" + pad = "" + s = io.StringIO() + if override_rdclass is not None: + rdclass = override_rdclass + else: + rdclass = self.rdclass + if len(self) == 0: + # + # Empty rdatasets are used for the question section, and in + # some dynamic updates, so we don't need to print out the TTL + # (which is meaningless anyway). + # + s.write( + f"{ntext}{pad}{dns.rdataclass.to_text(rdclass)} " + f"{dns.rdatatype.to_text(self.rdtype)}\n" + ) + else: + for rd in self: + extra = "" + if want_comments: + if rd.rdcomment: + extra = f" ;{rd.rdcomment}" + s.write( + "%s%s%d %s %s %s%s\n" + % ( + ntext, + pad, + self.ttl, + dns.rdataclass.to_text(rdclass), + dns.rdatatype.to_text(self.rdtype), + rd.to_text(origin=origin, relativize=relativize, **kw), + extra, + ) + ) + # + # We strip off the final \n for the caller's convenience in printing + # + return s.getvalue()[:-1] + + def to_wire( + self, + name: dns.name.Name, + file: Any, + compress: Optional[dns.name.CompressType] = None, + origin: Optional[dns.name.Name] = None, + override_rdclass: Optional[dns.rdataclass.RdataClass] = None, + want_shuffle: bool = True, + ) -> int: + """Convert the rdataset to wire format. + + *name*, a ``dns.name.Name`` is the owner name to use. + + *file* is the file where the name is emitted (typically a + BytesIO file). + + *compress*, a ``dict``, is the compression table to use. If + ``None`` (the default), names will not be compressed. + + *origin* is a ``dns.name.Name`` or ``None``. If the name is + relative and origin is not ``None``, then *origin* will be appended + to it. + + *override_rdclass*, an ``int``, is used as the class instead of the + class of the rdataset. This is useful when rendering rdatasets + associated with dynamic updates. + + *want_shuffle*, a ``bool``. If ``True``, then the order of the + Rdatas within the Rdataset will be shuffled before rendering. + + Returns an ``int``, the number of records emitted. + """ + + if override_rdclass is not None: + rdclass = override_rdclass + want_shuffle = False + else: + rdclass = self.rdclass + if len(self) == 0: + name.to_wire(file, compress, origin) + file.write(struct.pack("!HHIH", self.rdtype, rdclass, 0, 0)) + return 1 + else: + l: Union[Rdataset, List[dns.rdata.Rdata]] + if want_shuffle: + l = list(self) + random.shuffle(l) + else: + l = self + for rd in l: + name.to_wire(file, compress, origin) + file.write(struct.pack("!HHI", self.rdtype, rdclass, self.ttl)) + with dns.renderer.prefixed_length(file, 2): + rd.to_wire(file, compress, origin) + return len(self) + + def match( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType, + ) -> bool: + """Returns ``True`` if this rdataset matches the specified class, + type, and covers. + """ + if self.rdclass == rdclass and self.rdtype == rdtype and self.covers == covers: + return True + return False + + def processing_order(self) -> List[dns.rdata.Rdata]: + """Return rdatas in a valid processing order according to the type's + specification. For example, MX records are in preference order from + lowest to highest preferences, with items of the same preference + shuffled. + + For types that do not define a processing order, the rdatas are + simply shuffled. 
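+
+        For example (added note): for the MX rdatas ``10 a``, ``10 b`` and
+        ``20 c``, the two preference-10 entries come first in either order,
+        and ``20 c`` always comes last.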
+ """ + if len(self) == 0: + return [] + else: + return self[0]._processing_order(iter(self)) + + +@dns.immutable.immutable +class ImmutableRdataset(Rdataset): # lgtm[py/missing-equals] + """An immutable DNS rdataset.""" + + _clone_class = Rdataset + + def __init__(self, rdataset: Rdataset): + """Create an immutable rdataset from the specified rdataset.""" + + super().__init__( + rdataset.rdclass, rdataset.rdtype, rdataset.covers, rdataset.ttl + ) + self.items = dns.immutable.Dict(rdataset.items) + + def update_ttl(self, ttl): + raise TypeError("immutable") + + def add(self, rd, ttl=None): + raise TypeError("immutable") + + def union_update(self, other): + raise TypeError("immutable") + + def intersection_update(self, other): + raise TypeError("immutable") + + def update(self, other): + raise TypeError("immutable") + + def __delitem__(self, i): + raise TypeError("immutable") + + # lgtm complains about these not raising ArithmeticError, but there is + # precedent for overrides of these methods in other classes to raise + # TypeError, and it seems like the better exception. + + def __ior__(self, other): # lgtm[py/unexpected-raise-in-special-method] + raise TypeError("immutable") + + def __iand__(self, other): # lgtm[py/unexpected-raise-in-special-method] + raise TypeError("immutable") + + def __iadd__(self, other): # lgtm[py/unexpected-raise-in-special-method] + raise TypeError("immutable") + + def __isub__(self, other): # lgtm[py/unexpected-raise-in-special-method] + raise TypeError("immutable") + + def clear(self): + raise TypeError("immutable") + + def __copy__(self): + return ImmutableRdataset(super().copy()) + + def copy(self): + return ImmutableRdataset(super().copy()) + + def union(self, other): + return ImmutableRdataset(super().union(other)) + + def intersection(self, other): + return ImmutableRdataset(super().intersection(other)) + + def difference(self, other): + return ImmutableRdataset(super().difference(other)) + + def symmetric_difference(self, other): + return ImmutableRdataset(super().symmetric_difference(other)) + + +def from_text_list( + rdclass: Union[dns.rdataclass.RdataClass, str], + rdtype: Union[dns.rdatatype.RdataType, str], + ttl: int, + text_rdatas: Collection[str], + idna_codec: Optional[dns.name.IDNACodec] = None, + origin: Optional[dns.name.Name] = None, + relativize: bool = True, + relativize_to: Optional[dns.name.Name] = None, +) -> Rdataset: + """Create an rdataset with the specified class, type, and TTL, and with + the specified list of rdatas in text format. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder to use; if ``None``, the default IDNA 2003 + encoder/decoder is used. + + *origin*, a ``dns.name.Name`` (or ``None``), the + origin to use for relative names. + + *relativize*, a ``bool``. If true, name will be relativized. + + *relativize_to*, a ``dns.name.Name`` (or ``None``), the origin to use + when relativizing names. If not set, the *origin* value will be used. + + Returns a ``dns.rdataset.Rdataset`` object. 
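+
+    Illustrative doctest-style example (added):
+
+    >>> rds = from_text_list("IN", "A", 300, ["10.0.0.1", "10.0.0.2"])
+    >>> rds.ttl
+    300
+    >>> len(rds)
+    2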
+ """ + + rdclass = dns.rdataclass.RdataClass.make(rdclass) + rdtype = dns.rdatatype.RdataType.make(rdtype) + r = Rdataset(rdclass, rdtype) + r.update_ttl(ttl) + for t in text_rdatas: + rd = dns.rdata.from_text( + r.rdclass, r.rdtype, t, origin, relativize, relativize_to, idna_codec + ) + r.add(rd) + return r + + +def from_text( + rdclass: Union[dns.rdataclass.RdataClass, str], + rdtype: Union[dns.rdatatype.RdataType, str], + ttl: int, + *text_rdatas: Any, +) -> Rdataset: + """Create an rdataset with the specified class, type, and TTL, and with + the specified rdatas in text format. + + Returns a ``dns.rdataset.Rdataset`` object. + """ + + return from_text_list(rdclass, rdtype, ttl, cast(Collection[str], text_rdatas)) + + +def from_rdata_list(ttl: int, rdatas: Collection[dns.rdata.Rdata]) -> Rdataset: + """Create an rdataset with the specified TTL, and with + the specified list of rdata objects. + + Returns a ``dns.rdataset.Rdataset`` object. + """ + + if len(rdatas) == 0: + raise ValueError("rdata list must not be empty") + r = None + for rd in rdatas: + if r is None: + r = Rdataset(rd.rdclass, rd.rdtype) + r.update_ttl(ttl) + r.add(rd) + assert r is not None + return r + + +def from_rdata(ttl: int, *rdatas: Any) -> Rdataset: + """Create an rdataset with the specified TTL, and with + the specified rdata objects. + + Returns a ``dns.rdataset.Rdataset`` object. + """ + + return from_rdata_list(ttl, cast(Collection[dns.rdata.Rdata], rdatas)) diff --git a/venv/lib/python3.11/site-packages/dns/rdatatype.py b/venv/lib/python3.11/site-packages/dns/rdatatype.py new file mode 100644 index 0000000..aa9e561 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdatatype.py @@ -0,0 +1,336 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +"""DNS Rdata Types.""" + +from typing import Dict + +import dns.enum +import dns.exception + + +class RdataType(dns.enum.IntEnum): + """DNS Rdata Type""" + + TYPE0 = 0 + NONE = 0 + A = 1 + NS = 2 + MD = 3 + MF = 4 + CNAME = 5 + SOA = 6 + MB = 7 + MG = 8 + MR = 9 + NULL = 10 + WKS = 11 + PTR = 12 + HINFO = 13 + MINFO = 14 + MX = 15 + TXT = 16 + RP = 17 + AFSDB = 18 + X25 = 19 + ISDN = 20 + RT = 21 + NSAP = 22 + NSAP_PTR = 23 + SIG = 24 + KEY = 25 + PX = 26 + GPOS = 27 + AAAA = 28 + LOC = 29 + NXT = 30 + SRV = 33 + NAPTR = 35 + KX = 36 + CERT = 37 + A6 = 38 + DNAME = 39 + OPT = 41 + APL = 42 + DS = 43 + SSHFP = 44 + IPSECKEY = 45 + RRSIG = 46 + NSEC = 47 + DNSKEY = 48 + DHCID = 49 + NSEC3 = 50 + NSEC3PARAM = 51 + TLSA = 52 + SMIMEA = 53 + HIP = 55 + NINFO = 56 + CDS = 59 + CDNSKEY = 60 + OPENPGPKEY = 61 + CSYNC = 62 + ZONEMD = 63 + SVCB = 64 + HTTPS = 65 + SPF = 99 + UNSPEC = 103 + NID = 104 + L32 = 105 + L64 = 106 + LP = 107 + EUI48 = 108 + EUI64 = 109 + TKEY = 249 + TSIG = 250 + IXFR = 251 + AXFR = 252 + MAILB = 253 + MAILA = 254 + ANY = 255 + URI = 256 + CAA = 257 + AVC = 258 + AMTRELAY = 260 + RESINFO = 261 + WALLET = 262 + TA = 32768 + DLV = 32769 + + @classmethod + def _maximum(cls): + return 65535 + + @classmethod + def _short_name(cls): + return "type" + + @classmethod + def _prefix(cls): + return "TYPE" + + @classmethod + def _extra_from_text(cls, text): + if text.find("-") >= 0: + try: + return cls[text.replace("-", "_")] + except KeyError: # pragma: no cover + pass + return _registered_by_text.get(text) + + @classmethod + def _extra_to_text(cls, value, current_text): + if current_text is None: + return _registered_by_value.get(value) + if current_text.find("_") >= 0: + return current_text.replace("_", "-") + return current_text + + @classmethod + def _unknown_exception_class(cls): + return UnknownRdatatype + + +_registered_by_text: Dict[str, RdataType] = {} +_registered_by_value: Dict[RdataType, str] = {} + +_metatypes = {RdataType.OPT} + +_singletons = { + RdataType.SOA, + RdataType.NXT, + RdataType.DNAME, + RdataType.NSEC, + RdataType.CNAME, +} + + +class UnknownRdatatype(dns.exception.DNSException): + """DNS resource record type is unknown.""" + + +def from_text(text: str) -> RdataType: + """Convert text into a DNS rdata type value. + + The input text can be a defined DNS RR type mnemonic or + instance of the DNS generic type syntax. + + For example, "NS" and "TYPE2" will both result in a value of 2. + + Raises ``dns.rdatatype.UnknownRdatatype`` if the type is unknown. + + Raises ``ValueError`` if the rdata type value is not >= 0 and <= 65535. + + Returns a ``dns.rdatatype.RdataType``. + """ + + return RdataType.from_text(text) + + +def to_text(value: RdataType) -> str: + """Convert a DNS rdata type value to text. + + If the value has a known mnemonic, it will be used, otherwise the + DNS generic type syntax will be used. + + Raises ``ValueError`` if the rdata type value is not >= 0 and <= 65535. + + Returns a ``str``. + """ + + return RdataType.to_text(value) + + +def is_metatype(rdtype: RdataType) -> bool: + """True if the specified type is a metatype. + + *rdtype* is a ``dns.rdatatype.RdataType``. + + The currently defined metatypes are TKEY, TSIG, IXFR, AXFR, MAILA, + MAILB, ANY, and OPT. + + Returns a ``bool``. + """ + + return (256 > rdtype >= 128) or rdtype in _metatypes + + +def is_singleton(rdtype: RdataType) -> bool: + """Is the specified type a singleton type? + + Singleton types can only have a single rdata in an rdataset, or a single + RR in an RRset. 
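+
+    (Added note: ``dns.rdataset.Rdataset.add()`` enforces this by clearing
+    the set before adding a new rdata of a singleton type.)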
+ + The currently defined singleton types are CNAME, DNAME, NSEC, NXT, and + SOA. + + *rdtype* is an ``int``. + + Returns a ``bool``. + """ + + if rdtype in _singletons: + return True + return False + + +# pylint: disable=redefined-outer-name +def register_type( + rdtype: RdataType, rdtype_text: str, is_singleton: bool = False +) -> None: + """Dynamically register an rdatatype. + + *rdtype*, a ``dns.rdatatype.RdataType``, the rdatatype to register. + + *rdtype_text*, a ``str``, the textual form of the rdatatype. + + *is_singleton*, a ``bool``, indicating if the type is a singleton (i.e. + RRsets of the type can have only one member.) + """ + + _registered_by_text[rdtype_text] = rdtype + _registered_by_value[rdtype] = rdtype_text + if is_singleton: + _singletons.add(rdtype) + + +### BEGIN generated RdataType constants + +TYPE0 = RdataType.TYPE0 +NONE = RdataType.NONE +A = RdataType.A +NS = RdataType.NS +MD = RdataType.MD +MF = RdataType.MF +CNAME = RdataType.CNAME +SOA = RdataType.SOA +MB = RdataType.MB +MG = RdataType.MG +MR = RdataType.MR +NULL = RdataType.NULL +WKS = RdataType.WKS +PTR = RdataType.PTR +HINFO = RdataType.HINFO +MINFO = RdataType.MINFO +MX = RdataType.MX +TXT = RdataType.TXT +RP = RdataType.RP +AFSDB = RdataType.AFSDB +X25 = RdataType.X25 +ISDN = RdataType.ISDN +RT = RdataType.RT +NSAP = RdataType.NSAP +NSAP_PTR = RdataType.NSAP_PTR +SIG = RdataType.SIG +KEY = RdataType.KEY +PX = RdataType.PX +GPOS = RdataType.GPOS +AAAA = RdataType.AAAA +LOC = RdataType.LOC +NXT = RdataType.NXT +SRV = RdataType.SRV +NAPTR = RdataType.NAPTR +KX = RdataType.KX +CERT = RdataType.CERT +A6 = RdataType.A6 +DNAME = RdataType.DNAME +OPT = RdataType.OPT +APL = RdataType.APL +DS = RdataType.DS +SSHFP = RdataType.SSHFP +IPSECKEY = RdataType.IPSECKEY +RRSIG = RdataType.RRSIG +NSEC = RdataType.NSEC +DNSKEY = RdataType.DNSKEY +DHCID = RdataType.DHCID +NSEC3 = RdataType.NSEC3 +NSEC3PARAM = RdataType.NSEC3PARAM +TLSA = RdataType.TLSA +SMIMEA = RdataType.SMIMEA +HIP = RdataType.HIP +NINFO = RdataType.NINFO +CDS = RdataType.CDS +CDNSKEY = RdataType.CDNSKEY +OPENPGPKEY = RdataType.OPENPGPKEY +CSYNC = RdataType.CSYNC +ZONEMD = RdataType.ZONEMD +SVCB = RdataType.SVCB +HTTPS = RdataType.HTTPS +SPF = RdataType.SPF +UNSPEC = RdataType.UNSPEC +NID = RdataType.NID +L32 = RdataType.L32 +L64 = RdataType.L64 +LP = RdataType.LP +EUI48 = RdataType.EUI48 +EUI64 = RdataType.EUI64 +TKEY = RdataType.TKEY +TSIG = RdataType.TSIG +IXFR = RdataType.IXFR +AXFR = RdataType.AXFR +MAILB = RdataType.MAILB +MAILA = RdataType.MAILA +ANY = RdataType.ANY +URI = RdataType.URI +CAA = RdataType.CAA +AVC = RdataType.AVC +AMTRELAY = RdataType.AMTRELAY +RESINFO = RdataType.RESINFO +WALLET = RdataType.WALLET +TA = RdataType.TA +DLV = RdataType.DLV + +### END generated RdataType constants diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/AFSDB.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/AFSDB.py new file mode 100644 index 0000000..06a3b97 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/AFSDB.py @@ -0,0 +1,45 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.mxbase + + +@dns.immutable.immutable +class AFSDB(dns.rdtypes.mxbase.UncompressedDowncasingMX): + """AFSDB record""" + + # Use the property mechanism to make "subtype" an alias for the + # "preference" attribute, and "hostname" an alias for the "exchange" + # attribute. + # + # This lets us inherit the UncompressedMX implementation but lets + # the caller use appropriate attribute names for the rdata type. + # + # We probably lose some performance vs. a cut-and-paste + # implementation, but this way we don't copy code, and that's + # good. + + @property + def subtype(self): + "the AFSDB subtype" + return self.preference + + @property + def hostname(self): + "the AFSDB hostname" + return self.exchange diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/AMTRELAY.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/AMTRELAY.py new file mode 100644 index 0000000..ed2b072 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/AMTRELAY.py @@ -0,0 +1,91 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+
+import struct
+
+import dns.exception
+import dns.immutable
+import dns.rdata
+import dns.rdtypes.util
+
+
+class Relay(dns.rdtypes.util.Gateway):
+    name = "AMTRELAY relay"
+
+    @property
+    def relay(self):
+        return self.gateway
+
+
+@dns.immutable.immutable
+class AMTRELAY(dns.rdata.Rdata):
+    """AMTRELAY record"""
+
+    # see: RFC 8777
+
+    __slots__ = ["precedence", "discovery_optional", "relay_type", "relay"]
+
+    def __init__(
+        self, rdclass, rdtype, precedence, discovery_optional, relay_type, relay
+    ):
+        super().__init__(rdclass, rdtype)
+        relay = Relay(relay_type, relay)
+        self.precedence = self._as_uint8(precedence)
+        self.discovery_optional = self._as_bool(discovery_optional)
+        self.relay_type = relay.type
+        self.relay = relay.relay
+
+    def to_text(self, origin=None, relativize=True, **kw):
+        relay = Relay(self.relay_type, self.relay).to_text(origin, relativize)
+        return "%d %d %d %s" % (
+            self.precedence,
+            self.discovery_optional,
+            self.relay_type,
+            relay,
+        )
+
+    @classmethod
+    def from_text(
+        cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None
+    ):
+        precedence = tok.get_uint8()
+        discovery_optional = tok.get_uint8()
+        if discovery_optional > 1:
+            raise dns.exception.SyntaxError("expecting 0 or 1")
+        discovery_optional = bool(discovery_optional)
+        relay_type = tok.get_uint8()
+        if relay_type > 0x7F:
+            raise dns.exception.SyntaxError("expecting an integer <= 127")
+        relay = Relay.from_text(relay_type, tok, origin, relativize, relativize_to)
+        return cls(
+            rdclass, rdtype, precedence, discovery_optional, relay_type, relay.relay
+        )
+
+    def _to_wire(self, file, compress=None, origin=None, canonicalize=False):
+        relay_type = self.relay_type | (self.discovery_optional << 7)
+        header = struct.pack("!BB", self.precedence, relay_type)
+        file.write(header)
+        Relay(self.relay_type, self.relay).to_wire(file, compress, origin, canonicalize)
+
+    @classmethod
+    def from_wire_parser(cls, rdclass, rdtype, parser, origin=None):
+        (precedence, relay_type) = parser.get_struct("!BB")
+        discovery_optional = bool(relay_type >> 7)
+        relay_type &= 0x7F
+        relay = Relay.from_wire_parser(relay_type, parser, origin)
+        return cls(
+            rdclass, rdtype, precedence, discovery_optional, relay_type, relay.relay
+        )
diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/AVC.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/AVC.py
new file mode 100644
index 0000000..a27ae2d
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/AVC.py
@@ -0,0 +1,26 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+# Copyright (C) 2016 Nominum, Inc.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ +import dns.immutable +import dns.rdtypes.txtbase + + +@dns.immutable.immutable +class AVC(dns.rdtypes.txtbase.TXTBase): + """AVC record""" + + # See: IANA dns parameters for AVC diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/CAA.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/CAA.py new file mode 100644 index 0000000..2e6a7e7 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/CAA.py @@ -0,0 +1,71 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class CAA(dns.rdata.Rdata): + """CAA (Certification Authority Authorization) record""" + + # see: RFC 6844 + + __slots__ = ["flags", "tag", "value"] + + def __init__(self, rdclass, rdtype, flags, tag, value): + super().__init__(rdclass, rdtype) + self.flags = self._as_uint8(flags) + self.tag = self._as_bytes(tag, True, 255) + if not tag.isalnum(): + raise ValueError("tag is not alphanumeric") + self.value = self._as_bytes(value) + + def to_text(self, origin=None, relativize=True, **kw): + return '%u %s "%s"' % ( + self.flags, + dns.rdata._escapify(self.tag), + dns.rdata._escapify(self.value), + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + flags = tok.get_uint8() + tag = tok.get_string().encode() + value = tok.get_string().encode() + return cls(rdclass, rdtype, flags, tag, value) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack("!B", self.flags)) + l = len(self.tag) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.tag) + file.write(self.value) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + flags = parser.get_uint8() + tag = parser.get_counted_bytes() + value = parser.get_remaining() + return cls(rdclass, rdtype, flags, tag, value) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/CDNSKEY.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/CDNSKEY.py new file mode 100644 index 0000000..b613409 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/CDNSKEY.py @@ -0,0 +1,33 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.dnskeybase # lgtm[py/import-and-import-from] + +# pylint: disable=unused-import +from dns.rdtypes.dnskeybase import ( # noqa: F401 lgtm[py/unused-import] + REVOKE, + SEP, + ZONE, +) + +# pylint: enable=unused-import + + +@dns.immutable.immutable +class CDNSKEY(dns.rdtypes.dnskeybase.DNSKEYBase): + """CDNSKEY record""" diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/CDS.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/CDS.py new file mode 100644 index 0000000..8312b97 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/CDS.py @@ -0,0 +1,29 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.dsbase + + +@dns.immutable.immutable +class CDS(dns.rdtypes.dsbase.DSBase): + """CDS record""" + + _digest_length_by_type = { + **dns.rdtypes.dsbase.DSBase._digest_length_by_type, + 0: 1, # delete, RFC 8078 Sec. 4 (including Errata ID 5049) + } diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/CERT.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/CERT.py new file mode 100644 index 0000000..f369cc8 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/CERT.py @@ -0,0 +1,116 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
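+# Usage sketch for the CERT type defined below. The certificate type may
+# be given as a mnemonic from _ctype_by_name or as a bare integer; the
+# base64 payload "aGVsbG8=" is a dummy value, not a real certificate:
+#
+#     >>> import dns.rdata
+#     >>> r = dns.rdata.from_text("IN", "CERT", "PKIX 0 RSASHA256 aGVsbG8=")
+#     >>> (r.certificate_type, r.key_tag, int(r.algorithm))
+#     (1, 0, 8)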
+ +import base64 +import struct + +import dns.dnssectypes +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + +_ctype_by_value = { + 1: "PKIX", + 2: "SPKI", + 3: "PGP", + 4: "IPKIX", + 5: "ISPKI", + 6: "IPGP", + 7: "ACPKIX", + 8: "IACPKIX", + 253: "URI", + 254: "OID", +} + +_ctype_by_name = { + "PKIX": 1, + "SPKI": 2, + "PGP": 3, + "IPKIX": 4, + "ISPKI": 5, + "IPGP": 6, + "ACPKIX": 7, + "IACPKIX": 8, + "URI": 253, + "OID": 254, +} + + +def _ctype_from_text(what): + v = _ctype_by_name.get(what) + if v is not None: + return v + return int(what) + + +def _ctype_to_text(what): + v = _ctype_by_value.get(what) + if v is not None: + return v + return str(what) + + +@dns.immutable.immutable +class CERT(dns.rdata.Rdata): + """CERT record""" + + # see RFC 4398 + + __slots__ = ["certificate_type", "key_tag", "algorithm", "certificate"] + + def __init__( + self, rdclass, rdtype, certificate_type, key_tag, algorithm, certificate + ): + super().__init__(rdclass, rdtype) + self.certificate_type = self._as_uint16(certificate_type) + self.key_tag = self._as_uint16(key_tag) + self.algorithm = self._as_uint8(algorithm) + self.certificate = self._as_bytes(certificate) + + def to_text(self, origin=None, relativize=True, **kw): + certificate_type = _ctype_to_text(self.certificate_type) + return "%s %d %s %s" % ( + certificate_type, + self.key_tag, + dns.dnssectypes.Algorithm.to_text(self.algorithm), + dns.rdata._base64ify(self.certificate, **kw), + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + certificate_type = _ctype_from_text(tok.get_string()) + key_tag = tok.get_uint16() + algorithm = dns.dnssectypes.Algorithm.from_text(tok.get_string()) + b64 = tok.concatenate_remaining_identifiers().encode() + certificate = base64.b64decode(b64) + return cls(rdclass, rdtype, certificate_type, key_tag, algorithm, certificate) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + prefix = struct.pack( + "!HHB", self.certificate_type, self.key_tag, self.algorithm + ) + file.write(prefix) + file.write(self.certificate) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (certificate_type, key_tag, algorithm) = parser.get_struct("!HHB") + certificate = parser.get_remaining() + return cls(rdclass, rdtype, certificate_type, key_tag, algorithm, certificate) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/CNAME.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/CNAME.py new file mode 100644 index 0000000..665e407 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/CNAME.py @@ -0,0 +1,28 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.nsbase + + +@dns.immutable.immutable +class CNAME(dns.rdtypes.nsbase.NSBase): + """CNAME record + + Note: although CNAME is officially a singleton type, dnspython allows + non-singleton CNAME rdatasets because such sets have been commonly + used by BIND and other nameservers for load balancing.""" diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/CSYNC.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/CSYNC.py new file mode 100644 index 0000000..2f972f6 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/CSYNC.py @@ -0,0 +1,68 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011, 2016 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
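+# Usage sketch for the CSYNC type defined below, using the example RDATA
+# from RFC 7477 (serial 66; flags 3 = immediate | soaminimum):
+#
+#     >>> import dns.rdata
+#     >>> r = dns.rdata.from_text("IN", "CSYNC", "66 3 A NS AAAA")
+#     >>> (r.serial, r.flags)
+#     (66, 3)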
+ +import struct + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdatatype +import dns.rdtypes.util + + +@dns.immutable.immutable +class Bitmap(dns.rdtypes.util.Bitmap): + type_name = "CSYNC" + + +@dns.immutable.immutable +class CSYNC(dns.rdata.Rdata): + """CSYNC record""" + + __slots__ = ["serial", "flags", "windows"] + + def __init__(self, rdclass, rdtype, serial, flags, windows): + super().__init__(rdclass, rdtype) + self.serial = self._as_uint32(serial) + self.flags = self._as_uint16(flags) + if not isinstance(windows, Bitmap): + windows = Bitmap(windows) + self.windows = tuple(windows.windows) + + def to_text(self, origin=None, relativize=True, **kw): + text = Bitmap(self.windows).to_text() + return "%d %d%s" % (self.serial, self.flags, text) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + serial = tok.get_uint32() + flags = tok.get_uint16() + bitmap = Bitmap.from_text(tok) + return cls(rdclass, rdtype, serial, flags, bitmap) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack("!IH", self.serial, self.flags)) + Bitmap(self.windows).to_wire(file) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (serial, flags) = parser.get_struct("!IH") + bitmap = Bitmap.from_wire_parser(parser) + return cls(rdclass, rdtype, serial, flags, bitmap) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/DLV.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/DLV.py new file mode 100644 index 0000000..6c134f1 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/DLV.py @@ -0,0 +1,24 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.dsbase + + +@dns.immutable.immutable +class DLV(dns.rdtypes.dsbase.DSBase): + """DLV record""" diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/DNAME.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/DNAME.py new file mode 100644 index 0000000..bbf9186 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/DNAME.py @@ -0,0 +1,27 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.nsbase + + +@dns.immutable.immutable +class DNAME(dns.rdtypes.nsbase.UncompressedNS): + """DNAME record""" + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.target.to_wire(file, None, origin, canonicalize) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/DNSKEY.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/DNSKEY.py new file mode 100644 index 0000000..6d961a9 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/DNSKEY.py @@ -0,0 +1,33 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.dnskeybase # lgtm[py/import-and-import-from] + +# pylint: disable=unused-import +from dns.rdtypes.dnskeybase import ( # noqa: F401 lgtm[py/unused-import] + REVOKE, + SEP, + ZONE, +) + +# pylint: enable=unused-import + + +@dns.immutable.immutable +class DNSKEY(dns.rdtypes.dnskeybase.DNSKEYBase): + """DNSKEY record""" diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/DS.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/DS.py new file mode 100644 index 0000000..58b3108 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/DS.py @@ -0,0 +1,24 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
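+# Usage sketch for the DSBase-derived DS type defined below (CDS and DLV
+# above share the same presentation format). The digest here is a dummy
+# 32-byte value, chosen only to satisfy the SHA-256 (type 2) length check:
+#
+#     >>> import dns.rdata
+#     >>> r = dns.rdata.from_text("IN", "DS", "12345 8 2 " + "0a" * 32)
+#     >>> (r.key_tag, int(r.algorithm), int(r.digest_type))
+#     (12345, 8, 2)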
+ +import dns.immutable +import dns.rdtypes.dsbase + + +@dns.immutable.immutable +class DS(dns.rdtypes.dsbase.DSBase): + """DS record""" diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/EUI48.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/EUI48.py new file mode 100644 index 0000000..c843be5 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/EUI48.py @@ -0,0 +1,30 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2015 Red Hat, Inc. +# Author: Petr Spacek +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.euibase + + +@dns.immutable.immutable +class EUI48(dns.rdtypes.euibase.EUIBase): + """EUI48 record""" + + # see: rfc7043.txt + + byte_len = 6 # 0123456789ab (in hex) + text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/EUI64.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/EUI64.py new file mode 100644 index 0000000..f6d7e25 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/EUI64.py @@ -0,0 +1,30 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2015 Red Hat, Inc. +# Author: Petr Spacek +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.euibase + + +@dns.immutable.immutable +class EUI64(dns.rdtypes.euibase.EUIBase): + """EUI64 record""" + + # see: rfc7043.txt + + byte_len = 8 # 0123456789abcdef (in hex) + text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab-cd-ef diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/GPOS.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/GPOS.py new file mode 100644 index 0000000..d79f4a0 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/GPOS.py @@ -0,0 +1,126 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. 
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +def _validate_float_string(what): + if len(what) == 0: + raise dns.exception.FormError + if what[0] == b"-"[0] or what[0] == b"+"[0]: + what = what[1:] + if what.isdigit(): + return + try: + (left, right) = what.split(b".") + except ValueError: + raise dns.exception.FormError + if left == b"" and right == b"": + raise dns.exception.FormError + if not left == b"" and not left.decode().isdigit(): + raise dns.exception.FormError + if not right == b"" and not right.decode().isdigit(): + raise dns.exception.FormError + + +@dns.immutable.immutable +class GPOS(dns.rdata.Rdata): + """GPOS record""" + + # see: RFC 1712 + + __slots__ = ["latitude", "longitude", "altitude"] + + def __init__(self, rdclass, rdtype, latitude, longitude, altitude): + super().__init__(rdclass, rdtype) + if isinstance(latitude, float) or isinstance(latitude, int): + latitude = str(latitude) + if isinstance(longitude, float) or isinstance(longitude, int): + longitude = str(longitude) + if isinstance(altitude, float) or isinstance(altitude, int): + altitude = str(altitude) + latitude = self._as_bytes(latitude, True, 255) + longitude = self._as_bytes(longitude, True, 255) + altitude = self._as_bytes(altitude, True, 255) + _validate_float_string(latitude) + _validate_float_string(longitude) + _validate_float_string(altitude) + self.latitude = latitude + self.longitude = longitude + self.altitude = altitude + flat = self.float_latitude + if flat < -90.0 or flat > 90.0: + raise dns.exception.FormError("bad latitude") + flong = self.float_longitude + if flong < -180.0 or flong > 180.0: + raise dns.exception.FormError("bad longitude") + + def to_text(self, origin=None, relativize=True, **kw): + return ( + f"{self.latitude.decode()} {self.longitude.decode()} " + f"{self.altitude.decode()}" + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + latitude = tok.get_string() + longitude = tok.get_string() + altitude = tok.get_string() + return cls(rdclass, rdtype, latitude, longitude, altitude) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + l = len(self.latitude) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.latitude) + l = len(self.longitude) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.longitude) + l = len(self.altitude) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.altitude) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + latitude = parser.get_counted_bytes() + longitude = parser.get_counted_bytes() + altitude = parser.get_counted_bytes() + return cls(rdclass, rdtype, latitude, 
longitude, altitude) + + @property + def float_latitude(self): + "latitude as a floating point value" + return float(self.latitude) + + @property + def float_longitude(self): + "longitude as a floating point value" + return float(self.longitude) + + @property + def float_altitude(self): + "altitude as a floating point value" + return float(self.altitude) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/HINFO.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/HINFO.py new file mode 100644 index 0000000..06ad348 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/HINFO.py @@ -0,0 +1,64 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class HINFO(dns.rdata.Rdata): + """HINFO record""" + + # see: RFC 1035 + + __slots__ = ["cpu", "os"] + + def __init__(self, rdclass, rdtype, cpu, os): + super().__init__(rdclass, rdtype) + self.cpu = self._as_bytes(cpu, True, 255) + self.os = self._as_bytes(os, True, 255) + + def to_text(self, origin=None, relativize=True, **kw): + return f'"{dns.rdata._escapify(self.cpu)}" "{dns.rdata._escapify(self.os)}"' + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + cpu = tok.get_string(max_length=255) + os = tok.get_string(max_length=255) + return cls(rdclass, rdtype, cpu, os) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + l = len(self.cpu) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.cpu) + l = len(self.os) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.os) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + cpu = parser.get_counted_bytes() + os = parser.get_counted_bytes() + return cls(rdclass, rdtype, cpu, os) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/HIP.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/HIP.py new file mode 100644 index 0000000..f3157da --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/HIP.py @@ -0,0 +1,85 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2010, 2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import base64 +import binascii +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.rdatatype + + +@dns.immutable.immutable +class HIP(dns.rdata.Rdata): + """HIP record""" + + # see: RFC 5205 + + __slots__ = ["hit", "algorithm", "key", "servers"] + + def __init__(self, rdclass, rdtype, hit, algorithm, key, servers): + super().__init__(rdclass, rdtype) + self.hit = self._as_bytes(hit, True, 255) + self.algorithm = self._as_uint8(algorithm) + self.key = self._as_bytes(key, True) + self.servers = self._as_tuple(servers, self._as_name) + + def to_text(self, origin=None, relativize=True, **kw): + hit = binascii.hexlify(self.hit).decode() + key = base64.b64encode(self.key).replace(b"\n", b"").decode() + text = "" + servers = [] + for server in self.servers: + servers.append(server.choose_relativity(origin, relativize)) + if len(servers) > 0: + text += " " + " ".join(x.to_unicode() for x in servers) + return "%u %s %s%s" % (self.algorithm, hit, key, text) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + algorithm = tok.get_uint8() + hit = binascii.unhexlify(tok.get_string().encode()) + key = base64.b64decode(tok.get_string().encode()) + servers = [] + for token in tok.get_remaining(): + server = tok.as_name(token, origin, relativize, relativize_to) + servers.append(server) + return cls(rdclass, rdtype, hit, algorithm, key, servers) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + lh = len(self.hit) + lk = len(self.key) + file.write(struct.pack("!BBH", lh, self.algorithm, lk)) + file.write(self.hit) + file.write(self.key) + for server in self.servers: + server.to_wire(file, None, origin, False) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (lh, algorithm, lk) = parser.get_struct("!BBH") + hit = parser.get_bytes(lh) + key = parser.get_bytes(lk) + servers = [] + while parser.remaining() > 0: + server = parser.get_name(origin) + servers.append(server) + return cls(rdclass, rdtype, hit, algorithm, key, servers) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/ISDN.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/ISDN.py new file mode 100644 index 0000000..6428a0a --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/ISDN.py @@ -0,0 +1,78 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class ISDN(dns.rdata.Rdata): + """ISDN record""" + + # see: RFC 1183 + + __slots__ = ["address", "subaddress"] + + def __init__(self, rdclass, rdtype, address, subaddress): + super().__init__(rdclass, rdtype) + self.address = self._as_bytes(address, True, 255) + self.subaddress = self._as_bytes(subaddress, True, 255) + + def to_text(self, origin=None, relativize=True, **kw): + if self.subaddress: + return ( + f'"{dns.rdata._escapify(self.address)}" ' + f'"{dns.rdata._escapify(self.subaddress)}"' + ) + else: + return f'"{dns.rdata._escapify(self.address)}"' + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + address = tok.get_string() + tokens = tok.get_remaining(max_tokens=1) + if len(tokens) >= 1: + subaddress = tokens[0].unescape().value + else: + subaddress = "" + return cls(rdclass, rdtype, address, subaddress) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + l = len(self.address) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.address) + l = len(self.subaddress) + if l > 0: + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.subaddress) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + address = parser.get_counted_bytes() + if parser.remaining() > 0: + subaddress = parser.get_counted_bytes() + else: + subaddress = b"" + return cls(rdclass, rdtype, address, subaddress) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/L32.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/L32.py new file mode 100644 index 0000000..09804c2 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/L32.py @@ -0,0 +1,41 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import struct + +import dns.immutable +import dns.rdata + + +@dns.immutable.immutable +class L32(dns.rdata.Rdata): + """L32 record""" + + # see: rfc6742.txt + + __slots__ = ["preference", "locator32"] + + def __init__(self, rdclass, rdtype, preference, locator32): + super().__init__(rdclass, rdtype) + self.preference = self._as_uint16(preference) + self.locator32 = self._as_ipv4_address(locator32) + + def to_text(self, origin=None, relativize=True, **kw): + return f"{self.preference} {self.locator32}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + preference = tok.get_uint16() + nodeid = tok.get_identifier() + return cls(rdclass, rdtype, preference, nodeid) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack("!H", self.preference)) + file.write(dns.ipv4.inet_aton(self.locator32)) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + preference = parser.get_uint16() + locator32 = parser.get_remaining() + return cls(rdclass, rdtype, preference, locator32) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/L64.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/L64.py new file mode 100644 
index 0000000..fb76808 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/L64.py @@ -0,0 +1,47 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import struct + +import dns.immutable +import dns.rdtypes.util + + +@dns.immutable.immutable +class L64(dns.rdata.Rdata): + """L64 record""" + + # see: rfc6742.txt + + __slots__ = ["preference", "locator64"] + + def __init__(self, rdclass, rdtype, preference, locator64): + super().__init__(rdclass, rdtype) + self.preference = self._as_uint16(preference) + if isinstance(locator64, bytes): + if len(locator64) != 8: + raise ValueError("invalid locator64") + self.locator64 = dns.rdata._hexify(locator64, 4, b":") + else: + dns.rdtypes.util.parse_formatted_hex(locator64, 4, 4, ":") + self.locator64 = locator64 + + def to_text(self, origin=None, relativize=True, **kw): + return f"{self.preference} {self.locator64}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + preference = tok.get_uint16() + locator64 = tok.get_identifier() + return cls(rdclass, rdtype, preference, locator64) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack("!H", self.preference)) + file.write(dns.rdtypes.util.parse_formatted_hex(self.locator64, 4, 4, ":")) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + preference = parser.get_uint16() + locator64 = parser.get_remaining() + return cls(rdclass, rdtype, preference, locator64) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/LOC.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/LOC.py new file mode 100644 index 0000000..1153cf0 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/LOC.py @@ -0,0 +1,353 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
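+# Usage sketch for the LOC type defined below. Latitude and longitude may
+# be passed as floats (degrees) or as the 5-element tuples
+# (degrees, minutes, seconds, milliseconds, hemisphere sign) in which they
+# are stored; the float_* properties convert back. Coordinates are
+# illustrative only:
+#
+#     >>> import dns.rdata
+#     >>> r = dns.rdata.from_text("IN", "LOC", "60 9 0.000 N 24 39 0.000 E 10.00m")
+#     >>> r.float_latitude
+#     60.15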
+ +import struct + +import dns.exception +import dns.immutable +import dns.rdata + +_pows = tuple(10**i for i in range(0, 11)) + +# default values are in centimeters +_default_size = 100.0 +_default_hprec = 1000000.0 +_default_vprec = 1000.0 + +# for use by from_wire() +_MAX_LATITUDE = 0x80000000 + 90 * 3600000 +_MIN_LATITUDE = 0x80000000 - 90 * 3600000 +_MAX_LONGITUDE = 0x80000000 + 180 * 3600000 +_MIN_LONGITUDE = 0x80000000 - 180 * 3600000 + + +def _exponent_of(what, desc): + if what == 0: + return 0 + exp = None + for i, pow in enumerate(_pows): + if what < pow: + exp = i - 1 + break + if exp is None or exp < 0: + raise dns.exception.SyntaxError(f"{desc} value out of bounds") + return exp + + +def _float_to_tuple(what): + if what < 0: + sign = -1 + what *= -1 + else: + sign = 1 + what = round(what * 3600000) + degrees = int(what // 3600000) + what -= degrees * 3600000 + minutes = int(what // 60000) + what -= minutes * 60000 + seconds = int(what // 1000) + what -= int(seconds * 1000) + what = int(what) + return (degrees, minutes, seconds, what, sign) + + +def _tuple_to_float(what): + value = float(what[0]) + value += float(what[1]) / 60.0 + value += float(what[2]) / 3600.0 + value += float(what[3]) / 3600000.0 + return float(what[4]) * value + + +def _encode_size(what, desc): + what = int(what) + exponent = _exponent_of(what, desc) & 0xF + base = what // pow(10, exponent) & 0xF + return base * 16 + exponent + + +def _decode_size(what, desc): + exponent = what & 0x0F + if exponent > 9: + raise dns.exception.FormError(f"bad {desc} exponent") + base = (what & 0xF0) >> 4 + if base > 9: + raise dns.exception.FormError(f"bad {desc} base") + return base * pow(10, exponent) + + +def _check_coordinate_list(value, low, high): + if value[0] < low or value[0] > high: + raise ValueError(f"not in range [{low}, {high}]") + if value[1] < 0 or value[1] > 59: + raise ValueError("bad minutes value") + if value[2] < 0 or value[2] > 59: + raise ValueError("bad seconds value") + if value[3] < 0 or value[3] > 999: + raise ValueError("bad milliseconds value") + if value[4] != 1 and value[4] != -1: + raise ValueError("bad hemisphere value") + + +@dns.immutable.immutable +class LOC(dns.rdata.Rdata): + """LOC record""" + + # see: RFC 1876 + + __slots__ = [ + "latitude", + "longitude", + "altitude", + "size", + "horizontal_precision", + "vertical_precision", + ] + + def __init__( + self, + rdclass, + rdtype, + latitude, + longitude, + altitude, + size=_default_size, + hprec=_default_hprec, + vprec=_default_vprec, + ): + """Initialize a LOC record instance. + + The parameters I{latitude} and I{longitude} may be either a 4-tuple + of integers specifying (degrees, minutes, seconds, milliseconds), + or they may be floating point values specifying the number of + degrees. The other parameters are floats. 
Size, horizontal precision, + and vertical precision are specified in centimeters.""" + + super().__init__(rdclass, rdtype) + if isinstance(latitude, int): + latitude = float(latitude) + if isinstance(latitude, float): + latitude = _float_to_tuple(latitude) + _check_coordinate_list(latitude, -90, 90) + self.latitude = tuple(latitude) + if isinstance(longitude, int): + longitude = float(longitude) + if isinstance(longitude, float): + longitude = _float_to_tuple(longitude) + _check_coordinate_list(longitude, -180, 180) + self.longitude = tuple(longitude) + self.altitude = float(altitude) + self.size = float(size) + self.horizontal_precision = float(hprec) + self.vertical_precision = float(vprec) + + def to_text(self, origin=None, relativize=True, **kw): + if self.latitude[4] > 0: + lat_hemisphere = "N" + else: + lat_hemisphere = "S" + if self.longitude[4] > 0: + long_hemisphere = "E" + else: + long_hemisphere = "W" + text = "%d %d %d.%03d %s %d %d %d.%03d %s %0.2fm" % ( + self.latitude[0], + self.latitude[1], + self.latitude[2], + self.latitude[3], + lat_hemisphere, + self.longitude[0], + self.longitude[1], + self.longitude[2], + self.longitude[3], + long_hemisphere, + self.altitude / 100.0, + ) + + # do not print default values + if ( + self.size != _default_size + or self.horizontal_precision != _default_hprec + or self.vertical_precision != _default_vprec + ): + text += ( + f" {self.size / 100.0:0.2f}m {self.horizontal_precision / 100.0:0.2f}m" + f" {self.vertical_precision / 100.0:0.2f}m" + ) + return text + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + latitude = [0, 0, 0, 0, 1] + longitude = [0, 0, 0, 0, 1] + size = _default_size + hprec = _default_hprec + vprec = _default_vprec + + latitude[0] = tok.get_int() + t = tok.get_string() + if t.isdigit(): + latitude[1] = int(t) + t = tok.get_string() + if "." in t: + (seconds, milliseconds) = t.split(".") + if not seconds.isdigit(): + raise dns.exception.SyntaxError("bad latitude seconds value") + latitude[2] = int(seconds) + l = len(milliseconds) + if l == 0 or l > 3 or not milliseconds.isdigit(): + raise dns.exception.SyntaxError("bad latitude milliseconds value") + if l == 1: + m = 100 + elif l == 2: + m = 10 + else: + m = 1 + latitude[3] = m * int(milliseconds) + t = tok.get_string() + elif t.isdigit(): + latitude[2] = int(t) + t = tok.get_string() + if t == "S": + latitude[4] = -1 + elif t != "N": + raise dns.exception.SyntaxError("bad latitude hemisphere value") + + longitude[0] = tok.get_int() + t = tok.get_string() + if t.isdigit(): + longitude[1] = int(t) + t = tok.get_string() + if "." 
in t: + (seconds, milliseconds) = t.split(".") + if not seconds.isdigit(): + raise dns.exception.SyntaxError("bad longitude seconds value") + longitude[2] = int(seconds) + l = len(milliseconds) + if l == 0 or l > 3 or not milliseconds.isdigit(): + raise dns.exception.SyntaxError("bad longitude milliseconds value") + if l == 1: + m = 100 + elif l == 2: + m = 10 + else: + m = 1 + longitude[3] = m * int(milliseconds) + t = tok.get_string() + elif t.isdigit(): + longitude[2] = int(t) + t = tok.get_string() + if t == "W": + longitude[4] = -1 + elif t != "E": + raise dns.exception.SyntaxError("bad longitude hemisphere value") + + t = tok.get_string() + if t[-1] == "m": + t = t[0:-1] + altitude = float(t) * 100.0 # m -> cm + + tokens = tok.get_remaining(max_tokens=3) + if len(tokens) >= 1: + value = tokens[0].unescape().value + if value[-1] == "m": + value = value[0:-1] + size = float(value) * 100.0 # m -> cm + if len(tokens) >= 2: + value = tokens[1].unescape().value + if value[-1] == "m": + value = value[0:-1] + hprec = float(value) * 100.0 # m -> cm + if len(tokens) >= 3: + value = tokens[2].unescape().value + if value[-1] == "m": + value = value[0:-1] + vprec = float(value) * 100.0 # m -> cm + + # Try encoding these now so we raise if they are bad + _encode_size(size, "size") + _encode_size(hprec, "horizontal precision") + _encode_size(vprec, "vertical precision") + + return cls(rdclass, rdtype, latitude, longitude, altitude, size, hprec, vprec) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + milliseconds = ( + self.latitude[0] * 3600000 + + self.latitude[1] * 60000 + + self.latitude[2] * 1000 + + self.latitude[3] + ) * self.latitude[4] + latitude = 0x80000000 + milliseconds + milliseconds = ( + self.longitude[0] * 3600000 + + self.longitude[1] * 60000 + + self.longitude[2] * 1000 + + self.longitude[3] + ) * self.longitude[4] + longitude = 0x80000000 + milliseconds + altitude = int(self.altitude) + 10000000 + size = _encode_size(self.size, "size") + hprec = _encode_size(self.horizontal_precision, "horizontal precision") + vprec = _encode_size(self.vertical_precision, "vertical precision") + wire = struct.pack( + "!BBBBIII", 0, size, hprec, vprec, latitude, longitude, altitude + ) + file.write(wire) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + ( + version, + size, + hprec, + vprec, + latitude, + longitude, + altitude, + ) = parser.get_struct("!BBBBIII") + if version != 0: + raise dns.exception.FormError("LOC version not zero") + if latitude < _MIN_LATITUDE or latitude > _MAX_LATITUDE: + raise dns.exception.FormError("bad latitude") + if latitude > 0x80000000: + latitude = (latitude - 0x80000000) / 3600000 + else: + latitude = -1 * (0x80000000 - latitude) / 3600000 + if longitude < _MIN_LONGITUDE or longitude > _MAX_LONGITUDE: + raise dns.exception.FormError("bad longitude") + if longitude > 0x80000000: + longitude = (longitude - 0x80000000) / 3600000 + else: + longitude = -1 * (0x80000000 - longitude) / 3600000 + altitude = float(altitude) - 10000000.0 + size = _decode_size(size, "size") + hprec = _decode_size(hprec, "horizontal precision") + vprec = _decode_size(vprec, "vertical precision") + return cls(rdclass, rdtype, latitude, longitude, altitude, size, hprec, vprec) + + @property + def float_latitude(self): + "latitude as a floating point value" + return _tuple_to_float(self.latitude) + + @property + def float_longitude(self): + "longitude as a floating point value" + return _tuple_to_float(self.longitude) diff --git 
a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/LP.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/LP.py new file mode 100644 index 0000000..312663f --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/LP.py @@ -0,0 +1,42 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import struct + +import dns.immutable +import dns.rdata + + +@dns.immutable.immutable +class LP(dns.rdata.Rdata): + """LP record""" + + # see: rfc6742.txt + + __slots__ = ["preference", "fqdn"] + + def __init__(self, rdclass, rdtype, preference, fqdn): + super().__init__(rdclass, rdtype) + self.preference = self._as_uint16(preference) + self.fqdn = self._as_name(fqdn) + + def to_text(self, origin=None, relativize=True, **kw): + fqdn = self.fqdn.choose_relativity(origin, relativize) + return "%d %s" % (self.preference, fqdn) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + preference = tok.get_uint16() + fqdn = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, preference, fqdn) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack("!H", self.preference)) + self.fqdn.to_wire(file, compress, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + preference = parser.get_uint16() + fqdn = parser.get_name(origin) + return cls(rdclass, rdtype, preference, fqdn) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/MX.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/MX.py new file mode 100644 index 0000000..0c300c5 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/MX.py @@ -0,0 +1,24 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
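+# Usage sketch for the MX type defined below ("mail.example." is a
+# placeholder host name):
+#
+#     >>> import dns.rdata
+#     >>> r = dns.rdata.from_text("IN", "MX", "10 mail.example.")
+#     >>> (r.preference, str(r.exchange))
+#     (10, 'mail.example.')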
+ +import dns.immutable +import dns.rdtypes.mxbase + + +@dns.immutable.immutable +class MX(dns.rdtypes.mxbase.MXBase): + """MX record""" diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/NID.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/NID.py new file mode 100644 index 0000000..2f64917 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/NID.py @@ -0,0 +1,47 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import struct + +import dns.immutable +import dns.rdtypes.util + + +@dns.immutable.immutable +class NID(dns.rdata.Rdata): + """NID record""" + + # see: rfc6742.txt + + __slots__ = ["preference", "nodeid"] + + def __init__(self, rdclass, rdtype, preference, nodeid): + super().__init__(rdclass, rdtype) + self.preference = self._as_uint16(preference) + if isinstance(nodeid, bytes): + if len(nodeid) != 8: + raise ValueError("invalid nodeid") + self.nodeid = dns.rdata._hexify(nodeid, 4, b":") + else: + dns.rdtypes.util.parse_formatted_hex(nodeid, 4, 4, ":") + self.nodeid = nodeid + + def to_text(self, origin=None, relativize=True, **kw): + return f"{self.preference} {self.nodeid}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + preference = tok.get_uint16() + nodeid = tok.get_identifier() + return cls(rdclass, rdtype, preference, nodeid) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack("!H", self.preference)) + file.write(dns.rdtypes.util.parse_formatted_hex(self.nodeid, 4, 4, ":")) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + preference = parser.get_uint16() + nodeid = parser.get_remaining() + return cls(rdclass, rdtype, preference, nodeid) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/NINFO.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/NINFO.py new file mode 100644 index 0000000..b177bdd --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/NINFO.py @@ -0,0 +1,26 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.txtbase + + +@dns.immutable.immutable +class NINFO(dns.rdtypes.txtbase.TXTBase): + """NINFO record""" + + # see: draft-reid-dnsext-zs-01 diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/NS.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/NS.py new file mode 100644 index 0000000..c3f34ce --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/NS.py @@ -0,0 +1,24 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. 
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.nsbase + + +@dns.immutable.immutable +class NS(dns.rdtypes.nsbase.NSBase): + """NS record""" diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/NSEC.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/NSEC.py new file mode 100644 index 0000000..3c78b72 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/NSEC.py @@ -0,0 +1,67 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdatatype +import dns.rdtypes.util + + +@dns.immutable.immutable +class Bitmap(dns.rdtypes.util.Bitmap): + type_name = "NSEC" + + +@dns.immutable.immutable +class NSEC(dns.rdata.Rdata): + """NSEC record""" + + __slots__ = ["next", "windows"] + + def __init__(self, rdclass, rdtype, next, windows): + super().__init__(rdclass, rdtype) + self.next = self._as_name(next) + if not isinstance(windows, Bitmap): + windows = Bitmap(windows) + self.windows = tuple(windows.windows) + + def to_text(self, origin=None, relativize=True, **kw): + next = self.next.choose_relativity(origin, relativize) + text = Bitmap(self.windows).to_text() + return f"{next}{text}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + next = tok.get_name(origin, relativize, relativize_to) + windows = Bitmap.from_text(tok) + return cls(rdclass, rdtype, next, windows) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + # Note that NSEC downcasing, originally mandated by RFC 4034 + # section 6.2 was removed by RFC 6840 section 5.1. 
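+        # That is why canonicalize is passed as False below: the next
+        # owner name is written verbatim even when the caller requested
+        # canonical form.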
+ self.next.to_wire(file, None, origin, False) + Bitmap(self.windows).to_wire(file) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + next = parser.get_name(origin) + bitmap = Bitmap.from_wire_parser(parser) + return cls(rdclass, rdtype, next, bitmap) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/NSEC3.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/NSEC3.py new file mode 100644 index 0000000..d71302b --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/NSEC3.py @@ -0,0 +1,126 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import base64 +import binascii +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.rdatatype +import dns.rdtypes.util + +b32_hex_to_normal = bytes.maketrans( + b"0123456789ABCDEFGHIJKLMNOPQRSTUV", b"ABCDEFGHIJKLMNOPQRSTUVWXYZ234567" +) +b32_normal_to_hex = bytes.maketrans( + b"ABCDEFGHIJKLMNOPQRSTUVWXYZ234567", b"0123456789ABCDEFGHIJKLMNOPQRSTUV" +) + +# hash algorithm constants +SHA1 = 1 + +# flag constants +OPTOUT = 1 + + +@dns.immutable.immutable +class Bitmap(dns.rdtypes.util.Bitmap): + type_name = "NSEC3" + + +@dns.immutable.immutable +class NSEC3(dns.rdata.Rdata): + """NSEC3 record""" + + __slots__ = ["algorithm", "flags", "iterations", "salt", "next", "windows"] + + def __init__( + self, rdclass, rdtype, algorithm, flags, iterations, salt, next, windows + ): + super().__init__(rdclass, rdtype) + self.algorithm = self._as_uint8(algorithm) + self.flags = self._as_uint8(flags) + self.iterations = self._as_uint16(iterations) + self.salt = self._as_bytes(salt, True, 255) + self.next = self._as_bytes(next, True, 255) + if not isinstance(windows, Bitmap): + windows = Bitmap(windows) + self.windows = tuple(windows.windows) + + def _next_text(self): + next = base64.b32encode(self.next).translate(b32_normal_to_hex).lower().decode() + next = next.rstrip("=") + return next + + def to_text(self, origin=None, relativize=True, **kw): + next = self._next_text() + if self.salt == b"": + salt = "-" + else: + salt = binascii.hexlify(self.salt).decode() + text = Bitmap(self.windows).to_text() + return "%u %u %u %s %s%s" % ( + self.algorithm, + self.flags, + self.iterations, + salt, + next, + text, + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + algorithm = tok.get_uint8() + flags = tok.get_uint8() + iterations = tok.get_uint16() + salt = tok.get_string() + if salt == "-": + salt = b"" + else: + salt = binascii.unhexlify(salt.encode("ascii")) + next = tok.get_string().encode("ascii").upper().translate(b32_hex_to_normal) + if next.endswith(b"="): + raise binascii.Error("Incorrect 
padding") + if len(next) % 8 != 0: + next += b"=" * (8 - len(next) % 8) + next = base64.b32decode(next) + bitmap = Bitmap.from_text(tok) + return cls(rdclass, rdtype, algorithm, flags, iterations, salt, next, bitmap) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + l = len(self.salt) + file.write(struct.pack("!BBHB", self.algorithm, self.flags, self.iterations, l)) + file.write(self.salt) + l = len(self.next) + file.write(struct.pack("!B", l)) + file.write(self.next) + Bitmap(self.windows).to_wire(file) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (algorithm, flags, iterations) = parser.get_struct("!BBH") + salt = parser.get_counted_bytes() + next = parser.get_counted_bytes() + bitmap = Bitmap.from_wire_parser(parser) + return cls(rdclass, rdtype, algorithm, flags, iterations, salt, next, bitmap) + + def next_name(self, origin=None): + return dns.name.from_text(self._next_text(), origin) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/NSEC3PARAM.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/NSEC3PARAM.py new file mode 100644 index 0000000..d1e62eb --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/NSEC3PARAM.py @@ -0,0 +1,69 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +import binascii +import struct + +import dns.exception +import dns.immutable +import dns.rdata + + +@dns.immutable.immutable +class NSEC3PARAM(dns.rdata.Rdata): + """NSEC3PARAM record""" + + __slots__ = ["algorithm", "flags", "iterations", "salt"] + + def __init__(self, rdclass, rdtype, algorithm, flags, iterations, salt): + super().__init__(rdclass, rdtype) + self.algorithm = self._as_uint8(algorithm) + self.flags = self._as_uint8(flags) + self.iterations = self._as_uint16(iterations) + self.salt = self._as_bytes(salt, True, 255) + + def to_text(self, origin=None, relativize=True, **kw): + if self.salt == b"": + salt = "-" + else: + salt = binascii.hexlify(self.salt).decode() + return "%u %u %u %s" % (self.algorithm, self.flags, self.iterations, salt) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + algorithm = tok.get_uint8() + flags = tok.get_uint8() + iterations = tok.get_uint16() + salt = tok.get_string() + if salt == "-": + salt = "" + else: + salt = binascii.unhexlify(salt.encode()) + return cls(rdclass, rdtype, algorithm, flags, iterations, salt) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + l = len(self.salt) + file.write(struct.pack("!BBHB", self.algorithm, self.flags, self.iterations, l)) + file.write(self.salt) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (algorithm, flags, iterations) = parser.get_struct("!BBH") + salt = parser.get_counted_bytes() + return cls(rdclass, rdtype, algorithm, flags, iterations, salt) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/OPENPGPKEY.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/OPENPGPKEY.py new file mode 100644 index 0000000..4d7a4b6 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/OPENPGPKEY.py @@ -0,0 +1,53 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2016 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
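
NSEC3PARAM's wire format is just the fixed `!BBHB` header (algorithm, flags, iterations, salt length) followed by the salt bytes, and the presentation form prints an empty salt as `-`. A small sketch of the packing that `_to_wire`/`from_wire_parser` perform, using arbitrary sample values:

```python
import struct

algorithm, flags, iterations = 1, 0, 0   # sample values only
salt = bytes.fromhex("ab12")

# Pack: fixed header, one-byte salt length, then the salt itself.
wire = struct.pack("!BBHB", algorithm, flags, iterations, len(salt)) + salt

# Unpack the same layout again.
a, f, i, slen = struct.unpack("!BBHB", wire[:5])
assert (a, f, i) == (algorithm, flags, iterations)
assert wire[5 : 5 + slen] == salt

# Presentation form: an empty salt is written as "-", otherwise hex.
salt_text = "-" if salt == b"" else salt.hex()
assert salt_text == "ab12"
```
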
+ +import base64 + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class OPENPGPKEY(dns.rdata.Rdata): + """OPENPGPKEY record""" + + # see: RFC 7929 + + def __init__(self, rdclass, rdtype, key): + super().__init__(rdclass, rdtype) + self.key = self._as_bytes(key) + + def to_text(self, origin=None, relativize=True, **kw): + return dns.rdata._base64ify(self.key, chunksize=None, **kw) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + b64 = tok.concatenate_remaining_identifiers().encode() + key = base64.b64decode(b64) + return cls(rdclass, rdtype, key) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(self.key) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + key = parser.get_remaining() + return cls(rdclass, rdtype, key) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/OPT.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/OPT.py new file mode 100644 index 0000000..d343dfa --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/OPT.py @@ -0,0 +1,77 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.edns +import dns.exception +import dns.immutable +import dns.rdata + +# We don't implement from_text, and that's ok. +# pylint: disable=abstract-method + + +@dns.immutable.immutable +class OPT(dns.rdata.Rdata): + """OPT record""" + + __slots__ = ["options"] + + def __init__(self, rdclass, rdtype, options): + """Initialize an OPT rdata. + + *rdclass*, an ``int`` is the rdataclass of the Rdata, + which is also the payload size. + + *rdtype*, an ``int`` is the rdatatype of the Rdata. 
+ + *options*, a tuple of ``bytes`` + """ + + super().__init__(rdclass, rdtype) + + def as_option(option): + if not isinstance(option, dns.edns.Option): + raise ValueError("option is not a dns.edns.option") + return option + + self.options = self._as_tuple(options, as_option) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + for opt in self.options: + owire = opt.to_wire() + file.write(struct.pack("!HH", opt.otype, len(owire))) + file.write(owire) + + def to_text(self, origin=None, relativize=True, **kw): + return " ".join(opt.to_text() for opt in self.options) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + options = [] + while parser.remaining() > 0: + (otype, olen) = parser.get_struct("!HH") + with parser.restrict_to(olen): + opt = dns.edns.option_from_wire_parser(otype, parser) + options.append(opt) + return cls(rdclass, rdtype, options) + + @property + def payload(self): + "payload size" + return self.rdclass diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/PTR.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/PTR.py new file mode 100644 index 0000000..98c3616 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/PTR.py @@ -0,0 +1,24 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.nsbase + + +@dns.immutable.immutable +class PTR(dns.rdtypes.nsbase.NSBase): + """PTR record""" diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/RESINFO.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/RESINFO.py new file mode 100644 index 0000000..76c8ea2 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/RESINFO.py @@ -0,0 +1,24 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
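
OPT is the EDNS pseudo-record: its class field doubles as the advertised UDP payload size (hence the `payload` property returning `rdclass`), and the rdata is a flat sequence of `(option-code, option-length, option-data)` triples. A minimal sketch of that option framing, mirroring what `OPT._to_wire` emits per option; the option code and payload bytes below are made up for illustration:

```python
import struct

def pack_option(otype: int, data: bytes) -> bytes:
    # Each EDNS option is a 16-bit code, a 16-bit length, then the data.
    return struct.pack("!HH", otype, len(data)) + data

wire = pack_option(10, b"\x00" * 8)  # hypothetical code and payload
otype, olen = struct.unpack("!HH", wire[:4])
assert (otype, olen, wire[4:]) == (10, 8, b"\x00" * 8)
```
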
+ +import dns.immutable +import dns.rdtypes.txtbase + + +@dns.immutable.immutable +class RESINFO(dns.rdtypes.txtbase.TXTBase): + """RESINFO record""" diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/RP.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/RP.py new file mode 100644 index 0000000..a66cfc5 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/RP.py @@ -0,0 +1,58 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata + + +@dns.immutable.immutable +class RP(dns.rdata.Rdata): + """RP record""" + + # see: RFC 1183 + + __slots__ = ["mbox", "txt"] + + def __init__(self, rdclass, rdtype, mbox, txt): + super().__init__(rdclass, rdtype) + self.mbox = self._as_name(mbox) + self.txt = self._as_name(txt) + + def to_text(self, origin=None, relativize=True, **kw): + mbox = self.mbox.choose_relativity(origin, relativize) + txt = self.txt.choose_relativity(origin, relativize) + return f"{str(mbox)} {str(txt)}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + mbox = tok.get_name(origin, relativize, relativize_to) + txt = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, mbox, txt) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.mbox.to_wire(file, None, origin, canonicalize) + self.txt.to_wire(file, None, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + mbox = parser.get_name(origin) + txt = parser.get_name(origin) + return cls(rdclass, rdtype, mbox, txt) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/RRSIG.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/RRSIG.py new file mode 100644 index 0000000..8beb423 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/RRSIG.py @@ -0,0 +1,157 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import base64 +import calendar +import struct +import time + +import dns.dnssectypes +import dns.exception +import dns.immutable +import dns.rdata +import dns.rdatatype + + +class BadSigTime(dns.exception.DNSException): + """Time in DNS SIG or RRSIG resource record cannot be parsed.""" + + +def sigtime_to_posixtime(what): + if len(what) <= 10 and what.isdigit(): + return int(what) + if len(what) != 14: + raise BadSigTime + year = int(what[0:4]) + month = int(what[4:6]) + day = int(what[6:8]) + hour = int(what[8:10]) + minute = int(what[10:12]) + second = int(what[12:14]) + return calendar.timegm((year, month, day, hour, minute, second, 0, 0, 0)) + + +def posixtime_to_sigtime(what): + return time.strftime("%Y%m%d%H%M%S", time.gmtime(what)) + + +@dns.immutable.immutable +class RRSIG(dns.rdata.Rdata): + """RRSIG record""" + + __slots__ = [ + "type_covered", + "algorithm", + "labels", + "original_ttl", + "expiration", + "inception", + "key_tag", + "signer", + "signature", + ] + + def __init__( + self, + rdclass, + rdtype, + type_covered, + algorithm, + labels, + original_ttl, + expiration, + inception, + key_tag, + signer, + signature, + ): + super().__init__(rdclass, rdtype) + self.type_covered = self._as_rdatatype(type_covered) + self.algorithm = dns.dnssectypes.Algorithm.make(algorithm) + self.labels = self._as_uint8(labels) + self.original_ttl = self._as_ttl(original_ttl) + self.expiration = self._as_uint32(expiration) + self.inception = self._as_uint32(inception) + self.key_tag = self._as_uint16(key_tag) + self.signer = self._as_name(signer) + self.signature = self._as_bytes(signature) + + def covers(self): + return self.type_covered + + def to_text(self, origin=None, relativize=True, **kw): + return "%s %d %d %d %s %s %d %s %s" % ( + dns.rdatatype.to_text(self.type_covered), + self.algorithm, + self.labels, + self.original_ttl, + posixtime_to_sigtime(self.expiration), + posixtime_to_sigtime(self.inception), + self.key_tag, + self.signer.choose_relativity(origin, relativize), + dns.rdata._base64ify(self.signature, **kw), + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + type_covered = dns.rdatatype.from_text(tok.get_string()) + algorithm = dns.dnssectypes.Algorithm.from_text(tok.get_string()) + labels = tok.get_int() + original_ttl = tok.get_ttl() + expiration = sigtime_to_posixtime(tok.get_string()) + inception = sigtime_to_posixtime(tok.get_string()) + key_tag = tok.get_int() + signer = tok.get_name(origin, relativize, relativize_to) + b64 = tok.concatenate_remaining_identifiers().encode() + signature = base64.b64decode(b64) + return cls( + rdclass, + rdtype, + type_covered, + algorithm, + labels, + original_ttl, + expiration, + inception, + key_tag, + signer, + signature, + ) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack( + "!HBBIIIH", + self.type_covered, + self.algorithm, + self.labels, + self.original_ttl, + self.expiration, + self.inception, + self.key_tag, + ) + file.write(header) + self.signer.to_wire(file, None, origin, canonicalize) + file.write(self.signature) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, 
parser, origin=None): + header = parser.get_struct("!HBBIIIH") + signer = parser.get_name(origin) + signature = parser.get_remaining() + return cls(rdclass, rdtype, *header, signer, signature) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/RT.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/RT.py new file mode 100644 index 0000000..5a4d45c --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/RT.py @@ -0,0 +1,24 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.mxbase + + +@dns.immutable.immutable +class RT(dns.rdtypes.mxbase.UncompressedDowncasingMX): + """RT record""" diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/SMIMEA.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/SMIMEA.py new file mode 100644 index 0000000..55d87bf --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/SMIMEA.py @@ -0,0 +1,9 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import dns.immutable +import dns.rdtypes.tlsabase + + +@dns.immutable.immutable +class SMIMEA(dns.rdtypes.tlsabase.TLSABase): + """SMIMEA record""" diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/SOA.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/SOA.py new file mode 100644 index 0000000..09aa832 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/SOA.py @@ -0,0 +1,86 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
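
The two sigtime helpers in RRSIG.py convert between the 14-digit `YYYYMMDDHHMMSS` UTC presentation form (RFC 4034 §3.2) and the POSIX timestamps stored in the rdata; `sigtime_to_posixtime` also accepts a bare integer string of up to 10 digits. A quick self-contained round trip of the same logic:

```python
import calendar
import time

def sigtime_to_posixtime(what: str) -> int:
    if len(what) <= 10 and what.isdigit():   # already a bare timestamp
        return int(what)
    fields = (int(what[0:4]), int(what[4:6]), int(what[6:8]),
              int(what[8:10]), int(what[10:12]), int(what[12:14]), 0, 0, 0)
    return calendar.timegm(fields)           # interpreted as UTC

def posixtime_to_sigtime(what: int) -> str:
    return time.strftime("%Y%m%d%H%M%S", time.gmtime(what))

assert sigtime_to_posixtime("19700102000000") == 86400
assert posixtime_to_sigtime(86400) == "19700102000000"
```
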
+ +import struct + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata + + +@dns.immutable.immutable +class SOA(dns.rdata.Rdata): + """SOA record""" + + # see: RFC 1035 + + __slots__ = ["mname", "rname", "serial", "refresh", "retry", "expire", "minimum"] + + def __init__( + self, rdclass, rdtype, mname, rname, serial, refresh, retry, expire, minimum + ): + super().__init__(rdclass, rdtype) + self.mname = self._as_name(mname) + self.rname = self._as_name(rname) + self.serial = self._as_uint32(serial) + self.refresh = self._as_ttl(refresh) + self.retry = self._as_ttl(retry) + self.expire = self._as_ttl(expire) + self.minimum = self._as_ttl(minimum) + + def to_text(self, origin=None, relativize=True, **kw): + mname = self.mname.choose_relativity(origin, relativize) + rname = self.rname.choose_relativity(origin, relativize) + return "%s %s %d %d %d %d %d" % ( + mname, + rname, + self.serial, + self.refresh, + self.retry, + self.expire, + self.minimum, + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + mname = tok.get_name(origin, relativize, relativize_to) + rname = tok.get_name(origin, relativize, relativize_to) + serial = tok.get_uint32() + refresh = tok.get_ttl() + retry = tok.get_ttl() + expire = tok.get_ttl() + minimum = tok.get_ttl() + return cls( + rdclass, rdtype, mname, rname, serial, refresh, retry, expire, minimum + ) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.mname.to_wire(file, compress, origin, canonicalize) + self.rname.to_wire(file, compress, origin, canonicalize) + five_ints = struct.pack( + "!IIIII", self.serial, self.refresh, self.retry, self.expire, self.minimum + ) + file.write(five_ints) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + mname = parser.get_name(origin) + rname = parser.get_name(origin) + return cls(rdclass, rdtype, mname, rname, *parser.get_struct("!IIIII")) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/SPF.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/SPF.py new file mode 100644 index 0000000..1df3b70 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/SPF.py @@ -0,0 +1,26 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
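
After the two compressible names (MNAME, RNAME), an SOA rdata is exactly five 32-bit big-endian counters: serial, refresh, retry, expire, minimum, which is why `SOA._to_wire` ends with a single `!IIIII` pack. A sketch of that fixed tail with arbitrary sample values:

```python
import struct

serial, refresh, retry, expire, minimum = 2024010101, 7200, 3600, 1209600, 300

five_ints = struct.pack("!IIIII", serial, refresh, retry, expire, minimum)
assert len(five_ints) == 20  # 5 * 4 bytes following the two names
assert struct.unpack("!IIIII", five_ints) == (
    serial, refresh, retry, expire, minimum
)
```
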
+ +import dns.immutable +import dns.rdtypes.txtbase + + +@dns.immutable.immutable +class SPF(dns.rdtypes.txtbase.TXTBase): + """SPF record""" + + # see: RFC 4408 diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/SSHFP.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/SSHFP.py new file mode 100644 index 0000000..d2c4b07 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/SSHFP.py @@ -0,0 +1,68 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2005-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import binascii +import struct + +import dns.immutable +import dns.rdata +import dns.rdatatype + + +@dns.immutable.immutable +class SSHFP(dns.rdata.Rdata): + """SSHFP record""" + + # See RFC 4255 + + __slots__ = ["algorithm", "fp_type", "fingerprint"] + + def __init__(self, rdclass, rdtype, algorithm, fp_type, fingerprint): + super().__init__(rdclass, rdtype) + self.algorithm = self._as_uint8(algorithm) + self.fp_type = self._as_uint8(fp_type) + self.fingerprint = self._as_bytes(fingerprint, True) + + def to_text(self, origin=None, relativize=True, **kw): + kw = kw.copy() + chunksize = kw.pop("chunksize", 128) + return "%d %d %s" % ( + self.algorithm, + self.fp_type, + dns.rdata._hexify(self.fingerprint, chunksize=chunksize, **kw), + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + algorithm = tok.get_uint8() + fp_type = tok.get_uint8() + fingerprint = tok.concatenate_remaining_identifiers().encode() + fingerprint = binascii.unhexlify(fingerprint) + return cls(rdclass, rdtype, algorithm, fp_type, fingerprint) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack("!BB", self.algorithm, self.fp_type) + file.write(header) + file.write(self.fingerprint) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct("BB") + fingerprint = parser.get_remaining() + return cls(rdclass, rdtype, header[0], header[1], fingerprint) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/TKEY.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/TKEY.py new file mode 100644 index 0000000..75f6224 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/TKEY.py @@ -0,0 +1,142 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import base64 +import struct + +import dns.exception +import dns.immutable +import dns.rdata + + +@dns.immutable.immutable +class TKEY(dns.rdata.Rdata): + """TKEY Record""" + + __slots__ = [ + "algorithm", + "inception", + "expiration", + "mode", + "error", + "key", + "other", + ] + + def __init__( + self, + rdclass, + rdtype, + algorithm, + inception, + expiration, + mode, + error, + key, + other=b"", + ): + super().__init__(rdclass, rdtype) + self.algorithm = self._as_name(algorithm) + self.inception = self._as_uint32(inception) + self.expiration = self._as_uint32(expiration) + self.mode = self._as_uint16(mode) + self.error = self._as_uint16(error) + self.key = self._as_bytes(key) + self.other = self._as_bytes(other) + + def to_text(self, origin=None, relativize=True, **kw): + _algorithm = self.algorithm.choose_relativity(origin, relativize) + text = "%s %u %u %u %u %s" % ( + str(_algorithm), + self.inception, + self.expiration, + self.mode, + self.error, + dns.rdata._base64ify(self.key, 0), + ) + if len(self.other) > 0: + text += f" {dns.rdata._base64ify(self.other, 0)}" + + return text + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + algorithm = tok.get_name(relativize=False) + inception = tok.get_uint32() + expiration = tok.get_uint32() + mode = tok.get_uint16() + error = tok.get_uint16() + key_b64 = tok.get_string().encode() + key = base64.b64decode(key_b64) + other_b64 = tok.concatenate_remaining_identifiers(True).encode() + other = base64.b64decode(other_b64) + + return cls( + rdclass, rdtype, algorithm, inception, expiration, mode, error, key, other + ) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.algorithm.to_wire(file, compress, origin) + file.write( + struct.pack("!IIHH", self.inception, self.expiration, self.mode, self.error) + ) + file.write(struct.pack("!H", len(self.key))) + file.write(self.key) + file.write(struct.pack("!H", len(self.other))) + if len(self.other) > 0: + file.write(self.other) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + algorithm = parser.get_name(origin) + inception, expiration, mode, error = parser.get_struct("!IIHH") + key = parser.get_counted_bytes(2) + other = parser.get_counted_bytes(2) + + return cls( + rdclass, rdtype, algorithm, inception, expiration, mode, error, key, other + ) + + # Constants for the mode field - from RFC 2930: + # 2.5 The Mode Field + # + # The mode field specifies the general scheme for key agreement or + # the purpose of the TKEY DNS message. Servers and resolvers + # supporting this specification MUST implement the Diffie-Hellman key + # agreement mode and the key deletion mode for queries. All other + # modes are OPTIONAL. A server supporting TKEY that receives a TKEY + # request with a mode it does not support returns the BADMODE error. 
+ # The following values of the Mode octet are defined, available, or + # reserved: + # + # Value Description + # ----- ----------- + # 0 - reserved, see section 7 + # 1 server assignment + # 2 Diffie-Hellman exchange + # 3 GSS-API negotiation + # 4 resolver assignment + # 5 key deletion + # 6-65534 - available, see section 7 + # 65535 - reserved, see section 7 + SERVER_ASSIGNMENT = 1 + DIFFIE_HELLMAN_EXCHANGE = 2 + GSSAPI_NEGOTIATION = 3 + RESOLVER_ASSIGNMENT = 4 + KEY_DELETION = 5 diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/TLSA.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/TLSA.py new file mode 100644 index 0000000..4dffc55 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/TLSA.py @@ -0,0 +1,9 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import dns.immutable +import dns.rdtypes.tlsabase + + +@dns.immutable.immutable +class TLSA(dns.rdtypes.tlsabase.TLSABase): + """TLSA record""" diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/TSIG.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/TSIG.py new file mode 100644 index 0000000..7942382 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/TSIG.py @@ -0,0 +1,160 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import base64 +import struct + +import dns.exception +import dns.immutable +import dns.rcode +import dns.rdata + + +@dns.immutable.immutable +class TSIG(dns.rdata.Rdata): + """TSIG record""" + + __slots__ = [ + "algorithm", + "time_signed", + "fudge", + "mac", + "original_id", + "error", + "other", + ] + + def __init__( + self, + rdclass, + rdtype, + algorithm, + time_signed, + fudge, + mac, + original_id, + error, + other, + ): + """Initialize a TSIG rdata. + + *rdclass*, an ``int`` is the rdataclass of the Rdata. + + *rdtype*, an ``int`` is the rdatatype of the Rdata. + + *algorithm*, a ``dns.name.Name``. + + *time_signed*, an ``int``. + + *fudge*, an ``int`. 
+ + *mac*, a ``bytes`` + + *original_id*, an ``int`` + + *error*, an ``int`` + + *other*, a ``bytes`` + """ + + super().__init__(rdclass, rdtype) + self.algorithm = self._as_name(algorithm) + self.time_signed = self._as_uint48(time_signed) + self.fudge = self._as_uint16(fudge) + self.mac = self._as_bytes(mac) + self.original_id = self._as_uint16(original_id) + self.error = dns.rcode.Rcode.make(error) + self.other = self._as_bytes(other) + + def to_text(self, origin=None, relativize=True, **kw): + algorithm = self.algorithm.choose_relativity(origin, relativize) + error = dns.rcode.to_text(self.error, True) + text = ( + f"{algorithm} {self.time_signed} {self.fudge} " + + f"{len(self.mac)} {dns.rdata._base64ify(self.mac, 0)} " + + f"{self.original_id} {error} {len(self.other)}" + ) + if self.other: + text += f" {dns.rdata._base64ify(self.other, 0)}" + return text + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + algorithm = tok.get_name(relativize=False) + time_signed = tok.get_uint48() + fudge = tok.get_uint16() + mac_len = tok.get_uint16() + mac = base64.b64decode(tok.get_string()) + if len(mac) != mac_len: + raise SyntaxError("invalid MAC") + original_id = tok.get_uint16() + error = dns.rcode.from_text(tok.get_string()) + other_len = tok.get_uint16() + if other_len > 0: + other = base64.b64decode(tok.get_string()) + if len(other) != other_len: + raise SyntaxError("invalid other data") + else: + other = b"" + return cls( + rdclass, + rdtype, + algorithm, + time_signed, + fudge, + mac, + original_id, + error, + other, + ) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.algorithm.to_wire(file, None, origin, False) + file.write( + struct.pack( + "!HIHH", + (self.time_signed >> 32) & 0xFFFF, + self.time_signed & 0xFFFFFFFF, + self.fudge, + len(self.mac), + ) + ) + file.write(self.mac) + file.write(struct.pack("!HHH", self.original_id, self.error, len(self.other))) + file.write(self.other) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + algorithm = parser.get_name() + time_signed = parser.get_uint48() + fudge = parser.get_uint16() + mac = parser.get_counted_bytes(2) + (original_id, error) = parser.get_struct("!HH") + other = parser.get_counted_bytes(2) + return cls( + rdclass, + rdtype, + algorithm, + time_signed, + fudge, + mac, + original_id, + error, + other, + ) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/TXT.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/TXT.py new file mode 100644 index 0000000..6d4dae2 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/TXT.py @@ -0,0 +1,24 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.txtbase + + +@dns.immutable.immutable +class TXT(dns.rdtypes.txtbase.TXTBase): + """TXT record""" diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/URI.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/URI.py new file mode 100644 index 0000000..2efbb30 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/URI.py @@ -0,0 +1,79 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# Copyright (C) 2015 Red Hat, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdtypes.util + + +@dns.immutable.immutable +class URI(dns.rdata.Rdata): + """URI record""" + + # see RFC 7553 + + __slots__ = ["priority", "weight", "target"] + + def __init__(self, rdclass, rdtype, priority, weight, target): + super().__init__(rdclass, rdtype) + self.priority = self._as_uint16(priority) + self.weight = self._as_uint16(weight) + self.target = self._as_bytes(target, True) + if len(self.target) == 0: + raise dns.exception.SyntaxError("URI target cannot be empty") + + def to_text(self, origin=None, relativize=True, **kw): + return '%d %d "%s"' % (self.priority, self.weight, self.target.decode()) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + priority = tok.get_uint16() + weight = tok.get_uint16() + target = tok.get().unescape() + if not (target.is_quoted_string() or target.is_identifier()): + raise dns.exception.SyntaxError("URI target must be a string") + return cls(rdclass, rdtype, priority, weight, target.value) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + two_ints = struct.pack("!HH", self.priority, self.weight) + file.write(two_ints) + file.write(self.target) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (priority, weight) = parser.get_struct("!HH") + target = parser.get_remaining() + if len(target) == 0: + raise dns.exception.FormError("URI target may not be empty") + return cls(rdclass, rdtype, priority, weight, target) + + def _processing_priority(self): + return self.priority + + def _processing_weight(self): + return self.weight + + @classmethod + def _processing_order(cls, iterable): + return dns.rdtypes.util.weighted_processing_order(iterable) diff --git 
a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/WALLET.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/WALLET.py new file mode 100644 index 0000000..ff46476 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/WALLET.py @@ -0,0 +1,9 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import dns.immutable +import dns.rdtypes.txtbase + + +@dns.immutable.immutable +class WALLET(dns.rdtypes.txtbase.TXTBase): + """WALLET record""" diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/X25.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/X25.py new file mode 100644 index 0000000..2436ddb --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/X25.py @@ -0,0 +1,57 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class X25(dns.rdata.Rdata): + """X25 record""" + + # see RFC 1183 + + __slots__ = ["address"] + + def __init__(self, rdclass, rdtype, address): + super().__init__(rdclass, rdtype) + self.address = self._as_bytes(address, True, 255) + + def to_text(self, origin=None, relativize=True, **kw): + return f'"{dns.rdata._escapify(self.address)}"' + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + address = tok.get_string() + return cls(rdclass, rdtype, address) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + l = len(self.address) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.address) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + address = parser.get_counted_bytes() + return cls(rdclass, rdtype, address) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/ZONEMD.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/ZONEMD.py new file mode 100644 index 0000000..c90e3ee --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/ZONEMD.py @@ -0,0 +1,66 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import binascii +import struct + +import dns.immutable +import dns.rdata +import dns.rdatatype +import dns.zonetypes + + +@dns.immutable.immutable +class ZONEMD(dns.rdata.Rdata): + """ZONEMD record""" + + # See RFC 8976 + + __slots__ = ["serial", "scheme", "hash_algorithm", "digest"] + + def __init__(self, rdclass, rdtype, serial, scheme, hash_algorithm, digest): + super().__init__(rdclass, rdtype) + self.serial = self._as_uint32(serial) + self.scheme = dns.zonetypes.DigestScheme.make(scheme) + self.hash_algorithm = 
dns.zonetypes.DigestHashAlgorithm.make(hash_algorithm) + self.digest = self._as_bytes(digest) + + if self.scheme == 0: # reserved, RFC 8976 Sec. 5.2 + raise ValueError("scheme 0 is reserved") + if self.hash_algorithm == 0: # reserved, RFC 8976 Sec. 5.3 + raise ValueError("hash_algorithm 0 is reserved") + + hasher = dns.zonetypes._digest_hashers.get(self.hash_algorithm) + if hasher and hasher().digest_size != len(self.digest): + raise ValueError("digest length inconsistent with hash algorithm") + + def to_text(self, origin=None, relativize=True, **kw): + kw = kw.copy() + chunksize = kw.pop("chunksize", 128) + return "%d %d %d %s" % ( + self.serial, + self.scheme, + self.hash_algorithm, + dns.rdata._hexify(self.digest, chunksize=chunksize, **kw), + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + serial = tok.get_uint32() + scheme = tok.get_uint8() + hash_algorithm = tok.get_uint8() + digest = tok.concatenate_remaining_identifiers().encode() + digest = binascii.unhexlify(digest) + return cls(rdclass, rdtype, serial, scheme, hash_algorithm, digest) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack("!IBB", self.serial, self.scheme, self.hash_algorithm) + file.write(header) + file.write(self.digest) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct("!IBB") + digest = parser.get_remaining() + return cls(rdclass, rdtype, header[0], header[1], header[2], digest) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__init__.py b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__init__.py new file mode 100644 index 0000000..647b215 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__init__.py @@ -0,0 +1,70 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
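
ZONEMD's constructor cross-checks the digest length against the declared hash algorithm via the hashers registered in `dns.zonetypes`. RFC 8976 assigns hash algorithm 1 to SHA-384 and 2 to SHA-512, so the same consistency check can be sketched with only `hashlib`:

```python
import hashlib

# RFC 8976 hash algorithms: 1 = SHA-384 (48 bytes), 2 = SHA-512 (64 bytes).
_digest_sizes = {1: hashlib.sha384().digest_size, 2: hashlib.sha512().digest_size}
assert _digest_sizes == {1: 48, 2: 64}

def check_digest(hash_algorithm: int, digest: bytes) -> None:
    expected = _digest_sizes.get(hash_algorithm)
    if expected is not None and expected != len(digest):
        raise ValueError("digest length inconsistent with hash algorithm")

check_digest(1, b"\x00" * 48)  # consistent: SHA-384 digest is 48 bytes
check_digest(3, b"\x00" * 10)  # unknown algorithm: length is not checked
```
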
+ +"""Class ANY (generic) rdata type classes.""" + +__all__ = [ + "AFSDB", + "AMTRELAY", + "AVC", + "CAA", + "CDNSKEY", + "CDS", + "CERT", + "CNAME", + "CSYNC", + "DLV", + "DNAME", + "DNSKEY", + "DS", + "EUI48", + "EUI64", + "GPOS", + "HINFO", + "HIP", + "ISDN", + "L32", + "L64", + "LOC", + "LP", + "MX", + "NID", + "NINFO", + "NS", + "NSEC", + "NSEC3", + "NSEC3PARAM", + "OPENPGPKEY", + "OPT", + "PTR", + "RESINFO", + "RP", + "RRSIG", + "RT", + "SMIMEA", + "SOA", + "SPF", + "SSHFP", + "TKEY", + "TLSA", + "TSIG", + "TXT", + "URI", + "WALLET", + "X25", + "ZONEMD", +] diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/AFSDB.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/AFSDB.cpython-311.pyc new file mode 100644 index 0000000..c7c9edb Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/AFSDB.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/AMTRELAY.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/AMTRELAY.cpython-311.pyc new file mode 100644 index 0000000..041b918 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/AMTRELAY.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/AVC.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/AVC.cpython-311.pyc new file mode 100644 index 0000000..31df52e Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/AVC.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/CAA.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/CAA.cpython-311.pyc new file mode 100644 index 0000000..caa3606 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/CAA.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/CDNSKEY.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/CDNSKEY.cpython-311.pyc new file mode 100644 index 0000000..ba97211 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/CDNSKEY.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/CDS.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/CDS.cpython-311.pyc new file mode 100644 index 0000000..f658e34 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/CDS.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/CERT.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/CERT.cpython-311.pyc new file mode 100644 index 0000000..1501037 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/CERT.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/CNAME.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/CNAME.cpython-311.pyc new file mode 100644 index 0000000..97805f0 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/CNAME.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/CSYNC.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/ANY/__pycache__/CSYNC.cpython-311.pyc new file mode 100644 index 
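The generic rdata types exported above are normally obtained through dnspython's factory functions rather than instantiated directly. A minimal sketch of the text round trip, assuming only a standard dnspython install (the MX record text is an arbitrary example):

import dns.rdata

# Parse zone-file text into an rdata object; "IN"/"MX" select the class and type.
rd = dns.rdata.from_text("IN", "MX", "10 mail.example.com.")
print(rd.preference)  # 10
print(rd.exchange)    # mail.example.com.
print(rd.to_text())   # round-trips back to "10 mail.example.com."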
diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/CH/A.py b/venv/lib/python3.11/site-packages/dns/rdtypes/CH/A.py new file mode 100644 index 0000000..832e8d3 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/CH/A.py @@ -0,0 +1,59 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.immutable +import dns.rdtypes.mxbase + + +@dns.immutable.immutable +class A(dns.rdata.Rdata): + """A record for Chaosnet""" + + # domain: the domain of the address + # address: the 16-bit address + + __slots__ = ["domain", "address"] + + def __init__(self, rdclass, rdtype, domain, address): + super().__init__(rdclass, rdtype) + self.domain = self._as_name(domain) + self.address = self._as_uint16(address) + + def to_text(self, origin=None, relativize=True, **kw): + domain = self.domain.choose_relativity(origin, relativize) + return f"{domain} {self.address:o}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + domain = tok.get_name(origin, relativize, relativize_to) + address = tok.get_uint16(base=8) + return cls(rdclass, rdtype, domain, address) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.domain.to_wire(file, compress, origin, canonicalize) + pref = struct.pack("!H", self.address) + file.write(pref) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + domain = parser.get_name(origin) + address = parser.get_uint16() + return cls(rdclass, rdtype, domain, address) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/CH/__init__.py b/venv/lib/python3.11/site-packages/dns/rdtypes/CH/__init__.py new file mode 100644 index 0000000..0760c26 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/CH/__init__.py @@ -0,0 +1,22 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS.
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Class CH rdata type classes.""" + +__all__ = [ + "A", +] diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/A.py b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/A.py new file mode 100644 index 0000000..e09d611 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/A.py @@ -0,0 +1,51 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.exception +import dns.immutable +import dns.ipv4 +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class A(dns.rdata.Rdata): + """A record.""" + + __slots__ = ["address"] + + def __init__(self, rdclass, rdtype, address): + super().__init__(rdclass, rdtype) + self.address = self._as_ipv4_address(address) + + def to_text(self, origin=None, relativize=True, **kw): + return self.address + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + address = tok.get_identifier() + return cls(rdclass, rdtype, address) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(dns.ipv4.inet_aton(self.address)) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + address = parser.get_remaining() + return cls(rdclass, rdtype, address) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/AAAA.py b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/AAAA.py new file mode 100644 index 0000000..0cd139e --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/AAAA.py @@ -0,0 +1,51 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.exception +import dns.immutable +import dns.ipv6 +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class AAAA(dns.rdata.Rdata): + """AAAA record.""" + + __slots__ = ["address"] + + def __init__(self, rdclass, rdtype, address): + super().__init__(rdclass, rdtype) + self.address = self._as_ipv6_address(address) + + def to_text(self, origin=None, relativize=True, **kw): + return self.address + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + address = tok.get_identifier() + return cls(rdclass, rdtype, address) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(dns.ipv6.inet_aton(self.address)) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + address = parser.get_remaining() + return cls(rdclass, rdtype, address) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/APL.py b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/APL.py new file mode 100644 index 0000000..44cb3fe --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/APL.py @@ -0,0 +1,150 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +import binascii +import codecs +import struct + +import dns.exception +import dns.immutable +import dns.ipv4 +import dns.ipv6 +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class APLItem: + """An APL list item.""" + + __slots__ = ["family", "negation", "address", "prefix"] + + def __init__(self, family, negation, address, prefix): + self.family = dns.rdata.Rdata._as_uint16(family) + self.negation = dns.rdata.Rdata._as_bool(negation) + if self.family == 1: + self.address = dns.rdata.Rdata._as_ipv4_address(address) + self.prefix = dns.rdata.Rdata._as_int(prefix, 0, 32) + elif self.family == 2: + self.address = dns.rdata.Rdata._as_ipv6_address(address) + self.prefix = dns.rdata.Rdata._as_int(prefix, 0, 128) + else: + self.address = dns.rdata.Rdata._as_bytes(address, max_length=127) + self.prefix = dns.rdata.Rdata._as_uint8(prefix) + + def __str__(self): + if self.negation: + return "!%d:%s/%s" % (self.family, self.address, self.prefix) + else: + return "%d:%s/%s" % (self.family, self.address, self.prefix) + + def to_wire(self, file): + if self.family == 1: + address = dns.ipv4.inet_aton(self.address) + elif self.family == 2: + address = dns.ipv6.inet_aton(self.address) + else: + address = binascii.unhexlify(self.address) + # + # Truncate least significant zero bytes. + # + last = 0 + for i in range(len(address) - 1, -1, -1): + if address[i] != 0: + last = i + 1 + break + address = address[0:last] + l = len(address) + assert l < 128 + if self.negation: + l |= 0x80 + header = struct.pack("!HBB", self.family, self.prefix, l) + file.write(header) + file.write(address) + + +@dns.immutable.immutable +class APL(dns.rdata.Rdata): + """APL record.""" + + # see: RFC 3123 + + __slots__ = ["items"] + + def __init__(self, rdclass, rdtype, items): + super().__init__(rdclass, rdtype) + for item in items: + if not isinstance(item, APLItem): + raise ValueError("item not an APLItem") + self.items = tuple(items) + + def to_text(self, origin=None, relativize=True, **kw): + return " ".join(map(str, self.items)) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + items = [] + for token in tok.get_remaining(): + item = token.unescape().value + if item[0] == "!": + negation = True + item = item[1:] + else: + negation = False + (family, rest) = item.split(":", 1) + family = int(family) + (address, prefix) = rest.split("/", 1) + prefix = int(prefix) + item = APLItem(family, negation, address, prefix) + items.append(item) + + return cls(rdclass, rdtype, items) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + for item in self.items: + item.to_wire(file) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + items = [] + while parser.remaining() > 0: + header = parser.get_struct("!HBB") + afdlen = header[2] + if afdlen > 127: + negation = True + afdlen -= 128 + else: + negation = False + address = parser.get_bytes(afdlen) + l = len(address) + if header[0] == 1: + if l < 4: + address += b"\x00" * (4 - l) + elif header[0] == 2: + if l < 16: + address += b"\x00" * (16 - l) + else: + # + # This isn't really right according to the RFC, but it + # seems better than throwing an exception + # + address = codecs.encode(address, "hex_codec") + item = APLItem(header[0], negation, address, header[1]) + items.append(item) + return cls(rdclass, rdtype, items) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/DHCID.py 
b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/DHCID.py new file mode 100644 index 0000000..723492f --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/DHCID.py @@ -0,0 +1,54 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import base64 + +import dns.exception +import dns.immutable +import dns.rdata + + +@dns.immutable.immutable +class DHCID(dns.rdata.Rdata): + """DHCID record""" + + # see: RFC 4701 + + __slots__ = ["data"] + + def __init__(self, rdclass, rdtype, data): + super().__init__(rdclass, rdtype) + self.data = self._as_bytes(data) + + def to_text(self, origin=None, relativize=True, **kw): + return dns.rdata._base64ify(self.data, **kw) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + b64 = tok.concatenate_remaining_identifiers().encode() + data = base64.b64decode(b64) + return cls(rdclass, rdtype, data) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(self.data) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + data = parser.get_remaining() + return cls(rdclass, rdtype, data) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/HTTPS.py b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/HTTPS.py new file mode 100644 index 0000000..15464cb --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/HTTPS.py @@ -0,0 +1,9 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import dns.immutable +import dns.rdtypes.svcbbase + + +@dns.immutable.immutable +class HTTPS(dns.rdtypes.svcbbase.SVCBBase): + """HTTPS record""" diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/IPSECKEY.py b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/IPSECKEY.py new file mode 100644 index 0000000..e3a6615 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/IPSECKEY.py @@ -0,0 +1,91 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import base64 +import struct + +import dns.exception +import dns.immutable +import dns.rdtypes.util + + +class Gateway(dns.rdtypes.util.Gateway): + name = "IPSECKEY gateway" + + +@dns.immutable.immutable +class IPSECKEY(dns.rdata.Rdata): + """IPSECKEY record""" + + # see: RFC 4025 + + __slots__ = ["precedence", "gateway_type", "algorithm", "gateway", "key"] + + def __init__( + self, rdclass, rdtype, precedence, gateway_type, algorithm, gateway, key + ): + super().__init__(rdclass, rdtype) + gateway = Gateway(gateway_type, gateway) + self.precedence = self._as_uint8(precedence) + self.gateway_type = gateway.type + self.algorithm = self._as_uint8(algorithm) + self.gateway = gateway.gateway + self.key = self._as_bytes(key) + + def to_text(self, origin=None, relativize=True, **kw): + gateway = Gateway(self.gateway_type, self.gateway).to_text(origin, relativize) + return "%d %d %d %s %s" % ( + self.precedence, + self.gateway_type, + self.algorithm, + gateway, + dns.rdata._base64ify(self.key, **kw), + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + precedence = tok.get_uint8() + gateway_type = tok.get_uint8() + algorithm = tok.get_uint8() + gateway = Gateway.from_text( + gateway_type, tok, origin, relativize, relativize_to + ) + b64 = tok.concatenate_remaining_identifiers().encode() + key = base64.b64decode(b64) + return cls( + rdclass, rdtype, precedence, gateway_type, algorithm, gateway.gateway, key + ) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack("!BBB", self.precedence, self.gateway_type, self.algorithm) + file.write(header) + Gateway(self.gateway_type, self.gateway).to_wire( + file, compress, origin, canonicalize + ) + file.write(self.key) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct("!BBB") + gateway_type = header[1] + gateway = Gateway.from_wire_parser(gateway_type, parser, origin) + key = parser.get_remaining() + return cls( + rdclass, rdtype, header[0], gateway_type, header[2], gateway.gateway, key + ) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/KX.py b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/KX.py new file mode 100644 index 0000000..6073df4 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/KX.py @@ -0,0 +1,24 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.mxbase + + +@dns.immutable.immutable +class KX(dns.rdtypes.mxbase.UncompressedDowncasingMX): + """KX record""" diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/NAPTR.py b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/NAPTR.py new file mode 100644 index 0000000..195d1cb --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/NAPTR.py @@ -0,0 +1,110 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdtypes.util + + +def _write_string(file, s): + l = len(s) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(s) + + +@dns.immutable.immutable +class NAPTR(dns.rdata.Rdata): + """NAPTR record""" + + # see: RFC 3403 + + __slots__ = ["order", "preference", "flags", "service", "regexp", "replacement"] + + def __init__( + self, rdclass, rdtype, order, preference, flags, service, regexp, replacement + ): + super().__init__(rdclass, rdtype) + self.flags = self._as_bytes(flags, True, 255) + self.service = self._as_bytes(service, True, 255) + self.regexp = self._as_bytes(regexp, True, 255) + self.order = self._as_uint16(order) + self.preference = self._as_uint16(preference) + self.replacement = self._as_name(replacement) + + def to_text(self, origin=None, relativize=True, **kw): + replacement = self.replacement.choose_relativity(origin, relativize) + return '%d %d "%s" "%s" "%s" %s' % ( + self.order, + self.preference, + dns.rdata._escapify(self.flags), + dns.rdata._escapify(self.service), + dns.rdata._escapify(self.regexp), + replacement, + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + order = tok.get_uint16() + preference = tok.get_uint16() + flags = tok.get_string() + service = tok.get_string() + regexp = tok.get_string() + replacement = tok.get_name(origin, relativize, relativize_to) + return cls( + rdclass, rdtype, order, preference, flags, service, regexp, replacement + ) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + two_ints = struct.pack("!HH", self.order, self.preference) + file.write(two_ints) + _write_string(file, self.flags) + _write_string(file, self.service) + _write_string(file, self.regexp) + self.replacement.to_wire(file, compress, origin, 
canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (order, preference) = parser.get_struct("!HH") + strings = [] + for _ in range(3): + s = parser.get_counted_bytes() + strings.append(s) + replacement = parser.get_name(origin) + return cls( + rdclass, + rdtype, + order, + preference, + strings[0], + strings[1], + strings[2], + replacement, + ) + + def _processing_priority(self): + return (self.order, self.preference) + + @classmethod + def _processing_order(cls, iterable): + return dns.rdtypes.util.priority_processing_order(iterable) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/NSAP.py b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/NSAP.py new file mode 100644 index 0000000..d55edb7 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/NSAP.py @@ -0,0 +1,60 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import binascii + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class NSAP(dns.rdata.Rdata): + """NSAP record.""" + + # see: RFC 1706 + + __slots__ = ["address"] + + def __init__(self, rdclass, rdtype, address): + super().__init__(rdclass, rdtype) + self.address = self._as_bytes(address) + + def to_text(self, origin=None, relativize=True, **kw): + return f"0x{binascii.hexlify(self.address).decode()}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + address = tok.get_string() + if address[0:2] != "0x": + raise dns.exception.SyntaxError("string does not start with 0x") + address = address[2:].replace(".", "") + if len(address) % 2 != 0: + raise dns.exception.SyntaxError("hexstring has odd length") + address = binascii.unhexlify(address.encode()) + return cls(rdclass, rdtype, address) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(self.address) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + address = parser.get_remaining() + return cls(rdclass, rdtype, address) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/NSAP_PTR.py b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/NSAP_PTR.py new file mode 100644 index 0000000..ce1c663 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/NSAP_PTR.py @@ -0,0 +1,24 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. 
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.nsbase + + +@dns.immutable.immutable +class NSAP_PTR(dns.rdtypes.nsbase.UncompressedNS): + """NSAP-PTR record""" diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/PX.py b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/PX.py new file mode 100644 index 0000000..cdca153 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/PX.py @@ -0,0 +1,73 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +import struct + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdtypes.util + + +@dns.immutable.immutable +class PX(dns.rdata.Rdata): + """PX record.""" + + # see: RFC 2163 + + __slots__ = ["preference", "map822", "mapx400"] + + def __init__(self, rdclass, rdtype, preference, map822, mapx400): + super().__init__(rdclass, rdtype) + self.preference = self._as_uint16(preference) + self.map822 = self._as_name(map822) + self.mapx400 = self._as_name(mapx400) + + def to_text(self, origin=None, relativize=True, **kw): + map822 = self.map822.choose_relativity(origin, relativize) + mapx400 = self.mapx400.choose_relativity(origin, relativize) + return "%d %s %s" % (self.preference, map822, mapx400) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + preference = tok.get_uint16() + map822 = tok.get_name(origin, relativize, relativize_to) + mapx400 = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, preference, map822, mapx400) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + pref = struct.pack("!H", self.preference) + file.write(pref) + self.map822.to_wire(file, None, origin, canonicalize) + self.mapx400.to_wire(file, None, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + preference = parser.get_uint16() + map822 = parser.get_name(origin) + mapx400 = parser.get_name(origin) + return cls(rdclass, rdtype, preference, map822, mapx400) + + def _processing_priority(self): + return self.preference + + @classmethod + def _processing_order(cls, iterable): + return dns.rdtypes.util.priority_processing_order(iterable) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/SRV.py b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/SRV.py new file mode 100644 index 0000000..5adef98 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/SRV.py @@ -0,0 +1,75 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +import struct + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdtypes.util + + +@dns.immutable.immutable +class SRV(dns.rdata.Rdata): + """SRV record""" + + # see: RFC 2782 + + __slots__ = ["priority", "weight", "port", "target"] + + def __init__(self, rdclass, rdtype, priority, weight, port, target): + super().__init__(rdclass, rdtype) + self.priority = self._as_uint16(priority) + self.weight = self._as_uint16(weight) + self.port = self._as_uint16(port) + self.target = self._as_name(target) + + def to_text(self, origin=None, relativize=True, **kw): + target = self.target.choose_relativity(origin, relativize) + return "%d %d %d %s" % (self.priority, self.weight, self.port, target) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + priority = tok.get_uint16() + weight = tok.get_uint16() + port = tok.get_uint16() + target = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, priority, weight, port, target) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + three_ints = struct.pack("!HHH", self.priority, self.weight, self.port) + file.write(three_ints) + self.target.to_wire(file, compress, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (priority, weight, port) = parser.get_struct("!HHH") + target = parser.get_name(origin) + return cls(rdclass, rdtype, priority, weight, port, target) + + def _processing_priority(self): + return self.priority + + def _processing_weight(self): + return self.weight + + @classmethod + def _processing_order(cls, iterable): + return dns.rdtypes.util.weighted_processing_order(iterable) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/SVCB.py b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/SVCB.py new file mode 100644 index 0000000..ff3e932 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/SVCB.py @@ -0,0 +1,9 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import dns.immutable +import dns.rdtypes.svcbbase + + +@dns.immutable.immutable +class SVCB(dns.rdtypes.svcbbase.SVCBBase): + """SVCB record""" diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/WKS.py b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/WKS.py new file mode 100644 index 0000000..881a784 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/WKS.py @@ -0,0 +1,100 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +import socket +import struct + +import dns.immutable +import dns.ipv4 +import dns.rdata + +try: + _proto_tcp = socket.getprotobyname("tcp") + _proto_udp = socket.getprotobyname("udp") +except OSError: + # Fall back to defaults in case /etc/protocols is unavailable. + _proto_tcp = 6 + _proto_udp = 17 + + +@dns.immutable.immutable +class WKS(dns.rdata.Rdata): + """WKS record""" + + # see: RFC 1035 + + __slots__ = ["address", "protocol", "bitmap"] + + def __init__(self, rdclass, rdtype, address, protocol, bitmap): + super().__init__(rdclass, rdtype) + self.address = self._as_ipv4_address(address) + self.protocol = self._as_uint8(protocol) + self.bitmap = self._as_bytes(bitmap) + + def to_text(self, origin=None, relativize=True, **kw): + bits = [] + for i, byte in enumerate(self.bitmap): + for j in range(0, 8): + if byte & (0x80 >> j): + bits.append(str(i * 8 + j)) + text = " ".join(bits) + return "%s %d %s" % (self.address, self.protocol, text) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + address = tok.get_string() + protocol = tok.get_string() + if protocol.isdigit(): + protocol = int(protocol) + else: + protocol = socket.getprotobyname(protocol) + bitmap = bytearray() + for token in tok.get_remaining(): + value = token.unescape().value + if value.isdigit(): + serv = int(value) + else: + if protocol != _proto_udp and protocol != _proto_tcp: + raise NotImplementedError("protocol must be TCP or UDP") + if protocol == _proto_udp: + protocol_text = "udp" + else: + protocol_text = "tcp" + serv = socket.getservbyname(value, protocol_text) + i = serv // 8 + l = len(bitmap) + if l < i + 1: + for _ in range(l, i + 1): + bitmap.append(0) + bitmap[i] = bitmap[i] | (0x80 >> (serv % 8)) + bitmap = dns.rdata._truncate_bitmap(bitmap) + return cls(rdclass, rdtype, address, protocol, bitmap) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(dns.ipv4.inet_aton(self.address)) + protocol = struct.pack("!B", self.protocol) + file.write(protocol) + file.write(self.bitmap) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + address = parser.get_bytes(4) + protocol = parser.get_uint8() + bitmap = parser.get_remaining() + return cls(rdclass, rdtype, address, protocol, bitmap) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__init__.py b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__init__.py new file mode 100644 index 0000000..dcec4dd --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__init__.py @@ -0,0 +1,35 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +"""Class IN rdata type classes.""" + +__all__ = [ + "A", + "AAAA", + "APL", + "DHCID", + "HTTPS", + "IPSECKEY", + "KX", + "NAPTR", + "NSAP", + "NSAP_PTR", + "PX", + "SRV", + "SVCB", + "WKS", +] diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/A.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/A.cpython-311.pyc new file mode 100644 index 0000000..58d7aab Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/A.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/AAAA.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/AAAA.cpython-311.pyc new file mode 100644 index 0000000..3eac270 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/AAAA.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/APL.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/APL.cpython-311.pyc new file mode 100644 index 0000000..4c3d16f Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/APL.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/DHCID.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/DHCID.cpython-311.pyc new file mode 100644 index 0000000..c5bcfc0 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/DHCID.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/HTTPS.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/HTTPS.cpython-311.pyc new file mode 100644 index 0000000..28b8b79 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/HTTPS.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/IPSECKEY.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/IPSECKEY.cpython-311.pyc new file mode 100644 index 0000000..b4ec9c0 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/IPSECKEY.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/KX.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/KX.cpython-311.pyc new file mode 100644 index 0000000..4ab1c2c Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/KX.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/NAPTR.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/NAPTR.cpython-311.pyc new file mode 100644 index 0000000..354541f Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/NAPTR.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/NSAP.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/NSAP.cpython-311.pyc new file mode 100644 index 0000000..bbe4d2f Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/NSAP.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/NSAP_PTR.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/IN/__pycache__/NSAP_PTR.cpython-311.pyc new file mode 100644 index 0000000..2788565 Binary files /dev/null and 
diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/__init__.py b/venv/lib/python3.11/site-packages/dns/rdtypes/__init__.py new file mode 100644 index 0000000..3997f84 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/__init__.py @@ -0,0 +1,33 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ +"""DNS rdata type classes""" + +__all__ = [ + "ANY", + "IN", + "CH", + "dnskeybase", + "dsbase", + "euibase", + "mxbase", + "nsbase", + "svcbbase", + "tlsabase", + "txtbase", + "util", +] diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..54b086d Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/dnskeybase.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/dnskeybase.cpython-311.pyc new file mode 100644 index 0000000..d37fe27 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/dnskeybase.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/dsbase.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/dsbase.cpython-311.pyc new file mode 100644 index 0000000..8b1f838 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/dsbase.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/euibase.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/euibase.cpython-311.pyc new file mode 100644 index 0000000..b05d8cc Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/euibase.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/mxbase.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/mxbase.cpython-311.pyc new file mode 100644 index 0000000..45f9a65 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/mxbase.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/nsbase.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/nsbase.cpython-311.pyc new file mode 100644 index 0000000..9524cda Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/nsbase.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/svcbbase.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/svcbbase.cpython-311.pyc new file mode 100644 index 0000000..8ad8bd5 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/svcbbase.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/tlsabase.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/tlsabase.cpython-311.pyc new file mode 100644 index 0000000..3c73a59 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/tlsabase.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/txtbase.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/txtbase.cpython-311.pyc new file mode 100644 index 0000000..9ea25ee Binary files /dev/null and b/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/txtbase.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/util.cpython-311.pyc b/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/util.cpython-311.pyc new file mode 100644 index 0000000..b8e4908 Binary files /dev/null and 
b/venv/lib/python3.11/site-packages/dns/rdtypes/__pycache__/util.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/dnskeybase.py b/venv/lib/python3.11/site-packages/dns/rdtypes/dnskeybase.py new file mode 100644 index 0000000..db300f8 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/dnskeybase.py @@ -0,0 +1,87 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import base64 +import enum +import struct + +import dns.dnssectypes +import dns.exception +import dns.immutable +import dns.rdata + +# wildcard import +__all__ = ["SEP", "REVOKE", "ZONE"] # noqa: F822 + + +class Flag(enum.IntFlag): + SEP = 0x0001 + REVOKE = 0x0080 + ZONE = 0x0100 + + +@dns.immutable.immutable +class DNSKEYBase(dns.rdata.Rdata): + """Base class for rdata that is like a DNSKEY record""" + + __slots__ = ["flags", "protocol", "algorithm", "key"] + + def __init__(self, rdclass, rdtype, flags, protocol, algorithm, key): + super().__init__(rdclass, rdtype) + self.flags = Flag(self._as_uint16(flags)) + self.protocol = self._as_uint8(protocol) + self.algorithm = dns.dnssectypes.Algorithm.make(algorithm) + self.key = self._as_bytes(key) + + def to_text(self, origin=None, relativize=True, **kw): + return "%d %d %d %s" % ( + self.flags, + self.protocol, + self.algorithm, + dns.rdata._base64ify(self.key, **kw), + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + flags = tok.get_uint16() + protocol = tok.get_uint8() + algorithm = tok.get_string() + b64 = tok.concatenate_remaining_identifiers().encode() + key = base64.b64decode(b64) + return cls(rdclass, rdtype, flags, protocol, algorithm, key) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack("!HBB", self.flags, self.protocol, self.algorithm) + file.write(header) + file.write(self.key) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct("!HBB") + key = parser.get_remaining() + return cls(rdclass, rdtype, header[0], header[1], header[2], key) + + +### BEGIN generated Flag constants + +SEP = Flag.SEP +REVOKE = Flag.REVOKE +ZONE = Flag.ZONE + +### END generated Flag constants diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/dsbase.py b/venv/lib/python3.11/site-packages/dns/rdtypes/dsbase.py new file mode 100644 index 0000000..cd21f02 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/dsbase.py @@ -0,0 +1,85 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2010, 2011 Nominum, Inc. 
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import binascii +import struct + +import dns.dnssectypes +import dns.immutable +import dns.rdata +import dns.rdatatype + + +@dns.immutable.immutable +class DSBase(dns.rdata.Rdata): + """Base class for rdata that is like a DS record""" + + __slots__ = ["key_tag", "algorithm", "digest_type", "digest"] + + # Digest types registry: + # https://www.iana.org/assignments/ds-rr-types/ds-rr-types.xhtml + _digest_length_by_type = { + 1: 20, # SHA-1, RFC 3658 Sec. 2.4 + 2: 32, # SHA-256, RFC 4509 Sec. 2.2 + 3: 32, # GOST R 34.11-94, RFC 5933 Sec. 4 in conjunction with RFC 4490 Sec. 2.1 + 4: 48, # SHA-384, RFC 6605 Sec. 2 + } + + def __init__(self, rdclass, rdtype, key_tag, algorithm, digest_type, digest): + super().__init__(rdclass, rdtype) + self.key_tag = self._as_uint16(key_tag) + self.algorithm = dns.dnssectypes.Algorithm.make(algorithm) + self.digest_type = dns.dnssectypes.DSDigest.make(self._as_uint8(digest_type)) + self.digest = self._as_bytes(digest) + try: + if len(self.digest) != self._digest_length_by_type[self.digest_type]: + raise ValueError("digest length inconsistent with digest type") + except KeyError: + if self.digest_type == 0: # reserved, RFC 3658 Sec. 2.4 + raise ValueError("digest type 0 is reserved") + + def to_text(self, origin=None, relativize=True, **kw): + kw = kw.copy() + chunksize = kw.pop("chunksize", 128) + return "%d %d %d %s" % ( + self.key_tag, + self.algorithm, + self.digest_type, + dns.rdata._hexify(self.digest, chunksize=chunksize, **kw), + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + key_tag = tok.get_uint16() + algorithm = tok.get_string() + digest_type = tok.get_uint8() + digest = tok.concatenate_remaining_identifiers().encode() + digest = binascii.unhexlify(digest) + return cls(rdclass, rdtype, key_tag, algorithm, digest_type, digest) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack("!HBB", self.key_tag, self.algorithm, self.digest_type) + file.write(header) + file.write(self.digest) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct("!HBB") + digest = parser.get_remaining() + return cls(rdclass, rdtype, header[0], header[1], header[2], digest) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/euibase.py b/venv/lib/python3.11/site-packages/dns/rdtypes/euibase.py new file mode 100644 index 0000000..a39c166 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/euibase.py @@ -0,0 +1,70 @@ +# Copyright (C) 2015 Red Hat, Inc. 
+# Author: Petr Spacek +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import binascii + +import dns.immutable +import dns.rdata + + +@dns.immutable.immutable +class EUIBase(dns.rdata.Rdata): + """EUIxx record""" + + # see: rfc7043.txt + + __slots__ = ["eui"] + # define these in subclasses + # byte_len = 6 # 0123456789ab (in hex) + # text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab + + def __init__(self, rdclass, rdtype, eui): + super().__init__(rdclass, rdtype) + self.eui = self._as_bytes(eui) + if len(self.eui) != self.byte_len: + raise dns.exception.FormError( + f"EUI{self.byte_len * 8} rdata has to have {self.byte_len} bytes" + ) + + def to_text(self, origin=None, relativize=True, **kw): + return dns.rdata._hexify(self.eui, chunksize=2, separator=b"-", **kw) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + text = tok.get_string() + if len(text) != cls.text_len: + raise dns.exception.SyntaxError( + f"Input text must have {cls.text_len} characters" + ) + for i in range(2, cls.byte_len * 3 - 1, 3): + if text[i] != "-": + raise dns.exception.SyntaxError(f"Dash expected at position {i}") + text = text.replace("-", "") + try: + data = binascii.unhexlify(text.encode()) + except (ValueError, TypeError) as ex: + raise dns.exception.SyntaxError(f"Hex decoding error: {str(ex)}") + return cls(rdclass, rdtype, data) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(self.eui) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + eui = parser.get_bytes(cls.byte_len) + return cls(rdclass, rdtype, eui) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/mxbase.py b/venv/lib/python3.11/site-packages/dns/rdtypes/mxbase.py new file mode 100644 index 0000000..6d5e3d8 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/mxbase.py @@ -0,0 +1,87 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +"""MX-like base classes.""" + +import struct + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdtypes.util + + +@dns.immutable.immutable +class MXBase(dns.rdata.Rdata): + """Base class for rdata that is like an MX record.""" + + __slots__ = ["preference", "exchange"] + + def __init__(self, rdclass, rdtype, preference, exchange): + super().__init__(rdclass, rdtype) + self.preference = self._as_uint16(preference) + self.exchange = self._as_name(exchange) + + def to_text(self, origin=None, relativize=True, **kw): + exchange = self.exchange.choose_relativity(origin, relativize) + return "%d %s" % (self.preference, exchange) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + preference = tok.get_uint16() + exchange = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, preference, exchange) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + pref = struct.pack("!H", self.preference) + file.write(pref) + self.exchange.to_wire(file, compress, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + preference = parser.get_uint16() + exchange = parser.get_name(origin) + return cls(rdclass, rdtype, preference, exchange) + + def _processing_priority(self): + return self.preference + + @classmethod + def _processing_order(cls, iterable): + return dns.rdtypes.util.priority_processing_order(iterable) + + +@dns.immutable.immutable +class UncompressedMX(MXBase): + """Base class for rdata that is like an MX record, but whose name + is not compressed when converted to DNS wire format, and whose + digestable form is not downcased.""" + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + super()._to_wire(file, None, origin, False) + + +@dns.immutable.immutable +class UncompressedDowncasingMX(MXBase): + """Base class for rdata that is like an MX record, but whose name + is not compressed when convert to DNS wire format.""" + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + super()._to_wire(file, None, origin, canonicalize) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/nsbase.py b/venv/lib/python3.11/site-packages/dns/rdtypes/nsbase.py new file mode 100644 index 0000000..904224f --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/nsbase.py @@ -0,0 +1,63 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +"""NS-like base classes.""" + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata + + +@dns.immutable.immutable +class NSBase(dns.rdata.Rdata): + """Base class for rdata that is like an NS record.""" + + __slots__ = ["target"] + + def __init__(self, rdclass, rdtype, target): + super().__init__(rdclass, rdtype) + self.target = self._as_name(target) + + def to_text(self, origin=None, relativize=True, **kw): + target = self.target.choose_relativity(origin, relativize) + return str(target) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + target = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, target) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.target.to_wire(file, compress, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + target = parser.get_name(origin) + return cls(rdclass, rdtype, target) + + +@dns.immutable.immutable +class UncompressedNS(NSBase): + """Base class for rdata that is like an NS record, but whose name + is not compressed when convert to DNS wire format, and whose + digestable form is not downcased.""" + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.target.to_wire(file, None, origin, False) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/svcbbase.py b/venv/lib/python3.11/site-packages/dns/rdtypes/svcbbase.py new file mode 100644 index 0000000..a2b15b9 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/svcbbase.py @@ -0,0 +1,585 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import base64 +import enum +import struct + +import dns.enum +import dns.exception +import dns.immutable +import dns.ipv4 +import dns.ipv6 +import dns.name +import dns.rdata +import dns.rdtypes.util +import dns.renderer +import dns.tokenizer +import dns.wire + +# Until there is an RFC, this module is experimental and may be changed in +# incompatible ways. + + +class UnknownParamKey(dns.exception.DNSException): + """Unknown SVCB ParamKey""" + + +class ParamKey(dns.enum.IntEnum): + """SVCB ParamKey""" + + MANDATORY = 0 + ALPN = 1 + NO_DEFAULT_ALPN = 2 + PORT = 3 + IPV4HINT = 4 + ECH = 5 + IPV6HINT = 6 + DOHPATH = 7 + OHTTP = 8 + + @classmethod + def _maximum(cls): + return 65535 + + @classmethod + def _short_name(cls): + return "SVCBParamKey" + + @classmethod + def _prefix(cls): + return "KEY" + + @classmethod + def _unknown_exception_class(cls): + return UnknownParamKey + + +class Emptiness(enum.IntEnum): + NEVER = 0 + ALWAYS = 1 + ALLOWED = 2 + + +def _validate_key(key): + force_generic = False + if isinstance(key, bytes): + # We decode to latin-1 so we get 0-255 as valid and do NOT interpret + # UTF-8 sequences + key = key.decode("latin-1") + if isinstance(key, str): + if key.lower().startswith("key"): + force_generic = True + if key[3:].startswith("0") and len(key) != 4: + # key has leading zeros + raise ValueError("leading zeros in key") + key = key.replace("-", "_") + return (ParamKey.make(key), force_generic) + + +def key_to_text(key): + return ParamKey.to_text(key).replace("_", "-").lower() + + +# Like rdata escapify, but escapes ',' too. 
+ +_escaped = b'",\\' + + +def _escapify(qstring): + text = "" + for c in qstring: + if c in _escaped: + text += "\\" + chr(c) + elif c >= 0x20 and c < 0x7F: + text += chr(c) + else: + text += "\\%03d" % c + return text + + +def _unescape(value): + if value == "": + return value + unescaped = b"" + l = len(value) + i = 0 + while i < l: + c = value[i] + i += 1 + if c == "\\": + if i >= l: # pragma: no cover (can't happen via tokenizer get()) + raise dns.exception.UnexpectedEnd + c = value[i] + i += 1 + if c.isdigit(): + if i >= l: + raise dns.exception.UnexpectedEnd + c2 = value[i] + i += 1 + if i >= l: + raise dns.exception.UnexpectedEnd + c3 = value[i] + i += 1 + if not (c2.isdigit() and c3.isdigit()): + raise dns.exception.SyntaxError + codepoint = int(c) * 100 + int(c2) * 10 + int(c3) + if codepoint > 255: + raise dns.exception.SyntaxError + unescaped += b"%c" % (codepoint) + continue + unescaped += c.encode() + return unescaped + + +def _split(value): + l = len(value) + i = 0 + items = [] + unescaped = b"" + while i < l: + c = value[i] + i += 1 + if c == ord("\\"): + if i >= l: # pragma: no cover (can't happen via tokenizer get()) + raise dns.exception.UnexpectedEnd + c = value[i] + i += 1 + unescaped += b"%c" % (c) + elif c == ord(","): + items.append(unescaped) + unescaped = b"" + else: + unescaped += b"%c" % (c) + items.append(unescaped) + return items + + +@dns.immutable.immutable +class Param: + """Abstract base class for SVCB parameters""" + + @classmethod + def emptiness(cls): + return Emptiness.NEVER + + +@dns.immutable.immutable +class GenericParam(Param): + """Generic SVCB parameter""" + + def __init__(self, value): + self.value = dns.rdata.Rdata._as_bytes(value, True) + + @classmethod + def emptiness(cls): + return Emptiness.ALLOWED + + @classmethod + def from_value(cls, value): + if value is None or len(value) == 0: + return None + else: + return cls(_unescape(value)) + + def to_text(self): + return '"' + dns.rdata._escapify(self.value) + '"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + value = parser.get_bytes(parser.remaining()) + if len(value) == 0: + return None + else: + return cls(value) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + file.write(self.value) + + +@dns.immutable.immutable +class MandatoryParam(Param): + def __init__(self, keys): + # check for duplicates + keys = sorted([_validate_key(key)[0] for key in keys]) + prior_k = None + for k in keys: + if k == prior_k: + raise ValueError(f"duplicate key {k:d}") + prior_k = k + if k == ParamKey.MANDATORY: + raise ValueError("listed the mandatory key as mandatory") + self.keys = tuple(keys) + + @classmethod + def from_value(cls, value): + keys = [k.encode() for k in value.split(",")] + return cls(keys) + + def to_text(self): + return '"' + ",".join([key_to_text(key) for key in self.keys]) + '"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + keys = [] + last_key = -1 + while parser.remaining() > 0: + key = parser.get_uint16() + if key < last_key: + raise dns.exception.FormError("mandatory keys not ascending") + last_key = key + keys.append(key) + return cls(keys) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + for key in self.keys: + file.write(struct.pack("!H", key)) + + +@dns.immutable.immutable +class ALPNParam(Param): + def __init__(self, ids): + self.ids = dns.rdata.Rdata._as_tuple( + ids, lambda x: dns.rdata.Rdata._as_bytes(x, True, 255, False) + ) + + @classmethod + def 
from_value(cls, value): + return cls(_split(_unescape(value))) + + def to_text(self): + value = ",".join([_escapify(id) for id in self.ids]) + return '"' + dns.rdata._escapify(value.encode()) + '"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + ids = [] + while parser.remaining() > 0: + id = parser.get_counted_bytes() + ids.append(id) + return cls(ids) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + for id in self.ids: + file.write(struct.pack("!B", len(id))) + file.write(id) + + +@dns.immutable.immutable +class NoDefaultALPNParam(Param): + # We don't ever expect to instantiate this class, but we need + # a from_value() and a from_wire_parser(), so we just return None + # from the class methods when things are OK. + + @classmethod + def emptiness(cls): + return Emptiness.ALWAYS + + @classmethod + def from_value(cls, value): + if value is None or value == "": + return None + else: + raise ValueError("no-default-alpn with non-empty value") + + def to_text(self): + raise NotImplementedError # pragma: no cover + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + if parser.remaining() != 0: + raise dns.exception.FormError + return None + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + raise NotImplementedError # pragma: no cover + + +@dns.immutable.immutable +class PortParam(Param): + def __init__(self, port): + self.port = dns.rdata.Rdata._as_uint16(port) + + @classmethod + def from_value(cls, value): + value = int(value) + return cls(value) + + def to_text(self): + return f'"{self.port}"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + port = parser.get_uint16() + return cls(port) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + file.write(struct.pack("!H", self.port)) + + +@dns.immutable.immutable +class IPv4HintParam(Param): + def __init__(self, addresses): + self.addresses = dns.rdata.Rdata._as_tuple( + addresses, dns.rdata.Rdata._as_ipv4_address + ) + + @classmethod + def from_value(cls, value): + addresses = value.split(",") + return cls(addresses) + + def to_text(self): + return '"' + ",".join(self.addresses) + '"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + addresses = [] + while parser.remaining() > 0: + ip = parser.get_bytes(4) + addresses.append(dns.ipv4.inet_ntoa(ip)) + return cls(addresses) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + for address in self.addresses: + file.write(dns.ipv4.inet_aton(address)) + + +@dns.immutable.immutable +class IPv6HintParam(Param): + def __init__(self, addresses): + self.addresses = dns.rdata.Rdata._as_tuple( + addresses, dns.rdata.Rdata._as_ipv6_address + ) + + @classmethod + def from_value(cls, value): + addresses = value.split(",") + return cls(addresses) + + def to_text(self): + return '"' + ",".join(self.addresses) + '"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + addresses = [] + while parser.remaining() > 0: + ip = parser.get_bytes(16) + addresses.append(dns.ipv6.inet_ntoa(ip)) + return cls(addresses) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + for address in self.addresses: + file.write(dns.ipv6.inet_aton(address)) + + +@dns.immutable.immutable +class ECHParam(Param): + def __init__(self, ech): + self.ech = dns.rdata.Rdata._as_bytes(ech, True) + + @classmethod + def from_value(cls, value): + if "\\" in value: + 
raise ValueError("escape in ECH value") + value = base64.b64decode(value.encode()) + return cls(value) + + def to_text(self): + b64 = base64.b64encode(self.ech).decode("ascii") + return f'"{b64}"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + value = parser.get_bytes(parser.remaining()) + return cls(value) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + file.write(self.ech) + + +@dns.immutable.immutable +class OHTTPParam(Param): + # We don't ever expect to instantiate this class, but we need + # a from_value() and a from_wire_parser(), so we just return None + # from the class methods when things are OK. + + @classmethod + def emptiness(cls): + return Emptiness.ALWAYS + + @classmethod + def from_value(cls, value): + if value is None or value == "": + return None + else: + raise ValueError("ohttp with non-empty value") + + def to_text(self): + raise NotImplementedError # pragma: no cover + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + if parser.remaining() != 0: + raise dns.exception.FormError + return None + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + raise NotImplementedError # pragma: no cover + + +_class_for_key = { + ParamKey.MANDATORY: MandatoryParam, + ParamKey.ALPN: ALPNParam, + ParamKey.NO_DEFAULT_ALPN: NoDefaultALPNParam, + ParamKey.PORT: PortParam, + ParamKey.IPV4HINT: IPv4HintParam, + ParamKey.ECH: ECHParam, + ParamKey.IPV6HINT: IPv6HintParam, + ParamKey.OHTTP: OHTTPParam, +} + + +def _validate_and_define(params, key, value): + (key, force_generic) = _validate_key(_unescape(key)) + if key in params: + raise SyntaxError(f'duplicate key "{key:d}"') + cls = _class_for_key.get(key, GenericParam) + emptiness = cls.emptiness() + if value is None: + if emptiness == Emptiness.NEVER: + raise SyntaxError("value cannot be empty") + value = cls.from_value(value) + else: + if force_generic: + value = cls.from_wire_parser(dns.wire.Parser(_unescape(value))) + else: + value = cls.from_value(value) + params[key] = value + + +@dns.immutable.immutable +class SVCBBase(dns.rdata.Rdata): + """Base class for SVCB-like records""" + + # see: draft-ietf-dnsop-svcb-https-11 + + __slots__ = ["priority", "target", "params"] + + def __init__(self, rdclass, rdtype, priority, target, params): + super().__init__(rdclass, rdtype) + self.priority = self._as_uint16(priority) + self.target = self._as_name(target) + for k, v in params.items(): + k = ParamKey.make(k) + if not isinstance(v, Param) and v is not None: + raise ValueError(f"{k:d} not a Param") + self.params = dns.immutable.Dict(params) + # Make sure any parameter listed as mandatory is present in the + # record. + mandatory = params.get(ParamKey.MANDATORY) + if mandatory: + for key in mandatory.keys: + # Note we have to say "not in" as we have None as a value + # so a get() and a not None test would be wrong. + if key not in params: + raise ValueError(f"key {key:d} declared mandatory but not present") + # The no-default-alpn parameter requires the alpn parameter. 
+ if ParamKey.NO_DEFAULT_ALPN in params: + if ParamKey.ALPN not in params: + raise ValueError("no-default-alpn present, but alpn missing") + + def to_text(self, origin=None, relativize=True, **kw): + target = self.target.choose_relativity(origin, relativize) + params = [] + for key in sorted(self.params.keys()): + value = self.params[key] + if value is None: + params.append(key_to_text(key)) + else: + kv = key_to_text(key) + "=" + value.to_text() + params.append(kv) + if len(params) > 0: + space = " " + else: + space = "" + return "%d %s%s%s" % (self.priority, target, space, " ".join(params)) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + priority = tok.get_uint16() + target = tok.get_name(origin, relativize, relativize_to) + if priority == 0: + token = tok.get() + if not token.is_eol_or_eof(): + raise SyntaxError("parameters in AliasMode") + tok.unget(token) + params = {} + while True: + token = tok.get() + if token.is_eol_or_eof(): + tok.unget(token) + break + if token.ttype != dns.tokenizer.IDENTIFIER: + raise SyntaxError("parameter is not an identifier") + equals = token.value.find("=") + if equals == len(token.value) - 1: + # 'key=', so next token should be a quoted string without + # any intervening whitespace. + key = token.value[:-1] + token = tok.get(want_leading=True) + if token.ttype != dns.tokenizer.QUOTED_STRING: + raise SyntaxError("whitespace after =") + value = token.value + elif equals > 0: + # key=value + key = token.value[:equals] + value = token.value[equals + 1 :] + elif equals == 0: + # =key + raise SyntaxError('parameter cannot start with "="') + else: + # key + key = token.value + value = None + _validate_and_define(params, key, value) + return cls(rdclass, rdtype, priority, target, params) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack("!H", self.priority)) + self.target.to_wire(file, None, origin, False) + for key in sorted(self.params): + file.write(struct.pack("!H", key)) + value = self.params[key] + with dns.renderer.prefixed_length(file, 2): + # Note that we're still writing a length of zero if the value is None + if value is not None: + value.to_wire(file, origin) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + priority = parser.get_uint16() + target = parser.get_name(origin) + if priority == 0 and parser.remaining() != 0: + raise dns.exception.FormError("parameters in AliasMode") + params = {} + prior_key = -1 + while parser.remaining() > 0: + key = parser.get_uint16() + if key < prior_key: + raise dns.exception.FormError("keys not in order") + prior_key = key + vlen = parser.get_uint16() + pcls = _class_for_key.get(key, GenericParam) + with parser.restrict_to(vlen): + value = pcls.from_wire_parser(parser, origin) + params[key] = value + return cls(rdclass, rdtype, priority, target, params) + + def _processing_priority(self): + return self.priority + + @classmethod + def _processing_order(cls, iterable): + return dns.rdtypes.util.priority_processing_order(iterable) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/tlsabase.py b/venv/lib/python3.11/site-packages/dns/rdtypes/tlsabase.py new file mode 100644 index 0000000..a059d2c --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/tlsabase.py @@ -0,0 +1,71 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2005-2007, 2009-2011 Nominum, Inc. 
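The SVCB/HTTPS parameter machinery above round-trips through `from_text()`/`to_text()`. A minimal sketch, assuming the vendored dnspython is importable from this venv (the record strings are illustrative):

```python
import dns.rdata
import dns.rdataclass
import dns.rdatatype

# ServiceMode (priority > 0) may carry parameters; both the key=value and
# key="quoted value" spellings go through the from_text() parser above.
rd = dns.rdata.from_text(
    dns.rdataclass.IN, dns.rdatatype.HTTPS, '1 . alpn="h2,h3" port=443'
)
assert rd.priority == 1
print(rd.to_text())  # 1 . alpn="h2,h3" port="443"

# AliasMode (priority 0) rejects parameters, per from_text()/from_wire_parser().
alias = dns.rdata.from_text(
    dns.rdataclass.IN, dns.rdatatype.HTTPS, "0 svc.example.net."
)
```

The concrete SVCB and HTTPS rdata types are thin subclasses of `SVCBBase`, so the same behavior applies to both.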
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import binascii +import struct + +import dns.immutable +import dns.rdata +import dns.rdatatype + + +@dns.immutable.immutable +class TLSABase(dns.rdata.Rdata): + """Base class for TLSA and SMIMEA records""" + + # see: RFC 6698 + + __slots__ = ["usage", "selector", "mtype", "cert"] + + def __init__(self, rdclass, rdtype, usage, selector, mtype, cert): + super().__init__(rdclass, rdtype) + self.usage = self._as_uint8(usage) + self.selector = self._as_uint8(selector) + self.mtype = self._as_uint8(mtype) + self.cert = self._as_bytes(cert) + + def to_text(self, origin=None, relativize=True, **kw): + kw = kw.copy() + chunksize = kw.pop("chunksize", 128) + return "%d %d %d %s" % ( + self.usage, + self.selector, + self.mtype, + dns.rdata._hexify(self.cert, chunksize=chunksize, **kw), + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + usage = tok.get_uint8() + selector = tok.get_uint8() + mtype = tok.get_uint8() + cert = tok.concatenate_remaining_identifiers().encode() + cert = binascii.unhexlify(cert) + return cls(rdclass, rdtype, usage, selector, mtype, cert) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack("!BBB", self.usage, self.selector, self.mtype) + file.write(header) + file.write(self.cert) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct("BBB") + cert = parser.get_remaining() + return cls(rdclass, rdtype, header[0], header[1], header[2], cert) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/txtbase.py b/venv/lib/python3.11/site-packages/dns/rdtypes/txtbase.py new file mode 100644 index 0000000..73db6d9 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/txtbase.py @@ -0,0 +1,106 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
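`TLSABase` is shared by the TLSA and SMIMEA rdata types. A short usage sketch under the same assumption; the hex digest is an arbitrary 32-byte sample value, not real certificate data:

```python
import dns.rdata
import dns.rdataclass
import dns.rdatatype

# usage=3 (DANE-EE), selector=1 (SubjectPublicKeyInfo), mtype=1 (SHA-256);
# the cert field is opaque bytes, hexified on output by dns.rdata._hexify().
rd = dns.rdata.from_text(
    dns.rdataclass.IN,
    dns.rdatatype.TLSA,
    "3 1 1 d2abde240d7cd3ee6b4b28c54df034b97983a1d16e8a410e4561cb106618e971",
)
assert (rd.usage, rd.selector, rd.mtype) == (3, 1, 1)
assert len(rd.cert) == 32
```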
+ +"""TXT-like base class.""" + +from typing import Any, Dict, Iterable, Optional, Tuple, Union + +import dns.exception +import dns.immutable +import dns.rdata +import dns.renderer +import dns.tokenizer + + +@dns.immutable.immutable +class TXTBase(dns.rdata.Rdata): + """Base class for rdata that is like a TXT record (see RFC 1035).""" + + __slots__ = ["strings"] + + def __init__( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + strings: Iterable[Union[bytes, str]], + ): + """Initialize a TXT-like rdata. + + *rdclass*, an ``int`` is the rdataclass of the Rdata. + + *rdtype*, an ``int`` is the rdatatype of the Rdata. + + *strings*, a tuple of ``bytes`` + """ + super().__init__(rdclass, rdtype) + self.strings: Tuple[bytes] = self._as_tuple( + strings, lambda x: self._as_bytes(x, True, 255) + ) + if len(self.strings) == 0: + raise ValueError("the list of strings must not be empty") + + def to_text( + self, + origin: Optional[dns.name.Name] = None, + relativize: bool = True, + **kw: Dict[str, Any], + ) -> str: + txt = "" + prefix = "" + for s in self.strings: + txt += f'{prefix}"{dns.rdata._escapify(s)}"' + prefix = " " + return txt + + @classmethod + def from_text( + cls, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + tok: dns.tokenizer.Tokenizer, + origin: Optional[dns.name.Name] = None, + relativize: bool = True, + relativize_to: Optional[dns.name.Name] = None, + ) -> dns.rdata.Rdata: + strings = [] + for token in tok.get_remaining(): + token = token.unescape_to_bytes() + # The 'if' below is always true in the current code, but we + # are leaving this check in in case things change some day. + if not ( + token.is_quoted_string() or token.is_identifier() + ): # pragma: no cover + raise dns.exception.SyntaxError("expected a string") + if len(token.value) > 255: + raise dns.exception.SyntaxError("string too long") + strings.append(token.value) + if len(strings) == 0: + raise dns.exception.UnexpectedEnd + return cls(rdclass, rdtype, strings) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + for s in self.strings: + with dns.renderer.prefixed_length(file, 1): + file.write(s) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + strings = [] + while parser.remaining() > 0: + s = parser.get_counted_bytes() + strings.append(s) + return cls(rdclass, rdtype, strings) diff --git a/venv/lib/python3.11/site-packages/dns/rdtypes/util.py b/venv/lib/python3.11/site-packages/dns/rdtypes/util.py new file mode 100644 index 0000000..653a0bf --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/rdtypes/util.py @@ -0,0 +1,257 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import collections +import random +import struct +from typing import Any, List + +import dns.exception +import dns.ipv4 +import dns.ipv6 +import dns.name +import dns.rdata + + +class Gateway: + """A helper class for the IPSECKEY gateway and AMTRELAY relay fields""" + + name = "" + + def __init__(self, type, gateway=None): + self.type = dns.rdata.Rdata._as_uint8(type) + self.gateway = gateway + self._check() + + @classmethod + def _invalid_type(cls, gateway_type): + return f"invalid {cls.name} type: {gateway_type}" + + def _check(self): + if self.type == 0: + if self.gateway not in (".", None): + raise SyntaxError(f"invalid {self.name} for type 0") + self.gateway = None + elif self.type == 1: + # check that it's OK + dns.ipv4.inet_aton(self.gateway) + elif self.type == 2: + # check that it's OK + dns.ipv6.inet_aton(self.gateway) + elif self.type == 3: + if not isinstance(self.gateway, dns.name.Name): + raise SyntaxError(f"invalid {self.name}; not a name") + else: + raise SyntaxError(self._invalid_type(self.type)) + + def to_text(self, origin=None, relativize=True): + if self.type == 0: + return "." + elif self.type in (1, 2): + return self.gateway + elif self.type == 3: + return str(self.gateway.choose_relativity(origin, relativize)) + else: + raise ValueError(self._invalid_type(self.type)) # pragma: no cover + + @classmethod + def from_text( + cls, gateway_type, tok, origin=None, relativize=True, relativize_to=None + ): + if gateway_type in (0, 1, 2): + gateway = tok.get_string() + elif gateway_type == 3: + gateway = tok.get_name(origin, relativize, relativize_to) + else: + raise dns.exception.SyntaxError( + cls._invalid_type(gateway_type) + ) # pragma: no cover + return cls(gateway_type, gateway) + + # pylint: disable=unused-argument + def to_wire(self, file, compress=None, origin=None, canonicalize=False): + if self.type == 0: + pass + elif self.type == 1: + file.write(dns.ipv4.inet_aton(self.gateway)) + elif self.type == 2: + file.write(dns.ipv6.inet_aton(self.gateway)) + elif self.type == 3: + self.gateway.to_wire(file, None, origin, False) + else: + raise ValueError(self._invalid_type(self.type)) # pragma: no cover + + # pylint: enable=unused-argument + + @classmethod + def from_wire_parser(cls, gateway_type, parser, origin=None): + if gateway_type == 0: + gateway = None + elif gateway_type == 1: + gateway = dns.ipv4.inet_ntoa(parser.get_bytes(4)) + elif gateway_type == 2: + gateway = dns.ipv6.inet_ntoa(parser.get_bytes(16)) + elif gateway_type == 3: + gateway = parser.get_name(origin) + else: + raise dns.exception.FormError(cls._invalid_type(gateway_type)) + return cls(gateway_type, gateway) + + +class Bitmap: + """A helper class for the NSEC/NSEC3/CSYNC type bitmaps""" + + type_name = "" + + def __init__(self, windows=None): + last_window = -1 + self.windows = windows + for window, bitmap in self.windows: + if not isinstance(window, int): + raise ValueError(f"bad {self.type_name} window type") + if window <= last_window: + raise ValueError(f"bad {self.type_name} window order") + if window > 256: + raise ValueError(f"bad {self.type_name} window number") + last_window = window + if not isinstance(bitmap, bytes): + raise ValueError(f"bad {self.type_name} octets 
type") + if len(bitmap) == 0 or len(bitmap) > 32: + raise ValueError(f"bad {self.type_name} octets") + + def to_text(self) -> str: + text = "" + for window, bitmap in self.windows: + bits = [] + for i, byte in enumerate(bitmap): + for j in range(0, 8): + if byte & (0x80 >> j): + rdtype = window * 256 + i * 8 + j + bits.append(dns.rdatatype.to_text(rdtype)) + text += " " + " ".join(bits) + return text + + @classmethod + def from_text(cls, tok: "dns.tokenizer.Tokenizer") -> "Bitmap": + rdtypes = [] + for token in tok.get_remaining(): + rdtype = dns.rdatatype.from_text(token.unescape().value) + if rdtype == 0: + raise dns.exception.SyntaxError(f"{cls.type_name} with bit 0") + rdtypes.append(rdtype) + return cls.from_rdtypes(rdtypes) + + @classmethod + def from_rdtypes(cls, rdtypes: List[dns.rdatatype.RdataType]) -> "Bitmap": + rdtypes = sorted(rdtypes) + window = 0 + octets = 0 + prior_rdtype = 0 + bitmap = bytearray(b"\0" * 32) + windows = [] + for rdtype in rdtypes: + if rdtype == prior_rdtype: + continue + prior_rdtype = rdtype + new_window = rdtype // 256 + if new_window != window: + if octets != 0: + windows.append((window, bytes(bitmap[0:octets]))) + bitmap = bytearray(b"\0" * 32) + window = new_window + offset = rdtype % 256 + byte = offset // 8 + bit = offset % 8 + octets = byte + 1 + bitmap[byte] = bitmap[byte] | (0x80 >> bit) + if octets != 0: + windows.append((window, bytes(bitmap[0:octets]))) + return cls(windows) + + def to_wire(self, file: Any) -> None: + for window, bitmap in self.windows: + file.write(struct.pack("!BB", window, len(bitmap))) + file.write(bitmap) + + @classmethod + def from_wire_parser(cls, parser: "dns.wire.Parser") -> "Bitmap": + windows = [] + while parser.remaining() > 0: + window = parser.get_uint8() + bitmap = parser.get_counted_bytes() + windows.append((window, bitmap)) + return cls(windows) + + +def _priority_table(items): + by_priority = collections.defaultdict(list) + for rdata in items: + by_priority[rdata._processing_priority()].append(rdata) + return by_priority + + +def priority_processing_order(iterable): + items = list(iterable) + if len(items) == 1: + return items + by_priority = _priority_table(items) + ordered = [] + for k in sorted(by_priority.keys()): + rdatas = by_priority[k] + random.shuffle(rdatas) + ordered.extend(rdatas) + return ordered + + +_no_weight = 0.1 + + +def weighted_processing_order(iterable): + items = list(iterable) + if len(items) == 1: + return items + by_priority = _priority_table(items) + ordered = [] + for k in sorted(by_priority.keys()): + rdatas = by_priority[k] + total = sum(rdata._processing_weight() or _no_weight for rdata in rdatas) + while len(rdatas) > 1: + r = random.uniform(0, total) + for n, rdata in enumerate(rdatas): # noqa: B007 + weight = rdata._processing_weight() or _no_weight + if weight > r: + break + r -= weight + total -= weight + ordered.append(rdata) # pylint: disable=undefined-loop-variable + del rdatas[n] # pylint: disable=undefined-loop-variable + ordered.append(rdatas[0]) + return ordered + + +def parse_formatted_hex(formatted, num_chunks, chunk_size, separator): + if len(formatted) != num_chunks * (chunk_size + 1) - 1: + raise ValueError("invalid formatted hex string") + value = b"" + for _ in range(num_chunks): + chunk = formatted[0:chunk_size] + value += int(chunk, 16).to_bytes(chunk_size // 2, "big") + formatted = formatted[chunk_size:] + if len(formatted) > 0 and formatted[0] != separator: + raise ValueError("invalid formatted hex string") + formatted = formatted[1:] + return value diff 
--git a/venv/lib/python3.11/site-packages/dns/renderer.py b/venv/lib/python3.11/site-packages/dns/renderer.py new file mode 100644 index 0000000..a77481f --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/renderer.py @@ -0,0 +1,346 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Help for building DNS wire format messages""" + +import contextlib +import io +import random +import struct +import time + +import dns.exception +import dns.tsig + +QUESTION = 0 +ANSWER = 1 +AUTHORITY = 2 +ADDITIONAL = 3 + + +@contextlib.contextmanager +def prefixed_length(output, length_length): + output.write(b"\00" * length_length) + start = output.tell() + yield + end = output.tell() + length = end - start + if length > 0: + try: + output.seek(start - length_length) + try: + output.write(length.to_bytes(length_length, "big")) + except OverflowError: + raise dns.exception.FormError + finally: + output.seek(end) + + +class Renderer: + """Helper class for building DNS wire-format messages. + + Most applications can use the higher-level L{dns.message.Message} + class and its to_wire() method to generate wire-format messages. + This class is for those applications which need finer control + over the generation of messages. + + Typical use:: + + r = dns.renderer.Renderer(id=1, flags=0x80, max_size=512) + r.add_question(qname, qtype, qclass) + r.add_rrset(dns.renderer.ANSWER, rrset_1) + r.add_rrset(dns.renderer.ANSWER, rrset_2) + r.add_rrset(dns.renderer.AUTHORITY, ns_rrset) + r.add_rrset(dns.renderer.ADDITIONAL, ad_rrset_1) + r.add_rrset(dns.renderer.ADDITIONAL, ad_rrset_2) + r.add_edns(0, 0, 4096) + r.write_header() + r.add_tsig(keyname, secret, 300, 1, 0, '', request_mac) + wire = r.get_wire() + + If padding is going to be used, then the OPT record MUST be + written after everything else in the additional section except for + the TSIG (if any). 
+ + output, an io.BytesIO, where rendering is written + + id: the message id + + flags: the message flags + + max_size: the maximum size of the message + + origin: the origin to use when rendering relative names + + compress: the compression table + + section: an int, the section currently being rendered + + counts: list of the number of RRs in each section + + mac: the MAC of the rendered message (if TSIG was used) + """ + + def __init__(self, id=None, flags=0, max_size=65535, origin=None): + """Initialize a new renderer.""" + + self.output = io.BytesIO() + if id is None: + self.id = random.randint(0, 65535) + else: + self.id = id + self.flags = flags + self.max_size = max_size + self.origin = origin + self.compress = {} + self.section = QUESTION + self.counts = [0, 0, 0, 0] + self.output.write(b"\x00" * 12) + self.mac = "" + self.reserved = 0 + self.was_padded = False + + def _rollback(self, where): + """Truncate the output buffer at offset *where*, and remove any + compression table entries that pointed beyond the truncation + point. + """ + + self.output.seek(where) + self.output.truncate() + keys_to_delete = [] + for k, v in self.compress.items(): + if v >= where: + keys_to_delete.append(k) + for k in keys_to_delete: + del self.compress[k] + + def _set_section(self, section): + """Set the renderer's current section. + + Sections must be rendered in order: QUESTION, ANSWER, AUTHORITY, + ADDITIONAL. Sections may be empty. + + Raises dns.exception.FormError if an attempt was made to set + a section value less than the current section. + """ + + if self.section != section: + if self.section > section: + raise dns.exception.FormError + self.section = section + + @contextlib.contextmanager + def _track_size(self): + start = self.output.tell() + yield start + if self.output.tell() > self.max_size: + self._rollback(start) + raise dns.exception.TooBig + + @contextlib.contextmanager + def _temporarily_seek_to(self, where): + current = self.output.tell() + try: + self.output.seek(where) + yield + finally: + self.output.seek(current) + + def add_question(self, qname, rdtype, rdclass=dns.rdataclass.IN): + """Add a question to the message.""" + + self._set_section(QUESTION) + with self._track_size(): + qname.to_wire(self.output, self.compress, self.origin) + self.output.write(struct.pack("!HH", rdtype, rdclass)) + self.counts[QUESTION] += 1 + + def add_rrset(self, section, rrset, **kw): + """Add the rrset to the specified section. + + Any keyword arguments are passed on to the rdataset's to_wire() + routine. + """ + + self._set_section(section) + with self._track_size(): + n = rrset.to_wire(self.output, self.compress, self.origin, **kw) + self.counts[section] += n + + def add_rdataset(self, section, name, rdataset, **kw): + """Add the rdataset to the specified section, using the specified + name as the owner name. + + Any keyword arguments are passed on to the rdataset's to_wire() + routine. + """ + + self._set_section(section) + with self._track_size(): + n = rdataset.to_wire(name, self.output, self.compress, self.origin, **kw) + self.counts[section] += n + + def add_opt(self, opt, pad=0, opt_size=0, tsig_size=0): + """Add *opt* to the additional section, applying padding if desired. The + padding will take the specified precomputed OPT size and TSIG size into + account.
+ + Note that we don't have a reliable way of knowing how big a GSS-TSIG digest + might be, so we might not get an even multiple of the pad in that case.""" + if pad: + ttl = opt.ttl + assert opt_size >= 11 + opt_rdata = opt[0] + size_without_padding = self.output.tell() + opt_size + tsig_size + remainder = size_without_padding % pad + if remainder: + pad = b"\x00" * (pad - remainder) + else: + pad = b"" + options = list(opt_rdata.options) + options.append(dns.edns.GenericOption(dns.edns.OptionType.PADDING, pad)) + opt = dns.message.Message._make_opt(ttl, opt_rdata.rdclass, options) + self.was_padded = True + self.add_rrset(ADDITIONAL, opt) + + def add_edns(self, edns, ednsflags, payload, options=None): + """Add an EDNS OPT record to the message.""" + + # make sure the EDNS version in ednsflags agrees with edns + ednsflags &= 0xFF00FFFF + ednsflags |= edns << 16 + opt = dns.message.Message._make_opt(ednsflags, payload, options) + self.add_opt(opt) + + def add_tsig( + self, + keyname, + secret, + fudge, + id, + tsig_error, + other_data, + request_mac, + algorithm=dns.tsig.default_algorithm, + ): + """Add a TSIG signature to the message.""" + + s = self.output.getvalue() + + if isinstance(secret, dns.tsig.Key): + key = secret + else: + key = dns.tsig.Key(keyname, secret, algorithm) + tsig = dns.message.Message._make_tsig( + keyname, algorithm, 0, fudge, b"", id, tsig_error, other_data + ) + (tsig, _) = dns.tsig.sign(s, key, tsig[0], int(time.time()), request_mac) + self._write_tsig(tsig, keyname) + + def add_multi_tsig( + self, + ctx, + keyname, + secret, + fudge, + id, + tsig_error, + other_data, + request_mac, + algorithm=dns.tsig.default_algorithm, + ): + """Add a TSIG signature to the message. Unlike add_tsig(), this can be + used for a series of consecutive DNS envelopes, e.g. for a zone + transfer over TCP [RFC2845, 4.4]. + + For the first message in the sequence, give ctx=None. For each + subsequent message, give the ctx that was returned from the + add_multi_tsig() call for the previous message.""" + + s = self.output.getvalue() + + if isinstance(secret, dns.tsig.Key): + key = secret + else: + key = dns.tsig.Key(keyname, secret, algorithm) + tsig = dns.message.Message._make_tsig( + keyname, algorithm, 0, fudge, b"", id, tsig_error, other_data + ) + (tsig, ctx) = dns.tsig.sign( + s, key, tsig[0], int(time.time()), request_mac, ctx, True + ) + self._write_tsig(tsig, keyname) + return ctx + + def _write_tsig(self, tsig, keyname): + if self.was_padded: + compress = None + else: + compress = self.compress + self._set_section(ADDITIONAL) + with self._track_size(): + keyname.to_wire(self.output, compress, self.origin) + self.output.write( + struct.pack("!HHI", dns.rdatatype.TSIG, dns.rdataclass.ANY, 0) + ) + with prefixed_length(self.output, 2): + tsig.to_wire(self.output) + + self.counts[ADDITIONAL] += 1 + with self._temporarily_seek_to(10): + self.output.write(struct.pack("!H", self.counts[ADDITIONAL])) + + def write_header(self): + """Write the DNS message header. + + Writing the DNS message header is done after all sections + have been rendered, but before the optional TSIG signature + is added.
+ """ + + with self._temporarily_seek_to(0): + self.output.write( + struct.pack( + "!HHHHHH", + self.id, + self.flags, + self.counts[0], + self.counts[1], + self.counts[2], + self.counts[3], + ) + ) + + def get_wire(self): + """Return the wire format message.""" + + return self.output.getvalue() + + def reserve(self, size: int) -> None: + """Reserve *size* bytes.""" + if size < 0: + raise ValueError("reserved amount must be non-negative") + if size > self.max_size: + raise ValueError("cannot reserve more than the maximum size") + self.reserved += size + self.max_size -= size + + def release_reserved(self) -> None: + """Release the reserved bytes.""" + self.max_size += self.reserved + self.reserved = 0 diff --git a/venv/lib/python3.11/site-packages/dns/resolver.py b/venv/lib/python3.11/site-packages/dns/resolver.py new file mode 100644 index 0000000..3ba76e3 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/resolver.py @@ -0,0 +1,2053 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +"""DNS stub resolver.""" + +import contextlib +import random +import socket +import sys +import threading +import time +import warnings +from typing import Any, Dict, Iterator, List, Optional, Sequence, Tuple, Union +from urllib.parse import urlparse + +import dns._ddr +import dns.edns +import dns.exception +import dns.flags +import dns.inet +import dns.ipv4 +import dns.ipv6 +import dns.message +import dns.name +import dns.rdata +import dns.nameserver +import dns.query +import dns.rcode +import dns.rdataclass +import dns.rdatatype +import dns.rdtypes.svcbbase +import dns.reversename +import dns.tsig + +if sys.platform == "win32": # pragma: no cover + import dns.win32util + + +class NXDOMAIN(dns.exception.DNSException): + """The DNS query name does not exist.""" + + supp_kwargs = {"qnames", "responses"} + fmt = None # we have our own __str__ implementation + + # pylint: disable=arguments-differ + + # We do this as otherwise mypy complains about unexpected keyword argument + # idna_exception + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def _check_kwargs(self, qnames, responses=None): + if not isinstance(qnames, (list, tuple, set)): + raise AttributeError("qnames must be a list, tuple or set") + if len(qnames) == 0: + raise AttributeError("qnames must contain at least one element") + if responses is None: + responses = {} + elif not isinstance(responses, dict): + raise AttributeError("responses must be a dict(qname=response)") + kwargs = dict(qnames=qnames, responses=responses) + return kwargs + + def __str__(self) -> str: + if "qnames" not in self.kwargs: + return super().__str__() + qnames = self.kwargs["qnames"] + if len(qnames) > 1: + msg = "None of DNS query names exist" + else: + msg = "The DNS query name does not exist" + qnames = ", ".join(map(str, qnames)) + return f"{msg}: {qnames}" + + @property + def canonical_name(self): + """Return the unresolved canonical name.""" + if "qnames" not in self.kwargs: + raise TypeError("parametrized exception required") + for qname in self.kwargs["qnames"]: + response = self.kwargs["responses"][qname] + try: + cname = response.canonical_name() + if cname != qname: + return cname + except Exception: # pragma: no cover + # We can just eat this exception as it means there was + # something wrong with the response. + pass + return self.kwargs["qnames"][0] + + def __add__(self, e_nx): + """Augment by results from another NXDOMAIN exception.""" + qnames0 = list(self.kwargs.get("qnames", [])) + responses0 = dict(self.kwargs.get("responses", {})) + responses1 = e_nx.kwargs.get("responses", {}) + for qname1 in e_nx.kwargs.get("qnames", []): + if qname1 not in qnames0: + qnames0.append(qname1) + if qname1 in responses1: + responses0[qname1] = responses1[qname1] + return NXDOMAIN(qnames=qnames0, responses=responses0) + + def qnames(self): + """All of the names that were tried. + + Returns a list of ``dns.name.Name``. + """ + return self.kwargs["qnames"] + + def responses(self): + """A map from queried names to their NXDOMAIN responses. + + Returns a dict mapping a ``dns.name.Name`` to a + ``dns.message.Message``. + """ + return self.kwargs["responses"] + + def response(self, qname): + """The response for query *qname*. + + Returns a ``dns.message.Message``. 
+ """ + return self.kwargs["responses"][qname] + + +class YXDOMAIN(dns.exception.DNSException): + """The DNS query name is too long after DNAME substitution.""" + + +ErrorTuple = Tuple[ + Optional[str], + bool, + int, + Union[Exception, str], + Optional[dns.message.Message], +] + + +def _errors_to_text(errors: List[ErrorTuple]) -> List[str]: + """Turn a resolution errors trace into a list of text.""" + texts = [] + for err in errors: + texts.append(f"Server {err[0]} answered {err[3]}") + return texts + + +class LifetimeTimeout(dns.exception.Timeout): + """The resolution lifetime expired.""" + + msg = "The resolution lifetime expired." + fmt = f"{msg[:-1]} after {{timeout:.3f}} seconds: {{errors}}" + supp_kwargs = {"timeout", "errors"} + + # We do this as otherwise mypy complains about unexpected keyword argument + # idna_exception + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def _fmt_kwargs(self, **kwargs): + srv_msgs = _errors_to_text(kwargs["errors"]) + return super()._fmt_kwargs( + timeout=kwargs["timeout"], errors="; ".join(srv_msgs) + ) + + +# We added more detail to resolution timeouts, but they are still +# subclasses of dns.exception.Timeout for backwards compatibility. We also +# keep dns.resolver.Timeout defined for backwards compatibility. +Timeout = LifetimeTimeout + + +class NoAnswer(dns.exception.DNSException): + """The DNS response does not contain an answer to the question.""" + + fmt = "The DNS response does not contain an answer to the question: {query}" + supp_kwargs = {"response"} + + # We do this as otherwise mypy complains about unexpected keyword argument + # idna_exception + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def _fmt_kwargs(self, **kwargs): + return super()._fmt_kwargs(query=kwargs["response"].question) + + def response(self): + return self.kwargs["response"] + + +class NoNameservers(dns.exception.DNSException): + """All nameservers failed to answer the query. + + errors: list of servers and respective errors + The type of errors is + [(server IP address, any object convertible to string)]. + Non-empty errors list will add explanatory message () + """ + + msg = "All nameservers failed to answer the query." + fmt = f"{msg[:-1]} {{query}}: {{errors}}" + supp_kwargs = {"request", "errors"} + + # We do this as otherwise mypy complains about unexpected keyword argument + # idna_exception + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def _fmt_kwargs(self, **kwargs): + srv_msgs = _errors_to_text(kwargs["errors"]) + return super()._fmt_kwargs( + query=kwargs["request"].question, errors="; ".join(srv_msgs) + ) + + +class NotAbsolute(dns.exception.DNSException): + """An absolute domain name is required but a relative name was provided.""" + + +class NoRootSOA(dns.exception.DNSException): + """There is no SOA RR at the DNS root name. This should never happen!""" + + +class NoMetaqueries(dns.exception.DNSException): + """DNS metaqueries are not allowed.""" + + +class NoResolverConfiguration(dns.exception.DNSException): + """Resolver configuration could not be read or specified no nameservers.""" + + +class Answer: + """DNS stub resolver answer. + + Instances of this class bundle up the result of a successful DNS + resolution. + + For convenience, the answer object implements much of the sequence + protocol, forwarding to its ``rrset`` attribute. E.g. + ``for a in answer`` is equivalent to ``for a in answer.rrset``. 
+ ``answer[i]`` is equivalent to ``answer.rrset[i]``, and + ``answer[i:j]`` is equivalent to ``answer.rrset[i:j]``. + + Note that CNAMEs or DNAMEs in the response may mean that answer + RRset's name might not be the query name. + """ + + def __init__( + self, + qname: dns.name.Name, + rdtype: dns.rdatatype.RdataType, + rdclass: dns.rdataclass.RdataClass, + response: dns.message.QueryMessage, + nameserver: Optional[str] = None, + port: Optional[int] = None, + ) -> None: + self.qname = qname + self.rdtype = rdtype + self.rdclass = rdclass + self.response = response + self.nameserver = nameserver + self.port = port + self.chaining_result = response.resolve_chaining() + # Copy some attributes out of chaining_result for backwards + # compatibility and convenience. + self.canonical_name = self.chaining_result.canonical_name + self.rrset = self.chaining_result.answer + self.expiration = time.time() + self.chaining_result.minimum_ttl + + def __getattr__(self, attr): # pragma: no cover + if attr == "name": + return self.rrset.name + elif attr == "ttl": + return self.rrset.ttl + elif attr == "covers": + return self.rrset.covers + elif attr == "rdclass": + return self.rrset.rdclass + elif attr == "rdtype": + return self.rrset.rdtype + else: + raise AttributeError(attr) + + def __len__(self) -> int: + return self.rrset and len(self.rrset) or 0 + + def __iter__(self) -> Iterator[dns.rdata.Rdata]: + return self.rrset and iter(self.rrset) or iter(tuple()) + + def __getitem__(self, i): + if self.rrset is None: + raise IndexError + return self.rrset[i] + + def __delitem__(self, i): + if self.rrset is None: + raise IndexError + del self.rrset[i] + + +class Answers(dict): + """A dict of DNS stub resolver answers, indexed by type.""" + + +class HostAnswers(Answers): + """A dict of DNS stub resolver answers to a host name lookup, indexed by + type. + """ + + @classmethod + def make( + cls, + v6: Optional[Answer] = None, + v4: Optional[Answer] = None, + add_empty: bool = True, + ) -> "HostAnswers": + answers = HostAnswers() + if v6 is not None and (add_empty or v6.rrset): + answers[dns.rdatatype.AAAA] = v6 + if v4 is not None and (add_empty or v4.rrset): + answers[dns.rdatatype.A] = v4 + return answers + + # Returns pairs of (address, family) from this result, potentially + # filtering by address family. + def addresses_and_families( + self, family: int = socket.AF_UNSPEC + ) -> Iterator[Tuple[str, int]]: + if family == socket.AF_UNSPEC: + yield from self.addresses_and_families(socket.AF_INET6) + yield from self.addresses_and_families(socket.AF_INET) + return + elif family == socket.AF_INET6: + answer = self.get(dns.rdatatype.AAAA) + elif family == socket.AF_INET: + answer = self.get(dns.rdatatype.A) + else: # pragma: no cover + raise NotImplementedError(f"unknown address family {family}") + if answer: + for rdata in answer: + yield (rdata.address, family) + + # Returns addresses from this result, potentially filtering by + # address family. + def addresses(self, family: int = socket.AF_UNSPEC) -> Iterator[str]: + return (pair[0] for pair in self.addresses_and_families(family)) + + # Returns the canonical name from this result. 
+ def canonical_name(self) -> dns.name.Name: + answer = self.get(dns.rdatatype.AAAA, self.get(dns.rdatatype.A)) + return answer.canonical_name + + +class CacheStatistics: + """Cache Statistics""" + + def __init__(self, hits: int = 0, misses: int = 0) -> None: + self.hits = hits + self.misses = misses + + def reset(self) -> None: + self.hits = 0 + self.misses = 0 + + def clone(self) -> "CacheStatistics": + return CacheStatistics(self.hits, self.misses) + + +class CacheBase: + def __init__(self) -> None: + self.lock = threading.Lock() + self.statistics = CacheStatistics() + + def reset_statistics(self) -> None: + """Reset all statistics to zero.""" + with self.lock: + self.statistics.reset() + + def hits(self) -> int: + """How many hits has the cache had?""" + with self.lock: + return self.statistics.hits + + def misses(self) -> int: + """How many misses has the cache had?""" + with self.lock: + return self.statistics.misses + + def get_statistics_snapshot(self) -> CacheStatistics: + """Return a consistent snapshot of all the statistics. + + If running with multiple threads, it's better to take a + snapshot than to call statistics methods such as hits() and + misses() individually. + """ + with self.lock: + return self.statistics.clone() + + +CacheKey = Tuple[dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass] + + +class Cache(CacheBase): + """Simple thread-safe DNS answer cache.""" + + def __init__(self, cleaning_interval: float = 300.0) -> None: + """*cleaning_interval*, a ``float`` is the number of seconds between + periodic cleanings. + """ + + super().__init__() + self.data: Dict[CacheKey, Answer] = {} + self.cleaning_interval = cleaning_interval + self.next_cleaning: float = time.time() + self.cleaning_interval + + def _maybe_clean(self) -> None: + """Clean the cache if it's time to do so.""" + + now = time.time() + if self.next_cleaning <= now: + keys_to_delete = [] + for k, v in self.data.items(): + if v.expiration <= now: + keys_to_delete.append(k) + for k in keys_to_delete: + del self.data[k] + now = time.time() + self.next_cleaning = now + self.cleaning_interval + + def get(self, key: CacheKey) -> Optional[Answer]: + """Get the answer associated with *key*. + + Returns None if no answer is cached for the key. + + *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` + tuple whose values are the query name, rdtype, and rdclass respectively. + + Returns a ``dns.resolver.Answer`` or ``None``. + """ + + with self.lock: + self._maybe_clean() + v = self.data.get(key) + if v is None or v.expiration <= time.time(): + self.statistics.misses += 1 + return None + self.statistics.hits += 1 + return v + + def put(self, key: CacheKey, value: Answer) -> None: + """Associate key and value in the cache. + + *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` + tuple whose values are the query name, rdtype, and rdclass respectively. + + *value*, a ``dns.resolver.Answer``, the answer. + """ + + with self.lock: + self._maybe_clean() + self.data[key] = value + + def flush(self, key: Optional[CacheKey] = None) -> None: + """Flush the cache. + + If *key* is not ``None``, only that item is flushed. Otherwise the entire cache + is flushed. + + *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` + tuple whose values are the query name, rdtype, and rdclass respectively. 
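+
+        A minimal usage sketch; attaching the cache to a resolver is the
+        typical pattern::
+
+            res = Resolver()
+            res.cache = Cache()
+            # ... resolve some names ...
+            res.cache.flush()  # discard every cached answer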
+ """ + + with self.lock: + if key is not None: + if key in self.data: + del self.data[key] + else: + self.data = {} + self.next_cleaning = time.time() + self.cleaning_interval + + +class LRUCacheNode: + """LRUCache node.""" + + def __init__(self, key, value): + self.key = key + self.value = value + self.hits = 0 + self.prev = self + self.next = self + + def link_after(self, node: "LRUCacheNode") -> None: + self.prev = node + self.next = node.next + node.next.prev = self + node.next = self + + def unlink(self) -> None: + self.next.prev = self.prev + self.prev.next = self.next + + +class LRUCache(CacheBase): + """Thread-safe, bounded, least-recently-used DNS answer cache. + + This cache is better than the simple cache (above) if you're + running a web crawler or other process that does a lot of + resolutions. The LRUCache has a maximum number of nodes, and when + it is full, the least-recently used node is removed to make space + for a new one. + """ + + def __init__(self, max_size: int = 100000) -> None: + """*max_size*, an ``int``, is the maximum number of nodes to cache; + it must be greater than 0. + """ + + super().__init__() + self.data: Dict[CacheKey, LRUCacheNode] = {} + self.set_max_size(max_size) + self.sentinel: LRUCacheNode = LRUCacheNode(None, None) + self.sentinel.prev = self.sentinel + self.sentinel.next = self.sentinel + + def set_max_size(self, max_size: int) -> None: + if max_size < 1: + max_size = 1 + self.max_size = max_size + + def get(self, key: CacheKey) -> Optional[Answer]: + """Get the answer associated with *key*. + + Returns None if no answer is cached for the key. + + *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` + tuple whose values are the query name, rdtype, and rdclass respectively. + + Returns a ``dns.resolver.Answer`` or ``None``. + """ + + with self.lock: + node = self.data.get(key) + if node is None: + self.statistics.misses += 1 + return None + # Unlink because we're either going to move the node to the front + # of the LRU list or we're going to free it. + node.unlink() + if node.value.expiration <= time.time(): + del self.data[node.key] + self.statistics.misses += 1 + return None + node.link_after(self.sentinel) + self.statistics.hits += 1 + node.hits += 1 + return node.value + + def get_hits_for_key(self, key: CacheKey) -> int: + """Return the number of cache hits associated with the specified key.""" + with self.lock: + node = self.data.get(key) + if node is None or node.value.expiration <= time.time(): + return 0 + else: + return node.hits + + def put(self, key: CacheKey, value: Answer) -> None: + """Associate key and value in the cache. + + *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` + tuple whose values are the query name, rdtype, and rdclass respectively. + + *value*, a ``dns.resolver.Answer``, the answer. + """ + + with self.lock: + node = self.data.get(key) + if node is not None: + node.unlink() + del self.data[node.key] + while len(self.data) >= self.max_size: + gnode = self.sentinel.prev + gnode.unlink() + del self.data[gnode.key] + node = LRUCacheNode(key, value) + node.link_after(self.sentinel) + self.data[key] = node + + def flush(self, key: Optional[CacheKey] = None) -> None: + """Flush the cache. + + If *key* is not ``None``, only that item is flushed. Otherwise the entire cache + is flushed. + + *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` + tuple whose values are the query name, rdtype, and rdclass respectively. 
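+
+        A minimal usage sketch (the key below is illustrative)::
+
+            res = Resolver()
+            res.cache = LRUCache(max_size=10000)
+            # ... resolve some names ...
+            key = (dns.name.from_text("www.example."), dns.rdatatype.A,
+                   dns.rdataclass.IN)
+            res.cache.flush(key)  # drop just this cached answer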
+ """ + + with self.lock: + if key is not None: + node = self.data.get(key) + if node is not None: + node.unlink() + del self.data[node.key] + else: + gnode = self.sentinel.next + while gnode != self.sentinel: + next = gnode.next + gnode.unlink() + gnode = next + self.data = {} + + +class _Resolution: + """Helper class for dns.resolver.Resolver.resolve(). + + All of the "business logic" of resolution is encapsulated in this + class, allowing us to have multiple resolve() implementations + using different I/O schemes without copying all of the + complicated logic. + + This class is a "friend" to dns.resolver.Resolver and manipulates + resolver data structures directly. + """ + + def __init__( + self, + resolver: "BaseResolver", + qname: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str], + rdclass: Union[dns.rdataclass.RdataClass, str], + tcp: bool, + raise_on_no_answer: bool, + search: Optional[bool], + ) -> None: + if isinstance(qname, str): + qname = dns.name.from_text(qname, None) + rdtype = dns.rdatatype.RdataType.make(rdtype) + if dns.rdatatype.is_metatype(rdtype): + raise NoMetaqueries + rdclass = dns.rdataclass.RdataClass.make(rdclass) + if dns.rdataclass.is_metaclass(rdclass): + raise NoMetaqueries + self.resolver = resolver + self.qnames_to_try = resolver._get_qnames_to_try(qname, search) + self.qnames = self.qnames_to_try[:] + self.rdtype = rdtype + self.rdclass = rdclass + self.tcp = tcp + self.raise_on_no_answer = raise_on_no_answer + self.nxdomain_responses: Dict[dns.name.Name, dns.message.QueryMessage] = {} + # Initialize other things to help analysis tools + self.qname = dns.name.empty + self.nameservers: List[dns.nameserver.Nameserver] = [] + self.current_nameservers: List[dns.nameserver.Nameserver] = [] + self.errors: List[ErrorTuple] = [] + self.nameserver: Optional[dns.nameserver.Nameserver] = None + self.tcp_attempt = False + self.retry_with_tcp = False + self.request: Optional[dns.message.QueryMessage] = None + self.backoff = 0.0 + + def next_request( + self, + ) -> Tuple[Optional[dns.message.QueryMessage], Optional[Answer]]: + """Get the next request to send, and check the cache. + + Returns a (request, answer) tuple. At most one of request or + answer will not be None. + """ + + # We return a tuple instead of Union[Message,Answer] as it lets + # the caller avoid isinstance(). + + while len(self.qnames) > 0: + self.qname = self.qnames.pop(0) + + # Do we know the answer? + if self.resolver.cache: + answer = self.resolver.cache.get( + (self.qname, self.rdtype, self.rdclass) + ) + if answer is not None: + if answer.rrset is None and self.raise_on_no_answer: + raise NoAnswer(response=answer.response) + else: + return (None, answer) + answer = self.resolver.cache.get( + (self.qname, dns.rdatatype.ANY, self.rdclass) + ) + if answer is not None and answer.response.rcode() == dns.rcode.NXDOMAIN: + # cached NXDOMAIN; record it and continue to next + # name. 
+ self.nxdomain_responses[self.qname] = answer.response + continue + + # Build the request + request = dns.message.make_query(self.qname, self.rdtype, self.rdclass) + if self.resolver.keyname is not None: + request.use_tsig( + self.resolver.keyring, + self.resolver.keyname, + algorithm=self.resolver.keyalgorithm, + ) + request.use_edns( + self.resolver.edns, + self.resolver.ednsflags, + self.resolver.payload, + options=self.resolver.ednsoptions, + ) + if self.resolver.flags is not None: + request.flags = self.resolver.flags + + self.nameservers = self.resolver._enrich_nameservers( + self.resolver._nameservers, + self.resolver.nameserver_ports, + self.resolver.port, + ) + if self.resolver.rotate: + random.shuffle(self.nameservers) + self.current_nameservers = self.nameservers[:] + self.errors = [] + self.nameserver = None + self.tcp_attempt = False + self.retry_with_tcp = False + self.request = request + self.backoff = 0.10 + + return (request, None) + + # + # We've tried everything and only gotten NXDOMAINs. (We know + # it's only NXDOMAINs as anything else would have returned + # before now.) + # + raise NXDOMAIN(qnames=self.qnames_to_try, responses=self.nxdomain_responses) + + def next_nameserver(self) -> Tuple[dns.nameserver.Nameserver, bool, float]: + if self.retry_with_tcp: + assert self.nameserver is not None + assert not self.nameserver.is_always_max_size() + self.tcp_attempt = True + self.retry_with_tcp = False + return (self.nameserver, True, 0) + + backoff = 0.0 + if not self.current_nameservers: + if len(self.nameservers) == 0: + # Out of things to try! + raise NoNameservers(request=self.request, errors=self.errors) + self.current_nameservers = self.nameservers[:] + backoff = self.backoff + self.backoff = min(self.backoff * 2, 2) + + self.nameserver = self.current_nameservers.pop(0) + self.tcp_attempt = self.tcp or self.nameserver.is_always_max_size() + return (self.nameserver, self.tcp_attempt, backoff) + + def query_result( + self, response: Optional[dns.message.Message], ex: Optional[Exception] + ) -> Tuple[Optional[Answer], bool]: + # + # returns an (answer: Answer, end_loop: bool) tuple. + # + assert self.nameserver is not None + if ex: + # Exception during I/O or from_wire() + assert response is None + self.errors.append( + ( + str(self.nameserver), + self.tcp_attempt, + self.nameserver.answer_port(), + ex, + response, + ) + ) + if ( + isinstance(ex, dns.exception.FormError) + or isinstance(ex, EOFError) + or isinstance(ex, OSError) + or isinstance(ex, NotImplementedError) + ): + # This nameserver is no good, take it out of the mix. + self.nameservers.remove(self.nameserver) + elif isinstance(ex, dns.message.Truncated): + if self.tcp_attempt: + # Truncation with TCP is no good! + self.nameservers.remove(self.nameserver) + else: + self.retry_with_tcp = True + return (None, False) + # We got an answer! + assert response is not None + assert isinstance(response, dns.message.QueryMessage) + rcode = response.rcode() + if rcode == dns.rcode.NOERROR: + try: + answer = Answer( + self.qname, + self.rdtype, + self.rdclass, + response, + self.nameserver.answer_nameserver(), + self.nameserver.answer_port(), + ) + except Exception as e: + self.errors.append( + ( + str(self.nameserver), + self.tcp_attempt, + self.nameserver.answer_port(), + e, + response, + ) + ) + # The nameserver is no good, take it out of the mix. 
+ self.nameservers.remove(self.nameserver) + return (None, False) + if self.resolver.cache: + self.resolver.cache.put((self.qname, self.rdtype, self.rdclass), answer) + if answer.rrset is None and self.raise_on_no_answer: + raise NoAnswer(response=answer.response) + return (answer, True) + elif rcode == dns.rcode.NXDOMAIN: + # Further validate the response by making an Answer, even + # if we aren't going to cache it. + try: + answer = Answer( + self.qname, dns.rdatatype.ANY, dns.rdataclass.IN, response + ) + except Exception as e: + self.errors.append( + ( + str(self.nameserver), + self.tcp_attempt, + self.nameserver.answer_port(), + e, + response, + ) + ) + # The nameserver is no good, take it out of the mix. + self.nameservers.remove(self.nameserver) + return (None, False) + self.nxdomain_responses[self.qname] = response + if self.resolver.cache: + self.resolver.cache.put( + (self.qname, dns.rdatatype.ANY, self.rdclass), answer + ) + # Make next_nameserver() return None, so caller breaks its + # inner loop and calls next_request(). + return (None, True) + elif rcode == dns.rcode.YXDOMAIN: + yex = YXDOMAIN() + self.errors.append( + ( + str(self.nameserver), + self.tcp_attempt, + self.nameserver.answer_port(), + yex, + response, + ) + ) + raise yex + else: + # + # We got a response, but we're not happy with the + # rcode in it. + # + if rcode != dns.rcode.SERVFAIL or not self.resolver.retry_servfail: + self.nameservers.remove(self.nameserver) + self.errors.append( + ( + str(self.nameserver), + self.tcp_attempt, + self.nameserver.answer_port(), + dns.rcode.to_text(rcode), + response, + ) + ) + return (None, False) + + +class BaseResolver: + """DNS stub resolver.""" + + # We initialize in reset() + # + # pylint: disable=attribute-defined-outside-init + + domain: dns.name.Name + nameserver_ports: Dict[str, int] + port: int + search: List[dns.name.Name] + use_search_by_default: bool + timeout: float + lifetime: float + keyring: Optional[Any] + keyname: Optional[Union[dns.name.Name, str]] + keyalgorithm: Union[dns.name.Name, str] + edns: int + ednsflags: int + ednsoptions: Optional[List[dns.edns.Option]] + payload: int + cache: Any + flags: Optional[int] + retry_servfail: bool + rotate: bool + ndots: Optional[int] + _nameservers: Sequence[Union[str, dns.nameserver.Nameserver]] + + def __init__( + self, filename: str = "/etc/resolv.conf", configure: bool = True + ) -> None: + """*filename*, a ``str`` or file object, specifying a file + in standard /etc/resolv.conf format. This parameter is meaningful + only when *configure* is true and the platform is POSIX. + + *configure*, a ``bool``. If True (the default), the resolver + instance is configured in the normal fashion for the operating + system the resolver is running on. (I.e. by reading a + /etc/resolv.conf file on POSIX systems and from the registry + on Windows systems.) 
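+
+        A minimal sketch of skipping system configuration entirely (the
+        nameserver address is illustrative)::
+
+            res = Resolver(configure=False)
+            res.nameservers = ["10.0.0.53"]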
+        """
+
+        self.reset()
+        if configure:
+            if sys.platform == "win32":  # pragma: no cover
+                self.read_registry()
+            elif filename:
+                self.read_resolv_conf(filename)
+
+    def reset(self) -> None:
+        """Reset all resolver configuration to the defaults."""
+
+        self.domain = dns.name.Name(dns.name.from_text(socket.gethostname())[1:])
+        if len(self.domain) == 0:  # pragma: no cover
+            self.domain = dns.name.root
+        self._nameservers = []
+        self.nameserver_ports = {}
+        self.port = 53
+        self.search = []
+        self.use_search_by_default = False
+        self.timeout = 2.0
+        self.lifetime = 5.0
+        self.keyring = None
+        self.keyname = None
+        self.keyalgorithm = dns.tsig.default_algorithm
+        self.edns = -1
+        self.ednsflags = 0
+        self.ednsoptions = None
+        self.payload = 0
+        self.cache = None
+        self.flags = None
+        self.retry_servfail = False
+        self.rotate = False
+        self.ndots = None
+
+    def read_resolv_conf(self, f: Any) -> None:
+        """Process *f* as a file in the /etc/resolv.conf format. If f is
+        a ``str``, it is used as the name of the file to open; otherwise it
+        is treated as the file itself.
+
+        Interprets the following items:
+
+        - nameserver - name server IP address
+
+        - domain - local domain name
+
+        - search - search list for host-name lookup
+
+        - options - supported options are rotate, timeout, edns0, and ndots
+
+        """
+
+        nameservers = []
+        if isinstance(f, str):
+            try:
+                cm: contextlib.AbstractContextManager = open(f)
+            except OSError:
+                # /etc/resolv.conf doesn't exist, can't be read, etc.
+                raise NoResolverConfiguration(f"cannot open {f}")
+        else:
+            cm = contextlib.nullcontext(f)
+        with cm as f:
+            for l in f:
+                if len(l) == 0 or l[0] == "#" or l[0] == ";":
+                    continue
+                tokens = l.split()
+
+                # Any line containing less than 2 tokens is malformed
+                if len(tokens) < 2:
+                    continue
+
+                if tokens[0] == "nameserver":
+                    nameservers.append(tokens[1])
+                elif tokens[0] == "domain":
+                    self.domain = dns.name.from_text(tokens[1])
+                    # domain and search are exclusive
+                    self.search = []
+                elif tokens[0] == "search":
+                    # the last search wins
+                    self.search = []
+                    for suffix in tokens[1:]:
+                        self.search.append(dns.name.from_text(suffix))
+                    # We don't set domain as it is not used if
+                    # len(self.search) > 0
+                elif tokens[0] == "options":
+                    for opt in tokens[1:]:
+                        if opt == "rotate":
+                            self.rotate = True
+                        elif opt == "edns0":
+                            self.use_edns()
+                        elif "timeout" in opt:
+                            try:
+                                self.timeout = int(opt.split(":")[1])
+                            except (ValueError, IndexError):
+                                pass
+                        elif "ndots" in opt:
+                            try:
+                                self.ndots = int(opt.split(":")[1])
+                            except (ValueError, IndexError):
+                                pass
+        if len(nameservers) == 0:
+            raise NoResolverConfiguration("no nameservers")
+        # Assigning directly instead of appending means we invoke the
+        # setter logic, with additional checking and enrichment.
+        self.nameservers = nameservers
+
+    def read_registry(self) -> None:  # pragma: no cover
+        """Extract resolver configuration from the Windows registry."""
+        try:
+            info = dns.win32util.get_dns_info()  # type: ignore
+            if info.domain is not None:
+                self.domain = info.domain
+            self.nameservers = info.nameservers
+            self.search = info.search
+        except AttributeError:
+            raise NotImplementedError
+
+    def _compute_timeout(
+        self,
+        start: float,
+        lifetime: Optional[float] = None,
+        errors: Optional[List[ErrorTuple]] = None,
+    ) -> float:
+        lifetime = self.lifetime if lifetime is None else lifetime
+        now = time.time()
+        duration = now - start
+        if errors is None:
+            errors = []
+        if duration < 0:
+            if duration < -1:
+                # Time going backwards is bad. Just give up.
+ raise LifetimeTimeout(timeout=duration, errors=errors) + else: + # Time went backwards, but only a little. This can + # happen, e.g. under vmware with older linux kernels. + # Pretend it didn't happen. + duration = 0 + if duration >= lifetime: + raise LifetimeTimeout(timeout=duration, errors=errors) + return min(lifetime - duration, self.timeout) + + def _get_qnames_to_try( + self, qname: dns.name.Name, search: Optional[bool] + ) -> List[dns.name.Name]: + # This is a separate method so we can unit test the search + # rules without requiring the Internet. + if search is None: + search = self.use_search_by_default + qnames_to_try = [] + if qname.is_absolute(): + qnames_to_try.append(qname) + else: + abs_qname = qname.concatenate(dns.name.root) + if search: + if len(self.search) > 0: + # There is a search list, so use it exclusively + search_list = self.search[:] + elif self.domain != dns.name.root and self.domain is not None: + # We have some notion of a domain that isn't the root, so + # use it as the search list. + search_list = [self.domain] + else: + search_list = [] + # Figure out the effective ndots (default is 1) + if self.ndots is None: + ndots = 1 + else: + ndots = self.ndots + for suffix in search_list: + qnames_to_try.append(qname + suffix) + if len(qname) > ndots: + # The name has at least ndots dots, so we should try an + # absolute query first. + qnames_to_try.insert(0, abs_qname) + else: + # The name has less than ndots dots, so we should search + # first, then try the absolute name. + qnames_to_try.append(abs_qname) + else: + qnames_to_try.append(abs_qname) + return qnames_to_try + + def use_tsig( + self, + keyring: Any, + keyname: Optional[Union[dns.name.Name, str]] = None, + algorithm: Union[dns.name.Name, str] = dns.tsig.default_algorithm, + ) -> None: + """Add a TSIG signature to each query. + + The parameters are passed to ``dns.message.Message.use_tsig()``; + see its documentation for details. + """ + + self.keyring = keyring + self.keyname = keyname + self.keyalgorithm = algorithm + + def use_edns( + self, + edns: Optional[Union[int, bool]] = 0, + ednsflags: int = 0, + payload: int = dns.message.DEFAULT_EDNS_PAYLOAD, + options: Optional[List[dns.edns.Option]] = None, + ) -> None: + """Configure EDNS behavior. + + *edns*, an ``int``, is the EDNS level to use. Specifying + ``None``, ``False``, or ``-1`` means "do not use EDNS", and in this case + the other parameters are ignored. Specifying ``True`` is + equivalent to specifying 0, i.e. "use EDNS0". + + *ednsflags*, an ``int``, the EDNS flag values. + + *payload*, an ``int``, is the EDNS sender's payload field, which is the + maximum size of UDP datagram the sender can handle. I.e. how big + a response to this message can be. + + *options*, a list of ``dns.edns.Option`` objects or ``None``, the EDNS + options. + """ + + if edns is None or edns is False: + edns = -1 + elif edns is True: + edns = 0 + self.edns = edns + self.ednsflags = ednsflags + self.payload = payload + self.ednsoptions = options + + def set_flags(self, flags: int) -> None: + """Overrides the default flags with your own. + + *flags*, an ``int``, the message flags to use. 
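+
+        A minimal sketch, requesting recursion with DNSSEC checking
+        disabled (the flag choice is illustrative)::
+
+            res.set_flags(dns.flags.RD | dns.flags.CD)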
+ """ + + self.flags = flags + + @classmethod + def _enrich_nameservers( + cls, + nameservers: Sequence[Union[str, dns.nameserver.Nameserver]], + nameserver_ports: Dict[str, int], + default_port: int, + ) -> List[dns.nameserver.Nameserver]: + enriched_nameservers = [] + if isinstance(nameservers, list): + for nameserver in nameservers: + enriched_nameserver: dns.nameserver.Nameserver + if isinstance(nameserver, dns.nameserver.Nameserver): + enriched_nameserver = nameserver + elif dns.inet.is_address(nameserver): + port = nameserver_ports.get(nameserver, default_port) + enriched_nameserver = dns.nameserver.Do53Nameserver( + nameserver, port + ) + else: + try: + if urlparse(nameserver).scheme != "https": + raise NotImplementedError + except Exception: + raise ValueError( + f"nameserver {nameserver} is not a " + "dns.nameserver.Nameserver instance or text form, " + "IP address, nor a valid https URL" + ) + enriched_nameserver = dns.nameserver.DoHNameserver(nameserver) + enriched_nameservers.append(enriched_nameserver) + else: + raise ValueError( + f"nameservers must be a list or tuple (not a {type(nameservers)})" + ) + return enriched_nameservers + + @property + def nameservers( + self, + ) -> Sequence[Union[str, dns.nameserver.Nameserver]]: + return self._nameservers + + @nameservers.setter + def nameservers( + self, nameservers: Sequence[Union[str, dns.nameserver.Nameserver]] + ) -> None: + """ + *nameservers*, a ``list`` of nameservers, where a nameserver is either + a string interpretable as a nameserver, or a ``dns.nameserver.Nameserver`` + instance. + + Raises ``ValueError`` if *nameservers* is not a list of nameservers. + """ + # We just call _enrich_nameservers() for checking + self._enrich_nameservers(nameservers, self.nameserver_ports, self.port) + self._nameservers = nameservers + + +class Resolver(BaseResolver): + """DNS stub resolver.""" + + def resolve( + self, + qname: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A, + rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, + tcp: bool = False, + source: Optional[str] = None, + raise_on_no_answer: bool = True, + source_port: int = 0, + lifetime: Optional[float] = None, + search: Optional[bool] = None, + ) -> Answer: # pylint: disable=arguments-differ + """Query nameservers to find the answer to the question. + + The *qname*, *rdtype*, and *rdclass* parameters may be objects + of the appropriate type, or strings that can be converted into objects + of the appropriate type. + + *qname*, a ``dns.name.Name`` or ``str``, the query name. + + *rdtype*, an ``int`` or ``str``, the query type. + + *rdclass*, an ``int`` or ``str``, the query class. + + *tcp*, a ``bool``. If ``True``, use TCP to make the query. + + *source*, a ``str`` or ``None``. If not ``None``, bind to this IP + address when making queries. + + *raise_on_no_answer*, a ``bool``. If ``True``, raise + ``dns.resolver.NoAnswer`` if there's no answer to the question. + + *source_port*, an ``int``, the port from which to send the message. + + *lifetime*, a ``float``, how many seconds a query should run + before timing out. + + *search*, a ``bool`` or ``None``, determines whether the + search list configured in the system's resolver configuration + are used for relative names, and whether the resolver's domain + may be added to relative names. The default is ``None``, + which causes the value of the resolver's + ``use_search_by_default`` attribute to be used. 
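+
+        A minimal usage sketch (the query name is illustrative)::
+
+            res = Resolver()
+            answer = res.resolve("www.example.com", "A")
+            for rdata in answer:
+                print(rdata.address)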
+ + Raises ``dns.resolver.LifetimeTimeout`` if no answers could be found + in the specified lifetime. + + Raises ``dns.resolver.NXDOMAIN`` if the query name does not exist. + + Raises ``dns.resolver.YXDOMAIN`` if the query name is too long after + DNAME substitution. + + Raises ``dns.resolver.NoAnswer`` if *raise_on_no_answer* is + ``True`` and the query name exists but has no RRset of the + desired type and class. + + Raises ``dns.resolver.NoNameservers`` if no non-broken + nameservers are available to answer the question. + + Returns a ``dns.resolver.Answer`` instance. + + """ + + resolution = _Resolution( + self, qname, rdtype, rdclass, tcp, raise_on_no_answer, search + ) + start = time.time() + while True: + (request, answer) = resolution.next_request() + # Note we need to say "if answer is not None" and not just + # "if answer" because answer implements __len__, and python + # will call that. We want to return if we have an answer + # object, including in cases where its length is 0. + if answer is not None: + # cache hit! + return answer + assert request is not None # needed for type checking + done = False + while not done: + (nameserver, tcp, backoff) = resolution.next_nameserver() + if backoff: + time.sleep(backoff) + timeout = self._compute_timeout(start, lifetime, resolution.errors) + try: + response = nameserver.query( + request, + timeout=timeout, + source=source, + source_port=source_port, + max_size=tcp, + ) + except Exception as ex: + (_, done) = resolution.query_result(None, ex) + continue + (answer, done) = resolution.query_result(response, None) + # Note we need to say "if answer is not None" and not just + # "if answer" because answer implements __len__, and python + # will call that. We want to return if we have an answer + # object, including in cases where its length is 0. + if answer is not None: + return answer + + def query( + self, + qname: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A, + rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, + tcp: bool = False, + source: Optional[str] = None, + raise_on_no_answer: bool = True, + source_port: int = 0, + lifetime: Optional[float] = None, + ) -> Answer: # pragma: no cover + """Query nameservers to find the answer to the question. + + This method calls resolve() with ``search=True``, and is + provided for backwards compatibility with prior versions of + dnspython. See the documentation for the resolve() method for + further details. + """ + warnings.warn( + "please use dns.resolver.Resolver.resolve() instead", + DeprecationWarning, + stacklevel=2, + ) + return self.resolve( + qname, + rdtype, + rdclass, + tcp, + source, + raise_on_no_answer, + source_port, + lifetime, + True, + ) + + def resolve_address(self, ipaddr: str, *args: Any, **kwargs: Any) -> Answer: + """Use a resolver to run a reverse query for PTR records. + + This utilizes the resolve() method to perform a PTR lookup on the + specified IP address. + + *ipaddr*, a ``str``, the IPv4 or IPv6 address you want to get + the PTR record for. + + All other arguments that can be passed to the resolve() function + except for rdtype and rdclass are also supported by this + function. + """ + # We make a modified kwargs for type checking happiness, as otherwise + # we get a legit warning about possibly having rdtype and rdclass + # in the kwargs more than once. 
+ modified_kwargs: Dict[str, Any] = {} + modified_kwargs.update(kwargs) + modified_kwargs["rdtype"] = dns.rdatatype.PTR + modified_kwargs["rdclass"] = dns.rdataclass.IN + return self.resolve( + dns.reversename.from_address(ipaddr), *args, **modified_kwargs + ) + + def resolve_name( + self, + name: Union[dns.name.Name, str], + family: int = socket.AF_UNSPEC, + **kwargs: Any, + ) -> HostAnswers: + """Use a resolver to query for address records. + + This utilizes the resolve() method to perform A and/or AAAA lookups on + the specified name. + + *qname*, a ``dns.name.Name`` or ``str``, the name to resolve. + + *family*, an ``int``, the address family. If socket.AF_UNSPEC + (the default), both A and AAAA records will be retrieved. + + All other arguments that can be passed to the resolve() function + except for rdtype and rdclass are also supported by this + function. + """ + # We make a modified kwargs for type checking happiness, as otherwise + # we get a legit warning about possibly having rdtype and rdclass + # in the kwargs more than once. + modified_kwargs: Dict[str, Any] = {} + modified_kwargs.update(kwargs) + modified_kwargs.pop("rdtype", None) + modified_kwargs["rdclass"] = dns.rdataclass.IN + + if family == socket.AF_INET: + v4 = self.resolve(name, dns.rdatatype.A, **modified_kwargs) + return HostAnswers.make(v4=v4) + elif family == socket.AF_INET6: + v6 = self.resolve(name, dns.rdatatype.AAAA, **modified_kwargs) + return HostAnswers.make(v6=v6) + elif family != socket.AF_UNSPEC: # pragma: no cover + raise NotImplementedError(f"unknown address family {family}") + + raise_on_no_answer = modified_kwargs.pop("raise_on_no_answer", True) + lifetime = modified_kwargs.pop("lifetime", None) + start = time.time() + v6 = self.resolve( + name, + dns.rdatatype.AAAA, + raise_on_no_answer=False, + lifetime=self._compute_timeout(start, lifetime), + **modified_kwargs, + ) + # Note that setting name ensures we query the same name + # for A as we did for AAAA. (This is just in case search lists + # are active by default in the resolver configuration and + # we might be talking to a server that says NXDOMAIN when it + # wants to say NOERROR no data. + name = v6.qname + v4 = self.resolve( + name, + dns.rdatatype.A, + raise_on_no_answer=False, + lifetime=self._compute_timeout(start, lifetime), + **modified_kwargs, + ) + answers = HostAnswers.make(v6=v6, v4=v4, add_empty=not raise_on_no_answer) + if not answers: + raise NoAnswer(response=v6.response) + return answers + + # pylint: disable=redefined-outer-name + + def canonical_name(self, name: Union[dns.name.Name, str]) -> dns.name.Name: + """Determine the canonical name of *name*. + + The canonical name is the name the resolver uses for queries + after all CNAME and DNAME renamings have been applied. + + *name*, a ``dns.name.Name`` or ``str``, the query name. + + This method can raise any exception that ``resolve()`` can + raise, other than ``dns.resolver.NoAnswer`` and + ``dns.resolver.NXDOMAIN``. + + Returns a ``dns.name.Name``. + """ + try: + answer = self.resolve(name, raise_on_no_answer=False) + canonical_name = answer.canonical_name + except dns.resolver.NXDOMAIN as e: + canonical_name = e.canonical_name + return canonical_name + + # pylint: enable=redefined-outer-name + + def try_ddr(self, lifetime: float = 5.0) -> None: + """Try to update the resolver's nameservers using Discovery of Designated + Resolvers (DDR). If successful, the resolver will subsequently use + DNS-over-HTTPS or DNS-over-TLS for future queries. 
+
+        *lifetime*, a float, is the maximum time to spend attempting DDR. The default
+        is 5 seconds.
+
+        If the SVCB query is successful and results in a non-empty list of nameservers,
+        then the resolver's nameservers are set to the returned servers in priority
+        order.
+
+        The current implementation does not use any address hints from the SVCB record,
+        nor does it resolve addresses for the SVCB target name, rather it assumes that
+        the bootstrap nameserver will always be one of the addresses and uses it.
+        A future revision to the code may offer fuller support. The code verifies that
+        the bootstrap nameserver is in the Subject Alternative Name field of the
+        TLS certificate.
+        """
+        try:
+            expiration = time.time() + lifetime
+            answer = self.resolve(
+                dns._ddr._local_resolver_name, "SVCB", lifetime=lifetime
+            )
+            timeout = dns.query._remaining(expiration)
+            nameservers = dns._ddr._get_nameservers_sync(answer, timeout)
+            if len(nameservers) > 0:
+                self.nameservers = nameservers
+        except Exception:  # pragma: no cover
+            pass
+
+
+#: The default resolver.
+default_resolver: Optional[Resolver] = None
+
+
+def get_default_resolver() -> Resolver:
+    """Get the default resolver, initializing it if necessary."""
+    if default_resolver is None:
+        reset_default_resolver()
+    assert default_resolver is not None
+    return default_resolver
+
+
+def reset_default_resolver() -> None:
+    """Re-initialize default resolver.
+
+    Note that the resolver configuration (i.e. /etc/resolv.conf on UNIX
+    systems) will be re-read immediately.
+    """
+
+    global default_resolver
+    default_resolver = Resolver()
+
+
+def resolve(
+    qname: Union[dns.name.Name, str],
+    rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A,
+    rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN,
+    tcp: bool = False,
+    source: Optional[str] = None,
+    raise_on_no_answer: bool = True,
+    source_port: int = 0,
+    lifetime: Optional[float] = None,
+    search: Optional[bool] = None,
+) -> Answer:  # pragma: no cover
+    """Query nameservers to find the answer to the question.
+
+    This is a convenience function that uses the default resolver
+    object to make the query.
+
+    See ``dns.resolver.Resolver.resolve`` for more information on the
+    parameters.
+    """
+
+    return get_default_resolver().resolve(
+        qname,
+        rdtype,
+        rdclass,
+        tcp,
+        source,
+        raise_on_no_answer,
+        source_port,
+        lifetime,
+        search,
+    )
+
+
+def query(
+    qname: Union[dns.name.Name, str],
+    rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A,
+    rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN,
+    tcp: bool = False,
+    source: Optional[str] = None,
+    raise_on_no_answer: bool = True,
+    source_port: int = 0,
+    lifetime: Optional[float] = None,
+) -> Answer:  # pragma: no cover
+    """Query nameservers to find the answer to the question.
+
+    This method calls resolve() with ``search=True``, and is
+    provided for backwards compatibility with prior versions of
+    dnspython. See the documentation for the resolve() method for
+    further details.
+    """
+    warnings.warn(
+        "please use dns.resolver.resolve() instead", DeprecationWarning, stacklevel=2
+    )
+    return resolve(
+        qname,
+        rdtype,
+        rdclass,
+        tcp,
+        source,
+        raise_on_no_answer,
+        source_port,
+        lifetime,
+        True,
+    )
+
+
+def resolve_address(ipaddr: str, *args: Any, **kwargs: Any) -> Answer:
+    """Use a resolver to run a reverse query for PTR records.
+
+    See ``dns.resolver.Resolver.resolve_address`` for more information on the
+    parameters.
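+
+    A minimal usage sketch (the address is illustrative)::
+
+        answer = dns.resolver.resolve_address("192.0.2.1")
+        for ptr in answer:
+            print(ptr.target)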
+ """ + + return get_default_resolver().resolve_address(ipaddr, *args, **kwargs) + + +def resolve_name( + name: Union[dns.name.Name, str], family: int = socket.AF_UNSPEC, **kwargs: Any +) -> HostAnswers: + """Use a resolver to query for address records. + + See ``dns.resolver.Resolver.resolve_name`` for more information on the + parameters. + """ + + return get_default_resolver().resolve_name(name, family, **kwargs) + + +def canonical_name(name: Union[dns.name.Name, str]) -> dns.name.Name: + """Determine the canonical name of *name*. + + See ``dns.resolver.Resolver.canonical_name`` for more information on the + parameters and possible exceptions. + """ + + return get_default_resolver().canonical_name(name) + + +def try_ddr(lifetime: float = 5.0) -> None: # pragma: no cover + """Try to update the default resolver's nameservers using Discovery of Designated + Resolvers (DDR). If successful, the resolver will subsequently use + DNS-over-HTTPS or DNS-over-TLS for future queries. + + See :py:func:`dns.resolver.Resolver.try_ddr` for more information. + """ + return get_default_resolver().try_ddr(lifetime) + + +def zone_for_name( + name: Union[dns.name.Name, str], + rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, + tcp: bool = False, + resolver: Optional[Resolver] = None, + lifetime: Optional[float] = None, +) -> dns.name.Name: + """Find the name of the zone which contains the specified name. + + *name*, an absolute ``dns.name.Name`` or ``str``, the query name. + + *rdclass*, an ``int``, the query class. + + *tcp*, a ``bool``. If ``True``, use TCP to make the query. + + *resolver*, a ``dns.resolver.Resolver`` or ``None``, the resolver to use. + If ``None``, the default, then the default resolver is used. + + *lifetime*, a ``float``, the total time to allow for the queries needed + to determine the zone. If ``None``, the default, then only the individual + query limits of the resolver apply. + + Raises ``dns.resolver.NoRootSOA`` if there is no SOA RR at the DNS + root. (This is only likely to happen if you're using non-default + root servers in your network and they are misconfigured.) + + Raises ``dns.resolver.LifetimeTimeout`` if the answer could not be + found in the allotted lifetime. + + Returns a ``dns.name.Name``. + """ + + if isinstance(name, str): + name = dns.name.from_text(name, dns.name.root) + if resolver is None: + resolver = get_default_resolver() + if not name.is_absolute(): + raise NotAbsolute(name) + start = time.time() + expiration: Optional[float] + if lifetime is not None: + expiration = start + lifetime + else: + expiration = None + while 1: + try: + rlifetime: Optional[float] + if expiration is not None: + rlifetime = expiration - time.time() + if rlifetime <= 0: + rlifetime = 0 + else: + rlifetime = None + answer = resolver.resolve( + name, dns.rdatatype.SOA, rdclass, tcp, lifetime=rlifetime + ) + assert answer.rrset is not None + if answer.rrset.name == name: + return name + # otherwise we were CNAMEd or DNAMEd and need to look higher + except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer) as e: + if isinstance(e, dns.resolver.NXDOMAIN): + response = e.responses().get(name) + else: + response = e.response() # pylint: disable=no-value-for-parameter + if response: + for rrs in response.authority: + if rrs.rdtype == dns.rdatatype.SOA and rrs.rdclass == rdclass: + (nr, _, _) = rrs.name.fullcompare(name) + if nr == dns.name.NAMERELN_SUPERDOMAIN: + # We're doing a proper superdomain check as + # if the name were equal we ought to have gotten + # it in the answer section! 
We are ignoring the + # possibility that the authority is insane and + # is including multiple SOA RRs for different + # authorities. + return rrs.name + # we couldn't extract anything useful from the response (e.g. it's + # a type 3 NXDOMAIN) + try: + name = name.parent() + except dns.name.NoParent: + raise NoRootSOA + + +def make_resolver_at( + where: Union[dns.name.Name, str], + port: int = 53, + family: int = socket.AF_UNSPEC, + resolver: Optional[Resolver] = None, +) -> Resolver: + """Make a stub resolver using the specified destination as the full resolver. + + *where*, a ``dns.name.Name`` or ``str`` the domain name or IP address of the + full resolver. + + *port*, an ``int``, the port to use. If not specified, the default is 53. + + *family*, an ``int``, the address family to use. This parameter is used if + *where* is not an address. The default is ``socket.AF_UNSPEC`` in which case + the first address returned by ``resolve_name()`` will be used, otherwise the + first address of the specified family will be used. + + *resolver*, a ``dns.resolver.Resolver`` or ``None``, the resolver to use for + resolution of hostnames. If not specified, the default resolver will be used. + + Returns a ``dns.resolver.Resolver`` or raises an exception. + """ + if resolver is None: + resolver = get_default_resolver() + nameservers: List[Union[str, dns.nameserver.Nameserver]] = [] + if isinstance(where, str) and dns.inet.is_address(where): + nameservers.append(dns.nameserver.Do53Nameserver(where, port)) + else: + for address in resolver.resolve_name(where, family).addresses(): + nameservers.append(dns.nameserver.Do53Nameserver(address, port)) + res = dns.resolver.Resolver(configure=False) + res.nameservers = nameservers + return res + + +def resolve_at( + where: Union[dns.name.Name, str], + qname: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A, + rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, + tcp: bool = False, + source: Optional[str] = None, + raise_on_no_answer: bool = True, + source_port: int = 0, + lifetime: Optional[float] = None, + search: Optional[bool] = None, + port: int = 53, + family: int = socket.AF_UNSPEC, + resolver: Optional[Resolver] = None, +) -> Answer: + """Query nameservers to find the answer to the question. + + This is a convenience function that calls ``dns.resolver.make_resolver_at()`` to + make a resolver, and then uses it to resolve the query. + + See ``dns.resolver.Resolver.resolve`` for more information on the resolution + parameters, and ``dns.resolver.make_resolver_at`` for information about the resolver + parameters *where*, *port*, *family*, and *resolver*. + + If making more than one query, it is more efficient to call + ``dns.resolver.make_resolver_at()`` and then use that resolver for the queries + instead of calling ``resolve_at()`` multiple times. + """ + return make_resolver_at(where, port, family, resolver).resolve( + qname, + rdtype, + rdclass, + tcp, + source, + raise_on_no_answer, + source_port, + lifetime, + search, + ) + + +# +# Support for overriding the system resolver for all python code in the +# running process. 
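+#
+# A minimal usage sketch (these are this module's own entry points):
+#
+#     dns.resolver.override_system_resolver()
+#     # ... socket.getaddrinfo() and friends now use dnspython ...
+#     dns.resolver.restore_system_resolver()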
+# + +_protocols_for_socktype = { + socket.SOCK_DGRAM: [socket.SOL_UDP], + socket.SOCK_STREAM: [socket.SOL_TCP], +} + +_resolver = None +_original_getaddrinfo = socket.getaddrinfo +_original_getnameinfo = socket.getnameinfo +_original_getfqdn = socket.getfqdn +_original_gethostbyname = socket.gethostbyname +_original_gethostbyname_ex = socket.gethostbyname_ex +_original_gethostbyaddr = socket.gethostbyaddr + + +def _getaddrinfo( + host=None, service=None, family=socket.AF_UNSPEC, socktype=0, proto=0, flags=0 +): + if flags & socket.AI_NUMERICHOST != 0: + # Short circuit directly into the system's getaddrinfo(). We're + # not adding any value in this case, and this avoids infinite loops + # because dns.query.* needs to call getaddrinfo() for IPv6 scoping + # reasons. We will also do this short circuit below if we + # discover that the host is an address literal. + return _original_getaddrinfo(host, service, family, socktype, proto, flags) + if flags & (socket.AI_ADDRCONFIG | socket.AI_V4MAPPED) != 0: + # Not implemented. We raise a gaierror as opposed to a + # NotImplementedError as it helps callers handle errors more + # appropriately. [Issue #316] + # + # We raise EAI_FAIL as opposed to EAI_SYSTEM because there is + # no EAI_SYSTEM on Windows [Issue #416]. We didn't go for + # EAI_BADFLAGS as the flags aren't bad, we just don't + # implement them. + raise socket.gaierror( + socket.EAI_FAIL, "Non-recoverable failure in name resolution" + ) + if host is None and service is None: + raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") + addrs = [] + canonical_name = None # pylint: disable=redefined-outer-name + # Is host None or an address literal? If so, use the system's + # getaddrinfo(). + if host is None: + return _original_getaddrinfo(host, service, family, socktype, proto, flags) + try: + # We don't care about the result of af_for_address(), we're just + # calling it so it raises an exception if host is not an IPv4 or + # IPv6 address. + dns.inet.af_for_address(host) + return _original_getaddrinfo(host, service, family, socktype, proto, flags) + except Exception: + pass + # Something needs resolution! + try: + answers = _resolver.resolve_name(host, family) + addrs = answers.addresses_and_families() + canonical_name = answers.canonical_name().to_text(True) + except dns.resolver.NXDOMAIN: + raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") + except Exception: + # We raise EAI_AGAIN here as the failure may be temporary + # (e.g. a timeout) and EAI_SYSTEM isn't defined on Windows. + # [Issue #416] + raise socket.gaierror(socket.EAI_AGAIN, "Temporary failure in name resolution") + port = None + try: + # Is it a port literal? 
+ if service is None: + port = 0 + else: + port = int(service) + except Exception: + if flags & socket.AI_NUMERICSERV == 0: + try: + port = socket.getservbyname(service) + except Exception: + pass + if port is None: + raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") + tuples = [] + if socktype == 0: + socktypes = [socket.SOCK_DGRAM, socket.SOCK_STREAM] + else: + socktypes = [socktype] + if flags & socket.AI_CANONNAME != 0: + cname = canonical_name + else: + cname = "" + for addr, af in addrs: + for socktype in socktypes: + for proto in _protocols_for_socktype[socktype]: + addr_tuple = dns.inet.low_level_address_tuple((addr, port), af) + tuples.append((af, socktype, proto, cname, addr_tuple)) + if len(tuples) == 0: + raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") + return tuples + + +def _getnameinfo(sockaddr, flags=0): + host = sockaddr[0] + port = sockaddr[1] + if len(sockaddr) == 4: + scope = sockaddr[3] + family = socket.AF_INET6 + else: + scope = None + family = socket.AF_INET + tuples = _getaddrinfo(host, port, family, socket.SOCK_STREAM, socket.SOL_TCP, 0) + if len(tuples) > 1: + raise OSError("sockaddr resolved to multiple addresses") + addr = tuples[0][4][0] + if flags & socket.NI_DGRAM: + pname = "udp" + else: + pname = "tcp" + qname = dns.reversename.from_address(addr) + if flags & socket.NI_NUMERICHOST == 0: + try: + answer = _resolver.resolve(qname, "PTR") + hostname = answer.rrset[0].target.to_text(True) + except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer): + if flags & socket.NI_NAMEREQD: + raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") + hostname = addr + if scope is not None: + hostname += "%" + str(scope) + else: + hostname = addr + if scope is not None: + hostname += "%" + str(scope) + if flags & socket.NI_NUMERICSERV: + service = str(port) + else: + service = socket.getservbyport(port, pname) + return (hostname, service) + + +def _getfqdn(name=None): + if name is None: + name = socket.gethostname() + try: + (name, _, _) = _gethostbyaddr(name) + # Python's version checks aliases too, but our gethostbyname + # ignores them, so we do so here as well. + except Exception: # pragma: no cover + pass + return name + + +def _gethostbyname(name): + return _gethostbyname_ex(name)[2][0] + + +def _gethostbyname_ex(name): + aliases = [] + addresses = [] + tuples = _getaddrinfo( + name, 0, socket.AF_INET, socket.SOCK_STREAM, socket.SOL_TCP, socket.AI_CANONNAME + ) + canonical = tuples[0][3] + for item in tuples: + addresses.append(item[4][0]) + # XXX we just ignore aliases + return (canonical, aliases, addresses) + + +def _gethostbyaddr(ip): + try: + dns.ipv6.inet_aton(ip) + sockaddr = (ip, 80, 0, 0) + family = socket.AF_INET6 + except Exception: + try: + dns.ipv4.inet_aton(ip) + except Exception: + raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") + sockaddr = (ip, 80) + family = socket.AF_INET + (name, _) = _getnameinfo(sockaddr, socket.NI_NAMEREQD) + aliases = [] + addresses = [] + tuples = _getaddrinfo( + name, 0, family, socket.SOCK_STREAM, socket.SOL_TCP, socket.AI_CANONNAME + ) + canonical = tuples[0][3] + # We only want to include an address from the tuples if it's the + # same as the one we asked about. We do this comparison in binary + # to avoid any differences in text representations. 
+ bin_ip = dns.inet.inet_pton(family, ip) + for item in tuples: + addr = item[4][0] + bin_addr = dns.inet.inet_pton(family, addr) + if bin_ip == bin_addr: + addresses.append(addr) + # XXX we just ignore aliases + return (canonical, aliases, addresses) + + +def override_system_resolver(resolver: Optional[Resolver] = None) -> None: + """Override the system resolver routines in the socket module with + versions which use dnspython's resolver. + + This can be useful in testing situations where you want to control + the resolution behavior of python code without having to change + the system's resolver settings (e.g. /etc/resolv.conf). + + The resolver to use may be specified; if it's not, the default + resolver will be used. + + resolver, a ``dns.resolver.Resolver`` or ``None``, the resolver to use. + """ + + if resolver is None: + resolver = get_default_resolver() + global _resolver + _resolver = resolver + socket.getaddrinfo = _getaddrinfo + socket.getnameinfo = _getnameinfo + socket.getfqdn = _getfqdn + socket.gethostbyname = _gethostbyname + socket.gethostbyname_ex = _gethostbyname_ex + socket.gethostbyaddr = _gethostbyaddr + + +def restore_system_resolver() -> None: + """Undo the effects of prior override_system_resolver().""" + + global _resolver + _resolver = None + socket.getaddrinfo = _original_getaddrinfo + socket.getnameinfo = _original_getnameinfo + socket.getfqdn = _original_getfqdn + socket.gethostbyname = _original_gethostbyname + socket.gethostbyname_ex = _original_gethostbyname_ex + socket.gethostbyaddr = _original_gethostbyaddr diff --git a/venv/lib/python3.11/site-packages/dns/reversename.py b/venv/lib/python3.11/site-packages/dns/reversename.py new file mode 100644 index 0000000..8236c71 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/reversename.py @@ -0,0 +1,105 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Reverse Map Names.""" + +import binascii + +import dns.ipv4 +import dns.ipv6 +import dns.name + +ipv4_reverse_domain = dns.name.from_text("in-addr.arpa.") +ipv6_reverse_domain = dns.name.from_text("ip6.arpa.") + + +def from_address( + text: str, + v4_origin: dns.name.Name = ipv4_reverse_domain, + v6_origin: dns.name.Name = ipv6_reverse_domain, +) -> dns.name.Name: + """Convert an IPv4 or IPv6 address in textual form into a Name object whose + value is the reverse-map domain name of the address. + + *text*, a ``str``, is an IPv4 or IPv6 address in textual form + (e.g. '127.0.0.1', '::1') + + *v4_origin*, a ``dns.name.Name`` to append to the labels corresponding to + the address if the address is an IPv4 address, instead of the default + (in-addr.arpa.) 
+
+    *v6_origin*, a ``dns.name.Name`` to append to the labels corresponding to
+    the address if the address is an IPv6 address, instead of the default
+    (ip6.arpa.)
+
+    Raises ``dns.exception.SyntaxError`` if the address is badly formed.
+
+    Returns a ``dns.name.Name``.
+    """
+
+    try:
+        v6 = dns.ipv6.inet_aton(text)
+        if dns.ipv6.is_mapped(v6):
+            parts = ["%d" % byte for byte in v6[12:]]
+            origin = v4_origin
+        else:
+            parts = [x for x in str(binascii.hexlify(v6).decode())]
+            origin = v6_origin
+    except Exception:
+        parts = ["%d" % byte for byte in dns.ipv4.inet_aton(text)]
+        origin = v4_origin
+    return dns.name.from_text(".".join(reversed(parts)), origin=origin)
+
+
+def to_address(
+    name: dns.name.Name,
+    v4_origin: dns.name.Name = ipv4_reverse_domain,
+    v6_origin: dns.name.Name = ipv6_reverse_domain,
+) -> str:
+    """Convert a reverse map domain name into textual address form.
+
+    *name*, a ``dns.name.Name``, an IPv4 or IPv6 address in reverse-map name
+    form.
+
+    *v4_origin*, a ``dns.name.Name`` representing the top-level domain for
+    IPv4 addresses, instead of the default (in-addr.arpa.)
+
+    *v6_origin*, a ``dns.name.Name`` representing the top-level domain for
+    IPv6 addresses, instead of the default (ip6.arpa.)
+
+    Raises ``dns.exception.SyntaxError`` if the name does not have a
+    reverse-map form.
+
+    Returns a ``str``.
+    """
+
+    if name.is_subdomain(v4_origin):
+        name = name.relativize(v4_origin)
+        text = b".".join(reversed(name.labels))
+        # run through inet_ntoa() to check syntax and make pretty.
+        return dns.ipv4.inet_ntoa(dns.ipv4.inet_aton(text))
+    elif name.is_subdomain(v6_origin):
+        name = name.relativize(v6_origin)
+        labels = list(reversed(name.labels))
+        parts = []
+        for i in range(0, len(labels), 4):
+            parts.append(b"".join(labels[i : i + 4]))
+        text = b":".join(parts)
+        # run through inet_ntoa() to check syntax and make pretty.
+        return dns.ipv6.inet_ntoa(dns.ipv6.inet_aton(text))
+    else:
+        raise dns.exception.SyntaxError("unknown reverse-map address family")
diff --git a/venv/lib/python3.11/site-packages/dns/rrset.py b/venv/lib/python3.11/site-packages/dns/rrset.py
new file mode 100644
index 0000000..6f39b10
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/dns/rrset.py
@@ -0,0 +1,285 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+# Copyright (C) 2003-2017 Nominum, Inc.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+"""DNS RRsets (an RRset is a named rdataset)"""
+
+from typing import Any, Collection, Dict, Optional, Union, cast
+
+import dns.name
+import dns.rdataclass
+import dns.rdataset
+import dns.renderer
+
+
+class RRset(dns.rdataset.Rdataset):
+    """A DNS RRset (named rdataset).
+
+    RRset inherits from Rdataset, and RRsets can be treated as
+    Rdatasets in most cases. There are, however, a few notable
+    exceptions.
RRsets have different to_wire() and to_text() method
+    arguments, reflecting the fact that RRsets always have an owner
+    name.
+    """
+
+    __slots__ = ["name", "deleting"]
+
+    def __init__(
+        self,
+        name: dns.name.Name,
+        rdclass: dns.rdataclass.RdataClass,
+        rdtype: dns.rdatatype.RdataType,
+        covers: dns.rdatatype.RdataType = dns.rdatatype.NONE,
+        deleting: Optional[dns.rdataclass.RdataClass] = None,
+    ):
+        """Create a new RRset."""
+
+        super().__init__(rdclass, rdtype, covers)
+        self.name = name
+        self.deleting = deleting
+
+    def _clone(self):
+        obj = super()._clone()
+        obj.name = self.name
+        obj.deleting = self.deleting
+        return obj
+
+    def __repr__(self):
+        if self.covers == 0:
+            ctext = ""
+        else:
+            ctext = "(" + dns.rdatatype.to_text(self.covers) + ")"
+        if self.deleting is not None:
+            dtext = " delete=" + dns.rdataclass.to_text(self.deleting)
+        else:
+            dtext = ""
+        return (
+            "<DNS "
+            + str(self.name)
+            + " "
+            + dns.rdataclass.to_text(self.rdclass)
+            + " "
+            + dns.rdatatype.to_text(self.rdtype)
+            + ctext
+            + dtext
+            + " RRset: "
+            + self._rdata_repr()
+            + ">"
+        )
+
+    def __str__(self):
+        return self.to_text()
+
+    def __eq__(self, other):
+        if isinstance(other, RRset):
+            if self.name != other.name:
+                return False
+        elif not isinstance(other, dns.rdataset.Rdataset):
+            return False
+        return super().__eq__(other)
+
+    def match(self, *args: Any, **kwargs: Any) -> bool:  # type: ignore[override]
+        """Does this rrset match the specified attributes?
+
+        Behaves as :py:func:`full_match()` if the first argument is a
+        ``dns.name.Name``, and as :py:func:`dns.rdataset.Rdataset.match()`
+        otherwise.
+
+        (This behavior fixes a design mistake where the signature of this
+        method became incompatible with that of its superclass. The fix
+        makes RRsets matchable as Rdatasets while preserving backwards
+        compatibility.)
+        """
+        if isinstance(args[0], dns.name.Name):
+            return self.full_match(*args, **kwargs)  # type: ignore[arg-type]
+        else:
+            return super().match(*args, **kwargs)  # type: ignore[arg-type]
+
+    def full_match(
+        self,
+        name: dns.name.Name,
+        rdclass: dns.rdataclass.RdataClass,
+        rdtype: dns.rdatatype.RdataType,
+        covers: dns.rdatatype.RdataType,
+        deleting: Optional[dns.rdataclass.RdataClass] = None,
+    ) -> bool:
+        """Returns ``True`` if this rrset matches the specified name, class,
+        type, covers, and deletion state.
+        """
+        if not super().match(rdclass, rdtype, covers):
+            return False
+        if self.name != name or self.deleting != deleting:
+            return False
+        return True
+
+    # pylint: disable=arguments-differ
+
+    def to_text(  # type: ignore[override]
+        self,
+        origin: Optional[dns.name.Name] = None,
+        relativize: bool = True,
+        **kw: Dict[str, Any],
+    ) -> str:
+        """Convert the RRset into DNS zone file format.
+
+        See ``dns.name.Name.choose_relativity`` for more information
+        on how *origin* and *relativize* determine the way names
+        are emitted.
+
+        Any additional keyword arguments are passed on to the rdata
+        ``to_text()`` method.
+
+        *origin*, a ``dns.name.Name`` or ``None``, the origin for relative
+        names.
+
+        *relativize*, a ``bool``. If ``True``, names will be relativized
+        to *origin*.
+        """
+
+        return super().to_text(
+            self.name, origin, relativize, self.deleting, **kw  # type: ignore
+        )
+
+    def to_wire(  # type: ignore[override]
+        self,
+        file: Any,
+        compress: Optional[dns.name.CompressType] = None,  # type: ignore
+        origin: Optional[dns.name.Name] = None,
+        **kw: Dict[str, Any],
+    ) -> int:
+        """Convert the RRset to wire format.
+
+        All keyword arguments are passed to ``dns.rdataset.to_wire()``; see
+        that function for details.
+
+        Returns an ``int``, the number of records emitted.
+ """ + + return super().to_wire( + self.name, file, compress, origin, self.deleting, **kw # type:ignore + ) + + # pylint: enable=arguments-differ + + def to_rdataset(self) -> dns.rdataset.Rdataset: + """Convert an RRset into an Rdataset. + + Returns a ``dns.rdataset.Rdataset``. + """ + return dns.rdataset.from_rdata_list(self.ttl, list(self)) + + +def from_text_list( + name: Union[dns.name.Name, str], + ttl: int, + rdclass: Union[dns.rdataclass.RdataClass, str], + rdtype: Union[dns.rdatatype.RdataType, str], + text_rdatas: Collection[str], + idna_codec: Optional[dns.name.IDNACodec] = None, + origin: Optional[dns.name.Name] = None, + relativize: bool = True, + relativize_to: Optional[dns.name.Name] = None, +) -> RRset: + """Create an RRset with the specified name, TTL, class, and type, and with + the specified list of rdatas in text format. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder to use; if ``None``, the default IDNA 2003 + encoder/decoder is used. + + *origin*, a ``dns.name.Name`` (or ``None``), the + origin to use for relative names. + + *relativize*, a ``bool``. If true, name will be relativized. + + *relativize_to*, a ``dns.name.Name`` (or ``None``), the origin to use + when relativizing names. If not set, the *origin* value will be used. + + Returns a ``dns.rrset.RRset`` object. + """ + + if isinstance(name, str): + name = dns.name.from_text(name, None, idna_codec=idna_codec) + rdclass = dns.rdataclass.RdataClass.make(rdclass) + rdtype = dns.rdatatype.RdataType.make(rdtype) + r = RRset(name, rdclass, rdtype) + r.update_ttl(ttl) + for t in text_rdatas: + rd = dns.rdata.from_text( + r.rdclass, r.rdtype, t, origin, relativize, relativize_to, idna_codec + ) + r.add(rd) + return r + + +def from_text( + name: Union[dns.name.Name, str], + ttl: int, + rdclass: Union[dns.rdataclass.RdataClass, str], + rdtype: Union[dns.rdatatype.RdataType, str], + *text_rdatas: Any, +) -> RRset: + """Create an RRset with the specified name, TTL, class, and type and with + the specified rdatas in text format. + + Returns a ``dns.rrset.RRset`` object. + """ + + return from_text_list( + name, ttl, rdclass, rdtype, cast(Collection[str], text_rdatas) + ) + + +def from_rdata_list( + name: Union[dns.name.Name, str], + ttl: int, + rdatas: Collection[dns.rdata.Rdata], + idna_codec: Optional[dns.name.IDNACodec] = None, +) -> RRset: + """Create an RRset with the specified name and TTL, and with + the specified list of rdata objects. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder to use; if ``None``, the default IDNA 2003 + encoder/decoder is used. + + Returns a ``dns.rrset.RRset`` object. + + """ + + if isinstance(name, str): + name = dns.name.from_text(name, None, idna_codec=idna_codec) + + if len(rdatas) == 0: + raise ValueError("rdata list must not be empty") + r = None + for rd in rdatas: + if r is None: + r = RRset(name, rd.rdclass, rd.rdtype) + r.update_ttl(ttl) + r.add(rd) + assert r is not None + return r + + +def from_rdata(name: Union[dns.name.Name, str], ttl: int, *rdatas: Any) -> RRset: + """Create an RRset with the specified name and TTL, and with + the specified rdata objects. + + Returns a ``dns.rrset.RRset`` object. 
+ """ + + return from_rdata_list(name, ttl, cast(Collection[dns.rdata.Rdata], rdatas)) diff --git a/venv/lib/python3.11/site-packages/dns/serial.py b/venv/lib/python3.11/site-packages/dns/serial.py new file mode 100644 index 0000000..3417299 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/serial.py @@ -0,0 +1,118 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +"""Serial Number Arthimetic from RFC 1982""" + + +class Serial: + def __init__(self, value: int, bits: int = 32): + self.value = value % 2**bits + self.bits = bits + + def __repr__(self): + return f"dns.serial.Serial({self.value}, {self.bits})" + + def __eq__(self, other): + if isinstance(other, int): + other = Serial(other, self.bits) + elif not isinstance(other, Serial) or other.bits != self.bits: + return NotImplemented + return self.value == other.value + + def __ne__(self, other): + if isinstance(other, int): + other = Serial(other, self.bits) + elif not isinstance(other, Serial) or other.bits != self.bits: + return NotImplemented + return self.value != other.value + + def __lt__(self, other): + if isinstance(other, int): + other = Serial(other, self.bits) + elif not isinstance(other, Serial) or other.bits != self.bits: + return NotImplemented + if self.value < other.value and other.value - self.value < 2 ** (self.bits - 1): + return True + elif self.value > other.value and self.value - other.value > 2 ** ( + self.bits - 1 + ): + return True + else: + return False + + def __le__(self, other): + return self == other or self < other + + def __gt__(self, other): + if isinstance(other, int): + other = Serial(other, self.bits) + elif not isinstance(other, Serial) or other.bits != self.bits: + return NotImplemented + if self.value < other.value and other.value - self.value > 2 ** (self.bits - 1): + return True + elif self.value > other.value and self.value - other.value < 2 ** ( + self.bits - 1 + ): + return True + else: + return False + + def __ge__(self, other): + return self == other or self > other + + def __add__(self, other): + v = self.value + if isinstance(other, Serial): + delta = other.value + elif isinstance(other, int): + delta = other + else: + raise ValueError + if abs(delta) > (2 ** (self.bits - 1) - 1): + raise ValueError + v += delta + v = v % 2**self.bits + return Serial(v, self.bits) + + def __iadd__(self, other): + v = self.value + if isinstance(other, Serial): + delta = other.value + elif isinstance(other, int): + delta = other + else: + raise ValueError + if abs(delta) > (2 ** (self.bits - 1) - 1): + raise ValueError + v += delta + v = v % 2**self.bits + self.value = v + return self + + def __sub__(self, other): + v = self.value + if isinstance(other, Serial): + delta = other.value + elif isinstance(other, int): + delta = other + else: + raise ValueError + if abs(delta) > (2 ** (self.bits - 1) - 1): + raise ValueError + v -= delta + v = v % 2**self.bits + return Serial(v, self.bits) + + def __isub__(self, other): + v = self.value + if isinstance(other, Serial): + delta = other.value + elif isinstance(other, int): + delta = other + else: + raise ValueError + if abs(delta) > (2 ** (self.bits - 1) - 1): + raise ValueError + v -= delta + v = v % 2**self.bits + self.value = v + return self diff --git a/venv/lib/python3.11/site-packages/dns/set.py b/venv/lib/python3.11/site-packages/dns/set.py new file mode 100644 index 0000000..ae8f0dd --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/set.py @@ -0,0 +1,308 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for 
text of ISC license
+
+# Copyright (C) 2003-2017 Nominum, Inc.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+import itertools
+
+
+class Set:
+    """A simple set class.
+
+    This class was originally used to deal with python not having a set class, and
+    originally the class used lists in its implementation. The ordered and indexable
+    nature of RRsets and Rdatasets is unfortunately widely used in dnspython
+    applications, so for backwards compatibility sets continue to be a custom class, now
+    based on an ordered dictionary.
+    """
+
+    __slots__ = ["items"]
+
+    def __init__(self, items=None):
+        """Initialize the set.
+
+        *items*, an iterable or ``None``, the initial set of items.
+        """
+
+        self.items = dict()
+        if items is not None:
+            for item in items:
+                # This is safe for how we use set, but if other code
+                # subclasses it could be a legitimate issue.
+                self.add(item)  # lgtm[py/init-calls-subclass]
+
+    def __repr__(self):
+        return f"dns.set.Set({repr(list(self.items.keys()))})"  # pragma: no cover
+
+    def add(self, item):
+        """Add an item to the set."""
+
+        if item not in self.items:
+            self.items[item] = None
+
+    def remove(self, item):
+        """Remove an item from the set."""
+
+        try:
+            del self.items[item]
+        except KeyError:
+            raise ValueError
+
+    def discard(self, item):
+        """Remove an item from the set if present."""
+
+        self.items.pop(item, None)
+
+    def pop(self):
+        """Remove an arbitrary item from the set."""
+        (k, _) = self.items.popitem()
+        return k
+
+    def _clone(self) -> "Set":
+        """Make a (shallow) copy of the set.
+
+        There is a 'clone protocol' that subclasses of this class
+        should use. To make a copy, first call your super's _clone()
+        method, and use the object returned as the new instance. Then
+        make shallow copies of the attributes defined in the subclass.
+
+        This protocol allows us to write the set algorithms that
+        return new instances (e.g. union) once, and keep using them in
+        subclasses.
+        """
+
+        if hasattr(self, "_clone_class"):
+            cls = self._clone_class  # type: ignore
+        else:
+            cls = self.__class__
+        obj = cls.__new__(cls)
+        obj.items = dict()
+        obj.items.update(self.items)
+        return obj
+
+    def __copy__(self):
+        """Make a (shallow) copy of the set."""
+
+        return self._clone()
+
+    def copy(self):
+        """Make a (shallow) copy of the set."""
+
+        return self._clone()
+
+    def union_update(self, other):
+        """Update the set, adding any elements from other which are not
+        already in the set.
+        """
+
+        if not isinstance(other, Set):
+            raise ValueError("other must be a Set instance")
+        if self is other:  # lgtm[py/comparison-using-is]
+            return
+        for item in other.items:
+            self.add(item)
+
+    def intersection_update(self, other):
+        """Update the set, removing any elements that are not
+        in both sets.
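+
+        For example (illustrative): if this set holds {1, 2, 3} and
+        *other* holds {2, 3, 4}, the set becomes {2, 3}.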
+ """ + + if not isinstance(other, Set): + raise ValueError("other must be a Set instance") + if self is other: # lgtm[py/comparison-using-is] + return + # we make a copy of the list so that we can remove items from + # the list without breaking the iterator. + for item in list(self.items): + if item not in other.items: + del self.items[item] + + def difference_update(self, other): + """Update the set, removing any elements from other which are in + the set. + """ + + if not isinstance(other, Set): + raise ValueError("other must be a Set instance") + if self is other: # lgtm[py/comparison-using-is] + self.items.clear() + else: + for item in other.items: + self.discard(item) + + def symmetric_difference_update(self, other): + """Update the set, retaining only elements unique to both sets.""" + + if not isinstance(other, Set): + raise ValueError("other must be a Set instance") + if self is other: # lgtm[py/comparison-using-is] + self.items.clear() + else: + overlap = self.intersection(other) + self.union_update(other) + self.difference_update(overlap) + + def union(self, other): + """Return a new set which is the union of ``self`` and ``other``. + + Returns the same Set type as this set. + """ + + obj = self._clone() + obj.union_update(other) + return obj + + def intersection(self, other): + """Return a new set which is the intersection of ``self`` and + ``other``. + + Returns the same Set type as this set. + """ + + obj = self._clone() + obj.intersection_update(other) + return obj + + def difference(self, other): + """Return a new set which ``self`` - ``other``, i.e. the items + in ``self`` which are not also in ``other``. + + Returns the same Set type as this set. + """ + + obj = self._clone() + obj.difference_update(other) + return obj + + def symmetric_difference(self, other): + """Return a new set which (``self`` - ``other``) | (``other`` + - ``self), ie: the items in either ``self`` or ``other`` which + are not contained in their intersection. + + Returns the same Set type as this set. + """ + + obj = self._clone() + obj.symmetric_difference_update(other) + return obj + + def __or__(self, other): + return self.union(other) + + def __and__(self, other): + return self.intersection(other) + + def __add__(self, other): + return self.union(other) + + def __sub__(self, other): + return self.difference(other) + + def __xor__(self, other): + return self.symmetric_difference(other) + + def __ior__(self, other): + self.union_update(other) + return self + + def __iand__(self, other): + self.intersection_update(other) + return self + + def __iadd__(self, other): + self.union_update(other) + return self + + def __isub__(self, other): + self.difference_update(other) + return self + + def __ixor__(self, other): + self.symmetric_difference_update(other) + return self + + def update(self, other): + """Update the set, adding any elements from other which are not + already in the set. + + *other*, the collection of items with which to update the set, which + may be any iterable type. 
+ """ + + for item in other: + self.add(item) + + def clear(self): + """Make the set empty.""" + self.items.clear() + + def __eq__(self, other): + return self.items == other.items + + def __ne__(self, other): + return not self.__eq__(other) + + def __len__(self): + return len(self.items) + + def __iter__(self): + return iter(self.items) + + def __getitem__(self, i): + if isinstance(i, slice): + return list(itertools.islice(self.items, i.start, i.stop, i.step)) + else: + return next(itertools.islice(self.items, i, i + 1)) + + def __delitem__(self, i): + if isinstance(i, slice): + for elt in list(self[i]): + del self.items[elt] + else: + del self.items[self[i]] + + def issubset(self, other): + """Is this set a subset of *other*? + + Returns a ``bool``. + """ + + if not isinstance(other, Set): + raise ValueError("other must be a Set instance") + for item in self.items: + if item not in other.items: + return False + return True + + def issuperset(self, other): + """Is this set a superset of *other*? + + Returns a ``bool``. + """ + + if not isinstance(other, Set): + raise ValueError("other must be a Set instance") + for item in other.items: + if item not in self.items: + return False + return True + + def isdisjoint(self, other): + if not isinstance(other, Set): + raise ValueError("other must be a Set instance") + for item in other.items: + if item in self.items: + return False + return True diff --git a/venv/lib/python3.11/site-packages/dns/tokenizer.py b/venv/lib/python3.11/site-packages/dns/tokenizer.py new file mode 100644 index 0000000..ab205bc --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/tokenizer.py @@ -0,0 +1,708 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Tokenize DNS zone file format""" + +import io +import sys +from typing import Any, List, Optional, Tuple + +import dns.exception +import dns.name +import dns.ttl + +_DELIMITERS = {" ", "\t", "\n", ";", "(", ")", '"'} +_QUOTING_DELIMITERS = {'"'} + +EOF = 0 +EOL = 1 +WHITESPACE = 2 +IDENTIFIER = 3 +QUOTED_STRING = 4 +COMMENT = 5 +DELIMITER = 6 + + +class UngetBufferFull(dns.exception.DNSException): + """An attempt was made to unget a token when the unget buffer was full.""" + + +class Token: + """A DNS zone file format token. + + ttype: The token type + value: The token value + has_escape: Does the token value contain escapes? 
+ """ + + def __init__( + self, + ttype: int, + value: Any = "", + has_escape: bool = False, + comment: Optional[str] = None, + ): + """Initialize a token instance.""" + + self.ttype = ttype + self.value = value + self.has_escape = has_escape + self.comment = comment + + def is_eof(self) -> bool: + return self.ttype == EOF + + def is_eol(self) -> bool: + return self.ttype == EOL + + def is_whitespace(self) -> bool: + return self.ttype == WHITESPACE + + def is_identifier(self) -> bool: + return self.ttype == IDENTIFIER + + def is_quoted_string(self) -> bool: + return self.ttype == QUOTED_STRING + + def is_comment(self) -> bool: + return self.ttype == COMMENT + + def is_delimiter(self) -> bool: # pragma: no cover (we don't return delimiters yet) + return self.ttype == DELIMITER + + def is_eol_or_eof(self) -> bool: + return self.ttype == EOL or self.ttype == EOF + + def __eq__(self, other): + if not isinstance(other, Token): + return False + return self.ttype == other.ttype and self.value == other.value + + def __ne__(self, other): + if not isinstance(other, Token): + return True + return self.ttype != other.ttype or self.value != other.value + + def __str__(self): + return '%d "%s"' % (self.ttype, self.value) + + def unescape(self) -> "Token": + if not self.has_escape: + return self + unescaped = "" + l = len(self.value) + i = 0 + while i < l: + c = self.value[i] + i += 1 + if c == "\\": + if i >= l: # pragma: no cover (can't happen via get()) + raise dns.exception.UnexpectedEnd + c = self.value[i] + i += 1 + if c.isdigit(): + if i >= l: + raise dns.exception.UnexpectedEnd + c2 = self.value[i] + i += 1 + if i >= l: + raise dns.exception.UnexpectedEnd + c3 = self.value[i] + i += 1 + if not (c2.isdigit() and c3.isdigit()): + raise dns.exception.SyntaxError + codepoint = int(c) * 100 + int(c2) * 10 + int(c3) + if codepoint > 255: + raise dns.exception.SyntaxError + c = chr(codepoint) + unescaped += c + return Token(self.ttype, unescaped) + + def unescape_to_bytes(self) -> "Token": + # We used to use unescape() for TXT-like records, but this + # caused problems as we'd process DNS escapes into Unicode code + # points instead of byte values, and then a to_text() of the + # processed data would not equal the original input. For + # example, \226 in the TXT record would have a to_text() of + # \195\162 because we applied UTF-8 encoding to Unicode code + # point 226. + # + # We now apply escapes while converting directly to bytes, + # avoiding this double encoding. + # + # This code also handles cases where the unicode input has + # non-ASCII code-points in it by converting it to UTF-8. TXT + # records aren't defined for Unicode, but this is the best we + # can do to preserve meaning. 
For example, + # + # foo\u200bbar + # + # (where \u200b is Unicode code point 0x200b) will be treated + # as if the input had been the UTF-8 encoding of that string, + # namely: + # + # foo\226\128\139bar + # + unescaped = b"" + l = len(self.value) + i = 0 + while i < l: + c = self.value[i] + i += 1 + if c == "\\": + if i >= l: # pragma: no cover (can't happen via get()) + raise dns.exception.UnexpectedEnd + c = self.value[i] + i += 1 + if c.isdigit(): + if i >= l: + raise dns.exception.UnexpectedEnd + c2 = self.value[i] + i += 1 + if i >= l: + raise dns.exception.UnexpectedEnd + c3 = self.value[i] + i += 1 + if not (c2.isdigit() and c3.isdigit()): + raise dns.exception.SyntaxError + codepoint = int(c) * 100 + int(c2) * 10 + int(c3) + if codepoint > 255: + raise dns.exception.SyntaxError + unescaped += b"%c" % (codepoint) + else: + # Note that as mentioned above, if c is a Unicode + # code point outside of the ASCII range, then this + # += is converting that code point to its UTF-8 + # encoding and appending multiple bytes to + # unescaped. + unescaped += c.encode() + else: + unescaped += c.encode() + return Token(self.ttype, bytes(unescaped)) + + +class Tokenizer: + """A DNS zone file format tokenizer. + + A token object is basically a (type, value) tuple. The valid + types are EOF, EOL, WHITESPACE, IDENTIFIER, QUOTED_STRING, + COMMENT, and DELIMITER. + + file: The file to tokenize + + ungotten_char: The most recently ungotten character, or None. + + ungotten_token: The most recently ungotten token, or None. + + multiline: The current multiline level. This value is increased + by one every time a '(' delimiter is read, and decreased by one every time + a ')' delimiter is read. + + quoting: This variable is true if the tokenizer is currently + reading a quoted string. + + eof: This variable is true if the tokenizer has encountered EOF. + + delimiters: The current delimiter dictionary. + + line_number: The current line number + + filename: A filename that will be returned by the where() method. + + idna_codec: A dns.name.IDNACodec, specifies the IDNA + encoder/decoder. If None, the default IDNA 2003 + encoder/decoder is used. + """ + + def __init__( + self, + f: Any = sys.stdin, + filename: Optional[str] = None, + idna_codec: Optional[dns.name.IDNACodec] = None, + ): + """Initialize a tokenizer instance. + + f: The file to tokenize. The default is sys.stdin. + This parameter may also be a string, in which case the tokenizer + will take its input from the contents of the string. + + filename: the name of the filename that the where() method + will return. + + idna_codec: A dns.name.IDNACodec, specifies the IDNA + encoder/decoder. If None, the default IDNA 2003 + encoder/decoder is used. 
+ """ + + if isinstance(f, str): + f = io.StringIO(f) + if filename is None: + filename = "" + elif isinstance(f, bytes): + f = io.StringIO(f.decode()) + if filename is None: + filename = "" + else: + if filename is None: + if f is sys.stdin: + filename = "" + else: + filename = "" + self.file = f + self.ungotten_char: Optional[str] = None + self.ungotten_token: Optional[Token] = None + self.multiline = 0 + self.quoting = False + self.eof = False + self.delimiters = _DELIMITERS + self.line_number = 1 + assert filename is not None + self.filename = filename + if idna_codec is None: + self.idna_codec: dns.name.IDNACodec = dns.name.IDNA_2003 + else: + self.idna_codec = idna_codec + + def _get_char(self) -> str: + """Read a character from input.""" + + if self.ungotten_char is None: + if self.eof: + c = "" + else: + c = self.file.read(1) + if c == "": + self.eof = True + elif c == "\n": + self.line_number += 1 + else: + c = self.ungotten_char + self.ungotten_char = None + return c + + def where(self) -> Tuple[str, int]: + """Return the current location in the input. + + Returns a (string, int) tuple. The first item is the filename of + the input, the second is the current line number. + """ + + return (self.filename, self.line_number) + + def _unget_char(self, c: str) -> None: + """Unget a character. + + The unget buffer for characters is only one character large; it is + an error to try to unget a character when the unget buffer is not + empty. + + c: the character to unget + raises UngetBufferFull: there is already an ungotten char + """ + + if self.ungotten_char is not None: + # this should never happen! + raise UngetBufferFull # pragma: no cover + self.ungotten_char = c + + def skip_whitespace(self) -> int: + """Consume input until a non-whitespace character is encountered. + + The non-whitespace character is then ungotten, and the number of + whitespace characters consumed is returned. + + If the tokenizer is in multiline mode, then newlines are whitespace. + + Returns the number of characters skipped. + """ + + skipped = 0 + while True: + c = self._get_char() + if c != " " and c != "\t": + if (c != "\n") or not self.multiline: + self._unget_char(c) + return skipped + skipped += 1 + + def get(self, want_leading: bool = False, want_comment: bool = False) -> Token: + """Get the next token. + + want_leading: If True, return a WHITESPACE token if the + first character read is whitespace. The default is False. + + want_comment: If True, return a COMMENT token if the + first token read is a comment. The default is False. + + Raises dns.exception.UnexpectedEnd: input ended prematurely + + Raises dns.exception.SyntaxError: input was badly formed + + Returns a Token. 
+ """ + + if self.ungotten_token is not None: + utoken = self.ungotten_token + self.ungotten_token = None + if utoken.is_whitespace(): + if want_leading: + return utoken + elif utoken.is_comment(): + if want_comment: + return utoken + else: + return utoken + skipped = self.skip_whitespace() + if want_leading and skipped > 0: + return Token(WHITESPACE, " ") + token = "" + ttype = IDENTIFIER + has_escape = False + while True: + c = self._get_char() + if c == "" or c in self.delimiters: + if c == "" and self.quoting: + raise dns.exception.UnexpectedEnd + if token == "" and ttype != QUOTED_STRING: + if c == "(": + self.multiline += 1 + self.skip_whitespace() + continue + elif c == ")": + if self.multiline <= 0: + raise dns.exception.SyntaxError + self.multiline -= 1 + self.skip_whitespace() + continue + elif c == '"': + if not self.quoting: + self.quoting = True + self.delimiters = _QUOTING_DELIMITERS + ttype = QUOTED_STRING + continue + else: + self.quoting = False + self.delimiters = _DELIMITERS + self.skip_whitespace() + continue + elif c == "\n": + return Token(EOL, "\n") + elif c == ";": + while 1: + c = self._get_char() + if c == "\n" or c == "": + break + token += c + if want_comment: + self._unget_char(c) + return Token(COMMENT, token) + elif c == "": + if self.multiline: + raise dns.exception.SyntaxError( + "unbalanced parentheses" + ) + return Token(EOF, comment=token) + elif self.multiline: + self.skip_whitespace() + token = "" + continue + else: + return Token(EOL, "\n", comment=token) + else: + # This code exists in case we ever want a + # delimiter to be returned. It never produces + # a token currently. + token = c + ttype = DELIMITER + else: + self._unget_char(c) + break + elif self.quoting and c == "\n": + raise dns.exception.SyntaxError("newline in quoted string") + elif c == "\\": + # + # It's an escape. Put it and the next character into + # the token; it will be checked later for goodness. + # + token += c + has_escape = True + c = self._get_char() + if c == "" or (c == "\n" and not self.quoting): + raise dns.exception.UnexpectedEnd + token += c + if token == "" and ttype != QUOTED_STRING: + if self.multiline: + raise dns.exception.SyntaxError("unbalanced parentheses") + ttype = EOF + return Token(ttype, token, has_escape) + + def unget(self, token: Token) -> None: + """Unget a token. + + The unget buffer for tokens is only one token large; it is + an error to try to unget a token when the unget buffer is not + empty. + + token: the token to unget + + Raises UngetBufferFull: there is already an ungotten token + """ + + if self.ungotten_token is not None: + raise UngetBufferFull + self.ungotten_token = token + + def next(self): + """Return the next item in an iteration. + + Returns a Token. + """ + + token = self.get() + if token.is_eof(): + raise StopIteration + return token + + __next__ = next + + def __iter__(self): + return self + + # Helpers + + def get_int(self, base: int = 10) -> int: + """Read the next token and interpret it as an unsigned integer. + + Raises dns.exception.SyntaxError if not an unsigned integer. + + Returns an int. + """ + + token = self.get().unescape() + if not token.is_identifier(): + raise dns.exception.SyntaxError("expecting an identifier") + if not token.value.isdigit(): + raise dns.exception.SyntaxError("expecting an integer") + return int(token.value, base) + + def get_uint8(self) -> int: + """Read the next token and interpret it as an 8-bit unsigned + integer. + + Raises dns.exception.SyntaxError if not an 8-bit unsigned integer. 
+ + Returns an int. + """ + + value = self.get_int() + if value < 0 or value > 255: + raise dns.exception.SyntaxError( + "%d is not an unsigned 8-bit integer" % value + ) + return value + + def get_uint16(self, base: int = 10) -> int: + """Read the next token and interpret it as a 16-bit unsigned + integer. + + Raises dns.exception.SyntaxError if not a 16-bit unsigned integer. + + Returns an int. + """ + + value = self.get_int(base=base) + if value < 0 or value > 65535: + if base == 8: + raise dns.exception.SyntaxError( + f"{value:o} is not an octal unsigned 16-bit integer" + ) + else: + raise dns.exception.SyntaxError( + "%d is not an unsigned 16-bit integer" % value + ) + return value + + def get_uint32(self, base: int = 10) -> int: + """Read the next token and interpret it as a 32-bit unsigned + integer. + + Raises dns.exception.SyntaxError if not a 32-bit unsigned integer. + + Returns an int. + """ + + value = self.get_int(base=base) + if value < 0 or value > 4294967295: + raise dns.exception.SyntaxError( + "%d is not an unsigned 32-bit integer" % value + ) + return value + + def get_uint48(self, base: int = 10) -> int: + """Read the next token and interpret it as a 48-bit unsigned + integer. + + Raises dns.exception.SyntaxError if not a 48-bit unsigned integer. + + Returns an int. + """ + + value = self.get_int(base=base) + if value < 0 or value > 281474976710655: + raise dns.exception.SyntaxError( + "%d is not an unsigned 48-bit integer" % value + ) + return value + + def get_string(self, max_length: Optional[int] = None) -> str: + """Read the next token and interpret it as a string. + + Raises dns.exception.SyntaxError if not a string. + Raises dns.exception.SyntaxError if token value length + exceeds max_length (if specified). + + Returns a string. + """ + + token = self.get().unescape() + if not (token.is_identifier() or token.is_quoted_string()): + raise dns.exception.SyntaxError("expecting a string") + if max_length and len(token.value) > max_length: + raise dns.exception.SyntaxError("string too long") + return token.value + + def get_identifier(self) -> str: + """Read the next token, which should be an identifier. + + Raises dns.exception.SyntaxError if not an identifier. + + Returns a string. + """ + + token = self.get().unescape() + if not token.is_identifier(): + raise dns.exception.SyntaxError("expecting an identifier") + return token.value + + def get_remaining(self, max_tokens: Optional[int] = None) -> List[Token]: + """Return the remaining tokens on the line, until an EOL or EOF is seen. + + max_tokens: If not None, stop after this number of tokens. + + Returns a list of tokens. + """ + + tokens = [] + while True: + token = self.get() + if token.is_eol_or_eof(): + self.unget(token) + break + tokens.append(token) + if len(tokens) == max_tokens: + break + return tokens + + def concatenate_remaining_identifiers(self, allow_empty: bool = False) -> str: + """Read the remaining tokens on the line, which should be identifiers. + + Raises dns.exception.SyntaxError if there are no remaining tokens, + unless `allow_empty=True` is given. + + Raises dns.exception.SyntaxError if a token is seen that is not an + identifier. + + Returns a string containing a concatenation of the remaining + identifiers. 
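+
+        For example (illustrative), a tokenizer whose remaining input is
+        ``ab cd ef`` would return ``"abcdef"``.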
+ """ + s = "" + while True: + token = self.get().unescape() + if token.is_eol_or_eof(): + self.unget(token) + break + if not token.is_identifier(): + raise dns.exception.SyntaxError + s += token.value + if not (allow_empty or s): + raise dns.exception.SyntaxError("expecting another identifier") + return s + + def as_name( + self, + token: Token, + origin: Optional[dns.name.Name] = None, + relativize: bool = False, + relativize_to: Optional[dns.name.Name] = None, + ) -> dns.name.Name: + """Try to interpret the token as a DNS name. + + Raises dns.exception.SyntaxError if not a name. + + Returns a dns.name.Name. + """ + if not token.is_identifier(): + raise dns.exception.SyntaxError("expecting an identifier") + name = dns.name.from_text(token.value, origin, self.idna_codec) + return name.choose_relativity(relativize_to or origin, relativize) + + def get_name( + self, + origin: Optional[dns.name.Name] = None, + relativize: bool = False, + relativize_to: Optional[dns.name.Name] = None, + ) -> dns.name.Name: + """Read the next token and interpret it as a DNS name. + + Raises dns.exception.SyntaxError if not a name. + + Returns a dns.name.Name. + """ + + token = self.get() + return self.as_name(token, origin, relativize, relativize_to) + + def get_eol_as_token(self) -> Token: + """Read the next token and raise an exception if it isn't EOL or + EOF. + + Returns a string. + """ + + token = self.get() + if not token.is_eol_or_eof(): + raise dns.exception.SyntaxError( + 'expected EOL or EOF, got %d "%s"' % (token.ttype, token.value) + ) + return token + + def get_eol(self) -> str: + return self.get_eol_as_token().value + + def get_ttl(self) -> int: + """Read the next token and interpret it as a DNS TTL. + + Raises dns.exception.SyntaxError or dns.ttl.BadTTL if not an + identifier or badly formed. + + Returns an int. + """ + + token = self.get().unescape() + if not token.is_identifier(): + raise dns.exception.SyntaxError("expecting an identifier") + return dns.ttl.from_text(token.value) diff --git a/venv/lib/python3.11/site-packages/dns/transaction.py b/venv/lib/python3.11/site-packages/dns/transaction.py new file mode 100644 index 0000000..aa2e116 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/transaction.py @@ -0,0 +1,649 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import collections +from typing import Any, Callable, Iterator, List, Optional, Tuple, Union + +import dns.exception +import dns.name +import dns.node +import dns.rdataclass +import dns.rdataset +import dns.rdatatype +import dns.rrset +import dns.serial +import dns.ttl + + +class TransactionManager: + def reader(self) -> "Transaction": + """Begin a read-only transaction.""" + raise NotImplementedError # pragma: no cover + + def writer(self, replacement: bool = False) -> "Transaction": + """Begin a writable transaction. + + *replacement*, a ``bool``. If `True`, the content of the + transaction completely replaces any prior content. If False, + the default, then the content of the transaction updates the + existing content. + """ + raise NotImplementedError # pragma: no cover + + def origin_information( + self, + ) -> Tuple[Optional[dns.name.Name], bool, Optional[dns.name.Name]]: + """Returns a tuple + + (absolute_origin, relativize, effective_origin) + + giving the absolute name of the default origin for any + relative domain names, the "effective origin", and whether + names should be relativized. 
The "effective origin" is the + absolute origin if relativize is False, and the empty name if + relativize is true. (The effective origin is provided even + though it can be computed from the absolute_origin and + relativize setting because it avoids a lot of code + duplication.) + + If the returned names are `None`, then no origin information is + available. + + This information is used by code working with transactions to + allow it to coordinate relativization. The transaction code + itself takes what it gets (i.e. does not change name + relativity). + + """ + raise NotImplementedError # pragma: no cover + + def get_class(self) -> dns.rdataclass.RdataClass: + """The class of the transaction manager.""" + raise NotImplementedError # pragma: no cover + + def from_wire_origin(self) -> Optional[dns.name.Name]: + """Origin to use in from_wire() calls.""" + (absolute_origin, relativize, _) = self.origin_information() + if relativize: + return absolute_origin + else: + return None + + +class DeleteNotExact(dns.exception.DNSException): + """Existing data did not match data specified by an exact delete.""" + + +class ReadOnly(dns.exception.DNSException): + """Tried to write to a read-only transaction.""" + + +class AlreadyEnded(dns.exception.DNSException): + """Tried to use an already-ended transaction.""" + + +def _ensure_immutable_rdataset(rdataset): + if rdataset is None or isinstance(rdataset, dns.rdataset.ImmutableRdataset): + return rdataset + return dns.rdataset.ImmutableRdataset(rdataset) + + +def _ensure_immutable_node(node): + if node is None or node.is_immutable(): + return node + return dns.node.ImmutableNode(node) + + +CheckPutRdatasetType = Callable[ + ["Transaction", dns.name.Name, dns.rdataset.Rdataset], None +] +CheckDeleteRdatasetType = Callable[ + ["Transaction", dns.name.Name, dns.rdatatype.RdataType, dns.rdatatype.RdataType], + None, +] +CheckDeleteNameType = Callable[["Transaction", dns.name.Name], None] + + +class Transaction: + def __init__( + self, + manager: TransactionManager, + replacement: bool = False, + read_only: bool = False, + ): + self.manager = manager + self.replacement = replacement + self.read_only = read_only + self._ended = False + self._check_put_rdataset: List[CheckPutRdatasetType] = [] + self._check_delete_rdataset: List[CheckDeleteRdatasetType] = [] + self._check_delete_name: List[CheckDeleteNameType] = [] + + # + # This is the high level API + # + # Note that we currently use non-immutable types in the return type signature to + # avoid covariance problems, e.g. if the caller has a List[Rdataset], mypy will be + # unhappy if we return an ImmutableRdataset. + + def get( + self, + name: Optional[Union[dns.name.Name, str]], + rdtype: Union[dns.rdatatype.RdataType, str], + covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, + ) -> dns.rdataset.Rdataset: + """Return the rdataset associated with *name*, *rdtype*, and *covers*, + or `None` if not found. + + Note that the returned rdataset is immutable. + """ + self._check_ended() + if isinstance(name, str): + name = dns.name.from_text(name, None) + rdtype = dns.rdatatype.RdataType.make(rdtype) + covers = dns.rdatatype.RdataType.make(covers) + rdataset = self._get_rdataset(name, rdtype, covers) + return _ensure_immutable_rdataset(rdataset) + + def get_node(self, name: dns.name.Name) -> Optional[dns.node.Node]: + """Return the node at *name*, if any. + + Returns an immutable node or ``None``. 
+ """ + return _ensure_immutable_node(self._get_node(name)) + + def _check_read_only(self) -> None: + if self.read_only: + raise ReadOnly + + def add(self, *args: Any) -> None: + """Add records. + + The arguments may be: + + - rrset + + - name, rdataset... + + - name, ttl, rdata... + """ + self._check_ended() + self._check_read_only() + self._add(False, args) + + def replace(self, *args: Any) -> None: + """Replace the existing rdataset at the name with the specified + rdataset, or add the specified rdataset if there was no existing + rdataset. + + The arguments may be: + + - rrset + + - name, rdataset... + + - name, ttl, rdata... + + Note that if you want to replace the entire node, you should do + a delete of the name followed by one or more calls to add() or + replace(). + """ + self._check_ended() + self._check_read_only() + self._add(True, args) + + def delete(self, *args: Any) -> None: + """Delete records. + + It is not an error if some of the records are not in the existing + set. + + The arguments may be: + + - rrset + + - name + + - name, rdatatype, [covers] + + - name, rdataset... + + - name, rdata... + """ + self._check_ended() + self._check_read_only() + self._delete(False, args) + + def delete_exact(self, *args: Any) -> None: + """Delete records. + + The arguments may be: + + - rrset + + - name + + - name, rdatatype, [covers] + + - name, rdataset... + + - name, rdata... + + Raises dns.transaction.DeleteNotExact if some of the records + are not in the existing set. + + """ + self._check_ended() + self._check_read_only() + self._delete(True, args) + + def name_exists(self, name: Union[dns.name.Name, str]) -> bool: + """Does the specified name exist?""" + self._check_ended() + if isinstance(name, str): + name = dns.name.from_text(name, None) + return self._name_exists(name) + + def update_serial( + self, + value: int = 1, + relative: bool = True, + name: dns.name.Name = dns.name.empty, + ) -> None: + """Update the serial number. + + *value*, an `int`, is an increment if *relative* is `True`, or the + actual value to set if *relative* is `False`. + + Raises `KeyError` if there is no SOA rdataset at *name*. + + Raises `ValueError` if *value* is negative or if the increment is + so large that it would cause the new serial to be less than the + prior value. + """ + self._check_ended() + if value < 0: + raise ValueError("negative update_serial() value") + if isinstance(name, str): + name = dns.name.from_text(name, None) + rdataset = self._get_rdataset(name, dns.rdatatype.SOA, dns.rdatatype.NONE) + if rdataset is None or len(rdataset) == 0: + raise KeyError + if relative: + serial = dns.serial.Serial(rdataset[0].serial) + value + else: + serial = dns.serial.Serial(value) + serial = serial.value # convert back to int + if serial == 0: + serial = 1 + rdata = rdataset[0].replace(serial=serial) + new_rdataset = dns.rdataset.from_rdata(rdataset.ttl, rdata) + self.replace(name, new_rdataset) + + def __iter__(self): + self._check_ended() + return self._iterate_rdatasets() + + def changed(self) -> bool: + """Has this transaction changed anything? + + For read-only transactions, the result is always `False`. + + For writable transactions, the result is `True` if at some time + during the life of the transaction, the content was changed. + """ + self._check_ended() + return self._changed() + + def commit(self) -> None: + """Commit the transaction. + + Normally transactions are used as context managers and commit + or rollback automatically, but it may be done explicitly if needed. 
+        A ``dns.transaction.AlreadyEnded`` exception will be raised if you try
+        to use a transaction after it has been committed or rolled back.
+
+        Raises an exception if the commit fails (in which case the transaction
+        is also rolled back).
+        """
+        self._end(True)
+
+    def rollback(self) -> None:
+        """Rollback the transaction.
+
+        Normally transactions are used as context managers and commit
+        or rollback automatically, but it may be done explicitly if needed.
+        A ``dns.transaction.AlreadyEnded`` exception will be raised if you try
+        to use a transaction after it has been committed or rolled back.
+
+        Rollback cannot otherwise fail.
+        """
+        self._end(False)
+
+    def check_put_rdataset(self, check: CheckPutRdatasetType) -> None:
+        """Call *check* before putting (storing) an rdataset.
+
+        The function is called with the transaction, the name, and the rdataset.
+
+        The check function may safely make non-mutating transaction method
+        calls, but behavior is undefined if mutating transaction methods are
+        called. The check function should raise an exception if it objects to
+        the put, and otherwise should return ``None``.
+        """
+        self._check_put_rdataset.append(check)
+
+    def check_delete_rdataset(self, check: CheckDeleteRdatasetType) -> None:
+        """Call *check* before deleting an rdataset.
+
+        The function is called with the transaction, the name, the rdatatype,
+        and the covered rdatatype.
+
+        The check function may safely make non-mutating transaction method
+        calls, but behavior is undefined if mutating transaction methods are
+        called. The check function should raise an exception if it objects to
+        the deletion, and otherwise should return ``None``.
+        """
+        self._check_delete_rdataset.append(check)
+
+    def check_delete_name(self, check: CheckDeleteNameType) -> None:
+        """Call *check* before deleting a name.
+
+        The function is called with the transaction and the name.
+
+        The check function may safely make non-mutating transaction method
+        calls, but behavior is undefined if mutating transaction methods are
+        called. The check function should raise an exception if it objects to
+        the deletion, and otherwise should return ``None``.
+        """
+        self._check_delete_name.append(check)
+
+    def iterate_rdatasets(
+        self,
+    ) -> Iterator[Tuple[dns.name.Name, dns.rdataset.Rdataset]]:
+        """Iterate all the rdatasets in the transaction, returning
+        (`dns.name.Name`, `dns.rdataset.Rdataset`) tuples.
+
+        Note that as is usual with python iterators, adding or removing items
+        while iterating will invalidate the iterator and may raise `RuntimeError`
+        or fail to iterate over all entries."""
+        self._check_ended()
+        return self._iterate_rdatasets()
+
+    def iterate_names(self) -> Iterator[dns.name.Name]:
+        """Iterate all the names in the transaction.
+ + Note that as is usual with python iterators, adding or removing names + while iterating will invalidate the iterator and may raise `RuntimeError` + or fail to iterate over all entries.""" + self._check_ended() + return self._iterate_names() + + # + # Helper methods + # + + def _raise_if_not_empty(self, method, args): + if len(args) != 0: + raise TypeError(f"extra parameters to {method}") + + def _rdataset_from_args(self, method, deleting, args): + try: + arg = args.popleft() + if isinstance(arg, dns.rrset.RRset): + rdataset = arg.to_rdataset() + elif isinstance(arg, dns.rdataset.Rdataset): + rdataset = arg + else: + if deleting: + ttl = 0 + else: + if isinstance(arg, int): + ttl = arg + if ttl > dns.ttl.MAX_TTL: + raise ValueError(f"{method}: TTL value too big") + else: + raise TypeError(f"{method}: expected a TTL") + arg = args.popleft() + if isinstance(arg, dns.rdata.Rdata): + rdataset = dns.rdataset.from_rdata(ttl, arg) + else: + raise TypeError(f"{method}: expected an Rdata") + return rdataset + except IndexError: + if deleting: + return None + else: + # reraise + raise TypeError(f"{method}: expected more arguments") + + def _add(self, replace, args): + try: + args = collections.deque(args) + if replace: + method = "replace()" + else: + method = "add()" + arg = args.popleft() + if isinstance(arg, str): + arg = dns.name.from_text(arg, None) + if isinstance(arg, dns.name.Name): + name = arg + rdataset = self._rdataset_from_args(method, False, args) + elif isinstance(arg, dns.rrset.RRset): + rrset = arg + name = rrset.name + # rrsets are also rdatasets, but they don't print the + # same and can't be stored in nodes, so convert. + rdataset = rrset.to_rdataset() + else: + raise TypeError( + f"{method} requires a name or RRset as the first argument" + ) + if rdataset.rdclass != self.manager.get_class(): + raise ValueError(f"{method} has objects of wrong RdataClass") + if rdataset.rdtype == dns.rdatatype.SOA: + (_, _, origin) = self._origin_information() + if name != origin: + raise ValueError(f"{method} has non-origin SOA") + self._raise_if_not_empty(method, args) + if not replace: + existing = self._get_rdataset(name, rdataset.rdtype, rdataset.covers) + if existing is not None: + if isinstance(existing, dns.rdataset.ImmutableRdataset): + trds = dns.rdataset.Rdataset( + existing.rdclass, existing.rdtype, existing.covers + ) + trds.update(existing) + existing = trds + rdataset = existing.union(rdataset) + self._checked_put_rdataset(name, rdataset) + except IndexError: + raise TypeError(f"not enough parameters to {method}") + + def _delete(self, exact, args): + try: + args = collections.deque(args) + if exact: + method = "delete_exact()" + else: + method = "delete()" + arg = args.popleft() + if isinstance(arg, str): + arg = dns.name.from_text(arg, None) + if isinstance(arg, dns.name.Name): + name = arg + if len(args) > 0 and ( + isinstance(args[0], int) or isinstance(args[0], str) + ): + # deleting by type and (optionally) covers + rdtype = dns.rdatatype.RdataType.make(args.popleft()) + if len(args) > 0: + covers = dns.rdatatype.RdataType.make(args.popleft()) + else: + covers = dns.rdatatype.NONE + self._raise_if_not_empty(method, args) + existing = self._get_rdataset(name, rdtype, covers) + if existing is None: + if exact: + raise DeleteNotExact(f"{method}: missing rdataset") + else: + self._checked_delete_rdataset(name, rdtype, covers) + return + else: + rdataset = self._rdataset_from_args(method, True, args) + elif isinstance(arg, dns.rrset.RRset): + rdataset = arg # rrsets are also 
rdatasets + name = rdataset.name + else: + raise TypeError( + f"{method} requires a name or RRset as the first argument" + ) + self._raise_if_not_empty(method, args) + if rdataset: + if rdataset.rdclass != self.manager.get_class(): + raise ValueError(f"{method} has objects of wrong RdataClass") + existing = self._get_rdataset(name, rdataset.rdtype, rdataset.covers) + if existing is not None: + if exact: + intersection = existing.intersection(rdataset) + if intersection != rdataset: + raise DeleteNotExact(f"{method}: missing rdatas") + rdataset = existing.difference(rdataset) + if len(rdataset) == 0: + self._checked_delete_rdataset( + name, rdataset.rdtype, rdataset.covers + ) + else: + self._checked_put_rdataset(name, rdataset) + elif exact: + raise DeleteNotExact(f"{method}: missing rdataset") + else: + if exact and not self._name_exists(name): + raise DeleteNotExact(f"{method}: name not known") + self._checked_delete_name(name) + except IndexError: + raise TypeError(f"not enough parameters to {method}") + + def _check_ended(self): + if self._ended: + raise AlreadyEnded + + def _end(self, commit): + self._check_ended() + try: + self._end_transaction(commit) + finally: + self._ended = True + + def _checked_put_rdataset(self, name, rdataset): + for check in self._check_put_rdataset: + check(self, name, rdataset) + self._put_rdataset(name, rdataset) + + def _checked_delete_rdataset(self, name, rdtype, covers): + for check in self._check_delete_rdataset: + check(self, name, rdtype, covers) + self._delete_rdataset(name, rdtype, covers) + + def _checked_delete_name(self, name): + for check in self._check_delete_name: + check(self, name) + self._delete_name(name) + + # + # Transactions are context managers. + # + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if not self._ended: + if exc_type is None: + self.commit() + else: + self.rollback() + return False + + # + # This is the low level API, which must be implemented by subclasses + # of Transaction. + # + + def _get_rdataset(self, name, rdtype, covers): + """Return the rdataset associated with *name*, *rdtype*, and *covers*, + or `None` if not found. + """ + raise NotImplementedError # pragma: no cover + + def _put_rdataset(self, name, rdataset): + """Store the rdataset.""" + raise NotImplementedError # pragma: no cover + + def _delete_name(self, name): + """Delete all data associated with *name*. + + It is not an error if the name does not exist. + """ + raise NotImplementedError # pragma: no cover + + def _delete_rdataset(self, name, rdtype, covers): + """Delete all data associated with *name*, *rdtype*, and *covers*. + + It is not an error if the rdataset does not exist. + """ + raise NotImplementedError # pragma: no cover + + def _name_exists(self, name): + """Does name exist? + + Returns a bool. + """ + raise NotImplementedError # pragma: no cover + + def _changed(self): + """Has this transaction changed anything?""" + raise NotImplementedError # pragma: no cover + + def _end_transaction(self, commit): + """End the transaction. + + *commit*, a bool. If ``True``, commit the transaction, otherwise + roll it back. + + If committing and the commit fails, then roll back and raise an + exception. + """ + raise NotImplementedError # pragma: no cover + + def _set_origin(self, origin): + """Set the origin. + + This method is called when reading a possibly relativized + source, and an origin setting operation occurs (e.g. $ORIGIN + in a zone file). 
+ """ + raise NotImplementedError # pragma: no cover + + def _iterate_rdatasets(self): + """Return an iterator that yields (name, rdataset) tuples.""" + raise NotImplementedError # pragma: no cover + + def _iterate_names(self): + """Return an iterator that yields a name.""" + raise NotImplementedError # pragma: no cover + + def _get_node(self, name): + """Return the node at *name*, if any. + + Returns a node or ``None``. + """ + raise NotImplementedError # pragma: no cover + + # + # Low-level API with a default implementation, in case a subclass needs + # to override. + # + + def _origin_information(self): + # This is only used by _add() + return self.manager.origin_information() diff --git a/venv/lib/python3.11/site-packages/dns/tsig.py b/venv/lib/python3.11/site-packages/dns/tsig.py new file mode 100644 index 0000000..780852e --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/tsig.py @@ -0,0 +1,352 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
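Aside (editor's sketch, not upstream code): the `_add()`/`_delete()` argument parsing above supports adding an RRset directly, a name plus an rdataset, or a name plus a TTL plus an rdata. A minimal illustration against a relativized `dns.zone.Zone`, assuming dnspython is importable; the zone contents are invented:

    import dns.rdata
    import dns.rdataset
    import dns.zone

    zone = dns.zone.from_text(
        "@ 300 IN SOA ns1 hostmaster 1 7200 900 1209600 86400\n"
        "@ 300 IN NS ns1\n",
        origin="example.",
    )

    # Transactions are context managers: a clean exit commits, an
    # exception rolls back (see __exit__ above).
    with zone.writer() as txn:
        # name + ttl + rdata, as parsed by _rdataset_from_args() above
        txn.add("www", 300, dns.rdata.from_text("IN", "A", "10.0.0.1"))
        # name + rdataset; merged with the existing rdataset via union()
        txn.add("www", dns.rdataset.from_text("IN", "A", 300, "10.0.0.2"))
        # delete by name and rdtype; delete_exact() would instead raise
        # DeleteNotExact if nothing matched
        txn.delete("www", "A")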
+ +"""DNS TSIG support.""" + +import base64 +import hashlib +import hmac +import struct + +import dns.exception +import dns.name +import dns.rcode +import dns.rdataclass + + +class BadTime(dns.exception.DNSException): + """The current time is not within the TSIG's validity time.""" + + +class BadSignature(dns.exception.DNSException): + """The TSIG signature fails to verify.""" + + +class BadKey(dns.exception.DNSException): + """The TSIG record owner name does not match the key.""" + + +class BadAlgorithm(dns.exception.DNSException): + """The TSIG algorithm does not match the key.""" + + +class PeerError(dns.exception.DNSException): + """Base class for all TSIG errors generated by the remote peer""" + + +class PeerBadKey(PeerError): + """The peer didn't know the key we used""" + + +class PeerBadSignature(PeerError): + """The peer didn't like the signature we sent""" + + +class PeerBadTime(PeerError): + """The peer didn't like the time we sent""" + + +class PeerBadTruncation(PeerError): + """The peer didn't like amount of truncation in the TSIG we sent""" + + +# TSIG Algorithms + +HMAC_MD5 = dns.name.from_text("HMAC-MD5.SIG-ALG.REG.INT") +HMAC_SHA1 = dns.name.from_text("hmac-sha1") +HMAC_SHA224 = dns.name.from_text("hmac-sha224") +HMAC_SHA256 = dns.name.from_text("hmac-sha256") +HMAC_SHA256_128 = dns.name.from_text("hmac-sha256-128") +HMAC_SHA384 = dns.name.from_text("hmac-sha384") +HMAC_SHA384_192 = dns.name.from_text("hmac-sha384-192") +HMAC_SHA512 = dns.name.from_text("hmac-sha512") +HMAC_SHA512_256 = dns.name.from_text("hmac-sha512-256") +GSS_TSIG = dns.name.from_text("gss-tsig") + +default_algorithm = HMAC_SHA256 + +mac_sizes = { + HMAC_SHA1: 20, + HMAC_SHA224: 28, + HMAC_SHA256: 32, + HMAC_SHA256_128: 16, + HMAC_SHA384: 48, + HMAC_SHA384_192: 24, + HMAC_SHA512: 64, + HMAC_SHA512_256: 32, + HMAC_MD5: 16, + GSS_TSIG: 128, # This is what we assume to be the worst case! +} + + +class GSSTSig: + """ + GSS-TSIG TSIG implementation. This uses the GSS-API context established + in the TKEY message handshake to sign messages using GSS-API message + integrity codes, per the RFC. + + In order to avoid a direct GSSAPI dependency, the keyring holds a ref + to the GSSAPI object required, rather than the key itself. 
+ """ + + def __init__(self, gssapi_context): + self.gssapi_context = gssapi_context + self.data = b"" + self.name = "gss-tsig" + + def update(self, data): + self.data += data + + def sign(self): + # defer to the GSSAPI function to sign + return self.gssapi_context.get_signature(self.data) + + def verify(self, expected): + try: + # defer to the GSSAPI function to verify + return self.gssapi_context.verify_signature(self.data, expected) + except Exception: + # note the usage of a bare exception + raise BadSignature + + +class GSSTSigAdapter: + def __init__(self, keyring): + self.keyring = keyring + + def __call__(self, message, keyname): + if keyname in self.keyring: + key = self.keyring[keyname] + if isinstance(key, Key) and key.algorithm == GSS_TSIG: + if message: + GSSTSigAdapter.parse_tkey_and_step(key, message, keyname) + return key + else: + return None + + @classmethod + def parse_tkey_and_step(cls, key, message, keyname): + # if the message is a TKEY type, absorb the key material + # into the context using step(); this is used to allow the + # client to complete the GSSAPI negotiation before attempting + # to verify the signed response to a TKEY message exchange + try: + rrset = message.find_rrset( + message.answer, keyname, dns.rdataclass.ANY, dns.rdatatype.TKEY + ) + if rrset: + token = rrset[0].key + gssapi_context = key.secret + return gssapi_context.step(token) + except KeyError: + pass + + +class HMACTSig: + """ + HMAC TSIG implementation. This uses the HMAC python module to handle the + sign/verify operations. + """ + + _hashes = { + HMAC_SHA1: hashlib.sha1, + HMAC_SHA224: hashlib.sha224, + HMAC_SHA256: hashlib.sha256, + HMAC_SHA256_128: (hashlib.sha256, 128), + HMAC_SHA384: hashlib.sha384, + HMAC_SHA384_192: (hashlib.sha384, 192), + HMAC_SHA512: hashlib.sha512, + HMAC_SHA512_256: (hashlib.sha512, 256), + HMAC_MD5: hashlib.md5, + } + + def __init__(self, key, algorithm): + try: + hashinfo = self._hashes[algorithm] + except KeyError: + raise NotImplementedError(f"TSIG algorithm {algorithm} is not supported") + + # create the HMAC context + if isinstance(hashinfo, tuple): + self.hmac_context = hmac.new(key, digestmod=hashinfo[0]) + self.size = hashinfo[1] + else: + self.hmac_context = hmac.new(key, digestmod=hashinfo) + self.size = None + self.name = self.hmac_context.name + if self.size: + self.name += f"-{self.size}" + + def update(self, data): + return self.hmac_context.update(data) + + def sign(self): + # defer to the HMAC digest() function for that digestmod + digest = self.hmac_context.digest() + if self.size: + digest = digest[: (self.size // 8)] + return digest + + def verify(self, expected): + # re-digest and compare the results + mac = self.sign() + if not hmac.compare_digest(mac, expected): + raise BadSignature + + +def _digest(wire, key, rdata, time=None, request_mac=None, ctx=None, multi=None): + """Return a context containing the TSIG rdata for the input parameters + @rtype: dns.tsig.HMACTSig or dns.tsig.GSSTSig object + @raises ValueError: I{other_data} is too long + @raises NotImplementedError: I{algorithm} is not supported + """ + + first = not (ctx and multi) + if first: + ctx = get_context(key) + if request_mac: + ctx.update(struct.pack("!H", len(request_mac))) + ctx.update(request_mac) + ctx.update(struct.pack("!H", rdata.original_id)) + ctx.update(wire[2:]) + if first: + ctx.update(key.name.to_digestable()) + ctx.update(struct.pack("!H", dns.rdataclass.ANY)) + ctx.update(struct.pack("!I", 0)) + if time is None: + time = rdata.time_signed + upper_time = 
(time >> 32) & 0xFFFF + lower_time = time & 0xFFFFFFFF + time_encoded = struct.pack("!HIH", upper_time, lower_time, rdata.fudge) + other_len = len(rdata.other) + if other_len > 65535: + raise ValueError("TSIG Other Data is > 65535 bytes") + if first: + ctx.update(key.algorithm.to_digestable() + time_encoded) + ctx.update(struct.pack("!HH", rdata.error, other_len) + rdata.other) + else: + ctx.update(time_encoded) + return ctx + + +def _maybe_start_digest(key, mac, multi): + """If this is the first message in a multi-message sequence, + start a new context. + @rtype: dns.tsig.HMACTSig or dns.tsig.GSSTSig object + """ + if multi: + ctx = get_context(key) + ctx.update(struct.pack("!H", len(mac))) + ctx.update(mac) + return ctx + else: + return None + + +def sign(wire, key, rdata, time=None, request_mac=None, ctx=None, multi=False): + """Return a (tsig_rdata, mac, ctx) tuple containing the HMAC TSIG rdata + for the input parameters, the HMAC MAC calculated by applying the + TSIG signature algorithm, and the TSIG digest context. + @rtype: (string, dns.tsig.HMACTSig or dns.tsig.GSSTSig object) + @raises ValueError: I{other_data} is too long + @raises NotImplementedError: I{algorithm} is not supported + """ + + ctx = _digest(wire, key, rdata, time, request_mac, ctx, multi) + mac = ctx.sign() + tsig = rdata.replace(time_signed=time, mac=mac) + + return (tsig, _maybe_start_digest(key, mac, multi)) + + +def validate( + wire, key, owner, rdata, now, request_mac, tsig_start, ctx=None, multi=False +): + """Validate the specified TSIG rdata against the other input parameters. + + @raises FormError: The TSIG is badly formed. + @raises BadTime: There is too much time skew between the client and the + server. + @raises BadSignature: The TSIG signature did not validate + @rtype: dns.tsig.HMACTSig or dns.tsig.GSSTSig object""" + + (adcount,) = struct.unpack("!H", wire[10:12]) + if adcount == 0: + raise dns.exception.FormError + adcount -= 1 + new_wire = wire[0:10] + struct.pack("!H", adcount) + wire[12:tsig_start] + if rdata.error != 0: + if rdata.error == dns.rcode.BADSIG: + raise PeerBadSignature + elif rdata.error == dns.rcode.BADKEY: + raise PeerBadKey + elif rdata.error == dns.rcode.BADTIME: + raise PeerBadTime + elif rdata.error == dns.rcode.BADTRUNC: + raise PeerBadTruncation + else: + raise PeerError("unknown TSIG error code %d" % rdata.error) + if abs(rdata.time_signed - now) > rdata.fudge: + raise BadTime + if key.name != owner: + raise BadKey + if key.algorithm != rdata.algorithm: + raise BadAlgorithm + ctx = _digest(new_wire, key, rdata, None, request_mac, ctx, multi) + ctx.verify(rdata.mac) + return _maybe_start_digest(key, rdata.mac, multi) + + +def get_context(key): + """Returns an HMAC context for the specified key. 
+ + @rtype: HMAC context + @raises NotImplementedError: I{algorithm} is not supported + """ + + if key.algorithm == GSS_TSIG: + return GSSTSig(key.secret) + else: + return HMACTSig(key.secret, key.algorithm) + + +class Key: + def __init__(self, name, secret, algorithm=default_algorithm): + if isinstance(name, str): + name = dns.name.from_text(name) + self.name = name + if isinstance(secret, str): + secret = base64.decodebytes(secret.encode()) + self.secret = secret + if isinstance(algorithm, str): + algorithm = dns.name.from_text(algorithm) + self.algorithm = algorithm + + def __eq__(self, other): + return ( + isinstance(other, Key) + and self.name == other.name + and self.secret == other.secret + and self.algorithm == other.algorithm + ) + + def __repr__(self): + r = f" Dict[dns.name.Name, dns.tsig.Key]: + """Convert a dictionary containing (textual DNS name, base64 secret) + pairs into a binary keyring which has (dns.name.Name, bytes) pairs, or + a dictionary containing (textual DNS name, (algorithm, base64 secret)) + pairs into a binary keyring which has (dns.name.Name, dns.tsig.Key) pairs. + @rtype: dict""" + + keyring = {} + for name, value in textring.items(): + kname = dns.name.from_text(name) + if isinstance(value, str): + keyring[kname] = dns.tsig.Key(kname, value).secret + else: + (algorithm, secret) = value + keyring[kname] = dns.tsig.Key(kname, secret, algorithm) + return keyring + + +def to_text(keyring: Dict[dns.name.Name, Any]) -> Dict[str, Any]: + """Convert a dictionary containing (dns.name.Name, dns.tsig.Key) pairs + into a text keyring which has (textual DNS name, (textual algorithm, + base64 secret)) pairs, or a dictionary containing (dns.name.Name, bytes) + pairs into a text keyring which has (textual DNS name, base64 secret) pairs. + @rtype: dict""" + + textring = {} + + def b64encode(secret): + return base64.encodebytes(secret).decode().rstrip() + + for name, key in keyring.items(): + tname = name.to_text() + if isinstance(key, bytes): + textring[tname] = b64encode(key) + else: + if isinstance(key.secret, bytes): + text_secret = b64encode(key.secret) + else: + text_secret = str(key.secret) + + textring[tname] = (key.algorithm.to_text(), text_secret) + return textring diff --git a/venv/lib/python3.11/site-packages/dns/ttl.py b/venv/lib/python3.11/site-packages/dns/ttl.py new file mode 100644 index 0000000..b9a99fe --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/ttl.py @@ -0,0 +1,92 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
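Aside (editor's sketch): putting the TSIG pieces above together, `Key` objects are usually built once and handed to higher-level APIs as a keyring, either directly or via the text-keyring helpers shown above. The key name and base64 secret here are invented placeholders:

    import dns.tsig
    import dns.tsigkeyring

    # The secret may be base64 text or raw bytes; the algorithm defaults
    # to dns.tsig.default_algorithm (HMAC-SHA256).
    key = dns.tsig.Key("keyname.example.", "MTIzNDU2Nzg5MGFiY2RlZg==")

    # An equivalent keyring built from text, mirroring from_text()/to_text():
    keyring = dns.tsigkeyring.from_text(
        {"keyname.example.": ("hmac-sha256", "MTIzNDU2Nzg5MGFiY2RlZg==")}
    )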
+ +"""DNS TTL conversion.""" + +from typing import Union + +import dns.exception + +# Technically TTLs are supposed to be between 0 and 2**31 - 1, with values +# greater than that interpreted as 0, but we do not impose this policy here +# as values > 2**31 - 1 occur in real world data. +# +# We leave it to applications to impose tighter bounds if desired. +MAX_TTL = 2**32 - 1 + + +class BadTTL(dns.exception.SyntaxError): + """DNS TTL value is not well-formed.""" + + +def from_text(text: str) -> int: + """Convert the text form of a TTL to an integer. + + The BIND 8 units syntax for TTLs (e.g. '1w6d4h3m10s') is supported. + + *text*, a ``str``, the textual TTL. + + Raises ``dns.ttl.BadTTL`` if the TTL is not well-formed. + + Returns an ``int``. + """ + + if text.isdigit(): + total = int(text) + elif len(text) == 0: + raise BadTTL + else: + total = 0 + current = 0 + need_digit = True + for c in text: + if c.isdigit(): + current *= 10 + current += int(c) + need_digit = False + else: + if need_digit: + raise BadTTL + c = c.lower() + if c == "w": + total += current * 604800 + elif c == "d": + total += current * 86400 + elif c == "h": + total += current * 3600 + elif c == "m": + total += current * 60 + elif c == "s": + total += current + else: + raise BadTTL(f"unknown unit '{c}'") + current = 0 + need_digit = True + if not current == 0: + raise BadTTL("trailing integer") + if total < 0 or total > MAX_TTL: + raise BadTTL("TTL should be between 0 and 2**32 - 1 (inclusive)") + return total + + +def make(value: Union[int, str]) -> int: + if isinstance(value, int): + return value + elif isinstance(value, str): + return dns.ttl.from_text(value) + else: + raise ValueError("cannot convert value to TTL") diff --git a/venv/lib/python3.11/site-packages/dns/update.py b/venv/lib/python3.11/site-packages/dns/update.py new file mode 100644 index 0000000..bf1157a --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/update.py @@ -0,0 +1,386 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
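Aside (editor's sketch): the BIND 8 unit syntax handled by `dns.ttl.from_text()` above multiplies each count into seconds, so for example:

    import dns.ttl

    # 1 week + 6 days + 4 hours + 3 minutes + 10 seconds
    assert dns.ttl.from_text("1w6d4h3m10s") == 604800 + 6 * 86400 + 4 * 3600 + 3 * 60 + 10
    # make() accepts either an int or the textual form
    assert dns.ttl.make("300") == 300 and dns.ttl.make(300) == 300
    # Malformed input such as "12x" or "" raises dns.ttl.BadTTL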
+ +"""DNS Dynamic Update Support""" + +from typing import Any, List, Optional, Union + +import dns.message +import dns.name +import dns.opcode +import dns.rdata +import dns.rdataclass +import dns.rdataset +import dns.rdatatype +import dns.tsig + + +class UpdateSection(dns.enum.IntEnum): + """Update sections""" + + ZONE = 0 + PREREQ = 1 + UPDATE = 2 + ADDITIONAL = 3 + + @classmethod + def _maximum(cls): + return 3 + + +class UpdateMessage(dns.message.Message): # lgtm[py/missing-equals] + # ignore the mypy error here as we mean to use a different enum + _section_enum = UpdateSection # type: ignore + + def __init__( + self, + zone: Optional[Union[dns.name.Name, str]] = None, + rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, + keyring: Optional[Any] = None, + keyname: Optional[dns.name.Name] = None, + keyalgorithm: Union[dns.name.Name, str] = dns.tsig.default_algorithm, + id: Optional[int] = None, + ): + """Initialize a new DNS Update object. + + See the documentation of the Message class for a complete + description of the keyring dictionary. + + *zone*, a ``dns.name.Name``, ``str``, or ``None``, the zone + which is being updated. ``None`` should only be used by dnspython's + message constructors, as a zone is required for the convenience + methods like ``add()``, ``replace()``, etc. + + *rdclass*, an ``int`` or ``str``, the class of the zone. + + The *keyring*, *keyname*, and *keyalgorithm* parameters are passed to + ``use_tsig()``; see its documentation for details. + """ + super().__init__(id=id) + self.flags |= dns.opcode.to_flags(dns.opcode.UPDATE) + if isinstance(zone, str): + zone = dns.name.from_text(zone) + self.origin = zone + rdclass = dns.rdataclass.RdataClass.make(rdclass) + self.zone_rdclass = rdclass + if self.origin: + self.find_rrset( + self.zone, + self.origin, + rdclass, + dns.rdatatype.SOA, + create=True, + force_unique=True, + ) + if keyring is not None: + self.use_tsig(keyring, keyname, algorithm=keyalgorithm) + + @property + def zone(self) -> List[dns.rrset.RRset]: + """The zone section.""" + return self.sections[0] + + @zone.setter + def zone(self, v): + self.sections[0] = v + + @property + def prerequisite(self) -> List[dns.rrset.RRset]: + """The prerequisite section.""" + return self.sections[1] + + @prerequisite.setter + def prerequisite(self, v): + self.sections[1] = v + + @property + def update(self) -> List[dns.rrset.RRset]: + """The update section.""" + return self.sections[2] + + @update.setter + def update(self, v): + self.sections[2] = v + + def _add_rr(self, name, ttl, rd, deleting=None, section=None): + """Add a single RR to the update section.""" + + if section is None: + section = self.update + covers = rd.covers() + rrset = self.find_rrset( + section, name, self.zone_rdclass, rd.rdtype, covers, deleting, True, True + ) + rrset.add(rd, ttl) + + def _add(self, replace, section, name, *args): + """Add records. + + *replace* is the replacement mode. If ``False``, + RRs are added to an existing RRset; if ``True``, the RRset + is replaced with the specified contents. The second + argument is the section to add to. The third argument + is always a name. The other arguments can be: + + - rdataset... + + - ttl, rdata... + + - ttl, rdtype, string... 
+ """ + + if isinstance(name, str): + name = dns.name.from_text(name, None) + if isinstance(args[0], dns.rdataset.Rdataset): + for rds in args: + if replace: + self.delete(name, rds.rdtype) + for rd in rds: + self._add_rr(name, rds.ttl, rd, section=section) + else: + args = list(args) + ttl = int(args.pop(0)) + if isinstance(args[0], dns.rdata.Rdata): + if replace: + self.delete(name, args[0].rdtype) + for rd in args: + self._add_rr(name, ttl, rd, section=section) + else: + rdtype = dns.rdatatype.RdataType.make(args.pop(0)) + if replace: + self.delete(name, rdtype) + for s in args: + rd = dns.rdata.from_text(self.zone_rdclass, rdtype, s, self.origin) + self._add_rr(name, ttl, rd, section=section) + + def add(self, name: Union[dns.name.Name, str], *args: Any) -> None: + """Add records. + + The first argument is always a name. The other + arguments can be: + + - rdataset... + + - ttl, rdata... + + - ttl, rdtype, string... + """ + + self._add(False, self.update, name, *args) + + def delete(self, name: Union[dns.name.Name, str], *args: Any) -> None: + """Delete records. + + The first argument is always a name. The other + arguments can be: + + - *empty* + + - rdataset... + + - rdata... + + - rdtype, [string...] + """ + + if isinstance(name, str): + name = dns.name.from_text(name, None) + if len(args) == 0: + self.find_rrset( + self.update, + name, + dns.rdataclass.ANY, + dns.rdatatype.ANY, + dns.rdatatype.NONE, + dns.rdataclass.ANY, + True, + True, + ) + elif isinstance(args[0], dns.rdataset.Rdataset): + for rds in args: + for rd in rds: + self._add_rr(name, 0, rd, dns.rdataclass.NONE) + else: + largs = list(args) + if isinstance(largs[0], dns.rdata.Rdata): + for rd in largs: + self._add_rr(name, 0, rd, dns.rdataclass.NONE) + else: + rdtype = dns.rdatatype.RdataType.make(largs.pop(0)) + if len(largs) == 0: + self.find_rrset( + self.update, + name, + self.zone_rdclass, + rdtype, + dns.rdatatype.NONE, + dns.rdataclass.ANY, + True, + True, + ) + else: + for s in largs: + rd = dns.rdata.from_text( + self.zone_rdclass, + rdtype, + s, # type: ignore[arg-type] + self.origin, + ) + self._add_rr(name, 0, rd, dns.rdataclass.NONE) + + def replace(self, name: Union[dns.name.Name, str], *args: Any) -> None: + """Replace records. + + The first argument is always a name. The other + arguments can be: + + - rdataset... + + - ttl, rdata... + + - ttl, rdtype, string... + + Note that if you want to replace the entire node, you should do + a delete of the name followed by one or more calls to add. + """ + + self._add(True, self.update, name, *args) + + def present(self, name: Union[dns.name.Name, str], *args: Any) -> None: + """Require that an owner name (and optionally an rdata type, + or specific rdataset) exists as a prerequisite to the + execution of the update. + + The first argument is always a name. + The other arguments can be: + + - rdataset... + + - rdata... + + - rdtype, string... 
+ """ + + if isinstance(name, str): + name = dns.name.from_text(name, None) + if len(args) == 0: + self.find_rrset( + self.prerequisite, + name, + dns.rdataclass.ANY, + dns.rdatatype.ANY, + dns.rdatatype.NONE, + None, + True, + True, + ) + elif ( + isinstance(args[0], dns.rdataset.Rdataset) + or isinstance(args[0], dns.rdata.Rdata) + or len(args) > 1 + ): + if not isinstance(args[0], dns.rdataset.Rdataset): + # Add a 0 TTL + largs = list(args) + largs.insert(0, 0) # type: ignore[arg-type] + self._add(False, self.prerequisite, name, *largs) + else: + self._add(False, self.prerequisite, name, *args) + else: + rdtype = dns.rdatatype.RdataType.make(args[0]) + self.find_rrset( + self.prerequisite, + name, + dns.rdataclass.ANY, + rdtype, + dns.rdatatype.NONE, + None, + True, + True, + ) + + def absent( + self, + name: Union[dns.name.Name, str], + rdtype: Optional[Union[dns.rdatatype.RdataType, str]] = None, + ) -> None: + """Require that an owner name (and optionally an rdata type) does + not exist as a prerequisite to the execution of the update.""" + + if isinstance(name, str): + name = dns.name.from_text(name, None) + if rdtype is None: + self.find_rrset( + self.prerequisite, + name, + dns.rdataclass.NONE, + dns.rdatatype.ANY, + dns.rdatatype.NONE, + None, + True, + True, + ) + else: + rdtype = dns.rdatatype.RdataType.make(rdtype) + self.find_rrset( + self.prerequisite, + name, + dns.rdataclass.NONE, + rdtype, + dns.rdatatype.NONE, + None, + True, + True, + ) + + def _get_one_rr_per_rrset(self, value): + # Updates are always one_rr_per_rrset + return True + + def _parse_rr_header(self, section, name, rdclass, rdtype): + deleting = None + empty = False + if section == UpdateSection.ZONE: + if ( + dns.rdataclass.is_metaclass(rdclass) + or rdtype != dns.rdatatype.SOA + or self.zone + ): + raise dns.exception.FormError + else: + if not self.zone: + raise dns.exception.FormError + if rdclass in (dns.rdataclass.ANY, dns.rdataclass.NONE): + deleting = rdclass + rdclass = self.zone[0].rdclass + empty = ( + deleting == dns.rdataclass.ANY or section == UpdateSection.PREREQ + ) + return (rdclass, rdtype, deleting, empty) + + +# backwards compatibility +Update = UpdateMessage + +### BEGIN generated UpdateSection constants + +ZONE = UpdateSection.ZONE +PREREQ = UpdateSection.PREREQ +UPDATE = UpdateSection.UPDATE +ADDITIONAL = UpdateSection.ADDITIONAL + +### END generated UpdateSection constants diff --git a/venv/lib/python3.11/site-packages/dns/version.py b/venv/lib/python3.11/site-packages/dns/version.py new file mode 100644 index 0000000..9ed2ce1 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/version.py @@ -0,0 +1,58 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
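Aside (editor's sketch): a typical dynamic update built with the `UpdateMessage` class above, combining a prerequisite with update operations; the zone and server address are placeholders:

    import dns.query
    import dns.update

    update = dns.update.UpdateMessage("example.")
    update.absent("www")                     # prerequisite: name must not exist
    update.add("www", 300, "A", "10.0.0.1")  # ttl, rdtype, text rdata
    update.delete("old")                     # remove every RRset at "old"

    # One round trip; checking the response rcode is up to the caller.
    # "10.0.0.53" is an invented server address.
    response = dns.query.udp(update, "10.0.0.53")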
+ +"""dnspython release version information.""" + +#: MAJOR +MAJOR = 2 +#: MINOR +MINOR = 7 +#: MICRO +MICRO = 0 +#: RELEASELEVEL +RELEASELEVEL = 0x0F +#: SERIAL +SERIAL = 0 + +if RELEASELEVEL == 0x0F: # pragma: no cover lgtm[py/unreachable-statement] + #: version + version = "%d.%d.%d" % (MAJOR, MINOR, MICRO) # lgtm[py/unreachable-statement] +elif RELEASELEVEL == 0x00: # pragma: no cover lgtm[py/unreachable-statement] + version = "%d.%d.%ddev%d" % ( + MAJOR, + MINOR, + MICRO, + SERIAL, + ) # lgtm[py/unreachable-statement] +elif RELEASELEVEL == 0x0C: # pragma: no cover lgtm[py/unreachable-statement] + version = "%d.%d.%drc%d" % ( + MAJOR, + MINOR, + MICRO, + SERIAL, + ) # lgtm[py/unreachable-statement] +else: # pragma: no cover lgtm[py/unreachable-statement] + version = "%d.%d.%d%x%d" % ( + MAJOR, + MINOR, + MICRO, + RELEASELEVEL, + SERIAL, + ) # lgtm[py/unreachable-statement] + +#: hexversion +hexversion = MAJOR << 24 | MINOR << 16 | MICRO << 8 | RELEASELEVEL << 4 | SERIAL diff --git a/venv/lib/python3.11/site-packages/dns/versioned.py b/venv/lib/python3.11/site-packages/dns/versioned.py new file mode 100644 index 0000000..fd78e67 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/versioned.py @@ -0,0 +1,318 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +"""DNS Versioned Zones.""" + +import collections +import threading +from typing import Callable, Deque, Optional, Set, Union + +import dns.exception +import dns.immutable +import dns.name +import dns.node +import dns.rdataclass +import dns.rdataset +import dns.rdatatype +import dns.rdtypes.ANY.SOA +import dns.zone + + +class UseTransaction(dns.exception.DNSException): + """To alter a versioned zone, use a transaction.""" + + +# Backwards compatibility +Node = dns.zone.VersionedNode +ImmutableNode = dns.zone.ImmutableVersionedNode +Version = dns.zone.Version +WritableVersion = dns.zone.WritableVersion +ImmutableVersion = dns.zone.ImmutableVersion +Transaction = dns.zone.Transaction + + +class Zone(dns.zone.Zone): # lgtm[py/missing-equals] + __slots__ = [ + "_versions", + "_versions_lock", + "_write_txn", + "_write_waiters", + "_write_event", + "_pruning_policy", + "_readers", + ] + + node_factory = Node + + def __init__( + self, + origin: Optional[Union[dns.name.Name, str]], + rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, + relativize: bool = True, + pruning_policy: Optional[Callable[["Zone", Version], Optional[bool]]] = None, + ): + """Initialize a versioned zone object. + + *origin* is the origin of the zone. It may be a ``dns.name.Name``, + a ``str``, or ``None``. If ``None``, then the zone's origin will + be set by the first ``$ORIGIN`` line in a zone file. + + *rdclass*, an ``int``, the zone's rdata class; the default is class IN. + + *relativize*, a ``bool``, determine's whether domain names are + relativized to the zone's origin. The default is ``True``. + + *pruning policy*, a function taking a ``Zone`` and a ``Version`` and returning + a ``bool``, or ``None``. Should the version be pruned? If ``None``, + the default policy, which retains one version is used. 
+ """ + super().__init__(origin, rdclass, relativize) + self._versions: Deque[Version] = collections.deque() + self._version_lock = threading.Lock() + if pruning_policy is None: + self._pruning_policy = self._default_pruning_policy + else: + self._pruning_policy = pruning_policy + self._write_txn: Optional[Transaction] = None + self._write_event: Optional[threading.Event] = None + self._write_waiters: Deque[threading.Event] = collections.deque() + self._readers: Set[Transaction] = set() + self._commit_version_unlocked( + None, WritableVersion(self, replacement=True), origin + ) + + def reader( + self, id: Optional[int] = None, serial: Optional[int] = None + ) -> Transaction: # pylint: disable=arguments-differ + if id is not None and serial is not None: + raise ValueError("cannot specify both id and serial") + with self._version_lock: + if id is not None: + version = None + for v in reversed(self._versions): + if v.id == id: + version = v + break + if version is None: + raise KeyError("version not found") + elif serial is not None: + if self.relativize: + oname = dns.name.empty + else: + assert self.origin is not None + oname = self.origin + version = None + for v in reversed(self._versions): + n = v.nodes.get(oname) + if n: + rds = n.get_rdataset(self.rdclass, dns.rdatatype.SOA) + if rds and rds[0].serial == serial: + version = v + break + if version is None: + raise KeyError("serial not found") + else: + version = self._versions[-1] + txn = Transaction(self, False, version) + self._readers.add(txn) + return txn + + def writer(self, replacement: bool = False) -> Transaction: + event = None + while True: + with self._version_lock: + # Checking event == self._write_event ensures that either + # no one was waiting before we got lucky and found no write + # txn, or we were the one who was waiting and got woken up. + # This prevents "taking cuts" when creating a write txn. + if self._write_txn is None and event == self._write_event: + # Creating the transaction defers version setup + # (i.e. copying the nodes dictionary) until we + # give up the lock, so that we hold the lock as + # short a time as possible. This is why we call + # _setup_version() below. + self._write_txn = Transaction( + self, replacement, make_immutable=True + ) + # give up our exclusive right to make a Transaction + self._write_event = None + break + # Someone else is writing already, so we will have to + # wait, but we want to do the actual wait outside the + # lock. + event = threading.Event() + self._write_waiters.append(event) + # wait (note we gave up the lock!) + # + # We only wake one sleeper at a time, so it's important + # that no event waiter can exit this method (e.g. via + # cancellation) without returning a transaction or waking + # someone else up. + # + # This is not a problem with Threading module threads as + # they cannot be canceled, but could be an issue with trio + # tasks when we do the async version of writer(). + # I.e. we'd need to do something like: + # + # try: + # event.wait() + # except trio.Cancelled: + # with self._version_lock: + # self._maybe_wakeup_one_waiter_unlocked() + # raise + # + event.wait() + # Do the deferred version setup. 
+ self._write_txn._setup_version() + return self._write_txn + + def _maybe_wakeup_one_waiter_unlocked(self): + if len(self._write_waiters) > 0: + self._write_event = self._write_waiters.popleft() + self._write_event.set() + + # pylint: disable=unused-argument + def _default_pruning_policy(self, zone, version): + return True + + # pylint: enable=unused-argument + + def _prune_versions_unlocked(self): + assert len(self._versions) > 0 + # Don't ever prune a version greater than or equal to one that + # a reader has open. This pins versions in memory while the + # reader is open, and importantly lets the reader open a txn on + # a successor version (e.g. if generating an IXFR). + # + # Note our definition of least_kept also ensures we do not try to + # delete the greatest version. + if len(self._readers) > 0: + least_kept = min(txn.version.id for txn in self._readers) + else: + least_kept = self._versions[-1].id + while self._versions[0].id < least_kept and self._pruning_policy( + self, self._versions[0] + ): + self._versions.popleft() + + def set_max_versions(self, max_versions: Optional[int]) -> None: + """Set a pruning policy that retains up to the specified number + of versions + """ + if max_versions is not None and max_versions < 1: + raise ValueError("max versions must be at least 1") + if max_versions is None: + + def policy(zone, _): # pylint: disable=unused-argument + return False + + else: + + def policy(zone, _): + return len(zone._versions) > max_versions + + self.set_pruning_policy(policy) + + def set_pruning_policy( + self, policy: Optional[Callable[["Zone", Version], Optional[bool]]] + ) -> None: + """Set the pruning policy for the zone. + + The *policy* function takes a `Version` and returns `True` if + the version should be pruned, and `False` otherwise. `None` + may also be specified for policy, in which case the default policy + is used. + + Pruning checking proceeds from the least version and the first + time the function returns `False`, the checking stops. I.e. the + retained versions are always a consecutive sequence. + """ + if policy is None: + policy = self._default_pruning_policy + with self._version_lock: + self._pruning_policy = policy + self._prune_versions_unlocked() + + def _end_read(self, txn): + with self._version_lock: + self._readers.remove(txn) + self._prune_versions_unlocked() + + def _end_write_unlocked(self, txn): + assert self._write_txn == txn + self._write_txn = None + self._maybe_wakeup_one_waiter_unlocked() + + def _end_write(self, txn): + with self._version_lock: + self._end_write_unlocked(txn) + + def _commit_version_unlocked(self, txn, version, origin): + self._versions.append(version) + self._prune_versions_unlocked() + self.nodes = version.nodes + if self.origin is None: + self.origin = origin + # txn can be None in __init__ when we make the empty version. 
+ if txn is not None: + self._end_write_unlocked(txn) + + def _commit_version(self, txn, version, origin): + with self._version_lock: + self._commit_version_unlocked(txn, version, origin) + + def _get_next_version_id(self): + if len(self._versions) > 0: + id = self._versions[-1].id + 1 + else: + id = 1 + return id + + def find_node( + self, name: Union[dns.name.Name, str], create: bool = False + ) -> dns.node.Node: + if create: + raise UseTransaction + return super().find_node(name) + + def delete_node(self, name: Union[dns.name.Name, str]) -> None: + raise UseTransaction + + def find_rdataset( + self, + name: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str], + covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, + create: bool = False, + ) -> dns.rdataset.Rdataset: + if create: + raise UseTransaction + rdataset = super().find_rdataset(name, rdtype, covers) + return dns.rdataset.ImmutableRdataset(rdataset) + + def get_rdataset( + self, + name: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str], + covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, + create: bool = False, + ) -> Optional[dns.rdataset.Rdataset]: + if create: + raise UseTransaction + rdataset = super().get_rdataset(name, rdtype, covers) + if rdataset is not None: + return dns.rdataset.ImmutableRdataset(rdataset) + else: + return None + + def delete_rdataset( + self, + name: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str], + covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, + ) -> None: + raise UseTransaction + + def replace_rdataset( + self, name: Union[dns.name.Name, str], replacement: dns.rdataset.Rdataset + ) -> None: + raise UseTransaction diff --git a/venv/lib/python3.11/site-packages/dns/win32util.py b/venv/lib/python3.11/site-packages/dns/win32util.py new file mode 100644 index 0000000..9ed3f11 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/win32util.py @@ -0,0 +1,242 @@ +import sys + +import dns._features + +if sys.platform == "win32": + from typing import Any + + import dns.name + + _prefer_wmi = True + + import winreg # pylint: disable=import-error + + # Keep pylint quiet on non-windows. + try: + _ = WindowsError # pylint: disable=used-before-assignment + except NameError: + WindowsError = Exception + + if dns._features.have("wmi"): + import threading + + import pythoncom # pylint: disable=import-error + import wmi # pylint: disable=import-error + + _have_wmi = True + else: + _have_wmi = False + + def _config_domain(domain): + # Sometimes DHCP servers add a '.' prefix to the default domain, and + # Windows just stores such values in the registry (see #687). + # Check for this and fix it. 
+ if domain.startswith("."): + domain = domain[1:] + return dns.name.from_text(domain) + + class DnsInfo: + def __init__(self): + self.domain = None + self.nameservers = [] + self.search = [] + + if _have_wmi: + + class _WMIGetter(threading.Thread): + # pylint: disable=possibly-used-before-assignment + def __init__(self): + super().__init__() + self.info = DnsInfo() + + def run(self): + pythoncom.CoInitialize() + try: + system = wmi.WMI() + for interface in system.Win32_NetworkAdapterConfiguration(): + if interface.IPEnabled and interface.DNSServerSearchOrder: + self.info.nameservers = list(interface.DNSServerSearchOrder) + if interface.DNSDomain: + self.info.domain = _config_domain(interface.DNSDomain) + if interface.DNSDomainSuffixSearchOrder: + self.info.search = [ + _config_domain(x) + for x in interface.DNSDomainSuffixSearchOrder + ] + break + finally: + pythoncom.CoUninitialize() + + def get(self): + # We always run in a separate thread to avoid any issues with + # the COM threading model. + self.start() + self.join() + return self.info + + else: + + class _WMIGetter: # type: ignore + pass + + class _RegistryGetter: + def __init__(self): + self.info = DnsInfo() + + def _split(self, text): + # The windows registry has used both " " and "," as a delimiter, and while + # it is currently using "," in Windows 10 and later, updates can seemingly + # leave a space in too, e.g. "a, b". So we just convert all commas to + # spaces, and use split() in its default configuration, which splits on + # all whitespace and ignores empty strings. + return text.replace(",", " ").split() + + def _config_nameservers(self, nameservers): + for ns in self._split(nameservers): + if ns not in self.info.nameservers: + self.info.nameservers.append(ns) + + def _config_search(self, search): + for s in self._split(search): + s = _config_domain(s) + if s not in self.info.search: + self.info.search.append(s) + + def _config_fromkey(self, key, always_try_domain): + try: + servers, _ = winreg.QueryValueEx(key, "NameServer") + except WindowsError: + servers = None + if servers: + self._config_nameservers(servers) + if servers or always_try_domain: + try: + dom, _ = winreg.QueryValueEx(key, "Domain") + if dom: + self.info.domain = _config_domain(dom) + except WindowsError: + pass + else: + try: + servers, _ = winreg.QueryValueEx(key, "DhcpNameServer") + except WindowsError: + servers = None + if servers: + self._config_nameservers(servers) + try: + dom, _ = winreg.QueryValueEx(key, "DhcpDomain") + if dom: + self.info.domain = _config_domain(dom) + except WindowsError: + pass + try: + search, _ = winreg.QueryValueEx(key, "SearchList") + except WindowsError: + search = None + if search is None: + try: + search, _ = winreg.QueryValueEx(key, "DhcpSearchList") + except WindowsError: + search = None + if search: + self._config_search(search) + + def _is_nic_enabled(self, lm, guid): + # Look in the Windows Registry to determine whether the network + # interface corresponding to the given guid is enabled. + # + # (Code contributed by Paul Marks, thanks!) + # + try: + # This hard-coded location seems to be consistent, at least + # from Windows 2000 through Vista. 
+ connection_key = winreg.OpenKey( + lm, + r"SYSTEM\CurrentControlSet\Control\Network" + r"\{4D36E972-E325-11CE-BFC1-08002BE10318}" + rf"\{guid}\Connection", + ) + + try: + # The PnpInstanceID points to a key inside Enum + (pnp_id, ttype) = winreg.QueryValueEx( + connection_key, "PnpInstanceID" + ) + + if ttype != winreg.REG_SZ: + raise ValueError # pragma: no cover + + device_key = winreg.OpenKey( + lm, rf"SYSTEM\CurrentControlSet\Enum\{pnp_id}" + ) + + try: + # Get ConfigFlags for this device + (flags, ttype) = winreg.QueryValueEx(device_key, "ConfigFlags") + + if ttype != winreg.REG_DWORD: + raise ValueError # pragma: no cover + + # Based on experimentation, bit 0x1 indicates that the + # device is disabled. + # + # XXXRTH I suspect we really want to & with 0x03 so + # that CONFIGFLAGS_REMOVED devices are also ignored, + # but we're shifting to WMI as ConfigFlags is not + # supposed to be used. + return not flags & 0x1 + + finally: + device_key.Close() + finally: + connection_key.Close() + except Exception: # pragma: no cover + return False + + def get(self): + """Extract resolver configuration from the Windows registry.""" + + lm = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) + try: + tcp_params = winreg.OpenKey( + lm, r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters" + ) + try: + self._config_fromkey(tcp_params, True) + finally: + tcp_params.Close() + interfaces = winreg.OpenKey( + lm, + r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters\Interfaces", + ) + try: + i = 0 + while True: + try: + guid = winreg.EnumKey(interfaces, i) + i += 1 + key = winreg.OpenKey(interfaces, guid) + try: + if not self._is_nic_enabled(lm, guid): + continue + self._config_fromkey(key, False) + finally: + key.Close() + except OSError: + break + finally: + interfaces.Close() + finally: + lm.Close() + return self.info + + _getter_class: Any + if _have_wmi and _prefer_wmi: + _getter_class = _WMIGetter + else: + _getter_class = _RegistryGetter + + def get_dns_info(): + """Extract resolver configuration.""" + getter = _getter_class() + return getter.get() diff --git a/venv/lib/python3.11/site-packages/dns/wire.py b/venv/lib/python3.11/site-packages/dns/wire.py new file mode 100644 index 0000000..9f9b157 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/wire.py @@ -0,0 +1,89 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import contextlib +import struct +from typing import Iterator, Optional, Tuple + +import dns.exception +import dns.name + + +class Parser: + def __init__(self, wire: bytes, current: int = 0): + self.wire = wire + self.current = 0 + self.end = len(self.wire) + if current: + self.seek(current) + self.furthest = current + + def remaining(self) -> int: + return self.end - self.current + + def get_bytes(self, size: int) -> bytes: + assert size >= 0 + if size > self.remaining(): + raise dns.exception.FormError + output = self.wire[self.current : self.current + size] + self.current += size + self.furthest = max(self.furthest, self.current) + return output + + def get_counted_bytes(self, length_size: int = 1) -> bytes: + length = int.from_bytes(self.get_bytes(length_size), "big") + return self.get_bytes(length) + + def get_remaining(self) -> bytes: + return self.get_bytes(self.remaining()) + + def get_uint8(self) -> int: + return struct.unpack("!B", self.get_bytes(1))[0] + + def get_uint16(self) -> int: + return struct.unpack("!H", self.get_bytes(2))[0] + + def get_uint32(self) -> int: + return struct.unpack("!I", self.get_bytes(4))[0] + + def 
get_uint48(self) -> int: + return int.from_bytes(self.get_bytes(6), "big") + + def get_struct(self, format: str) -> Tuple: + return struct.unpack(format, self.get_bytes(struct.calcsize(format))) + + def get_name(self, origin: Optional["dns.name.Name"] = None) -> "dns.name.Name": + name = dns.name.from_wire_parser(self) + if origin: + name = name.relativize(origin) + return name + + def seek(self, where: int) -> None: + # Note that seeking to the end is OK! (If you try to read + # after such a seek, you'll get an exception as expected.) + if where < 0 or where > self.end: + raise dns.exception.FormError + self.current = where + + @contextlib.contextmanager + def restrict_to(self, size: int) -> Iterator: + assert size >= 0 + if size > self.remaining(): + raise dns.exception.FormError + saved_end = self.end + try: + self.end = self.current + size + yield + # We make this check here and not in the finally as we + # don't want to raise if we're already raising for some + # other reason. + if self.current != self.end: + raise dns.exception.FormError + finally: + self.end = saved_end + + @contextlib.contextmanager + def restore_furthest(self) -> Iterator: + try: + yield None + finally: + self.current = self.furthest diff --git a/venv/lib/python3.11/site-packages/dns/xfr.py b/venv/lib/python3.11/site-packages/dns/xfr.py new file mode 100644 index 0000000..520aa32 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/xfr.py @@ -0,0 +1,343 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +from typing import Any, List, Optional, Tuple, Union + +import dns.exception +import dns.message +import dns.name +import dns.rcode +import dns.rdataset +import dns.rdatatype +import dns.serial +import dns.transaction +import dns.tsig +import dns.zone + + +class TransferError(dns.exception.DNSException): + """A zone transfer response got a non-zero rcode.""" + + def __init__(self, rcode): + message = f"Zone transfer error: {dns.rcode.to_text(rcode)}" + super().__init__(message) + self.rcode = rcode + + +class SerialWentBackwards(dns.exception.FormError): + """The current serial number is less than the serial we know.""" + + +class UseTCP(dns.exception.DNSException): + """This IXFR cannot be completed with UDP.""" + + +class Inbound: + """ + State machine for zone transfers. + """ + + def __init__( + self, + txn_manager: dns.transaction.TransactionManager, + rdtype: dns.rdatatype.RdataType = dns.rdatatype.AXFR, + serial: Optional[int] = None, + is_udp: bool = False, + ): + """Initialize an inbound zone transfer. + + *txn_manager* is a :py:class:`dns.transaction.TransactionManager`. 
+ + *rdtype* can be `dns.rdatatype.AXFR` or `dns.rdatatype.IXFR` + + *serial* is the base serial number for IXFRs, and is required in + that case. + + *is_udp*, a ``bool`` indidicates if UDP is being used for this + XFR. + """ + self.txn_manager = txn_manager + self.txn: Optional[dns.transaction.Transaction] = None + self.rdtype = rdtype + if rdtype == dns.rdatatype.IXFR: + if serial is None: + raise ValueError("a starting serial must be supplied for IXFRs") + elif is_udp: + raise ValueError("is_udp specified for AXFR") + self.serial = serial + self.is_udp = is_udp + (_, _, self.origin) = txn_manager.origin_information() + self.soa_rdataset: Optional[dns.rdataset.Rdataset] = None + self.done = False + self.expecting_SOA = False + self.delete_mode = False + + def process_message(self, message: dns.message.Message) -> bool: + """Process one message in the transfer. + + The message should have the same relativization as was specified when + the `dns.xfr.Inbound` was created. The message should also have been + created with `one_rr_per_rrset=True` because order matters. + + Returns `True` if the transfer is complete, and `False` otherwise. + """ + if self.txn is None: + replacement = self.rdtype == dns.rdatatype.AXFR + self.txn = self.txn_manager.writer(replacement) + rcode = message.rcode() + if rcode != dns.rcode.NOERROR: + raise TransferError(rcode) + # + # We don't require a question section, but if it is present is + # should be correct. + # + if len(message.question) > 0: + if message.question[0].name != self.origin: + raise dns.exception.FormError("wrong question name") + if message.question[0].rdtype != self.rdtype: + raise dns.exception.FormError("wrong question rdatatype") + answer_index = 0 + if self.soa_rdataset is None: + # + # This is the first message. We're expecting an SOA at + # the origin. + # + if not message.answer or message.answer[0].name != self.origin: + raise dns.exception.FormError("No answer or RRset not for zone origin") + rrset = message.answer[0] + rdataset = rrset + if rdataset.rdtype != dns.rdatatype.SOA: + raise dns.exception.FormError("first RRset is not an SOA") + answer_index = 1 + self.soa_rdataset = rdataset.copy() + if self.rdtype == dns.rdatatype.IXFR: + if self.soa_rdataset[0].serial == self.serial: + # + # We're already up-to-date. + # + self.done = True + elif dns.serial.Serial(self.soa_rdataset[0].serial) < self.serial: + # It went backwards! + raise SerialWentBackwards + else: + if self.is_udp and len(message.answer[answer_index:]) == 0: + # + # There are no more records, so this is the + # "truncated" response. Say to use TCP + # + raise UseTCP + # + # Note we're expecting another SOA so we can detect + # if this IXFR response is an AXFR-style response. + # + self.expecting_SOA = True + # + # Process the answer section (other than the initial SOA in + # the first message). + # + for rrset in message.answer[answer_index:]: + name = rrset.name + rdataset = rrset + if self.done: + raise dns.exception.FormError("answers after final SOA") + assert self.txn is not None # for mypy + if rdataset.rdtype == dns.rdatatype.SOA and name == self.origin: + # + # Every time we see an origin SOA delete_mode inverts + # + if self.rdtype == dns.rdatatype.IXFR: + self.delete_mode = not self.delete_mode + # + # If this SOA Rdataset is equal to the first we saw + # then we're finished. If this is an IXFR we also + # check that we're seeing the record in the expected + # part of the response. 
+ # + if rdataset == self.soa_rdataset and ( + self.rdtype == dns.rdatatype.AXFR + or (self.rdtype == dns.rdatatype.IXFR and self.delete_mode) + ): + # + # This is the final SOA + # + if self.expecting_SOA: + # We got an empty IXFR sequence! + raise dns.exception.FormError("empty IXFR sequence") + if ( + self.rdtype == dns.rdatatype.IXFR + and self.serial != rdataset[0].serial + ): + raise dns.exception.FormError("unexpected end of IXFR sequence") + self.txn.replace(name, rdataset) + self.txn.commit() + self.txn = None + self.done = True + else: + # + # This is not the final SOA + # + self.expecting_SOA = False + if self.rdtype == dns.rdatatype.IXFR: + if self.delete_mode: + # This is the start of an IXFR deletion set + if rdataset[0].serial != self.serial: + raise dns.exception.FormError( + "IXFR base serial mismatch" + ) + else: + # This is the start of an IXFR addition set + self.serial = rdataset[0].serial + self.txn.replace(name, rdataset) + else: + # We saw a non-final SOA for the origin in an AXFR. + raise dns.exception.FormError("unexpected origin SOA in AXFR") + continue + if self.expecting_SOA: + # + # We made an IXFR request and are expecting another + # SOA RR, but saw something else, so this must be an + # AXFR response. + # + self.rdtype = dns.rdatatype.AXFR + self.expecting_SOA = False + self.delete_mode = False + self.txn.rollback() + self.txn = self.txn_manager.writer(True) + # + # Note we are falling through into the code below + # so whatever rdataset this was gets written. + # + # Add or remove the data + if self.delete_mode: + self.txn.delete_exact(name, rdataset) + else: + self.txn.add(name, rdataset) + if self.is_udp and not self.done: + # + # This is a UDP IXFR and we didn't get to done, and we didn't + # get the proper "truncated" response + # + raise dns.exception.FormError("unexpected end of UDP IXFR") + return self.done + + # + # Inbounds are context managers. + # + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if self.txn: + self.txn.rollback() + return False + + +def make_query( + txn_manager: dns.transaction.TransactionManager, + serial: Optional[int] = 0, + use_edns: Optional[Union[int, bool]] = None, + ednsflags: Optional[int] = None, + payload: Optional[int] = None, + request_payload: Optional[int] = None, + options: Optional[List[dns.edns.Option]] = None, + keyring: Any = None, + keyname: Optional[dns.name.Name] = None, + keyalgorithm: Union[dns.name.Name, str] = dns.tsig.default_algorithm, +) -> Tuple[dns.message.QueryMessage, Optional[int]]: + """Make an AXFR or IXFR query. + + *txn_manager* is a ``dns.transaction.TransactionManager``, typically a + ``dns.zone.Zone``. + + *serial* is an ``int`` or ``None``. If 0, then IXFR will be + attempted using the most recent serial number from the + *txn_manager*; it is the caller's responsibility to ensure there + are no write transactions active that could invalidate the + retrieved serial. If a serial cannot be determined, AXFR will be + forced. Other integer values are the starting serial to use. + ``None`` forces an AXFR. + + Please see the documentation for :py:func:`dns.message.make_query` and + :py:func:`dns.message.Message.use_tsig` for details on the other parameters + to this function. + + Returns a `(query, serial)` tuple. 
+ """ + (zone_origin, _, origin) = txn_manager.origin_information() + if zone_origin is None: + raise ValueError("no zone origin") + if serial is None: + rdtype = dns.rdatatype.AXFR + elif not isinstance(serial, int): + raise ValueError("serial is not an integer") + elif serial == 0: + with txn_manager.reader() as txn: + rdataset = txn.get(origin, "SOA") + if rdataset: + serial = rdataset[0].serial + rdtype = dns.rdatatype.IXFR + else: + serial = None + rdtype = dns.rdatatype.AXFR + elif serial > 0 and serial < 4294967296: + rdtype = dns.rdatatype.IXFR + else: + raise ValueError("serial out-of-range") + rdclass = txn_manager.get_class() + q = dns.message.make_query( + zone_origin, + rdtype, + rdclass, + use_edns, + False, + ednsflags, + payload, + request_payload, + options, + ) + if serial is not None: + rdata = dns.rdata.from_text(rdclass, "SOA", f". . {serial} 0 0 0 0") + rrset = q.find_rrset( + q.authority, zone_origin, rdclass, dns.rdatatype.SOA, create=True + ) + rrset.add(rdata, 0) + if keyring is not None: + q.use_tsig(keyring, keyname, algorithm=keyalgorithm) + return (q, serial) + + +def extract_serial_from_query(query: dns.message.Message) -> Optional[int]: + """Extract the SOA serial number from query if it is an IXFR and return + it, otherwise return None. + + *query* is a dns.message.QueryMessage that is an IXFR or AXFR request. + + Raises if the query is not an IXFR or AXFR, or if an IXFR doesn't have + an appropriate SOA RRset in the authority section. + """ + if not isinstance(query, dns.message.QueryMessage): + raise ValueError("query not a QueryMessage") + question = query.question[0] + if question.rdtype == dns.rdatatype.AXFR: + return None + elif question.rdtype != dns.rdatatype.IXFR: + raise ValueError("query is not an AXFR or IXFR") + soa = query.find_rrset( + query.authority, question.name, question.rdclass, dns.rdatatype.SOA + ) + return soa[0].serial diff --git a/venv/lib/python3.11/site-packages/dns/zone.py b/venv/lib/python3.11/site-packages/dns/zone.py new file mode 100644 index 0000000..844919e --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/zone.py @@ -0,0 +1,1434 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
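Aside (editor's sketch): `make_query()` above pairs with the `Inbound` state machine; the usual driver is `dns.query.inbound_xfr()`, which feeds each response message to `Inbound.process_message()` until it returns `True`. Assuming a reachable primary at a placeholder address:

    import dns.query
    import dns.xfr
    import dns.zone

    zone = dns.zone.Zone("example.")
    # The zone is empty, so make_query() cannot find a current SOA serial
    # and falls back to AXFR (the returned serial is None).
    query, serial = dns.xfr.make_query(zone)
    dns.query.inbound_xfr("10.0.0.53", zone, query)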
+ +"""DNS Zones.""" + +import contextlib +import io +import os +import struct +from typing import ( + Any, + Callable, + Iterable, + Iterator, + List, + MutableMapping, + Optional, + Set, + Tuple, + Union, +) + +import dns.exception +import dns.grange +import dns.immutable +import dns.name +import dns.node +import dns.rdata +import dns.rdataclass +import dns.rdataset +import dns.rdatatype +import dns.rdtypes.ANY.SOA +import dns.rdtypes.ANY.ZONEMD +import dns.rrset +import dns.tokenizer +import dns.transaction +import dns.ttl +import dns.zonefile +from dns.zonetypes import DigestHashAlgorithm, DigestScheme, _digest_hashers + + +class BadZone(dns.exception.DNSException): + """The DNS zone is malformed.""" + + +class NoSOA(BadZone): + """The DNS zone has no SOA RR at its origin.""" + + +class NoNS(BadZone): + """The DNS zone has no NS RRset at its origin.""" + + +class UnknownOrigin(BadZone): + """The DNS zone's origin is unknown.""" + + +class UnsupportedDigestScheme(dns.exception.DNSException): + """The zone digest's scheme is unsupported.""" + + +class UnsupportedDigestHashAlgorithm(dns.exception.DNSException): + """The zone digest's origin is unsupported.""" + + +class NoDigest(dns.exception.DNSException): + """The DNS zone has no ZONEMD RRset at its origin.""" + + +class DigestVerificationFailure(dns.exception.DNSException): + """The ZONEMD digest failed to verify.""" + + +def _validate_name( + name: dns.name.Name, + origin: Optional[dns.name.Name], + relativize: bool, +) -> dns.name.Name: + # This name validation code is shared by Zone and Version + if origin is None: + # This should probably never happen as other code (e.g. + # _rr_line) will notice the lack of an origin before us, but + # we check just in case! + raise KeyError("no zone origin is defined") + if name.is_absolute(): + if not name.is_subdomain(origin): + raise KeyError("name parameter must be a subdomain of the zone origin") + if relativize: + name = name.relativize(origin) + else: + # We have a relative name. Make sure that the derelativized name is + # not too long. + try: + abs_name = name.derelativize(origin) + except dns.name.NameTooLong: + # We map dns.name.NameTooLong to KeyError to be consistent with + # the other exceptions above. + raise KeyError("relative name too long for zone") + if not relativize: + # We have a relative name in a non-relative zone, so use the + # derelativized name. + name = abs_name + return name + + +class Zone(dns.transaction.TransactionManager): + """A DNS zone. + + A ``Zone`` is a mapping from names to nodes. The zone object may be + treated like a Python dictionary, e.g. ``zone[name]`` will retrieve + the node associated with that name. The *name* may be a + ``dns.name.Name object``, or it may be a string. In either case, + if the name is relative it is treated as relative to the origin of + the zone. + """ + + node_factory: Callable[[], dns.node.Node] = dns.node.Node + map_factory: Callable[[], MutableMapping[dns.name.Name, dns.node.Node]] = dict + writable_version_factory: Optional[Callable[[], "WritableVersion"]] = None + immutable_version_factory: Optional[Callable[[], "ImmutableVersion"]] = None + + __slots__ = ["rdclass", "origin", "nodes", "relativize"] + + def __init__( + self, + origin: Optional[Union[dns.name.Name, str]], + rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, + relativize: bool = True, + ): + """Initialize a zone object. + + *origin* is the origin of the zone. It may be a ``dns.name.Name``, + a ``str``, or ``None``. 
+
+
+class Zone(dns.transaction.TransactionManager):
+    """A DNS zone.
+
+    A ``Zone`` is a mapping from names to nodes. The zone object may be
+    treated like a Python dictionary, e.g. ``zone[name]`` will retrieve
+    the node associated with that name. The *name* may be a
+    ``dns.name.Name`` object, or it may be a string. In either case,
+    if the name is relative it is treated as relative to the origin of
+    the zone.
+    """
+
+    node_factory: Callable[[], dns.node.Node] = dns.node.Node
+    map_factory: Callable[[], MutableMapping[dns.name.Name, dns.node.Node]] = dict
+    writable_version_factory: Optional[Callable[[], "WritableVersion"]] = None
+    immutable_version_factory: Optional[Callable[[], "ImmutableVersion"]] = None
+
+    __slots__ = ["rdclass", "origin", "nodes", "relativize"]
+
+    def __init__(
+        self,
+        origin: Optional[Union[dns.name.Name, str]],
+        rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN,
+        relativize: bool = True,
+    ):
+        """Initialize a zone object.
+
+        *origin* is the origin of the zone. It may be a ``dns.name.Name``,
+        a ``str``, or ``None``. If ``None``, then the zone's origin will
+        be set by the first ``$ORIGIN`` line in a zone file.
+
+        *rdclass*, an ``int``, the zone's rdata class; the default is class IN.
+
+        *relativize*, a ``bool``, determines whether domain names are
+        relativized to the zone's origin. The default is ``True``.
+        """
+
+        if origin is not None:
+            if isinstance(origin, str):
+                origin = dns.name.from_text(origin)
+            elif not isinstance(origin, dns.name.Name):
+                raise ValueError("origin parameter must be convertible to a DNS name")
+            if not origin.is_absolute():
+                raise ValueError("origin parameter must be an absolute name")
+        self.origin = origin
+        self.rdclass = rdclass
+        self.nodes: MutableMapping[dns.name.Name, dns.node.Node] = self.map_factory()
+        self.relativize = relativize
+
+    def __eq__(self, other):
+        """Two zones are equal if they have the same origin, class, and
+        nodes.
+
+        Returns a ``bool``.
+        """
+
+        if not isinstance(other, Zone):
+            return False
+        if (
+            self.rdclass != other.rdclass
+            or self.origin != other.origin
+            or self.nodes != other.nodes
+        ):
+            return False
+        return True
+
+    def __ne__(self, other):
+        """Are two zones not equal?
+
+        Returns a ``bool``.
+        """
+
+        return not self.__eq__(other)
+
+    def _validate_name(self, name: Union[dns.name.Name, str]) -> dns.name.Name:
+        # Note that any changes in this method should have corresponding changes
+        # made in the Version _validate_name() method.
+        if isinstance(name, str):
+            name = dns.name.from_text(name, None)
+        elif not isinstance(name, dns.name.Name):
+            raise KeyError("name parameter must be convertible to a DNS name")
+        return _validate_name(name, self.origin, self.relativize)
+
+    def __getitem__(self, key):
+        key = self._validate_name(key)
+        return self.nodes[key]
+
+    def __setitem__(self, key, value):
+        key = self._validate_name(key)
+        self.nodes[key] = value
+
+    def __delitem__(self, key):
+        key = self._validate_name(key)
+        del self.nodes[key]
+
+    def __iter__(self):
+        return self.nodes.__iter__()
+
+    def keys(self):
+        return self.nodes.keys()
+
+    def values(self):
+        return self.nodes.values()
+
+    def items(self):
+        return self.nodes.items()
+
+    def get(self, key):
+        key = self._validate_name(key)
+        return self.nodes.get(key)
+
+    def __contains__(self, key):
+        key = self._validate_name(key)
+        return key in self.nodes
+
+    def find_node(
+        self, name: Union[dns.name.Name, str], create: bool = False
+    ) -> dns.node.Node:
+        """Find a node in the zone, possibly creating it.
+
+        *name*: the name of the node to find.
+        The value may be a ``dns.name.Name`` or a ``str``. If absolute, the
+        name must be a subdomain of the zone's origin. If ``zone.relativize``
+        is ``True``, then the name will be relativized.
+
+        *create*, a ``bool``. If true, the node will be created if it does
+        not exist.
+
+        Raises ``KeyError`` if the name is not known and create was
+        not specified, or if the name was not a subdomain of the origin.
+
+        Returns a ``dns.node.Node``.
+        """
+
+        name = self._validate_name(name)
+        node = self.nodes.get(name)
+        if node is None:
+            if not create:
+                raise KeyError
+            node = self.node_factory()
+            self.nodes[name] = node
+        return node
+
+    def get_node(
+        self, name: Union[dns.name.Name, str], create: bool = False
+    ) -> Optional[dns.node.Node]:
+        """Get a node in the zone, possibly creating it.
+
+        This method is like ``find_node()``, except it returns None instead
+        of raising an exception if the node does not exist and creation
+        has not been requested.
+
+        *name*: the name of the node to find.
+ The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + *create*, a ``bool``. If true, the node will be created if it does + not exist. + + Returns a ``dns.node.Node`` or ``None``. + """ + + try: + node = self.find_node(name, create) + except KeyError: + node = None + return node + + def delete_node(self, name: Union[dns.name.Name, str]) -> None: + """Delete the specified node if it exists. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + It is not an error if the node does not exist. + """ + + name = self._validate_name(name) + if name in self.nodes: + del self.nodes[name] + + def find_rdataset( + self, + name: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str], + covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, + create: bool = False, + ) -> dns.rdataset.Rdataset: + """Look for an rdataset with the specified name and type in the zone, + and return an rdataset encapsulating it. + + The rdataset returned is not a copy; changes to it will change + the zone. + + KeyError is raised if the name or type are not found. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired. + + *covers*, a ``dns.rdatatype.RdataType`` or ``str`` the covered type. + Usually this value is ``dns.rdatatype.NONE``, but if the + rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, + then the covers value will be the rdata type the SIG/RRSIG + covers. The library treats the SIG and RRSIG types as if they + were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). + This makes RRSIGs much easier to work with than if RRSIGs + covering different rdata types were aggregated into a single + RRSIG rdataset. + + *create*, a ``bool``. If true, the node will be created if it does + not exist. + + Raises ``KeyError`` if the name is not known and create was + not specified, or if the name was not a subdomain of the origin. + + Returns a ``dns.rdataset.Rdataset``. + """ + + name = self._validate_name(name) + rdtype = dns.rdatatype.RdataType.make(rdtype) + covers = dns.rdatatype.RdataType.make(covers) + node = self.find_node(name, create) + return node.find_rdataset(self.rdclass, rdtype, covers, create) + + def get_rdataset( + self, + name: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str], + covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, + create: bool = False, + ) -> Optional[dns.rdataset.Rdataset]: + """Look for an rdataset with the specified name and type in the zone. + + This method is like ``find_rdataset()``, except it returns None instead + of raising an exception if the rdataset does not exist and creation + has not been requested. + + The rdataset returned is not a copy; changes to it will change + the zone. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. 
+
+        *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired.
+
+        *covers*, a ``dns.rdatatype.RdataType`` or ``str``, the covered type.
+        Usually this value is ``dns.rdatatype.NONE``, but if the
+        rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``,
+        then the covers value will be the rdata type the SIG/RRSIG
+        covers. The library treats the SIG and RRSIG types as if they
+        were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA).
+        This makes RRSIGs much easier to work with than if RRSIGs
+        covering different rdata types were aggregated into a single
+        RRSIG rdataset.
+
+        *create*, a ``bool``. If true, the node will be created if it does
+        not exist.
+
+        Raises ``KeyError`` if the name is not known and create was
+        not specified, or if the name was not a subdomain of the origin.
+
+        Returns a ``dns.rdataset.Rdataset`` or ``None``.
+        """
+
+        try:
+            rdataset = self.find_rdataset(name, rdtype, covers, create)
+        except KeyError:
+            rdataset = None
+        return rdataset
+
+    def delete_rdataset(
+        self,
+        name: Union[dns.name.Name, str],
+        rdtype: Union[dns.rdatatype.RdataType, str],
+        covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE,
+    ) -> None:
+        """Delete the rdataset matching *rdtype* and *covers*, if it
+        exists at the node specified by *name*.
+
+        It is not an error if the node does not exist, or if there is no matching
+        rdataset at the node.
+
+        If the node has no rdatasets after the deletion, it will itself be deleted.
+
+        *name*: the name of the node to find. The value may be a ``dns.name.Name`` or a
+        ``str``. If absolute, the name must be a subdomain of the zone's origin. If
+        ``zone.relativize`` is ``True``, then the name will be relativized.
+
+        *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired.
+
+        *covers*, a ``dns.rdatatype.RdataType`` or ``str`` or ``None``, the covered
+        type. Usually this value is ``dns.rdatatype.NONE``, but if the rdtype is
+        ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, then the covers value will be
+        the rdata type the SIG/RRSIG covers. The library treats the SIG and RRSIG types
+        as if they were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). This
+        makes RRSIGs much easier to work with than if RRSIGs covering different rdata
+        types were aggregated into a single RRSIG rdataset.
+        """
+
+        name = self._validate_name(name)
+        rdtype = dns.rdatatype.RdataType.make(rdtype)
+        covers = dns.rdatatype.RdataType.make(covers)
+        node = self.get_node(name)
+        if node is not None:
+            node.delete_rdataset(self.rdclass, rdtype, covers)
+            if len(node) == 0:
+                self.delete_node(name)
+
+    def replace_rdataset(
+        self, name: Union[dns.name.Name, str], replacement: dns.rdataset.Rdataset
+    ) -> None:
+        """Replace an rdataset at name.
+
+        It is not an error if there is no rdataset matching *replacement*.
+
+        Ownership of the *replacement* object is transferred to the zone;
+        in other words, this method does not store a copy of *replacement*
+        at the node, it stores *replacement* itself.
+
+        If the node does not exist, it is created.
+
+        *name*: the name of the node to find.
+        The value may be a ``dns.name.Name`` or a ``str``. If absolute, the
+        name must be a subdomain of the zone's origin. If ``zone.relativize``
+        is ``True``, then the name will be relativized.
+
+        *replacement*, a ``dns.rdataset.Rdataset``, the replacement rdataset.
+        """
+
+        if replacement.rdclass != self.rdclass:
+            raise ValueError("replacement.rdclass != zone.rdclass")
+        node = self.find_node(name, True)
+        node.replace_rdataset(replacement)
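# Editor's note: a sketch (not part of the diff) of the ownership-transfer
# behaviour documented above; the zone contents are placeholder values.
#
#   import dns.rdataset
#   import dns.zone
#
#   zone = dns.zone.from_text(
#       "@ 300 IN SOA . . 1 0 0 0 0\n@ 300 IN NS ns\nwww 300 IN A 10.0.0.1\n",
#       origin="example.",
#   )
#   rds = dns.rdataset.from_text("IN", "A", 300, "10.0.0.2")
#   zone.replace_rdataset("www", rds)
#   # The zone now stores this exact object, not a copy.
#   assert zone.find_rdataset("www", "A") is rds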
+ """ + + if replacement.rdclass != self.rdclass: + raise ValueError("replacement.rdclass != zone.rdclass") + node = self.find_node(name, True) + node.replace_rdataset(replacement) + + def find_rrset( + self, + name: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str], + covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, + ) -> dns.rrset.RRset: + """Look for an rdataset with the specified name and type in the zone, + and return an RRset encapsulating it. + + This method is less efficient than the similar + ``find_rdataset()`` because it creates an RRset instead of + returning the matching rdataset. It may be more convenient + for some uses since it returns an object which binds the owner + name to the rdataset. + + This method may not be used to create new nodes or rdatasets; + use ``find_rdataset`` instead. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired. + + *covers*, a ``dns.rdatatype.RdataType`` or ``str``, the covered type. + Usually this value is ``dns.rdatatype.NONE``, but if the + rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, + then the covers value will be the rdata type the SIG/RRSIG + covers. The library treats the SIG and RRSIG types as if they + were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). + This makes RRSIGs much easier to work with than if RRSIGs + covering different rdata types were aggregated into a single + RRSIG rdataset. + + *create*, a ``bool``. If true, the node will be created if it does + not exist. + + Raises ``KeyError`` if the name is not known and create was + not specified, or if the name was not a subdomain of the origin. + + Returns a ``dns.rrset.RRset`` or ``None``. + """ + + vname = self._validate_name(name) + rdtype = dns.rdatatype.RdataType.make(rdtype) + covers = dns.rdatatype.RdataType.make(covers) + rdataset = self.nodes[vname].find_rdataset(self.rdclass, rdtype, covers) + rrset = dns.rrset.RRset(vname, self.rdclass, rdtype, covers) + rrset.update(rdataset) + return rrset + + def get_rrset( + self, + name: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str], + covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, + ) -> Optional[dns.rrset.RRset]: + """Look for an rdataset with the specified name and type in the zone, + and return an RRset encapsulating it. + + This method is less efficient than the similar ``get_rdataset()`` + because it creates an RRset instead of returning the matching + rdataset. It may be more convenient for some uses since it + returns an object which binds the owner name to the rdataset. + + This method may not be used to create new nodes or rdatasets; + use ``get_rdataset()`` instead. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + *rdtype*, a ``dns.rdataset.Rdataset`` or ``str``, the rdata type desired. + + *covers*, a ``dns.rdataset.Rdataset`` or ``str``, the covered type. + Usually this value is ``dns.rdatatype.NONE``, but if the + rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, + then the covers value will be the rdata type the SIG/RRSIG + covers. 
+        The library treats the SIG and RRSIG types as if they
+        were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA).
+        This makes RRSIGs much easier to work with than if RRSIGs
+        covering different rdata types were aggregated into a single
+        RRSIG rdataset.
+
+        Returns a ``dns.rrset.RRset`` or ``None``.
+        """
+
+        try:
+            rrset = self.find_rrset(name, rdtype, covers)
+        except KeyError:
+            rrset = None
+        return rrset
+
+    def iterate_rdatasets(
+        self,
+        rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.ANY,
+        covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE,
+    ) -> Iterator[Tuple[dns.name.Name, dns.rdataset.Rdataset]]:
+        """Return a generator which yields (name, rdataset) tuples for
+        all rdatasets in the zone which have the specified *rdtype*
+        and *covers*. If *rdtype* is ``dns.rdatatype.ANY``, the default,
+        then all rdatasets will be matched.
+
+        *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired.
+
+        *covers*, a ``dns.rdatatype.RdataType`` or ``str``, the covered type.
+        Usually this value is ``dns.rdatatype.NONE``, but if the
+        rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``,
+        then the covers value will be the rdata type the SIG/RRSIG
+        covers. The library treats the SIG and RRSIG types as if they
+        were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA).
+        This makes RRSIGs much easier to work with than if RRSIGs
+        covering different rdata types were aggregated into a single
+        RRSIG rdataset.
+        """
+
+        rdtype = dns.rdatatype.RdataType.make(rdtype)
+        covers = dns.rdatatype.RdataType.make(covers)
+        for name, node in self.items():
+            for rds in node:
+                if rdtype == dns.rdatatype.ANY or (
+                    rds.rdtype == rdtype and rds.covers == covers
+                ):
+                    yield (name, rds)
+
+    def iterate_rdatas(
+        self,
+        rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.ANY,
+        covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE,
+    ) -> Iterator[Tuple[dns.name.Name, int, dns.rdata.Rdata]]:
+        """Return a generator which yields (name, ttl, rdata) tuples for
+        all rdatas in the zone which have the specified *rdtype*
+        and *covers*. If *rdtype* is ``dns.rdatatype.ANY``, the default,
+        then all rdatas will be matched.
+
+        *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired.
+
+        *covers*, a ``dns.rdatatype.RdataType`` or ``str``, the covered type.
+        Usually this value is ``dns.rdatatype.NONE``, but if the
+        rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``,
+        then the covers value will be the rdata type the SIG/RRSIG
+        covers. The library treats the SIG and RRSIG types as if they
+        were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA).
+        This makes RRSIGs much easier to work with than if RRSIGs
+        covering different rdata types were aggregated into a single
+        RRSIG rdataset.
+        """
+
+        rdtype = dns.rdatatype.RdataType.make(rdtype)
+        covers = dns.rdatatype.RdataType.make(covers)
+        for name, node in self.items():
+            for rds in node:
+                if rdtype == dns.rdatatype.ANY or (
+                    rds.rdtype == rdtype and rds.covers == covers
+                ):
+                    for rdata in rds:
+                        yield (name, rds.ttl, rdata)
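# Editor's note: a sketch (not part of the diff) of the two iterators above;
# the zone contents are placeholder values.
#
#   import dns.zone
#
#   zone = dns.zone.from_text(
#       "@ 300 IN SOA . . 1 0 0 0 0\n@ 300 IN NS ns\nwww 300 IN A 10.0.0.1\n",
#       origin="example.",
#   )
#   # (name, rdataset) pairs, here filtered to A rdatasets only.
#   for name, rds in zone.iterate_rdatasets("A"):
#       print(name, rds)
#   # (name, ttl, rdata) triples across every rdataset in the zone.
#   for name, ttl, rdata in zone.iterate_rdatas():
#       print(name, ttl, rdata)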
+    def to_file(
+        self,
+        f: Any,
+        sorted: bool = True,
+        relativize: bool = True,
+        nl: Optional[str] = None,
+        want_comments: bool = False,
+        want_origin: bool = False,
+    ) -> None:
+        """Write a zone to a file.
+
+        *f*, a file or `str`. If *f* is a string, it is treated
+        as the name of a file to open.
+
+        *sorted*, a ``bool``. If True, the default, then the file
+        will be written with the names sorted in DNSSEC order from
+        least to greatest. Otherwise the names will be written in
+        whatever order they happen to have in the zone's dictionary.
+
+        *relativize*, a ``bool``. If True, the default, then domain
+        names in the output will be relativized to the zone's origin
+        if possible.
+
+        *nl*, a ``str`` or None. The end of line string. If ``None``,
+        the output will use the platform's native end-of-line marker
+        (i.e. LF on POSIX, CRLF on Windows).
+
+        *want_comments*, a ``bool``. If ``True``, emit end-of-line comments
+        as part of writing the file. If ``False``, the default, do not
+        emit them.
+
+        *want_origin*, a ``bool``. If ``True``, emit a $ORIGIN line at
+        the start of the file. If ``False``, the default, do not emit
+        one.
+        """
+
+        if isinstance(f, str):
+            cm: contextlib.AbstractContextManager = open(f, "wb")
+        else:
+            cm = contextlib.nullcontext(f)
+        with cm as f:
+            # must be in this way, f.encoding may contain None, or even
+            # attribute may not be there
+            file_enc = getattr(f, "encoding", None)
+            if file_enc is None:
+                file_enc = "utf-8"
+
+            if nl is None:
+                # binary mode, '\n' is not enough
+                nl_b = os.linesep.encode(file_enc)
+                nl = "\n"
+            elif isinstance(nl, str):
+                nl_b = nl.encode(file_enc)
+            else:
+                nl_b = nl
+                nl = nl.decode()
+
+            if want_origin:
+                assert self.origin is not None
+                l = "$ORIGIN " + self.origin.to_text()
+                l_b = l.encode(file_enc)
+                try:
+                    f.write(l_b)
+                    f.write(nl_b)
+                except TypeError:  # textual mode
+                    f.write(l)
+                    f.write(nl)
+
+            if sorted:
+                names = list(self.keys())
+                names.sort()
+            else:
+                names = self.keys()
+            for n in names:
+                l = self[n].to_text(
+                    n,
+                    origin=self.origin,
+                    relativize=relativize,
+                    want_comments=want_comments,
+                )
+                l_b = l.encode(file_enc)
+
+                try:
+                    f.write(l_b)
+                    f.write(nl_b)
+                except TypeError:  # textual mode
+                    f.write(l)
+                    f.write(nl)
+
+    def to_text(
+        self,
+        sorted: bool = True,
+        relativize: bool = True,
+        nl: Optional[str] = None,
+        want_comments: bool = False,
+        want_origin: bool = False,
+    ) -> str:
+        """Return a zone's text as though it were written to a file.
+
+        *sorted*, a ``bool``. If True, the default, then the file
+        will be written with the names sorted in DNSSEC order from
+        least to greatest. Otherwise the names will be written in
+        whatever order they happen to have in the zone's dictionary.
+
+        *relativize*, a ``bool``. If True, the default, then domain
+        names in the output will be relativized to the zone's origin
+        if possible.
+
+        *nl*, a ``str`` or None. The end of line string. If ``None``,
+        the output will use the platform's native end-of-line marker
+        (i.e. LF on POSIX, CRLF on Windows).
+
+        *want_comments*, a ``bool``. If ``True``, emit end-of-line comments
+        as part of writing the file. If ``False``, the default, do not
+        emit them.
+
+        *want_origin*, a ``bool``. If ``True``, emit a $ORIGIN line at
+        the start of the output. If ``False``, the default, do not emit
+        one.
+
+        Returns a ``str``.
+        """
+        temp_buffer = io.StringIO()
+        self.to_file(temp_buffer, sorted, relativize, nl, want_comments, want_origin)
+        return_value = temp_buffer.getvalue()
+        temp_buffer.close()
+        return return_value
+
+    def check_origin(self) -> None:
+        """Do some simple checking of the zone's origin.
+
+        Raises ``dns.zone.NoSOA`` if there is no SOA RRset.
+
+        Raises ``dns.zone.NoNS`` if there is no NS RRset.
+
+        Raises ``KeyError`` if there is no origin node.
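# Editor's note: a sketch (not part of the diff) of the serialization path
# above; output details are indicative only.
#
#   import dns.zone
#
#   zone = dns.zone.from_text(
#       "@ 300 IN SOA . . 1 0 0 0 0\n@ 300 IN NS ns\n", origin="example."
#   )
#   # to_text() drives to_file() with an in-memory buffer; names are emitted
#   # in sorted order and relativized to the origin by default.
#   text = zone.to_text(want_origin=True)
#   assert text.startswith("$ORIGIN example.")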
+ """ + if self.relativize: + name = dns.name.empty + else: + assert self.origin is not None + name = self.origin + if self.get_rdataset(name, dns.rdatatype.SOA) is None: + raise NoSOA + if self.get_rdataset(name, dns.rdatatype.NS) is None: + raise NoNS + + def get_soa( + self, txn: Optional[dns.transaction.Transaction] = None + ) -> dns.rdtypes.ANY.SOA.SOA: + """Get the zone SOA rdata. + + Raises ``dns.zone.NoSOA`` if there is no SOA RRset. + + Returns a ``dns.rdtypes.ANY.SOA.SOA`` Rdata. + """ + if self.relativize: + origin_name = dns.name.empty + else: + if self.origin is None: + # get_soa() has been called very early, and there must not be + # an SOA if there is no origin. + raise NoSOA + origin_name = self.origin + soa: Optional[dns.rdataset.Rdataset] + if txn: + soa = txn.get(origin_name, dns.rdatatype.SOA) + else: + soa = self.get_rdataset(origin_name, dns.rdatatype.SOA) + if soa is None: + raise NoSOA + return soa[0] + + def _compute_digest( + self, + hash_algorithm: DigestHashAlgorithm, + scheme: DigestScheme = DigestScheme.SIMPLE, + ) -> bytes: + hashinfo = _digest_hashers.get(hash_algorithm) + if not hashinfo: + raise UnsupportedDigestHashAlgorithm + if scheme != DigestScheme.SIMPLE: + raise UnsupportedDigestScheme + + if self.relativize: + origin_name = dns.name.empty + else: + assert self.origin is not None + origin_name = self.origin + hasher = hashinfo() + for name, node in sorted(self.items()): + rrnamebuf = name.to_digestable(self.origin) + for rdataset in sorted(node, key=lambda rds: (rds.rdtype, rds.covers)): + if name == origin_name and dns.rdatatype.ZONEMD in ( + rdataset.rdtype, + rdataset.covers, + ): + continue + rrfixed = struct.pack( + "!HHI", rdataset.rdtype, rdataset.rdclass, rdataset.ttl + ) + rdatas = [rdata.to_digestable(self.origin) for rdata in rdataset] + for rdata in sorted(rdatas): + rrlen = struct.pack("!H", len(rdata)) + hasher.update(rrnamebuf + rrfixed + rrlen + rdata) + return hasher.digest() + + def compute_digest( + self, + hash_algorithm: DigestHashAlgorithm, + scheme: DigestScheme = DigestScheme.SIMPLE, + ) -> dns.rdtypes.ANY.ZONEMD.ZONEMD: + serial = self.get_soa().serial + digest = self._compute_digest(hash_algorithm, scheme) + return dns.rdtypes.ANY.ZONEMD.ZONEMD( + self.rdclass, dns.rdatatype.ZONEMD, serial, scheme, hash_algorithm, digest + ) + + def verify_digest( + self, zonemd: Optional[dns.rdtypes.ANY.ZONEMD.ZONEMD] = None + ) -> None: + digests: Union[dns.rdataset.Rdataset, List[dns.rdtypes.ANY.ZONEMD.ZONEMD]] + if zonemd: + digests = [zonemd] + else: + assert self.origin is not None + rds = self.get_rdataset(self.origin, dns.rdatatype.ZONEMD) + if rds is None: + raise NoDigest + digests = rds + for digest in digests: + try: + computed = self._compute_digest(digest.hash_algorithm, digest.scheme) + if computed == digest.digest: + return + except Exception: + pass + raise DigestVerificationFailure + + # TransactionManager methods + + def reader(self) -> "Transaction": + return Transaction(self, False, Version(self, 1, self.nodes, self.origin)) + + def writer(self, replacement: bool = False) -> "Transaction": + txn = Transaction(self, replacement) + txn._setup_version() + return txn + + def origin_information( + self, + ) -> Tuple[Optional[dns.name.Name], bool, Optional[dns.name.Name]]: + effective: Optional[dns.name.Name] + if self.relativize: + effective = dns.name.empty + else: + effective = self.origin + return (self.origin, self.relativize, effective) + + def get_class(self): + return self.rdclass + + # Transaction methods + + def 
_end_read(self, txn): + pass + + def _end_write(self, txn): + pass + + def _commit_version(self, _, version, origin): + self.nodes = version.nodes + if self.origin is None: + self.origin = origin + + def _get_next_version_id(self): + # Versions are ephemeral and all have id 1 + return 1 + + +# These classes used to be in dns.versioned, but have moved here so we can use +# the copy-on-write transaction mechanism for both kinds of zones. In a +# regular zone, the version only exists during the transaction, and the nodes +# are regular dns.node.Nodes. + +# A node with a version id. + + +class VersionedNode(dns.node.Node): # lgtm[py/missing-equals] + __slots__ = ["id"] + + def __init__(self): + super().__init__() + # A proper id will get set by the Version + self.id = 0 + + +@dns.immutable.immutable +class ImmutableVersionedNode(VersionedNode): + def __init__(self, node): + super().__init__() + self.id = node.id + self.rdatasets = tuple( + [dns.rdataset.ImmutableRdataset(rds) for rds in node.rdatasets] + ) + + def find_rdataset( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + create: bool = False, + ) -> dns.rdataset.Rdataset: + if create: + raise TypeError("immutable") + return super().find_rdataset(rdclass, rdtype, covers, False) + + def get_rdataset( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + create: bool = False, + ) -> Optional[dns.rdataset.Rdataset]: + if create: + raise TypeError("immutable") + return super().get_rdataset(rdclass, rdtype, covers, False) + + def delete_rdataset( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + ) -> None: + raise TypeError("immutable") + + def replace_rdataset(self, replacement: dns.rdataset.Rdataset) -> None: + raise TypeError("immutable") + + def is_immutable(self) -> bool: + return True + + +class Version: + def __init__( + self, + zone: Zone, + id: int, + nodes: Optional[MutableMapping[dns.name.Name, dns.node.Node]] = None, + origin: Optional[dns.name.Name] = None, + ): + self.zone = zone + self.id = id + if nodes is not None: + self.nodes = nodes + else: + self.nodes = zone.map_factory() + self.origin = origin + + def _validate_name(self, name: dns.name.Name) -> dns.name.Name: + return _validate_name(name, self.origin, self.zone.relativize) + + def get_node(self, name: dns.name.Name) -> Optional[dns.node.Node]: + name = self._validate_name(name) + return self.nodes.get(name) + + def get_rdataset( + self, + name: dns.name.Name, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType, + ) -> Optional[dns.rdataset.Rdataset]: + node = self.get_node(name) + if node is None: + return None + return node.get_rdataset(self.zone.rdclass, rdtype, covers) + + def keys(self): + return self.nodes.keys() + + def items(self): + return self.nodes.items() + + +class WritableVersion(Version): + def __init__(self, zone: Zone, replacement: bool = False): + # The zone._versions_lock must be held by our caller in a versioned + # zone. + id = zone._get_next_version_id() + super().__init__(zone, id) + if not replacement: + # We copy the map, because that gives us a simple and thread-safe + # way of doing versions, and we have a garbage collector to help + # us. We only make new node objects if we actually change the + # node. 
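# Editor's note: a sketch (not part of the diff) of the transaction machinery
# these Version classes back; the record values are placeholders.
#
#   import dns.rdata
#   import dns.zone
#
#   zone = dns.zone.from_text(
#       "@ 300 IN SOA . . 1 0 0 0 0\n@ 300 IN NS ns\n", origin="example."
#   )
#   # writer() builds a WritableVersion; a successful commit swaps the
#   # version's node map into zone.nodes.
#   with zone.writer() as txn:
#       txn.add("www", 300, dns.rdata.from_text("IN", "A", "10.0.0.1"))
#   # reader() wraps the current nodes in a read-only Version.
#   with zone.reader() as txn:
#       assert txn.get("www", "A") is not None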
+ self.nodes.update(zone.nodes) + # We have to copy the zone origin as it may be None in the first + # version, and we don't want to mutate the zone until we commit. + self.origin = zone.origin + self.changed: Set[dns.name.Name] = set() + + def _maybe_cow(self, name: dns.name.Name) -> dns.node.Node: + name = self._validate_name(name) + node = self.nodes.get(name) + if node is None or name not in self.changed: + new_node = self.zone.node_factory() + if hasattr(new_node, "id"): + # We keep doing this for backwards compatibility, as earlier + # code used new_node.id != self.id for the "do we need to CoW?" + # test. Now we use the changed set as this works with both + # regular zones and versioned zones. + # + # We ignore the mypy error as this is safe but it doesn't see it. + new_node.id = self.id # type: ignore + if node is not None: + # moo! copy on write! + new_node.rdatasets.extend(node.rdatasets) + self.nodes[name] = new_node + self.changed.add(name) + return new_node + else: + return node + + def delete_node(self, name: dns.name.Name) -> None: + name = self._validate_name(name) + if name in self.nodes: + del self.nodes[name] + self.changed.add(name) + + def put_rdataset( + self, name: dns.name.Name, rdataset: dns.rdataset.Rdataset + ) -> None: + node = self._maybe_cow(name) + node.replace_rdataset(rdataset) + + def delete_rdataset( + self, + name: dns.name.Name, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType, + ) -> None: + node = self._maybe_cow(name) + node.delete_rdataset(self.zone.rdclass, rdtype, covers) + if len(node) == 0: + del self.nodes[name] + + +@dns.immutable.immutable +class ImmutableVersion(Version): + def __init__(self, version: WritableVersion): + # We tell super() that it's a replacement as we don't want it + # to copy the nodes, as we're about to do that with an + # immutable Dict. + super().__init__(version.zone, True) + # set the right id! + self.id = version.id + # keep the origin + self.origin = version.origin + # Make changed nodes immutable + for name in version.changed: + node = version.nodes.get(name) + # it might not exist if we deleted it in the version + if node: + version.nodes[name] = ImmutableVersionedNode(node) + # We're changing the type of the nodes dictionary here on purpose, so + # we ignore the mypy error. 
+ self.nodes = dns.immutable.Dict( + version.nodes, True, self.zone.map_factory + ) # type: ignore + + +class Transaction(dns.transaction.Transaction): + def __init__(self, zone, replacement, version=None, make_immutable=False): + read_only = version is not None + super().__init__(zone, replacement, read_only) + self.version = version + self.make_immutable = make_immutable + + @property + def zone(self): + return self.manager + + def _setup_version(self): + assert self.version is None + factory = self.manager.writable_version_factory + if factory is None: + factory = WritableVersion + self.version = factory(self.zone, self.replacement) + + def _get_rdataset(self, name, rdtype, covers): + return self.version.get_rdataset(name, rdtype, covers) + + def _put_rdataset(self, name, rdataset): + assert not self.read_only + self.version.put_rdataset(name, rdataset) + + def _delete_name(self, name): + assert not self.read_only + self.version.delete_node(name) + + def _delete_rdataset(self, name, rdtype, covers): + assert not self.read_only + self.version.delete_rdataset(name, rdtype, covers) + + def _name_exists(self, name): + return self.version.get_node(name) is not None + + def _changed(self): + if self.read_only: + return False + else: + return len(self.version.changed) > 0 + + def _end_transaction(self, commit): + if self.read_only: + self.zone._end_read(self) + elif commit and len(self.version.changed) > 0: + if self.make_immutable: + factory = self.manager.immutable_version_factory + if factory is None: + factory = ImmutableVersion + version = factory(self.version) + else: + version = self.version + self.zone._commit_version(self, version, self.version.origin) + else: + # rollback + self.zone._end_write(self) + + def _set_origin(self, origin): + if self.version.origin is None: + self.version.origin = origin + + def _iterate_rdatasets(self): + for name, node in self.version.items(): + for rdataset in node: + yield (name, rdataset) + + def _iterate_names(self): + return self.version.keys() + + def _get_node(self, name): + return self.version.get_node(name) + + def _origin_information(self): + (absolute, relativize, effective) = self.manager.origin_information() + if absolute is None and self.version.origin is not None: + # No origin has been committed yet, but we've learned one as part of + # this txn. Use it. + absolute = self.version.origin + if relativize: + effective = dns.name.empty + else: + effective = absolute + return (absolute, relativize, effective) + + +def _from_text( + text: Any, + origin: Optional[Union[dns.name.Name, str]] = None, + rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, + relativize: bool = True, + zone_factory: Any = Zone, + filename: Optional[str] = None, + allow_include: bool = False, + check_origin: bool = True, + idna_codec: Optional[dns.name.IDNACodec] = None, + allow_directives: Union[bool, Iterable[str]] = True, +) -> Zone: + # See the comments for the public APIs from_text() and from_file() for + # details. + + # 'text' can also be a file, but we don't publish that fact + # since it's an implementation detail. The official file + # interface is from_file(). 
+
+    if filename is None:
+        filename = "<string>"
+    zone = zone_factory(origin, rdclass, relativize=relativize)
+    with zone.writer(True) as txn:
+        tok = dns.tokenizer.Tokenizer(text, filename, idna_codec=idna_codec)
+        reader = dns.zonefile.Reader(
+            tok,
+            rdclass,
+            txn,
+            allow_include=allow_include,
+            allow_directives=allow_directives,
+        )
+        try:
+            reader.read()
+        except dns.zonefile.UnknownOrigin:
+            # for backwards compatibility
+            raise dns.zone.UnknownOrigin
+    # Now that we're done reading, do some basic checking of the zone.
+    if check_origin:
+        zone.check_origin()
+    return zone
+
+
+def from_text(
+    text: str,
+    origin: Optional[Union[dns.name.Name, str]] = None,
+    rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN,
+    relativize: bool = True,
+    zone_factory: Any = Zone,
+    filename: Optional[str] = None,
+    allow_include: bool = False,
+    check_origin: bool = True,
+    idna_codec: Optional[dns.name.IDNACodec] = None,
+    allow_directives: Union[bool, Iterable[str]] = True,
+) -> Zone:
+    """Build a zone object from a zone file format string.
+
+    *text*, a ``str``, the zone file format input.
+
+    *origin*, a ``dns.name.Name``, a ``str``, or ``None``. The origin
+    of the zone; if not specified, the first ``$ORIGIN`` statement in the
+    zone file will determine the origin of the zone.
+
+    *rdclass*, a ``dns.rdataclass.RdataClass``, the zone's rdata class; the default is
+    class IN.
+
+    *relativize*, a ``bool``, determines whether domain names are
+    relativized to the zone's origin. The default is ``True``.
+
+    *zone_factory*, the zone factory to use or ``None``. If ``None``, then
+    ``dns.zone.Zone`` will be used. The value may be any class or callable
+    that returns a subclass of ``dns.zone.Zone``.
+
+    *filename*, a ``str`` or ``None``, the filename to emit when
+    describing where an error occurred; the default is ``'<string>'``.
+
+    *allow_include*, a ``bool``. If ``True``, then ``$INCLUDE``
+    directives are permitted. If ``False``, the default, then encountering
+    a ``$INCLUDE`` will raise a ``SyntaxError`` exception.
+
+    *check_origin*, a ``bool``. If ``True``, the default, then sanity
+    checks of the origin node will be made by calling the zone's
+    ``check_origin()`` method.
+
+    *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA
+    encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder
+    is used.
+
+    *allow_directives*, a ``bool`` or an iterable of `str`. If ``True``, the default,
+    then directives are permitted, and the *allow_include* parameter controls whether
+    ``$INCLUDE`` is permitted. If ``False`` or an empty iterable, then no directive
+    processing is done and any directive-like text will be treated as a regular owner
+    name. If a non-empty iterable, then only the listed directives (including the
+    ``$``) are allowed.
+
+    Raises ``dns.zone.NoSOA`` if there is no SOA RRset.
+
+    Raises ``dns.zone.NoNS`` if there is no NS RRset.
+
+    Raises ``KeyError`` if there is no origin node.
+
+    Returns a subclass of ``dns.zone.Zone``.
+    """
+    return _from_text(
+        text,
+        origin,
+        rdclass,
+        relativize,
+        zone_factory,
+        filename,
+        allow_include,
+        check_origin,
+        idna_codec,
+        allow_directives,
+    )
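For example, a minimal sketch of from_text() (placeholder zone data, not from the diff):

import dns.name
import dns.zone

zone_text = """
$ORIGIN example.
$TTL 300
@ IN SOA . . 1 0 0 0 0
@ IN NS ns
"""
# origin may be omitted here because the $ORIGIN directive supplies it.
zone = dns.zone.from_text(zone_text)
assert zone.origin == dns.name.from_text("example.")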
+ """ + return _from_text( + text, + origin, + rdclass, + relativize, + zone_factory, + filename, + allow_include, + check_origin, + idna_codec, + allow_directives, + ) + + +def from_file( + f: Any, + origin: Optional[Union[dns.name.Name, str]] = None, + rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, + relativize: bool = True, + zone_factory: Any = Zone, + filename: Optional[str] = None, + allow_include: bool = True, + check_origin: bool = True, + idna_codec: Optional[dns.name.IDNACodec] = None, + allow_directives: Union[bool, Iterable[str]] = True, +) -> Zone: + """Read a zone file and build a zone object. + + *f*, a file or ``str``. If *f* is a string, it is treated + as the name of a file to open. + + *origin*, a ``dns.name.Name``, a ``str``, or ``None``. The origin + of the zone; if not specified, the first ``$ORIGIN`` statement in the + zone file will determine the origin of the zone. + + *rdclass*, an ``int``, the zone's rdata class; the default is class IN. + + *relativize*, a ``bool``, determine's whether domain names are + relativized to the zone's origin. The default is ``True``. + + *zone_factory*, the zone factory to use or ``None``. If ``None``, then + ``dns.zone.Zone`` will be used. The value may be any class or callable + that returns a subclass of ``dns.zone.Zone``. + + *filename*, a ``str`` or ``None``, the filename to emit when + describing where an error occurred; the default is ``''``. + + *allow_include*, a ``bool``. If ``True``, the default, then ``$INCLUDE`` + directives are permitted. If ``False``, then encoutering a ``$INCLUDE`` + will raise a ``SyntaxError`` exception. + + *check_origin*, a ``bool``. If ``True``, the default, then sanity + checks of the origin node will be made by calling the zone's + ``check_origin()`` method. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + *allow_directives*, a ``bool`` or an iterable of `str`. If ``True``, the default, + then directives are permitted, and the *allow_include* parameter controls whether + ``$INCLUDE`` is permitted. If ``False`` or an empty iterable, then no directive + processing is done and any directive-like text will be treated as a regular owner + name. If a non-empty iterable, then only the listed directives (including the + ``$``) are allowed. + + Raises ``dns.zone.NoSOA`` if there is no SOA RRset. + + Raises ``dns.zone.NoNS`` if there is no NS RRset. + + Raises ``KeyError`` if there is no origin node. + + Returns a subclass of ``dns.zone.Zone``. + """ + + if isinstance(f, str): + if filename is None: + filename = f + cm: contextlib.AbstractContextManager = open(f) + else: + cm = contextlib.nullcontext(f) + with cm as f: + return _from_text( + f, + origin, + rdclass, + relativize, + zone_factory, + filename, + allow_include, + check_origin, + idna_codec, + allow_directives, + ) + assert False # make mypy happy lgtm[py/unreachable-statement] + + +def from_xfr( + xfr: Any, + zone_factory: Any = Zone, + relativize: bool = True, + check_origin: bool = True, +) -> Zone: + """Convert the output of a zone transfer generator into a zone object. + + *xfr*, a generator of ``dns.message.Message`` objects, typically + ``dns.query.xfr()``. + + *relativize*, a ``bool``, determine's whether domain names are + relativized to the zone's origin. The default is ``True``. + It is essential that the relativize setting matches the one specified + to the generator. + + *check_origin*, a ``bool``. 
If ``True``, the default, then sanity + checks of the origin node will be made by calling the zone's + ``check_origin()`` method. + + Raises ``dns.zone.NoSOA`` if there is no SOA RRset. + + Raises ``dns.zone.NoNS`` if there is no NS RRset. + + Raises ``KeyError`` if there is no origin node. + + Raises ``ValueError`` if no messages are yielded by the generator. + + Returns a subclass of ``dns.zone.Zone``. + """ + + z = None + for r in xfr: + if z is None: + if relativize: + origin = r.origin + else: + origin = r.answer[0].name + rdclass = r.answer[0].rdclass + z = zone_factory(origin, rdclass, relativize=relativize) + for rrset in r.answer: + znode = z.nodes.get(rrset.name) + if not znode: + znode = z.node_factory() + z.nodes[rrset.name] = znode + zrds = znode.find_rdataset(rrset.rdclass, rrset.rdtype, rrset.covers, True) + zrds.update_ttl(rrset.ttl) + for rd in rrset: + zrds.add(rd) + if z is None: + raise ValueError("empty transfer") + if check_origin: + z.check_origin() + return z diff --git a/venv/lib/python3.11/site-packages/dns/zonefile.py b/venv/lib/python3.11/site-packages/dns/zonefile.py new file mode 100644 index 0000000..d74510b --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/zonefile.py @@ -0,0 +1,744 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Zones.""" + +import re +import sys +from typing import Any, Iterable, List, Optional, Set, Tuple, Union + +import dns.exception +import dns.grange +import dns.name +import dns.node +import dns.rdata +import dns.rdataclass +import dns.rdatatype +import dns.rdtypes.ANY.SOA +import dns.rrset +import dns.tokenizer +import dns.transaction +import dns.ttl + + +class UnknownOrigin(dns.exception.DNSException): + """Unknown origin""" + + +class CNAMEAndOtherData(dns.exception.DNSException): + """A node has a CNAME and other data""" + + +def _check_cname_and_other_data(txn, name, rdataset): + rdataset_kind = dns.node.NodeKind.classify_rdataset(rdataset) + node = txn.get_node(name) + if node is None: + # empty nodes are neutral. 
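# Editor's note: a sketch (not part of the diff) of the rule this check
# enforces -- a node may not mix a CNAME with regular data:
#
#   import dns.zone
#   import dns.zonefile
#
#   bad = (
#       "@ 300 IN SOA . . 1 0 0 0 0\n"
#       "@ 300 IN NS ns\n"
#       "www 300 IN CNAME mail\n"
#       "www 300 IN A 10.0.0.1\n"  # regular data at a CNAME node
#   )
#   try:
#       dns.zone.from_text(bad, origin="example.")
#   except dns.zonefile.CNAMEAndOtherData:
#       pass  # raised while the reader adds the A record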
+ return + node_kind = node.classify() + if ( + node_kind == dns.node.NodeKind.CNAME + and rdataset_kind == dns.node.NodeKind.REGULAR + ): + raise CNAMEAndOtherData("rdataset type is not compatible with a CNAME node") + elif ( + node_kind == dns.node.NodeKind.REGULAR + and rdataset_kind == dns.node.NodeKind.CNAME + ): + raise CNAMEAndOtherData( + "CNAME rdataset is not compatible with a regular data node" + ) + # Otherwise at least one of the node and the rdataset is neutral, so + # adding the rdataset is ok + + +SavedStateType = Tuple[ + dns.tokenizer.Tokenizer, + Optional[dns.name.Name], # current_origin + Optional[dns.name.Name], # last_name + Optional[Any], # current_file + int, # last_ttl + bool, # last_ttl_known + int, # default_ttl + bool, +] # default_ttl_known + + +def _upper_dollarize(s): + s = s.upper() + if not s.startswith("$"): + s = "$" + s + return s + + +class Reader: + """Read a DNS zone file into a transaction.""" + + def __init__( + self, + tok: dns.tokenizer.Tokenizer, + rdclass: dns.rdataclass.RdataClass, + txn: dns.transaction.Transaction, + allow_include: bool = False, + allow_directives: Union[bool, Iterable[str]] = True, + force_name: Optional[dns.name.Name] = None, + force_ttl: Optional[int] = None, + force_rdclass: Optional[dns.rdataclass.RdataClass] = None, + force_rdtype: Optional[dns.rdatatype.RdataType] = None, + default_ttl: Optional[int] = None, + ): + self.tok = tok + (self.zone_origin, self.relativize, _) = txn.manager.origin_information() + self.current_origin = self.zone_origin + self.last_ttl = 0 + self.last_ttl_known = False + if force_ttl is not None: + default_ttl = force_ttl + if default_ttl is None: + self.default_ttl = 0 + self.default_ttl_known = False + else: + self.default_ttl = default_ttl + self.default_ttl_known = True + self.last_name = self.current_origin + self.zone_rdclass = rdclass + self.txn = txn + self.saved_state: List[SavedStateType] = [] + self.current_file: Optional[Any] = None + self.allowed_directives: Set[str] + if allow_directives is True: + self.allowed_directives = {"$GENERATE", "$ORIGIN", "$TTL"} + if allow_include: + self.allowed_directives.add("$INCLUDE") + elif allow_directives is False: + # allow_include was ignored in earlier releases if allow_directives was + # False, so we continue that. + self.allowed_directives = set() + else: + # Note that if directives are explicitly specified, then allow_include + # is ignored. + self.allowed_directives = set(_upper_dollarize(d) for d in allow_directives) + self.force_name = force_name + self.force_ttl = force_ttl + self.force_rdclass = force_rdclass + self.force_rdtype = force_rdtype + self.txn.check_put_rdataset(_check_cname_and_other_data) + + def _eat_line(self): + while 1: + token = self.tok.get() + if token.is_eol_or_eof(): + break + + def _get_identifier(self): + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + return token + + def _rr_line(self): + """Process one line from a DNS zone file.""" + token = None + # Name + if self.force_name is not None: + name = self.force_name + else: + if self.current_origin is None: + raise UnknownOrigin + token = self.tok.get(want_leading=True) + if not token.is_whitespace(): + self.last_name = self.tok.as_name(token, self.current_origin) + else: + token = self.tok.get() + if token.is_eol_or_eof(): + # treat leading WS followed by EOL/EOF as if they were EOL/EOF. 
+                    return
+                self.tok.unget(token)
+            name = self.last_name
+            if not name.is_subdomain(self.zone_origin):
+                self._eat_line()
+                return
+            if self.relativize:
+                name = name.relativize(self.zone_origin)
+
+        # TTL
+        if self.force_ttl is not None:
+            ttl = self.force_ttl
+            self.last_ttl = ttl
+            self.last_ttl_known = True
+        else:
+            token = self._get_identifier()
+            ttl = None
+            try:
+                ttl = dns.ttl.from_text(token.value)
+                self.last_ttl = ttl
+                self.last_ttl_known = True
+                token = None
+            except dns.ttl.BadTTL:
+                self.tok.unget(token)
+
+        # Class
+        if self.force_rdclass is not None:
+            rdclass = self.force_rdclass
+        else:
+            token = self._get_identifier()
+            try:
+                rdclass = dns.rdataclass.from_text(token.value)
+            except dns.exception.SyntaxError:
+                raise
+            except Exception:
+                rdclass = self.zone_rdclass
+                self.tok.unget(token)
+        if rdclass != self.zone_rdclass:
+            raise dns.exception.SyntaxError("RR class is not zone's class")
+
+        if ttl is None:
+            # support for <class> <ttl> syntax
+            token = self._get_identifier()
+            ttl = None
+            try:
+                ttl = dns.ttl.from_text(token.value)
+                self.last_ttl = ttl
+                self.last_ttl_known = True
+                token = None
+            except dns.ttl.BadTTL:
+                if self.default_ttl_known:
+                    ttl = self.default_ttl
+                elif self.last_ttl_known:
+                    ttl = self.last_ttl
+                self.tok.unget(token)
+
+        # Type
+        if self.force_rdtype is not None:
+            rdtype = self.force_rdtype
+        else:
+            token = self._get_identifier()
+            try:
+                rdtype = dns.rdatatype.from_text(token.value)
+            except Exception:
+                raise dns.exception.SyntaxError(f"unknown rdatatype '{token.value}'")
+
+        try:
+            rd = dns.rdata.from_text(
+                rdclass,
+                rdtype,
+                self.tok,
+                self.current_origin,
+                self.relativize,
+                self.zone_origin,
+            )
+        except dns.exception.SyntaxError:
+            # Catch and reraise.
+            raise
+        except Exception:
+            # All exceptions that occur in the processing of rdata
+            # are treated as syntax errors. This is not strictly
+            # correct, but it is correct almost all of the time.
+            # We convert them to syntax errors so that we can emit
+            # helpful filename:line info.
+            (ty, va) = sys.exc_info()[:2]
+            raise dns.exception.SyntaxError(f"caught exception {str(ty)}: {str(va)}")
+
+        if not self.default_ttl_known and rdtype == dns.rdatatype.SOA:
+            # The pre-RFC2308 and pre-BIND9 behavior inherits the zone default
+            # TTL from the SOA minttl if no $TTL statement is present before the
+            # SOA is parsed.
+            self.default_ttl = rd.minimum
+            self.default_ttl_known = True
+            if ttl is None:
+                # if we didn't have a TTL on the SOA, set it!
+                ttl = rd.minimum
+
+        # TTL check. We had to wait until now to do this as the SOA RR's
+        # own TTL can be inferred from its minimum.
+        if ttl is None:
+            raise dns.exception.SyntaxError("Missing default TTL value")
+
+        self.txn.add(name, ttl, rd)
+
+    def _parse_modify(self, side: str) -> Tuple[str, str, int, int, str]:
+        # Here we catch everything in '{' '}' in a group so we can replace it
+        # with ''.
+        is_generate1 = re.compile(r"^.*\$({(\+|-?)(\d+),(\d+),(.)}).*$")
+        is_generate2 = re.compile(r"^.*\$({(\+|-?)(\d+)}).*$")
+        is_generate3 = re.compile(r"^.*\$({(\+|-?)(\d+),(\d+)}).*$")
+        # Sometimes there are modifiers in the hostname. These come after
+        # the dollar sign. They are in the form: ${offset[,width[,base]]}.
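# Editor's note: a sketch (not part of the diff) of the $GENERATE modifier
# syntax parsed here; origin and addresses are placeholder values.
#
#   import dns.zone
#
#   text = (
#       "@ 300 IN SOA . . 1 0 0 0 0\n"
#       "@ 300 IN NS ns\n"
#       "$GENERATE 1-3 host${0,2,d} 300 IN A 10.0.0.$\n"
#   )
#   zone = dns.zone.from_text(text, origin="example.")
#   # Expands to host01, host02, host03 -> 10.0.0.1, 10.0.0.2, 10.0.0.3.
#   assert zone.get_rdataset("host02", "A") is not None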
+ # Make names + mod = "" + sign = "+" + offset = "0" + width = "0" + base = "d" + g1 = is_generate1.match(side) + if g1: + mod, sign, offset, width, base = g1.groups() + if sign == "": + sign = "+" + else: + g2 = is_generate2.match(side) + if g2: + mod, sign, offset = g2.groups() + if sign == "": + sign = "+" + width = "0" + base = "d" + else: + g3 = is_generate3.match(side) + if g3: + mod, sign, offset, width = g3.groups() + if sign == "": + sign = "+" + base = "d" + + ioffset = int(offset) + iwidth = int(width) + + if sign not in ["+", "-"]: + raise dns.exception.SyntaxError(f"invalid offset sign {sign}") + if base not in ["d", "o", "x", "X", "n", "N"]: + raise dns.exception.SyntaxError(f"invalid type {base}") + + return mod, sign, ioffset, iwidth, base + + def _generate_line(self): + # range lhs [ttl] [class] type rhs [ comment ] + """Process one line containing the GENERATE statement from a DNS + zone file.""" + if self.current_origin is None: + raise UnknownOrigin + + token = self.tok.get() + # Range (required) + try: + start, stop, step = dns.grange.from_text(token.value) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except Exception: + raise dns.exception.SyntaxError + + # lhs (required) + try: + lhs = token.value + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except Exception: + raise dns.exception.SyntaxError + + # TTL + try: + ttl = dns.ttl.from_text(token.value) + self.last_ttl = ttl + self.last_ttl_known = True + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except dns.ttl.BadTTL: + if not (self.last_ttl_known or self.default_ttl_known): + raise dns.exception.SyntaxError("Missing default TTL value") + if self.default_ttl_known: + ttl = self.default_ttl + elif self.last_ttl_known: + ttl = self.last_ttl + # Class + try: + rdclass = dns.rdataclass.from_text(token.value) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except dns.exception.SyntaxError: + raise dns.exception.SyntaxError + except Exception: + rdclass = self.zone_rdclass + if rdclass != self.zone_rdclass: + raise dns.exception.SyntaxError("RR class is not zone's class") + # Type + try: + rdtype = dns.rdatatype.from_text(token.value) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except Exception: + raise dns.exception.SyntaxError(f"unknown rdatatype '{token.value}'") + + # rhs (required) + rhs = token.value + + def _calculate_index(counter: int, offset_sign: str, offset: int) -> int: + """Calculate the index from the counter and offset.""" + if offset_sign == "-": + offset *= -1 + return counter + offset + + def _format_index(index: int, base: str, width: int) -> str: + """Format the index with the given base, and zero-fill it + to the given width.""" + if base in ["d", "o", "x", "X"]: + return format(index, base).zfill(width) + + # base can only be n or N here + hexa = _format_index(index, "x", width) + nibbles = ".".join(hexa[::-1])[:width] + if base == "N": + nibbles = nibbles.upper() + return nibbles + + lmod, lsign, loffset, lwidth, lbase = self._parse_modify(lhs) + rmod, rsign, roffset, rwidth, rbase = self._parse_modify(rhs) + for i in range(start, stop + 1, step): + # +1 because bind is inclusive and python is exclusive + + lindex = _calculate_index(i, lsign, loffset) + rindex = _calculate_index(i, rsign, roffset) + + lzfindex = _format_index(lindex, lbase, lwidth) + rzfindex = 
_format_index(rindex, rbase, rwidth) + + name = lhs.replace(f"${lmod}", lzfindex) + rdata = rhs.replace(f"${rmod}", rzfindex) + + self.last_name = dns.name.from_text( + name, self.current_origin, self.tok.idna_codec + ) + name = self.last_name + if not name.is_subdomain(self.zone_origin): + self._eat_line() + return + if self.relativize: + name = name.relativize(self.zone_origin) + + try: + rd = dns.rdata.from_text( + rdclass, + rdtype, + rdata, + self.current_origin, + self.relativize, + self.zone_origin, + ) + except dns.exception.SyntaxError: + # Catch and reraise. + raise + except Exception: + # All exceptions that occur in the processing of rdata + # are treated as syntax errors. This is not strictly + # correct, but it is correct almost all of the time. + # We convert them to syntax errors so that we can emit + # helpful filename:line info. + (ty, va) = sys.exc_info()[:2] + raise dns.exception.SyntaxError( + f"caught exception {str(ty)}: {str(va)}" + ) + + self.txn.add(name, ttl, rd) + + def read(self) -> None: + """Read a DNS zone file and build a zone object. + + @raises dns.zone.NoSOA: No SOA RR was found at the zone origin + @raises dns.zone.NoNS: No NS RRset was found at the zone origin + """ + + try: + while 1: + token = self.tok.get(True, True) + if token.is_eof(): + if self.current_file is not None: + self.current_file.close() + if len(self.saved_state) > 0: + ( + self.tok, + self.current_origin, + self.last_name, + self.current_file, + self.last_ttl, + self.last_ttl_known, + self.default_ttl, + self.default_ttl_known, + ) = self.saved_state.pop(-1) + continue + break + elif token.is_eol(): + continue + elif token.is_comment(): + self.tok.get_eol() + continue + elif token.value[0] == "$" and len(self.allowed_directives) > 0: + # Note that we only run directive processing code if at least + # one directive is allowed in order to be backwards compatible + c = token.value.upper() + if c not in self.allowed_directives: + raise dns.exception.SyntaxError( + f"zone file directive '{c}' is not allowed" + ) + if c == "$TTL": + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError("bad $TTL") + self.default_ttl = dns.ttl.from_text(token.value) + self.default_ttl_known = True + self.tok.get_eol() + elif c == "$ORIGIN": + self.current_origin = self.tok.get_name() + self.tok.get_eol() + if self.zone_origin is None: + self.zone_origin = self.current_origin + self.txn._set_origin(self.current_origin) + elif c == "$INCLUDE": + token = self.tok.get() + filename = token.value + token = self.tok.get() + new_origin: Optional[dns.name.Name] + if token.is_identifier(): + new_origin = dns.name.from_text( + token.value, self.current_origin, self.tok.idna_codec + ) + self.tok.get_eol() + elif not token.is_eol_or_eof(): + raise dns.exception.SyntaxError("bad origin in $INCLUDE") + else: + new_origin = self.current_origin + self.saved_state.append( + ( + self.tok, + self.current_origin, + self.last_name, + self.current_file, + self.last_ttl, + self.last_ttl_known, + self.default_ttl, + self.default_ttl_known, + ) + ) + self.current_file = open(filename) + self.tok = dns.tokenizer.Tokenizer(self.current_file, filename) + self.current_origin = new_origin + elif c == "$GENERATE": + self._generate_line() + else: + raise dns.exception.SyntaxError( + f"Unknown zone file directive '{c}'" + ) + continue + self.tok.unget(token) + self._rr_line() + except dns.exception.SyntaxError as detail: + (filename, line_number) = self.tok.where() + if detail is None: + detail = "syntax 
error" + ex = dns.exception.SyntaxError( + "%s:%d: %s" % (filename, line_number, detail) + ) + tb = sys.exc_info()[2] + raise ex.with_traceback(tb) from None + + +class RRsetsReaderTransaction(dns.transaction.Transaction): + def __init__(self, manager, replacement, read_only): + assert not read_only + super().__init__(manager, replacement, read_only) + self.rdatasets = {} + + def _get_rdataset(self, name, rdtype, covers): + return self.rdatasets.get((name, rdtype, covers)) + + def _get_node(self, name): + rdatasets = [] + for (rdataset_name, _, _), rdataset in self.rdatasets.items(): + if name == rdataset_name: + rdatasets.append(rdataset) + if len(rdatasets) == 0: + return None + node = dns.node.Node() + node.rdatasets = rdatasets + return node + + def _put_rdataset(self, name, rdataset): + self.rdatasets[(name, rdataset.rdtype, rdataset.covers)] = rdataset + + def _delete_name(self, name): + # First remove any changes involving the name + remove = [] + for key in self.rdatasets: + if key[0] == name: + remove.append(key) + if len(remove) > 0: + for key in remove: + del self.rdatasets[key] + + def _delete_rdataset(self, name, rdtype, covers): + try: + del self.rdatasets[(name, rdtype, covers)] + except KeyError: + pass + + def _name_exists(self, name): + for n, _, _ in self.rdatasets: + if n == name: + return True + return False + + def _changed(self): + return len(self.rdatasets) > 0 + + def _end_transaction(self, commit): + if commit and self._changed(): + rrsets = [] + for (name, _, _), rdataset in self.rdatasets.items(): + rrset = dns.rrset.RRset( + name, rdataset.rdclass, rdataset.rdtype, rdataset.covers + ) + rrset.update(rdataset) + rrsets.append(rrset) + self.manager.set_rrsets(rrsets) + + def _set_origin(self, origin): + pass + + def _iterate_rdatasets(self): + raise NotImplementedError # pragma: no cover + + def _iterate_names(self): + raise NotImplementedError # pragma: no cover + + +class RRSetsReaderManager(dns.transaction.TransactionManager): + def __init__( + self, origin=dns.name.root, relativize=False, rdclass=dns.rdataclass.IN + ): + self.origin = origin + self.relativize = relativize + self.rdclass = rdclass + self.rrsets = [] + + def reader(self): # pragma: no cover + raise NotImplementedError + + def writer(self, replacement=False): + assert replacement is True + return RRsetsReaderTransaction(self, True, False) + + def get_class(self): + return self.rdclass + + def origin_information(self): + if self.relativize: + effective = dns.name.empty + else: + effective = self.origin + return (self.origin, self.relativize, effective) + + def set_rrsets(self, rrsets): + self.rrsets = rrsets + + +def read_rrsets( + text: Any, + name: Optional[Union[dns.name.Name, str]] = None, + ttl: Optional[int] = None, + rdclass: Optional[Union[dns.rdataclass.RdataClass, str]] = dns.rdataclass.IN, + default_rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, + rdtype: Optional[Union[dns.rdatatype.RdataType, str]] = None, + default_ttl: Optional[Union[int, str]] = None, + idna_codec: Optional[dns.name.IDNACodec] = None, + origin: Optional[Union[dns.name.Name, str]] = dns.name.root, + relativize: bool = False, +) -> List[dns.rrset.RRset]: + """Read one or more rrsets from the specified text, possibly subject + to restrictions. + + *text*, a file object or a string, is the input to process. + + *name*, a string, ``dns.name.Name``, or ``None``, is the owner name of + the rrset. If not ``None``, then the owner name is "forced", and the + input must not specify an owner name. 
If ``None``, then any owner names + are allowed and must be present in the input. + + *ttl*, an ``int``, string, or ``None``. If not ``None``, the TTL is + forced to be the specified value and the input must not specify a TTL. + If ``None``, then a TTL may be specified in the input. If it is not + specified, then the *default_ttl* will be used. + + *rdclass*, a ``dns.rdataclass.RdataClass``, string, or ``None``. If + not ``None``, then the class is forced to the specified value, and the + input must not specify a class. If ``None``, then the input may specify + a class that matches *default_rdclass*. Note that it is not possible to + return rrsets with differing classes; specifying ``None`` for the class + simply allows the user to optionally type a class as that may be convenient + when cutting and pasting. + + *default_rdclass*, a ``dns.rdataclass.RdataClass`` or string. The class + of the returned rrsets. + + *rdtype*, a ``dns.rdatatype.RdataType``, string, or ``None``. If not + ``None``, then the type is forced to the specified value, and the + input must not specify a type. If ``None``, then a type must be present + for each RR. + + *default_ttl*, an ``int``, string, or ``None``. If not ``None``, then if + the TTL is not forced and is not specified, then this value will be used. + If ``None``, then if the TTL is not forced, an error will occur if the TTL + is not specified. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. Note that codecs only apply to the owner name; dnspython does + not do IDNA for names in rdata, as there is no IDNA zonefile format. + + *origin*, a string, ``dns.name.Name``, or ``None``, is the origin for any + relative names in the input, and also the origin to relativize to if + *relativize* is ``True``. + + *relativize*, a bool. If ``True``, names are relativized to the *origin*; + if ``False`` then any relative names in the input are made absolute by + appending the *origin*. + """ + if isinstance(origin, str): + origin = dns.name.from_text(origin, dns.name.root, idna_codec) + if isinstance(name, str): + name = dns.name.from_text(name, origin, idna_codec) + if isinstance(ttl, str): + ttl = dns.ttl.from_text(ttl) + if isinstance(default_ttl, str): + default_ttl = dns.ttl.from_text(default_ttl) + if rdclass is not None: + rdclass = dns.rdataclass.RdataClass.make(rdclass) + else: + rdclass = None + default_rdclass = dns.rdataclass.RdataClass.make(default_rdclass) + if rdtype is not None: + rdtype = dns.rdatatype.RdataType.make(rdtype) + else: + rdtype = None + manager = RRSetsReaderManager(origin, relativize, default_rdclass) + with manager.writer(True) as txn: + tok = dns.tokenizer.Tokenizer(text, "", idna_codec=idna_codec) + reader = Reader( + tok, + default_rdclass, + txn, + allow_directives=False, + force_name=name, + force_ttl=ttl, + force_rdclass=rdclass, + force_rdtype=rdtype, + default_ttl=default_ttl, + ) + reader.read() + return manager.rrsets
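A quick illustration of the two features implemented in the file above may help: `read_rrsets` parses free-standing RR text, and `$GENERATE` lines are expanded by `_generate_line` while reading a zone. A minimal sketch, assuming the owner names, addresses, and `example.com.` origin are placeholders and not part of this diff:

```python
import dns.zone
import dns.zonefile

# read_rrsets: the class defaults to IN and is forced, so the text supplies
# only owner, TTL, type, and rdata. Both records share one owner name, so
# they come back as a single RRset with two addresses.
rrsets = dns.zonefile.read_rrsets(
    "www 3600 A 10.0.0.1\nwww 3600 A 10.0.0.2\n",
    origin="example.com.",
)
print(rrsets[0].to_text())

# $GENERATE: expanded during zone parsing; this creates host-1 .. host-3.
zone = dns.zone.from_text(
    "@ 3600 IN SOA ns admin 1 7200 900 1209600 86400\n"
    "@ 3600 IN NS ns\n"
    "$GENERATE 1-3 host-$ 3600 IN A 10.0.0.$\n",
    origin="example.com.",
)
print(sorted(str(name) for name in zone.nodes))
```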
diff --git a/venv/lib/python3.11/site-packages/dns/zonetypes.py b/venv/lib/python3.11/site-packages/dns/zonetypes.py new file mode 100644 index 0000000..195ee2e --- /dev/null +++ b/venv/lib/python3.11/site-packages/dns/zonetypes.py @@ -0,0 +1,37 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +"""Common zone-related types.""" + +# This is a separate file to avoid import circularity between dns.zone and +# the implementation of the ZONEMD type. + +import hashlib + +import dns.enum + + +class DigestScheme(dns.enum.IntEnum): + """ZONEMD Scheme""" + + SIMPLE = 1 + + @classmethod + def _maximum(cls): + return 255 + + +class DigestHashAlgorithm(dns.enum.IntEnum): + """ZONEMD Hash Algorithm""" + + SHA384 = 1 + SHA512 = 2 + + @classmethod + def _maximum(cls): + return 255 + + +_digest_hashers = { + DigestHashAlgorithm.SHA384: hashlib.sha384, + DigestHashAlgorithm.SHA512: hashlib.sha512, +} diff --git a/venv/lib/python3.11/site-packages/dnspython-2.7.0.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/dnspython-2.7.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.11/site-packages/dnspython-2.7.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.11/site-packages/dnspython-2.7.0.dist-info/METADATA b/venv/lib/python3.11/site-packages/dnspython-2.7.0.dist-info/METADATA new file mode 100644 index 0000000..ca4a4f4 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dnspython-2.7.0.dist-info/METADATA @@ -0,0 +1,149 @@ +Metadata-Version: 2.3 +Name: dnspython +Version: 2.7.0 +Summary: DNS toolkit +Project-URL: homepage, https://www.dnspython.org +Project-URL: repository, https://github.com/rthalley/dnspython.git +Project-URL: documentation, https://dnspython.readthedocs.io/en/stable/ +Project-URL: issues, https://github.com/rthalley/dnspython/issues +Author-email: Bob Halley +License: ISC +License-File: LICENSE +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: ISC License (ISCL) +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Topic :: Internet :: Name Service (DNS) +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.9 +Provides-Extra: dev +Requires-Dist: black>=23.1.0; extra == 'dev' +Requires-Dist: coverage>=7.0; extra == 'dev' +Requires-Dist: flake8>=7; extra == 'dev' +Requires-Dist: hypercorn>=0.16.0; extra == 'dev' +Requires-Dist: mypy>=1.8; extra == 'dev' +Requires-Dist: pylint>=3; extra == 'dev' +Requires-Dist: pytest-cov>=4.1.0; extra == 'dev' +Requires-Dist: pytest>=7.4; extra == 'dev' +Requires-Dist: quart-trio>=0.11.0; extra == 'dev' +Requires-Dist: sphinx-rtd-theme>=2.0.0; extra == 'dev' +Requires-Dist: sphinx>=7.2.0; extra == 'dev' +Requires-Dist: twine>=4.0.0; extra == 'dev' +Requires-Dist: wheel>=0.42.0; extra == 'dev' +Provides-Extra: dnssec +Requires-Dist: cryptography>=43; extra == 'dnssec' +Provides-Extra: doh +Requires-Dist: h2>=4.1.0; extra == 'doh' +Requires-Dist: httpcore>=1.0.0; extra == 'doh' +Requires-Dist: httpx>=0.26.0; extra == 'doh' +Provides-Extra: doq +Requires-Dist: aioquic>=1.0.0; extra == 'doq' +Provides-Extra: idna +Requires-Dist: idna>=3.7; extra == 'idna' +Provides-Extra: trio +Requires-Dist: trio>=0.23; extra == 'trio' +Provides-Extra: wmi +Requires-Dist: wmi>=1.5.1; extra == 'wmi' +Description-Content-Type: text/markdown + +# dnspython + +[![Build
Status](https://github.com/rthalley/dnspython/actions/workflows/ci.yml/badge.svg)](https://github.com/rthalley/dnspython/actions/) +[![Documentation Status](https://readthedocs.org/projects/dnspython/badge/?version=latest)](https://dnspython.readthedocs.io/en/latest/?badge=latest) +[![PyPI version](https://badge.fury.io/py/dnspython.svg)](https://badge.fury.io/py/dnspython) +[![License: ISC](https://img.shields.io/badge/License-ISC-brightgreen.svg)](https://opensource.org/licenses/ISC) +[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) + +## INTRODUCTION + +dnspython is a DNS toolkit for Python. It supports almost all record types. It +can be used for queries, zone transfers, and dynamic updates. It supports TSIG +authenticated messages and EDNS0. + +dnspython provides both high and low level access to DNS. The high level classes +perform queries for data of a given name, type, and class, and return an answer +set. The low level classes allow direct manipulation of DNS zones, messages, +names, and records. + +To see a few of the ways dnspython can be used, look in the `examples/` +directory. + +dnspython is a utility to work with DNS; `/etc/hosts` is thus not used. For +simple forward DNS lookups, it's better to use `socket.getaddrinfo()` or +`socket.gethostbyname()`. + +dnspython originated at Nominum where it was developed +to facilitate the testing of DNS software. + +## ABOUT THIS RELEASE + +This is dnspython 2.7.0. +Please read +[What's New](https://dnspython.readthedocs.io/en/stable/whatsnew.html) for +information about the changes in this release. + +## INSTALLATION + +* Many distributions have dnspython packaged for you, so you should + check there first. +* To use a wheel downloaded from PyPI, run: + + pip install dnspython + +* To install from the source code, go into the top-level of the source code + and run: + +``` + pip install --upgrade pip build + python -m build + pip install dist/*.whl +``` + +* To install the latest from the main branch, run `pip install git+https://github.com/rthalley/dnspython.git` + +Dnspython's default installation does not depend on any modules other than +those in the Python standard library. To use some features, additional modules +must be installed. For convenience, pip options are defined for the +requirements. + +If you want to use DNS-over-HTTPS, run +`pip install dnspython[doh]`. + +If you want to use DNSSEC functionality, run +`pip install dnspython[dnssec]`. + +If you want to use internationalized domain names (IDNA) +functionality, you must run +`pip install dnspython[idna]`. + +If you want to use the Trio asynchronous I/O package, run +`pip install dnspython[trio]`. + +If you want to use WMI on Windows to determine the active DNS settings +instead of the default registry scanning method, run +`pip install dnspython[wmi]`. + +If you want to try the experimental DNS-over-QUIC code, run +`pip install dnspython[doq]`. + +Note that you can install any combination of the above, e.g.: +`pip install dnspython[doh,dnssec,idna]` + +### Notices + +Python 2.x support ended with the release of 1.16.0. Dnspython 2.6.x supports +Python 3.8 and later, though support for 3.8 ends on October 14, 2024. +Dnspython 2.7.x supports Python 3.9 and later. Future support is aligned with the +lifetime of the Python 3 versions. + +Documentation has moved to +[dnspython.readthedocs.io](https://dnspython.readthedocs.io).
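As a small complement to the README above: the high-level API it describes boils down to calls like the following (the queried domain is a placeholder, and this sketch assumes a working network resolver):

```python
import dns.resolver

# Stub-resolver query: fetch the MX RRset for a domain and print
# each mail exchange together with its preference.
answer = dns.resolver.resolve("example.com", "MX")
for rdata in answer:
    print(rdata.preference, rdata.exchange)
```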
diff --git a/venv/lib/python3.11/site-packages/dnspython-2.7.0.dist-info/RECORD b/venv/lib/python3.11/site-packages/dnspython-2.7.0.dist-info/RECORD new file mode 100644 index 0000000..3abdd69 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dnspython-2.7.0.dist-info/RECORD @@ -0,0 +1,294 @@ +dns/__init__.py,sha256=YJZtDG14Idw5ui3h1nWooSwPM9gsxQgB8M0GBZ3aly0,1663 +dns/__pycache__/__init__.cpython-311.pyc,, +dns/__pycache__/_asyncbackend.cpython-311.pyc,, +dns/__pycache__/_asyncio_backend.cpython-311.pyc,, +dns/__pycache__/_ddr.cpython-311.pyc,, +dns/__pycache__/_features.cpython-311.pyc,, +dns/__pycache__/_immutable_ctx.cpython-311.pyc,, +dns/__pycache__/_trio_backend.cpython-311.pyc,, +dns/__pycache__/asyncbackend.cpython-311.pyc,, +dns/__pycache__/asyncquery.cpython-311.pyc,, +dns/__pycache__/asyncresolver.cpython-311.pyc,, +dns/__pycache__/dnssec.cpython-311.pyc,, +dns/__pycache__/dnssectypes.cpython-311.pyc,, +dns/__pycache__/e164.cpython-311.pyc,, +dns/__pycache__/edns.cpython-311.pyc,, +dns/__pycache__/entropy.cpython-311.pyc,, +dns/__pycache__/enum.cpython-311.pyc,, +dns/__pycache__/exception.cpython-311.pyc,, +dns/__pycache__/flags.cpython-311.pyc,, +dns/__pycache__/grange.cpython-311.pyc,, +dns/__pycache__/immutable.cpython-311.pyc,, +dns/__pycache__/inet.cpython-311.pyc,, +dns/__pycache__/ipv4.cpython-311.pyc,, +dns/__pycache__/ipv6.cpython-311.pyc,, +dns/__pycache__/message.cpython-311.pyc,, +dns/__pycache__/name.cpython-311.pyc,, +dns/__pycache__/namedict.cpython-311.pyc,, +dns/__pycache__/nameserver.cpython-311.pyc,, +dns/__pycache__/node.cpython-311.pyc,, +dns/__pycache__/opcode.cpython-311.pyc,, +dns/__pycache__/query.cpython-311.pyc,, +dns/__pycache__/rcode.cpython-311.pyc,, +dns/__pycache__/rdata.cpython-311.pyc,, +dns/__pycache__/rdataclass.cpython-311.pyc,, +dns/__pycache__/rdataset.cpython-311.pyc,, +dns/__pycache__/rdatatype.cpython-311.pyc,, +dns/__pycache__/renderer.cpython-311.pyc,, +dns/__pycache__/resolver.cpython-311.pyc,, +dns/__pycache__/reversename.cpython-311.pyc,, +dns/__pycache__/rrset.cpython-311.pyc,, +dns/__pycache__/serial.cpython-311.pyc,, +dns/__pycache__/set.cpython-311.pyc,, +dns/__pycache__/tokenizer.cpython-311.pyc,, +dns/__pycache__/transaction.cpython-311.pyc,, +dns/__pycache__/tsig.cpython-311.pyc,, +dns/__pycache__/tsigkeyring.cpython-311.pyc,, +dns/__pycache__/ttl.cpython-311.pyc,, +dns/__pycache__/update.cpython-311.pyc,, +dns/__pycache__/version.cpython-311.pyc,, +dns/__pycache__/versioned.cpython-311.pyc,, +dns/__pycache__/win32util.cpython-311.pyc,, +dns/__pycache__/wire.cpython-311.pyc,, +dns/__pycache__/xfr.cpython-311.pyc,, +dns/__pycache__/zone.cpython-311.pyc,, +dns/__pycache__/zonefile.cpython-311.pyc,, +dns/__pycache__/zonetypes.cpython-311.pyc,, +dns/_asyncbackend.py,sha256=pamIAWJ73e7ic2u7Q3RJyG6_6L8t78ccttvi65682MM,2396 +dns/_asyncio_backend.py,sha256=iLqhcUXqnFWC_2tcAp9U00NOGxT5GKPn4qeXS4iKaro,9051 +dns/_ddr.py,sha256=rHXKC8kncCTT9N4KBh1flicl79nyDjQ-DDvq30MJ3B8,5247 +dns/_features.py,sha256=Ig_leAKUT9RDiOVOfA0nXmmqpiPfnOnP9TcxlISUGSk,2492 +dns/_immutable_ctx.py,sha256=gtoCLMmdHXI23zt5lRSIS3A4Ca3jZJngebdoFFOtiwU,2459 +dns/_trio_backend.py,sha256=IXNdUP1MUBPyZRgAFhGH71KHtUCh3Rm5dM8SX4bMj2U,8473 +dns/asyncbackend.py,sha256=82fXTFls_m7F_ekQbgUGOkoBbs4BI-GBLDZAWNGUvJ0,2796 +dns/asyncquery.py,sha256=PMZ_D4Z8vgSioWHftyxNw7eax1IqrPleqY5FIi40hd8,30821 +dns/asyncresolver.py,sha256=GD86dCyW9YGKs6SggWXwBKEXifW7Qdx4cEAGFKY6fA4,17852 +dns/dnssec.py,sha256=gXmIrbKK1t1hE8ht-WlhUc0giy1PpLYj07r6o0pVATY,41717 
+dns/dnssecalgs/__init__.py,sha256=OWvTadxZ3oF5PxVGodNimxBt_-3YUNTOSV62HrIb4PQ,4331 +dns/dnssecalgs/__pycache__/__init__.cpython-311.pyc,, +dns/dnssecalgs/__pycache__/base.cpython-311.pyc,, +dns/dnssecalgs/__pycache__/cryptography.cpython-311.pyc,, +dns/dnssecalgs/__pycache__/dsa.cpython-311.pyc,, +dns/dnssecalgs/__pycache__/ecdsa.cpython-311.pyc,, +dns/dnssecalgs/__pycache__/eddsa.cpython-311.pyc,, +dns/dnssecalgs/__pycache__/rsa.cpython-311.pyc,, +dns/dnssecalgs/base.py,sha256=jlTV_nd1Nqkvqyf-FVHIccXKFrE2LL6GVu6AW8QUh2E,2513 +dns/dnssecalgs/cryptography.py,sha256=3uqMfRm-zCkJPOrxUqlu9CmdxIMy71dVor9eAHi0wZM,2425 +dns/dnssecalgs/dsa.py,sha256=DNO68g_lbG7_oKcDN8c2xuzYRPbLaZc9Ns7oQoa0Vbc,3564 +dns/dnssecalgs/ecdsa.py,sha256=RfvFKRNExsYgd5SoXXRxMHkoBeF2Gktkz2rOwObEYAY,3172 +dns/dnssecalgs/eddsa.py,sha256=7VGARpVUzIYRjPh0gFapTPFzmsK8WJDqDZDLw2KLc8w,1981 +dns/dnssecalgs/rsa.py,sha256=_tNABpr6iwd8STBEHYIXfyLrgBpRNCj8K0UQj32_kOU,3622 +dns/dnssectypes.py,sha256=CyeuGTS_rM3zXr8wD9qMT9jkzvVfTY2JWckUcogG83E,1799 +dns/e164.py,sha256=EsK8cnOtOx7kQ0DmSwibcwkzp6efMWjbRiTyHZO8Q-M,3978 +dns/edns.py,sha256=-XDhC2jr7BRLsJrpCAWShxLn-3eG1oI0HhduWhLxdMw,17089 +dns/entropy.py,sha256=qkG8hXDLzrJS6R5My26iA59c0RhPwJNzuOhOCAZU5Bw,4242 +dns/enum.py,sha256=EepaunPKixTSrascy7iAe9UQEXXxP_MB5Gx4jUpHIhg,3691 +dns/exception.py,sha256=8vjxLf4T3T77vfANe_iKVeButAEhSJve6UrPjiBzht4,5953 +dns/flags.py,sha256=cQ3kTFyvcKiWHAxI5AwchNqxVOrsIrgJ6brgrH42Wq8,2750 +dns/grange.py,sha256=D016OrOv3i44G3mb_CzPFjDk61uZ6BMRib3yJnDQvbw,2144 +dns/immutable.py,sha256=InrtpKvPxl-74oYbzsyneZwAuX78hUqeG22f2aniZbk,2017 +dns/inet.py,sha256=j6jQs3K_ehVhDv-i4jwCKePr5HpEiSzvOXQ4uhgn1sU,5772 +dns/ipv4.py,sha256=qEUXtlqWDH_blicj6VMvyQhfX7-BF0gB_lWJliV-2FI,2552 +dns/ipv6.py,sha256=Ww8ayshM6FxtQsRYdXXuKkPFTad5ZcGbBd9lr1nFct4,6554 +dns/message.py,sha256=QOtdFBEAORhTKN0uQg86uSNvthdxJx40HhMQXYCBHng,68185 +dns/name.py,sha256=Bf3170QHhLFLDnMsWeJyik4i9ucBDbIY6Bydcz8H-2o,42778 +dns/namedict.py,sha256=hJRYpKeQv6Bd2LaUOPV0L_a0eXEIuqgggPXaH4c3Tow,4000 +dns/nameserver.py,sha256=hH4LLOkB4jeyO3VDUWK0lNpMJNNt_cFYf23-HdhpSmE,10115 +dns/node.py,sha256=NGZa0AUMq-CNledJ6wn1Rx6TFYc703cH2OraLysoNWM,12663 +dns/opcode.py,sha256=I6JyuFUL0msja_BYm6bzXHfbbfqUod_69Ss4xcv8xWQ,2730 +dns/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +dns/query.py,sha256=_Ev7EivZNEpgrUiPIn4BVnDRFCizcayHHcBXt0Ju3As,56298 +dns/quic/__init__.py,sha256=S5_2UuYzSU_LLtrLAf8DHh3KqNF2YHeKJ_-Wv991WlI,2272 +dns/quic/__pycache__/__init__.cpython-311.pyc,, +dns/quic/__pycache__/_asyncio.cpython-311.pyc,, +dns/quic/__pycache__/_common.cpython-311.pyc,, +dns/quic/__pycache__/_sync.cpython-311.pyc,, +dns/quic/__pycache__/_trio.cpython-311.pyc,, +dns/quic/_asyncio.py,sha256=dnABPz5f-JOJsA7D_BdPfuyzpkL_87AaY4CUcmgNj-g,9870 +dns/quic/_common.py,sha256=koWf6rq9_gUorIOV60QZKAHwZF5MuSgQvBznzA5rGzk,10857 +dns/quic/_sync.py,sha256=QF-dW19NwiDW_BDoJZkSQmHz2uEpfgedsUKTPc0JAio,10436 +dns/quic/_trio.py,sha256=01HH4_hU1VRx-BWXl8bQo4-LZem_eKRBNy6RolTZZXY,9248 +dns/rcode.py,sha256=N6JjrIQjCdJy0boKIp8Hcky5tm__LSDscpDz3rE_sgU,4156 +dns/rdata.py,sha256=uk82eldqpWR8L2zp_CB8JG6wWWfK7zdYowWISfMC2XE,31022 +dns/rdataclass.py,sha256=TK4W4ywB1L_X7EZqk2Gmwnu7vdQpolQF5DtQWyNk5xo,2984 +dns/rdataset.py,sha256=BMNvGAzE4HfYHA-pnhsKwELfpr-saz73BzYwMucoKj0,16664 +dns/rdatatype.py,sha256=wgKWnu4mAbXnmG8wKHpV8dZHkhMqNeSsWWlWFo5HcDY,7448 +dns/rdtypes/ANY/AFSDB.py,sha256=k75wMwreF1DAfDymu4lHh16BUx7ulVP3PLeQBZnkurY,1661 +dns/rdtypes/ANY/AMTRELAY.py,sha256=19jfS61mT1CQT-8vf67ZylhDS9JVRVp4WCbFE-7l0jM,3381 
+dns/rdtypes/ANY/AVC.py,sha256=SpsXYzlBirRWN0mGnQe0MdN6H8fvlgXPJX5PjOHnEak,1024 +dns/rdtypes/ANY/CAA.py,sha256=AHh59Is-4WiVWd26yovnPM3hXqKS-yx7IWfXSS0NZhE,2511 +dns/rdtypes/ANY/CDNSKEY.py,sha256=bJAdrBMsFHIJz8TF1AxZoNbdxVWBCRTG-bR_uR_r_G4,1225 +dns/rdtypes/ANY/CDS.py,sha256=Y9nIRUCAabztVLbxm2SXAdYapFemCOUuGh5JqroCDUs,1163 +dns/rdtypes/ANY/CERT.py,sha256=2Cu2LQM6-K4darqhHv1EM_blmpYpnrBIIX1GnL_rxKE,3533 +dns/rdtypes/ANY/CNAME.py,sha256=IHGGq2BDpeKUahTr1pvyBQgm0NGBI_vQ3Vs5mKTXO4w,1206 +dns/rdtypes/ANY/CSYNC.py,sha256=KkZ_rG6PfeL14il97nmJGWWmUGGS5o9nd2EqbJqOuYo,2439 +dns/rdtypes/ANY/DLV.py,sha256=J-pOrw5xXsDoaB9G0r6znlYXJtqtcqhsl1OXs6CPRU4,986 +dns/rdtypes/ANY/DNAME.py,sha256=yqXRtx4dAWwB4YCCv-qW6uaxeGhg2LPQ2uyKwWaMdXs,1150 +dns/rdtypes/ANY/DNSKEY.py,sha256=MD8HUVH5XXeAGOnFWg5aVz_w-2tXYwCeVXmzExhiIeQ,1223 +dns/rdtypes/ANY/DS.py,sha256=_gf8vk1O_uY8QXFjsfUw-bny-fm6e-QpCk3PT0JCyoM,995 +dns/rdtypes/ANY/EUI48.py,sha256=x0BkK0sY_tgzuCwfDYpw6tyuChHjjtbRpAgYhO0Y44o,1151 +dns/rdtypes/ANY/EUI64.py,sha256=1jCff2-SXHJLDnNDnMW8Cd_o-ok0P3x6zKy_bcCU5h4,1161 +dns/rdtypes/ANY/GPOS.py,sha256=u4qwiDBVoC7bsKfxDKGbPjnOKddpdjy2p1AhziDWcPw,4439 +dns/rdtypes/ANY/HINFO.py,sha256=D2WvjTsvD_XqT8BepBIyjPL2iYGMgYqb1VQa9ApO0qE,2217 +dns/rdtypes/ANY/HIP.py,sha256=c32Ewlk88schJ1nPOmT5BVR60ttIM-uH8I8LaRAkFOA,3226 +dns/rdtypes/ANY/ISDN.py,sha256=L4C2Rxrr4JJN17lmJRbZN8RhM_ujjwIskY_4V4Gd3r4,2723 +dns/rdtypes/ANY/L32.py,sha256=TMz2kdGCd0siiQZyiocVDCSnvkOdjhUuYRFyf8o622M,1286 +dns/rdtypes/ANY/L64.py,sha256=sb2BjuPA0PQt67nEyT9rBt759C9e6lH71d3EJHGGnww,1592 +dns/rdtypes/ANY/LOC.py,sha256=NZKIUJULZ3BcK1-gnb2Mk76Pc4UUZry47C5n9VBvhnk,11995 +dns/rdtypes/ANY/LP.py,sha256=wTsKIjtK6vh66qZRLSsiE0k54GO8ieVBGZH8dzVvFnE,1338 +dns/rdtypes/ANY/MX.py,sha256=qQk83idY0-SbRMDmB15JOpJi7cSyiheF-ALUD0Ev19E,995 +dns/rdtypes/ANY/NID.py,sha256=N7Xx4kXf3yVAocTlCXQeJ3BtiQNPFPQVdL1iMuyl5W4,1544 +dns/rdtypes/ANY/NINFO.py,sha256=bdL_-6Bejb2EH-xwR1rfSr_9E3SDXLTAnov7x2924FI,1041 +dns/rdtypes/ANY/NS.py,sha256=ThfaPalUlhbyZyNyvBM3k-7onl3eJKq5wCORrOGtkMM,995 +dns/rdtypes/ANY/NSEC.py,sha256=kicEYxcKaLBpV6C_M8cHdDaqBoiYl6EYtPvjyR6kExI,2465 +dns/rdtypes/ANY/NSEC3.py,sha256=696h-Zz30bmcT0n1rqoEtS5wqE6jIgsVGzaw5TfdGJo,4331 +dns/rdtypes/ANY/NSEC3PARAM.py,sha256=08p6NWS4DiLav1wOuPbxUxB9MtY2IPjfOMCtJwzzMuA,2635 +dns/rdtypes/ANY/OPENPGPKEY.py,sha256=Va0FGo_8vm1OeX62N5iDTWukAdLwrjTXIZeQ6oanE78,1851 +dns/rdtypes/ANY/OPT.py,sha256=W36RslT_Psp95OPUC70knumOYjKpaRHvGT27I-NV2qc,2561 +dns/rdtypes/ANY/PTR.py,sha256=5HcR1D77Otyk91vVY4tmqrfZfSxSXWyWvwIW-rIH5gc,997 +dns/rdtypes/ANY/RESINFO.py,sha256=Kf2NcKbkeI5gFE1bJfQNqQCaitYyXfV_9nQYl1luUZ0,1008 +dns/rdtypes/ANY/RP.py,sha256=8doJlhjYDYiAT6KNF1mAaemJ20YJFUPvit8LOx4-I-U,2174 +dns/rdtypes/ANY/RRSIG.py,sha256=O8vwzS7ldfaj_x8DypvEGFsDSb7al-D7OEnprA3QQoo,4922 +dns/rdtypes/ANY/RT.py,sha256=2t9q3FZQ28iEyceeU25KU2Ur0T5JxELAu8BTwfOUgVw,1013 +dns/rdtypes/ANY/SMIMEA.py,sha256=6yjHuVDfIEodBU9wxbCGCDZ5cWYwyY6FCk-aq2VNU0s,222 +dns/rdtypes/ANY/SOA.py,sha256=Cn8yrag1YvrvwivQgWg-KXmOCaVQVdFHSkFF77w-CE0,3145 +dns/rdtypes/ANY/SPF.py,sha256=rA3Srs9ECQx-37lqm7Zf7aYmMpp_asv4tGS8_fSQ-CU,1022 +dns/rdtypes/ANY/SSHFP.py,sha256=l6TZH2R0kytiZGWez_g-Lq94o5a2xMuwLKwUwsPMx5w,2530 +dns/rdtypes/ANY/TKEY.py,sha256=1ecTuBse2b4QPH2qmx3vn-gfPK0INcKXfxrIyAJxFHA,4927 +dns/rdtypes/ANY/TLSA.py,sha256=cytzebS3W7FFr9qeJ9gFSHq_bOwUk9aRVlXWHfnVrRs,218 +dns/rdtypes/ANY/TSIG.py,sha256=4fNQJSNWZXUKZejCciwQuUJtTw2g-YbPmqHrEj_pitg,4750 +dns/rdtypes/ANY/TXT.py,sha256=F1U9gIAhwXIV4UVT7CwOCEn_su6G1nJIdgWJsLktk20,1000 
+dns/rdtypes/ANY/URI.py,sha256=dpcS8KwcJ2WJ7BkOp4CZYaUyRuw7U2S9GzvVwKUihQg,2921 +dns/rdtypes/ANY/WALLET.py,sha256=IaP2g7Nq26jWGKa8MVxvJjWXLQ0wrNR1IWJVyyMG8oU,219 +dns/rdtypes/ANY/X25.py,sha256=BzEM7uOY7CMAm7QN-dSLj-_LvgnnohwJDUjMstzwqYo,1942 +dns/rdtypes/ANY/ZONEMD.py,sha256=JQicv69EvUxh4FCT7eZSLzzU5L5brw_dSM65Um2t5lQ,2393 +dns/rdtypes/ANY/__init__.py,sha256=My5jT8T5bA66zBydmRSxkmDCFxwI81B4DBRA_S36IL8,1526 +dns/rdtypes/ANY/__pycache__/AFSDB.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/AMTRELAY.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/AVC.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/CAA.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/CDNSKEY.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/CDS.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/CERT.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/CNAME.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/CSYNC.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/DLV.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/DNAME.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/DNSKEY.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/DS.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/EUI48.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/EUI64.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/GPOS.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/HINFO.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/HIP.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/ISDN.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/L32.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/L64.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/LOC.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/LP.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/MX.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/NID.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/NINFO.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/NS.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/NSEC.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/NSEC3.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/NSEC3PARAM.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/OPENPGPKEY.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/OPT.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/PTR.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/RESINFO.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/RP.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/RRSIG.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/RT.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/SMIMEA.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/SOA.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/SPF.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/SSHFP.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/TKEY.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/TLSA.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/TSIG.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/TXT.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/URI.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/WALLET.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/X25.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/ZONEMD.cpython-311.pyc,, +dns/rdtypes/ANY/__pycache__/__init__.cpython-311.pyc,, +dns/rdtypes/CH/A.py,sha256=-4G3ASZGj7oUlPfDxADibAB1WfTsZBavUO8ghDWarJ8,2212 +dns/rdtypes/CH/__init__.py,sha256=GD9YeDKb9VBDo-J5rrChX1MWEGyQXuR9Htnbhg_iYLc,923 +dns/rdtypes/CH/__pycache__/A.cpython-311.pyc,, +dns/rdtypes/CH/__pycache__/__init__.cpython-311.pyc,, +dns/rdtypes/IN/A.py,sha256=FfFn3SqbpneL9Ky63COP50V2ZFxqS1ldCKJh39Enwug,1814 +dns/rdtypes/IN/AAAA.py,sha256=AxrOlYy-1TTTWeQypDKeXrDCrdHGor0EKCE4fxzSQGo,1820 +dns/rdtypes/IN/APL.py,sha256=ppyFwn0KYMdyDzphxd0BUhgTmZv0QnDMRLjzQQM793U,5097 
+dns/rdtypes/IN/DHCID.py,sha256=zRUh_EOxUPVpJjWY5m7taX8q4Oz5K70785ZtKv5OTCU,1856 +dns/rdtypes/IN/HTTPS.py,sha256=P-IjwcvDQMmtoBgsDHglXF7KgLX73G6jEDqCKsnaGpQ,220 +dns/rdtypes/IN/IPSECKEY.py,sha256=RyIy9K0Yt0uJRjdr6cj5S95ELHHbl--0xV-Qq9O3QQk,3290 +dns/rdtypes/IN/KX.py,sha256=K1JwItL0n5G-YGFCjWeh0C9DyDD8G8VzicsBeQiNAv0,1013 +dns/rdtypes/IN/NAPTR.py,sha256=SaOK-0hIYImwLtb5Hqewi-e49ykJaQiLNvk8ZzNoG7Q,3750 +dns/rdtypes/IN/NSAP.py,sha256=6YfWCVSIPTTBmRAzG8nVBj3LnohncXUhSFJHgp-TRdc,2163 +dns/rdtypes/IN/NSAP_PTR.py,sha256=iTxlV6fr_Y9lqivLLncSHxEhmFqz5UEElDW3HMBtuCU,1015 +dns/rdtypes/IN/PX.py,sha256=vHDNN2rfLObuUKwpYDIvpPB482BqXlHA-ZQpQn9Sb_E,2756 +dns/rdtypes/IN/SRV.py,sha256=a0zGaUwzvih_a4Q9BViUTFs7NZaCqgl7mls3-KRVHm8,2769 +dns/rdtypes/IN/SVCB.py,sha256=HeFmi2v01F00Hott8FlvQ4R7aPxFmT7RF-gt45R5K_M,218 +dns/rdtypes/IN/WKS.py,sha256=kErSG5AO2qIuot_hkMHnQuZB1_uUzUirNdqBoCp97rk,3652 +dns/rdtypes/IN/__init__.py,sha256=HbI8aw9HWroI6SgEvl8Sx6FdkDswCCXMbSRuJy5o8LQ,1083 +dns/rdtypes/IN/__pycache__/A.cpython-311.pyc,, +dns/rdtypes/IN/__pycache__/AAAA.cpython-311.pyc,, +dns/rdtypes/IN/__pycache__/APL.cpython-311.pyc,, +dns/rdtypes/IN/__pycache__/DHCID.cpython-311.pyc,, +dns/rdtypes/IN/__pycache__/HTTPS.cpython-311.pyc,, +dns/rdtypes/IN/__pycache__/IPSECKEY.cpython-311.pyc,, +dns/rdtypes/IN/__pycache__/KX.cpython-311.pyc,, +dns/rdtypes/IN/__pycache__/NAPTR.cpython-311.pyc,, +dns/rdtypes/IN/__pycache__/NSAP.cpython-311.pyc,, +dns/rdtypes/IN/__pycache__/NSAP_PTR.cpython-311.pyc,, +dns/rdtypes/IN/__pycache__/PX.cpython-311.pyc,, +dns/rdtypes/IN/__pycache__/SRV.cpython-311.pyc,, +dns/rdtypes/IN/__pycache__/SVCB.cpython-311.pyc,, +dns/rdtypes/IN/__pycache__/WKS.cpython-311.pyc,, +dns/rdtypes/IN/__pycache__/__init__.cpython-311.pyc,, +dns/rdtypes/__init__.py,sha256=NYizfGglJfhqt_GMtSSXf7YQXIEHHCiJ_Y_qaLVeiOI,1073 +dns/rdtypes/__pycache__/__init__.cpython-311.pyc,, +dns/rdtypes/__pycache__/dnskeybase.cpython-311.pyc,, +dns/rdtypes/__pycache__/dsbase.cpython-311.pyc,, +dns/rdtypes/__pycache__/euibase.cpython-311.pyc,, +dns/rdtypes/__pycache__/mxbase.cpython-311.pyc,, +dns/rdtypes/__pycache__/nsbase.cpython-311.pyc,, +dns/rdtypes/__pycache__/svcbbase.cpython-311.pyc,, +dns/rdtypes/__pycache__/tlsabase.cpython-311.pyc,, +dns/rdtypes/__pycache__/txtbase.cpython-311.pyc,, +dns/rdtypes/__pycache__/util.cpython-311.pyc,, +dns/rdtypes/dnskeybase.py,sha256=FoDllfa9Pz2j2rf45VyUUYUsIt3kjjrwDy6LxrlPb5s,2856 +dns/rdtypes/dsbase.py,sha256=I85Aps1lBsiItdqGpsNY1O8icosfPtkWjiUn1J1lLUQ,3427 +dns/rdtypes/euibase.py,sha256=1yKWOM4xBwLLIFFfEj7M9JMankO2l8ljxhG-5OOxXDg,2618 +dns/rdtypes/mxbase.py,sha256=DzjbiKoAAgpqbhwMBIFGA081jR5_doqGAq-kLvy2mns,3196 +dns/rdtypes/nsbase.py,sha256=tueXVV6E8lelebOmrmoOPq47eeRvOpsxHVXH4cOFxcs,2323 +dns/rdtypes/svcbbase.py,sha256=YOH3Wz3fp5GQjdTF7hU-1ys9iDkYEC5p4d0F32ivv5g,17612 +dns/rdtypes/tlsabase.py,sha256=pIiWem6sF4IwyyKmyqx5xg55IG0w3K9r502Yx8PdziA,2596 +dns/rdtypes/txtbase.py,sha256=Dt9ptWSWtnq0Qwlni6IT6YUz_DCixQDDUl5d4P_AfqY,3696 +dns/rdtypes/util.py,sha256=c3eLaucwuxXZjXWuNyCGKzwltgub4AjT4uLVytEuxSk,9017 +dns/renderer.py,sha256=5THf1iKql2JPL2sKZt2-b4zqHKfk_vlx0FEfPtMJysY,11254 +dns/resolver.py,sha256=FH_hiMeCdVYonIYmE3QqEWJKgHOOxlTcHS0dwd_loGY,73730 +dns/reversename.py,sha256=zoqXEbMZXm6R13nXbJHgTsf6L2C6uReODj6mqSHrTiE,3828 +dns/rrset.py,sha256=J-oQPEPJuKueLLiz1FN08P-ys9fjHhPWuwpDdrL4UTQ,9170 +dns/serial.py,sha256=-t5rPW-TcJwzBMfIJo7Tl-uDtaYtpqOfCVYx9dMaDCY,3606 +dns/set.py,sha256=hublMKCIhd9zp5Hz_fvQTwF-Ze28jn7mjqei6vTGWfs,9213 
+dns/tokenizer.py,sha256=65vVkEeTuml3l2AT-NePE6Gt6ucDQNvSpeIVgMpP6G0,23583 +dns/transaction.py,sha256=UhwD6CLQI51dguuz__dxJS8V91vKAoqHdQDCBErJWxE,22589 +dns/tsig.py,sha256=I-Y-c3WMBX11bVioy5puFly2BhlpptUz82ikahxuh1c,11413 +dns/tsigkeyring.py,sha256=Z0xZemcU3XjZ9HlxBYv2E2PSuIhaFreqLDlD7HcmZDA,2633 +dns/ttl.py,sha256=Y4inc4bvkfKpogZn5i1n-tpg1CAjDJxH4_HvfeVjVsM,2977 +dns/update.py,sha256=y9d6LOO8xrUaH2UrZhy3ssnx8bJEsxqTArw5V8XqBRs,12243 +dns/version.py,sha256=GTecBDFJx8cKnGiCmxJhSVjk1EkqnuNVt4xailIi3sk,1926 +dns/versioned.py,sha256=3YQj8mzGmZEsjnuVJJjcWopVmDKYLhEj4hEGTLEwzco,11765 +dns/win32util.py,sha256=r9dOvC0Tq288vwPk-ngugsVpwB5YnfW22DaRv6TTPcU,8874 +dns/wire.py,sha256=vy0SolgECbO1UXB4dnhXhDeFKOJT29nQxXvSfKOgA5s,2830 +dns/xfr.py,sha256=aoW0UtvweaE0NV8cmzgMKLYQOa3hwJ3NudRuqjit4SU,13271 +dns/zone.py,sha256=lLAarSxPtpx4Sw29OQ0ifPshD4QauGu8RnPh2dEropA,52086 +dns/zonefile.py,sha256=Y9lm6I7n4eRS35CyclooiQ_jxiOs3pSyH_0uD4FQyag,27926 +dns/zonetypes.py,sha256=HrQNZxZ_gWLWI9dskix71msi9wkYK5pgrBBbPb1T74Y,690 +dnspython-2.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +dnspython-2.7.0.dist-info/METADATA,sha256=1lF6uqZwb6RAQFYVtBkLic6pBCe9t14TQWtkK9U5eyY,5763 +dnspython-2.7.0.dist-info/RECORD,, +dnspython-2.7.0.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87 +dnspython-2.7.0.dist-info/licenses/LICENSE,sha256=w-o_9WVLMpwZ07xfdIGvYjw93tSmFFWFSZ-EOtPXQc0,1526 diff --git a/venv/lib/python3.11/site-packages/dnspython-2.7.0.dist-info/WHEEL b/venv/lib/python3.11/site-packages/dnspython-2.7.0.dist-info/WHEEL new file mode 100644 index 0000000..cdd68a4 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dnspython-2.7.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.25.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.11/site-packages/dnspython-2.7.0.dist-info/licenses/LICENSE b/venv/lib/python3.11/site-packages/dnspython-2.7.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000..390a726 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dnspython-2.7.0.dist-info/licenses/LICENSE @@ -0,0 +1,35 @@ +ISC License + +Copyright (C) Dnspython Contributors + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all +copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL +DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR +PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + + + +Copyright (C) 2001-2017 Nominum, Inc. +Copyright (C) Google Inc. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose with or without fee is hereby granted, +provided that the above copyright notice and this permission notice +appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/venv/lib/python3.11/site-packages/dotenv/__init__.py b/venv/lib/python3.11/site-packages/dotenv/__init__.py new file mode 100644 index 0000000..7f4c631 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dotenv/__init__.py @@ -0,0 +1,49 @@ +from typing import Any, Optional + +from .main import (dotenv_values, find_dotenv, get_key, load_dotenv, set_key, + unset_key) + + +def load_ipython_extension(ipython: Any) -> None: + from .ipython import load_ipython_extension + load_ipython_extension(ipython) + + +def get_cli_string( + path: Optional[str] = None, + action: Optional[str] = None, + key: Optional[str] = None, + value: Optional[str] = None, + quote: Optional[str] = None, +): + """Returns a string suitable for running as a shell script. + + Useful for converting arguments passed to a Fabric task + to be passed to a `local` or `run` command. + """ + command = ['dotenv'] + if quote: + command.append(f'-q {quote}') + if path: + command.append(f'-f {path}') + if action: + command.append(action) + if key: + command.append(key) + if value: + if ' ' in value: + command.append(f'"{value}"') + else: + command.append(value) + + return ' '.join(command).strip() + + +__all__ = ['get_cli_string', + 'load_dotenv', + 'dotenv_values', + 'get_key', + 'set_key', + 'unset_key', + 'find_dotenv', + 'load_ipython_extension'] diff --git a/venv/lib/python3.11/site-packages/dotenv/__main__.py b/venv/lib/python3.11/site-packages/dotenv/__main__.py new file mode 100644 index 0000000..3977f55 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dotenv/__main__.py @@ -0,0 +1,6 @@ +"""Entry point for cli, enables execution with `python -m dotenv`""" + +from .cli import cli + +if __name__ == "__main__": + cli()
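The `__all__` list above is the whole public surface of python-dotenv. A short round-trip through the file-editing helpers defined in `main.py` later in this diff (the path and key are illustrative only):

```python
from dotenv import get_key, set_key, unset_key

set_key(".env", "API_KEY", "secret")  # writes API_KEY='secret' (quote_mode="always")
print(get_key(".env", "API_KEY"))     # -> secret
unset_key(".env", "API_KEY")          # removes the line again
```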
diff --git a/venv/lib/python3.11/site-packages/dotenv/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/dotenv/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..640ef32 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dotenv/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dotenv/__pycache__/__main__.cpython-311.pyc b/venv/lib/python3.11/site-packages/dotenv/__pycache__/__main__.cpython-311.pyc new file mode 100644 index 0000000..80f269a Binary files /dev/null and b/venv/lib/python3.11/site-packages/dotenv/__pycache__/__main__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dotenv/__pycache__/cli.cpython-311.pyc b/venv/lib/python3.11/site-packages/dotenv/__pycache__/cli.cpython-311.pyc new file mode 100644 index 0000000..fd6cb60 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dotenv/__pycache__/cli.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dotenv/__pycache__/ipython.cpython-311.pyc b/venv/lib/python3.11/site-packages/dotenv/__pycache__/ipython.cpython-311.pyc new file mode 100644 index 0000000..1026864 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dotenv/__pycache__/ipython.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dotenv/__pycache__/main.cpython-311.pyc b/venv/lib/python3.11/site-packages/dotenv/__pycache__/main.cpython-311.pyc new file mode 100644 index 0000000..7dde923 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dotenv/__pycache__/main.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dotenv/__pycache__/parser.cpython-311.pyc b/venv/lib/python3.11/site-packages/dotenv/__pycache__/parser.cpython-311.pyc new file mode 100644 index 0000000..bbfbed5 Binary files /dev/null and b/venv/lib/python3.11/site-packages/dotenv/__pycache__/parser.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dotenv/__pycache__/variables.cpython-311.pyc b/venv/lib/python3.11/site-packages/dotenv/__pycache__/variables.cpython-311.pyc new file mode 100644 index 0000000..5be8e3d Binary files /dev/null and b/venv/lib/python3.11/site-packages/dotenv/__pycache__/variables.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dotenv/__pycache__/version.cpython-311.pyc b/venv/lib/python3.11/site-packages/dotenv/__pycache__/version.cpython-311.pyc new file mode 100644 index 0000000..85c868b Binary files /dev/null and b/venv/lib/python3.11/site-packages/dotenv/__pycache__/version.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/dotenv/cli.py b/venv/lib/python3.11/site-packages/dotenv/cli.py new file mode 100644 index 0000000..33ae148 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dotenv/cli.py @@ -0,0 +1,190 @@ +import json +import os +import shlex +import sys +from contextlib import contextmanager +from typing import Any, Dict, IO, Iterator, List, Optional + +try: + import click +except ImportError: + sys.stderr.write('It seems python-dotenv is not installed with cli option. \n' + 'Run pip install "python-dotenv[cli]" to fix this.') + sys.exit(1) + +from .main import dotenv_values, set_key, unset_key +from .version import __version__ + + +def enumerate_env() -> Optional[str]: + """ + Return a path for the ${pwd}/.env file. + + If pwd does not exist, return None. + """ + try: + cwd = os.getcwd() + except FileNotFoundError: + return None + path = os.path.join(cwd, '.env') + return path + + +@click.group() +@click.option('-f', '--file', default=enumerate_env(), + type=click.Path(file_okay=True), + help="Location of the .env file, defaults to .env file in current working directory.") +@click.option('-q', '--quote', default='always', + type=click.Choice(['always', 'never', 'auto']), + help="Whether or not to quote the variable values. Default mode is always. This does not affect parsing.") +@click.option('-e', '--export', default=False, + type=click.BOOL, + help="Whether to write the dot file as an executable bash script.") +@click.version_option(version=__version__) +@click.pass_context +def cli(ctx: click.Context, file: Any, quote: Any, export: Any) -> None: + """This script is used to set, get or unset values from a .env file.""" + ctx.obj = {'QUOTE': quote, 'EXPORT': export, 'FILE': file} + + +@contextmanager +def stream_file(path: os.PathLike) -> Iterator[IO[str]]: + """ + Open a file and yield the corresponding (decoded) stream. + + Exits with error code 2 if the file cannot be opened. + """ + + try: + with open(path) as stream: + yield stream + except OSError as exc: + print(f"Error opening env file: {exc}", file=sys.stderr) + exit(2) + + +@cli.command() +@click.pass_context +@click.option('--format', default='simple', + type=click.Choice(['simple', 'json', 'shell', 'export']), + help="The format in which to display the list.
Default format is simple, " + "which displays name=value without quotes.") +def list(ctx: click.Context, format: str) -> None: + """Display all the stored key/value.""" + file = ctx.obj['FILE'] + + with stream_file(file) as stream: + values = dotenv_values(stream=stream) + + if format == 'json': + click.echo(json.dumps(values, indent=2, sort_keys=True)) + else: + prefix = 'export ' if format == 'export' else '' + for k in sorted(values): + v = values[k] + if v is not None: + if format in ('export', 'shell'): + v = shlex.quote(v) + click.echo(f'{prefix}{k}={v}') + + +@cli.command() +@click.pass_context +@click.argument('key', required=True) +@click.argument('value', required=True) +def set(ctx: click.Context, key: Any, value: Any) -> None: + """Store the given key/value.""" + file = ctx.obj['FILE'] + quote = ctx.obj['QUOTE'] + export = ctx.obj['EXPORT'] + success, key, value = set_key(file, key, value, quote, export) + if success: + click.echo(f'{key}={value}') + else: + exit(1) + + +@cli.command() +@click.pass_context +@click.argument('key', required=True) +def get(ctx: click.Context, key: Any) -> None: + """Retrieve the value for the given key.""" + file = ctx.obj['FILE'] + + with stream_file(file) as stream: + values = dotenv_values(stream=stream) + + stored_value = values.get(key) + if stored_value: + click.echo(stored_value) + else: + exit(1) + + +@cli.command() +@click.pass_context +@click.argument('key', required=True) +def unset(ctx: click.Context, key: Any) -> None: + """Removes the given key.""" + file = ctx.obj['FILE'] + quote = ctx.obj['QUOTE'] + success, key = unset_key(file, key, quote) + if success: + click.echo(f"Successfully removed {key}") + else: + exit(1) + + +@cli.command(context_settings={'ignore_unknown_options': True}) +@click.pass_context +@click.option( + "--override/--no-override", + default=True, + help="Override variables from the environment with those from the .env file.", +) +@click.argument('commandline', nargs=-1, type=click.UNPROCESSED) +def run(ctx: click.Context, override: bool, commandline: List[str]) -> None: + """Run command with environment variables present.""" + file = ctx.obj['FILE'] + if not os.path.isfile(file): + raise click.BadParameter( + f'Invalid value for \'-f\' "{file}" does not exist.', + ctx=ctx + ) + dotenv_as_dict = { + k: v + for (k, v) in dotenv_values(file).items() + if v is not None and (override or k not in os.environ) + } + + if not commandline: + click.echo('No command given.') + exit(1) + run_command(commandline, dotenv_as_dict) + + +def run_command(command: List[str], env: Dict[str, str]) -> None: + """Replace the current process with the specified command. + + Replaces the current process with the specified command and the variables from `env` + added in the current environment variables. + + Parameters + ---------- + command: List[str] + The command and its parameters + env: Dict + The additional environment variables + + Returns + ------- + None + This function does not return any value. It replaces the current process with the new one. + + """ + # copy the current environment variables and add the values from + # `env` + cmd_env = os.environ.copy() + cmd_env.update(env) + + os.execvpe(command[0], args=command, env=cmd_env)
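The `run` command above amounts to: parse the .env file, overlay it on the inherited environment, and exec the command. The same effect from Python, under the default `--override` semantics (the command and file name are placeholders):

```python
import os
from dotenv import dotenv_values

# Keys with no value parse as None and are skipped, as in run() above.
env = {k: v for k, v in dotenv_values(".env").items() if v is not None}
cmd_env = os.environ.copy()
cmd_env.update(env)  # .env values win over inherited variables
os.execvpe("printenv", ["printenv"], cmd_env)  # replaces the current process
```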
diff --git a/venv/lib/python3.11/site-packages/dotenv/ipython.py b/venv/lib/python3.11/site-packages/dotenv/ipython.py new file mode 100644 index 0000000..7df727c --- /dev/null +++ b/venv/lib/python3.11/site-packages/dotenv/ipython.py @@ -0,0 +1,39 @@ +from IPython.core.magic import Magics, line_magic, magics_class # type: ignore +from IPython.core.magic_arguments import (argument, magic_arguments, # type: ignore + parse_argstring) # type: ignore + +from .main import find_dotenv, load_dotenv + + +@magics_class +class IPythonDotEnv(Magics): + + @magic_arguments() + @argument( + '-o', '--override', action='store_true', + help="Indicate to override existing variables" + ) + @argument( + '-v', '--verbose', action='store_true', + help="Indicate function calls to be verbose" + ) + @argument('dotenv_path', nargs='?', type=str, default='.env', + help='Search in increasingly higher folders for the `dotenv_path`') + @line_magic + def dotenv(self, line): + args = parse_argstring(self.dotenv, line) + # Locate the .env file + dotenv_path = args.dotenv_path + try: + dotenv_path = find_dotenv(dotenv_path, True, True) + except IOError: + print("cannot find .env file") + return + + # Load the .env file + load_dotenv(dotenv_path, verbose=args.verbose, override=args.override) + + +def load_ipython_extension(ipython): + """Register the %dotenv magic.""" + ipython.register_magics(IPythonDotEnv) diff --git a/venv/lib/python3.11/site-packages/dotenv/main.py b/venv/lib/python3.11/site-packages/dotenv/main.py new file mode 100644 index 0000000..1848d60 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dotenv/main.py @@ -0,0 +1,398 @@ +import io +import logging +import os +import pathlib +import shutil +import sys +import tempfile +from collections import OrderedDict +from contextlib import contextmanager +from typing import IO, Dict, Iterable, Iterator, Mapping, Optional, Tuple, Union + +from .parser import Binding, parse_stream +from .variables import parse_variables + +# A type alias for a string path to be used for the paths in this file. +# These paths may flow to `open()` and `shutil.move()`; `shutil.move()` +# only accepts string paths, not byte paths or file descriptors. See +# https://github.com/python/typeshed/pull/6832.
+StrPath = Union[str, "os.PathLike[str]"] + +logger = logging.getLogger(__name__) + + +def with_warn_for_invalid_lines(mappings: Iterator[Binding]) -> Iterator[Binding]: + for mapping in mappings: + if mapping.error: + logger.warning( + "python-dotenv could not parse statement starting at line %s", + mapping.original.line, + ) + yield mapping + + +class DotEnv: + def __init__( + self, + dotenv_path: Optional[StrPath], + stream: Optional[IO[str]] = None, + verbose: bool = False, + encoding: Optional[str] = None, + interpolate: bool = True, + override: bool = True, + ) -> None: + self.dotenv_path: Optional[StrPath] = dotenv_path + self.stream: Optional[IO[str]] = stream + self._dict: Optional[Dict[str, Optional[str]]] = None + self.verbose: bool = verbose + self.encoding: Optional[str] = encoding + self.interpolate: bool = interpolate + self.override: bool = override + + @contextmanager + def _get_stream(self) -> Iterator[IO[str]]: + if self.dotenv_path and os.path.isfile(self.dotenv_path): + with open(self.dotenv_path, encoding=self.encoding) as stream: + yield stream + elif self.stream is not None: + yield self.stream + else: + if self.verbose: + logger.info( + "python-dotenv could not find configuration file %s.", + self.dotenv_path or ".env", + ) + yield io.StringIO("") + + def dict(self) -> Dict[str, Optional[str]]: + """Return dotenv as dict""" + if self._dict: + return self._dict + + raw_values = self.parse() + + if self.interpolate: + self._dict = OrderedDict( + resolve_variables(raw_values, override=self.override) + ) + else: + self._dict = OrderedDict(raw_values) + + return self._dict + + def parse(self) -> Iterator[Tuple[str, Optional[str]]]: + with self._get_stream() as stream: + for mapping in with_warn_for_invalid_lines(parse_stream(stream)): + if mapping.key is not None: + yield mapping.key, mapping.value + + def set_as_environment_variables(self) -> bool: + """ + Load the current dotenv as system environment variable. + """ + if not self.dict(): + return False + + for k, v in self.dict().items(): + if k in os.environ and not self.override: + continue + if v is not None: + os.environ[k] = v + + return True + + def get(self, key: str) -> Optional[str]: + """ """ + data = self.dict() + + if key in data: + return data[key] + + if self.verbose: + logger.warning("Key %s not found in %s.", key, self.dotenv_path) + + return None + + +def get_key( + dotenv_path: StrPath, + key_to_get: str, + encoding: Optional[str] = "utf-8", +) -> Optional[str]: + """ + Get the value of a given key from the given .env. + + Returns `None` if the key isn't found or doesn't have a value. 
+ """ + return DotEnv(dotenv_path, verbose=True, encoding=encoding).get(key_to_get) + + +@contextmanager +def rewrite( + path: StrPath, + encoding: Optional[str], +) -> Iterator[Tuple[IO[str], IO[str]]]: + pathlib.Path(path).touch() + + with tempfile.NamedTemporaryFile(mode="w", encoding=encoding, delete=False) as dest: + error = None + try: + with open(path, encoding=encoding) as source: + yield (source, dest) + except BaseException as err: + error = err + + if error is None: + shutil.move(dest.name, path) + else: + os.unlink(dest.name) + raise error from None + + +def set_key( + dotenv_path: StrPath, + key_to_set: str, + value_to_set: str, + quote_mode: str = "always", + export: bool = False, + encoding: Optional[str] = "utf-8", +) -> Tuple[Optional[bool], str, str]: + """ + Adds or Updates a key/value to the given .env + + If the .env path given doesn't exist, fails instead of risking creating + an orphan .env somewhere in the filesystem + """ + if quote_mode not in ("always", "auto", "never"): + raise ValueError(f"Unknown quote_mode: {quote_mode}") + + quote = quote_mode == "always" or ( + quote_mode == "auto" and not value_to_set.isalnum() + ) + + if quote: + value_out = "'{}'".format(value_to_set.replace("'", "\\'")) + else: + value_out = value_to_set + if export: + line_out = f"export {key_to_set}={value_out}\n" + else: + line_out = f"{key_to_set}={value_out}\n" + + with rewrite(dotenv_path, encoding=encoding) as (source, dest): + replaced = False + missing_newline = False + for mapping in with_warn_for_invalid_lines(parse_stream(source)): + if mapping.key == key_to_set: + dest.write(line_out) + replaced = True + else: + dest.write(mapping.original.string) + missing_newline = not mapping.original.string.endswith("\n") + if not replaced: + if missing_newline: + dest.write("\n") + dest.write(line_out) + + return True, key_to_set, value_to_set + + +def unset_key( + dotenv_path: StrPath, + key_to_unset: str, + quote_mode: str = "always", + encoding: Optional[str] = "utf-8", +) -> Tuple[Optional[bool], str]: + """ + Removes a given key from the given `.env` file. + + If the .env path given doesn't exist, fails. + If the given key doesn't exist in the .env, fails. 
+ """ + if not os.path.exists(dotenv_path): + logger.warning("Can't delete from %s - it doesn't exist.", dotenv_path) + return None, key_to_unset + + removed = False + with rewrite(dotenv_path, encoding=encoding) as (source, dest): + for mapping in with_warn_for_invalid_lines(parse_stream(source)): + if mapping.key == key_to_unset: + removed = True + else: + dest.write(mapping.original.string) + + if not removed: + logger.warning( + "Key %s not removed from %s - key doesn't exist.", key_to_unset, dotenv_path + ) + return None, key_to_unset + + return removed, key_to_unset + + +def resolve_variables( + values: Iterable[Tuple[str, Optional[str]]], + override: bool, +) -> Mapping[str, Optional[str]]: + new_values: Dict[str, Optional[str]] = {} + + for name, value in values: + if value is None: + result = None + else: + atoms = parse_variables(value) + env: Dict[str, Optional[str]] = {} + if override: + env.update(os.environ) # type: ignore + env.update(new_values) + else: + env.update(new_values) + env.update(os.environ) # type: ignore + result = "".join(atom.resolve(env) for atom in atoms) + + new_values[name] = result + + return new_values + + +def _walk_to_root(path: str) -> Iterator[str]: + """ + Yield directories starting from the given directory up to the root + """ + if not os.path.exists(path): + raise IOError("Starting path not found") + + if os.path.isfile(path): + path = os.path.dirname(path) + + last_dir = None + current_dir = os.path.abspath(path) + while last_dir != current_dir: + yield current_dir + parent_dir = os.path.abspath(os.path.join(current_dir, os.path.pardir)) + last_dir, current_dir = current_dir, parent_dir + + +def find_dotenv( + filename: str = ".env", + raise_error_if_not_found: bool = False, + usecwd: bool = False, +) -> str: + """ + Search in increasingly higher folders for the given file + + Returns path to the file if found, or an empty string otherwise + """ + + def _is_interactive(): + """Decide whether this is running in a REPL or IPython notebook""" + try: + main = __import__("__main__", None, None, fromlist=["__file__"]) + except ModuleNotFoundError: + return False + return not hasattr(main, "__file__") + + def _is_debugger(): + return sys.gettrace() is not None + + if usecwd or _is_interactive() or _is_debugger() or getattr(sys, "frozen", False): + # Should work without __file__, e.g. in REPL or IPython notebook. + path = os.getcwd() + else: + # will work for .py files + frame = sys._getframe() + current_file = __file__ + + while frame.f_code.co_filename == current_file or not os.path.exists( + frame.f_code.co_filename + ): + assert frame.f_back is not None + frame = frame.f_back + frame_filename = frame.f_code.co_filename + path = os.path.dirname(os.path.abspath(frame_filename)) + + for dirname in _walk_to_root(path): + check_path = os.path.join(dirname, filename) + if os.path.isfile(check_path): + return check_path + + if raise_error_if_not_found: + raise IOError("File not found") + + return "" + + +def load_dotenv( + dotenv_path: Optional[StrPath] = None, + stream: Optional[IO[str]] = None, + verbose: bool = False, + override: bool = False, + interpolate: bool = True, + encoding: Optional[str] = "utf-8", +) -> bool: + """Parse a .env file and then load all the variables found as environment variables. + + Parameters: + dotenv_path: Absolute or relative path to .env file. + stream: Text stream (such as `io.StringIO`) with .env content, used if + `dotenv_path` is `None`. + verbose: Whether to output a warning if the .env file is missing. + override: Whether to override the system environment variables with the variables + from the `.env` file. + encoding: Encoding to be used to read the file. + Returns: + Bool: True if at least one environment variable is set else False + + If both `dotenv_path` and `stream` are `None`, `find_dotenv()` is used to find the + .env file with its default parameters. If you need to change the default parameters + of `find_dotenv()`, you can explicitly call `find_dotenv()` and pass the result + to this function as `dotenv_path`. + """ + if dotenv_path is None and stream is None: + dotenv_path = find_dotenv() + + dotenv = DotEnv( + dotenv_path=dotenv_path, + stream=stream, + verbose=verbose, + interpolate=interpolate, + override=override, + encoding=encoding, + ) + return dotenv.set_as_environment_variables() + + +def dotenv_values( + dotenv_path: Optional[StrPath] = None, + stream: Optional[IO[str]] = None, + verbose: bool = False, + interpolate: bool = True, + encoding: Optional[str] = "utf-8", +) -> Dict[str, Optional[str]]: + """ + Parse a .env file and return its content as a dict. + + The returned dict will have `None` values for keys without values in the .env file. + For example, `foo=bar` results in `{"foo": "bar"}` whereas `foo` alone results in + `{"foo": None}` + + Parameters: + dotenv_path: Absolute or relative path to the .env file. + stream: `StringIO` object with .env content, used if `dotenv_path` is `None`. + verbose: Whether to output a warning if the .env file is missing. + encoding: Encoding to be used to read the file. + + If both `dotenv_path` and `stream` are `None`, `find_dotenv()` is used to find the + .env file. + """ + if dotenv_path is None and stream is None: + dotenv_path = find_dotenv() + + return DotEnv( + dotenv_path=dotenv_path, + stream=stream, + verbose=verbose, + interpolate=interpolate, + override=True, + encoding=encoding, + ).dict()
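The two entry points defined above differ only in their side effects: `load_dotenv` exports into `os.environ`, while `dotenv_values` returns a dict and leaves the process environment alone. For example (`SECRET_KEY` is a placeholder key):

```python
import os
from dotenv import load_dotenv, dotenv_values

load_dotenv()                        # locate .env via find_dotenv() and export it
print(os.environ.get("SECRET_KEY")) # now visible process-wide

config = dotenv_values(".env")       # parse only; os.environ stays untouched
print(config.get("SECRET_KEY"))
```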
diff --git a/venv/lib/python3.11/site-packages/dotenv/parser.py b/venv/lib/python3.11/site-packages/dotenv/parser.py new file mode 100644 index 0000000..735f14a --- /dev/null +++ b/venv/lib/python3.11/site-packages/dotenv/parser.py @@ -0,0 +1,175 @@ +import codecs +import re +from typing import (IO, Iterator, Match, NamedTuple, Optional, # noqa:F401 + Pattern, Sequence, Tuple) + + +def make_regex(string: str, extra_flags: int = 0) -> Pattern[str]: + return re.compile(string, re.UNICODE | extra_flags) + + +_newline = make_regex(r"(\r\n|\n|\r)") +_multiline_whitespace = make_regex(r"\s*", extra_flags=re.MULTILINE) +_whitespace = make_regex(r"[^\S\r\n]*") +_export = make_regex(r"(?:export[^\S\r\n]+)?") +_single_quoted_key = make_regex(r"'([^']+)'") +_unquoted_key = make_regex(r"([^=\#\s]+)") +_equal_sign = make_regex(r"(=[^\S\r\n]*)") +_single_quoted_value = make_regex(r"'((?:\\'|[^'])*)'") +_double_quoted_value = make_regex(r'"((?:\\"|[^"])*)"') +_unquoted_value = make_regex(r"([^\r\n]*)") +_comment = make_regex(r"(?:[^\S\r\n]*#[^\r\n]*)?") +_end_of_line = make_regex(r"[^\S\r\n]*(?:\r\n|\n|\r|$)") +_rest_of_line = make_regex(r"[^\r\n]*(?:\r|\n|\r\n)?") +_double_quote_escapes = make_regex(r"\\[\\'\"abfnrtv]") +_single_quote_escapes = make_regex(r"\\[\\']") + + +class Original(NamedTuple): + string: str + line: int + + +class Binding(NamedTuple): + key: Optional[str] + value: Optional[str] + original: Original + error: bool + + +class Position: + def __init__(self, chars: int, line: int) -> None: + self.chars = chars + self.line = line + + @classmethod + def start(cls) -> "Position": + return cls(chars=0, line=1) + + def set(self,
other: "Position") -> None: + self.chars = other.chars + self.line = other.line + + def advance(self, string: str) -> None: + self.chars += len(string) + self.line += len(re.findall(_newline, string)) + + +class Error(Exception): + pass + + +class Reader: + def __init__(self, stream: IO[str]) -> None: + self.string = stream.read() + self.position = Position.start() + self.mark = Position.start() + + def has_next(self) -> bool: + return self.position.chars < len(self.string) + + def set_mark(self) -> None: + self.mark.set(self.position) + + def get_marked(self) -> Original: + return Original( + string=self.string[self.mark.chars:self.position.chars], + line=self.mark.line, + ) + + def peek(self, count: int) -> str: + return self.string[self.position.chars:self.position.chars + count] + + def read(self, count: int) -> str: + result = self.string[self.position.chars:self.position.chars + count] + if len(result) < count: + raise Error("read: End of string") + self.position.advance(result) + return result + + def read_regex(self, regex: Pattern[str]) -> Sequence[str]: + match = regex.match(self.string, self.position.chars) + if match is None: + raise Error("read_regex: Pattern not found") + self.position.advance(self.string[match.start():match.end()]) + return match.groups() + + +def decode_escapes(regex: Pattern[str], string: str) -> str: + def decode_match(match: Match[str]) -> str: + return codecs.decode(match.group(0), 'unicode-escape') # type: ignore + + return regex.sub(decode_match, string) + + +def parse_key(reader: Reader) -> Optional[str]: + char = reader.peek(1) + if char == "#": + return None + elif char == "'": + (key,) = reader.read_regex(_single_quoted_key) + else: + (key,) = reader.read_regex(_unquoted_key) + return key + + +def parse_unquoted_value(reader: Reader) -> str: + (part,) = reader.read_regex(_unquoted_value) + return re.sub(r"\s+#.*", "", part).rstrip() + + +def parse_value(reader: Reader) -> str: + char = reader.peek(1) + if char == u"'": + (value,) = reader.read_regex(_single_quoted_value) + return decode_escapes(_single_quote_escapes, value) + elif char == u'"': + (value,) = reader.read_regex(_double_quoted_value) + return decode_escapes(_double_quote_escapes, value) + elif char in (u"", u"\n", u"\r"): + return u"" + else: + return parse_unquoted_value(reader) + + +def parse_binding(reader: Reader) -> Binding: + reader.set_mark() + try: + reader.read_regex(_multiline_whitespace) + if not reader.has_next(): + return Binding( + key=None, + value=None, + original=reader.get_marked(), + error=False, + ) + reader.read_regex(_export) + key = parse_key(reader) + reader.read_regex(_whitespace) + if reader.peek(1) == "=": + reader.read_regex(_equal_sign) + value: Optional[str] = parse_value(reader) + else: + value = None + reader.read_regex(_comment) + reader.read_regex(_end_of_line) + return Binding( + key=key, + value=value, + original=reader.get_marked(), + error=False, + ) + except Error: + reader.read_regex(_rest_of_line) + return Binding( + key=None, + value=None, + original=reader.get_marked(), + error=True, + ) + + +def parse_stream(stream: IO[str]) -> Iterator[Binding]: + reader = Reader(stream) + while reader.has_next(): + yield parse_binding(reader) diff --git a/venv/lib/python3.11/site-packages/dotenv/py.typed b/venv/lib/python3.11/site-packages/dotenv/py.typed new file mode 100644 index 0000000..7632ecf --- /dev/null +++ b/venv/lib/python3.11/site-packages/dotenv/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561 diff --git 
a/venv/lib/python3.11/site-packages/dotenv/variables.py b/venv/lib/python3.11/site-packages/dotenv/variables.py new file mode 100644 index 0000000..667f2f2 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dotenv/variables.py @@ -0,0 +1,86 @@ +import re +from abc import ABCMeta, abstractmethod +from typing import Iterator, Mapping, Optional, Pattern + +_posix_variable: Pattern[str] = re.compile( + r""" + \$\{ + (?P<name>[^\}:]*) + (?::- + (?P<default>[^\}]*) + )? + \} + """, + re.VERBOSE, +) + + +class Atom(metaclass=ABCMeta): + def __ne__(self, other: object) -> bool: + result = self.__eq__(other) + if result is NotImplemented: + return NotImplemented + return not result + + @abstractmethod + def resolve(self, env: Mapping[str, Optional[str]]) -> str: ... + + +class Literal(Atom): + def __init__(self, value: str) -> None: + self.value = value + + def __repr__(self) -> str: + return f"Literal(value={self.value})" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, self.__class__): + return NotImplemented + return self.value == other.value + + def __hash__(self) -> int: + return hash((self.__class__, self.value)) + + def resolve(self, env: Mapping[str, Optional[str]]) -> str: + return self.value + + +class Variable(Atom): + def __init__(self, name: str, default: Optional[str]) -> None: + self.name = name + self.default = default + + def __repr__(self) -> str: + return f"Variable(name={self.name}, default={self.default})" + + def __eq__(self, other: object) -> bool: + if not isinstance(other, self.__class__): + return NotImplemented + return (self.name, self.default) == (other.name, other.default) + + def __hash__(self) -> int: + return hash((self.__class__, self.name, self.default)) + + def resolve(self, env: Mapping[str, Optional[str]]) -> str: + default = self.default if self.default is not None else "" + result = env.get(self.name, default) + return result if result is not None else "" + + +def parse_variables(value: str) -> Iterator[Atom]: + cursor = 0 + + for match in _posix_variable.finditer(value): + (start, end) = match.span() + name = match["name"] + default = match["default"] + + if start > cursor: + yield Literal(value=value[cursor:start]) + + yield Variable(name=name, default=default) + cursor = end + + length = len(value) + if cursor < length: + yield Literal(value=value[cursor:length]) diff --git a/venv/lib/python3.11/site-packages/dotenv/version.py b/venv/lib/python3.11/site-packages/dotenv/version.py new file mode 100644 index 0000000..6849410 --- /dev/null +++ b/venv/lib/python3.11/site-packages/dotenv/version.py @@ -0,0 +1 @@ +__version__ = "1.1.0" diff --git a/venv/lib/python3.11/site-packages/email_validator-2.2.0.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/email_validator-2.2.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.11/site-packages/email_validator-2.2.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.11/site-packages/email_validator-2.2.0.dist-info/LICENSE b/venv/lib/python3.11/site-packages/email_validator-2.2.0.dist-info/LICENSE new file mode 100644 index 0000000..122e7a7 --- /dev/null +++ b/venv/lib/python3.11/site-packages/email_validator-2.2.0.dist-info/LICENSE @@ -0,0 +1,27 @@ +This is free and unencumbered software released into the public +domain. + +Anyone is free to copy, modify, publish, use, compile, sell, or +distribute this software, either in source code form or as a +compiled binary, for any purpose, commercial or non-commercial, +and by any means.
+ +In jurisdictions that recognize copyright laws, the author or +authors of this software dedicate any and all copyright +interest in the software to the public domain. We make this +dedication for the benefit of the public at large and to the +detriment of our heirs and successors. We intend this +dedication to be an overt act of relinquishment in perpetuity +of all present and future rights to this software under +copyright law. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF +CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +For more information, please refer to <https://unlicense.org> diff --git a/venv/lib/python3.11/site-packages/email_validator-2.2.0.dist-info/METADATA b/venv/lib/python3.11/site-packages/email_validator-2.2.0.dist-info/METADATA new file mode 100644 index 0000000..7f7350e --- /dev/null +++ b/venv/lib/python3.11/site-packages/email_validator-2.2.0.dist-info/METADATA @@ -0,0 +1,465 @@ +Metadata-Version: 2.1 +Name: email_validator +Version: 2.2.0 +Summary: A robust email address syntax and deliverability validation library. +Home-page: https://github.com/JoshData/python-email-validator +Author: Joshua Tauberer +Author-email: jt@occams.info +License: Unlicense +Keywords: email address validator +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: The Unlicense (Unlicense) +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.8 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: dnspython >=2.0.0 +Requires-Dist: idna >=2.0.0 + +email-validator: Validate Email Addresses +========================================= + +A robust email address syntax and deliverability validation library for +Python 3.8+ by [Joshua Tauberer](https://joshdata.me). + +This library validates that a string is of the form `name@example.com` +and optionally checks that the domain name is set up to receive email. +This is the sort of validation you would want when you are identifying +users by their email address like on a registration form. + +Key features: + +* Checks that an email address has the correct syntax --- great for + email-based registration/login forms or validating data. +* Gives friendly English error messages that you can display to + end-users when validation fails. +* Checks deliverability (optional): Does the domain name resolve? + (You can override the default DNS resolver to add query caching.) +* Supports internationalized domain names (like `@ツ.life`), + internationalized local parts (like `ツ@example.com`), + and optionally parses display names (e.g. `"My Name" <me@example.com>`). +* Rejects addresses with invalid or unsafe Unicode characters, + obsolete email address syntax that you'd find unexpected, + special use domain names like `@localhost`, + and domains without a dot by default.
+ This is an opinionated library! +* Normalizes email addresses (important for internationalized + and quoted-string addresses! see below). +* Python type annotations are used. + +This is an opinionated library. You should definitely also consider using +the less-opinionated [pyIsEmail](https://github.com/michaelherold/pyIsEmail) +if it works better for you. + +[![Build Status](https://github.com/JoshData/python-email-validator/actions/workflows/test_and_build.yaml/badge.svg)](https://github.com/JoshData/python-email-validator/actions/workflows/test_and_build.yaml) + +View the [CHANGELOG / Release Notes](CHANGELOG.md) for the version history of changes in the library. Occasionally this README is ahead of the latest published package --- see the CHANGELOG for details. + +--- + +Installation +------------ + +This package [is on PyPI](https://pypi.org/project/email-validator/), so: + +```sh +pip install email-validator +``` + +(You might need to use `pip3` depending on your local environment.) + +Quick Start +----------- + +If you're validating a user's email address before creating a user +account in your application, you might do this: + +```python +from email_validator import validate_email, EmailNotValidError + +email = "my+address@example.org" + +try: + + # Check that the email address is valid. Turn on check_deliverability + # for first-time validations like on account creation pages (but not + # login pages). + emailinfo = validate_email(email, check_deliverability=False) + + # After this point, use only the normalized form of the email address, + # especially before going to a database query. + email = emailinfo.normalized + +except EmailNotValidError as e: + + # The exception message is a human-readable explanation of why it's + # not a valid (or deliverable) email address. + print(str(e)) +``` + +This validates the address and gives you its normalized form. You should +**put the normalized form in your database** and always normalize before +checking if an address is in your database. When using this in a login form, +set `check_deliverability` to `False` to avoid unnecessary DNS queries. + +Usage +----- + +### Overview + +The module provides a function `validate_email(email_address)` which +takes an email address and: + +- Raises an `EmailNotValidError` with a helpful, human-readable error + message explaining why the email address is not valid, or +- Returns an object with a normalized form of the email address (which + you should use!) and other information about it. + +When an email address is not valid, `validate_email` raises either an +`EmailSyntaxError` if the form of the address is invalid or an +`EmailUndeliverableError` if the domain name fails DNS checks. Both +exception classes are subclasses of `EmailNotValidError`, which in turn +is a subclass of `ValueError`. + +But when an email address is valid, an object is returned containing +a normalized form of the email address (which you should use!) and +other information. + +The validator doesn't, by default, permit obsoleted forms of email addresses +that no one uses anymore even though they are still valid and deliverable, since +they will probably give you grief if you're using email for login. (See +later in the document about how to allow some obsolete forms.) + +The validator optionally checks that the domain name in the email address has +a DNS MX record indicating that it can receive email. (Except a Null MX record.
+If there is no MX record, a fallback A/AAAA-record is permitted, unless +a reject-all SPF record is present.) DNS is slow and sometimes unavailable or +unreliable, so consider whether these checks are useful for your use case and +turn them off if they aren't. +There is nothing to be gained by trying to actually contact an SMTP server, so +that's not done here. For privacy, security, and practicality reasons, servers +are good at not giving away whether an address is +deliverable or not: email addresses that appear to accept mail at first +can bounce mail after a delay, and bounced mail may indicate a temporary +failure of a good email address (sometimes an intentional failure, like +greylisting). + +### Options + +The `validate_email` function also accepts the following keyword arguments +(defaults are as shown below): + +`check_deliverability=True`: If true, DNS queries are made to check that the domain name in the email address (the part after the @-sign) can receive mail, as described above. Set to `False` to skip this DNS-based check. It is recommended to pass `False` when performing validation for login pages (but not account creation pages) since re-validation of a previously validated domain in your database by querying DNS at every login is probably undesirable. You can also set `email_validator.CHECK_DELIVERABILITY` to `False` to turn this off for all calls by default. + +`dns_resolver=None`: Pass an instance of [dns.resolver.Resolver](https://dnspython.readthedocs.io/en/latest/resolver-class.html) to control the DNS resolver including setting a timeout and [a cache](https://dnspython.readthedocs.io/en/latest/resolver-caching.html). The `caching_resolver` function shown below is a helper function to construct a dns.resolver.Resolver with an [LRUCache](https://dnspython.readthedocs.io/en/latest/resolver-caching.html#dns.resolver.LRUCache). Reuse the same resolver instance across calls to `validate_email` to make use of the cache. + +`test_environment=False`: If `True`, DNS-based deliverability checks are disabled and `test` and `**.test` domain names are permitted (see below). You can also set `email_validator.TEST_ENVIRONMENT` to `True` to turn it on for all calls by default. + +`allow_smtputf8=True`: Set to `False` to prohibit internationalized addresses that would + require the + [SMTPUTF8](https://tools.ietf.org/html/rfc6531) extension. You can also set `email_validator.ALLOW_SMTPUTF8` to `False` to turn it off for all calls by default. + +`allow_quoted_local=False`: Set to `True` to allow obscure and potentially problematic email addresses in which the part of the address before the @-sign contains spaces, @-signs, or other surprising characters when the local part is surrounded in quotes (so-called quoted-string local parts). In the object returned by `validate_email`, the normalized local part removes any unnecessary backslash-escaping and even removes the surrounding quotes if the address would be valid without them. You can also set `email_validator.ALLOW_QUOTED_LOCAL` to `True` to turn this on for all calls by default. + +`allow_domain_literal=False`: Set to `True` to allow bracketed IPv4 and "IPv6:"-prefixed IPv6 addresses in the domain part of the email address. No deliverability checks are performed for these addresses. In the object returned by `validate_email`, the normalized domain will use the condensed IPv6 format, if applicable. The object's `domain_address` attribute will hold the parsed `ipaddress.IPv4Address` or `ipaddress.IPv6Address` object if applicable.
You can also set `email_validator.ALLOW_DOMAIN_LITERAL` to `True` to turn this on for all calls by default. + +`allow_display_name=False`: Set to `True` to allow a display name and bracketed address in the input string, like `My Name <me@example.com>`. It's implemented in the spirit but not the letter of RFC 5322 3.4, so it may be stricter or more relaxed than what you want. The display name, if present, is provided in the returned object's `display_name` field after being unquoted and unescaped. You can also set `email_validator.ALLOW_DISPLAY_NAME` to `True` to turn this on for all calls by default. + +`allow_empty_local=False`: Set to `True` to allow an empty local part (i.e. + `@example.com`), e.g. for validating Postfix aliases. + + +### DNS timeout and cache + +When validating many email addresses or to control the timeout (the default is 15 seconds), create a caching [dns.resolver.Resolver](https://dnspython.readthedocs.io/en/latest/resolver-class.html) to reuse in each call. The `caching_resolver` function returns one easily for you: + +```python +from email_validator import validate_email, caching_resolver + +resolver = caching_resolver(timeout=10) + +while True: + validate_email(email, dns_resolver=resolver) +``` + +### Test addresses + +This library rejects email addresses that use the [Special Use Domain Names](https://www.iana.org/assignments/special-use-domain-names/special-use-domain-names.xhtml) `invalid`, `localhost`, `test`, and some others by raising `EmailSyntaxError`. This is to protect your system from abuse: You probably don't want a user to be able to cause an email to be sent to `localhost` (although they might still be able to do so via a malicious MX record). However, in your non-production test environments you may want to use `@test` or `@myname.test` email addresses. There are three ways you can allow this: + +1. Add `test_environment=True` to the call to `validate_email` (see above). +2. Set `email_validator.TEST_ENVIRONMENT` to `True` globally. +3. Remove the special-use domain name that you want to use from `email_validator.SPECIAL_USE_DOMAIN_NAMES`, e.g.: + +```python +import email_validator +email_validator.SPECIAL_USE_DOMAIN_NAMES.remove("test") +``` + +It is tempting to use `@example.com/net/org` in tests. They are *not* in this library's `SPECIAL_USE_DOMAIN_NAMES` list so you can, but shouldn't, use them. These domains are reserved to IANA for use in documentation so there is no risk of accidentally emailing someone at those domains. But beware that this library will nevertheless reject these domain names unless DNS-based deliverability checks are disabled, because these domains do not resolve to hosts that accept email. In tests, consider using your own domain name or `@test` or `@myname.test` instead. + +Internationalized email addresses +--------------------------------- + +The email protocol SMTP and the domain name system DNS have historically +only allowed English (ASCII) characters in email addresses and domain names, +respectively. Each has adapted to internationalization in a separate +way, creating two separate aspects to email address internationalization. + +(If your mail submission library doesn't support Unicode at all, then +immediately prior to mail submission you must replace the email address with +its ASCII-ized form. This library gives you back the ASCII-ized form in the +`ascii_email` field in the returned object.)
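+
+A minimal sketch of that substitution (the address is the example address used
+elsewhere in this README; the fallback to `normalized` is one reasonable
+policy for SMTPUTF8-capable stacks, not something the library mandates):
+
+```python
+from email_validator import validate_email
+
+info = validate_email("example@ツ.life", check_deliverability=False)
+
+# Prefer the ASCII-compatible form when the submission path is ASCII-only.
+# ascii_email is None when the local part itself is internationalized; in
+# that case SMTPUTF8 support is required and no ASCII form exists.
+recipient = info.ascii_email or info.normalized
+print(recipient)  # prints "example@xn--bdk.life"
+```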
+ +### Internationalized domain names (IDN) + +The first is [internationalized domain names (RFC +5891)](https://tools.ietf.org/html/rfc5891), a.k.a. IDNA 2008. The DNS +system has not been updated with Unicode support. Instead, internationalized +domain names are converted into a special IDNA ASCII "[Punycode](https://www.rfc-editor.org/rfc/rfc3492.txt)" +form starting with `xn--`. When an email address has non-ASCII +characters in its domain part, the domain part is replaced with its IDNA +ASCII equivalent form in the process of mail transmission. Your mail +submission library probably does this for you transparently. ([Compliance +around the web is not very good though](http://archives.miloush.net/michkap/archive/2012/02/27/10273315.html).) This library conforms to IDNA 2008 +using the [idna](https://github.com/kjd/idna) module by Kim Davies. + +### Internationalized local parts + +The second sort of internationalization is internationalization in the +*local* part of the address (before the @-sign). In non-internationalized +email addresses, only English letters, numbers, and some punctuation +(`._!#$%&'^``*+-=~/?{|}`) are allowed. In internationalized email address +local parts, a wider range of Unicode characters are allowed. + +Email addresses with these non-ASCII characters require that your mail +submission library and all the mail servers along the route to the destination, +including your own outbound mail server, all support the +[SMTPUTF8 (RFC 6531)](https://tools.ietf.org/html/rfc6531) extension. +Support for SMTPUTF8 varies. If you know ahead of time that SMTPUTF8 is not +supported by your mail submission stack, then you must filter out addresses that +require SMTPUTF8 using the `allow_smtputf8=False` keyword argument (see above). +This will cause the validation function to raise an `EmailSyntaxError` if +delivery would require SMTPUTF8. If you do not set `allow_smtputf8=False`, +you can also check the value of the `smtputf8` field in the returned object. + +### Unsafe Unicode characters are rejected + +A surprisingly large number of Unicode characters are not safe to display, +especially when the email address is concatenated with other text, so this +library tries to protect you by not permitting reserved code points, +noncharacters, private-use characters, formatting characters (which can +be used to alter the display order of characters), +whitespace, and control characters, nor combining characters +as the first character of the local part or the domain name (so that they +cannot combine with something outside of the email address string or with +the @-sign). See https://qntm.org/safe and https://trojansource.codes/ +for relevant prior work. (Other than whitespace, these are checks that +you should be applying to nearly all user inputs in a security-sensitive +context.) This does not guard against the well-known problem that many +Unicode characters look alike, which can be used to fool humans reading +displayed text. + + +Normalization +------------- + +### Unicode Normalization + +The use of Unicode in email addresses introduced a normalization +problem. Different Unicode strings can look identical and have the same +semantic meaning to the user. The `normalized` field returned on successful +validation provides the correctly normalized form of the given email +address. + +For example, the CJK fullwidth Latin letters are considered semantically +equivalent in domain names to their ASCII counterparts.
This library +normalizes them to their ASCII counterparts (as required by IDNA): + +```python +emailinfo = validate_email("me@Ｄｏｍａｉｎ.com") +print(emailinfo.normalized) +print(emailinfo.ascii_email) +# prints "me@domain.com" twice +``` + +Because an end-user might type their email address in different (but +equivalent) un-normalized forms at different times, you ought to +replace what they enter with the normalized form immediately prior to +going into your database (during account creation), querying your database +(during login), or sending outbound mail. + +The normalizations include lowercasing the domain part of the email +address (domain names are case-insensitive), [Unicode "NFC" +normalization](https://en.wikipedia.org/wiki/Unicode_equivalence) of the +whole address (which turns characters plus [combining +characters](https://en.wikipedia.org/wiki/Combining_character) into +precomposed characters where possible), replacement of [fullwidth and +halfwidth +characters](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms) +in the domain part, possibly other +[UTS46](http://unicode.org/reports/tr46) mappings on the domain part, +and conversion from Punycode to Unicode characters. + +Normalization may change the characters in the email address and the +length of the email address, such that a string might be a valid address +before normalization but invalid after, or vice versa. This library only +permits addresses that are valid both before and after normalization. + +(See [RFC 6532 (internationalized email) section +3.1](https://tools.ietf.org/html/rfc6532#section-3.1) and [RFC 5895 +(IDNA 2008) section 2](http://www.ietf.org/rfc/rfc5895.txt).) + +### Other Normalization + +Normalization is also applied to quoted-string local parts and domain +literal IPv6 addresses if you have allowed them by the `allow_quoted_local` +and `allow_domain_literal` options. In quoted-string local parts, unnecessary +backslash escaping is removed and even the surrounding quotes are removed if +they are unnecessary. For IPv6 domain literals, the IPv6 address is +normalized to condensed form. [RFC 2142](https://datatracker.ietf.org/doc/html/rfc2142) +also requires lowercase normalization for some specific mailbox names like `postmaster@`. + + +Examples +-------- + +For the email address `test@joshdata.me`, the returned object is: + +```python +ValidatedEmail( + normalized='test@joshdata.me', + local_part='test', + domain='joshdata.me', + ascii_email='test@joshdata.me', + ascii_local_part='test', + ascii_domain='joshdata.me', + smtputf8=False) +``` + +For the fictitious but valid address `example@ツ.ⓁⒾⒻⒺ`, which has an +internationalized domain but ASCII local part, the returned object is: + +```python +ValidatedEmail( + normalized='example@ツ.life', + local_part='example', + domain='ツ.life', + ascii_email='example@xn--bdk.life', + ascii_local_part='example', + ascii_domain='xn--bdk.life', + smtputf8=False) + +``` + +Note that `normalized` and other fields provide a normalized form of the +email address, domain name, and (in other cases) local part (see earlier +discussion of normalization), which you should use in your database. + +Calling `validate_email` with the ASCII form of the above email address, +`example@xn--bdk.life`, returns the exact same information (i.e., the +`normalized` field will always contain Unicode characters, not Punycode).
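+
+For instance (a small sketch; the expected values follow from the example
+object shown above):
+
+```python
+from email_validator import validate_email
+
+info = validate_email("example@xn--bdk.life", check_deliverability=False)
+print(info.normalized)    # prints "example@ツ.life" -- Unicode, not Punycode
+print(info.ascii_domain)  # prints "xn--bdk.life"
+```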
+ +For the fictitious address `ツ-test@joshdata.me`, which has an +internationalized local part, the returned object is: + +```python +ValidatedEmail( + normalized='ツ-test@joshdata.me', + local_part='ツ-test', + domain='joshdata.me', + ascii_email=None, + ascii_local_part=None, + ascii_domain='joshdata.me', + smtputf8=True) +``` + +Now `smtputf8` is `True` and `ascii_email` is `None` because the local +part of the address is internationalized. The `local_part` and `normalized` fields +return the normalized form of the address. + +Return value +------------ + +When an email address passes validation, the fields in the returned object +are: + +| Field | Value | +| -----:|-------| +| `normalized` | The normalized form of the email address that you should put in your database. This combines the `local_part` and `domain` fields (see below). | +| `ascii_email` | If set, an ASCII-only form of the normalized email address by replacing the domain part with [IDNA](https://tools.ietf.org/html/rfc5891) [Punycode](https://www.rfc-editor.org/rfc/rfc3492.txt). This field will be present when an ASCII-only form of the email address exists (including if the email address is already ASCII). If the local part of the email address contains internationalized characters, `ascii_email` will be `None`. If set, it merely combines `ascii_local_part` and `ascii_domain`. | +| `local_part` | The normalized local part of the given email address (before the @-sign). Normalization includes Unicode NFC normalization and removing unnecessary quoted-string quotes and backslashes. If `allow_quoted_local` is True and the surrounding quotes are necessary, the quotes _will_ be present in this field. | +| `ascii_local_part` | If set, the local part, which is composed of ASCII characters only. | +| `domain` | The canonical internationalized Unicode form of the domain part of the email address. If the returned string contains non-ASCII characters, either the [SMTPUTF8](https://tools.ietf.org/html/rfc6531) feature of your mail relay will be required to transmit the message or else the email address's domain part must be converted to IDNA ASCII first: Use `ascii_domain` field instead. | +| `ascii_domain` | The [IDNA](https://tools.ietf.org/html/rfc5891) [Punycode](https://www.rfc-editor.org/rfc/rfc3492.txt)-encoded form of the domain part of the given email address, as it would be transmitted on the wire. | +| `domain_address` | If domain literals are allowed and if the email address contains one, an `ipaddress.IPv4Address` or `ipaddress.IPv6Address` object. | +| `display_name` | If no display name was present and angle brackets do not surround the address, this will be `None`; otherwise, it will be set to the display name, or the empty string if there were angle brackets but no display name. If the display name was quoted, it will be unquoted and unescaped. | +| `smtputf8` | A boolean indicating that the [SMTPUTF8](https://tools.ietf.org/html/rfc6531) feature of your mail relay will be required to transmit messages to this address because the local part of the address has non-ASCII characters (the local part cannot be IDNA-encoded). If `allow_smtputf8=False` is passed as an argument, this flag will always be false because an exception is raised if it would have been true. | +| `mx` | A list of (priority, domain) tuples of MX records specified in the DNS for the domain (see [RFC 5321 section 5](https://tools.ietf.org/html/rfc5321#section-5)). 
May be `None` if the deliverability check could not be completed because of a temporary issue like a timeout. | +| `mx_fallback_type` | `None` if an `MX` record is found. If no MX records are actually specified in DNS and instead are inferred, through an obsolete mechanism, from A or AAAA records, the value is the type of DNS record used instead (`A` or `AAAA`). May be `None` if the deliverability check could not be completed because of a temporary issue like a timeout. | +| `spf` | Any SPF record found while checking deliverability. Only set if the SPF record is queried. | + +Assumptions +----------- + +By design, this validator does not pass all email addresses that +strictly conform to the standards. Many email address forms are obsolete +or likely to cause trouble: + +* The validator assumes the email address is intended to be + usable on the public Internet. The domain part + of the email address must be a resolvable domain name + (see the deliverability checks described above). + Most [Special Use Domain Names](https://www.iana.org/assignments/special-use-domain-names/special-use-domain-names.xhtml) + and their subdomains, as well as + domain names without a `.`, are rejected as a syntax error + (except see the `test_environment` parameter above). +* Obsolete email syntaxes are rejected: + The unusual ["(comment)" syntax](https://github.com/JoshData/python-email-validator/issues/77) + is rejected. Extremely old obsolete syntaxes are + rejected. Quoted-string local parts and domain-literal addresses + are rejected by default, but there are options to allow them (see above). + No one uses these forms anymore, and I can't think of any reason why anyone + using this library would need to accept them. + +Testing +------- + +Tests can be run using + +```sh +pip install -r test_requirements.txt +make test +``` + +Tests run with mocked DNS responses. When adding or changing tests, temporarily turn on the `BUILD_MOCKED_DNS_RESPONSE_DATA` flag in `tests/mocked_dns_responses.py` to re-build the database of mocked responses from live queries. + +For Project Maintainers +----------------------- + +The package is distributed as a universal wheel and as a source package. + +To release: + +* Update CHANGELOG.md. +* Update the version number in `email_validator/version.py`. +* Make & push a commit with the new version number and make sure tests pass. +* Make & push a tag (see command below). +* Make a release at https://github.com/JoshData/python-email-validator/releases/new. +* Publish a source and wheel distribution to pypi (see command below). + +```sh +git tag v$(cat email_validator/version.py | sed "s/.* = //" | sed 's/"//g') +git push --tags +./release_to_pypi.sh +``` + +License +------- + +This project is free of any copyright restrictions per the [Unlicense](https://unlicense.org/). (Prior to Feb. 4, 2024, the project was made available under the terms of the [CC0 1.0 Universal public domain dedication](http://creativecommons.org/publicdomain/zero/1.0/).) See [LICENSE](LICENSE) and [CONTRIBUTING.md](CONTRIBUTING.md). 
diff --git a/venv/lib/python3.11/site-packages/email_validator-2.2.0.dist-info/RECORD b/venv/lib/python3.11/site-packages/email_validator-2.2.0.dist-info/RECORD new file mode 100644 index 0000000..4d54bfd --- /dev/null +++ b/venv/lib/python3.11/site-packages/email_validator-2.2.0.dist-info/RECORD @@ -0,0 +1,26 @@ +../../../bin/email_validator,sha256=yZESENKEGZAphpnxJAhbVx7hTHhTdHytxrV1vUuuiIQ,249 +email_validator-2.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +email_validator-2.2.0.dist-info/LICENSE,sha256=ZyF5dS4QkTSj-yvdB4Cyn9t6A5dPD1hqE66tUSlWLUw,1212 +email_validator-2.2.0.dist-info/METADATA,sha256=vELkkg-p-qMuqNFX6uzDmMaruT7Pe5PDAQexHLAB4XM,25741 +email_validator-2.2.0.dist-info/RECORD,, +email_validator-2.2.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +email_validator-2.2.0.dist-info/WHEEL,sha256=cpQTJ5IWu9CdaPViMhC9YzF8gZuS5-vlfoFihTBC86A,91 +email_validator-2.2.0.dist-info/entry_points.txt,sha256=zRM_6bNIUSHTbNx5u6M3nK1MAguvryrc9hICC6HyrBg,66 +email_validator-2.2.0.dist-info/top_level.txt,sha256=fYDOSWFZke46ut7WqdOAJjjhlpPYAaOwOwIsh3s8oWI,16 +email_validator/__init__.py,sha256=g-TFM6vzpEt4dMG93giGlS343yXXXIy7EOLNFEn6DfA,4360 +email_validator/__main__.py,sha256=TIvjaG_OSFRciH0J2pnEJEdX3uJy3ZgocmasEqh9EEI,2243 +email_validator/__pycache__/__init__.cpython-311.pyc,, +email_validator/__pycache__/__main__.cpython-311.pyc,, +email_validator/__pycache__/deliverability.cpython-311.pyc,, +email_validator/__pycache__/exceptions_types.cpython-311.pyc,, +email_validator/__pycache__/rfc_constants.cpython-311.pyc,, +email_validator/__pycache__/syntax.cpython-311.pyc,, +email_validator/__pycache__/validate_email.cpython-311.pyc,, +email_validator/__pycache__/version.cpython-311.pyc,, +email_validator/deliverability.py,sha256=e6eODNSaLMiM29EZ3bWYDFkQDlMIdicBaykjYQJwYig,7222 +email_validator/exceptions_types.py,sha256=yLxXqwtl5dXa-938K7skLP1pMFgi0oovzCs74mX7TGs,6024 +email_validator/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +email_validator/rfc_constants.py,sha256=KVUshwIu699cle3UzDU2_fFBSQOO7p91Z_hrlNANtGM,2767 +email_validator/syntax.py,sha256=Mo5KLgEsbQcvNzs8zO5QbhzUK4MAjL9yJFDpwsF12lY,36005 +email_validator/validate_email.py,sha256=YUXY5Sv_mQ7Vuu_AmGdISza8v-VaABnNMLrlWv8EIl4,8401 +email_validator/version.py,sha256=DKk-1b-rZsJFxFi1JoJ7TmEvIEQ0rf-C9HAZWwvjuM0,22 diff --git a/venv/lib/python3.11/site-packages/email_validator-2.2.0.dist-info/REQUESTED b/venv/lib/python3.11/site-packages/email_validator-2.2.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.11/site-packages/email_validator-2.2.0.dist-info/WHEEL b/venv/lib/python3.11/site-packages/email_validator-2.2.0.dist-info/WHEEL new file mode 100644 index 0000000..9086d27 --- /dev/null +++ b/venv/lib/python3.11/site-packages/email_validator-2.2.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (70.1.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.11/site-packages/email_validator-2.2.0.dist-info/entry_points.txt b/venv/lib/python3.11/site-packages/email_validator-2.2.0.dist-info/entry_points.txt new file mode 100644 index 0000000..03c6e23 --- /dev/null +++ b/venv/lib/python3.11/site-packages/email_validator-2.2.0.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +email_validator = email_validator.__main__:main diff --git a/venv/lib/python3.11/site-packages/email_validator-2.2.0.dist-info/top_level.txt 
b/venv/lib/python3.11/site-packages/email_validator-2.2.0.dist-info/top_level.txt new file mode 100644 index 0000000..798fd5e --- /dev/null +++ b/venv/lib/python3.11/site-packages/email_validator-2.2.0.dist-info/top_level.txt @@ -0,0 +1 @@ +email_validator diff --git a/venv/lib/python3.11/site-packages/email_validator/__init__.py b/venv/lib/python3.11/site-packages/email_validator/__init__.py new file mode 100644 index 0000000..626aa00 --- /dev/null +++ b/venv/lib/python3.11/site-packages/email_validator/__init__.py @@ -0,0 +1,101 @@ +from typing import TYPE_CHECKING + +# Export the main method, helper methods, and the public data types. +from .exceptions_types import ValidatedEmail, EmailNotValidError, \ + EmailSyntaxError, EmailUndeliverableError +from .validate_email import validate_email +from .version import __version__ + +__all__ = ["validate_email", + "ValidatedEmail", "EmailNotValidError", + "EmailSyntaxError", "EmailUndeliverableError", + "caching_resolver", "__version__"] + +if TYPE_CHECKING: + from .deliverability import caching_resolver +else: + def caching_resolver(*args, **kwargs): + # Lazy load `deliverability` as it is slow to import (due to dns.resolver) + from .deliverability import caching_resolver + + return caching_resolver(*args, **kwargs) + + +# These global attributes are a part of the library's API and can be +# changed by library users. + +# Default values for keyword arguments. + +ALLOW_SMTPUTF8 = True +ALLOW_QUOTED_LOCAL = False +ALLOW_DOMAIN_LITERAL = False +ALLOW_DISPLAY_NAME = False +GLOBALLY_DELIVERABLE = True +CHECK_DELIVERABILITY = True +TEST_ENVIRONMENT = False +DEFAULT_TIMEOUT = 15 # secs + +# IANA Special Use Domain Names +# Last Updated 2021-09-21 +# https://www.iana.org/assignments/special-use-domain-names/special-use-domain-names.txt +# +# The domain names without dots would be caught by the check that the domain +# name in an email address must have a period, but this list will also catch +# subdomains of these domains, which are also reserved. +SPECIAL_USE_DOMAIN_NAMES = [ + # The "arpa" entry here is consolidated from a lot of arpa subdomains + # for private address (i.e. non-routable IP addresses like 172.16.x.x) + # reverse mapping, plus some other subdomains. Although RFC 6761 says + # that application software should not treat these domains as special, + # they are private-use domains and so cannot have globally deliverable + # email addresses, which is an assumption of this library, and probably + # all of arpa is similarly special-use, so we reject it all. + "arpa", + + # RFC 6761 says applications "SHOULD NOT" treat the "example" domains + # as special, i.e. applications should accept these domains. + # + # The domain "example" alone fails our syntax validation because it + # lacks a dot (we assume no one has an email address on a TLD directly). + # "@example.com/net/org" will currently fail DNS-based deliverability + # checks because IANA publishes a NULL MX for these domains, and + # "@mail.example[.com/net/org]" and other subdomains will fail DNS- + # based deliverability checks because IANA does not publish MX or A + # DNS records for these subdomains. + # "example", # i.e. "www.example" + # "example.com", + # "example.net", + # "example.org", + + # RFC 6761 says that applications are permitted to treat this domain + # as special and that DNS should return an immediate negative response, + # so we also immediately reject this domain, which also follows the + # purpose of the domain.
+ "invalid", + + # RFC 6762 says that applications "may" treat ".local" as special and + # that "name resolution APIs and libraries SHOULD recognize these names + # as special," and since ".local" has no global definition, we reject + # it, as we expect email addresses to be gloally routable. + "local", + + # RFC 6761 says that applications (like this library) are permitted + # to treat "localhost" as special, and since it cannot have a globally + # deliverable email address, we reject it. + "localhost", + + # RFC 7686 says "applications that do not implement the Tor protocol + # SHOULD generate an error upon the use of .onion and SHOULD NOT + # perform a DNS lookup. + "onion", + + # Although RFC 6761 says that application software should not treat + # these domains as special, it also warns users that the address may + # resolve differently in different systems, and therefore it cannot + # have a globally routable email address, which is an assumption of + # this library, so we reject "@test" and "@*.test" addresses, unless + # the test_environment keyword argument is given, to allow their use + # in application-level test environments. These domains will generally + # fail deliverability checks because "test" is not an actual TLD. + "test", +] diff --git a/venv/lib/python3.11/site-packages/email_validator/__main__.py b/venv/lib/python3.11/site-packages/email_validator/__main__.py new file mode 100644 index 0000000..52791c7 --- /dev/null +++ b/venv/lib/python3.11/site-packages/email_validator/__main__.py @@ -0,0 +1,60 @@ +# A command-line tool for testing. +# +# Usage: +# +# python -m email_validator test@example.org +# python -m email_validator < LIST_OF_ADDRESSES.TXT +# +# Provide email addresses to validate either as a command-line argument +# or in STDIN separated by newlines. Validation errors will be printed for +# invalid email addresses. When passing an email address on the command +# line, if the email address is valid, information about it will be printed. +# When using STDIN, no output will be given for valid email addresses. +# +# Keyword arguments to validate_email can be set in environment variables +# of the same name but upprcase (see below). + +import json +import os +import sys +from typing import Any, Dict, Optional + +from .validate_email import validate_email, _Resolver +from .deliverability import caching_resolver +from .exceptions_types import EmailNotValidError + + +def main(dns_resolver: Optional[_Resolver] = None) -> None: + # The dns_resolver argument is for tests. + + # Set options from environment variables. + options: Dict[str, Any] = {} + for varname in ('ALLOW_SMTPUTF8', 'ALLOW_QUOTED_LOCAL', 'ALLOW_DOMAIN_LITERAL', + 'GLOBALLY_DELIVERABLE', 'CHECK_DELIVERABILITY', 'TEST_ENVIRONMENT'): + if varname in os.environ: + options[varname.lower()] = bool(os.environ[varname]) + for varname in ('DEFAULT_TIMEOUT',): + if varname in os.environ: + options[varname.lower()] = float(os.environ[varname]) + + if len(sys.argv) == 1: + # Validate the email addresses pased line-by-line on STDIN. + dns_resolver = dns_resolver or caching_resolver() + for line in sys.stdin: + email = line.strip() + try: + validate_email(email, dns_resolver=dns_resolver, **options) + except EmailNotValidError as e: + print(f"{email} {e}") + else: + # Validate the email address passed on the command line. 
+ email = sys.argv[1] + try: + result = validate_email(email, dns_resolver=dns_resolver, **options) + print(json.dumps(result.as_dict(), indent=2, sort_keys=True, ensure_ascii=False)) + except EmailNotValidError as e: + print(e) + + +if __name__ == "__main__": + main() diff --git a/venv/lib/python3.11/site-packages/email_validator/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/email_validator/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..4a631b3 Binary files /dev/null and b/venv/lib/python3.11/site-packages/email_validator/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/email_validator/__pycache__/__main__.cpython-311.pyc b/venv/lib/python3.11/site-packages/email_validator/__pycache__/__main__.cpython-311.pyc new file mode 100644 index 0000000..2c784bc Binary files /dev/null and b/venv/lib/python3.11/site-packages/email_validator/__pycache__/__main__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/email_validator/__pycache__/deliverability.cpython-311.pyc b/venv/lib/python3.11/site-packages/email_validator/__pycache__/deliverability.cpython-311.pyc new file mode 100644 index 0000000..9e32c39 Binary files /dev/null and b/venv/lib/python3.11/site-packages/email_validator/__pycache__/deliverability.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/email_validator/__pycache__/exceptions_types.cpython-311.pyc b/venv/lib/python3.11/site-packages/email_validator/__pycache__/exceptions_types.cpython-311.pyc new file mode 100644 index 0000000..42646ee Binary files /dev/null and b/venv/lib/python3.11/site-packages/email_validator/__pycache__/exceptions_types.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/email_validator/__pycache__/rfc_constants.cpython-311.pyc b/venv/lib/python3.11/site-packages/email_validator/__pycache__/rfc_constants.cpython-311.pyc new file mode 100644 index 0000000..42580f6 Binary files /dev/null and b/venv/lib/python3.11/site-packages/email_validator/__pycache__/rfc_constants.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/email_validator/__pycache__/syntax.cpython-311.pyc b/venv/lib/python3.11/site-packages/email_validator/__pycache__/syntax.cpython-311.pyc new file mode 100644 index 0000000..829a089 Binary files /dev/null and b/venv/lib/python3.11/site-packages/email_validator/__pycache__/syntax.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/email_validator/__pycache__/validate_email.cpython-311.pyc b/venv/lib/python3.11/site-packages/email_validator/__pycache__/validate_email.cpython-311.pyc new file mode 100644 index 0000000..3616e0e Binary files /dev/null and b/venv/lib/python3.11/site-packages/email_validator/__pycache__/validate_email.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/email_validator/__pycache__/version.cpython-311.pyc b/venv/lib/python3.11/site-packages/email_validator/__pycache__/version.cpython-311.pyc new file mode 100644 index 0000000..4674446 Binary files /dev/null and b/venv/lib/python3.11/site-packages/email_validator/__pycache__/version.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/email_validator/deliverability.py b/venv/lib/python3.11/site-packages/email_validator/deliverability.py new file mode 100644 index 0000000..90f5f9a --- /dev/null +++ b/venv/lib/python3.11/site-packages/email_validator/deliverability.py @@ -0,0 +1,159 @@ +from typing import Any, List, Optional, Tuple, TypedDict + +import 
ipaddress + +from .exceptions_types import EmailUndeliverableError + +import dns.resolver +import dns.exception + + +def caching_resolver(*, timeout: Optional[int] = None, cache: Any = None, dns_resolver: Optional[dns.resolver.Resolver] = None) -> dns.resolver.Resolver: + if timeout is None: + from . import DEFAULT_TIMEOUT + timeout = DEFAULT_TIMEOUT + resolver = dns_resolver or dns.resolver.Resolver() + resolver.cache = cache or dns.resolver.LRUCache() + resolver.lifetime = timeout # timeout, in seconds + return resolver + + +DeliverabilityInfo = TypedDict("DeliverabilityInfo", { + "mx": List[Tuple[int, str]], + "mx_fallback_type": Optional[str], + "unknown-deliverability": str, +}, total=False) + + +def validate_email_deliverability(domain: str, domain_i18n: str, timeout: Optional[int] = None, dns_resolver: Optional[dns.resolver.Resolver] = None) -> DeliverabilityInfo: + # Check that the domain resolves to an MX record. If there is no MX record, + # try an A or AAAA record, which is a deprecated fallback for deliverability. + # Raises an EmailUndeliverableError on failure. On success, returns a dict + # with deliverability information. + + # If no dns.resolver.Resolver was given, get dnspython's default resolver. + # Override the default resolver's timeout. This may affect other uses of + # dnspython in this process. + if dns_resolver is None: + from . import DEFAULT_TIMEOUT + if timeout is None: + timeout = DEFAULT_TIMEOUT + dns_resolver = dns.resolver.get_default_resolver() + dns_resolver.lifetime = timeout + elif timeout is not None: + raise ValueError("It's not valid to pass both timeout and dns_resolver.") + + deliverability_info: DeliverabilityInfo = {} + + try: + try: + # Try resolving for MX records (RFC 5321 Section 5). + response = dns_resolver.resolve(domain, "MX") + + # For reporting, put them in priority order and remove the trailing dot in the qnames. + mtas = sorted([(r.preference, str(r.exchange).rstrip('.')) for r in response]) + + # RFC 7505: Null MX (0, ".") records signify the domain does not accept email. + # Remove null MX records from the mtas list (but we've stripped trailing dots, + # so the 'exchange' is just "") so we can check if there are no non-null MX + # records remaining. + mtas = [(preference, exchange) for preference, exchange in mtas + if exchange != ""] + if len(mtas) == 0: # null MX only; if there were no MX records at all, a NoAnswer exception would have occurred + raise EmailUndeliverableError(f"The domain name {domain_i18n} does not accept email.") + + deliverability_info["mx"] = mtas + deliverability_info["mx_fallback_type"] = None + + except dns.resolver.NoAnswer: + # If there was no MX record, fall back to an A or AAAA record + # (RFC 5321 Section 5). Check A first since it's more common. + + # If the A/AAAA response has no Globally Reachable IP address, + # treat the response as if it were NoAnswer, i.e., the following + # address types are not allowed fallbacks: Private-Use, Loopback, + # Link-Local, and some other obscure ranges. See + # https://www.iana.org/assignments/iana-ipv4-special-registry/iana-ipv4-special-registry.xhtml + # https://www.iana.org/assignments/iana-ipv6-special-registry/iana-ipv6-special-registry.xhtml + # (Issue #134.)
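+            # For example, ipaddress.ip_address("8.8.8.8").is_global is True,
+            # while "127.0.0.1", "10.0.0.1", and "fe80::1" are all non-global
+            # and therefore not acceptable as A/AAAA fallback targets.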
+ def is_global_addr(address: Any) -> bool: + try: + ipaddr = ipaddress.ip_address(address) + except ValueError: + return False + return ipaddr.is_global + + try: + response = dns_resolver.resolve(domain, "A") + + if not any(is_global_addr(r.address) for r in response): + raise dns.resolver.NoAnswer # fall back to AAAA + + deliverability_info["mx"] = [(0, domain)] + deliverability_info["mx_fallback_type"] = "A" + + except dns.resolver.NoAnswer: + + # If there was no A record, fall back to an AAAA record. + # (It's unclear if SMTP servers actually do this.) + try: + response = dns_resolver.resolve(domain, "AAAA") + + if not any(is_global_addr(r.address) for r in response): + raise dns.resolver.NoAnswer + + deliverability_info["mx"] = [(0, domain)] + deliverability_info["mx_fallback_type"] = "AAAA" + + except dns.resolver.NoAnswer as e: + # If there was no MX, A, or AAAA record, then mail to + # this domain is not deliverable, although the domain + # name has other records (otherwise NXDOMAIN would + # have been raised). + raise EmailUndeliverableError(f"The domain name {domain_i18n} does not accept email.") from e + + # Check for an SPF (RFC 7208) reject-all record ("v=spf1 -all") which indicates + # no emails are sent from this domain (similar to a Null MX record + # but for sending rather than receiving). In combination with the + # absence of an MX record, this is probably a good sign that the + # domain is not used for email. + try: + response = dns_resolver.resolve(domain, "TXT") + for rec in response: + value = b"".join(rec.strings) + if value.startswith(b"v=spf1 "): + if value == b"v=spf1 -all": + raise EmailUndeliverableError(f"The domain name {domain_i18n} does not send email.") + except dns.resolver.NoAnswer: + # No TXT records means there is no SPF policy, so we cannot take any action. + pass + + except dns.resolver.NXDOMAIN as e: + # The domain name does not exist --- there are no records of any sort + # for the domain name. + raise EmailUndeliverableError(f"The domain name {domain_i18n} does not exist.") from e + + except dns.resolver.NoNameservers: + # All nameservers failed to answer the query. This might be a problem + # with local nameservers, maybe? We'll allow the domain to go through. + return { + "unknown-deliverability": "no_nameservers", + } + + except dns.exception.Timeout: + # A timeout could occur for various reasons, so don't treat it as a failure. + return { + "unknown-deliverability": "timeout", + } + + except EmailUndeliverableError: + # Don't let these get clobbered by the wider except block below. + raise + + except Exception as e: + # Unhandled conditions should not propagate.
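+        # (dnspython can raise dns.exception.DNSException subclasses other
+        # than those handled above; surface them uniformly as an
+        # EmailUndeliverableError rather than letting them escape.)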
+ raise EmailUndeliverableError( + "There was an error while checking if the domain name in the email address is deliverable: " + str(e) + ) from e + + return deliverability_info diff --git a/venv/lib/python3.11/site-packages/email_validator/exceptions_types.py b/venv/lib/python3.11/site-packages/email_validator/exceptions_types.py new file mode 100644 index 0000000..928a94f --- /dev/null +++ b/venv/lib/python3.11/site-packages/email_validator/exceptions_types.py @@ -0,0 +1,141 @@ +import warnings +from typing import Any, Dict, List, Optional, Tuple, Union + + +class EmailNotValidError(ValueError): + """Parent class of all exceptions raised by this module.""" + pass + + +class EmailSyntaxError(EmailNotValidError): + """Exception raised when an email address fails validation because of its form.""" + pass + + +class EmailUndeliverableError(EmailNotValidError): + """Exception raised when an email address fails validation because its domain name does not appear deliverable.""" + pass + + +class ValidatedEmail: + """The validate_email function returns objects of this type holding the normalized form of the email address + and other information.""" + + """The email address that was passed to validate_email. (If passed as bytes, this will be a string.)""" + original: str + + """The normalized email address, which should always be used in preference to the original address. + The normalized address converts an IDNA ASCII domain name to Unicode, if possible, and performs + Unicode normalization on the local part and on the domain (if originally Unicode). It is the + concatenation of the local_part and domain attributes, separated by an @-sign.""" + normalized: str + + """The local part of the email address after Unicode normalization.""" + local_part: str + + """The domain part of the email address after Unicode normalization or conversion to + Unicode from IDNA ascii.""" + domain: str + + """If the domain part is a domain literal, the IPv4Address or IPv6Address object.""" + domain_address: object + + """If not None, a form of the email address that uses 7-bit ASCII characters only.""" + ascii_email: Optional[str] + + """If not None, the local part of the email address using 7-bit ASCII characters only.""" + ascii_local_part: Optional[str] + + """A form of the domain name that uses 7-bit ASCII characters only.""" + ascii_domain: str + + """If True, the SMTPUTF8 feature of your mail relay will be required to transmit messages + to this address. This flag is True just when ascii_local_part is missing. 
Otherwise it + is False.""" + smtputf8: bool + + """If a deliverability check is performed and if it succeeds, a list of (priority, domain) + tuples of MX records specified in the DNS for the domain.""" + mx: List[Tuple[int, str]] + + """If no MX records are actually specified in DNS and instead are inferred, through an obsolete + mechanism, from A or AAAA records, the value is the type of DNS record used instead (`A` or `AAAA`).""" + mx_fallback_type: Optional[str] + + """The display name in the original input text, unquoted and unescaped, or None.""" + display_name: Optional[str] + + def __repr__(self) -> str: + return f"<ValidatedEmail {self.normalized}>" + + """For backwards compatibility, support old field names.""" + def __getattr__(self, key: str) -> str: + if key == "original_email": + return self.original + if key == "email": + return self.normalized + raise AttributeError(key) + + @property + def email(self) -> str: + warnings.warn("ValidatedEmail.email is deprecated and will be removed, use ValidatedEmail.normalized instead", DeprecationWarning) + return self.normalized + + """For backwards compatibility, some fields are also exposed through a dict-like interface. Note + that some of the names changed when they became attributes.""" + def __getitem__(self, key: str) -> Union[Optional[str], bool, List[Tuple[int, str]]]: + warnings.warn("dict-like access to the return value of validate_email is deprecated and may not be supported in the future.", DeprecationWarning, stacklevel=2) + if key == "email": + return self.normalized + if key == "email_ascii": + return self.ascii_email + if key == "local": + return self.local_part + if key == "domain": + return self.ascii_domain + if key == "domain_i18n": + return self.domain + if key == "smtputf8": + return self.smtputf8 + if key == "mx": + return self.mx + if key == "mx-fallback": + return self.mx_fallback_type + raise KeyError() + + """Tests use this.""" + def __eq__(self, other: object) -> bool: + if not isinstance(other, ValidatedEmail): + return False + return ( + self.normalized == other.normalized + and self.local_part == other.local_part + and self.domain == other.domain + and getattr(self, 'ascii_email', None) == getattr(other, 'ascii_email', None) + and getattr(self, 'ascii_local_part', None) == getattr(other, 'ascii_local_part', None) + and getattr(self, 'ascii_domain', None) == getattr(other, 'ascii_domain', None) + and self.smtputf8 == other.smtputf8 + and repr(sorted(self.mx) if getattr(self, 'mx', None) else None) + == repr(sorted(other.mx) if getattr(other, 'mx', None) else None) + and getattr(self, 'mx_fallback_type', None) == getattr(other, 'mx_fallback_type', None) + and getattr(self, 'display_name', None) == getattr(other, 'display_name', None) + ) + + """This helps produce the README.""" + def as_constructor(self) -> str: + return "ValidatedEmail(" \ + + ",".join(f"\n {key}={repr(getattr(self, key))}" + for key in ('normalized', 'local_part', 'domain', + 'ascii_email', 'ascii_local_part', 'ascii_domain', + 'smtputf8', 'mx', 'mx_fallback_type', + 'display_name') + if hasattr(self, key) + ) \ + + ")" + + """Convenience method for accessing ValidatedEmail as a dict""" + def as_dict(self) -> Dict[str, Any]: + d = self.__dict__ + if d.get('domain_address'): + d['domain_address'] = repr(d['domain_address']) + return d diff --git a/venv/lib/python3.11/site-packages/email_validator/py.typed b/venv/lib/python3.11/site-packages/email_validator/py.typed new file mode 100644 index 0000000..e69de29 diff --git
diff --git a/venv/lib/python3.11/site-packages/email_validator/rfc_constants.py b/venv/lib/python3.11/site-packages/email_validator/rfc_constants.py new file mode 100644 index 0000000..39d8e31 --- /dev/null +++ b/venv/lib/python3.11/site-packages/email_validator/rfc_constants.py @@ -0,0 +1,51 @@ +# These constants are defined by the email specifications. + +import re + +# Based on RFC 5322 3.2.3, these characters are permitted in email +# addresses (not taking into account internationalization) separated by dots: +ATEXT = r'a-zA-Z0-9_!#\$%&\'\*\+\-/=\?\^`\{\|\}~' +ATEXT_RE = re.compile('[.' + ATEXT + ']') # ATEXT plus dots +DOT_ATOM_TEXT = re.compile('[' + ATEXT + ']+(?:\\.[' + ATEXT + r']+)*\Z') + +# RFC 6531 3.3 extends the allowed characters in internationalized +# addresses to also include three specific ranges of UTF8 defined in +# RFC 3629 section 4, which appear to be the Unicode code points from +# U+0080 to U+10FFFF. +ATEXT_INTL = ATEXT + "\u0080-\U0010FFFF" +ATEXT_INTL_DOT_RE = re.compile('[.' + ATEXT_INTL + ']') # ATEXT_INTL plus dots +DOT_ATOM_TEXT_INTL = re.compile('[' + ATEXT_INTL + ']+(?:\\.[' + ATEXT_INTL + r']+)*\Z') + +# The domain part of the email address, after IDNA (ASCII) encoding, +# must also satisfy the requirements of RFC 952/RFC 1123 2.1 which +# restrict the allowed characters of hostnames further. +ATEXT_HOSTNAME_INTL = re.compile(r"[a-zA-Z0-9\-\." + "\u0080-\U0010FFFF" + "]") +HOSTNAME_LABEL = r'(?:(?:[a-zA-Z0-9][a-zA-Z0-9\-]*)?[a-zA-Z0-9])' +DOT_ATOM_TEXT_HOSTNAME = re.compile(HOSTNAME_LABEL + r'(?:\.' + HOSTNAME_LABEL + r')*\Z') +DOMAIN_NAME_REGEX = re.compile(r"[A-Za-z]\Z") # all TLDs currently end with a letter + +# Domain literal (RFC 5322 3.4.1; dtext is %d33-90 / %d94-126) +DOMAIN_LITERAL_CHARS = re.compile(r"[\u0021-\u005A\u005E-\u007E]") + +# Quoted-string local part (RFC 5321 4.1.2, internationalized by RFC 6531 3.3) +# The permitted characters in a quoted string are the characters in the range +# 32-126, except that quotes and (literal) backslashes can only appear when escaped +# by a backslash. When internationalized, UTF-8 strings are also permitted except +# the ASCII characters that are not previously permitted (see above). +# QUOTED_LOCAL_PART_ADDR = re.compile(r"^\"((?:[\u0020-\u0021\u0023-\u005B\u005D-\u007E]|\\[\u0020-\u007E])*)\"@(.*)") +QTEXT_INTL = re.compile(r"[\u0020-\u007E\u0080-\U0010FFFF]")
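As a quick illustration of how the dot-atom pattern defined above behaves, here is an editor's sketch that rebuilds the same two constants so it runs standalone:

```python
import re

# Same construction as ATEXT / DOT_ATOM_TEXT above.
ATEXT = r'a-zA-Z0-9_!#\$%&\'\*\+\-/=\?\^`\{\|\}~'
DOT_ATOM_TEXT = re.compile('[' + ATEXT + ']+(?:\\.[' + ATEXT + r']+)*\Z')

assert DOT_ATOM_TEXT.match("john.smith")       # dots separate non-empty atoms
assert DOT_ATOM_TEXT.match("user+tag")         # '+' is permitted atext
assert not DOT_ATOM_TEXT.match(".john")        # cannot start with a dot
assert not DOT_ATOM_TEXT.match("john..smith")  # consecutive dots are rejected
assert not DOT_ATOM_TEXT.match("john.")        # cannot end with a dot
```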
+ +# Length constants +# RFC 3696 + errata 1003 + errata 1690 (https://www.rfc-editor.org/errata_search.php?rfc=3696&eid=1690) +# explains the maximum length of an email address is 254 octets. +EMAIL_MAX_LENGTH = 254 +LOCAL_PART_MAX_LENGTH = 64 +DNS_LABEL_LENGTH_LIMIT = 63 # in "octets", RFC 1035 2.3.1 +DOMAIN_MAX_LENGTH = 253 # in "octets" as transmitted, RFC 1035 2.3.4 and RFC 5321 4.5.3.1.2, and see https://stackoverflow.com/questions/32290167/what-is-the-maximum-length-of-a-dns-name + +# RFC 2142 +CASE_INSENSITIVE_MAILBOX_NAMES = [ + 'info', 'marketing', 'sales', 'support', # section 3 + 'abuse', 'noc', 'security', # section 4 + 'postmaster', 'hostmaster', 'usenet', 'news', 'webmaster', 'www', 'uucp', 'ftp', # section 5 +] diff --git a/venv/lib/python3.11/site-packages/email_validator/syntax.py b/venv/lib/python3.11/site-packages/email_validator/syntax.py new file mode 100644 index 0000000..c655451 --- /dev/null +++ b/venv/lib/python3.11/site-packages/email_validator/syntax.py @@ -0,0 +1,761 @@ +from .exceptions_types import EmailSyntaxError, ValidatedEmail +from .rfc_constants import EMAIL_MAX_LENGTH, LOCAL_PART_MAX_LENGTH, DOMAIN_MAX_LENGTH, \ + DOT_ATOM_TEXT, DOT_ATOM_TEXT_INTL, ATEXT_RE, ATEXT_INTL_DOT_RE, ATEXT_HOSTNAME_INTL, QTEXT_INTL, \ + DNS_LABEL_LENGTH_LIMIT, DOT_ATOM_TEXT_HOSTNAME, DOMAIN_NAME_REGEX, DOMAIN_LITERAL_CHARS + +import re +import unicodedata +import idna # implements IDNA 2008; Python's codec is only IDNA 2003 +import ipaddress +from typing import Optional, Tuple, TypedDict, Union + + +def split_email(email: str) -> Tuple[Optional[str], str, str, bool]: + # Return the display name, unescaped local part, and domain part + # of the address, and whether the local part was quoted. If no + # display name was present and angle brackets do not surround + # the address, display name will be None; otherwise, it will be + # set to the display name or the empty string if there were + # angle brackets but no display name. + + # Typical email addresses have a single @-sign and no quote + # characters, but the awkward "quoted string" local part form + # (RFC 5321 4.1.2) allows @-signs and escaped quotes to appear + # in the local part if the local part is quoted. + + # A `display name <addr>` format is also present in MIME messages + # (RFC 5322 3.4) and this format is also often recognized in + # mail UIs. It's not allowed in SMTP commands or in typical web + # login forms, but parsing it has been requested, so it's done + # here as a convenience. It's implemented in the spirit but not + # the letter of RFC 5322 3.4 because MIME messages allow newlines + # and comments as a part of the CFWS rule, but this is typically + # not allowed in mail UIs (although comment syntax was requested + # once too). + # + # Display names are either basic characters (the same basic characters + # permitted in email addresses, but periods are not allowed and spaces + # are allowed; see RFC 5322 Appendix A.1.2), or a quoted string with + # the same rules as a quoted local part. (Multiple quoted strings might + # be allowed? Unclear.) Optional space (RFC 5322 3.4 CFWS) and then the + # email address follows in angle brackets. + # + # An initial quote is ambiguous between starting a display name or + # a quoted local part --- fun. + # + # We assume the input string is already stripped of leading and + # trailing CFWS.
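+ + # Editor's note (illustrative, hypothetical inputs): + # split_email('Jane Doe <jane@example.com>') + # -> ('Jane Doe', 'jane', 'example.com', False) + # split_email('"j@ne"@example.com') + # -> (None, 'j@ne', 'example.com', True), since the @-sign inside + # the quoted local part is not a separator.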
+ + def split_string_at_unquoted_special(text: str, specials: Tuple[str, ...]) -> Tuple[str, str]: + # Split the string at the first character in specials (an @-sign + # or left angle bracket) that does not occur within quotes and + # is not followed by a Unicode combining character. + # If no special character is found, raise an error. + inside_quote = False + escaped = False + left_part = "" + for i, c in enumerate(text): + # < plus U+0338 (Combining Long Solidus Overlay) normalizes to + # ≮ U+226E (Not Less-Than), and it would be confusing to treat + # the < as the start of "<...>" syntax in that case. Likewise, + # if anything combines with an @ or ", we should probably not + # treat it as a special character. + if unicodedata.normalize("NFC", text[i:])[0] != c: + left_part += c + + elif inside_quote: + left_part += c + if c == '\\' and not escaped: + escaped = True + elif c == '"' and not escaped: + # The only way to exit the quote is an unescaped quote. + inside_quote = False + escaped = False + else: + escaped = False + elif c == '"': + left_part += c + inside_quote = True + elif c in specials: + # When unquoted, stop before a special character. + break + else: + left_part += c + + if len(left_part) == len(text): + raise EmailSyntaxError("An email address must have an @-sign.") + + # The right part is whatever is left. + right_part = text[len(left_part):] + + return left_part, right_part + + def unquote_quoted_string(text: str) -> Tuple[str, bool]: + # Remove surrounding quotes and unescape escaped backslashes + # and quotes. Escapes are parsed liberally. I think only + # backslashes and quotes can be escaped but we'll allow anything + # to be. + quoted = False + escaped = False + value = "" + for i, c in enumerate(text): + if quoted: + if escaped: + value += c + escaped = False + elif c == '\\': + escaped = True + elif c == '"': + if i != len(text) - 1: + raise EmailSyntaxError("Extra character(s) found after close quote: " + + ", ".join(safe_character_display(c) for c in text[i + 1:])) + break + else: + value += c + elif i == 0 and c == '"': + quoted = True + else: + value += c + + return value, quoted + + # Split the string at the first unquoted @-sign or left angle bracket. + left_part, right_part = split_string_at_unquoted_special(email, ("@", "<")) + + # If the right part starts with an angle bracket, + # then the left part is a display name and the rest + # of the right part up to the final right angle bracket + # is the email address, <addr>. + if right_part.startswith("<"): + # Remove space between the display name and angle bracket. + left_part = left_part.rstrip() + + # Unquote and unescape the display name. + display_name, display_name_quoted = unquote_quoted_string(left_part) + + # Check that only basic characters are present in a + # non-quoted display name. + if not display_name_quoted: + bad_chars = { + safe_character_display(c) + for c in display_name + if (not ATEXT_RE.match(c) and c != ' ') or c == '.' + } + if bad_chars: + raise EmailSyntaxError("The display name contains invalid characters when not quoted: " + ", ".join(sorted(bad_chars)) + ".") + + # Check for other unsafe characters. + check_unsafe_chars(display_name, allow_space=True) + + # Check that the right part ends with an angle bracket + # but allow spaces after it, I guess. + if ">" not in right_part: + raise EmailSyntaxError("An open angle bracket at the start of the email address has to be followed by a close angle bracket at the end.") + right_part = right_part.rstrip(" ") + if right_part[-1] != ">": + raise EmailSyntaxError("There can't be anything after the email address.") + + # Remove the initial and trailing angle brackets. + addr_spec = right_part[1:].rstrip(">") + + # Split the email address at the first unquoted @-sign.
+ local_part, domain_part = split_string_at_unquoted_special(addr_spec, ("@",)) + + # Otherwise there is no display name. The left part is the local + # part and the right part is the domain. + else: + display_name = None + local_part, domain_part = left_part, right_part + + if domain_part.startswith("@"): + domain_part = domain_part[1:] + + # Unquote the local part if it is quoted. + local_part, is_quoted_local_part = unquote_quoted_string(local_part) + + return display_name, local_part, domain_part, is_quoted_local_part + + +def get_length_reason(addr: str, limit: int) -> str: + """Helper function to return an error message related to invalid length.""" + diff = len(addr) - limit + suffix = "s" if diff > 1 else "" + return f"({diff} character{suffix} too many)" + + +def safe_character_display(c: str) -> str: + # Return safely displayable characters in quotes. + if c == '\\': + return f"\"{c}\"" # can't use repr because it escapes it + if unicodedata.category(c)[0] in ("L", "N", "P", "S"): + return repr(c) + + # Construct a hex string in case the unicode name doesn't exist. + if ord(c) < 0xFFFF: + h = f"U+{ord(c):04x}".upper() + else: + h = f"U+{ord(c):08x}".upper() + + # Return the character name or, if it has no name, the hex string. + return unicodedata.name(c, h) + + +class LocalPartValidationResult(TypedDict): + local_part: str + ascii_local_part: Optional[str] + smtputf8: bool + + +def validate_email_local_part(local: str, allow_smtputf8: bool = True, allow_empty_local: bool = False, + quoted_local_part: bool = False) -> LocalPartValidationResult: + """Validates the syntax of the local part of an email address.""" + + if len(local) == 0: + if not allow_empty_local: + raise EmailSyntaxError("There must be something before the @-sign.") + + # The caller allows an empty local part. Useful for validating certain + # Postfix aliases. + return { + "local_part": local, + "ascii_local_part": local, + "smtputf8": False, + } + + # Check the length of the local part by counting characters. + # (RFC 5321 4.5.3.1.1) + # We're checking the number of characters here. If the local part + # is ASCII-only, then that's the same as bytes (octets). If it's + # internationalized, then the UTF-8 encoding may be longer, but + # that may not be relevant. We will check the total address length + # instead. + if len(local) > LOCAL_PART_MAX_LENGTH: + reason = get_length_reason(local, limit=LOCAL_PART_MAX_LENGTH) + raise EmailSyntaxError(f"The email address is too long before the @-sign {reason}.") + + # Check the local part against the non-internationalized regular expression. + # Most email addresses match this regex so it's probably fastest to check this first. + # (RFC 5322 3.2.3) + # All local parts matching the dot-atom rule are also valid as a quoted string + # so if it was originally quoted (quoted_local_part is True) and this regex matches, + # it's ok. + # (RFC 5321 4.1.2 / RFC 5322 3.2.4). + if DOT_ATOM_TEXT.match(local): + # It's valid. And since it's just the permitted ASCII characters, + # it's normalized and safe. If the local part was originally quoted, + # the quoting was unnecessary and it'll be returned as normalized to + # non-quoted form. + + # Return the local part and flag that SMTPUTF8 is not needed. + return { + "local_part": local, + "ascii_local_part": local, + "smtputf8": False, + } + + # The local part failed the basic dot-atom check. Try the extended character set + # for internationalized addresses. It's the same pattern but with additional + # characters permitted. 
+ # RFC 6531 section 3.3. + valid: Optional[str] = None + requires_smtputf8 = False + if DOT_ATOM_TEXT_INTL.match(local): + # But international characters in the local part may not be permitted. + if not allow_smtputf8: + # Check for invalid characters against the non-internationalized + # permitted character set. + # (RFC 5322 3.2.3) + bad_chars = { + safe_character_display(c) + for c in local + if not ATEXT_RE.match(c) + } + if bad_chars: + raise EmailSyntaxError("Internationalized characters before the @-sign are not supported: " + ", ".join(sorted(bad_chars)) + ".") + + # Although the check above should always find something, fall back to this just in case. + raise EmailSyntaxError("Internationalized characters before the @-sign are not supported.") + + # It's valid. + valid = "dot-atom" + requires_smtputf8 = True + + # There are no syntactic restrictions on quoted local parts, so if + # it was originally quoted, it is probably valid. More characters + # are allowed, like @-signs, spaces, and quotes, and there are no + # restrictions on the placement of dots, as in dot-atom local parts. + elif quoted_local_part: + # Check for invalid characters in a quoted string local part. + # (RFC 5321 4.1.2. RFC 5322 lists additional permitted *obsolete* + # characters which are *not* allowed here. RFC 6531 section 3.3 + # extends the range to UTF8 strings.) + bad_chars = { + safe_character_display(c) + for c in local + if not QTEXT_INTL.match(c) + } + if bad_chars: + raise EmailSyntaxError("The email address contains invalid characters in quotes before the @-sign: " + ", ".join(sorted(bad_chars)) + ".") + + # See if any characters are outside of the ASCII range. + bad_chars = { + safe_character_display(c) + for c in local + if not (32 <= ord(c) <= 126) + } + if bad_chars: + requires_smtputf8 = True + + # International characters in the local part may not be permitted. + if not allow_smtputf8: + raise EmailSyntaxError("Internationalized characters before the @-sign are not supported: " + ", ".join(sorted(bad_chars)) + ".") + + # It's valid. + valid = "quoted" + + # If the local part matches the internationalized dot-atom form or was quoted, + # perform additional checks for Unicode strings. + if valid: + # Check that the local part is a valid, safe, and sensible Unicode string. + # Some of this may be redundant with the range U+0080 to U+10FFFF that is checked + # by DOT_ATOM_TEXT_INTL and QTEXT_INTL. Other characters may be permitted by the + # email specs, but they may not be valid, safe, or sensible Unicode strings. + # See the function for rationale. + check_unsafe_chars(local, allow_space=(valid == "quoted")) + + # Try encoding to UTF-8. Failure is possible with some characters like + # surrogate code points, but those are checked above. Still, we don't + # want to have an unhandled exception later. + try: + local.encode("utf8") + except ValueError as e: + raise EmailSyntaxError("The email address contains an invalid character.") from e + + # If this address passes only by the quoted string form, re-quote it + # and backslash-escape quotes and backslashes (removing any unnecessary + # escapes). Per RFC 5321 4.1.2, "all quoted forms MUST be treated as equivalent, + # and the sending system SHOULD transmit the form that uses the minimum quoting possible." 
+ if valid == "quoted": + local = '"' + re.sub(r'(["\\])', r'\\\1', local) + '"' + + return { + "local_part": local, + "ascii_local_part": local if not requires_smtputf8 else None, + "smtputf8": requires_smtputf8, + } + + # It's not a valid local part. Let's find out why. + # (Since quoted local parts are all valid or handled above, these checks + # don't apply in those cases.) + + # Check for invalid characters. + # (RFC 5322 3.2.3, plus RFC 6531 3.3) + bad_chars = { + safe_character_display(c) + for c in local + if not ATEXT_INTL_DOT_RE.match(c) + } + if bad_chars: + raise EmailSyntaxError("The email address contains invalid characters before the @-sign: " + ", ".join(sorted(bad_chars)) + ".") + + # Check for dot errors imposed by the dot-atom rule. + # (RFC 5322 3.2.3) + check_dot_atom(local, 'An email address cannot start with a {}.', 'An email address cannot have a {} immediately before the @-sign.', is_hostname=False) + + # All of the reasons should already have been checked, but just in case + # we have a fallback message. + raise EmailSyntaxError("The email address contains invalid characters before the @-sign.")
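The quoted-local-part path above is easiest to check end to end. A minimal sketch (editor's illustration; the addresses are made up, and `allow_quoted_local` must be opted into):

```python
from email_validator import validate_email

# Unnecessary quoting is normalized away (RFC 5321 4.1.2).
v = validate_email('"simple"@mail-provider.net',
                   allow_quoted_local=True, check_deliverability=False)
assert v.local_part == 'simple'

# A local part that genuinely needs quoting stays quoted, minimally escaped.
v = validate_email('"with space"@mail-provider.net',
                   allow_quoted_local=True, check_deliverability=False)
assert v.local_part == '"with space"'
```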
+ + +def check_unsafe_chars(s: str, allow_space: bool = False) -> None: + # Check for unsafe characters or characters that would make the string + # invalid or non-sensible Unicode. + bad_chars = set() + for i, c in enumerate(s): + category = unicodedata.category(c) + if category[0] in ("L", "N", "P", "S"): + # Letters, numbers, punctuation, and symbols are permitted. + pass + elif category[0] == "M": + # Combining character in first position would combine with something + # outside of the email address if concatenated, so they are not safe. + # We also check if this occurs after the @-sign, which would not be + # sensible because it would modify the @-sign. + if i == 0: + bad_chars.add(c) + elif category == "Zs": + # Spaces outside of the ASCII range are not specifically disallowed in + # internationalized addresses as far as I can tell, but they violate + # the spirit of the non-internationalized specification that email + # addresses do not contain ASCII spaces when not quoted. Excluding + # ASCII spaces when not quoted is handled directly by the atom regex. + # + # In quoted-string local parts, spaces are explicitly permitted, and + # the ASCII space has category Zs, so we must allow it here, and we'll + # allow all Unicode spaces to be consistent. + if not allow_space: + bad_chars.add(c) + elif category[0] == "Z": + # The two line and paragraph separator characters (in categories Zl and Zp) + # are not specifically disallowed in internationalized addresses + # as far as I can tell, but they violate the spirit of the non-internationalized + # specification that email addresses do not contain line breaks when not quoted. + bad_chars.add(c) + elif category[0] == "C": + # Control, format, surrogate, private use, and unassigned code points (C) + # are all unsafe in various ways. Control and format characters can affect + # text rendering if the email address is concatenated with other text. + # Bidirectional format characters are unsafe, even if used properly, because + # they cause an email address to render as a different email address. + # Private use characters do not make sense for publicly deliverable + # email addresses. + bad_chars.add(c) + else: + # All categories should be handled above, but in case there is something new + # to the Unicode specification in the future, reject all other categories. + bad_chars.add(c) + if bad_chars: + raise EmailSyntaxError("The email address contains unsafe characters: " + + ", ".join(safe_character_display(c) for c in sorted(bad_chars)) + ".") + + +def check_dot_atom(label: str, start_descr: str, end_descr: str, is_hostname: bool) -> None: + # RFC 5322 3.2.3 + if label.endswith("."): + raise EmailSyntaxError(end_descr.format("period")) + if label.startswith("."): + raise EmailSyntaxError(start_descr.format("period")) + if ".." in label: + raise EmailSyntaxError("An email address cannot have two periods in a row.") + + if is_hostname: + # RFC 952 + if label.endswith("-"): + raise EmailSyntaxError(end_descr.format("hyphen")) + if label.startswith("-"): + raise EmailSyntaxError(start_descr.format("hyphen")) + if ".-" in label or "-." in label: + raise EmailSyntaxError("An email address cannot have a period and a hyphen next to each other.") + + +class DomainNameValidationResult(TypedDict): + ascii_domain: str + domain: str + + +def validate_email_domain_name(domain: str, test_environment: bool = False, globally_deliverable: bool = True) -> DomainNameValidationResult: + """Validates the syntax of the domain part of an email address.""" + + # Check for invalid characters. + # (RFC 952 plus RFC 6531 section 3.3 for internationalized addresses) + bad_chars = { + safe_character_display(c) + for c in domain + if not ATEXT_HOSTNAME_INTL.match(c) + } + if bad_chars: + raise EmailSyntaxError("The part after the @-sign contains invalid characters: " + ", ".join(sorted(bad_chars)) + ".") + + # Check for unsafe characters. + # Some of this may be redundant with the range U+0080 to U+10FFFF that is checked + # by DOT_ATOM_TEXT_INTL. Other characters may be permitted by the email specs, but + # they may not be valid, safe, or sensible Unicode strings. + check_unsafe_chars(domain) + + # Perform UTS-46 normalization, which includes casefolding, NFC normalization, + # and converting all label separators (the period/full stop, fullwidth full stop, + # ideographic full stop, and halfwidth ideographic full stop) to regular dots. + # It will also raise an exception if there is an invalid character in the input, + # such as "⒈" which is invalid because it would expand to include a dot and + # U+1FEF which normalizes to a backtick, which is not an allowed hostname character. + # Since several characters *are* normalized to a dot, this has to come before + # checks related to dots, like check_dot_atom which comes next. + original_domain = domain + try: + domain = idna.uts46_remap(domain, std3_rules=False, transitional=False) + except idna.IDNAError as e: + raise EmailSyntaxError(f"The part after the @-sign contains invalid characters ({e}).") from e + + # Check for invalid characters after Unicode normalization which are not caught + # by uts46_remap (see tests for examples). + bad_chars = { + safe_character_display(c) + for c in domain + if not ATEXT_HOSTNAME_INTL.match(c) + } + if bad_chars: + raise EmailSyntaxError("The part after the @-sign contains invalid characters after Unicode normalization: " + ", ".join(sorted(bad_chars)) + ".") + + # The domain part is made up of dot-separated "labels." Each label must + # have at least one character and cannot start or end with dashes, which + # means there are some surprising restrictions on periods and dashes. + # Check that before we do IDNA encoding because the IDNA library gives + # unfriendly errors for these cases, but after UTS-46 normalization because + # it can insert periods and hyphens (from fullwidth characters).
+ # (RFC 952, RFC 1123 2.1, RFC 5322 3.2.3) + check_dot_atom(domain, 'An email address cannot have a {} immediately after the @-sign.', 'An email address cannot end with a {}.', is_hostname=True) + + # Check for RFC 5890's invalid R-LDH labels, which are labels that start + # with two characters other than "xn" and two dashes. + for label in domain.split("."): + if re.match(r"(?!xn)..--", label, re.I): + raise EmailSyntaxError("An email address cannot have two letters followed by two dashes immediately after the @-sign or after a period, except Punycode.") + + if DOT_ATOM_TEXT_HOSTNAME.match(domain): + # This is a valid non-internationalized domain. + ascii_domain = domain + else: + # If international characters are present in the domain name, convert + # the domain to IDNA ASCII. If internationalized characters are present, + # the MTA must either support SMTPUTF8 or the mail client must convert the + # domain name to IDNA before submission. + # + # For ASCII-only domains, the transformation does nothing and is safe to + # apply. However, to ensure we don't rely on the idna library for basic + # syntax checks, we don't use it if it's not needed. + # + # idna.encode also checks the domain name length after encoding but it + # doesn't give a nice error, so we call the underlying idna.alabel method + # directly. idna.alabel checks label length and doesn't give great messages, + # but we can't easily go to lower level methods. + try: + ascii_domain = ".".join( + idna.alabel(label).decode("ascii") + for label in domain.split(".") + ) + except idna.IDNAError as e: + # Some errors would have already been raised by idna.uts46_remap. + raise EmailSyntaxError(f"The part after the @-sign is invalid ({e}).") from e + + # Check the syntax of the string returned by idna.encode. + # It should never fail. + if not DOT_ATOM_TEXT_HOSTNAME.match(ascii_domain): + raise EmailSyntaxError("The email address contains invalid characters after the @-sign after IDNA encoding.") + + # Check the length of the domain name in bytes. + # (RFC 1035 2.3.4 and RFC 5321 4.5.3.1.2) + # We're checking the number of bytes ("octets") here, which can be much + # higher than the number of characters in internationalized domains, + # on the assumption that the domain may be transmitted without SMTPUTF8 + # as IDNA ASCII. (This is also checked by idna.encode, so this exception + # is never reached for internationalized domains.) + if len(ascii_domain) > DOMAIN_MAX_LENGTH: + if ascii_domain == original_domain: + reason = get_length_reason(ascii_domain, limit=DOMAIN_MAX_LENGTH) + raise EmailSyntaxError(f"The email address is too long after the @-sign {reason}.") + else: + diff = len(ascii_domain) - DOMAIN_MAX_LENGTH + s = "" if diff == 1 else "s" + raise EmailSyntaxError(f"The email address is too long after the @-sign ({diff} byte{s} too many after IDNA encoding).") + + # Also check the label length limit. + # (RFC 1035 2.3.1) + for label in ascii_domain.split("."): + if len(label) > DNS_LABEL_LENGTH_LIMIT: + reason = get_length_reason(label, limit=DNS_LABEL_LENGTH_LIMIT) + raise EmailSyntaxError(f"After the @-sign, periods cannot be separated by so many characters {reason}.") + + if globally_deliverable: + # All publicly deliverable addresses have domain names with at least + # one period, at least for gTLDs created since 2013 (per the ICANN Board + # New gTLD Program Committee, https://www.icann.org/en/announcements/details/new-gtld-dotless-domain-names-prohibited-30-8-2013-en). 
+ # We'll consider the lack of a period a syntax error + # since that will match people's sense of what an email address looks + # like. We'll skip this in test environments to allow '@test' email + # addresses. + if "." not in ascii_domain and not (ascii_domain == "test" and test_environment): + raise EmailSyntaxError("The part after the @-sign is not valid. It should have a period.") + + # We also know that all TLDs currently end with a letter. + if not DOMAIN_NAME_REGEX.search(ascii_domain): + raise EmailSyntaxError("The part after the @-sign is not valid. It is not within a valid top-level domain.") + + # Check special-use and reserved domain names. + # Some might fail DNS-based deliverability checks, but that + # can be turned off, so we should fail them all sooner. + # See the references in __init__.py. + from . import SPECIAL_USE_DOMAIN_NAMES + for d in SPECIAL_USE_DOMAIN_NAMES: + # See the note near the definition of SPECIAL_USE_DOMAIN_NAMES. + if d == "test" and test_environment: + continue + + if ascii_domain == d or ascii_domain.endswith("." + d): + raise EmailSyntaxError("The part after the @-sign is a special-use or reserved name that cannot be used with email.") + + # We may have been given an IDNA ASCII domain to begin with. Check + # that the domain actually conforms to IDNA. It could look like IDNA + # but not be actual IDNA. For ASCII-only domains, the conversion out + # of IDNA just gives the same thing back. + # + # This gives us the canonical internationalized form of the domain, + # which we return to the caller as a part of the normalized email + # address. + try: + domain_i18n = idna.decode(ascii_domain.encode('ascii')) + except idna.IDNAError as e: + raise EmailSyntaxError(f"The part after the @-sign is not valid IDNA ({e}).") from e + + # Check that this normalized domain name has not somehow become + # an invalid domain name. All of the checks before this point + # using the idna package probably guarantee that we now have + # a valid international domain name in most respects. But it + # doesn't hurt to re-apply some tests to be sure. See the similar + # tests above. + + # Check for invalid and unsafe characters. We have no test + # case for this. + bad_chars = { + safe_character_display(c) + for c in domain + if not ATEXT_HOSTNAME_INTL.match(c) + } + if bad_chars: + raise EmailSyntaxError("The part after the @-sign contains invalid characters: " + ", ".join(sorted(bad_chars)) + ".") + check_unsafe_chars(domain) + + # Check that it can be encoded back to IDNA ASCII. We have no test + # case for this. + try: + idna.encode(domain_i18n) + except idna.IDNAError as e: + raise EmailSyntaxError(f"The part after the @-sign became invalid after normalizing to international characters ({e}).") from e + + # Return the IDNA ASCII-encoded form of the domain, which is how it + # would be transmitted on the wire (except when used with SMTPUTF8 + # possibly), as well as the canonical Unicode form of the domain, + # which is better for display purposes. This should also take care + # of RFC 6532 section 3.1's suggestion to apply Unicode NFC + # normalization to addresses. + return { + "ascii_domain": ascii_domain, + "domain": domain_i18n, + }
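To make the two domain forms returned above concrete, a short sketch (editor's illustration; the domain is made up and deliverability checks are disabled):

```python
from email_validator import validate_email

v = validate_email("user@bücher.de", check_deliverability=False)
print(v.domain)        # 'bücher.de', the canonical Unicode form for display
print(v.ascii_domain)  # 'xn--bcher-kva.de', the IDNA ASCII form for the wire
assert not v.smtputf8  # the local part is ASCII, so SMTPUTF8 is not required
```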
+ + +def validate_email_length(addrinfo: ValidatedEmail) -> None: + # There are three forms of the email address whose length must be checked: + # + # 1) The original email address string. Since callers may continue to use + # this string, even though we recommend using the normalized form, we + # should not pass validation when the original input is not valid. This + # form is checked first because it is the original input. + # 2) The normalized email address. We perform Unicode NFC normalization of + # the local part, we normalize the domain to internationalized characters + # (if originally IDNA ASCII) which also includes Unicode normalization, + # and we may remove quotes in quoted local parts. We recommend that + # callers use this string, so it must be valid. + # 3) The email address with the IDNA ASCII representation of the domain + # name, since this string may be used with email stacks that don't + # support UTF-8. Since this is the least likely to be used by callers, + # it is checked last. Note that ascii_email will only be set if the + # local part is ASCII, but conceivably the caller may combine an + # internationalized local part with an ASCII domain, so we check this + # on that combination also. Since we only return the normalized local + # part, we use that (and not the unnormalized local part). + # + # In all cases, the length is checked in UTF-8 because the SMTPUTF8 + # extension to SMTP validates the length in bytes. + + addresses_to_check = [ + (addrinfo.original, None), + (addrinfo.normalized, "after normalization"), + ((addrinfo.ascii_local_part or addrinfo.local_part or "") + "@" + addrinfo.ascii_domain, "when the part after the @-sign is converted to IDNA ASCII"), + ] + + for addr, reason in addresses_to_check: + addr_len = len(addr) + addr_utf8_len = len(addr.encode("utf8")) + diff = addr_utf8_len - EMAIL_MAX_LENGTH + if diff > 0: + if reason is None and addr_len == addr_utf8_len: + # If there is no normalization or transcoding, + # we can give a simple count of the number of + # characters over the limit. + reason = get_length_reason(addr, limit=EMAIL_MAX_LENGTH) + elif reason is None: + # If there is no normalization but there is + # some transcoding to UTF-8, we can compute + # the minimum number of characters over the + # limit by dividing the number of bytes over + # the limit by the maximum number of bytes + # per character. + mbpc = max(len(c.encode("utf8")) for c in addr) + mchars = max(1, diff // mbpc) + suffix = "s" if diff > 1 else "" + if mchars == diff: + reason = f"({diff} character{suffix} too many)" + else: + reason = f"({mchars}-{diff} character{suffix} too many)" + else: + # Since there is normalization, the number of + # characters in the input that need to change is + # impossible to know. + suffix = "s" if diff > 1 else "" + reason += f" ({diff} byte{suffix} too many)" + raise EmailSyntaxError(f"The email address is too long {reason}.")
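Domain literals, handled next, are rare but simple to demonstrate. A sketch (editor's illustration; `allow_domain_literal` is off by default and must be enabled):

```python
from email_validator import validate_email

v = validate_email("postmaster@[IPv6:2001:0db8:0000:0000:0000:0000:0000:0001]",
                   allow_domain_literal=True, check_deliverability=False)
print(v.domain)                # '[IPv6:2001:db8::1]', compressed and normalized
print(repr(v.domain_address))  # IPv6Address('2001:db8::1')
```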
+ + +class DomainLiteralValidationResult(TypedDict): + domain_address: Union[ipaddress.IPv4Address, ipaddress.IPv6Address] + domain: str + + +def validate_email_domain_literal(domain_literal: str) -> DomainLiteralValidationResult: + # This is obscure domain-literal syntax. Parse it and return + # a compressed/normalized address. + # RFC 5321 4.1.3 and RFC 5322 3.4.1. + + addr: Union[ipaddress.IPv4Address, ipaddress.IPv6Address] + + # Try to parse the domain literal as an IPv4 address. + # There is no tag for IPv4 addresses, so we can never + # be sure if the user intends an IPv4 address. + if re.match(r"^[0-9\.]+$", domain_literal): + try: + addr = ipaddress.IPv4Address(domain_literal) + except ValueError as e: + raise EmailSyntaxError(f"The address in brackets after the @-sign is not valid: It is not an IPv4 address ({e}) or is missing an address literal tag.") from e + + # Return the IPv4Address object and the domain back unchanged. + return { + "domain_address": addr, + "domain": f"[{addr}]", + } + + # If it begins with "IPv6:" it's an IPv6 address. + if domain_literal.startswith("IPv6:"): + try: + addr = ipaddress.IPv6Address(domain_literal[5:]) + except ValueError as e: + raise EmailSyntaxError(f"The IPv6 address in brackets after the @-sign is not valid ({e}).") from e + + # Return the IPv6Address object and construct a normalized + # domain literal. + return { + "domain_address": addr, + "domain": f"[IPv6:{addr.compressed}]", + } + + # Nothing else is valid. + + if ":" not in domain_literal: + raise EmailSyntaxError("The part after the @-sign in brackets is not an IPv4 address and has no address literal tag.") + + # The tag (the part before the colon) has character restrictions, + # but since it must come from a registry of tags (in which only "IPv6" is defined), + # there's no need to check the syntax of the tag. See RFC 5321 4.1.2. + + # Check for permitted ASCII characters. This actually doesn't matter + # since there will be an exception after anyway. + bad_chars = { + safe_character_display(c) + for c in domain_literal + if not DOMAIN_LITERAL_CHARS.match(c) + } + if bad_chars: + raise EmailSyntaxError("The part after the @-sign contains invalid characters in brackets: " + ", ".join(sorted(bad_chars)) + ".") + + # There are no other domain literal tags. + # https://www.iana.org/assignments/address-literal-tags/address-literal-tags.xhtml + raise EmailSyntaxError("The part after the @-sign contains an invalid address literal tag in brackets.") diff --git a/venv/lib/python3.11/site-packages/email_validator/validate_email.py b/venv/lib/python3.11/site-packages/email_validator/validate_email.py new file mode 100644 index 0000000..a134c77 --- /dev/null +++ b/venv/lib/python3.11/site-packages/email_validator/validate_email.py @@ -0,0 +1,180 @@ +from typing import Optional, Union, TYPE_CHECKING +import unicodedata + +from .exceptions_types import EmailSyntaxError, ValidatedEmail +from .syntax import split_email, validate_email_local_part, validate_email_domain_name, validate_email_domain_literal, validate_email_length +from .rfc_constants import CASE_INSENSITIVE_MAILBOX_NAMES + +if TYPE_CHECKING: + import dns.resolver + _Resolver = dns.resolver.Resolver +else: + _Resolver = object + + +def validate_email( + email: Union[str, bytes], + /, # prior arguments are positional-only + *, # subsequent arguments are keyword-only + allow_smtputf8: Optional[bool] = None, + allow_empty_local: bool = False, + allow_quoted_local: Optional[bool] = None, + allow_domain_literal: Optional[bool] = None, + allow_display_name: Optional[bool] = None, + check_deliverability: Optional[bool] = None, + test_environment: Optional[bool] = None, + globally_deliverable: Optional[bool] = None, + timeout: Optional[int] = None, + dns_resolver: Optional[_Resolver] = None +) -> ValidatedEmail: + """ + Given an email address, and some options, returns a ValidatedEmail instance + with information about the address if it is valid or, if the address is not + valid, raises an EmailNotValidError. This is the main function of the module. + """ + + # Fill in default values of arguments. + from .
import ALLOW_SMTPUTF8, ALLOW_QUOTED_LOCAL, ALLOW_DOMAIN_LITERAL, ALLOW_DISPLAY_NAME, \ + GLOBALLY_DELIVERABLE, CHECK_DELIVERABILITY, TEST_ENVIRONMENT, DEFAULT_TIMEOUT + if allow_smtputf8 is None: + allow_smtputf8 = ALLOW_SMTPUTF8 + if allow_quoted_local is None: + allow_quoted_local = ALLOW_QUOTED_LOCAL + if allow_domain_literal is None: + allow_domain_literal = ALLOW_DOMAIN_LITERAL + if allow_display_name is None: + allow_display_name = ALLOW_DISPLAY_NAME + if check_deliverability is None: + check_deliverability = CHECK_DELIVERABILITY + if test_environment is None: + test_environment = TEST_ENVIRONMENT + if globally_deliverable is None: + globally_deliverable = GLOBALLY_DELIVERABLE + if timeout is None and dns_resolver is None: + timeout = DEFAULT_TIMEOUT + + # Allow email to be a str or bytes instance. If bytes, + # it must be ASCII because that's how the bytes work + # on the wire with SMTP. + if not isinstance(email, str): + try: + email = email.decode("ascii") + except ValueError as e: + raise EmailSyntaxError("The email address is not valid ASCII.") from e + + # Split the address into the display name (or None), the local part + # (before the @-sign), and the domain part (after the @-sign). + # Normally, there is only one @-sign. But the awkward "quoted string" + # local part form (RFC 5321 4.1.2) allows @-signs in the local + # part if the local part is quoted. + display_name, local_part, domain_part, is_quoted_local_part \ + = split_email(email) + + # Collect return values in this instance. + ret = ValidatedEmail() + ret.original = ((local_part if not is_quoted_local_part + else ('"' + local_part + '"')) + + "@" + domain_part) # drop the display name, if any, for email length tests at the end + ret.display_name = display_name + + # Validate the email address's local part syntax and get a normalized form. + # If the original address was quoted and the decoded local part is a valid + # unquoted local part, then we'll get back a normalized (unescaped) local + # part. + local_part_info = validate_email_local_part(local_part, + allow_smtputf8=allow_smtputf8, + allow_empty_local=allow_empty_local, + quoted_local_part=is_quoted_local_part) + ret.local_part = local_part_info["local_part"] + ret.ascii_local_part = local_part_info["ascii_local_part"] + ret.smtputf8 = local_part_info["smtputf8"] + + # RFC 6532 section 3.1 says that Unicode NFC normalization should be applied, + # so we'll return the NFC-normalized local part. Since the caller may use that + # string in place of the original string, ensure it is also valid. + normalized_local_part = unicodedata.normalize("NFC", ret.local_part) + if normalized_local_part != ret.local_part: + try: + validate_email_local_part(normalized_local_part, + allow_smtputf8=allow_smtputf8, + allow_empty_local=allow_empty_local, + quoted_local_part=is_quoted_local_part) + except EmailSyntaxError as e: + raise EmailSyntaxError("After Unicode normalization: " + str(e)) from e + ret.local_part = normalized_local_part + + # If a quoted local part isn't allowed but is present, now raise an exception. + # This is done after any exceptions raised by validate_email_local_part so + # that mandatory checks have highest precedence. + if is_quoted_local_part and not allow_quoted_local: + raise EmailSyntaxError("Quoting the part before the @-sign is not allowed here.") + + # Some local parts are required to be case-insensitive, so we should normalize + # to lowercase. 
+ # RFC 2142 + if ret.ascii_local_part is not None \ + and ret.ascii_local_part.lower() in CASE_INSENSITIVE_MAILBOX_NAMES \ + and ret.local_part is not None: + ret.ascii_local_part = ret.ascii_local_part.lower() + ret.local_part = ret.local_part.lower() + + # Validate the email address's domain part syntax and get a normalized form. + is_domain_literal = False + if len(domain_part) == 0: + raise EmailSyntaxError("There must be something after the @-sign.") + + elif domain_part.startswith("[") and domain_part.endswith("]"): + # Parse the address in the domain literal and get back a normalized domain. + domain_literal_info = validate_email_domain_literal(domain_part[1:-1]) + if not allow_domain_literal: + raise EmailSyntaxError("A bracketed IP address after the @-sign is not allowed here.") + ret.domain = domain_literal_info["domain"] + ret.ascii_domain = domain_literal_info["domain"] # Domain literals are always ASCII. + ret.domain_address = domain_literal_info["domain_address"] + is_domain_literal = True # Prevent deliverability checks. + + else: + # Check the syntax of the domain and get back a normalized + # internationalized and ASCII form. + domain_name_info = validate_email_domain_name(domain_part, test_environment=test_environment, globally_deliverable=globally_deliverable) + ret.domain = domain_name_info["domain"] + ret.ascii_domain = domain_name_info["ascii_domain"] + + # Construct the complete normalized form. + ret.normalized = ret.local_part + "@" + ret.domain + + # If the email address has an ASCII form, add it. + if not ret.smtputf8: + if not ret.ascii_domain: + raise Exception("Missing ASCII domain.") + ret.ascii_email = (ret.ascii_local_part or "") + "@" + ret.ascii_domain + else: + ret.ascii_email = None + + # Check the length of the address. + validate_email_length(ret) + + # Check that a display name is permitted. It's the last syntax check + # because we always check against optional parsing features last. + if display_name is not None and not allow_display_name: + raise EmailSyntaxError("A display name and angle brackets around the email address are not permitted here.") + + if check_deliverability and not test_environment: + # Validate the email address's deliverability using DNS + # and update the returned ValidatedEmail object with metadata. + + if is_domain_literal: + # There is nothing to check --- skip deliverability checks. + return ret + + # Lazy load `deliverability` as it is slow to import (due to dns.resolver) + from .deliverability import validate_email_deliverability + deliverability_info = validate_email_deliverability( + ret.ascii_domain, ret.domain, timeout, dns_resolver + ) + mx = deliverability_info.get("mx") + if mx is not None: + ret.mx = mx + ret.mx_fallback_type = deliverability_info.get("mx_fallback_type") + + return ret diff --git a/venv/lib/python3.11/site-packages/email_validator/version.py b/venv/lib/python3.11/site-packages/email_validator/version.py new file mode 100644 index 0000000..8a124bf --- /dev/null +++ b/venv/lib/python3.11/site-packages/email_validator/version.py @@ -0,0 +1 @@ +__version__ = "2.2.0" diff --git a/venv/lib/python3.11/site-packages/flask/__init__.py b/venv/lib/python3.11/site-packages/flask/__init__.py new file mode 100644 index 0000000..1999340 --- /dev/null +++ b/venv/lib/python3.11/site-packages/flask/__init__.py @@ -0,0 +1,71 @@ +from markupsafe import escape +from markupsafe import Markup + +from . 
import json as json +from .app import Flask as Flask +from .app import Request as Request +from .app import Response as Response +from .blueprints import Blueprint as Blueprint +from .config import Config as Config +from .ctx import after_this_request as after_this_request +from .ctx import copy_current_request_context as copy_current_request_context +from .ctx import has_app_context as has_app_context +from .ctx import has_request_context as has_request_context +from .globals import current_app as current_app +from .globals import g as g +from .globals import request as request +from .globals import session as session +from .helpers import abort as abort +from .helpers import flash as flash +from .helpers import get_flashed_messages as get_flashed_messages +from .helpers import get_template_attribute as get_template_attribute +from .helpers import make_response as make_response +from .helpers import redirect as redirect +from .helpers import send_file as send_file +from .helpers import send_from_directory as send_from_directory +from .helpers import stream_with_context as stream_with_context +from .helpers import url_for as url_for +from .json import jsonify as jsonify +from .signals import appcontext_popped as appcontext_popped +from .signals import appcontext_pushed as appcontext_pushed +from .signals import appcontext_tearing_down as appcontext_tearing_down +from .signals import before_render_template as before_render_template +from .signals import got_request_exception as got_request_exception +from .signals import message_flashed as message_flashed +from .signals import request_finished as request_finished +from .signals import request_started as request_started +from .signals import request_tearing_down as request_tearing_down +from .signals import signals_available as signals_available +from .signals import template_rendered as template_rendered +from .templating import render_template as render_template +from .templating import render_template_string as render_template_string +from .templating import stream_template as stream_template +from .templating import stream_template_string as stream_template_string + +__version__ = "2.2.5" + + +def __getattr__(name): + if name == "_app_ctx_stack": + import warnings + from .globals import __app_ctx_stack + + warnings.warn( + "'_app_ctx_stack' is deprecated and will be removed in Flask 2.3.", + DeprecationWarning, + stacklevel=2, + ) + return __app_ctx_stack + + if name == "_request_ctx_stack": + import warnings + from .globals import __request_ctx_stack + + warnings.warn( + "'_request_ctx_stack' is deprecated and will be removed in Flask 2.3.", + DeprecationWarning, + stacklevel=2, + ) + return __request_ctx_stack + + raise AttributeError(name) diff --git a/venv/lib/python3.11/site-packages/flask/__main__.py b/venv/lib/python3.11/site-packages/flask/__main__.py new file mode 100644 index 0000000..4e28416 --- /dev/null +++ b/venv/lib/python3.11/site-packages/flask/__main__.py @@ -0,0 +1,3 @@ +from .cli import main + +main() diff --git a/venv/lib/python3.11/site-packages/flask/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/flask/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..93c38af Binary files /dev/null and b/venv/lib/python3.11/site-packages/flask/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/flask/__pycache__/__main__.cpython-311.pyc b/venv/lib/python3.11/site-packages/flask/__pycache__/__main__.cpython-311.pyc new file mode 100644 index 
0000000..ce2c9a4 Binary files /dev/null and b/venv/lib/python3.11/site-packages/flask/__pycache__/__main__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/flask/__pycache__/app.cpython-311.pyc b/venv/lib/python3.11/site-packages/flask/__pycache__/app.cpython-311.pyc new file mode 100644 index 0000000..6d6b10b Binary files /dev/null and b/venv/lib/python3.11/site-packages/flask/__pycache__/app.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/flask/__pycache__/blueprints.cpython-311.pyc b/venv/lib/python3.11/site-packages/flask/__pycache__/blueprints.cpython-311.pyc new file mode 100644 index 0000000..e9b5660 Binary files /dev/null and b/venv/lib/python3.11/site-packages/flask/__pycache__/blueprints.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/flask/__pycache__/cli.cpython-311.pyc b/venv/lib/python3.11/site-packages/flask/__pycache__/cli.cpython-311.pyc new file mode 100644 index 0000000..c475b84 Binary files /dev/null and b/venv/lib/python3.11/site-packages/flask/__pycache__/cli.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/flask/__pycache__/config.cpython-311.pyc b/venv/lib/python3.11/site-packages/flask/__pycache__/config.cpython-311.pyc new file mode 100644 index 0000000..1137609 Binary files /dev/null and b/venv/lib/python3.11/site-packages/flask/__pycache__/config.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/flask/__pycache__/ctx.cpython-311.pyc b/venv/lib/python3.11/site-packages/flask/__pycache__/ctx.cpython-311.pyc new file mode 100644 index 0000000..4a42366 Binary files /dev/null and b/venv/lib/python3.11/site-packages/flask/__pycache__/ctx.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/flask/__pycache__/debughelpers.cpython-311.pyc b/venv/lib/python3.11/site-packages/flask/__pycache__/debughelpers.cpython-311.pyc new file mode 100644 index 0000000..fb20dce Binary files /dev/null and b/venv/lib/python3.11/site-packages/flask/__pycache__/debughelpers.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/flask/__pycache__/globals.cpython-311.pyc b/venv/lib/python3.11/site-packages/flask/__pycache__/globals.cpython-311.pyc new file mode 100644 index 0000000..d921cb9 Binary files /dev/null and b/venv/lib/python3.11/site-packages/flask/__pycache__/globals.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/flask/__pycache__/helpers.cpython-311.pyc b/venv/lib/python3.11/site-packages/flask/__pycache__/helpers.cpython-311.pyc new file mode 100644 index 0000000..cbaa6bf Binary files /dev/null and b/venv/lib/python3.11/site-packages/flask/__pycache__/helpers.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/flask/__pycache__/logging.cpython-311.pyc b/venv/lib/python3.11/site-packages/flask/__pycache__/logging.cpython-311.pyc new file mode 100644 index 0000000..1d70b25 Binary files /dev/null and b/venv/lib/python3.11/site-packages/flask/__pycache__/logging.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/flask/__pycache__/scaffold.cpython-311.pyc b/venv/lib/python3.11/site-packages/flask/__pycache__/scaffold.cpython-311.pyc new file mode 100644 index 0000000..02e94cc Binary files /dev/null and b/venv/lib/python3.11/site-packages/flask/__pycache__/scaffold.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/flask/__pycache__/sessions.cpython-311.pyc b/venv/lib/python3.11/site-packages/flask/__pycache__/sessions.cpython-311.pyc new file mode 100644 index 
0000000..199293a Binary files /dev/null and b/venv/lib/python3.11/site-packages/flask/__pycache__/sessions.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/flask/__pycache__/signals.cpython-311.pyc b/venv/lib/python3.11/site-packages/flask/__pycache__/signals.cpython-311.pyc new file mode 100644 index 0000000..9296897 Binary files /dev/null and b/venv/lib/python3.11/site-packages/flask/__pycache__/signals.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/flask/__pycache__/templating.cpython-311.pyc b/venv/lib/python3.11/site-packages/flask/__pycache__/templating.cpython-311.pyc new file mode 100644 index 0000000..50695f7 Binary files /dev/null and b/venv/lib/python3.11/site-packages/flask/__pycache__/templating.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/flask/__pycache__/testing.cpython-311.pyc b/venv/lib/python3.11/site-packages/flask/__pycache__/testing.cpython-311.pyc new file mode 100644 index 0000000..1f98060 Binary files /dev/null and b/venv/lib/python3.11/site-packages/flask/__pycache__/testing.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/flask/__pycache__/typing.cpython-311.pyc b/venv/lib/python3.11/site-packages/flask/__pycache__/typing.cpython-311.pyc new file mode 100644 index 0000000..330508d Binary files /dev/null and b/venv/lib/python3.11/site-packages/flask/__pycache__/typing.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/flask/__pycache__/views.cpython-311.pyc b/venv/lib/python3.11/site-packages/flask/__pycache__/views.cpython-311.pyc new file mode 100644 index 0000000..a7d5c8b Binary files /dev/null and b/venv/lib/python3.11/site-packages/flask/__pycache__/views.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/flask/__pycache__/wrappers.cpython-311.pyc b/venv/lib/python3.11/site-packages/flask/__pycache__/wrappers.cpython-311.pyc new file mode 100644 index 0000000..04507ac Binary files /dev/null and b/venv/lib/python3.11/site-packages/flask/__pycache__/wrappers.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/flask/app.py b/venv/lib/python3.11/site-packages/flask/app.py new file mode 100644 index 0000000..d904d6b --- /dev/null +++ b/venv/lib/python3.11/site-packages/flask/app.py @@ -0,0 +1,2552 @@ +import functools +import inspect +import json +import logging +import os +import sys +import typing as t +import weakref +from collections.abc import Iterator as _abc_Iterator +from datetime import timedelta +from itertools import chain +from threading import Lock +from types import TracebackType +from urllib.parse import quote as _url_quote + +import click +from werkzeug.datastructures import Headers +from werkzeug.datastructures import ImmutableDict +from werkzeug.exceptions import Aborter +from werkzeug.exceptions import BadRequest +from werkzeug.exceptions import BadRequestKeyError +from werkzeug.exceptions import HTTPException +from werkzeug.exceptions import InternalServerError +from werkzeug.routing import BuildError +from werkzeug.routing import Map +from werkzeug.routing import MapAdapter +from werkzeug.routing import RequestRedirect +from werkzeug.routing import RoutingException +from werkzeug.routing import Rule +from werkzeug.serving import is_running_from_reloader +from werkzeug.utils import redirect as _wz_redirect +from werkzeug.wrappers import Response as BaseResponse + +from . import cli +from . 
import typing as ft +from .config import Config +from .config import ConfigAttribute +from .ctx import _AppCtxGlobals +from .ctx import AppContext +from .ctx import RequestContext +from .globals import _cv_app +from .globals import _cv_request +from .globals import g +from .globals import request +from .globals import request_ctx +from .globals import session +from .helpers import _split_blueprint_path +from .helpers import get_debug_flag +from .helpers import get_flashed_messages +from .helpers import get_load_dotenv +from .helpers import locked_cached_property +from .json.provider import DefaultJSONProvider +from .json.provider import JSONProvider +from .logging import create_logger +from .scaffold import _endpoint_from_view_func +from .scaffold import _sentinel +from .scaffold import find_package +from .scaffold import Scaffold +from .scaffold import setupmethod +from .sessions import SecureCookieSessionInterface +from .sessions import SessionInterface +from .signals import appcontext_tearing_down +from .signals import got_request_exception +from .signals import request_finished +from .signals import request_started +from .signals import request_tearing_down +from .templating import DispatchingJinjaLoader +from .templating import Environment +from .wrappers import Request +from .wrappers import Response + +if t.TYPE_CHECKING: # pragma: no cover + import typing_extensions as te + from .blueprints import Blueprint + from .testing import FlaskClient + from .testing import FlaskCliRunner + +T_before_first_request = t.TypeVar( + "T_before_first_request", bound=ft.BeforeFirstRequestCallable +) +T_shell_context_processor = t.TypeVar( + "T_shell_context_processor", bound=ft.ShellContextProcessorCallable +) +T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) +T_template_filter = t.TypeVar("T_template_filter", bound=ft.TemplateFilterCallable) +T_template_global = t.TypeVar("T_template_global", bound=ft.TemplateGlobalCallable) +T_template_test = t.TypeVar("T_template_test", bound=ft.TemplateTestCallable) + +if sys.version_info >= (3, 8): + iscoroutinefunction = inspect.iscoroutinefunction +else: + + def iscoroutinefunction(func: t.Any) -> bool: + while inspect.ismethod(func): + func = func.__func__ + + while isinstance(func, functools.partial): + func = func.func + + return inspect.iscoroutinefunction(func) + + +def _make_timedelta(value: t.Union[timedelta, int, None]) -> t.Optional[timedelta]: + if value is None or isinstance(value, timedelta): + return value + + return timedelta(seconds=value) + + +class Flask(Scaffold): + """The flask object implements a WSGI application and acts as the central + object. It is passed the name of the module or package of the + application. Once it is created it will act as a central registry for + the view functions, the URL rules, template configuration and much more. + + The name of the package is used to resolve resources from inside the + package or the folder the module is contained in depending on if the + package parameter resolves to an actual python package (a folder with + an :file:`__init__.py` file inside) or a standard module (just a ``.py`` file). + + For more information about resource loading, see :func:`open_resource`. + + Usually you create a :class:`Flask` instance in your main module or + in the :file:`__init__.py` file of your package like this:: + + from flask import Flask + app = Flask(__name__) + + .. 
admonition:: About the First Parameter + + The idea of the first parameter is to give Flask an idea of what + belongs to your application. This name is used to find resources + on the filesystem, can be used by extensions to improve debugging + information and a lot more. + + So it's important what you provide there. If you are using a single + module, `__name__` is always the correct value. If you however are + using a package, it's usually recommended to hardcode the name of + your package there. + + For example if your application is defined in :file:`yourapplication/app.py` + you should create it with one of the two versions below:: + + app = Flask('yourapplication') + app = Flask(__name__.split('.')[0]) + + Why is that? The application will work even with `__name__`, thanks + to how resources are looked up. However it will make debugging more + painful. Certain extensions can make assumptions based on the + import name of your application. For example the Flask-SQLAlchemy + extension will look for the code in your application that triggered + an SQL query in debug mode. If the import name is not properly set + up, that debugging information is lost. (For example it would only + pick up SQL queries in `yourapplication.app` and not + `yourapplication.views.frontend`) + + .. versionadded:: 0.7 + The `static_url_path`, `static_folder`, and `template_folder` + parameters were added. + + .. versionadded:: 0.8 + The `instance_path` and `instance_relative_config` parameters were + added. + + .. versionadded:: 0.11 + The `root_path` parameter was added. + + .. versionadded:: 1.0 + The ``host_matching`` and ``static_host`` parameters were added. + + .. versionadded:: 1.0 + The ``subdomain_matching`` parameter was added. Subdomain + matching needs to be enabled manually now. Setting + :data:`SERVER_NAME` does not implicitly enable it. + + :param import_name: the name of the application package + :param static_url_path: can be used to specify a different path for the + static files on the web. Defaults to the name + of the `static_folder` folder. + :param static_folder: The folder with static files that is served at + ``static_url_path``. Relative to the application ``root_path`` + or an absolute path. Defaults to ``'static'``. + :param static_host: the host to use when adding the static route. + Defaults to None. Required when using ``host_matching=True`` + with a ``static_folder`` configured. + :param host_matching: set ``url_map.host_matching`` attribute. + Defaults to False. + :param subdomain_matching: consider the subdomain relative to + :data:`SERVER_NAME` when matching routes. Defaults to False. + :param template_folder: the folder that contains the templates that should + be used by the application. Defaults to + ``'templates'`` folder in the root path of the + application. + :param instance_path: An alternative instance path for the application. + By default the folder ``'instance'`` next to the + package or module is assumed to be the instance + path. + :param instance_relative_config: if set to ``True`` relative filenames + for loading the config are assumed to + be relative to the instance path instead + of the application root. + :param root_path: The path to the root of the application files. + This should only be set manually when it can't be detected + automatically, such as for namespace packages. + """ + + #: The class that is used for request objects. See :class:`~flask.Request` + #: for more information. 
+ request_class = Request + + #: The class that is used for response objects. See + #: :class:`~flask.Response` for more information. + response_class = Response + + #: The class of the object assigned to :attr:`aborter`, created by + #: :meth:`create_aborter`. That object is called by + #: :func:`flask.abort` to raise HTTP errors, and can be + #: called directly as well. + #: + #: Defaults to :class:`werkzeug.exceptions.Aborter`. + #: + #: .. versionadded:: 2.2 + aborter_class = Aborter + + #: The class that is used for the Jinja environment. + #: + #: .. versionadded:: 0.11 + jinja_environment = Environment + + #: The class that is used for the :data:`~flask.g` instance. + #: + #: Example use cases for a custom class: + #: + #: 1. Store arbitrary attributes on flask.g. + #: 2. Add a property for lazy per-request database connectors. + #: 3. Return None instead of AttributeError on unexpected attributes. + #: 4. Raise exception if an unexpected attr is set, a "controlled" flask.g. + #: + #: In Flask 0.9 this property was called `request_globals_class` but it + #: was changed in 0.10 to :attr:`app_ctx_globals_class` because the + #: flask.g object is now application context scoped. + #: + #: .. versionadded:: 0.10 + app_ctx_globals_class = _AppCtxGlobals + + #: The class that is used for the ``config`` attribute of this app. + #: Defaults to :class:`~flask.Config`. + #: + #: Example use cases for a custom class: + #: + #: 1. Default values for certain config options. + #: 2. Access to config values through attributes in addition to keys. + #: + #: .. versionadded:: 0.11 + config_class = Config + + #: The testing flag. Set this to ``True`` to enable the test mode of + #: Flask extensions (and in the future probably also Flask itself). + #: For example this might activate test helpers that have an + #: additional runtime cost which should not be enabled by default. + #: + #: If this is enabled and PROPAGATE_EXCEPTIONS is not changed from the + #: default it's implicitly enabled. + #: + #: This attribute can also be configured from the config with the + #: ``TESTING`` configuration key. Defaults to ``False``. + testing = ConfigAttribute("TESTING") + + #: If a secret key is set, cryptographic components can use this to + #: sign cookies and other things. Set this to a complex random value + #: when you want to use the secure cookie for instance. + #: + #: This attribute can also be configured from the config with the + #: :data:`SECRET_KEY` configuration key. Defaults to ``None``. + secret_key = ConfigAttribute("SECRET_KEY") + + @property + def session_cookie_name(self) -> str: + """The name of the cookie set by the session interface. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. Use ``app.config["SESSION_COOKIE_NAME"]`` + instead. + """ + import warnings + + warnings.warn( + "'session_cookie_name' is deprecated and will be removed in Flask 2.3. Use" + " 'SESSION_COOKIE_NAME' in 'app.config' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self.config["SESSION_COOKIE_NAME"] + + @session_cookie_name.setter + def session_cookie_name(self, value: str) -> None: + import warnings + + warnings.warn( + "'session_cookie_name' is deprecated and will be removed in Flask 2.3. Use" + " 'SESSION_COOKIE_NAME' in 'app.config' instead.", + DeprecationWarning, + stacklevel=2, + ) + self.config["SESSION_COOKIE_NAME"] = value + + #: A :class:`~datetime.timedelta` which is used to set the expiration + #: date of a permanent session. 
The default is 31 days which makes a + #: permanent session survive for roughly one month. + #: + #: This attribute can also be configured from the config with the + #: ``PERMANENT_SESSION_LIFETIME`` configuration key. Defaults to + #: ``timedelta(days=31)`` + permanent_session_lifetime = ConfigAttribute( + "PERMANENT_SESSION_LIFETIME", get_converter=_make_timedelta + ) + + @property + def send_file_max_age_default(self) -> t.Optional[timedelta]: + """The default value for ``max_age`` for :func:`~flask.send_file`. The default + is ``None``, which tells the browser to use conditional requests instead of a + timed cache. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. Use + ``app.config["SEND_FILE_MAX_AGE_DEFAULT"]`` instead. + + .. versionchanged:: 2.0 + Defaults to ``None`` instead of 12 hours. + """ + import warnings + + warnings.warn( + "'send_file_max_age_default' is deprecated and will be removed in Flask" + " 2.3. Use 'SEND_FILE_MAX_AGE_DEFAULT' in 'app.config' instead.", + DeprecationWarning, + stacklevel=2, + ) + return _make_timedelta(self.config["SEND_FILE_MAX_AGE_DEFAULT"]) + + @send_file_max_age_default.setter + def send_file_max_age_default(self, value: t.Union[int, timedelta, None]) -> None: + import warnings + + warnings.warn( + "'send_file_max_age_default' is deprecated and will be removed in Flask" + " 2.3. Use 'SEND_FILE_MAX_AGE_DEFAULT' in 'app.config' instead.", + DeprecationWarning, + stacklevel=2, + ) + self.config["SEND_FILE_MAX_AGE_DEFAULT"] = _make_timedelta(value) + + @property + def use_x_sendfile(self) -> bool: + """Enable this to use the ``X-Sendfile`` feature, assuming the server supports + it, from :func:`~flask.send_file`. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. Use ``app.config["USE_X_SENDFILE"]`` instead. + """ + import warnings + + warnings.warn( + "'use_x_sendfile' is deprecated and will be removed in Flask 2.3. Use" + " 'USE_X_SENDFILE' in 'app.config' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self.config["USE_X_SENDFILE"] + + @use_x_sendfile.setter + def use_x_sendfile(self, value: bool) -> None: + import warnings + + warnings.warn( + "'use_x_sendfile' is deprecated and will be removed in Flask 2.3. Use" + " 'USE_X_SENDFILE' in 'app.config' instead.", + DeprecationWarning, + stacklevel=2, + ) + self.config["USE_X_SENDFILE"] = value + + _json_encoder: t.Union[t.Type[json.JSONEncoder], None] = None + _json_decoder: t.Union[t.Type[json.JSONDecoder], None] = None + + @property # type: ignore[override] + def json_encoder(self) -> t.Type[json.JSONEncoder]: + """The JSON encoder class to use. Defaults to + :class:`~flask.json.JSONEncoder`. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. Customize + :attr:`json_provider_class` instead. + + .. versionadded:: 0.10 + """ + import warnings + + warnings.warn( + "'app.json_encoder' is deprecated and will be removed in Flask 2.3." + " Customize 'app.json_provider_class' or 'app.json' instead.", + DeprecationWarning, + stacklevel=2, + ) + + if self._json_encoder is None: + from . import json + + return json.JSONEncoder + + return self._json_encoder + + @json_encoder.setter + def json_encoder(self, value: t.Type[json.JSONEncoder]) -> None: + import warnings + + warnings.warn( + "'app.json_encoder' is deprecated and will be removed in Flask 2.3." 
+ " Customize 'app.json_provider_class' or 'app.json' instead.", + DeprecationWarning, + stacklevel=2, + ) + self._json_encoder = value + + @property # type: ignore[override] + def json_decoder(self) -> t.Type[json.JSONDecoder]: + """The JSON decoder class to use. Defaults to + :class:`~flask.json.JSONDecoder`. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. Customize + :attr:`json_provider_class` instead. + + .. versionadded:: 0.10 + """ + import warnings + + warnings.warn( + "'app.json_decoder' is deprecated and will be removed in Flask 2.3." + " Customize 'app.json_provider_class' or 'app.json' instead.", + DeprecationWarning, + stacklevel=2, + ) + + if self._json_decoder is None: + from . import json + + return json.JSONDecoder + + return self._json_decoder + + @json_decoder.setter + def json_decoder(self, value: t.Type[json.JSONDecoder]) -> None: + import warnings + + warnings.warn( + "'app.json_decoder' is deprecated and will be removed in Flask 2.3." + " Customize 'app.json_provider_class' or 'app.json' instead.", + DeprecationWarning, + stacklevel=2, + ) + self._json_decoder = value + + json_provider_class: t.Type[JSONProvider] = DefaultJSONProvider + """A subclass of :class:`~flask.json.provider.JSONProvider`. An + instance is created and assigned to :attr:`app.json` when creating + the app. + + The default, :class:`~flask.json.provider.DefaultJSONProvider`, uses + Python's built-in :mod:`json` library. A different provider can use + a different JSON library. + + .. versionadded:: 2.2 + """ + + #: Options that are passed to the Jinja environment in + #: :meth:`create_jinja_environment`. Changing these options after + #: the environment is created (accessing :attr:`jinja_env`) will + #: have no effect. + #: + #: .. versionchanged:: 1.1.0 + #: This is a ``dict`` instead of an ``ImmutableDict`` to allow + #: easier configuration. + #: + jinja_options: dict = {} + + #: Default configuration parameters. + default_config = ImmutableDict( + { + "ENV": None, + "DEBUG": None, + "TESTING": False, + "PROPAGATE_EXCEPTIONS": None, + "SECRET_KEY": None, + "PERMANENT_SESSION_LIFETIME": timedelta(days=31), + "USE_X_SENDFILE": False, + "SERVER_NAME": None, + "APPLICATION_ROOT": "/", + "SESSION_COOKIE_NAME": "session", + "SESSION_COOKIE_DOMAIN": None, + "SESSION_COOKIE_PATH": None, + "SESSION_COOKIE_HTTPONLY": True, + "SESSION_COOKIE_SECURE": False, + "SESSION_COOKIE_SAMESITE": None, + "SESSION_REFRESH_EACH_REQUEST": True, + "MAX_CONTENT_LENGTH": None, + "SEND_FILE_MAX_AGE_DEFAULT": None, + "TRAP_BAD_REQUEST_ERRORS": None, + "TRAP_HTTP_EXCEPTIONS": False, + "EXPLAIN_TEMPLATE_LOADING": False, + "PREFERRED_URL_SCHEME": "http", + "JSON_AS_ASCII": None, + "JSON_SORT_KEYS": None, + "JSONIFY_PRETTYPRINT_REGULAR": None, + "JSONIFY_MIMETYPE": None, + "TEMPLATES_AUTO_RELOAD": None, + "MAX_COOKIE_SIZE": 4093, + } + ) + + #: The rule object to use for URL rules created. This is used by + #: :meth:`add_url_rule`. Defaults to :class:`werkzeug.routing.Rule`. + #: + #: .. versionadded:: 0.7 + url_rule_class = Rule + + #: The map object to use for storing the URL rules and routing + #: configuration parameters. Defaults to :class:`werkzeug.routing.Map`. + #: + #: .. versionadded:: 1.1.0 + url_map_class = Map + + #: The :meth:`test_client` method creates an instance of this test + #: client class. Defaults to :class:`~flask.testing.FlaskClient`. + #: + #: .. 
versionadded:: 0.7 + test_client_class: t.Optional[t.Type["FlaskClient"]] = None + + #: The :class:`~click.testing.CliRunner` subclass, by default + #: :class:`~flask.testing.FlaskCliRunner` that is used by + #: :meth:`test_cli_runner`. Its ``__init__`` method should take a + #: Flask app object as the first argument. + #: + #: .. versionadded:: 1.0 + test_cli_runner_class: t.Optional[t.Type["FlaskCliRunner"]] = None + + #: the session interface to use. By default an instance of + #: :class:`~flask.sessions.SecureCookieSessionInterface` is used here. + #: + #: .. versionadded:: 0.8 + session_interface: SessionInterface = SecureCookieSessionInterface() + + def __init__( + self, + import_name: str, + static_url_path: t.Optional[str] = None, + static_folder: t.Optional[t.Union[str, os.PathLike]] = "static", + static_host: t.Optional[str] = None, + host_matching: bool = False, + subdomain_matching: bool = False, + template_folder: t.Optional[t.Union[str, os.PathLike]] = "templates", + instance_path: t.Optional[str] = None, + instance_relative_config: bool = False, + root_path: t.Optional[str] = None, + ): + super().__init__( + import_name=import_name, + static_folder=static_folder, + static_url_path=static_url_path, + template_folder=template_folder, + root_path=root_path, + ) + + if instance_path is None: + instance_path = self.auto_find_instance_path() + elif not os.path.isabs(instance_path): + raise ValueError( + "If an instance path is provided it must be absolute." + " A relative path was given instead." + ) + + #: Holds the path to the instance folder. + #: + #: .. versionadded:: 0.8 + self.instance_path = instance_path + + #: The configuration dictionary as :class:`Config`. This behaves + #: exactly like a regular dictionary but supports additional methods + #: to load a config from files. + self.config = self.make_config(instance_relative_config) + + #: An instance of :attr:`aborter_class` created by + #: :meth:`make_aborter`. This is called by :func:`flask.abort` + #: to raise HTTP errors, and can be called directly as well. + #: + #: .. versionadded:: 2.2 + #: Moved from ``flask.abort``, which calls this object. + self.aborter = self.make_aborter() + + self.json: JSONProvider = self.json_provider_class(self) + """Provides access to JSON methods. Functions in ``flask.json`` + will call methods on this provider when the application context + is active. Used for handling JSON requests and responses. + + An instance of :attr:`json_provider_class`. Can be customized by + changing that attribute on a subclass, or by assigning to this + attribute afterwards. + + The default, :class:`~flask.json.provider.DefaultJSONProvider`, + uses Python's built-in :mod:`json` library. A different provider + can use a different JSON library. + + .. versionadded:: 2.2 + """ + + #: A list of functions that are called by + #: :meth:`handle_url_build_error` when :meth:`.url_for` raises a + #: :exc:`~werkzeug.routing.BuildError`. Each function is called + #: with ``error``, ``endpoint`` and ``values``. If a function + #: returns ``None`` or raises a ``BuildError``, it is skipped. + #: Otherwise, its return value is returned by ``url_for``. + #: + #: .. versionadded:: 0.9 + self.url_build_error_handlers: t.List[ + t.Callable[[Exception, str, t.Dict[str, t.Any]], str] + ] = [] + + #: A list of functions that will be called at the beginning of the + #: first request to this instance. To register a function, use the + #: :meth:`before_first_request` decorator. + #: + #: .. 
deprecated:: 2.2 + #: Will be removed in Flask 2.3. Run setup code when + #: creating the application instead. + #: + #: .. versionadded:: 0.8 + self.before_first_request_funcs: t.List[ft.BeforeFirstRequestCallable] = [] + + #: A list of functions that are called when the application context + #: is destroyed. Since the application context is also torn down + #: if the request ends this is the place to store code that disconnects + #: from databases. + #: + #: .. versionadded:: 0.9 + self.teardown_appcontext_funcs: t.List[ft.TeardownCallable] = [] + + #: A list of shell context processor functions that should be run + #: when a shell context is created. + #: + #: .. versionadded:: 0.11 + self.shell_context_processors: t.List[ft.ShellContextProcessorCallable] = [] + + #: Maps registered blueprint names to blueprint objects. The + #: dict retains the order the blueprints were registered in. + #: Blueprints can be registered multiple times, this dict does + #: not track how often they were attached. + #: + #: .. versionadded:: 0.7 + self.blueprints: t.Dict[str, "Blueprint"] = {} + + #: a place where extensions can store application specific state. For + #: example this is where an extension could store database engines and + #: similar things. + #: + #: The key must match the name of the extension module. For example in + #: case of a "Flask-Foo" extension in `flask_foo`, the key would be + #: ``'foo'``. + #: + #: .. versionadded:: 0.7 + self.extensions: dict = {} + + #: The :class:`~werkzeug.routing.Map` for this instance. You can use + #: this to change the routing converters after the class was created + #: but before any routes are connected. Example:: + #: + #: from werkzeug.routing import BaseConverter + #: + #: class ListConverter(BaseConverter): + #: def to_python(self, value): + #: return value.split(',') + #: def to_url(self, values): + #: return ','.join(super(ListConverter, self).to_url(value) + #: for value in values) + #: + #: app = Flask(__name__) + #: app.url_map.converters['list'] = ListConverter + self.url_map = self.url_map_class() + + self.url_map.host_matching = host_matching + self.subdomain_matching = subdomain_matching + + # tracks internally if the application already handled at least one + # request. + self._got_first_request = False + self._before_request_lock = Lock() + + # Add a static route using the provided static_url_path, static_host, + # and static_folder if there is a configured static_folder. + # Note we do this without checking if static_folder exists. + # For one, it might be created while the server is running (e.g. during + # development). Also, Google App Engine stores static files somewhere + if self.has_static_folder: + assert ( + bool(static_host) == host_matching + ), "Invalid static_host/host_matching combination" + # Use a weakref to avoid creating a reference cycle between the app + # and the view function (see #3761). + self_ref = weakref.ref(self) + self.add_url_rule( + f"{self.static_url_path}/", + endpoint="static", + host=static_host, + view_func=lambda **kw: self_ref().send_static_file(**kw), # type: ignore # noqa: B950 + ) + + # Set the name of the Click group in case someone wants to add + # the app's commands to another CLI tool. + self.cli.name = self.name + + def _check_setup_finished(self, f_name: str) -> None: + if self._got_first_request: + raise AssertionError( + f"The setup method '{f_name}' can no longer be called" + " on the application. 
It has already handled its first" + " request, any changes will not be applied" + " consistently.\n" + "Make sure all imports, decorators, functions, etc." + " needed to set up the application are done before" + " running it." + ) + + @locked_cached_property + def name(self) -> str: # type: ignore + """The name of the application. This is usually the import name + with the difference that it's guessed from the run file if the + import name is main. This name is used as a display name when + Flask needs the name of the application. It can be set and overridden + to change the value. + + .. versionadded:: 0.8 + """ + if self.import_name == "__main__": + fn = getattr(sys.modules["__main__"], "__file__", None) + if fn is None: + return "__main__" + return os.path.splitext(os.path.basename(fn))[0] + return self.import_name + + @property + def propagate_exceptions(self) -> bool: + """Returns the value of the ``PROPAGATE_EXCEPTIONS`` configuration + value in case it's set, otherwise a sensible default is returned. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. + + .. versionadded:: 0.7 + """ + import warnings + + warnings.warn( + "'propagate_exceptions' is deprecated and will be removed in Flask 2.3.", + DeprecationWarning, + stacklevel=2, + ) + rv = self.config["PROPAGATE_EXCEPTIONS"] + if rv is not None: + return rv + return self.testing or self.debug + + @locked_cached_property + def logger(self) -> logging.Logger: + """A standard Python :class:`~logging.Logger` for the app, with + the same name as :attr:`name`. + + In debug mode, the logger's :attr:`~logging.Logger.level` will + be set to :data:`~logging.DEBUG`. + + If there are no handlers configured, a default handler will be + added. See :doc:`/logging` for more information. + + .. versionchanged:: 1.1.0 + The logger takes the same name as :attr:`name` rather than + hard-coding ``"flask.app"``. + + .. versionchanged:: 1.0.0 + Behavior was simplified. The logger is always named + ``"flask.app"``. The level is only set during configuration, + it doesn't check ``app.debug`` each time. Only one format is + used, not different ones depending on ``app.debug``. No + handlers are removed, and a handler is only added if no + handlers are already configured. + + .. versionadded:: 0.3 + """ + return create_logger(self) + + @locked_cached_property + def jinja_env(self) -> Environment: + """The Jinja environment used to load templates. + + The environment is created the first time this property is + accessed. Changing :attr:`jinja_options` after that will have no + effect. + """ + return self.create_jinja_environment() + + @property + def got_first_request(self) -> bool: + """This attribute is set to ``True`` if the application started + handling the first request. + + .. versionadded:: 0.8 + """ + return self._got_first_request + + def make_config(self, instance_relative: bool = False) -> Config: + """Used to create the config attribute by the Flask constructor. + The `instance_relative` parameter is passed in from the constructor + of Flask (there named `instance_relative_config`) and indicates if + the config should be relative to the instance path or the root path + of the application. + + .. 
versionadded:: 0.8 + """ + root_path = self.root_path + if instance_relative: + root_path = self.instance_path + defaults = dict(self.default_config) + defaults["ENV"] = os.environ.get("FLASK_ENV") or "production" + defaults["DEBUG"] = get_debug_flag() + return self.config_class(root_path, defaults) + + def make_aborter(self) -> Aborter: + """Create the object to assign to :attr:`aborter`. That object + is called by :func:`flask.abort` to raise HTTP errors, and can + be called directly as well. + + By default, this creates an instance of :attr:`aborter_class`, + which defaults to :class:`werkzeug.exceptions.Aborter`. + + .. versionadded:: 2.2 + """ + return self.aborter_class() + + def auto_find_instance_path(self) -> str: + """Tries to locate the instance path if it was not provided to the + constructor of the application class. It will basically calculate + the path to a folder named ``instance`` next to your main file or + the package. + + .. versionadded:: 0.8 + """ + prefix, package_path = find_package(self.import_name) + if prefix is None: + return os.path.join(package_path, "instance") + return os.path.join(prefix, "var", f"{self.name}-instance") + + def open_instance_resource(self, resource: str, mode: str = "rb") -> t.IO[t.AnyStr]: + """Opens a resource from the application's instance folder + (:attr:`instance_path`). Otherwise works like + :meth:`open_resource`. Instance resources can also be opened for + writing. + + :param resource: the name of the resource. To access resources within + subfolders use forward slashes as separator. + :param mode: resource file opening mode, default is 'rb'. + """ + return open(os.path.join(self.instance_path, resource), mode) + + @property + def templates_auto_reload(self) -> bool: + """Reload templates when they are changed. Used by + :meth:`create_jinja_environment`. It is enabled by default in debug mode. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. Use ``app.config["TEMPLATES_AUTO_RELOAD"]`` + instead. + + .. versionadded:: 1.0 + This property was added but the underlying config and behavior + already existed. + """ + import warnings + + warnings.warn( + "'templates_auto_reload' is deprecated and will be removed in Flask 2.3." + " Use 'TEMPLATES_AUTO_RELOAD' in 'app.config' instead.", + DeprecationWarning, + stacklevel=2, + ) + rv = self.config["TEMPLATES_AUTO_RELOAD"] + return rv if rv is not None else self.debug + + @templates_auto_reload.setter + def templates_auto_reload(self, value: bool) -> None: + import warnings + + warnings.warn( + "'templates_auto_reload' is deprecated and will be removed in Flask 2.3." + " Use 'TEMPLATES_AUTO_RELOAD' in 'app.config' instead.", + DeprecationWarning, + stacklevel=2, + ) + self.config["TEMPLATES_AUTO_RELOAD"] = value + + def create_jinja_environment(self) -> Environment: + """Create the Jinja environment based on :attr:`jinja_options` + and the various Jinja-related methods of the app. Changing + :attr:`jinja_options` after this will have no effect. Also adds + Flask-related globals and filters to the environment. + + .. versionchanged:: 0.11 + ``Environment.auto_reload`` set in accordance with + ``TEMPLATES_AUTO_RELOAD`` configuration option. + + .. 
versionadded:: 0.5 + """ + options = dict(self.jinja_options) + + if "autoescape" not in options: + options["autoescape"] = self.select_jinja_autoescape + + if "auto_reload" not in options: + auto_reload = self.config["TEMPLATES_AUTO_RELOAD"] + + if auto_reload is None: + auto_reload = self.debug + + options["auto_reload"] = auto_reload + + rv = self.jinja_environment(self, **options) + rv.globals.update( + url_for=self.url_for, + get_flashed_messages=get_flashed_messages, + config=self.config, + # request, session and g are normally added with the + # context processor for efficiency reasons but for imported + # templates we also want the proxies in there. + request=request, + session=session, + g=g, + ) + rv.policies["json.dumps_function"] = self.json.dumps + return rv + + def create_global_jinja_loader(self) -> DispatchingJinjaLoader: + """Creates the loader for the Jinja2 environment. Can be used to + override just the loader and keeping the rest unchanged. It's + discouraged to override this function. Instead one should override + the :meth:`jinja_loader` function instead. + + The global loader dispatches between the loaders of the application + and the individual blueprints. + + .. versionadded:: 0.7 + """ + return DispatchingJinjaLoader(self) + + def select_jinja_autoescape(self, filename: str) -> bool: + """Returns ``True`` if autoescaping should be active for the given + template name. If no template name is given, returns `True`. + + .. versionchanged:: 2.2 + Autoescaping is now enabled by default for ``.svg`` files. + + .. versionadded:: 0.5 + """ + if filename is None: + return True + return filename.endswith((".html", ".htm", ".xml", ".xhtml", ".svg")) + + def update_template_context(self, context: dict) -> None: + """Update the template context with some commonly used variables. + This injects request, session, config and g into the template + context as well as everything template context processors want + to inject. Note that the as of Flask 0.6, the original values + in the context will not be overridden if a context processor + decides to return a value with the same key. + + :param context: the context as a dictionary that is updated in place + to add extra variables. + """ + names: t.Iterable[t.Optional[str]] = (None,) + + # A template may be rendered outside a request context. + if request: + names = chain(names, reversed(request.blueprints)) + + # The values passed to render_template take precedence. Keep a + # copy to re-apply after all context functions. + orig_ctx = context.copy() + + for name in names: + if name in self.template_context_processors: + for func in self.template_context_processors[name]: + context.update(func()) + + context.update(orig_ctx) + + def make_shell_context(self) -> dict: + """Returns the shell context for an interactive shell for this + application. This runs all the registered shell context + processors. + + .. versionadded:: 0.11 + """ + rv = {"app": self, "g": g} + for processor in self.shell_context_processors: + rv.update(processor()) + return rv + + @property + def env(self) -> str: + """What environment the app is running in. This maps to the :data:`ENV` config + key. + + **Do not enable development when deploying in production.** + + Default: ``'production'`` + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. + """ + import warnings + + warnings.warn( + "'app.env' is deprecated and will be removed in Flask 2.3." 
+ " Use 'app.debug' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self.config["ENV"] + + @env.setter + def env(self, value: str) -> None: + import warnings + + warnings.warn( + "'app.env' is deprecated and will be removed in Flask 2.3." + " Use 'app.debug' instead.", + DeprecationWarning, + stacklevel=2, + ) + self.config["ENV"] = value + + @property + def debug(self) -> bool: + """Whether debug mode is enabled. When using ``flask run`` to start the + development server, an interactive debugger will be shown for unhandled + exceptions, and the server will be reloaded when code changes. This maps to the + :data:`DEBUG` config key. It may not behave as expected if set late. + + **Do not enable debug mode when deploying in production.** + + Default: ``False`` + """ + return self.config["DEBUG"] + + @debug.setter + def debug(self, value: bool) -> None: + self.config["DEBUG"] = value + + if self.config["TEMPLATES_AUTO_RELOAD"] is None: + self.jinja_env.auto_reload = value + + def run( + self, + host: t.Optional[str] = None, + port: t.Optional[int] = None, + debug: t.Optional[bool] = None, + load_dotenv: bool = True, + **options: t.Any, + ) -> None: + """Runs the application on a local development server. + + Do not use ``run()`` in a production setting. It is not intended to + meet security and performance requirements for a production server. + Instead, see :doc:`/deploying/index` for WSGI server recommendations. + + If the :attr:`debug` flag is set the server will automatically reload + for code changes and show a debugger in case an exception happened. + + If you want to run the application in debug mode, but disable the + code execution on the interactive debugger, you can pass + ``use_evalex=False`` as parameter. This will keep the debugger's + traceback screen active, but disable code execution. + + It is not recommended to use this function for development with + automatic reloading as this is badly supported. Instead you should + be using the :command:`flask` command line script's ``run`` support. + + .. admonition:: Keep in Mind + + Flask will suppress any server error with a generic error page + unless it is in debug mode. As such to enable just the + interactive debugger without the code reloading, you have to + invoke :meth:`run` with ``debug=True`` and ``use_reloader=False``. + Setting ``use_debugger`` to ``True`` without being in debug mode + won't catch any exceptions because there won't be any to + catch. + + :param host: the hostname to listen on. Set this to ``'0.0.0.0'`` to + have the server available externally as well. Defaults to + ``'127.0.0.1'`` or the host in the ``SERVER_NAME`` config variable + if present. + :param port: the port of the webserver. Defaults to ``5000`` or the + port defined in the ``SERVER_NAME`` config variable if present. + :param debug: if given, enable or disable debug mode. See + :attr:`debug`. + :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` + files to set environment variables. Will also change the working + directory to the directory containing the first file found. + :param options: the options to be forwarded to the underlying Werkzeug + server. See :func:`werkzeug.serving.run_simple` for more + information. + + .. versionchanged:: 1.0 + If installed, python-dotenv will be used to load environment + variables from :file:`.env` and :file:`.flaskenv` files. + + The :envvar:`FLASK_DEBUG` environment variable will override :attr:`debug`. + + Threaded mode is enabled by default. + + .. 
versionchanged:: 0.10 + The default port is now picked from the ``SERVER_NAME`` + variable. + """ + # Ignore this call so that it doesn't start another server if + # the 'flask run' command is used. + if os.environ.get("FLASK_RUN_FROM_CLI") == "true": + if not is_running_from_reloader(): + click.secho( + " * Ignoring a call to 'app.run()' that would block" + " the current 'flask' CLI command.\n" + " Only call 'app.run()' in an 'if __name__ ==" + ' "__main__"\' guard.', + fg="red", + ) + + return + + if get_load_dotenv(load_dotenv): + cli.load_dotenv() + + # if set, let env vars override previous values + if "FLASK_ENV" in os.environ: + print( + "'FLASK_ENV' is deprecated and will not be used in" + " Flask 2.3. Use 'FLASK_DEBUG' instead.", + file=sys.stderr, + ) + self.config["ENV"] = os.environ.get("FLASK_ENV") or "production" + self.debug = get_debug_flag() + elif "FLASK_DEBUG" in os.environ: + self.debug = get_debug_flag() + + # debug passed to method overrides all other sources + if debug is not None: + self.debug = bool(debug) + + server_name = self.config.get("SERVER_NAME") + sn_host = sn_port = None + + if server_name: + sn_host, _, sn_port = server_name.partition(":") + + if not host: + if sn_host: + host = sn_host + else: + host = "127.0.0.1" + + if port or port == 0: + port = int(port) + elif sn_port: + port = int(sn_port) + else: + port = 5000 + + options.setdefault("use_reloader", self.debug) + options.setdefault("use_debugger", self.debug) + options.setdefault("threaded", True) + + cli.show_server_banner(self.debug, self.name) + + from werkzeug.serving import run_simple + + try: + run_simple(t.cast(str, host), port, self, **options) + finally: + # reset the first request information if the development server + # reset normally. This makes it possible to restart the server + # without reloader and that stuff from an interactive shell. + self._got_first_request = False + + def test_client(self, use_cookies: bool = True, **kwargs: t.Any) -> "FlaskClient": + """Creates a test client for this application. For information + about unit testing head over to :doc:`/testing`. + + Note that if you are testing for assertions or exceptions in your + application code, you must set ``app.testing = True`` in order for the + exceptions to propagate to the test client. Otherwise, the exception + will be handled by the application (not visible to the test client) and + the only indication of an AssertionError or other exception will be a + 500 status code response to the test client. See the :attr:`testing` + attribute. For example:: + + app.testing = True + client = app.test_client() + + The test client can be used in a ``with`` block to defer the closing down + of the context until the end of the ``with`` block. This is useful if + you want to access the context locals for testing:: + + with app.test_client() as c: + rv = c.get('/?vodka=42') + assert request.args['vodka'] == '42' + + Additionally, you may pass optional keyword arguments that will then + be passed to the application's :attr:`test_client_class` constructor. + For example:: + + from flask.testing import FlaskClient + + class CustomClient(FlaskClient): + def __init__(self, *args, **kwargs): + self._authentication = kwargs.pop("authentication") + super(CustomClient,self).__init__( *args, **kwargs) + + app.test_client_class = CustomClient + client = app.test_client(authentication='Basic ....') + + See :class:`~flask.testing.FlaskClient` for more information. + + .. 
versionchanged:: 0.4 + added support for ``with`` block usage for the client. + + .. versionadded:: 0.7 + The `use_cookies` parameter was added as well as the ability + to override the client to be used by setting the + :attr:`test_client_class` attribute. + + .. versionchanged:: 0.11 + Added `**kwargs` to support passing additional keyword arguments to + the constructor of :attr:`test_client_class`. + """ + cls = self.test_client_class + if cls is None: + from .testing import FlaskClient as cls + return cls( # type: ignore + self, self.response_class, use_cookies=use_cookies, **kwargs + ) + + def test_cli_runner(self, **kwargs: t.Any) -> "FlaskCliRunner": + """Create a CLI runner for testing CLI commands. + See :ref:`testing-cli`. + + Returns an instance of :attr:`test_cli_runner_class`, by default + :class:`~flask.testing.FlaskCliRunner`. The Flask app object is + passed as the first argument. + + .. versionadded:: 1.0 + """ + cls = self.test_cli_runner_class + + if cls is None: + from .testing import FlaskCliRunner as cls + + return cls(self, **kwargs) # type: ignore + + @setupmethod + def register_blueprint(self, blueprint: "Blueprint", **options: t.Any) -> None: + """Register a :class:`~flask.Blueprint` on the application. Keyword + arguments passed to this method will override the defaults set on the + blueprint. + + Calls the blueprint's :meth:`~flask.Blueprint.register` method after + recording the blueprint in the application's :attr:`blueprints`. + + :param blueprint: The blueprint to register. + :param url_prefix: Blueprint routes will be prefixed with this. + :param subdomain: Blueprint routes will match on this subdomain. + :param url_defaults: Blueprint routes will use these default values for + view arguments. + :param options: Additional keyword arguments are passed to + :class:`~flask.blueprints.BlueprintSetupState`. They can be + accessed in :meth:`~flask.Blueprint.record` callbacks. + + .. versionchanged:: 2.0.1 + The ``name`` option can be used to change the (pre-dotted) + name the blueprint is registered with. This allows the same + blueprint to be registered multiple times with unique names + for ``url_for``. + + .. versionadded:: 0.7 + """ + blueprint.register(self, options) + + def iter_blueprints(self) -> t.ValuesView["Blueprint"]: + """Iterates over all blueprints by the order they were registered. + + .. versionadded:: 0.11 + """ + return self.blueprints.values() + + @setupmethod + def add_url_rule( + self, + rule: str, + endpoint: t.Optional[str] = None, + view_func: t.Optional[ft.RouteCallable] = None, + provide_automatic_options: t.Optional[bool] = None, + **options: t.Any, + ) -> None: + if endpoint is None: + endpoint = _endpoint_from_view_func(view_func) # type: ignore + options["endpoint"] = endpoint + methods = options.pop("methods", None) + + # if the methods are not given and the view_func object knows its + # methods we can use that instead. If neither exists, we go with + # a tuple of only ``GET`` as default. + if methods is None: + methods = getattr(view_func, "methods", None) or ("GET",) + if isinstance(methods, str): + raise TypeError( + "Allowed methods must be a list of strings, for" + ' example: @app.route(..., methods=["POST"])' + ) + methods = {item.upper() for item in methods} + + # Methods that should always be added + required_methods = set(getattr(view_func, "required_methods", ())) + + # starting with Flask 0.8 the view_func object can disable and + # force-enable the automatic options handling. 
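+        # For example (hypothetical route, sketch only): a view registered
+        # with
+        #
+        #     @app.route("/submit", methods=["post"])
+        #     def submit():
+        #         return "ok"
+        #
+        # arrives here with methods == {"POST"} after the upper-casing
+        # above; the block below then enables automatic OPTIONS handling
+        # and adds "OPTIONS" to required_methods, so the final rule
+        # answers both POST and OPTIONS requests.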
+ if provide_automatic_options is None: + provide_automatic_options = getattr( + view_func, "provide_automatic_options", None + ) + + if provide_automatic_options is None: + if "OPTIONS" not in methods: + provide_automatic_options = True + required_methods.add("OPTIONS") + else: + provide_automatic_options = False + + # Add the required methods now. + methods |= required_methods + + rule = self.url_rule_class(rule, methods=methods, **options) + rule.provide_automatic_options = provide_automatic_options # type: ignore + + self.url_map.add(rule) + if view_func is not None: + old_func = self.view_functions.get(endpoint) + if old_func is not None and old_func != view_func: + raise AssertionError( + "View function mapping is overwriting an existing" + f" endpoint function: {endpoint}" + ) + self.view_functions[endpoint] = view_func + + @setupmethod + def template_filter( + self, name: t.Optional[str] = None + ) -> t.Callable[[T_template_filter], T_template_filter]: + """A decorator that is used to register custom template filter. + You can specify a name for the filter, otherwise the function + name will be used. Example:: + + @app.template_filter() + def reverse(s): + return s[::-1] + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + + def decorator(f: T_template_filter) -> T_template_filter: + self.add_template_filter(f, name=name) + return f + + return decorator + + @setupmethod + def add_template_filter( + self, f: ft.TemplateFilterCallable, name: t.Optional[str] = None + ) -> None: + """Register a custom template filter. Works exactly like the + :meth:`template_filter` decorator. + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + self.jinja_env.filters[name or f.__name__] = f + + @setupmethod + def template_test( + self, name: t.Optional[str] = None + ) -> t.Callable[[T_template_test], T_template_test]: + """A decorator that is used to register custom template test. + You can specify a name for the test, otherwise the function + name will be used. Example:: + + @app.template_test() + def is_prime(n): + if n == 2: + return True + for i in range(2, int(math.ceil(math.sqrt(n))) + 1): + if n % i == 0: + return False + return True + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + + def decorator(f: T_template_test) -> T_template_test: + self.add_template_test(f, name=name) + return f + + return decorator + + @setupmethod + def add_template_test( + self, f: ft.TemplateTestCallable, name: t.Optional[str] = None + ) -> None: + """Register a custom template test. Works exactly like the + :meth:`template_test` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + self.jinja_env.tests[name or f.__name__] = f + + @setupmethod + def template_global( + self, name: t.Optional[str] = None + ) -> t.Callable[[T_template_global], T_template_global]: + """A decorator that is used to register a custom template global function. + You can specify a name for the global function, otherwise the function + name will be used. Example:: + + @app.template_global() + def double(n): + return 2 * n + + .. versionadded:: 0.10 + + :param name: the optional name of the global function, otherwise the + function name will be used. 
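+
+        Once registered, the global is available in every template, e.g.
+        ``{{ double(21) }}`` renders ``42``.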
+ """ + + def decorator(f: T_template_global) -> T_template_global: + self.add_template_global(f, name=name) + return f + + return decorator + + @setupmethod + def add_template_global( + self, f: ft.TemplateGlobalCallable, name: t.Optional[str] = None + ) -> None: + """Register a custom template global function. Works exactly like the + :meth:`template_global` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the global function, otherwise the + function name will be used. + """ + self.jinja_env.globals[name or f.__name__] = f + + @setupmethod + def before_first_request(self, f: T_before_first_request) -> T_before_first_request: + """Registers a function to be run before the first request to this + instance of the application. + + The function will be called without any arguments and its return + value is ignored. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. Run setup code when creating + the application instead. + + .. versionadded:: 0.8 + """ + import warnings + + warnings.warn( + "'before_first_request' is deprecated and will be removed" + " in Flask 2.3. Run setup code while creating the" + " application instead.", + DeprecationWarning, + stacklevel=2, + ) + self.before_first_request_funcs.append(f) + return f + + @setupmethod + def teardown_appcontext(self, f: T_teardown) -> T_teardown: + """Registers a function to be called when the application + context is popped. The application context is typically popped + after the request context for each request, at the end of CLI + commands, or after a manually pushed context ends. + + .. code-block:: python + + with app.app_context(): + ... + + When the ``with`` block exits (or ``ctx.pop()`` is called), the + teardown functions are called just before the app context is + made inactive. Since a request context typically also manages an + application context it would also be called when you pop a + request context. + + When a teardown function was called because of an unhandled + exception it will be passed an error object. If an + :meth:`errorhandler` is registered, it will handle the exception + and the teardown will not receive it. + + Teardown functions must avoid raising exceptions. If they + execute code that might fail they must surround that code with a + ``try``/``except`` block and log any errors. + + The return values of teardown functions are ignored. + + .. versionadded:: 0.9 + """ + self.teardown_appcontext_funcs.append(f) + return f + + @setupmethod + def shell_context_processor( + self, f: T_shell_context_processor + ) -> T_shell_context_processor: + """Registers a shell context processor function. + + .. versionadded:: 0.11 + """ + self.shell_context_processors.append(f) + return f + + def _find_error_handler(self, e: Exception) -> t.Optional[ft.ErrorHandlerCallable]: + """Return a registered error handler for an exception in this order: + blueprint handler for a specific code, app handler for a specific code, + blueprint handler for an exception class, app handler for an exception + class, or ``None`` if a suitable handler is not found. 
+ """ + exc_class, code = self._get_exc_class_and_code(type(e)) + names = (*request.blueprints, None) + + for c in (code, None) if code is not None else (None,): + for name in names: + handler_map = self.error_handler_spec[name][c] + + if not handler_map: + continue + + for cls in exc_class.__mro__: + handler = handler_map.get(cls) + + if handler is not None: + return handler + return None + + def handle_http_exception( + self, e: HTTPException + ) -> t.Union[HTTPException, ft.ResponseReturnValue]: + """Handles an HTTP exception. By default this will invoke the + registered error handlers and fall back to returning the + exception as response. + + .. versionchanged:: 1.0.3 + ``RoutingException``, used internally for actions such as + slash redirects during routing, is not passed to error + handlers. + + .. versionchanged:: 1.0 + Exceptions are looked up by code *and* by MRO, so + ``HTTPException`` subclasses can be handled with a catch-all + handler for the base ``HTTPException``. + + .. versionadded:: 0.3 + """ + # Proxy exceptions don't have error codes. We want to always return + # those unchanged as errors + if e.code is None: + return e + + # RoutingExceptions are used internally to trigger routing + # actions, such as slash redirects raising RequestRedirect. They + # are not raised or handled in user code. + if isinstance(e, RoutingException): + return e + + handler = self._find_error_handler(e) + if handler is None: + return e + return self.ensure_sync(handler)(e) + + def trap_http_exception(self, e: Exception) -> bool: + """Checks if an HTTP exception should be trapped or not. By default + this will return ``False`` for all exceptions except for a bad request + key error if ``TRAP_BAD_REQUEST_ERRORS`` is set to ``True``. It + also returns ``True`` if ``TRAP_HTTP_EXCEPTIONS`` is set to ``True``. + + This is called for all HTTP exceptions raised by a view function. + If it returns ``True`` for any exception the error handler for this + exception is not called and it shows up as regular exception in the + traceback. This is helpful for debugging implicitly raised HTTP + exceptions. + + .. versionchanged:: 1.0 + Bad request errors are not trapped by default in debug mode. + + .. versionadded:: 0.8 + """ + if self.config["TRAP_HTTP_EXCEPTIONS"]: + return True + + trap_bad_request = self.config["TRAP_BAD_REQUEST_ERRORS"] + + # if unset, trap key errors in debug mode + if ( + trap_bad_request is None + and self.debug + and isinstance(e, BadRequestKeyError) + ): + return True + + if trap_bad_request: + return isinstance(e, BadRequest) + + return False + + def handle_user_exception( + self, e: Exception + ) -> t.Union[HTTPException, ft.ResponseReturnValue]: + """This method is called whenever an exception occurs that + should be handled. A special case is :class:`~werkzeug + .exceptions.HTTPException` which is forwarded to the + :meth:`handle_http_exception` method. This function will either + return a response value or reraise the exception with the same + traceback. + + .. versionchanged:: 1.0 + Key errors raised from request data like ``form`` show the + bad key in debug mode rather than a generic bad request + message. + + .. 
versionadded:: 0.7 + """ + if isinstance(e, BadRequestKeyError) and ( + self.debug or self.config["TRAP_BAD_REQUEST_ERRORS"] + ): + e.show_exception = True + + if isinstance(e, HTTPException) and not self.trap_http_exception(e): + return self.handle_http_exception(e) + + handler = self._find_error_handler(e) + + if handler is None: + raise + + return self.ensure_sync(handler)(e) + + def handle_exception(self, e: Exception) -> Response: + """Handle an exception that did not have an error handler + associated with it, or that was raised from an error handler. + This always causes a 500 ``InternalServerError``. + + Always sends the :data:`got_request_exception` signal. + + If :attr:`propagate_exceptions` is ``True``, such as in debug + mode, the error will be re-raised so that the debugger can + display it. Otherwise, the original exception is logged, and + an :exc:`~werkzeug.exceptions.InternalServerError` is returned. + + If an error handler is registered for ``InternalServerError`` or + ``500``, it will be used. For consistency, the handler will + always receive the ``InternalServerError``. The original + unhandled exception is available as ``e.original_exception``. + + .. versionchanged:: 1.1.0 + Always passes the ``InternalServerError`` instance to the + handler, setting ``original_exception`` to the unhandled + error. + + .. versionchanged:: 1.1.0 + ``after_request`` functions and other finalization is done + even for the default 500 response when there is no handler. + + .. versionadded:: 0.3 + """ + exc_info = sys.exc_info() + got_request_exception.send(self, exception=e) + propagate = self.config["PROPAGATE_EXCEPTIONS"] + + if propagate is None: + propagate = self.testing or self.debug + + if propagate: + # Re-raise if called with an active exception, otherwise + # raise the passed in exception. + if exc_info[1] is e: + raise + + raise e + + self.log_exception(exc_info) + server_error: t.Union[InternalServerError, ft.ResponseReturnValue] + server_error = InternalServerError(original_exception=e) + handler = self._find_error_handler(server_error) + + if handler is not None: + server_error = self.ensure_sync(handler)(server_error) + + return self.finalize_request(server_error, from_error_handler=True) + + def log_exception( + self, + exc_info: t.Union[ + t.Tuple[type, BaseException, TracebackType], t.Tuple[None, None, None] + ], + ) -> None: + """Logs an exception. This is called by :meth:`handle_exception` + if debugging is disabled and right before the handler is called. + The default implementation logs the exception as error on the + :attr:`logger`. + + .. versionadded:: 0.8 + """ + self.logger.error( + f"Exception on {request.path} [{request.method}]", exc_info=exc_info + ) + + def raise_routing_exception(self, request: Request) -> "te.NoReturn": + """Intercept routing exceptions and possibly do something else. + + In debug mode, intercept a routing redirect and replace it with + an error if the body will be discarded. + + With modern Werkzeug this shouldn't occur, since it now uses a + 308 status which tells the browser to resend the method and + body. + + .. versionchanged:: 2.1 + Don't intercept 307 and 308 redirects. 
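+
+        For example, on older Werkzeug versions a ``POST`` to a
+        hypothetical ``/submit`` where the rule is ``/submit/`` produced
+        a 301 redirect, which browsers follow with ``GET``, silently
+        discarding the form body; in debug mode this method raises
+        ``FormDataRoutingRedirect`` instead so the mistake is visible.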
+ + :meta private: + :internal: + """ + if ( + not self.debug + or not isinstance(request.routing_exception, RequestRedirect) + or request.routing_exception.code in {307, 308} + or request.method in {"GET", "HEAD", "OPTIONS"} + ): + raise request.routing_exception # type: ignore + + from .debughelpers import FormDataRoutingRedirect + + raise FormDataRoutingRedirect(request) + + def dispatch_request(self) -> ft.ResponseReturnValue: + """Does the request dispatching. Matches the URL and returns the + return value of the view or error handler. This does not have to + be a response object. In order to convert the return value to a + proper response object, call :func:`make_response`. + + .. versionchanged:: 0.7 + This no longer does the exception handling, this code was + moved to the new :meth:`full_dispatch_request`. + """ + req = request_ctx.request + if req.routing_exception is not None: + self.raise_routing_exception(req) + rule: Rule = req.url_rule # type: ignore[assignment] + # if we provide automatic options for this URL and the + # request came with the OPTIONS method, reply automatically + if ( + getattr(rule, "provide_automatic_options", False) + and req.method == "OPTIONS" + ): + return self.make_default_options_response() + # otherwise dispatch to the handler for that endpoint + view_args: t.Dict[str, t.Any] = req.view_args # type: ignore[assignment] + return self.ensure_sync(self.view_functions[rule.endpoint])(**view_args) + + def full_dispatch_request(self) -> Response: + """Dispatches the request and on top of that performs request + pre and postprocessing as well as HTTP exception catching and + error handling. + + .. versionadded:: 0.7 + """ + # Run before_first_request functions if this is the thread's first request. + # Inlined to avoid a method call on subsequent requests. + # This is deprecated, will be removed in Flask 2.3. + if not self._got_first_request: + with self._before_request_lock: + if not self._got_first_request: + for func in self.before_first_request_funcs: + self.ensure_sync(func)() + + self._got_first_request = True + + try: + request_started.send(self) + rv = self.preprocess_request() + if rv is None: + rv = self.dispatch_request() + except Exception as e: + rv = self.handle_user_exception(e) + return self.finalize_request(rv) + + def finalize_request( + self, + rv: t.Union[ft.ResponseReturnValue, HTTPException], + from_error_handler: bool = False, + ) -> Response: + """Given the return value from a view function this finalizes + the request by converting it into a response and invoking the + postprocessing functions. This is invoked for both normal + request dispatching as well as error handlers. + + Because this means that it might be called as a result of a + failure a special safe mode is available which can be enabled + with the `from_error_handler` flag. If enabled, failures in + response processing will be logged and otherwise ignored. + + :internal: + """ + response = self.make_response(rv) + try: + response = self.process_response(response) + request_finished.send(self, response=response) + except Exception: + if not from_error_handler: + raise + self.logger.exception( + "Request finalizing failed with an error while handling an error" + ) + return response + + def make_default_options_response(self) -> Response: + """This method is called to create the default ``OPTIONS`` response. + This can be changed through subclassing to change the default + behavior of ``OPTIONS`` responses. + + .. 
versionadded:: 0.7 + """ + adapter = request_ctx.url_adapter + methods = adapter.allowed_methods() # type: ignore[union-attr] + rv = self.response_class() + rv.allow.update(methods) + return rv + + def should_ignore_error(self, error: t.Optional[BaseException]) -> bool: + """This is called to figure out if an error should be ignored + or not as far as the teardown system is concerned. If this + function returns ``True`` then the teardown handlers will not be + passed the error. + + .. versionadded:: 0.10 + """ + return False + + def ensure_sync(self, func: t.Callable) -> t.Callable: + """Ensure that the function is synchronous for WSGI workers. + Plain ``def`` functions are returned as-is. ``async def`` + functions are wrapped to run and wait for the response. + + Override this method to change how the app runs async views. + + .. versionadded:: 2.0 + """ + if iscoroutinefunction(func): + return self.async_to_sync(func) + + return func + + def async_to_sync( + self, func: t.Callable[..., t.Coroutine] + ) -> t.Callable[..., t.Any]: + """Return a sync function that will run the coroutine function. + + .. code-block:: python + + result = app.async_to_sync(func)(*args, **kwargs) + + Override this method to change how the app converts async code + to be synchronously callable. + + .. versionadded:: 2.0 + """ + try: + from asgiref.sync import async_to_sync as asgiref_async_to_sync + except ImportError: + raise RuntimeError( + "Install Flask with the 'async' extra in order to use async views." + ) from None + + return asgiref_async_to_sync(func) + + def url_for( + self, + endpoint: str, + *, + _anchor: t.Optional[str] = None, + _method: t.Optional[str] = None, + _scheme: t.Optional[str] = None, + _external: t.Optional[bool] = None, + **values: t.Any, + ) -> str: + """Generate a URL to the given endpoint with the given values. + + This is called by :func:`flask.url_for`, and can be called + directly as well. + + An *endpoint* is the name of a URL rule, usually added with + :meth:`@app.route() <Flask.route>`, and usually the same name as the + view function. A route defined in a :class:`~flask.Blueprint` + will prepend the blueprint's name separated by a ``.`` to the + endpoint. + + In some cases, such as email messages, you want URLs to include + the scheme and domain, like ``https://example.com/hello``. When + not in an active request, URLs will be external by default, but + this requires setting :data:`SERVER_NAME` so Flask knows what + domain to use. :data:`APPLICATION_ROOT` and + :data:`PREFERRED_URL_SCHEME` should also be configured as + needed. This config is only used when not in an active request. + + Functions can be decorated with :meth:`url_defaults` to modify + keyword arguments before the URL is built. + + If building fails for some reason, such as an unknown endpoint + or incorrect values, the app's :meth:`handle_url_build_error` + method is called. If that returns a string, that is returned, + otherwise a :exc:`~werkzeug.routing.BuildError` is raised. + + :param endpoint: The endpoint name associated with the URL to + generate. If this starts with a ``.``, the current blueprint + name (if any) will be used. + :param _anchor: If given, append this as ``#anchor`` to the URL. + :param _method: If given, generate the URL associated with this + method for the endpoint. + :param _scheme: If given, the URL will have this scheme if it + is external. + :param _external: If given, prefer the URL to be internal + (False) or require it to be external (True). External URLs + include the scheme and domain. 
When not in an active + request, URLs are external by default. + :param values: Values to use for the variable parts of the URL + rule. Unknown keys are appended as query string arguments, + like ``?a=b&c=d``. + + .. versionadded:: 2.2 + Moved from ``flask.url_for``, which calls this method. + """ + req_ctx = _cv_request.get(None) + + if req_ctx is not None: + url_adapter = req_ctx.url_adapter + blueprint_name = req_ctx.request.blueprint + + # If the endpoint starts with "." and the request matches a + # blueprint, the endpoint is relative to the blueprint. + if endpoint[:1] == ".": + if blueprint_name is not None: + endpoint = f"{blueprint_name}{endpoint}" + else: + endpoint = endpoint[1:] + + # When in a request, generate a URL without scheme and + # domain by default, unless a scheme is given. + if _external is None: + _external = _scheme is not None + else: + app_ctx = _cv_app.get(None) + + # If called by helpers.url_for, an app context is active, + # use its url_adapter. Otherwise, app.url_for was called + # directly, build an adapter. + if app_ctx is not None: + url_adapter = app_ctx.url_adapter + else: + url_adapter = self.create_url_adapter(None) + + if url_adapter is None: + raise RuntimeError( + "Unable to build URLs outside an active request" + " without 'SERVER_NAME' configured. Also configure" + " 'APPLICATION_ROOT' and 'PREFERRED_URL_SCHEME' as" + " needed." + ) + + # When outside a request, generate a URL with scheme and + # domain by default. + if _external is None: + _external = True + + # It is an error to set _scheme when _external=False, in order + # to avoid accidental insecure URLs. + if _scheme is not None and not _external: + raise ValueError("When specifying '_scheme', '_external' must be True.") + + self.inject_url_defaults(endpoint, values) + + try: + rv = url_adapter.build( # type: ignore[union-attr] + endpoint, + values, + method=_method, + url_scheme=_scheme, + force_external=_external, + ) + except BuildError as error: + values.update( + _anchor=_anchor, _method=_method, _scheme=_scheme, _external=_external + ) + return self.handle_url_build_error(error, endpoint, values) + + if _anchor is not None: + _anchor = _url_quote(_anchor, safe="%!#$&'()*+,/:;=?@") + rv = f"{rv}#{_anchor}" + + return rv + + def redirect(self, location: str, code: int = 302) -> BaseResponse: + """Create a redirect response object. + + This is called by :func:`flask.redirect`, and can be called + directly as well. + + :param location: The URL to redirect to. + :param code: The status code for the redirect. + + .. versionadded:: 2.2 + Moved from ``flask.redirect``, which calls this method. + """ + return _wz_redirect(location, code=code, Response=self.response_class) + + def make_response(self, rv: ft.ResponseReturnValue) -> Response: + """Convert the return value from a view function to an instance of + :attr:`response_class`. + + :param rv: the return value from the view function. The view function + must return a response. Returning ``None``, or the view ending + without returning, is not allowed. The following types are allowed + for ``view_rv``: + + ``str`` + A response object is created with the string encoded to UTF-8 + as the body. + + ``bytes`` + A response object is created with the bytes as the body. + + ``dict`` + A dictionary that will be jsonify'd before being returned. + + ``list`` + A list that will be jsonify'd before being returned. + + ``generator`` or ``iterator`` + A generator that returns ``str`` or ``bytes`` to be + streamed as the response. 
+ + ``tuple`` + Either ``(body, status, headers)``, ``(body, status)``, or + ``(body, headers)``, where ``body`` is any of the other types + allowed here, ``status`` is a string or an integer, and + ``headers`` is a dictionary or a list of ``(key, value)`` + tuples. If ``body`` is a :attr:`response_class` instance, + ``status`` overwrites the existing value and ``headers`` are + extended. + + :attr:`response_class` + The object is returned unchanged. + + other :class:`~werkzeug.wrappers.Response` class + The object is coerced to :attr:`response_class`. + + :func:`callable` + The function is called as a WSGI application. The result is + used to create a response object. + + .. versionchanged:: 2.2 + A generator will be converted to a streaming response. + A list will be converted to a JSON response. + + .. versionchanged:: 1.1 + A dict will be converted to a JSON response. + + .. versionchanged:: 0.9 + Previously a tuple was interpreted as the arguments for the + response object. + """ + + status = headers = None + + # unpack tuple returns + if isinstance(rv, tuple): + len_rv = len(rv) + + # a 3-tuple is unpacked directly + if len_rv == 3: + rv, status, headers = rv # type: ignore[misc] + # decide if a 2-tuple has status or headers + elif len_rv == 2: + if isinstance(rv[1], (Headers, dict, tuple, list)): + rv, headers = rv + else: + rv, status = rv # type: ignore[assignment,misc] + # other sized tuples are not allowed + else: + raise TypeError( + "The view function did not return a valid response tuple." + " The tuple must have the form (body, status, headers)," + " (body, status), or (body, headers)." + ) + + # the body must not be None + if rv is None: + raise TypeError( + f"The view function for {request.endpoint!r} did not" + " return a valid response. The function either returned" + " None or ended without a return statement." + ) + + # make sure the body is an instance of the response class + if not isinstance(rv, self.response_class): + if isinstance(rv, (str, bytes, bytearray)) or isinstance(rv, _abc_Iterator): + # let the response class set the status and headers instead of + # waiting to do it manually, so that the class can handle any + # special logic + rv = self.response_class( + rv, + status=status, + headers=headers, # type: ignore[arg-type] + ) + status = headers = None + elif isinstance(rv, (dict, list)): + rv = self.json.response(rv) + elif isinstance(rv, BaseResponse) or callable(rv): + # evaluate a WSGI callable, or coerce a different response + # class to the correct type + try: + rv = self.response_class.force_type( + rv, request.environ # type: ignore[arg-type] + ) + except TypeError as e: + raise TypeError( + f"{e}\nThe view function did not return a valid" + " response. The return type must be a string," + " dict, list, tuple with headers or status," + " Response instance, or WSGI callable, but it" + f" was a {type(rv).__name__}." + ).with_traceback(sys.exc_info()[2]) from None + else: + raise TypeError( + "The view function did not return a valid" + " response. The return type must be a string," + " dict, list, tuple with headers or status," + " Response instance, or WSGI callable, but it was a" + f" {type(rv).__name__}." 
+ ) + + rv = t.cast(Response, rv) + # prefer the status if it was provided + if status is not None: + if isinstance(status, (str, bytes, bytearray)): + rv.status = status + else: + rv.status_code = status + + # extend existing headers with provided headers + if headers: + rv.headers.update(headers) # type: ignore[arg-type] + + return rv + + def create_url_adapter( + self, request: t.Optional[Request] + ) -> t.Optional[MapAdapter]: + """Creates a URL adapter for the given request. The URL adapter + is created at a point where the request context is not yet set + up so the request is passed explicitly. + + .. versionadded:: 0.6 + + .. versionchanged:: 0.9 + This can now also be called without a request object when the + URL adapter is created for the application context. + + .. versionchanged:: 1.0 + :data:`SERVER_NAME` no longer implicitly enables subdomain + matching. Use :attr:`subdomain_matching` instead. + """ + if request is not None: + # If subdomain matching is disabled (the default), use the + # default subdomain in all cases. This should be the default + # in Werkzeug but it currently does not have that feature. + if not self.subdomain_matching: + subdomain = self.url_map.default_subdomain or None + else: + subdomain = None + + return self.url_map.bind_to_environ( + request.environ, + server_name=self.config["SERVER_NAME"], + subdomain=subdomain, + ) + # We need at the very least the server name to be set for this + # to work. + if self.config["SERVER_NAME"] is not None: + return self.url_map.bind( + self.config["SERVER_NAME"], + script_name=self.config["APPLICATION_ROOT"], + url_scheme=self.config["PREFERRED_URL_SCHEME"], + ) + + return None + + def inject_url_defaults(self, endpoint: str, values: dict) -> None: + """Injects the URL defaults for the given endpoint directly into + the values dictionary passed. This is used internally and + automatically called on URL building. + + .. versionadded:: 0.7 + """ + names: t.Iterable[t.Optional[str]] = (None,) + + # url_for may be called outside a request context, parse the + # passed endpoint instead of using request.blueprints. + if "." in endpoint: + names = chain( + names, reversed(_split_blueprint_path(endpoint.rpartition(".")[0])) + ) + + for name in names: + if name in self.url_default_functions: + for func in self.url_default_functions[name]: + func(endpoint, values) + + def handle_url_build_error( + self, error: BuildError, endpoint: str, values: t.Dict[str, t.Any] + ) -> str: + """Called by :meth:`.url_for` if a + :exc:`~werkzeug.routing.BuildError` was raised. If this returns + a value, it will be returned by ``url_for``, otherwise the error + will be re-raised. + + Each function in :attr:`url_build_error_handlers` is called with + ``error``, ``endpoint`` and ``values``. If a function returns + ``None`` or raises a ``BuildError``, it is skipped. Otherwise, + its return value is returned by ``url_for``. + + :param error: The active ``BuildError`` being handled. + :param endpoint: The endpoint being built. + :param values: The keyword arguments passed to ``url_for``. + """ + for handler in self.url_build_error_handlers: + try: + rv = handler(error, endpoint, values) + except BuildError as e: + # make error available outside except block + error = e + else: + if rv is not None: + return rv + + # Re-raise if called with an active exception, otherwise raise + # the passed in exception. 
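+ # A hedged sketch of a handler that could live in
+ # app.url_build_error_handlers; "EXTERNAL_URLS" is an assumed,
+ # hypothetical mapping, not part of Flask:
+ #
+ #     def external_fallback(error, endpoint, values):
+ #         # returning None (or raising BuildError) skips this handler
+ #         return EXTERNAL_URLS.get(endpoint)
+ #
+ #     app.url_build_error_handlers.append(external_fallback)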
+ + if error is sys.exc_info()[1]: + raise + + raise error + + def preprocess_request(self) -> t.Optional[ft.ResponseReturnValue]: + """Called before the request is dispatched. Calls + :attr:`url_value_preprocessors` registered with the app and the + current blueprint (if any). Then calls :attr:`before_request_funcs` + registered with the app and the blueprint. + + If any :meth:`before_request` handler returns a non-None value, the + value is handled as if it was the return value from the view, and + further request handling is stopped. + """ + names = (None, *reversed(request.blueprints)) + + for name in names: + if name in self.url_value_preprocessors: + for url_func in self.url_value_preprocessors[name]: + url_func(request.endpoint, request.view_args) + + for name in names: + if name in self.before_request_funcs: + for before_func in self.before_request_funcs[name]: + rv = self.ensure_sync(before_func)() + + if rv is not None: + return rv + + return None + + def process_response(self, response: Response) -> Response: + """Can be overridden in order to modify the response object + before it's sent to the WSGI server. By default this will + call all the :meth:`after_request` decorated functions. + + .. versionchanged:: 0.5 + As of Flask 0.5 the functions registered for after request + execution are called in reverse order of registration. + + :param response: a :attr:`response_class` object. + :return: a new response object or the same, has to be an + instance of :attr:`response_class`. + """ + ctx = request_ctx._get_current_object() # type: ignore[attr-defined] + + for func in ctx._after_request_functions: + response = self.ensure_sync(func)(response) + + for name in chain(request.blueprints, (None,)): + if name in self.after_request_funcs: + for func in reversed(self.after_request_funcs[name]): + response = self.ensure_sync(func)(response) + + if not self.session_interface.is_null_session(ctx.session): + self.session_interface.save_session(self, ctx.session, response) + + return response + + def do_teardown_request( + self, exc: t.Optional[BaseException] = _sentinel # type: ignore + ) -> None: + """Called after the request is dispatched and the response is + returned, right before the request context is popped. + + This calls all functions decorated with + :meth:`teardown_request`, and :meth:`Blueprint.teardown_request` + if a blueprint handled the request. Finally, the + :data:`request_tearing_down` signal is sent. + + This is called by + :meth:`RequestContext.pop() <flask.ctx.RequestContext.pop>`, + which may be delayed during testing to maintain access to + resources. + + :param exc: An unhandled exception raised while dispatching the + request. Detected from the current exception information if + not passed. Passed to each teardown function. + + .. versionchanged:: 0.9 + Added the ``exc`` argument. + """ + if exc is _sentinel: + exc = sys.exc_info()[1] + + for name in chain(request.blueprints, (None,)): + if name in self.teardown_request_funcs: + for func in reversed(self.teardown_request_funcs[name]): + self.ensure_sync(func)(exc) + + request_tearing_down.send(self, exc=exc) + + def do_teardown_appcontext( + self, exc: t.Optional[BaseException] = _sentinel # type: ignore + ) -> None: + """Called right before the application context is popped. + + When handling a request, the application context is popped + after the request context. See :meth:`do_teardown_request`. + + This calls all functions decorated with + :meth:`teardown_appcontext`. Then the + :data:`appcontext_tearing_down` signal is sent. 
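+
+ For illustration, a sketch of a function this would call (the
+ name and the ``db`` session object are assumptions)::
+
+     @app.teardown_appcontext
+     def shutdown_session(exc):
+         # exc is the unhandled exception, or None
+         db.session.remove()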
+ + This is called by + :meth:`AppContext.pop() <flask.ctx.AppContext.pop>`. + + .. versionadded:: 0.9 + """ + if exc is _sentinel: + exc = sys.exc_info()[1] + + for func in reversed(self.teardown_appcontext_funcs): + self.ensure_sync(func)(exc) + + appcontext_tearing_down.send(self, exc=exc) + + def app_context(self) -> AppContext: + """Create an :class:`~flask.ctx.AppContext`. Use as a ``with`` + block to push the context, which will make :data:`current_app` + point at this application. + + An application context is automatically pushed by + :meth:`RequestContext.push() <flask.ctx.RequestContext.push>` + when handling a request, and when running a CLI command. Use + this to manually create a context outside of these situations. + + :: + + with app.app_context(): + init_db() + + See :doc:`/appcontext`. + + .. versionadded:: 0.9 + """ + return AppContext(self) + + def request_context(self, environ: dict) -> RequestContext: + """Create a :class:`~flask.ctx.RequestContext` representing a + WSGI environment. Use a ``with`` block to push the context, + which will make :data:`request` point at this request. + + See :doc:`/reqcontext`. + + Typically you should not call this from your own code. A request + context is automatically pushed by the :meth:`wsgi_app` when + handling a request. Use :meth:`test_request_context` to create + an environment and context instead of this method. + + :param environ: a WSGI environment + """ + return RequestContext(self, environ) + + def test_request_context(self, *args: t.Any, **kwargs: t.Any) -> RequestContext: + """Create a :class:`~flask.ctx.RequestContext` for a WSGI + environment created from the given values. This is mostly useful + during testing, where you may want to run a function that uses + request data without dispatching a full request. + + See :doc:`/reqcontext`. + + Use a ``with`` block to push the context, which will make + :data:`request` point at the request for the created + environment. :: + + with test_request_context(...): + generate_report() + + When using the shell, it may be easier to push and pop the + context manually to avoid indentation. :: + + ctx = app.test_request_context(...) + ctx.push() + ... + ctx.pop() + + Takes the same arguments as Werkzeug's + :class:`~werkzeug.test.EnvironBuilder`, with some defaults from + the application. See the linked Werkzeug docs for most of the + available arguments. Flask-specific behavior is listed here. + + :param path: URL path being requested. + :param base_url: Base URL where the app is being served, which + ``path`` is relative to. If not given, built from + :data:`PREFERRED_URL_SCHEME`, ``subdomain``, + :data:`SERVER_NAME`, and :data:`APPLICATION_ROOT`. + :param subdomain: Subdomain name to append to + :data:`SERVER_NAME`. + :param url_scheme: Scheme to use instead of + :data:`PREFERRED_URL_SCHEME`. + :param data: The request body, either as a string or a dict of + form keys and values. + :param json: If given, this is serialized as JSON and passed as + ``data``. Also defaults ``content_type`` to + ``application/json``. + :param args: other positional arguments passed to + :class:`~werkzeug.test.EnvironBuilder`. + :param kwargs: other keyword arguments passed to + :class:`~werkzeug.test.EnvironBuilder`. + """ + from .testing import EnvironBuilder + + builder = EnvironBuilder(self, *args, **kwargs) + + try: + return self.request_context(builder.get_environ()) + finally: + builder.close() + + def wsgi_app(self, environ: dict, start_response: t.Callable) -> t.Any: + """The actual WSGI application. 
This is not implemented in + :meth:`__call__` so that middlewares can be applied without + losing a reference to the app object. Instead of doing this:: + + app = MyMiddleware(app) + + It's a better idea to do this instead:: + + app.wsgi_app = MyMiddleware(app.wsgi_app) + + Then you still have the original application object around and + can continue to call methods on it. + + .. versionchanged:: 0.7 + Teardown events for the request and app contexts are called + even if an unhandled error occurs. Other events may not be + called depending on when an error occurs during dispatch. + See :ref:`callbacks-and-errors`. + + :param environ: A WSGI environment. + :param start_response: A callable accepting a status code, + a list of headers, and an optional exception context to + start the response. + """ + ctx = self.request_context(environ) + error: t.Optional[BaseException] = None + try: + try: + ctx.push() + response = self.full_dispatch_request() + except Exception as e: + error = e + response = self.handle_exception(e) + except: # noqa: B001 + error = sys.exc_info()[1] + raise + return response(environ, start_response) + finally: + if "werkzeug.debug.preserve_context" in environ: + environ["werkzeug.debug.preserve_context"](_cv_app.get()) + environ["werkzeug.debug.preserve_context"](_cv_request.get()) + + if error is not None and self.should_ignore_error(error): + error = None + + ctx.pop(error) + + def __call__(self, environ: dict, start_response: t.Callable) -> t.Any: + """The WSGI server calls the Flask application object as the + WSGI application. This calls :meth:`wsgi_app`, which can be + wrapped to apply middleware. + """ + return self.wsgi_app(environ, start_response) diff --git a/venv/lib/python3.11/site-packages/flask/blueprints.py b/venv/lib/python3.11/site-packages/flask/blueprints.py new file mode 100644 index 0000000..eb66423 --- /dev/null +++ b/venv/lib/python3.11/site-packages/flask/blueprints.py @@ -0,0 +1,712 @@ +import json +import os +import typing as t +from collections import defaultdict +from functools import update_wrapper + +from . import typing as ft +from .scaffold import _endpoint_from_view_func +from .scaffold import _sentinel +from .scaffold import Scaffold +from .scaffold import setupmethod + +if t.TYPE_CHECKING: # pragma: no cover + from .app import Flask + +DeferredSetupFunction = t.Callable[["BlueprintSetupState"], t.Callable] +T_after_request = t.TypeVar("T_after_request", bound=ft.AfterRequestCallable) +T_before_first_request = t.TypeVar( + "T_before_first_request", bound=ft.BeforeFirstRequestCallable +) +T_before_request = t.TypeVar("T_before_request", bound=ft.BeforeRequestCallable) +T_error_handler = t.TypeVar("T_error_handler", bound=ft.ErrorHandlerCallable) +T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) +T_template_context_processor = t.TypeVar( + "T_template_context_processor", bound=ft.TemplateContextProcessorCallable +) +T_template_filter = t.TypeVar("T_template_filter", bound=ft.TemplateFilterCallable) +T_template_global = t.TypeVar("T_template_global", bound=ft.TemplateGlobalCallable) +T_template_test = t.TypeVar("T_template_test", bound=ft.TemplateTestCallable) +T_url_defaults = t.TypeVar("T_url_defaults", bound=ft.URLDefaultCallable) +T_url_value_preprocessor = t.TypeVar( + "T_url_value_preprocessor", bound=ft.URLValuePreprocessorCallable +) + + +class BlueprintSetupState: + """Temporary holder object for registering a blueprint with the + application. 
An instance of this class is created by the + :meth:`~flask.Blueprint.make_setup_state` method and later passed + to all register callback functions. + """ + + def __init__( + self, + blueprint: "Blueprint", + app: "Flask", + options: t.Any, + first_registration: bool, + ) -> None: + #: a reference to the current application + self.app = app + + #: a reference to the blueprint that created this setup state. + self.blueprint = blueprint + + #: a dictionary with all options that were passed to the + #: :meth:`~flask.Flask.register_blueprint` method. + self.options = options + + #: as blueprints can be registered multiple times with the + #: application and not everything wants to be registered + #: multiple times on it, this attribute can be used to figure + #: out if the blueprint was registered in the past already. + self.first_registration = first_registration + + subdomain = self.options.get("subdomain") + if subdomain is None: + subdomain = self.blueprint.subdomain + + #: The subdomain that the blueprint should be active for, ``None`` + #: otherwise. + self.subdomain = subdomain + + url_prefix = self.options.get("url_prefix") + if url_prefix is None: + url_prefix = self.blueprint.url_prefix + #: The prefix that should be used for all URLs defined on the + #: blueprint. + self.url_prefix = url_prefix + + self.name = self.options.get("name", blueprint.name) + self.name_prefix = self.options.get("name_prefix", "") + + #: A dictionary with URL defaults that is added to each and every + #: URL that was defined with the blueprint. + self.url_defaults = dict(self.blueprint.url_values_defaults) + self.url_defaults.update(self.options.get("url_defaults", ())) + + def add_url_rule( + self, + rule: str, + endpoint: t.Optional[str] = None, + view_func: t.Optional[t.Callable] = None, + **options: t.Any, + ) -> None: + """A helper method to register a rule (and optionally a view function) + to the application. The endpoint is automatically prefixed with the + blueprint's name. + """ + if self.url_prefix is not None: + if rule: + rule = "/".join((self.url_prefix.rstrip("/"), rule.lstrip("/"))) + else: + rule = self.url_prefix + options.setdefault("subdomain", self.subdomain) + if endpoint is None: + endpoint = _endpoint_from_view_func(view_func) # type: ignore + defaults = self.url_defaults + if "defaults" in options: + defaults = dict(defaults, **options.pop("defaults")) + + self.app.add_url_rule( + rule, + f"{self.name_prefix}.{self.name}.{endpoint}".lstrip("."), + view_func, + defaults=defaults, + **options, + ) + + +class Blueprint(Scaffold): + """Represents a blueprint, a collection of routes and other + app-related functions that can be registered on a real application + later. + + A blueprint is an object that allows defining application functions + without requiring an application object ahead of time. It uses the + same decorators as :class:`~flask.Flask`, but defers the need for an + application by recording them for later registration. + + Decorating a function with a blueprint creates a deferred function + that is called with :class:`~flask.blueprints.BlueprintSetupState` + when the blueprint is registered on an application. + + See :doc:`/blueprints` for more information. + + :param name: The name of the blueprint. Will be prepended to each + endpoint name. + :param import_name: The name of the blueprint package, usually + ``__name__``. This helps locate the ``root_path`` for the + blueprint. 
+ :param static_folder: A folder with static files that should be + served by the blueprint's static route. The path is relative to + the blueprint's root path. Blueprint static files are disabled + by default. + :param static_url_path: The url to serve static files from. + Defaults to ``static_folder``. If the blueprint does not have + a ``url_prefix``, the app's static route will take precedence, + and the blueprint's static files won't be accessible. + :param template_folder: A folder with templates that should be added + to the app's template search path. The path is relative to the + blueprint's root path. Blueprint templates are disabled by + default. Blueprint templates have a lower precedence than those + in the app's templates folder. + :param url_prefix: A path to prepend to all of the blueprint's URLs, + to make them distinct from the rest of the app's routes. + :param subdomain: A subdomain that blueprint routes will match on by + default. + :param url_defaults: A dict of default values that blueprint routes + will receive by default. + :param root_path: By default, the blueprint will automatically set + this based on ``import_name``. In certain situations this + automatic detection can fail, so the path can be specified + manually instead. + + .. versionchanged:: 1.1.0 + Blueprints have a ``cli`` group to register nested CLI commands. + The ``cli_group`` parameter controls the name of the group under + the ``flask`` command. + + .. versionadded:: 0.7 + """ + + _got_registered_once = False + + _json_encoder: t.Union[t.Type[json.JSONEncoder], None] = None + _json_decoder: t.Union[t.Type[json.JSONDecoder], None] = None + + @property + def json_encoder( + self, + ) -> t.Union[t.Type[json.JSONEncoder], None]: + """Blueprint-local JSON encoder class to use. Set to ``None`` to use the app's. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. Customize + :attr:`json_provider_class` instead. + + .. versionadded:: 0.10 + """ + import warnings + + warnings.warn( + "'bp.json_encoder' is deprecated and will be removed in Flask 2.3." + " Customize 'app.json_provider_class' or 'app.json' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._json_encoder + + @json_encoder.setter + def json_encoder(self, value: t.Union[t.Type[json.JSONEncoder], None]) -> None: + import warnings + + warnings.warn( + "'bp.json_encoder' is deprecated and will be removed in Flask 2.3." + " Customize 'app.json_provider_class' or 'app.json' instead.", + DeprecationWarning, + stacklevel=2, + ) + self._json_encoder = value + + @property + def json_decoder( + self, + ) -> t.Union[t.Type[json.JSONDecoder], None]: + """Blueprint-local JSON decoder class to use. Set to ``None`` to use the app's. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. Customize + :attr:`json_provider_class` instead. + + .. versionadded:: 0.10 + """ + import warnings + + warnings.warn( + "'bp.json_decoder' is deprecated and will be removed in Flask 2.3." + " Customize 'app.json_provider_class' or 'app.json' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._json_decoder + + @json_decoder.setter + def json_decoder(self, value: t.Union[t.Type[json.JSONDecoder], None]) -> None: + import warnings + + warnings.warn( + "'bp.json_decoder' is deprecated and will be removed in Flask 2.3." 
+ " Customize 'app.json_provider_class' or 'app.json' instead.", + DeprecationWarning, + stacklevel=2, + ) + self._json_decoder = value + + def __init__( + self, + name: str, + import_name: str, + static_folder: t.Optional[t.Union[str, os.PathLike]] = None, + static_url_path: t.Optional[str] = None, + template_folder: t.Optional[t.Union[str, os.PathLike]] = None, + url_prefix: t.Optional[str] = None, + subdomain: t.Optional[str] = None, + url_defaults: t.Optional[dict] = None, + root_path: t.Optional[str] = None, + cli_group: t.Optional[str] = _sentinel, # type: ignore + ): + super().__init__( + import_name=import_name, + static_folder=static_folder, + static_url_path=static_url_path, + template_folder=template_folder, + root_path=root_path, + ) + + if "." in name: + raise ValueError("'name' may not contain a dot '.' character.") + + self.name = name + self.url_prefix = url_prefix + self.subdomain = subdomain + self.deferred_functions: t.List[DeferredSetupFunction] = [] + + if url_defaults is None: + url_defaults = {} + + self.url_values_defaults = url_defaults + self.cli_group = cli_group + self._blueprints: t.List[t.Tuple["Blueprint", dict]] = [] + + def _check_setup_finished(self, f_name: str) -> None: + if self._got_registered_once: + import warnings + + warnings.warn( + f"The setup method '{f_name}' can no longer be called on" + f" the blueprint '{self.name}'. It has already been" + " registered at least once, any changes will not be" + " applied consistently.\n" + "Make sure all imports, decorators, functions, etc." + " needed to set up the blueprint are done before" + " registering it.\n" + "This warning will become an exception in Flask 2.3.", + UserWarning, + stacklevel=3, + ) + + @setupmethod + def record(self, func: t.Callable) -> None: + """Registers a function that is called when the blueprint is + registered on the application. This function is called with the + state as argument as returned by the :meth:`make_setup_state` + method. + """ + self.deferred_functions.append(func) + + @setupmethod + def record_once(self, func: t.Callable) -> None: + """Works like :meth:`record` but wraps the function in another + function that will ensure the function is only called once. If the + blueprint is registered a second time on the application, the + function passed is not called. + """ + + def wrapper(state: BlueprintSetupState) -> None: + if state.first_registration: + func(state) + + self.record(update_wrapper(wrapper, func)) + + def make_setup_state( + self, app: "Flask", options: dict, first_registration: bool = False + ) -> BlueprintSetupState: + """Creates an instance of :meth:`~flask.blueprints.BlueprintSetupState` + object that is later passed to the register callback functions. + Subclasses can override this to return a subclass of the setup state. + """ + return BlueprintSetupState(self, app, options, first_registration) + + @setupmethod + def register_blueprint(self, blueprint: "Blueprint", **options: t.Any) -> None: + """Register a :class:`~flask.Blueprint` on this blueprint. Keyword + arguments passed to this method will override the defaults set + on the blueprint. + + .. versionchanged:: 2.0.1 + The ``name`` option can be used to change the (pre-dotted) + name the blueprint is registered with. This allows the same + blueprint to be registered multiple times with unique names + for ``url_for``. + + .. 
versionadded:: 2.0 + """ + if blueprint is self: + raise ValueError("Cannot register a blueprint on itself") + self._blueprints.append((blueprint, options)) + + def register(self, app: "Flask", options: dict) -> None: + """Called by :meth:`Flask.register_blueprint` to register all + views and callbacks registered on the blueprint with the + application. Creates a :class:`.BlueprintSetupState` and calls + each :meth:`record` callback with it. + + :param app: The application this blueprint is being registered + with. + :param options: Keyword arguments forwarded from + :meth:`~Flask.register_blueprint`. + + .. versionchanged:: 2.0.1 + Nested blueprints are registered with their dotted name. + This allows different blueprints with the same name to be + nested at different locations. + + .. versionchanged:: 2.0.1 + The ``name`` option can be used to change the (pre-dotted) + name the blueprint is registered with. This allows the same + blueprint to be registered multiple times with unique names + for ``url_for``. + + .. versionchanged:: 2.0.1 + Registering the same blueprint with the same name multiple + times is deprecated and will become an error in Flask 2.1. + """ + name_prefix = options.get("name_prefix", "") + self_name = options.get("name", self.name) + name = f"{name_prefix}.{self_name}".lstrip(".") + + if name in app.blueprints: + bp_desc = "this" if app.blueprints[name] is self else "a different" + existing_at = f" '{name}'" if self_name != name else "" + + raise ValueError( + f"The name '{self_name}' is already registered for" + f" {bp_desc} blueprint{existing_at}. Use 'name=' to" + f" provide a unique name." + ) + + first_bp_registration = not any(bp is self for bp in app.blueprints.values()) + first_name_registration = name not in app.blueprints + + app.blueprints[name] = self + self._got_registered_once = True + state = self.make_setup_state(app, options, first_bp_registration) + + if self.has_static_folder: + state.add_url_rule( + f"{self.static_url_path}/<path:filename>", + view_func=self.send_static_file, + endpoint="static", + ) + + # Merge blueprint data into parent. 
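+ # Hedged illustration of the effect of registration (blueprint and
+ # endpoint names below are assumptions, not part of this module):
+ #
+ #     bp = Blueprint("admin", __name__)
+ #
+ #     @bp.route("/")
+ #     def index():
+ #         ...
+ #
+ #     app.register_blueprint(bp, url_prefix="/admin")
+ #     # app.view_functions now contains "admin.index", and
+ #     # url_for("admin.index") builds "/admin/".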
+ if first_bp_registration or first_name_registration: + + def extend(bp_dict, parent_dict): + for key, values in bp_dict.items(): + key = name if key is None else f"{name}.{key}" + parent_dict[key].extend(values) + + for key, value in self.error_handler_spec.items(): + key = name if key is None else f"{name}.{key}" + value = defaultdict( + dict, + { + code: { + exc_class: func for exc_class, func in code_values.items() + } + for code, code_values in value.items() + }, + ) + app.error_handler_spec[key] = value + + for endpoint, func in self.view_functions.items(): + app.view_functions[endpoint] = func + + extend(self.before_request_funcs, app.before_request_funcs) + extend(self.after_request_funcs, app.after_request_funcs) + extend( + self.teardown_request_funcs, + app.teardown_request_funcs, + ) + extend(self.url_default_functions, app.url_default_functions) + extend(self.url_value_preprocessors, app.url_value_preprocessors) + extend(self.template_context_processors, app.template_context_processors) + + for deferred in self.deferred_functions: + deferred(state) + + cli_resolved_group = options.get("cli_group", self.cli_group) + + if self.cli.commands: + if cli_resolved_group is None: + app.cli.commands.update(self.cli.commands) + elif cli_resolved_group is _sentinel: + self.cli.name = name + app.cli.add_command(self.cli) + else: + self.cli.name = cli_resolved_group + app.cli.add_command(self.cli) + + for blueprint, bp_options in self._blueprints: + bp_options = bp_options.copy() + bp_url_prefix = bp_options.get("url_prefix") + + if bp_url_prefix is None: + bp_url_prefix = blueprint.url_prefix + + if state.url_prefix is not None and bp_url_prefix is not None: + bp_options["url_prefix"] = ( + state.url_prefix.rstrip("/") + "/" + bp_url_prefix.lstrip("/") + ) + elif bp_url_prefix is not None: + bp_options["url_prefix"] = bp_url_prefix + elif state.url_prefix is not None: + bp_options["url_prefix"] = state.url_prefix + + bp_options["name_prefix"] = name + blueprint.register(app, bp_options) + + @setupmethod + def add_url_rule( + self, + rule: str, + endpoint: t.Optional[str] = None, + view_func: t.Optional[ft.RouteCallable] = None, + provide_automatic_options: t.Optional[bool] = None, + **options: t.Any, + ) -> None: + """Register a URL rule with the blueprint. See :meth:`.Flask.add_url_rule` for + full documentation. + + The URL rule is prefixed with the blueprint's URL prefix. The endpoint name, + used with :func:`url_for`, is prefixed with the blueprint's name. + """ + if endpoint and "." in endpoint: + raise ValueError("'endpoint' may not contain a dot '.' character.") + + if view_func and hasattr(view_func, "__name__") and "." in view_func.__name__: + raise ValueError("'view_func' name may not contain a dot '.' character.") + + self.record( + lambda s: s.add_url_rule( + rule, + endpoint, + view_func, + provide_automatic_options=provide_automatic_options, + **options, + ) + ) + + @setupmethod + def app_template_filter( + self, name: t.Optional[str] = None + ) -> t.Callable[[T_template_filter], T_template_filter]: + """Register a template filter, available in any template rendered by the + application. Equivalent to :meth:`.Flask.template_filter`. + + :param name: the optional name of the filter, otherwise the + function name will be used. 
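+
+ A minimal sketch of usage (the filter name and template snippet
+ are assumptions)::
+
+     @bp.app_template_filter("reverse")
+     def reverse_filter(s):
+         return s[::-1]
+
+     # usable in any template: {{ "flask"|reverse }}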
+ """ + + def decorator(f: T_template_filter) -> T_template_filter: + self.add_app_template_filter(f, name=name) + return f + + return decorator + + @setupmethod + def add_app_template_filter( + self, f: ft.TemplateFilterCallable, name: t.Optional[str] = None + ) -> None: + """Register a template filter, available in any template rendered by the + application. Works like the :meth:`app_template_filter` decorator. Equivalent to + :meth:`.Flask.add_template_filter`. + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + + def register_template(state: BlueprintSetupState) -> None: + state.app.jinja_env.filters[name or f.__name__] = f + + self.record_once(register_template) + + @setupmethod + def app_template_test( + self, name: t.Optional[str] = None + ) -> t.Callable[[T_template_test], T_template_test]: + """Register a template test, available in any template rendered by the + application. Equivalent to :meth:`.Flask.template_test`. + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + + def decorator(f: T_template_test) -> T_template_test: + self.add_app_template_test(f, name=name) + return f + + return decorator + + @setupmethod + def add_app_template_test( + self, f: ft.TemplateTestCallable, name: t.Optional[str] = None + ) -> None: + """Register a template test, available in any template rendered by the + application. Works like the :meth:`app_template_test` decorator. Equivalent to + :meth:`.Flask.add_template_test`. + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + + def register_template(state: BlueprintSetupState) -> None: + state.app.jinja_env.tests[name or f.__name__] = f + + self.record_once(register_template) + + @setupmethod + def app_template_global( + self, name: t.Optional[str] = None + ) -> t.Callable[[T_template_global], T_template_global]: + """Register a template global, available in any template rendered by the + application. Equivalent to :meth:`.Flask.template_global`. + + .. versionadded:: 0.10 + + :param name: the optional name of the global, otherwise the + function name will be used. + """ + + def decorator(f: T_template_global) -> T_template_global: + self.add_app_template_global(f, name=name) + return f + + return decorator + + @setupmethod + def add_app_template_global( + self, f: ft.TemplateGlobalCallable, name: t.Optional[str] = None + ) -> None: + """Register a template global, available in any template rendered by the + application. Works like the :meth:`app_template_global` decorator. Equivalent to + :meth:`.Flask.add_template_global`. + + .. versionadded:: 0.10 + + :param name: the optional name of the global, otherwise the + function name will be used. + """ + + def register_template(state: BlueprintSetupState) -> None: + state.app.jinja_env.globals[name or f.__name__] = f + + self.record_once(register_template) + + @setupmethod + def before_app_request(self, f: T_before_request) -> T_before_request: + """Like :meth:`before_request`, but before every request, not only those handled + by the blueprint. Equivalent to :meth:`.Flask.before_request`. 
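+
+ A sketch, assuming a hypothetical ``load_current_user`` helper::
+
+     @bp.before_app_request
+     def attach_user():
+         g.user = load_current_user()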
+ """ + self.record_once( + lambda s: s.app.before_request_funcs.setdefault(None, []).append(f) + ) + return f + + @setupmethod + def before_app_first_request( + self, f: T_before_first_request + ) -> T_before_first_request: + """Register a function to run before the first request to the application is + handled by the worker. Equivalent to :meth:`.Flask.before_first_request`. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. Run setup code when creating + the application instead. + """ + import warnings + + warnings.warn( + "'before_app_first_request' is deprecated and will be" + " removed in Flask 2.3. Use 'record_once' instead to run" + " setup code when registering the blueprint.", + DeprecationWarning, + stacklevel=2, + ) + self.record_once(lambda s: s.app.before_first_request_funcs.append(f)) + return f + + @setupmethod + def after_app_request(self, f: T_after_request) -> T_after_request: + """Like :meth:`after_request`, but after every request, not only those handled + by the blueprint. Equivalent to :meth:`.Flask.after_request`. + """ + self.record_once( + lambda s: s.app.after_request_funcs.setdefault(None, []).append(f) + ) + return f + + @setupmethod + def teardown_app_request(self, f: T_teardown) -> T_teardown: + """Like :meth:`teardown_request`, but after every request, not only those + handled by the blueprint. Equivalent to :meth:`.Flask.teardown_request`. + """ + self.record_once( + lambda s: s.app.teardown_request_funcs.setdefault(None, []).append(f) + ) + return f + + @setupmethod + def app_context_processor( + self, f: T_template_context_processor + ) -> T_template_context_processor: + """Like :meth:`context_processor`, but for templates rendered by every view, not + only by the blueprint. Equivalent to :meth:`.Flask.context_processor`. + """ + self.record_once( + lambda s: s.app.template_context_processors.setdefault(None, []).append(f) + ) + return f + + @setupmethod + def app_errorhandler( + self, code: t.Union[t.Type[Exception], int] + ) -> t.Callable[[T_error_handler], T_error_handler]: + """Like :meth:`errorhandler`, but for every request, not only those handled by + the blueprint. Equivalent to :meth:`.Flask.errorhandler`. + """ + + def decorator(f: T_error_handler) -> T_error_handler: + self.record_once(lambda s: s.app.errorhandler(code)(f)) + return f + + return decorator + + @setupmethod + def app_url_value_preprocessor( + self, f: T_url_value_preprocessor + ) -> T_url_value_preprocessor: + """Like :meth:`url_value_preprocessor`, but for every request, not only those + handled by the blueprint. Equivalent to :meth:`.Flask.url_value_preprocessor`. + """ + self.record_once( + lambda s: s.app.url_value_preprocessors.setdefault(None, []).append(f) + ) + return f + + @setupmethod + def app_url_defaults(self, f: T_url_defaults) -> T_url_defaults: + """Like :meth:`url_defaults`, but for every request, not only those handled by + the blueprint. Equivalent to :meth:`.Flask.url_defaults`. 
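+
+ A sketch (the ``lang_code`` default is an assumption)::
+
+     @bp.app_url_defaults
+     def add_language_code(endpoint, values):
+         values.setdefault("lang_code", "en")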
+ """ + self.record_once( + lambda s: s.app.url_default_functions.setdefault(None, []).append(f) + ) + return f diff --git a/venv/lib/python3.11/site-packages/flask/cli.py b/venv/lib/python3.11/site-packages/flask/cli.py new file mode 100644 index 0000000..37a15ff --- /dev/null +++ b/venv/lib/python3.11/site-packages/flask/cli.py @@ -0,0 +1,1054 @@ +from __future__ import annotations + +import ast +import inspect +import os +import platform +import re +import sys +import traceback +import typing as t +from functools import update_wrapper +from operator import attrgetter + +import click +from click.core import ParameterSource +from werkzeug import run_simple +from werkzeug.serving import is_running_from_reloader +from werkzeug.utils import import_string + +from .globals import current_app +from .helpers import get_debug_flag +from .helpers import get_load_dotenv + +if t.TYPE_CHECKING: + from .app import Flask + + +class NoAppException(click.UsageError): + """Raised if an application cannot be found or loaded.""" + + +def find_best_app(module): + """Given a module instance this tries to find the best possible + application in the module or raises an exception. + """ + from . import Flask + + # Search for the most common names first. + for attr_name in ("app", "application"): + app = getattr(module, attr_name, None) + + if isinstance(app, Flask): + return app + + # Otherwise find the only object that is a Flask instance. + matches = [v for v in module.__dict__.values() if isinstance(v, Flask)] + + if len(matches) == 1: + return matches[0] + elif len(matches) > 1: + raise NoAppException( + "Detected multiple Flask applications in module" + f" '{module.__name__}'. Use '{module.__name__}:name'" + " to specify the correct one." + ) + + # Search for app factory functions. + for attr_name in ("create_app", "make_app"): + app_factory = getattr(module, attr_name, None) + + if inspect.isfunction(app_factory): + try: + app = app_factory() + + if isinstance(app, Flask): + return app + except TypeError as e: + if not _called_with_wrong_args(app_factory): + raise + + raise NoAppException( + f"Detected factory '{attr_name}' in module '{module.__name__}'," + " but could not call it without arguments. Use" + f" '{module.__name__}:{attr_name}(args)'" + " to specify arguments." + ) from e + + raise NoAppException( + "Failed to find Flask application or factory in module" + f" '{module.__name__}'. Use '{module.__name__}:name'" + " to specify one." + ) + + +def _called_with_wrong_args(f): + """Check whether calling a function raised a ``TypeError`` because + the call failed or because something in the factory raised the + error. + + :param f: The function that was called. + :return: ``True`` if the call failed. + """ + tb = sys.exc_info()[2] + + try: + while tb is not None: + if tb.tb_frame.f_code is f.__code__: + # In the function, it was called successfully. + return False + + tb = tb.tb_next + + # Didn't reach the function. + return True + finally: + # Delete tb to break a circular reference. + # https://docs.python.org/2/library/sys.html#sys.exc_info + del tb + + +def find_app_by_string(module, app_name): + """Check if the given string is a variable name or a function. Call + a function to get the app instance, or return the variable directly. + """ + from . import Flask + + # Parse app_name as a single expression to determine if it's a valid + # attribute name or function call. 
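+ # Hedged examples of strings this accepts (the factory names are
+ # assumptions): "app" parses to an ast.Name and the attribute is
+ # returned directly; "create_app()" and "create_app('dev', debug=True)"
+ # parse to an ast.Call whose arguments must be literal values.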
+ try: + expr = ast.parse(app_name.strip(), mode="eval").body + except SyntaxError: + raise NoAppException( + f"Failed to parse {app_name!r} as an attribute name or function call." + ) from None + + if isinstance(expr, ast.Name): + name = expr.id + args = [] + kwargs = {} + elif isinstance(expr, ast.Call): + # Ensure the function name is an attribute name only. + if not isinstance(expr.func, ast.Name): + raise NoAppException( + f"Function reference must be a simple name: {app_name!r}." + ) + + name = expr.func.id + + # Parse the positional and keyword arguments as literals. + try: + args = [ast.literal_eval(arg) for arg in expr.args] + kwargs = {kw.arg: ast.literal_eval(kw.value) for kw in expr.keywords} + except ValueError: + # literal_eval gives cryptic error messages, show a generic + # message with the full expression instead. + raise NoAppException( + f"Failed to parse arguments as literal values: {app_name!r}." + ) from None + else: + raise NoAppException( + f"Failed to parse {app_name!r} as an attribute name or function call." + ) + + try: + attr = getattr(module, name) + except AttributeError as e: + raise NoAppException( + f"Failed to find attribute {name!r} in {module.__name__!r}." + ) from e + + # If the attribute is a function, call it with any args and kwargs + # to get the real application. + if inspect.isfunction(attr): + try: + app = attr(*args, **kwargs) + except TypeError as e: + if not _called_with_wrong_args(attr): + raise + + raise NoAppException( + f"The factory {app_name!r} in module" + f" {module.__name__!r} could not be called with the" + " specified arguments." + ) from e + else: + app = attr + + if isinstance(app, Flask): + return app + + raise NoAppException( + "A valid Flask application was not obtained from" + f" '{module.__name__}:{app_name}'." + ) + + +def prepare_import(path): + """Given a filename this will try to calculate the python path, add it + to the search path and return the actual module name that is expected. + """ + path = os.path.realpath(path) + + fname, ext = os.path.splitext(path) + if ext == ".py": + path = fname + + if os.path.basename(path) == "__init__": + path = os.path.dirname(path) + + module_name = [] + + # move up until outside package structure (no __init__.py) + while True: + path, name = os.path.split(path) + module_name.append(name) + + if not os.path.exists(os.path.join(path, "__init__.py")): + break + + if sys.path[0] != path: + sys.path.insert(0, path) + + return ".".join(module_name[::-1]) + + +def locate_app(module_name, app_name, raise_if_not_found=True): + try: + __import__(module_name) + except ImportError: + # Reraise the ImportError if it occurred within the imported module. + # Determine this by checking whether the trace has a depth > 1. + if sys.exc_info()[2].tb_next: + raise NoAppException( + f"While importing {module_name!r}, an ImportError was" + f" raised:\n\n{traceback.format_exc()}" + ) from None + elif raise_if_not_found: + raise NoAppException(f"Could not import {module_name!r}.") from None + else: + return + + module = sys.modules[module_name] + + if app_name is None: + return find_best_app(module) + else: + return find_app_by_string(module, app_name) + + +def get_version(ctx, param, value): + if not value or ctx.resilient_parsing: + return + + import werkzeug + from . 
import __version__ + + click.echo( + f"Python {platform.python_version()}\n" + f"Flask {__version__}\n" + f"Werkzeug {werkzeug.__version__}", + color=ctx.color, + ) + ctx.exit() + + +version_option = click.Option( + ["--version"], + help="Show the Flask version.", + expose_value=False, + callback=get_version, + is_flag=True, + is_eager=True, +) + + +class ScriptInfo: + """Helper object to deal with Flask applications. This is usually not + necessary to interface with as it's used internally in the dispatching + to click. In future versions of Flask this object will most likely play + a bigger role. Typically it's created automatically by the + :class:`FlaskGroup` but you can also manually create it and pass it + onwards as a click object. + """ + + def __init__( + self, + app_import_path: str | None = None, + create_app: t.Callable[..., Flask] | None = None, + set_debug_flag: bool = True, + ) -> None: + #: Optionally the import path for the Flask application. + self.app_import_path = app_import_path + #: Optionally a function that is passed the script info to create + #: the instance of the application. + self.create_app = create_app + #: A dictionary with arbitrary data that can be associated with + #: this script info. + self.data: t.Dict[t.Any, t.Any] = {} + self.set_debug_flag = set_debug_flag + self._loaded_app: Flask | None = None + + def load_app(self) -> Flask: + """Loads the Flask app (if not yet loaded) and returns it. Calling + this multiple times will simply return the already loaded app. + """ + if self._loaded_app is not None: + return self._loaded_app + + if self.create_app is not None: + app = self.create_app() + else: + if self.app_import_path: + path, name = ( + re.split(r":(?![\\/])", self.app_import_path, 1) + [None] + )[:2] + import_name = prepare_import(path) + app = locate_app(import_name, name) + else: + for path in ("wsgi.py", "app.py"): + import_name = prepare_import(path) + app = locate_app(import_name, None, raise_if_not_found=False) + + if app: + break + + if not app: + raise NoAppException( + "Could not locate a Flask application. Use the" + " 'flask --app' option, 'FLASK_APP' environment" + " variable, or a 'wsgi.py' or 'app.py' file in the" + " current directory." + ) + + if self.set_debug_flag: + # Update the app's debug flag through the descriptor so that + # other values repopulate as well. + app.debug = get_debug_flag() + + self._loaded_app = app + return app + + +pass_script_info = click.make_pass_decorator(ScriptInfo, ensure=True) + + +def with_appcontext(f): + """Wraps a callback so that it's guaranteed to be executed with the + script's application context. + + Custom commands (and their options) registered under ``app.cli`` or + ``blueprint.cli`` will always have an app context available, so this + decorator is not required in that case. + + .. versionchanged:: 2.2 + The app context is active for subcommands as well as the + decorated callback. The app context is always available to + ``app.cli`` command and parameter callbacks. + """ + + @click.pass_context + def decorator(__ctx, *args, **kwargs): + if not current_app: + app = __ctx.ensure_object(ScriptInfo).load_app() + __ctx.with_resource(app.app_context()) + + return __ctx.invoke(f, *args, **kwargs) + + return update_wrapper(decorator, f) + + +class AppGroup(click.Group): + """This works similarly to a regular click :class:`~click.Group` but it + changes the behavior of the :meth:`command` decorator so that it + automatically wraps the functions in :func:`with_appcontext`. 
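+
+ A sketch of the effect (the command name is an assumption)::
+
+     @app.cli.command()
+     def sync_data():
+         # runs inside an app context; current_app is usable here
+         ...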
+ + Not to be confused with :class:`FlaskGroup`. + """ + + def command(self, *args, **kwargs): + """This works exactly like the method of the same name on a regular + :class:`click.Group` but it wraps callbacks in :func:`with_appcontext` + unless it's disabled by passing ``with_appcontext=False``. + """ + wrap_for_ctx = kwargs.pop("with_appcontext", True) + + def decorator(f): + if wrap_for_ctx: + f = with_appcontext(f) + return click.Group.command(self, *args, **kwargs)(f) + + return decorator + + def group(self, *args, **kwargs): + """This works exactly like the method of the same name on a regular + :class:`click.Group` but it defaults the group class to + :class:`AppGroup`. + """ + kwargs.setdefault("cls", AppGroup) + return click.Group.group(self, *args, **kwargs) + + +def _set_app(ctx: click.Context, param: click.Option, value: str | None) -> str | None: + if value is None: + return None + + info = ctx.ensure_object(ScriptInfo) + info.app_import_path = value + return value + + +# This option is eager so the app will be available if --help is given. +# --help is also eager, so --app must be before it in the param list. +# no_args_is_help bypasses eager processing, so this option must be +# processed manually in that case to ensure FLASK_APP gets picked up. +_app_option = click.Option( + ["-A", "--app"], + metavar="IMPORT", + help=( + "The Flask application or factory function to load, in the form 'module:name'." + " Module can be a dotted import or file path. Name is not required if it is" + " 'app', 'application', 'create_app', or 'make_app', and can be 'name(args)' to" + " pass arguments." + ), + is_eager=True, + expose_value=False, + callback=_set_app, +) + + +def _set_debug(ctx: click.Context, param: click.Option, value: bool) -> bool | None: + # If the flag isn't provided, it will default to False. Don't use + # that, let debug be set by env in that case. + source = ctx.get_parameter_source(param.name) # type: ignore[arg-type] + + if source is not None and source in ( + ParameterSource.DEFAULT, + ParameterSource.DEFAULT_MAP, + ): + return None + + # Set with env var instead of ScriptInfo.load so that it can be + # accessed early during a factory function. + os.environ["FLASK_DEBUG"] = "1" if value else "0" + return value + + +_debug_option = click.Option( + ["--debug/--no-debug"], + help="Set debug mode.", + expose_value=False, + callback=_set_debug, +) + + +def _env_file_callback( + ctx: click.Context, param: click.Option, value: str | None +) -> str | None: + if value is None: + return None + + import importlib + + try: + importlib.import_module("dotenv") + except ImportError: + raise click.BadParameter( + "python-dotenv must be installed to load an env file.", + ctx=ctx, + param=param, + ) from None + + # Don't check FLASK_SKIP_DOTENV, that only disables automatically + # loading .env and .flaskenv files. + load_dotenv(value) + return value + + +# This option is eager so env vars are loaded as early as possible to be +# used by other options. +_env_file_option = click.Option( + ["-e", "--env-file"], + type=click.Path(exists=True, dir_okay=False), + help="Load environment variables from this file. python-dotenv must be installed.", + is_eager=True, + expose_value=False, + callback=_env_file_callback, +) + + +class FlaskGroup(AppGroup): + """Special subclass of the :class:`AppGroup` group that supports + loading more commands from the configured Flask app. 
Normally a + developer does not have to interface with this class but there are + some very advanced use cases for which it makes sense to create an + instance of this. see :ref:`custom-scripts`. + + :param add_default_commands: if this is True then the default run and + shell commands will be added. + :param add_version_option: adds the ``--version`` option. + :param create_app: an optional callback that is passed the script info and + returns the loaded app. + :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` + files to set environment variables. Will also change the working + directory to the directory containing the first file found. + :param set_debug_flag: Set the app's debug flag. + + .. versionchanged:: 2.2 + Added the ``-A/--app``, ``--debug/--no-debug``, ``-e/--env-file`` options. + + .. versionchanged:: 2.2 + An app context is pushed when running ``app.cli`` commands, so + ``@with_appcontext`` is no longer required for those commands. + + .. versionchanged:: 1.0 + If installed, python-dotenv will be used to load environment variables + from :file:`.env` and :file:`.flaskenv` files. + """ + + def __init__( + self, + add_default_commands: bool = True, + create_app: t.Callable[..., Flask] | None = None, + add_version_option: bool = True, + load_dotenv: bool = True, + set_debug_flag: bool = True, + **extra: t.Any, + ) -> None: + params = list(extra.pop("params", None) or ()) + # Processing is done with option callbacks instead of a group + # callback. This allows users to make a custom group callback + # without losing the behavior. --env-file must come first so + # that it is eagerly evaluated before --app. + params.extend((_env_file_option, _app_option, _debug_option)) + + if add_version_option: + params.append(version_option) + + if "context_settings" not in extra: + extra["context_settings"] = {} + + extra["context_settings"].setdefault("auto_envvar_prefix", "FLASK") + + super().__init__(params=params, **extra) + + self.create_app = create_app + self.load_dotenv = load_dotenv + self.set_debug_flag = set_debug_flag + + if add_default_commands: + self.add_command(run_command) + self.add_command(shell_command) + self.add_command(routes_command) + + self._loaded_plugin_commands = False + + def _load_plugin_commands(self): + if self._loaded_plugin_commands: + return + + if sys.version_info >= (3, 10): + from importlib import metadata + else: + # Use a backport on Python < 3.10. We technically have + # importlib.metadata on 3.8+, but the API changed in 3.10, + # so use the backport for consistency. + import importlib_metadata as metadata + + for ep in metadata.entry_points(group="flask.commands"): + self.add_command(ep.load(), ep.name) + + self._loaded_plugin_commands = True + + def get_command(self, ctx, name): + self._load_plugin_commands() + # Look up built-in and plugin commands, which should be + # available even if the app fails to load. + rv = super().get_command(ctx, name) + + if rv is not None: + return rv + + info = ctx.ensure_object(ScriptInfo) + + # Look up commands provided by the app, showing an error and + # continuing if the app couldn't be loaded. + try: + app = info.load_app() + except NoAppException as e: + click.secho(f"Error: {e.format_message()}\n", err=True, fg="red") + return None + + # Push an app context for the loaded app unless it is already + # active somehow. This makes the context available to parameter + # and command callbacks without needing @with_appcontext. 
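+ # (This is what lets a command registered on ``app.cli`` without + # ``@with_appcontext`` still use ``current_app`` in its callback.)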
+ if not current_app or current_app._get_current_object() is not app: + ctx.with_resource(app.app_context()) + + return app.cli.get_command(ctx, name) + + def list_commands(self, ctx): + self._load_plugin_commands() + # Start with the built-in and plugin commands. + rv = set(super().list_commands(ctx)) + info = ctx.ensure_object(ScriptInfo) + + # Add commands provided by the app, showing an error and + # continuing if the app couldn't be loaded. + try: + rv.update(info.load_app().cli.list_commands(ctx)) + except NoAppException as e: + # When an app couldn't be loaded, show the error message + # without the traceback. + click.secho(f"Error: {e.format_message()}\n", err=True, fg="red") + except Exception: + # When any other errors occurred during loading, show the + # full traceback. + click.secho(f"{traceback.format_exc()}\n", err=True, fg="red") + + return sorted(rv) + + def make_context( + self, + info_name: str | None, + args: list[str], + parent: click.Context | None = None, + **extra: t.Any, + ) -> click.Context: + # Set a flag to tell app.run to become a no-op. If app.run was + # not in a __name__ == __main__ guard, it would start the server + # when importing, blocking whatever command is being called. + os.environ["FLASK_RUN_FROM_CLI"] = "true" + + # Attempt to load .env and .flask env files. The --env-file + # option can cause another file to be loaded. + if get_load_dotenv(self.load_dotenv): + load_dotenv() + + if "obj" not in extra and "obj" not in self.context_settings: + extra["obj"] = ScriptInfo( + create_app=self.create_app, set_debug_flag=self.set_debug_flag + ) + + return super().make_context(info_name, args, parent=parent, **extra) + + def parse_args(self, ctx: click.Context, args: list[str]) -> list[str]: + if not args and self.no_args_is_help: + # Attempt to load --env-file and --app early in case they + # were given as env vars. Otherwise no_args_is_help will not + # see commands from app.cli. + _env_file_option.handle_parse_result(ctx, {}, []) + _app_option.handle_parse_result(ctx, {}, []) + + return super().parse_args(ctx, args) + + +def _path_is_ancestor(path, other): + """Take ``other`` and remove the length of ``path`` from it. Then join it + to ``path``. If it is the original value, ``path`` is an ancestor of + ``other``.""" + return os.path.join(path, other[len(path) :].lstrip(os.sep)) == other + + +def load_dotenv(path: str | os.PathLike | None = None) -> bool: + """Load "dotenv" files in order of precedence to set environment variables. + + If an env var is already set it is not overwritten, so earlier files in the + list are preferred over later files. + + This is a no-op if `python-dotenv`_ is not installed. + + .. _python-dotenv: https://github.com/theskumar/python-dotenv#readme + + :param path: Load the file at this location instead of searching. + :return: ``True`` if a file was loaded. + + .. versionchanged:: 2.0 + The current directory is not changed to the location of the + loaded file. + + .. versionchanged:: 2.0 + When loading the env files, set the default encoding to UTF-8. + + .. versionchanged:: 1.1.0 + Returns ``False`` when python-dotenv is not installed, or when + the given path isn't a file. + + .. versionadded:: 1.0 + """ + try: + import dotenv + except ImportError: + if path or os.path.isfile(".env") or os.path.isfile(".flaskenv"): + click.secho( + " * Tip: There are .env or .flaskenv files present." 
+ ' Do "pip install python-dotenv" to use them.', + fg="yellow", + err=True, + ) + + return False + + # Always return after attempting to load a given path, don't load + # the default files. + if path is not None: + if os.path.isfile(path): + return dotenv.load_dotenv(path, encoding="utf-8") + + return False + + loaded = False + + for name in (".env", ".flaskenv"): + path = dotenv.find_dotenv(name, usecwd=True) + + if not path: + continue + + dotenv.load_dotenv(path, encoding="utf-8") + loaded = True + + return loaded # True if at least one file was located and loaded. + + +def show_server_banner(debug, app_import_path): + """Show extra startup messages the first time the server is run, + ignoring the reloader. + """ + if is_running_from_reloader(): + return + + if app_import_path is not None: + click.echo(f" * Serving Flask app '{app_import_path}'") + + if debug is not None: + click.echo(f" * Debug mode: {'on' if debug else 'off'}") + + +class CertParamType(click.ParamType): + """Click option type for the ``--cert`` option. Allows either an + existing file, the string ``'adhoc'``, or an import for a + :class:`~ssl.SSLContext` object. + """ + + name = "path" + + def __init__(self): + self.path_type = click.Path(exists=True, dir_okay=False, resolve_path=True) + + def convert(self, value, param, ctx): + try: + import ssl + except ImportError: + raise click.BadParameter( + 'Using "--cert" requires Python to be compiled with SSL support.', + ctx, + param, + ) from None + + try: + return self.path_type(value, param, ctx) + except click.BadParameter: + value = click.STRING(value, param, ctx).lower() + + if value == "adhoc": + try: + import cryptography # noqa: F401 + except ImportError: + raise click.BadParameter( + "Using ad-hoc certificates requires the cryptography library.", + ctx, + param, + ) from None + + return value + + obj = import_string(value, silent=True) + + if isinstance(obj, ssl.SSLContext): + return obj + + raise + + +def _validate_key(ctx, param, value): + """The ``--key`` option must be specified when ``--cert`` is a file. + Modifies the ``cert`` param to be a ``(cert, key)`` pair if needed. + """ + cert = ctx.params.get("cert") + is_adhoc = cert == "adhoc" + + try: + import ssl + except ImportError: + is_context = False + else: + is_context = isinstance(cert, ssl.SSLContext) + + if value is not None: + if is_adhoc: + raise click.BadParameter( + 'When "--cert" is "adhoc", "--key" is not used.', ctx, param + ) + + if is_context: + raise click.BadParameter( + 'When "--cert" is an SSLContext object, "--key is not used.', ctx, param + ) + + if not cert: + raise click.BadParameter('"--cert" must also be specified.', ctx, param) + + ctx.params["cert"] = cert, value + + else: + if cert and not (is_adhoc or is_context): + raise click.BadParameter('Required when using "--cert".', ctx, param) + + return value + + +class SeparatedPathType(click.Path): + """Click option type that accepts a list of values separated by the + OS's path separator (``:``, ``;`` on Windows). Each value is + validated as a :class:`click.Path` type. 
+ """ + + def convert(self, value, param, ctx): + items = self.split_envvar_value(value) + super_convert = super().convert + return [super_convert(item, param, ctx) for item in items] + + +@click.command("run", short_help="Run a development server.") +@click.option("--host", "-h", default="127.0.0.1", help="The interface to bind to.") +@click.option("--port", "-p", default=5000, help="The port to bind to.") +@click.option( + "--cert", + type=CertParamType(), + help="Specify a certificate file to use HTTPS.", + is_eager=True, +) +@click.option( + "--key", + type=click.Path(exists=True, dir_okay=False, resolve_path=True), + callback=_validate_key, + expose_value=False, + help="The key file to use when specifying a certificate.", +) +@click.option( + "--reload/--no-reload", + default=None, + help="Enable or disable the reloader. By default the reloader " + "is active if debug is enabled.", +) +@click.option( + "--debugger/--no-debugger", + default=None, + help="Enable or disable the debugger. By default the debugger " + "is active if debug is enabled.", +) +@click.option( + "--with-threads/--without-threads", + default=True, + help="Enable or disable multithreading.", +) +@click.option( + "--extra-files", + default=None, + type=SeparatedPathType(), + help=( + "Extra files that trigger a reload on change. Multiple paths" + f" are separated by {os.path.pathsep!r}." + ), +) +@click.option( + "--exclude-patterns", + default=None, + type=SeparatedPathType(), + help=( + "Files matching these fnmatch patterns will not trigger a reload" + " on change. Multiple patterns are separated by" + f" {os.path.pathsep!r}." + ), +) +@pass_script_info +def run_command( + info, + host, + port, + reload, + debugger, + with_threads, + cert, + extra_files, + exclude_patterns, +): + """Run a local development server. + + This server is for development purposes only. It does not provide + the stability, security, or performance of production WSGI servers. + + The reloader and debugger are enabled by default with the '--debug' + option. + """ + try: + app = info.load_app() + except Exception as e: + if is_running_from_reloader(): + # When reloading, print out the error immediately, but raise + # it later so the debugger or server can handle it. + traceback.print_exc() + err = e + + def app(environ, start_response): + raise err from None + + else: + # When not reloading, raise the error immediately so the + # command fails. + raise e from None + + debug = get_debug_flag() + + if reload is None: + reload = debug + + if debugger is None: + debugger = debug + + show_server_banner(debug, info.app_import_path) + + run_simple( + host, + port, + app, + use_reloader=reload, + use_debugger=debugger, + threaded=with_threads, + ssl_context=cert, + extra_files=extra_files, + exclude_patterns=exclude_patterns, + ) + + +run_command.params.insert(0, _debug_option) + + +@click.command("shell", short_help="Run a shell in the app context.") +@with_appcontext +def shell_command() -> None: + """Run an interactive Python shell in the context of a given + Flask application. The application will populate the default + namespace of this shell according to its configuration. + + This is useful for executing small snippets of management code + without having to manually configure the application. 
+ """ + import code + + banner = ( + f"Python {sys.version} on {sys.platform}\n" + f"App: {current_app.import_name}\n" + f"Instance: {current_app.instance_path}" + ) + ctx: dict = {} + + # Support the regular Python interpreter startup script if someone + # is using it. + startup = os.environ.get("PYTHONSTARTUP") + if startup and os.path.isfile(startup): + with open(startup) as f: + eval(compile(f.read(), startup, "exec"), ctx) + + ctx.update(current_app.make_shell_context()) + + # Site, customize, or startup script can set a hook to call when + # entering interactive mode. The default one sets up readline with + # tab and history completion. + interactive_hook = getattr(sys, "__interactivehook__", None) + + if interactive_hook is not None: + try: + import readline + from rlcompleter import Completer + except ImportError: + pass + else: + # rlcompleter uses __main__.__dict__ by default, which is + # flask.__main__. Use the shell context instead. + readline.set_completer(Completer(ctx).complete) + + interactive_hook() + + code.interact(banner=banner, local=ctx) + + +@click.command("routes", short_help="Show the routes for the app.") +@click.option( + "--sort", + "-s", + type=click.Choice(("endpoint", "methods", "rule", "match")), + default="endpoint", + help=( + 'Method to sort routes by. "match" is the order that Flask will match ' + "routes when dispatching a request." + ), +) +@click.option("--all-methods", is_flag=True, help="Show HEAD and OPTIONS methods.") +@with_appcontext +def routes_command(sort: str, all_methods: bool) -> None: + """Show all registered routes with endpoints and methods.""" + + rules = list(current_app.url_map.iter_rules()) + if not rules: + click.echo("No routes were registered.") + return + + ignored_methods = set(() if all_methods else ("HEAD", "OPTIONS")) + + if sort in ("endpoint", "rule"): + rules = sorted(rules, key=attrgetter(sort)) + elif sort == "methods": + rules = sorted(rules, key=lambda rule: sorted(rule.methods)) # type: ignore + + rule_methods = [ + ", ".join(sorted(rule.methods - ignored_methods)) # type: ignore + for rule in rules + ] + + headers = ("Endpoint", "Methods", "Rule") + widths = ( + max(len(rule.endpoint) for rule in rules), + max(len(methods) for methods in rule_methods), + max(len(rule.rule) for rule in rules), + ) + widths = [max(len(h), w) for h, w in zip(headers, widths)] + row = "{{0:<{0}}} {{1:<{1}}} {{2:<{2}}}".format(*widths) + + click.echo(row.format(*headers).strip()) + click.echo(row.format(*("-" * width for width in widths))) + + for rule, methods in zip(rules, rule_methods): + click.echo(row.format(rule.endpoint, methods, rule.rule).rstrip()) + + +cli = FlaskGroup( + name="flask", + help="""\ +A general utility script for Flask applications. + +An application to load must be given with the '--app' option, +'FLASK_APP' environment variable, or with a 'wsgi.py' or 'app.py' file +in the current directory. 
+""", +) + + +def main() -> None: + cli.main() + + +if __name__ == "__main__": + main() diff --git a/venv/lib/python3.11/site-packages/flask/config.py b/venv/lib/python3.11/site-packages/flask/config.py new file mode 100644 index 0000000..d4fc310 --- /dev/null +++ b/venv/lib/python3.11/site-packages/flask/config.py @@ -0,0 +1,338 @@ +import errno +import json +import os +import types +import typing as t + +from werkzeug.utils import import_string + + +class ConfigAttribute: + """Makes an attribute forward to the config""" + + def __init__(self, name: str, get_converter: t.Optional[t.Callable] = None) -> None: + self.__name__ = name + self.get_converter = get_converter + + def __get__(self, obj: t.Any, owner: t.Any = None) -> t.Any: + if obj is None: + return self + rv = obj.config[self.__name__] + if self.get_converter is not None: + rv = self.get_converter(rv) + return rv + + def __set__(self, obj: t.Any, value: t.Any) -> None: + obj.config[self.__name__] = value + + +class Config(dict): + """Works exactly like a dict but provides ways to fill it from files + or special dictionaries. There are two common patterns to populate the + config. + + Either you can fill the config from a config file:: + + app.config.from_pyfile('yourconfig.cfg') + + Or alternatively you can define the configuration options in the + module that calls :meth:`from_object` or provide an import path to + a module that should be loaded. It is also possible to tell it to + use the same module and with that provide the configuration values + just before the call:: + + DEBUG = True + SECRET_KEY = 'development key' + app.config.from_object(__name__) + + In both cases (loading from any Python file or loading from modules), + only uppercase keys are added to the config. This makes it possible to use + lowercase values in the config file for temporary values that are not added + to the config or to define the config keys in the same file that implements + the application. + + Probably the most interesting way to load configurations is from an + environment variable pointing to a file:: + + app.config.from_envvar('YOURAPPLICATION_SETTINGS') + + In this case before launching the application you have to set this + environment variable to the file you want to use. On Linux and OS X + use the export statement:: + + export YOURAPPLICATION_SETTINGS='/path/to/config/file' + + On windows use `set` instead. + + :param root_path: path to which files are read relative from. When the + config object is created by the application, this is + the application's :attr:`~flask.Flask.root_path`. + :param defaults: an optional dictionary of default values + """ + + def __init__(self, root_path: str, defaults: t.Optional[dict] = None) -> None: + super().__init__(defaults or {}) + self.root_path = root_path + + def from_envvar(self, variable_name: str, silent: bool = False) -> bool: + """Loads a configuration from an environment variable pointing to + a configuration file. This is basically just a shortcut with nicer + error messages for this line of code:: + + app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS']) + + :param variable_name: name of the environment variable + :param silent: set to ``True`` if you want silent failure for missing + files. + :return: ``True`` if the file was loaded successfully. + """ + rv = os.environ.get(variable_name) + if not rv: + if silent: + return False + raise RuntimeError( + f"The environment variable {variable_name!r} is not set" + " and as such configuration could not be loaded. 
Set" + " this variable and make it point to a configuration" + " file" + ) + return self.from_pyfile(rv, silent=silent) + + def from_prefixed_env( + self, prefix: str = "FLASK", *, loads: t.Callable[[str], t.Any] = json.loads + ) -> bool: + """Load any environment variables that start with ``FLASK_``, + dropping the prefix from the env key for the config key. Values + are passed through a loading function to attempt to convert them + to more specific types than strings. + + Keys are loaded in :func:`sorted` order. + + The default loading function attempts to parse values as any + valid JSON type, including dicts and lists. + + Specific items in nested dicts can be set by separating the + keys with double underscores (``__``). If an intermediate key + doesn't exist, it will be initialized to an empty dict. + + :param prefix: Load env vars that start with this prefix, + separated with an underscore (``_``). + :param loads: Pass each string value to this function and use + the returned value as the config value. If any error is + raised it is ignored and the value remains a string. The + default is :func:`json.loads`. + + .. versionadded:: 2.1 + """ + prefix = f"{prefix}_" + len_prefix = len(prefix) + + for key in sorted(os.environ): + if not key.startswith(prefix): + continue + + value = os.environ[key] + + try: + value = loads(value) + except Exception: + # Keep the value as a string if loading failed. + pass + + # Change to key.removeprefix(prefix) on Python >= 3.9. + key = key[len_prefix:] + + if "__" not in key: + # A non-nested key, set directly. + self[key] = value + continue + + # Traverse nested dictionaries with keys separated by "__". + current = self + *parts, tail = key.split("__") + + for part in parts: + # If an intermediate dict does not exist, create it. + if part not in current: + current[part] = {} + + current = current[part] + + current[tail] = value + + return True + + def from_pyfile(self, filename: str, silent: bool = False) -> bool: + """Updates the values in the config from a Python file. This function + behaves as if the file was imported as module with the + :meth:`from_object` function. + + :param filename: the filename of the config. This can either be an + absolute filename or a filename relative to the + root path. + :param silent: set to ``True`` if you want silent failure for missing + files. + :return: ``True`` if the file was loaded successfully. + + .. versionadded:: 0.7 + `silent` parameter. + """ + filename = os.path.join(self.root_path, filename) + d = types.ModuleType("config") + d.__file__ = filename + try: + with open(filename, mode="rb") as config_file: + exec(compile(config_file.read(), filename, "exec"), d.__dict__) + except OSError as e: + if silent and e.errno in (errno.ENOENT, errno.EISDIR, errno.ENOTDIR): + return False + e.strerror = f"Unable to load configuration file ({e.strerror})" + raise + self.from_object(d) + return True + + def from_object(self, obj: t.Union[object, str]) -> None: + """Updates the values from the given object. An object can be of one + of the following two types: + + - a string: in this case the object with that name will be imported + - an actual object reference: that object is used directly + + Objects are usually either modules or classes. :meth:`from_object` + loads only the uppercase attributes of the module/class. A ``dict`` + object will not work with :meth:`from_object` because the keys of a + ``dict`` are not attributes of the ``dict`` class. 
+ + Example of module-based configuration:: + + app.config.from_object('yourapplication.default_config') + from yourapplication import default_config + app.config.from_object(default_config) + + Nothing is done to the object before loading. If the object is a + class and has ``@property`` attributes, it needs to be + instantiated before being passed to this method. + + You should not use this function to load the actual configuration but + rather configuration defaults. The actual config should be loaded + with :meth:`from_pyfile` and ideally from a location not within the + package because the package might be installed system wide. + + See :ref:`config-dev-prod` for an example of class-based configuration + using :meth:`from_object`. + + :param obj: an import name or object + """ + if isinstance(obj, str): + obj = import_string(obj) + for key in dir(obj): + if key.isupper(): + self[key] = getattr(obj, key) + + def from_file( + self, + filename: str, + load: t.Callable[[t.IO[t.Any]], t.Mapping], + silent: bool = False, + ) -> bool: + """Update the values in the config from a file that is loaded + using the ``load`` parameter. The loaded data is passed to the + :meth:`from_mapping` method. + + .. code-block:: python + + import json + app.config.from_file("config.json", load=json.load) + + import toml + app.config.from_file("config.toml", load=toml.load) + + :param filename: The path to the data file. This can be an + absolute path or relative to the config root path. + :param load: A callable that takes a file handle and returns a + mapping of loaded data from the file. + :type load: ``Callable[[Reader], Mapping]`` where ``Reader`` + implements a ``read`` method. + :param silent: Ignore the file if it doesn't exist. + :return: ``True`` if the file was loaded successfully. + + .. versionadded:: 2.0 + """ + filename = os.path.join(self.root_path, filename) + + try: + with open(filename) as f: + obj = load(f) + except OSError as e: + if silent and e.errno in (errno.ENOENT, errno.EISDIR): + return False + + e.strerror = f"Unable to load configuration file ({e.strerror})" + raise + + return self.from_mapping(obj) + + def from_mapping( + self, mapping: t.Optional[t.Mapping[str, t.Any]] = None, **kwargs: t.Any + ) -> bool: + """Updates the config like :meth:`update` ignoring items with + non-upper keys. + + :return: Always returns ``True``. + + .. versionadded:: 0.11 + """ + mappings: t.Dict[str, t.Any] = {} + if mapping is not None: + mappings.update(mapping) + mappings.update(kwargs) + for key, value in mappings.items(): + if key.isupper(): + self[key] = value + return True + + def get_namespace( + self, namespace: str, lowercase: bool = True, trim_namespace: bool = True + ) -> t.Dict[str, t.Any]: + """Returns a dictionary containing a subset of configuration options + that match the specified namespace/prefix. Example usage:: + + app.config['IMAGE_STORE_TYPE'] = 'fs' + app.config['IMAGE_STORE_PATH'] = '/var/app/images' + app.config['IMAGE_STORE_BASE_URL'] = 'http://img.website.com' + image_store_config = app.config.get_namespace('IMAGE_STORE_') + + The resulting dictionary `image_store_config` would look like:: + + { + 'type': 'fs', + 'path': '/var/app/images', + 'base_url': 'http://img.website.com' + } + + This is often useful when configuration options map directly to + keyword arguments in functions or class constructors. 
+ + :param namespace: a configuration namespace + :param lowercase: a flag indicating if the keys of the resulting + dictionary should be lowercase + :param trim_namespace: a flag indicating if the keys of the resulting + dictionary should not include the namespace + + .. versionadded:: 0.11 + """ + rv = {} + for k, v in self.items(): + if not k.startswith(namespace): + continue + if trim_namespace: + key = k[len(namespace) :] + else: + key = k + if lowercase: + key = key.lower() + rv[key] = v + return rv + + def __repr__(self) -> str: + return f"<{type(self).__name__} {dict.__repr__(self)}>" diff --git a/venv/lib/python3.11/site-packages/flask/ctx.py b/venv/lib/python3.11/site-packages/flask/ctx.py new file mode 100644 index 0000000..c79c26d --- /dev/null +++ b/venv/lib/python3.11/site-packages/flask/ctx.py @@ -0,0 +1,438 @@ +import contextvars +import sys +import typing as t +from functools import update_wrapper +from types import TracebackType + +from werkzeug.exceptions import HTTPException + +from . import typing as ft +from .globals import _cv_app +from .globals import _cv_request +from .signals import appcontext_popped +from .signals import appcontext_pushed + +if t.TYPE_CHECKING: # pragma: no cover + from .app import Flask + from .sessions import SessionMixin + from .wrappers import Request + + +# a singleton sentinel value for parameter defaults +_sentinel = object() + + +class _AppCtxGlobals: + """A plain object. Used as a namespace for storing data during an + application context. + + Creating an app context automatically creates this object, which is + made available as the :data:`g` proxy. + + .. describe:: 'key' in g + + Check whether an attribute is present. + + .. versionadded:: 0.10 + + .. describe:: iter(g) + + Return an iterator over the attribute names. + + .. versionadded:: 0.10 + """ + + # Define attr methods to let mypy know this is a namespace object + # that has arbitrary attributes. + + def __getattr__(self, name: str) -> t.Any: + try: + return self.__dict__[name] + except KeyError: + raise AttributeError(name) from None + + def __setattr__(self, name: str, value: t.Any) -> None: + self.__dict__[name] = value + + def __delattr__(self, name: str) -> None: + try: + del self.__dict__[name] + except KeyError: + raise AttributeError(name) from None + + def get(self, name: str, default: t.Optional[t.Any] = None) -> t.Any: + """Get an attribute by name, or a default value. Like + :meth:`dict.get`. + + :param name: Name of attribute to get. + :param default: Value to return if the attribute is not present. + + .. versionadded:: 0.10 + """ + return self.__dict__.get(name, default) + + def pop(self, name: str, default: t.Any = _sentinel) -> t.Any: + """Get and remove an attribute by name. Like :meth:`dict.pop`. + + :param name: Name of attribute to pop. + :param default: Value to return if the attribute is not present, + instead of raising a ``KeyError``. + + .. versionadded:: 0.11 + """ + if default is _sentinel: + return self.__dict__.pop(name) + else: + return self.__dict__.pop(name, default) + + def setdefault(self, name: str, default: t.Any = None) -> t.Any: + """Get the value of an attribute if it is present, otherwise + set and return a default value. Like :meth:`dict.setdefault`. + + :param name: Name of attribute to get. + :param default: Value to set and return if the attribute is not + present. + + .. 
versionadded:: 0.11 + """ + return self.__dict__.setdefault(name, default) + + def __contains__(self, item: str) -> bool: + return item in self.__dict__ + + def __iter__(self) -> t.Iterator[str]: + return iter(self.__dict__) + + def __repr__(self) -> str: + ctx = _cv_app.get(None) + if ctx is not None: + return f"<flask.g of '{ctx.app.name}'>" + return object.__repr__(self) + + +def after_this_request(f: ft.AfterRequestCallable) -> ft.AfterRequestCallable: + """Executes a function after this request. This is useful to modify + response objects. The function is passed the response object and has + to return the same or a new one. + + Example:: + + @app.route('/') + def index(): + @after_this_request + def add_header(response): + response.headers['X-Foo'] = 'Parachute' + return response + return 'Hello World!' + + This is more useful if a function other than the view function wants to + modify a response. For instance think of a decorator that wants to add + some headers without converting the return value into a response object. + + .. versionadded:: 0.9 + """ + ctx = _cv_request.get(None) + + if ctx is None: + raise RuntimeError( + "'after_this_request' can only be used when a request" + " context is active, such as in a view function." + ) + + ctx._after_request_functions.append(f) + return f + + +def copy_current_request_context(f: t.Callable) -> t.Callable: + """A helper function that decorates a function to retain the current + request context. This is useful when working with greenlets. The moment + the function is decorated a copy of the request context is created and + then pushed when the function is called. The current session is also + included in the copied request context. + + Example:: + + import gevent + from flask import copy_current_request_context + + @app.route('/') + def index(): + @copy_current_request_context + def do_some_work(): + # do some work here, it can access flask.request or + # flask.session like you would otherwise in the view function. + ... + gevent.spawn(do_some_work) + return 'Regular response' + + .. versionadded:: 0.10 + """ + ctx = _cv_request.get(None) + + if ctx is None: + raise RuntimeError( + "'copy_current_request_context' can only be used when a" + " request context is active, such as in a view function." + ) + + ctx = ctx.copy() + + def wrapper(*args, **kwargs): + with ctx: + return ctx.app.ensure_sync(f)(*args, **kwargs) + + return update_wrapper(wrapper, f) + + +def has_request_context() -> bool: + """If you have code that wants to test if a request context is there or + not this function can be used. For instance, you may want to take advantage + of request information if the request object is available, but fail + silently if it is unavailable. + + :: + + class User(db.Model): + + def __init__(self, username, remote_addr=None): + self.username = username + if remote_addr is None and has_request_context(): + remote_addr = request.remote_addr + self.remote_addr = remote_addr + + Alternatively you can also just test any of the context bound objects + (such as :class:`request` or :class:`g`) for truthness:: + + class User(db.Model): + + def __init__(self, username, remote_addr=None): + self.username = username + if remote_addr is None and request: + remote_addr = request.remote_addr + self.remote_addr = remote_addr + + .. versionadded:: 0.7 + """ + return _cv_request.get(None) is not None + + +def has_app_context() -> bool: + """Works like :func:`has_request_context` but for the application + context.
You can also just do a boolean check on the + :data:`current_app` object instead. + + .. versionadded:: 0.9 + """ + return _cv_app.get(None) is not None + + +class AppContext: + """The app context contains application-specific information. An app + context is created and pushed at the beginning of each request if + one is not already active. An app context is also pushed when + running CLI commands. + """ + + def __init__(self, app: "Flask") -> None: + self.app = app + self.url_adapter = app.create_url_adapter(None) + self.g: _AppCtxGlobals = app.app_ctx_globals_class() + self._cv_tokens: t.List[contextvars.Token] = [] + + def push(self) -> None: + """Binds the app context to the current context.""" + self._cv_tokens.append(_cv_app.set(self)) + appcontext_pushed.send(self.app) + + def pop(self, exc: t.Optional[BaseException] = _sentinel) -> None: # type: ignore + """Pops the app context.""" + try: + if len(self._cv_tokens) == 1: + if exc is _sentinel: + exc = sys.exc_info()[1] + self.app.do_teardown_appcontext(exc) + finally: + ctx = _cv_app.get() + _cv_app.reset(self._cv_tokens.pop()) + + if ctx is not self: + raise AssertionError( + f"Popped wrong app context. ({ctx!r} instead of {self!r})" + ) + + appcontext_popped.send(self.app) + + def __enter__(self) -> "AppContext": + self.push() + return self + + def __exit__( + self, + exc_type: t.Optional[type], + exc_value: t.Optional[BaseException], + tb: t.Optional[TracebackType], + ) -> None: + self.pop(exc_value) + + +class RequestContext: + """The request context contains per-request information. The Flask + app creates and pushes it at the beginning of the request, then pops + it at the end of the request. It will create the URL adapter and + request object for the WSGI environment provided. + + Do not attempt to use this class directly, instead use + :meth:`~flask.Flask.test_request_context` and + :meth:`~flask.Flask.request_context` to create this object. + + When the request context is popped, it will evaluate all the + functions registered on the application for teardown execution + (:meth:`~flask.Flask.teardown_request`). + + The request context is automatically popped at the end of the + request. When using the interactive debugger, the context will be + restored so ``request`` is still accessible. Similarly, the test + client can preserve the context after the request ends. However, + teardown functions may already have closed some resources such as + database connections. + """ + + def __init__( + self, + app: "Flask", + environ: dict, + request: t.Optional["Request"] = None, + session: t.Optional["SessionMixin"] = None, + ) -> None: + self.app = app + if request is None: + request = app.request_class(environ) + request.json_module = app.json + self.request: Request = request + self.url_adapter = None + try: + self.url_adapter = app.create_url_adapter(self.request) + except HTTPException as e: + self.request.routing_exception = e + self.flashes: t.Optional[t.List[t.Tuple[str, str]]] = None + self.session: t.Optional["SessionMixin"] = session + # Functions that should be executed after the request on the response + # object. These will be called before the regular "after_request" + # functions. + self._after_request_functions: t.List[ft.AfterRequestCallable] = [] + + self._cv_tokens: t.List[t.Tuple[contextvars.Token, t.Optional[AppContext]]] = [] + + def copy(self) -> "RequestContext": + """Creates a copy of this request context with the same request object. + This can be used to move a request context to a different greenlet. 
+ Because the actual request object is the same this cannot be used to + move a request context to a different thread unless access to the + request object is locked. + + .. versionadded:: 0.10 + + .. versionchanged:: 1.1 + The current session object is used instead of reloading the original + data. This prevents `flask.session` pointing to an out-of-date object. + """ + return self.__class__( + self.app, + environ=self.request.environ, + request=self.request, + session=self.session, + ) + + def match_request(self) -> None: + """Can be overridden by a subclass to hook into the matching + of the request. + """ + try: + result = self.url_adapter.match(return_rule=True) # type: ignore + self.request.url_rule, self.request.view_args = result # type: ignore + except HTTPException as e: + self.request.routing_exception = e + + def push(self) -> None: + # Before we push the request context we have to ensure that there + # is an application context. + app_ctx = _cv_app.get(None) + + if app_ctx is None or app_ctx.app is not self.app: + app_ctx = self.app.app_context() + app_ctx.push() + else: + app_ctx = None + + self._cv_tokens.append((_cv_request.set(self), app_ctx)) + + # Open the session at the moment that the request context is available. + # This allows a custom open_session method to use the request context. + # Only open a new session if this is the first time the request was + # pushed, otherwise stream_with_context loses the session. + if self.session is None: + session_interface = self.app.session_interface + self.session = session_interface.open_session(self.app, self.request) + + if self.session is None: + self.session = session_interface.make_null_session(self.app) + + # Match the request URL after loading the session, so that the + # session is available in custom URL converters. + if self.url_adapter is not None: + self.match_request() + + def pop(self, exc: t.Optional[BaseException] = _sentinel) -> None: # type: ignore + """Pops the request context and unbinds it by doing that. This will + also trigger the execution of functions registered by the + :meth:`~flask.Flask.teardown_request` decorator. + + .. versionchanged:: 0.9 + Added the `exc` argument. + """ + clear_request = len(self._cv_tokens) == 1 + + try: + if clear_request: + if exc is _sentinel: + exc = sys.exc_info()[1] + self.app.do_teardown_request(exc) + + request_close = getattr(self.request, "close", None) + if request_close is not None: + request_close() + finally: + ctx = _cv_request.get() + token, app_ctx = self._cv_tokens.pop() + _cv_request.reset(token) + + # get rid of circular dependencies at the end of the request + # so that we don't require the GC to be active. + if clear_request: + ctx.request.environ["werkzeug.request"] = None + + if app_ctx is not None: + app_ctx.pop(exc) + + if ctx is not self: + raise AssertionError( + f"Popped wrong request context. 
({ctx!r} instead of {self!r})" + ) + + def __enter__(self) -> "RequestContext": + self.push() + return self + + def __exit__( + self, + exc_type: t.Optional[type], + exc_value: t.Optional[BaseException], + tb: t.Optional[TracebackType], + ) -> None: + self.pop(exc_value) + + def __repr__(self) -> str: + return ( + f"<{type(self).__name__} {self.request.url!r}" + f" [{self.request.method}] of {self.app.name}>" + ) diff --git a/venv/lib/python3.11/site-packages/flask/debughelpers.py b/venv/lib/python3.11/site-packages/flask/debughelpers.py new file mode 100644 index 0000000..b063989 --- /dev/null +++ b/venv/lib/python3.11/site-packages/flask/debughelpers.py @@ -0,0 +1,158 @@ +import typing as t + +from .app import Flask +from .blueprints import Blueprint +from .globals import request_ctx + + +class UnexpectedUnicodeError(AssertionError, UnicodeError): + """Raised in places where we want some better error reporting for + unexpected unicode or binary data. + """ + + +class DebugFilesKeyError(KeyError, AssertionError): + """Raised from request.files during debugging. The idea is that it can + provide a better error message than just a generic KeyError/BadRequest. + """ + + def __init__(self, request, key): + form_matches = request.form.getlist(key) + buf = [ + f"You tried to access the file {key!r} in the request.files" + " dictionary but it does not exist. The mimetype for the" + f" request is {request.mimetype!r} instead of" + " 'multipart/form-data' which means that no file contents" + " were transmitted. To fix this error you should provide" + ' enctype="multipart/form-data" in your form.' + ] + if form_matches: + names = ", ".join(repr(x) for x in form_matches) + buf.append( + "\n\nThe browser instead transmitted some file names. " + f"This was submitted: {names}" + ) + self.msg = "".join(buf) + + def __str__(self): + return self.msg + + +class FormDataRoutingRedirect(AssertionError): + """This exception is raised in debug mode if a routing redirect + would cause the browser to drop the method or body. This happens + when method is not GET, HEAD or OPTIONS and the status code is not + 307 or 308. + """ + + def __init__(self, request): + exc = request.routing_exception + buf = [ + f"A request was sent to '{request.url}', but routing issued" + f" a redirect to the canonical URL '{exc.new_url}'." + ] + + if f"{request.base_url}/" == exc.new_url.partition("?")[0]: + buf.append( + " The URL was defined with a trailing slash. Flask" + " will redirect to the URL with a trailing slash if it" + " was accessed without one." + ) + + buf.append( + " Send requests to the canonical URL, or use 307 or 308 for" + " routing redirects. Otherwise, browsers will drop form" + " data.\n\n" + "This exception is only raised in debug mode." + ) + super().__init__("".join(buf)) + + +def attach_enctype_error_multidict(request): + """Patch ``request.files.__getitem__`` to raise a descriptive error + about ``enctype=multipart/form-data``. + + :param request: The request to patch. 
+ :meta private: + """ + oldcls = request.files.__class__ + + class newcls(oldcls): + def __getitem__(self, key): + try: + return super().__getitem__(key) + except KeyError as e: + if key not in request.form: + raise + + raise DebugFilesKeyError(request, key).with_traceback( + e.__traceback__ + ) from None + + newcls.__name__ = oldcls.__name__ + newcls.__module__ = oldcls.__module__ + request.files.__class__ = newcls + + +def _dump_loader_info(loader) -> t.Generator: + yield f"class: {type(loader).__module__}.{type(loader).__name__}" + for key, value in sorted(loader.__dict__.items()): + if key.startswith("_"): + continue + if isinstance(value, (tuple, list)): + if not all(isinstance(x, str) for x in value): + continue + yield f"{key}:" + for item in value: + yield f" - {item}" + continue + elif not isinstance(value, (str, int, float, bool)): + continue + yield f"{key}: {value!r}" + + +def explain_template_loading_attempts(app: Flask, template, attempts) -> None: + """This should help developers understand what failed""" + info = [f"Locating template {template!r}:"] + total_found = 0 + blueprint = None + if request_ctx and request_ctx.request.blueprint is not None: + blueprint = request_ctx.request.blueprint + + for idx, (loader, srcobj, triple) in enumerate(attempts): + if isinstance(srcobj, Flask): + src_info = f"application {srcobj.import_name!r}" + elif isinstance(srcobj, Blueprint): + src_info = f"blueprint {srcobj.name!r} ({srcobj.import_name})" + else: + src_info = repr(srcobj) + + info.append(f"{idx + 1:5}: trying loader of {src_info}") + + for line in _dump_loader_info(loader): + info.append(f" {line}") + + if triple is None: + detail = "no match" + else: + detail = f"found ({triple[1] or ''!r})" + total_found += 1 + info.append(f" -> {detail}") + + seems_fishy = False + if total_found == 0: + info.append("Error: the template could not be found.") + seems_fishy = True + elif total_found > 1: + info.append("Warning: multiple loaders returned a match for the template.") + seems_fishy = True + + if blueprint is not None and seems_fishy: + info.append( + " The template was looked up from an endpoint that belongs" + f" to the blueprint {blueprint!r}." + ) + info.append(" Maybe you did not place a template in the right folder?") + info.append(" See https://flask.palletsprojects.com/blueprints/#templates") + + app.logger.info("\n".join(info)) diff --git a/venv/lib/python3.11/site-packages/flask/globals.py b/venv/lib/python3.11/site-packages/flask/globals.py new file mode 100644 index 0000000..254da42 --- /dev/null +++ b/venv/lib/python3.11/site-packages/flask/globals.py @@ -0,0 +1,107 @@ +import typing as t +from contextvars import ContextVar + +from werkzeug.local import LocalProxy + +if t.TYPE_CHECKING: # pragma: no cover + from .app import Flask + from .ctx import _AppCtxGlobals + from .ctx import AppContext + from .ctx import RequestContext + from .sessions import SessionMixin + from .wrappers import Request + + +class _FakeStack: + def __init__(self, name: str, cv: ContextVar[t.Any]) -> None: + self.name = name + self.cv = cv + + def _warn(self): + import warnings + + warnings.warn( + f"'_{self.name}_ctx_stack' is deprecated and will be" + " removed in Flask 2.3. 
Use 'g' to store data, or" + f" '{self.name}_ctx' to access the current context.", + DeprecationWarning, + stacklevel=3, + ) + + def push(self, obj: t.Any) -> None: + self._warn() + self.cv.set(obj) + + def pop(self) -> t.Any: + self._warn() + ctx = self.cv.get(None) + self.cv.set(None) + return ctx + + @property + def top(self) -> t.Optional[t.Any]: + self._warn() + return self.cv.get(None) + + +_no_app_msg = """\ +Working outside of application context. + +This typically means that you attempted to use functionality that needed +the current application. To solve this, set up an application context +with app.app_context(). See the documentation for more information.\ +""" +_cv_app: ContextVar["AppContext"] = ContextVar("flask.app_ctx") +__app_ctx_stack = _FakeStack("app", _cv_app) +app_ctx: "AppContext" = LocalProxy( # type: ignore[assignment] + _cv_app, unbound_message=_no_app_msg +) +current_app: "Flask" = LocalProxy( # type: ignore[assignment] + _cv_app, "app", unbound_message=_no_app_msg +) +g: "_AppCtxGlobals" = LocalProxy( # type: ignore[assignment] + _cv_app, "g", unbound_message=_no_app_msg +) + +_no_req_msg = """\ +Working outside of request context. + +This typically means that you attempted to use functionality that needed +an active HTTP request. Consult the documentation on testing for +information about how to avoid this problem.\ +""" +_cv_request: ContextVar["RequestContext"] = ContextVar("flask.request_ctx") +__request_ctx_stack = _FakeStack("request", _cv_request) +request_ctx: "RequestContext" = LocalProxy( # type: ignore[assignment] + _cv_request, unbound_message=_no_req_msg +) +request: "Request" = LocalProxy( # type: ignore[assignment] + _cv_request, "request", unbound_message=_no_req_msg +) +session: "SessionMixin" = LocalProxy( # type: ignore[assignment] + _cv_request, "session", unbound_message=_no_req_msg +) + + +def __getattr__(name: str) -> t.Any: + if name == "_app_ctx_stack": + import warnings + + warnings.warn( + "'_app_ctx_stack' is deprecated and will be removed in Flask 2.3.", + DeprecationWarning, + stacklevel=2, + ) + return __app_ctx_stack + + if name == "_request_ctx_stack": + import warnings + + warnings.warn( + "'_request_ctx_stack' is deprecated and will be removed in Flask 2.3.", + DeprecationWarning, + stacklevel=2, + ) + return __request_ctx_stack + + raise AttributeError(name) diff --git a/venv/lib/python3.11/site-packages/flask/helpers.py b/venv/lib/python3.11/site-packages/flask/helpers.py new file mode 100644 index 0000000..3833cb8 --- /dev/null +++ b/venv/lib/python3.11/site-packages/flask/helpers.py @@ -0,0 +1,705 @@ +import os +import pkgutil +import socket +import sys +import typing as t +from datetime import datetime +from functools import lru_cache +from functools import update_wrapper +from threading import RLock + +import werkzeug.utils +from werkzeug.exceptions import abort as _wz_abort +from werkzeug.utils import redirect as _wz_redirect + +from .globals import _cv_request +from .globals import current_app +from .globals import request +from .globals import request_ctx +from .globals import session +from .signals import message_flashed + +if t.TYPE_CHECKING: # pragma: no cover + from werkzeug.wrappers import Response as BaseResponse + from .wrappers import Response + import typing_extensions as te + + +def get_env() -> str: + """Get the environment the app is running in, indicated by the + :envvar:`FLASK_ENV` environment variable. The default is + ``'production'``. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. 
+ """ + import warnings + + warnings.warn( + "'FLASK_ENV' and 'get_env' are deprecated and will be removed" + " in Flask 2.3. Use 'FLASK_DEBUG' instead.", + DeprecationWarning, + stacklevel=2, + ) + return os.environ.get("FLASK_ENV") or "production" + + +def get_debug_flag() -> bool: + """Get whether debug mode should be enabled for the app, indicated by the + :envvar:`FLASK_DEBUG` environment variable. The default is ``False``. + """ + val = os.environ.get("FLASK_DEBUG") + + if not val: + env = os.environ.get("FLASK_ENV") + + if env is not None: + print( + "'FLASK_ENV' is deprecated and will not be used in" + " Flask 2.3. Use 'FLASK_DEBUG' instead.", + file=sys.stderr, + ) + return env == "development" + + return False + + return val.lower() not in {"0", "false", "no"} + + +def get_load_dotenv(default: bool = True) -> bool: + """Get whether the user has disabled loading default dotenv files by + setting :envvar:`FLASK_SKIP_DOTENV`. The default is ``True``, load + the files. + + :param default: What to return if the env var isn't set. + """ + val = os.environ.get("FLASK_SKIP_DOTENV") + + if not val: + return default + + return val.lower() in ("0", "false", "no") + + +def stream_with_context( + generator_or_function: t.Union[ + t.Iterator[t.AnyStr], t.Callable[..., t.Iterator[t.AnyStr]] + ] +) -> t.Iterator[t.AnyStr]: + """Request contexts disappear when the response is started on the server. + This is done for efficiency reasons and to make it less likely to encounter + memory leaks with badly written WSGI middlewares. The downside is that if + you are using streamed responses, the generator cannot access request bound + information any more. + + This function however can help you keep the context around for longer:: + + from flask import stream_with_context, request, Response + + @app.route('/stream') + def streamed_response(): + @stream_with_context + def generate(): + yield 'Hello ' + yield request.args['name'] + yield '!' + return Response(generate()) + + Alternatively it can also be used around a specific generator:: + + from flask import stream_with_context, request, Response + + @app.route('/stream') + def streamed_response(): + def generate(): + yield 'Hello ' + yield request.args['name'] + yield '!' + return Response(stream_with_context(generate())) + + .. versionadded:: 0.9 + """ + try: + gen = iter(generator_or_function) # type: ignore + except TypeError: + + def decorator(*args: t.Any, **kwargs: t.Any) -> t.Any: + gen = generator_or_function(*args, **kwargs) # type: ignore + return stream_with_context(gen) + + return update_wrapper(decorator, generator_or_function) # type: ignore + + def generator() -> t.Generator: + ctx = _cv_request.get(None) + if ctx is None: + raise RuntimeError( + "'stream_with_context' can only be used when a request" + " context is active, such as in a view function." + ) + with ctx: + # Dummy sentinel. Has to be inside the context block or we're + # not actually keeping the context around. + yield None + + # The try/finally is here so that if someone passes a WSGI level + # iterator in we're still running the cleanup logic. Generators + # don't need that because they are closed on their destruction + # automatically. + try: + yield from gen + finally: + if hasattr(gen, "close"): + gen.close() + + # The trick is to start the generator. Then the code execution runs until + # the first dummy None is yielded at which point the context was already + # pushed. This item is discarded. Then when the iteration continues the + # real generator is executed. 
+ wrapped_g = generator() + next(wrapped_g) + return wrapped_g + + +def make_response(*args: t.Any) -> "Response": + """Sometimes it is necessary to set additional headers in a view. Because + views do not have to return response objects but can return a value that + is converted into a response object by Flask itself, it becomes tricky to + add headers to it. This function can be called instead of using a return + and you will get a response object which you can use to attach headers. + + If view looked like this and you want to add a new header:: + + def index(): + return render_template('index.html', foo=42) + + You can now do something like this:: + + def index(): + response = make_response(render_template('index.html', foo=42)) + response.headers['X-Parachutes'] = 'parachutes are cool' + return response + + This function accepts the very same arguments you can return from a + view function. This for example creates a response with a 404 error + code:: + + response = make_response(render_template('not_found.html'), 404) + + The other use case of this function is to force the return value of a + view function into a response which is helpful with view + decorators:: + + response = make_response(view_function()) + response.headers['X-Parachutes'] = 'parachutes are cool' + + Internally this function does the following things: + + - if no arguments are passed, it creates a new response argument + - if one argument is passed, :meth:`flask.Flask.make_response` + is invoked with it. + - if more than one argument is passed, the arguments are passed + to the :meth:`flask.Flask.make_response` function as tuple. + + .. versionadded:: 0.6 + """ + if not args: + return current_app.response_class() + if len(args) == 1: + args = args[0] + return current_app.make_response(args) # type: ignore + + +def url_for( + endpoint: str, + *, + _anchor: t.Optional[str] = None, + _method: t.Optional[str] = None, + _scheme: t.Optional[str] = None, + _external: t.Optional[bool] = None, + **values: t.Any, +) -> str: + """Generate a URL to the given endpoint with the given values. + + This requires an active request or application context, and calls + :meth:`current_app.url_for() `. See that method + for full documentation. + + :param endpoint: The endpoint name associated with the URL to + generate. If this starts with a ``.``, the current blueprint + name (if any) will be used. + :param _anchor: If given, append this as ``#anchor`` to the URL. + :param _method: If given, generate the URL associated with this + method for the endpoint. + :param _scheme: If given, the URL will have this scheme if it is + external. + :param _external: If given, prefer the URL to be internal (False) or + require it to be external (True). External URLs include the + scheme and domain. When not in an active request, URLs are + external by default. + :param values: Values to use for the variable parts of the URL rule. + Unknown keys are appended as query string arguments, like + ``?a=b&c=d``. + + .. versionchanged:: 2.2 + Calls ``current_app.url_for``, allowing an app to override the + behavior. + + .. versionchanged:: 0.10 + The ``_scheme`` parameter was added. + + .. versionchanged:: 0.9 + The ``_anchor`` and ``_method`` parameters were added. + + .. versionchanged:: 0.9 + Calls ``app.handle_url_build_error`` on build errors. 
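+ + A short sketch; the ``show_post`` endpoint and its URL rule are + hypothetical, while ``static`` is the default static endpoint:: + + url_for("show_post", post_id=7) # e.g. '/post/7' + url_for("show_post", post_id=7, page=2) # e.g. '/post/7?page=2' + url_for("static", filename="style.css") # '/static/style.css'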
+ """ + return current_app.url_for( + endpoint, + _anchor=_anchor, + _method=_method, + _scheme=_scheme, + _external=_external, + **values, + ) + + +def redirect( + location: str, code: int = 302, Response: t.Optional[t.Type["BaseResponse"]] = None +) -> "BaseResponse": + """Create a redirect response object. + + If :data:`~flask.current_app` is available, it will use its + :meth:`~flask.Flask.redirect` method, otherwise it will use + :func:`werkzeug.utils.redirect`. + + :param location: The URL to redirect to. + :param code: The status code for the redirect. + :param Response: The response class to use. Not used when + ``current_app`` is active, which uses ``app.response_class``. + + .. versionadded:: 2.2 + Calls ``current_app.redirect`` if available instead of always + using Werkzeug's default ``redirect``. + """ + if current_app: + return current_app.redirect(location, code=code) + + return _wz_redirect(location, code=code, Response=Response) + + +def abort( + code: t.Union[int, "BaseResponse"], *args: t.Any, **kwargs: t.Any +) -> "te.NoReturn": + """Raise an :exc:`~werkzeug.exceptions.HTTPException` for the given + status code. + + If :data:`~flask.current_app` is available, it will call its + :attr:`~flask.Flask.aborter` object, otherwise it will use + :func:`werkzeug.exceptions.abort`. + + :param code: The status code for the exception, which must be + registered in ``app.aborter``. + :param args: Passed to the exception. + :param kwargs: Passed to the exception. + + .. versionadded:: 2.2 + Calls ``current_app.aborter`` if available instead of always + using Werkzeug's default ``abort``. + """ + if current_app: + current_app.aborter(code, *args, **kwargs) + + _wz_abort(code, *args, **kwargs) + + +def get_template_attribute(template_name: str, attribute: str) -> t.Any: + """Loads a macro (or variable) a template exports. This can be used to + invoke a macro from within Python code. If you for example have a + template named :file:`_cider.html` with the following contents: + + .. sourcecode:: html+jinja + + {% macro hello(name) %}Hello {{ name }}!{% endmacro %} + + You can access this from Python code like this:: + + hello = get_template_attribute('_cider.html', 'hello') + return hello('World') + + .. versionadded:: 0.2 + + :param template_name: the name of the template + :param attribute: the name of the variable of macro to access + """ + return getattr(current_app.jinja_env.get_template(template_name).module, attribute) + + +def flash(message: str, category: str = "message") -> None: + """Flashes a message to the next request. In order to remove the + flashed message from the session and to display it to the user, + the template has to call :func:`get_flashed_messages`. + + .. versionchanged:: 0.3 + `category` parameter added. + + :param message: the message to be flashed. + :param category: the category for the message. The following values + are recommended: ``'message'`` for any kind of message, + ``'error'`` for errors, ``'info'`` for information + messages and ``'warning'`` for warnings. However any + kind of string can be used as category. + """ + # Original implementation: + # + # session.setdefault('_flashes', []).append((category, message)) + # + # This assumed that changes made to mutable structures in the session are + # always in sync with the session object, which is not true for session + # implementations that use external storage for keeping their keys/values. 
+ flashes = session.get("_flashes", []) + flashes.append((category, message)) + session["_flashes"] = flashes + message_flashed.send( + current_app._get_current_object(), # type: ignore + message=message, + category=category, + ) + + +def get_flashed_messages( + with_categories: bool = False, category_filter: t.Iterable[str] = () +) -> t.Union[t.List[str], t.List[t.Tuple[str, str]]]: + """Pulls all flashed messages from the session and returns them. + Further calls in the same request to the function will return + the same messages. By default just the messages are returned, + but when `with_categories` is set to ``True``, the return value will + be a list of tuples in the form ``(category, message)`` instead. + + Filter the flashed messages to one or more categories by providing those + categories in `category_filter`. This allows rendering categories in + separate html blocks. The `with_categories` and `category_filter` + arguments are distinct: + + * `with_categories` controls whether categories are returned with message + text (``True`` gives a tuple, where ``False`` gives just the message text). + * `category_filter` filters the messages down to only those matching the + provided categories. + + See :doc:`/patterns/flashing` for examples. + + .. versionchanged:: 0.3 + `with_categories` parameter added. + + .. versionchanged:: 0.9 + `category_filter` parameter added. + + :param with_categories: set to ``True`` to also receive categories. + :param category_filter: filter of categories to limit return values. Only + categories in the list will be returned. + """ + flashes = request_ctx.flashes + if flashes is None: + flashes = session.pop("_flashes") if "_flashes" in session else [] + request_ctx.flashes = flashes + if category_filter: + flashes = list(filter(lambda f: f[0] in category_filter, flashes)) + if not with_categories: + return [x[1] for x in flashes] + return flashes + + +def _prepare_send_file_kwargs(**kwargs: t.Any) -> t.Dict[str, t.Any]: + if kwargs.get("max_age") is None: + kwargs["max_age"] = current_app.get_send_file_max_age + + kwargs.update( + environ=request.environ, + use_x_sendfile=current_app.config["USE_X_SENDFILE"], + response_class=current_app.response_class, + _root_path=current_app.root_path, # type: ignore + ) + return kwargs + + +def send_file( + path_or_file: t.Union[os.PathLike, str, t.BinaryIO], + mimetype: t.Optional[str] = None, + as_attachment: bool = False, + download_name: t.Optional[str] = None, + conditional: bool = True, + etag: t.Union[bool, str] = True, + last_modified: t.Optional[t.Union[datetime, int, float]] = None, + max_age: t.Optional[ + t.Union[int, t.Callable[[t.Optional[str]], t.Optional[int]]] + ] = None, +) -> "Response": + """Send the contents of a file to the client. + + The first argument can be a file path or a file-like object. Paths + are preferred in most cases because Werkzeug can manage the file and + get extra information from the path. Passing a file-like object + requires that the file is opened in binary mode, and is mostly + useful when building a file in memory with :class:`io.BytesIO`. + + Never pass file paths provided by a user. The path is assumed to be + trusted, so a user could craft a path to access a file you didn't + intend. Use :func:`send_from_directory` to safely serve + user-requested paths from within a directory. + + If the WSGI server sets a ``file_wrapper`` in ``environ``, it is + used, otherwise Werkzeug's built-in wrapper is used. 
Alternatively, + if the HTTP server supports ``X-Sendfile``, configuring Flask with + ``USE_X_SENDFILE = True`` will tell the server to send the given + path, which is much more efficient than reading it in Python. + + :param path_or_file: The path to the file to send, relative to the + current working directory if a relative path is given. + Alternatively, a file-like object opened in binary mode. Make + sure the file pointer is seeked to the start of the data. + :param mimetype: The MIME type to send for the file. If not + provided, it will try to detect it from the file name. + :param as_attachment: Indicate to a browser that it should offer to + save the file instead of displaying it. + :param download_name: The default name browsers will use when saving + the file. Defaults to the passed file name. + :param conditional: Enable conditional and range responses based on + request headers. Requires passing a file path and ``environ``. + :param etag: Calculate an ETag for the file, which requires passing + a file path. Can also be a string to use instead. + :param last_modified: The last modified time to send for the file, + in seconds. If not provided, it will try to detect it from the + file path. + :param max_age: How long the client should cache the file, in + seconds. If set, ``Cache-Control`` will be ``public``, otherwise + it will be ``no-cache`` to prefer conditional caching. + + .. versionchanged:: 2.0 + ``download_name`` replaces the ``attachment_filename`` + parameter. If ``as_attachment=False``, it is passed with + ``Content-Disposition: inline`` instead. + + .. versionchanged:: 2.0 + ``max_age`` replaces the ``cache_timeout`` parameter. + ``conditional`` is enabled and ``max_age`` is not set by + default. + + .. versionchanged:: 2.0 + ``etag`` replaces the ``add_etags`` parameter. It can be a + string to use instead of generating one. + + .. versionchanged:: 2.0 + Passing a file-like object that inherits from + :class:`~io.TextIOBase` will raise a :exc:`ValueError` rather + than sending an empty file. + + .. versionadded:: 2.0 + Moved the implementation to Werkzeug. This is now a wrapper to + pass some Flask-specific arguments. + + .. versionchanged:: 1.1 + ``filename`` may be a :class:`~os.PathLike` object. + + .. versionchanged:: 1.1 + Passing a :class:`~io.BytesIO` object supports range requests. + + .. versionchanged:: 1.0.3 + Filenames are encoded with ASCII instead of Latin-1 for broader + compatibility with WSGI servers. + + .. versionchanged:: 1.0 + UTF-8 filenames as specified in :rfc:`2231` are supported. + + .. versionchanged:: 0.12 + The filename is no longer automatically inferred from file + objects. If you want to use automatic MIME and etag support, + pass a filename via ``filename_or_fp`` or + ``attachment_filename``. + + .. versionchanged:: 0.12 + ``attachment_filename`` is preferred over ``filename`` for MIME + detection. + + .. versionchanged:: 0.9 + ``cache_timeout`` defaults to + :meth:`Flask.get_send_file_max_age`. + + .. versionchanged:: 0.7 + MIME guessing and etag support for file-like objects was + deprecated because it was unreliable. Pass a filename if you are + able to, otherwise attach an etag yourself. + + .. versionchanged:: 0.5 + The ``add_etags``, ``cache_timeout`` and ``conditional`` + parameters were added. The default behavior is to add etags. + + .. 
versionadded:: 0.2 + """ + return werkzeug.utils.send_file( # type: ignore[return-value] + **_prepare_send_file_kwargs( + path_or_file=path_or_file, + environ=request.environ, + mimetype=mimetype, + as_attachment=as_attachment, + download_name=download_name, + conditional=conditional, + etag=etag, + last_modified=last_modified, + max_age=max_age, + ) + ) + + +def send_from_directory( + directory: t.Union[os.PathLike, str], + path: t.Union[os.PathLike, str], + **kwargs: t.Any, +) -> "Response": + """Send a file from within a directory using :func:`send_file`. + + .. code-block:: python + + @app.route("/uploads/") + def download_file(name): + return send_from_directory( + app.config['UPLOAD_FOLDER'], name, as_attachment=True + ) + + This is a secure way to serve files from a folder, such as static + files or uploads. Uses :func:`~werkzeug.security.safe_join` to + ensure the path coming from the client is not maliciously crafted to + point outside the specified directory. + + If the final path does not point to an existing regular file, + raises a 404 :exc:`~werkzeug.exceptions.NotFound` error. + + :param directory: The directory that ``path`` must be located under, + relative to the current application's root path. + :param path: The path to the file to send, relative to + ``directory``. + :param kwargs: Arguments to pass to :func:`send_file`. + + .. versionchanged:: 2.0 + ``path`` replaces the ``filename`` parameter. + + .. versionadded:: 2.0 + Moved the implementation to Werkzeug. This is now a wrapper to + pass some Flask-specific arguments. + + .. versionadded:: 0.5 + """ + return werkzeug.utils.send_from_directory( # type: ignore[return-value] + directory, path, **_prepare_send_file_kwargs(**kwargs) + ) + + +def get_root_path(import_name: str) -> str: + """Find the root path of a package, or the path that contains a + module. If it cannot be found, returns the current working + directory. + + Not to be confused with the value returned by :func:`find_package`. + + :meta private: + """ + # Module already imported and has a file attribute. Use that first. + mod = sys.modules.get(import_name) + + if mod is not None and hasattr(mod, "__file__") and mod.__file__ is not None: + return os.path.dirname(os.path.abspath(mod.__file__)) + + # Next attempt: check the loader. + loader = pkgutil.get_loader(import_name) + + # Loader does not exist or we're referring to an unloaded main + # module or a main module without path (interactive sessions), go + # with the current working directory. + if loader is None or import_name == "__main__": + return os.getcwd() + + if hasattr(loader, "get_filename"): + filepath = loader.get_filename(import_name) + else: + # Fall back to imports. + __import__(import_name) + mod = sys.modules[import_name] + filepath = getattr(mod, "__file__", None) + + # If we don't have a file path it might be because it is a + # namespace package. In this case pick the root path from the + # first module that is contained in the package. + if filepath is None: + raise RuntimeError( + "No root path can be found for the provided module" + f" {import_name!r}. This can happen because the module" + " came from an import hook that does not provide file" + " name information or because it's a namespace package." + " In this case the root path needs to be explicitly" + " provided." + ) + + # filepath is import_name.py for a module, or __init__.py for a package. 
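+    # (Illustrative: for ``import_name="flask"`` this might be
+    # ".../site-packages/flask/__init__.py", giving ".../site-packages/flask".)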
+ return os.path.dirname(os.path.abspath(filepath)) + + +class locked_cached_property(werkzeug.utils.cached_property): + """A :func:`property` that is only evaluated once. Like + :class:`werkzeug.utils.cached_property` except access uses a lock + for thread safety. + + .. versionchanged:: 2.0 + Inherits from Werkzeug's ``cached_property`` (and ``property``). + """ + + def __init__( + self, + fget: t.Callable[[t.Any], t.Any], + name: t.Optional[str] = None, + doc: t.Optional[str] = None, + ) -> None: + super().__init__(fget, name=name, doc=doc) + self.lock = RLock() + + def __get__(self, obj: object, type: type = None) -> t.Any: # type: ignore + if obj is None: + return self + + with self.lock: + return super().__get__(obj, type=type) + + def __set__(self, obj: object, value: t.Any) -> None: + with self.lock: + super().__set__(obj, value) + + def __delete__(self, obj: object) -> None: + with self.lock: + super().__delete__(obj) + + +def is_ip(value: str) -> bool: + """Determine if the given string is an IP address. + + :param value: value to check + :type value: str + + :return: True if string is an IP address + :rtype: bool + """ + for family in (socket.AF_INET, socket.AF_INET6): + try: + socket.inet_pton(family, value) + except OSError: + pass + else: + return True + + return False + + +@lru_cache(maxsize=None) +def _split_blueprint_path(name: str) -> t.List[str]: + out: t.List[str] = [name] + + if "." in name: + out.extend(_split_blueprint_path(name.rpartition(".")[0])) + + return out diff --git a/venv/lib/python3.11/site-packages/flask/json/__init__.py b/venv/lib/python3.11/site-packages/flask/json/__init__.py new file mode 100644 index 0000000..65d8829 --- /dev/null +++ b/venv/lib/python3.11/site-packages/flask/json/__init__.py @@ -0,0 +1,342 @@ +from __future__ import annotations + +import json as _json +import typing as t + +from jinja2.utils import htmlsafe_json_dumps as _jinja_htmlsafe_dumps + +from ..globals import current_app +from .provider import _default + +if t.TYPE_CHECKING: # pragma: no cover + from ..app import Flask + from ..wrappers import Response + + +class JSONEncoder(_json.JSONEncoder): + """The default JSON encoder. Handles extra types compared to the + built-in :class:`json.JSONEncoder`. + + - :class:`datetime.datetime` and :class:`datetime.date` are + serialized to :rfc:`822` strings. This is the same as the HTTP + date format. + - :class:`decimal.Decimal` is serialized to a string. + - :class:`uuid.UUID` is serialized to a string. + - :class:`dataclasses.dataclass` is passed to + :func:`dataclasses.asdict`. + - :class:`~markupsafe.Markup` (or any object with a ``__html__`` + method) will call the ``__html__`` method to get a string. + + Assign a subclass of this to :attr:`flask.Flask.json_encoder` or + :attr:`flask.Blueprint.json_encoder` to override the default. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. Use ``app.json`` instead. + """ + + def __init__(self, **kwargs) -> None: + import warnings + + warnings.warn( + "'JSONEncoder' is deprecated and will be removed in" + " Flask 2.3. Use 'Flask.json' to provide an alternate" + " JSON implementation instead.", + DeprecationWarning, + stacklevel=3, + ) + super().__init__(**kwargs) + + def default(self, o: t.Any) -> t.Any: + """Convert ``o`` to a JSON serializable type. See + :meth:`json.JSONEncoder.default`. Python does not support + overriding how basic types like ``str`` or ``list`` are + serialized, they are handled before this method. 
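+
+        A sketch of the conversions listed above (illustrative)::
+
+            JSONEncoder().default(uuid.UUID(int=0))
+            # -> "00000000-0000-0000-0000-000000000000"
+            JSONEncoder().default(decimal.Decimal("1.5"))
+            # -> "1.5"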
+ """ + return _default(o) + + +class JSONDecoder(_json.JSONDecoder): + """The default JSON decoder. + + This does not change any behavior from the built-in + :class:`json.JSONDecoder`. + + Assign a subclass of this to :attr:`flask.Flask.json_decoder` or + :attr:`flask.Blueprint.json_decoder` to override the default. + + .. deprecated:: 2.2 + Will be removed in Flask 2.3. Use ``app.json`` instead. + """ + + def __init__(self, **kwargs) -> None: + import warnings + + warnings.warn( + "'JSONDecoder' is deprecated and will be removed in" + " Flask 2.3. Use 'Flask.json' to provide an alternate" + " JSON implementation instead.", + DeprecationWarning, + stacklevel=3, + ) + super().__init__(**kwargs) + + +def dumps(obj: t.Any, *, app: Flask | None = None, **kwargs: t.Any) -> str: + """Serialize data as JSON. + + If :data:`~flask.current_app` is available, it will use its + :meth:`app.json.dumps() ` + method, otherwise it will use :func:`json.dumps`. + + :param obj: The data to serialize. + :param kwargs: Arguments passed to the ``dumps`` implementation. + + .. versionchanged:: 2.2 + Calls ``current_app.json.dumps``, allowing an app to override + the behavior. + + .. versionchanged:: 2.2 + The ``app`` parameter will be removed in Flask 2.3. + + .. versionchanged:: 2.0.2 + :class:`decimal.Decimal` is supported by converting to a string. + + .. versionchanged:: 2.0 + ``encoding`` will be removed in Flask 2.1. + + .. versionchanged:: 1.0.3 + ``app`` can be passed directly, rather than requiring an app + context for configuration. + """ + if app is not None: + import warnings + + warnings.warn( + "The 'app' parameter is deprecated and will be removed in" + " Flask 2.3. Call 'app.json.dumps' directly instead.", + DeprecationWarning, + stacklevel=2, + ) + else: + app = current_app + + if app: + return app.json.dumps(obj, **kwargs) + + kwargs.setdefault("default", _default) + return _json.dumps(obj, **kwargs) + + +def dump( + obj: t.Any, fp: t.IO[str], *, app: Flask | None = None, **kwargs: t.Any +) -> None: + """Serialize data as JSON and write to a file. + + If :data:`~flask.current_app` is available, it will use its + :meth:`app.json.dump() ` + method, otherwise it will use :func:`json.dump`. + + :param obj: The data to serialize. + :param fp: A file opened for writing text. Should use the UTF-8 + encoding to be valid JSON. + :param kwargs: Arguments passed to the ``dump`` implementation. + + .. versionchanged:: 2.2 + Calls ``current_app.json.dump``, allowing an app to override + the behavior. + + .. versionchanged:: 2.2 + The ``app`` parameter will be removed in Flask 2.3. + + .. versionchanged:: 2.0 + Writing to a binary file, and the ``encoding`` argument, will be + removed in Flask 2.1. + """ + if app is not None: + import warnings + + warnings.warn( + "The 'app' parameter is deprecated and will be removed in" + " Flask 2.3. Call 'app.json.dump' directly instead.", + DeprecationWarning, + stacklevel=2, + ) + else: + app = current_app + + if app: + app.json.dump(obj, fp, **kwargs) + else: + kwargs.setdefault("default", _default) + _json.dump(obj, fp, **kwargs) + + +def loads(s: str | bytes, *, app: Flask | None = None, **kwargs: t.Any) -> t.Any: + """Deserialize data as JSON. + + If :data:`~flask.current_app` is available, it will use its + :meth:`app.json.loads() ` + method, otherwise it will use :func:`json.loads`. + + :param s: Text or UTF-8 bytes. + :param kwargs: Arguments passed to the ``loads`` implementation. + + .. 
versionchanged:: 2.2 + Calls ``current_app.json.loads``, allowing an app to override + the behavior. + + .. versionchanged:: 2.2 + The ``app`` parameter will be removed in Flask 2.3. + + .. versionchanged:: 2.0 + ``encoding`` will be removed in Flask 2.1. The data must be a + string or UTF-8 bytes. + + .. versionchanged:: 1.0.3 + ``app`` can be passed directly, rather than requiring an app + context for configuration. + """ + if app is not None: + import warnings + + warnings.warn( + "The 'app' parameter is deprecated and will be removed in" + " Flask 2.3. Call 'app.json.loads' directly instead.", + DeprecationWarning, + stacklevel=2, + ) + else: + app = current_app + + if app: + return app.json.loads(s, **kwargs) + + return _json.loads(s, **kwargs) + + +def load(fp: t.IO[t.AnyStr], *, app: Flask | None = None, **kwargs: t.Any) -> t.Any: + """Deserialize data as JSON read from a file. + + If :data:`~flask.current_app` is available, it will use its + :meth:`app.json.load() ` + method, otherwise it will use :func:`json.load`. + + :param fp: A file opened for reading text or UTF-8 bytes. + :param kwargs: Arguments passed to the ``load`` implementation. + + .. versionchanged:: 2.2 + Calls ``current_app.json.load``, allowing an app to override + the behavior. + + .. versionchanged:: 2.2 + The ``app`` parameter will be removed in Flask 2.3. + + .. versionchanged:: 2.0 + ``encoding`` will be removed in Flask 2.1. The file must be text + mode, or binary mode with UTF-8 bytes. + """ + if app is not None: + import warnings + + warnings.warn( + "The 'app' parameter is deprecated and will be removed in" + " Flask 2.3. Call 'app.json.load' directly instead.", + DeprecationWarning, + stacklevel=2, + ) + else: + app = current_app + + if app: + return app.json.load(fp, **kwargs) + + return _json.load(fp, **kwargs) + + +def htmlsafe_dumps(obj: t.Any, **kwargs: t.Any) -> str: + """Serialize an object to a string of JSON with :func:`dumps`, then + replace HTML-unsafe characters with Unicode escapes and mark the + result safe with :class:`~markupsafe.Markup`. + + This is available in templates as the ``|tojson`` filter. + + The returned string is safe to render in HTML documents and + `` +
+""" + +__all__ = ["RecaptchaWidget"] + + +class RecaptchaWidget: + def recaptcha_html(self, public_key): + html = current_app.config.get("RECAPTCHA_HTML") + if html: + return Markup(html) + params = current_app.config.get("RECAPTCHA_PARAMETERS") + script = current_app.config.get("RECAPTCHA_SCRIPT") + if not script: + script = RECAPTCHA_SCRIPT_DEFAULT + if params: + script += "?" + urlencode(params) + attrs = current_app.config.get("RECAPTCHA_DATA_ATTRS", {}) + attrs["sitekey"] = public_key + snippet = " ".join(f'data-{k}="{attrs[k]}"' for k in attrs) # noqa: B028, B907 + div_class = current_app.config.get("RECAPTCHA_DIV_CLASS") + if not div_class: + div_class = RECAPTCHA_DIV_CLASS_DEFAULT + return Markup(RECAPTCHA_TEMPLATE % (script, div_class, snippet)) + + def __call__(self, field, error=None, **kwargs): + """Returns the recaptcha input HTML.""" + + try: + public_key = current_app.config["RECAPTCHA_PUBLIC_KEY"] + except KeyError: + raise RuntimeError("RECAPTCHA_PUBLIC_KEY config not set") from None + + return self.recaptcha_html(public_key) diff --git a/venv/lib/python3.11/site-packages/greenlet-3.2.1.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/greenlet-3.2.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet-3.2.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.11/site-packages/greenlet-3.2.1.dist-info/METADATA b/venv/lib/python3.11/site-packages/greenlet-3.2.1.dist-info/METADATA new file mode 100644 index 0000000..255db2a --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet-3.2.1.dist-info/METADATA @@ -0,0 +1,116 @@ +Metadata-Version: 2.4 +Name: greenlet +Version: 3.2.1 +Summary: Lightweight in-process concurrent programming +Home-page: https://greenlet.readthedocs.io/ +Author: Alexey Borzenkov +Author-email: snaury@gmail.com +Maintainer: Jason Madden +Maintainer-email: jason@seecoresoftware.com +License: MIT AND Python-2.0 +Project-URL: Bug Tracker, https://github.com/python-greenlet/greenlet/issues +Project-URL: Source Code, https://github.com/python-greenlet/greenlet/ +Project-URL: Documentation, https://greenlet.readthedocs.io/ +Project-URL: Changes, https://greenlet.readthedocs.io/en/latest/changes.html +Keywords: greenlet coroutine concurrency threads cooperative +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: Programming Language :: C +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Operating System :: OS Independent +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.9 +Description-Content-Type: text/x-rst +License-File: LICENSE +License-File: LICENSE.PSF +Provides-Extra: docs +Requires-Dist: Sphinx; extra == "docs" +Requires-Dist: furo; extra == "docs" +Provides-Extra: test +Requires-Dist: objgraph; extra == "test" +Requires-Dist: psutil; extra == "test" +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: description-content-type +Dynamic: home-page +Dynamic: keywords +Dynamic: license 
+Dynamic: license-file +Dynamic: maintainer +Dynamic: maintainer-email +Dynamic: platform +Dynamic: project-url +Dynamic: provides-extra +Dynamic: requires-python +Dynamic: summary + +.. This file is included into docs/history.rst + + +Greenlets are lightweight coroutines for in-process concurrent +programming. + +The "greenlet" package is a spin-off of `Stackless`_, a version of +CPython that supports micro-threads called "tasklets". Tasklets run +pseudo-concurrently (typically in a single or a few OS-level threads) +and are synchronized with data exchanges on "channels". + +A "greenlet", on the other hand, is a still more primitive notion of +micro-thread with no implicit scheduling; coroutines, in other words. +This is useful when you want to control exactly when your code runs. +You can build custom scheduled micro-threads on top of greenlet; +however, it seems that greenlets are useful on their own as a way to +make advanced control flow structures. For example, we can recreate +generators; the difference with Python's own generators is that our +generators can call nested functions and the nested functions can +yield values too. (Additionally, you don't need a "yield" keyword. See +the example in `test_generator.py +`_). + +Greenlets are provided as a C extension module for the regular unmodified +interpreter. + +.. _`Stackless`: http://www.stackless.com + + +Who is using Greenlet? +====================== + +There are several libraries that use Greenlet as a more flexible +alternative to Python's built in coroutine support: + + - `Concurrence`_ + - `Eventlet`_ + - `Gevent`_ + +.. _Concurrence: http://opensource.hyves.org/concurrence/ +.. _Eventlet: http://eventlet.net/ +.. _Gevent: http://www.gevent.org/ + +Getting Greenlet +================ + +The easiest way to get Greenlet is to install it with pip:: + + pip install greenlet + + +Source code archives and binary distributions are available on the +python package index at https://pypi.org/project/greenlet + +The source code repository is hosted on github: +https://github.com/python-greenlet/greenlet + +Documentation is available on readthedocs.org: +https://greenlet.readthedocs.io diff --git a/venv/lib/python3.11/site-packages/greenlet-3.2.1.dist-info/RECORD b/venv/lib/python3.11/site-packages/greenlet-3.2.1.dist-info/RECORD new file mode 100644 index 0000000..cfc3684 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet-3.2.1.dist-info/RECORD @@ -0,0 +1,120 @@ +../../../include/site/python3.11/greenlet/greenlet.h,sha256=sz5pYRSQqedgOt2AMgxLZdTjO-qcr_JMvgiEJR9IAJ8,4755 +greenlet-3.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +greenlet-3.2.1.dist-info/METADATA,sha256=siiBF54Eyo_UmnGY57I8K3BGmoJiSQjw7l_hJJLIE2o,4077 +greenlet-3.2.1.dist-info/RECORD,, +greenlet-3.2.1.dist-info/WHEEL,sha256=YEIQkAzxidqCAIe77o5t2zjQul117lbrKJ8UijL1I6c,152 +greenlet-3.2.1.dist-info/licenses/LICENSE,sha256=dpgx1uXfrywggC-sz_H6-0wgJd2PYlPfpH_K1Z1NCXk,1434 +greenlet-3.2.1.dist-info/licenses/LICENSE.PSF,sha256=5f88I8EQ5JTNfXNsEP2W1GJFe6_soxCEDbZScpjH1Gs,2424 +greenlet-3.2.1.dist-info/top_level.txt,sha256=YSnRsCRoO61JGlP57o8iKL6rdLWDWuiyKD8ekpWUsDc,9 +greenlet/CObjects.cpp,sha256=OPej1bWBgc4sRrTRQ2aFFML9pzDYKlKhlJSjsI0X_eU,3508 +greenlet/PyGreenlet.cpp,sha256=ogWsQ5VhSdItWRLLpWOgSuqYuM3QwQ4cVCxOQIgHx6E,23441 +greenlet/PyGreenlet.hpp,sha256=2ZQlOxYNoy7QwD7mppFoOXe_At56NIsJ0eNsE_hoSsw,1463 +greenlet/PyGreenletUnswitchable.cpp,sha256=PQE0fSZa_IOyUM44IESHkJoD2KtGW3dkhkmZSYY3WHs,4375 
+greenlet/PyModule.cpp,sha256=J2TH06dGcNEarioS6NbWXkdME8hJY05XVbdqLrfO5w4,8587 +greenlet/TBrokenGreenlet.cpp,sha256=smN26uC7ahAbNYiS10rtWPjCeTG4jevM8siA2sjJiXg,1021 +greenlet/TExceptionState.cpp,sha256=U7Ctw9fBdNraS0d174MoQW7bN-ae209Ta0JuiKpcpVI,1359 +greenlet/TGreenlet.cpp,sha256=HGYGKpmKYqQ842tASW-QaaV8wua4a5XV_quYKPDsV_Y,25731 +greenlet/TGreenlet.hpp,sha256=jlUrNW5POg7ivpAwABKQQNSJojX5FhO49dcBI-rav-E,27990 +greenlet/TGreenletGlobals.cpp,sha256=YyEmDjKf1g32bsL-unIUScFLnnA1fzLWf2gOMd-D0Zw,3264 +greenlet/TMainGreenlet.cpp,sha256=fvgb8HHB-FVTPEKjR1s_ifCZSpp5D5YQByik0CnIABg,3276 +greenlet/TPythonState.cpp,sha256=FxRdi76lTGXaQKWwkq82VaCfIRdF2Z-fh-TlRTMjYqg,15359 +greenlet/TStackState.cpp,sha256=V444I8Jj9DhQz-9leVW_9dtiSRjaE1NMlgDG02Xxq-Y,7381 +greenlet/TThreadState.hpp,sha256=2Jgg7DtGggMYR_x3CLAvAFf1mIdIDtQvSSItcdmX4ZQ,19131 +greenlet/TThreadStateCreator.hpp,sha256=uYTexDWooXSSgUc5uh-Mhm5BQi3-kR6CqpizvNynBFQ,2610 +greenlet/TThreadStateDestroy.cpp,sha256=36yBCAMq3beXTZd-XnFA7DwaHVSOx2vc28-nf0spysU,8169 +greenlet/TUserGreenlet.cpp,sha256=uemg0lwKXtYB0yzmvyYdIIAsKnNkifXM1OJ2OlrFP1A,23553 +greenlet/__init__.py,sha256=5XdAT8H-WgxtWGP8Q2iGzZvJTtnYqQeacpS7-_e8jRI,1723 +greenlet/__pycache__/__init__.cpython-311.pyc,, +greenlet/_greenlet.cpython-311-x86_64-linux-gnu.so,sha256=Y4YcF7dmgm2voSA5b__W51VbznVJFhvZrIwn5l3qCx8,1365192 +greenlet/greenlet.cpp,sha256=WdItb1yWL9WNsTqJNf0Iw8ZwDHD49pkDP0rIRGBg2pw,10996 +greenlet/greenlet.h,sha256=sz5pYRSQqedgOt2AMgxLZdTjO-qcr_JMvgiEJR9IAJ8,4755 +greenlet/greenlet_allocator.hpp,sha256=kxyWW4Qdwlrc7ufgdb5vd6Y7jhauQ699Kod0mqiO1iM,1582 +greenlet/greenlet_compiler_compat.hpp,sha256=nRxpLN9iNbnLVyFDeVmOwyeeNm6scQrOed1l7JQYMCM,4346 +greenlet/greenlet_cpython_compat.hpp,sha256=XrsoFv8nKavrdxly5_-q9lqGeE8d3wKt1YtldsAHAT8,4068 +greenlet/greenlet_exceptions.hpp,sha256=06Bx81DtVaJTa6RtiMcV141b-XHv4ppEgVItkblcLWY,4503 +greenlet/greenlet_internal.hpp,sha256=Ajc-_09W4xWzm9XfyXHAeQAFUgKGKsnJwYsTCoNy3ns,2709 +greenlet/greenlet_refs.hpp,sha256=OnbA91yZf3QHH6-eJccvoNDAaN-pQBMMrclFU1Ot3J4,34436 +greenlet/greenlet_slp_switch.hpp,sha256=kM1QHA2iV-gH4cFyN6lfIagHQxvJZjWOVJdIxRE3TlQ,3198 +greenlet/greenlet_thread_support.hpp,sha256=XUJ6ljWjf9OYyuOILiz8e_yHvT3fbaUiHdhiPNQUV4s,867 +greenlet/platform/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +greenlet/platform/__pycache__/__init__.cpython-311.pyc,, +greenlet/platform/setup_switch_x64_masm.cmd,sha256=ZpClUJeU0ujEPSTWNSepP0W2f9XiYQKA8QKSoVou8EU,143 +greenlet/platform/switch_aarch64_gcc.h,sha256=GKC0yWNXnbK2X--X6aguRCMj2Tg7hDU1Zkl3RljDvC8,4307 +greenlet/platform/switch_alpha_unix.h,sha256=Z-SvF8JQV3oxWT8JRbL9RFu4gRFxPdJ7cviM8YayMmw,671 +greenlet/platform/switch_amd64_unix.h,sha256=EcSFCBlodEBhqhKjcJqY_5Dn_jn7pKpkJlOvp7gFXLI,2748 +greenlet/platform/switch_arm32_gcc.h,sha256=Z3KkHszdgq6uU4YN3BxvKMG2AdDnovwCCNrqGWZ1Lyo,2479 +greenlet/platform/switch_arm32_ios.h,sha256=mm5_R9aXB92hyxzFRwB71M60H6AlvHjrpTrc72Pz3l8,1892 +greenlet/platform/switch_arm64_masm.asm,sha256=4kpTtfy7rfcr8j1CpJLAK21EtZpGDAJXWRU68HEy5A8,1245 +greenlet/platform/switch_arm64_masm.obj,sha256=DmLnIB_icoEHAz1naue_pJPTZgR9ElM7-Nmztr-o9_U,746 +greenlet/platform/switch_arm64_msvc.h,sha256=RqK5MHLmXI3Q-FQ7tm32KWnbDNZKnkJdq8CR89cz640,398 +greenlet/platform/switch_csky_gcc.h,sha256=kDikyiPpewP71KoBZQO_MukDTXTXBiC7x-hF0_2DL0w,1331 +greenlet/platform/switch_loongarch64_linux.h,sha256=7M-Dhc4Q8tRbJCJhalDLwU6S9Mx8MjmN1RbTDgIvQTM,779 +greenlet/platform/switch_m68k_gcc.h,sha256=VSa6NpZhvyyvF-Q58CTIWSpEDo4FKygOyTz00whctlw,928 
+greenlet/platform/switch_mips_unix.h,sha256=E0tYsqc5anDY1BhenU1l8DW-nVHC_BElzLgJw3TGtPk,1426 +greenlet/platform/switch_ppc64_aix.h,sha256=_BL0iyRr3ZA5iPlr3uk9SJ5sNRWGYLrXcZ5z-CE9anE,3860 +greenlet/platform/switch_ppc64_linux.h,sha256=0rriT5XyxPb0GqsSSn_bP9iQsnjsPbBmu0yqo5goSyQ,3815 +greenlet/platform/switch_ppc_aix.h,sha256=pHA4slEjUFP3J3SYm1TAlNPhgb2G_PAtax5cO8BEe1A,2941 +greenlet/platform/switch_ppc_linux.h,sha256=YwrlKUzxlXuiKMQqr6MFAV1bPzWnmvk6X1AqJZEpOWU,2759 +greenlet/platform/switch_ppc_macosx.h,sha256=Z6KN_ud0n6nC3ltJrNz2qtvER6vnRAVRNH9mdIDpMxY,2624 +greenlet/platform/switch_ppc_unix.h,sha256=-ZG7MSSPEA5N4qO9PQChtyEJ-Fm6qInhyZm_ZBHTtMg,2652 +greenlet/platform/switch_riscv_unix.h,sha256=606V6ACDf79Fz_WGItnkgbjIJ0pGg_sHmPyDxQYKK58,949 +greenlet/platform/switch_s390_unix.h,sha256=RRlGu957ybmq95qNNY4Qw1mcaoT3eBnW5KbVwu48KX8,2763 +greenlet/platform/switch_sh_gcc.h,sha256=mcRJBTu-2UBf4kZtX601qofwuDuy-Y-hnxJtrcaB7do,901 +greenlet/platform/switch_sparc_sun_gcc.h,sha256=xZish9GsMHBienUbUMsX1-ZZ-as7hs36sVhYIE3ew8Y,2797 +greenlet/platform/switch_x32_unix.h,sha256=nM98PKtzTWc1lcM7TRMUZJzskVdR1C69U1UqZRWX0GE,1509 +greenlet/platform/switch_x64_masm.asm,sha256=nu6n2sWyXuXfpPx40d9YmLfHXUc1sHgeTvX1kUzuvEM,1841 +greenlet/platform/switch_x64_masm.obj,sha256=GNtTNxYdo7idFUYsQv-mrXWgyT5EJ93-9q90lN6svtQ,1078 +greenlet/platform/switch_x64_msvc.h,sha256=LIeasyKo_vHzspdMzMHbosRhrBfKI4BkQOh4qcTHyJw,1805 +greenlet/platform/switch_x86_msvc.h,sha256=TtGOwinbFfnn6clxMNkCz8i6OmgB6kVRrShoF5iT9to,12838 +greenlet/platform/switch_x86_unix.h,sha256=VplW9H0FF0cZHw1DhJdIUs5q6YLS4cwb2nYwjF83R1s,3059 +greenlet/slp_platformselect.h,sha256=J01Fd1y2sFLxkBfsixdpexCVWaAdeprDTEly-ujDQAk,3841 +greenlet/tests/__init__.py,sha256=sqxm7-dZuGBwmNI0n6xrcQJGoHHjoXUGyUTnvHidcYM,9361 +greenlet/tests/__pycache__/__init__.cpython-311.pyc,, +greenlet/tests/__pycache__/fail_clearing_run_switches.cpython-311.pyc,, +greenlet/tests/__pycache__/fail_cpp_exception.cpython-311.pyc,, +greenlet/tests/__pycache__/fail_initialstub_already_started.cpython-311.pyc,, +greenlet/tests/__pycache__/fail_slp_switch.cpython-311.pyc,, +greenlet/tests/__pycache__/fail_switch_three_greenlets.cpython-311.pyc,, +greenlet/tests/__pycache__/fail_switch_three_greenlets2.cpython-311.pyc,, +greenlet/tests/__pycache__/fail_switch_two_greenlets.cpython-311.pyc,, +greenlet/tests/__pycache__/leakcheck.cpython-311.pyc,, +greenlet/tests/__pycache__/test_contextvars.cpython-311.pyc,, +greenlet/tests/__pycache__/test_cpp.cpython-311.pyc,, +greenlet/tests/__pycache__/test_extension_interface.cpython-311.pyc,, +greenlet/tests/__pycache__/test_gc.cpython-311.pyc,, +greenlet/tests/__pycache__/test_generator.cpython-311.pyc,, +greenlet/tests/__pycache__/test_generator_nested.cpython-311.pyc,, +greenlet/tests/__pycache__/test_greenlet.cpython-311.pyc,, +greenlet/tests/__pycache__/test_greenlet_trash.cpython-311.pyc,, +greenlet/tests/__pycache__/test_leaks.cpython-311.pyc,, +greenlet/tests/__pycache__/test_stack_saved.cpython-311.pyc,, +greenlet/tests/__pycache__/test_throw.cpython-311.pyc,, +greenlet/tests/__pycache__/test_tracing.cpython-311.pyc,, +greenlet/tests/__pycache__/test_version.cpython-311.pyc,, +greenlet/tests/__pycache__/test_weakref.cpython-311.pyc,, +greenlet/tests/_test_extension.c,sha256=vkeGA-6oeJcGILsD7oIrT1qZop2GaTOHXiNT7mcSl-0,5773 +greenlet/tests/_test_extension.cpython-311-x86_64-linux-gnu.so,sha256=p118NJ4hObhSNcvKLduspwQExvXHPDAbWVVMU6o3dqs,17256 +greenlet/tests/_test_extension_cpp.cpp,sha256=e0kVnaB8CCaEhE9yHtNyfqTjevsPDKKx-zgxk7PPK48,6565 
+greenlet/tests/_test_extension_cpp.cpython-311-x86_64-linux-gnu.so,sha256=Fo31LLhJtv2-gJiGnKZJG6OCcIJexLtz8Ql88a93Hus,57920 +greenlet/tests/fail_clearing_run_switches.py,sha256=o433oA_nUCtOPaMEGc8VEhZIKa71imVHXFw7TsXaP8M,1263 +greenlet/tests/fail_cpp_exception.py,sha256=o_ZbipWikok8Bjc-vjiQvcb5FHh2nVW-McGKMLcMzh0,985 +greenlet/tests/fail_initialstub_already_started.py,sha256=txENn5IyzGx2p-XR1XB7qXmC8JX_4mKDEA8kYBXUQKc,1961 +greenlet/tests/fail_slp_switch.py,sha256=rJBZcZfTWR3e2ERQtPAud6YKShiDsP84PmwOJbp4ey0,524 +greenlet/tests/fail_switch_three_greenlets.py,sha256=zSitV7rkNnaoHYVzAGGLnxz-yPtohXJJzaE8ehFDQ0M,956 +greenlet/tests/fail_switch_three_greenlets2.py,sha256=FPJensn2EJxoropl03JSTVP3kgP33k04h6aDWWozrOk,1285 +greenlet/tests/fail_switch_two_greenlets.py,sha256=1CaI8s3504VbbF1vj1uBYuy-zxBHVzHPIAd1LIc8ONg,817 +greenlet/tests/leakcheck.py,sha256=inbfM7_oVzd8jIKGxCgo4JqpFZaDAnWPkSULJ8vIE1s,11964 +greenlet/tests/test_contextvars.py,sha256=xutO-qZgKTwKsA9lAqTjIcTBEiQV4RpNKM-vO2_YCVU,10541 +greenlet/tests/test_cpp.py,sha256=hpxhFAdKJTpAVZP8CBGs1ZcrKdscI9BaDZk4btkI5d4,2736 +greenlet/tests/test_extension_interface.py,sha256=eJ3cwLacdK2WbsrC-4DgeyHdwLRcG4zx7rrkRtqSzC4,3829 +greenlet/tests/test_gc.py,sha256=PCOaRpIyjNnNlDogGL3FZU_lrdXuM-pv1rxeE5TP5mc,2923 +greenlet/tests/test_generator.py,sha256=tONXiTf98VGm347o1b-810daPiwdla5cbpFg6QI1R1g,1240 +greenlet/tests/test_generator_nested.py,sha256=7v4HOYrf1XZP39dk5IUMubdZ8yc3ynwZcqj9GUJyMSA,3718 +greenlet/tests/test_greenlet.py,sha256=rYWDvMx7ZpMlQju9KRxsBR61ela7HSJCg98JtR7RPOQ,46251 +greenlet/tests/test_greenlet_trash.py,sha256=n2dBlQfOoEO1ODatFi8QdhboH3fB86YtqzcYMYOXxbw,7947 +greenlet/tests/test_leaks.py,sha256=Qeso_qH9MCWJOkk2I3VcTh7UhaNvWxrzAmNBta-fUyY,17714 +greenlet/tests/test_stack_saved.py,sha256=eyzqNY2VCGuGlxhT_In6TvZ6Okb0AXFZVyBEnK1jDwA,446 +greenlet/tests/test_throw.py,sha256=u2TQ_WvvCd6N6JdXWIxVEcXkKu5fepDlz9dktYdmtng,3712 +greenlet/tests/test_tracing.py,sha256=VlwzMU0C1noospZhuUMyB7MHw200emIvGCN_6G2p2ZU,8250 +greenlet/tests/test_version.py,sha256=O9DpAITsOFgiRcjd4odQ7ejmwx_N9Q1zQENVcbtFHIc,1339 +greenlet/tests/test_weakref.py,sha256=F8M23btEF87bIbpptLNBORosbQqNZGiYeKMqYjWrsak,883 diff --git a/venv/lib/python3.11/site-packages/greenlet-3.2.1.dist-info/WHEEL b/venv/lib/python3.11/site-packages/greenlet-3.2.1.dist-info/WHEEL new file mode 100644 index 0000000..6148fe4 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet-3.2.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: setuptools (79.0.0) +Root-Is-Purelib: false +Tag: cp311-cp311-manylinux_2_24_x86_64 +Tag: cp311-cp311-manylinux_2_28_x86_64 + diff --git a/venv/lib/python3.11/site-packages/greenlet-3.2.1.dist-info/licenses/LICENSE b/venv/lib/python3.11/site-packages/greenlet-3.2.1.dist-info/licenses/LICENSE new file mode 100644 index 0000000..b73a4a1 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet-3.2.1.dist-info/licenses/LICENSE @@ -0,0 +1,30 @@ +The following files are derived from Stackless Python and are subject to the +same license as Stackless Python: + + src/greenlet/slp_platformselect.h + files in src/greenlet/platform/ directory + +See LICENSE.PSF and http://www.stackless.com/ for details. 
+ +Unless otherwise noted, the files in greenlet have been released under the +following MIT license: + +Copyright (c) Armin Rigo, Christian Tismer and contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/venv/lib/python3.11/site-packages/greenlet-3.2.1.dist-info/licenses/LICENSE.PSF b/venv/lib/python3.11/site-packages/greenlet-3.2.1.dist-info/licenses/LICENSE.PSF new file mode 100644 index 0000000..d3b509a --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet-3.2.1.dist-info/licenses/LICENSE.PSF @@ -0,0 +1,47 @@ +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011 Python Software Foundation; All Rights Reserved" are retained in Python +alone or in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. 
Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. diff --git a/venv/lib/python3.11/site-packages/greenlet-3.2.1.dist-info/top_level.txt b/venv/lib/python3.11/site-packages/greenlet-3.2.1.dist-info/top_level.txt new file mode 100644 index 0000000..46725be --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet-3.2.1.dist-info/top_level.txt @@ -0,0 +1 @@ +greenlet diff --git a/venv/lib/python3.11/site-packages/greenlet/CObjects.cpp b/venv/lib/python3.11/site-packages/greenlet/CObjects.cpp new file mode 100644 index 0000000..c135995 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/CObjects.cpp @@ -0,0 +1,157 @@ +#ifndef COBJECTS_CPP +#define COBJECTS_CPP +/***************************************************************************** + * C interface + * + * These are exported using the CObject API + */ +#ifdef __clang__ +# pragma clang diagnostic push +# pragma clang diagnostic ignored "-Wunused-function" +#endif + +#include "greenlet_exceptions.hpp" + +#include "greenlet_internal.hpp" +#include "greenlet_refs.hpp" + + +#include "TThreadStateDestroy.cpp" + +#include "PyGreenlet.hpp" + +using greenlet::PyErrOccurred; +using greenlet::Require; + + + +extern "C" { +static PyGreenlet* +PyGreenlet_GetCurrent(void) +{ + return GET_THREAD_STATE().state().get_current().relinquish_ownership(); +} + +static int +PyGreenlet_SetParent(PyGreenlet* g, PyGreenlet* nparent) +{ + return green_setparent((PyGreenlet*)g, (PyObject*)nparent, NULL); +} + +static PyGreenlet* +PyGreenlet_New(PyObject* run, PyGreenlet* parent) +{ + using greenlet::refs::NewDictReference; + // In the past, we didn't use green_new and green_init, but that + // was a maintenance issue because we duplicated code. This way is + // much safer, but slightly slower. If that's a problem, we could + // refactor green_init to separate argument parsing from initialization. 
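+    // (Note: green_new/green_init are the tp_new/tp_init slots of
+    // PyGreenlet_Type, so this follows the same construction path as
+    // ``greenlet.greenlet(run, parent)`` at the Python level.)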
+ OwnedGreenlet g = OwnedGreenlet::consuming(green_new(&PyGreenlet_Type, nullptr, nullptr)); + if (!g) { + return NULL; + } + + try { + NewDictReference kwargs; + if (run) { + kwargs.SetItem(mod_globs->str_run, run); + } + if (parent) { + kwargs.SetItem("parent", (PyObject*)parent); + } + + Require(green_init(g.borrow(), mod_globs->empty_tuple, kwargs.borrow())); + } + catch (const PyErrOccurred&) { + return nullptr; + } + + return g.relinquish_ownership(); +} + +static PyObject* +PyGreenlet_Switch(PyGreenlet* self, PyObject* args, PyObject* kwargs) +{ + if (!PyGreenlet_Check(self)) { + PyErr_BadArgument(); + return NULL; + } + + if (args == NULL) { + args = mod_globs->empty_tuple; + } + + if (kwargs == NULL || !PyDict_Check(kwargs)) { + kwargs = NULL; + } + + return green_switch(self, args, kwargs); +} + +static PyObject* +PyGreenlet_Throw(PyGreenlet* self, PyObject* typ, PyObject* val, PyObject* tb) +{ + if (!PyGreenlet_Check(self)) { + PyErr_BadArgument(); + return nullptr; + } + try { + PyErrPieces err_pieces(typ, val, tb); + return internal_green_throw(self, err_pieces).relinquish_ownership(); + } + catch (const PyErrOccurred&) { + return nullptr; + } +} + + + +static int +Extern_PyGreenlet_MAIN(PyGreenlet* self) +{ + if (!PyGreenlet_Check(self)) { + PyErr_BadArgument(); + return -1; + } + return self->pimpl->main(); +} + +static int +Extern_PyGreenlet_ACTIVE(PyGreenlet* self) +{ + if (!PyGreenlet_Check(self)) { + PyErr_BadArgument(); + return -1; + } + return self->pimpl->active(); +} + +static int +Extern_PyGreenlet_STARTED(PyGreenlet* self) +{ + if (!PyGreenlet_Check(self)) { + PyErr_BadArgument(); + return -1; + } + return self->pimpl->started(); +} + +static PyGreenlet* +Extern_PyGreenlet_GET_PARENT(PyGreenlet* self) +{ + if (!PyGreenlet_Check(self)) { + PyErr_BadArgument(); + return NULL; + } + // This can return NULL even if there is no exception + return self->pimpl->parent().acquire(); +} +} // extern C. + +/** End C API ****************************************************************/ +#ifdef __clang__ +# pragma clang diagnostic pop +#endif + + +#endif diff --git a/venv/lib/python3.11/site-packages/greenlet/PyGreenlet.cpp b/venv/lib/python3.11/site-packages/greenlet/PyGreenlet.cpp new file mode 100644 index 0000000..29c0bba --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/PyGreenlet.cpp @@ -0,0 +1,738 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +#ifndef PYGREENLET_CPP +#define PYGREENLET_CPP +/***************** +The Python slot functions for TGreenlet. 
+ */ + + +#define PY_SSIZE_T_CLEAN +#include +#include "structmember.h" // PyMemberDef + +#include "greenlet_internal.hpp" +#include "TThreadStateDestroy.cpp" +#include "TGreenlet.hpp" +// #include "TUserGreenlet.cpp" +// #include "TMainGreenlet.cpp" +// #include "TBrokenGreenlet.cpp" + + +#include "greenlet_refs.hpp" +#include "greenlet_slp_switch.hpp" + +#include "greenlet_thread_support.hpp" +#include "TGreenlet.hpp" + +#include "TGreenletGlobals.cpp" +#include "TThreadStateDestroy.cpp" +#include "PyGreenlet.hpp" +// #include "TGreenlet.cpp" + +// #include "TExceptionState.cpp" +// #include "TPythonState.cpp" +// #include "TStackState.cpp" + +using greenlet::LockGuard; +using greenlet::LockInitError; +using greenlet::PyErrOccurred; +using greenlet::Require; + +using greenlet::g_handle_exit; +using greenlet::single_result; + +using greenlet::Greenlet; +using greenlet::UserGreenlet; +using greenlet::MainGreenlet; +using greenlet::BrokenGreenlet; +using greenlet::ThreadState; +using greenlet::PythonState; + + + +static PyGreenlet* +green_new(PyTypeObject* type, PyObject* UNUSED(args), PyObject* UNUSED(kwds)) +{ + PyGreenlet* o = + (PyGreenlet*)PyBaseObject_Type.tp_new(type, mod_globs->empty_tuple, mod_globs->empty_dict); + if (o) { + new UserGreenlet(o, GET_THREAD_STATE().state().borrow_current()); + assert(Py_REFCNT(o) == 1); + } + return o; +} + + +// green_init is used in the tp_init slot. So it's important that +// it can be called directly from CPython. Thus, we don't use +// BorrowedGreenlet and BorrowedObject --- although in theory +// these should be binary layout compatible, that may not be +// guaranteed to be the case (32-bit linux ppc possibly). +static int +green_init(PyGreenlet* self, PyObject* args, PyObject* kwargs) +{ + PyArgParseParam run; + PyArgParseParam nparent; + static const char* kwlist[] = { + "run", + "parent", + NULL + }; + + // recall: The O specifier does NOT increase the reference count. + if (!PyArg_ParseTupleAndKeywords( + args, kwargs, "|OO:green", (char**)kwlist, &run, &nparent)) { + return -1; + } + + if (run) { + if (green_setrun(self, run, NULL)) { + return -1; + } + } + if (nparent && !nparent.is_None()) { + return green_setparent(self, nparent, NULL); + } + return 0; +} + + + +static int +green_traverse(PyGreenlet* self, visitproc visit, void* arg) +{ + // We must only visit referenced objects, i.e. only objects + // Py_INCREF'ed by this greenlet (directly or indirectly): + // + // - stack_prev is not visited: holds previous stack pointer, but it's not + // referenced + // - frames are not visited as we don't strongly reference them; + // alive greenlets are not garbage collected + // anyway. This can be a problem, however, if this greenlet is + // never allowed to finish, and is referenced from the frame: we + // have an uncollectible cycle in that case. Note that the + // frame object itself is also frequently not even tracked by the GC + // starting with Python 3.7 (frames are allocated by the + // interpreter untracked, and only become tracked when their + // evaluation is finished if they have a refcount > 1). All of + // this is to say that we should probably strongly reference + // the frame object. Doing so, while always allowing GC on a + // greenlet, solves several leaks for us. + + Py_VISIT(self->dict); + if (!self->pimpl) { + // Hmm. I have seen this at interpreter shutdown time, + // I think. That's very odd because this doesn't go away until + // we're ``green_dealloc()``, at which point we shouldn't be + // traversed anymore. 
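+        // Nothing further to visit without a pimpl; returning 0 reports
+        // success to the collector (self->dict was already visited above).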
+ return 0; + } + + return self->pimpl->tp_traverse(visit, arg); +} + +static int +green_is_gc(PyObject* _self) +{ + BorrowedGreenlet self(_self); + int result = 0; + /* Main greenlet can be garbage collected since it can only + become unreachable if the underlying thread exited. + Active greenlets --- including those that are suspended --- + cannot be garbage collected, however. + */ + if (self->main() || !self->active()) { + result = 1; + } + // The main greenlet pointer will eventually go away after the thread dies. + if (self->was_running_in_dead_thread()) { + // Our thread is dead! We can never run again. Might as well + // GC us. Note that if a tuple containing only us and other + // immutable objects had been scanned before this, when we + // would have returned 0, the tuple will take itself out of GC + // tracking and never be investigated again. So that could + // result in both us and the tuple leaking due to an + // unreachable/uncollectible reference. The same goes for + // dictionaries. + // + // It's not a great idea to be changing our GC state on the + // fly. + result = 1; + } + return result; +} + + +static int +green_clear(PyGreenlet* self) +{ + /* Greenlet is only cleared if it is about to be collected. + Since active greenlets are not garbage collectable, we can + be sure that, even if they are deallocated during clear, + nothing they reference is in unreachable or finalizers, + so even if it switches we are relatively safe. */ + // XXX: Are we responsible for clearing weakrefs here? + Py_CLEAR(self->dict); + return self->pimpl->tp_clear(); +} + +/** + * Returns 0 on failure (the object was resurrected) or 1 on success. + **/ +static int +_green_dealloc_kill_started_non_main_greenlet(BorrowedGreenlet self) +{ + /* Hacks hacks hacks copied from instance_dealloc() */ + /* Temporarily resurrect the greenlet. */ + assert(self.REFCNT() == 0); + Py_SET_REFCNT(self.borrow(), 1); + /* Save the current exception, if any. */ + PyErrPieces saved_err; + try { + // BY THE TIME WE GET HERE, the state may actually be going + // away + // if we're shutting down the interpreter and freeing thread + // entries, + // this could result in freeing greenlets that were leaked. So + // we can't try to read the state. + self->deallocing_greenlet_in_thread( + self->thread_state() + ? static_cast(GET_THREAD_STATE()) + : nullptr); + } + catch (const PyErrOccurred&) { + PyErr_WriteUnraisable(self.borrow_o()); + /* XXX what else should we do? */ + } + /* Check for no resurrection must be done while we keep + * our internal reference, otherwise PyFile_WriteObject + * causes recursion if using Py_INCREF/Py_DECREF + */ + if (self.REFCNT() == 1 && self->active()) { + /* Not resurrected, but still not dead! + XXX what else should we do? we complain. */ + PyObject* f = PySys_GetObject("stderr"); + Py_INCREF(self.borrow_o()); /* leak! */ + if (f != NULL) { + PyFile_WriteString("GreenletExit did not kill ", f); + PyFile_WriteObject(self.borrow_o(), f, 0); + PyFile_WriteString("\n", f); + } + } + /* Restore the saved exception. */ + saved_err.PyErrRestore(); + /* Undo the temporary resurrection; can't use DECREF here, + * it would cause a recursive call. + */ + assert(self.REFCNT() > 0); + + Py_ssize_t refcnt = self.REFCNT() - 1; + Py_SET_REFCNT(self.borrow_o(), refcnt); + if (refcnt != 0) { + /* Resurrected! 
*/ + _Py_NewReference(self.borrow_o()); + Py_SET_REFCNT(self.borrow_o(), refcnt); + /* Better to use tp_finalizer slot (PEP 442) + * and call ``PyObject_CallFinalizerFromDealloc``, + * but that's only supported in Python 3.4+; see + * Modules/_io/iobase.c for an example. + * + * The following approach is copied from iobase.c in CPython 2.7. + * (along with much of this function in general). Here's their + * comment: + * + * When called from a heap type's dealloc, the type will be + * decref'ed on return (see e.g. subtype_dealloc in typeobject.c). */ + if (PyType_HasFeature(self.TYPE(), Py_TPFLAGS_HEAPTYPE)) { + Py_INCREF(self.TYPE()); + } + + PyObject_GC_Track((PyObject*)self); + + _Py_DEC_REFTOTAL; +#ifdef COUNT_ALLOCS + --Py_TYPE(self)->tp_frees; + --Py_TYPE(self)->tp_allocs; +#endif /* COUNT_ALLOCS */ + return 0; + } + return 1; +} + + +static void +green_dealloc(PyGreenlet* self) +{ + PyObject_GC_UnTrack(self); + BorrowedGreenlet me(self); + if (me->active() + && me->started() + && !me->main()) { + if (!_green_dealloc_kill_started_non_main_greenlet(me)) { + return; + } + } + + if (self->weakreflist != NULL) { + PyObject_ClearWeakRefs((PyObject*)self); + } + Py_CLEAR(self->dict); + + if (self->pimpl) { + // In case deleting this, which frees some memory, + // somehow winds up calling back into us. That's usually a + //bug in our code. + Greenlet* p = self->pimpl; + self->pimpl = nullptr; + delete p; + } + // and finally we're done. self is now invalid. + Py_TYPE(self)->tp_free((PyObject*)self); +} + + + +static OwnedObject +internal_green_throw(BorrowedGreenlet self, PyErrPieces& err_pieces) +{ + PyObject* result = nullptr; + err_pieces.PyErrRestore(); + assert(PyErr_Occurred()); + if (self->started() && !self->active()) { + /* dead greenlet: turn GreenletExit into a regular return */ + result = g_handle_exit(OwnedObject()).relinquish_ownership(); + } + self->args() <<= result; + + return single_result(self->g_switch()); +} + + + +PyDoc_STRVAR( + green_switch_doc, + "switch(*args, **kwargs)\n" + "\n" + "Switch execution to this greenlet.\n" + "\n" + "If this greenlet has never been run, then this greenlet\n" + "will be switched to using the body of ``self.run(*args, **kwargs)``.\n" + "\n" + "If the greenlet is active (has been run, but was switch()'ed\n" + "out before leaving its run function), then this greenlet will\n" + "be resumed and the return value to its switch call will be\n" + "None if no arguments are given, the given argument if one\n" + "argument is given, or the args tuple and keyword args dict if\n" + "multiple arguments are given.\n" + "\n" + "If the greenlet is dead, or is the current greenlet then this\n" + "function will simply return the arguments using the same rules as\n" + "above.\n"); + +static PyObject* +green_switch(PyGreenlet* self, PyObject* args, PyObject* kwargs) +{ + using greenlet::SwitchingArgs; + SwitchingArgs switch_args(OwnedObject::owning(args), OwnedObject::owning(kwargs)); + self->pimpl->may_switch_away(); + self->pimpl->args() <<= switch_args; + + // If we're switching out of a greenlet, and that switch is the + // last thing the greenlet does, the greenlet ought to be able to + // go ahead and die at that point. Currently, someone else must + // manually switch back to the greenlet so that we "fall off the + // end" and can perform cleanup. You'd think we'd be able to + // figure out that this is happening using the frame's ``f_lasti`` + // member, which is supposed to be an index into + // ``frame->f_code->co_code``, the bytecode string. 
However, in + // recent interpreters, ``f_lasti`` tends not to be updated thanks + // to things like the PREDICT() macros in ceval.c. So it doesn't + // really work to do that in many cases. For example, the Python + // code: + // def run(): + // greenlet.getcurrent().parent.switch() + // produces bytecode of len 16, with the actual call to switch() + // being at index 10 (in Python 3.10). However, the reported + // ``f_lasti`` we actually see is...5! (Which happens to be the + // second byte of the CALL_METHOD op for ``getcurrent()``). + + try { + //OwnedObject result = single_result(self->pimpl->g_switch()); + OwnedObject result(single_result(self->pimpl->g_switch())); +#ifndef NDEBUG + // Note that the current greenlet isn't necessarily self. If self + // finished, we went to one of its parents. + assert(!self->pimpl->args()); + + const BorrowedGreenlet& current = GET_THREAD_STATE().state().borrow_current(); + // It's possible it's never been switched to. + assert(!current->args()); +#endif + PyObject* p = result.relinquish_ownership(); + + if (!p && !PyErr_Occurred()) { + // This shouldn't be happening anymore, so the asserts + // are there for debug builds. Non-debug builds + // crash "gracefully" in this case, although there is an + // argument to be made for killing the process in all + // cases --- for this to be the case, our switches + // probably nested in an incorrect way, so the state is + // suspicious. Nothing should be corrupt though, just + // confused at the Python level. Letting this propagate is + // probably good enough. + assert(p || PyErr_Occurred()); + throw PyErrOccurred( + mod_globs->PyExc_GreenletError, + "Greenlet.switch() returned NULL without an exception set." + ); + } + return p; + } + catch(const PyErrOccurred&) { + return nullptr; + } +} + +PyDoc_STRVAR( + green_throw_doc, + "Switches execution to this greenlet, but immediately raises the\n" + "given exception in this greenlet. If no argument is provided, the " + "exception\n" + "defaults to `greenlet.GreenletExit`. The normal exception\n" + "propagation rules apply, as described for `switch`. Note that calling " + "this\n" + "method is almost equivalent to the following::\n" + "\n" + " def raiser():\n" + " raise typ, val, tb\n" + " g_raiser = greenlet(raiser, parent=g)\n" + " g_raiser.switch()\n" + "\n" + "except that this trick does not work for the\n" + "`greenlet.GreenletExit` exception, which would not propagate\n" + "from ``g_raiser`` to ``g``.\n"); + +static PyObject* +green_throw(PyGreenlet* self, PyObject* args) +{ + PyArgParseParam typ(mod_globs->PyExc_GreenletExit); + PyArgParseParam val; + PyArgParseParam tb; + + if (!PyArg_ParseTuple(args, "|OOO:throw", &typ, &val, &tb)) { + return nullptr; + } + + assert(typ.borrow() || val.borrow()); + + self->pimpl->may_switch_away(); + try { + // Both normalizing the error and the actual throw_greenlet + // could throw PyErrOccurred. + PyErrPieces err_pieces(typ.borrow(), val.borrow(), tb.borrow()); + + return internal_green_throw(self, err_pieces).relinquish_ownership(); + } + catch (const PyErrOccurred&) { + return nullptr; + } +} + +static int +green_bool(PyGreenlet* self) +{ + return self->pimpl->active(); +} + +/** + * CAUTION: Allocates memory, may run GC and arbitrary Python code. 
+ */ +static PyObject* +green_getdict(PyGreenlet* self, void* UNUSED(context)) +{ + if (self->dict == NULL) { + self->dict = PyDict_New(); + if (self->dict == NULL) { + return NULL; + } + } + Py_INCREF(self->dict); + return self->dict; +} + +static int +green_setdict(PyGreenlet* self, PyObject* val, void* UNUSED(context)) +{ + PyObject* tmp; + + if (val == NULL) { + PyErr_SetString(PyExc_TypeError, "__dict__ may not be deleted"); + return -1; + } + if (!PyDict_Check(val)) { + PyErr_SetString(PyExc_TypeError, "__dict__ must be a dictionary"); + return -1; + } + tmp = self->dict; + Py_INCREF(val); + self->dict = val; + Py_XDECREF(tmp); + return 0; +} + +static bool +_green_not_dead(BorrowedGreenlet self) +{ + // XXX: Where else should we do this? + // Probably on entry to most Python-facing functions? + if (self->was_running_in_dead_thread()) { + self->deactivate_and_free(); + return false; + } + return self->active() || !self->started(); +} + + +static PyObject* +green_getdead(PyGreenlet* self, void* UNUSED(context)) +{ + if (_green_not_dead(self)) { + Py_RETURN_FALSE; + } + else { + Py_RETURN_TRUE; + } +} + +static PyObject* +green_get_stack_saved(PyGreenlet* self, void* UNUSED(context)) +{ + return PyLong_FromSsize_t(self->pimpl->stack_saved()); +} + + +static PyObject* +green_getrun(PyGreenlet* self, void* UNUSED(context)) +{ + try { + OwnedObject result(BorrowedGreenlet(self)->run()); + return result.relinquish_ownership(); + } + catch(const PyErrOccurred&) { + return nullptr; + } +} + + +static int +green_setrun(PyGreenlet* self, PyObject* nrun, void* UNUSED(context)) +{ + try { + BorrowedGreenlet(self)->run(nrun); + return 0; + } + catch(const PyErrOccurred&) { + return -1; + } +} + +static PyObject* +green_getparent(PyGreenlet* self, void* UNUSED(context)) +{ + return BorrowedGreenlet(self)->parent().acquire_or_None(); +} + + +static int +green_setparent(PyGreenlet* self, PyObject* nparent, void* UNUSED(context)) +{ + try { + BorrowedGreenlet(self)->parent(nparent); + } + catch(const PyErrOccurred&) { + return -1; + } + return 0; +} + + +static PyObject* +green_getcontext(const PyGreenlet* self, void* UNUSED(context)) +{ + const Greenlet *const g = self->pimpl; + try { + OwnedObject result(g->context()); + return result.relinquish_ownership(); + } + catch(const PyErrOccurred&) { + return nullptr; + } +} + +static int +green_setcontext(PyGreenlet* self, PyObject* nctx, void* UNUSED(context)) +{ + try { + BorrowedGreenlet(self)->context(nctx); + return 0; + } + catch(const PyErrOccurred&) { + return -1; + } +} + + +static PyObject* +green_getframe(PyGreenlet* self, void* UNUSED(context)) +{ + const PythonState::OwnedFrame& top_frame = BorrowedGreenlet(self)->top_frame(); + return top_frame.acquire_or_None(); +} + + +static PyObject* +green_getstate(PyGreenlet* self) +{ + PyErr_Format(PyExc_TypeError, + "cannot serialize '%s' object", + Py_TYPE(self)->tp_name); + return nullptr; +} + +static PyObject* +green_repr(PyGreenlet* _self) +{ + BorrowedGreenlet self(_self); + /* + Return a string like + + <greenlet.greenlet object at 0x... (otid=0x...) current active started main> + + The handling of greenlets across threads is not super good. + We mostly use the internal definitions of these terms, but they + generally should make sense to users as well. + */ + PyObject* result; + int never_started = !self->started() && !self->active(); + + const char* const tp_name = Py_TYPE(self)->tp_name; + + if (_green_not_dead(self)) { + /* XXX: The otid= is almost useless because you can't correlate it to + any thread identifier exposed to Python.
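+ + (Editor's note: a hedged sketch of what these states look like from + Python; addresses and otids will of course vary: + + import greenlet + g = greenlet.greenlet(lambda: greenlet.getcurrent().parent.switch()) + print(repr(g)) # ... pending> + g.switch() # g runs, then suspends itself + print(repr(g)) # ... suspended active started> + ) +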
We could use + PyThreadState_GET()->thread_id, but we'd need to save that in the + greenlet, or save the whole PyThreadState object itself. + + As it stands, it's only useful for identifying greenlets from the same thread. + */ + const char* state_in_thread; + if (self->was_running_in_dead_thread()) { + // The thread it was running in is dead! + // This can happen, especially at interpreter shut down. + // It complicates debugging output because it may be + // impossible to access the current thread state at that + // time. Thus, don't access the current thread state. + state_in_thread = " (thread exited)"; + } + else { + state_in_thread = GET_THREAD_STATE().state().is_current(self) + ? " current" + : (self->started() ? " suspended" : ""); + } + result = PyUnicode_FromFormat( + "<%s object at %p (otid=%p)%s%s%s%s>", + tp_name, + self.borrow_o(), + self->thread_state(), + state_in_thread, + self->active() ? " active" : "", + never_started ? " pending" : " started", + self->main() ? " main" : "" + ); + } + else { + result = PyUnicode_FromFormat( + "<%s object at %p (otid=%p) %sdead>", + tp_name, + self.borrow_o(), + self->thread_state(), + self->was_running_in_dead_thread() + ? "(thread exited) " + : "" + ); + } + + return result; +} + + +static PyMethodDef green_methods[] = { + { + .ml_name="switch", + .ml_meth=reinterpret_cast<PyCFunction>(green_switch), + .ml_flags=METH_VARARGS | METH_KEYWORDS, + .ml_doc=green_switch_doc + }, + {.ml_name="throw", .ml_meth=(PyCFunction)green_throw, .ml_flags=METH_VARARGS, .ml_doc=green_throw_doc}, + {.ml_name="__getstate__", .ml_meth=(PyCFunction)green_getstate, .ml_flags=METH_NOARGS, .ml_doc=NULL}, + {.ml_name=NULL, .ml_meth=NULL} /* sentinel */ +}; + +static PyGetSetDef green_getsets[] = { + /* name, getter, setter, doc, context pointer */ + {.name="__dict__", .get=(getter)green_getdict, .set=(setter)green_setdict}, + {.name="run", .get=(getter)green_getrun, .set=(setter)green_setrun}, + {.name="parent", .get=(getter)green_getparent, .set=(setter)green_setparent}, + {.name="gr_frame", .get=(getter)green_getframe }, + { + .name="gr_context", + .get=(getter)green_getcontext, + .set=(setter)green_setcontext + }, + {.name="dead", .get=(getter)green_getdead}, + {.name="_stack_saved", .get=(getter)green_get_stack_saved}, + {.name=NULL} +}; + +static PyMemberDef green_members[] = { + {.name=NULL} +}; + +static PyNumberMethods green_as_number = { + .nb_bool=(inquiry)green_bool, +}; + + +PyTypeObject PyGreenlet_Type = { + .ob_base=PyVarObject_HEAD_INIT(NULL, 0) + .tp_name="greenlet.greenlet", /* tp_name */ + .tp_basicsize=sizeof(PyGreenlet), /* tp_basicsize */ + /* methods */ + .tp_dealloc=(destructor)green_dealloc, /* tp_dealloc */ + .tp_repr=(reprfunc)green_repr, /* tp_repr */ + .tp_as_number=&green_as_number, /* tp_as_number */ + .tp_flags=G_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ + .tp_doc="greenlet(run=None, parent=None) -> greenlet\n\n" + "Creates a new greenlet object (without running it).\n\n" + " - *run* -- The callable to invoke.\n" + " - *parent* -- The parent greenlet.
The default is the current " + "greenlet.", /* tp_doc */ + .tp_traverse=(traverseproc)green_traverse, /* tp_traverse */ + .tp_clear=(inquiry)green_clear, /* tp_clear */ + .tp_weaklistoffset=offsetof(PyGreenlet, weakreflist), /* tp_weaklistoffset */ + + .tp_methods=green_methods, /* tp_methods */ + .tp_members=green_members, /* tp_members */ + .tp_getset=green_getsets, /* tp_getset */ + .tp_dictoffset=offsetof(PyGreenlet, dict), /* tp_dictoffset */ + .tp_init=(initproc)green_init, /* tp_init */ + .tp_alloc=PyType_GenericAlloc, /* tp_alloc */ + .tp_new=(newfunc)green_new, /* tp_new */ + .tp_free=PyObject_GC_Del, /* tp_free */ + .tp_is_gc=(inquiry)green_is_gc, /* tp_is_gc */ +}; + +#endif + +// Local Variables: +// flycheck-clang-include-path: ("/opt/local/Library/Frameworks/Python.framework/Versions/3.8/include/python3.8") +// End: diff --git a/venv/lib/python3.11/site-packages/greenlet/PyGreenlet.hpp b/venv/lib/python3.11/site-packages/greenlet/PyGreenlet.hpp new file mode 100644 index 0000000..df6cd80 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/PyGreenlet.hpp @@ -0,0 +1,35 @@ +#ifndef PYGREENLET_HPP +#define PYGREENLET_HPP + + +#include "greenlet.h" +#include "greenlet_compiler_compat.hpp" +#include "greenlet_refs.hpp" + + +using greenlet::refs::OwnedGreenlet; +using greenlet::refs::BorrowedGreenlet; +using greenlet::refs::BorrowedObject; +using greenlet::refs::OwnedObject; +using greenlet::refs::PyErrPieces; + + +// XXX: This doesn't really belong here; it's not a Python slot. +static OwnedObject internal_green_throw(BorrowedGreenlet self, PyErrPieces& err_pieces); + +static PyGreenlet* green_new(PyTypeObject* type, PyObject* UNUSED(args), PyObject* UNUSED(kwds)); +static int green_clear(PyGreenlet* self); +static int green_init(PyGreenlet* self, PyObject* args, PyObject* kwargs); +static int green_setparent(PyGreenlet* self, PyObject* nparent, void* UNUSED(context)); +static int green_setrun(PyGreenlet* self, PyObject* nrun, void* UNUSED(context)); +static int green_traverse(PyGreenlet* self, visitproc visit, void* arg); +static void green_dealloc(PyGreenlet* self); +static PyObject* green_getparent(PyGreenlet* self, void* UNUSED(context)); + +static int green_is_gc(PyObject* self); +static PyObject* green_getdead(PyGreenlet* self, void* UNUSED(context)); +static PyObject* green_getrun(PyGreenlet* self, void* UNUSED(context)); +static int green_setcontext(PyGreenlet* self, PyObject* nctx, void* UNUSED(context)); +static PyObject* green_getframe(PyGreenlet* self, void* UNUSED(context)); +static PyObject* green_repr(PyGreenlet* self); +#endif diff --git a/venv/lib/python3.11/site-packages/greenlet/PyGreenletUnswitchable.cpp b/venv/lib/python3.11/site-packages/greenlet/PyGreenletUnswitchable.cpp new file mode 100644 index 0000000..1b768ee --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/PyGreenletUnswitchable.cpp @@ -0,0 +1,147 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +/** + Implementation of the Python slots for PyGreenletUnswitchable_Type +*/ +#ifndef PY_GREENLET_UNSWITCHABLE_CPP +#define PY_GREENLET_UNSWITCHABLE_CPP + + + +#define PY_SSIZE_T_CLEAN +#include <Python.h> +#include "structmember.h" // PyMemberDef + +#include "greenlet_internal.hpp" +// Code after this point can assume access to things declared in stdint.h, +// including the fixed-width types. This goes for the platform-specific switch functions +// as well.
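+ +// Editor's note: this type is exposed to Python as +// ``greenlet._greenlet.UnswitchableGreenlet`` (see tp_name below). A hedged +// sketch of how the test suite can use it -- assuming the type is exported +// under that name -- to force a low-level switch failure: +// +// from greenlet._greenlet import UnswitchableGreenlet +// g = UnswitchableGreenlet(lambda: None) +// g.force_switch_error = True +// try: +// g.switch() # the forced g_switchstack() failure surfaces here +// except SystemError as e: +// print(e) # "Failed to switch stacks into a greenlet ..." +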
+#include "greenlet_refs.hpp" +#include "greenlet_slp_switch.hpp" + +#include "greenlet_thread_support.hpp" +#include "TGreenlet.hpp" + +#include "TGreenlet.cpp" +#include "TGreenletGlobals.cpp" +#include "TThreadStateDestroy.cpp" + + +using greenlet::LockGuard; +using greenlet::LockInitError; +using greenlet::PyErrOccurred; +using greenlet::Require; + +using greenlet::g_handle_exit; +using greenlet::single_result; + +using greenlet::Greenlet; +using greenlet::UserGreenlet; +using greenlet::MainGreenlet; +using greenlet::BrokenGreenlet; +using greenlet::ThreadState; +using greenlet::PythonState; + + +#include "PyGreenlet.hpp" + +static PyGreenlet* +green_unswitchable_new(PyTypeObject* type, PyObject* UNUSED(args), PyObject* UNUSED(kwds)) +{ + PyGreenlet* o = + (PyGreenlet*)PyBaseObject_Type.tp_new(type, mod_globs->empty_tuple, mod_globs->empty_dict); + if (o) { + new BrokenGreenlet(o, GET_THREAD_STATE().state().borrow_current()); + assert(Py_REFCNT(o) == 1); + } + return o; +} + +static PyObject* +green_unswitchable_getforce(PyGreenlet* self, void* UNUSED(context)) +{ + BrokenGreenlet* broken = dynamic_cast(self->pimpl); + return PyBool_FromLong(broken->_force_switch_error); +} + +static int +green_unswitchable_setforce(PyGreenlet* self, PyObject* nforce, void* UNUSED(context)) +{ + if (!nforce) { + PyErr_SetString( + PyExc_AttributeError, + "Cannot delete force_switch_error" + ); + return -1; + } + BrokenGreenlet* broken = dynamic_cast(self->pimpl); + int is_true = PyObject_IsTrue(nforce); + if (is_true == -1) { + return -1; + } + broken->_force_switch_error = is_true; + return 0; +} + +static PyObject* +green_unswitchable_getforceslp(PyGreenlet* self, void* UNUSED(context)) +{ + BrokenGreenlet* broken = dynamic_cast(self->pimpl); + return PyBool_FromLong(broken->_force_slp_switch_error); +} + +static int +green_unswitchable_setforceslp(PyGreenlet* self, PyObject* nforce, void* UNUSED(context)) +{ + if (!nforce) { + PyErr_SetString( + PyExc_AttributeError, + "Cannot delete force_slp_switch_error" + ); + return -1; + } + BrokenGreenlet* broken = dynamic_cast(self->pimpl); + int is_true = PyObject_IsTrue(nforce); + if (is_true == -1) { + return -1; + } + broken->_force_slp_switch_error = is_true; + return 0; +} + +static PyGetSetDef green_unswitchable_getsets[] = { + /* name, getter, setter, doc, closure (context pointer) */ + { + .name="force_switch_error", + .get=(getter)green_unswitchable_getforce, + .set=(setter)green_unswitchable_setforce, + .doc=NULL + }, + { + .name="force_slp_switch_error", + .get=(getter)green_unswitchable_getforceslp, + .set=(setter)green_unswitchable_setforceslp, + .doc=nullptr + }, + {.name=nullptr} +}; + +PyTypeObject PyGreenletUnswitchable_Type = { + .ob_base=PyVarObject_HEAD_INIT(NULL, 0) + .tp_name="greenlet._greenlet.UnswitchableGreenlet", + .tp_dealloc= (destructor)green_dealloc, /* tp_dealloc */ + .tp_flags=G_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ + .tp_doc="Undocumented internal class", /* tp_doc */ + .tp_traverse=(traverseproc)green_traverse, /* tp_traverse */ + .tp_clear=(inquiry)green_clear, /* tp_clear */ + + .tp_getset=green_unswitchable_getsets, /* tp_getset */ + .tp_base=&PyGreenlet_Type, /* tp_base */ + .tp_init=(initproc)green_init, /* tp_init */ + .tp_alloc=PyType_GenericAlloc, /* tp_alloc */ + .tp_new=(newfunc)green_unswitchable_new, /* tp_new */ + .tp_free=PyObject_GC_Del, /* tp_free */ + .tp_is_gc=(inquiry)green_is_gc, /* tp_is_gc */ +}; + + +#endif diff --git a/venv/lib/python3.11/site-packages/greenlet/PyModule.cpp 
b/venv/lib/python3.11/site-packages/greenlet/PyModule.cpp new file mode 100644 index 0000000..6adcb5c --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/PyModule.cpp @@ -0,0 +1,292 @@ +#ifndef PY_MODULE_CPP +#define PY_MODULE_CPP + +#include "greenlet_internal.hpp" + + +#include "TGreenletGlobals.cpp" +#include "TMainGreenlet.cpp" +#include "TThreadStateDestroy.cpp" + +using greenlet::LockGuard; +using greenlet::ThreadState; + +#ifdef __clang__ +# pragma clang diagnostic push +# pragma clang diagnostic ignored "-Wunused-function" +# pragma clang diagnostic ignored "-Wunused-variable" +#endif + +PyDoc_STRVAR(mod_getcurrent_doc, + "getcurrent() -> greenlet\n" + "\n" + "Returns the current greenlet (i.e. the one which called this " + "function).\n"); + +static PyObject* +mod_getcurrent(PyObject* UNUSED(module)) +{ + return GET_THREAD_STATE().state().get_current().relinquish_ownership_o(); +} + +PyDoc_STRVAR(mod_settrace_doc, + "settrace(callback) -> object\n" + "\n" + "Sets a new tracing function and returns the previous one.\n"); +static PyObject* +mod_settrace(PyObject* UNUSED(module), PyObject* args) +{ + PyArgParseParam tracefunc; + if (!PyArg_ParseTuple(args, "O", &tracefunc)) { + return NULL; + } + ThreadState& state = GET_THREAD_STATE(); + OwnedObject previous = state.get_tracefunc(); + if (!previous) { + previous = Py_None; + } + + state.set_tracefunc(tracefunc); + + return previous.relinquish_ownership(); +} + +PyDoc_STRVAR(mod_gettrace_doc, + "gettrace() -> object\n" + "\n" + "Returns the currently set tracing function, or None.\n"); + +static PyObject* +mod_gettrace(PyObject* UNUSED(module)) +{ + OwnedObject tracefunc = GET_THREAD_STATE().state().get_tracefunc(); + if (!tracefunc) { + tracefunc = Py_None; + } + return tracefunc.relinquish_ownership(); +} + + + +PyDoc_STRVAR(mod_set_thread_local_doc, + "set_thread_local(key, value) -> None\n" + "\n" + "Set a value in the current thread-local dictionary. Debugging only.\n"); + +static PyObject* +mod_set_thread_local(PyObject* UNUSED(module), PyObject* args) +{ + PyArgParseParam key; + PyArgParseParam value; + PyObject* result = NULL; + + if (PyArg_UnpackTuple(args, "set_thread_local", 2, 2, &key, &value)) { + if(PyDict_SetItem( + PyThreadState_GetDict(), // borrow + key, + value) == 0 ) { + // success + Py_INCREF(Py_None); + result = Py_None; + } + } + return result; +} + +PyDoc_STRVAR(mod_get_pending_cleanup_count_doc, + "get_pending_cleanup_count() -> Integer\n" + "\n" + "Get the number of greenlet cleanup operations pending. Testing only.\n"); + + +static PyObject* +mod_get_pending_cleanup_count(PyObject* UNUSED(module)) +{ + LockGuard cleanup_lock(*mod_globs->thread_states_to_destroy_lock); + return PyLong_FromSize_t(mod_globs->thread_states_to_destroy.size()); +} + +PyDoc_STRVAR(mod_get_total_main_greenlets_doc, + "get_total_main_greenlets() -> Integer\n" + "\n" + "Quickly return the number of main greenlets that exist. Testing only.\n"); + +static PyObject* +mod_get_total_main_greenlets(PyObject* UNUSED(module)) +{ + return PyLong_FromSize_t(G_TOTAL_MAIN_GREENLETS); +} + + + +PyDoc_STRVAR(mod_get_clocks_used_doing_optional_cleanup_doc, + "get_clocks_used_doing_optional_cleanup() -> Integer\n" + "\n" + "Get the number of clock ticks the program has used doing optional " + "greenlet cleanup.\n" + "Beginning in greenlet 2.0, greenlet tries to find and dispose of greenlets\n" + "that leaked after a thread exited. 
This requires invoking Python's garbage collector,\n" + "which may have a performance cost proportional to the number of live objects.\n" + "This function returns the amount of processor time\n" + "greenlet has used to do this. In programs that run with very large amounts of live\n" + "objects, this metric can be used to decide whether the cost of doing this cleanup\n" + "is worth the memory leak being corrected. If not, you can disable the cleanup\n" + "using ``enable_optional_cleanup(False)``.\n" + "The units are arbitrary and can only be compared to themselves (similarly to ``time.clock()``);\n" + "for example, to see how it scales with your heap. You can attempt to convert them into seconds\n" + "by dividing by the value of CLOCKS_PER_SEC.\n" + "If cleanup has been disabled, returns None.\n" + "\n" + "This is an implementation-specific, provisional API. It may be changed or removed\n" + "in the future.\n" + ".. versionadded:: 2.0" + ); +static PyObject* +mod_get_clocks_used_doing_optional_cleanup(PyObject* UNUSED(module)) +{ + std::clock_t& clocks = ThreadState::clocks_used_doing_gc(); + + if (clocks == std::clock_t(-1)) { + Py_RETURN_NONE; + } + // This might not actually work on some implementations; clock_t + // is an opaque type. + return PyLong_FromSsize_t(clocks); +} + +PyDoc_STRVAR(mod_enable_optional_cleanup_doc, + "enable_optional_cleanup(bool) -> None\n" + "\n" + "Enable or disable optional cleanup operations.\n" + "See ``get_clocks_used_doing_optional_cleanup()`` for details.\n" + ); +static PyObject* +mod_enable_optional_cleanup(PyObject* UNUSED(module), PyObject* flag) +{ + int is_true = PyObject_IsTrue(flag); + if (is_true == -1) { + return nullptr; + } + + std::clock_t& clocks = ThreadState::clocks_used_doing_gc(); + if (is_true) { + // If we already have a value, we don't want to lose it. + if (clocks == std::clock_t(-1)) { + clocks = 0; + } + } + else { + clocks = std::clock_t(-1); + } + Py_RETURN_NONE; +} + + + + +#if !GREENLET_PY313 +PyDoc_STRVAR(mod_get_tstate_trash_delete_nesting_doc, + "get_tstate_trash_delete_nesting() -> Integer\n" + "\n" + "Return the 'trash can' nesting level.
Testing only.\n"); +static PyObject* +mod_get_tstate_trash_delete_nesting(PyObject* UNUSED(module)) +{ + PyThreadState* tstate = PyThreadState_GET(); + +#if GREENLET_PY312 + return PyLong_FromLong(tstate->trash.delete_nesting); +#else + return PyLong_FromLong(tstate->trash_delete_nesting); +#endif +} +#endif + + + + +static PyMethodDef GreenMethods[] = { + { + .ml_name="getcurrent", + .ml_meth=(PyCFunction)mod_getcurrent, + .ml_flags=METH_NOARGS, + .ml_doc=mod_getcurrent_doc + }, + { + .ml_name="settrace", + .ml_meth=(PyCFunction)mod_settrace, + .ml_flags=METH_VARARGS, + .ml_doc=mod_settrace_doc + }, + { + .ml_name="gettrace", + .ml_meth=(PyCFunction)mod_gettrace, + .ml_flags=METH_NOARGS, + .ml_doc=mod_gettrace_doc + }, + { + .ml_name="set_thread_local", + .ml_meth=(PyCFunction)mod_set_thread_local, + .ml_flags=METH_VARARGS, + .ml_doc=mod_set_thread_local_doc + }, + { + .ml_name="get_pending_cleanup_count", + .ml_meth=(PyCFunction)mod_get_pending_cleanup_count, + .ml_flags=METH_NOARGS, + .ml_doc=mod_get_pending_cleanup_count_doc + }, + { + .ml_name="get_total_main_greenlets", + .ml_meth=(PyCFunction)mod_get_total_main_greenlets, + .ml_flags=METH_NOARGS, + .ml_doc=mod_get_total_main_greenlets_doc + }, + { + .ml_name="get_clocks_used_doing_optional_cleanup", + .ml_meth=(PyCFunction)mod_get_clocks_used_doing_optional_cleanup, + .ml_flags=METH_NOARGS, + .ml_doc=mod_get_clocks_used_doing_optional_cleanup_doc + }, + { + .ml_name="enable_optional_cleanup", + .ml_meth=(PyCFunction)mod_enable_optional_cleanup, + .ml_flags=METH_O, + .ml_doc=mod_enable_optional_cleanup_doc + }, +#if !GREENLET_PY313 + { + .ml_name="get_tstate_trash_delete_nesting", + .ml_meth=(PyCFunction)mod_get_tstate_trash_delete_nesting, + .ml_flags=METH_NOARGS, + .ml_doc=mod_get_tstate_trash_delete_nesting_doc + }, +#endif + {.ml_name=NULL, .ml_meth=NULL} /* Sentinel */ +}; + +static const char* const copy_on_greentype[] = { + "getcurrent", + "error", + "GreenletExit", + "settrace", + "gettrace", + NULL +}; + +static struct PyModuleDef greenlet_module_def = { + .m_base=PyModuleDef_HEAD_INIT, + .m_name="greenlet._greenlet", + .m_doc=NULL, + .m_size=-1, + .m_methods=GreenMethods, +}; + + +#endif + +#ifdef __clang__ +# pragma clang diagnostic pop +#endif diff --git a/venv/lib/python3.11/site-packages/greenlet/TBrokenGreenlet.cpp b/venv/lib/python3.11/site-packages/greenlet/TBrokenGreenlet.cpp new file mode 100644 index 0000000..7e9ab5b --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/TBrokenGreenlet.cpp @@ -0,0 +1,45 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +/** + * Implementation of greenlet::BrokenGreenlet.
+ * + * Format with: + * clang-format -i --style=file src/greenlet/greenlet.c + * + * + * Fix missing braces with: + * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" +*/ + +#include "TGreenlet.hpp" + +namespace greenlet { + +void* BrokenGreenlet::operator new(size_t UNUSED(count)) +{ + return allocator.allocate(1); +} + + +void BrokenGreenlet::operator delete(void* ptr) +{ + return allocator.deallocate(static_cast<BrokenGreenlet*>(ptr), + 1); +} + +greenlet::PythonAllocator<greenlet::BrokenGreenlet> greenlet::BrokenGreenlet::allocator; + +bool +BrokenGreenlet::force_slp_switch_error() const noexcept +{ + return this->_force_slp_switch_error; +} + +UserGreenlet::switchstack_result_t BrokenGreenlet::g_switchstack(void) +{ + if (this->_force_switch_error) { + return switchstack_result_t(-1); + } + return UserGreenlet::g_switchstack(); +} + +}; //namespace greenlet diff --git a/venv/lib/python3.11/site-packages/greenlet/TExceptionState.cpp b/venv/lib/python3.11/site-packages/greenlet/TExceptionState.cpp new file mode 100644 index 0000000..08a94ae --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/TExceptionState.cpp @@ -0,0 +1,62 @@ +#ifndef GREENLET_EXCEPTION_STATE_CPP +#define GREENLET_EXCEPTION_STATE_CPP + +#include <Python.h> +#include "TGreenlet.hpp" + +namespace greenlet { + + +ExceptionState::ExceptionState() +{ + this->clear(); +} + +void ExceptionState::operator<<(const PyThreadState *const tstate) noexcept +{ + this->exc_info = tstate->exc_info; + this->exc_state = tstate->exc_state; +} + +void ExceptionState::operator>>(PyThreadState *const tstate) noexcept +{ + tstate->exc_state = this->exc_state; + tstate->exc_info = + this->exc_info ? this->exc_info : &tstate->exc_state; + this->clear(); +} + +void ExceptionState::clear() noexcept +{ + this->exc_info = nullptr; + this->exc_state.exc_value = nullptr; +#if !GREENLET_PY311 + this->exc_state.exc_type = nullptr; + this->exc_state.exc_traceback = nullptr; +#endif + this->exc_state.previous_item = nullptr; +} + +int ExceptionState::tp_traverse(visitproc visit, void* arg) noexcept +{ + Py_VISIT(this->exc_state.exc_value); +#if !GREENLET_PY311 + Py_VISIT(this->exc_state.exc_type); + Py_VISIT(this->exc_state.exc_traceback); +#endif + return 0; +} + +void ExceptionState::tp_clear() noexcept +{ + Py_CLEAR(this->exc_state.exc_value); +#if !GREENLET_PY311 + Py_CLEAR(this->exc_state.exc_type); + Py_CLEAR(this->exc_state.exc_traceback); +#endif +} + + +}; // namespace greenlet + +#endif // GREENLET_EXCEPTION_STATE_CPP diff --git a/venv/lib/python3.11/site-packages/greenlet/TGreenlet.cpp b/venv/lib/python3.11/site-packages/greenlet/TGreenlet.cpp new file mode 100644 index 0000000..4698a17 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/TGreenlet.cpp @@ -0,0 +1,718 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +/** + * Implementation of greenlet::Greenlet.
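+ * + * Editor's note: the switching machinery implemented in this file is what + * backs the basic Python lifecycle below (a hedged sketch): + * + * import greenlet + * def child(): + * print("in child") # falling off the end returns to the parent + * g = greenlet.greenlet(child) + * g.switch() # runs child to completion + * assert g.dead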
+ * + * Format with: + * clang-format -i --style=file src/greenlet/greenlet.c + * + * + * Fix missing braces with: + * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" +*/ +#ifndef TGREENLET_CPP +#define TGREENLET_CPP +#include "greenlet_internal.hpp" +#include "TGreenlet.hpp" + + +#include "TGreenletGlobals.cpp" +#include "TThreadStateDestroy.cpp" + +namespace greenlet { + +Greenlet::Greenlet(PyGreenlet* p) + : Greenlet(p, StackState()) +{ +} + +Greenlet::Greenlet(PyGreenlet* p, const StackState& initial_stack) + : _self(p), stack_state(initial_stack) +{ + assert(p->pimpl == nullptr); + p->pimpl = this; +} + +Greenlet::~Greenlet() +{ + // XXX: Can't do this. tp_clear is a virtual function, and by the + // time we're here, we've sliced off our child classes. + //this->tp_clear(); + this->_self->pimpl = nullptr; +} + +bool +Greenlet::force_slp_switch_error() const noexcept +{ + return false; +} + +void +Greenlet::release_args() +{ + this->switch_args.CLEAR(); +} + +/** + * CAUTION: This will allocate memory and may trigger garbage + * collection and arbitrary Python code. + */ +OwnedObject +Greenlet::throw_GreenletExit_during_dealloc(const ThreadState& UNUSED(current_thread_state)) +{ + // If we're killed because we lost all references in the + // middle of a switch, that's ok. Don't reset the args/kwargs, + // we still want to pass them to the parent. + PyErr_SetString(mod_globs->PyExc_GreenletExit, + "Killing the greenlet because all references have vanished."); + // To get here it had to have run before + return this->g_switch(); +} + +inline void +Greenlet::slp_restore_state() noexcept +{ +#ifdef SLP_BEFORE_RESTORE_STATE + SLP_BEFORE_RESTORE_STATE(); +#endif + this->stack_state.copy_heap_to_stack( + this->thread_state()->borrow_current()->stack_state); +} + + +inline int +Greenlet::slp_save_state(char *const stackref) noexcept +{ + // XXX: This used to happen in the middle, before saving, but + // after finding the next owner. Does that matter? This is + // only defined for Sparc/GCC where it flushes register + // windows to the stack (I think) +#ifdef SLP_BEFORE_SAVE_STATE + SLP_BEFORE_SAVE_STATE(); +#endif + return this->stack_state.copy_stack_to_heap(stackref, + this->thread_state()->borrow_current()->stack_state); +} + +/** + * CAUTION: This will allocate memory and may trigger garbage + * collection and arbitrary Python code. + */ +OwnedObject +Greenlet::on_switchstack_or_initialstub_failure( + Greenlet* target, + const Greenlet::switchstack_result_t& err, + const bool target_was_me, + const bool was_initial_stub) +{ + // If we get here, either g_initialstub() + // failed, or g_switchstack() failed. Either one of those + // cases SHOULD leave us in the original greenlet with a valid stack. + if (!PyErr_Occurred()) { + PyErr_SetString( + PyExc_SystemError, + was_initial_stub + ? "Failed to switch stacks into a greenlet for the first time." + : "Failed to switch stacks into a running greenlet."); + } + this->release_args(); + + if (target && !target_was_me) { + target->murder_in_place(); + } + + assert(!err.the_new_current_greenlet); + assert(!err.origin_greenlet); + return OwnedObject(); + +} + +OwnedGreenlet +Greenlet::g_switchstack_success() noexcept +{ + PyThreadState* tstate = PyThreadState_GET(); + // restore the saved state + this->python_state >> tstate; + this->exception_state >> tstate; + + // The thread state hasn't been changed yet. 
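+ // (Editor's note: the two ``>>`` operators above push this greenlet's + // saved frame and exception state back into the live PyThreadState; the + // lines below then update the bookkeeping so ``getcurrent()`` reports + // this greenlet, and the previously-current greenlet is returned as the + // origin of the switch.)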
+ ThreadState* thread_state = this->thread_state(); + OwnedGreenlet result(thread_state->get_current()); + thread_state->set_current(this->self()); + //assert(thread_state->borrow_current().borrow() == this->_self); + return result; +} + +Greenlet::switchstack_result_t +Greenlet::g_switchstack(void) +{ + // if any of these assertions fail, it's likely because we + // switched away and tried to switch back to us. Early stages of + // switching are not reentrant because we re-use ``this->args()``. + // Switching away would happen if we trigger a garbage collection + // (by just using some Python APIs that happen to allocate Python + // objects) and some garbage had weakref callbacks or __del__ that + // switches (people don't write code like that by hand, but with + // gevent it's possible without realizing it) + assert(this->args() || PyErr_Occurred()); + { /* save state */ + if (this->thread_state()->is_current(this->self())) { + // Hmm, nothing to do. + // TODO: Does this bypass trace events that are + // important? + return switchstack_result_t(0, + this, this->thread_state()->borrow_current()); + } + BorrowedGreenlet current = this->thread_state()->borrow_current(); + PyThreadState* tstate = PyThreadState_GET(); + + current->python_state << tstate; + current->exception_state << tstate; + this->python_state.will_switch_from(tstate); + switching_thread_state = this; + current->expose_frames(); + } + assert(this->args() || PyErr_Occurred()); + // If this is the first switch into a greenlet, this will + // return twice, once with 1 in the new greenlet, once with 0 + // in the origin. + int err; + if (this->force_slp_switch_error()) { + err = -1; + } + else { + err = slp_switch(); + } + + if (err < 0) { /* error */ + // Tested by + // test_greenlet.TestBrokenGreenlets.test_failed_to_slp_switch_into_running + // + // It's not clear if it's worth trying to clean up and + // continue here. Failing to switch stacks is a big deal which + // may not be recoverable (who knows what state the stack is in). + // Also, we've stolen references in preparation for calling + // ``g_switchstack_success()`` and we don't have a clean + // mechanism for backing that all out. + Py_FatalError("greenlet: Failed low-level slp_switch(). The stack is probably corrupt."); + } + + // No stack-based variables are valid anymore. + + // But the global is volatile so we can reload it without the + // compiler caching it from earlier. + Greenlet* greenlet_that_switched_in = switching_thread_state; // aka this + switching_thread_state = nullptr; + // except that no stack variables are valid, we would: + // assert(this == greenlet_that_switched_in); + + // switchstack success is where we restore the exception state, + // etc. It returns the origin greenlet because it's convenient. + + OwnedGreenlet origin = greenlet_that_switched_in->g_switchstack_success(); + assert(greenlet_that_switched_in->args() || PyErr_Occurred()); + return switchstack_result_t(err, greenlet_that_switched_in, origin); +} + + +inline void +Greenlet::check_switch_allowed() const +{ + // TODO: Make this take a parameter of the current greenlet, + // or current main greenlet, to make the check for + // cross-thread switching cheaper. Surely somewhere up the + // call stack we've already accessed the thread local variable. + + // We expect to always have a main greenlet now; accessing the thread state + // created it.
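+ // + // (Editor's note: the cross-thread rule enforced below is easy to see + // from Python -- a hedged sketch: + // + // import greenlet, threading + // holder = {} + // def make(): + // holder["g"] = greenlet.greenlet(lambda: None) + // t = threading.Thread(target=make) + // t.start(); t.join() + // holder["g"].switch() # raises greenlet.error: cannot switch + // # to a different thread + // ) + //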
However, if we get here and cleanup has already + // begun because we're a greenlet that was running in a + // (now dead) thread, these invariants will not hold true. In + // fact, accessing `this->thread_state` may not even be possible. + + // If the thread this greenlet was running in is dead, + // we'll still have a reference to a main greenlet, but the + // thread state pointer we have is bogus. + // TODO: Give the objects an API to determine if they belong + // to a dead thread. + + const BorrowedMainGreenlet main_greenlet = this->find_main_greenlet_in_lineage(); + + if (!main_greenlet) { + throw PyErrOccurred(mod_globs->PyExc_GreenletError, + "cannot switch to a garbage collected greenlet"); + } + + if (!main_greenlet->thread_state()) { + throw PyErrOccurred(mod_globs->PyExc_GreenletError, + "cannot switch to a different thread (which happens to have exited)"); + } + + // The main greenlet we found was from the .parent lineage. + // That may or may not have any relationship to the main + // greenlet of the running thread. We can't actually access + // our this->thread_state members to try to check that, + // because it could be in the process of getting destroyed, + // but setting the main_greenlet->thread_state member to NULL + // may not be visible yet. So we need to check against the + // current thread state (once the cheaper checks are out of + // the way) + const BorrowedMainGreenlet current_main_greenlet = GET_THREAD_STATE().state().borrow_main_greenlet(); + if ( + // lineage main greenlet is not this thread's greenlet + current_main_greenlet != main_greenlet + || ( + // attached to some thread + this->main_greenlet() + // XXX: Same condition as above. Was this supposed to be + // this->main_greenlet()? + && current_main_greenlet != main_greenlet) + // switching into a known dead thread (XXX: which, if we get here, + // is bad, because we just accessed the thread state, which is + // gone!) + || (!current_main_greenlet->thread_state())) { + // CAUTION: This may trigger memory allocations, gc, and + // arbitrary Python code. + throw PyErrOccurred( + mod_globs->PyExc_GreenletError, + "Cannot switch to a different thread\n\tCurrent: %R\n\tExpected: %R", + current_main_greenlet, main_greenlet); + } +} + +const OwnedObject +Greenlet::context() const +{ + using greenlet::PythonStateContext; + OwnedObject result; + + if (this->is_currently_running_in_some_thread()) { + /* Currently running greenlet: context is stored in the thread state, + not the greenlet object. */ + if (GET_THREAD_STATE().state().is_current(this->self())) { + result = PythonStateContext::context(PyThreadState_GET()); + } + else { + throw ValueError( + "cannot get context of a " + "greenlet that is running in a different thread"); + } + } + else { + /* Greenlet is not running: just return context. */ + result = this->python_state.context(); + } + if (!result) { + result = OwnedObject::None(); + } + return result; +} + + +void +Greenlet::context(BorrowedObject given) +{ + using greenlet::PythonStateContext; + if (!given) { + throw AttributeError("can't delete context attribute"); + } + if (given.is_None()) { + /* "Empty context" is stored as NULL, not None.
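+ + (Editor's note: from Python this corresponds to assigning ``gr_context`` + -- a hedged sketch: + + import contextvars, greenlet + g = greenlet.greenlet(lambda: None) + g.gr_context = contextvars.copy_context() + g.gr_context = None # stored as NULL internally + ) +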
*/ + given = nullptr; + } + + //checks type, incrs refcnt + greenlet::refs::OwnedContext context(given); + PyThreadState* tstate = PyThreadState_GET(); + + if (this->is_currently_running_in_some_thread()) { + if (!GET_THREAD_STATE().state().is_current(this->self())) { + throw ValueError("cannot set context of a greenlet" + " that is running in a different thread"); + } + + /* Currently running greenlet: context is stored in the thread state, + not the greenlet object. */ + OwnedObject octx = OwnedObject::consuming(PythonStateContext::context(tstate)); + PythonStateContext::context(tstate, context.relinquish_ownership()); + } + else { + /* Greenlet is not running: just set context. Note that the + greenlet may be dead.*/ + this->python_state.context() = context; + } +} + +/** + * CAUTION: May invoke arbitrary Python code. + * + * Figure out what the result of ``greenlet.switch(arg, kwargs)`` + * should be and transfers ownership of it to the left-hand-side. + * + * If switch() was just passed an arg tuple, then we'll just return that. + * If only keyword arguments were passed, then we'll pass the keyword + * argument dict. Otherwise, we'll create a tuple of (args, kwargs) and + * return both. + * + * CAUTION: This may allocate a new tuple object, which may + * cause the Python garbage collector to run, which in turn may + * run arbitrary Python code that switches. + */ +OwnedObject& operator<<=(OwnedObject& lhs, greenlet::SwitchingArgs& rhs) noexcept +{ + // Because this may invoke arbitrary Python code, which could + // result in switching back to us, we need to get the + // arguments locally on the stack. + assert(rhs); + OwnedObject args = rhs.args(); + OwnedObject kwargs = rhs.kwargs(); + rhs.CLEAR(); + // We shouldn't be called twice for the same switch. + assert(args || kwargs); + assert(!rhs); + + if (!kwargs) { + lhs = args; + } + else if (!PyDict_Size(kwargs.borrow())) { + lhs = args; + } + else if (!PySequence_Length(args.borrow())) { + lhs = kwargs; + } + else { + // PyTuple_Pack allocates memory, may GC, may run arbitrary + // Python code. + lhs = OwnedObject::consuming(PyTuple_Pack(2, args.borrow(), kwargs.borrow())); + } + return lhs; +} + +static OwnedObject +g_handle_exit(const OwnedObject& greenlet_result) +{ + if (!greenlet_result && mod_globs->PyExc_GreenletExit.PyExceptionMatches()) { + /* catch and ignore GreenletExit */ + PyErrFetchParam val; + PyErr_Fetch(PyErrFetchParam(), val, PyErrFetchParam()); + if (!val) { + return OwnedObject::None(); + } + return OwnedObject(val); + } + + if (greenlet_result) { + // package the result into a 1-tuple + // PyTuple_Pack increments the reference of its arguments, + // so we always need to decref the greenlet result; + // the owner will do that. + return OwnedObject::consuming(PyTuple_Pack(1, greenlet_result.borrow())); + } + + return OwnedObject(); +} + + + +/** + * May run arbitrary Python code. + */ +OwnedObject +Greenlet::g_switch_finish(const switchstack_result_t& err) +{ + assert(err.the_new_current_greenlet == this); + + ThreadState& state = *this->thread_state(); + // Because calling the trace function could do arbitrary things, + // including switching away from this greenlet and then maybe + // switching back, we need to capture the arguments now so that + // they don't change. 
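+ // + // (Editor's note: the trace function consulted below is the one a user + // installs with ``greenlet.settrace()``. A hedged sketch: + // + // import greenlet + // def tracer(event, args): # event is "switch" or "throw" + // origin, target = args + // print(event, origin, target) + // old_tracer = greenlet.settrace(tracer) + // )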
+ OwnedObject result; + if (this->args()) { + result <<= this->args(); + } + else { + assert(PyErr_Occurred()); + } + assert(!this->args()); + try { + // Our only caller handles the bad error case + assert(err.status >= 0); + assert(state.borrow_current() == this->self()); + if (OwnedObject tracefunc = state.get_tracefunc()) { + assert(result || PyErr_Occurred()); + g_calltrace(tracefunc, + result ? mod_globs->event_switch : mod_globs->event_throw, + err.origin_greenlet, + this->self()); + } + // The above could have invoked arbitrary Python code, but + // it couldn't switch back to this object and *also* + // throw an exception, so the args won't have changed. + + if (PyErr_Occurred()) { + // We get here if we fell off the end of the run() function + // raising an exception. The switch itself was + // successful, but the function raised. + // valgrind reports that memory allocated here can still + // be reached after a test run. + throw PyErrOccurred::from_current(); + } + return result; + } + catch (const PyErrOccurred&) { + /* Turn switch errors into switch throws */ + /* Turn trace errors into switch throws */ + this->release_args(); + throw; + } +} + +void +Greenlet::g_calltrace(const OwnedObject& tracefunc, + const greenlet::refs::ImmortalEventName& event, + const BorrowedGreenlet& origin, + const BorrowedGreenlet& target) +{ + PyErrPieces saved_exc; + try { + TracingGuard tracing_guard; + // TODO: We have saved the active exception (if any) that's + // about to be raised. In the 'throw' case, we could provide + // the exception to the tracefunction, which seems very helpful. + tracing_guard.CallTraceFunction(tracefunc, event, origin, target); + } + catch (const PyErrOccurred&) { + // In case of exceptions the trace function is removed, + // and any existing exception is replaced with the tracing + // exception. + GET_THREAD_STATE().state().set_tracefunc(Py_None); + throw; + } + + saved_exc.PyErrRestore(); + assert( + (event == mod_globs->event_throw && PyErr_Occurred()) + || (event == mod_globs->event_switch && !PyErr_Occurred()) + ); +} + +void +Greenlet::murder_in_place() +{ + if (this->active()) { + assert(!this->is_currently_running_in_some_thread()); + this->deactivate_and_free(); + } +} + +inline void +Greenlet::deactivate_and_free() +{ + if (!this->active()) { + return; + } + // Throw away any saved stack. + this->stack_state = StackState(); + assert(!this->stack_state.active()); + // Throw away any Python references. + // We're holding a borrowed reference to the last + // frame we executed. Since we borrowed it, the + // normal traversal, clear, and dealloc functions + // ignore it, meaning it leaks. (The thread state + // object can't find it to clear it when that's + // deallocated either, because by definition if we + // got an object on this list, it wasn't + // running and the thread state doesn't have + // this frame.) + // So here, we *do* clear it. + this->python_state.tp_clear(true); +} + +bool +Greenlet::belongs_to_thread(const ThreadState* thread_state) const +{ + if (!this->thread_state() // not running anywhere, or thread + // exited + || !thread_state) { // same, or there is no thread state. + return false; + } + return true; +} + + +void +Greenlet::deallocing_greenlet_in_thread(const ThreadState* current_thread_state) +{ + /* Cannot raise an exception to kill the greenlet if + it is not running in the same thread!
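+ + (Editor's note: in the same-thread case this is what implements the + classic "dropping the last reference kills a running greenlet" + behaviour. A hedged sketch, assuming no other references to g exist: + + import greenlet + def run(): + try: + greenlet.getcurrent().parent.switch() + except greenlet.GreenletExit: + print("killed") + g = greenlet.greenlet(run) + g.switch() # leaves g suspended inside run() + del g # dealloc throws GreenletExit into g; prints "killed" + ) +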
*/ + if (this->belongs_to_thread(current_thread_state)) { + assert(current_thread_state); + // To get here it had to have run before + /* Send the greenlet a GreenletExit exception. */ + + // We don't care about the return value, only whether an + // exception happened. + this->throw_GreenletExit_during_dealloc(*current_thread_state); + return; + } + + // Not the same thread! Temporarily save the greenlet + // into its thread's deleteme list, *if* it exists. + // If that thread has already exited, and processed its pending + // cleanup, we'll never be able to clean everything up: we won't + // be able to raise an exception. + // That's mostly OK! Since we can't add it to a list, our refcount + // won't increase, and we'll go ahead with the DECREFs later. + ThreadState *const thread_state = this->thread_state(); + if (thread_state) { + thread_state->delete_when_thread_running(this->self()); + } + else { + // The thread is dead, we can't raise an exception. + // We need to make it look non-active, though, so that dealloc + // finishes killing it. + this->deactivate_and_free(); + } + return; +} + + +int +Greenlet::tp_traverse(visitproc visit, void* arg) +{ + + int result; + if ((result = this->exception_state.tp_traverse(visit, arg)) != 0) { + return result; + } + //XXX: This is ugly. But so is handling everything having to do + //with the top frame. + bool visit_top_frame = this->was_running_in_dead_thread(); + // When true, the thread is dead. Our implicit weak reference to the + // frame is now all that's left; we consider ourselves to + // strongly own it now. + if ((result = this->python_state.tp_traverse(visit, arg, visit_top_frame)) != 0) { + return result; + } + return 0; +} + +int +Greenlet::tp_clear() +{ + bool own_top_frame = this->was_running_in_dead_thread(); + this->exception_state.tp_clear(); + this->python_state.tp_clear(own_top_frame); + return 0; +} + +bool Greenlet::is_currently_running_in_some_thread() const +{ + return this->stack_state.active() && !this->python_state.top_frame(); +} + +#if GREENLET_PY312 +void GREENLET_NOINLINE(Greenlet::expose_frames)() +{ + if (!this->python_state.top_frame()) { + return; + } + + _PyInterpreterFrame* last_complete_iframe = nullptr; + _PyInterpreterFrame* iframe = this->python_state.top_frame()->f_frame; + while (iframe) { + // We must make a copy before looking at the iframe contents, + // since iframe might point to a portion of the greenlet's C stack + // that was spilled when switching greenlets. + _PyInterpreterFrame iframe_copy; + this->stack_state.copy_from_stack(&iframe_copy, iframe, sizeof(*iframe)); + if (!_PyFrame_IsIncomplete(&iframe_copy)) { + // If the iframe were OWNED_BY_CSTACK then it would always be + // incomplete. Since it's not incomplete, it's not on the C stack + // and we can access it through the original `iframe` pointer + // directly. This is important since GetFrameObject might + // lazily _create_ the frame object and we don't want the + // interpreter to lose track of it. + assert(iframe_copy.owner != FRAME_OWNED_BY_CSTACK); + + // We really want to just write: + // PyFrameObject* frame = _PyFrame_GetFrameObject(iframe); + // but _PyFrame_GetFrameObject calls _PyFrame_MakeAndSetFrameObject + // which is not a visible symbol in libpython. 
The easiest + // way to get a public function to call it is using + // PyFrame_GetBack, which is defined as follows: + // assert(frame != NULL); + // assert(!_PyFrame_IsIncomplete(frame->f_frame)); + // PyFrameObject *back = frame->f_back; + // if (back == NULL) { + // _PyInterpreterFrame *prev = frame->f_frame->previous; + // prev = _PyFrame_GetFirstComplete(prev); + // if (prev) { + // back = _PyFrame_GetFrameObject(prev); + // } + // } + // return (PyFrameObject*)Py_XNewRef(back); + if (!iframe->frame_obj) { + PyFrameObject dummy_frame; + _PyInterpreterFrame dummy_iframe; + dummy_frame.f_back = nullptr; + dummy_frame.f_frame = &dummy_iframe; + // force the iframe to be considered complete without + // needing to check its code object: + dummy_iframe.owner = FRAME_OWNED_BY_GENERATOR; + dummy_iframe.previous = iframe; + assert(!_PyFrame_IsIncomplete(&dummy_iframe)); + // Drop the returned reference immediately; the iframe + // continues to hold a strong reference + Py_XDECREF(PyFrame_GetBack(&dummy_frame)); + assert(iframe->frame_obj); + } + + // This is a complete frame, so make the last one of those we saw + // point at it, bypassing any incomplete frames (which may have + // been on the C stack) in between the two. We're overwriting + // last_complete_iframe->previous and need that to be reversible, + // so we store the original previous ptr in the frame object + // (which we must have created on a previous iteration through + // this loop). The frame object has a bunch of storage that is + // only used when its iframe is OWNED_BY_FRAME_OBJECT, which only + // occurs when the frame object outlives the frame's execution, + // which can't have happened yet because the frame is currently + // executing as far as the interpreter is concerned. So, we can + // reuse it for our own purposes. + assert(iframe->owner == FRAME_OWNED_BY_THREAD + || iframe->owner == FRAME_OWNED_BY_GENERATOR); + if (last_complete_iframe) { + assert(last_complete_iframe->frame_obj); + memcpy(&last_complete_iframe->frame_obj->_f_frame_data[0], + &last_complete_iframe->previous, sizeof(void *)); + last_complete_iframe->previous = iframe; + } + last_complete_iframe = iframe; + } + // Frames that are OWNED_BY_FRAME_OBJECT are linked via the + // frame's f_back while all others are linked via the iframe's + // previous ptr. Since all the frames we traverse are running + // as far as the interpreter is concerned, we don't have to + // worry about the OWNED_BY_FRAME_OBJECT case. + iframe = iframe_copy.previous; + } + + // Give the outermost complete iframe a null previous pointer to + // account for any potential incomplete/C-stack iframes between it + // and the actual top-of-stack + if (last_complete_iframe) { + assert(last_complete_iframe->frame_obj); + memcpy(&last_complete_iframe->frame_obj->_f_frame_data[0], + &last_complete_iframe->previous, sizeof(void *)); + last_complete_iframe->previous = nullptr; + } +} +#else +void Greenlet::expose_frames() +{ + +} +#endif + +}; // namespace greenlet +#endif diff --git a/venv/lib/python3.11/site-packages/greenlet/TGreenlet.hpp b/venv/lib/python3.11/site-packages/greenlet/TGreenlet.hpp new file mode 100644 index 0000000..9e917e7 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/TGreenlet.hpp @@ -0,0 +1,818 @@ +#ifndef GREENLET_GREENLET_HPP +#define GREENLET_GREENLET_HPP +/* + * Declarations of the core data structures. 
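+ * + * Editor's note: the frame-exposure machinery in TGreenlet.cpp above is + * what keeps ``gr_frame`` usable on 3.12+. A hedged sketch of the + * introspection it preserves: + * + * import greenlet + * def run(): + * greenlet.getcurrent().parent.switch() + * g = greenlet.greenlet(run) + * g.switch() # g is now suspended inside run() + * print(g.gr_frame.f_code.co_name) # -> 'run'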
+*/ + +#define PY_SSIZE_T_CLEAN +#include <Python.h> + +#include "greenlet_compiler_compat.hpp" +#include "greenlet_refs.hpp" +#include "greenlet_cpython_compat.hpp" +#include "greenlet_allocator.hpp" + +using greenlet::refs::OwnedObject; +using greenlet::refs::OwnedGreenlet; +using greenlet::refs::OwnedMainGreenlet; +using greenlet::refs::BorrowedGreenlet; + +#if PY_VERSION_HEX < 0x30B00A6 +# define _PyCFrame CFrame +# define _PyInterpreterFrame _interpreter_frame +#endif + +#if GREENLET_PY312 +# define Py_BUILD_CORE +# include "internal/pycore_frame.h" +#endif + +#if GREENLET_PY314 +# include "internal/pycore_interpframe_structs.h" +# include "internal/pycore_interpframe.h" +#endif + +// XXX: TODO: Work to remove all virtual functions +// for speed of calling and size of objects (no vtable). +// One pattern is the Curiously Recurring Template Pattern. +namespace greenlet +{ + class ExceptionState + { + private: + G_NO_COPIES_OF_CLS(ExceptionState); + + // Even though these are borrowed objects, we actually own + // them, when they're not null. + // XXX: Express that in the API. + private: + _PyErr_StackItem* exc_info; + _PyErr_StackItem exc_state; + public: + ExceptionState(); + void operator<<(const PyThreadState *const tstate) noexcept; + void operator>>(PyThreadState* tstate) noexcept; + void clear() noexcept; + + int tp_traverse(visitproc visit, void* arg) noexcept; + void tp_clear() noexcept; + }; + + template<typename T> + void operator<<(const PyThreadState *const tstate, T& exc); + + class PythonStateContext + { + protected: + greenlet::refs::OwnedContext _context; + public: + inline const greenlet::refs::OwnedContext& context() const + { + return this->_context; + } + inline greenlet::refs::OwnedContext& context() + { + return this->_context; + } + + inline void tp_clear() + { + this->_context.CLEAR(); + } + + template<typename T> + inline static PyObject* context(T* tstate) + { + return tstate->context; + } + + template<typename T> + inline static void context(T* tstate, PyObject* new_context) + { + tstate->context = new_context; + tstate->context_ver++; + } + }; + class SwitchingArgs; + class PythonState : public PythonStateContext + { + public: + typedef greenlet::refs::OwnedReference<PyFrameObject> OwnedFrame; + private: + G_NO_COPIES_OF_CLS(PythonState); + // We own this if we're suspended (although currently we don't + // tp_traverse into it; that's a TODO). If we're running, it's + // empty. If we get deallocated and *still* have a frame, it + // won't be reachable from the place that normally decref's + // it, so we need to do it (hence owning it). + OwnedFrame _top_frame; +#if GREENLET_USE_CFRAME + _PyCFrame* cframe; + int use_tracing; +#endif +#if GREENLET_PY312 + int py_recursion_depth; + int c_recursion_depth; +#else + int recursion_depth; +#endif +#if GREENLET_PY313 + PyObject *delete_later; +#else + int trash_delete_nesting; +#endif +#if GREENLET_PY311 + _PyInterpreterFrame* current_frame; + _PyStackChunk* datastack_chunk; + PyObject** datastack_top; + PyObject** datastack_limit; +#endif + // The PyInterpreterFrame list on 3.12+ contains some entries that are + // on the C stack, which can't be directly accessed while a greenlet is + // suspended. In order to keep greenlet gr_frame introspection working, + // we adjust stack switching to rewrite the interpreter frame list + // to skip these C-stack frames; we call this "exposing" the greenlet's + // frames because it makes them valid to work with in Python. Then when + // the greenlet is resumed we need to remember to reverse the operation + // we did.
The C-stack frames are "entry frames" which are a low-level + // interpreter detail; they're not needed for introspection, but do + // need to be present for the eval loop to work. + void unexpose_frames(); + + public: + + PythonState(); + // You can use this for testing whether we have a frame + // or not. It returns const so they can't modify it. + const OwnedFrame& top_frame() const noexcept; + + inline void operator<<(const PyThreadState *const tstate) noexcept; + inline void operator>>(PyThreadState* tstate) noexcept; + void clear() noexcept; + + int tp_traverse(visitproc visit, void* arg, bool visit_top_frame) noexcept; + void tp_clear(bool own_top_frame) noexcept; + void set_initial_state(const PyThreadState* const tstate) noexcept; +#if GREENLET_USE_CFRAME + void set_new_cframe(_PyCFrame& frame) noexcept; +#endif + + void may_switch_away() noexcept; + inline void will_switch_from(PyThreadState *const origin_tstate) noexcept; + void did_finish(PyThreadState* tstate) noexcept; + }; + + class StackState + { + // By having only plain C (POD) members, no virtual functions + // or bases, we get a trivial assignment operator generated + // for us. However, that's not safe since we do manage memory. + // So we declare an assignment operator that only works if we + // don't have any memory allocated. (We don't use + // std::shared_ptr for reference counting just to keep this + // object small) + private: + char* _stack_start; + char* stack_stop; + char* stack_copy; + intptr_t _stack_saved; + StackState* stack_prev; + inline int copy_stack_to_heap_up_to(const char* const stop) noexcept; + inline void free_stack_copy() noexcept; + + public: + /** + * Creates a started, but inactive, state, using *current* + * as the previous. + */ + StackState(void* mark, StackState& current); + /** + * Creates an inactive, unstarted, state. + */ + StackState(); + ~StackState(); + StackState(const StackState& other); + StackState& operator=(const StackState& other); + inline void copy_heap_to_stack(const StackState& current) noexcept; + inline int copy_stack_to_heap(char* const stackref, const StackState& current) noexcept; + inline bool started() const noexcept; + inline bool main() const noexcept; + inline bool active() const noexcept; + inline void set_active() noexcept; + inline void set_inactive() noexcept; + inline intptr_t stack_saved() const noexcept; + inline char* stack_start() const noexcept; + static inline StackState make_main() noexcept; +#ifdef GREENLET_USE_STDIO + friend std::ostream& operator<<(std::ostream& os, const StackState& s); +#endif + + // Fill in [dest, dest + n) with the values that would be at + // [src, src + n) while this greenlet is running. This is like memcpy + // except that if the greenlet is suspended it accounts for the portion + // of the greenlet's stack that was spilled to the heap. `src` may + // be on this greenlet's stack, or on the heap, but not on a different + // greenlet's stack. + void copy_from_stack(void* dest, const void* src, size_t n) const; + }; +#ifdef GREENLET_USE_STDIO + std::ostream& operator<<(std::ostream& os, const StackState& s); +#endif + + class SwitchingArgs + { + private: + G_NO_ASSIGNMENT_OF_CLS(SwitchingArgs); + // If args and kwargs are both false (NULL), this is a *throw*, not a + // switch. PyErr_... must have been called already. 
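+ // + // (Editor's note: these two fields carry the values whose packing rules + // are described in ``green_switch_doc`` -- a hedged sketch: + // + // import greenlet + // def run(): + // parent = greenlet.getcurrent().parent + // print(parent.switch()) # resumed with no args -> None + // print(parent.switch()) # resumed with one arg -> that arg + // g = greenlet.greenlet(run) + // g.switch() # starts run() + // g.switch() # prints None + // g.switch("hi") # prints 'hi' + // )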
+ OwnedObject _args; + OwnedObject _kwargs; + public: + + SwitchingArgs() + {} + + SwitchingArgs(const OwnedObject& args, const OwnedObject& kwargs) + : _args(args), + _kwargs(kwargs) + {} + + SwitchingArgs(const SwitchingArgs& other) + : _args(other._args), + _kwargs(other._kwargs) + {} + + const OwnedObject& args() + { + return this->_args; + } + + const OwnedObject& kwargs() + { + return this->_kwargs; + } + + /** + * Moves ownership from the argument to this object. + */ + SwitchingArgs& operator<<=(SwitchingArgs& other) + { + if (this != &other) { + this->_args = other._args; + this->_kwargs = other._kwargs; + other.CLEAR(); + } + return *this; + } + + /** + * Acquires ownership of the argument (consumes the reference). + */ + SwitchingArgs& operator<<=(PyObject* args) + { + this->_args = OwnedObject::consuming(args); + this->_kwargs.CLEAR(); + return *this; + } + + /** + * Acquires ownership of the argument. + * + * Sets the args to be the given value; clears the kwargs. + */ + SwitchingArgs& operator<<=(OwnedObject& args) + { + assert(&args != &this->_args); + this->_args = args; + this->_kwargs.CLEAR(); + args.CLEAR(); + + return *this; + } + + explicit operator bool() const noexcept + { + return this->_args || this->_kwargs; + } + + inline void CLEAR() + { + this->_args.CLEAR(); + this->_kwargs.CLEAR(); + } + + const std::string as_str() const noexcept + { + return PyUnicode_AsUTF8( + OwnedObject::consuming( + PyUnicode_FromFormat( + "SwitchingArgs(args=%R, kwargs=%R)", + this->_args.borrow(), + this->_kwargs.borrow() + ) + ).borrow() + ); + } + }; + + class ThreadState; + + class UserGreenlet; + class MainGreenlet; + + class Greenlet + { + private: + G_NO_COPIES_OF_CLS(Greenlet); + PyGreenlet* const _self; + private: + // XXX: Work to remove these. + friend class ThreadState; + friend class UserGreenlet; + friend class MainGreenlet; + protected: + ExceptionState exception_state; + SwitchingArgs switch_args; + StackState stack_state; + PythonState python_state; + Greenlet(PyGreenlet* p, const StackState& initial_state); + public: + // This constructor takes ownership of the PyGreenlet, by + // setting ``p->pimpl = this;``. + Greenlet(PyGreenlet* p); + virtual ~Greenlet(); + + const OwnedObject context() const; + + // You MUST call this _very_ early in the switching process to + // prepare anything that may need prepared. This might perform + // garbage collections or otherwise run arbitrary Python code. + // + // One specific use of it is for Python 3.11+, preventing + // running arbitrary code at unsafe times. See + // PythonState::may_switch_away(). + inline void may_switch_away() + { + this->python_state.may_switch_away(); + } + + inline void context(refs::BorrowedObject new_context); + + inline SwitchingArgs& args() + { + return this->switch_args; + } + + virtual const refs::BorrowedMainGreenlet main_greenlet() const = 0; + + inline intptr_t stack_saved() const noexcept + { + return this->stack_state.stack_saved(); + } + + // This is used by the macro SLP_SAVE_STATE to compute the + // difference in stack sizes. It might be nice to handle the + // computation ourself, but the type of the result + // varies by platform, so doing it in the macro is the + // simplest way. + inline const char* stack_start() const noexcept + { + return this->stack_state.stack_start(); + } + + virtual OwnedObject throw_GreenletExit_during_dealloc(const ThreadState& current_thread_state); + virtual OwnedObject g_switch() = 0; + /** + * Force the greenlet to appear dead. 
Used when it's not
+         * possible to throw an exception into a greenlet anymore.
+         *
+         * This loses access to the thread state and the main greenlet.
+         */
+        virtual void murder_in_place();
+
+        /**
+         * Called when somebody notices we were running in a dead
+         * thread to allow cleaning up resources (because we can't
+         * raise GreenletExit into it anymore).
+         * This is very similar to ``murder_in_place()``, except that
+         * it DOES NOT lose the main greenlet or thread state.
+         */
+        inline void deactivate_and_free();
+
+
+        // Called when some thread wants to deallocate a greenlet
+        // object.
+        // The thread may or may not be the same thread the greenlet
+        // was running in.
+        // The thread state will be null if the thread the greenlet
+        // was running in was known to have exited.
+        void deallocing_greenlet_in_thread(const ThreadState* current_state);
+
+        // Must be called on 3.12+ before exposing a suspended greenlet's
+        // frames to user code. This rewrites the linked list of interpreter
+        // frames to skip the ones that are being stored on the C stack (which
+        // can't be safely accessed while the greenlet is suspended because
+        // that stack space might be hosting a different greenlet), and
+        // sets PythonState::frames_were_exposed so we remember to restore
+        // the original list before resuming the greenlet. The C-stack frames
+        // are a low-level interpreter implementation detail; while they're
+        // important to the bytecode eval loop, they're superfluous for
+        // introspection purposes.
+        void expose_frames();
+
+
+        // TODO: Figure out how to make these non-public.
+        inline void slp_restore_state() noexcept;
+        inline int slp_save_state(char *const stackref) noexcept;
+
+        inline bool is_currently_running_in_some_thread() const;
+        virtual bool belongs_to_thread(const ThreadState* state) const;
+
+        inline bool started() const
+        {
+            return this->stack_state.started();
+        }
+        inline bool active() const
+        {
+            return this->stack_state.active();
+        }
+        inline bool main() const
+        {
+            return this->stack_state.main();
+        }
+        virtual refs::BorrowedMainGreenlet find_main_greenlet_in_lineage() const = 0;
+
+        virtual const OwnedGreenlet parent() const = 0;
+        virtual void parent(const refs::BorrowedObject new_parent) = 0;
+
+        inline const PythonState::OwnedFrame& top_frame()
+        {
+            return this->python_state.top_frame();
+        }
+
+        virtual const OwnedObject& run() const = 0;
+        virtual void run(const refs::BorrowedObject nrun) = 0;
+
+
+        virtual int tp_traverse(visitproc visit, void* arg);
+        virtual int tp_clear();
+
+
+        // Return the thread state that the greenlet is running in, or
+        // null if the greenlet is not running or the thread is known
+        // to have exited.
+        virtual ThreadState* thread_state() const noexcept = 0;
+
+        // Return true if the greenlet is known to have been running
+        // (active) in a thread that has now exited.
+        virtual bool was_running_in_dead_thread() const noexcept = 0;
+
+        // Return a borrowed greenlet that is the Python object
+        // this object represents.
+        inline BorrowedGreenlet self() const noexcept
+        {
+            return BorrowedGreenlet(this->_self);
+        }
+
+        // For testing. If this returns true, we should pretend that
+        // slp_switch() failed.
+        virtual bool force_slp_switch_error() const noexcept;
+
+    protected:
+        inline void release_args();
+
+        // The functions that must not be inlined are declared virtual.
+        // We also mark them as protected, not private, so that the
+        // compiler is forced to call them through a function pointer.
+        // (A sufficiently smart compiler could directly call a private
+        // virtual function since it can never be overridden in a
+        // subclass).
+
+        // Also TODO: Switch away from integer error codes and to enums,
+        // or throw exceptions when possible.
+        struct switchstack_result_t
+        {
+            int status;
+            Greenlet* the_new_current_greenlet;
+            OwnedGreenlet origin_greenlet;
+
+            switchstack_result_t()
+                : status(0),
+                  the_new_current_greenlet(nullptr)
+            {}
+
+            switchstack_result_t(int err)
+                : status(err),
+                  the_new_current_greenlet(nullptr)
+            {}
+
+            switchstack_result_t(int err, Greenlet* state, OwnedGreenlet& origin)
+                : status(err),
+                  the_new_current_greenlet(state),
+                  origin_greenlet(origin)
+            {
+            }
+
+            switchstack_result_t(int err, Greenlet* state, const BorrowedGreenlet& origin)
+                : status(err),
+                  the_new_current_greenlet(state),
+                  origin_greenlet(origin)
+            {
+            }
+
+            switchstack_result_t(const switchstack_result_t& other)
+                : status(other.status),
+                  the_new_current_greenlet(other.the_new_current_greenlet),
+                  origin_greenlet(other.origin_greenlet)
+            {}
+
+            switchstack_result_t& operator=(const switchstack_result_t& other)
+            {
+                this->status = other.status;
+                this->the_new_current_greenlet = other.the_new_current_greenlet;
+                this->origin_greenlet = other.origin_greenlet;
+                return *this;
+            }
+        };
+
+        OwnedObject on_switchstack_or_initialstub_failure(
+            Greenlet* target,
+            const switchstack_result_t& err,
+            const bool target_was_me=false,
+            const bool was_initial_stub=false);
+
+        // Returns the previous greenlet we just switched away from.
+        virtual OwnedGreenlet g_switchstack_success() noexcept;
+
+
+        // Check the preconditions for switching to this greenlet; if they
+        // aren't met, throws PyErrOccurred. Most callers will want to
+        // catch this and clear the arguments.
+        inline void check_switch_allowed() const;
+        class GreenletStartedWhileInPython : public std::runtime_error
+        {
+        public:
+            GreenletStartedWhileInPython() : std::runtime_error("")
+            {}
+        };
+
+    protected:
+
+
+        /**
+           Perform a stack switch into this greenlet.
+
+           This temporarily sets the global variable
+           ``switching_thread_state`` to this greenlet; as soon as the
+           call to ``slp_switch`` completes, this is reset to NULL.
+           Consequently, this depends on the GIL.
+
+           TODO: Adopt the stackman model and pass ``slp_switch`` a
+           callback function and context pointer; this eliminates the
+           need for global variables altogether.
+
+           Because the stack switch happens in this function, this
+           function can't use its own stack (local) variables, set
+           before the switch, and then accessed after the switch.
+
+           Further, you can't even access ``g_thread_state_global``
+           before and after the switch from the global variable.
+           Because it is thread local, some compilers cache it in a
+           register/on the stack, notably new versions of MSVC; this
+           breaks with strange crashes sometime later, because writing
+           to anything in ``g_thread_state_global`` after the switch
+           is actually writing to random memory. For this reason, we
+           call a non-inlined function to finish the operation. (XXX:
+           The ``/GT`` MSVC compiler argument probably fixes that.)
+
+           It is very important that the stack switch is 'atomic', i.e. no
+           calls into other Python code allowed (except very few that
+           are safe), because global variables are very fragile. (This
+           should no longer be the case with thread-local variables.)
+
+        */
+        // Made virtual to facilitate subclassing UserGreenlet for testing.
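+        // (Editor's note: ``BrokenGreenlet``, declared further down in
+        // this header, is such a test subclass; it overrides
+        // ``g_switchstack`` and ``force_slp_switch_error`` to simulate
+        // switch failures.)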
+ virtual switchstack_result_t g_switchstack(void); + +class TracingGuard +{ +private: + PyThreadState* tstate; +public: + TracingGuard() + : tstate(PyThreadState_GET()) + { + PyThreadState_EnterTracing(this->tstate); + } + + ~TracingGuard() + { + PyThreadState_LeaveTracing(this->tstate); + this->tstate = nullptr; + } + + inline void CallTraceFunction(const OwnedObject& tracefunc, + const greenlet::refs::ImmortalEventName& event, + const BorrowedGreenlet& origin, + const BorrowedGreenlet& target) + { + // TODO: This calls tracefunc(event, (origin, target)). Add a shortcut + // function for that that's specialized to avoid the Py_BuildValue + // string parsing, or start with just using "ON" format with PyTuple_Pack(2, + // origin, target). That seems like what the N format is meant + // for. + // XXX: Why does event not automatically cast back to a PyObject? + // It tries to call the "deleted constructor ImmortalEventName + // const" instead. + assert(tracefunc); + assert(event); + assert(origin); + assert(target); + greenlet::refs::NewReference retval( + PyObject_CallFunction( + tracefunc.borrow(), + "O(OO)", + event.borrow(), + origin.borrow(), + target.borrow() + )); + if (!retval) { + throw PyErrOccurred::from_current(); + } + } +}; + + static void + g_calltrace(const OwnedObject& tracefunc, + const greenlet::refs::ImmortalEventName& event, + const greenlet::refs::BorrowedGreenlet& origin, + const BorrowedGreenlet& target); + private: + OwnedObject g_switch_finish(const switchstack_result_t& err); + + }; + + class UserGreenlet : public Greenlet + { + private: + static greenlet::PythonAllocator allocator; + OwnedMainGreenlet _main_greenlet; + OwnedObject _run_callable; + OwnedGreenlet _parent; + public: + static void* operator new(size_t UNUSED(count)); + static void operator delete(void* ptr); + + UserGreenlet(PyGreenlet* p, BorrowedGreenlet the_parent); + virtual ~UserGreenlet(); + + virtual refs::BorrowedMainGreenlet find_main_greenlet_in_lineage() const; + virtual bool was_running_in_dead_thread() const noexcept; + virtual ThreadState* thread_state() const noexcept; + virtual OwnedObject g_switch(); + virtual const OwnedObject& run() const + { + if (this->started() || !this->_run_callable) { + throw AttributeError("run"); + } + return this->_run_callable; + } + virtual void run(const refs::BorrowedObject nrun); + + virtual const OwnedGreenlet parent() const; + virtual void parent(const refs::BorrowedObject new_parent); + + virtual const refs::BorrowedMainGreenlet main_greenlet() const; + + virtual void murder_in_place(); + virtual bool belongs_to_thread(const ThreadState* state) const; + virtual int tp_traverse(visitproc visit, void* arg); + virtual int tp_clear(); + class ParentIsCurrentGuard + { + private: + OwnedGreenlet oldparent; + UserGreenlet* greenlet; + G_NO_COPIES_OF_CLS(ParentIsCurrentGuard); + public: + ParentIsCurrentGuard(UserGreenlet* p, const ThreadState& thread_state); + ~ParentIsCurrentGuard(); + }; + virtual OwnedObject throw_GreenletExit_during_dealloc(const ThreadState& current_thread_state); + protected: + virtual switchstack_result_t g_initialstub(void* mark); + private: + // This function isn't meant to return. + // This accepts raw pointers and the ownership of them at the + // same time. The caller should use ``inner_bootstrap(origin.relinquish_ownership())``. 
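+        // Editor's sketch of the intended call pattern, assuming the
+        // ownership convention described above (names are illustrative):
+        //
+        //     // the smart pointers give up their references; the raw
+        //     // pointers carry them into the noreturn bootstrap.
+        //     this->inner_bootstrap(err.origin_greenlet.relinquish_ownership(),
+        //                           run.relinquish_ownership());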
+ void inner_bootstrap(PyGreenlet* origin_greenlet, PyObject* run); + }; + + class BrokenGreenlet : public UserGreenlet + { + private: + static greenlet::PythonAllocator allocator; + public: + bool _force_switch_error = false; + bool _force_slp_switch_error = false; + + static void* operator new(size_t UNUSED(count)); + static void operator delete(void* ptr); + BrokenGreenlet(PyGreenlet* p, BorrowedGreenlet the_parent) + : UserGreenlet(p, the_parent) + {} + virtual ~BrokenGreenlet() + {} + + virtual switchstack_result_t g_switchstack(void); + virtual bool force_slp_switch_error() const noexcept; + + }; + + class MainGreenlet : public Greenlet + { + private: + static greenlet::PythonAllocator allocator; + refs::BorrowedMainGreenlet _self; + ThreadState* _thread_state; + G_NO_COPIES_OF_CLS(MainGreenlet); + public: + static void* operator new(size_t UNUSED(count)); + static void operator delete(void* ptr); + + MainGreenlet(refs::BorrowedMainGreenlet::PyType*, ThreadState*); + virtual ~MainGreenlet(); + + + virtual const OwnedObject& run() const; + virtual void run(const refs::BorrowedObject nrun); + + virtual const OwnedGreenlet parent() const; + virtual void parent(const refs::BorrowedObject new_parent); + + virtual const refs::BorrowedMainGreenlet main_greenlet() const; + + virtual refs::BorrowedMainGreenlet find_main_greenlet_in_lineage() const; + virtual bool was_running_in_dead_thread() const noexcept; + virtual ThreadState* thread_state() const noexcept; + void thread_state(ThreadState*) noexcept; + virtual OwnedObject g_switch(); + virtual int tp_traverse(visitproc visit, void* arg); + }; + + // Instantiate one on the stack to save the GC state, + // and then disable GC. When it goes out of scope, GC will be + // restored to its original state. Sadly, these APIs are only + // available on 3.10+; luckily, we only need them on 3.11+. +#if GREENLET_PY310 + class GCDisabledGuard + { + private: + int was_enabled = 0; + public: + GCDisabledGuard() + : was_enabled(PyGC_IsEnabled()) + { + PyGC_Disable(); + } + + ~GCDisabledGuard() + { + if (this->was_enabled) { + PyGC_Enable(); + } + } + }; +#endif + + OwnedObject& operator<<=(OwnedObject& lhs, greenlet::SwitchingArgs& rhs) noexcept; + + //TODO: Greenlet::g_switch() should call this automatically on its + //return value. As it is, the module code is calling it. + static inline OwnedObject + single_result(const OwnedObject& results) + { + if (results + && PyTuple_Check(results.borrow()) + && PyTuple_GET_SIZE(results.borrow()) == 1) { + PyObject* result = PyTuple_GET_ITEM(results.borrow(), 0); + assert(result); + return OwnedObject::owning(result); + } + return results; + } + + + static OwnedObject + g_handle_exit(const OwnedObject& greenlet_result); + + + template + void operator<<(const PyThreadState *const lhs, T& rhs) + { + rhs.operator<<(lhs); + } + +} // namespace greenlet ; + +#endif diff --git a/venv/lib/python3.11/site-packages/greenlet/TGreenletGlobals.cpp b/venv/lib/python3.11/site-packages/greenlet/TGreenletGlobals.cpp new file mode 100644 index 0000000..0087d2f --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/TGreenletGlobals.cpp @@ -0,0 +1,94 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +/** + * Implementation of GreenletGlobals. 
+ * + * Format with: + * clang-format -i --style=file src/greenlet/greenlet.c + * + * + * Fix missing braces with: + * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" +*/ +#ifndef T_GREENLET_GLOBALS +#define T_GREENLET_GLOBALS + +#include "greenlet_refs.hpp" +#include "greenlet_exceptions.hpp" +#include "greenlet_thread_support.hpp" +#include "greenlet_internal.hpp" + +namespace greenlet { + +// This encapsulates what were previously module global "constants" +// established at init time. +// This is a step towards Python3 style module state that allows +// reloading. +// +// In an earlier iteration of this code, we used placement new to be +// able to allocate this object statically still, so that references +// to its members don't incur an extra pointer indirection. +// But under some scenarios, that could result in crashes at +// shutdown because apparently the destructor was getting run twice? +class GreenletGlobals +{ + +public: + const greenlet::refs::ImmortalEventName event_switch; + const greenlet::refs::ImmortalEventName event_throw; + const greenlet::refs::ImmortalException PyExc_GreenletError; + const greenlet::refs::ImmortalException PyExc_GreenletExit; + const greenlet::refs::ImmortalObject empty_tuple; + const greenlet::refs::ImmortalObject empty_dict; + const greenlet::refs::ImmortalString str_run; + Mutex* const thread_states_to_destroy_lock; + greenlet::cleanup_queue_t thread_states_to_destroy; + + GreenletGlobals() : + event_switch("switch"), + event_throw("throw"), + PyExc_GreenletError("greenlet.error"), + PyExc_GreenletExit("greenlet.GreenletExit", PyExc_BaseException), + empty_tuple(Require(PyTuple_New(0))), + empty_dict(Require(PyDict_New())), + str_run("run"), + thread_states_to_destroy_lock(new Mutex()) + {} + + ~GreenletGlobals() + { + // This object is (currently) effectively immortal, and not + // just because of those placement new tricks; if we try to + // deallocate the static object we allocated, and overwrote, + // we would be doing so at C++ teardown time, which is after + // the final Python GIL is released, and we can't use the API + // then. + // (The members will still be destructed, but they also don't + // do any deallocation.) + } + + void queue_to_destroy(ThreadState* ts) const + { + // we're currently accessed through a static const object, + // implicitly marking our members as const, so code can't just + // call push_back (or pop_back) without casting away the + // const. + // + // Do that for callers. + greenlet::cleanup_queue_t& q = const_cast(this->thread_states_to_destroy); + q.push_back(ts); + } + + ThreadState* take_next_to_destroy() const + { + greenlet::cleanup_queue_t& q = const_cast(this->thread_states_to_destroy); + ThreadState* result = q.back(); + q.pop_back(); + return result; + } +}; + +}; // namespace greenlet + +static const greenlet::GreenletGlobals* mod_globs; + +#endif // T_GREENLET_GLOBALS diff --git a/venv/lib/python3.11/site-packages/greenlet/TMainGreenlet.cpp b/venv/lib/python3.11/site-packages/greenlet/TMainGreenlet.cpp new file mode 100644 index 0000000..a2a9cfe --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/TMainGreenlet.cpp @@ -0,0 +1,153 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +/** + * Implementation of greenlet::MainGreenlet. 
+ * + * Format with: + * clang-format -i --style=file src/greenlet/greenlet.c + * + * + * Fix missing braces with: + * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" +*/ +#ifndef T_MAIN_GREENLET_CPP +#define T_MAIN_GREENLET_CPP + +#include "TGreenlet.hpp" + + + +// Protected by the GIL. Incremented when we create a main greenlet, +// in a new thread, decremented when it is destroyed. +static Py_ssize_t G_TOTAL_MAIN_GREENLETS; + +namespace greenlet { +greenlet::PythonAllocator MainGreenlet::allocator; + +void* MainGreenlet::operator new(size_t UNUSED(count)) +{ + return allocator.allocate(1); +} + + +void MainGreenlet::operator delete(void* ptr) +{ + return allocator.deallocate(static_cast(ptr), + 1); +} + + +MainGreenlet::MainGreenlet(PyGreenlet* p, ThreadState* state) + : Greenlet(p, StackState::make_main()), + _self(p), + _thread_state(state) +{ + G_TOTAL_MAIN_GREENLETS++; +} + +MainGreenlet::~MainGreenlet() +{ + G_TOTAL_MAIN_GREENLETS--; + this->tp_clear(); +} + +ThreadState* +MainGreenlet::thread_state() const noexcept +{ + return this->_thread_state; +} + +void +MainGreenlet::thread_state(ThreadState* t) noexcept +{ + assert(!t); + this->_thread_state = t; +} + + +const BorrowedMainGreenlet +MainGreenlet::main_greenlet() const +{ + return this->_self; +} + +BorrowedMainGreenlet +MainGreenlet::find_main_greenlet_in_lineage() const +{ + return BorrowedMainGreenlet(this->_self); +} + +bool +MainGreenlet::was_running_in_dead_thread() const noexcept +{ + return !this->_thread_state; +} + +OwnedObject +MainGreenlet::g_switch() +{ + try { + this->check_switch_allowed(); + } + catch (const PyErrOccurred&) { + this->release_args(); + throw; + } + + switchstack_result_t err = this->g_switchstack(); + if (err.status < 0) { + // XXX: This code path is untested, but it is shared + // with the UserGreenlet path that is tested. + return this->on_switchstack_or_initialstub_failure( + this, + err, + true, // target was me + false // was initial stub + ); + } + + return err.the_new_current_greenlet->g_switch_finish(err); +} + +int +MainGreenlet::tp_traverse(visitproc visit, void* arg) +{ + if (this->_thread_state) { + // we've already traversed main, (self), don't do it again. 
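+        // (Editor's note: a non-zero result from the delegated traversal
+        //  must be propagated immediately, matching the contract of the
+        //  ``Py_VISIT`` macro used elsewhere, which returns early from
+        //  the enclosing function on a non-zero visit result.)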
+        int result = this->_thread_state->tp_traverse(visit, arg, false);
+        if (result) {
+            return result;
+        }
+    }
+    return Greenlet::tp_traverse(visit, arg);
+}
+
+const OwnedObject&
+MainGreenlet::run() const
+{
+    throw AttributeError("Main greenlets do not have a run attribute.");
+}
+
+void
+MainGreenlet::run(const BorrowedObject UNUSED(nrun))
+{
+    throw AttributeError("Main greenlets do not have a run attribute.");
+}
+
+void
+MainGreenlet::parent(const BorrowedObject raw_new_parent)
+{
+    if (!raw_new_parent) {
+        throw AttributeError("can't delete attribute");
+    }
+    throw AttributeError("cannot set the parent of a main greenlet");
+}
+
+const OwnedGreenlet
+MainGreenlet::parent() const
+{
+    return OwnedGreenlet(); // null becomes None
+}
+
+}; // namespace greenlet
+
+#endif
diff --git a/venv/lib/python3.11/site-packages/greenlet/TPythonState.cpp b/venv/lib/python3.11/site-packages/greenlet/TPythonState.cpp
new file mode 100644
index 0000000..cc5dff5
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/TPythonState.cpp
@@ -0,0 +1,393 @@
+#ifndef GREENLET_PYTHON_STATE_CPP
+#define GREENLET_PYTHON_STATE_CPP
+
+#include <Python.h>
+#include "TGreenlet.hpp"
+
+namespace greenlet {
+
+PythonState::PythonState()
+    : _top_frame()
+#if GREENLET_USE_CFRAME
+    ,cframe(nullptr)
+    ,use_tracing(0)
+#endif
+#if GREENLET_PY312
+    ,py_recursion_depth(0)
+    ,c_recursion_depth(0)
+#else
+    ,recursion_depth(0)
+#endif
+#if GREENLET_PY313
+    ,delete_later(nullptr)
+#else
+    ,trash_delete_nesting(0)
+#endif
+#if GREENLET_PY311
+    ,current_frame(nullptr)
+    ,datastack_chunk(nullptr)
+    ,datastack_top(nullptr)
+    ,datastack_limit(nullptr)
+#endif
+{
+#if GREENLET_USE_CFRAME
+    /*
+      The PyThreadState->cframe pointer usually points to memory on
+      the stack, allocated in a call into PyEval_EvalFrameDefault.
+
+      Initially, before any evaluation begins, it points to the
+      initial PyThreadState object's ``root_cframe`` object, which is
+      statically allocated for the lifetime of the thread.
+
+      A greenlet can last for longer than a call to
+      PyEval_EvalFrameDefault, so we can't set its ``cframe`` pointer
+      to be the current ``PyThreadState->cframe``; nor could we use
+      one from the greenlet parent for the same reason. Yet a further
+      no: we can't allocate one scoped to the greenlet and then
+      destroy it when the greenlet is deallocated, because inside the
+      interpreter the _PyCFrame objects form a linked list, and that too
+      can result in accessing memory beyond its dynamic lifetime (if
+      the greenlet doesn't actually finish before it dies, its entry
+      could still be in the list).
+
+      Using the ``root_cframe`` is problematic, though, because its
+      members are never modified by the interpreter and are set to 0,
+      meaning that its ``use_tracing`` flag is never updated. We don't
+      want to modify that value in the ``root_cframe`` ourself: it
+      *shouldn't* matter much because we should probably never get
+      back to the point where that's the only cframe on the stack;
+      even if it did matter, the major consequence of an incorrect
+      value for ``use_tracing`` is that if it's true the interpreter
+      does some extra work --- however, it's just good code hygiene.
+
+      Our solution: before a greenlet runs, after its initial
+      creation, it uses the ``root_cframe`` just to have something to
+      put there.
However, once the greenlet is actually switched to + for the first time, ``g_initialstub`` (which doesn't actually + "return" while the greenlet is running) stores a new _PyCFrame on + its local stack, and copies the appropriate values from the + currently running _PyCFrame; this is then made the _PyCFrame for the + newly-minted greenlet. ``g_initialstub`` then proceeds to call + ``glet.run()``, which results in ``PyEval_...`` adding the + _PyCFrame to the list. Switches continue as normal. Finally, when + the greenlet finishes, the call to ``glet.run()`` returns and + the _PyCFrame is taken out of the linked list and the stack value + is now unused and free to expire. + + XXX: I think we can do better. If we're deallocing in the same + thread, can't we traverse the list and unlink our frame? + Can we just keep a reference to the thread state in case we + dealloc in another thread? (Is that even possible if we're still + running and haven't returned from g_initialstub?) + */ + this->cframe = &PyThreadState_GET()->root_cframe; +#endif +} + + +inline void PythonState::may_switch_away() noexcept +{ +#if GREENLET_PY311 + // PyThreadState_GetFrame is probably going to have to allocate a + // new frame object. That may trigger garbage collection. Because + // we call this during the early phases of a switch (it doesn't + // matter to which greenlet, as this has a global effect), if a GC + // triggers a switch away, two things can happen, both bad: + // - We might not get switched back to, halting forward progress. + // this is pathological, but possible. + // - We might get switched back to with a different set of + // arguments or a throw instead of a switch. That would corrupt + // our state (specifically, PyErr_Occurred() and this->args() + // would no longer agree). + // + // Thus, when we call this API, we need to have GC disabled. + // This method serves as a bottleneck we call when maybe beginning + // a switch. In this way, it is always safe -- no risk of GC -- to + // use ``_GetFrame()`` whenever we need to, just as it was in + // <=3.10 (because subsequent calls will be cached and not + // allocate memory). + + GCDisabledGuard no_gc; + Py_XDECREF(PyThreadState_GetFrame(PyThreadState_GET())); +#endif +} + +void PythonState::operator<<(const PyThreadState *const tstate) noexcept +{ + this->_context.steal(tstate->context); +#if GREENLET_USE_CFRAME + /* + IMPORTANT: ``cframe`` is a pointer into the STACK. Thus, because + the call to ``slp_switch()`` changes the contents of the stack, + you cannot read from ``ts_current->cframe`` after that call and + necessarily get the same values you get from reading it here. + Anything you need to restore from now to then must be saved in a + global/threadlocal variable (because we can't use stack + variables here either). For things that need to persist across + the switch, use `will_switch_from`. 
+ */ + this->cframe = tstate->cframe; + #if !GREENLET_PY312 + this->use_tracing = tstate->cframe->use_tracing; + #endif +#endif // GREENLET_USE_CFRAME +#if GREENLET_PY311 + #if GREENLET_PY312 + this->py_recursion_depth = tstate->py_recursion_limit - tstate->py_recursion_remaining; + this->c_recursion_depth = Py_C_RECURSION_LIMIT - tstate->c_recursion_remaining; + #else // not 312 + this->recursion_depth = tstate->recursion_limit - tstate->recursion_remaining; + #endif // GREENLET_PY312 + #if GREENLET_PY313 + this->current_frame = tstate->current_frame; + #elif GREENLET_USE_CFRAME + this->current_frame = tstate->cframe->current_frame; + #endif + this->datastack_chunk = tstate->datastack_chunk; + this->datastack_top = tstate->datastack_top; + this->datastack_limit = tstate->datastack_limit; + + PyFrameObject *frame = PyThreadState_GetFrame((PyThreadState *)tstate); + Py_XDECREF(frame); // PyThreadState_GetFrame gives us a new + // reference. + this->_top_frame.steal(frame); + #if GREENLET_PY313 + this->delete_later = Py_XNewRef(tstate->delete_later); + #elif GREENLET_PY312 + this->trash_delete_nesting = tstate->trash.delete_nesting; + #else // not 312 + this->trash_delete_nesting = tstate->trash_delete_nesting; + #endif // GREENLET_PY312 +#else // Not 311 + this->recursion_depth = tstate->recursion_depth; + this->_top_frame.steal(tstate->frame); + this->trash_delete_nesting = tstate->trash_delete_nesting; +#endif // GREENLET_PY311 +} + +#if GREENLET_PY312 +void GREENLET_NOINLINE(PythonState::unexpose_frames)() +{ + if (!this->top_frame()) { + return; + } + + // See GreenletState::expose_frames() and the comment on frames_were_exposed + // for more information about this logic. + _PyInterpreterFrame *iframe = this->_top_frame->f_frame; + while (iframe != nullptr) { + _PyInterpreterFrame *prev_exposed = iframe->previous; + assert(iframe->frame_obj); + memcpy(&iframe->previous, &iframe->frame_obj->_f_frame_data[0], + sizeof(void *)); + iframe = prev_exposed; + } +} +#else +void PythonState::unexpose_frames() +{} +#endif + +void PythonState::operator>>(PyThreadState *const tstate) noexcept +{ + tstate->context = this->_context.relinquish_ownership(); + /* Incrementing this value invalidates the contextvars cache, + which would otherwise remain valid across switches */ + tstate->context_ver++; +#if GREENLET_USE_CFRAME + tstate->cframe = this->cframe; + /* + If we were tracing, we need to keep tracing. + There should never be the possibility of hitting the + root_cframe here. See note above about why we can't + just copy this from ``origin->cframe->use_tracing``. 
+    */
+    #if !GREENLET_PY312
+    tstate->cframe->use_tracing = this->use_tracing;
+    #endif
+#endif // GREENLET_USE_CFRAME
+#if GREENLET_PY311
+  #if GREENLET_PY312
+    tstate->py_recursion_remaining = tstate->py_recursion_limit - this->py_recursion_depth;
+    tstate->c_recursion_remaining = Py_C_RECURSION_LIMIT - this->c_recursion_depth;
+    this->unexpose_frames();
+  #else // \/ 3.11
+    tstate->recursion_remaining = tstate->recursion_limit - this->recursion_depth;
+  #endif // GREENLET_PY312
+  #if GREENLET_PY313
+    tstate->current_frame = this->current_frame;
+  #elif GREENLET_USE_CFRAME
+    tstate->cframe->current_frame = this->current_frame;
+  #endif
+    tstate->datastack_chunk = this->datastack_chunk;
+    tstate->datastack_top = this->datastack_top;
+    tstate->datastack_limit = this->datastack_limit;
+    this->_top_frame.relinquish_ownership();
+  #if GREENLET_PY313
+    Py_XDECREF(tstate->delete_later);
+    tstate->delete_later = this->delete_later;
+    Py_CLEAR(this->delete_later);
+  #elif GREENLET_PY312
+    tstate->trash.delete_nesting = this->trash_delete_nesting;
+  #else // not 3.12
+    tstate->trash_delete_nesting = this->trash_delete_nesting;
+  #endif // GREENLET_PY312
+#else // not 3.11
+    tstate->frame = this->_top_frame.relinquish_ownership();
+    tstate->recursion_depth = this->recursion_depth;
+    tstate->trash_delete_nesting = this->trash_delete_nesting;
+#endif // GREENLET_PY311
+}
+
+inline void PythonState::will_switch_from(PyThreadState *const origin_tstate) noexcept
+{
+#if GREENLET_USE_CFRAME && !GREENLET_PY312
+    // The weird thing is, we don't actually save this for an
+    // effect on the current greenlet, it's saved for an
+    // effect on the target greenlet. That is, we want
+    // continuity of this setting across the greenlet switch.
+    this->use_tracing = origin_tstate->cframe->use_tracing;
+#endif
+}
+
+void PythonState::set_initial_state(const PyThreadState* const tstate) noexcept
+{
+    this->_top_frame = nullptr;
+#if GREENLET_PY312
+    this->py_recursion_depth = tstate->py_recursion_limit - tstate->py_recursion_remaining;
+    // XXX: TODO: Comment from a reviewer:
+    // Should this be ``Py_C_RECURSION_LIMIT - tstate->c_recursion_remaining``?
+    // But to me it looks more like that might not be the right
+    // initialization either?
+    this->c_recursion_depth = tstate->py_recursion_limit - tstate->py_recursion_remaining;
+#elif GREENLET_PY311
+    this->recursion_depth = tstate->recursion_limit - tstate->recursion_remaining;
+#else
+    this->recursion_depth = tstate->recursion_depth;
+#endif
+}
+// TODO: Better state management about when we own the top frame.
+int PythonState::tp_traverse(visitproc visit, void* arg, bool own_top_frame) noexcept
+{
+    Py_VISIT(this->_context.borrow());
+    if (own_top_frame) {
+        Py_VISIT(this->_top_frame.borrow());
+    }
+    return 0;
+}
+
+void PythonState::tp_clear(bool own_top_frame) noexcept
+{
+    PythonStateContext::tp_clear();
+    // If we get here owning a frame,
+    // we got dealloc'd without being finished. We may or may not be
+    // in the same thread.
+    if (own_top_frame) {
+        this->_top_frame.CLEAR();
+    }
+}
+
+#if GREENLET_USE_CFRAME
+void PythonState::set_new_cframe(_PyCFrame& frame) noexcept
+{
+    frame = *PyThreadState_GET()->cframe;
+    /* Make the target greenlet refer to the stack value. */
+    this->cframe = &frame;
+    /*
+      And restore the link to the previous frame so this one gets
+      unlinked appropriately.
+ */ + this->cframe->previous = &PyThreadState_GET()->root_cframe; +} +#endif + +const PythonState::OwnedFrame& PythonState::top_frame() const noexcept +{ + return this->_top_frame; +} + +void PythonState::did_finish(PyThreadState* tstate) noexcept +{ +#if GREENLET_PY311 + // See https://github.com/gevent/gevent/issues/1924 and + // https://github.com/python-greenlet/greenlet/issues/328. In + // short, Python 3.11 allocates memory for frames as a sort of + // linked list that's kept as part of PyThreadState in the + // ``datastack_chunk`` member and friends. These are saved and + // restored as part of switching greenlets. + // + // When we initially switch to a greenlet, we set those to NULL. + // That causes the frame management code to treat this like a + // brand new thread and start a fresh list of chunks, beginning + // with a new "root" chunk. As we make calls in this greenlet, + // those chunks get added, and as calls return, they get popped. + // But the frame code (pystate.c) is careful to make sure that the + // root chunk never gets popped. + // + // Thus, when a greenlet exits for the last time, there will be at + // least a single root chunk that we must be responsible for + // deallocating. + // + // The complex part is that these chunks are allocated and freed + // using ``_PyObject_VirtualAlloc``/``Free``. Those aren't public + // functions, and they aren't exported for linking. It so happens + // that we know they are just thin wrappers around the Arena + // allocator, so we can use that directly to deallocate in a + // compatible way. + // + // CAUTION: Check this implementation detail on every major version. + // + // It might be nice to be able to do this in our destructor, but + // can we be sure that no one else is using that memory? Plus, as + // described below, our pointers may not even be valid anymore. As + // a special case, there is one time that we know we can do this, + // and that's from the destructor of the associated UserGreenlet + // (NOT main greenlet) + PyObjectArenaAllocator alloc; + _PyStackChunk* chunk = nullptr; + if (tstate) { + // We really did finish, we can never be switched to again. + chunk = tstate->datastack_chunk; + // Unfortunately, we can't do much sanity checking. Our + // this->datastack_chunk pointer is out of date (evaluation may + // have popped down through it already) so we can't verify that + // we deallocate it. I don't think we can even check datastack_top + // for the same reason. + + PyObject_GetArenaAllocator(&alloc); + tstate->datastack_chunk = nullptr; + tstate->datastack_limit = nullptr; + tstate->datastack_top = nullptr; + + } + else if (this->datastack_chunk) { + // The UserGreenlet (NOT the main greenlet!) is being deallocated. If we're + // still holding a stack chunk, it's garbage because we know + // we can never switch back to let cPython clean it up. + // Because the last time we got switched away from, and we + // haven't run since then, we know our chain is valid and can + // be dealloced. + chunk = this->datastack_chunk; + PyObject_GetArenaAllocator(&alloc); + } + + if (alloc.free && chunk) { + // In case the arena mechanism has been torn down already. 
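+        // (Editor's note: each _PyStackChunk records its predecessor,
+        //  so this walk frees from the newest chunk back toward the
+        //  root chunk that the interpreter itself never pops.)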
+        while (chunk) {
+            _PyStackChunk *prev = chunk->previous;
+            chunk->previous = nullptr;
+            alloc.free(alloc.ctx, chunk, chunk->size);
+            chunk = prev;
+        }
+    }
+
+    this->datastack_chunk = nullptr;
+    this->datastack_limit = nullptr;
+    this->datastack_top = nullptr;
+#endif
+}
+
+
+}; // namespace greenlet
+
+#endif // GREENLET_PYTHON_STATE_CPP
diff --git a/venv/lib/python3.11/site-packages/greenlet/TStackState.cpp b/venv/lib/python3.11/site-packages/greenlet/TStackState.cpp
new file mode 100644
index 0000000..9743ab5
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/TStackState.cpp
@@ -0,0 +1,265 @@
+#ifndef GREENLET_STACK_STATE_CPP
+#define GREENLET_STACK_STATE_CPP
+
+#include "TGreenlet.hpp"
+
+namespace greenlet {
+
+#ifdef GREENLET_USE_STDIO
+#include <iostream>
+using std::cerr;
+using std::endl;
+
+std::ostream& operator<<(std::ostream& os, const StackState& s)
+{
+    os << "StackState(stack_start=" << (void*)s._stack_start
+       << ", stack_stop=" << (void*)s.stack_stop
+       << ", stack_copy=" << (void*)s.stack_copy
+       << ", stack_saved=" << s._stack_saved
+       << ", stack_prev=" << s.stack_prev
+       << ", addr=" << &s
+       << ")";
+    return os;
+}
+#endif
+
+StackState::StackState(void* mark, StackState& current)
+    : _stack_start(nullptr),
+      stack_stop((char*)mark),
+      stack_copy(nullptr),
+      _stack_saved(0),
+      /* Skip a dying greenlet */
+      stack_prev(current._stack_start
+                 ? &current
+                 : current.stack_prev)
+{
+}
+
+StackState::StackState()
+    : _stack_start(nullptr),
+      stack_stop(nullptr),
+      stack_copy(nullptr),
+      _stack_saved(0),
+      stack_prev(nullptr)
+{
+}
+
+StackState::StackState(const StackState& other)
+// can't use a delegating constructor because of
+// MSVC for Python 2.7
+    : _stack_start(nullptr),
+      stack_stop(nullptr),
+      stack_copy(nullptr),
+      _stack_saved(0),
+      stack_prev(nullptr)
+{
+    this->operator=(other);
+}
+
+StackState& StackState::operator=(const StackState& other)
+{
+    if (&other == this) {
+        return *this;
+    }
+    if (other._stack_saved) {
+        throw std::runtime_error("Refusing to steal memory.");
+    }
+
+    // If we have memory allocated, dispose of it.
+    this->free_stack_copy();
+
+    this->_stack_start = other._stack_start;
+    this->stack_stop = other.stack_stop;
+    this->stack_copy = other.stack_copy;
+    this->_stack_saved = other._stack_saved;
+    this->stack_prev = other.stack_prev;
+    return *this;
+}
+
+inline void StackState::free_stack_copy() noexcept
+{
+    PyMem_Free(this->stack_copy);
+    this->stack_copy = nullptr;
+    this->_stack_saved = 0;
+}
+
+inline void StackState::copy_heap_to_stack(const StackState& current) noexcept
+{
+
+    /* Restore the heap copy back into the C stack */
+    if (this->_stack_saved != 0) {
+        memcpy(this->_stack_start, this->stack_copy, this->_stack_saved);
+        this->free_stack_copy();
+    }
+    StackState* owner = const_cast<StackState*>(&current);
+    if (!owner->_stack_start) {
+        owner = owner->stack_prev; /* greenlet is dying, skip it */
+    }
+    while (owner && owner->stack_stop <= this->stack_stop) {
+        // cerr << "\tOwner: " << owner << endl;
+        owner = owner->stack_prev; /* find greenlet with more stack */
+    }
+    this->stack_prev = owner;
+    // cerr << "\tFinished with: " << *this << endl;
+}
+
+inline int StackState::copy_stack_to_heap_up_to(const char* const stop) noexcept
+{
+    /* Save more of g's stack into the heap -- at least up to 'stop'
+
+       g->stack_stop |________|
+                     |        |
+                     |    __ stop       . . . . .
+                     |        |    ==>  .       .
+                     |________|          _______
+                     |        |         |       |
+                     |        |         |       |
+      g->stack_start |        |         |_______| g->stack_copy
+
+     */
+    intptr_t sz1 = this->_stack_saved;
+    intptr_t sz2 = stop - this->_stack_start;
+    assert(this->_stack_start);
+    if (sz2 > sz1) {
+        char* c = (char*)PyMem_Realloc(this->stack_copy, sz2);
+        if (!c) {
+            PyErr_NoMemory();
+            return -1;
+        }
+        memcpy(c + sz1, this->_stack_start + sz1, sz2 - sz1);
+        this->stack_copy = c;
+        this->_stack_saved = sz2;
+    }
+    return 0;
+}
+
+inline int StackState::copy_stack_to_heap(char* const stackref,
+                                          const StackState& current) noexcept
+{
+    /* must free all the C stack up to target_stop */
+    const char* const target_stop = this->stack_stop;
+
+    StackState* owner = const_cast<StackState*>(&current);
+    assert(owner->_stack_saved == 0); // everything is present on the stack
+    if (!owner->_stack_start) {
+        owner = owner->stack_prev; /* not saved if dying */
+    }
+    else {
+        owner->_stack_start = stackref;
+    }
+
+    while (owner->stack_stop < target_stop) {
+        /* ts_current is entirely within the area to free */
+        if (owner->copy_stack_to_heap_up_to(owner->stack_stop)) {
+            return -1; /* XXX */
+        }
+        owner = owner->stack_prev;
+    }
+    if (owner != this) {
+        if (owner->copy_stack_to_heap_up_to(target_stop)) {
+            return -1; /* XXX */
+        }
+    }
+    return 0;
+}
+
+inline bool StackState::started() const noexcept
+{
+    return this->stack_stop != nullptr;
+}
+
+inline bool StackState::main() const noexcept
+{
+    return this->stack_stop == (char*)-1;
+}
+
+inline bool StackState::active() const noexcept
+{
+    return this->_stack_start != nullptr;
+}
+
+inline void StackState::set_active() noexcept
+{
+    assert(this->_stack_start == nullptr);
+    this->_stack_start = (char*)1;
+}
+
+inline void StackState::set_inactive() noexcept
+{
+    this->_stack_start = nullptr;
+    // XXX: What if we still have memory out there?
+    // That case is actually triggered by
+    // test_issue251_issue252_explicit_reference_not_collectable (greenlet.tests.test_leaks.TestLeaks)
+    // and
+    // test_issue251_issue252_need_to_collect_in_background
+    // (greenlet.tests.test_leaks.TestLeaks)
+    //
+    // Those objects never get deallocated, so the destructor never
+    // runs.
+    // It *seems* safe to clean up the memory here?
+    if (this->_stack_saved) {
+        this->free_stack_copy();
+    }
+}
+
+inline intptr_t StackState::stack_saved() const noexcept
+{
+    return this->_stack_saved;
+}
+
+inline char* StackState::stack_start() const noexcept
+{
+    return this->_stack_start;
+}
+
+
+inline StackState StackState::make_main() noexcept
+{
+    StackState s;
+    s._stack_start = (char*)1;
+    s.stack_stop = (char*)-1;
+    return s;
+}
+
+StackState::~StackState()
+{
+    if (this->_stack_saved != 0) {
+        this->free_stack_copy();
+    }
+}
+
+void StackState::copy_from_stack(void* vdest, const void* vsrc, size_t n) const
+{
+    char* dest = static_cast<char*>(vdest);
+    const char* src = static_cast<const char*>(vsrc);
+    if (src + n <= this->_stack_start
+        || src >= this->_stack_start + this->_stack_saved
+        || this->_stack_saved == 0) {
+        // Nothing we're copying was spilled from the stack
+        memcpy(dest, src, n);
+        return;
+    }
+
+    if (src < this->_stack_start) {
+        // Copy the part before the saved stack.
+        // We know src + n > _stack_start due to the test above.
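+        // (Editor's note: overall the copy proceeds in up to three
+        //  segments: live stack below the spill region, the spilled
+        //  bytes taken from ``stack_copy``, then any live stack above.)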
+ const size_t nbefore = this->_stack_start - src; + memcpy(dest, src, nbefore); + dest += nbefore; + src += nbefore; + n -= nbefore; + } + // We know src >= _stack_start after the before-copy, and + // src < _stack_start + _stack_saved due to the first if condition + size_t nspilled = std::min(n, this->_stack_start + this->_stack_saved - src); + memcpy(dest, this->stack_copy + (src - this->_stack_start), nspilled); + dest += nspilled; + src += nspilled; + n -= nspilled; + if (n > 0) { + // Copy the part after the saved stack + memcpy(dest, src, n); + } +} + +}; // namespace greenlet + +#endif // GREENLET_STACK_STATE_CPP diff --git a/venv/lib/python3.11/site-packages/greenlet/TThreadState.hpp b/venv/lib/python3.11/site-packages/greenlet/TThreadState.hpp new file mode 100644 index 0000000..e4e6f6c --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/TThreadState.hpp @@ -0,0 +1,497 @@ +#ifndef GREENLET_THREAD_STATE_HPP +#define GREENLET_THREAD_STATE_HPP + +#include +#include + +#include "greenlet_internal.hpp" +#include "greenlet_refs.hpp" +#include "greenlet_thread_support.hpp" + +using greenlet::refs::BorrowedObject; +using greenlet::refs::BorrowedGreenlet; +using greenlet::refs::BorrowedMainGreenlet; +using greenlet::refs::OwnedMainGreenlet; +using greenlet::refs::OwnedObject; +using greenlet::refs::OwnedGreenlet; +using greenlet::refs::OwnedList; +using greenlet::refs::PyErrFetchParam; +using greenlet::refs::PyArgParseParam; +using greenlet::refs::ImmortalString; +using greenlet::refs::CreatedModule; +using greenlet::refs::PyErrPieces; +using greenlet::refs::NewReference; + +namespace greenlet { +/** + * Thread-local state of greenlets. + * + * Each native thread will get exactly one of these objects, + * automatically accessed through the best available thread-local + * mechanism the compiler supports (``thread_local`` for C++11 + * compilers or ``__thread``/``declspec(thread)`` for older GCC/clang + * or MSVC, respectively.) + * + * Previously, we kept thread-local state mostly in a bunch of + * ``static volatile`` variables in the main greenlet file.. This had + * the problem of requiring extra checks, loops, and great care + * accessing these variables if we potentially invoked any Python code + * that could release the GIL, because the state could change out from + * under us. Making the variables thread-local solves this problem. + * + * When we detected that a greenlet API accessing the current greenlet + * was invoked from a different thread than the greenlet belonged to, + * we stored a reference to the greenlet in the Python thread + * dictionary for the thread the greenlet belonged to. This could lead + * to memory leaks if the thread then exited (because of a reference + * cycle, as greenlets referred to the thread dictionary, and deleting + * non-current greenlets leaked their frame plus perhaps arguments on + * the C stack). If a thread exited while still having running + * greenlet objects (perhaps that had just switched back to the main + * greenlet), and did not invoke one of the greenlet APIs *in that + * thread, immediately before it exited, without some other thread + * then being invoked*, such a leak was guaranteed. + * + * This can be partly solved by using compiler thread-local variables + * instead of the Python thread dictionary, thus avoiding a cycle. + * + * To fully solve this problem, we need a reliable way to know that a + * thread is done and we should clean up the main greenlet. 
On POSIX,
+ * we can use the destructor function of ``pthread_key_create``, but
+ * there's nothing similar on Windows; a C++11 thread local object
+ * reliably invokes its destructor when the thread it belongs to exits
+ * (non-C++11 compilers offer ``__thread`` or ``declspec(thread)`` to
+ * create thread-local variables, but they can't hold C++ objects that
+ * invoke destructors; the C++11 version is the most portable solution
+ * I found). When the thread exits, we can drop references and
+ * otherwise manipulate greenlets and frames that we know can no
+ * longer be switched to. For compilers that don't support C++11
+ * thread locals, we have a solution that uses the python thread
+ * dictionary, though it may not collect everything as promptly as
+ * other compilers do, if some other library is using the thread
+ * dictionary and has a cycle or extra reference.
+ *
+ * There are two small wrinkles. The first is that when the thread
+ * exits, it is too late to actually invoke Python APIs: the Python
+ * thread state is gone, and the GIL is released. To solve *this*
+ * problem, our destructor uses ``Py_AddPendingCall`` to transfer the
+ * destruction work to the main thread. (This is not an issue for the
+ * dictionary solution.)
+ *
+ * The second is that once the thread exits, the thread local object
+ * is invalid and we can't even access a pointer to it, so we can't
+ * pass it to ``Py_AddPendingCall``. This is handled by actually using
+ * a second object that's thread local (ThreadStateCreator) and having
+ * it dynamically allocate this object so it can live until the
+ * pending call runs.
+ */
+
+
+
+class ThreadState {
+private:
+    // As of commit 08ad1dd7012b101db953f492e0021fb08634afad
+    // this class needed 56 bytes in a Py_DEBUG build
+    // on 64-bit macOS 11.
+    // Adding the vector takes us up to 80 bytes.
+
+    /* Strong reference to the main greenlet */
+    OwnedMainGreenlet main_greenlet;
+
+    /* Strong reference to the current greenlet. */
+    OwnedGreenlet current_greenlet;
+
+    /* Strong reference to the trace function, if any. */
+    OwnedObject tracefunc;
+
+    typedef std::vector<PyGreenlet*, PythonAllocator<PyGreenlet*> > deleteme_t;
+    /* A vector of raw PyGreenlet pointers representing things that need
+       deleted when this thread is running. The vector owns the
+       references, but you need to manually INCREF/DECREF as you use
+       them. We don't use a vector of ``OwnedGreenlet`` because we
+       make copy of this vector, and that would become O(n) as all the
+       refcounts are incremented in the copy.
+    */
+    deleteme_t deleteme;
+
+#ifdef GREENLET_NEEDS_EXCEPTION_STATE_SAVED
+    void* exception_state;
+#endif
+
+    static std::clock_t _clocks_used_doing_gc;
+    static ImmortalString get_referrers_name;
+    static PythonAllocator<ThreadState> allocator;
+
+    G_NO_COPIES_OF_CLS(ThreadState);
+
+
+    // Allocates a main greenlet for the thread state. If this fails,
+    // exits the process. Called only during constructing a ThreadState.
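+    // (A failure here is unrecoverable by design: without a main
+    //  greenlet there is nothing for any other greenlet in this
+    //  thread to ultimately switch back to.)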
+    MainGreenlet* alloc_main()
+    {
+        PyGreenlet* gmain;
+
+        /* create the main greenlet for this thread */
+        gmain = reinterpret_cast<PyGreenlet*>(PyType_GenericAlloc(&PyGreenlet_Type, 0));
+        if (gmain == NULL) {
+            throw PyFatalError("alloc_main failed to alloc"); //exits the process
+        }
+
+        MainGreenlet* const main = new MainGreenlet(gmain, this);
+
+        assert(Py_REFCNT(gmain) == 1);
+        assert(gmain->pimpl == main);
+        return main;
+    }
+
+
+public:
+    static void* operator new(size_t UNUSED(count))
+    {
+        return ThreadState::allocator.allocate(1);
+    }
+
+    static void operator delete(void* ptr)
+    {
+        return ThreadState::allocator.deallocate(static_cast<ThreadState*>(ptr),
+                                                 1);
+    }
+
+    static void init()
+    {
+        ThreadState::get_referrers_name = "get_referrers";
+        ThreadState::_clocks_used_doing_gc = 0;
+    }
+
+    ThreadState()
+    {
+
+#ifdef GREENLET_NEEDS_EXCEPTION_STATE_SAVED
+        this->exception_state = slp_get_exception_state();
+#endif
+
+        // XXX: Potentially dangerous, exposing a not fully
+        // constructed object.
+        MainGreenlet* const main = this->alloc_main();
+        this->main_greenlet = OwnedMainGreenlet::consuming(
+            main->self()
+        );
+        assert(this->main_greenlet);
+        this->current_greenlet = main->self();
+        // The main greenlet starts with one ref: the returned one. We
+        // then copied it to the current greenlet.
+        assert(this->main_greenlet.REFCNT() == 2);
+    }
+
+    inline void restore_exception_state()
+    {
+#ifdef GREENLET_NEEDS_EXCEPTION_STATE_SAVED
+        // It's probably important this be inlined and only call C
+        // functions to avoid adding an SEH frame.
+        slp_set_exception_state(this->exception_state);
+#endif
+    }
+
+    inline bool has_main_greenlet() const noexcept
+    {
+        return bool(this->main_greenlet);
+    }
+
+    // Called from the ThreadStateCreator when we're in non-standard
+    // threading mode. In that case, there is an object in the Python
+    // thread state dictionary that points to us. The main greenlet
+    // also traverses into us, in which case it's crucial not to
+    // traverse back into the main greenlet.
+    int tp_traverse(visitproc visit, void* arg, bool traverse_main=true)
+    {
+        if (traverse_main) {
+            Py_VISIT(main_greenlet.borrow_o());
+        }
+        if (traverse_main || current_greenlet != main_greenlet) {
+            Py_VISIT(current_greenlet.borrow_o());
+        }
+        Py_VISIT(tracefunc.borrow());
+        return 0;
+    }
+
+    inline BorrowedMainGreenlet borrow_main_greenlet() const noexcept
+    {
+        assert(this->main_greenlet);
+        assert(this->main_greenlet.REFCNT() >= 2);
+        return this->main_greenlet;
+    };
+
+    inline OwnedMainGreenlet get_main_greenlet() const noexcept
+    {
+        return this->main_greenlet;
+    }
+
+    /**
+     * In addition to returning a new reference to the current
+     * greenlet, this performs any maintenance needed.
+     */
+    inline OwnedGreenlet get_current()
+    {
+        /* green_dealloc() cannot delete greenlets from other threads, so
+           it stores them in the thread dict; delete them now. */
+        this->clear_deleteme_list();
+        //assert(this->current_greenlet->main_greenlet == this->main_greenlet);
+        //assert(this->main_greenlet->main_greenlet == this->main_greenlet);
+        return this->current_greenlet;
+    }
+
+    /**
+     * As for non-const get_current();
+     */
+    inline BorrowedGreenlet borrow_current()
+    {
+        this->clear_deleteme_list();
+        return this->current_greenlet;
+    }
+
+    /**
+     * Does no maintenance.
+ */ + inline OwnedGreenlet get_current() const + { + return this->current_greenlet; + } + + template + inline bool is_current(const refs::PyObjectPointer& obj) const + { + return this->current_greenlet.borrow_o() == obj.borrow_o(); + } + + inline void set_current(const OwnedGreenlet& target) + { + this->current_greenlet = target; + } + +private: + /** + * Deref and remove the greenlets from the deleteme list. Must be + * holding the GIL. + * + * If *murder* is true, then we must be called from a different + * thread than the one that these greenlets were running in. + * In that case, if the greenlet was actually running, we destroy + * the frame reference and otherwise make it appear dead before + * proceeding; otherwise, we would try (and fail) to raise an + * exception in it and wind up right back in this list. + */ + inline void clear_deleteme_list(const bool murder=false) + { + if (!this->deleteme.empty()) { + // It's possible we could add items to this list while + // running Python code if there's a thread switch, so we + // need to defensively copy it before that can happen. + deleteme_t copy = this->deleteme; + this->deleteme.clear(); // in case things come back on the list + for(deleteme_t::iterator it = copy.begin(), end = copy.end(); + it != end; + ++it ) { + PyGreenlet* to_del = *it; + if (murder) { + // Force each greenlet to appear dead; we can't raise an + // exception into it anymore anyway. + to_del->pimpl->murder_in_place(); + } + + // The only reference to these greenlets should be in + // this list, decreffing them should let them be + // deleted again, triggering calls to green_dealloc() + // in the correct thread (if we're not murdering). + // This may run arbitrary Python code and switch + // threads or greenlets! + Py_DECREF(to_del); + if (PyErr_Occurred()) { + PyErr_WriteUnraisable(nullptr); + PyErr_Clear(); + } + } + } + } + +public: + + /** + * Returns a new reference, or a false object. + */ + inline OwnedObject get_tracefunc() const + { + return tracefunc; + }; + + + inline void set_tracefunc(BorrowedObject tracefunc) + { + assert(tracefunc); + if (tracefunc == BorrowedObject(Py_None)) { + this->tracefunc.CLEAR(); + } + else { + this->tracefunc = tracefunc; + } + } + + /** + * Given a reference to a greenlet that some other thread + * attempted to delete (has a refcount of 0) store it for later + * deletion when the thread this state belongs to is current. + */ + inline void delete_when_thread_running(PyGreenlet* to_del) + { + Py_INCREF(to_del); + this->deleteme.push_back(to_del); + } + + /** + * Set to std::clock_t(-1) to disable. + */ + inline static std::clock_t& clocks_used_doing_gc() + { + return ThreadState::_clocks_used_doing_gc; + } + + ~ThreadState() + { + if (!PyInterpreterState_Head()) { + // We shouldn't get here (our callers protect us) + // but if we do, all we can do is bail early. + return; + } + + // We should not have an "origin" greenlet; that only exists + // for the temporary time during a switch, which should not + // be in progress as the thread dies. + //assert(!this->switching_state.origin); + + this->tracefunc.CLEAR(); + + // Forcibly GC as much as we can. + this->clear_deleteme_list(true); + + // The pending call did this. + assert(this->main_greenlet->thread_state() == nullptr); + + // If the main greenlet is the current greenlet, + // then we "fell off the end" and the thread died. 
+        // It's possible that there is some other greenlet that
+        // switched to us, leaving a reference to the main greenlet
+        // on the stack, somewhere uncollectible. Try to detect that.
+        if (this->current_greenlet == this->main_greenlet && this->current_greenlet) {
+            assert(this->current_greenlet->is_currently_running_in_some_thread());
+            // Drop one reference we hold.
+            this->current_greenlet.CLEAR();
+            assert(!this->current_greenlet);
+            // Only our reference to the main greenlet should be left,
+            // but hold onto the pointer in case we need to do extra cleanup.
+            PyGreenlet* old_main_greenlet = this->main_greenlet.borrow();
+            Py_ssize_t cnt = this->main_greenlet.REFCNT();
+            this->main_greenlet.CLEAR();
+            if (ThreadState::_clocks_used_doing_gc != std::clock_t(-1)
+                && cnt == 2 && Py_REFCNT(old_main_greenlet) == 1) {
+                // Highly likely that the reference is somewhere on
+                // the stack, not reachable by GC. Verify.
+                // XXX: This is O(n) in the total number of objects.
+                // TODO: Add a way to disable this at runtime, and
+                // another way to report on it.
+                std::clock_t begin = std::clock();
+                NewReference gc(PyImport_ImportModule("gc"));
+                if (gc) {
+                    OwnedObject get_referrers = gc.PyRequireAttr(ThreadState::get_referrers_name);
+                    OwnedList refs(get_referrers.PyCall(old_main_greenlet));
+                    if (refs && refs.empty()) {
+                        assert(refs.REFCNT() == 1);
+                        // We found nothing! So we left a dangling
+                        // reference: Probably the last thing some
+                        // other greenlet did was call
+                        // 'getcurrent().parent.switch()' to switch
+                        // back to us. Clean it up. This will be the
+                        // case on CPython 3.7 and newer, as they use
+                        // an internal calling convention that avoids
+                        // creating method objects and storing them on
+                        // the stack.
+                        Py_DECREF(old_main_greenlet);
+                    }
+                    else if (refs
+                             && refs.size() == 1
+                             && PyCFunction_Check(refs.at(0))
+                             && Py_REFCNT(refs.at(0)) == 2) {
+                        assert(refs.REFCNT() == 1);
+                        // Ok, we found a C method that refers to the
+                        // main greenlet, and it's only referenced
+                        // twice, once in the list we just created,
+                        // once from...somewhere else. If we can't
+                        // find where else, then this is a leak.
+                        // This happens in older versions of CPython
+                        // that create a bound method object somewhere
+                        // on the stack that we'll never get back to.
+                        if (PyCFunction_GetFunction(refs.at(0).borrow()) == (PyCFunction)green_switch) {
+                            BorrowedObject function_w = refs.at(0);
+                            refs.clear(); // destroy the reference
+                                          // from the list.
+                            // back to one reference. Can *it* be
+                            // found?
+                            assert(function_w.REFCNT() == 1);
+                            refs = get_referrers.PyCall(function_w);
+                            if (refs && refs.empty()) {
+                                // Nope, it can't be found so it won't
+                                // ever be GC'd. Drop it.
+                                Py_CLEAR(function_w);
+                            }
+                        }
+                    }
+                    std::clock_t end = std::clock();
+                    ThreadState::_clocks_used_doing_gc += (end - begin);
+                }
+            }
+        }
+
+        // We need to make sure this greenlet appears to be dead,
+        // because otherwise deallocing it would fail to raise an
+        // exception in it (the thread is dead) and put it back in our
+        // deleteme list.
+        if (this->current_greenlet) {
+            this->current_greenlet->murder_in_place();
+            this->current_greenlet.CLEAR();
+        }
+
+        if (this->main_greenlet) {
+            // Couldn't have been the main greenlet that was running
+            // when the thread exited (because we already cleared this
+            // pointer if it was). This shouldn't be possible?
+
+            // If the main greenlet was current when the thread died (it
+            // should be, right?)
+            // then we cleared its self pointer above
+            // when we cleared the current greenlet's main greenlet pointer.
+            // assert(this->main_greenlet->main_greenlet == this->main_greenlet
+            //        || !this->main_greenlet->main_greenlet);
+            // // self reference, probably gone
+            // this->main_greenlet->main_greenlet.CLEAR();
+
+            // This will actually go away when the ivar is destructed.
+            this->main_greenlet.CLEAR();
+        }
+
+        if (PyErr_Occurred()) {
+            PyErr_WriteUnraisable(NULL);
+            PyErr_Clear();
+        }
+
+    }
+
+};
+
+ImmortalString ThreadState::get_referrers_name(nullptr);
+PythonAllocator<ThreadState> ThreadState::allocator;
+std::clock_t ThreadState::_clocks_used_doing_gc(0);
+
+}; // namespace greenlet
+
+#endif
diff --git a/venv/lib/python3.11/site-packages/greenlet/TThreadStateCreator.hpp b/venv/lib/python3.11/site-packages/greenlet/TThreadStateCreator.hpp
new file mode 100644
index 0000000..2ec7ab5
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/TThreadStateCreator.hpp
@@ -0,0 +1,102 @@
+#ifndef GREENLET_THREAD_STATE_CREATOR_HPP
+#define GREENLET_THREAD_STATE_CREATOR_HPP
+
+#include <ctime>
+#include <stdexcept>
+
+#include "greenlet_internal.hpp"
+#include "greenlet_refs.hpp"
+#include "greenlet_thread_support.hpp"
+
+#include "TThreadState.hpp"
+
+namespace greenlet {
+
+
+typedef void (*ThreadStateDestructor)(ThreadState* const);
+
+template<ThreadStateDestructor Destructor>
+class ThreadStateCreator
+{
+private:
+    // Initialized to 1, and, if still 1, created on access.
+    // Set to 0 on destruction.
+    ThreadState* _state;
+    G_NO_COPIES_OF_CLS(ThreadStateCreator);
+
+    inline bool has_initialized_state() const noexcept
+    {
+        return this->_state != (ThreadState*)1;
+    }
+
+    inline bool has_state() const noexcept
+    {
+        return this->has_initialized_state() && this->_state != nullptr;
+    }
+
+public:
+
+    // Only one of these, auto created per thread.
+    // Constructing the state constructs the MainGreenlet.
+    ThreadStateCreator() :
+        _state((ThreadState*)1)
+    {
+    }
+
+    ~ThreadStateCreator()
+    {
+        if (this->has_state()) {
+            Destructor(this->_state);
+        }
+
+        this->_state = nullptr;
+    }
+
+    inline ThreadState& state()
+    {
+        // The main greenlet will own this pointer when it is created,
+        // which will be right after this. The plan is to give every
+        // greenlet a pointer to the main greenlet for the thread it
+        // runs in; if we are doing something cross-thread, we need to
+        // access the pointer from the main greenlet. Deleting the
+        // thread, and hence the thread-local storage, will delete the
+        // state pointer in the main greenlet.
+        if (!this->has_initialized_state()) {
+            // XXX: Assuming allocation never fails
+            this->_state = new ThreadState;
+            // For non-standard threading, we need to store an object
+            // in the Python thread state dictionary so that it can be
+            // DECREF'd when the thread ends (ideally; the dict could
+            // last longer) and clean this object up.
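+            //
+            // A hedged sketch of the lifecycle this sentinel encodes
+            // (the values shown are the ones this class actually uses):
+            //
+            //   _state == (ThreadState*)1  -> not yet created; create on first access
+            //   _state == a valid pointer  -> created; reused on each access
+            //   _state == nullptr          -> destroyed; access throws std::runtime_error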
+ } + if (!this->_state) { + throw std::runtime_error("Accessing state after destruction."); + } + return *this->_state; + } + + operator ThreadState&() + { + return this->state(); + } + + operator ThreadState*() + { + return &this->state(); + } + + inline int tp_traverse(visitproc visit, void* arg) + { + if (this->has_state()) { + return this->_state->tp_traverse(visit, arg); + } + return 0; + } + +}; + + + +}; // namespace greenlet + +#endif diff --git a/venv/lib/python3.11/site-packages/greenlet/TThreadStateDestroy.cpp b/venv/lib/python3.11/site-packages/greenlet/TThreadStateDestroy.cpp new file mode 100644 index 0000000..449b788 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/TThreadStateDestroy.cpp @@ -0,0 +1,217 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +/** + * Implementation of the ThreadState destructors. + * + * Format with: + * clang-format -i --style=file src/greenlet/greenlet.c + * + * + * Fix missing braces with: + * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" +*/ +#ifndef T_THREADSTATE_DESTROY +#define T_THREADSTATE_DESTROY + +#include "TGreenlet.hpp" + +#include "greenlet_thread_support.hpp" +#include "greenlet_compiler_compat.hpp" +#include "TGreenletGlobals.cpp" +#include "TThreadState.hpp" +#include "TThreadStateCreator.hpp" + +namespace greenlet { + +extern "C" { + +struct ThreadState_DestroyNoGIL +{ + /** + This function uses the same lock that the PendingCallback does + */ + static void + MarkGreenletDeadAndQueueCleanup(ThreadState* const state) + { +#if GREENLET_BROKEN_THREAD_LOCAL_CLEANUP_JUST_LEAK + return; +#endif + // We are *NOT* holding the GIL. Our thread is in the middle + // of its death throes and the Python thread state is already + // gone so we can't use most Python APIs. One that is safe is + // ``Py_AddPendingCall``, unless the interpreter itself has + // been torn down. There is a limited number of calls that can + // be queued: 32 (NPENDINGCALLS) in CPython 3.10, so we + // coalesce these calls using our own queue. + + if (!MarkGreenletDeadIfNeeded(state)) { + // No state, or no greenlet + return; + } + + // XXX: Because we don't have the GIL, this is a race condition. + if (!PyInterpreterState_Head()) { + // We have to leak the thread state, if the + // interpreter has shut down when we're getting + // deallocated, we can't run the cleanup code that + // deleting it would imply. + return; + } + + AddToCleanupQueue(state); + + } + +private: + + // If the state has an allocated main greenlet: + // - mark the greenlet as dead by disassociating it from the state; + // - return 1 + // Otherwise, return 0. + static bool + MarkGreenletDeadIfNeeded(ThreadState* const state) + { + if (state && state->has_main_greenlet()) { + // mark the thread as dead ASAP. + // this is racy! If we try to throw or switch to a + // greenlet from this thread from some other thread before + // we clear the state pointer, it won't realize the state + // is dead which can crash the process. 
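+            //
+            // A hedged sketch of the interleaving this comment is about
+            // (thread labels are illustrative only, not part of this file):
+            //
+            //   T1 (dying, no GIL): about to clear the state pointer below
+            //   T2 (holds GIL):     calls switch() or throw() on T1's greenlet
+            //
+            // Until the thread_state(nullptr) below is visible, T2 can
+            // conclude the greenlet still runs in a live thread.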
+            PyGreenlet* p(state->borrow_main_greenlet().borrow());
+            assert(p->pimpl->thread_state() == state || p->pimpl->thread_state() == nullptr);
+            dynamic_cast<MainGreenlet*>(p->pimpl)->thread_state(nullptr);
+            return true;
+        }
+        return false;
+    }
+
+    static void
+    AddToCleanupQueue(ThreadState* const state)
+    {
+        assert(state && state->has_main_greenlet());
+
+        // NOTE: Because we're not holding the GIL here, some other
+        // Python thread could run and call ``os.fork()``, which would
+        // be bad if that happened while we are holding the cleanup
+        // lock (it wouldn't function in the child process).
+        // Make a best effort to try to keep the duration we hold the
+        // lock short.
+        // TODO: On platforms that support it, use ``pthread_atfork`` to
+        // drop this lock.
+        LockGuard cleanup_lock(*mod_globs->thread_states_to_destroy_lock);
+
+        mod_globs->queue_to_destroy(state);
+        if (mod_globs->thread_states_to_destroy.size() == 1) {
+            // We added the first item to the queue. We need to schedule
+            // the cleanup.
+
+            // A size greater than 1 means that we have already added the pending call,
+            // and in fact, it may be executing now.
+            // If it is executing, our lock makes sure that it will see the item we just added
+            // to the queue on its next iteration (after we release the lock).
+            //
+            // A size of 1 means there is no pending call, OR the pending call is
+            // currently executing, has dropped the lock, and is deleting the last item
+            // from the queue; its next iteration will go ahead and delete the item we just added.
+            // And the pending call we schedule here will have no work to do.
+            int result = AddPendingCall(
+                PendingCallback_DestroyQueueWithGIL,
+                nullptr);
+            if (result < 0) {
+                // Hmm, what can we do here?
+                fprintf(stderr,
+                        "greenlet: WARNING: failed in call to Py_AddPendingCall; "
+                        "expect a memory leak.\n");
+            }
+        }
+    }
+
+    static int
+    PendingCallback_DestroyQueueWithGIL(void* UNUSED(arg))
+    {
+        // We're holding the GIL here, so no Python code should be able to
+        // run to call ``os.fork()``.
+        while (1) {
+            ThreadState* to_destroy;
+            {
+                LockGuard cleanup_lock(*mod_globs->thread_states_to_destroy_lock);
+                if (mod_globs->thread_states_to_destroy.empty()) {
+                    break;
+                }
+                to_destroy = mod_globs->take_next_to_destroy();
+            }
+            assert(to_destroy);
+            assert(to_destroy->has_main_greenlet());
+            // Drop the lock while we do the actual deletion.
+            // This allows other calls to MarkGreenletDeadAndQueueCleanup
+            // to enter and add to our queue.
+            DestroyOneWithGIL(to_destroy);
+        }
+        return 0;
+    }
+
+    static void
+    DestroyOneWithGIL(const ThreadState* const state)
+    {
+        // Holding the GIL.
+        // Passed a non-shared pointer to the actual thread state.
+        // state -> main greenlet
+        assert(state->has_main_greenlet());
+        PyGreenlet* main(state->borrow_main_greenlet());
+        // When we need to do cross-thread operations, we check this.
+        // A NULL value means the thread died some time ago.
+        // We do this here, rather than in a Python dealloc function
+        // for the greenlet, in case there's still a reference out
+        // there.
+        dynamic_cast<MainGreenlet*>(main->pimpl)->thread_state(nullptr);
+
+        delete state; // Deleting this runs the destructor, DECREFs the main greenlet.
+    }
+
+
+    static int AddPendingCall(int (*func)(void*), void* arg)
+    {
+        // If the interpreter is in the middle of finalizing, we can't add a
+        // pending call. Trying to do so will end up in a SIGSEGV, as
+        // Py_AddPendingCall will not be able to get the interpreter and will
+        // try to dereference a NULL pointer.
+        // It's possible this can still
+        // segfault if we happen to get context switched, and maybe we should
+        // just always implement our own AddPendingCall, but I'd like to see if
+        // this works first.
+#if GREENLET_PY313
+        if (Py_IsFinalizing()) {
+#else
+        if (_Py_IsFinalizing()) {
+#endif
+#ifdef GREENLET_DEBUG
+            // No need to log in the general case. Yes, we'll leak,
+            // but we're shutting down so it should be ok.
+            fprintf(stderr,
+                    "greenlet: WARNING: Interpreter is finalizing. Ignoring "
+                    "call to Py_AddPendingCall; \n");
+#endif
+            return 0;
+        }
+        return Py_AddPendingCall(func, arg);
+    }
+
+};
+};
+
+}; // namespace greenlet
+
+// The intent when GET_THREAD_STATE() is needed multiple times in a
+// function is to take a reference to its return value in a local
+// variable, to avoid the thread-local indirection. On some platforms
+// (macOS), accessing a thread-local involves a function call (plus an
+// initial function call in each function that uses a thread local);
+// in contrast, static volatile variables are at some pre-computed
+// offset.
+typedef greenlet::ThreadStateCreator<greenlet::ThreadState_DestroyNoGIL::MarkGreenletDeadAndQueueCleanup> ThreadStateCreator;
+static thread_local ThreadStateCreator g_thread_state_global;
+#define GET_THREAD_STATE() g_thread_state_global
+
+#endif //T_THREADSTATE_DESTROY
diff --git a/venv/lib/python3.11/site-packages/greenlet/TUserGreenlet.cpp b/venv/lib/python3.11/site-packages/greenlet/TUserGreenlet.cpp
new file mode 100644
index 0000000..73a8133
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/TUserGreenlet.cpp
@@ -0,0 +1,662 @@
+/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
+/**
+ * Implementation of greenlet::UserGreenlet.
+ *
+ * Format with:
+ * clang-format -i --style=file src/greenlet/greenlet.c
+ *
+ *
+ * Fix missing braces with:
+ * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements"
+*/
+#ifndef T_USER_GREENLET_CPP
+#define T_USER_GREENLET_CPP
+
+#include "greenlet_internal.hpp"
+#include "TGreenlet.hpp"
+
+#include "TThreadStateDestroy.cpp"
+
+
+namespace greenlet {
+using greenlet::refs::BorrowedMainGreenlet;
+greenlet::PythonAllocator<UserGreenlet> UserGreenlet::allocator;
+
+void* UserGreenlet::operator new(size_t UNUSED(count))
+{
+    return allocator.allocate(1);
+}
+
+
+void UserGreenlet::operator delete(void* ptr)
+{
+    return allocator.deallocate(static_cast<UserGreenlet*>(ptr),
+                                1);
+}
+
+
+UserGreenlet::UserGreenlet(PyGreenlet* p, BorrowedGreenlet the_parent)
+    : Greenlet(p), _parent(the_parent)
+{
+}
+
+UserGreenlet::~UserGreenlet()
+{
+    // Python 3.11: If we don't clear out the raw frame datastack
+    // when deleting an unfinished greenlet,
+    // TestLeaks.test_untracked_memory_doesnt_increase_unfinished_thread_dealloc_in_main fails.
+    this->python_state.did_finish(nullptr);
+    this->tp_clear();
+}
+
+
+const BorrowedMainGreenlet
+UserGreenlet::main_greenlet() const
+{
+    return this->_main_greenlet;
+}
+
+
+BorrowedMainGreenlet
+UserGreenlet::find_main_greenlet_in_lineage() const
+{
+    if (this->started()) {
+        assert(this->_main_greenlet);
+        return BorrowedMainGreenlet(this->_main_greenlet);
+    }
+
+    if (!this->_parent) {
+        /* garbage collected greenlet in chain */
+        // XXX: WHAT?
+        return BorrowedMainGreenlet(nullptr);
+    }
+
+    return this->_parent->find_main_greenlet_in_lineage();
+}
+
+
+/**
+ * CAUTION: This will allocate memory and may trigger garbage
+ * collection and arbitrary Python code.
+ */
+OwnedObject
+UserGreenlet::throw_GreenletExit_during_dealloc(const ThreadState& current_thread_state)
+{
+    /* The dying greenlet cannot be a parent of ts_current
+       because the 'parent' field chain would hold a
+       reference */
+    UserGreenlet::ParentIsCurrentGuard with_current_parent(this, current_thread_state);
+
+    // We don't care about the return value, only whether an
+    // exception happened. Whether or not an exception happens,
+    // we need to restore the parent in case the greenlet gets
+    // resurrected.
+    return Greenlet::throw_GreenletExit_during_dealloc(current_thread_state);
+}
+
+ThreadState*
+UserGreenlet::thread_state() const noexcept
+{
+    // TODO: maybe make this throw, if the thread state isn't there?
+    // if (!this->main_greenlet) {
+    //     throw std::runtime_error("No thread state"); // TODO: Better exception
+    // }
+    if (!this->_main_greenlet) {
+        return nullptr;
+    }
+    return this->_main_greenlet->thread_state();
+}
+
+
+bool
+UserGreenlet::was_running_in_dead_thread() const noexcept
+{
+    return this->_main_greenlet && !this->thread_state();
+}
+
+OwnedObject
+UserGreenlet::g_switch()
+{
+    assert(this->args() || PyErr_Occurred());
+
+    try {
+        this->check_switch_allowed();
+    }
+    catch (const PyErrOccurred&) {
+        this->release_args();
+        throw;
+    }
+
+    // Switching greenlets used to attempt to clean out ones that need
+    // to be deleted *if* we detected a thread switch. Should it still
+    // do that?
+    // An issue is that if we delete a greenlet from another thread,
+    // it gets queued to this thread, and ``kill_greenlet()`` switches
+    // back into the greenlet.
+
+    /* find the real target by ignoring dead greenlets,
+       and if necessary starting a greenlet. */
+    switchstack_result_t err;
+    Greenlet* target = this;
+    // TODO: probably cleaner to handle the case where we do
+    // switch to ourself separately from the other cases.
+    // This can probably be simplified even further if we keep
+    // track of the switching_state we're going for and just call
+    // into g_switch() if it's not ourself. The main problem with that
+    // is that we would be using more stack space.
+    bool target_was_me = true;
+    bool was_initial_stub = false;
+    while (target) {
+        if (target->active()) {
+            if (!target_was_me) {
+                target->args() <<= this->args();
+                assert(!this->args());
+            }
+            err = target->g_switchstack();
+            break;
+        }
+        if (!target->started()) {
+            // We never encounter a main greenlet that's not started.
+            assert(!target->main());
+            UserGreenlet* real_target = static_cast<UserGreenlet*>(target);
+            assert(real_target);
+            void* dummymarker;
+            was_initial_stub = true;
+            if (!target_was_me) {
+                target->args() <<= this->args();
+                assert(!this->args());
+            }
+            try {
+                // This can only throw back to us while we're
+                // still in this greenlet. Once the new greenlet
+                // is bootstrapped, it has its own exception state.
+                err = real_target->g_initialstub(&dummymarker);
+            }
+            catch (const PyErrOccurred&) {
+                this->release_args();
+                throw;
+            }
+            catch (const GreenletStartedWhileInPython&) {
+                // The greenlet was started sometime before this
+                // greenlet actually switched to it, i.e.,
+                // "concurrent" calls to switch() or throw().
+                // We need to retry the switch.
+                // Note that the current greenlet has been reset
+                // to this one (or we wouldn't be running!)
+                continue;
+            }
+            break;
+        }
+
+        target = target->parent();
+        target_was_me = false;
+    }
+    // The ``this`` pointer and all other stack or register based
+    // variables are invalid now, at least where things succeed
+    // above.
+    // But this one, probably not so much?
It's not clear if it's + // safe to throw an exception at this point. + + if (err.status < 0) { + // If we get here, either g_initialstub() + // failed, or g_switchstack() failed. Either one of those + // cases SHOULD leave us in the original greenlet with a valid + // stack. + return this->on_switchstack_or_initialstub_failure(target, err, target_was_me, was_initial_stub); + } + + // err.the_new_current_greenlet would be the same as ``target``, + // if target wasn't probably corrupt. + return err.the_new_current_greenlet->g_switch_finish(err); +} + + + +Greenlet::switchstack_result_t +UserGreenlet::g_initialstub(void* mark) +{ + OwnedObject run; + + // We need to grab a reference to the current switch arguments + // in case we're entered concurrently during the call to + // GetAttr() and have to try again. + // We'll restore them when we return in that case. + // Scope them tightly to avoid ref leaks. + { + SwitchingArgs args(this->args()); + + /* save exception in case getattr clears it */ + PyErrPieces saved; + + /* + self.run is the object to call in the new greenlet. + This could run arbitrary python code and switch greenlets! + */ + run = this->self().PyRequireAttr(mod_globs->str_run); + /* restore saved exception */ + saved.PyErrRestore(); + + + /* recheck that it's safe to switch in case greenlet reparented anywhere above */ + this->check_switch_allowed(); + + /* by the time we got here another start could happen elsewhere, + * that means it should now be a regular switch. + * This can happen if the Python code is a subclass that implements + * __getattribute__ or __getattr__, or makes ``run`` a descriptor; + * all of those can run arbitrary code that switches back into + * this greenlet. + */ + if (this->stack_state.started()) { + // the successful switch cleared these out, we need to + // restore our version. They will be copied on up to the + // next target. + assert(!this->args()); + this->args() <<= args; + throw GreenletStartedWhileInPython(); + } + } + + // Sweet, if we got here, we have the go-ahead and will switch + // greenlets. + // Nothing we do from here on out should allow for a thread or + // greenlet switch: No arbitrary calls to Python, including + // decref'ing + +#if GREENLET_USE_CFRAME + /* OK, we need it, we're about to switch greenlets, save the state. */ + /* + See green_new(). This is a stack-allocated variable used + while *self* is in PyObject_Call(). + We want to defer copying the state info until we're sure + we need it and are in a stable place to do so. + */ + _PyCFrame trace_info; + + this->python_state.set_new_cframe(trace_info); +#endif + /* start the greenlet */ + ThreadState& thread_state = GET_THREAD_STATE().state(); + this->stack_state = StackState(mark, + thread_state.borrow_current()->stack_state); + this->python_state.set_initial_state(PyThreadState_GET()); + this->exception_state.clear(); + this->_main_greenlet = thread_state.get_main_greenlet(); + + /* perform the initial switch */ + switchstack_result_t err = this->g_switchstack(); + /* returns twice! + The 1st time with ``err == 1``: we are in the new greenlet. + This one owns a greenlet that used to be current. + The 2nd time with ``err <= 0``: back in the caller's + greenlet; this happens if the child finishes or switches + explicitly to us. Either way, the ``err`` variable is + created twice at the same memory location, but possibly + having different ``origin`` values. Note that it's not + constructed for the second time until the switch actually happens. 
+ */ + if (err.status == 1) { + // In the new greenlet. + + // This never returns! Calling inner_bootstrap steals + // the contents of our run object within this stack frame, so + // it is not valid to do anything with it. + try { + this->inner_bootstrap(err.origin_greenlet.relinquish_ownership(), + run.relinquish_ownership()); + } + // Getting a C++ exception here isn't good. It's probably a + // bug in the underlying greenlet, meaning it's probably a + // C++ extension. We're going to abort anyway, but try to + // display some nice information *if* possible. Some obscure + // platforms don't properly support this (old 32-bit Arm, see see + // https://github.com/python-greenlet/greenlet/issues/385); that's not + // great, but should usually be OK because, as mentioned above, we're + // terminating anyway. + // + // The catching is tested by + // ``test_cpp.CPPTests.test_unhandled_exception_in_greenlet_aborts``. + // + // PyErrOccurred can theoretically be thrown by + // inner_bootstrap() -> g_switch_finish(), but that should + // never make it back to here. It is a std::exception and + // would be caught if it is. + catch (const std::exception& e) { + std::string base = "greenlet: Unhandled C++ exception: "; + base += e.what(); + Py_FatalError(base.c_str()); + } + catch (...) { + // Some compilers/runtimes use exceptions internally. + // It appears that GCC on Linux with libstdc++ throws an + // exception internally at process shutdown time to unwind + // stacks and clean up resources. Depending on exactly + // where we are when the process exits, that could result + // in an unknown exception getting here. If we + // Py_FatalError() or abort() here, we interfere with + // orderly process shutdown. Throwing the exception on up + // is the right thing to do. + // + // gevent's ``examples/dns_mass_resolve.py`` demonstrates this. +#ifndef NDEBUG + fprintf(stderr, + "greenlet: inner_bootstrap threw unknown exception; " + "is the process terminating?\n"); +#endif + throw; + } + Py_FatalError("greenlet: inner_bootstrap returned with no exception.\n"); + } + + + // In contrast, notice that we're keeping the origin greenlet + // around as an owned reference; we need it to call the trace + // function for the switch back into the parent. It was only + // captured at the time the switch actually happened, though, + // so we haven't been keeping an extra reference around this + // whole time. + + /* back in the parent */ + if (err.status < 0) { + /* start failed badly, restore greenlet state */ + this->stack_state = StackState(); + this->_main_greenlet.CLEAR(); + // CAUTION: This may run arbitrary Python code. + run.CLEAR(); // inner_bootstrap didn't run, we own the reference. + } + + // In the success case, the spawned code (inner_bootstrap) will + // take care of decrefing this, so we relinquish ownership so as + // to not double-decref. + + run.relinquish_ownership(); + + return err; +} + + +void +UserGreenlet::inner_bootstrap(PyGreenlet* origin_greenlet, PyObject* run) +{ + // The arguments here would be another great place for move. + // As it is, we take them as a reference so that when we clear + // them we clear what's on the stack above us. Do that NOW, and + // without using a C++ RAII object, + // so there's no way that exiting the parent frame can clear it, + // or we clear it unexpectedly. This arises in the context of the + // interpreter shutting down. 
See https://github.com/python-greenlet/greenlet/issues/325 + //PyObject* run = _run.relinquish_ownership(); + + /* in the new greenlet */ + assert(this->thread_state()->borrow_current() == BorrowedGreenlet(this->_self)); + // C++ exceptions cannot propagate to the parent greenlet from + // here. (TODO: Do we need a catch(...) clause, perhaps on the + // function itself? ALl we could do is terminate the program.) + // NOTE: On 32-bit Windows, the call chain is extremely + // important here in ways that are subtle, having to do with + // the depth of the SEH list. The call to restore it MUST NOT + // add a new SEH handler to the list, or we'll restore it to + // the wrong thing. + this->thread_state()->restore_exception_state(); + /* stack variables from above are no good and also will not unwind! */ + // EXCEPT: That can't be true, we access run, among others, here. + + this->stack_state.set_active(); /* running */ + + // We're about to possibly run Python code again, which + // could switch back/away to/from us, so we need to grab the + // arguments locally. + SwitchingArgs args; + args <<= this->args(); + assert(!this->args()); + + // XXX: We could clear this much earlier, right? + // Or would that introduce the possibility of running Python + // code when we don't want to? + // CAUTION: This may run arbitrary Python code. + this->_run_callable.CLEAR(); + + + // The first switch we need to manually call the trace + // function here instead of in g_switch_finish, because we + // never return there. + if (OwnedObject tracefunc = this->thread_state()->get_tracefunc()) { + OwnedGreenlet trace_origin; + trace_origin = origin_greenlet; + try { + g_calltrace(tracefunc, + args ? mod_globs->event_switch : mod_globs->event_throw, + trace_origin, + this->_self); + } + catch (const PyErrOccurred&) { + /* Turn trace errors into switch throws */ + args.CLEAR(); + } + } + + // We no longer need the origin, it was only here for + // tracing. + // We may never actually exit this stack frame so we need + // to explicitly clear it. + // This could run Python code and switch. + Py_CLEAR(origin_greenlet); + + OwnedObject result; + if (!args) { + /* pending exception */ + result = NULL; + } + else { + /* call g.run(*args, **kwargs) */ + // This could result in further switches + try { + //result = run.PyCall(args.args(), args.kwargs()); + // CAUTION: Just invoking this, before the function even + // runs, may cause memory allocations, which may trigger + // GC, which may run arbitrary Python code. + result = OwnedObject::consuming(PyObject_Call(run, args.args().borrow(), args.kwargs().borrow())); + } + catch (...) { + // Unhandled C++ exception! + + // If we declare ourselves as noexcept, if we don't catch + // this here, most platforms will just abort() the + // process. But on 64-bit Windows with older versions of + // the C runtime, this can actually corrupt memory and + // just return. We see this when compiling with the + // Windows 7.0 SDK targeting Windows Server 2008, but not + // when using the Appveyor Visual Studio 2019 image. So + // this currently only affects Python 2.7 on Windows 64. + // That is, the tests pass and the runtime aborts + // everywhere else. + // + // However, if we catch it and try to continue with a + // Python error, then all Windows 64 bit platforms corrupt + // memory. So all we can do is manually abort, hopefully + // with a good error message. 
(Note that the above was + // tested WITHOUT the `/EHr` switch being used at compile + // time, so MSVC may have "optimized" out important + // checking. Using that switch, we may be in a better + // place in terms of memory corruption.) But sometimes it + // can't be caught here at all, which is confusing but not + // terribly surprising; so again, the G_NOEXCEPT_WIN32 + // plus "/EHr". + // + // Hopefully the basic C stdlib is still functional enough + // for us to at least print an error. + // + // It gets more complicated than that, though, on some + // platforms, specifically at least Linux/gcc/libstdc++. They use + // an exception to unwind the stack when a background + // thread exits. (See comments about noexcept.) So this + // may not actually represent anything untoward. On those + // platforms we allow throws of this to propagate, or + // attempt to anyway. +# if defined(WIN32) || defined(_WIN32) + Py_FatalError( + "greenlet: Unhandled C++ exception from a greenlet run function. " + "Because memory is likely corrupted, terminating process."); + std::abort(); +#else + throw; +#endif + } + } + // These lines may run arbitrary code + args.CLEAR(); + Py_CLEAR(run); + + if (!result + && mod_globs->PyExc_GreenletExit.PyExceptionMatches() + && (this->args())) { + // This can happen, for example, if our only reference + // goes away after we switch back to the parent. + // See test_dealloc_switch_args_not_lost + PyErrPieces clear_error; + result <<= this->args(); + result = single_result(result); + } + this->release_args(); + this->python_state.did_finish(PyThreadState_GET()); + + result = g_handle_exit(result); + assert(this->thread_state()->borrow_current() == this->_self); + + /* jump back to parent */ + this->stack_state.set_inactive(); /* dead */ + + + // TODO: Can we decref some things here? Release our main greenlet + // and maybe parent? + for (Greenlet* parent = this->_parent; + parent; + parent = parent->parent()) { + // We need to somewhere consume a reference to + // the result; in most cases we'll never have control + // back in this stack frame again. Calling + // green_switch actually adds another reference! + // This would probably be clearer with a specific API + // to hand results to the parent. + parent->args() <<= result; + assert(!result); + // The parent greenlet now owns the result; in the + // typical case we'll never get back here to assign to + // result and thus release the reference. + try { + result = parent->g_switch(); + } + catch (const PyErrOccurred&) { + // Ignore, keep passing the error on up. + } + + /* Return here means switch to parent failed, + * in which case we throw *current* exception + * to the next parent in chain. + */ + assert(!result); + } + /* We ran out of parents, cannot continue */ + PyErr_WriteUnraisable(this->self().borrow_o()); + Py_FatalError("greenlet: ran out of parent greenlets while propagating exception; " + "cannot continue"); + std::abort(); +} + +void +UserGreenlet::run(const BorrowedObject nrun) +{ + if (this->started()) { + throw AttributeError( + "run cannot be set " + "after the start of the greenlet"); + } + this->_run_callable = nrun; +} + +const OwnedGreenlet +UserGreenlet::parent() const +{ + return this->_parent; +} + +void +UserGreenlet::parent(const BorrowedObject raw_new_parent) +{ + if (!raw_new_parent) { + throw AttributeError("can't delete attribute"); + } + + BorrowedMainGreenlet main_greenlet_of_new_parent; + BorrowedGreenlet new_parent(raw_new_parent.borrow()); // could + // throw + // TypeError! 
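+    // A hedged sketch of what the loop below rejects, as seen from
+    // Python (variable names are illustrative only):
+    //
+    //   child = greenlet(run, parent=getcurrent())
+    //   grandchild = greenlet(run, parent=child)
+    //   child.parent = grandchild   # ValueError: cyclic parent chain
+    //
+    // Walking new_parent's chain both detects such cycles and finds the
+    // main greenlet (and therefore the thread) the new parent belongs to.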
+ for (BorrowedGreenlet p = new_parent; p; p = p->parent()) { + if (p == this->self()) { + throw ValueError("cyclic parent chain"); + } + main_greenlet_of_new_parent = p->main_greenlet(); + } + + if (!main_greenlet_of_new_parent) { + throw ValueError("parent must not be garbage collected"); + } + + if (this->started() + && this->_main_greenlet != main_greenlet_of_new_parent) { + throw ValueError("parent cannot be on a different thread"); + } + + this->_parent = new_parent; +} + +void +UserGreenlet::murder_in_place() +{ + this->_main_greenlet.CLEAR(); + Greenlet::murder_in_place(); +} + +bool +UserGreenlet::belongs_to_thread(const ThreadState* thread_state) const +{ + return Greenlet::belongs_to_thread(thread_state) && this->_main_greenlet == thread_state->borrow_main_greenlet(); +} + + +int +UserGreenlet::tp_traverse(visitproc visit, void* arg) +{ + Py_VISIT(this->_parent.borrow_o()); + Py_VISIT(this->_main_greenlet.borrow_o()); + Py_VISIT(this->_run_callable.borrow_o()); + + return Greenlet::tp_traverse(visit, arg); +} + +int +UserGreenlet::tp_clear() +{ + Greenlet::tp_clear(); + this->_parent.CLEAR(); + this->_main_greenlet.CLEAR(); + this->_run_callable.CLEAR(); + return 0; +} + +UserGreenlet::ParentIsCurrentGuard::ParentIsCurrentGuard(UserGreenlet* p, + const ThreadState& thread_state) + : oldparent(p->_parent), + greenlet(p) +{ + p->_parent = thread_state.get_current(); +} + +UserGreenlet::ParentIsCurrentGuard::~ParentIsCurrentGuard() +{ + this->greenlet->_parent = oldparent; + oldparent.CLEAR(); +} + +}; //namespace greenlet +#endif diff --git a/venv/lib/python3.11/site-packages/greenlet/__init__.py b/venv/lib/python3.11/site-packages/greenlet/__init__.py new file mode 100644 index 0000000..3386a09 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/__init__.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +""" +The root of the greenlet package. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +__all__ = [ + '__version__', + '_C_API', + + 'GreenletExit', + 'error', + + 'getcurrent', + 'greenlet', + + 'gettrace', + 'settrace', +] + +# pylint:disable=no-name-in-module + +### +# Metadata +### +__version__ = '3.2.1' +from ._greenlet import _C_API # pylint:disable=no-name-in-module + +### +# Exceptions +### +from ._greenlet import GreenletExit +from ._greenlet import error + +### +# greenlets +### +from ._greenlet import getcurrent +from ._greenlet import greenlet + +### +# tracing +### +try: + from ._greenlet import gettrace + from ._greenlet import settrace +except ImportError: + # Tracing wasn't supported. + # XXX: The option to disable it was removed in 1.0, + # so this branch should be dead code. + pass + +### +# Constants +# These constants aren't documented and aren't recommended. +# In 1.0, USE_GC and USE_TRACING are always true, and USE_CONTEXT_VARS +# is the same as ``sys.version_info[:2] >= 3.7`` +### +from ._greenlet import GREENLET_USE_CONTEXT_VARS # pylint:disable=unused-import +from ._greenlet import GREENLET_USE_GC # pylint:disable=unused-import +from ._greenlet import GREENLET_USE_TRACING # pylint:disable=unused-import + +# Controlling the use of the gc module. Provisional API for this greenlet +# implementation in 2.0. 
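+#
+# A hedged usage sketch (illustrative, not a recommendation; it assumes
+# only the three names imported just below):
+#
+#   import greenlet
+#   greenlet.enable_optional_cleanup(False)  # skip the optional O(n) GC pass
+#   ...
+#   ticks = greenlet.get_clocks_used_doing_optional_cleanup()
+#   seconds = ticks / greenlet.CLOCKS_PER_SEC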
+from ._greenlet import CLOCKS_PER_SEC # pylint:disable=unused-import
+from ._greenlet import enable_optional_cleanup # pylint:disable=unused-import
+from ._greenlet import get_clocks_used_doing_optional_cleanup # pylint:disable=unused-import
+
+# Other APIs in the _greenlet module are for test support.
diff --git a/venv/lib/python3.11/site-packages/greenlet/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..bd8d444
Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/__pycache__/__init__.cpython-311.pyc differ
diff --git a/venv/lib/python3.11/site-packages/greenlet/_greenlet.cpython-311-x86_64-linux-gnu.so b/venv/lib/python3.11/site-packages/greenlet/_greenlet.cpython-311-x86_64-linux-gnu.so
new file mode 100644
index 0000000..d8b3d1e
Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/_greenlet.cpython-311-x86_64-linux-gnu.so differ
diff --git a/venv/lib/python3.11/site-packages/greenlet/greenlet.cpp b/venv/lib/python3.11/site-packages/greenlet/greenlet.cpp
new file mode 100644
index 0000000..e8d92a0
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/greenlet.cpp
@@ -0,0 +1,320 @@
+/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
+/* Format with:
+ * clang-format -i --style=file src/greenlet/greenlet.c
+ *
+ *
+ * Fix missing braces with:
+ * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements"
+*/
+#include <cstdlib>
+#include <string>
+#include <algorithm>
+#include <exception>
+
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+#include "structmember.h" // PyMemberDef
+
+#include "greenlet_internal.hpp"
+// Code after this point can assume access to things declared in stdint.h,
+// including the fixed-width types. This goes for the platform-specific
+// switch functions as well.
+#include "greenlet_refs.hpp" +#include "greenlet_slp_switch.hpp" + +#include "greenlet_thread_support.hpp" +#include "TGreenlet.hpp" + +#include "TGreenletGlobals.cpp" + +#include "TGreenlet.cpp" +#include "TMainGreenlet.cpp" +#include "TUserGreenlet.cpp" +#include "TBrokenGreenlet.cpp" +#include "TExceptionState.cpp" +#include "TPythonState.cpp" +#include "TStackState.cpp" + +#include "TThreadState.hpp" +#include "TThreadStateCreator.hpp" +#include "TThreadStateDestroy.cpp" + +#include "PyGreenlet.cpp" +#include "PyGreenletUnswitchable.cpp" +#include "CObjects.cpp" + +using greenlet::LockGuard; +using greenlet::LockInitError; +using greenlet::PyErrOccurred; +using greenlet::Require; + +using greenlet::g_handle_exit; +using greenlet::single_result; + +using greenlet::Greenlet; +using greenlet::UserGreenlet; +using greenlet::MainGreenlet; +using greenlet::BrokenGreenlet; +using greenlet::ThreadState; +using greenlet::PythonState; + + + +// ******* Implementation of things from included files +template +greenlet::refs::_BorrowedGreenlet& greenlet::refs::_BorrowedGreenlet::operator=(const greenlet::refs::BorrowedObject& other) +{ + this->_set_raw_pointer(static_cast(other)); + return *this; +} + +template +inline greenlet::refs::_BorrowedGreenlet::operator Greenlet*() const noexcept +{ + if (!this->p) { + return nullptr; + } + return reinterpret_cast(this->p)->pimpl; +} + +template +greenlet::refs::_BorrowedGreenlet::_BorrowedGreenlet(const BorrowedObject& p) + : BorrowedReference(nullptr) +{ + + this->_set_raw_pointer(p.borrow()); +} + +template +inline greenlet::refs::_OwnedGreenlet::operator Greenlet*() const noexcept +{ + if (!this->p) { + return nullptr; + } + return reinterpret_cast(this->p)->pimpl; +} + + + +#ifdef __clang__ +# pragma clang diagnostic push +# pragma clang diagnostic ignored "-Wmissing-field-initializers" +# pragma clang diagnostic ignored "-Wwritable-strings" +#elif defined(__GNUC__) +# pragma GCC diagnostic push +// warning: ISO C++ forbids converting a string constant to ‘char*’ +// (The python APIs aren't const correct and accept writable char*) +# pragma GCC diagnostic ignored "-Wwrite-strings" +#endif + + +/*********************************************************** + +A PyGreenlet is a range of C stack addresses that must be +saved and restored in such a way that the full range of the +stack contains valid data when we switch to it. + +Stack layout for a greenlet: + + | ^^^ | + | older data | + | | + stack_stop . |_______________| + . | | + . | greenlet data | + . | in stack | + . * |_______________| . . _____________ stack_copy + stack_saved + . | | | | + . | data | |greenlet data| + . | unrelated | | saved | + . | to | | in heap | + stack_start . | this | . . |_____________| stack_copy + | greenlet | + | | + | newer data | + | vvv | + + +Note that a greenlet's stack data is typically partly at its correct +place in the stack, and partly saved away in the heap, but always in +the above configuration: two blocks, the more recent one in the heap +and the older one still in the stack (either block may be empty). + +Greenlets are chained: each points to the previous greenlet, which is +the one that owns the data currently in the C stack above my +stack_stop. The currently running greenlet is the first element of +this chain. The main (initial) greenlet is the last one. Greenlets +whose stack is entirely in the heap can be skipped from the chain. 
+ +The chain is not related to execution order, but only to the order +in which bits of C stack happen to belong to greenlets at a particular +point in time. + +The main greenlet doesn't have a stack_stop: it is responsible for the +complete rest of the C stack, and we don't know where it begins. We +use (char*) -1, the largest possible address. + +States: + stack_stop == NULL && stack_start == NULL: did not start yet + stack_stop != NULL && stack_start == NULL: already finished + stack_stop != NULL && stack_start != NULL: active + +The running greenlet's stack_start is undefined but not NULL. + + ***********************************************************/ + + + + +/***********************************************************/ + +/* Some functions must not be inlined: + * slp_restore_state, when inlined into slp_switch might cause + it to restore stack over its own local variables + * slp_save_state, when inlined would add its own local + variables to the saved stack, wasting space + * slp_switch, cannot be inlined for obvious reasons + * g_initialstub, when inlined would receive a pointer into its + own stack frame, leading to incomplete stack save/restore + +g_initialstub is a member function and declared virtual so that the +compiler always calls it through a vtable. + +slp_save_state and slp_restore_state are also member functions. They +are called from trampoline functions that themselves are declared as +not eligible for inlining. +*/ + +extern "C" { +static int GREENLET_NOINLINE(slp_save_state_trampoline)(char* stackref) +{ + return switching_thread_state->slp_save_state(stackref); +} +static void GREENLET_NOINLINE(slp_restore_state_trampoline)() +{ + switching_thread_state->slp_restore_state(); +} +} + + +/***********************************************************/ + + +#include "PyModule.cpp" + + + +static PyObject* +greenlet_internal_mod_init() noexcept +{ + static void* _PyGreenlet_API[PyGreenlet_API_pointers]; + + try { + CreatedModule m(greenlet_module_def); + + Require(PyType_Ready(&PyGreenlet_Type)); + Require(PyType_Ready(&PyGreenletUnswitchable_Type)); + + mod_globs = new greenlet::GreenletGlobals; + ThreadState::init(); + + m.PyAddObject("greenlet", PyGreenlet_Type); + m.PyAddObject("UnswitchableGreenlet", PyGreenletUnswitchable_Type); + m.PyAddObject("error", mod_globs->PyExc_GreenletError); + m.PyAddObject("GreenletExit", mod_globs->PyExc_GreenletExit); + + m.PyAddObject("GREENLET_USE_GC", 1); + m.PyAddObject("GREENLET_USE_TRACING", 1); + m.PyAddObject("GREENLET_USE_CONTEXT_VARS", 1L); + m.PyAddObject("GREENLET_USE_STANDARD_THREADING", 1L); + + OwnedObject clocks_per_sec = OwnedObject::consuming(PyLong_FromSsize_t(CLOCKS_PER_SEC)); + m.PyAddObject("CLOCKS_PER_SEC", clocks_per_sec); + + /* also publish module-level data as attributes of the greentype. */ + // XXX: This is weird, and enables a strange pattern of + // confusing the class greenlet with the module greenlet; with + // the exception of (possibly) ``getcurrent()``, this + // shouldn't be encouraged so don't add new items here. 
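+        // For example (assuming ``copy_on_greentype`` names attributes
+        // such as ``getcurrent`` and ``GreenletExit``, which the module
+        // exports), after this loop both spellings resolve to the same
+        // object:
+        //
+        //   import greenlet
+        //   greenlet.getcurrent() is greenlet.greenlet.getcurrent()  # True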
+        for (const char* const* p = copy_on_greentype; *p; p++) {
+            OwnedObject o = m.PyRequireAttr(*p);
+            PyDict_SetItemString(PyGreenlet_Type.tp_dict, *p, o.borrow());
+        }
+
+        /*
+         * Expose C API
+         */
+
+        /* types */
+        _PyGreenlet_API[PyGreenlet_Type_NUM] = (void*)&PyGreenlet_Type;
+
+        /* exceptions */
+        _PyGreenlet_API[PyExc_GreenletError_NUM] = (void*)mod_globs->PyExc_GreenletError;
+        _PyGreenlet_API[PyExc_GreenletExit_NUM] = (void*)mod_globs->PyExc_GreenletExit;
+
+        /* methods */
+        _PyGreenlet_API[PyGreenlet_New_NUM] = (void*)PyGreenlet_New;
+        _PyGreenlet_API[PyGreenlet_GetCurrent_NUM] = (void*)PyGreenlet_GetCurrent;
+        _PyGreenlet_API[PyGreenlet_Throw_NUM] = (void*)PyGreenlet_Throw;
+        _PyGreenlet_API[PyGreenlet_Switch_NUM] = (void*)PyGreenlet_Switch;
+        _PyGreenlet_API[PyGreenlet_SetParent_NUM] = (void*)PyGreenlet_SetParent;
+
+        /* Previously macros, but now need to be functions externally. */
+        _PyGreenlet_API[PyGreenlet_MAIN_NUM] = (void*)Extern_PyGreenlet_MAIN;
+        _PyGreenlet_API[PyGreenlet_STARTED_NUM] = (void*)Extern_PyGreenlet_STARTED;
+        _PyGreenlet_API[PyGreenlet_ACTIVE_NUM] = (void*)Extern_PyGreenlet_ACTIVE;
+        _PyGreenlet_API[PyGreenlet_GET_PARENT_NUM] = (void*)Extern_PyGreenlet_GET_PARENT;
+
+        /* XXX: Note that our module name is ``greenlet._greenlet``, but for
+           backwards compatibility with existing C code, we need the _C_API to
+           be directly in greenlet.
+        */
+        const NewReference c_api_object(Require(
+                                            PyCapsule_New(
+                                                (void*)_PyGreenlet_API,
+                                                "greenlet._C_API",
+                                                NULL)));
+        m.PyAddObject("_C_API", c_api_object);
+        assert(c_api_object.REFCNT() == 2);
+
+        // cerr << "Sizes:"
+        //      << "\n\tGreenlet       : " << sizeof(Greenlet)
+        //      << "\n\tUserGreenlet   : " << sizeof(UserGreenlet)
+        //      << "\n\tMainGreenlet   : " << sizeof(MainGreenlet)
+        //      << "\n\tExceptionState : " << sizeof(greenlet::ExceptionState)
+        //      << "\n\tPythonState    : " << sizeof(greenlet::PythonState)
+        //      << "\n\tStackState     : " << sizeof(greenlet::StackState)
+        //      << "\n\tSwitchingArgs  : " << sizeof(greenlet::SwitchingArgs)
+        //      << "\n\tOwnedObject    : " << sizeof(greenlet::refs::OwnedObject)
+        //      << "\n\tBorrowedObject : " << sizeof(greenlet::refs::BorrowedObject)
+        //      << "\n\tPyGreenlet     : " << sizeof(PyGreenlet)
+        //      << endl;
+
+        return m.borrow(); // But really it's the main reference.
+    }
+    catch (const LockInitError& e) {
+        PyErr_SetString(PyExc_MemoryError, e.what());
+        return NULL;
+    }
+    catch (const PyErrOccurred&) {
+        return NULL;
+    }
+
+}
+
+extern "C" {
+
+PyMODINIT_FUNC
+PyInit__greenlet(void)
+{
+    return greenlet_internal_mod_init();
+}
+
+}; // extern C
+
+#ifdef __clang__
+# pragma clang diagnostic pop
+#elif defined(__GNUC__)
+# pragma GCC diagnostic pop
+#endif
diff --git a/venv/lib/python3.11/site-packages/greenlet/greenlet.h b/venv/lib/python3.11/site-packages/greenlet/greenlet.h
new file mode 100644
index 0000000..d02a16e
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/greenlet.h
@@ -0,0 +1,164 @@
+/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
+
+/* Greenlet object interface */
+
+#ifndef Py_GREENLETOBJECT_H
+#define Py_GREENLETOBJECT_H
+
+
+#include <Python.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* This is deprecated and undocumented. It does not change.
*/ +#define GREENLET_VERSION "1.0.0" + +#ifndef GREENLET_MODULE +#define implementation_ptr_t void* +#endif + +typedef struct _greenlet { + PyObject_HEAD + PyObject* weakreflist; + PyObject* dict; + implementation_ptr_t pimpl; +} PyGreenlet; + +#define PyGreenlet_Check(op) (op && PyObject_TypeCheck(op, &PyGreenlet_Type)) + + +/* C API functions */ + +/* Total number of symbols that are exported */ +#define PyGreenlet_API_pointers 12 + +#define PyGreenlet_Type_NUM 0 +#define PyExc_GreenletError_NUM 1 +#define PyExc_GreenletExit_NUM 2 + +#define PyGreenlet_New_NUM 3 +#define PyGreenlet_GetCurrent_NUM 4 +#define PyGreenlet_Throw_NUM 5 +#define PyGreenlet_Switch_NUM 6 +#define PyGreenlet_SetParent_NUM 7 + +#define PyGreenlet_MAIN_NUM 8 +#define PyGreenlet_STARTED_NUM 9 +#define PyGreenlet_ACTIVE_NUM 10 +#define PyGreenlet_GET_PARENT_NUM 11 + +#ifndef GREENLET_MODULE +/* This section is used by modules that uses the greenlet C API */ +static void** _PyGreenlet_API = NULL; + +# define PyGreenlet_Type \ + (*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM]) + +# define PyExc_GreenletError \ + ((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM]) + +# define PyExc_GreenletExit \ + ((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM]) + +/* + * PyGreenlet_New(PyObject *args) + * + * greenlet.greenlet(run, parent=None) + */ +# define PyGreenlet_New \ + (*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \ + _PyGreenlet_API[PyGreenlet_New_NUM]) + +/* + * PyGreenlet_GetCurrent(void) + * + * greenlet.getcurrent() + */ +# define PyGreenlet_GetCurrent \ + (*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM]) + +/* + * PyGreenlet_Throw( + * PyGreenlet *greenlet, + * PyObject *typ, + * PyObject *val, + * PyObject *tb) + * + * g.throw(...) + */ +# define PyGreenlet_Throw \ + (*(PyObject * (*)(PyGreenlet * self, \ + PyObject * typ, \ + PyObject * val, \ + PyObject * tb)) \ + _PyGreenlet_API[PyGreenlet_Throw_NUM]) + +/* + * PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args) + * + * g.switch(*args, **kwargs) + */ +# define PyGreenlet_Switch \ + (*(PyObject * \ + (*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \ + _PyGreenlet_API[PyGreenlet_Switch_NUM]) + +/* + * PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent) + * + * g.parent = new_parent + */ +# define PyGreenlet_SetParent \ + (*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \ + _PyGreenlet_API[PyGreenlet_SetParent_NUM]) + +/* + * PyGreenlet_GetParent(PyObject* greenlet) + * + * return greenlet.parent; + * + * This could return NULL even if there is no exception active. + * If it does not return NULL, you are responsible for decrementing the + * reference count. + */ +# define PyGreenlet_GetParent \ + (*(PyGreenlet* (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_GET_PARENT_NUM]) + +/* + * deprecated, undocumented alias. + */ +# define PyGreenlet_GET_PARENT PyGreenlet_GetParent + +# define PyGreenlet_MAIN \ + (*(int (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_MAIN_NUM]) + +# define PyGreenlet_STARTED \ + (*(int (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_STARTED_NUM]) + +# define PyGreenlet_ACTIVE \ + (*(int (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_ACTIVE_NUM]) + + + + +/* Macro that imports greenlet and initializes C API */ +/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we + keep the older definition to be sure older code that might have a copy of + the header still works. 
+ */
+# define PyGreenlet_Import() \
+    { \
+        _PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \
+    }
+
+#endif /* GREENLET_MODULE */
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_GREENLETOBJECT_H */
diff --git a/venv/lib/python3.11/site-packages/greenlet/greenlet_allocator.hpp b/venv/lib/python3.11/site-packages/greenlet/greenlet_allocator.hpp
new file mode 100644
index 0000000..b452f54
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/greenlet_allocator.hpp
@@ -0,0 +1,63 @@
+#ifndef GREENLET_ALLOCATOR_HPP
+#define GREENLET_ALLOCATOR_HPP
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+#include <memory>
+#include "greenlet_compiler_compat.hpp"
+
+
+namespace greenlet
+{
+    // This allocator is stateless; all instances are identical.
+    // It can *ONLY* be used when we're sure we're holding the GIL
+    // (Python's allocators require the GIL).
+    template<class T>
+    struct PythonAllocator : public std::allocator<T> {
+
+        PythonAllocator(const PythonAllocator& UNUSED(other))
+            : std::allocator<T>()
+        {
+        }
+
+        PythonAllocator(const std::allocator<T> other)
+            : std::allocator<T>(other)
+        {}
+
+        template<class U>
+        PythonAllocator(const std::allocator<U>& other)
+            : std::allocator<T>(other)
+        {
+        }
+
+        PythonAllocator() : std::allocator<T>() {}
+
+        T* allocate(size_t number_objects, const void* UNUSED(hint)=0)
+        {
+            void* p;
+            if (number_objects == 1)
+                p = PyObject_Malloc(sizeof(T));
+            else
+                p = PyMem_Malloc(sizeof(T) * number_objects);
+            return static_cast<T*>(p);
+        }
+
+        void deallocate(T* t, size_t n)
+        {
+            void* p = t;
+            if (n == 1) {
+                PyObject_Free(p);
+            }
+            else
+                PyMem_Free(p);
+        }
+        // This member is deprecated in C++17 and removed in C++20
+        template< class U >
+        struct rebind {
+            typedef PythonAllocator<U> other;
+        };
+
+    };
+}
+
+#endif
diff --git a/venv/lib/python3.11/site-packages/greenlet/greenlet_compiler_compat.hpp b/venv/lib/python3.11/site-packages/greenlet/greenlet_compiler_compat.hpp
new file mode 100644
index 0000000..af24bd8
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/greenlet_compiler_compat.hpp
@@ -0,0 +1,98 @@
+/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
+#ifndef GREENLET_COMPILER_COMPAT_HPP
+#define GREENLET_COMPILER_COMPAT_HPP
+
+/**
+ * Definitions to aid with compatibility with different compilers.
+ *
+ * .. caution:: Use extreme care with noexcept.
+ *    Some compilers and runtimes, specifically gcc/libgcc/libstdc++ on
+ *    Linux, implement stack unwinding by throwing an uncatchable
+ *    exception, one that specifically does not appear to be an active
+ *    exception to the rest of the runtime. If this happens while we're in a noexcept function,
+ *    we have violated our dynamic exception contract, and so the runtime
+ *    will call std::terminate(), which kills the process with the
+ *    unhelpful message "terminate called without an active exception".
+ *
+ *    This has happened in this scenario: A background thread is running
+ *    a greenlet that has made a native call and released the GIL.
+ *    Meanwhile, the main thread finishes and starts shutting down the
+ *    interpreter. When the background thread is scheduled again and
+ *    attempts to obtain the GIL, it notices that the interpreter is
+ *    exiting and calls ``pthread_exit()``. This in turn starts to unwind
+ *    the stack by throwing that exception. But we had the ``PyCall``
+ *    functions annotated as noexcept, so the runtime terminated us.
+ *
+ * #2  0x00007fab26fec2b7 in std::terminate() () from /lib/x86_64-linux-gnu/libstdc++.so.6
+ * #3  0x00007fab26febb3c in __gxx_personality_v0 () from /lib/x86_64-linux-gnu/libstdc++.so.6
+ * #4  0x00007fab26f34de6 in ?? () from /lib/x86_64-linux-gnu/libgcc_s.so.1
+ * #6  0x00007fab276a34c6 in __GI___pthread_unwind at ./nptl/unwind.c:130
+ * #7  0x00007fab2769bd3a in __do_cancel () at ../sysdeps/nptl/pthreadP.h:280
+ * #8  __GI___pthread_exit (value=value@entry=0x0) at ./nptl/pthread_exit.c:36
+ * #9  0x000000000052e567 in PyThread_exit_thread () at ../Python/thread_pthread.h:370
+ * #10 0x00000000004d60b5 in take_gil at ../Python/ceval_gil.h:224
+ * #11 0x00000000004d65f9 in PyEval_RestoreThread at ../Python/ceval.c:467
+ * #12 0x000000000060cce3 in setipaddr at ../Modules/socketmodule.c:1203
+ * #13 0x00000000006101cd in socket_gethostbyname
+ */
+
+#include <cstdint>
+
+# define G_NO_COPIES_OF_CLS(Cls) private: \
+    Cls(const Cls& other) = delete; \
+    Cls& operator=(const Cls& other) = delete
+
+# define G_NO_ASSIGNMENT_OF_CLS(Cls) private: \
+    Cls& operator=(const Cls& other) = delete
+
+# define G_NO_COPY_CONSTRUCTOR_OF_CLS(Cls) private: \
+    Cls(const Cls& other) = delete;
+
+
+// CAUTION: MSVC is stupidly picky:
+//
+// "The compiler ignores, without warning, any __declspec keywords
+// placed after * or & and in front of the variable identifier in a
+// declaration."
+// (https://docs.microsoft.com/en-us/cpp/cpp/declspec?view=msvc-160)
+//
+// So pointer return types must be handled differently (because of the
+// trailing *), or you get inscrutable compiler warnings like "error
+// C2059: syntax error: ''"
+//
+// In C++11, there is a standard syntax for attributes, and
+// GCC defines an attribute to use with this: [[gnu::noinline]].
+// In the future, this is expected to become standard.
+
+#if defined(__GNUC__) || defined(__clang__)
+/* We used to check for GCC 4+ or 3.4+, but those compilers are
+   laughably out of date. Just assume they support it. */
+# define GREENLET_NOINLINE(name) __attribute__((noinline)) name
+# define GREENLET_NOINLINE_P(rtype, name) rtype __attribute__((noinline)) name
+# define UNUSED(x) UNUSED_ ## x __attribute__((__unused__))
+#elif defined(_MSC_VER)
+/* We used to check for && (_MSC_VER >= 1300) but that's also out of date. */
+# define GREENLET_NOINLINE(name) __declspec(noinline) name
+# define GREENLET_NOINLINE_P(rtype, name) __declspec(noinline) rtype name
+# define UNUSED(x) UNUSED_ ## x
+#endif
+
+#if defined(_MSC_VER)
+# define G_NOEXCEPT_WIN32 noexcept
+#else
+# define G_NOEXCEPT_WIN32
+#endif
+
+#if defined(__GNUC__) && defined(__POWERPC__) && defined(__APPLE__)
+// 32-bit PPC/MacOSX. Only known to be tested on unreleased versions
+// of macOS 10.6 using a macports build gcc 14. It appears that
+// running C++ destructors of thread-local variables is broken.
+ +// See https://github.com/python-greenlet/greenlet/pull/419 +# define GREENLET_BROKEN_THREAD_LOCAL_CLEANUP_JUST_LEAK 1 +#else +# define GREENLET_BROKEN_THREAD_LOCAL_CLEANUP_JUST_LEAK 0 +#endif + + +#endif diff --git a/venv/lib/python3.11/site-packages/greenlet/greenlet_cpython_compat.hpp b/venv/lib/python3.11/site-packages/greenlet/greenlet_cpython_compat.hpp new file mode 100644 index 0000000..979d6f9 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/greenlet_cpython_compat.hpp @@ -0,0 +1,148 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +#ifndef GREENLET_CPYTHON_COMPAT_H +#define GREENLET_CPYTHON_COMPAT_H + +/** + * Helpers for compatibility with multiple versions of CPython. + */ + +#define PY_SSIZE_T_CLEAN +#include "Python.h" + + +#if PY_VERSION_HEX >= 0x30A00B1 +# define GREENLET_PY310 1 +#else +# define GREENLET_PY310 0 +#endif + +/* +Python 3.10 beta 1 changed tstate->use_tracing to a nested cframe member. +See https://github.com/python/cpython/pull/25276 +We have to save and restore this as well. + +Python 3.13 removed PyThreadState.cframe (GH-108035). +*/ +#if GREENLET_PY310 && PY_VERSION_HEX < 0x30D0000 +# define GREENLET_USE_CFRAME 1 +#else +# define GREENLET_USE_CFRAME 0 +#endif + + +#if PY_VERSION_HEX >= 0x30B00A4 +/* +Greenlet won't compile on anything older than Python 3.11 alpha 4 (see +https://bugs.python.org/issue46090). Summary of breaking internal changes: +- Python 3.11 alpha 1 changed how frame objects are represented internally. + - https://github.com/python/cpython/pull/30122 +- Python 3.11 alpha 3 changed how recursion limits are stored. + - https://github.com/python/cpython/pull/29524 +- Python 3.11 alpha 4 changed how exception state is stored. It also includes a + change to help greenlet save and restore the interpreter frame "data stack". + - https://github.com/python/cpython/pull/30122 + - https://github.com/python/cpython/pull/30234 +*/ +# define GREENLET_PY311 1 +#else +# define GREENLET_PY311 0 +#endif + + +#if PY_VERSION_HEX >= 0x30C0000 +# define GREENLET_PY312 1 +#else +# define GREENLET_PY312 0 +#endif + +#if PY_VERSION_HEX >= 0x30D0000 +# define GREENLET_PY313 1 +#else +# define GREENLET_PY313 0 +#endif + +#if PY_VERSION_HEX >= 0x30E0000 +# define GREENLET_PY314 1 +#else +# define GREENLET_PY314 0 +#endif + +#ifndef Py_SET_REFCNT +/* Py_REFCNT and Py_SIZE macros are converted to functions +https://bugs.python.org/issue39573 */ +# define Py_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) +#endif + +#ifndef _Py_DEC_REFTOTAL +/* _Py_DEC_REFTOTAL macro has been removed from Python 3.9 by: + https://github.com/python/cpython/commit/49932fec62c616ec88da52642339d83ae719e924 + + The symbol we use to replace it was removed by at least 3.12. +*/ +# ifdef Py_REF_DEBUG +# if GREENLET_PY312 +# define _Py_DEC_REFTOTAL +# else +# define _Py_DEC_REFTOTAL _Py_RefTotal-- +# endif +# else +# define _Py_DEC_REFTOTAL +# endif +#endif +// Define these flags like Cython does if we're on an old version. 
+#ifndef Py_TPFLAGS_CHECKTYPES
+  #define Py_TPFLAGS_CHECKTYPES 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_INDEX
+  #define Py_TPFLAGS_HAVE_INDEX 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_NEWBUFFER
+  #define Py_TPFLAGS_HAVE_NEWBUFFER 0
+#endif
+
+#ifndef Py_TPFLAGS_HAVE_VERSION_TAG
+  #define Py_TPFLAGS_HAVE_VERSION_TAG 0
+#endif
+
+#define G_TPFLAGS_DEFAULT Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_VERSION_TAG | Py_TPFLAGS_CHECKTYPES | Py_TPFLAGS_HAVE_NEWBUFFER | Py_TPFLAGS_HAVE_GC
+
+
+#if PY_VERSION_HEX < 0x03090000
+// The official version only became available in 3.9
+# define PyObject_GC_IsTracked(o) _PyObject_GC_IS_TRACKED(o)
+#endif
+
+
+// bpo-43760 added PyThreadState_EnterTracing() to Python 3.11.0a2
+#if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION)
+static inline void PyThreadState_EnterTracing(PyThreadState *tstate)
+{
+    tstate->tracing++;
+#if PY_VERSION_HEX >= 0x030A00A1
+    tstate->cframe->use_tracing = 0;
+#else
+    tstate->use_tracing = 0;
+#endif
+}
+#endif
+
+// bpo-43760 added PyThreadState_LeaveTracing() to Python 3.11.0a2
+#if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION)
+static inline void PyThreadState_LeaveTracing(PyThreadState *tstate)
+{
+    tstate->tracing--;
+    int use_tracing = (tstate->c_tracefunc != NULL
+                       || tstate->c_profilefunc != NULL);
+#if PY_VERSION_HEX >= 0x030A00A1
+    tstate->cframe->use_tracing = use_tracing;
+#else
+    tstate->use_tracing = use_tracing;
+#endif
+}
+#endif
+
+#if !defined(Py_C_RECURSION_LIMIT) && defined(C_RECURSION_LIMIT)
+# define Py_C_RECURSION_LIMIT C_RECURSION_LIMIT
+#endif
+
+#endif /* GREENLET_CPYTHON_COMPAT_H */
diff --git a/venv/lib/python3.11/site-packages/greenlet/greenlet_exceptions.hpp b/venv/lib/python3.11/site-packages/greenlet/greenlet_exceptions.hpp
new file mode 100644
index 0000000..617f07c
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/greenlet_exceptions.hpp
@@ -0,0 +1,171 @@
+#ifndef GREENLET_EXCEPTIONS_HPP
+#define GREENLET_EXCEPTIONS_HPP
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+#include <string>
+#include <stdexcept>
+
+#ifdef __clang__
+# pragma clang diagnostic push
+# pragma clang diagnostic ignored "-Wunused-function"
+#endif
+
+namespace greenlet {
+
+    class PyErrOccurred : public std::runtime_error
+    {
+    public:
+
+        // CAUTION: In debug builds, may run arbitrary Python code.
+        static const PyErrOccurred
+        from_current()
+        {
+            assert(PyErr_Occurred());
+#ifndef NDEBUG
+            // This is not exception safe, and
+            // not necessarily safe in general (what if it switches?)
+            // But we only do this in debug mode, where we are in
+            // tight control of what exceptions are getting raised and
+            // can prevent those issues.
+
+            // You can't call PyObject_Str with a pending exception.
+            PyObject* typ;
+            PyObject* val;
+            PyObject* tb;
+
+            PyErr_Fetch(&typ, &val, &tb);
+            PyObject* typs = PyObject_Str(typ);
+            PyObject* vals = PyObject_Str(val ?
val : typ); + const char* typ_msg = PyUnicode_AsUTF8(typs); + const char* val_msg = PyUnicode_AsUTF8(vals); + PyErr_Restore(typ, val, tb); + + std::string msg(typ_msg); + msg += ": "; + msg += val_msg; + PyErrOccurred ex(msg); + Py_XDECREF(typs); + Py_XDECREF(vals); + + return ex; +#else + return PyErrOccurred(); +#endif + } + + PyErrOccurred() : std::runtime_error("") + { + assert(PyErr_Occurred()); + } + + PyErrOccurred(const std::string& msg) : std::runtime_error(msg) + { + assert(PyErr_Occurred()); + } + + PyErrOccurred(PyObject* exc_kind, const char* const msg) + : std::runtime_error(msg) + { + PyErr_SetString(exc_kind, msg); + } + + PyErrOccurred(PyObject* exc_kind, const std::string msg) + : std::runtime_error(msg) + { + // This copies the c_str, so we don't have any lifetime + // issues to worry about. + PyErr_SetString(exc_kind, msg.c_str()); + } + + PyErrOccurred(PyObject* exc_kind, + const std::string msg, //This is the format + //string; that's not + //usually safe! + + PyObject* borrowed_obj_one, PyObject* borrowed_obj_two) + : std::runtime_error(msg) + { + + //This is designed specifically for the + //``check_switch_allowed`` function. + + // PyObject_Str and PyObject_Repr are safe to call with + // NULL pointers; they return the string "" in that + // case. + // This function always returns null. + PyErr_Format(exc_kind, + msg.c_str(), + borrowed_obj_one, borrowed_obj_two); + } + }; + + class TypeError : public PyErrOccurred + { + public: + TypeError(const char* const what) + : PyErrOccurred(PyExc_TypeError, what) + { + } + TypeError(const std::string what) + : PyErrOccurred(PyExc_TypeError, what) + { + } + }; + + class ValueError : public PyErrOccurred + { + public: + ValueError(const char* const what) + : PyErrOccurred(PyExc_ValueError, what) + { + } + }; + + class AttributeError : public PyErrOccurred + { + public: + AttributeError(const char* const what) + : PyErrOccurred(PyExc_AttributeError, what) + { + } + }; + + /** + * Calls `Py_FatalError` when constructed, so you can't actually + * throw this. It just makes static analysis easier. + */ + class PyFatalError : public std::runtime_error + { + public: + PyFatalError(const char* const msg) + : std::runtime_error(msg) + { + Py_FatalError(msg); + } + }; + + static inline PyObject* + Require(PyObject* p, const std::string& msg="") + { + if (!p) { + throw PyErrOccurred(msg); + } + return p; + }; + + static inline void + Require(const int retval) + { + if (retval < 0) { + throw PyErrOccurred(); + } + }; + + +}; +#ifdef __clang__ +# pragma clang diagnostic pop +#endif + +#endif diff --git a/venv/lib/python3.11/site-packages/greenlet/greenlet_internal.hpp b/venv/lib/python3.11/site-packages/greenlet/greenlet_internal.hpp new file mode 100644 index 0000000..f2b15d5 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/greenlet_internal.hpp @@ -0,0 +1,107 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ +#ifndef GREENLET_INTERNAL_H +#define GREENLET_INTERNAL_H +#ifdef __clang__ +# pragma clang diagnostic push +# pragma clang diagnostic ignored "-Wunused-function" +#endif + +/** + * Implementation helpers. + * + * C++ templates and inline functions should go here. 
+ */ +#define PY_SSIZE_T_CLEAN +#include "greenlet_compiler_compat.hpp" +#include "greenlet_cpython_compat.hpp" +#include "greenlet_exceptions.hpp" +#include "TGreenlet.hpp" +#include "greenlet_allocator.hpp" + +#include +#include + +#define GREENLET_MODULE +struct _greenlet; +typedef struct _greenlet PyGreenlet; +namespace greenlet { + + class ThreadState; + // We can't use the PythonAllocator for this, because we push to it + // from the thread state destructor, which doesn't have the GIL, + // and Python's allocators can only be called with the GIL. + typedef std::vector cleanup_queue_t; + +}; + + +#define implementation_ptr_t greenlet::Greenlet* + + +#include "greenlet.h" + +void +greenlet::refs::MainGreenletExactChecker(void *p) +{ + if (!p) { + return; + } + // We control the class of the main greenlet exactly. + if (Py_TYPE(p) != &PyGreenlet_Type) { + std::string err("MainGreenlet: Expected exactly a greenlet, not a "); + err += Py_TYPE(p)->tp_name; + throw greenlet::TypeError(err); + } + + // Greenlets from dead threads no longer respond to main() with a + // true value; so in that case we need to perform an additional + // check. + Greenlet* g = static_cast(p)->pimpl; + if (g->main()) { + return; + } + if (!dynamic_cast(g)) { + std::string err("MainGreenlet: Expected exactly a main greenlet, not a "); + err += Py_TYPE(p)->tp_name; + throw greenlet::TypeError(err); + } +} + + + +template +inline greenlet::Greenlet* greenlet::refs::_OwnedGreenlet::operator->() const noexcept +{ + return reinterpret_cast(this->p)->pimpl; +} + +template +inline greenlet::Greenlet* greenlet::refs::_BorrowedGreenlet::operator->() const noexcept +{ + return reinterpret_cast(this->p)->pimpl; +} + +#include +#include + + +extern PyTypeObject PyGreenlet_Type; + + + +/** + * Forward declarations needed in multiple files. + */ +static PyObject* green_switch(PyGreenlet* self, PyObject* args, PyObject* kwargs); + + +#ifdef __clang__ +# pragma clang diagnostic pop +#endif + + +#endif + +// Local Variables: +// flycheck-clang-include-path: ("../../include" "/opt/local/Library/Frameworks/Python.framework/Versions/3.10/include/python3.10") +// End: diff --git a/venv/lib/python3.11/site-packages/greenlet/greenlet_refs.hpp b/venv/lib/python3.11/site-packages/greenlet/greenlet_refs.hpp new file mode 100644 index 0000000..b7e5e3f --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/greenlet_refs.hpp @@ -0,0 +1,1118 @@ +#ifndef GREENLET_REFS_HPP +#define GREENLET_REFS_HPP + +#define PY_SSIZE_T_CLEAN +#include + +#include + +//#include "greenlet_internal.hpp" +#include "greenlet_compiler_compat.hpp" +#include "greenlet_cpython_compat.hpp" +#include "greenlet_exceptions.hpp" + +struct _greenlet; +struct _PyMainGreenlet; + +typedef struct _greenlet PyGreenlet; +extern PyTypeObject PyGreenlet_Type; + + +#ifdef GREENLET_USE_STDIO +#include +using std::cerr; +using std::endl; +#endif + +namespace greenlet +{ + class Greenlet; + + namespace refs + { + // Type checkers throw a TypeError if the argument is not + // null, and isn't of the required Python type. + // (We can't use most of the defined type checkers + // like PyList_Check, etc, directly, because they are + // implemented as macros.) + typedef void (*TypeChecker)(void*); + + void + NoOpChecker(void*) + { + return; + } + + void + GreenletChecker(void *p) + { + if (!p) { + return; + } + + PyTypeObject* typ = Py_TYPE(p); + // fast, common path. 
(PyObject_TypeCheck is a macro or + // static inline function, and it also does a + // direct comparison of the type pointers, but its fast + // path only handles one type) + if (typ == &PyGreenlet_Type) { + return; + } + + if (!PyObject_TypeCheck(p, &PyGreenlet_Type)) { + std::string err("GreenletChecker: Expected any type of greenlet, not "); + err += Py_TYPE(p)->tp_name; + throw TypeError(err); + } + } + + void + MainGreenletExactChecker(void *p); + + template + class PyObjectPointer; + + template + class OwnedReference; + + + template + class BorrowedReference; + + typedef BorrowedReference BorrowedObject; + typedef OwnedReference OwnedObject; + + class ImmortalObject; + class ImmortalString; + + template + class _OwnedGreenlet; + + typedef _OwnedGreenlet OwnedGreenlet; + typedef _OwnedGreenlet OwnedMainGreenlet; + + template + class _BorrowedGreenlet; + + typedef _BorrowedGreenlet BorrowedGreenlet; + + void + ContextExactChecker(void *p) + { + if (!p) { + return; + } + if (!PyContext_CheckExact(p)) { + throw TypeError( + "greenlet context must be a contextvars.Context or None" + ); + } + } + + typedef OwnedReference OwnedContext; + } +} + +namespace greenlet { + + + namespace refs { + // A set of classes to make reference counting rules in python + // code explicit. + // + // Rules of use: + // (1) Functions returning a new reference that the caller of the + // function is expected to dispose of should return a + // ``OwnedObject`` object. This object automatically releases its + // reference when it goes out of scope. It works like a ``std::shared_ptr`` + // and can be copied or used as a function parameter (but don't do + // that). Note that constructing a ``OwnedObject`` from a + // PyObject* steals the reference. + // (2) Parameters to functions should be either a + // ``OwnedObject&``, or, more generally, a ``PyObjectPointer&``. + // If the function needs to create its own new reference, it can + // do so by copying to a local ``OwnedObject``. + // (3) Functions returning an existing pointer that is NOT + // incref'd, and which the caller MUST NOT decref, + // should return a ``BorrowedObject``. + + // XXX: The following two paragraphs do not hold for all platforms. + // Notably, 32-bit PPC Linux passes structs by reference, not by + // value, so this actually doesn't work. (Although that's the only + // platform that doesn't work on.) DO NOT ATTEMPT IT. The + // unfortunate consequence of that is that the slots which we + // *know* are already type safe will wind up calling the type + // checker function (when we had the slots accepting + // BorrowedGreenlet, this was bypassed), so this slows us down. + // TODO: Optimize this again. + + // For a class with a single pointer member, whose constructor + // does nothing but copy a pointer parameter into the member, and + // which can then be converted back to the pointer type, compilers + // generate code that's the same as just passing the pointer. + // That is, func(BorrowedObject x) called like ``PyObject* p = + // ...; f(p)`` has 0 overhead. Similarly, they "unpack" to the + // pointer type with 0 overhead. + // + // If there are no virtual functions, no complex inheritance (maybe?) and + // no destructor, these can be directly used as parameters in + // Python callbacks like tp_init: the layout is the same as a + // single pointer. Only subclasses with trivial constructors that + // do nothing but set the single pointer member are safe to use + // that way. 
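+        // To make rule (1) concrete, a minimal sketch (``lookup_name``
+        // is a hypothetical helper, not something defined in this file):
+        //
+        //     OwnedObject lookup_name(const PyObjectPointer<>& obj)
+        //     {
+        //         // PyObject_GetAttrString returns a new reference (or
+        //         // NULL on error); consuming() adopts it, so the DECREF
+        //         // happens automatically when the result goes out of scope.
+        //         return OwnedObject::consuming(
+        //             PyObject_GetAttrString(obj.borrow_o(), "name"));
+        //     }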
+ + + // This is the base class for things that can be done with a + // PyObject pointer. It assumes nothing about memory management. + // NOTE: Nothing is virtual, so subclasses shouldn't add new + // storage fields or try to override these methods. + template + class PyObjectPointer + { + public: + typedef T PyType; + protected: + T* p; + public: + PyObjectPointer(T* it=nullptr) : p(it) + { + TC(p); + } + + // We don't allow automatic casting to PyObject* at this + // level, because then we could be passed to Py_DECREF/INCREF, + // but we want nothing to do with memory management. If you + // know better, then you can use the get() method, like on a + // std::shared_ptr. Except we name it borrow() to clarify that + // if this is a reference-tracked object, the pointer you get + // back will go away when the object does. + // TODO: This should probably not exist here, but be moved + // down to relevant sub-types. + + T* borrow() const noexcept + { + return this->p; + } + + PyObject* borrow_o() const noexcept + { + return reinterpret_cast(this->p); + } + + T* operator->() const noexcept + { + return this->p; + } + + bool is_None() const noexcept + { + return this->p == Py_None; + } + + PyObject* acquire_or_None() const noexcept + { + PyObject* result = this->p ? reinterpret_cast(this->p) : Py_None; + Py_INCREF(result); + return result; + } + + explicit operator bool() const noexcept + { + return this->p != nullptr; + } + + bool operator!() const noexcept + { + return this->p == nullptr; + } + + Py_ssize_t REFCNT() const noexcept + { + return p ? Py_REFCNT(p) : -42; + } + + PyTypeObject* TYPE() const noexcept + { + return p ? Py_TYPE(p) : nullptr; + } + + inline OwnedObject PyStr() const noexcept; + inline const std::string as_str() const noexcept; + inline OwnedObject PyGetAttr(const ImmortalObject& name) const noexcept; + inline OwnedObject PyRequireAttr(const char* const name) const; + inline OwnedObject PyRequireAttr(const ImmortalString& name) const; + inline OwnedObject PyCall(const BorrowedObject& arg) const; + inline OwnedObject PyCall(PyGreenlet* arg) const ; + inline OwnedObject PyCall(PyObject* arg) const ; + // PyObject_Call(this, args, kwargs); + inline OwnedObject PyCall(const BorrowedObject args, + const BorrowedObject kwargs) const; + inline OwnedObject PyCall(const OwnedObject& args, + const OwnedObject& kwargs) const; + + protected: + void _set_raw_pointer(void* t) + { + TC(t); + p = reinterpret_cast(t); + } + void* _get_raw_pointer() const + { + return p; + } + }; + +#ifdef GREENLET_USE_STDIO + template + std::ostream& operator<<(std::ostream& os, const PyObjectPointer& s) + { + const std::type_info& t = typeid(s); + os << t.name() + << "(addr=" << s.borrow() + << ", refcnt=" << s.REFCNT() + << ", value=" << s.as_str() + << ")"; + + return os; + } +#endif + + template + inline bool operator==(const PyObjectPointer& lhs, const PyObject* const rhs) noexcept + { + return static_cast(lhs.borrow_o()) == static_cast(rhs); + } + + template + inline bool operator==(const PyObjectPointer& lhs, const PyObjectPointer& rhs) noexcept + { + return lhs.borrow_o() == rhs.borrow_o(); + } + + template + inline bool operator!=(const PyObjectPointer& lhs, + const PyObjectPointer& rhs) noexcept + { + return lhs.borrow_o() != rhs.borrow_o(); + } + + template + class OwnedReference : public PyObjectPointer + { + private: + friend class OwnedList; + + protected: + explicit OwnedReference(T* it) : PyObjectPointer(it) + { + } + + public: + + // Constructors + + static OwnedReference 
consuming(PyObject* p) + { + return OwnedReference(reinterpret_cast(p)); + } + + static OwnedReference owning(T* p) + { + OwnedReference result(p); + Py_XINCREF(result.p); + return result; + } + + OwnedReference() : PyObjectPointer(nullptr) + {} + + explicit OwnedReference(const PyObjectPointer<>& other) + : PyObjectPointer(nullptr) + { + T* op = other.borrow(); + TC(op); + this->p = other.borrow(); + Py_XINCREF(this->p); + } + + // It would be good to make use of the C++11 distinction + // between move and copy operations, e.g., constructing from a + // pointer should be a move operation. + // In the common case of ``OwnedObject x = Py_SomeFunction()``, + // the call to the copy constructor will be elided completely. + OwnedReference(const OwnedReference& other) + : PyObjectPointer(other.p) + { + Py_XINCREF(this->p); + } + + static OwnedReference None() + { + Py_INCREF(Py_None); + return OwnedReference(Py_None); + } + + // We can assign from exactly our type without any extra checking + OwnedReference& operator=(const OwnedReference& other) + { + Py_XINCREF(other.p); + const T* tmp = this->p; + this->p = other.p; + Py_XDECREF(tmp); + return *this; + } + + OwnedReference& operator=(const BorrowedReference other) + { + return this->operator=(other.borrow()); + } + + OwnedReference& operator=(T* const other) + { + TC(other); + Py_XINCREF(other); + T* tmp = this->p; + this->p = other; + Py_XDECREF(tmp); + return *this; + } + + // We can assign from an arbitrary reference type + // if it passes our check. + template + OwnedReference& operator=(const OwnedReference& other) + { + X* op = other.borrow(); + TC(op); + return this->operator=(reinterpret_cast(op)); + } + + inline void steal(T* other) + { + assert(this->p == nullptr); + TC(other); + this->p = other; + } + + T* relinquish_ownership() + { + T* result = this->p; + this->p = nullptr; + return result; + } + + T* acquire() const + { + // Return a new reference. + // TODO: This may go away when we have reference objects + // throughout the code. + Py_XINCREF(this->p); + return this->p; + } + + // Nothing else declares a destructor, we're the leaf, so we + // should be able to get away without virtual. + ~OwnedReference() + { + Py_CLEAR(this->p); + } + + void CLEAR() + { + Py_CLEAR(this->p); + assert(this->p == nullptr); + } + }; + + static inline + void operator<<=(PyObject*& target, OwnedObject& o) + { + target = o.relinquish_ownership(); + } + + + class NewReference : public OwnedObject + { + private: + G_NO_COPIES_OF_CLS(NewReference); + public: + // Consumes the reference. Only use this + // for API return values. 
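+        // For instance (a sketch):
+        //
+        //     NewReference result(PyObject_CallObject(callable, args));
+        //     if (!result) {
+        //         // the Python error indicator is set; handle or propagate
+        //     }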
+ NewReference(PyObject* it) : OwnedObject(it) + { + } + }; + + class NewDictReference : public NewReference + { + private: + G_NO_COPIES_OF_CLS(NewDictReference); + public: + NewDictReference() : NewReference(PyDict_New()) + { + if (!this->p) { + throw PyErrOccurred(); + } + } + + void SetItem(const char* const key, PyObject* value) + { + Require(PyDict_SetItemString(this->p, key, value)); + } + + void SetItem(const PyObjectPointer<>& key, PyObject* value) + { + Require(PyDict_SetItem(this->p, key.borrow_o(), value)); + } + }; + + template + class _OwnedGreenlet: public OwnedReference + { + private: + protected: + _OwnedGreenlet(T* it) : OwnedReference(it) + {} + + public: + _OwnedGreenlet() : OwnedReference() + {} + + _OwnedGreenlet(const _OwnedGreenlet& other) : OwnedReference(other) + { + } + _OwnedGreenlet(OwnedMainGreenlet& other) : + OwnedReference(reinterpret_cast(other.acquire())) + { + } + _OwnedGreenlet(const BorrowedGreenlet& other); + // Steals a reference. + static _OwnedGreenlet consuming(PyGreenlet* it) + { + return _OwnedGreenlet(reinterpret_cast(it)); + } + + inline _OwnedGreenlet& operator=(const OwnedGreenlet& other) + { + return this->operator=(other.borrow()); + } + + inline _OwnedGreenlet& operator=(const BorrowedGreenlet& other); + + _OwnedGreenlet& operator=(const OwnedMainGreenlet& other) + { + PyGreenlet* owned = other.acquire(); + Py_XDECREF(this->p); + this->p = reinterpret_cast(owned); + return *this; + } + + _OwnedGreenlet& operator=(T* const other) + { + OwnedReference::operator=(other); + return *this; + } + + T* relinquish_ownership() + { + T* result = this->p; + this->p = nullptr; + return result; + } + + PyObject* relinquish_ownership_o() + { + return reinterpret_cast(relinquish_ownership()); + } + + inline Greenlet* operator->() const noexcept; + inline operator Greenlet*() const noexcept; + }; + + template + class BorrowedReference : public PyObjectPointer + { + public: + // Allow implicit creation from PyObject* pointers as we + // transition to using these classes. Also allow automatic + // conversion to PyObject* for passing to C API calls and even + // for Py_INCREF/DECREF, because we ourselves do no memory management. 
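+        // A sketch of that transparency (``append_item`` is hypothetical):
+        //
+        //     static int append_item(PyObject* list, BorrowedObject item)
+        //     {
+        //         // the implicit operator T*() below lets ``item`` pass
+        //         // straight through; no INCREF/DECREF happens here
+        //         return PyList_Append(list, item);
+        //     }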
+ BorrowedReference(T* it) : PyObjectPointer(it) + {} + + BorrowedReference(const PyObjectPointer& ref) : PyObjectPointer(ref.borrow()) + {} + + BorrowedReference() : PyObjectPointer(nullptr) + {} + + operator T*() const + { + return this->p; + } + }; + + typedef BorrowedReference BorrowedObject; + //typedef BorrowedReference BorrowedGreenlet; + + template + class _BorrowedGreenlet : public BorrowedReference + { + public: + _BorrowedGreenlet() : + BorrowedReference(nullptr) + {} + + _BorrowedGreenlet(T* it) : + BorrowedReference(it) + {} + + _BorrowedGreenlet(const BorrowedObject& it); + + _BorrowedGreenlet(const OwnedGreenlet& it) : + BorrowedReference(it.borrow()) + {} + + _BorrowedGreenlet& operator=(const BorrowedObject& other); + + // We get one of these for PyGreenlet, but one for PyObject + // is handy as well + operator PyObject*() const + { + return reinterpret_cast(this->p); + } + Greenlet* operator->() const noexcept; + operator Greenlet*() const noexcept; + }; + + typedef _BorrowedGreenlet BorrowedGreenlet; + + template + _OwnedGreenlet::_OwnedGreenlet(const BorrowedGreenlet& other) + : OwnedReference(reinterpret_cast(other.borrow())) + { + Py_XINCREF(this->p); + } + + + class BorrowedMainGreenlet + : public _BorrowedGreenlet + { + public: + BorrowedMainGreenlet(const OwnedMainGreenlet& it) : + _BorrowedGreenlet(it.borrow()) + {} + BorrowedMainGreenlet(PyGreenlet* it=nullptr) + : _BorrowedGreenlet(it) + {} + }; + + template + _OwnedGreenlet& _OwnedGreenlet::operator=(const BorrowedGreenlet& other) + { + return this->operator=(other.borrow()); + } + + + class ImmortalObject : public PyObjectPointer<> + { + private: + G_NO_ASSIGNMENT_OF_CLS(ImmortalObject); + public: + explicit ImmortalObject(PyObject* it) : PyObjectPointer<>(it) + { + } + + ImmortalObject(const ImmortalObject& other) + : PyObjectPointer<>(other.p) + { + + } + + /** + * Become the new owner of the object. Does not change the + * reference count. + */ + ImmortalObject& operator=(PyObject* it) + { + assert(this->p == nullptr); + this->p = it; + return *this; + } + + static ImmortalObject consuming(PyObject* it) + { + return ImmortalObject(it); + } + + inline operator PyObject*() const + { + return this->p; + } + }; + + class ImmortalString : public ImmortalObject + { + private: + G_NO_COPIES_OF_CLS(ImmortalString); + const char* str; + public: + ImmortalString(const char* const str) : + ImmortalObject(str ? Require(PyUnicode_InternFromString(str)) : nullptr) + { + this->str = str; + } + + inline ImmortalString& operator=(const char* const str) + { + if (!this->p) { + this->p = Require(PyUnicode_InternFromString(str)); + this->str = str; + } + else { + assert(this->str == str); + } + return *this; + } + + inline operator std::string() const + { + return this->str; + } + + }; + + class ImmortalEventName : public ImmortalString + { + private: + G_NO_COPIES_OF_CLS(ImmortalEventName); + public: + ImmortalEventName(const char* const str) : ImmortalString(str) + {} + }; + + class ImmortalException : public ImmortalObject + { + private: + G_NO_COPIES_OF_CLS(ImmortalException); + public: + ImmortalException(const char* const name, PyObject* base=nullptr) : + ImmortalObject(name + // Python 2.7 isn't const correct + ? 
Require(PyErr_NewException((char*)name, base, nullptr)) + : nullptr) + {} + + inline bool PyExceptionMatches() const + { + return PyErr_ExceptionMatches(this->p) > 0; + } + + }; + + template + inline OwnedObject PyObjectPointer::PyStr() const noexcept + { + if (!this->p) { + return OwnedObject(); + } + return OwnedObject::consuming(PyObject_Str(reinterpret_cast(this->p))); + } + + template + inline const std::string PyObjectPointer::as_str() const noexcept + { + // NOTE: This is not Python exception safe. + if (this->p) { + // The Python APIs return a cached char* value that's only valid + // as long as the original object stays around, and we're + // about to (probably) toss it. Hence the copy to std::string. + OwnedObject py_str = this->PyStr(); + if (!py_str) { + return "(nil)"; + } + return PyUnicode_AsUTF8(py_str.borrow()); + } + return "(nil)"; + } + + template + inline OwnedObject PyObjectPointer::PyGetAttr(const ImmortalObject& name) const noexcept + { + assert(this->p); + return OwnedObject::consuming(PyObject_GetAttr(reinterpret_cast(this->p), name)); + } + + template + inline OwnedObject PyObjectPointer::PyRequireAttr(const char* const name) const + { + assert(this->p); + return OwnedObject::consuming(Require(PyObject_GetAttrString(this->p, name), name)); + } + + template + inline OwnedObject PyObjectPointer::PyRequireAttr(const ImmortalString& name) const + { + assert(this->p); + return OwnedObject::consuming(Require( + PyObject_GetAttr( + reinterpret_cast(this->p), + name + ), + name + )); + } + + template + inline OwnedObject PyObjectPointer::PyCall(const BorrowedObject& arg) const + { + return this->PyCall(arg.borrow()); + } + + template + inline OwnedObject PyObjectPointer::PyCall(PyGreenlet* arg) const + { + return this->PyCall(reinterpret_cast(arg)); + } + + template + inline OwnedObject PyObjectPointer::PyCall(PyObject* arg) const + { + assert(this->p); + return OwnedObject::consuming(PyObject_CallFunctionObjArgs(this->p, arg, NULL)); + } + + template + inline OwnedObject PyObjectPointer::PyCall(const BorrowedObject args, + const BorrowedObject kwargs) const + { + assert(this->p); + return OwnedObject::consuming(PyObject_Call(this->p, args, kwargs)); + } + + template + inline OwnedObject PyObjectPointer::PyCall(const OwnedObject& args, + const OwnedObject& kwargs) const + { + assert(this->p); + return OwnedObject::consuming(PyObject_Call(this->p, args.borrow(), kwargs.borrow())); + } + + inline void + ListChecker(void * p) + { + if (!p) { + return; + } + if (!PyList_Check(p)) { + throw TypeError("Expected a list"); + } + } + + class OwnedList : public OwnedReference + { + private: + G_NO_ASSIGNMENT_OF_CLS(OwnedList); + public: + // TODO: Would like to use move. + explicit OwnedList(const OwnedObject& other) + : OwnedReference(other) + { + } + + OwnedList& operator=(const OwnedObject& other) + { + if (other && PyList_Check(other.p)) { + // Valid list. Own a new reference to it, discard the + // reference to what we did own. + PyObject* new_ptr = other.p; + Py_INCREF(new_ptr); + Py_XDECREF(this->p); + this->p = new_ptr; + } + else { + // Either the other object was NULL (an error) or it + // wasn't a list. Either way, we're now invalidated. 
+ Py_XDECREF(this->p); + this->p = nullptr; + } + return *this; + } + + inline bool empty() const + { + return PyList_GET_SIZE(p) == 0; + } + + inline Py_ssize_t size() const + { + return PyList_GET_SIZE(p); + } + + inline BorrowedObject at(const Py_ssize_t index) const + { + return PyList_GET_ITEM(p, index); + } + + inline void clear() + { + PyList_SetSlice(p, 0, PyList_GET_SIZE(p), NULL); + } + }; + + // Use this to represent the module object used at module init + // time. + // This could either be a borrowed (Py2) or new (Py3) reference; + // either way, we don't want to do any memory management + // on it here, Python itself will handle that. + // XXX: Actually, that's not quite right. On Python 3, if an + // exception occurs before we return to the interpreter, this will + // leak; but all previous versions also had that problem. + class CreatedModule : public PyObjectPointer<> + { + private: + G_NO_COPIES_OF_CLS(CreatedModule); + public: + CreatedModule(PyModuleDef& mod_def) : PyObjectPointer<>( + Require(PyModule_Create(&mod_def))) + { + } + + // PyAddObject(): Add a reference to the object to the module. + // On return, the reference count of the object is unchanged. + // + // The docs warn that PyModule_AddObject only steals the + // reference on success, so if it fails after we've incref'd + // or allocated, we're responsible for the decref. + void PyAddObject(const char* name, const long new_bool) + { + OwnedObject p = OwnedObject::consuming(Require(PyBool_FromLong(new_bool))); + this->PyAddObject(name, p); + } + + void PyAddObject(const char* name, const OwnedObject& new_object) + { + // The caller already owns a reference they will decref + // when their variable goes out of scope, we still need to + // incref/decref. + this->PyAddObject(name, new_object.borrow()); + } + + void PyAddObject(const char* name, const ImmortalObject& new_object) + { + this->PyAddObject(name, new_object.borrow()); + } + + void PyAddObject(const char* name, PyTypeObject& type) + { + this->PyAddObject(name, reinterpret_cast(&type)); + } + + void PyAddObject(const char* name, PyObject* new_object) + { + Py_INCREF(new_object); + try { + Require(PyModule_AddObject(this->p, name, new_object)); + } + catch (const PyErrOccurred&) { + Py_DECREF(p); + throw; + } + } + }; + + class PyErrFetchParam : public PyObjectPointer<> + { + // Not an owned object, because we can't be initialized with + // one, and we only sometimes acquire ownership. + private: + G_NO_COPIES_OF_CLS(PyErrFetchParam); + public: + // To allow declaring these and passing them to + // PyErr_Fetch we implement the empty constructor, + // and the address operator. + PyErrFetchParam() : PyObjectPointer<>(nullptr) + { + } + + PyObject** operator&() + { + return &this->p; + } + + // This allows us to pass one directly without the &, + // BUT it has higher precedence than the bool operator + // if it's not explicit. + operator PyObject**() + { + return &this->p; + } + + // We don't want to be able to pass these to Py_DECREF and + // such so we don't have the implicit PyObject* conversion. + + inline PyObject* relinquish_ownership() + { + PyObject* result = this->p; + this->p = nullptr; + return result; + } + + ~PyErrFetchParam() + { + Py_XDECREF(p); + } + }; + + class OwnedErrPiece : public OwnedObject + { + private: + + public: + // Unlike OwnedObject, this increments the refcount. 
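+            // That makes it safe to build one from a pointer the caller
+            // merely borrows, as PyErrPieces below does with the arguments
+            // to throw(); a sketch:
+            //
+            //     OwnedErrPiece piece(borrowed);  // refcount goes +1 here
+            //     // ... and back down when ``piece`` is destroyed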
+ OwnedErrPiece(PyObject* p=nullptr) : OwnedObject(p) + { + this->acquire(); + } + + PyObject** operator&() + { + return &this->p; + } + + inline operator PyObject*() const + { + return this->p; + } + + operator PyTypeObject*() const + { + return reinterpret_cast(this->p); + } + }; + + class PyErrPieces + { + private: + OwnedErrPiece type; + OwnedErrPiece instance; + OwnedErrPiece traceback; + bool restored; + public: + // Takes new references; if we're destroyed before + // restoring the error, we drop the references. + PyErrPieces(PyObject* t, PyObject* v, PyObject* tb) : + type(t), + instance(v), + traceback(tb), + restored(0) + { + this->normalize(); + } + + PyErrPieces() : + restored(0) + { + // PyErr_Fetch transfers ownership to us, so + // we don't actually need to INCREF; but we *do* + // need to DECREF if we're not restored. + PyErrFetchParam t, v, tb; + PyErr_Fetch(&t, &v, &tb); + type.steal(t.relinquish_ownership()); + instance.steal(v.relinquish_ownership()); + traceback.steal(tb.relinquish_ownership()); + } + + void PyErrRestore() + { + // can only do this once + assert(!this->restored); + this->restored = true; + PyErr_Restore( + this->type.relinquish_ownership(), + this->instance.relinquish_ownership(), + this->traceback.relinquish_ownership()); + assert(!this->type && !this->instance && !this->traceback); + } + + private: + void normalize() + { + // First, check the traceback argument, replacing None, + // with NULL + if (traceback.is_None()) { + traceback = nullptr; + } + + if (traceback && !PyTraceBack_Check(traceback.borrow())) { + throw PyErrOccurred(PyExc_TypeError, + "throw() third argument must be a traceback object"); + } + + if (PyExceptionClass_Check(type)) { + // If we just had a type, we'll now have a type and + // instance. + // The type's refcount will have gone up by one + // because of the instance and the instance will have + // a refcount of one. Either way, we owned, and still + // do own, exactly one reference. + PyErr_NormalizeException(&type, &instance, &traceback); + + } + else if (PyExceptionInstance_Check(type)) { + /* Raising an instance --- usually that means an + object that is a subclass of BaseException, but on + Python 2, that can also mean an arbitrary old-style + object. The value should be a dummy. */ + if (instance && !instance.is_None()) { + throw PyErrOccurred( + PyExc_TypeError, + "instance exception may not have a separate value"); + } + /* Normalize to raise , */ + this->instance = this->type; + this->type = PyExceptionInstance_Class(instance.borrow()); + + /* + It would be tempting to do this: + + Py_ssize_t type_count = Py_REFCNT(Py_TYPE(instance.borrow())); + this->type = PyExceptionInstance_Class(instance.borrow()); + assert(this->type.REFCNT() == type_count + 1); + + But that doesn't work on Python 2 in the case of + old-style instances: The result of Py_TYPE is going to + be the global shared that all + old-style classes have, while the return of Instance_Class() + will be the Python-level class object. The two are unrelated. + */ + } + else { + /* Not something you can raise. throw() fails. */ + PyErr_Format(PyExc_TypeError, + "exceptions must be classes, or instances, not %s", + Py_TYPE(type.borrow())->tp_name); + throw PyErrOccurred(); + } + } + }; + + // PyArg_Parse's O argument returns a borrowed reference. 
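+    // A typical use, sketched (``args`` being an ordinary METH_VARARGS
+    // tuple):
+    //
+    //     PyArgParseParam obj;
+    //     if (!PyArg_ParseTuple(args, "O", &obj)) {
+    //         return nullptr;
+    //     }
+    //     // ``obj`` now borrows the argument; nothing is decref'd on exit.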
+    class PyArgParseParam : public BorrowedObject
+    {
+    private:
+        G_NO_COPIES_OF_CLS(PyArgParseParam);
+    public:
+        explicit PyArgParseParam(PyObject* p=nullptr) : BorrowedObject(p)
+        {
+        }
+
+        inline PyObject** operator&()
+        {
+            return &this->p;
+        }
+    };
+
+};};
+
+#endif
diff --git a/venv/lib/python3.11/site-packages/greenlet/greenlet_slp_switch.hpp b/venv/lib/python3.11/site-packages/greenlet/greenlet_slp_switch.hpp
new file mode 100644
index 0000000..bd4b7ae
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/greenlet_slp_switch.hpp
@@ -0,0 +1,99 @@
+#ifndef GREENLET_SLP_SWITCH_HPP
+#define GREENLET_SLP_SWITCH_HPP
+
+#include "greenlet_compiler_compat.hpp"
+#include "greenlet_refs.hpp"
+
+/*
+ * the following macros are spliced into the OS/compiler
+ * specific code, in order to simplify maintenance.
+ */
+// We can save about 10% of the time it takes to switch greenlets if
+// we thread the thread state through the slp_save_state() and the
+// following slp_restore_state() calls from
+// slp_switch()->g_switchstack() (which already needs to access it).
+//
+// However:
+//
+// that requires changing the prototypes and implementations of the
+// switching functions. If we just change the prototype of
+// slp_switch() to accept the argument and update the macros, without
+// changing the implementation of slp_switch(), we get crashes on
+// 64-bit Linux and 32-bit x86 (for reasons that aren't 100% clear);
+// on the other hand, 64-bit macOS seems to be fine. Also, 64-bit
+// windows is an issue because slp_switch is written fully in assembly
+// and currently ignores its argument so some code would have to be
+// adjusted there to pass the argument on to the
+// ``slp_save_state_asm()`` function (but interestingly, because of
+// the calling convention, the extra argument is just ignored and
+// things function fine, albeit slower, if we just modify
+// ``slp_save_state_asm()`` to fetch the pointer to pass to the
+// macro.)
+//
+// Our compromise is to use a *global*, untracked, weak, pointer
+// to the necessary thread state during the process of switching only.
+// This is safe because we're protected by the GIL, and if we're
+// running this code, the thread isn't exiting. This also nets us a
+// 10-12% speed improvement.
+
+static greenlet::Greenlet* volatile switching_thread_state = nullptr;
+
+
+extern "C" {
+static int GREENLET_NOINLINE(slp_save_state_trampoline)(char* stackref);
+static void GREENLET_NOINLINE(slp_restore_state_trampoline)();
+}
+
+
+#define SLP_SAVE_STATE(stackref, stsizediff) \
+do { \
+    assert(switching_thread_state); \
+    stackref += STACK_MAGIC; \
+    if (slp_save_state_trampoline((char*)stackref)) \
+        return -1; \
+    if (!switching_thread_state->active()) \
+        return 1; \
+    stsizediff = switching_thread_state->stack_start() - (char*)stackref; \
+} while (0)
+
+#define SLP_RESTORE_STATE() slp_restore_state_trampoline()
+
+#define SLP_EVAL
+extern "C" {
+#define slp_switch GREENLET_NOINLINE(slp_switch)
+#include "slp_platformselect.h"
+}
+#undef slp_switch
+
+#ifndef STACK_MAGIC
+# error \
+    "greenlet needs to be ported to this platform, or taught how to detect your compiler properly."
+#endif /* !STACK_MAGIC */
+
+
+
+#ifdef EXTERNAL_ASM
+/* CCP addition: Make these functions, to be called from assembler.
+ * The token include file for the given platform should enable the
+ * EXTERNAL_ASM define so that this is included.
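+ *
+ * For orientation, the contract these wrappers inherit from the
+ * SLP_SAVE_STATE macro above, as the assembly caller sees it (a
+ * sketch; compare switch_arm64_masm.asm):
+ *
+ *     intptr_t diff = slp_save_state_asm(sp);  // sp: current stack pointer
+ *     if (diff != -1 && diff != 1) {  // -1: save failed; 1: target not active
+ *         sp += diff;                 // relocate the saved stack
+ *         slp_restore_state_asm();
+ *     }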
+ */ +extern "C" { +intptr_t +slp_save_state_asm(intptr_t* ref) +{ + intptr_t diff; + SLP_SAVE_STATE(ref, diff); + return diff; +} + +void +slp_restore_state_asm(void) +{ + SLP_RESTORE_STATE(); +} + +extern int slp_switch(void); +}; +#endif + +#endif diff --git a/venv/lib/python3.11/site-packages/greenlet/greenlet_thread_support.hpp b/venv/lib/python3.11/site-packages/greenlet/greenlet_thread_support.hpp new file mode 100644 index 0000000..3ded7d2 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/greenlet_thread_support.hpp @@ -0,0 +1,31 @@ +#ifndef GREENLET_THREAD_SUPPORT_HPP +#define GREENLET_THREAD_SUPPORT_HPP + +/** + * Defines various utility functions to help greenlet integrate well + * with threads. This used to be needed when we supported Python + * 2.7 on Windows, which used a very old compiler. We wrote an + * alternative implementation using Python APIs and POSIX or Windows + * APIs, but that's no longer needed. So this file is a shadow of its + * former self --- but may be needed in the future. + */ + +#include +#include +#include + +#include "greenlet_compiler_compat.hpp" + +namespace greenlet { + typedef std::mutex Mutex; + typedef std::lock_guard LockGuard; + class LockInitError : public std::runtime_error + { + public: + LockInitError(const char* what) : std::runtime_error(what) + {}; + }; +}; + + +#endif /* GREENLET_THREAD_SUPPORT_HPP */ diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/__init__.py b/venv/lib/python3.11/site-packages/greenlet/platform/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/platform/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..306a303 Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/platform/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/setup_switch_x64_masm.cmd b/venv/lib/python3.11/site-packages/greenlet/platform/setup_switch_x64_masm.cmd new file mode 100644 index 0000000..038ced2 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/setup_switch_x64_masm.cmd @@ -0,0 +1,2 @@ +call "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\vcvarsall.bat" amd64 +ml64 /nologo /c /Fo switch_x64_masm.obj switch_x64_masm.asm diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_aarch64_gcc.h b/venv/lib/python3.11/site-packages/greenlet/platform/switch_aarch64_gcc.h new file mode 100644 index 0000000..058617c --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_aarch64_gcc.h @@ -0,0 +1,124 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 07-Sep-16 Add clang support using x register naming. Fredrik Fornwall + * 13-Apr-13 Add support for strange GCC caller-save decisions + * 08-Apr-13 File creation. 
Michael Matz
+ *
+ * NOTES
+ *
+ * Simply save all callee-saved registers
+ *
+ */
+
+#define STACK_REFPLUS 1
+
+#ifdef SLP_EVAL
+#define STACK_MAGIC 0
+#define REGS_TO_SAVE "x19", "x20", "x21", "x22", "x23", "x24", "x25", "x26", \
+                     "x27", "x28", "x30" /* aka lr */, \
+                     "v8", "v9", "v10", "v11", \
+                     "v12", "v13", "v14", "v15"
+
+/*
+ * Recall:
+   asm asm-qualifiers ( AssemblerTemplate
+                      : OutputOperands
+                      [ : InputOperands
+                      [ : Clobbers ] ])
+
+   or (if asm-qualifiers contains 'goto')
+
+   asm asm-qualifiers ( AssemblerTemplate
+                      : OutputOperands
+                      : InputOperands
+                      : Clobbers
+                      : GotoLabels)
+
+   and OutputOperands are
+
+   [ [asmSymbolicName] ] constraint (cvariablename)
+
+   When a name is given, refer to it as ``%[the name]``.
+   When not given, ``%i`` where ``i`` is the zero-based index.
+
+   constraints starting with ``=`` mean only writing; ``+`` means
+   reading and writing.
+
+   This is followed by ``r`` (must be register) or ``m`` (must be memory)
+   and these can be combined.
+
+   The ``cvariablename`` is actually an lvalue expression.
+
+   In AArch64, there are 31 general purpose registers. If named X0... they are
+   64-bit. If named W0... they are the bottom 32 bits of the
+   corresponding 64 bit register.
+
+   XZR and WZR are hardcoded to 0, and ignore writes.
+
+   Arguments are in X0..X7. C++ uses X0 for ``this``. X0 holds simple return
+   values (?)
+
+   Whenever a W register is written, the top half of the X register is zeroed.
+ */
+
+static int
+slp_switch(void)
+{
+    int err;
+    void *fp;
+    /* Windows uses a 32-bit long on a 64-bit platform, unlike the rest of
+       the world, and in theory we can be compiled with GCC/llvm on 64-bit
+       windows. So we need a fixed-width type.
+    */
+    int64_t *stackref, stsizediff;
+    __asm__ volatile ("" : : : REGS_TO_SAVE);
+    __asm__ volatile ("str x29, %0" : "=m"(fp) : : );
+    __asm__ ("mov %0, sp" : "=r" (stackref));
+    {
+        SLP_SAVE_STATE(stackref, stsizediff);
+        __asm__ volatile (
+            "add sp,sp,%0\n"
+            "add x29,x29,%0\n"
+            :
+            : "r" (stsizediff)
+            );
+        SLP_RESTORE_STATE();
+        /* SLP_SAVE_STATE macro contains some return statements
+           (of -1 and 1). It falls through only when
+           the return value of slp_save_state() is zero, which
+           is placed in x0.
+           In that case we (slp_switch) also want to return zero
+           (also in x0 of course).
+           Now, some GCC versions (seen with 4.8) think it's a
+           good idea to save/restore x0 around the call to
+           slp_restore_state(), instead of simply zeroing it
+           at the return below. But slp_restore_state
+           writes random values to the stack slot used for this
+           save/restore (from when it once was saved above in
+           SLP_SAVE_STATE, when it was still uninitialized), so
+           "restoring" that precious zero actually makes us
+           return random values. There are some ways to make
+           GCC not use that zero value in the normal return path
+           (e.g. making err volatile, but that costs a little
+           stack space), and the simplest is to call a function
+           that returns an unknown value (which happens to be zero),
+           so the saved/restored value is unused.
+
+           Thus, this line stores a 0 into the ``err`` variable
+           (which must be held in a register for this instruction,
+           of course). The ``w`` qualifier causes the instruction
+           to use W0 instead of X0, otherwise we get a warning
+           about a value size mismatch (because err is an int,
+           and aarch64 platforms are LP64: 32-bit int, 64 bit long
+           and pointer).
+ */ + __asm__ volatile ("mov %w0, #0" : "=r" (err)); + } + __asm__ volatile ("ldr x29, %0" : : "m" (fp) :); + __asm__ volatile ("" : : : REGS_TO_SAVE); + return err; +} + +#endif diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_alpha_unix.h b/venv/lib/python3.11/site-packages/greenlet/platform/switch_alpha_unix.h new file mode 100644 index 0000000..7e07abf --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_alpha_unix.h @@ -0,0 +1,30 @@ +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "$9", "$10", "$11", "$12", "$13", "$14", "$15", \ + "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9" + +static int +slp_switch(void) +{ + int ret; + long *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("mov $30, %0" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "addq $30, %0, $30\n\t" + : /* no outputs */ + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("mov $31, %0" : "=r" (ret) : ); + return ret; +} + +#endif diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_amd64_unix.h b/venv/lib/python3.11/site-packages/greenlet/platform/switch_amd64_unix.h new file mode 100644 index 0000000..d470110 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_amd64_unix.h @@ -0,0 +1,87 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 3-May-13 Ralf Schmitt + * Add support for strange GCC caller-save decisions + * (ported from switch_aarch64_gcc.h) + * 18-Aug-11 Alexey Borzenkov + * Correctly save rbp, csr and cw + * 01-Apr-04 Hye-Shik Chang + * Ported from i386 to amd64. + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for spark + * 31-Avr-02 Armin Rigo + * Added ebx, esi and edi register-saves. + * 01-Mar-02 Samual M. Rushing + * Ported from i386. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +/* #define STACK_MAGIC 3 */ +/* the above works fine with gcc 2.96, but 2.95.3 wants this */ +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "r12", "r13", "r14", "r15" + +static int +slp_switch(void) +{ + int err; + void* rbp; + void* rbx; + unsigned int csr; + unsigned short cw; + /* This used to be declared 'register', but that does nothing in + modern compilers and is explicitly forbidden in some new + standards. 
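+       (The keyword was deprecated in C++11 and removed outright in C++17.)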
*/
+    long *stackref, stsizediff;
+    __asm__ volatile ("" : : : REGS_TO_SAVE);
+    __asm__ volatile ("fstcw %0" : "=m" (cw));
+    __asm__ volatile ("stmxcsr %0" : "=m" (csr));
+    __asm__ volatile ("movq %%rbp, %0" : "=m" (rbp));
+    __asm__ volatile ("movq %%rbx, %0" : "=m" (rbx));
+    __asm__ ("movq %%rsp, %0" : "=g" (stackref));
+    {
+        SLP_SAVE_STATE(stackref, stsizediff);
+        __asm__ volatile (
+            "addq %0, %%rsp\n"
+            "addq %0, %%rbp\n"
+            :
+            : "r" (stsizediff)
+            );
+        SLP_RESTORE_STATE();
+        __asm__ volatile ("xorq %%rax, %%rax" : "=a" (err));
+    }
+    __asm__ volatile ("movq %0, %%rbx" : : "m" (rbx));
+    __asm__ volatile ("movq %0, %%rbp" : : "m" (rbp));
+    __asm__ volatile ("ldmxcsr %0" : : "m" (csr));
+    __asm__ volatile ("fldcw %0" : : "m" (cw));
+    __asm__ volatile ("" : : : REGS_TO_SAVE);
+    return err;
+}
+
+#endif
+
+/*
+ * further self-processing support
+ */
+
+/*
+ * if you want to add self-inspection tools, place them
+ * here. See the x86_msvc for the necessary defines.
+ * These features are highly experimental and not
+ * essential yet.
+ */
diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_arm32_gcc.h b/venv/lib/python3.11/site-packages/greenlet/platform/switch_arm32_gcc.h
new file mode 100644
index 0000000..655003a
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_arm32_gcc.h
@@ -0,0 +1,79 @@
+/*
+ * this is the internal transfer function.
+ *
+ * HISTORY
+ * 14-Aug-06 File creation. Ported from Arm Thumb. Sylvain Baro
+ * 3-Sep-06 Commented out saving of r1-r3 (r4 already commented out) as I
+ * read that these do not need to be saved. Also added notes and
+ * errors related to the frame pointer. Richard Tew.
+ *
+ * NOTES
+ *
+ * It is not possible to detect if fp is used or not, so the supplied
+ * switch function needs to support it, so that you can remove it if
+ * it does not apply to you.
+ *
+ * POSSIBLE ERRORS
+ *
+ * "fp cannot be used in asm here"
+ *
+ * - Try commenting out "fp" in REGS_TO_SAVE.
+ * + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL +#define STACK_MAGIC 0 +#define REG_SP "sp" +#define REG_SPSP "sp,sp" +#ifdef __thumb__ +#define REG_FP "r7" +#define REG_FPFP "r7,r7" +#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r8", "r9", "r10", "r11", "lr" +#else +#define REG_FP "fp" +#define REG_FPFP "fp,fp" +#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r7", "r8", "r9", "r10", "lr" +#endif +#if defined(__SOFTFP__) +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL +#elif defined(__VFP_FP__) +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "d8", "d9", "d10", "d11", \ + "d12", "d13", "d14", "d15" +#elif defined(__MAVERICK__) +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "mvf4", "mvf5", "mvf6", "mvf7", \ + "mvf8", "mvf9", "mvf10", "mvf11", \ + "mvf12", "mvf13", "mvf14", "mvf15" +#else +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "f4", "f5", "f6", "f7" +#endif + +static int +#ifdef __GNUC__ +__attribute__((optimize("no-omit-frame-pointer"))) +#endif +slp_switch(void) +{ + void *fp; + int *stackref, stsizediff; + int result; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("mov r0," REG_FP "\n\tstr r0,%0" : "=m" (fp) : : "r0"); + __asm__ ("mov %0," REG_SP : "=r" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "add " REG_SPSP ",%0\n" + "add " REG_FPFP ",%0\n" + : + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("ldr r0,%1\n\tmov " REG_FP ",r0\n\tmov %0, #0" : "=r" (result) : "m" (fp) : "r0"); + __asm__ volatile ("" : : : REGS_TO_SAVE); + return result; +} + +#endif diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_arm32_ios.h b/venv/lib/python3.11/site-packages/greenlet/platform/switch_arm32_ios.h new file mode 100644 index 0000000..9e640e1 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_arm32_ios.h @@ -0,0 +1,67 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 31-May-15 iOS support. Ported from arm32. Proton + * + * NOTES + * + * It is not possible to detect if fp is used or not, so the supplied + * switch function needs to support it, so that you can remove it if + * it does not apply to you. + * + * POSSIBLE ERRORS + * + * "fp cannot be used in asm here" + * + * - Try commenting out "fp" in REGS_TO_SAVE. 
+ * + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 0 +#define REG_SP "sp" +#define REG_SPSP "sp,sp" +#define REG_FP "r7" +#define REG_FPFP "r7,r7" +#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r8", "r10", "r11", "lr" +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "d8", "d9", "d10", "d11", \ + "d12", "d13", "d14", "d15" + +static int +#ifdef __GNUC__ +__attribute__((optimize("no-omit-frame-pointer"))) +#endif +slp_switch(void) +{ + void *fp; + int *stackref, stsizediff, result; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("str " REG_FP ",%0" : "=m" (fp)); + __asm__ ("mov %0," REG_SP : "=r" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "add " REG_SPSP ",%0\n" + "add " REG_FPFP ",%0\n" + : + : "r" (stsizediff) + : REGS_TO_SAVE /* Clobber registers, force compiler to + * recalculate address of void *fp from REG_SP or REG_FP */ + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ( + "ldr " REG_FP ", %1\n\t" + "mov %0, #0" + : "=r" (result) + : "m" (fp) + : REGS_TO_SAVE /* Force compiler to restore saved registers after this */ + ); + return result; +} + +#endif diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_arm64_masm.asm b/venv/lib/python3.11/site-packages/greenlet/platform/switch_arm64_masm.asm new file mode 100644 index 0000000..29f9c22 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_arm64_masm.asm @@ -0,0 +1,53 @@ + AREA switch_arm64_masm, CODE, READONLY; + GLOBAL slp_switch [FUNC] + EXTERN slp_save_state_asm + EXTERN slp_restore_state_asm + +slp_switch + ; push callee saved registers to stack + stp x19, x20, [sp, #-16]! + stp x21, x22, [sp, #-16]! + stp x23, x24, [sp, #-16]! + stp x25, x26, [sp, #-16]! + stp x27, x28, [sp, #-16]! + stp x29, x30, [sp, #-16]! + stp d8, d9, [sp, #-16]! + stp d10, d11, [sp, #-16]! + stp d12, d13, [sp, #-16]! + stp d14, d15, [sp, #-16]! + + ; call slp_save_state_asm with stack pointer + mov x0, sp + bl slp_save_state_asm + + ; early return for return value of 1 and -1 + cmp x0, #-1 + b.eq RETURN + cmp x0, #1 + b.eq RETURN + + ; increment stack and frame pointer + add sp, sp, x0 + add x29, x29, x0 + + bl slp_restore_state_asm + + ; store return value for successful completion of routine + mov x0, #0 + +RETURN + ; pop registers from stack + ldp d14, d15, [sp], #16 + ldp d12, d13, [sp], #16 + ldp d10, d11, [sp], #16 + ldp d8, d9, [sp], #16 + ldp x29, x30, [sp], #16 + ldp x27, x28, [sp], #16 + ldp x25, x26, [sp], #16 + ldp x23, x24, [sp], #16 + ldp x21, x22, [sp], #16 + ldp x19, x20, [sp], #16 + + ret + + END diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_arm64_masm.obj b/venv/lib/python3.11/site-packages/greenlet/platform/switch_arm64_masm.obj new file mode 100644 index 0000000..f6f220e Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/platform/switch_arm64_masm.obj differ diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_arm64_msvc.h b/venv/lib/python3.11/site-packages/greenlet/platform/switch_arm64_msvc.h new file mode 100644 index 0000000..7ab7f45 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_arm64_msvc.h @@ -0,0 +1,17 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 21-Oct-21 Niyas Sait + * First version to enable win/arm64 support. + */ + +#define STACK_REFPLUS 1 +#define STACK_MAGIC 0 + +/* Use the generic support for an external assembly language slp_switch function. 
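+
+   With EXTERNAL_ASM defined, greenlet_slp_switch.hpp compiles the C
+   slp_save_state_asm()/slp_restore_state_asm() wrappers and expects
+   slp_switch itself to arrive at link time, here from
+   switch_arm64_masm.asm; a sketch of the symbols involved:
+
+       extern "C" int slp_switch(void);             // from the .asm file
+       intptr_t slp_save_state_asm(intptr_t* ref);  // called back from asm
+       void slp_restore_state_asm(void);            // called back from asm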
*/ +#define EXTERNAL_ASM + +#ifdef SLP_EVAL +/* This always uses the external masm assembly file. */ +#endif \ No newline at end of file diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_csky_gcc.h b/venv/lib/python3.11/site-packages/greenlet/platform/switch_csky_gcc.h new file mode 100644 index 0000000..ac469d3 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_csky_gcc.h @@ -0,0 +1,48 @@ +#ifdef SLP_EVAL +#define STACK_MAGIC 0 +#define REG_FP "r8" +#ifdef __CSKYABIV2__ +#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r7", "r9", "r10", "r11", "r15",\ + "r16", "r17", "r18", "r19", "r20", "r21", "r22",\ + "r23", "r24", "r25" + +#if defined (__CSKY_HARD_FLOAT__) || (__CSKY_VDSP__) +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "vr8", "vr9", "vr10", "vr11", "vr12",\ + "vr13", "vr14", "vr15" +#else +#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL +#endif +#else +#define REGS_TO_SAVE "r9", "r10", "r11", "r12", "r13", "r15" +#endif + + +static int +#ifdef __GNUC__ +__attribute__((optimize("no-omit-frame-pointer"))) +#endif +slp_switch(void) +{ + int *stackref, stsizediff; + int result; + + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("mov %0, sp" : "=r" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "addu sp,%0\n" + "addu "REG_FP",%0\n" + : + : "r" (stsizediff) + ); + + SLP_RESTORE_STATE(); + } + __asm__ volatile ("movi %0, 0" : "=r" (result)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + + return result; +} + +#endif diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_loongarch64_linux.h b/venv/lib/python3.11/site-packages/greenlet/platform/switch_loongarch64_linux.h new file mode 100644 index 0000000..9eaf34e --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_loongarch64_linux.h @@ -0,0 +1,31 @@ +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "s0", "s1", "s2", "s3", "s4", "s5", \ + "s6", "s7", "s8", "fp", \ + "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31" + +static int +slp_switch(void) +{ + int ret; + long *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("move %0, $sp" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "add.d $sp, $sp, %0\n\t" + : /* no outputs */ + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("move %0, $zero" : "=r" (ret) : ); + return ret; +} + +#endif diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_m68k_gcc.h b/venv/lib/python3.11/site-packages/greenlet/platform/switch_m68k_gcc.h new file mode 100644 index 0000000..da761c2 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_m68k_gcc.h @@ -0,0 +1,38 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 2014-01-06 Andreas Schwab + * File created. 
+ */ + +#ifdef SLP_EVAL + +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "%d2", "%d3", "%d4", "%d5", "%d6", "%d7", \ + "%a2", "%a3", "%a4" + +static int +slp_switch(void) +{ + int err; + int *stackref, stsizediff; + void *fp, *a5; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("move.l %%fp, %0" : "=m"(fp)); + __asm__ volatile ("move.l %%a5, %0" : "=m"(a5)); + __asm__ ("move.l %%sp, %0" : "=r"(stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ("add.l %0, %%sp; add.l %0, %%fp" : : "r"(stsizediff)); + SLP_RESTORE_STATE(); + __asm__ volatile ("clr.l %0" : "=g" (err)); + } + __asm__ volatile ("move.l %0, %%a5" : : "m"(a5)); + __asm__ volatile ("move.l %0, %%fp" : : "m"(fp)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + return err; +} + +#endif diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_mips_unix.h b/venv/lib/python3.11/site-packages/greenlet/platform/switch_mips_unix.h new file mode 100644 index 0000000..b9003e9 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_mips_unix.h @@ -0,0 +1,64 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 20-Sep-14 Matt Madison + * Re-code the saving of the gp register for MIPS64. + * 05-Jan-08 Thiemo Seufer + * Ported from ppc. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "$16", "$17", "$18", "$19", "$20", "$21", "$22", \ + "$23", "$30" +static int +slp_switch(void) +{ + int err; + int *stackref, stsizediff; +#ifdef __mips64 + uint64_t gpsave; +#endif + __asm__ __volatile__ ("" : : : REGS_TO_SAVE); +#ifdef __mips64 + __asm__ __volatile__ ("sd $28,%0" : "=m" (gpsave) : : ); +#endif + __asm__ ("move %0, $29" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ __volatile__ ( +#ifdef __mips64 + "daddu $29, %0\n" +#else + "addu $29, %0\n" +#endif + : /* no outputs */ + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } +#ifdef __mips64 + __asm__ __volatile__ ("ld $28,%0" : : "m" (gpsave) : ); +#endif + __asm__ __volatile__ ("" : : : REGS_TO_SAVE); + __asm__ __volatile__ ("move %0, $0" : "=r" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_ppc64_aix.h b/venv/lib/python3.11/site-packages/greenlet/platform/switch_ppc64_aix.h new file mode 100644 index 0000000..e7e0b87 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_ppc64_aix.h @@ -0,0 +1,103 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 16-Oct-20 Jesse Gorzinski + * Copied from Linux PPC64 implementation + * 04-Sep-18 Alexey Borzenkov + * Workaround a gcc bug using manual save/restore of r30 + * 21-Mar-18 Tulio Magno Quites Machado Filho + * Added r30 to the list of saved registers in order to fully comply with + * both ppc64 ELFv1 ABI and the ppc64le ELFv2 ABI, that classify this + * register as a nonvolatile register used for local variables. + * 21-Mar-18 Laszlo Boszormenyi + * Save r2 (TOC pointer) manually. + * 10-Dec-13 Ulrich Weigand + * Support ELFv2 ABI. Save float/vector registers. + * 09-Mar-12 Michael Ellerman + * 64-bit implementation, copied from 32-bit. + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! 
+ * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + * 31-Jul-12 Trevor Bowen + * Changed memory constraints to register only. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 6 + +#if defined(__ALTIVEC__) +#define ALTIVEC_REGS \ + "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", \ + "v28", "v29", "v30", "v31", +#else +#define ALTIVEC_REGS +#endif + +#define REGS_TO_SAVE "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "r31", \ + "fr14", "fr15", "fr16", "fr17", "fr18", "fr19", "fr20", "fr21", \ + "fr22", "fr23", "fr24", "fr25", "fr26", "fr27", "fr28", "fr29", \ + "fr30", "fr31", \ + ALTIVEC_REGS \ + "cr2", "cr3", "cr4" + +static int +slp_switch(void) +{ + int err; + long *stackref, stsizediff; + void * toc; + void * r30; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("std 2, %0" : "=m" (toc)); + __asm__ volatile ("std 30, %0" : "=m" (r30)); + __asm__ ("mr %0, 1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + : /* no outputs */ + : "r" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("ld 30, %0" : : "m" (r30)); + __asm__ volatile ("ld 2, %0" : : "m" (toc)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_ppc64_linux.h b/venv/lib/python3.11/site-packages/greenlet/platform/switch_ppc64_linux.h new file mode 100644 index 0000000..3c324d0 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_ppc64_linux.h @@ -0,0 +1,105 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 04-Sep-18 Alexey Borzenkov + * Workaround a gcc bug using manual save/restore of r30 + * 21-Mar-18 Tulio Magno Quites Machado Filho + * Added r30 to the list of saved registers in order to fully comply with + * both ppc64 ELFv1 ABI and the ppc64le ELFv2 ABI, that classify this + * register as a nonvolatile register used for local variables. + * 21-Mar-18 Laszlo Boszormenyi + * Save r2 (TOC pointer) manually. + * 10-Dec-13 Ulrich Weigand + * Support ELFv2 ABI. Save float/vector registers. + * 09-Mar-12 Michael Ellerman + * 64-bit implementation, copied from 32-bit. + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! 
+ * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + * 31-Jul-12 Trevor Bowen + * Changed memory constraints to register only. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#if _CALL_ELF == 2 +#define STACK_MAGIC 4 +#else +#define STACK_MAGIC 6 +#endif + +#if defined(__ALTIVEC__) +#define ALTIVEC_REGS \ + "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", \ + "v28", "v29", "v30", "v31", +#else +#define ALTIVEC_REGS +#endif + +#define REGS_TO_SAVE "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "r31", \ + "fr14", "fr15", "fr16", "fr17", "fr18", "fr19", "fr20", "fr21", \ + "fr22", "fr23", "fr24", "fr25", "fr26", "fr27", "fr28", "fr29", \ + "fr30", "fr31", \ + ALTIVEC_REGS \ + "cr2", "cr3", "cr4" + +static int +slp_switch(void) +{ + int err; + long *stackref, stsizediff; + void * toc; + void * r30; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("std 2, %0" : "=m" (toc)); + __asm__ volatile ("std 30, %0" : "=m" (r30)); + __asm__ ("mr %0, 1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + : /* no outputs */ + : "r" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("ld 30, %0" : : "m" (r30)); + __asm__ volatile ("ld 2, %0" : : "m" (toc)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_ppc_aix.h b/venv/lib/python3.11/site-packages/greenlet/platform/switch_ppc_aix.h new file mode 100644 index 0000000..6d93c13 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_ppc_aix.h @@ -0,0 +1,87 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 07-Mar-11 Floris Bruynooghe + * Do not add stsizediff to general purpose + * register (GPR) 30 as this is a non-volatile and + * unused by the PowerOpen Environment, therefore + * this was modifying a user register instead of the + * frame pointer (which does not seem to exist). + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? 
+ * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 3 + +/* !!!!WARNING!!!! need to add "r31" in the next line if this header file + * is meant to be compiled non-dynamically! + */ +#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "cr2", "cr3", "cr4" +static int +slp_switch(void) +{ + int err; + int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("mr %0, 1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + : /* no outputs */ + : "r" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_ppc_linux.h b/venv/lib/python3.11/site-packages/greenlet/platform/switch_ppc_linux.h new file mode 100644 index 0000000..e83ad70 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_ppc_linux.h @@ -0,0 +1,84 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + * 31-Jul-12 Trevor Bowen + * Changed memory constraints to register only. 
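+ *
+ * (One plausible reading of that last change: a memory operand for
+ * stsizediff could be addressed relative to r1, whose value the inline
+ * assembly below changes, so forcing the value into a plain register
+ * first is the safer choice.)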
+ */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 3 + +/* !!!!WARNING!!!! need to add "r31" in the next line if this header file + * is meant to be compiled non-dynamically! + */ +#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "cr2", "cr3", "cr4" +static int +slp_switch(void) +{ + int err; + int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("mr %0, 1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + "add 30, 30, 11\n" + : /* no outputs */ + : "r" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_ppc_macosx.h b/venv/lib/python3.11/site-packages/greenlet/platform/switch_ppc_macosx.h new file mode 100644 index 0000000..bd414c6 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_ppc_macosx.h @@ -0,0 +1,82 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 3 + +/* !!!!WARNING!!!! need to add "r31" in the next line if this header file + * is meant to be compiled non-dynamically! 
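+ *
+ * (gcc uses r31 as the PowerPC frame pointer whenever it needs one, and a
+ * register cannot simultaneously be the frame pointer and an asm clobber;
+ * presumably that is why the dynamically-compiled build omits it here.)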
+ */ +#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "cr2", "cr3", "cr4" + +static int +slp_switch(void) +{ + int err; + int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("; asm block 2\n\tmr %0, r1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "; asm block 3\n" + "\tmr r11, %0\n" + "\tadd r1, r1, r11\n" + "\tadd r30, r30, r11\n" + : /* no outputs */ + : "r" (stsizediff) + : "r11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_ppc_unix.h b/venv/lib/python3.11/site-packages/greenlet/platform/switch_ppc_unix.h new file mode 100644 index 0000000..bb18808 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_ppc_unix.h @@ -0,0 +1,82 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'r31' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 14-Jan-04 Bob Ippolito + * added cr2-cr4 to the registers to be saved. + * Open questions: Should we save FP registers? + * What about vector registers? + * Differences between darwin and unix? + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 04-Oct-02 Gustavo Niemeyer + * Ported from MacOS version. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 29-Jun-02 Christian Tismer + * Added register 13-29, 31 saves. The same way as + * Armin Rigo did for the x86_unix version. + * This seems to be now fully functional! + * 04-Mar-02 Hye-Shik Chang + * Ported from i386. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 3 + +/* !!!!WARNING!!!! need to add "r31" in the next line if this header file + * is meant to be compiled non-dynamically! + */ +#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ + "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ + "cr2", "cr3", "cr4" +static int +slp_switch(void) +{ + int err; + int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ ("mr %0, 1" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "mr 11, %0\n" + "add 1, 1, 11\n" + "add 30, 30, 11\n" + : /* no outputs */ + : "r" (stsizediff) + : "11" + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("li %0, 0" : "=r" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. 
+ * These features are highly experimental und not + * essential yet. + */ diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_riscv_unix.h b/venv/lib/python3.11/site-packages/greenlet/platform/switch_riscv_unix.h new file mode 100644 index 0000000..8761122 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_riscv_unix.h @@ -0,0 +1,41 @@ +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "s1", "s2", "s3", "s4", "s5", \ + "s6", "s7", "s8", "s9", "s10", "s11", "fs0", "fs1", \ + "fs2", "fs3", "fs4", "fs5", "fs6", "fs7", "fs8", "fs9", \ + "fs10", "fs11" + +static int +slp_switch(void) +{ + int ret; + long fp; + long *stackref, stsizediff; + + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("mv %0, fp" : "=r" (fp) : ); + __asm__ volatile ("mv %0, sp" : "=r" (stackref) : ); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "add sp, sp, %0\n\t" + "add fp, fp, %0\n\t" + : /* no outputs */ + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); +#if __riscv_xlen == 32 + __asm__ volatile ("lw fp, %0" : : "m" (fp)); +#else + __asm__ volatile ("ld fp, %0" : : "m" (fp)); +#endif + __asm__ volatile ("mv %0, zero" : "=r" (ret) : ); + return ret; +} + +#endif diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_s390_unix.h b/venv/lib/python3.11/site-packages/greenlet/platform/switch_s390_unix.h new file mode 100644 index 0000000..9199367 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_s390_unix.h @@ -0,0 +1,87 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 25-Jan-12 Alexey Borzenkov + * Fixed Linux/S390 port to work correctly with + * different optimization options both on 31-bit + * and 64-bit. Thanks to Stefan Raabe for lots + * of testing. + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 06-Oct-02 Gustavo Niemeyer + * Ported to Linux/S390. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#ifdef __s390x__ +#define STACK_MAGIC 20 /* 20 * 8 = 160 bytes of function call area */ +#else +#define STACK_MAGIC 24 /* 24 * 4 = 96 bytes of function call area */ +#endif + +/* Technically, r11-r13 also need saving, but function prolog starts + with stm(g) and since there are so many saved registers already + it won't be optimized, resulting in all r6-r15 being saved */ +#define REGS_TO_SAVE "r6", "r7", "r8", "r9", "r10", "r14", \ + "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", \ + "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15" + +static int +slp_switch(void) +{ + int ret; + long *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); +#ifdef __s390x__ + __asm__ volatile ("lgr %0, 15" : "=r" (stackref) : ); +#else + __asm__ volatile ("lr %0, 15" : "=r" (stackref) : ); +#endif + { + SLP_SAVE_STATE(stackref, stsizediff); +/* N.B. + r11 may be used as the frame pointer, and in that case it cannot be + clobbered and needs offsetting just like the stack pointer (but in cases + where frame pointer isn't used we might clobber it accidentally). What's + scary is that r11 is 2nd (and even 1st when GOT is used) callee saved + register that gcc would chose for surviving function calls. 
However, + since r6-r10 are clobbered above, their cost for reuse is reduced, so + gcc IRA will chose them over r11 (not seeing r11 is implicitly saved), + making it relatively safe to offset in all cases. :) */ + __asm__ volatile ( +#ifdef __s390x__ + "agr 15, %0\n\t" + "agr 11, %0" +#else + "ar 15, %0\n\t" + "ar 11, %0" +#endif + : /* no outputs */ + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("lhi %0, 0" : "=r" (ret) : ); + return ret; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_sh_gcc.h b/venv/lib/python3.11/site-packages/greenlet/platform/switch_sh_gcc.h new file mode 100644 index 0000000..5ecc3b3 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_sh_gcc.h @@ -0,0 +1,36 @@ +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL +#define STACK_MAGIC 0 +#define REGS_TO_SAVE "r8", "r9", "r10", "r11", "r13", \ + "fr12", "fr13", "fr14", "fr15" + +// r12 Global context pointer, GP +// r14 Frame pointer, FP +// r15 Stack pointer, SP + +static int +slp_switch(void) +{ + int err; + void* fp; + int *stackref, stsizediff; + __asm__ volatile("" : : : REGS_TO_SAVE); + __asm__ volatile("mov.l r14, %0" : "=m"(fp) : :); + __asm__("mov r15, %0" : "=r"(stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile( + "add %0, r15\n" + "add %0, r14\n" + : /* no outputs */ + : "r"(stsizediff)); + SLP_RESTORE_STATE(); + __asm__ volatile("mov r0, %0" : "=r"(err) : :); + } + __asm__ volatile("mov.l %0, r14" : : "m"(fp) :); + __asm__ volatile("" : : : REGS_TO_SAVE); + return err; +} + +#endif diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_sparc_sun_gcc.h b/venv/lib/python3.11/site-packages/greenlet/platform/switch_sparc_sun_gcc.h new file mode 100644 index 0000000..96990c3 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_sparc_sun_gcc.h @@ -0,0 +1,92 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 16-May-15 Alexey Borzenkov + * Move stack spilling code inside save/restore functions + * 30-Aug-13 Floris Bruynooghe + Clean the register windows again before returning. + This does not clobber the PIC register as it leaves + the current window intact and is required for multi- + threaded code to work correctly. + * 08-Mar-11 Floris Bruynooghe + * No need to set return value register explicitly + * before the stack and framepointer are adjusted + * as none of the other registers are influenced by + * this. Also don't needlessly clean the windows + * ('ta %0" :: "i" (ST_CLEAN_WINDOWS)') as that + * clobbers the gcc PIC register (%l7). + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. 
+ * 15-Sep-02 Gerd Woetzel + * added support for SunOS sparc with gcc + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + + +#define STACK_MAGIC 0 + + +#if defined(__sparcv9) +#define SLP_FLUSHW __asm__ volatile ("flushw") +#else +#define SLP_FLUSHW __asm__ volatile ("ta 3") /* ST_FLUSH_WINDOWS */ +#endif + +/* On sparc we need to spill register windows inside save/restore functions */ +#define SLP_BEFORE_SAVE_STATE() SLP_FLUSHW +#define SLP_BEFORE_RESTORE_STATE() SLP_FLUSHW + + +static int +slp_switch(void) +{ + int err; + int *stackref, stsizediff; + + /* Put current stack pointer into stackref. + * Register spilling is done in save/restore. + */ + __asm__ volatile ("mov %%sp, %0" : "=r" (stackref)); + + { + /* Thou shalt put SLP_SAVE_STATE into a local block */ + /* Copy the current stack onto the heap */ + SLP_SAVE_STATE(stackref, stsizediff); + + /* Increment stack and frame pointer by stsizediff */ + __asm__ volatile ( + "add %0, %%sp, %%sp\n\t" + "add %0, %%fp, %%fp" + : : "r" (stsizediff)); + + /* Copy new stack from it's save store on the heap */ + SLP_RESTORE_STATE(); + + __asm__ volatile ("mov %1, %0" : "=r" (err) : "i" (0)); + return err; + } +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. + */ diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_x32_unix.h b/venv/lib/python3.11/site-packages/greenlet/platform/switch_x32_unix.h new file mode 100644 index 0000000..893369c --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_x32_unix.h @@ -0,0 +1,63 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 17-Aug-12 Fantix King + * Ported from amd64. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 0 + +#define REGS_TO_SAVE "r12", "r13", "r14", "r15" + + +static int +slp_switch(void) +{ + void* ebp; + void* ebx; + unsigned int csr; + unsigned short cw; + int err; + int *stackref, stsizediff; + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("fstcw %0" : "=m" (cw)); + __asm__ volatile ("stmxcsr %0" : "=m" (csr)); + __asm__ volatile ("movl %%ebp, %0" : "=m" (ebp)); + __asm__ volatile ("movl %%ebx, %0" : "=m" (ebx)); + __asm__ ("movl %%esp, %0" : "=g" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "addl %0, %%esp\n" + "addl %0, %%ebp\n" + : + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + } + __asm__ volatile ("movl %0, %%ebx" : : "m" (ebx)); + __asm__ volatile ("movl %0, %%ebp" : : "m" (ebp)); + __asm__ volatile ("ldmxcsr %0" : : "m" (csr)); + __asm__ volatile ("fldcw %0" : : "m" (cw)); + __asm__ volatile ("" : : : REGS_TO_SAVE); + __asm__ volatile ("xorl %%eax, %%eax" : "=a" (err)); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. 
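+ *
+ * (On the x32 ABI the CPU runs in 64-bit mode but pointers and longs are
+ * 32 bits wide, which is why this port mixes the two worlds: 32-bit
+ * movl/addl on esp/ebp for the stack move, while still preserving the
+ * 64-bit callee-saved r12-r15 through the clobber list above.)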
+ */ diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_x64_masm.asm b/venv/lib/python3.11/site-packages/greenlet/platform/switch_x64_masm.asm new file mode 100644 index 0000000..f5c72a2 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_x64_masm.asm @@ -0,0 +1,111 @@ +; +; stack switching code for MASM on x641 +; Kristjan Valur Jonsson, sept 2005 +; + + +;prototypes for our calls +slp_save_state_asm PROTO +slp_restore_state_asm PROTO + + +pushxmm MACRO reg + sub rsp, 16 + .allocstack 16 + movaps [rsp], reg ; faster than movups, but we must be aligned + ; .savexmm128 reg, offset (don't know what offset is, no documentation) +ENDM +popxmm MACRO reg + movaps reg, [rsp] ; faster than movups, but we must be aligned + add rsp, 16 +ENDM + +pushreg MACRO reg + push reg + .pushreg reg +ENDM +popreg MACRO reg + pop reg +ENDM + + +.code +slp_switch PROC FRAME + ;realign stack to 16 bytes after return address push, makes the following faster + sub rsp,8 + .allocstack 8 + + pushxmm xmm15 + pushxmm xmm14 + pushxmm xmm13 + pushxmm xmm12 + pushxmm xmm11 + pushxmm xmm10 + pushxmm xmm9 + pushxmm xmm8 + pushxmm xmm7 + pushxmm xmm6 + + pushreg r15 + pushreg r14 + pushreg r13 + pushreg r12 + + pushreg rbp + pushreg rbx + pushreg rdi + pushreg rsi + + sub rsp, 10h ;allocate the singlefunction argument (must be multiple of 16) + .allocstack 10h +.endprolog + + lea rcx, [rsp+10h] ;load stack base that we are saving + call slp_save_state_asm ;pass stackpointer, return offset in eax + cmp rax, 1 + je EXIT1 + cmp rax, -1 + je EXIT2 + ;actual stack switch: + add rsp, rax + call slp_restore_state_asm + xor rax, rax ;return 0 + +EXIT: + + add rsp, 10h + popreg rsi + popreg rdi + popreg rbx + popreg rbp + + popreg r12 + popreg r13 + popreg r14 + popreg r15 + + popxmm xmm6 + popxmm xmm7 + popxmm xmm8 + popxmm xmm9 + popxmm xmm10 + popxmm xmm11 + popxmm xmm12 + popxmm xmm13 + popxmm xmm14 + popxmm xmm15 + + add rsp, 8 + ret + +EXIT1: + mov rax, 1 + jmp EXIT + +EXIT2: + sar rax, 1 + jmp EXIT + +slp_switch ENDP + +END \ No newline at end of file diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_x64_masm.obj b/venv/lib/python3.11/site-packages/greenlet/platform/switch_x64_masm.obj new file mode 100644 index 0000000..64e3e6b Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/platform/switch_x64_masm.obj differ diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_x64_msvc.h b/venv/lib/python3.11/site-packages/greenlet/platform/switch_x64_msvc.h new file mode 100644 index 0000000..601ea56 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_x64_msvc.h @@ -0,0 +1,60 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 26-Sep-02 Christian Tismer + * again as a result of virtualized stack access, + * the compiler used less registers. Needed to + * explicit mention registers in order to get them saved. + * Thanks to Jeff Senn for pointing this out and help. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. 
+ * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 01-Mar-02 Christian Tismer + * Initial final version after lots of iterations for i386. + */ + +/* Avoid alloca redefined warning on mingw64 */ +#ifndef alloca +#define alloca _alloca +#endif + +#define STACK_REFPLUS 1 +#define STACK_MAGIC 0 + +/* Use the generic support for an external assembly language slp_switch function. */ +#define EXTERNAL_ASM + +#ifdef SLP_EVAL +/* This always uses the external masm assembly file. */ +#endif + +/* + * further self-processing support + */ + +/* we have IsBadReadPtr available, so we can peek at objects */ +/* +#define STACKLESS_SPY + +#ifdef IMPLEMENT_STACKLESSMODULE +#include "Windows.h" +#define CANNOT_READ_MEM(p, bytes) IsBadReadPtr(p, bytes) + +static int IS_ON_STACK(void*p) +{ + int stackref; + intptr_t stackbase = ((intptr_t)&stackref) & 0xfffff000; + return (intptr_t)p >= stackbase && (intptr_t)p < stackbase + 0x00100000; +} + +#endif +*/ \ No newline at end of file diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_x86_msvc.h b/venv/lib/python3.11/site-packages/greenlet/platform/switch_x86_msvc.h new file mode 100644 index 0000000..0f3a59f --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_x86_msvc.h @@ -0,0 +1,326 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 26-Sep-02 Christian Tismer + * again as a result of virtualized stack access, + * the compiler used less registers. Needed to + * explicit mention registers in order to get them saved. + * Thanks to Jeff Senn for pointing this out and help. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for sparc + * 01-Mar-02 Christian Tismer + * Initial final version after lots of iterations for i386. + */ + +#define alloca _alloca + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +#define STACK_MAGIC 0 + +/* Some magic to quell warnings and keep slp_switch() from crashing when built + with VC90. Disable global optimizations, and the warning: frame pointer + register 'ebp' modified by inline assembly code. + + We used to just disable global optimizations ("g") but upstream stackless + Python, as well as stackman, turn off all optimizations. + +References: +https://github.com/stackless-dev/stackman/blob/dbc72fe5207a2055e658c819fdeab9731dee78b9/stackman/platforms/switch_x86_msvc.h +https://github.com/stackless-dev/stackless/blob/main-slp/Stackless/platf/switch_x86_msvc.h +*/ +#define WIN32_LEAN_AND_MEAN +#include + +#pragma optimize("", off) /* so that autos are stored on the stack */ +#pragma warning(disable:4731) +#pragma warning(disable:4733) /* disable warning about modifying FS[0] */ + +/** + * Most modern compilers and environments handle C++ exceptions without any + * special help from us. MSVC on 32-bit windows is an exception. There, C++ + * exceptions are dealt with using Windows' Structured Exception Handling + * (SEH). + * + * SEH is implemented as a singly linked list of nodes. The + * head of this list is stored in the Thread Information Block, which itself + * is pointed to from the FS register. 
+ * It's the first field in the structure, or offset 0, so we can access it
+ * using assembly FS:[0], or the compiler intrinsics and field offset
+ * information from the headers (as we do below).
+ * Somewhat unusually, the tail of the list doesn't have prev == NULL, it has
+ * prev == 0xFFFFFFFF.
+ *
+ * SEH was designed for C, and traditionally uses the MSVC compiler
+ * intrinsics __try{}/__except{}. It is also utilized for C++ exceptions by
+ * MSVC; there, every throw of a C++ exception raises a SEH error with the
+ * ExceptionCode 0xE06D7363; the SEH handler list is then traversed to
+ * deal with the exception.
+ *
+ * If the SEH list is corrupt, then when a C++ exception is thrown the program
+ * will abruptly exit with exit code 1. This does not use std::terminate(), so
+ * std::set_terminate() is useless to debug this.
+ *
+ * The SEH list is closely tied to the call stack; entering a function that
+ * uses __try{} or most C++ functions will push a new handler onto the front
+ * of the list. Returning from the function will remove the handler. Saving
+ * and restoring the head node of the SEH list (FS:[0]) per-greenlet is NOT
+ * ENOUGH to make SEH or exceptions work.
+ *
+ * Stack switching breaks SEH because the call stack no longer necessarily
+ * matches the SEH list. For example, given greenlet A that switches to
+ * greenlet B, at the moment of entering greenlet B, we will have any SEH
+ * handlers from greenlet A on the SEH list; greenlet B can then add its own
+ * handlers to the SEH list. When greenlet B switches back to greenlet A,
+ * greenlet B's handlers would still be on the SEH stack, but when switch()
+ * returns control to greenlet A, we have replaced the contents of the stack
+ * in memory, so all the addresses that greenlet B added to the SEH list are
+ * now invalid: part of the call stack has been unwound, but the SEH list was
+ * out of sync with the call stack. The net effect is that exception handling
+ * stops working.
+ *
+ * Thus, when switching greenlets, we need to be sure that the SEH list
+ * matches the effective call stack, "cutting out" any handlers that were
+ * pushed by the greenlet that switched out and which are no longer valid.
+ *
+ * The easiest way to do this is to capture the SEH list at the time the main
+ * greenlet for a thread is created, and, when initially starting a greenlet,
+ * start a new SEH list for it, which contains nothing but the handler
+ * established for the new greenlet itself, with the tail being the handlers
+ * for the main greenlet. If we then save and restore the SEH per-greenlet,
+ * they won't interfere with each other's SEH lists. (No greenlet can unwind
+ * the call stack past the handlers established by the main greenlet.)
+ *
+ * By observation, a new thread starts with three SEH handlers on the list. By
+ * the time we get around to creating the main greenlet, though, there can be
+ * many more, established by transient calls that lead to the creation of the
+ * main greenlet. Therefore, 3 is a magic constant telling us when to perform
+ * the initial slice.
+ *
+ * All of this can be debugged using a vectored exception handler, which
+ * operates independently of the SEH handler list, and is called first.
+ * Walking the SEH list at key points can also be helpful.
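+ *
+ * As a concrete sketch of that "slice" (handler names hypothetical): if the
+ * main greenlet established handlers A1..A3 and greenlet B pushed B1 and B2
+ * before switching away, the relink performed in slp_set_exception_state()
+ * below turns
+ *
+ *   FS:[0] -> B2 -> B1 -> A3 -> A2 -> A1 -> 0xFFFFFFFF
+ *
+ * into
+ *
+ *   FS:[0] -> current -> A3 -> A2 -> A1 -> 0xFFFFFFFF
+ *
+ * leaving only handlers whose frames are still on the effective call stack.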
+ *
+ * References:
+ * https://en.wikipedia.org/wiki/Win32_Thread_Information_Block
+ * https://devblogs.microsoft.com/oldnewthing/20100730-00/?p=13273
+ * https://docs.microsoft.com/en-us/cpp/cpp/try-except-statement?view=msvc-160
+ * https://docs.microsoft.com/en-us/cpp/cpp/structured-exception-handling-c-cpp?view=msvc-160
+ * https://docs.microsoft.com/en-us/windows/win32/debug/structured-exception-handling
+ * https://docs.microsoft.com/en-us/windows/win32/debug/using-a-vectored-exception-handler
+ * https://bytepointer.com/resources/pietrek_crash_course_depths_of_win32_seh.htm
+ */
+#define GREENLET_NEEDS_EXCEPTION_STATE_SAVED
+
+
+typedef struct _GExceptionRegistration {
+    struct _GExceptionRegistration* prev;
+    void* handler_f;
+} GExceptionRegistration;
+
+static void
+slp_set_exception_state(const void *const seh_state)
+{
+    // Because the stack from which we do this is ALSO a handler, and
+    // that one we want to keep, we need to relink the current SEH handler
+    // frame to point to this one, cutting out the middle men, as it were.
+    //
+    // Entering a try block doesn't change the SEH frame, but entering a
+    // function containing a try block does.
+    GExceptionRegistration* current_seh_state = (GExceptionRegistration*)__readfsdword(FIELD_OFFSET(NT_TIB, ExceptionList));
+    current_seh_state->prev = (GExceptionRegistration*)seh_state;
+}
+
+
+static GExceptionRegistration*
+x86_slp_get_third_oldest_handler()
+{
+    GExceptionRegistration* a = NULL; /* Closest to the top */
+    GExceptionRegistration* b = NULL; /* second */
+    GExceptionRegistration* c = NULL;
+    GExceptionRegistration* seh_state = (GExceptionRegistration*)__readfsdword(FIELD_OFFSET(NT_TIB, ExceptionList));
+    a = b = c = seh_state;
+
+    while (seh_state && seh_state != (GExceptionRegistration*)0xFFFFFFFF) {
+        if ((void*)seh_state->prev < (void*)100) {
+            fprintf(stderr, "\tERROR: Broken SEH chain.\n");
+            return NULL;
+        }
+        a = b;
+        b = c;
+        c = seh_state;
+
+        seh_state = seh_state->prev;
+    }
+    return a ? a : (b ? b : c);
+}
+
+
+static void*
+slp_get_exception_state()
+{
+    // XXX: There appear to be three SEH handlers on the stack already at the
+    // start of the thread. Is that a guarantee? Almost certainly not. Yet in
+    // all observed cases it has been three. This is consistent with
+    // faulthandler off or on, and optimizations off or on. It may not be
+    // consistent with other operating system versions, though: we only have
+    // CI on one or two versions (don't ask which they are).
+    // In theory we could capture the number of handlers on the chain when
+    // PyInit__greenlet is called: there are probably only the default
+    // handlers at that point (unless we're embedded and people have used
+    // __try/__except or a C++ handler)?
+    return x86_slp_get_third_oldest_handler();
+}
+
+static int
+slp_switch(void)
+{
+    /* MASM syntax is typically reversed from other assemblers.
+       It is usually of the form: instruction destination, source.
+     */
+    int *stackref, stsizediff;
+    /* store the structured exception state for this stack */
+    DWORD seh_state = __readfsdword(FIELD_OFFSET(NT_TIB, ExceptionList));
+    __asm mov stackref, esp;
+    /* modify EBX, ESI and EDI in order to get them preserved */
+    __asm mov ebx, ebx;
+    __asm xchg esi, edi;
+    {
+        SLP_SAVE_STATE(stackref, stsizediff);
+        __asm {
+            mov eax, stsizediff
+            add esp, eax
+            add ebp, eax
+        }
+        SLP_RESTORE_STATE();
+    }
+    __writefsdword(FIELD_OFFSET(NT_TIB, ExceptionList), seh_state);
+    return 0;
+}
+
+/* re-enable ebp warning and global optimizations.
*/ +#pragma optimize("", on) +#pragma warning(default:4731) +#pragma warning(default:4733) /* disable warning about modifying FS[0] */ + + +#endif + +/* + * further self-processing support + */ + +/* we have IsBadReadPtr available, so we can peek at objects */ +#define STACKLESS_SPY + +#ifdef GREENLET_DEBUG + +#define CANNOT_READ_MEM(p, bytes) IsBadReadPtr(p, bytes) + +static int IS_ON_STACK(void*p) +{ + int stackref; + int stackbase = ((int)&stackref) & 0xfffff000; + return (int)p >= stackbase && (int)p < stackbase + 0x00100000; +} + +static void +x86_slp_show_seh_chain() +{ + GExceptionRegistration* seh_state = (GExceptionRegistration*)__readfsdword(FIELD_OFFSET(NT_TIB, ExceptionList)); + fprintf(stderr, "====== SEH Chain ======\n"); + while (seh_state && seh_state != (GExceptionRegistration*)0xFFFFFFFF) { + fprintf(stderr, "\tSEH_chain addr: %p handler: %p prev: %p\n", + seh_state, + seh_state->handler_f, seh_state->prev); + if ((void*)seh_state->prev < (void*)100) { + fprintf(stderr, "\tERROR: Broken chain.\n"); + break; + } + seh_state = seh_state->prev; + } + fprintf(stderr, "====== End SEH Chain ======\n"); + fflush(NULL); + return; +} + +//addVectoredExceptionHandler constants: +//CALL_FIRST means call this exception handler first; +//CALL_LAST means call this exception handler last +#define CALL_FIRST 1 +#define CALL_LAST 0 + +LONG WINAPI +GreenletVectorHandler(PEXCEPTION_POINTERS ExceptionInfo) +{ + // We get one of these for every C++ exception, with code + // E06D7363 + // This is a special value that means "C++ exception from MSVC" + // https://devblogs.microsoft.com/oldnewthing/20100730-00/?p=13273 + // + // Install in the module init function with: + // AddVectoredExceptionHandler(CALL_FIRST, GreenletVectorHandler); + PEXCEPTION_RECORD ExceptionRecord = ExceptionInfo->ExceptionRecord; + + fprintf(stderr, + "GOT VECTORED EXCEPTION:\n" + "\tExceptionCode : %p\n" + "\tExceptionFlags : %p\n" + "\tExceptionAddr : %p\n" + "\tNumberparams : %ld\n", + ExceptionRecord->ExceptionCode, + ExceptionRecord->ExceptionFlags, + ExceptionRecord->ExceptionAddress, + ExceptionRecord->NumberParameters + ); + if (ExceptionRecord->ExceptionFlags & 1) { + fprintf(stderr, "\t\tEH_NONCONTINUABLE\n" ); + } + if (ExceptionRecord->ExceptionFlags & 2) { + fprintf(stderr, "\t\tEH_UNWINDING\n" ); + } + if (ExceptionRecord->ExceptionFlags & 4) { + fprintf(stderr, "\t\tEH_EXIT_UNWIND\n" ); + } + if (ExceptionRecord->ExceptionFlags & 8) { + fprintf(stderr, "\t\tEH_STACK_INVALID\n" ); + } + if (ExceptionRecord->ExceptionFlags & 0x10) { + fprintf(stderr, "\t\tEH_NESTED_CALL\n" ); + } + if (ExceptionRecord->ExceptionFlags & 0x20) { + fprintf(stderr, "\t\tEH_TARGET_UNWIND\n" ); + } + if (ExceptionRecord->ExceptionFlags & 0x40) { + fprintf(stderr, "\t\tEH_COLLIDED_UNWIND\n" ); + } + fprintf(stderr, "\n"); + fflush(NULL); + for(DWORD i = 0; i < ExceptionRecord->NumberParameters; i++) { + fprintf(stderr, "\t\t\tParam %ld: %lX\n", i, ExceptionRecord->ExceptionInformation[i]); + } + + if (ExceptionRecord->NumberParameters == 3) { + fprintf(stderr, "\tAbout to traverse SEH chain\n"); + // C++ Exception records have 3 params. 
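+        // (For an MSVC C++ throw those three are, by convention, the magic
+        // number 0x19930520, a pointer to the thrown object, and a pointer
+        // to the compiler's ThrowInfo; 64-bit builds add a fourth, the
+        // throwing module's base address.)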
+ x86_slp_show_seh_chain(); + } + + return EXCEPTION_CONTINUE_SEARCH; +} + + + + +#endif diff --git a/venv/lib/python3.11/site-packages/greenlet/platform/switch_x86_unix.h b/venv/lib/python3.11/site-packages/greenlet/platform/switch_x86_unix.h new file mode 100644 index 0000000..493fa6b --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/platform/switch_x86_unix.h @@ -0,0 +1,105 @@ +/* + * this is the internal transfer function. + * + * HISTORY + * 3-May-13 Ralf Schmitt + * Add support for strange GCC caller-save decisions + * (ported from switch_aarch64_gcc.h) + * 19-Aug-11 Alexey Borzenkov + * Correctly save ebp, ebx and cw + * 07-Sep-05 (py-dev mailing list discussion) + * removed 'ebx' from the register-saved. !!!! WARNING !!!! + * It means that this file can no longer be compiled statically! + * It is now only suitable as part of a dynamic library! + * 24-Nov-02 Christian Tismer + * needed to add another magic constant to insure + * that f in slp_eval_frame(PyFrameObject *f) + * STACK_REFPLUS will probably be 1 in most cases. + * gets included into the saved stack area. + * 17-Sep-02 Christian Tismer + * after virtualizing stack save/restore, the + * stack size shrunk a bit. Needed to introduce + * an adjustment STACK_MAGIC per platform. + * 15-Sep-02 Gerd Woetzel + * slightly changed framework for spark + * 31-Avr-02 Armin Rigo + * Added ebx, esi and edi register-saves. + * 01-Mar-02 Samual M. Rushing + * Ported from i386. + */ + +#define STACK_REFPLUS 1 + +#ifdef SLP_EVAL + +/* #define STACK_MAGIC 3 */ +/* the above works fine with gcc 2.96, but 2.95.3 wants this */ +#define STACK_MAGIC 0 + +#if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5) +# define ATTR_NOCLONE __attribute__((noclone)) +#else +# define ATTR_NOCLONE +#endif + +static int +slp_switch(void) +{ + int err; +#ifdef _WIN32 + void *seh; +#endif + void *ebp, *ebx; + unsigned short cw; + int *stackref, stsizediff; + __asm__ volatile ("" : : : "esi", "edi"); + __asm__ volatile ("fstcw %0" : "=m" (cw)); + __asm__ volatile ("movl %%ebp, %0" : "=m" (ebp)); + __asm__ volatile ("movl %%ebx, %0" : "=m" (ebx)); +#ifdef _WIN32 + __asm__ volatile ( + "movl %%fs:0x0, %%eax\n" + "movl %%eax, %0\n" + : "=m" (seh) + : + : "eax"); +#endif + __asm__ ("movl %%esp, %0" : "=g" (stackref)); + { + SLP_SAVE_STATE(stackref, stsizediff); + __asm__ volatile ( + "addl %0, %%esp\n" + "addl %0, %%ebp\n" + : + : "r" (stsizediff) + ); + SLP_RESTORE_STATE(); + __asm__ volatile ("xorl %%eax, %%eax" : "=a" (err)); + } +#ifdef _WIN32 + __asm__ volatile ( + "movl %0, %%eax\n" + "movl %%eax, %%fs:0x0\n" + : + : "m" (seh) + : "eax"); +#endif + __asm__ volatile ("movl %0, %%ebx" : : "m" (ebx)); + __asm__ volatile ("movl %0, %%ebp" : : "m" (ebp)); + __asm__ volatile ("fldcw %0" : : "m" (cw)); + __asm__ volatile ("" : : : "esi", "edi"); + return err; +} + +#endif + +/* + * further self-processing support + */ + +/* + * if you want to add self-inspection tools, place them + * here. See the x86_msvc for the necessary defines. + * These features are highly experimental und not + * essential yet. 
+ */ diff --git a/venv/lib/python3.11/site-packages/greenlet/slp_platformselect.h b/venv/lib/python3.11/site-packages/greenlet/slp_platformselect.h new file mode 100644 index 0000000..225c67b --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/slp_platformselect.h @@ -0,0 +1,75 @@ +/* + * Platform Selection for Stackless Python + */ +#ifdef __cplusplus +extern "C" { +#endif + +#if defined(MS_WIN32) && !defined(MS_WIN64) && defined(_M_IX86) && defined(_MSC_VER) +# include "platform/switch_x86_msvc.h" /* MS Visual Studio on X86 */ +#elif defined(MS_WIN64) && defined(_M_X64) && defined(_MSC_VER) || defined(__MINGW64__) +# include "platform/switch_x64_msvc.h" /* MS Visual Studio on X64 */ +#elif defined(MS_WIN64) && defined(_M_ARM64) +# include "platform/switch_arm64_msvc.h" /* MS Visual Studio on ARM64 */ +#elif defined(__GNUC__) && defined(__amd64__) && defined(__ILP32__) +# include "platform/switch_x32_unix.h" /* gcc on amd64 with x32 ABI */ +#elif defined(__GNUC__) && defined(__amd64__) +# include "platform/switch_amd64_unix.h" /* gcc on amd64 */ +#elif defined(__GNUC__) && defined(__i386__) +# include "platform/switch_x86_unix.h" /* gcc on X86 */ +#elif defined(__GNUC__) && defined(__powerpc64__) && (defined(__linux__) || defined(__FreeBSD__)) +# include "platform/switch_ppc64_linux.h" /* gcc on PowerPC 64-bit */ +#elif defined(__GNUC__) && defined(__PPC__) && (defined(__linux__) || defined(__FreeBSD__)) +# include "platform/switch_ppc_linux.h" /* gcc on PowerPC */ +#elif defined(__GNUC__) && defined(__POWERPC__) && defined(__APPLE__) +# include "platform/switch_ppc_macosx.h" /* Apple MacOS X on 32-bit PowerPC */ +#elif defined(__GNUC__) && defined(__powerpc64__) && defined(_AIX) +# include "platform/switch_ppc64_aix.h" /* gcc on AIX/PowerPC 64-bit */ +#elif defined(__GNUC__) && defined(_ARCH_PPC) && defined(_AIX) +# include "platform/switch_ppc_aix.h" /* gcc on AIX/PowerPC */ +#elif defined(__GNUC__) && defined(__powerpc__) && defined(__NetBSD__) +#include "platform/switch_ppc_unix.h" /* gcc on NetBSD/powerpc */ +#elif defined(__GNUC__) && defined(sparc) +# include "platform/switch_sparc_sun_gcc.h" /* SunOS sparc with gcc */ +#elif defined(__SUNPRO_C) && defined(sparc) && defined(sun) +# include "platform/switch_sparc_sun_gcc.h" /* SunStudio on amd64 */ +#elif defined(__SUNPRO_C) && defined(__amd64__) && defined(sun) +# include "platform/switch_amd64_unix.h" /* SunStudio on amd64 */ +#elif defined(__SUNPRO_C) && defined(__i386__) && defined(sun) +# include "platform/switch_x86_unix.h" /* SunStudio on x86 */ +#elif defined(__GNUC__) && defined(__s390__) && defined(__linux__) +# include "platform/switch_s390_unix.h" /* Linux/S390 */ +#elif defined(__GNUC__) && defined(__s390x__) && defined(__linux__) +# include "platform/switch_s390_unix.h" /* Linux/S390 zSeries (64-bit) */ +#elif defined(__GNUC__) && defined(__arm__) +# ifdef __APPLE__ +# include +# endif +# if TARGET_OS_IPHONE +# include "platform/switch_arm32_ios.h" /* iPhone OS on arm32 */ +# else +# include "platform/switch_arm32_gcc.h" /* gcc using arm32 */ +# endif +#elif defined(__GNUC__) && defined(__mips__) && defined(__linux__) +# include "platform/switch_mips_unix.h" /* Linux/MIPS */ +#elif defined(__GNUC__) && defined(__aarch64__) +# include "platform/switch_aarch64_gcc.h" /* Aarch64 ABI */ +#elif defined(__GNUC__) && defined(__mc68000__) +# include "platform/switch_m68k_gcc.h" /* gcc on m68k */ +#elif defined(__GNUC__) && defined(__csky__) +#include "platform/switch_csky_gcc.h" /* gcc on csky */ +# elif 
defined(__GNUC__) && defined(__riscv) +# include "platform/switch_riscv_unix.h" /* gcc on RISC-V */ +#elif defined(__GNUC__) && defined(__alpha__) +# include "platform/switch_alpha_unix.h" /* gcc on DEC Alpha */ +#elif defined(MS_WIN32) && defined(__llvm__) && defined(__aarch64__) +# include "platform/switch_aarch64_gcc.h" /* LLVM Aarch64 ABI for Windows */ +#elif defined(__GNUC__) && defined(__loongarch64) && defined(__linux__) +# include "platform/switch_loongarch64_linux.h" /* LoongArch64 */ +#elif defined(__GNUC__) && defined(__sh__) +# include "platform/switch_sh_gcc.h" /* SuperH */ +#endif + +#ifdef __cplusplus +}; +#endif diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/__init__.py b/venv/lib/python3.11/site-packages/greenlet/tests/__init__.py new file mode 100644 index 0000000..5929f2a --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/tests/__init__.py @@ -0,0 +1,240 @@ +# -*- coding: utf-8 -*- +""" +Tests for greenlet. + +""" +import os +import sys +import unittest + +from gc import collect +from gc import get_objects +from threading import active_count as active_thread_count +from time import sleep +from time import time + +import psutil + +from greenlet import greenlet as RawGreenlet +from greenlet import getcurrent + +from greenlet._greenlet import get_pending_cleanup_count +from greenlet._greenlet import get_total_main_greenlets + +from . import leakcheck + +PY312 = sys.version_info[:2] >= (3, 12) +PY313 = sys.version_info[:2] >= (3, 13) +# XXX: First tested on 3.14a7. Revisit all uses of this on later versions to ensure they +# are still valid. +PY314 = sys.version_info[:2] >= (3, 14) + +WIN = sys.platform.startswith("win") +RUNNING_ON_GITHUB_ACTIONS = os.environ.get('GITHUB_ACTIONS') +RUNNING_ON_TRAVIS = os.environ.get('TRAVIS') or RUNNING_ON_GITHUB_ACTIONS +RUNNING_ON_APPVEYOR = os.environ.get('APPVEYOR') +RUNNING_ON_CI = RUNNING_ON_TRAVIS or RUNNING_ON_APPVEYOR +RUNNING_ON_MANYLINUX = os.environ.get('GREENLET_MANYLINUX') + +class TestCaseMetaClass(type): + # wrap each test method with + # a) leak checks + def __new__(cls, classname, bases, classDict): + # pylint and pep8 fight over what this should be called (mcs or cls). + # pylint gets it right, but we can't scope disable pep8, so we go with + # its convention. + # pylint: disable=bad-mcs-classmethod-argument + check_totalrefcount = True + + # Python 3: must copy, we mutate the classDict. Interestingly enough, + # it doesn't actually error out, but under 3.6 we wind up wrapping + # and re-wrapping the same items over and over and over. + for key, value in list(classDict.items()): + if key.startswith('test') and callable(value): + classDict.pop(key) + if check_totalrefcount: + value = leakcheck.wrap_refcount(value) + classDict[key] = value + return type.__new__(cls, classname, bases, classDict) + + +class TestCase(unittest.TestCase, metaclass=TestCaseMetaClass): + + cleanup_attempt_sleep_duration = 0.001 + cleanup_max_sleep_seconds = 1 + + def wait_for_pending_cleanups(self, + initial_active_threads=None, + initial_main_greenlets=None): + initial_active_threads = initial_active_threads or self.threads_before_test + initial_main_greenlets = initial_main_greenlets or self.main_greenlets_before_test + sleep_time = self.cleanup_attempt_sleep_duration + # NOTE: This is racy! A Python-level thread object may be dead + # and gone, but the C thread may not yet have fired its + # destructors and added to the queue. There's no particular + # way to know that's about to happen. 
We try to watch the + # Python threads to make sure they, at least, have gone away. + # Counting the main greenlets, which we can easily do deterministically, + # also helps. + + # Always sleep at least once to let other threads run + sleep(sleep_time) + quit_after = time() + self.cleanup_max_sleep_seconds + # TODO: We could add an API that calls us back when a particular main greenlet is deleted? + # It would have to drop the GIL + while ( + get_pending_cleanup_count() + or active_thread_count() > initial_active_threads + or (not self.expect_greenlet_leak + and get_total_main_greenlets() > initial_main_greenlets)): + sleep(sleep_time) + if time() > quit_after: + print("Time limit exceeded.") + print("Threads: Waiting for only", initial_active_threads, + "-->", active_thread_count()) + print("MGlets : Waiting for only", initial_main_greenlets, + "-->", get_total_main_greenlets()) + break + collect() + + def count_objects(self, kind=list, exact_kind=True): + # pylint:disable=unidiomatic-typecheck + # Collect the garbage. + for _ in range(3): + collect() + if exact_kind: + return sum( + 1 + for x in get_objects() + if type(x) is kind + ) + # instances + return sum( + 1 + for x in get_objects() + if isinstance(x, kind) + ) + + greenlets_before_test = 0 + threads_before_test = 0 + main_greenlets_before_test = 0 + expect_greenlet_leak = False + + def count_greenlets(self): + """ + Find all the greenlets and subclasses tracked by the GC. + """ + return self.count_objects(RawGreenlet, False) + + def setUp(self): + # Ensure the main greenlet exists, otherwise the first test + # gets a false positive leak + super().setUp() + getcurrent() + self.threads_before_test = active_thread_count() + self.main_greenlets_before_test = get_total_main_greenlets() + self.wait_for_pending_cleanups(self.threads_before_test, self.main_greenlets_before_test) + self.greenlets_before_test = self.count_greenlets() + + def tearDown(self): + if getattr(self, 'skipTearDown', False): + return + + self.wait_for_pending_cleanups(self.threads_before_test, self.main_greenlets_before_test) + super().tearDown() + + def get_expected_returncodes_for_aborted_process(self): + import signal + # The child should be aborted in an unusual way. On POSIX + # platforms, this is done with abort() and signal.SIGABRT, + # which is reflected in a negative return value; however, on + # Windows, even though we observe the child print "Fatal + # Python error: Aborted" and in older versions of the C + # runtime "This application has requested the Runtime to + # terminate it in an unusual way," it always has an exit code + # of 3. This is interesting because 3 is the error code for + # ERROR_PATH_NOT_FOUND; BUT: the C runtime abort() function + # also uses this code. + # + # If we link to the static C library on Windows, the error + # code changes to '0xc0000409' (hex(3221226505)), which + # apparently is STATUS_STACK_BUFFER_OVERRUN; but "What this + # means is that nowadays when you get a + # STATUS_STACK_BUFFER_OVERRUN, it doesn’t actually mean that + # there is a stack buffer overrun. It just means that the + # application decided to terminate itself with great haste." + # + # + # On windows, we've also seen '0xc0000005' (hex(3221225477)). 
+ # That's "Access Violation" + # + # See + # https://devblogs.microsoft.com/oldnewthing/20110519-00/?p=10623 + # and + # https://docs.microsoft.com/en-us/previous-versions/k089yyh0(v=vs.140)?redirectedfrom=MSDN + # and + # https://devblogs.microsoft.com/oldnewthing/20190108-00/?p=100655 + expected_exit = ( + -signal.SIGABRT, + # But beginning on Python 3.11, the faulthandler + # that prints the C backtraces sometimes segfaults after + # reporting the exception but before printing the stack. + # This has only been seen on linux/gcc. + -signal.SIGSEGV, + ) if not WIN else ( + 3, + 0xc0000409, + 0xc0000005, + ) + return expected_exit + + def get_process_uss(self): + """ + Return the current process's USS in bytes. + + uss is available on Linux, macOS, Windows. Also known as + "Unique Set Size", this is the memory which is unique to a + process and which would be freed if the process was terminated + right now. + + If this is not supported by ``psutil``, this raises the + :exc:`unittest.SkipTest` exception. + """ + try: + return psutil.Process().memory_full_info().uss + except AttributeError as e: + raise unittest.SkipTest("uss not supported") from e + + def run_script(self, script_name, show_output=True): + import subprocess + script = os.path.join( + os.path.dirname(__file__), + script_name, + ) + + try: + return subprocess.check_output([sys.executable, script], + encoding='utf-8', + stderr=subprocess.STDOUT) + except subprocess.CalledProcessError as ex: + if show_output: + print('-----') + print('Failed to run script', script) + print('~~~~~') + print(ex.output) + print('------') + raise + + + def assertScriptRaises(self, script_name, exitcodes=None): + import subprocess + with self.assertRaises(subprocess.CalledProcessError) as exc: + output = self.run_script(script_name, show_output=False) + __traceback_info__ = output + # We're going to fail the assertion if we get here, at least + # preserve the output in the traceback. 
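+ # (The name ``__traceback_info__`` is a convention honored by some + # traceback formatters, e.g. zope.exceptions; stock unittest simply + # ignores it.)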
+ + if exitcodes is None: + exitcodes = self.get_expected_returncodes_for_aborted_process() + self.assertIn(exc.exception.returncode, exitcodes) + return exc.exception diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..fa07118 Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/fail_clearing_run_switches.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/fail_clearing_run_switches.cpython-311.pyc new file mode 100644 index 0000000..9dbc2d5 Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/fail_clearing_run_switches.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/fail_cpp_exception.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/fail_cpp_exception.cpython-311.pyc new file mode 100644 index 0000000..1cfa86a Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/fail_cpp_exception.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/fail_initialstub_already_started.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/fail_initialstub_already_started.cpython-311.pyc new file mode 100644 index 0000000..c93d0e2 Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/fail_initialstub_already_started.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/fail_slp_switch.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/fail_slp_switch.cpython-311.pyc new file mode 100644 index 0000000..4cba5a9 Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/fail_slp_switch.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/fail_switch_three_greenlets.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/fail_switch_three_greenlets.cpython-311.pyc new file mode 100644 index 0000000..0692b9d Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/fail_switch_three_greenlets.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/fail_switch_three_greenlets2.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/fail_switch_three_greenlets2.cpython-311.pyc new file mode 100644 index 0000000..8889596 Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/fail_switch_three_greenlets2.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/fail_switch_two_greenlets.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/fail_switch_two_greenlets.cpython-311.pyc new file mode 100644 index 0000000..0d3b68f Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/fail_switch_two_greenlets.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/leakcheck.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/leakcheck.cpython-311.pyc new file mode 100644 index 
0000000..afeed66 Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/leakcheck.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_contextvars.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_contextvars.cpython-311.pyc new file mode 100644 index 0000000..6d352a7 Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_contextvars.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_cpp.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_cpp.cpython-311.pyc new file mode 100644 index 0000000..e02f744 Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_cpp.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_extension_interface.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_extension_interface.cpython-311.pyc new file mode 100644 index 0000000..db62292 Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_extension_interface.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_gc.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_gc.cpython-311.pyc new file mode 100644 index 0000000..8a0aeb8 Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_gc.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_generator.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_generator.cpython-311.pyc new file mode 100644 index 0000000..5001f90 Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_generator.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_generator_nested.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_generator_nested.cpython-311.pyc new file mode 100644 index 0000000..4a83b57 Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_generator_nested.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_greenlet.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_greenlet.cpython-311.pyc new file mode 100644 index 0000000..43949b7 Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_greenlet.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_greenlet_trash.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_greenlet_trash.cpython-311.pyc new file mode 100644 index 0000000..547a322 Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_greenlet_trash.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_leaks.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_leaks.cpython-311.pyc new file mode 100644 index 0000000..2a74f76 Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_leaks.cpython-311.pyc differ diff --git 
a/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_stack_saved.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_stack_saved.cpython-311.pyc new file mode 100644 index 0000000..d16f9a4 Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_stack_saved.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_throw.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_throw.cpython-311.pyc new file mode 100644 index 0000000..f5ab71a Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_throw.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_tracing.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_tracing.cpython-311.pyc new file mode 100644 index 0000000..c24a285 Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_tracing.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_version.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_version.cpython-311.pyc new file mode 100644 index 0000000..50e2f8e Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_version.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_weakref.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_weakref.cpython-311.pyc new file mode 100644 index 0000000..2fa39a0 Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/__pycache__/test_weakref.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/_test_extension.c b/venv/lib/python3.11/site-packages/greenlet/tests/_test_extension.c new file mode 100644 index 0000000..05e81c0 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/tests/_test_extension.c @@ -0,0 +1,231 @@ +/* This is a set of functions used by test_extension_interface.py to test the + * Greenlet C API. 
+ */ + +#include "../greenlet.h" + +#ifndef Py_RETURN_NONE +# define Py_RETURN_NONE return Py_INCREF(Py_None), Py_None +#endif + +#define TEST_MODULE_NAME "_test_extension" + +static PyObject* +test_switch(PyObject* self, PyObject* greenlet) +{ + PyObject* result = NULL; + + if (greenlet == NULL || !PyGreenlet_Check(greenlet)) { + PyErr_BadArgument(); + return NULL; + } + + result = PyGreenlet_Switch((PyGreenlet*)greenlet, NULL, NULL); + if (result == NULL) { + if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_AssertionError, + "greenlet.switch() failed for some reason."); + } + return NULL; + } + Py_INCREF(result); + return result; +} + +static PyObject* +test_switch_kwargs(PyObject* self, PyObject* args, PyObject* kwargs) +{ + PyGreenlet* g = NULL; + PyObject* result = NULL; + + PyArg_ParseTuple(args, "O!", &PyGreenlet_Type, &g); + + if (g == NULL || !PyGreenlet_Check(g)) { + PyErr_BadArgument(); + return NULL; + } + + result = PyGreenlet_Switch(g, NULL, kwargs); + if (result == NULL) { + if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_AssertionError, + "greenlet.switch() failed for some reason."); + } + return NULL; + } + Py_XINCREF(result); + return result; +} + +static PyObject* +test_getcurrent(PyObject* self) +{ + PyGreenlet* g = PyGreenlet_GetCurrent(); + if (g == NULL || !PyGreenlet_Check(g) || !PyGreenlet_ACTIVE(g)) { + PyErr_SetString(PyExc_AssertionError, + "getcurrent() returned an invalid greenlet"); + Py_XDECREF(g); + return NULL; + } + Py_DECREF(g); + Py_RETURN_NONE; +} + +static PyObject* +test_setparent(PyObject* self, PyObject* arg) +{ + PyGreenlet* current; + PyGreenlet* greenlet = NULL; + + if (arg == NULL || !PyGreenlet_Check(arg)) { + PyErr_BadArgument(); + return NULL; + } + if ((current = PyGreenlet_GetCurrent()) == NULL) { + return NULL; + } + greenlet = (PyGreenlet*)arg; + if (PyGreenlet_SetParent(greenlet, current)) { + Py_DECREF(current); + return NULL; + } + Py_DECREF(current); + if (PyGreenlet_Switch(greenlet, NULL, NULL) == NULL) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject* +test_new_greenlet(PyObject* self, PyObject* callable) +{ + PyObject* result = NULL; + PyGreenlet* greenlet = PyGreenlet_New(callable, NULL); + + if (!greenlet) { + return NULL; + } + + result = PyGreenlet_Switch(greenlet, NULL, NULL); + Py_CLEAR(greenlet); + if (result == NULL) { + return NULL; + } + + Py_INCREF(result); + return result; +} + +static PyObject* +test_raise_dead_greenlet(PyObject* self) +{ + PyErr_SetString(PyExc_GreenletExit, "test GreenletExit exception."); + return NULL; +} + +static PyObject* +test_raise_greenlet_error(PyObject* self) +{ + PyErr_SetString(PyExc_GreenletError, "test greenlet.error exception"); + return NULL; +} + +static PyObject* +test_throw(PyObject* self, PyGreenlet* g) +{ + const char msg[] = "take that sucka!"; + PyObject* msg_obj = Py_BuildValue("s", msg); + PyGreenlet_Throw(g, PyExc_ValueError, msg_obj, NULL); + Py_DECREF(msg_obj); + if (PyErr_Occurred()) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject* +test_throw_exact(PyObject* self, PyObject* args) +{ + PyGreenlet* g = NULL; + PyObject* typ = NULL; + PyObject* val = NULL; + PyObject* tb = NULL; + + if (!PyArg_ParseTuple(args, "OOOO:throw", &g, &typ, &val, &tb)) { + return NULL; + } + + PyGreenlet_Throw(g, typ, val, tb); + if (PyErr_Occurred()) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyMethodDef test_methods[] = { + {"test_switch", + (PyCFunction)test_switch, + METH_O, + "Switch to the provided greenlet sending provided arguments, and \n" + "return the 
results."}, + {"test_switch_kwargs", + (PyCFunction)test_switch_kwargs, + METH_VARARGS | METH_KEYWORDS, + "Switch to the provided greenlet sending the provided keyword args."}, + {"test_getcurrent", + (PyCFunction)test_getcurrent, + METH_NOARGS, + "Test PyGreenlet_GetCurrent()"}, + {"test_setparent", + (PyCFunction)test_setparent, + METH_O, + "Se the parent of the provided greenlet and switch to it."}, + {"test_new_greenlet", + (PyCFunction)test_new_greenlet, + METH_O, + "Test PyGreenlet_New()"}, + {"test_raise_dead_greenlet", + (PyCFunction)test_raise_dead_greenlet, + METH_NOARGS, + "Just raise greenlet.GreenletExit"}, + {"test_raise_greenlet_error", + (PyCFunction)test_raise_greenlet_error, + METH_NOARGS, + "Just raise greenlet.error"}, + {"test_throw", + (PyCFunction)test_throw, + METH_O, + "Throw a ValueError at the provided greenlet"}, + {"test_throw_exact", + (PyCFunction)test_throw_exact, + METH_VARARGS, + "Throw exactly the arguments given at the provided greenlet"}, + {NULL, NULL, 0, NULL} +}; + + +#define INITERROR return NULL + +static struct PyModuleDef moduledef = {PyModuleDef_HEAD_INIT, + TEST_MODULE_NAME, + NULL, + 0, + test_methods, + NULL, + NULL, + NULL, + NULL}; + +PyMODINIT_FUNC +PyInit__test_extension(void) +{ + PyObject* module = NULL; + module = PyModule_Create(&moduledef); + + if (module == NULL) { + return NULL; + } + + PyGreenlet_Import(); + return module; +} diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/_test_extension.cpython-311-x86_64-linux-gnu.so b/venv/lib/python3.11/site-packages/greenlet/tests/_test_extension.cpython-311-x86_64-linux-gnu.so new file mode 100644 index 0000000..902c76a Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/_test_extension.cpython-311-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/_test_extension_cpp.cpp b/venv/lib/python3.11/site-packages/greenlet/tests/_test_extension_cpp.cpp new file mode 100644 index 0000000..5cbe6a7 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/tests/_test_extension_cpp.cpp @@ -0,0 +1,226 @@ +/* This is a set of functions used to test C++ exceptions are not + * broken during greenlet switches + */ + +#include "../greenlet.h" +#include "../greenlet_compiler_compat.hpp" +#include +#include + +struct exception_t { + int depth; + exception_t(int depth) : depth(depth) {} +}; + +/* Functions are called via pointers to prevent inlining */ +static void (*p_test_exception_throw_nonstd)(int depth); +static void (*p_test_exception_throw_std)(); +static PyObject* (*p_test_exception_switch_recurse)(int depth, int left); + +static void +test_exception_throw_nonstd(int depth) +{ + throw exception_t(depth); +} + +static void +test_exception_throw_std() +{ + throw std::runtime_error("Thrown from an extension."); +} + +static PyObject* +test_exception_switch_recurse(int depth, int left) +{ + if (left > 0) { + return p_test_exception_switch_recurse(depth, left - 1); + } + + PyObject* result = NULL; + PyGreenlet* self = PyGreenlet_GetCurrent(); + if (self == NULL) + return NULL; + + try { + if (PyGreenlet_Switch(PyGreenlet_GET_PARENT(self), NULL, NULL) == NULL) { + Py_DECREF(self); + return NULL; + } + p_test_exception_throw_nonstd(depth); + PyErr_SetString(PyExc_RuntimeError, + "throwing C++ exception didn't work"); + } + catch (const exception_t& e) { + if (e.depth != depth) + PyErr_SetString(PyExc_AssertionError, "depth mismatch"); + else + result = PyLong_FromLong(depth); + } + catch (...) 
{ + PyErr_SetString(PyExc_RuntimeError, "unexpected C++ exception"); + } + + Py_DECREF(self); + return result; +} + +/* test_exception_switch(int depth) + * - recurses depth times + * - switches to parent inside try/catch block + * - throws an exception that is expected to be caught in the same function + * - verifies depth matches (exceptions shouldn't be caught in other greenlets) + */ +static PyObject* +test_exception_switch(PyObject* UNUSED(self), PyObject* args) +{ + int depth; + if (!PyArg_ParseTuple(args, "i", &depth)) + return NULL; + return p_test_exception_switch_recurse(depth, depth); +} + + +static PyObject* +py_test_exception_throw_nonstd(PyObject* self, PyObject* args) +{ + if (!PyArg_ParseTuple(args, "")) + return NULL; + p_test_exception_throw_nonstd(0); + PyErr_SetString(PyExc_AssertionError, "unreachable code running after throw"); + return NULL; +} + +static PyObject* +py_test_exception_throw_std(PyObject* self, PyObject* args) +{ + if (!PyArg_ParseTuple(args, "")) + return NULL; + p_test_exception_throw_std(); + PyErr_SetString(PyExc_AssertionError, "unreachable code running after throw"); + return NULL; +} + +static PyObject* +py_test_call(PyObject* self, PyObject* arg) +{ + PyObject* noargs = PyTuple_New(0); + PyObject* ret = PyObject_Call(arg, noargs, nullptr); + Py_DECREF(noargs); + return ret; +} + + + +/* test_exception_switch_and_do_in_g2(g2func) + * - creates new greenlet g2 to run g2func + * - switches to g2 inside try/catch block + * - verifies that no exception has been caught + * + * It is used together with test_exception_throw to verify that unhandled + * exceptions thrown in one greenlet do not propagate to other greenlets nor + * segfault the process. + */ +static PyObject* +test_exception_switch_and_do_in_g2(PyObject* self, PyObject* args) +{ + PyObject* g2func = NULL; + PyObject* result = NULL; + + if (!PyArg_ParseTuple(args, "O", &g2func)) + return NULL; + PyGreenlet* g2 = PyGreenlet_New(g2func, NULL); + if (!g2) { + return NULL; + } + + try { + result = PyGreenlet_Switch(g2, NULL, NULL); + if (!result) { + return NULL; + } + } + catch (const exception_t& e) { + /* If we get here the memory may already be corrupted and the program + * might crash before the Python-level exception below gets printed. + * -> print something to stderr to make it clear that we had entered + * this catch block. + * See comments in inner_bootstrap() + */ +#if defined(WIN32) || defined(_WIN32) + fprintf(stderr, "C++ exception unexpectedly caught in g1\n"); + PyErr_SetString(PyExc_AssertionError, "C++ exception unexpectedly caught in g1"); + Py_XDECREF(result); + return NULL; +#else + throw; +#endif + } + + Py_XDECREF(result); + Py_RETURN_NONE; +} + +static PyMethodDef test_methods[] = { + {"test_exception_switch", + (PyCFunction)&test_exception_switch, + METH_VARARGS, + "Switches to parent twice, to test exception handling and greenlet " + "switching."}, + {"test_exception_switch_and_do_in_g2", + (PyCFunction)&test_exception_switch_and_do_in_g2, + METH_VARARGS, + "Creates new greenlet g2 to run g2func and switches to it inside try/catch " + "block. Used together with test_exception_throw to verify that unhandled " + "C++ exceptions thrown in a greenlet do not corrupt memory."}, + {"test_exception_throw_nonstd", + (PyCFunction)&py_test_exception_throw_nonstd, + METH_VARARGS, + "Throws non-standard C++ exception. Calling this function directly should abort the process."
+ }, + {"test_exception_throw_std", + (PyCFunction)&py_test_exception_throw_std, + METH_VARARGS, + "Throws standard C++ exception. Calling this function directly should abort the process." + }, + {"test_call", + (PyCFunction)&py_test_call, + METH_O, + "Call the given callable. Unlike calling it directly, this creates a " + "new C-level stack frame, which may be helpful in testing." + }, + {NULL, NULL, 0, NULL} +}; + + +static struct PyModuleDef moduledef = {PyModuleDef_HEAD_INIT, + "greenlet.tests._test_extension_cpp", + NULL, + 0, + test_methods, + NULL, + NULL, + NULL, + NULL}; + +PyMODINIT_FUNC +PyInit__test_extension_cpp(void) +{ + PyObject* module = NULL; + + module = PyModule_Create(&moduledef); + + if (module == NULL) { + return NULL; + } + + PyGreenlet_Import(); + if (_PyGreenlet_API == NULL) { + return NULL; + } + + p_test_exception_throw_nonstd = test_exception_throw_nonstd; + p_test_exception_throw_std = test_exception_throw_std; + p_test_exception_switch_recurse = test_exception_switch_recurse; + + return module; +} diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/_test_extension_cpp.cpython-311-x86_64-linux-gnu.so b/venv/lib/python3.11/site-packages/greenlet/tests/_test_extension_cpp.cpython-311-x86_64-linux-gnu.so new file mode 100644 index 0000000..eaafde3 Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/tests/_test_extension_cpp.cpython-311-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/fail_clearing_run_switches.py b/venv/lib/python3.11/site-packages/greenlet/tests/fail_clearing_run_switches.py new file mode 100644 index 0000000..6dd1492 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/tests/fail_clearing_run_switches.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +""" +If we have a run callable passed to the constructor or set as an +attribute, but we don't actually use that (because ``__getattribute__`` +or the like interferes), then when we clear callable before beginning +to run, there's an opportunity for Python code to run. + +""" +import greenlet + +g = None +main = greenlet.getcurrent() + +results = [] + +class RunCallable: + + def __del__(self): + results.append(('RunCallable', '__del__')) + main.switch('from RunCallable') + + +class G(greenlet.greenlet): + + def __getattribute__(self, name): + if name == 'run': + results.append(('G.__getattribute__', 'run')) + return run_func + return object.__getattribute__(self, name) + + +def run_func(): + results.append(('run_func', 'enter')) + + +g = G(RunCallable()) +# Try to start G. It will get to the point where it deletes +# its run callable C++ variable in inner_bootstrap. That triggers +# the __del__ method, which switches back to main before g +# actually even starts running. +x = g.switch() +results.append(('main: g.switch()', x)) +# In the C++ code, this results in g->g_switch() appearing to return, even though +# it has yet to run. +print('In main with', x, flush=True) +g.switch() +print('RESULTS', results) diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/fail_cpp_exception.py b/venv/lib/python3.11/site-packages/greenlet/tests/fail_cpp_exception.py new file mode 100644 index 0000000..fa4dc2e --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/tests/fail_cpp_exception.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +""" +Helper for testing a C++ exception throw aborts the process. + +Takes one argument, the name of the function in :mod:`_test_extension_cpp` to call. 
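+ +For example, the test suite runs it as a child process, roughly: + + python fail_cpp_exception.py run_unhandled_exception_in_greenlet_aborts + +where the argument is an attribute of :mod:`_test_extension_cpp` or one of +the two special names handled below.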
+""" +import sys +import greenlet +from greenlet.tests import _test_extension_cpp +print('fail_cpp_exception is running') + +def run_unhandled_exception_in_greenlet_aborts(): + def _(): + _test_extension_cpp.test_exception_switch_and_do_in_g2( + _test_extension_cpp.test_exception_throw_nonstd + ) + g1 = greenlet.greenlet(_) + g1.switch() + + +func_name = sys.argv[1] +try: + func = getattr(_test_extension_cpp, func_name) +except AttributeError: + if func_name == run_unhandled_exception_in_greenlet_aborts.__name__: + func = run_unhandled_exception_in_greenlet_aborts + elif func_name == 'run_as_greenlet_target': + g = greenlet.greenlet(_test_extension_cpp.test_exception_throw_std) + func = g.switch + else: + raise +print('raising', func, flush=True) +func() diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/fail_initialstub_already_started.py b/venv/lib/python3.11/site-packages/greenlet/tests/fail_initialstub_already_started.py new file mode 100644 index 0000000..c1a44ef --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/tests/fail_initialstub_already_started.py @@ -0,0 +1,78 @@ +""" +Testing initialstub throwing an already started exception. +""" + +import greenlet + +a = None +b = None +c = None +main = greenlet.getcurrent() + +# If we switch into a dead greenlet, +# we go looking for its parents. +# if a parent is not yet started, we start it. + +results = [] + +def a_run(*args): + #results.append('A') + results.append(('Begin A', args)) + + +def c_run(): + results.append('Begin C') + b.switch('From C') + results.append('C done') + +class A(greenlet.greenlet): pass + +class B(greenlet.greenlet): + doing_it = False + def __getattribute__(self, name): + if name == 'run' and not self.doing_it: + assert greenlet.getcurrent() is c + self.doing_it = True + results.append('Switch to b from B.__getattribute__ in ' + + type(greenlet.getcurrent()).__name__) + b.switch() + results.append('B.__getattribute__ back from main in ' + + type(greenlet.getcurrent()).__name__) + if name == 'run': + name = '_B_run' + return object.__getattribute__(self, name) + + def _B_run(self, *arg): + results.append(('Begin B', arg)) + results.append('_B_run switching to main') + main.switch('From B') + +class C(greenlet.greenlet): + pass +a = A(a_run) +b = B(parent=a) +c = C(c_run, b) + +# Start a child; while running, it will start B, +# but starting B will ALSO start B. +result = c.switch() +results.append(('main from c', result)) + +# Switch back to C, which was in the middle of switching +# already. This will throw the ``GreenletStartedWhileInPython`` +# exception, which results in parent A getting started (B is finished) +c.switch() + +results.append(('A dead?', a.dead, 'B dead?', b.dead, 'C dead?', c.dead)) + +# A and B should both be dead now. +assert a.dead +assert b.dead +assert not c.dead + +result = c.switch() +results.append(('main from c.2', result)) +# Now C is dead +assert c.dead + +print("RESULTS:", results) diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/fail_slp_switch.py b/venv/lib/python3.11/site-packages/greenlet/tests/fail_slp_switch.py new file mode 100644 index 0000000..0990526 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/tests/fail_slp_switch.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +""" +A test helper for seeing what happens when slp_switch() +fails. 
+""" +# pragma: no cover + +import greenlet + + +print('fail_slp_switch is running', flush=True) + +runs = [] +def func(): + runs.append(1) + greenlet.getcurrent().parent.switch() + runs.append(2) + greenlet.getcurrent().parent.switch() + runs.append(3) + +g = greenlet._greenlet.UnswitchableGreenlet(func) +g.switch() +assert runs == [1] +g.switch() +assert runs == [1, 2] +g.force_slp_switch_error = True + +# This should crash. +g.switch() diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/fail_switch_three_greenlets.py b/venv/lib/python3.11/site-packages/greenlet/tests/fail_switch_three_greenlets.py new file mode 100644 index 0000000..e151b19 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/tests/fail_switch_three_greenlets.py @@ -0,0 +1,44 @@ +""" +Uses a trace function to switch greenlets at unexpected times. + +In the trace function, we switch from the current greenlet to another +greenlet, which switches +""" +import greenlet + +g1 = None +g2 = None + +switch_to_g2 = False + +def tracefunc(*args): + print('TRACE', *args) + global switch_to_g2 + if switch_to_g2: + switch_to_g2 = False + g2.switch() + print('\tLEAVE TRACE', *args) + +def g1_run(): + print('In g1_run') + global switch_to_g2 + switch_to_g2 = True + from_parent = greenlet.getcurrent().parent.switch() + print('Return to g1_run') + print('From parent', from_parent) + +def g2_run(): + #g1.switch() + greenlet.getcurrent().parent.switch() + +greenlet.settrace(tracefunc) + +g1 = greenlet.greenlet(g1_run) +g2 = greenlet.greenlet(g2_run) + +# This switch didn't actually finish! +# And if it did, it would raise TypeError +# because g1_run() doesn't take any arguments. +g1.switch(1) +print('Back in main') +g1.switch(2) diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/fail_switch_three_greenlets2.py b/venv/lib/python3.11/site-packages/greenlet/tests/fail_switch_three_greenlets2.py new file mode 100644 index 0000000..1f6b66b --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/tests/fail_switch_three_greenlets2.py @@ -0,0 +1,55 @@ +""" +Like fail_switch_three_greenlets, but the call into g1_run would actually be +valid. 
+""" +import greenlet + +g1 = None +g2 = None + +switch_to_g2 = True + +results = [] + +def tracefunc(*args): + results.append(('trace', args[0])) + print('TRACE', *args) + global switch_to_g2 + if switch_to_g2: + switch_to_g2 = False + g2.switch('g2 from tracefunc') + print('\tLEAVE TRACE', *args) + +def g1_run(arg): + results.append(('g1 arg', arg)) + print('In g1_run') + from_parent = greenlet.getcurrent().parent.switch('from g1_run') + results.append(('g1 from parent', from_parent)) + return 'g1 done' + +def g2_run(arg): + #g1.switch() + results.append(('g2 arg', arg)) + parent = greenlet.getcurrent().parent.switch('from g2_run') + global switch_to_g2 + switch_to_g2 = False + results.append(('g2 from parent', parent)) + return 'g2 done' + + +greenlet.settrace(tracefunc) + +g1 = greenlet.greenlet(g1_run) +g2 = greenlet.greenlet(g2_run) + +x = g1.switch('g1 from main') +results.append(('main g1', x)) +print('Back in main', x) +x = g1.switch('g2 from main') +results.append(('main g2', x)) +print('back in amain again', x) +x = g1.switch('g1 from main 2') +results.append(('main g1.2', x)) +x = g2.switch() +results.append(('main g2.2', x)) +print("RESULTS:", results) diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/fail_switch_two_greenlets.py b/venv/lib/python3.11/site-packages/greenlet/tests/fail_switch_two_greenlets.py new file mode 100644 index 0000000..3e52345 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/tests/fail_switch_two_greenlets.py @@ -0,0 +1,41 @@ +""" +Uses a trace function to switch greenlets at unexpected times. + +In the trace function, we switch from the current greenlet to another +greenlet, which switches +""" +import greenlet + +g1 = None +g2 = None + +switch_to_g2 = False + +def tracefunc(*args): + print('TRACE', *args) + global switch_to_g2 + if switch_to_g2: + switch_to_g2 = False + g2.switch() + print('\tLEAVE TRACE', *args) + +def g1_run(): + print('In g1_run') + global switch_to_g2 + switch_to_g2 = True + greenlet.getcurrent().parent.switch() + print('Return to g1_run') + print('Falling off end of g1_run') + +def g2_run(): + g1.switch() + print('Falling off end of g2') + +greenlet.settrace(tracefunc) + +g1 = greenlet.greenlet(g1_run) +g2 = greenlet.greenlet(g2_run) + +g1.switch() +print('Falling off end of main') +g2.switch() diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/leakcheck.py b/venv/lib/python3.11/site-packages/greenlet/tests/leakcheck.py new file mode 100644 index 0000000..a5152fb --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/tests/leakcheck.py @@ -0,0 +1,319 @@ +# Copyright (c) 2018 gevent community +# Copyright (c) 2021 greenlet community +# +# This was originally part of gevent's test suite. The main author +# (Jason Madden) vendored a copy of it into greenlet. +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +from __future__ import print_function + +import os +import sys +import gc + +from functools import wraps +import unittest + + +import objgraph + +# graphviz 0.18 (Nov 7 2021), available only on Python 3.6 and newer, +# has added type hints (sigh). It wants to use ``typing.Literal`` for +# some stuff, but that's only available on Python 3.9+. If that's not +# found, it creates a ``unittest.mock.MagicMock`` object and annotates +# with that. These are GC'able objects, and doing almost *anything* +# with them results in an explosion of objects. For example, trying to +# compare them for equality creates new objects. This causes our +# leakchecks to fail, with reports like: +# +# greenlet.tests.leakcheck.LeakCheckError: refcount increased by [337, 1333, 343, 430, 530, 643, 769] +# _Call 1820 +546 +# dict 4094 +76 +# MagicProxy 585 +73 +# tuple 2693 +66 +# _CallList 24 +3 +# weakref 1441 +1 +# function 5996 +1 +# type 736 +1 +# cell 592 +1 +# MagicMock 8 +1 +# +# To avoid this, we *could* filter this type of object out early. In +# principle it could leak, but we don't use mocks in greenlet, so it +# doesn't leak from us. However, a further issue is that ``MagicMock`` +# objects have subobjects that are also GC'able, like ``_Call``, and +# those create new mocks of their own too. So we'd have to filter them +# as well, and they're not public. That's OK, we can workaround the +# problem by being very careful to never compare by equality or other +# user-defined operators, only using object identity or other builtin +# functions. + +RUNNING_ON_GITHUB_ACTIONS = os.environ.get('GITHUB_ACTIONS') +RUNNING_ON_TRAVIS = os.environ.get('TRAVIS') or RUNNING_ON_GITHUB_ACTIONS +RUNNING_ON_APPVEYOR = os.environ.get('APPVEYOR') +RUNNING_ON_CI = RUNNING_ON_TRAVIS or RUNNING_ON_APPVEYOR +RUNNING_ON_MANYLINUX = os.environ.get('GREENLET_MANYLINUX') +SKIP_LEAKCHECKS = RUNNING_ON_MANYLINUX or os.environ.get('GREENLET_SKIP_LEAKCHECKS') +SKIP_FAILING_LEAKCHECKS = os.environ.get('GREENLET_SKIP_FAILING_LEAKCHECKS') +ONLY_FAILING_LEAKCHECKS = os.environ.get('GREENLET_ONLY_FAILING_LEAKCHECKS') + +def ignores_leakcheck(func): + """ + Ignore the given object during leakchecks. + + Can be applied to a method, in which case the method will run, but + will not be subject to leak checks. + + If applied to a class, the entire class will be skipped during leakchecks. This + is intended to be used for classes that are very slow and cause problems such as + test timeouts; typically it will be used for classes that are subclasses of a base + class and specify variants of behaviour (such as pool sizes). + """ + func.ignore_leakcheck = True + return func + +def fails_leakcheck(func): + """ + Mark that the function is known to leak. 
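+ +If ``GREENLET_SKIP_FAILING_LEAKCHECKS`` is set in the environment, such a +test is skipped instead of run. A sketch of typical usage (the test name is +hypothetical):: + + @fails_leakcheck + def test_that_is_known_to_leak(self): + ...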
+ """ + func.fails_leakcheck = True + if SKIP_FAILING_LEAKCHECKS: + func = unittest.skip("Skipping known failures")(func) + return func + +class LeakCheckError(AssertionError): + pass + +if hasattr(sys, 'getobjects'): + # In a Python build with ``--with-trace-refs``, make objgraph + # trace *all* the objects, not just those that are tracked by the + # GC + class _MockGC(object): + def get_objects(self): + return sys.getobjects(0) # pylint:disable=no-member + def __getattr__(self, name): + return getattr(gc, name) + objgraph.gc = _MockGC() + fails_strict_leakcheck = fails_leakcheck +else: + def fails_strict_leakcheck(func): + """ + Decorator for a function that is known to fail when running + strict (``sys.getobjects()``) leakchecks. + + This type of leakcheck finds all objects, even those, such as + strings, which are not tracked by the garbage collector. + """ + return func + +class ignores_types_in_strict_leakcheck(object): + def __init__(self, types): + self.types = types + def __call__(self, func): + func.leakcheck_ignore_types = self.types + return func + +class _RefCountChecker(object): + + # Some builtin things that we ignore + # XXX: Those things were ignored by gevent, but they're important here, + # presumably. + IGNORED_TYPES = () #(tuple, dict, types.FrameType, types.TracebackType) + + def __init__(self, testcase, function): + self.testcase = testcase + self.function = function + self.deltas = [] + self.peak_stats = {} + self.ignored_types = () + + # The very first time we are called, we have already been + # self.setUp() by the test runner, so we don't need to do it again. + self.needs_setUp = False + + def _include_object_p(self, obj): + # pylint:disable=too-many-return-statements + # + # See the comment block at the top. We must be careful to + # avoid invoking user-defined operations. + if obj is self: + return False + kind = type(obj) + # ``self._include_object_p == obj`` returns NotImplemented + # for non-function objects, which causes the interpreter + # to try to reverse the order of arguments...which leads + # to the explosion of mock objects. We don't want that, so we implement + # the check manually. + if kind == type(self._include_object_p): + try: + # pylint:disable=not-callable + exact_method_equals = self._include_object_p.__eq__(obj) + except AttributeError: + # Python 2.7 methods may only have __cmp__, and that raises a + # TypeError for non-method arguments + # pylint:disable=no-member + exact_method_equals = self._include_object_p.__cmp__(obj) == 0 + + if exact_method_equals is not NotImplemented and exact_method_equals: + return False + + # Similarly, we need to check identity in our __dict__ to avoid mock explosions. 
+ for x in self.__dict__.values(): + if obj is x: + return False + + + if kind in self.ignored_types or kind in self.IGNORED_TYPES: + return False + + return True + + def _growth(self): + return objgraph.growth(limit=None, peak_stats=self.peak_stats, + filter=self._include_object_p) + + def _report_diff(self, growth): + if not growth: + return "" + + lines = [] + width = max(len(name) for name, _, _ in growth) + for name, count, delta in growth: + lines.append('%-*s%9d %+9d' % (width, name, count, delta)) + + diff = '\n'.join(lines) + return diff + + + def _run_test(self, args, kwargs): + gc_enabled = gc.isenabled() + gc.disable() + + if self.needs_setUp: + self.testcase.setUp() + self.testcase.skipTearDown = False + try: + self.function(self.testcase, *args, **kwargs) + finally: + self.testcase.tearDown() + self.testcase.doCleanups() + self.testcase.skipTearDown = True + self.needs_setUp = True + if gc_enabled: + gc.enable() + + def _growth_after(self): + # Grab post snapshot + # pylint:disable=no-member + if 'urlparse' in sys.modules: + sys.modules['urlparse'].clear_cache() + if 'urllib.parse' in sys.modules: + sys.modules['urllib.parse'].clear_cache() + + return self._growth() + + def _check_deltas(self, growth): + # Return false when we have decided there is no leak, + # true if we should keep looping, raises an assertion + # if we have decided there is a leak. + + deltas = self.deltas + if not deltas: + # We haven't run yet, no data, keep looping + return True + + if gc.garbage: + raise LeakCheckError("Generated uncollectable garbage %r" % (gc.garbage,)) + + + # the following configurations are classified as "no leak" + # [0, 0] + # [x, 0, 0] + # [... a, b, c, d] where a+b+c+d = 0 + # + # the following configurations are classified as "leak" + # [... z, z, z] where z > 0 + + if deltas[-2:] == [0, 0] and len(deltas) in (2, 3): + return False + + if deltas[-3:] == [0, 0, 0]: + return False + + if len(deltas) >= 4 and sum(deltas[-4:]) == 0: + return False + + if len(deltas) >= 3 and deltas[-1] > 0 and deltas[-1] == deltas[-2] and deltas[-2] == deltas[-3]: + diff = self._report_diff(growth) + raise LeakCheckError('refcount increased by %r\n%s' % (deltas, diff)) + + # OK, we don't know for sure yet. Let's search for more + if sum(deltas[-3:]) <= 0 or sum(deltas[-4:]) <= 0 or deltas[-4:].count(0) >= 2: + # this is suspicious, so give a few more runs + limit = 11 + else: + limit = 7 + if len(deltas) >= limit: + raise LeakCheckError('refcount increased by %r\n%s' + % (deltas, + self._report_diff(growth))) + + # We couldn't decide yet, keep going + return True + + def __call__(self, args, kwargs): + for _ in range(3): + gc.collect() + + expect_failure = getattr(self.function, 'fails_leakcheck', False) + if expect_failure: + self.testcase.expect_greenlet_leak = True + self.ignored_types = getattr(self.function, "leakcheck_ignore_types", ()) + + # Capture state before; the incremental will be + # updated by each call to _growth_after + growth = self._growth() + + try: + while self._check_deltas(growth): + self._run_test(args, kwargs) + + growth = self._growth_after() + + self.deltas.append(sum((stat[2] for stat in growth))) + except LeakCheckError: + if not expect_failure: + raise + else: + if expect_failure: + raise LeakCheckError("Expected %s to leak but it did not." 
% (self.function,)) + +def wrap_refcount(method): + if getattr(method, 'ignore_leakcheck', False) or SKIP_LEAKCHECKS: + return method + + @wraps(method) + def wrapper(self, *args, **kwargs): # pylint:disable=too-many-branches + if getattr(self, 'ignore_leakcheck', False): + raise unittest.SkipTest("This class ignored during leakchecks") + if ONLY_FAILING_LEAKCHECKS and not getattr(method, 'fails_leakcheck', False): + raise unittest.SkipTest("Only running tests that fail leakchecks.") + return _RefCountChecker(self, method)(args, kwargs) + + return wrapper diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/test_contextvars.py b/venv/lib/python3.11/site-packages/greenlet/tests/test_contextvars.py new file mode 100644 index 0000000..b0d1ccf --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/tests/test_contextvars.py @@ -0,0 +1,312 @@ +from __future__ import print_function + +import gc +import sys +import unittest + +from functools import partial +from unittest import skipUnless +from unittest import skipIf + +from greenlet import greenlet +from greenlet import getcurrent +from . import TestCase +from . import PY314 + +try: + from contextvars import Context + from contextvars import ContextVar + from contextvars import copy_context + # From the documentation: + # + # Important: Context Variables should be created at the top module + # level and never in closures. Context objects hold strong + # references to context variables which prevents context variables + # from being properly garbage collected. + ID_VAR = ContextVar("id", default=None) + VAR_VAR = ContextVar("var", default=None) + ContextVar = None +except ImportError: + Context = ContextVar = copy_context = None + +# We don't support testing if greenlet's built-in context var support is disabled. +@skipUnless(Context is not None, "ContextVar not supported") +class ContextVarsTests(TestCase): + def _new_ctx_run(self, *args, **kwargs): + return copy_context().run(*args, **kwargs) + + def _increment(self, greenlet_id, callback, counts, expect): + ctx_var = ID_VAR + if expect is None: + self.assertIsNone(ctx_var.get()) + else: + self.assertEqual(ctx_var.get(), expect) + ctx_var.set(greenlet_id) + for _ in range(2): + counts[ctx_var.get()] += 1 + callback() + + def _test_context(self, propagate_by): + # pylint:disable=too-many-branches + ID_VAR.set(0) + + callback = getcurrent().switch + counts = dict((i, 0) for i in range(5)) + + lets = [ + greenlet(partial( + partial( + copy_context().run, + self._increment + ) if propagate_by == "run" else self._increment, + greenlet_id=i, + callback=callback, + counts=counts, + expect=( + i - 1 if propagate_by == "share" else + 0 if propagate_by in ("set", "run") else None + ) + )) + for i in range(1, 5) + ] + + for let in lets: + if propagate_by == "set": + let.gr_context = copy_context() + elif propagate_by == "share": + let.gr_context = getcurrent().gr_context + + for i in range(2): + counts[ID_VAR.get()] += 1 + for let in lets: + let.switch() + + if propagate_by == "run": + # Must leave each context.run() in reverse order of entry + for let in reversed(lets): + let.switch() + else: + # No context.run(), so fine to exit in any order. + for let in lets: + let.switch() + + for let in lets: + self.assertTrue(let.dead) + # When using run(), we leave the run() as the greenlet dies, + # and there's no context "underneath". When not using run(), + # gr_context still reflects the context the greenlet was + # running in. 
+ if propagate_by == 'run': + self.assertIsNone(let.gr_context) + else: + self.assertIsNotNone(let.gr_context) + + + if propagate_by == "share": + self.assertEqual(counts, {0: 1, 1: 1, 2: 1, 3: 1, 4: 6}) + else: + self.assertEqual(set(counts.values()), set([2])) + + def test_context_propagated_by_context_run(self): + self._new_ctx_run(self._test_context, "run") + + def test_context_propagated_by_setting_attribute(self): + self._new_ctx_run(self._test_context, "set") + + def test_context_not_propagated(self): + self._new_ctx_run(self._test_context, None) + + def test_context_shared(self): + self._new_ctx_run(self._test_context, "share") + + def test_break_ctxvars(self): + let1 = greenlet(copy_context().run) + let2 = greenlet(copy_context().run) + let1.switch(getcurrent().switch) + let2.switch(getcurrent().switch) + # Since let2 entered the current context and let1 exits its own, the + # interpreter emits: + # RuntimeError: cannot exit context: thread state references a different context object + let1.switch() + + def test_not_broken_if_using_attribute_instead_of_context_run(self): + let1 = greenlet(getcurrent().switch) + let2 = greenlet(getcurrent().switch) + let1.gr_context = copy_context() + let2.gr_context = copy_context() + let1.switch() + let2.switch() + let1.switch() + let2.switch() + + def test_context_assignment_while_running(self): + # pylint:disable=too-many-statements + ID_VAR.set(None) + + def target(): + self.assertIsNone(ID_VAR.get()) + self.assertIsNone(gr.gr_context) + + # Context is created on first use + ID_VAR.set(1) + self.assertIsInstance(gr.gr_context, Context) + self.assertEqual(ID_VAR.get(), 1) + self.assertEqual(gr.gr_context[ID_VAR], 1) + + # Clearing the context makes it get re-created as another + # empty context when next used + old_context = gr.gr_context + gr.gr_context = None # assign None while running + self.assertIsNone(ID_VAR.get()) + self.assertIsNone(gr.gr_context) + ID_VAR.set(2) + self.assertIsInstance(gr.gr_context, Context) + self.assertEqual(ID_VAR.get(), 2) + self.assertEqual(gr.gr_context[ID_VAR], 2) + + new_context = gr.gr_context + getcurrent().parent.switch((old_context, new_context)) + # parent switches us back to old_context + + self.assertEqual(ID_VAR.get(), 1) + gr.gr_context = new_context # assign non-None while running + self.assertEqual(ID_VAR.get(), 2) + + getcurrent().parent.switch() + # parent switches us back to no context + self.assertIsNone(ID_VAR.get()) + self.assertIsNone(gr.gr_context) + gr.gr_context = old_context + self.assertEqual(ID_VAR.get(), 1) + + getcurrent().parent.switch() + # parent switches us back to no context + self.assertIsNone(ID_VAR.get()) + self.assertIsNone(gr.gr_context) + + gr = greenlet(target) + + with self.assertRaisesRegex(AttributeError, "can't delete context attribute"): + del gr.gr_context + + self.assertIsNone(gr.gr_context) + old_context, new_context = gr.switch() + self.assertIs(new_context, gr.gr_context) + self.assertEqual(old_context[ID_VAR], 1) + self.assertEqual(new_context[ID_VAR], 2) + self.assertEqual(new_context.run(ID_VAR.get), 2) + gr.gr_context = old_context # assign non-None while suspended + gr.switch() + self.assertIs(gr.gr_context, new_context) + gr.gr_context = None # assign None while suspended + gr.switch() + self.assertIs(gr.gr_context, old_context) + gr.gr_context = None + gr.switch() + self.assertIsNone(gr.gr_context) + + # Make sure there are no reference leaks + gr = None + gc.collect() + # Python 3.14 elides reference counting operations + # in some cases. 
See https://github.com/python/cpython/pull/130708 + self.assertEqual(sys.getrefcount(old_context), 2 if not PY314 else 1) + self.assertEqual(sys.getrefcount(new_context), 2 if not PY314 else 1) + + def test_context_assignment_different_thread(self): + import threading + VAR_VAR.set(None) + ctx = Context() + + is_running = threading.Event() + should_suspend = threading.Event() + did_suspend = threading.Event() + should_exit = threading.Event() + holder = [] + + def greenlet_in_thread_fn(): + VAR_VAR.set(1) + is_running.set() + should_suspend.wait(10) + VAR_VAR.set(2) + getcurrent().parent.switch() + holder.append(VAR_VAR.get()) + + def thread_fn(): + gr = greenlet(greenlet_in_thread_fn) + gr.gr_context = ctx + holder.append(gr) + gr.switch() + did_suspend.set() + should_exit.wait(10) + gr.switch() + del gr + greenlet() # trigger cleanup + + thread = threading.Thread(target=thread_fn, daemon=True) + thread.start() + is_running.wait(10) + gr = holder[0] + + # Can't access or modify context if the greenlet is running + # in a different thread + with self.assertRaisesRegex(ValueError, "running in a different"): + getattr(gr, 'gr_context') + with self.assertRaisesRegex(ValueError, "running in a different"): + gr.gr_context = None + + should_suspend.set() + did_suspend.wait(10) + + # OK to access and modify context if greenlet is suspended + self.assertIs(gr.gr_context, ctx) + self.assertEqual(gr.gr_context[VAR_VAR], 2) + gr.gr_context = None + + should_exit.set() + thread.join(10) + + self.assertEqual(holder, [gr, None]) + + # Context can still be accessed/modified when greenlet is dead: + self.assertIsNone(gr.gr_context) + gr.gr_context = ctx + self.assertIs(gr.gr_context, ctx) + + # Otherwise we leak greenlets on some platforms. + # XXX: Should be able to do this automatically + del holder[:] + gr = None + thread = None + + def test_context_assignment_wrong_type(self): + g = greenlet() + with self.assertRaisesRegex(TypeError, + "greenlet context must be a contextvars.Context or None"): + g.gr_context = self + + +@skipIf(Context is not None, "ContextVar supported") +class NoContextVarsTests(TestCase): + def test_contextvars_errors(self): + let1 = greenlet(getcurrent().switch) + self.assertFalse(hasattr(let1, 'gr_context')) + with self.assertRaises(AttributeError): + getattr(let1, 'gr_context') + + with self.assertRaises(AttributeError): + let1.gr_context = None + + let1.switch() + + with self.assertRaises(AttributeError): + getattr(let1, 'gr_context') + + with self.assertRaises(AttributeError): + let1.gr_context = None + + del let1 + + +if __name__ == '__main__': + unittest.main() diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/test_cpp.py b/venv/lib/python3.11/site-packages/greenlet/tests/test_cpp.py new file mode 100644 index 0000000..2d0cc9c --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/tests/test_cpp.py @@ -0,0 +1,73 @@ +from __future__ import print_function +from __future__ import absolute_import + +import subprocess +import unittest + +import greenlet +from . import _test_extension_cpp +from . import TestCase +from . 
import WIN + +class CPPTests(TestCase): + def test_exception_switch(self): + greenlets = [] + for i in range(4): + g = greenlet.greenlet(_test_extension_cpp.test_exception_switch) + g.switch(i) + greenlets.append(g) + for i, g in enumerate(greenlets): + self.assertEqual(g.switch(), i) + + def _do_test_unhandled_exception(self, target): + import os + import sys + script = os.path.join( + os.path.dirname(__file__), + 'fail_cpp_exception.py', + ) + args = [sys.executable, script, target.__name__ if not isinstance(target, str) else target] + __traceback_info__ = args + with self.assertRaises(subprocess.CalledProcessError) as exc: + subprocess.check_output( + args, + encoding='utf-8', + stderr=subprocess.STDOUT + ) + + ex = exc.exception + expected_exit = self.get_expected_returncodes_for_aborted_process() + self.assertIn(ex.returncode, expected_exit) + self.assertIn('fail_cpp_exception is running', ex.output) + return ex.output + + + def test_unhandled_nonstd_exception_aborts(self): + # verify that plain unhandled throw aborts + self._do_test_unhandled_exception(_test_extension_cpp.test_exception_throw_nonstd) + + def test_unhandled_std_exception_aborts(self): + # verify that plain unhandled throw aborts + self._do_test_unhandled_exception(_test_extension_cpp.test_exception_throw_std) + + @unittest.skipIf(WIN, "XXX: This does not crash on Windows") + # Meaning the exception is getting lost somewhere... + def test_unhandled_std_exception_as_greenlet_function_aborts(self): + # verify that plain unhandled throw aborts + output = self._do_test_unhandled_exception('run_as_greenlet_target') + self.assertIn( + # We really expect this to be prefixed with "greenlet: Unhandled C++ exception:" + # as added by our handler for std::exception (see TUserGreenlet.cpp), but + # that's not correct everywhere --- our handler never runs before std::terminate + # gets called (for example, on arm32). + 'Thrown from an extension.', + output + ) + + def test_unhandled_exception_in_greenlet_aborts(self): + # verify that unhandled throw called in greenlet aborts too + self._do_test_unhandled_exception('run_unhandled_exception_in_greenlet_aborts') + + +if __name__ == '__main__': + unittest.main() diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/test_extension_interface.py b/venv/lib/python3.11/site-packages/greenlet/tests/test_extension_interface.py new file mode 100644 index 0000000..34b6656 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/tests/test_extension_interface.py @@ -0,0 +1,115 @@ +from __future__ import print_function +from __future__ import absolute_import + +import sys + +import greenlet +from . import _test_extension +from . import TestCase + +# pylint:disable=c-extension-no-member + +class CAPITests(TestCase): + def test_switch(self): + self.assertEqual( + 50, _test_extension.test_switch(greenlet.greenlet(lambda: 50))) + + def test_switch_kwargs(self): + def adder(x, y): + return x * y + g = greenlet.greenlet(adder) + self.assertEqual(6, _test_extension.test_switch_kwargs(g, x=3, y=2)) + + def test_setparent(self): + # pylint:disable=disallowed-name + def foo(): + def bar(): + greenlet.getcurrent().parent.switch() + + # This final switch should go back to the main greenlet, since + # the test_setparent() function in the C extension should have + # reparented this greenlet. 
+ greenlet.getcurrent().parent.switch() + raise AssertionError("Should never have reached this code") + child = greenlet.greenlet(bar) + child.switch() + greenlet.getcurrent().parent.switch(child) + greenlet.getcurrent().parent.throw( + AssertionError("Should never reach this code")) + foo_child = greenlet.greenlet(foo).switch() + self.assertEqual(None, _test_extension.test_setparent(foo_child)) + + def test_getcurrent(self): + _test_extension.test_getcurrent() + + def test_new_greenlet(self): + self.assertEqual(-15, _test_extension.test_new_greenlet(lambda: -15)) + + def test_raise_greenlet_dead(self): + self.assertRaises( + greenlet.GreenletExit, _test_extension.test_raise_dead_greenlet) + + def test_raise_greenlet_error(self): + self.assertRaises( + greenlet.error, _test_extension.test_raise_greenlet_error) + + def test_throw(self): + seen = [] + + def foo(): # pylint:disable=disallowed-name + try: + greenlet.getcurrent().parent.switch() + except ValueError: + seen.append(sys.exc_info()[1]) + except greenlet.GreenletExit: + raise AssertionError + g = greenlet.greenlet(foo) + g.switch() + _test_extension.test_throw(g) + self.assertEqual(len(seen), 1) + self.assertTrue( + isinstance(seen[0], ValueError), + "ValueError was not raised in foo()") + self.assertEqual( + str(seen[0]), + 'take that sucka!', + "message doesn't match") + + def test_non_traceback_param(self): + with self.assertRaises(TypeError) as exc: + _test_extension.test_throw_exact( + greenlet.getcurrent(), + Exception, + Exception(), + self + ) + self.assertEqual(str(exc.exception), + "throw() third argument must be a traceback object") + + def test_instance_of_wrong_type(self): + with self.assertRaises(TypeError) as exc: + _test_extension.test_throw_exact( + greenlet.getcurrent(), + Exception(), + BaseException(), + None, + ) + + self.assertEqual(str(exc.exception), + "instance exception may not have a separate value") + + def test_not_throwable(self): + with self.assertRaises(TypeError) as exc: + _test_extension.test_throw_exact( + greenlet.getcurrent(), + "abc", + None, + None, + ) + self.assertEqual(str(exc.exception), + "exceptions must be classes, or instances, not str") + + +if __name__ == '__main__': + import unittest + unittest.main() diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/test_gc.py b/venv/lib/python3.11/site-packages/greenlet/tests/test_gc.py new file mode 100644 index 0000000..994addb --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/tests/test_gc.py @@ -0,0 +1,86 @@ +import gc + +import weakref + +import greenlet + + +from . import TestCase +from .leakcheck import fails_leakcheck +# These only work with greenlet gc support +# which is no longer optional. 
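+# For context, the property these tests rely on (a sketch mirroring
+# test_circular_greenlet below; the attribute name is illustrative):
+#
+#   g = greenlet.greenlet(lambda: None)
+#   g.self_ref = g              # cycle: greenlet -> attribute -> greenlet
+#   ref = weakref.ref(g)
+#   del g
+#   gc.collect()                # the cycle collector must reclaim it
+#   assert ref() is None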
+assert greenlet.GREENLET_USE_GC + +class GCTests(TestCase): + def test_dead_circular_ref(self): + o = weakref.ref(greenlet.greenlet(greenlet.getcurrent).switch()) + gc.collect() + if o() is not None: + import sys + print("O IS NOT NONE.", sys.getrefcount(o())) + self.assertIsNone(o()) + self.assertFalse(gc.garbage, gc.garbage) + + def test_circular_greenlet(self): + class circular_greenlet(greenlet.greenlet): + self = None + o = circular_greenlet() + o.self = o + o = weakref.ref(o) + gc.collect() + self.assertIsNone(o()) + self.assertFalse(gc.garbage, gc.garbage) + + def test_inactive_ref(self): + class inactive_greenlet(greenlet.greenlet): + def __init__(self): + greenlet.greenlet.__init__(self, run=self.run) + + def run(self): + pass + o = inactive_greenlet() + o = weakref.ref(o) + gc.collect() + self.assertIsNone(o()) + self.assertFalse(gc.garbage, gc.garbage) + + @fails_leakcheck + def test_finalizer_crash(self): + # This test is designed to crash when active greenlets + # are made garbage collectable, until the underlying + # problem is resolved. How does it work: + # - order of object creation is important + # - array is created first, so it is moved to unreachable first + # - we create a cycle between a greenlet and this array + # - we create an object that participates in gc, is only + # referenced by a greenlet, and would corrupt gc lists + # on destruction, the easiest is to use an object with + # a finalizer + # - because array is the first object in unreachable it is + # cleared first, which causes all references to greenlet + # to disappear and causes greenlet to be destroyed, but since + # it is still live it causes a switch during gc, which causes + # an object with finalizer to be destroyed, which causes stack + # corruption and then a crash + + class object_with_finalizer(object): + def __del__(self): + pass + array = [] + parent = greenlet.getcurrent() + def greenlet_body(): + greenlet.getcurrent().object = object_with_finalizer() + try: + parent.switch() + except greenlet.GreenletExit: + print("Got greenlet exit!") + finally: + del greenlet.getcurrent().object + g = greenlet.greenlet(greenlet_body) + g.array = array + array.append(g) + g.switch() + del array + del g + greenlet.getcurrent() + gc.collect() diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/test_generator.py b/venv/lib/python3.11/site-packages/greenlet/tests/test_generator.py new file mode 100644 index 0000000..ca4a644 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/tests/test_generator.py @@ -0,0 +1,59 @@ + +from greenlet import greenlet + +from . 
import TestCase + +class genlet(greenlet): + parent = None + def __init__(self, *args, **kwds): + self.args = args + self.kwds = kwds + + def run(self): + fn, = self.fn + fn(*self.args, **self.kwds) + + def __iter__(self): + return self + + def __next__(self): + self.parent = greenlet.getcurrent() + result = self.switch() + if self: + return result + + raise StopIteration + + next = __next__ + + +def Yield(value): + g = greenlet.getcurrent() + while not isinstance(g, genlet): + if g is None: + raise RuntimeError('yield outside a genlet') + g = g.parent + g.parent.switch(value) + + +def generator(func): + class Generator(genlet): + fn = (func,) + return Generator + +# ____________________________________________________________ + + +class GeneratorTests(TestCase): + def test_generator(self): + seen = [] + + def g(n): + for i in range(n): + seen.append(i) + Yield(i) + g = generator(g) + for _ in range(3): + for j in g(5): + seen.append(j) + self.assertEqual(seen, 3 * [0, 0, 1, 1, 2, 2, 3, 3, 4, 4]) diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/test_generator_nested.py b/venv/lib/python3.11/site-packages/greenlet/tests/test_generator_nested.py new file mode 100644 index 0000000..8d752a6 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/tests/test_generator_nested.py @@ -0,0 +1,168 @@ + +from greenlet import greenlet +from . import TestCase +from .leakcheck import fails_leakcheck + +class genlet(greenlet): + parent = None + def __init__(self, *args, **kwds): + self.args = args + self.kwds = kwds + self.child = None + + def run(self): + # Note the function is packed in a tuple + # to avoid creating a bound method for it. + fn, = self.fn + fn(*self.args, **self.kwds) + + def __iter__(self): + return self + + def set_child(self, child): + self.child = child + + def __next__(self): + if self.child: + child = self.child + while child.child: + tmp = child + child = child.child + tmp.child = None + + result = child.switch() + else: + self.parent = greenlet.getcurrent() + result = self.switch() + + if self: + return result + + raise StopIteration + + next = __next__ + +def Yield(value, level=1): + g = greenlet.getcurrent() + + while level != 0: + if not isinstance(g, genlet): + raise RuntimeError('yield outside a genlet') + if level > 1: + g.parent.set_child(g) + g = g.parent + level -= 1 + + g.switch(value) + + +def Genlet(func): + class TheGenlet(genlet): + fn = (func,) + return TheGenlet + +# ____________________________________________________________ + + +def g1(n, seen): + for i in range(n): + seen.append(i + 1) + yield i + + +def g2(n, seen): + for i in range(n): + seen.append(i + 1) + Yield(i) + +g2 = Genlet(g2) + + +def nested(i): + Yield(i) + + +def g3(n, seen): + for i in range(n): + seen.append(i + 1) + nested(i) +g3 = Genlet(g3) + + +def a(n): + if n == 0: + return + for ii in ax(n - 1): + Yield(ii) + Yield(n) +ax = Genlet(a) + + +def perms(l): + if len(l) > 1: + for e in l: + # No syntactical sugar for generator expressions + x = [Yield([e] + p) for p in perms([x for x in l if x != e])] + assert x + else: + Yield(l) +perms = Genlet(perms) + + +def gr1(n): + for ii in range(1, n): + Yield(ii) + Yield(ii * ii, 2) + +gr1 = Genlet(gr1) + + +def gr2(n, seen): + for ii in gr1(n): + seen.append(ii) + +gr2 = Genlet(gr2) + + +class NestedGeneratorTests(TestCase): + def test_layered_genlets(self): + seen = [] + for ii in gr2(5, seen): + seen.append(ii) + self.assertEqual(seen, [1, 1, 2, 4, 3, 9, 4, 16]) + + @fails_leakcheck + def test_permutations(self): + gen_perms = 
perms(list(range(4)))
+        permutations = list(gen_perms)
+        self.assertEqual(len(permutations), 4 * 3 * 2 * 1)
+        self.assertIn([0, 1, 2, 3], permutations)
+        self.assertIn([3, 2, 1, 0], permutations)
+        res = []
+        for ii in zip(perms(list(range(4))), perms(list(range(3)))):
+            res.append(ii)
+        self.assertEqual(
+            res,
+            [([0, 1, 2, 3], [0, 1, 2]), ([0, 1, 3, 2], [0, 2, 1]),
+             ([0, 2, 1, 3], [1, 0, 2]), ([0, 2, 3, 1], [1, 2, 0]),
+             ([0, 3, 1, 2], [2, 0, 1]), ([0, 3, 2, 1], [2, 1, 0])])
+        # XXX Test to make sure we are working as a generator expression
+
+    def test_genlet_simple(self):
+        for g in g1, g2, g3:
+            seen = []
+            for _ in range(3):
+                for j in g(5, seen):
+                    seen.append(j)
+            self.assertEqual(seen, 3 * [1, 0, 2, 1, 3, 2, 4, 3, 5, 4])
+
+    def test_genlet_bad(self):
+        try:
+            Yield(10)
+        except RuntimeError:
+            pass
+
+    def test_nested_genlets(self):
+        seen = []
+        for ii in ax(5):
+            seen.append(ii)
diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/test_greenlet.py b/venv/lib/python3.11/site-packages/greenlet/tests/test_greenlet.py
new file mode 100644
index 0000000..fd05c0d
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/tests/test_greenlet.py
@@ -0,0 +1,1327 @@
+import gc
+import sys
+import time
+import threading
+import unittest
+
+from abc import ABCMeta
+from abc import abstractmethod
+
+import greenlet
+from greenlet import greenlet as RawGreenlet
+from . import TestCase
+from . import RUNNING_ON_MANYLINUX
+from . import PY313
+from . import PY314
+from .leakcheck import fails_leakcheck
+
+
+# We manually manage locks in many tests
+# pylint:disable=consider-using-with
+# pylint:disable=too-many-public-methods
+# This module is quite large.
+# TODO: Refactor into separate test files. For example,
+# put all the regression tests that used to produce
+# crashes in test_greenlet_no_crash; put tests that DO deliberately crash
+# the interpreter into test_greenlet_crash.
+# pylint:disable=too-many-lines
+
+class SomeError(Exception):
+    pass
+
+
+def fmain(seen):
+    try:
+        greenlet.getcurrent().parent.switch()
+    except:
+        seen.append(sys.exc_info()[0])
+        raise
+    raise SomeError
+
+
+def send_exception(g, exc):
+    # note: send_exception(g, exc) can now be done with g.throw(exc).
+    # the purpose of this test is to explicitly check the propagation rules.
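+    # For comparison, the one-line modern form mentioned above (not used
+    # here, so the explicit child-greenlet propagation path stays exercised):
+    #
+    #   g.throw(exc)   # raises exc inside g and propagates it back to us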
+ def crasher(exc): + raise exc + g1 = RawGreenlet(crasher, parent=g) + g1.switch(exc) + + +class TestGreenlet(TestCase): + + def _do_simple_test(self): + lst = [] + + def f(): + lst.append(1) + greenlet.getcurrent().parent.switch() + lst.append(3) + g = RawGreenlet(f) + lst.append(0) + g.switch() + lst.append(2) + g.switch() + lst.append(4) + self.assertEqual(lst, list(range(5))) + + def test_simple(self): + self._do_simple_test() + + def test_switch_no_run_raises_AttributeError(self): + g = RawGreenlet() + with self.assertRaises(AttributeError) as exc: + g.switch() + + self.assertIn("run", str(exc.exception)) + + def test_throw_no_run_raises_AttributeError(self): + g = RawGreenlet() + with self.assertRaises(AttributeError) as exc: + g.throw(SomeError) + + self.assertIn("run", str(exc.exception)) + + def test_parent_equals_None(self): + g = RawGreenlet(parent=None) + self.assertIsNotNone(g) + self.assertIs(g.parent, greenlet.getcurrent()) + + def test_run_equals_None(self): + g = RawGreenlet(run=None) + self.assertIsNotNone(g) + self.assertIsNone(g.run) + + def test_two_children(self): + lst = [] + + def f(): + lst.append(1) + greenlet.getcurrent().parent.switch() + lst.extend([1, 1]) + g = RawGreenlet(f) + h = RawGreenlet(f) + g.switch() + self.assertEqual(len(lst), 1) + h.switch() + self.assertEqual(len(lst), 2) + h.switch() + self.assertEqual(len(lst), 4) + self.assertEqual(h.dead, True) + g.switch() + self.assertEqual(len(lst), 6) + self.assertEqual(g.dead, True) + + def test_two_recursive_children(self): + lst = [] + + def f(): + lst.append('b') + greenlet.getcurrent().parent.switch() + + def g(): + lst.append('a') + g = RawGreenlet(f) + g.switch() + lst.append('c') + self.assertEqual(sys.getrefcount(g), 2 if not PY314 else 1) + g = RawGreenlet(g) + # Python 3.14 elides reference counting operations + # in some cases. See https://github.com/python/cpython/pull/130708 + self.assertEqual(sys.getrefcount(g), 2 if not PY314 else 1) + g.switch() + self.assertEqual(lst, ['a', 'b', 'c']) + # Just the one in this frame, plus the one on the stack we pass to the function + self.assertEqual(sys.getrefcount(g), 2 if not PY314 else 1) + + def test_threads(self): + success = [] + + def f(): + self._do_simple_test() + success.append(True) + ths = [threading.Thread(target=f) for i in range(10)] + for th in ths: + th.start() + for th in ths: + th.join(10) + self.assertEqual(len(success), len(ths)) + + def test_exception(self): + seen = [] + g1 = RawGreenlet(fmain) + g2 = RawGreenlet(fmain) + g1.switch(seen) + g2.switch(seen) + g2.parent = g1 + + self.assertEqual(seen, []) + #with self.assertRaises(SomeError): + # p("***Switching back") + # g2.switch() + # Creating this as a bound method can reveal bugs that + # are hidden on newer versions of Python that avoid creating + # bound methods for direct expressions; IOW, don't use the `with` + # form! 
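+        # (Passing ``g2.switch`` as an argument below forces a real bound
+        # method object to be created, which is the code path being probed.)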
+ self.assertRaises(SomeError, g2.switch) + self.assertEqual(seen, [SomeError]) + + value = g2.switch() + self.assertEqual(value, ()) + self.assertEqual(seen, [SomeError]) + + value = g2.switch(25) + self.assertEqual(value, 25) + self.assertEqual(seen, [SomeError]) + + + def test_send_exception(self): + seen = [] + g1 = RawGreenlet(fmain) + g1.switch(seen) + self.assertRaises(KeyError, send_exception, g1, KeyError) + self.assertEqual(seen, [KeyError]) + + def test_dealloc(self): + seen = [] + g1 = RawGreenlet(fmain) + g2 = RawGreenlet(fmain) + g1.switch(seen) + g2.switch(seen) + self.assertEqual(seen, []) + del g1 + gc.collect() + self.assertEqual(seen, [greenlet.GreenletExit]) + del g2 + gc.collect() + self.assertEqual(seen, [greenlet.GreenletExit, greenlet.GreenletExit]) + + def test_dealloc_catches_GreenletExit_throws_other(self): + def run(): + try: + greenlet.getcurrent().parent.switch() + except greenlet.GreenletExit: + raise SomeError from None + + g = RawGreenlet(run) + g.switch() + # Destroying the only reference to the greenlet causes it + # to get GreenletExit; when it in turn raises, even though we're the parent + # we don't get the exception, it just gets printed. + # When we run on 3.8 only, we can use sys.unraisablehook + oldstderr = sys.stderr + from io import StringIO + stderr = sys.stderr = StringIO() + try: + del g + finally: + sys.stderr = oldstderr + + v = stderr.getvalue() + self.assertIn("Exception", v) + self.assertIn('ignored', v) + self.assertIn("SomeError", v) + + + @unittest.skipIf( + PY313 and RUNNING_ON_MANYLINUX, + "Sometimes flaky (getting one GreenletExit in the second list)" + # Probably due to funky timing interactions? + # TODO: FIXME Make that work. + ) + + def test_dealloc_other_thread(self): + seen = [] + someref = [] + + bg_glet_created_running_and_no_longer_ref_in_bg = threading.Event() + fg_ref_released = threading.Event() + bg_should_be_clear = threading.Event() + ok_to_exit_bg_thread = threading.Event() + + def f(): + g1 = RawGreenlet(fmain) + g1.switch(seen) + someref.append(g1) + del g1 + gc.collect() + + bg_glet_created_running_and_no_longer_ref_in_bg.set() + fg_ref_released.wait(3) + + RawGreenlet() # trigger release + bg_should_be_clear.set() + ok_to_exit_bg_thread.wait(3) + RawGreenlet() # One more time + + t = threading.Thread(target=f) + t.start() + bg_glet_created_running_and_no_longer_ref_in_bg.wait(10) + + self.assertEqual(seen, []) + self.assertEqual(len(someref), 1) + del someref[:] + gc.collect() + # g1 is not released immediately because it's from another thread + self.assertEqual(seen, []) + fg_ref_released.set() + bg_should_be_clear.wait(3) + try: + self.assertEqual(seen, [greenlet.GreenletExit]) + finally: + ok_to_exit_bg_thread.set() + t.join(10) + del seen[:] + del someref[:] + + def test_frame(self): + def f1(): + f = sys._getframe(0) # pylint:disable=protected-access + self.assertEqual(f.f_back, None) + greenlet.getcurrent().parent.switch(f) + return "meaning of life" + g = RawGreenlet(f1) + frame = g.switch() + self.assertTrue(frame is g.gr_frame) + self.assertTrue(g) + + from_g = g.switch() + self.assertFalse(g) + self.assertEqual(from_g, 'meaning of life') + self.assertEqual(g.gr_frame, None) + + def test_thread_bug(self): + def runner(x): + g = RawGreenlet(lambda: time.sleep(x)) + g.switch() + t1 = threading.Thread(target=runner, args=(0.2,)) + t2 = threading.Thread(target=runner, args=(0.3,)) + t1.start() + t2.start() + t1.join(10) + t2.join(10) + + def test_switch_kwargs(self): + def run(a, b): + self.assertEqual(a, 4) 
+ self.assertEqual(b, 2) + return 42 + x = RawGreenlet(run).switch(a=4, b=2) + self.assertEqual(x, 42) + + def test_switch_kwargs_to_parent(self): + def run(x): + greenlet.getcurrent().parent.switch(x=x) + greenlet.getcurrent().parent.switch(2, x=3) + return x, x ** 2 + g = RawGreenlet(run) + self.assertEqual({'x': 3}, g.switch(3)) + self.assertEqual(((2,), {'x': 3}), g.switch()) + self.assertEqual((3, 9), g.switch()) + + def test_switch_to_another_thread(self): + data = {} + created_event = threading.Event() + done_event = threading.Event() + + def run(): + data['g'] = RawGreenlet(lambda: None) + created_event.set() + done_event.wait(10) + thread = threading.Thread(target=run) + thread.start() + created_event.wait(10) + with self.assertRaises(greenlet.error): + data['g'].switch() + done_event.set() + thread.join(10) + # XXX: Should handle this automatically + data.clear() + + def test_exc_state(self): + def f(): + try: + raise ValueError('fun') + except: # pylint:disable=bare-except + exc_info = sys.exc_info() + RawGreenlet(h).switch() + self.assertEqual(exc_info, sys.exc_info()) + + def h(): + self.assertEqual(sys.exc_info(), (None, None, None)) + + RawGreenlet(f).switch() + + def test_instance_dict(self): + def f(): + greenlet.getcurrent().test = 42 + def deldict(g): + del g.__dict__ + def setdict(g, value): + g.__dict__ = value + g = RawGreenlet(f) + self.assertEqual(g.__dict__, {}) + g.switch() + self.assertEqual(g.test, 42) + self.assertEqual(g.__dict__, {'test': 42}) + g.__dict__ = g.__dict__ + self.assertEqual(g.__dict__, {'test': 42}) + self.assertRaises(TypeError, deldict, g) + self.assertRaises(TypeError, setdict, g, 42) + + def test_running_greenlet_has_no_run(self): + has_run = [] + def func(): + has_run.append( + hasattr(greenlet.getcurrent(), 'run') + ) + + g = RawGreenlet(func) + g.switch() + self.assertEqual(has_run, [False]) + + def test_deepcopy(self): + import copy + self.assertRaises(TypeError, copy.copy, RawGreenlet()) + self.assertRaises(TypeError, copy.deepcopy, RawGreenlet()) + + def test_parent_restored_on_kill(self): + hub = RawGreenlet(lambda: None) + main = greenlet.getcurrent() + result = [] + def worker(): + try: + # Wait to be killed by going back to the test. + main.switch() + except greenlet.GreenletExit: + # Resurrect and switch to parent + result.append(greenlet.getcurrent().parent) + result.append(greenlet.getcurrent()) + hub.switch() + g = RawGreenlet(worker, parent=hub) + g.switch() + # delete the only reference, thereby raising GreenletExit + del g + self.assertTrue(result) + self.assertIs(result[0], main) + self.assertIs(result[1].parent, hub) + # Delete them, thereby breaking the cycle between the greenlet + # and the frame, which otherwise would never be collectable + # XXX: We should be able to automatically fix this. 
+ del result[:] + hub = None + main = None + + def test_parent_return_failure(self): + # No run causes AttributeError on switch + g1 = RawGreenlet() + # Greenlet that implicitly switches to parent + g2 = RawGreenlet(lambda: None, parent=g1) + # AttributeError should propagate to us, no fatal errors + with self.assertRaises(AttributeError): + g2.switch() + + def test_throw_exception_not_lost(self): + class mygreenlet(RawGreenlet): + def __getattribute__(self, name): + try: + raise Exception # pylint:disable=broad-exception-raised + except: # pylint:disable=bare-except + pass + return RawGreenlet.__getattribute__(self, name) + g = mygreenlet(lambda: None) + self.assertRaises(SomeError, g.throw, SomeError()) + + @fails_leakcheck + def _do_test_throw_to_dead_thread_doesnt_crash(self, wait_for_cleanup=False): + result = [] + def worker(): + greenlet.getcurrent().parent.switch() + + def creator(): + g = RawGreenlet(worker) + g.switch() + result.append(g) + if wait_for_cleanup: + # Let this greenlet eventually be cleaned up. + g.switch() + greenlet.getcurrent() + t = threading.Thread(target=creator) + t.start() + t.join(10) + del t + # But, depending on the operating system, the thread + # deallocator may not actually have run yet! So we can't be + # sure about the error message unless we wait. + if wait_for_cleanup: + self.wait_for_pending_cleanups() + with self.assertRaises(greenlet.error) as exc: + result[0].throw(SomeError) + + if not wait_for_cleanup: + s = str(exc.exception) + self.assertTrue( + s == "cannot switch to a different thread (which happens to have exited)" + or 'Cannot switch' in s + ) + else: + self.assertEqual( + str(exc.exception), + "cannot switch to a different thread (which happens to have exited)", + ) + + if hasattr(result[0].gr_frame, 'clear'): + # The frame is actually executing (it thinks), we can't clear it. + with self.assertRaises(RuntimeError): + result[0].gr_frame.clear() + # Unfortunately, this doesn't actually clear the references, they're in the + # fast local array. + if not wait_for_cleanup: + # f_locals has no clear method in Python 3.13 + if hasattr(result[0].gr_frame.f_locals, 'clear'): + result[0].gr_frame.f_locals.clear() + else: + self.assertIsNone(result[0].gr_frame) + + del creator + worker = None + del result[:] + # XXX: we ought to be able to automatically fix this. 
+ # See issue 252 + self.expect_greenlet_leak = True # direct us not to wait for it to go away + + @fails_leakcheck + def test_throw_to_dead_thread_doesnt_crash(self): + self._do_test_throw_to_dead_thread_doesnt_crash() + + def test_throw_to_dead_thread_doesnt_crash_wait(self): + self._do_test_throw_to_dead_thread_doesnt_crash(True) + + @fails_leakcheck + def test_recursive_startup(self): + class convoluted(RawGreenlet): + def __init__(self): + RawGreenlet.__init__(self) + self.count = 0 + def __getattribute__(self, name): + if name == 'run' and self.count == 0: + self.count = 1 + self.switch(43) + return RawGreenlet.__getattribute__(self, name) + def run(self, value): + while True: + self.parent.switch(value) + g = convoluted() + self.assertEqual(g.switch(42), 43) + # Exits the running greenlet, otherwise it leaks + # XXX: We should be able to automatically fix this + #g.throw(greenlet.GreenletExit) + #del g + self.expect_greenlet_leak = True + + def test_threaded_updatecurrent(self): + # released when main thread should execute + lock1 = threading.Lock() + lock1.acquire() + # released when another thread should execute + lock2 = threading.Lock() + lock2.acquire() + class finalized(object): + def __del__(self): + # happens while in green_updatecurrent() in main greenlet + # should be very careful not to accidentally call it again + # at the same time we must make sure another thread executes + lock2.release() + lock1.acquire() + # now ts_current belongs to another thread + def deallocator(): + greenlet.getcurrent().parent.switch() + def fthread(): + lock2.acquire() + greenlet.getcurrent() + del g[0] + lock1.release() + lock2.acquire() + greenlet.getcurrent() + lock1.release() + main = greenlet.getcurrent() + g = [RawGreenlet(deallocator)] + g[0].bomb = finalized() + g[0].switch() + t = threading.Thread(target=fthread) + t.start() + # let another thread grab ts_current and deallocate g[0] + lock2.release() + lock1.acquire() + # this is the corner stone + # getcurrent() will notice that ts_current belongs to another thread + # and start the update process, which would notice that g[0] should + # be deallocated, and that will execute an object's finalizer. Now, + # that object will let another thread run so it can grab ts_current + # again, which would likely crash the interpreter if there's no + # check for this case at the end of green_updatecurrent(). This test + # passes if getcurrent() returns correct result, but it's likely + # to randomly crash if it's not anyway. + self.assertEqual(greenlet.getcurrent(), main) + # wait for another thread to complete, just in case + t.join(10) + + def test_dealloc_switch_args_not_lost(self): + seen = [] + def worker(): + # wait for the value + value = greenlet.getcurrent().parent.switch() + # delete all references to ourself + del worker[0] + initiator.parent = greenlet.getcurrent().parent + # switch to main with the value, but because + # ts_current is the last reference to us we + # return here immediately, where we resurrect ourself. 
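+            # The finally: block below records the resurrected greenlet,
+            # which is what lets the test assert ``seen`` is non-empty.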
+ try: + greenlet.getcurrent().parent.switch(value) + finally: + seen.append(greenlet.getcurrent()) + def initiator(): + return 42 # implicitly falls thru to parent + + worker = [RawGreenlet(worker)] + + worker[0].switch() # prime worker + initiator = RawGreenlet(initiator, worker[0]) + value = initiator.switch() + self.assertTrue(seen) + self.assertEqual(value, 42) + + def test_tuple_subclass(self): + # The point of this test is to see what happens when a custom + # tuple subclass is used as an object passed directly to the C + # function ``green_switch``; part of ``green_switch`` checks + # the ``len()`` of the ``args`` tuple, and that can call back + # into Python. Here, when it calls back into Python, we + # recursively enter ``green_switch`` again. + + # This test is really only relevant on Python 2. The builtin + # `apply` function directly passes the given args tuple object + # to the underlying function, whereas the Python 3 version + # unpacks and repacks into an actual tuple. This could still + # happen using the C API on Python 3 though. We should write a + # builtin version of apply() ourself. + def _apply(func, a, k): + func(*a, **k) + + class mytuple(tuple): + def __len__(self): + greenlet.getcurrent().switch() + return tuple.__len__(self) + args = mytuple() + kwargs = dict(a=42) + def switchapply(): + _apply(greenlet.getcurrent().parent.switch, args, kwargs) + g = RawGreenlet(switchapply) + self.assertEqual(g.switch(), kwargs) + + def test_abstract_subclasses(self): + AbstractSubclass = ABCMeta( + 'AbstractSubclass', + (RawGreenlet,), + {'run': abstractmethod(lambda self: None)}) + + class BadSubclass(AbstractSubclass): + pass + + class GoodSubclass(AbstractSubclass): + def run(self): + pass + + GoodSubclass() # should not raise + self.assertRaises(TypeError, BadSubclass) + + def test_implicit_parent_with_threads(self): + if not gc.isenabled(): + return # cannot test with disabled gc + N = gc.get_threshold()[0] + if N < 50: + return # cannot test with such a small N + def attempt(): + lock1 = threading.Lock() + lock1.acquire() + lock2 = threading.Lock() + lock2.acquire() + recycled = [False] + def another_thread(): + lock1.acquire() # wait for gc + greenlet.getcurrent() # update ts_current + lock2.release() # release gc + t = threading.Thread(target=another_thread) + t.start() + class gc_callback(object): + def __del__(self): + lock1.release() + lock2.acquire() + recycled[0] = True + class garbage(object): + def __init__(self): + self.cycle = self + self.callback = gc_callback() + l = [] + x = range(N*2) + current = greenlet.getcurrent() + g = garbage() + for _ in x: + g = None # lose reference to garbage + if recycled[0]: + # gc callback called prematurely + t.join(10) + return False + last = RawGreenlet() + if recycled[0]: + break # yes! gc called in green_new + l.append(last) # increase allocation counter + else: + # gc callback not called when expected + gc.collect() + if recycled[0]: + t.join(10) + return False + self.assertEqual(last.parent, current) + for g in l: + self.assertEqual(g.parent, current) + return True + for _ in range(5): + if attempt(): + break + + def test_issue_245_reference_counting_subclass_no_threads(self): + # https://github.com/python-greenlet/greenlet/issues/245 + # Before the fix, this crashed pretty reliably on + # Python 3.10, at least on macOS; but much less reliably on other + # interpreters (memory layout must have changed). + # The threaded test crashed more reliably on more interpreters. 
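+        # The check pattern, in miniature (a sketch of what follows): take a
+        # baseline refcount of the subclass, run instances to completion,
+        # drop every reference, and require the count to return to baseline:
+        #
+        #   before = sys.getrefcount(Greenlet)
+        #   ...create, switch, and discard instances...
+        #   assert sys.getrefcount(Greenlet) == before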
+        from greenlet import getcurrent
+        from greenlet import GreenletExit
+
+        class Greenlet(RawGreenlet):
+            pass
+
+        initial_refs = sys.getrefcount(Greenlet)
+        # This has to be an instance variable because
+        # Python 2 raises a SyntaxError if we delete a local
+        # variable referenced in an inner scope.
+        self.glets = [] # pylint:disable=attribute-defined-outside-init
+
+        def greenlet_main():
+            try:
+                getcurrent().parent.switch()
+            except GreenletExit:
+                self.glets.append(getcurrent())
+
+        # Before the fix, this crashed.
+        for _ in range(10):
+            Greenlet(greenlet_main).switch()
+
+        del self.glets
+        self.assertEqual(sys.getrefcount(Greenlet), initial_refs)
+
+    @unittest.skipIf(
+        PY313 and RUNNING_ON_MANYLINUX,
+        "The manylinux images appear to hang on this test on 3.13rc2"
+        # Or perhaps I just got tired of waiting for the 450s timeout.
+        # Still, it shouldn't take anywhere near that long. Does not reproduce in
+        # Ubuntu images, on macOS or Windows.
+    )
+    def test_issue_245_reference_counting_subclass_threads(self):
+        # https://github.com/python-greenlet/greenlet/issues/245
+        from threading import Thread
+        from threading import Event
+
+        from greenlet import getcurrent
+
+        class MyGreenlet(RawGreenlet):
+            pass
+
+        glets = []
+        ref_cleared = Event()
+
+        def greenlet_main():
+            getcurrent().parent.switch()
+
+        def thread_main(greenlet_running_event):
+            mine = MyGreenlet(greenlet_main)
+            glets.append(mine)
+            # The greenlets being deleted must be active
+            mine.switch()
+            # Don't keep any reference to it in this thread
+            del mine
+            # Let main know we published our greenlet.
+            greenlet_running_event.set()
+            # Wait for main to let us know the references are
+            # gone and the greenlet objects no longer reachable
+            ref_cleared.wait(10)
+            # The creating thread must call getcurrent() (or a few other
+            # greenlet APIs) because that's when the thread-local list of dead
+            # greenlets gets cleared.
+            getcurrent()
+
+        # We start with 3 references to the subclass:
+        # - This module
+        # - Its __mro__
+        # - The __subclasses__ attribute of greenlet
+        # - (If we call gc.get_referents(), we find four entries, including
+        #   some other tuple ``(greenlet)`` that I'm not sure about but must be part
+        #   of the machinery.)
+        #
+        # On Python 3.10 it's often enough to just run 3 threads; on Python 2.7,
+        # more threads are needed, and the results are still
+        # non-deterministic. Presumably the memory layouts are different.
+        initial_refs = sys.getrefcount(MyGreenlet)
+        thread_ready_events = []
+        for _ in range(
+                initial_refs + 45
+        ):
+            event = Event()
+            thread = Thread(target=thread_main, args=(event,))
+            thread_ready_events.append(event)
+            thread.start()
+
+
+        for done_event in thread_ready_events:
+            done_event.wait(10)
+
+
+        del glets[:]
+        ref_cleared.set()
+        # Let any other thread run; it will crash the interpreter
+        # if not fixed (or silently corrupt memory and we possibly crash
+        # later).
+        self.wait_for_pending_cleanups()
+        self.assertEqual(sys.getrefcount(MyGreenlet), initial_refs)
+
+    def test_falling_off_end_switches_to_unstarted_parent_raises_error(self):
+        def no_args():
+            return 13
+
+        parent_never_started = RawGreenlet(no_args)
+
+        def leaf():
+            return 42
+
+        child = RawGreenlet(leaf, parent_never_started)
+
+        # Because the run function takes no arguments
+        with self.assertRaises(TypeError):
+            child.switch()
+
+    def test_falling_off_end_switches_to_unstarted_parent_works(self):
+        def one_arg(x):
+            return (x, 24)
+
+        parent_never_started = RawGreenlet(one_arg)
+
+        def leaf():
+            return 42
+
+        child = RawGreenlet(leaf, parent_never_started)
+
+        result = child.switch()
+        self.assertEqual(result, (42, 24))
+
+    def test_switch_to_dead_greenlet_with_unstarted_perverse_parent(self):
+        class Parent(RawGreenlet):
+            def __getattribute__(self, name):
+                if name == 'run':
+                    raise SomeError
+
+
+        parent_never_started = Parent()
+        seen = []
+        child = RawGreenlet(lambda: seen.append(42), parent_never_started)
+        # Because we automatically start the parent when the child is
+        # finished
+        with self.assertRaises(SomeError):
+            child.switch()
+
+        self.assertEqual(seen, [42])
+
+        with self.assertRaises(SomeError):
+            child.switch()
+        self.assertEqual(seen, [42])
+
+    def test_switch_to_dead_greenlet_reparent(self):
+        seen = []
+        parent_never_started = RawGreenlet(lambda: seen.append(24))
+        child = RawGreenlet(lambda: seen.append(42))
+
+        child.switch()
+        self.assertEqual(seen, [42])
+
+        child.parent = parent_never_started
+        # This actually is the same as switching to the parent.
+        result = child.switch()
+        self.assertIsNone(result)
+        self.assertEqual(seen, [42, 24])
+
+    def test_can_access_f_back_of_suspended_greenlet(self):
+        # This tests our frame rewriting to work around Python 3.12+ having
+        # some interpreter frames on the C stack. It will crash in the absence
+        # of that logic.
+        main = greenlet.getcurrent()
+
+        def outer():
+            inner()
+
+        def inner():
+            main.switch(sys._getframe(0))
+
+        hub = RawGreenlet(outer)
+        # start it
+        hub.switch()
+
+        # start another greenlet to make sure we aren't relying on
+        # anything in `hub` still being on the C stack
+        unrelated = RawGreenlet(lambda: None)
+        unrelated.switch()
+
+        # now it is suspended
+        self.assertIsNotNone(hub.gr_frame)
+        self.assertEqual(hub.gr_frame.f_code.co_name, "inner")
+        self.assertIsNotNone(hub.gr_frame.f_back)
+        self.assertEqual(hub.gr_frame.f_back.f_code.co_name, "outer")
+        # The next line is what would crash
+        self.assertIsNone(hub.gr_frame.f_back.f_back)
+
+    def test_get_stack_with_nested_c_calls(self):
+        from functools import partial
+        from . import _test_extension_cpp
+
+        def recurse(v):
+            if v > 0:
+                return v * _test_extension_cpp.test_call(partial(recurse, v - 1))
+            return greenlet.getcurrent().parent.switch()
+
+        gr = RawGreenlet(recurse)
+        gr.switch(5)
+        frame = gr.gr_frame
+        for i in range(5):
+            self.assertEqual(frame.f_locals["v"], i)
+            frame = frame.f_back
+        self.assertEqual(frame.f_locals["v"], 5)
+        self.assertIsNone(frame.f_back)
+        self.assertEqual(gr.switch(10), 1200) # 1200 = 5! * 10
+
+    def test_frames_always_exposed(self):
+        # On Python 3.12 this will crash if we don't set the
+        # gr_frames_always_exposed attribute.
More background: + # https://github.com/python-greenlet/greenlet/issues/388 + main = greenlet.getcurrent() + + def outer(): + inner(sys._getframe(0)) + + def inner(frame): + main.switch(frame) + + gr = RawGreenlet(outer) + frame = gr.switch() + + # Do something else to clobber the part of the C stack used by `gr`, + # so we can't skate by on "it just happened to still be there" + unrelated = RawGreenlet(lambda: None) + unrelated.switch() + + self.assertEqual(frame.f_code.co_name, "outer") + # The next line crashes on 3.12 if we haven't exposed the frames. + self.assertIsNone(frame.f_back) + + +class TestGreenletSetParentErrors(TestCase): + def test_threaded_reparent(self): + data = {} + created_event = threading.Event() + done_event = threading.Event() + + def run(): + data['g'] = RawGreenlet(lambda: None) + created_event.set() + done_event.wait(10) + + def blank(): + greenlet.getcurrent().parent.switch() + + thread = threading.Thread(target=run) + thread.start() + created_event.wait(10) + g = RawGreenlet(blank) + g.switch() + with self.assertRaises(ValueError) as exc: + g.parent = data['g'] + done_event.set() + thread.join(10) + + self.assertEqual(str(exc.exception), "parent cannot be on a different thread") + + def test_unexpected_reparenting(self): + another = [] + def worker(): + g = RawGreenlet(lambda: None) + another.append(g) + g.switch() + t = threading.Thread(target=worker) + t.start() + t.join(10) + # The first time we switch (running g_initialstub(), which is + # when we look up the run attribute) we attempt to change the + # parent to one from another thread (which also happens to be + # dead). ``g_initialstub()`` should detect this and raise a + # greenlet error. + # + # EXCEPT: With the fix for #252, this is actually detected + # sooner, when setting the parent itself. Prior to that fix, + # the main greenlet from the background thread kept a valid + # value for ``run_info``, and appeared to be a valid parent + # until we actually started the greenlet. But now that it's + # cleared, this test is catching whether ``green_setparent`` + # can detect the dead thread. + # + # Further refactoring once again changes this back to a greenlet.error + # + # We need to wait for the cleanup to happen, but we're + # deliberately leaking a main greenlet here. + self.wait_for_pending_cleanups(initial_main_greenlets=self.main_greenlets_before_test + 1) + + class convoluted(RawGreenlet): + def __getattribute__(self, name): + if name == 'run': + self.parent = another[0] # pylint:disable=attribute-defined-outside-init + return RawGreenlet.__getattribute__(self, name) + g = convoluted(lambda: None) + with self.assertRaises(greenlet.error) as exc: + g.switch() + self.assertEqual(str(exc.exception), + "cannot switch to a different thread (which happens to have exited)") + del another[:] + + def test_unexpected_reparenting_thread_running(self): + # Like ``test_unexpected_reparenting``, except the background thread is + # actually still alive. 
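+        # Unlike the dead-thread case above, the expected error here reports
+        # the live-thread mismatch (the "Expected"/"Current" details asserted
+        # below) rather than the "(which happens to have exited)" suffix.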
+ another = [] + switched_to_greenlet = threading.Event() + keep_main_alive = threading.Event() + def worker(): + g = RawGreenlet(lambda: None) + another.append(g) + g.switch() + switched_to_greenlet.set() + keep_main_alive.wait(10) + class convoluted(RawGreenlet): + def __getattribute__(self, name): + if name == 'run': + self.parent = another[0] # pylint:disable=attribute-defined-outside-init + return RawGreenlet.__getattribute__(self, name) + + t = threading.Thread(target=worker) + t.start() + + switched_to_greenlet.wait(10) + try: + g = convoluted(lambda: None) + + with self.assertRaises(greenlet.error) as exc: + g.switch() + self.assertIn("Cannot switch to a different thread", str(exc.exception)) + self.assertIn("Expected", str(exc.exception)) + self.assertIn("Current", str(exc.exception)) + finally: + keep_main_alive.set() + t.join(10) + # XXX: Should handle this automatically. + del another[:] + + def test_cannot_delete_parent(self): + worker = RawGreenlet(lambda: None) + self.assertIs(worker.parent, greenlet.getcurrent()) + + with self.assertRaises(AttributeError) as exc: + del worker.parent + self.assertEqual(str(exc.exception), "can't delete attribute") + + def test_cannot_delete_parent_of_main(self): + with self.assertRaises(AttributeError) as exc: + del greenlet.getcurrent().parent + self.assertEqual(str(exc.exception), "can't delete attribute") + + + def test_main_greenlet_parent_is_none(self): + # assuming we're in a main greenlet here. + self.assertIsNone(greenlet.getcurrent().parent) + + def test_set_parent_wrong_types(self): + def bg(): + # Go back to main. + greenlet.getcurrent().parent.switch() + + def check(glet): + for p in None, 1, self, "42": + with self.assertRaises(TypeError) as exc: + glet.parent = p + + self.assertEqual( + str(exc.exception), + "GreenletChecker: Expected any type of greenlet, not " + type(p).__name__) + + # First, not running + g = RawGreenlet(bg) + self.assertFalse(g) + check(g) + + # Then when running. + g.switch() + self.assertTrue(g) + check(g) + + # Let it finish + g.switch() + + + def test_trivial_cycle(self): + glet = RawGreenlet(lambda: None) + with self.assertRaises(ValueError) as exc: + glet.parent = glet + self.assertEqual(str(exc.exception), "cyclic parent chain") + + def test_trivial_cycle_main(self): + # This used to produce a ValueError, but we catch it earlier than that now. 
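+        # (Main greenlets reject parent assignment outright, which is why we
+        # get the AttributeError below instead of "cyclic parent chain".)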
+ with self.assertRaises(AttributeError) as exc: + greenlet.getcurrent().parent = greenlet.getcurrent() + self.assertEqual(str(exc.exception), "cannot set the parent of a main greenlet") + + def test_deeper_cycle(self): + g1 = RawGreenlet(lambda: None) + g2 = RawGreenlet(lambda: None) + g3 = RawGreenlet(lambda: None) + + g1.parent = g2 + g2.parent = g3 + with self.assertRaises(ValueError) as exc: + g3.parent = g1 + self.assertEqual(str(exc.exception), "cyclic parent chain") + + +class TestRepr(TestCase): + + def assertEndsWith(self, got, suffix): + self.assertTrue(got.endswith(suffix), (got, suffix)) + + def test_main_while_running(self): + r = repr(greenlet.getcurrent()) + self.assertEndsWith(r, " current active started main>") + + def test_main_in_background(self): + main = greenlet.getcurrent() + def run(): + return repr(main) + + g = RawGreenlet(run) + r = g.switch() + self.assertEndsWith(r, ' suspended active started main>') + + def test_initial(self): + r = repr(RawGreenlet()) + self.assertEndsWith(r, ' pending>') + + def test_main_from_other_thread(self): + main = greenlet.getcurrent() + + class T(threading.Thread): + original_main = thread_main = None + main_glet = None + def run(self): + self.original_main = repr(main) + self.main_glet = greenlet.getcurrent() + self.thread_main = repr(self.main_glet) + + t = T() + t.start() + t.join(10) + + self.assertEndsWith(t.original_main, ' suspended active started main>') + self.assertEndsWith(t.thread_main, ' current active started main>') + # give the machinery time to notice the death of the thread, + # and clean it up. Note that we don't use + # ``expect_greenlet_leak`` or wait_for_pending_cleanups, + # because at this point we know we have an extra greenlet + # still reachable. + for _ in range(3): + time.sleep(0.001) + + # In the past, main greenlets, even from dead threads, never + # really appear dead. We have fixed that, and we also report + # that the thread is dead in the repr. (Do this multiple times + # to make sure that we don't self-modify and forget our state + # in the C++ code). + for _ in range(3): + self.assertTrue(t.main_glet.dead) + r = repr(t.main_glet) + self.assertEndsWith(r, ' (thread exited) dead>') + + def test_dead(self): + g = RawGreenlet(lambda: None) + g.switch() + self.assertEndsWith(repr(g), ' dead>') + self.assertNotIn('suspended', repr(g)) + self.assertNotIn('started', repr(g)) + self.assertNotIn('active', repr(g)) + + def test_formatting_produces_native_str(self): + # https://github.com/python-greenlet/greenlet/issues/218 + # %s formatting on Python 2 was producing unicode, not str. + + g_dead = RawGreenlet(lambda: None) + g_not_started = RawGreenlet(lambda: None) + g_cur = greenlet.getcurrent() + + for g in g_dead, g_not_started, g_cur: + + self.assertIsInstance( + '%s' % (g,), + str + ) + self.assertIsInstance( + '%r' % (g,), + str, + ) + + +class TestMainGreenlet(TestCase): + # Tests some implementation details, and relies on some + # implementation details. 
+ + def _check_current_is_main(self): + # implementation detail + assert 'main' in repr(greenlet.getcurrent()) + + t = type(greenlet.getcurrent()) + assert 'main' not in repr(t) + return t + + def test_main_greenlet_type_can_be_subclassed(self): + main_type = self._check_current_is_main() + subclass = type('subclass', (main_type,), {}) + self.assertIsNotNone(subclass) + + def test_main_greenlet_is_greenlet(self): + self._check_current_is_main() + self.assertIsInstance(greenlet.getcurrent(), RawGreenlet) + + + +class TestBrokenGreenlets(TestCase): + # Tests for things that used to, or still do, terminate the interpreter. + # This often means doing unsavory things. + + def test_failed_to_initialstub(self): + def func(): + raise AssertionError("Never get here") + + + g = greenlet._greenlet.UnswitchableGreenlet(func) + g.force_switch_error = True + + with self.assertRaisesRegex(SystemError, + "Failed to switch stacks into a greenlet for the first time."): + g.switch() + + def test_failed_to_switch_into_running(self): + runs = [] + def func(): + runs.append(1) + greenlet.getcurrent().parent.switch() + runs.append(2) + greenlet.getcurrent().parent.switch() + runs.append(3) # pragma: no cover + + g = greenlet._greenlet.UnswitchableGreenlet(func) + g.switch() + self.assertEqual(runs, [1]) + g.switch() + self.assertEqual(runs, [1, 2]) + g.force_switch_error = True + + with self.assertRaisesRegex(SystemError, + "Failed to switch stacks into a running greenlet."): + g.switch() + + # If we stopped here, we would fail the leakcheck, because we've left + # the ``inner_bootstrap()`` C frame and its descendents hanging around, + # which have a bunch of Python references. They'll never get cleaned up + # if we don't let the greenlet finish. + g.force_switch_error = False + g.switch() + self.assertEqual(runs, [1, 2, 3]) + + def test_failed_to_slp_switch_into_running(self): + ex = self.assertScriptRaises('fail_slp_switch.py') + + self.assertIn('fail_slp_switch is running', ex.output) + self.assertIn(ex.returncode, self.get_expected_returncodes_for_aborted_process()) + + def test_reentrant_switch_two_greenlets(self): + # Before we started capturing the arguments in g_switch_finish, this could crash. + output = self.run_script('fail_switch_two_greenlets.py') + self.assertIn('In g1_run', output) + self.assertIn('TRACE', output) + self.assertIn('LEAVE TRACE', output) + self.assertIn('Falling off end of main', output) + self.assertIn('Falling off end of g1_run', output) + self.assertIn('Falling off end of g2', output) + + def test_reentrant_switch_three_greenlets(self): + # On debug builds of greenlet, this used to crash with an assertion error; + # on non-debug versions, it ran fine (which it should not do!). + # Now it always crashes correctly with a TypeError + ex = self.assertScriptRaises('fail_switch_three_greenlets.py', exitcodes=(1,)) + + self.assertIn('TypeError', ex.output) + self.assertIn('positional arguments', ex.output) + + def test_reentrant_switch_three_greenlets2(self): + # This actually passed on debug and non-debug builds. It + # should probably have been triggering some debug assertions + # but it didn't. + # + # I think the fixes for the above test also kicked in here. 
+ output = self.run_script('fail_switch_three_greenlets2.py') + self.assertIn( + "RESULTS: [('trace', 'switch'), " + "('trace', 'switch'), ('g2 arg', 'g2 from tracefunc'), " + "('trace', 'switch'), ('main g1', 'from g2_run'), ('trace', 'switch'), " + "('g1 arg', 'g1 from main'), ('trace', 'switch'), ('main g2', 'from g1_run'), " + "('trace', 'switch'), ('g1 from parent', 'g1 from main 2'), ('trace', 'switch'), " + "('main g1.2', 'g1 done'), ('trace', 'switch'), ('g2 from parent', ()), " + "('trace', 'switch'), ('main g2.2', 'g2 done')]", + output + ) + + def test_reentrant_switch_GreenletAlreadyStartedInPython(self): + output = self.run_script('fail_initialstub_already_started.py') + + self.assertIn( + "RESULTS: ['Begin C', 'Switch to b from B.__getattribute__ in C', " + "('Begin B', ()), '_B_run switching to main', ('main from c', 'From B'), " + "'B.__getattribute__ back from main in C', ('Begin A', (None,)), " + "('A dead?', True, 'B dead?', True, 'C dead?', False), " + "'C done', ('main from c.2', None)]", + output + ) + + def test_reentrant_switch_run_callable_has_del(self): + output = self.run_script('fail_clearing_run_switches.py') + self.assertIn( + "RESULTS [" + "('G.__getattribute__', 'run'), ('RunCallable', '__del__'), " + "('main: g.switch()', 'from RunCallable'), ('run_func', 'enter')" + "]", + output + ) + +if __name__ == '__main__': + unittest.main() diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/test_greenlet_trash.py b/venv/lib/python3.11/site-packages/greenlet/tests/test_greenlet_trash.py new file mode 100644 index 0000000..c1fc137 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/tests/test_greenlet_trash.py @@ -0,0 +1,187 @@ +# -*- coding: utf-8 -*- +""" +Tests for greenlets interacting with the CPython trash can API. + +The CPython trash can API is not designed to be re-entered from a +single thread. But this can happen using greenlets, if something +during the object deallocation process switches greenlets, and this second +greenlet then causes the trash can to get entered again. Here, we do this +very explicitly, but in other cases (like gevent) it could be arbitrarily more +complicated: for example, a weakref callback might try to acquire a lock that's +already held by another greenlet; that would allow a greenlet switch to occur. + +See https://github.com/gevent/gevent/issues/1909 + +This test is fragile and relies on details of the CPython +implementation (like most of the rest of this package): + + - We enter the trashcan and deferred deallocation after + ``_PyTrash_UNWIND_LEVEL`` calls. This constant, defined in + CPython's object.c, is generally 50. That's basically how many objects are required to + get us into the deferred deallocation situation. + + - The test fails by hitting an ``assert()`` in object.c; if the + build didn't enable assert, then we don't catch this. + + - If the test fails in that way, the interpreter crashes. +""" +from __future__ import print_function, absolute_import, division + +import unittest + + +class TestTrashCanReEnter(unittest.TestCase): + + def test_it(self): + try: + # pylint:disable-next=no-name-in-module + from greenlet._greenlet import get_tstate_trash_delete_nesting # pylint:disable=unused-import + except ImportError: + import sys + # Python 3.13 has not "trash delete nesting" anymore (but "delete later") + assert sys.version_info[:2] >= (3, 13) + self.skipTest("get_tstate_trash_delete_nesting is not available.") + + # Try several times to trigger it, because it isn't 100% + # reliable. 
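+        # The shape of the re-entrancy (in outline): a __del__ running inside
+        # a deferred trashcan flush switches to another greenlet, which builds
+        # and drops its own deeply nested garbage, re-entering the trashcan
+        # before the first flush finishes. Per the Dealloc comments below, if
+        # that nesting state is not saved and restored appropriately, the
+        # interpreter can crash.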
+        for _ in range(10):
+            self.check_it()
+
+    def check_it(self): # pylint:disable=too-many-statements
+        import greenlet
+        from greenlet._greenlet import get_tstate_trash_delete_nesting # pylint:disable=no-name-in-module
+        main = greenlet.getcurrent()
+
+        assert get_tstate_trash_delete_nesting() == 0
+
+        # We expect to be in deferred deallocation after this many
+        # deallocations have occurred. TODO: I wish we had a better way to do
+        # this --- that was before get_tstate_trash_delete_nesting; perhaps
+        # we can use that API to do better?
+        TRASH_UNWIND_LEVEL = 50
+        # How many objects to put in a container; it's the container that
+        # queues objects for deferred deallocation.
+        OBJECTS_PER_CONTAINER = 500
+
+        class Dealloc: # define the class here because we alter class variables each time we run.
+            """
+            An object with a ``__del__`` method. When it starts getting deallocated
+            from a deferred trash can run, it switches greenlets, allocates more objects
+            which then also go in the trash can. If we don't save state appropriately,
+            nesting gets out of order and we can crash the interpreter.
+            """
+
+            #: Has our deallocation actually run and switched greenlets?
+            #: When it does, this will be set to the current greenlet. This should
+            #: be happening in the main greenlet, so we check that down below.
+            SPAWNED = False
+
+            #: Has the background greenlet run?
+            BG_RAN = False
+
+            BG_GLET = None
+
+            #: How many of these things have ever been allocated.
+            CREATED = 0
+
+            #: How many of these things have ever been deallocated.
+            DESTROYED = 0
+
+            #: How many were destroyed not in the main greenlet. There should always
+            #: be some.
+            #: If the test is broken or things change in the trashcan implementation,
+            #: this may not be correct.
+            DESTROYED_BG = 0
+
+            def __init__(self, sequence_number):
+                """
+                :param sequence_number: The ordinal of this object during
+                   one particular creation run. This is used to detect (guess, really)
+                   when we have entered the trash can's deferred deallocation.
+                """
+                self.i = sequence_number
+                Dealloc.CREATED += 1
+
+            def __del__(self):
+                if self.i == TRASH_UNWIND_LEVEL and not self.SPAWNED:
+                    Dealloc.SPAWNED = greenlet.getcurrent()
+                    other = Dealloc.BG_GLET = greenlet.greenlet(background_greenlet)
+                    x = other.switch()
+                    assert x == 42
+                    # It's important that we don't switch back to the greenlet,
+                    # we leave it hanging there in an incomplete state. But we don't let it
+                    # get collected, either. If we complete it now, while we're still
+                    # in the scope of the initial trash can, things work out and we
+                    # don't see the problem. We need this greenlet to complete
+                    # at some point in the future, after we've exited this trash can invocation.
+                    del other
+                elif self.i == 40 and greenlet.getcurrent() is not main:
+                    Dealloc.BG_RAN = True
+                    try:
+                        main.switch(42)
+                    except greenlet.GreenletExit as ex:
+                        # We expect this; all references to us go away
+                        # while we're still running, and we need to finish deleting
+                        # ourself.
+                        Dealloc.BG_RAN = type(ex)
+                        del ex
+
+                # Record the fact that we're dead last of all. This ensures that
+                # we actually get returned too.
+                Dealloc.DESTROYED += 1
+                if greenlet.getcurrent() is not main:
+                    Dealloc.DESTROYED_BG += 1
+
+
+        def background_greenlet():
+            # We go through a second function, instead of
+            # directly calling ``make_some()``, so that we have complete
+            # control over when these objects are destroyed: we need them
+            # to be destroyed in the context of the background greenlet
+            t = make_some()
+            del t # Trigger deletion.
+        def make_some():
+            t = ()
+            i = OBJECTS_PER_CONTAINER
+            while i:
+                # Nest the tuples; it's the recursion that gets us
+                # into trash.
+                t = (Dealloc(i), t)
+                i -= 1
+            return t
+
+
+        some = make_some()
+        self.assertEqual(Dealloc.CREATED, OBJECTS_PER_CONTAINER)
+        self.assertEqual(Dealloc.DESTROYED, 0)
+
+        # If we're going to crash, it should be on the following line.
+        # We only crash if ``assert()`` is enabled, of course.
+        del some
+
+        # For non-debug builds of CPython, we won't crash. The best we can do is check
+        # the nesting level explicitly.
+        self.assertEqual(0, get_tstate_trash_delete_nesting())
+
+        # Discard this, raising GreenletExit into where it is waiting.
+        Dealloc.BG_GLET = None
+        # The same nesting level is maintained.
+        self.assertEqual(0, get_tstate_trash_delete_nesting())
+
+        # We definitely cleaned some up in the background
+        self.assertGreater(Dealloc.DESTROYED_BG, 0)
+
+        # Make sure all the cleanups happened.
+        self.assertIs(Dealloc.SPAWNED, main)
+        self.assertTrue(Dealloc.BG_RAN)
+        self.assertEqual(Dealloc.BG_RAN, greenlet.GreenletExit)
+        self.assertEqual(Dealloc.CREATED, Dealloc.DESTROYED)
+        self.assertEqual(Dealloc.CREATED, OBJECTS_PER_CONTAINER * 2)
+
+        import gc
+        gc.collect()
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/test_leaks.py b/venv/lib/python3.11/site-packages/greenlet/tests/test_leaks.py
new file mode 100644
index 0000000..99da4eb
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/tests/test_leaks.py
@@ -0,0 +1,447 @@
+# -*- coding: utf-8 -*-
+"""
+Testing scenarios that may have leaked.
+"""
+from __future__ import print_function, absolute_import, division
+
+import sys
+import gc
+
+import time
+import weakref
+import threading
+
+
+import greenlet
+from . import TestCase
+from . import PY314
+from .leakcheck import fails_leakcheck
+from .leakcheck import ignores_leakcheck
+from .leakcheck import RUNNING_ON_MANYLINUX
+
+# pylint:disable=protected-access
+
+assert greenlet.GREENLET_USE_GC # Option to disable this was removed in 1.0
+
+class HasFinalizerTracksInstances(object):
+    EXTANT_INSTANCES = set()
+    def __init__(self, msg):
+        self.msg = sys.intern(msg)
+        self.EXTANT_INSTANCES.add(id(self))
+    def __del__(self):
+        self.EXTANT_INSTANCES.remove(id(self))
+    def __repr__(self):
+        return "<HasFinalizerTracksInstances at 0x%x %r>" % (
+            id(self), self.msg
+        )
+    @classmethod
+    def reset(cls):
+        cls.EXTANT_INSTANCES.clear()
+
+
+class TestLeaks(TestCase):
+
+    def test_arg_refs(self):
+        args = ('a', 'b', 'c')
+        refcount_before = sys.getrefcount(args)
+        # pylint:disable=unnecessary-lambda
+        g = greenlet.greenlet(
+            lambda *args: greenlet.getcurrent().parent.switch(*args))
+        for _ in range(100):
+            g.switch(*args)
+        self.assertEqual(sys.getrefcount(args), refcount_before)
+
+    def test_kwarg_refs(self):
+        kwargs = {}
+        self.assertEqual(sys.getrefcount(kwargs), 2 if not PY314 else 1)
+        # pylint:disable=unnecessary-lambda
+        g = greenlet.greenlet(
+            lambda **gkwargs: greenlet.getcurrent().parent.switch(**gkwargs))
+        for _ in range(100):
+            g.switch(**kwargs)
+        # Python 3.14 elides reference counting operations
+        # in some cases. See https://github.com/python/cpython/pull/130708
+        self.assertEqual(sys.getrefcount(kwargs), 2 if not PY314 else 1)
+
+
+    @staticmethod
+    def __recycle_threads():
+        # By introducing a thread that does sleep we allow other threads,
+        # that have triggered their __block condition, but did not have a
+        # chance to deallocate their thread state yet, to finally do so.
+ # The way it works is by requiring a GIL switch (different thread), + # which does a GIL release (sleep), which might do a GIL switch + # to finished threads and allow them to clean up. + def worker(): + time.sleep(0.001) + t = threading.Thread(target=worker) + t.start() + time.sleep(0.001) + t.join(10) + + def test_threaded_leak(self): + gg = [] + def worker(): + # only main greenlet present + gg.append(weakref.ref(greenlet.getcurrent())) + for _ in range(2): + t = threading.Thread(target=worker) + t.start() + t.join(10) + del t + greenlet.getcurrent() # update ts_current + self.__recycle_threads() + greenlet.getcurrent() # update ts_current + gc.collect() + greenlet.getcurrent() # update ts_current + for g in gg: + self.assertIsNone(g()) + + def test_threaded_adv_leak(self): + gg = [] + def worker(): + # main and additional *finished* greenlets + ll = greenlet.getcurrent().ll = [] + def additional(): + ll.append(greenlet.getcurrent()) + for _ in range(2): + greenlet.greenlet(additional).switch() + gg.append(weakref.ref(greenlet.getcurrent())) + for _ in range(2): + t = threading.Thread(target=worker) + t.start() + t.join(10) + del t + greenlet.getcurrent() # update ts_current + self.__recycle_threads() + greenlet.getcurrent() # update ts_current + gc.collect() + greenlet.getcurrent() # update ts_current + for g in gg: + self.assertIsNone(g()) + + def assertClocksUsed(self): + used = greenlet._greenlet.get_clocks_used_doing_optional_cleanup() + self.assertGreaterEqual(used, 0) + # we don't lose the value + greenlet._greenlet.enable_optional_cleanup(True) + used2 = greenlet._greenlet.get_clocks_used_doing_optional_cleanup() + self.assertEqual(used, used2) + self.assertGreater(greenlet._greenlet.CLOCKS_PER_SEC, 1) + + def _check_issue251(self, + manually_collect_background=True, + explicit_reference_to_switch=False): + # See https://github.com/python-greenlet/greenlet/issues/251 + # Killing a greenlet (probably not the main one) + # in one thread from another thread would + # result in leaking a list (the ts_delkey list). + # We no longer use lists to hold that stuff, though. + + # For the test to be valid, even empty lists have to be tracked by the + # GC + + assert gc.is_tracked([]) + HasFinalizerTracksInstances.reset() + greenlet.getcurrent() + greenlets_before = self.count_objects(greenlet.greenlet, exact_kind=False) + + background_glet_running = threading.Event() + background_glet_killed = threading.Event() + background_greenlets = [] + + # XXX: Switching this to a greenlet subclass that overrides + # run results in all callers failing the leaktest; that + # greenlet instance is leaked. There's a bound method for + # run() living on the stack of the greenlet in g_initialstub, + # and since we don't manually switch back to the background + # greenlet to let it "fall off the end" and exit the + # g_initialstub function, it never gets cleaned up. Making the + # garbage collector aware of this bound method (making it an + # attribute of the greenlet structure and traversing into it) + # doesn't help, for some reason. + def background_greenlet(): + # Throw control back to the main greenlet. 
+ jd = HasFinalizerTracksInstances("DELETING STACK OBJECT") + greenlet._greenlet.set_thread_local( + 'test_leaks_key', + HasFinalizerTracksInstances("DELETING THREAD STATE")) + # Explicitly keeping 'switch' in a local variable + # breaks this test in all versions + if explicit_reference_to_switch: + s = greenlet.getcurrent().parent.switch + s([jd]) + else: + greenlet.getcurrent().parent.switch([jd]) + + bg_main_wrefs = [] + + def background_thread(): + glet = greenlet.greenlet(background_greenlet) + bg_main_wrefs.append(weakref.ref(glet.parent)) + + background_greenlets.append(glet) + glet.switch() # Be sure it's active. + # Control is ours again. + del glet # Delete one reference from the thread it runs in. + background_glet_running.set() + background_glet_killed.wait(10) + + # To trigger the background collection of the dead + # greenlet, thus clearing out the contents of the list, we + # need to run some APIs. See issue 252. + if manually_collect_background: + greenlet.getcurrent() + + + t = threading.Thread(target=background_thread) + t.start() + background_glet_running.wait(10) + greenlet.getcurrent() + lists_before = self.count_objects(list, exact_kind=True) + + assert len(background_greenlets) == 1 + self.assertFalse(background_greenlets[0].dead) + # Delete the last reference to the background greenlet + # from a different thread. This puts it in the background thread's + # ts_delkey list. + del background_greenlets[:] + background_glet_killed.set() + + # Now wait for the background thread to die. + t.join(10) + del t + # As part of the fix for 252, we need to cycle the ceval.c + # interpreter loop to be sure it has had a chance to process + # the pending call. + self.wait_for_pending_cleanups() + + lists_after = self.count_objects(list, exact_kind=True) + greenlets_after = self.count_objects(greenlet.greenlet, exact_kind=False) + + # On 2.7, we observe that lists_after is smaller than + # lists_before. No idea what lists got cleaned up. All the + # Python 3 versions match exactly. + self.assertLessEqual(lists_after, lists_before) + # On versions after 3.6, we've successfully cleaned up the + # greenlet references thanks to the internal "vectorcall" + # protocol; prior to that, there is a reference path through + # the ``greenlet.switch`` method still on the stack that we + # can't reach to clean up. The C code goes through terrific + # lengths to clean that up. + if not explicit_reference_to_switch \ + and greenlet._greenlet.get_clocks_used_doing_optional_cleanup() is not None: + # If cleanup was disabled, though, we may not find it. + self.assertEqual(greenlets_after, greenlets_before) + if manually_collect_background: + # TODO: Figure out how to make this work! + # The one on the stack is still leaking somehow + # in the non-manually-collect state. + self.assertEqual(HasFinalizerTracksInstances.EXTANT_INSTANCES, set()) + else: + # The explicit reference prevents us from collecting it + # and it isn't always found by the GC either for some + # reason. The entire frame is leaked somehow, on some + # platforms (e.g., MacPorts builds of Python (all + # versions!)), but not on other platforms (the linux and + # windows builds on GitHub actions and Appveyor). So we'd + # like to write a test that proves that the main greenlet + # sticks around, and we can on my machine (macOS 11.6, + # MacPorts builds of everything) but we can't write that + # same test on other platforms. However, hopefully iteration + # done by leakcheck will find it. 
+ pass + + if greenlet._greenlet.get_clocks_used_doing_optional_cleanup() is not None: + self.assertClocksUsed() + + def test_issue251_killing_cross_thread_leaks_list(self): + self._check_issue251() + + def test_issue251_with_cleanup_disabled(self): + greenlet._greenlet.enable_optional_cleanup(False) + try: + self._check_issue251() + finally: + greenlet._greenlet.enable_optional_cleanup(True) + + @fails_leakcheck + def test_issue251_issue252_need_to_collect_in_background(self): + # Between greenlet 1.1.2 and the next version, this was still + # failing because the leak of the list still exists when we + # don't call a greenlet API before exiting the thread. The + # proximate cause is that neither of the two greenlets from + # the background thread are actually being destroyed, even + # though the GC is in fact visiting both objects. It's not + # clear where that leak is. For some reason the thread-local + # dict holding it isn't being cleaned up. + # + # The leak, I think, is in the CPython internal function that + # calls into green_switch(). The argument tuple is still on + # the C stack somewhere and can't be reached? That doesn't + # make sense, because the tuple should be collectable when + # this object goes away. + # + # Note that this test sometimes spuriously passes on Linux, + # for some reason, but I've never seen it pass on macOS. + self._check_issue251(manually_collect_background=False) + + @fails_leakcheck + def test_issue251_issue252_need_to_collect_in_background_cleanup_disabled(self): + self.expect_greenlet_leak = True + greenlet._greenlet.enable_optional_cleanup(False) + try: + self._check_issue251(manually_collect_background=False) + finally: + greenlet._greenlet.enable_optional_cleanup(True) + + @fails_leakcheck + def test_issue251_issue252_explicit_reference_not_collectable(self): + self._check_issue251( + manually_collect_background=False, + explicit_reference_to_switch=True) + + UNTRACK_ATTEMPTS = 100 + + def _only_test_some_versions(self): + # We're only looking for this problem specifically on 3.11, + # and this set of tests is relatively fragile, depending on + # OS and memory management details. So we want to run it on 3.11+ + # (obviously) but not every older 3.x version in order to reduce + # false negatives. At the moment, those false results seem to have + # resolved, so we are actually running this on 3.8+ + assert sys.version_info[0] >= 3 + if sys.version_info[:2] < (3, 8): + self.skipTest('Only observed on 3.11') + if RUNNING_ON_MANYLINUX: + self.skipTest("Slow and not worth repeating here") + + @ignores_leakcheck + # Because we're just trying to track raw memory, not objects, and running + # the leakcheck makes an already slow test slower. + def test_untracked_memory_doesnt_increase(self): + # See https://github.com/gevent/gevent/issues/1924 + # and https://github.com/python-greenlet/greenlet/issues/328 + self._only_test_some_versions() + def f(): + return 1 + + ITER = 10000 + def run_it(): + for _ in range(ITER): + greenlet.greenlet(f).switch() + + # Establish baseline + for _ in range(3): + run_it() + + # uss: (Linux, macOS, Windows): aka "Unique Set Size", this is + # the memory which is unique to a process and which would be + # freed if the process was terminated right now.
+ uss_before = self.get_process_uss() + + for count in range(self.UNTRACK_ATTEMPTS): + uss_before = max(uss_before, self.get_process_uss()) + run_it() + + uss_after = self.get_process_uss() + if uss_after <= uss_before and count > 1: + break + + self.assertLessEqual(uss_after, uss_before) + + def _check_untracked_memory_thread(self, deallocate_in_thread=True): + self._only_test_some_versions() + # Like the above test, but what if there are a bunch of + # unfinished greenlets in a thread that dies? + # Does it matter if we deallocate in the thread or not? + EXIT_COUNT = [0] + + def f(): + try: + greenlet.getcurrent().parent.switch() + except greenlet.GreenletExit: + EXIT_COUNT[0] += 1 + raise + return 1 + + ITER = 10000 + def run_it(): + glets = [] + for _ in range(ITER): + # Greenlet starts, switches back to us. + # We keep a strong reference to the greenlet though so it doesn't + # get a GreenletExit exception. + g = greenlet.greenlet(f) + glets.append(g) + g.switch() + + return glets + + test = self + + class ThreadFunc: + uss_before = uss_after = 0 + glets = () + ITER = 2 + def __call__(self): + self.uss_before = test.get_process_uss() + + for _ in range(self.ITER): + self.glets += tuple(run_it()) + + for g in self.glets: + test.assertIn('suspended active', str(g)) + # Drop them. + if deallocate_in_thread: + self.glets = () + self.uss_after = test.get_process_uss() + + # Establish baseline + uss_before = uss_after = None + for count in range(self.UNTRACK_ATTEMPTS): + EXIT_COUNT[0] = 0 + thread_func = ThreadFunc() + t = threading.Thread(target=thread_func) + t.start() + t.join(30) + self.assertFalse(t.is_alive()) + + if uss_before is None: + uss_before = thread_func.uss_before + + uss_before = max(uss_before, thread_func.uss_before) + if deallocate_in_thread: + self.assertEqual(thread_func.glets, ()) + self.assertEqual(EXIT_COUNT[0], ITER * thread_func.ITER) + + del thread_func # Deallocate the greenlets; but this won't raise into them + del t + if not deallocate_in_thread: + self.assertEqual(EXIT_COUNT[0], 0) + if deallocate_in_thread: + self.wait_for_pending_cleanups() + + uss_after = self.get_process_uss() + # See if we achieve a non-growth state at some point. Break when we do. + if uss_after <= uss_before and count > 1: + break + + self.wait_for_pending_cleanups() + uss_after = self.get_process_uss() + self.assertLessEqual(uss_after, uss_before, "after attempts %d" % (count,)) + + @ignores_leakcheck + # Because we're just trying to track raw memory, not objects, and running + # the leakcheck makes an already slow test slower. + def test_untracked_memory_doesnt_increase_unfinished_thread_dealloc_in_thread(self): + self._check_untracked_memory_thread(deallocate_in_thread=True) + + @ignores_leakcheck + # Because the main greenlets from the background threads do not exit in a timely fashion, + # we fail the object-based leakchecks. + def test_untracked_memory_doesnt_increase_unfinished_thread_dealloc_in_main(self): + self._check_untracked_memory_thread(deallocate_in_thread=False) + +if __name__ == '__main__': + __import__('unittest').main() diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/test_stack_saved.py b/venv/lib/python3.11/site-packages/greenlet/tests/test_stack_saved.py new file mode 100644 index 0000000..b362bf9 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/tests/test_stack_saved.py @@ -0,0 +1,19 @@ +import greenlet +from . 
import TestCase + + +class Test(TestCase): + + def test_stack_saved(self): + main = greenlet.getcurrent() + self.assertEqual(main._stack_saved, 0) + + def func(): + main.switch(main._stack_saved) + + g = greenlet.greenlet(func) + x = g.switch() + self.assertGreater(x, 0) + self.assertGreater(g._stack_saved, 0) + g.switch() + self.assertEqual(g._stack_saved, 0) diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/test_throw.py b/venv/lib/python3.11/site-packages/greenlet/tests/test_throw.py new file mode 100644 index 0000000..f4f9a14 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/tests/test_throw.py @@ -0,0 +1,128 @@ +import sys + + +from greenlet import greenlet +from . import TestCase + +def switch(*args): + return greenlet.getcurrent().parent.switch(*args) + + +class ThrowTests(TestCase): + def test_class(self): + def f(): + try: + switch("ok") + except RuntimeError: + switch("ok") + return + switch("fail") + g = greenlet(f) + res = g.switch() + self.assertEqual(res, "ok") + res = g.throw(RuntimeError) + self.assertEqual(res, "ok") + + def test_val(self): + def f(): + try: + switch("ok") + except RuntimeError: + val = sys.exc_info()[1] + if str(val) == "ciao": + switch("ok") + return + switch("fail") + + g = greenlet(f) + res = g.switch() + self.assertEqual(res, "ok") + res = g.throw(RuntimeError("ciao")) + self.assertEqual(res, "ok") + + g = greenlet(f) + res = g.switch() + self.assertEqual(res, "ok") + res = g.throw(RuntimeError, "ciao") + self.assertEqual(res, "ok") + + def test_kill(self): + def f(): + switch("ok") + switch("fail") + g = greenlet(f) + res = g.switch() + self.assertEqual(res, "ok") + res = g.throw() + self.assertTrue(isinstance(res, greenlet.GreenletExit)) + self.assertTrue(g.dead) + res = g.throw() # immediately eaten by the already-dead greenlet + self.assertTrue(isinstance(res, greenlet.GreenletExit)) + + def test_throw_goes_to_original_parent(self): + main = greenlet.getcurrent() + + def f1(): + try: + main.switch("f1 ready to catch") + except IndexError: + return "caught" + return "normal exit" + + def f2(): + main.switch("from f2") + + g1 = greenlet(f1) + g2 = greenlet(f2, parent=g1) + with self.assertRaises(IndexError): + g2.throw(IndexError) + self.assertTrue(g2.dead) + self.assertTrue(g1.dead) + + g1 = greenlet(f1) + g2 = greenlet(f2, parent=g1) + res = g1.switch() + self.assertEqual(res, "f1 ready to catch") + res = g2.throw(IndexError) + self.assertEqual(res, "caught") + self.assertTrue(g2.dead) + self.assertTrue(g1.dead) + + g1 = greenlet(f1) + g2 = greenlet(f2, parent=g1) + res = g1.switch() + self.assertEqual(res, "f1 ready to catch") + res = g2.switch() + self.assertEqual(res, "from f2") + res = g2.throw(IndexError) + self.assertEqual(res, "caught") + self.assertTrue(g2.dead) + self.assertTrue(g1.dead) + + def test_non_traceback_param(self): + with self.assertRaises(TypeError) as exc: + greenlet.getcurrent().throw( + Exception, + Exception(), + self + ) + self.assertEqual(str(exc.exception), + "throw() third argument must be a traceback object") + + def test_instance_of_wrong_type(self): + with self.assertRaises(TypeError) as exc: + greenlet.getcurrent().throw( + Exception(), + BaseException() + ) + + self.assertEqual(str(exc.exception), + "instance exception may not have a separate value") + + def test_not_throwable(self): + with self.assertRaises(TypeError) as exc: + greenlet.getcurrent().throw( + "abc" + ) + self.assertEqual(str(exc.exception), + "exceptions must be classes, or instances, not str") diff --git 
a/venv/lib/python3.11/site-packages/greenlet/tests/test_tracing.py b/venv/lib/python3.11/site-packages/greenlet/tests/test_tracing.py new file mode 100644 index 0000000..c044d4b --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/tests/test_tracing.py @@ -0,0 +1,291 @@ +from __future__ import print_function +import sys +import greenlet +import unittest + +from . import TestCase +from . import PY312 + +# https://discuss.python.org/t/cpython-3-12-greenlet-and-tracing-profiling-how-to-not-crash-and-get-correct-results/33144/2 +DEBUG_BUILD_PY312 = ( + PY312 and hasattr(sys, 'gettotalrefcount'), + "Broken on debug builds of Python 3.12" +) + +class SomeError(Exception): + pass + +class GreenletTracer(object): + oldtrace = None + + def __init__(self, error_on_trace=False): + self.actions = [] + self.error_on_trace = error_on_trace + + def __call__(self, *args): + self.actions.append(args) + if self.error_on_trace: + raise SomeError + + def __enter__(self): + self.oldtrace = greenlet.settrace(self) + return self.actions + + def __exit__(self, *args): + greenlet.settrace(self.oldtrace) + + +class TestGreenletTracing(TestCase): + """ + Tests of ``greenlet.settrace()`` + """ + + def test_a_greenlet_tracing(self): + main = greenlet.getcurrent() + def dummy(): + pass + def dummyexc(): + raise SomeError() + + with GreenletTracer() as actions: + g1 = greenlet.greenlet(dummy) + g1.switch() + g2 = greenlet.greenlet(dummyexc) + self.assertRaises(SomeError, g2.switch) + + self.assertEqual(actions, [ + ('switch', (main, g1)), + ('switch', (g1, main)), + ('switch', (main, g2)), + ('throw', (g2, main)), + ]) + + def test_b_exception_disables_tracing(self): + main = greenlet.getcurrent() + def dummy(): + main.switch() + g = greenlet.greenlet(dummy) + g.switch() + with GreenletTracer(error_on_trace=True) as actions: + self.assertRaises(SomeError, g.switch) + self.assertEqual(greenlet.gettrace(), None) + + self.assertEqual(actions, [ + ('switch', (main, g)), + ]) + + def test_set_same_tracer_twice(self): + # https://github.com/python-greenlet/greenlet/issues/332 + # Our logic in asserting that the tracefunction should + # gain a reference was incorrect if the same tracefunction was set + # twice. + tracer = GreenletTracer() + with tracer: + greenlet.settrace(tracer) + + +class PythonTracer(object): + oldtrace = None + + def __init__(self): + self.actions = [] + + def __call__(self, frame, event, arg): + # Record the co_name so we have an idea what function we're in. + self.actions.append((event, frame.f_code.co_name)) + + def __enter__(self): + self.oldtrace = sys.setprofile(self) + return self.actions + + def __exit__(self, *args): + sys.setprofile(self.oldtrace) + +def tpt_callback(): + return 42 + +class TestPythonTracing(TestCase): + """ + Tests of the interaction of ``sys.settrace()`` + with greenlet facilities. + + NOTE: Most of this is probably CPython specific. 
+ """ + + maxDiff = None + + def test_trace_events_trivial(self): + with PythonTracer() as actions: + tpt_callback() + # If we use the sys.settrace instead of setprofile, we get + # this: + + # self.assertEqual(actions, [ + # ('call', 'tpt_callback'), + # ('call', '__exit__'), + # ]) + + self.assertEqual(actions, [ + ('return', '__enter__'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('call', '__exit__'), + ('c_call', '__exit__'), + ]) + + def _trace_switch(self, glet): + with PythonTracer() as actions: + glet.switch() + return actions + + def _check_trace_events_func_already_set(self, glet): + actions = self._trace_switch(glet) + self.assertEqual(actions, [ + ('return', '__enter__'), + ('c_call', '_trace_switch'), + ('call', 'run'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('return', 'run'), + ('c_return', '_trace_switch'), + ('call', '__exit__'), + ('c_call', '__exit__'), + ]) + + def test_trace_events_into_greenlet_func_already_set(self): + def run(): + return tpt_callback() + + self._check_trace_events_func_already_set(greenlet.greenlet(run)) + + def test_trace_events_into_greenlet_subclass_already_set(self): + class X(greenlet.greenlet): + def run(self): + return tpt_callback() + self._check_trace_events_func_already_set(X()) + + def _check_trace_events_from_greenlet_sets_profiler(self, g, tracer): + g.switch() + tpt_callback() + tracer.__exit__() + self.assertEqual(tracer.actions, [ + ('return', '__enter__'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('return', 'run'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('call', '__exit__'), + ('c_call', '__exit__'), + ]) + + + def test_trace_events_from_greenlet_func_sets_profiler(self): + tracer = PythonTracer() + def run(): + tracer.__enter__() + return tpt_callback() + + self._check_trace_events_from_greenlet_sets_profiler(greenlet.greenlet(run), + tracer) + + def test_trace_events_from_greenlet_subclass_sets_profiler(self): + tracer = PythonTracer() + class X(greenlet.greenlet): + def run(self): + tracer.__enter__() + return tpt_callback() + + self._check_trace_events_from_greenlet_sets_profiler(X(), tracer) + + @unittest.skipIf(*DEBUG_BUILD_PY312) + def test_trace_events_multiple_greenlets_switching(self): + tracer = PythonTracer() + + g1 = None + g2 = None + + def g1_run(): + tracer.__enter__() + tpt_callback() + g2.switch() + tpt_callback() + return 42 + + def g2_run(): + tpt_callback() + tracer.__exit__() + tpt_callback() + g1.switch() + + g1 = greenlet.greenlet(g1_run) + g2 = greenlet.greenlet(g2_run) + + x = g1.switch() + self.assertEqual(x, 42) + tpt_callback() # ensure not in the trace + self.assertEqual(tracer.actions, [ + ('return', '__enter__'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('c_call', 'g1_run'), + ('call', 'g2_run'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('call', '__exit__'), + ('c_call', '__exit__'), + ]) + + @unittest.skipIf(*DEBUG_BUILD_PY312) + def test_trace_events_multiple_greenlets_switching_siblings(self): + # Like the first version, but get both greenlets running first + # as "siblings" and then establish the tracing. 
+ tracer = PythonTracer() + + g1 = None + g2 = None + + def g1_run(): + greenlet.getcurrent().parent.switch() + tracer.__enter__() + tpt_callback() + g2.switch() + tpt_callback() + return 42 + + def g2_run(): + greenlet.getcurrent().parent.switch() + + tpt_callback() + tracer.__exit__() + tpt_callback() + g1.switch() + + g1 = greenlet.greenlet(g1_run) + g2 = greenlet.greenlet(g2_run) + + # Start g1 + g1.switch() + # And it immediately returns control to us. + # Start g2 + g2.switch() + # Which also returns. Now kick off the real part of the + # test. + x = g1.switch() + self.assertEqual(x, 42) + + tpt_callback() # ensure not in the trace + self.assertEqual(tracer.actions, [ + ('return', '__enter__'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('c_call', 'g1_run'), + ('call', 'tpt_callback'), + ('return', 'tpt_callback'), + ('call', '__exit__'), + ('c_call', '__exit__'), + ]) + + +if __name__ == '__main__': + unittest.main() diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/test_version.py b/venv/lib/python3.11/site-packages/greenlet/tests/test_version.py new file mode 100644 index 0000000..96c17cf --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/tests/test_version.py @@ -0,0 +1,41 @@ +#! /usr/bin/env python +from __future__ import absolute_import +from __future__ import print_function + +import sys +import os +from unittest import TestCase as NonLeakingTestCase + +import greenlet + +# No reason to run this multiple times under leakchecks, +# it doesn't do anything. +class VersionTests(NonLeakingTestCase): + def test_version(self): + def find_dominating_file(name): + if os.path.exists(name): + return name + + tried = [] + here = os.path.abspath(os.path.dirname(__file__)) + for i in range(10): + up = ['..'] * i + path = [here] + up + [name] + fname = os.path.join(*path) + fname = os.path.abspath(fname) + tried.append(fname) + if os.path.exists(fname): + return fname + raise AssertionError("Could not find file " + name + "; checked " + str(tried)) + + try: + setup_py = find_dominating_file('setup.py') + except AssertionError as e: + self.skipTest("Unable to find setup.py; must be out of tree. " + str(e)) + + + invoke_setup = "%s %s --version" % (sys.executable, setup_py) + with os.popen(invoke_setup) as f: + sversion = f.read().strip() + + self.assertEqual(sversion, greenlet.__version__) diff --git a/venv/lib/python3.11/site-packages/greenlet/tests/test_weakref.py b/venv/lib/python3.11/site-packages/greenlet/tests/test_weakref.py new file mode 100644 index 0000000..05a38a7 --- /dev/null +++ b/venv/lib/python3.11/site-packages/greenlet/tests/test_weakref.py @@ -0,0 +1,35 @@ +import gc +import weakref + + +import greenlet +from . 
import TestCase + +class WeakRefTests(TestCase): + def test_dead_weakref(self): + def _dead_greenlet(): + g = greenlet.greenlet(lambda: None) + g.switch() + return g + o = weakref.ref(_dead_greenlet()) + gc.collect() + self.assertEqual(o(), None) + + def test_inactive_weakref(self): + o = weakref.ref(greenlet.greenlet()) + gc.collect() + self.assertEqual(o(), None) + + def test_dealloc_weakref(self): + seen = [] + def worker(): + try: + greenlet.getcurrent().parent.switch() + finally: + seen.append(g()) + g = greenlet.greenlet(worker) + g.switch() + g2 = greenlet.greenlet(lambda: None, g) + g = weakref.ref(g2) + g2 = None + self.assertEqual(seen, [None]) diff --git a/venv/lib/python3.11/site-packages/gunicorn-21.2.0.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/gunicorn-21.2.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn-21.2.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.11/site-packages/gunicorn-21.2.0.dist-info/METADATA b/venv/lib/python3.11/site-packages/gunicorn-21.2.0.dist-info/METADATA new file mode 100644 index 0000000..ad966b9 --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn-21.2.0.dist-info/METADATA @@ -0,0 +1,124 @@ +Metadata-Version: 2.1 +Name: gunicorn +Version: 21.2.0 +Summary: WSGI HTTP Server for UNIX +Home-page: https://gunicorn.org +Author: Benoit Chesneau +Author-email: benoitc@gunicorn.org +License: MIT +Project-URL: Documentation, https://docs.gunicorn.org +Project-URL: Homepage, https://gunicorn.org +Project-URL: Issue tracker, https://github.com/benoitc/gunicorn/issues +Project-URL: Source code, https://github.com/benoitc/gunicorn +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Other Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: POSIX +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet +Classifier: Topic :: Utilities +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Server +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Requires-Python: >=3.5 +Requires-Dist: packaging +Requires-Dist: importlib-metadata ; python_version < "3.8" +Provides-Extra: eventlet +Requires-Dist: eventlet (>=0.24.1) ; extra == 'eventlet' +Provides-Extra: gevent +Requires-Dist: gevent (>=1.4.0) ; extra == 'gevent' +Provides-Extra: gthread +Provides-Extra: setproctitle +Requires-Dist: setproctitle ; extra == 'setproctitle' +Provides-Extra: tornado +Requires-Dist: tornado (>=0.2) ; extra == 'tornado' + +Gunicorn +-------- + +.. 
image:: https://img.shields.io/pypi/v/gunicorn.svg?style=flat + :alt: PyPI version + :target: https://pypi.python.org/pypi/gunicorn + +.. image:: https://img.shields.io/pypi/pyversions/gunicorn.svg + :alt: Supported Python versions + :target: https://pypi.python.org/pypi/gunicorn + +.. image:: https://github.com/benoitc/gunicorn/actions/workflows/tox.yml/badge.svg + :alt: Build Status + :target: https://github.com/benoitc/gunicorn/actions/workflows/tox.yml + +.. image:: https://github.com/benoitc/gunicorn/actions/workflows/lint.yml/badge.svg + :alt: Lint Status + :target: https://github.com/benoitc/gunicorn/actions/workflows/lint.yml + +Gunicorn 'Green Unicorn' is a Python WSGI HTTP Server for UNIX. It's a pre-fork +worker model ported from Ruby's Unicorn_ project. The Gunicorn server is broadly +compatible with various web frameworks, simply implemented, light on server +resource usage, and fairly speedy. + +Feel free to join us in `#gunicorn`_ on `Libera.chat`_. + +Documentation +------------- + +The documentation is hosted at https://docs.gunicorn.org. + +Installation +------------ + +Gunicorn requires **Python 3.x >= 3.5**. + +Install from PyPI:: + + $ pip install gunicorn + + +Usage +----- + +Basic usage:: + + $ gunicorn [OPTIONS] APP_MODULE + +Where ``APP_MODULE`` is of the pattern ``$(MODULE_NAME):$(VARIABLE_NAME)``. The +module name can be a full dotted path. The variable name refers to a WSGI +callable that should be found in the specified module. + +Example with test app:: + + $ cd examples + $ gunicorn --workers=2 test:app + + +Contributing +------------ + +See `our complete contributor's guide <CONTRIBUTING.md>`_ for more details. + + +License +------- + +Gunicorn is released under the MIT License. See the LICENSE_ file for more +details. + +.. _Unicorn: https://bogomips.org/unicorn/ +.. _`#gunicorn`: https://web.libera.chat/?channels=#gunicorn +.. _`Libera.chat`: https://libera.chat/ +.. 
_LICENSE: https://github.com/benoitc/gunicorn/blob/master/LICENSE diff --git a/venv/lib/python3.11/site-packages/gunicorn-21.2.0.dist-info/RECORD b/venv/lib/python3.11/site-packages/gunicorn-21.2.0.dist-info/RECORD new file mode 100644 index 0000000..237b80e --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn-21.2.0.dist-info/RECORD @@ -0,0 +1,76 @@ +../../../bin/gunicorn,sha256=Ea-Nzmv8QGo1c4MOnh4YD8-enAWLwFLvr9sSfS71Apk,243 +gunicorn-21.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +gunicorn-21.2.0.dist-info/METADATA,sha256=5D3VZHwuMtS6iruVsiafVSSiuCpM_vfR3f9_o274SyU,4129 +gunicorn-21.2.0.dist-info/RECORD,, +gunicorn-21.2.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +gunicorn-21.2.0.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 +gunicorn-21.2.0.dist-info/entry_points.txt,sha256=bF8VNiG4H8W83JfEBcqcPMydv9hl04CS4kwh1KOYrFY,113 +gunicorn-21.2.0.dist-info/top_level.txt,sha256=cdMaa2yhxb8do-WioY9qRHUCfwf55YztjwQCncaInoE,9 +gunicorn/__init__.py,sha256=cp8ijnQcq-iYGac8T6m8hahC9rvVAf8JoV7pyTQj6aI,279 +gunicorn/__main__.py,sha256=kv-LQeOm8rXRw_NQTj8Tg3l3jv9eKMAm6jyKzYY0Hs8,171 +gunicorn/__pycache__/__init__.cpython-311.pyc,, +gunicorn/__pycache__/__main__.cpython-311.pyc,, +gunicorn/__pycache__/arbiter.cpython-311.pyc,, +gunicorn/__pycache__/config.cpython-311.pyc,, +gunicorn/__pycache__/debug.cpython-311.pyc,, +gunicorn/__pycache__/errors.cpython-311.pyc,, +gunicorn/__pycache__/glogging.cpython-311.pyc,, +gunicorn/__pycache__/pidfile.cpython-311.pyc,, +gunicorn/__pycache__/reloader.cpython-311.pyc,, +gunicorn/__pycache__/sock.cpython-311.pyc,, +gunicorn/__pycache__/systemd.cpython-311.pyc,, +gunicorn/__pycache__/util.cpython-311.pyc,, +gunicorn/app/__init__.py,sha256=GuqstqdkizeV4HRbd8aGMBn0Q8IDOyRU1wMMNqNe5GY,127 +gunicorn/app/__pycache__/__init__.cpython-311.pyc,, +gunicorn/app/__pycache__/base.cpython-311.pyc,, +gunicorn/app/__pycache__/pasterapp.cpython-311.pyc,, +gunicorn/app/__pycache__/wsgiapp.cpython-311.pyc,, +gunicorn/app/base.py,sha256=CWl34bhgOEAGeW_esL4dylax_7mdx-Kbfde_YdC9Pa0,7400 +gunicorn/app/pasterapp.py,sha256=Bb0JwQNqZxmZ-gvvZUGWAEc9RX2BdhdhfhJ2a12Xafo,2038 +gunicorn/app/wsgiapp.py,sha256=Ktb5z0GPkCpDqQ0zS8zccYCvqJi8Su4zOekwKJulwBA,1926 +gunicorn/arbiter.py,sha256=oPx6GAV8pbFVW9q_5REnXb-RuCslWlchj29gE03fwu8,21509 +gunicorn/config.py,sha256=PEvdKwnlZDKb74gUIwWeJ-ZQCc6dXsQtBfNZDsBvoUA,63419 +gunicorn/debug.py,sha256=lxrs_952G7RHeKqpooLHV_szSpvh-b_dO4YEcfT7LUc,2293 +gunicorn/errors.py,sha256=JlDBjag90gMiRwLHG3xzEJzDOntSl1iM32R277-U6j0,919 +gunicorn/glogging.py,sha256=uP8_tdLRRxmW4t9klPB9uNeKNE0_QvEFIuvNAoqZErA,15303 +gunicorn/http/__init__.py,sha256=b4TF3x5F0VYOPTOeNYwRGR1EYHBaPMhZRMoNeuD5-n0,277 +gunicorn/http/__pycache__/__init__.cpython-311.pyc,, +gunicorn/http/__pycache__/body.cpython-311.pyc,, +gunicorn/http/__pycache__/errors.cpython-311.pyc,, +gunicorn/http/__pycache__/message.cpython-311.pyc,, +gunicorn/http/__pycache__/parser.cpython-311.pyc,, +gunicorn/http/__pycache__/unreader.cpython-311.pyc,, +gunicorn/http/__pycache__/wsgi.cpython-311.pyc,, +gunicorn/http/body.py,sha256=F925FyTotfQ2iQ-LTBQmjTsWh9NI_IJSFdEFp1qWUEo,7296 +gunicorn/http/errors.py,sha256=sNjF2lm4m2qyZ9l95_U33FRxPXpxXzjnZyYqWS-hxd4,2850 +gunicorn/http/message.py,sha256=zQxlleTpIBvC6rKWkHiySaUr02zuO3BpX2Dy3YQbmwg,11958 +gunicorn/http/parser.py,sha256=6eNGDUMEURYqzCXsftv3a4hYuD_fBvttZxOJuRbdKNg,1364 +gunicorn/http/unreader.py,sha256=pXVde3fNCUIO2FLOSJ0iNtEEpA0m8GH6_R2Sl-cB-J8,1943 
+gunicorn/http/wsgi.py,sha256=bzt62ykG1BdxbnKOzGyyGT_V14Y_OLfmvWP7Z4PsqWs,12366 +gunicorn/instrument/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +gunicorn/instrument/__pycache__/__init__.cpython-311.pyc,, +gunicorn/instrument/__pycache__/statsd.cpython-311.pyc,, +gunicorn/instrument/statsd.py,sha256=Qikvsbn9aGFzmeN1efK9u29tCFsVXLS4zWd1WMdiTEs,4690 +gunicorn/pidfile.py,sha256=U3TpoE5_05wQxonGS4pV-aLkq8BMSvql142XJnE2olw,2367 +gunicorn/reloader.py,sha256=wgP05Ou1ysQoVa-xpvSGkG_DLcmd1KRdGPPpvhIjao4,3791 +gunicorn/sock.py,sha256=a_FQgrf_leQFpdjNQtiRPrMLy2Tpoy--kNVIwoARzSk,6887 +gunicorn/systemd.py,sha256=iBnPrn0LcdL_skwjQFPkWV_33HQzGiBMkqHr4gXCeZA,2520 +gunicorn/util.py,sha256=KL4KpZiMtP2dleVpXS_wr2wiyQkDHZeSDx0DVGAvRGw,19127 +gunicorn/workers/__init__.py,sha256=Gv_JJXKofikyiPbRAUQ0IXIchKxgt0Gu-8y-nYRN9vY,594 +gunicorn/workers/__pycache__/__init__.cpython-311.pyc,, +gunicorn/workers/__pycache__/base.cpython-311.pyc,, +gunicorn/workers/__pycache__/base_async.cpython-311.pyc,, +gunicorn/workers/__pycache__/geventlet.cpython-311.pyc,, +gunicorn/workers/__pycache__/ggevent.cpython-311.pyc,, +gunicorn/workers/__pycache__/gthread.cpython-311.pyc,, +gunicorn/workers/__pycache__/gtornado.cpython-311.pyc,, +gunicorn/workers/__pycache__/sync.cpython-311.pyc,, +gunicorn/workers/__pycache__/workertmp.cpython-311.pyc,, +gunicorn/workers/base.py,sha256=6aTkFWuu7ou6LiwkBlCZdXJgwnOnRYcoBW5oCr-b1Ac,9197 +gunicorn/workers/base_async.py,sha256=U4rnAY_PGtFe_33eUoOIptilh-Lir8t_6XZAvLQyTYA,5681 +gunicorn/workers/geventlet.py,sha256=SC7uAaoIoMhHNMdE75i_Kw-b4rwHokef8J-5bC5Eghk,6091 +gunicorn/workers/ggevent.py,sha256=OUJAxJMrMP7QR2LzK0H3Hbu0C5cvr3bn4PIGKG_cwP0,5800 +gunicorn/workers/gthread.py,sha256=2e4fhNwTmgn1dkw7aAsO8zaF-NKjDg_2EShutCgPaPY,12575 +gunicorn/workers/gtornado.py,sha256=4ms0vyMr7h1GgUOfq40CVaSjCWB8brelnRLCQdLyONI,5854 +gunicorn/workers/sync.py,sha256=ECdaeyb_29IF1Nb-7EbvZwXBUCIkrSg3849PVSAhGis,7272 +gunicorn/workers/workertmp.py,sha256=yN6GQ9_Z2958BbsHTV-sYenDU-gvTJGFprRD3J-Kp2U,1651 diff --git a/venv/lib/python3.11/site-packages/gunicorn-21.2.0.dist-info/REQUESTED b/venv/lib/python3.11/site-packages/gunicorn-21.2.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.11/site-packages/gunicorn-21.2.0.dist-info/WHEEL b/venv/lib/python3.11/site-packages/gunicorn-21.2.0.dist-info/WHEEL new file mode 100644 index 0000000..1f37c02 --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn-21.2.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.40.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.11/site-packages/gunicorn-21.2.0.dist-info/entry_points.txt b/venv/lib/python3.11/site-packages/gunicorn-21.2.0.dist-info/entry_points.txt new file mode 100644 index 0000000..fd14749 --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn-21.2.0.dist-info/entry_points.txt @@ -0,0 +1,5 @@ +[console_scripts] +gunicorn = gunicorn.app.wsgiapp:run + +[paste.server_runner] +main = gunicorn.app.pasterapp:serve diff --git a/venv/lib/python3.11/site-packages/gunicorn-21.2.0.dist-info/top_level.txt b/venv/lib/python3.11/site-packages/gunicorn-21.2.0.dist-info/top_level.txt new file mode 100644 index 0000000..8f22dcc --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn-21.2.0.dist-info/top_level.txt @@ -0,0 +1 @@ +gunicorn diff --git a/venv/lib/python3.11/site-packages/gunicorn/__init__.py b/venv/lib/python3.11/site-packages/gunicorn/__init__.py new file mode 100644 index 0000000..adf5e89 
--- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/__init__.py @@ -0,0 +1,9 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +version_info = (21, 2, 0) +__version__ = ".".join([str(v) for v in version_info]) +SERVER = "gunicorn" +SERVER_SOFTWARE = "%s/%s" % (SERVER, __version__) diff --git a/venv/lib/python3.11/site-packages/gunicorn/__main__.py b/venv/lib/python3.11/site-packages/gunicorn/__main__.py new file mode 100644 index 0000000..49ba696 --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/__main__.py @@ -0,0 +1,7 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +from gunicorn.app.wsgiapp import run +run() diff --git a/venv/lib/python3.11/site-packages/gunicorn/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..4b9bdbb Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/__pycache__/__main__.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/__pycache__/__main__.cpython-311.pyc new file mode 100644 index 0000000..6961dbe Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/__pycache__/__main__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/__pycache__/arbiter.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/__pycache__/arbiter.cpython-311.pyc new file mode 100644 index 0000000..1520a38 Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/__pycache__/arbiter.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/__pycache__/config.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/__pycache__/config.cpython-311.pyc new file mode 100644 index 0000000..a1f6f9a Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/__pycache__/config.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/__pycache__/debug.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/__pycache__/debug.cpython-311.pyc new file mode 100644 index 0000000..2db8255 Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/__pycache__/debug.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/__pycache__/errors.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/__pycache__/errors.cpython-311.pyc new file mode 100644 index 0000000..4f97d33 Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/__pycache__/errors.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/__pycache__/glogging.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/__pycache__/glogging.cpython-311.pyc new file mode 100644 index 0000000..08c68ef Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/__pycache__/glogging.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/__pycache__/pidfile.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/__pycache__/pidfile.cpython-311.pyc new file mode 100644 index 0000000..e1d02db Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/__pycache__/pidfile.cpython-311.pyc differ diff --git 
a/venv/lib/python3.11/site-packages/gunicorn/__pycache__/reloader.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/__pycache__/reloader.cpython-311.pyc new file mode 100644 index 0000000..d2ddad9 Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/__pycache__/reloader.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/__pycache__/sock.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/__pycache__/sock.cpython-311.pyc new file mode 100644 index 0000000..e404d18 Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/__pycache__/sock.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/__pycache__/systemd.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/__pycache__/systemd.cpython-311.pyc new file mode 100644 index 0000000..03f5a90 Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/__pycache__/systemd.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/__pycache__/util.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/__pycache__/util.cpython-311.pyc new file mode 100644 index 0000000..aa4e713 Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/__pycache__/util.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/app/__init__.py b/venv/lib/python3.11/site-packages/gunicorn/app/__init__.py new file mode 100644 index 0000000..87f0611 --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/app/__init__.py @@ -0,0 +1,4 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. diff --git a/venv/lib/python3.11/site-packages/gunicorn/app/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/app/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..f28d9f1 Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/app/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/app/__pycache__/base.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/app/__pycache__/base.cpython-311.pyc new file mode 100644 index 0000000..9265c8a Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/app/__pycache__/base.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/app/__pycache__/pasterapp.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/app/__pycache__/pasterapp.cpython-311.pyc new file mode 100644 index 0000000..a75a4d3 Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/app/__pycache__/pasterapp.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/app/__pycache__/wsgiapp.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/app/__pycache__/wsgiapp.cpython-311.pyc new file mode 100644 index 0000000..35f51cf Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/app/__pycache__/wsgiapp.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/app/base.py b/venv/lib/python3.11/site-packages/gunicorn/app/base.py new file mode 100644 index 0000000..dbd05bc --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/app/base.py @@ -0,0 +1,236 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. 
+import importlib.util +import importlib.machinery +import os +import sys +import traceback + +from gunicorn import util +from gunicorn.arbiter import Arbiter +from gunicorn.config import Config, get_default_config_file +from gunicorn import debug + + +class BaseApplication(object): + """ + An application interface for configuring and loading + the various necessities for any given web framework. + """ + def __init__(self, usage=None, prog=None): + self.usage = usage + self.cfg = None + self.callable = None + self.prog = prog + self.logger = None + self.do_load_config() + + def do_load_config(self): + """ + Loads the configuration + """ + try: + self.load_default_config() + self.load_config() + except Exception as e: + print("\nError: %s" % str(e), file=sys.stderr) + sys.stderr.flush() + sys.exit(1) + + def load_default_config(self): + # init configuration + self.cfg = Config(self.usage, prog=self.prog) + + def init(self, parser, opts, args): + raise NotImplementedError + + def load(self): + raise NotImplementedError + + def load_config(self): + """ + Loads the configuration from one or several inputs: + the command line and/or a configuration file. + You have to override this method in your subclass. + """ + raise NotImplementedError + + def reload(self): + self.do_load_config() + if self.cfg.spew: + debug.spew() + + def wsgi(self): + if self.callable is None: + self.callable = self.load() + return self.callable + + def run(self): + try: + Arbiter(self).run() + except RuntimeError as e: + print("\nError: %s\n" % e, file=sys.stderr) + sys.stderr.flush() + sys.exit(1) + + +class Application(BaseApplication): + + # 'init' and 'load' methods are implemented by WSGIApplication. + # pylint: disable=abstract-method + + def chdir(self): + # chdir to the configured path before loading, + # default is the current dir + os.chdir(self.cfg.chdir) + + # add the path to sys.path + if self.cfg.chdir not in sys.path: + sys.path.insert(0, self.cfg.chdir) + + def get_config_from_filename(self, filename): + + if not os.path.exists(filename): + raise RuntimeError("%r doesn't exist" % filename) + + ext = os.path.splitext(filename)[1] + + try: + module_name = '__config__' + if ext in [".py", ".pyc"]: + spec = importlib.util.spec_from_file_location(module_name, filename) + else: + msg = "configuration file should have a valid Python extension.\n" + util.warn(msg) + loader_ = importlib.machinery.SourceFileLoader(module_name, filename) + spec = importlib.util.spec_from_file_location(module_name, filename, loader=loader_) + mod = importlib.util.module_from_spec(spec) + sys.modules[module_name] = mod + spec.loader.exec_module(mod) + except Exception: + print("Failed to read config file: %s" % filename, file=sys.stderr) + traceback.print_exc() + sys.stderr.flush() + sys.exit(1) + + return vars(mod) + + def get_config_from_module_name(self, module_name): + return vars(importlib.import_module(module_name)) + + def load_config_from_module_name_or_filename(self, location): + """ + Loads the configuration from a module name or a Python file; raises a RuntimeError + if the file is not a Python file, or stops the process if the configuration file contains a syntax error. 
+ """ + + if location.startswith("python:"): + module_name = location[len("python:"):] + cfg = self.get_config_from_module_name(module_name) + else: + if location.startswith("file:"): + filename = location[len("file:"):] + else: + filename = location + cfg = self.get_config_from_filename(filename) + + for k, v in cfg.items(): + # Ignore unknown names + if k not in self.cfg.settings: + continue + try: + self.cfg.set(k.lower(), v) + except Exception: + print("Invalid value for %s: %s\n" % (k, v), file=sys.stderr) + sys.stderr.flush() + raise + + return cfg + + def load_config_from_file(self, filename): + return self.load_config_from_module_name_or_filename(location=filename) + + def load_config(self): + # parse console args + parser = self.cfg.parser() + args = parser.parse_args() + + # optional settings from apps + cfg = self.init(parser, args, args.args) + + # set up import paths and follow symlinks + self.chdir() + + # Load up the any app specific configuration + if cfg: + for k, v in cfg.items(): + self.cfg.set(k.lower(), v) + + env_args = parser.parse_args(self.cfg.get_cmd_args_from_env()) + + if args.config: + self.load_config_from_file(args.config) + elif env_args.config: + self.load_config_from_file(env_args.config) + else: + default_config = get_default_config_file() + if default_config is not None: + self.load_config_from_file(default_config) + + # Load up environment configuration + for k, v in vars(env_args).items(): + if v is None: + continue + if k == "args": + continue + self.cfg.set(k.lower(), v) + + # Lastly, update the configuration with any command line settings. + for k, v in vars(args).items(): + if v is None: + continue + if k == "args": + continue + self.cfg.set(k.lower(), v) + + # current directory might be changed by the config now + # set up import paths and follow symlinks + self.chdir() + + def run(self): + if self.cfg.print_config: + print(self.cfg) + + if self.cfg.print_config or self.cfg.check_config: + try: + self.load() + except Exception: + msg = "\nError while loading the application:\n" + print(msg, file=sys.stderr) + traceback.print_exc() + sys.stderr.flush() + sys.exit(1) + sys.exit(0) + + if self.cfg.spew: + debug.spew() + + if self.cfg.daemon: + if os.environ.get('NOTIFY_SOCKET'): + msg = "Warning: you shouldn't specify `daemon = True`" \ + " when launching by systemd with `Type = notify`" + print(msg, file=sys.stderr, flush=True) + + util.daemonize(self.cfg.enable_stdio_inheritance) + + # set python paths + if self.cfg.pythonpath: + paths = self.cfg.pythonpath.split(",") + for path in paths: + pythonpath = os.path.abspath(path) + if pythonpath not in sys.path: + sys.path.insert(0, pythonpath) + + super().run() diff --git a/venv/lib/python3.11/site-packages/gunicorn/app/pasterapp.py b/venv/lib/python3.11/site-packages/gunicorn/app/pasterapp.py new file mode 100644 index 0000000..4c9fc7d --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/app/pasterapp.py @@ -0,0 +1,75 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. 
+ +import configparser +import os + +from paste.deploy import loadapp + +from gunicorn.app.wsgiapp import WSGIApplication +from gunicorn.config import get_default_config_file + + +def get_wsgi_app(config_uri, name=None, defaults=None): + if ':' not in config_uri: + config_uri = "config:%s" % config_uri + + return loadapp( + config_uri, + name=name, + relative_to=os.getcwd(), + global_conf=defaults, + ) + + +def has_logging_config(config_file): + parser = configparser.ConfigParser() + parser.read([config_file]) + return parser.has_section('loggers') + + +def serve(app, global_conf, **local_conf): + """\ + A Paste Deployment server runner. + + Example configuration: + + [server:main] + use = egg:gunicorn#main + host = 127.0.0.1 + port = 5000 + """ + config_file = global_conf['__file__'] + gunicorn_config_file = local_conf.pop('config', None) + + host = local_conf.pop('host', '') + port = local_conf.pop('port', '') + if host and port: + local_conf['bind'] = '%s:%s' % (host, port) + elif host: + local_conf['bind'] = host.split(',') + + class PasterServerApplication(WSGIApplication): + def load_config(self): + self.cfg.set("default_proc_name", config_file) + + if has_logging_config(config_file): + self.cfg.set("logconfig", config_file) + + if gunicorn_config_file: + self.load_config_from_file(gunicorn_config_file) + else: + default_gunicorn_config_file = get_default_config_file() + if default_gunicorn_config_file is not None: + self.load_config_from_file(default_gunicorn_config_file) + + for k, v in local_conf.items(): + if v is not None: + self.cfg.set(k.lower(), v) + + def load(self): + return app + + PasterServerApplication().run() diff --git a/venv/lib/python3.11/site-packages/gunicorn/app/wsgiapp.py b/venv/lib/python3.11/site-packages/gunicorn/app/wsgiapp.py new file mode 100644 index 0000000..36cfba9 --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/app/wsgiapp.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +import os + +from gunicorn.errors import ConfigError +from gunicorn.app.base import Application +from gunicorn import util + + +class WSGIApplication(Application): + def init(self, parser, opts, args): + self.app_uri = None + + if opts.paste: + from .pasterapp import has_logging_config + + config_uri = os.path.abspath(opts.paste) + config_file = config_uri.split('#')[0] + + if not os.path.exists(config_file): + raise ConfigError("%r not found" % config_file) + + self.cfg.set("default_proc_name", config_file) + self.app_uri = config_uri + + if has_logging_config(config_file): + self.cfg.set("logconfig", config_file) + + return + + if len(args) > 0: + self.cfg.set("default_proc_name", args[0]) + self.app_uri = args[0] + + def load_config(self): + super().load_config() + + if self.app_uri is None: + if self.cfg.wsgi_app is not None: + self.app_uri = self.cfg.wsgi_app + else: + raise ConfigError("No application module specified.") + + def load_wsgiapp(self): + return util.import_app(self.app_uri) + + def load_pasteapp(self): + from .pasterapp import get_wsgi_app + return get_wsgi_app(self.app_uri, defaults=self.cfg.paste_global_conf) + + def load(self): + if self.cfg.paste is not None: + return self.load_pasteapp() + else: + return self.load_wsgiapp() + + +def run(): + """\ + The ``gunicorn`` command line runner for launching Gunicorn with + generic WSGI applications. 
+ """ + from gunicorn.app.wsgiapp import WSGIApplication + WSGIApplication("%(prog)s [OPTIONS] [APP_MODULE]").run() + + +if __name__ == '__main__': + run() diff --git a/venv/lib/python3.11/site-packages/gunicorn/arbiter.py b/venv/lib/python3.11/site-packages/gunicorn/arbiter.py new file mode 100644 index 0000000..08523d4 --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/arbiter.py @@ -0,0 +1,672 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. +import errno +import os +import random +import select +import signal +import sys +import time +import traceback + +from gunicorn.errors import HaltServer, AppImportError +from gunicorn.pidfile import Pidfile +from gunicorn import sock, systemd, util + +from gunicorn import __version__, SERVER_SOFTWARE + + +class Arbiter(object): + """ + Arbiter maintain the workers processes alive. It launches or + kills them if needed. It also manages application reloading + via SIGHUP/USR2. + """ + + # A flag indicating if a worker failed to + # to boot. If a worker process exist with + # this error code, the arbiter will terminate. + WORKER_BOOT_ERROR = 3 + + # A flag indicating if an application failed to be loaded + APP_LOAD_ERROR = 4 + + START_CTX = {} + + LISTENERS = [] + WORKERS = {} + PIPE = [] + + # I love dynamic languages + SIG_QUEUE = [] + SIGNALS = [getattr(signal, "SIG%s" % x) + for x in "HUP QUIT INT TERM TTIN TTOU USR1 USR2 WINCH".split()] + SIG_NAMES = dict( + (getattr(signal, name), name[3:].lower()) for name in dir(signal) + if name[:3] == "SIG" and name[3] != "_" + ) + + def __init__(self, app): + os.environ["SERVER_SOFTWARE"] = SERVER_SOFTWARE + + self._num_workers = None + self._last_logged_active_worker_count = None + self.log = None + + self.setup(app) + + self.pidfile = None + self.systemd = False + self.worker_age = 0 + self.reexec_pid = 0 + self.master_pid = 0 + self.master_name = "Master" + + cwd = util.getcwd() + + args = sys.argv[:] + args.insert(0, sys.executable) + + # init start context + self.START_CTX = { + "args": args, + "cwd": cwd, + 0: sys.executable + } + + def _get_num_workers(self): + return self._num_workers + + def _set_num_workers(self, value): + old_value = self._num_workers + self._num_workers = value + self.cfg.nworkers_changed(self, value, old_value) + num_workers = property(_get_num_workers, _set_num_workers) + + def setup(self, app): + self.app = app + self.cfg = app.cfg + + if self.log is None: + self.log = self.cfg.logger_class(app.cfg) + + # reopen files + if 'GUNICORN_FD' in os.environ: + self.log.reopen_files() + + self.worker_class = self.cfg.worker_class + self.address = self.cfg.address + self.num_workers = self.cfg.workers + self.timeout = self.cfg.timeout + self.proc_name = self.cfg.proc_name + + self.log.debug('Current configuration:\n{0}'.format( + '\n'.join( + ' {0}: {1}'.format(config, value.value) + for config, value + in sorted(self.cfg.settings.items(), + key=lambda setting: setting[1])))) + + # set enviroment' variables + if self.cfg.env: + for k, v in self.cfg.env.items(): + os.environ[k] = v + + if self.cfg.preload_app: + self.app.wsgi() + + def start(self): + """\ + Initialize the arbiter. Start listening and set pidfile if needed. 
+ """ + self.log.info("Starting gunicorn %s", __version__) + + if 'GUNICORN_PID' in os.environ: + self.master_pid = int(os.environ.get('GUNICORN_PID')) + self.proc_name = self.proc_name + ".2" + self.master_name = "Master.2" + + self.pid = os.getpid() + if self.cfg.pidfile is not None: + pidname = self.cfg.pidfile + if self.master_pid != 0: + pidname += ".2" + self.pidfile = Pidfile(pidname) + self.pidfile.create(self.pid) + self.cfg.on_starting(self) + + self.init_signals() + + if not self.LISTENERS: + fds = None + listen_fds = systemd.listen_fds() + if listen_fds: + self.systemd = True + fds = range(systemd.SD_LISTEN_FDS_START, + systemd.SD_LISTEN_FDS_START + listen_fds) + + elif self.master_pid: + fds = [] + for fd in os.environ.pop('GUNICORN_FD').split(','): + fds.append(int(fd)) + + self.LISTENERS = sock.create_sockets(self.cfg, self.log, fds) + + listeners_str = ",".join([str(lnr) for lnr in self.LISTENERS]) + self.log.debug("Arbiter booted") + self.log.info("Listening at: %s (%s)", listeners_str, self.pid) + self.log.info("Using worker: %s", self.cfg.worker_class_str) + systemd.sd_notify("READY=1\nSTATUS=Gunicorn arbiter booted", self.log) + + # check worker class requirements + if hasattr(self.worker_class, "check_config"): + self.worker_class.check_config(self.cfg, self.log) + + self.cfg.when_ready(self) + + def init_signals(self): + """\ + Initialize master signal handling. Most of the signals + are queued. Child signals only wake up the master. + """ + # close old PIPE + for p in self.PIPE: + os.close(p) + + # initialize the pipe + self.PIPE = pair = os.pipe() + for p in pair: + util.set_non_blocking(p) + util.close_on_exec(p) + + self.log.close_on_exec() + + # initialize all signals + for s in self.SIGNALS: + signal.signal(s, self.signal) + signal.signal(signal.SIGCHLD, self.handle_chld) + + def signal(self, sig, frame): + if len(self.SIG_QUEUE) < 5: + self.SIG_QUEUE.append(sig) + self.wakeup() + + def run(self): + "Main master loop." + self.start() + util._setproctitle("master [%s]" % self.proc_name) + + try: + self.manage_workers() + + while True: + self.maybe_promote_master() + + sig = self.SIG_QUEUE.pop(0) if self.SIG_QUEUE else None + if sig is None: + self.sleep() + self.murder_workers() + self.manage_workers() + continue + + if sig not in self.SIG_NAMES: + self.log.info("Ignoring unknown signal: %s", sig) + continue + + signame = self.SIG_NAMES.get(sig) + handler = getattr(self, "handle_%s" % signame, None) + if not handler: + self.log.error("Unhandled signal: %s", signame) + continue + self.log.info("Handling signal: %s", signame) + handler() + self.wakeup() + except (StopIteration, KeyboardInterrupt): + self.halt() + except HaltServer as inst: + self.halt(reason=inst.reason, exit_status=inst.exit_status) + except SystemExit: + raise + except Exception: + self.log.error("Unhandled exception in main loop", + exc_info=True) + self.stop(False) + if self.pidfile is not None: + self.pidfile.unlink() + sys.exit(-1) + + def handle_chld(self, sig, frame): + "SIGCHLD handling" + self.reap_workers() + self.wakeup() + + def handle_hup(self): + """\ + HUP handling. 
+ - Reload configuration + - Start the new worker processes with a new configuration + - Gracefully shutdown the old worker processes + """ + self.log.info("Hang up: %s", self.master_name) + self.reload() + + def handle_term(self): + "SIGTERM handling" + raise StopIteration + + def handle_int(self): + "SIGINT handling" + self.stop(False) + raise StopIteration + + def handle_quit(self): + "SIGQUIT handling" + self.stop(False) + raise StopIteration + + def handle_ttin(self): + """\ + SIGTTIN handling. + Increases the number of workers by one. + """ + self.num_workers += 1 + self.manage_workers() + + def handle_ttou(self): + """\ + SIGTTOU handling. + Decreases the number of workers by one. + """ + if self.num_workers <= 1: + return + self.num_workers -= 1 + self.manage_workers() + + def handle_usr1(self): + """\ + SIGUSR1 handling. + Kill all workers by sending them a SIGUSR1 + """ + self.log.reopen_files() + self.kill_workers(signal.SIGUSR1) + + def handle_usr2(self): + """\ + SIGUSR2 handling. + Creates a new arbiter/worker set as a fork of the current + arbiter without affecting old workers. Use this to do live + deployment with the ability to backout a change. + """ + self.reexec() + + def handle_winch(self): + """SIGWINCH handling""" + if self.cfg.daemon: + self.log.info("graceful stop of workers") + self.num_workers = 0 + self.kill_workers(signal.SIGTERM) + else: + self.log.debug("SIGWINCH ignored. Not daemonized") + + def maybe_promote_master(self): + if self.master_pid == 0: + return + + if self.master_pid != os.getppid(): + self.log.info("Master has been promoted.") + # reset master infos + self.master_name = "Master" + self.master_pid = 0 + self.proc_name = self.cfg.proc_name + del os.environ['GUNICORN_PID'] + # rename the pidfile + if self.pidfile is not None: + self.pidfile.rename(self.cfg.pidfile) + # reset proctitle + util._setproctitle("master [%s]" % self.proc_name) + + def wakeup(self): + """\ + Wake up the arbiter by writing to the PIPE + """ + try: + os.write(self.PIPE[1], b'.') + except IOError as e: + if e.errno not in [errno.EAGAIN, errno.EINTR]: + raise + + def halt(self, reason=None, exit_status=0): + """ halt arbiter """ + self.stop() + + log_func = self.log.info if exit_status == 0 else self.log.error + log_func("Shutting down: %s", self.master_name) + if reason is not None: + log_func("Reason: %s", reason) + + if self.pidfile is not None: + self.pidfile.unlink() + self.cfg.on_exit(self) + sys.exit(exit_status) + + def sleep(self): + """\ + Sleep until PIPE is readable or we timeout. + A readable PIPE means a signal occurred. + """ + try: + ready = select.select([self.PIPE[0]], [], [], 1.0) + if not ready[0]: + return + while os.read(self.PIPE[0], 1): + pass + except (select.error, OSError) as e: + # TODO: select.error is a subclass of OSError since Python 3.3. + error_number = getattr(e, 'errno', e.args[0]) + if error_number not in [errno.EAGAIN, errno.EINTR]: + raise + except KeyboardInterrupt: + sys.exit() + + def stop(self, graceful=True): + """\ + Stop workers + + :attr graceful: boolean, If True (the default) workers will be + killed gracefully (ie. 
trying to wait for the current connection) + """ + unlink = ( + self.reexec_pid == self.master_pid == 0 + and not self.systemd + and not self.cfg.reuse_port + ) + sock.close_sockets(self.LISTENERS, unlink) + + self.LISTENERS = [] + sig = signal.SIGTERM + if not graceful: + sig = signal.SIGQUIT + limit = time.time() + self.cfg.graceful_timeout + # instruct the workers to exit + self.kill_workers(sig) + # wait until the graceful timeout + while self.WORKERS and time.time() < limit: + time.sleep(0.1) + + self.kill_workers(signal.SIGKILL) + + def reexec(self): + """\ + Relaunch the master and workers. + """ + if self.reexec_pid != 0: + self.log.warning("USR2 signal ignored. Child exists.") + return + + if self.master_pid != 0: + self.log.warning("USR2 signal ignored. Parent exists.") + return + + master_pid = os.getpid() + self.reexec_pid = os.fork() + if self.reexec_pid != 0: + return + + self.cfg.pre_exec(self) + + environ = self.cfg.env_orig.copy() + environ['GUNICORN_PID'] = str(master_pid) + + if self.systemd: + environ['LISTEN_PID'] = str(os.getpid()) + environ['LISTEN_FDS'] = str(len(self.LISTENERS)) + else: + environ['GUNICORN_FD'] = ','.join( + str(lnr.fileno()) for lnr in self.LISTENERS) + + os.chdir(self.START_CTX['cwd']) + + # exec the process using the original environment + os.execvpe(self.START_CTX[0], self.START_CTX['args'], environ) + + def reload(self): + old_address = self.cfg.address + + # reset old environment + for k in self.cfg.env: + if k in self.cfg.env_orig: + # reset the key to the value it had before + # we launched gunicorn + os.environ[k] = self.cfg.env_orig[k] + else: + # delete the value set by gunicorn + try: + del os.environ[k] + except KeyError: + pass + + # reload conf + self.app.reload() + self.setup(self.app) + + # reopen log files + self.log.reopen_files() + + # do we need to change listener ? + if old_address != self.cfg.address: + # close all listeners + for lnr in self.LISTENERS: + lnr.close() + # init new listeners + self.LISTENERS = sock.create_sockets(self.cfg, self.log) + listeners_str = ",".join([str(lnr) for lnr in self.LISTENERS]) + self.log.info("Listening at: %s", listeners_str) + + # do some actions on reload + self.cfg.on_reload(self) + + # unlink pidfile + if self.pidfile is not None: + self.pidfile.unlink() + + # create new pidfile + if self.cfg.pidfile is not None: + self.pidfile = Pidfile(self.cfg.pidfile) + self.pidfile.create(self.pid) + + # set new proc_name + util._setproctitle("master [%s]" % self.proc_name) + + # spawn new workers + for _ in range(self.cfg.workers): + self.spawn_worker() + + # manage workers + self.manage_workers() + + def murder_workers(self): + """\ + Kill unused/idle workers + """ + if not self.timeout: + return + workers = list(self.WORKERS.items()) + for (pid, worker) in workers: + try: + if time.time() - worker.tmp.last_update() <= self.timeout: + continue + except (OSError, ValueError): + continue + + if not worker.aborted: + self.log.critical("WORKER TIMEOUT (pid:%s)", pid) + worker.aborted = True + self.kill_worker(pid, signal.SIGABRT) + else: + self.kill_worker(pid, signal.SIGKILL) + + def reap_workers(self): + """\ + Reap workers to avoid zombie processes + """ + try: + while True: + wpid, status = os.waitpid(-1, os.WNOHANG) + if not wpid: + break + if self.reexec_pid == wpid: + self.reexec_pid = 0 + else: + # A worker was terminated. If the termination reason was + # that it could not boot, we'll shut it down to avoid + # infinite start/stop cycles. 
+ exitcode = status >> 8 + if exitcode != 0: + self.log.error('Worker (pid:%s) exited with code %s', wpid, exitcode) + if exitcode == self.WORKER_BOOT_ERROR: + reason = "Worker failed to boot." + raise HaltServer(reason, self.WORKER_BOOT_ERROR) + if exitcode == self.APP_LOAD_ERROR: + reason = "App failed to load." + raise HaltServer(reason, self.APP_LOAD_ERROR) + + if exitcode > 0: + # If the exit code of the worker is greater than 0, + # let the user know. + self.log.error("Worker (pid:%s) exited with code %s.", + wpid, exitcode) + elif status > 0: + # If the exit code of the worker is 0 and the status + # is greater than 0, then it was most likely killed + # via a signal. + try: + sig_name = signal.Signals(status).name + except ValueError: + sig_name = "code {}".format(status) + msg = "Worker (pid:{}) was sent {}!".format( + wpid, sig_name) + + # Additional hint for SIGKILL + if status == signal.SIGKILL: + msg += " Perhaps out of memory?" + self.log.error(msg) + + worker = self.WORKERS.pop(wpid, None) + if not worker: + continue + worker.tmp.close() + self.cfg.child_exit(self, worker) + except OSError as e: + if e.errno != errno.ECHILD: + raise + + def manage_workers(self): + """\ + Maintain the number of workers by spawning or killing + as required. + """ + if len(self.WORKERS) < self.num_workers: + self.spawn_workers() + + workers = self.WORKERS.items() + workers = sorted(workers, key=lambda w: w[1].age) + while len(workers) > self.num_workers: + (pid, _) = workers.pop(0) + self.kill_worker(pid, signal.SIGTERM) + + active_worker_count = len(workers) + if self._last_logged_active_worker_count != active_worker_count: + self._last_logged_active_worker_count = active_worker_count + self.log.debug("{0} workers".format(active_worker_count), + extra={"metric": "gunicorn.workers", + "value": active_worker_count, + "mtype": "gauge"}) + + def spawn_worker(self): + self.worker_age += 1 + worker = self.worker_class(self.worker_age, self.pid, self.LISTENERS, + self.app, self.timeout / 2.0, + self.cfg, self.log) + self.cfg.pre_fork(self, worker) + pid = os.fork() + if pid != 0: + worker.pid = pid + self.WORKERS[pid] = worker + return pid + + # Do not inherit the temporary files of other workers + for sibling in self.WORKERS.values(): + sibling.tmp.close() + + # Process Child + worker.pid = os.getpid() + try: + util._setproctitle("worker [%s]" % self.proc_name) + self.log.info("Booting worker with pid: %s", worker.pid) + self.cfg.post_fork(self, worker) + worker.init_process() + sys.exit(0) + except SystemExit: + raise + except AppImportError as e: + self.log.debug("Exception while loading the application", + exc_info=True) + print("%s" % e, file=sys.stderr) + sys.stderr.flush() + sys.exit(self.APP_LOAD_ERROR) + except Exception: + self.log.exception("Exception in worker process") + if not worker.booted: + sys.exit(self.WORKER_BOOT_ERROR) + sys.exit(-1) + finally: + self.log.info("Worker exiting (pid: %s)", worker.pid) + try: + worker.tmp.close() + self.cfg.worker_exit(self, worker) + except Exception: + self.log.warning("Exception during worker exit:\n%s", + traceback.format_exc()) + + def spawn_workers(self): + """\ + Spawn new workers as needed. + + This is where a worker process leaves the main loop + of the master process. 
+ """ + + for _ in range(self.num_workers - len(self.WORKERS)): + self.spawn_worker() + time.sleep(0.1 * random.random()) + + def kill_workers(self, sig): + """\ + Kill all workers with the signal `sig` + :attr sig: `signal.SIG*` value + """ + worker_pids = list(self.WORKERS.keys()) + for pid in worker_pids: + self.kill_worker(pid, sig) + + def kill_worker(self, pid, sig): + """\ + Kill a worker + + :attr pid: int, worker pid + :attr sig: `signal.SIG*` value + """ + try: + os.kill(pid, sig) + except OSError as e: + if e.errno == errno.ESRCH: + try: + worker = self.WORKERS.pop(pid) + worker.tmp.close() + self.cfg.worker_exit(self, worker) + return + except (KeyError, OSError): + return + raise diff --git a/venv/lib/python3.11/site-packages/gunicorn/config.py b/venv/lib/python3.11/site-packages/gunicorn/config.py new file mode 100644 index 0000000..84e7619 --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/config.py @@ -0,0 +1,2258 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +# Please remember to run "make -C docs html" after update "desc" attributes. + +import argparse +import copy +import grp +import inspect +import os +import pwd +import re +import shlex +import ssl +import sys +import textwrap + +from gunicorn import __version__, util +from gunicorn.errors import ConfigError +from gunicorn.reloader import reloader_engines + +KNOWN_SETTINGS = [] +PLATFORM = sys.platform + + +def make_settings(ignore=None): + settings = {} + ignore = ignore or () + for s in KNOWN_SETTINGS: + setting = s() + if setting.name in ignore: + continue + settings[setting.name] = setting.copy() + return settings + + +def auto_int(_, x): + # for compatible with octal numbers in python3 + if re.match(r'0(\d)', x, re.IGNORECASE): + x = x.replace('0', '0o', 1) + return int(x, 0) + + +class Config(object): + + def __init__(self, usage=None, prog=None): + self.settings = make_settings() + self.usage = usage + self.prog = prog or os.path.basename(sys.argv[0]) + self.env_orig = os.environ.copy() + + def __str__(self): + lines = [] + kmax = max(len(k) for k in self.settings) + for k in sorted(self.settings): + v = self.settings[k].value + if callable(v): + v = "<{}()>".format(v.__qualname__) + lines.append("{k:{kmax}} = {v}".format(k=k, v=v, kmax=kmax)) + return "\n".join(lines) + + def __getattr__(self, name): + if name not in self.settings: + raise AttributeError("No configuration setting for: %s" % name) + return self.settings[name].get() + + def __setattr__(self, name, value): + if name != "settings" and name in self.settings: + raise AttributeError("Invalid access!") + super().__setattr__(name, value) + + def set(self, name, value): + if name not in self.settings: + raise AttributeError("No configuration setting for: %s" % name) + self.settings[name].set(value) + + def get_cmd_args_from_env(self): + if 'GUNICORN_CMD_ARGS' in self.env_orig: + return shlex.split(self.env_orig['GUNICORN_CMD_ARGS']) + return [] + + def parser(self): + kwargs = { + "usage": self.usage, + "prog": self.prog + } + parser = argparse.ArgumentParser(**kwargs) + parser.add_argument("-v", "--version", + action="version", default=argparse.SUPPRESS, + version="%(prog)s (version " + __version__ + ")\n", + help="show program's version number and exit") + parser.add_argument("args", nargs="*", help=argparse.SUPPRESS) + + keys = sorted(self.settings, key=self.settings.__getitem__) + for k in keys: + self.settings[k].add_option(parser) + + return parser 
+ + @property + def worker_class_str(self): + uri = self.settings['worker_class'].get() + + # are we using a threaded worker? + is_sync = uri.endswith('SyncWorker') or uri == 'sync' + if is_sync and self.threads > 1: + return "gthread" + return uri + + @property + def worker_class(self): + uri = self.settings['worker_class'].get() + + # are we using a threaded worker? + is_sync = uri.endswith('SyncWorker') or uri == 'sync' + if is_sync and self.threads > 1: + uri = "gunicorn.workers.gthread.ThreadWorker" + + worker_class = util.load_class(uri) + if hasattr(worker_class, "setup"): + worker_class.setup() + return worker_class + + @property + def address(self): + s = self.settings['bind'].get() + return [util.parse_address(util.bytes_to_str(bind)) for bind in s] + + @property + def uid(self): + return self.settings['user'].get() + + @property + def gid(self): + return self.settings['group'].get() + + @property + def proc_name(self): + pn = self.settings['proc_name'].get() + if pn is not None: + return pn + else: + return self.settings['default_proc_name'].get() + + @property + def logger_class(self): + uri = self.settings['logger_class'].get() + if uri == "simple": + # support the default + uri = LoggerClass.default + + # if default logger is in use, and statsd is on, automagically switch + # to the statsd logger + if uri == LoggerClass.default: + if 'statsd_host' in self.settings and self.settings['statsd_host'].value is not None: + uri = "gunicorn.instrument.statsd.Statsd" + + logger_class = util.load_class( + uri, + default="gunicorn.glogging.Logger", + section="gunicorn.loggers") + + if hasattr(logger_class, "install"): + logger_class.install() + return logger_class + + @property + def is_ssl(self): + return self.certfile or self.keyfile + + @property + def ssl_options(self): + opts = {} + for name, value in self.settings.items(): + if value.section == 'SSL': + opts[name] = value.get() + return opts + + @property + def env(self): + raw_env = self.settings['raw_env'].get() + env = {} + + if not raw_env: + return env + + for e in raw_env: + s = util.bytes_to_str(e) + try: + k, v = s.split('=', 1) + except ValueError: + raise RuntimeError("environment setting %r invalid" % s) + + env[k] = v + + return env + + @property + def sendfile(self): + if self.settings['sendfile'].get() is not None: + return False + + if 'SENDFILE' in os.environ: + sendfile = os.environ['SENDFILE'].lower() + return sendfile in ['y', '1', 'yes', 'true'] + + return True + + @property + def reuse_port(self): + return self.settings['reuse_port'].get() + + @property + def paste_global_conf(self): + raw_global_conf = self.settings['raw_paste_global_conf'].get() + if raw_global_conf is None: + return None + + global_conf = {} + for e in raw_global_conf: + s = util.bytes_to_str(e) + try: + k, v = re.split(r'(?" 
% ( + self.__class__.__module__, + self.__class__.__name__, + id(self), + self.value, + ) + + +Setting = SettingMeta('Setting', (Setting,), {}) + + +def validate_bool(val): + if val is None: + return + + if isinstance(val, bool): + return val + if not isinstance(val, str): + raise TypeError("Invalid type for casting: %s" % val) + if val.lower().strip() == "true": + return True + elif val.lower().strip() == "false": + return False + else: + raise ValueError("Invalid boolean: %s" % val) + + +def validate_dict(val): + if not isinstance(val, dict): + raise TypeError("Value is not a dictionary: %s " % val) + return val + + +def validate_pos_int(val): + if not isinstance(val, int): + val = int(val, 0) + else: + # Booleans are ints! + val = int(val) + if val < 0: + raise ValueError("Value must be positive: %s" % val) + return val + + +def validate_ssl_version(val): + if val != SSLVersion.default: + sys.stderr.write("Warning: option `ssl_version` is deprecated and it is ignored. Use ssl_context instead.\n") + return val + + +def validate_string(val): + if val is None: + return None + if not isinstance(val, str): + raise TypeError("Not a string: %s" % val) + return val.strip() + + +def validate_file_exists(val): + if val is None: + return None + if not os.path.exists(val): + raise ValueError("File %s does not exists." % val) + return val + + +def validate_list_string(val): + if not val: + return [] + + # legacy syntax + if isinstance(val, str): + val = [val] + + return [validate_string(v) for v in val] + + +def validate_list_of_existing_files(val): + return [validate_file_exists(v) for v in validate_list_string(val)] + + +def validate_string_to_list(val): + val = validate_string(val) + + if not val: + return [] + + return [v.strip() for v in val.split(",") if v] + + +def validate_class(val): + if inspect.isfunction(val) or inspect.ismethod(val): + val = val() + if inspect.isclass(val): + return val + return validate_string(val) + + +def validate_callable(arity): + def _validate_callable(val): + if isinstance(val, str): + try: + mod_name, obj_name = val.rsplit(".", 1) + except ValueError: + raise TypeError("Value '%s' is not import string. 
" + "Format: module[.submodules...].object" % val) + try: + mod = __import__(mod_name, fromlist=[obj_name]) + val = getattr(mod, obj_name) + except ImportError as e: + raise TypeError(str(e)) + except AttributeError: + raise TypeError("Can not load '%s' from '%s'" + "" % (obj_name, mod_name)) + if not callable(val): + raise TypeError("Value is not callable: %s" % val) + if arity != -1 and arity != util.get_arity(val): + raise TypeError("Value must have an arity of: %s" % arity) + return val + return _validate_callable + + +def validate_user(val): + if val is None: + return os.geteuid() + if isinstance(val, int): + return val + elif val.isdigit(): + return int(val) + else: + try: + return pwd.getpwnam(val).pw_uid + except KeyError: + raise ConfigError("No such user: '%s'" % val) + + +def validate_group(val): + if val is None: + return os.getegid() + + if isinstance(val, int): + return val + elif val.isdigit(): + return int(val) + else: + try: + return grp.getgrnam(val).gr_gid + except KeyError: + raise ConfigError("No such group: '%s'" % val) + + +def validate_post_request(val): + val = validate_callable(-1)(val) + + largs = util.get_arity(val) + if largs == 4: + return val + elif largs == 3: + return lambda worker, req, env, _r: val(worker, req, env) + elif largs == 2: + return lambda worker, req, _e, _r: val(worker, req) + else: + raise TypeError("Value must have an arity of: 4") + + +def validate_chdir(val): + # valid if the value is a string + val = validate_string(val) + + # transform relative paths + path = os.path.abspath(os.path.normpath(os.path.join(util.getcwd(), val))) + + # test if the path exists + if not os.path.exists(path): + raise ConfigError("can't chdir to %r" % val) + + return path + + +def validate_statsd_address(val): + val = validate_string(val) + if val is None: + return None + + # As of major release 20, util.parse_address would recognize unix:PORT + # as a UDS address, breaking backwards compatibility. We defend against + # that regression here (this is also unit-tested). + # Feel free to remove in the next major release. + unix_hostname_regression = re.match(r'^unix:(\d+)$', val) + if unix_hostname_regression: + return ('unix', int(unix_hostname_regression.group(1))) + + try: + address = util.parse_address(val, default_port='8125') + except RuntimeError: + raise TypeError("Value must be one of ('host:port', 'unix://PATH')") + + return address + + +def validate_reload_engine(val): + if val not in reloader_engines: + raise ConfigError("Invalid reload_engine: %r" % val) + + return val + + +def get_default_config_file(): + config_path = os.path.join(os.path.abspath(os.getcwd()), + 'gunicorn.conf.py') + if os.path.exists(config_path): + return config_path + return None + + +class ConfigFile(Setting): + name = "config" + section = "Config File" + cli = ["-c", "--config"] + meta = "CONFIG" + validator = validate_string + default = "./gunicorn.conf.py" + desc = """\ + :ref:`The Gunicorn config file`. + + A string of the form ``PATH``, ``file:PATH``, or ``python:MODULE_NAME``. + + Only has an effect when specified on the command line or as part of an + application specific configuration. + + By default, a file named ``gunicorn.conf.py`` will be read from the same + directory where gunicorn is being run. + + .. versionchanged:: 19.4 + Loading the config from a Python module requires the ``python:`` + prefix. 
+ """ + + +class WSGIApp(Setting): + name = "wsgi_app" + section = "Config File" + meta = "STRING" + validator = validate_string + default = None + desc = """\ + A WSGI application path in pattern ``$(MODULE_NAME):$(VARIABLE_NAME)``. + + .. versionadded:: 20.1.0 + """ + + +class Bind(Setting): + name = "bind" + action = "append" + section = "Server Socket" + cli = ["-b", "--bind"] + meta = "ADDRESS" + validator = validate_list_string + + if 'PORT' in os.environ: + default = ['0.0.0.0:{0}'.format(os.environ.get('PORT'))] + else: + default = ['127.0.0.1:8000'] + + desc = """\ + The socket to bind. + + A string of the form: ``HOST``, ``HOST:PORT``, ``unix:PATH``, + ``fd://FD``. An IP is a valid ``HOST``. + + .. versionchanged:: 20.0 + Support for ``fd://FD`` got added. + + Multiple addresses can be bound. ex.:: + + $ gunicorn -b 127.0.0.1:8000 -b [::1]:8000 test:app + + will bind the `test:app` application on localhost both on ipv6 + and ipv4 interfaces. + + If the ``PORT`` environment variable is defined, the default + is ``['0.0.0.0:$PORT']``. If it is not defined, the default + is ``['127.0.0.1:8000']``. + """ + + +class Backlog(Setting): + name = "backlog" + section = "Server Socket" + cli = ["--backlog"] + meta = "INT" + validator = validate_pos_int + type = int + default = 2048 + desc = """\ + The maximum number of pending connections. + + This refers to the number of clients that can be waiting to be served. + Exceeding this number results in the client getting an error when + attempting to connect. It should only affect servers under significant + load. + + Must be a positive integer. Generally set in the 64-2048 range. + """ + + +class Workers(Setting): + name = "workers" + section = "Worker Processes" + cli = ["-w", "--workers"] + meta = "INT" + validator = validate_pos_int + type = int + default = int(os.environ.get("WEB_CONCURRENCY", 1)) + desc = """\ + The number of worker processes for handling requests. + + A positive integer generally in the ``2-4 x $(NUM_CORES)`` range. + You'll want to vary this a bit to find the best for your particular + application's work load. + + By default, the value of the ``WEB_CONCURRENCY`` environment variable, + which is set by some Platform-as-a-Service providers such as Heroku. If + it is not defined, the default is ``1``. + """ + + +class WorkerClass(Setting): + name = "worker_class" + section = "Worker Processes" + cli = ["-k", "--worker-class"] + meta = "STRING" + validator = validate_class + default = "sync" + desc = """\ + The type of workers to use. + + The default class (``sync``) should handle most "normal" types of + workloads. You'll want to read :doc:`design` for information on when + you might want to choose one of the other worker classes. Required + libraries may be installed using setuptools' ``extras_require`` feature. + + A string referring to one of the following bundled classes: + + * ``sync`` + * ``eventlet`` - Requires eventlet >= 0.24.1 (or install it via + ``pip install gunicorn[eventlet]``) + * ``gevent`` - Requires gevent >= 1.4 (or install it via + ``pip install gunicorn[gevent]``) + * ``tornado`` - Requires tornado >= 0.2 (or install it via + ``pip install gunicorn[tornado]``) + * ``gthread`` - Python 2 requires the futures package to be installed + (or install it via ``pip install gunicorn[gthread]``) + + Optionally, you can provide your own worker by giving Gunicorn a + Python path to a subclass of ``gunicorn.workers.base.Worker``. 
+ This alternative syntax will load the gevent class: + ``gunicorn.workers.ggevent.GeventWorker``. + """ + + +class WorkerThreads(Setting): + name = "threads" + section = "Worker Processes" + cli = ["--threads"] + meta = "INT" + validator = validate_pos_int + type = int + default = 1 + desc = """\ + The number of worker threads for handling requests. + + Run each worker with the specified number of threads. + + A positive integer generally in the ``2-4 x $(NUM_CORES)`` range. + You'll want to vary this a bit to find the best for your particular + application's work load. + + If it is not defined, the default is ``1``. + + This setting only affects the Gthread worker type. + + .. note:: + If you try to use the ``sync`` worker type and set the ``threads`` + setting to more than 1, the ``gthread`` worker type will be used + instead. + """ + + +class WorkerConnections(Setting): + name = "worker_connections" + section = "Worker Processes" + cli = ["--worker-connections"] + meta = "INT" + validator = validate_pos_int + type = int + default = 1000 + desc = """\ + The maximum number of simultaneous clients. + + This setting only affects the ``gthread``, ``eventlet`` and ``gevent`` worker types. + """ + + +class MaxRequests(Setting): + name = "max_requests" + section = "Worker Processes" + cli = ["--max-requests"] + meta = "INT" + validator = validate_pos_int + type = int + default = 0 + desc = """\ + The maximum number of requests a worker will process before restarting. + + Any value greater than zero will limit the number of requests a worker + will process before automatically restarting. This is a simple method + to help limit the damage of memory leaks. + + If this is set to zero (the default) then the automatic worker + restarts are disabled. + """ + + +class MaxRequestsJitter(Setting): + name = "max_requests_jitter" + section = "Worker Processes" + cli = ["--max-requests-jitter"] + meta = "INT" + validator = validate_pos_int + type = int + default = 0 + desc = """\ + The maximum jitter to add to the *max_requests* setting. + + The jitter causes the restart per worker to be randomized by + ``randint(0, max_requests_jitter)``. This is intended to stagger worker + restarts to avoid all workers restarting at the same time. + + .. versionadded:: 19.2 + """ + + +class Timeout(Setting): + name = "timeout" + section = "Worker Processes" + cli = ["-t", "--timeout"] + meta = "INT" + validator = validate_pos_int + type = int + default = 30 + desc = """\ + Workers silent for more than this many seconds are killed and restarted. + + Value is a positive number or 0. Setting it to 0 has the effect of + infinite timeouts by disabling timeouts for all workers entirely. + + Generally, the default of thirty seconds should suffice. Only set this + noticeably higher if you're sure of the repercussions for sync workers. + For the non sync workers it just means that the worker process is still + communicating and is not tied to the length of time required to handle a + single request. + """ + + +class GracefulTimeout(Setting): + name = "graceful_timeout" + section = "Worker Processes" + cli = ["--graceful-timeout"] + meta = "INT" + validator = validate_pos_int + type = int + default = 30 + desc = """\ + Timeout for graceful workers restart. + + After receiving a restart signal, workers have this much time to finish + serving requests. Workers still alive after the timeout (starting from + the receipt of the restart signal) are force killed. 
+ """ + + +class Keepalive(Setting): + name = "keepalive" + section = "Worker Processes" + cli = ["--keep-alive"] + meta = "INT" + validator = validate_pos_int + type = int + default = 2 + desc = """\ + The number of seconds to wait for requests on a Keep-Alive connection. + + Generally set in the 1-5 seconds range for servers with direct connection + to the client (e.g. when you don't have separate load balancer). When + Gunicorn is deployed behind a load balancer, it often makes sense to + set this to a higher value. + + .. note:: + ``sync`` worker does not support persistent connections and will + ignore this option. + """ + + +class LimitRequestLine(Setting): + name = "limit_request_line" + section = "Security" + cli = ["--limit-request-line"] + meta = "INT" + validator = validate_pos_int + type = int + default = 4094 + desc = """\ + The maximum size of HTTP request line in bytes. + + This parameter is used to limit the allowed size of a client's + HTTP request-line. Since the request-line consists of the HTTP + method, URI, and protocol version, this directive places a + restriction on the length of a request-URI allowed for a request + on the server. A server needs this value to be large enough to + hold any of its resource names, including any information that + might be passed in the query part of a GET request. Value is a number + from 0 (unlimited) to 8190. + + This parameter can be used to prevent any DDOS attack. + """ + + +class LimitRequestFields(Setting): + name = "limit_request_fields" + section = "Security" + cli = ["--limit-request-fields"] + meta = "INT" + validator = validate_pos_int + type = int + default = 100 + desc = """\ + Limit the number of HTTP headers fields in a request. + + This parameter is used to limit the number of headers in a request to + prevent DDOS attack. Used with the *limit_request_field_size* it allows + more safety. By default this value is 100 and can't be larger than + 32768. + """ + + +class LimitRequestFieldSize(Setting): + name = "limit_request_field_size" + section = "Security" + cli = ["--limit-request-field_size"] + meta = "INT" + validator = validate_pos_int + type = int + default = 8190 + desc = """\ + Limit the allowed size of an HTTP request header field. + + Value is a positive number or 0. Setting it to 0 will allow unlimited + header field sizes. + + .. warning:: + Setting this parameter to a very high or unlimited value can open + up for DDOS attacks. + """ + + +class Reload(Setting): + name = "reload" + section = 'Debugging' + cli = ['--reload'] + validator = validate_bool + action = 'store_true' + default = False + + desc = '''\ + Restart workers when code changes. + + This setting is intended for development. It will cause workers to be + restarted whenever application code changes. + + The reloader is incompatible with application preloading. When using a + paste configuration be sure that the server block does not import any + application code or the reload will not work as designed. + + The default behavior is to attempt inotify with a fallback to file + system polling. Generally, inotify should be preferred if available + because it consumes less system resources. + + .. note:: + In order to use the inotify reloader, you must have the ``inotify`` + package installed. 
+ ''' + + +class ReloadEngine(Setting): + name = "reload_engine" + section = "Debugging" + cli = ["--reload-engine"] + meta = "STRING" + validator = validate_reload_engine + default = "auto" + desc = """\ + The implementation that should be used to power :ref:`reload`. + + Valid engines are: + + * ``'auto'`` + * ``'poll'`` + * ``'inotify'`` (requires inotify) + + .. versionadded:: 19.7 + """ + + +class ReloadExtraFiles(Setting): + name = "reload_extra_files" + action = "append" + section = "Debugging" + cli = ["--reload-extra-file"] + meta = "FILES" + validator = validate_list_of_existing_files + default = [] + desc = """\ + Extends :ref:`reload` option to also watch and reload on additional files + (e.g., templates, configurations, specifications, etc.). + + .. versionadded:: 19.8 + """ + + +class Spew(Setting): + name = "spew" + section = "Debugging" + cli = ["--spew"] + validator = validate_bool + action = "store_true" + default = False + desc = """\ + Install a trace function that spews every line executed by the server. + + This is the nuclear option. + """ + + +class ConfigCheck(Setting): + name = "check_config" + section = "Debugging" + cli = ["--check-config"] + validator = validate_bool + action = "store_true" + default = False + desc = """\ + Check the configuration and exit. The exit status is 0 if the + configuration is correct, and 1 if the configuration is incorrect. + """ + + +class PrintConfig(Setting): + name = "print_config" + section = "Debugging" + cli = ["--print-config"] + validator = validate_bool + action = "store_true" + default = False + desc = """\ + Print the configuration settings as fully resolved. Implies :ref:`check-config`. + """ + + +class PreloadApp(Setting): + name = "preload_app" + section = "Server Mechanics" + cli = ["--preload"] + validator = validate_bool + action = "store_true" + default = False + desc = """\ + Load application code before the worker processes are forked. + + By preloading an application you can save some RAM resources as well as + speed up server boot times. Although, if you defer application loading + to each worker process, you can reload your application code easily by + restarting workers. + """ + + +class Sendfile(Setting): + name = "sendfile" + section = "Server Mechanics" + cli = ["--no-sendfile"] + validator = validate_bool + action = "store_const" + const = False + + desc = """\ + Disables the use of ``sendfile()``. + + If not set, the value of the ``SENDFILE`` environment variable is used + to enable or disable its usage. + + .. versionadded:: 19.2 + .. versionchanged:: 19.4 + Swapped ``--sendfile`` with ``--no-sendfile`` to actually allow + disabling. + .. versionchanged:: 19.6 + added support for the ``SENDFILE`` environment variable + """ + + +class ReusePort(Setting): + name = "reuse_port" + section = "Server Mechanics" + cli = ["--reuse-port"] + validator = validate_bool + action = "store_true" + default = False + + desc = """\ + Set the ``SO_REUSEPORT`` flag on the listening socket. + + .. versionadded:: 19.8 + """ + + +class Chdir(Setting): + name = "chdir" + section = "Server Mechanics" + cli = ["--chdir"] + validator = validate_chdir + default = util.getcwd() + default_doc = "``'.'``" + desc = """\ + Change directory to specified directory before loading apps. + """ + + +class Daemon(Setting): + name = "daemon" + section = "Server Mechanics" + cli = ["-D", "--daemon"] + validator = validate_bool + action = "store_true" + default = False + desc = """\ + Daemonize the Gunicorn process. 
+
+        Detaches the server from the controlling terminal and enters the
+        background.
+        """
+
+
+class Env(Setting):
+    name = "raw_env"
+    action = "append"
+    section = "Server Mechanics"
+    cli = ["-e", "--env"]
+    meta = "ENV"
+    validator = validate_list_string
+    default = []
+
+    desc = """\
+        Set environment variables in the execution environment.
+
+        Should be a list of strings in the ``key=value`` format.
+
+        For example on the command line:
+
+        .. code-block:: console
+
+            $ gunicorn -b 127.0.0.1:8000 --env FOO=1 test:app
+
+        Or in the configuration file:
+
+        .. code-block:: python
+
+            raw_env = ["FOO=1"]
+        """
+
+
+class Pidfile(Setting):
+    name = "pidfile"
+    section = "Server Mechanics"
+    cli = ["-p", "--pid"]
+    meta = "FILE"
+    validator = validate_string
+    default = None
+    desc = """\
+        A filename to use for the PID file.
+
+        If not set, no PID file will be written.
+        """
+
+
+class WorkerTmpDir(Setting):
+    name = "worker_tmp_dir"
+    section = "Server Mechanics"
+    cli = ["--worker-tmp-dir"]
+    meta = "DIR"
+    validator = validate_string
+    default = None
+    desc = """\
+        A directory to use for the worker heartbeat temporary file.
+
+        If not set, the default temporary directory will be used.
+
+        .. note::
+           The current heartbeat system involves calling ``os.fchmod`` on
+           temporary file handlers and may block a worker for arbitrary time
+           if the directory is on a disk-backed filesystem.
+
+           See :ref:`blocking-os-fchmod` for more detailed information
+           and a solution for avoiding this problem.
+        """
+
+
+class User(Setting):
+    name = "user"
+    section = "Server Mechanics"
+    cli = ["-u", "--user"]
+    meta = "USER"
+    validator = validate_user
+    default = os.geteuid()
+    default_doc = "``os.geteuid()``"
+    desc = """\
+        Switch worker processes to run as this user.
+
+        A valid user id (as an integer) or the name of a user that can be
+        retrieved with a call to ``pwd.getpwnam(value)`` or ``None`` to not
+        change the worker process user.
+        """
+
+
+class Group(Setting):
+    name = "group"
+    section = "Server Mechanics"
+    cli = ["-g", "--group"]
+    meta = "GROUP"
+    validator = validate_group
+    default = os.getegid()
+    default_doc = "``os.getegid()``"
+    desc = """\
+        Switch worker processes to run as this group.
+
+        A valid group id (as an integer) or the name of a group that can be
+        retrieved with a call to ``grp.getgrnam(value)`` or ``None`` to not
+        change the worker process group.
+        """
+
+
+class Umask(Setting):
+    name = "umask"
+    section = "Server Mechanics"
+    cli = ["-m", "--umask"]
+    meta = "INT"
+    validator = validate_pos_int
+    type = auto_int
+    default = 0
+    desc = """\
+        A bit mask for the file mode on files written by Gunicorn.
+
+        Note that this affects unix socket permissions.
+
+        A valid value for the ``os.umask(mode)`` call or a string compatible
+        with ``int(value, 0)`` (``0`` means Python guesses the base, so values
+        like ``0``, ``0xFF``, ``0022`` are valid for decimal, hex, and octal
+        representations)
+        """
+
+
+class Initgroups(Setting):
+    name = "initgroups"
+    section = "Server Mechanics"
+    cli = ["--initgroups"]
+    validator = validate_bool
+    action = 'store_true'
+    default = False
+
+    desc = """\
+        If true, set the worker process's group access list with all of the
+        groups of which the specified username is a member, plus the specified
+        group id.
+
+        .. 
versionadded:: 19.7 + """ + + +class TmpUploadDir(Setting): + name = "tmp_upload_dir" + section = "Server Mechanics" + meta = "DIR" + validator = validate_string + default = None + desc = """\ + Directory to store temporary request data as they are read. + + This may disappear in the near future. + + This path should be writable by the process permissions set for Gunicorn + workers. If not specified, Gunicorn will choose a system generated + temporary directory. + """ + + +class SecureSchemeHeader(Setting): + name = "secure_scheme_headers" + section = "Server Mechanics" + validator = validate_dict + default = { + "X-FORWARDED-PROTOCOL": "ssl", + "X-FORWARDED-PROTO": "https", + "X-FORWARDED-SSL": "on" + } + desc = """\ + + A dictionary containing headers and values that the front-end proxy + uses to indicate HTTPS requests. If the source IP is permitted by + ``forwarded-allow-ips`` (below), *and* at least one request header matches + a key-value pair listed in this dictionary, then Gunicorn will set + ``wsgi.url_scheme`` to ``https``, so your application can tell that the + request is secure. + + If the other headers listed in this dictionary are not present in the request, they will be ignored, + but if the other headers are present and do not match the provided values, then + the request will fail to parse. See the note below for more detailed examples of this behaviour. + + The dictionary should map upper-case header names to exact string + values. The value comparisons are case-sensitive, unlike the header + names, so make sure they're exactly what your front-end proxy sends + when handling HTTPS requests. + + It is important that your front-end proxy configuration ensures that + the headers defined here can not be passed directly from the client. + """ + + +class ForwardedAllowIPS(Setting): + name = "forwarded_allow_ips" + section = "Server Mechanics" + cli = ["--forwarded-allow-ips"] + meta = "STRING" + validator = validate_string_to_list + default = os.environ.get("FORWARDED_ALLOW_IPS", "127.0.0.1") + desc = """\ + Front-end's IPs from which allowed to handle set secure headers. + (comma separate). + + Set to ``*`` to disable checking of Front-end IPs (useful for setups + where you don't know in advance the IP address of Front-end, but + you still trust the environment). + + By default, the value of the ``FORWARDED_ALLOW_IPS`` environment + variable. If it is not defined, the default is ``"127.0.0.1"``. + + .. note:: + + The interplay between the request headers, the value of ``forwarded_allow_ips``, and the value of + ``secure_scheme_headers`` is complex. Various scenarios are documented below to further elaborate. + In each case, we have a request from the remote address 134.213.44.18, and the default value of + ``secure_scheme_headers``: + + .. code:: + + secure_scheme_headers = { + 'X-FORWARDED-PROTOCOL': 'ssl', + 'X-FORWARDED-PROTO': 'https', + 'X-FORWARDED-SSL': 'on' + } + + + .. list-table:: + :header-rows: 1 + :align: center + :widths: auto + + * - ``forwarded-allow-ips`` + - Secure Request Headers + - Result + - Explanation + * - .. code:: + + ["127.0.0.1"] + - .. code:: + + X-Forwarded-Proto: https + - .. code:: + + wsgi.url_scheme = "http" + - IP address was not allowed + * - .. code:: + + "*" + - + - .. code:: + + wsgi.url_scheme = "http" + - IP address allowed, but no secure headers provided + * - .. code:: + + "*" + - .. code:: + + X-Forwarded-Proto: https + - .. code:: + + wsgi.url_scheme = "https" + - IP address allowed, one request header matched + * - .. 
code:: + + ["134.213.44.18"] + - .. code:: + + X-Forwarded-Ssl: on + X-Forwarded-Proto: http + - ``InvalidSchemeHeaders()`` raised + - IP address allowed, but the two secure headers disagreed on if HTTPS was used + + + """ + + +class AccessLog(Setting): + name = "accesslog" + section = "Logging" + cli = ["--access-logfile"] + meta = "FILE" + validator = validate_string + default = None + desc = """\ + The Access log file to write to. + + ``'-'`` means log to stdout. + """ + + +class DisableRedirectAccessToSyslog(Setting): + name = "disable_redirect_access_to_syslog" + section = "Logging" + cli = ["--disable-redirect-access-to-syslog"] + validator = validate_bool + action = 'store_true' + default = False + desc = """\ + Disable redirect access logs to syslog. + + .. versionadded:: 19.8 + """ + + +class AccessLogFormat(Setting): + name = "access_log_format" + section = "Logging" + cli = ["--access-logformat"] + meta = "STRING" + validator = validate_string + default = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"' + desc = """\ + The access log format. + + =========== =========== + Identifier Description + =========== =========== + h remote address + l ``'-'`` + u user name + t date of the request + r status line (e.g. ``GET / HTTP/1.1``) + m request method + U URL path without query string + q query string + H protocol + s status + B response length + b response length or ``'-'`` (CLF format) + f referer + a user agent + T request time in seconds + M request time in milliseconds + D request time in microseconds + L request time in decimal seconds + p process ID + {header}i request header + {header}o response header + {variable}e environment variable + =========== =========== + + Use lowercase for header and environment variable names, and put + ``{...}x`` names inside ``%(...)s``. For example:: + + %({x-forwarded-for}i)s + """ + + +class ErrorLog(Setting): + name = "errorlog" + section = "Logging" + cli = ["--error-logfile", "--log-file"] + meta = "FILE" + validator = validate_string + default = '-' + desc = """\ + The Error log file to write to. + + Using ``'-'`` for FILE makes gunicorn log to stderr. + + .. versionchanged:: 19.2 + Log to stderr by default. + + """ + + +class Loglevel(Setting): + name = "loglevel" + section = "Logging" + cli = ["--log-level"] + meta = "LEVEL" + validator = validate_string + default = "info" + desc = """\ + The granularity of Error log outputs. + + Valid level names are: + + * ``'debug'`` + * ``'info'`` + * ``'warning'`` + * ``'error'`` + * ``'critical'`` + """ + + +class CaptureOutput(Setting): + name = "capture_output" + section = "Logging" + cli = ["--capture-output"] + validator = validate_bool + action = 'store_true' + default = False + desc = """\ + Redirect stdout/stderr to specified file in :ref:`errorlog`. + + .. versionadded:: 19.6 + """ + + +class LoggerClass(Setting): + name = "logger_class" + section = "Logging" + cli = ["--logger-class"] + meta = "STRING" + validator = validate_class + default = "gunicorn.glogging.Logger" + desc = """\ + The logger you want to use to log events in Gunicorn. + + The default class (``gunicorn.glogging.Logger``) handles most + normal usages in logging. It provides error and access logging. + + You can provide your own logger by giving Gunicorn a Python path to a + class that quacks like ``gunicorn.glogging.Logger``. 
+ """ + + +class LogConfig(Setting): + name = "logconfig" + section = "Logging" + cli = ["--log-config"] + meta = "FILE" + validator = validate_string + default = None + desc = """\ + The log config file to use. + Gunicorn uses the standard Python logging module's Configuration + file format. + """ + + +class LogConfigDict(Setting): + name = "logconfig_dict" + section = "Logging" + validator = validate_dict + default = {} + desc = """\ + The log config dictionary to use, using the standard Python + logging module's dictionary configuration format. This option + takes precedence over the :ref:`logconfig` and :ref:`logConfigJson` options, + which uses the older file configuration format and JSON + respectively. + + Format: https://docs.python.org/3/library/logging.config.html#logging.config.dictConfig + + For more context you can look at the default configuration dictionary for logging, + which can be found at ``gunicorn.glogging.CONFIG_DEFAULTS``. + + .. versionadded:: 19.8 + """ + + +class LogConfigJson(Setting): + name = "logconfig_json" + section = "Logging" + cli = ["--log-config-json"] + meta = "FILE" + validator = validate_string + default = None + desc = """\ + The log config to read config from a JSON file + + Format: https://docs.python.org/3/library/logging.config.html#logging.config.jsonConfig + + .. versionadded:: 20.0 + """ + + +class SyslogTo(Setting): + name = "syslog_addr" + section = "Logging" + cli = ["--log-syslog-to"] + meta = "SYSLOG_ADDR" + validator = validate_string + + if PLATFORM == "darwin": + default = "unix:///var/run/syslog" + elif PLATFORM in ('freebsd', 'dragonfly', ): + default = "unix:///var/run/log" + elif PLATFORM == "openbsd": + default = "unix:///dev/log" + else: + default = "udp://localhost:514" + + desc = """\ + Address to send syslog messages. + + Address is a string of the form: + + * ``unix://PATH#TYPE`` : for unix domain socket. ``TYPE`` can be ``stream`` + for the stream driver or ``dgram`` for the dgram driver. + ``stream`` is the default. + * ``udp://HOST:PORT`` : for UDP sockets + * ``tcp://HOST:PORT`` : for TCP sockets + + """ + + +class Syslog(Setting): + name = "syslog" + section = "Logging" + cli = ["--log-syslog"] + validator = validate_bool + action = 'store_true' + default = False + desc = """\ + Send *Gunicorn* logs to syslog. + + .. versionchanged:: 19.8 + You can now disable sending access logs by using the + :ref:`disable-redirect-access-to-syslog` setting. + """ + + +class SyslogPrefix(Setting): + name = "syslog_prefix" + section = "Logging" + cli = ["--log-syslog-prefix"] + meta = "SYSLOG_PREFIX" + validator = validate_string + default = None + desc = """\ + Makes Gunicorn use the parameter as program-name in the syslog entries. + + All entries will be prefixed by ``gunicorn.``. By default the + program name is the name of the process. + """ + + +class SyslogFacility(Setting): + name = "syslog_facility" + section = "Logging" + cli = ["--log-syslog-facility"] + meta = "SYSLOG_FACILITY" + validator = validate_string + default = "user" + desc = """\ + Syslog facility name + """ + + +class EnableStdioInheritance(Setting): + name = "enable_stdio_inheritance" + section = "Logging" + cli = ["-R", "--enable-stdio-inheritance"] + validator = validate_bool + default = False + action = "store_true" + desc = """\ + Enable stdio inheritance. + + Enable inheritance for stdio file descriptors in daemon mode. + + Note: To disable the Python stdout buffering, you can to set the user + environment variable ``PYTHONUNBUFFERED`` . 
+ """ + + +# statsD monitoring +class StatsdHost(Setting): + name = "statsd_host" + section = "Logging" + cli = ["--statsd-host"] + meta = "STATSD_ADDR" + default = None + validator = validate_statsd_address + desc = """\ + The address of the StatsD server to log to. + + Address is a string of the form: + + * ``unix://PATH`` : for a unix domain socket. + * ``HOST:PORT`` : for a network address + + .. versionadded:: 19.1 + """ + + +# Datadog Statsd (dogstatsd) tags. https://docs.datadoghq.com/developers/dogstatsd/ +class DogstatsdTags(Setting): + name = "dogstatsd_tags" + section = "Logging" + cli = ["--dogstatsd-tags"] + meta = "DOGSTATSD_TAGS" + default = "" + validator = validate_string + desc = """\ + A comma-delimited list of datadog statsd (dogstatsd) tags to append to + statsd metrics. + + .. versionadded:: 20 + """ + + +class StatsdPrefix(Setting): + name = "statsd_prefix" + section = "Logging" + cli = ["--statsd-prefix"] + meta = "STATSD_PREFIX" + default = "" + validator = validate_string + desc = """\ + Prefix to use when emitting statsd metrics (a trailing ``.`` is added, + if not provided). + + .. versionadded:: 19.2 + """ + + +class Procname(Setting): + name = "proc_name" + section = "Process Naming" + cli = ["-n", "--name"] + meta = "STRING" + validator = validate_string + default = None + desc = """\ + A base to use with setproctitle for process naming. + + This affects things like ``ps`` and ``top``. If you're going to be + running more than one instance of Gunicorn you'll probably want to set a + name to tell them apart. This requires that you install the setproctitle + module. + + If not set, the *default_proc_name* setting will be used. + """ + + +class DefaultProcName(Setting): + name = "default_proc_name" + section = "Process Naming" + validator = validate_string + default = "gunicorn" + desc = """\ + Internal setting that is adjusted for each type of application. + """ + + +class PythonPath(Setting): + name = "pythonpath" + section = "Server Mechanics" + cli = ["--pythonpath"] + meta = "STRING" + validator = validate_string + default = None + desc = """\ + A comma-separated list of directories to add to the Python path. + + e.g. + ``'/home/djangoprojects/myproject,/home/python/mylibrary'``. + """ + + +class Paste(Setting): + name = "paste" + section = "Server Mechanics" + cli = ["--paste", "--paster"] + meta = "STRING" + validator = validate_string + default = None + desc = """\ + Load a PasteDeploy config file. The argument may contain a ``#`` + symbol followed by the name of an app section from the config file, + e.g. ``production.ini#admin``. + + At this time, using alternate server blocks is not supported. Use the + command line arguments to control server configuration instead. + """ + + +class OnStarting(Setting): + name = "on_starting" + section = "Server Hooks" + validator = validate_callable(1) + type = callable + + def on_starting(server): + pass + default = staticmethod(on_starting) + desc = """\ + Called just before the master process is initialized. + + The callable needs to accept a single instance variable for the Arbiter. + """ + + +class OnReload(Setting): + name = "on_reload" + section = "Server Hooks" + validator = validate_callable(1) + type = callable + + def on_reload(server): + pass + default = staticmethod(on_reload) + desc = """\ + Called to recycle workers during a reload via SIGHUP. + + The callable needs to accept a single instance variable for the Arbiter. 
+ """ + + +class WhenReady(Setting): + name = "when_ready" + section = "Server Hooks" + validator = validate_callable(1) + type = callable + + def when_ready(server): + pass + default = staticmethod(when_ready) + desc = """\ + Called just after the server is started. + + The callable needs to accept a single instance variable for the Arbiter. + """ + + +class Prefork(Setting): + name = "pre_fork" + section = "Server Hooks" + validator = validate_callable(2) + type = callable + + def pre_fork(server, worker): + pass + default = staticmethod(pre_fork) + desc = """\ + Called just before a worker is forked. + + The callable needs to accept two instance variables for the Arbiter and + new Worker. + """ + + +class Postfork(Setting): + name = "post_fork" + section = "Server Hooks" + validator = validate_callable(2) + type = callable + + def post_fork(server, worker): + pass + default = staticmethod(post_fork) + desc = """\ + Called just after a worker has been forked. + + The callable needs to accept two instance variables for the Arbiter and + new Worker. + """ + + +class PostWorkerInit(Setting): + name = "post_worker_init" + section = "Server Hooks" + validator = validate_callable(1) + type = callable + + def post_worker_init(worker): + pass + + default = staticmethod(post_worker_init) + desc = """\ + Called just after a worker has initialized the application. + + The callable needs to accept one instance variable for the initialized + Worker. + """ + + +class WorkerInt(Setting): + name = "worker_int" + section = "Server Hooks" + validator = validate_callable(1) + type = callable + + def worker_int(worker): + pass + + default = staticmethod(worker_int) + desc = """\ + Called just after a worker exited on SIGINT or SIGQUIT. + + The callable needs to accept one instance variable for the initialized + Worker. + """ + + +class WorkerAbort(Setting): + name = "worker_abort" + section = "Server Hooks" + validator = validate_callable(1) + type = callable + + def worker_abort(worker): + pass + + default = staticmethod(worker_abort) + desc = """\ + Called when a worker received the SIGABRT signal. + + This call generally happens on timeout. + + The callable needs to accept one instance variable for the initialized + Worker. + """ + + +class PreExec(Setting): + name = "pre_exec" + section = "Server Hooks" + validator = validate_callable(1) + type = callable + + def pre_exec(server): + pass + default = staticmethod(pre_exec) + desc = """\ + Called just before a new master process is forked. + + The callable needs to accept a single instance variable for the Arbiter. + """ + + +class PreRequest(Setting): + name = "pre_request" + section = "Server Hooks" + validator = validate_callable(2) + type = callable + + def pre_request(worker, req): + worker.log.debug("%s %s", req.method, req.path) + default = staticmethod(pre_request) + desc = """\ + Called just before a worker processes the request. + + The callable needs to accept two instance variables for the Worker and + the Request. + """ + + +class PostRequest(Setting): + name = "post_request" + section = "Server Hooks" + validator = validate_post_request + type = callable + + def post_request(worker, req, environ, resp): + pass + default = staticmethod(post_request) + desc = """\ + Called after a worker processes the request. + + The callable needs to accept two instance variables for the Worker and + the Request. 
+ """ + + +class ChildExit(Setting): + name = "child_exit" + section = "Server Hooks" + validator = validate_callable(2) + type = callable + + def child_exit(server, worker): + pass + default = staticmethod(child_exit) + desc = """\ + Called just after a worker has been exited, in the master process. + + The callable needs to accept two instance variables for the Arbiter and + the just-exited Worker. + + .. versionadded:: 19.7 + """ + + +class WorkerExit(Setting): + name = "worker_exit" + section = "Server Hooks" + validator = validate_callable(2) + type = callable + + def worker_exit(server, worker): + pass + default = staticmethod(worker_exit) + desc = """\ + Called just after a worker has been exited, in the worker process. + + The callable needs to accept two instance variables for the Arbiter and + the just-exited Worker. + """ + + +class NumWorkersChanged(Setting): + name = "nworkers_changed" + section = "Server Hooks" + validator = validate_callable(3) + type = callable + + def nworkers_changed(server, new_value, old_value): + pass + default = staticmethod(nworkers_changed) + desc = """\ + Called just after *num_workers* has been changed. + + The callable needs to accept an instance variable of the Arbiter and + two integers of number of workers after and before change. + + If the number of workers is set for the first time, *old_value* would + be ``None``. + """ + + +class OnExit(Setting): + name = "on_exit" + section = "Server Hooks" + validator = validate_callable(1) + + def on_exit(server): + pass + + default = staticmethod(on_exit) + desc = """\ + Called just before exiting Gunicorn. + + The callable needs to accept a single instance variable for the Arbiter. + """ + + +class NewSSLContext(Setting): + name = "ssl_context" + section = "Server Hooks" + validator = validate_callable(2) + type = callable + + def ssl_context(config, default_ssl_context_factory): + return default_ssl_context_factory() + + default = staticmethod(ssl_context) + desc = """\ + Called when SSLContext is needed. + + Allows customizing SSL context. + + The callable needs to accept an instance variable for the Config and + a factory function that returns default SSLContext which is initialized + with certificates, private key, cert_reqs, and ciphers according to + config and can be further customized by the callable. + The callable needs to return SSLContext object. + + Following example shows a configuration file that sets the minimum TLS version to 1.3: + + .. code-block:: python + + def ssl_context(conf, default_ssl_context_factory): + import ssl + context = default_ssl_context_factory() + context.minimum_version = ssl.TLSVersion.TLSv1_3 + return context + + .. versionadded:: 20.2 + """ + + +class ProxyProtocol(Setting): + name = "proxy_protocol" + section = "Server Mechanics" + cli = ["--proxy-protocol"] + validator = validate_bool + default = False + action = "store_true" + desc = """\ + Enable detect PROXY protocol (PROXY mode). + + Allow using HTTP and Proxy together. It may be useful for work with + stunnel as HTTPS frontend and Gunicorn as HTTP server. 
+
+    PROXY protocol: http://haproxy.1wt.eu/download/1.5/doc/proxy-protocol.txt
+
+    Example for stunnel config::
+
+        [https]
+        protocol = proxy
+        accept = 443
+        connect = 80
+        cert = /etc/ssl/certs/stunnel.pem
+        key = /etc/ssl/certs/stunnel.key
+    """
+
+
+class ProxyAllowFrom(Setting):
+    name = "proxy_allow_ips"
+    section = "Server Mechanics"
+    cli = ["--proxy-allow-from"]
+    validator = validate_string_to_list
+    default = "127.0.0.1"
+    desc = """\
+    Front-end IPs that are allowed to send proxy requests (comma-separated).
+
+    Set to ``*`` to disable checking of front-end IPs. This is useful for
+    setups where you don't know the front-end IP in advance but still trust
+    the environment.
+    """
+
+
+class KeyFile(Setting):
+    name = "keyfile"
+    section = "SSL"
+    cli = ["--keyfile"]
+    meta = "FILE"
+    validator = validate_string
+    default = None
+    desc = """\
+    SSL key file
+    """
+
+
+class CertFile(Setting):
+    name = "certfile"
+    section = "SSL"
+    cli = ["--certfile"]
+    meta = "FILE"
+    validator = validate_string
+    default = None
+    desc = """\
+    SSL certificate file
+    """
+
+
+class SSLVersion(Setting):
+    name = "ssl_version"
+    section = "SSL"
+    cli = ["--ssl-version"]
+    validator = validate_ssl_version
+
+    if hasattr(ssl, "PROTOCOL_TLS"):
+        default = ssl.PROTOCOL_TLS
+    else:
+        default = ssl.PROTOCOL_SSLv23
+
+    desc = """\
+    SSL version to use (see the stdlib ``ssl`` module).
+
+    .. deprecated:: 20.2
+       The option is deprecated and it is currently ignored. Use :ref:`ssl-context` instead.
+
+    ============= ============
+    --ssl-version Description
+    ============= ============
+    SSLv3         SSLv3 is not-secure and is strongly discouraged.
+    SSLv23        Alias for TLS. Deprecated in Python 3.6, use TLS.
+    TLS           Negotiate highest possible version between client/server.
+                  Can yield SSL. (Python 3.6+)
+    TLSv1         TLS 1.0
+    TLSv1_1       TLS 1.1 (Python 3.4+)
+    TLSv1_2       TLS 1.2 (Python 3.4+)
+    TLS_SERVER    Auto-negotiate the highest protocol version like TLS,
+                  but only support server-side SSLSocket connections.
+                  (Python 3.6+)
+    ============= ============
+
+    .. versionchanged:: 19.7
+       The default value has been changed from ``ssl.PROTOCOL_TLSv1`` to
+       ``ssl.PROTOCOL_SSLv23``.
+    .. versionchanged:: 20.0
+       This setting now accepts string names based on ``ssl.PROTOCOL_``
+       constants.
+    .. versionchanged:: 20.0.1
+       The default value has been changed from ``ssl.PROTOCOL_SSLv23`` to
+       ``ssl.PROTOCOL_TLS`` when Python >= 3.6 .
+ """ + + +class CertReqs(Setting): + name = "cert_reqs" + section = "SSL" + cli = ["--cert-reqs"] + validator = validate_pos_int + default = ssl.CERT_NONE + desc = """\ + Whether client certificate is required (see stdlib ssl module's) + + =========== =========================== + --cert-reqs Description + =========== =========================== + `0` no client veirifcation + `1` ssl.CERT_OPTIONAL + `2` ssl.CERT_REQUIRED + =========== =========================== + """ + + +class CACerts(Setting): + name = "ca_certs" + section = "SSL" + cli = ["--ca-certs"] + meta = "FILE" + validator = validate_string + default = None + desc = """\ + CA certificates file + """ + + +class SuppressRaggedEOFs(Setting): + name = "suppress_ragged_eofs" + section = "SSL" + cli = ["--suppress-ragged-eofs"] + action = "store_true" + default = True + validator = validate_bool + desc = """\ + Suppress ragged EOFs (see stdlib ssl module's) + """ + + +class DoHandshakeOnConnect(Setting): + name = "do_handshake_on_connect" + section = "SSL" + cli = ["--do-handshake-on-connect"] + validator = validate_bool + action = "store_true" + default = False + desc = """\ + Whether to perform SSL handshake on socket connect (see stdlib ssl module's) + """ + + +class Ciphers(Setting): + name = "ciphers" + section = "SSL" + cli = ["--ciphers"] + validator = validate_string + default = None + desc = """\ + SSL Cipher suite to use, in the format of an OpenSSL cipher list. + + By default we use the default cipher list from Python's ``ssl`` module, + which contains ciphers considered strong at the time of each Python + release. + + As a recommended alternative, the Open Web App Security Project (OWASP) + offers `a vetted set of strong cipher strings rated A+ to C- + `_. + OWASP provides details on user-agent compatibility at each security level. + + See the `OpenSSL Cipher List Format Documentation + `_ + for details on the format of an OpenSSL cipher list. + """ + + +class PasteGlobalConf(Setting): + name = "raw_paste_global_conf" + action = "append" + section = "Server Mechanics" + cli = ["--paste-global"] + meta = "CONF" + validator = validate_list_string + default = [] + + desc = """\ + Set a PasteDeploy global config variable in ``key=value`` form. + + The option can be specified multiple times. + + The variables are passed to the the PasteDeploy entrypoint. Example:: + + $ gunicorn -b 127.0.0.1:8000 --paste development.ini --paste-global FOO=1 --paste-global BAR=2 + + .. versionadded:: 19.7 + """ + + +class StripHeaderSpaces(Setting): + name = "strip_header_spaces" + section = "Server Mechanics" + cli = ["--strip-header-spaces"] + validator = validate_bool + action = "store_true" + default = False + desc = """\ + Strip spaces present between the header name and the the ``:``. + + This is known to induce vulnerabilities and is not compliant with the HTTP/1.1 standard. + See https://portswigger.net/research/http-desync-attacks-request-smuggling-reborn. + + Use with care and only if necessary. + """ diff --git a/venv/lib/python3.11/site-packages/gunicorn/debug.py b/venv/lib/python3.11/site-packages/gunicorn/debug.py new file mode 100644 index 0000000..a492df9 --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/debug.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. 
+ +"""The debug module contains utilities and functions for better +debugging Gunicorn.""" + +import sys +import linecache +import re +import inspect + +__all__ = ['spew', 'unspew'] + +_token_spliter = re.compile(r'\W+') + + +class Spew(object): + + def __init__(self, trace_names=None, show_values=True): + self.trace_names = trace_names + self.show_values = show_values + + def __call__(self, frame, event, arg): + if event == 'line': + lineno = frame.f_lineno + if '__file__' in frame.f_globals: + filename = frame.f_globals['__file__'] + if (filename.endswith('.pyc') or + filename.endswith('.pyo')): + filename = filename[:-1] + name = frame.f_globals['__name__'] + line = linecache.getline(filename, lineno) + else: + name = '[unknown]' + try: + src = inspect.getsourcelines(frame) + line = src[lineno] + except IOError: + line = 'Unknown code named [%s]. VM instruction #%d' % ( + frame.f_code.co_name, frame.f_lasti) + if self.trace_names is None or name in self.trace_names: + print('%s:%s: %s' % (name, lineno, line.rstrip())) + if not self.show_values: + return self + details = [] + tokens = _token_spliter.split(line) + for tok in tokens: + if tok in frame.f_globals: + details.append('%s=%r' % (tok, frame.f_globals[tok])) + if tok in frame.f_locals: + details.append('%s=%r' % (tok, frame.f_locals[tok])) + if details: + print("\t%s" % ' '.join(details)) + return self + + +def spew(trace_names=None, show_values=False): + """Install a trace hook which writes incredibly detailed logs + about what code is being executed to stdout. + """ + sys.settrace(Spew(trace_names, show_values)) + + +def unspew(): + """Remove the trace hook installed by spew. + """ + sys.settrace(None) diff --git a/venv/lib/python3.11/site-packages/gunicorn/errors.py b/venv/lib/python3.11/site-packages/gunicorn/errors.py new file mode 100644 index 0000000..727d336 --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/errors.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +# We don't need to call super() in __init__ methods of our +# BaseException and Exception classes because we also define +# our own __str__ methods so there is no need to pass 'message' +# to the base class to get a meaningful output from 'str(exc)'. +# pylint: disable=super-init-not-called + + +# we inherit from BaseException here to make sure to not be caught +# at application level +class HaltServer(BaseException): + def __init__(self, reason, exit_status=1): + self.reason = reason + self.exit_status = exit_status + + def __str__(self): + return "" % (self.reason, self.exit_status) + + +class ConfigError(Exception): + """ Exception raised on config error """ + + +class AppImportError(Exception): + """ Exception raised when loading an application """ diff --git a/venv/lib/python3.11/site-packages/gunicorn/glogging.py b/venv/lib/python3.11/site-packages/gunicorn/glogging.py new file mode 100644 index 0000000..b552e26 --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/glogging.py @@ -0,0 +1,474 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. 
+ +import base64 +import binascii +import json +import time +import logging +logging.Logger.manager.emittedNoHandlerWarning = 1 # noqa +from logging.config import dictConfig +from logging.config import fileConfig +import os +import socket +import sys +import threading +import traceback + +from gunicorn import util + + +# syslog facility codes +SYSLOG_FACILITIES = { + "auth": 4, + "authpriv": 10, + "cron": 9, + "daemon": 3, + "ftp": 11, + "kern": 0, + "lpr": 6, + "mail": 2, + "news": 7, + "security": 4, # DEPRECATED + "syslog": 5, + "user": 1, + "uucp": 8, + "local0": 16, + "local1": 17, + "local2": 18, + "local3": 19, + "local4": 20, + "local5": 21, + "local6": 22, + "local7": 23 +} + +CONFIG_DEFAULTS = { + "version": 1, + "disable_existing_loggers": False, + "root": {"level": "INFO", "handlers": ["console"]}, + "loggers": { + "gunicorn.error": { + "level": "INFO", + "handlers": ["error_console"], + "propagate": True, + "qualname": "gunicorn.error" + }, + + "gunicorn.access": { + "level": "INFO", + "handlers": ["console"], + "propagate": True, + "qualname": "gunicorn.access" + } + }, + "handlers": { + "console": { + "class": "logging.StreamHandler", + "formatter": "generic", + "stream": "ext://sys.stdout" + }, + "error_console": { + "class": "logging.StreamHandler", + "formatter": "generic", + "stream": "ext://sys.stderr" + }, + }, + "formatters": { + "generic": { + "format": "%(asctime)s [%(process)d] [%(levelname)s] %(message)s", + "datefmt": "[%Y-%m-%d %H:%M:%S %z]", + "class": "logging.Formatter" + } + } +} + + +def loggers(): + """ get list of all loggers """ + root = logging.root + existing = list(root.manager.loggerDict.keys()) + return [logging.getLogger(name) for name in existing] + + +class SafeAtoms(dict): + + def __init__(self, atoms): + dict.__init__(self) + for key, value in atoms.items(): + if isinstance(value, str): + self[key] = value.replace('"', '\\"') + else: + self[key] = value + + def __getitem__(self, k): + if k.startswith("{"): + kl = k.lower() + if kl in self: + return super().__getitem__(kl) + else: + return "-" + if k in self: + return super().__getitem__(k) + else: + return '-' + + +def parse_syslog_address(addr): + + # unix domain socket type depends on backend + # SysLogHandler will try both when given None + if addr.startswith("unix://"): + sock_type = None + + # set socket type only if explicitly requested + parts = addr.split("#", 1) + if len(parts) == 2: + addr = parts[0] + if parts[1] == "dgram": + sock_type = socket.SOCK_DGRAM + + return (sock_type, addr.split("unix://")[1]) + + if addr.startswith("udp://"): + addr = addr.split("udp://")[1] + socktype = socket.SOCK_DGRAM + elif addr.startswith("tcp://"): + addr = addr.split("tcp://")[1] + socktype = socket.SOCK_STREAM + else: + raise RuntimeError("invalid syslog address") + + if '[' in addr and ']' in addr: + host = addr.split(']')[0][1:].lower() + elif ':' in addr: + host = addr.split(':')[0].lower() + elif addr == "": + host = "localhost" + else: + host = addr.lower() + + addr = addr.split(']')[-1] + if ":" in addr: + port = addr.split(':', 1)[1] + if not port.isdigit(): + raise RuntimeError("%r is not a valid port number." 
% port) + port = int(port) + else: + port = 514 + + return (socktype, (host, port)) + + +class Logger(object): + + LOG_LEVELS = { + "critical": logging.CRITICAL, + "error": logging.ERROR, + "warning": logging.WARNING, + "info": logging.INFO, + "debug": logging.DEBUG + } + loglevel = logging.INFO + + error_fmt = r"%(asctime)s [%(process)d] [%(levelname)s] %(message)s" + datefmt = r"[%Y-%m-%d %H:%M:%S %z]" + + access_fmt = "%(message)s" + syslog_fmt = "[%(process)d] %(message)s" + + atoms_wrapper_class = SafeAtoms + + def __init__(self, cfg): + self.error_log = logging.getLogger("gunicorn.error") + self.error_log.propagate = False + self.access_log = logging.getLogger("gunicorn.access") + self.access_log.propagate = False + self.error_handlers = [] + self.access_handlers = [] + self.logfile = None + self.lock = threading.Lock() + self.cfg = cfg + self.setup(cfg) + + def setup(self, cfg): + self.loglevel = self.LOG_LEVELS.get(cfg.loglevel.lower(), logging.INFO) + self.error_log.setLevel(self.loglevel) + self.access_log.setLevel(logging.INFO) + + # set gunicorn.error handler + if self.cfg.capture_output and cfg.errorlog != "-": + for stream in sys.stdout, sys.stderr: + stream.flush() + + self.logfile = open(cfg.errorlog, 'a+') + os.dup2(self.logfile.fileno(), sys.stdout.fileno()) + os.dup2(self.logfile.fileno(), sys.stderr.fileno()) + + self._set_handler(self.error_log, cfg.errorlog, + logging.Formatter(self.error_fmt, self.datefmt)) + + # set gunicorn.access handler + if cfg.accesslog is not None: + self._set_handler( + self.access_log, cfg.accesslog, + fmt=logging.Formatter(self.access_fmt), stream=sys.stdout + ) + + # set syslog handler + if cfg.syslog: + self._set_syslog_handler( + self.error_log, cfg, self.syslog_fmt, "error" + ) + if not cfg.disable_redirect_access_to_syslog: + self._set_syslog_handler( + self.access_log, cfg, self.syslog_fmt, "access" + ) + + if cfg.logconfig_dict: + config = CONFIG_DEFAULTS.copy() + config.update(cfg.logconfig_dict) + try: + dictConfig(config) + except ( + AttributeError, + ImportError, + ValueError, + TypeError + ) as exc: + raise RuntimeError(str(exc)) + elif cfg.logconfig_json: + config = CONFIG_DEFAULTS.copy() + if os.path.exists(cfg.logconfig_json): + try: + config_json = json.load(open(cfg.logconfig_json)) + config.update(config_json) + dictConfig(config) + except ( + json.JSONDecodeError, + AttributeError, + ImportError, + ValueError, + TypeError + ) as exc: + raise RuntimeError(str(exc)) + elif cfg.logconfig: + if os.path.exists(cfg.logconfig): + defaults = CONFIG_DEFAULTS.copy() + defaults['__file__'] = cfg.logconfig + defaults['here'] = os.path.dirname(cfg.logconfig) + fileConfig(cfg.logconfig, defaults=defaults, + disable_existing_loggers=False) + else: + msg = "Error: log config '%s' not found" + raise RuntimeError(msg % cfg.logconfig) + + def critical(self, msg, *args, **kwargs): + self.error_log.critical(msg, *args, **kwargs) + + def error(self, msg, *args, **kwargs): + self.error_log.error(msg, *args, **kwargs) + + def warning(self, msg, *args, **kwargs): + self.error_log.warning(msg, *args, **kwargs) + + def info(self, msg, *args, **kwargs): + self.error_log.info(msg, *args, **kwargs) + + def debug(self, msg, *args, **kwargs): + self.error_log.debug(msg, *args, **kwargs) + + def exception(self, msg, *args, **kwargs): + self.error_log.exception(msg, *args, **kwargs) + + def log(self, lvl, msg, *args, **kwargs): + if isinstance(lvl, str): + lvl = self.LOG_LEVELS.get(lvl.lower(), logging.INFO) + self.error_log.log(lvl, msg, *args, 
**kwargs) + + def atoms(self, resp, req, environ, request_time): + """ Gets atoms for log formatting. + """ + status = resp.status + if isinstance(status, str): + status = status.split(None, 1)[0] + atoms = { + 'h': environ.get('REMOTE_ADDR', '-'), + 'l': '-', + 'u': self._get_user(environ) or '-', + 't': self.now(), + 'r': "%s %s %s" % (environ['REQUEST_METHOD'], + environ['RAW_URI'], + environ["SERVER_PROTOCOL"]), + 's': status, + 'm': environ.get('REQUEST_METHOD'), + 'U': environ.get('PATH_INFO'), + 'q': environ.get('QUERY_STRING'), + 'H': environ.get('SERVER_PROTOCOL'), + 'b': getattr(resp, 'sent', None) is not None and str(resp.sent) or '-', + 'B': getattr(resp, 'sent', None), + 'f': environ.get('HTTP_REFERER', '-'), + 'a': environ.get('HTTP_USER_AGENT', '-'), + 'T': request_time.seconds, + 'D': (request_time.seconds * 1000000) + request_time.microseconds, + 'M': (request_time.seconds * 1000) + int(request_time.microseconds / 1000), + 'L': "%d.%06d" % (request_time.seconds, request_time.microseconds), + 'p': "<%s>" % os.getpid() + } + + # add request headers + if hasattr(req, 'headers'): + req_headers = req.headers + else: + req_headers = req + + if hasattr(req_headers, "items"): + req_headers = req_headers.items() + + atoms.update({"{%s}i" % k.lower(): v for k, v in req_headers}) + + resp_headers = resp.headers + if hasattr(resp_headers, "items"): + resp_headers = resp_headers.items() + + # add response headers + atoms.update({"{%s}o" % k.lower(): v for k, v in resp_headers}) + + # add environ variables + environ_variables = environ.items() + atoms.update({"{%s}e" % k.lower(): v for k, v in environ_variables}) + + return atoms + + def access(self, resp, req, environ, request_time): + """ See http://httpd.apache.org/docs/2.0/logs.html#combined + for format details + """ + + if not (self.cfg.accesslog or self.cfg.logconfig or + self.cfg.logconfig_dict or self.cfg.logconfig_json or + (self.cfg.syslog and not self.cfg.disable_redirect_access_to_syslog)): + return + + # wrap atoms: + # - make sure atoms will be test case insensitively + # - if atom doesn't exist replace it by '-' + safe_atoms = self.atoms_wrapper_class( + self.atoms(resp, req, environ, request_time) + ) + + try: + self.access_log.info(self.cfg.access_log_format, safe_atoms) + except Exception: + self.error(traceback.format_exc()) + + def now(self): + """ return date in Apache Common Log Format """ + return time.strftime('[%d/%b/%Y:%H:%M:%S %z]') + + def reopen_files(self): + if self.cfg.capture_output and self.cfg.errorlog != "-": + for stream in sys.stdout, sys.stderr: + stream.flush() + + with self.lock: + if self.logfile is not None: + self.logfile.close() + self.logfile = open(self.cfg.errorlog, 'a+') + os.dup2(self.logfile.fileno(), sys.stdout.fileno()) + os.dup2(self.logfile.fileno(), sys.stderr.fileno()) + + for log in loggers(): + for handler in log.handlers: + if isinstance(handler, logging.FileHandler): + handler.acquire() + try: + if handler.stream: + handler.close() + handler.stream = handler._open() + finally: + handler.release() + + def close_on_exec(self): + for log in loggers(): + for handler in log.handlers: + if isinstance(handler, logging.FileHandler): + handler.acquire() + try: + if handler.stream: + util.close_on_exec(handler.stream.fileno()) + finally: + handler.release() + + def _get_gunicorn_handler(self, log): + for h in log.handlers: + if getattr(h, "_gunicorn", False): + return h + + def _set_handler(self, log, output, fmt, stream=None): + # remove previous gunicorn log handler + h = 
self._get_gunicorn_handler(log) + if h: + log.handlers.remove(h) + + if output is not None: + if output == "-": + h = logging.StreamHandler(stream) + else: + util.check_is_writable(output) + h = logging.FileHandler(output) + # make sure the user can reopen the file + try: + os.chown(h.baseFilename, self.cfg.user, self.cfg.group) + except OSError: + # it's probably OK there, we assume the user has given + # /dev/null as a parameter. + pass + + h.setFormatter(fmt) + h._gunicorn = True + log.addHandler(h) + + def _set_syslog_handler(self, log, cfg, fmt, name): + # setup format + prefix = cfg.syslog_prefix or cfg.proc_name.replace(":", ".") + + prefix = "gunicorn.%s.%s" % (prefix, name) + + # set format + fmt = logging.Formatter(r"%s: %s" % (prefix, fmt)) + + # syslog facility + try: + facility = SYSLOG_FACILITIES[cfg.syslog_facility.lower()] + except KeyError: + raise RuntimeError("unknown facility name") + + # parse syslog address + socktype, addr = parse_syslog_address(cfg.syslog_addr) + + # finally setup the syslog handler + h = logging.handlers.SysLogHandler(address=addr, + facility=facility, socktype=socktype) + + h.setFormatter(fmt) + h._gunicorn = True + log.addHandler(h) + + def _get_user(self, environ): + user = None + http_auth = environ.get("HTTP_AUTHORIZATION") + if http_auth and http_auth.lower().startswith('basic'): + auth = http_auth.split(" ", 1) + if len(auth) == 2: + try: + # b64decode doesn't accept unicode in Python < 3.3 + # so we need to convert it to a byte string + auth = base64.b64decode(auth[1].strip().encode('utf-8')) + # b64decode returns a byte string + user = auth.split(b":", 1)[0].decode("UTF-8") + except (TypeError, binascii.Error, UnicodeDecodeError) as exc: + self.debug("Couldn't get username: %s", exc) + return user diff --git a/venv/lib/python3.11/site-packages/gunicorn/http/__init__.py b/venv/lib/python3.11/site-packages/gunicorn/http/__init__.py new file mode 100644 index 0000000..1da6f3e --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/http/__init__.py @@ -0,0 +1,9 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. 
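For reference, the three address forms accepted by ``parse_syslog_address`` above, with the return values worked out from the code itself rather than from any external documentation::

    from gunicorn.glogging import parse_syslog_address

    parse_syslog_address("udp://localhost:514")    # (SOCK_DGRAM, ("localhost", 514))
    parse_syslog_address("tcp://[::1]:6514")       # (SOCK_STREAM, ("::1", 6514))
    parse_syslog_address("unix:///dev/log#dgram")  # (SOCK_DGRAM, "/dev/log")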
+ +from gunicorn.http.message import Message, Request +from gunicorn.http.parser import RequestParser + +__all__ = ['Message', 'Request', 'RequestParser'] diff --git a/venv/lib/python3.11/site-packages/gunicorn/http/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/http/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..52063a0 Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/http/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/http/__pycache__/body.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/http/__pycache__/body.cpython-311.pyc new file mode 100644 index 0000000..799c601 Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/http/__pycache__/body.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/http/__pycache__/errors.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/http/__pycache__/errors.cpython-311.pyc new file mode 100644 index 0000000..8f108c8 Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/http/__pycache__/errors.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/http/__pycache__/message.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/http/__pycache__/message.cpython-311.pyc new file mode 100644 index 0000000..cec3e33 Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/http/__pycache__/message.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/http/__pycache__/parser.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/http/__pycache__/parser.cpython-311.pyc new file mode 100644 index 0000000..99d57ad Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/http/__pycache__/parser.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/http/__pycache__/unreader.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/http/__pycache__/unreader.cpython-311.pyc new file mode 100644 index 0000000..b3a5f4d Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/http/__pycache__/unreader.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/http/__pycache__/wsgi.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/http/__pycache__/wsgi.cpython-311.pyc new file mode 100644 index 0000000..ee36711 Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/http/__pycache__/wsgi.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/http/body.py b/venv/lib/python3.11/site-packages/gunicorn/http/body.py new file mode 100644 index 0000000..aa1af2c --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/http/body.py @@ -0,0 +1,262 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. 
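Because the parser exported above accepts any source without a ``recv`` method as an iterable of byte chunks, it can be exercised without a socket. A minimal sketch, assuming a default ``Config()`` is sufficient; the request bytes and peer address are made up for illustration::

    from gunicorn.config import Config
    from gunicorn.http import RequestParser

    chunks = [b"GET /ping HTTP/1.1\r\nHost: example.test\r\n\r\n"]
    parser = RequestParser(Config(), iter(chunks), ("127.0.0.1", 0))
    req = next(parser)                        # parse one request message
    print(req.method, req.path, req.version)  # GET /ping (1, 1)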
+ +import io +import sys + +from gunicorn.http.errors import (NoMoreData, ChunkMissingTerminator, + InvalidChunkSize) + + +class ChunkedReader(object): + def __init__(self, req, unreader): + self.req = req + self.parser = self.parse_chunked(unreader) + self.buf = io.BytesIO() + + def read(self, size): + if not isinstance(size, int): + raise TypeError("size must be an integer type") + if size < 0: + raise ValueError("Size must be positive.") + if size == 0: + return b"" + + if self.parser: + while self.buf.tell() < size: + try: + self.buf.write(next(self.parser)) + except StopIteration: + self.parser = None + break + + data = self.buf.getvalue() + ret, rest = data[:size], data[size:] + self.buf = io.BytesIO() + self.buf.write(rest) + return ret + + def parse_trailers(self, unreader, data): + buf = io.BytesIO() + buf.write(data) + + idx = buf.getvalue().find(b"\r\n\r\n") + done = buf.getvalue()[:2] == b"\r\n" + while idx < 0 and not done: + self.get_data(unreader, buf) + idx = buf.getvalue().find(b"\r\n\r\n") + done = buf.getvalue()[:2] == b"\r\n" + if done: + unreader.unread(buf.getvalue()[2:]) + return b"" + self.req.trailers = self.req.parse_headers(buf.getvalue()[:idx]) + unreader.unread(buf.getvalue()[idx + 4:]) + + def parse_chunked(self, unreader): + (size, rest) = self.parse_chunk_size(unreader) + while size > 0: + while size > len(rest): + size -= len(rest) + yield rest + rest = unreader.read() + if not rest: + raise NoMoreData() + yield rest[:size] + # Remove \r\n after chunk + rest = rest[size:] + while len(rest) < 2: + rest += unreader.read() + if rest[:2] != b'\r\n': + raise ChunkMissingTerminator(rest[:2]) + (size, rest) = self.parse_chunk_size(unreader, data=rest[2:]) + + def parse_chunk_size(self, unreader, data=None): + buf = io.BytesIO() + if data is not None: + buf.write(data) + + idx = buf.getvalue().find(b"\r\n") + while idx < 0: + self.get_data(unreader, buf) + idx = buf.getvalue().find(b"\r\n") + + data = buf.getvalue() + line, rest_chunk = data[:idx], data[idx + 2:] + + chunk_size = line.split(b";", 1)[0].strip() + try: + chunk_size = int(chunk_size, 16) + except ValueError: + raise InvalidChunkSize(chunk_size) + + if chunk_size == 0: + try: + self.parse_trailers(unreader, rest_chunk) + except NoMoreData: + pass + return (0, None) + return (chunk_size, rest_chunk) + + def get_data(self, unreader, buf): + data = unreader.read() + if not data: + raise NoMoreData() + buf.write(data) + + +class LengthReader(object): + def __init__(self, unreader, length): + self.unreader = unreader + self.length = length + + def read(self, size): + if not isinstance(size, int): + raise TypeError("size must be an integral type") + + size = min(self.length, size) + if size < 0: + raise ValueError("Size must be positive.") + if size == 0: + return b"" + + buf = io.BytesIO() + data = self.unreader.read() + while data: + buf.write(data) + if buf.tell() >= size: + break + data = self.unreader.read() + + buf = buf.getvalue() + ret, rest = buf[:size], buf[size:] + self.unreader.unread(rest) + self.length -= size + return ret + + +class EOFReader(object): + def __init__(self, unreader): + self.unreader = unreader + self.buf = io.BytesIO() + self.finished = False + + def read(self, size): + if not isinstance(size, int): + raise TypeError("size must be an integral type") + if size < 0: + raise ValueError("Size must be positive.") + if size == 0: + return b"" + + if self.finished: + data = self.buf.getvalue() + ret, rest = data[:size], data[size:] + self.buf = io.BytesIO() + self.buf.write(rest) + 
return ret + + data = self.unreader.read() + while data: + self.buf.write(data) + if self.buf.tell() > size: + break + data = self.unreader.read() + + if not data: + self.finished = True + + data = self.buf.getvalue() + ret, rest = data[:size], data[size:] + self.buf = io.BytesIO() + self.buf.write(rest) + return ret + + +class Body(object): + def __init__(self, reader): + self.reader = reader + self.buf = io.BytesIO() + + def __iter__(self): + return self + + def __next__(self): + ret = self.readline() + if not ret: + raise StopIteration() + return ret + + next = __next__ + + def getsize(self, size): + if size is None: + return sys.maxsize + elif not isinstance(size, int): + raise TypeError("size must be an integral type") + elif size < 0: + return sys.maxsize + return size + + def read(self, size=None): + size = self.getsize(size) + if size == 0: + return b"" + + if size < self.buf.tell(): + data = self.buf.getvalue() + ret, rest = data[:size], data[size:] + self.buf = io.BytesIO() + self.buf.write(rest) + return ret + + while size > self.buf.tell(): + data = self.reader.read(1024) + if not data: + break + self.buf.write(data) + + data = self.buf.getvalue() + ret, rest = data[:size], data[size:] + self.buf = io.BytesIO() + self.buf.write(rest) + return ret + + def readline(self, size=None): + size = self.getsize(size) + if size == 0: + return b"" + + data = self.buf.getvalue() + self.buf = io.BytesIO() + + ret = [] + while 1: + idx = data.find(b"\n", 0, size) + idx = idx + 1 if idx >= 0 else size if len(data) >= size else 0 + if idx: + ret.append(data[:idx]) + self.buf.write(data[idx:]) + break + + ret.append(data) + size -= len(data) + data = self.reader.read(min(1024, size)) + if not data: + break + + return b"".join(ret) + + def readlines(self, size=None): + ret = [] + data = self.read() + while data: + pos = data.find(b"\n") + if pos < 0: + ret.append(data) + data = b"" + else: + line, data = data[:pos + 1], data[pos + 1:] + ret.append(line) + return ret diff --git a/venv/lib/python3.11/site-packages/gunicorn/http/errors.py b/venv/lib/python3.11/site-packages/gunicorn/http/errors.py new file mode 100644 index 0000000..7839ef0 --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/http/errors.py @@ -0,0 +1,120 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +# We don't need to call super() in __init__ methods of our +# BaseException and Exception classes because we also define +# our own __str__ methods so there is no need to pass 'message' +# to the base class to get a meaningful output from 'str(exc)'. 
+# pylint: disable=super-init-not-called + + +class ParseException(Exception): + pass + + +class NoMoreData(IOError): + def __init__(self, buf=None): + self.buf = buf + + def __str__(self): + return "No more data after: %r" % self.buf + + +class InvalidRequestLine(ParseException): + def __init__(self, req): + self.req = req + self.code = 400 + + def __str__(self): + return "Invalid HTTP request line: %r" % self.req + + +class InvalidRequestMethod(ParseException): + def __init__(self, method): + self.method = method + + def __str__(self): + return "Invalid HTTP method: %r" % self.method + + +class InvalidHTTPVersion(ParseException): + def __init__(self, version): + self.version = version + + def __str__(self): + return "Invalid HTTP Version: %r" % self.version + + +class InvalidHeader(ParseException): + def __init__(self, hdr, req=None): + self.hdr = hdr + self.req = req + + def __str__(self): + return "Invalid HTTP Header: %r" % self.hdr + + +class InvalidHeaderName(ParseException): + def __init__(self, hdr): + self.hdr = hdr + + def __str__(self): + return "Invalid HTTP header name: %r" % self.hdr + + +class InvalidChunkSize(IOError): + def __init__(self, data): + self.data = data + + def __str__(self): + return "Invalid chunk size: %r" % self.data + + +class ChunkMissingTerminator(IOError): + def __init__(self, term): + self.term = term + + def __str__(self): + return "Invalid chunk terminator is not '\\r\\n': %r" % self.term + + +class LimitRequestLine(ParseException): + def __init__(self, size, max_size): + self.size = size + self.max_size = max_size + + def __str__(self): + return "Request Line is too large (%s > %s)" % (self.size, self.max_size) + + +class LimitRequestHeaders(ParseException): + def __init__(self, msg): + self.msg = msg + + def __str__(self): + return self.msg + + +class InvalidProxyLine(ParseException): + def __init__(self, line): + self.line = line + self.code = 400 + + def __str__(self): + return "Invalid PROXY line: %r" % self.line + + +class ForbiddenProxyRequest(ParseException): + def __init__(self, host): + self.host = host + self.code = 403 + + def __str__(self): + return "Proxy request from %r not allowed" % self.host + + +class InvalidSchemeHeaders(ParseException): + def __str__(self): + return "Contradictory scheme headers" diff --git a/venv/lib/python3.11/site-packages/gunicorn/http/message.py b/venv/lib/python3.11/site-packages/gunicorn/http/message.py new file mode 100644 index 0000000..1f93c71 --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/http/message.py @@ -0,0 +1,360 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. 
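Note that several of these parse errors carry a ``code`` attribute intended for the status of the error response. A tiny sketch::

    from gunicorn.http.errors import InvalidRequestLine

    try:
        raise InvalidRequestLine("NOT HTTP")
    except InvalidRequestLine as exc:
        print(exc.code, exc)  # 400 Invalid HTTP request line: 'NOT HTTP'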
+ +import io +import re +import socket + +from gunicorn.http.body import ChunkedReader, LengthReader, EOFReader, Body +from gunicorn.http.errors import ( + InvalidHeader, InvalidHeaderName, NoMoreData, + InvalidRequestLine, InvalidRequestMethod, InvalidHTTPVersion, + LimitRequestLine, LimitRequestHeaders, +) +from gunicorn.http.errors import InvalidProxyLine, ForbiddenProxyRequest +from gunicorn.http.errors import InvalidSchemeHeaders +from gunicorn.util import bytes_to_str, split_request_uri + +MAX_REQUEST_LINE = 8190 +MAX_HEADERS = 32768 +DEFAULT_MAX_HEADERFIELD_SIZE = 8190 + +HEADER_RE = re.compile(r"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\"]") +METH_RE = re.compile(r"[A-Z0-9$-_.]{3,20}") +VERSION_RE = re.compile(r"HTTP/(\d+)\.(\d+)") + + +class Message(object): + def __init__(self, cfg, unreader, peer_addr): + self.cfg = cfg + self.unreader = unreader + self.peer_addr = peer_addr + self.remote_addr = peer_addr + self.version = None + self.headers = [] + self.trailers = [] + self.body = None + self.scheme = "https" if cfg.is_ssl else "http" + + # set headers limits + self.limit_request_fields = cfg.limit_request_fields + if (self.limit_request_fields <= 0 + or self.limit_request_fields > MAX_HEADERS): + self.limit_request_fields = MAX_HEADERS + self.limit_request_field_size = cfg.limit_request_field_size + if self.limit_request_field_size < 0: + self.limit_request_field_size = DEFAULT_MAX_HEADERFIELD_SIZE + + # set max header buffer size + max_header_field_size = self.limit_request_field_size or DEFAULT_MAX_HEADERFIELD_SIZE + self.max_buffer_headers = self.limit_request_fields * \ + (max_header_field_size + 2) + 4 + + unused = self.parse(self.unreader) + self.unreader.unread(unused) + self.set_body_reader() + + def parse(self, unreader): + raise NotImplementedError() + + def parse_headers(self, data): + cfg = self.cfg + headers = [] + + # Split lines on \r\n keeping the \r\n on each line + lines = [bytes_to_str(line) + "\r\n" for line in data.split(b"\r\n")] + + # handle scheme headers + scheme_header = False + secure_scheme_headers = {} + if ('*' in cfg.forwarded_allow_ips or + not isinstance(self.peer_addr, tuple) + or self.peer_addr[0] in cfg.forwarded_allow_ips): + secure_scheme_headers = cfg.secure_scheme_headers + + # Parse headers into key/value pairs paying attention + # to continuation lines. + while lines: + if len(headers) >= self.limit_request_fields: + raise LimitRequestHeaders("limit request headers fields") + + # Parse initial header name : value pair. 
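+            # e.g. b"Content-Type: text/plain" is split at the first ":",
+            # the name is upper-cased and validated against HEADER_RE, and
+            # the value starts as a one-element list so any continuation
+            # lines (starting with " " or "\t") can be appended before joining.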
+ curr = lines.pop(0) + header_length = len(curr) + if curr.find(":") < 0: + raise InvalidHeader(curr.strip()) + name, value = curr.split(":", 1) + if self.cfg.strip_header_spaces: + name = name.rstrip(" \t").upper() + else: + name = name.upper() + if HEADER_RE.search(name): + raise InvalidHeaderName(name) + + name, value = name.strip(), [value.lstrip()] + + # Consume value continuation lines + while lines and lines[0].startswith((" ", "\t")): + curr = lines.pop(0) + header_length += len(curr) + if header_length > self.limit_request_field_size > 0: + raise LimitRequestHeaders("limit request headers " + "fields size") + value.append(curr) + value = ''.join(value).rstrip() + + if header_length > self.limit_request_field_size > 0: + raise LimitRequestHeaders("limit request headers fields size") + + if name in secure_scheme_headers: + secure = value == secure_scheme_headers[name] + scheme = "https" if secure else "http" + if scheme_header: + if scheme != self.scheme: + raise InvalidSchemeHeaders() + else: + scheme_header = True + self.scheme = scheme + + headers.append((name, value)) + + return headers + + def set_body_reader(self): + chunked = False + content_length = None + + for (name, value) in self.headers: + if name == "CONTENT-LENGTH": + if content_length is not None: + raise InvalidHeader("CONTENT-LENGTH", req=self) + content_length = value + elif name == "TRANSFER-ENCODING": + if value.lower() == "chunked": + chunked = True + + if chunked: + self.body = Body(ChunkedReader(self, self.unreader)) + elif content_length is not None: + try: + if str(content_length).isnumeric(): + content_length = int(content_length) + else: + raise InvalidHeader("CONTENT-LENGTH", req=self) + except ValueError: + raise InvalidHeader("CONTENT-LENGTH", req=self) + + if content_length < 0: + raise InvalidHeader("CONTENT-LENGTH", req=self) + + self.body = Body(LengthReader(self.unreader, content_length)) + else: + self.body = Body(EOFReader(self.unreader)) + + def should_close(self): + for (h, v) in self.headers: + if h == "CONNECTION": + v = v.lower().strip() + if v == "close": + return True + elif v == "keep-alive": + return False + break + return self.version <= (1, 0) + + +class Request(Message): + def __init__(self, cfg, unreader, peer_addr, req_number=1): + self.method = None + self.uri = None + self.path = None + self.query = None + self.fragment = None + + # get max request line size + self.limit_request_line = cfg.limit_request_line + if (self.limit_request_line < 0 + or self.limit_request_line >= MAX_REQUEST_LINE): + self.limit_request_line = MAX_REQUEST_LINE + + self.req_number = req_number + self.proxy_protocol_info = None + super().__init__(cfg, unreader, peer_addr) + + def get_data(self, unreader, buf, stop=False): + data = unreader.read() + if not data: + if stop: + raise StopIteration() + raise NoMoreData(buf.getvalue()) + buf.write(data) + + def parse(self, unreader): + buf = io.BytesIO() + self.get_data(unreader, buf, stop=True) + + # get request line + line, rbuf = self.read_line(unreader, buf, self.limit_request_line) + + # proxy protocol + if self.proxy_protocol(bytes_to_str(line)): + # get next request line + buf = io.BytesIO() + buf.write(rbuf) + line, rbuf = self.read_line(unreader, buf, self.limit_request_line) + + self.parse_request_line(line) + buf = io.BytesIO() + buf.write(rbuf) + + # Headers + data = buf.getvalue() + idx = data.find(b"\r\n\r\n") + + done = data[:2] == b"\r\n" + while True: + idx = data.find(b"\r\n\r\n") + done = data[:2] == b"\r\n" + + if idx < 0 and not done: + 
self.get_data(unreader, buf) + data = buf.getvalue() + if len(data) > self.max_buffer_headers: + raise LimitRequestHeaders("max buffer headers") + else: + break + + if done: + self.unreader.unread(data[2:]) + return b"" + + self.headers = self.parse_headers(data[:idx]) + + ret = data[idx + 4:] + buf = None + return ret + + def read_line(self, unreader, buf, limit=0): + data = buf.getvalue() + + while True: + idx = data.find(b"\r\n") + if idx >= 0: + # check if the request line is too large + if idx > limit > 0: + raise LimitRequestLine(idx, limit) + break + if len(data) - 2 > limit > 0: + raise LimitRequestLine(len(data), limit) + self.get_data(unreader, buf) + data = buf.getvalue() + + return (data[:idx], # request line, + data[idx + 2:]) # residue in the buffer, skip \r\n + + def proxy_protocol(self, line): + """\ + Detect, check and parse proxy protocol. + + :raises: ForbiddenProxyRequest, InvalidProxyLine. + :return: True for proxy protocol line else False + """ + if not self.cfg.proxy_protocol: + return False + + if self.req_number != 1: + return False + + if not line.startswith("PROXY"): + return False + + self.proxy_protocol_access_check() + self.parse_proxy_protocol(line) + + return True + + def proxy_protocol_access_check(self): + # check in allow list + if ("*" not in self.cfg.proxy_allow_ips and + isinstance(self.peer_addr, tuple) and + self.peer_addr[0] not in self.cfg.proxy_allow_ips): + raise ForbiddenProxyRequest(self.peer_addr[0]) + + def parse_proxy_protocol(self, line): + bits = line.split() + + if len(bits) != 6: + raise InvalidProxyLine(line) + + # Extract data + proto = bits[1] + s_addr = bits[2] + d_addr = bits[3] + + # Validation + if proto not in ["TCP4", "TCP6"]: + raise InvalidProxyLine("protocol '%s' not supported" % proto) + if proto == "TCP4": + try: + socket.inet_pton(socket.AF_INET, s_addr) + socket.inet_pton(socket.AF_INET, d_addr) + except socket.error: + raise InvalidProxyLine(line) + elif proto == "TCP6": + try: + socket.inet_pton(socket.AF_INET6, s_addr) + socket.inet_pton(socket.AF_INET6, d_addr) + except socket.error: + raise InvalidProxyLine(line) + + try: + s_port = int(bits[4]) + d_port = int(bits[5]) + except ValueError: + raise InvalidProxyLine("invalid port %s" % line) + + if not ((0 <= s_port <= 65535) and (0 <= d_port <= 65535)): + raise InvalidProxyLine("invalid port %s" % line) + + # Set data + self.proxy_protocol_info = { + "proxy_protocol": proto, + "client_addr": s_addr, + "client_port": s_port, + "proxy_addr": d_addr, + "proxy_port": d_port + } + + def parse_request_line(self, line_bytes): + bits = [bytes_to_str(bit) for bit in line_bytes.split(None, 2)] + if len(bits) != 3: + raise InvalidRequestLine(bytes_to_str(line_bytes)) + + # Method + if not METH_RE.match(bits[0]): + raise InvalidRequestMethod(bits[0]) + self.method = bits[0].upper() + + # URI + self.uri = bits[1] + + try: + parts = split_request_uri(self.uri) + except ValueError: + raise InvalidRequestLine(bytes_to_str(line_bytes)) + self.path = parts.path or "" + self.query = parts.query or "" + self.fragment = parts.fragment or "" + + # Version + match = VERSION_RE.match(bits[2]) + if match is None: + raise InvalidHTTPVersion(bits[2]) + self.version = (int(match.group(1)), int(match.group(2))) + + def set_body_reader(self): + super().set_body_reader() + if isinstance(self.body.reader, EOFReader): + self.body = Body(LengthReader(self.unreader, 0)) diff --git a/venv/lib/python3.11/site-packages/gunicorn/http/parser.py b/venv/lib/python3.11/site-packages/gunicorn/http/parser.py new 
file mode 100644
index 0000000..5d689f0
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/gunicorn/http/parser.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+from gunicorn.http.message import Request
+from gunicorn.http.unreader import SocketUnreader, IterUnreader
+
+
+class Parser(object):
+
+    mesg_class = None
+
+    def __init__(self, cfg, source, source_addr):
+        self.cfg = cfg
+        if hasattr(source, "recv"):
+            self.unreader = SocketUnreader(source)
+        else:
+            self.unreader = IterUnreader(source)
+        self.mesg = None
+        self.source_addr = source_addr
+
+        # request counter (for keepalive connections)
+        self.req_count = 0
+
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        # Stop if HTTP dictates a stop.
+        if self.mesg and self.mesg.should_close():
+            raise StopIteration()
+
+        # Discard any unread body of the previous message
+        if self.mesg:
+            data = self.mesg.body.read(8192)
+            while data:
+                data = self.mesg.body.read(8192)
+
+        # Parse the next request
+        self.req_count += 1
+        self.mesg = self.mesg_class(self.cfg, self.unreader, self.source_addr, self.req_count)
+        if not self.mesg:
+            raise StopIteration()
+        return self.mesg
+
+    next = __next__
+
+
+class RequestParser(Parser):
+
+    mesg_class = Request
diff --git a/venv/lib/python3.11/site-packages/gunicorn/http/unreader.py b/venv/lib/python3.11/site-packages/gunicorn/http/unreader.py
new file mode 100644
index 0000000..273bfc3
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/gunicorn/http/unreader.py
@@ -0,0 +1,79 @@
+# -*- coding: utf-8 -
+#
+# This file is part of gunicorn released under the MIT license.
+# See the NOTICE for more information.
+
+import io
+import os
+
+# Classes that can undo reading data from
+# a given type of data source.
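The ``unread`` mechanism implemented by the classes below is what lets the parser above push residual bytes back after consuming one message. A small sketch with the iterable-backed variant, using made-up chunks::

    from gunicorn.http.unreader import IterUnreader

    u = IterUnreader([b"hello", b" world"])
    first = u.read(5)  # b'hello', exactly the first chunk
    u.unread(first)    # push it back onto the internal buffer
    print(u.read(5))   # b'hello' again; buffered bytes are served first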
+ + +class Unreader(object): + def __init__(self): + self.buf = io.BytesIO() + + def chunk(self): + raise NotImplementedError() + + def read(self, size=None): + if size is not None and not isinstance(size, int): + raise TypeError("size parameter must be an int or long.") + + if size is not None: + if size == 0: + return b"" + if size < 0: + size = None + + self.buf.seek(0, os.SEEK_END) + + if size is None and self.buf.tell(): + ret = self.buf.getvalue() + self.buf = io.BytesIO() + return ret + if size is None: + d = self.chunk() + return d + + while self.buf.tell() < size: + chunk = self.chunk() + if not chunk: + ret = self.buf.getvalue() + self.buf = io.BytesIO() + return ret + self.buf.write(chunk) + data = self.buf.getvalue() + self.buf = io.BytesIO() + self.buf.write(data[size:]) + return data[:size] + + def unread(self, data): + self.buf.seek(0, os.SEEK_END) + self.buf.write(data) + + +class SocketUnreader(Unreader): + def __init__(self, sock, max_chunk=8192): + super().__init__() + self.sock = sock + self.mxchunk = max_chunk + + def chunk(self): + return self.sock.recv(self.mxchunk) + + +class IterUnreader(Unreader): + def __init__(self, iterable): + super().__init__() + self.iter = iter(iterable) + + def chunk(self): + if not self.iter: + return b"" + try: + return next(self.iter) + except StopIteration: + self.iter = None + return b"" diff --git a/venv/lib/python3.11/site-packages/gunicorn/http/wsgi.py b/venv/lib/python3.11/site-packages/gunicorn/http/wsgi.py new file mode 100644 index 0000000..25715ea --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/http/wsgi.py @@ -0,0 +1,393 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +import io +import logging +import os +import re +import sys + +from gunicorn.http.message import HEADER_RE +from gunicorn.http.errors import InvalidHeader, InvalidHeaderName +from gunicorn import SERVER_SOFTWARE, SERVER +from gunicorn import util + +# Send files in at most 1GB blocks as some operating systems can have problems +# with sending files in blocks over 2GB. +BLKSIZE = 0x3FFFFFFF + +HEADER_VALUE_RE = re.compile(r'[\x00-\x1F\x7F]') + +log = logging.getLogger(__name__) + + +class FileWrapper(object): + + def __init__(self, filelike, blksize=8192): + self.filelike = filelike + self.blksize = blksize + if hasattr(filelike, 'close'): + self.close = filelike.close + + def __getitem__(self, key): + data = self.filelike.read(self.blksize) + if data: + return data + raise IndexError + + +class WSGIErrorsWrapper(io.RawIOBase): + + def __init__(self, cfg): + # There is no public __init__ method for RawIOBase so + # we don't need to call super() in the __init__ method. 
+ # pylint: disable=super-init-not-called + errorlog = logging.getLogger("gunicorn.error") + handlers = errorlog.handlers + self.streams = [] + + if cfg.errorlog == "-": + self.streams.append(sys.stderr) + handlers = handlers[1:] + + for h in handlers: + if hasattr(h, "stream"): + self.streams.append(h.stream) + + def write(self, data): + for stream in self.streams: + try: + stream.write(data) + except UnicodeError: + stream.write(data.encode("UTF-8")) + stream.flush() + + +def base_environ(cfg): + return { + "wsgi.errors": WSGIErrorsWrapper(cfg), + "wsgi.version": (1, 0), + "wsgi.multithread": False, + "wsgi.multiprocess": (cfg.workers > 1), + "wsgi.run_once": False, + "wsgi.file_wrapper": FileWrapper, + "wsgi.input_terminated": True, + "SERVER_SOFTWARE": SERVER_SOFTWARE, + } + + +def default_environ(req, sock, cfg): + env = base_environ(cfg) + env.update({ + "wsgi.input": req.body, + "gunicorn.socket": sock, + "REQUEST_METHOD": req.method, + "QUERY_STRING": req.query, + "RAW_URI": req.uri, + "SERVER_PROTOCOL": "HTTP/%s" % ".".join([str(v) for v in req.version]) + }) + return env + + +def proxy_environ(req): + info = req.proxy_protocol_info + + if not info: + return {} + + return { + "PROXY_PROTOCOL": info["proxy_protocol"], + "REMOTE_ADDR": info["client_addr"], + "REMOTE_PORT": str(info["client_port"]), + "PROXY_ADDR": info["proxy_addr"], + "PROXY_PORT": str(info["proxy_port"]), + } + + +def create(req, sock, client, server, cfg): + resp = Response(req, sock, cfg) + + # set initial environ + environ = default_environ(req, sock, cfg) + + # default variables + host = None + script_name = os.environ.get("SCRIPT_NAME", "") + + # add the headers to the environ + for hdr_name, hdr_value in req.headers: + if hdr_name == "EXPECT": + # handle expect + if hdr_value.lower() == "100-continue": + sock.send(b"HTTP/1.1 100 Continue\r\n\r\n") + elif hdr_name == 'HOST': + host = hdr_value + elif hdr_name == "SCRIPT_NAME": + script_name = hdr_value + elif hdr_name == "CONTENT-TYPE": + environ['CONTENT_TYPE'] = hdr_value + continue + elif hdr_name == "CONTENT-LENGTH": + environ['CONTENT_LENGTH'] = hdr_value + continue + + key = 'HTTP_' + hdr_name.replace('-', '_') + if key in environ: + hdr_value = "%s,%s" % (environ[key], hdr_value) + environ[key] = hdr_value + + # set the url scheme + environ['wsgi.url_scheme'] = req.scheme + + # set the REMOTE_* keys in environ + # authors should be aware that REMOTE_HOST and REMOTE_ADDR + # may not qualify the remote addr: + # http://www.ietf.org/rfc/rfc3875 + if isinstance(client, str): + environ['REMOTE_ADDR'] = client + elif isinstance(client, bytes): + environ['REMOTE_ADDR'] = client.decode() + else: + environ['REMOTE_ADDR'] = client[0] + environ['REMOTE_PORT'] = str(client[1]) + + # handle the SERVER_* + # Normally only the application should use the Host header but since the + # WSGI spec doesn't support unix sockets, we are using it to create + # viable SERVER_* if possible. + if isinstance(server, str): + server = server.split(":") + if len(server) == 1: + # unix socket + if host: + server = host.split(':') + if len(server) == 1: + if req.scheme == "http": + server.append(80) + elif req.scheme == "https": + server.append(443) + else: + server.append('') + else: + # no host header given which means that we are not behind a + # proxy, so append an empty port. 
+ server.append('') + environ['SERVER_NAME'] = server[0] + environ['SERVER_PORT'] = str(server[1]) + + # set the path and script name + path_info = req.path + if script_name: + path_info = path_info.split(script_name, 1)[1] + environ['PATH_INFO'] = util.unquote_to_wsgi_str(path_info) + environ['SCRIPT_NAME'] = script_name + + # override the environ with the correct remote and server address if + # we are behind a proxy using the proxy protocol. + environ.update(proxy_environ(req)) + return resp, environ + + +class Response(object): + + def __init__(self, req, sock, cfg): + self.req = req + self.sock = sock + self.version = SERVER + self.status = None + self.chunked = False + self.must_close = False + self.headers = [] + self.headers_sent = False + self.response_length = None + self.sent = 0 + self.upgrade = False + self.cfg = cfg + + def force_close(self): + self.must_close = True + + def should_close(self): + if self.must_close or self.req.should_close(): + return True + if self.response_length is not None or self.chunked: + return False + if self.req.method == 'HEAD': + return False + if self.status_code < 200 or self.status_code in (204, 304): + return False + return True + + def start_response(self, status, headers, exc_info=None): + if exc_info: + try: + if self.status and self.headers_sent: + util.reraise(exc_info[0], exc_info[1], exc_info[2]) + finally: + exc_info = None + elif self.status is not None: + raise AssertionError("Response headers already set!") + + self.status = status + + # get the status code from the response here so we can use it to check + # the need for the connection header later without parsing the string + # each time. + try: + self.status_code = int(self.status.split()[0]) + except ValueError: + self.status_code = None + + self.process_headers(headers) + self.chunked = self.is_chunked() + return self.write + + def process_headers(self, headers): + for name, value in headers: + if not isinstance(name, str): + raise TypeError('%r is not a string' % name) + + if HEADER_RE.search(name): + raise InvalidHeaderName('%r' % name) + + if not isinstance(value, str): + raise TypeError('%r is not a string' % value) + + if HEADER_VALUE_RE.search(value): + raise InvalidHeader('%r' % value) + + value = value.strip() + lname = name.lower().strip() + if lname == "content-length": + self.response_length = int(value) + elif util.is_hoppish(name): + if lname == "connection": + # handle websocket + if value.lower().strip() == "upgrade": + self.upgrade = True + elif lname == "upgrade": + if value.lower().strip() == "websocket": + self.headers.append((name.strip(), value)) + + # ignore hopbyhop headers + continue + self.headers.append((name.strip(), value)) + + def is_chunked(self): + # Only use chunked responses when the client is + # speaking HTTP/1.1 or newer and there was + # no Content-Length header set. + if self.response_length is not None: + return False + elif self.req.version <= (1, 0): + return False + elif self.req.method == 'HEAD': + # Responses to a HEAD request MUST NOT contain a response body. + return False + elif self.status_code in (204, 304): + # Do not use chunked responses when the response is guaranteed to + # not have a response body. 
+ return False + return True + + def default_headers(self): + # set the connection header + if self.upgrade: + connection = "upgrade" + elif self.should_close(): + connection = "close" + else: + connection = "keep-alive" + + headers = [ + "HTTP/%s.%s %s\r\n" % (self.req.version[0], + self.req.version[1], self.status), + "Server: %s\r\n" % self.version, + "Date: %s\r\n" % util.http_date(), + "Connection: %s\r\n" % connection + ] + if self.chunked: + headers.append("Transfer-Encoding: chunked\r\n") + return headers + + def send_headers(self): + if self.headers_sent: + return + tosend = self.default_headers() + tosend.extend(["%s: %s\r\n" % (k, v) for k, v in self.headers]) + + header_str = "%s\r\n" % "".join(tosend) + util.write(self.sock, util.to_bytestring(header_str, "latin-1")) + self.headers_sent = True + + def write(self, arg): + self.send_headers() + if not isinstance(arg, bytes): + raise TypeError('%r is not a byte' % arg) + arglen = len(arg) + tosend = arglen + if self.response_length is not None: + if self.sent >= self.response_length: + # Never write more than self.response_length bytes + return + + tosend = min(self.response_length - self.sent, tosend) + if tosend < arglen: + arg = arg[:tosend] + + # Sending an empty chunk signals the end of the + # response and prematurely closes the response + if self.chunked and tosend == 0: + return + + self.sent += tosend + util.write(self.sock, arg, self.chunked) + + def can_sendfile(self): + return self.cfg.sendfile is not False + + def sendfile(self, respiter): + if self.cfg.is_ssl or not self.can_sendfile(): + return False + + if not util.has_fileno(respiter.filelike): + return False + + fileno = respiter.filelike.fileno() + try: + offset = os.lseek(fileno, 0, os.SEEK_CUR) + if self.response_length is None: + filesize = os.fstat(fileno).st_size + nbytes = filesize - offset + else: + nbytes = self.response_length + except (OSError, io.UnsupportedOperation): + return False + + self.send_headers() + + if self.is_chunked(): + chunk_size = "%X\r\n" % nbytes + self.sock.sendall(chunk_size.encode('utf-8')) + if nbytes > 0: + self.sock.sendfile(respiter.filelike, offset=offset, count=nbytes) + + if self.is_chunked(): + self.sock.sendall(b"\r\n") + + os.lseek(fileno, offset, os.SEEK_SET) + + return True + + def write_file(self, respiter): + if not self.sendfile(respiter): + for item in respiter: + self.write(item) + + def close(self): + if not self.headers_sent: + self.send_headers() + if self.chunked: + util.write_chunk(self.sock, b"") diff --git a/venv/lib/python3.11/site-packages/gunicorn/instrument/__init__.py b/venv/lib/python3.11/site-packages/gunicorn/instrument/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.11/site-packages/gunicorn/instrument/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/instrument/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..72f226f Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/instrument/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/instrument/__pycache__/statsd.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/instrument/__pycache__/statsd.cpython-311.pyc new file mode 100644 index 0000000..92de132 Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/instrument/__pycache__/statsd.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/instrument/statsd.py 
b/venv/lib/python3.11/site-packages/gunicorn/instrument/statsd.py new file mode 100644 index 0000000..2c54b2e --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/instrument/statsd.py @@ -0,0 +1,133 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +"Bare-bones implementation of statsD's protocol, client-side" + +import logging +import socket +from re import sub + +from gunicorn.glogging import Logger + +# Instrumentation constants +METRIC_VAR = "metric" +VALUE_VAR = "value" +MTYPE_VAR = "mtype" +GAUGE_TYPE = "gauge" +COUNTER_TYPE = "counter" +HISTOGRAM_TYPE = "histogram" + + +class Statsd(Logger): + """statsD-based instrumentation, that passes as a logger + """ + def __init__(self, cfg): + Logger.__init__(self, cfg) + self.prefix = sub(r"^(.+[^.]+)\.*$", "\\g<1>.", cfg.statsd_prefix) + + if isinstance(cfg.statsd_host, str): + address_family = socket.AF_UNIX + else: + address_family = socket.AF_INET + + try: + self.sock = socket.socket(address_family, socket.SOCK_DGRAM) + self.sock.connect(cfg.statsd_host) + except Exception: + self.sock = None + + self.dogstatsd_tags = cfg.dogstatsd_tags + + # Log errors and warnings + def critical(self, msg, *args, **kwargs): + Logger.critical(self, msg, *args, **kwargs) + self.increment("gunicorn.log.critical", 1) + + def error(self, msg, *args, **kwargs): + Logger.error(self, msg, *args, **kwargs) + self.increment("gunicorn.log.error", 1) + + def warning(self, msg, *args, **kwargs): + Logger.warning(self, msg, *args, **kwargs) + self.increment("gunicorn.log.warning", 1) + + def exception(self, msg, *args, **kwargs): + Logger.exception(self, msg, *args, **kwargs) + self.increment("gunicorn.log.exception", 1) + + # Special treatment for info, the most common log level + def info(self, msg, *args, **kwargs): + self.log(logging.INFO, msg, *args, **kwargs) + + # skip the run-of-the-mill logs + def debug(self, msg, *args, **kwargs): + self.log(logging.DEBUG, msg, *args, **kwargs) + + def log(self, lvl, msg, *args, **kwargs): + """Log a given statistic if metric, value and type are present + """ + try: + extra = kwargs.get("extra", None) + if extra is not None: + metric = extra.get(METRIC_VAR, None) + value = extra.get(VALUE_VAR, None) + typ = extra.get(MTYPE_VAR, None) + if metric and value and typ: + if typ == GAUGE_TYPE: + self.gauge(metric, value) + elif typ == COUNTER_TYPE: + self.increment(metric, value) + elif typ == HISTOGRAM_TYPE: + self.histogram(metric, value) + else: + pass + + # Log to parent logger only if there is something to say + if msg: + Logger.log(self, lvl, msg, *args, **kwargs) + except Exception: + Logger.warning(self, "Failed to log to statsd", exc_info=True) + + # access logging + def access(self, resp, req, environ, request_time): + """Measure request duration + request_time is a datetime.timedelta + """ + Logger.access(self, resp, req, environ, request_time) + duration_in_ms = request_time.seconds * 1000 + float(request_time.microseconds) / 10 ** 3 + status = resp.status + if isinstance(status, str): + status = int(status.split(None, 1)[0]) + self.histogram("gunicorn.request.duration", duration_in_ms) + self.increment("gunicorn.requests", 1) + self.increment("gunicorn.request.status.%d" % status, 1) + + # statsD methods + # you can use those directly if you want + def gauge(self, name, value): + self._sock_send("{0}{1}:{2}|g".format(self.prefix, name, value)) + + def increment(self, name, value, sampling_rate=1.0): + 
self._sock_send("{0}{1}:{2}|c|@{3}".format(self.prefix, name, value, sampling_rate)) + + def decrement(self, name, value, sampling_rate=1.0): + self._sock_send("{0}{1}:-{2}|c|@{3}".format(self.prefix, name, value, sampling_rate)) + + def histogram(self, name, value): + self._sock_send("{0}{1}:{2}|ms".format(self.prefix, name, value)) + + def _sock_send(self, msg): + try: + if isinstance(msg, str): + msg = msg.encode("ascii") + + # http://docs.datadoghq.com/guides/dogstatsd/#datagram-format + if self.dogstatsd_tags: + msg = msg + b"|#" + self.dogstatsd_tags.encode('ascii') + + if self.sock: + self.sock.send(msg) + except Exception: + Logger.warning(self, "Error sending message to statsd", exc_info=True) diff --git a/venv/lib/python3.11/site-packages/gunicorn/pidfile.py b/venv/lib/python3.11/site-packages/gunicorn/pidfile.py new file mode 100644 index 0000000..585b02a --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/pidfile.py @@ -0,0 +1,86 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +import errno +import os +import tempfile + + +class Pidfile(object): + """\ + Manage a PID file. If a specific name is provided + it and '"%s.oldpid" % name' will be used. Otherwise + we create a temp file using os.mkstemp. + """ + + def __init__(self, fname): + self.fname = fname + self.pid = None + + def create(self, pid): + oldpid = self.validate() + if oldpid: + if oldpid == os.getpid(): + return + msg = "Already running on PID %s (or pid file '%s' is stale)" + raise RuntimeError(msg % (oldpid, self.fname)) + + self.pid = pid + + # Write pidfile + fdir = os.path.dirname(self.fname) + if fdir and not os.path.isdir(fdir): + raise RuntimeError("%s doesn't exist. Can't create pidfile." % fdir) + fd, fname = tempfile.mkstemp(dir=fdir) + os.write(fd, ("%s\n" % self.pid).encode('utf-8')) + if self.fname: + os.rename(fname, self.fname) + else: + self.fname = fname + os.close(fd) + + # set permissions to -rw-r--r-- + os.chmod(self.fname, 420) + + def rename(self, path): + self.unlink() + self.fname = path + self.create(self.pid) + + def unlink(self): + """ delete pidfile""" + try: + with open(self.fname, "r") as f: + pid1 = int(f.read() or 0) + + if pid1 == self.pid: + os.unlink(self.fname) + except Exception: + pass + + def validate(self): + """ Validate pidfile and make it stale if needed""" + if not self.fname: + return + try: + with open(self.fname, "r") as f: + try: + wpid = int(f.read()) + except ValueError: + return + + try: + os.kill(wpid, 0) + return wpid + except OSError as e: + if e.args[0] == errno.EPERM: + return wpid + if e.args[0] == errno.ESRCH: + return + raise + except IOError as e: + if e.args[0] == errno.ENOENT: + return + raise diff --git a/venv/lib/python3.11/site-packages/gunicorn/reloader.py b/venv/lib/python3.11/site-packages/gunicorn/reloader.py new file mode 100644 index 0000000..88b540b --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/reloader.py @@ -0,0 +1,132 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. 
+# pylint: disable=no-else-continue + +import os +import os.path +import re +import sys +import time +import threading + +COMPILED_EXT_RE = re.compile(r'py[co]$') + + +class Reloader(threading.Thread): + def __init__(self, extra_files=None, interval=1, callback=None): + super().__init__() + self.daemon = True + self._extra_files = set(extra_files or ()) + self._interval = interval + self._callback = callback + + def add_extra_file(self, filename): + self._extra_files.add(filename) + + def get_files(self): + fnames = [ + COMPILED_EXT_RE.sub('py', module.__file__) + for module in tuple(sys.modules.values()) + if getattr(module, '__file__', None) + ] + + fnames.extend(self._extra_files) + + return fnames + + def run(self): + mtimes = {} + while True: + for filename in self.get_files(): + try: + mtime = os.stat(filename).st_mtime + except OSError: + continue + old_time = mtimes.get(filename) + if old_time is None: + mtimes[filename] = mtime + continue + elif mtime > old_time: + if self._callback: + self._callback(filename) + time.sleep(self._interval) + + +has_inotify = False +if sys.platform.startswith('linux'): + try: + from inotify.adapters import Inotify + import inotify.constants + has_inotify = True + except ImportError: + pass + + +if has_inotify: + + class InotifyReloader(threading.Thread): + event_mask = (inotify.constants.IN_CREATE | inotify.constants.IN_DELETE + | inotify.constants.IN_DELETE_SELF | inotify.constants.IN_MODIFY + | inotify.constants.IN_MOVE_SELF | inotify.constants.IN_MOVED_FROM + | inotify.constants.IN_MOVED_TO) + + def __init__(self, extra_files=None, callback=None): + super().__init__() + self.daemon = True + self._callback = callback + self._dirs = set() + self._watcher = Inotify() + + for extra_file in extra_files: + self.add_extra_file(extra_file) + + def add_extra_file(self, filename): + dirname = os.path.dirname(filename) + + if dirname in self._dirs: + return + + self._watcher.add_watch(dirname, mask=self.event_mask) + self._dirs.add(dirname) + + def get_dirs(self): + fnames = [ + os.path.dirname(os.path.abspath(COMPILED_EXT_RE.sub('py', module.__file__))) + for module in tuple(sys.modules.values()) + if getattr(module, '__file__', None) + ] + + return set(fnames) + + def run(self): + self._dirs = self.get_dirs() + + for dirname in self._dirs: + if os.path.isdir(dirname): + self._watcher.add_watch(dirname, mask=self.event_mask) + + for event in self._watcher.event_gen(): + if event is None: + continue + + filename = event[3] + + self._callback(filename) + +else: + + class InotifyReloader(object): + def __init__(self, extra_files=None, callback=None): + raise ImportError('You must have the inotify module installed to ' + 'use the inotify reloader') + + +preferred_reloader = InotifyReloader if has_inotify else Reloader + +reloader_engines = { + 'auto': preferred_reloader, + 'poll': Reloader, + 'inotify': InotifyReloader, +} diff --git a/venv/lib/python3.11/site-packages/gunicorn/sock.py b/venv/lib/python3.11/site-packages/gunicorn/sock.py new file mode 100644 index 0000000..7700146 --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/sock.py @@ -0,0 +1,232 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. 
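That closes out `reloader.py`: `reloader_engines` maps the configured `reload_engine` name to a class, preferring inotify on Linux when the module is importable and otherwise falling back to the one-second polling thread. A sketch of the wiring, with a hypothetical watched file and callback (an absolute path is used because the inotify variant watches the file's directory):

# Illustrative use of gunicorn.reloader; config path and callback are placeholders.
from gunicorn.reloader import reloader_engines

def on_change(filename):
    print("change detected:", filename)

reloader_cls = reloader_engines["auto"]   # InotifyReloader if available, else Reloader
reloader = reloader_cls(extra_files=["/etc/myapp/local.cfg"], callback=on_change)
reloader.start()                          # daemon thread; serving continues while it watches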
+ +import errno +import os +import socket +import ssl +import stat +import sys +import time + +from gunicorn import util + + +class BaseSocket(object): + + def __init__(self, address, conf, log, fd=None): + self.log = log + self.conf = conf + + self.cfg_addr = address + if fd is None: + sock = socket.socket(self.FAMILY, socket.SOCK_STREAM) + bound = False + else: + sock = socket.fromfd(fd, self.FAMILY, socket.SOCK_STREAM) + os.close(fd) + bound = True + + self.sock = self.set_options(sock, bound=bound) + + def __str__(self): + return "<socket %d>" % self.sock.fileno() + + def __getattr__(self, name): + return getattr(self.sock, name) + + def set_options(self, sock, bound=False): + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + if (self.conf.reuse_port + and hasattr(socket, 'SO_REUSEPORT')): # pragma: no cover + try: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + except socket.error as err: + if err.errno not in (errno.ENOPROTOOPT, errno.EINVAL): + raise + if not bound: + self.bind(sock) + sock.setblocking(0) + + # make sure that the socket can be inherited + if hasattr(sock, "set_inheritable"): + sock.set_inheritable(True) + + sock.listen(self.conf.backlog) + return sock + + def bind(self, sock): + sock.bind(self.cfg_addr) + + def close(self): + if self.sock is None: + return + + try: + self.sock.close() + except socket.error as e: + self.log.info("Error while closing socket %s", str(e)) + + self.sock = None + + +class TCPSocket(BaseSocket): + + FAMILY = socket.AF_INET + + def __str__(self): + if self.conf.is_ssl: + scheme = "https" + else: + scheme = "http" + + addr = self.sock.getsockname() + return "%s://%s:%d" % (scheme, addr[0], addr[1]) + + def set_options(self, sock, bound=False): + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + return super().set_options(sock, bound=bound) + + +class TCP6Socket(TCPSocket): + + FAMILY = socket.AF_INET6 + + def __str__(self): + (host, port, _, _) = self.sock.getsockname() + return "http://[%s]:%d" % (host, port) + + +class UnixSocket(BaseSocket): + + FAMILY = socket.AF_UNIX + + def __init__(self, addr, conf, log, fd=None): + if fd is None: + try: + st = os.stat(addr) + except OSError as e: + if e.args[0] != errno.ENOENT: + raise + else: + if stat.S_ISSOCK(st.st_mode): + os.remove(addr) + else: + raise ValueError("%r is not a socket" % addr) + super().__init__(addr, conf, log, fd=fd) + + def __str__(self): + return "unix:%s" % self.cfg_addr + + def bind(self, sock): + old_umask = os.umask(self.conf.umask) + sock.bind(self.cfg_addr) + util.chown(self.cfg_addr, self.conf.uid, self.conf.gid) + os.umask(old_umask) + + +def _sock_type(addr): + if isinstance(addr, tuple): + if util.is_ipv6(addr[0]): + sock_type = TCP6Socket + else: + sock_type = TCPSocket + elif isinstance(addr, (str, bytes)): + sock_type = UnixSocket + else: + raise TypeError("Unable to create socket from: %r" % addr) + return sock_type + + +def create_sockets(conf, log, fds=None): + """ + Create a new socket for the configured addresses or file descriptors. + + If a configured address is a tuple then a TCP socket is created. + If it is a string, a Unix socket is created. Otherwise, a TypeError is + raised.
+ """ + listeners = [] + + # get it only once + addr = conf.address + fdaddr = [bind for bind in addr if isinstance(bind, int)] + if fds: + fdaddr += list(fds) + laddr = [bind for bind in addr if not isinstance(bind, int)] + + # check ssl config early to raise the error on startup + # only the certfile is needed since it can contains the keyfile + if conf.certfile and not os.path.exists(conf.certfile): + raise ValueError('certfile "%s" does not exist' % conf.certfile) + + if conf.keyfile and not os.path.exists(conf.keyfile): + raise ValueError('keyfile "%s" does not exist' % conf.keyfile) + + # sockets are already bound + if fdaddr: + for fd in fdaddr: + sock = socket.fromfd(fd, socket.AF_UNIX, socket.SOCK_STREAM) + sock_name = sock.getsockname() + sock_type = _sock_type(sock_name) + listener = sock_type(sock_name, conf, log, fd=fd) + listeners.append(listener) + + return listeners + + # no sockets is bound, first initialization of gunicorn in this env. + for addr in laddr: + sock_type = _sock_type(addr) + sock = None + for i in range(5): + try: + sock = sock_type(addr, conf, log) + except socket.error as e: + if e.args[0] == errno.EADDRINUSE: + log.error("Connection in use: %s", str(addr)) + if e.args[0] == errno.EADDRNOTAVAIL: + log.error("Invalid address: %s", str(addr)) + if i < 5: + msg = "connection to {addr} failed: {error}" + log.debug(msg.format(addr=str(addr), error=str(e))) + log.error("Retrying in 1 second.") + time.sleep(1) + else: + break + + if sock is None: + log.error("Can't connect to %s", str(addr)) + sys.exit(1) + + listeners.append(sock) + + return listeners + + +def close_sockets(listeners, unlink=True): + for sock in listeners: + sock_name = sock.getsockname() + sock.close() + if unlink and _sock_type(sock_name) is UnixSocket: + os.unlink(sock_name) + + +def ssl_context(conf): + def default_ssl_context_factory(): + context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH, cafile=conf.ca_certs) + context.load_cert_chain(certfile=conf.certfile, keyfile=conf.keyfile) + context.verify_mode = conf.cert_reqs + if conf.ciphers: + context.set_ciphers(conf.ciphers) + return context + + return conf.ssl_context(conf, default_ssl_context_factory) + + +def ssl_wrap_socket(sock, conf): + return ssl_context(conf).wrap_socket(sock, + server_side=True, + suppress_ragged_eofs=conf.suppress_ragged_eofs, + do_handshake_on_connect=conf.do_handshake_on_connect) diff --git a/venv/lib/python3.11/site-packages/gunicorn/systemd.py b/venv/lib/python3.11/site-packages/gunicorn/systemd.py new file mode 100644 index 0000000..5bc1a74 --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/systemd.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +import os +import socket + +SD_LISTEN_FDS_START = 3 + + +def listen_fds(unset_environment=True): + """ + Get the number of sockets inherited from systemd socket activation. + + :param unset_environment: clear systemd environment variables unless False + :type unset_environment: bool + :return: the number of sockets to inherit from systemd socket activation + :rtype: int + + Returns zero immediately if $LISTEN_PID is not set to the current pid. + Otherwise, returns the number of systemd activation sockets specified by + $LISTEN_FDS. + + When $LISTEN_PID matches the current pid, unsets the environment variables + unless the ``unset_environment`` flag is ``False``. + + .. 
note:: + Unlike the sd_listen_fds C function, this implementation does not set + the FD_CLOEXEC flag because the gunicorn arbiter never needs to do this. + + .. seealso:: + `<http://www.freedesktop.org/software/systemd/man/sd_listen_fds.html>`_ + + """ + fds = int(os.environ.get('LISTEN_FDS', 0)) + listen_pid = int(os.environ.get('LISTEN_PID', 0)) + + if listen_pid != os.getpid(): + return 0 + + if unset_environment: + os.environ.pop('LISTEN_PID', None) + os.environ.pop('LISTEN_FDS', None) + + return fds + + +def sd_notify(state, logger, unset_environment=False): + """Send a notification to systemd. state is a string; see + the man page of sd_notify (http://www.freedesktop.org/software/systemd/man/sd_notify.html) + for a description of the allowable values. + + If the unset_environment parameter is True, sd_notify() will unset + the $NOTIFY_SOCKET environment variable before returning (regardless of + whether the function call itself succeeded or not). Further calls to + sd_notify() will then fail, but the variable is no longer inherited by + child processes. + """ + + addr = os.environ.get('NOTIFY_SOCKET') + if addr is None: + # not run in a service, just a noop + return + try: + sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM | socket.SOCK_CLOEXEC) + if addr[0] == '@': + addr = '\0' + addr[1:] + sock.connect(addr) + sock.sendall(state.encode('utf-8')) + except Exception: + logger.debug("Exception while invoking sd_notify()", exc_info=True) + finally: + if unset_environment: + os.environ.pop('NOTIFY_SOCKET') + sock.close() diff --git a/venv/lib/python3.11/site-packages/gunicorn/util.py b/venv/lib/python3.11/site-packages/gunicorn/util.py new file mode 100644 index 0000000..751deea --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/util.py @@ -0,0 +1,654 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. +import ast +import email.utils +import errno +import fcntl +import html +import importlib +import inspect +import io +import logging +import os +import pwd +import random +import re +import socket +import sys +import textwrap +import time +import traceback +import warnings + +try: + import importlib.metadata as importlib_metadata +except (ModuleNotFoundError, ImportError): + import importlib_metadata + +from gunicorn.errors import AppImportError +from gunicorn.workers import SUPPORTED_WORKERS +import urllib.parse + +REDIRECT_TO = getattr(os, 'devnull', '/dev/null') + +# Server and Date aren't technically hop-by-hop +# headers, but they are in the purview of the +# origin server which the WSGI spec says we should +# act like. So we drop them and add our own. +# +# In the future, concatenation server header values +# might be better, but nothing else does it and +# dropping them is easier.
+hop_headers = set(""" + connection keep-alive proxy-authenticate proxy-authorization + te trailers transfer-encoding upgrade + server date + """.split()) + +try: + from setproctitle import setproctitle + + def _setproctitle(title): + setproctitle("gunicorn: %s" % title) +except ImportError: + def _setproctitle(title): + pass + + +def load_entry_point(distribution, group, name): + dist_obj = importlib_metadata.distribution(distribution) + eps = [ep for ep in dist_obj.entry_points + if ep.group == group and ep.name == name] + if not eps: + raise ImportError("Entry point %r not found" % ((group, name),)) + return eps[0].load() + + +def load_class(uri, default="gunicorn.workers.sync.SyncWorker", + section="gunicorn.workers"): + if inspect.isclass(uri): + return uri + if uri.startswith("egg:"): + # uses entry points + entry_str = uri.split("egg:")[1] + try: + dist, name = entry_str.rsplit("#", 1) + except ValueError: + dist = entry_str + name = default + + try: + return load_entry_point(dist, section, name) + except Exception: + exc = traceback.format_exc() + msg = "class uri %r invalid or not found: \n\n[%s]" + raise RuntimeError(msg % (uri, exc)) + else: + components = uri.split('.') + if len(components) == 1: + while True: + if uri.startswith("#"): + uri = uri[1:] + + if uri in SUPPORTED_WORKERS: + components = SUPPORTED_WORKERS[uri].split(".") + break + + try: + return load_entry_point( + "gunicorn", section, uri + ) + except Exception: + exc = traceback.format_exc() + msg = "class uri %r invalid or not found: \n\n[%s]" + raise RuntimeError(msg % (uri, exc)) + + klass = components.pop(-1) + + try: + mod = importlib.import_module('.'.join(components)) + except Exception: + exc = traceback.format_exc() + msg = "class uri %r invalid or not found: \n\n[%s]" + raise RuntimeError(msg % (uri, exc)) + return getattr(mod, klass) + + +positionals = ( + inspect.Parameter.POSITIONAL_ONLY, + inspect.Parameter.POSITIONAL_OR_KEYWORD, +) + + +def get_arity(f): + sig = inspect.signature(f) + arity = 0 + + for param in sig.parameters.values(): + if param.kind in positionals: + arity += 1 + + return arity + + +def get_username(uid): + """ get the username for a user id""" + return pwd.getpwuid(uid).pw_name + + +def set_owner_process(uid, gid, initgroups=False): + """ set user and group of workers processes """ + + if gid: + if uid: + try: + username = get_username(uid) + except KeyError: + initgroups = False + + # versions of python < 2.6.2 don't manage unsigned int for + # groups like on osx or fedora + gid = abs(gid) & 0x7FFFFFFF + + if initgroups: + os.initgroups(username, gid) + elif gid != os.getgid(): + os.setgid(gid) + + if uid and uid != os.getuid(): + os.setuid(uid) + + +def chown(path, uid, gid): + os.chown(path, uid, gid) + + +if sys.platform.startswith("win"): + def _waitfor(func, pathname, waitall=False): + # Perform the operation + func(pathname) + # Now setup the wait loop + if waitall: + dirname = pathname + else: + dirname, name = os.path.split(pathname) + dirname = dirname or '.' + # Check for `pathname` to be removed from the filesystem. + # The exponential backoff of the timeout amounts to a total + # of ~1 second after which the deletion is probably an error + # anyway. + # Testing on a i7@4.3GHz shows that usually only 1 iteration is + # required when contention occurs. + timeout = 0.001 + while timeout < 1.0: + # Note we are only testing for the existence of the file(s) in + # the contents of the directory regardless of any security or + # access rights. 
If we have made it this far, we have sufficient + # permissions to do that much using Python's equivalent of the + # Windows API FindFirstFile. + # Other Windows APIs can fail or give incorrect results when + # dealing with files that are pending deletion. + L = os.listdir(dirname) + if not (L if waitall else name in L): + return + # Increase the timeout and try again + time.sleep(timeout) + timeout *= 2 + warnings.warn('tests may fail, delete still pending for ' + pathname, + RuntimeWarning, stacklevel=4) + + def _unlink(filename): + _waitfor(os.unlink, filename) +else: + _unlink = os.unlink + + +def unlink(filename): + try: + _unlink(filename) + except OSError as error: + # The filename need not exist. + if error.errno not in (errno.ENOENT, errno.ENOTDIR): + raise + + +def is_ipv6(addr): + try: + socket.inet_pton(socket.AF_INET6, addr) + except socket.error: # not a valid address + return False + except ValueError: # ipv6 not supported on this platform + return False + return True + + +def parse_address(netloc, default_port='8000'): + if re.match(r'unix:(//)?', netloc): + return re.split(r'unix:(//)?', netloc)[-1] + + if netloc.startswith("fd://"): + fd = netloc[5:] + try: + return int(fd) + except ValueError: + raise RuntimeError("%r is not a valid file descriptor." % fd) from None + + if netloc.startswith("tcp://"): + netloc = netloc.split("tcp://")[1] + host, port = netloc, default_port + + if '[' in netloc and ']' in netloc: + host = netloc.split(']')[0][1:] + port = (netloc.split(']:') + [default_port])[1] + elif ':' in netloc: + host, port = (netloc.split(':') + [default_port])[:2] + elif netloc == "": + host, port = "0.0.0.0", default_port + + try: + port = int(port) + except ValueError: + raise RuntimeError("%r is not a valid port number." % port) + + return host.lower(), port + + +def close_on_exec(fd): + flags = fcntl.fcntl(fd, fcntl.F_GETFD) + flags |= fcntl.FD_CLOEXEC + fcntl.fcntl(fd, fcntl.F_SETFD, flags) + + +def set_non_blocking(fd): + flags = fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK + fcntl.fcntl(fd, fcntl.F_SETFL, flags) + + +def close(sock): + try: + sock.close() + except socket.error: + pass + + +try: + from os import closerange +except ImportError: + def closerange(fd_low, fd_high): + # Iterate through and close all file descriptors. + for fd in range(fd_low, fd_high): + try: + os.close(fd) + except OSError: # ERROR, fd wasn't open to begin with (ignored) + pass + + +def write_chunk(sock, data): + if isinstance(data, str): + data = data.encode('utf-8') + chunk_size = "%X\r\n" % len(data) + chunk = b"".join([chunk_size.encode('utf-8'), data, b"\r\n"]) + sock.sendall(chunk) + + +def write(sock, data, chunked=False): + if chunked: + return write_chunk(sock, data) + sock.sendall(data) + + +def write_nonblock(sock, data, chunked=False): + timeout = sock.gettimeout() + if timeout != 0.0: + try: + sock.setblocking(0) + return write(sock, data, chunked) + finally: + sock.setblocking(1) + else: + return write(sock, data, chunked) + + +def write_error(sock, status_int, reason, mesg): + html_error = textwrap.dedent("""\ + <html> + <head> + <title>%(reason)s</title> + </head> + <body> + <h1><p>%(reason)s</p></h1> + %(mesg)s + </body> + </html> + """) % {"reason": reason, "mesg": html.escape(mesg)} + + http = textwrap.dedent("""\ + HTTP/1.1 %s %s\r + Connection: close\r + Content-Type: text/html\r + Content-Length: %d\r + \r + %s""") % (str(status_int), reason, len(html_error), html_error) + write_nonblock(sock, http.encode('latin1')) + + +def _called_with_wrong_args(f): + """Check whether calling a function raised a ``TypeError`` because + the call failed or because something in the function raised the + error. + + :param f: The function that was called. + :return: ``True`` if the call failed. + """ + tb = sys.exc_info()[2] + + try: + while tb is not None: + if tb.tb_frame.f_code is f.__code__: + # In the function, it was called successfully. + return False + + tb = tb.tb_next + + # Didn't reach the function. + return True + finally: + # Delete tb to break a circular reference in Python 2. + # https://docs.python.org/2/library/sys.html#sys.exc_info + del tb + + +def import_app(module): + parts = module.split(":", 1) + if len(parts) == 1: + obj = "application" + else: + module, obj = parts[0], parts[1] + + try: + mod = importlib.import_module(module) + except ImportError: + if module.endswith(".py") and os.path.exists(module): + msg = "Failed to find application, did you mean '%s:%s'?" + raise ImportError(msg % (module.rsplit(".", 1)[0], obj)) + raise + + # Parse obj as a single expression to determine if it's a valid + # attribute name or function call. + try: + expression = ast.parse(obj, mode="eval").body + except SyntaxError: + raise AppImportError( + "Failed to parse %r as an attribute name or function call." % obj + ) + + if isinstance(expression, ast.Name): + name = expression.id + args = kwargs = None + elif isinstance(expression, ast.Call): + # Ensure the function name is an attribute name only. + if not isinstance(expression.func, ast.Name): + raise AppImportError("Function reference must be a simple name: %r" % obj) + + name = expression.func.id + + # Parse the positional and keyword arguments as literals. + try: + args = [ast.literal_eval(arg) for arg in expression.args] + kwargs = {kw.arg: ast.literal_eval(kw.value) for kw in expression.keywords} + except ValueError: + # literal_eval gives cryptic error messages, show a generic + # message with the full expression instead. + raise AppImportError( + "Failed to parse arguments as literal values: %r" % obj + ) + else: + raise AppImportError( + "Failed to parse %r as an attribute name or function call." % obj + ) + + is_debug = logging.root.level == logging.DEBUG + try: + app = getattr(mod, name) + except AttributeError: + if is_debug: + traceback.print_exception(*sys.exc_info()) + raise AppImportError("Failed to find attribute %r in %r." % (name, module)) + + # If the expression was a function call, call the retrieved object + # to get the real application. + if args is not None: + try: + app = app(*args, **kwargs) + except TypeError as e: + # If the TypeError was due to bad arguments to the factory + # function, show Python's nice error message without a + # traceback. + if _called_with_wrong_args(app): + raise AppImportError( + "".join(traceback.format_exception_only(TypeError, e)).strip() + ) + + # Otherwise it was raised from within the function, show the + # full traceback.
+ raise + + if app is None: + raise AppImportError("Failed to find application object: %r" % obj) + + if not callable(app): + raise AppImportError("Application object must be callable.") + return app + + +def getcwd(): + # get current path, try to use PWD env first + try: + a = os.stat(os.environ['PWD']) + b = os.stat(os.getcwd()) + if a.st_ino == b.st_ino and a.st_dev == b.st_dev: + cwd = os.environ['PWD'] + else: + cwd = os.getcwd() + except Exception: + cwd = os.getcwd() + return cwd + + +def http_date(timestamp=None): + """Return the current date and time formatted for a message header.""" + if timestamp is None: + timestamp = time.time() + s = email.utils.formatdate(timestamp, localtime=False, usegmt=True) + return s + + +def is_hoppish(header): + return header.lower().strip() in hop_headers + + +def daemonize(enable_stdio_inheritance=False): + """\ + Standard daemonization of a process. + http://www.faqs.org/faqs/unix-faq/programmer/faq/ section 1.7 + """ + if 'GUNICORN_FD' not in os.environ: + if os.fork(): + os._exit(0) + os.setsid() + + if os.fork(): + os._exit(0) + + os.umask(0o22) + + # In both the following any file descriptors above stdin + # stdout and stderr are left untouched. The inheritance + # option simply allows one to have output go to a file + # specified by way of shell redirection when not wanting + # to use --error-log option. + + if not enable_stdio_inheritance: + # Remap all of stdin, stdout and stderr on to + # /dev/null. The expectation is that users have + # specified the --error-log option. + + closerange(0, 3) + + fd_null = os.open(REDIRECT_TO, os.O_RDWR) + # PEP 446, make fd for /dev/null inheritable + os.set_inheritable(fd_null, True) + + # expect fd_null to be always 0 here, but in-case not ... + if fd_null != 0: + os.dup2(fd_null, 0) + + os.dup2(fd_null, 1) + os.dup2(fd_null, 2) + + else: + fd_null = os.open(REDIRECT_TO, os.O_RDWR) + + # Always redirect stdin to /dev/null as we would + # never expect to need to read interactive input. + + if fd_null != 0: + os.close(0) + os.dup2(fd_null, 0) + + # If stdout and stderr are still connected to + # their original file descriptors we check to see + # if they are associated with terminal devices. + # When they are we map them to /dev/null so that + # are still detached from any controlling terminal + # properly. If not we preserve them as they are. + # + # If stdin and stdout were not hooked up to the + # original file descriptors, then all bets are + # off and all we can really do is leave them as + # they were. + # + # This will allow 'gunicorn ... > output.log 2>&1' + # to work with stdout/stderr going to the file + # as expected. + # + # Note that if using --error-log option, the log + # file specified through shell redirection will + # only be used up until the log file specified + # by the option takes over. As it replaces stdout + # and stderr at the file descriptor level, then + # anything using stdout or stderr, including having + # cached a reference to them, will still work. 
+ + def redirect(stream, fd_expect): + try: + fd = stream.fileno() + if fd == fd_expect and stream.isatty(): + os.close(fd) + os.dup2(fd_null, fd) + except AttributeError: + pass + + redirect(sys.stdout, 1) + redirect(sys.stderr, 2) + + +def seed(): + try: + random.seed(os.urandom(64)) + except NotImplementedError: + random.seed('%s.%s' % (time.time(), os.getpid())) + + +def check_is_writable(path): + try: + with open(path, 'a') as f: + f.close() + except IOError as e: + raise RuntimeError("Error: '%s' isn't writable [%r]" % (path, e)) + + +def to_bytestring(value, encoding="utf8"): + """Converts a string argument to a byte string""" + if isinstance(value, bytes): + return value + if not isinstance(value, str): + raise TypeError('%r is not a string' % value) + + return value.encode(encoding) + + +def has_fileno(obj): + if not hasattr(obj, "fileno"): + return False + + # check BytesIO case and maybe others + try: + obj.fileno() + except (AttributeError, IOError, io.UnsupportedOperation): + return False + + return True + + +def warn(msg): + print("!!!", file=sys.stderr) + + lines = msg.splitlines() + for i, line in enumerate(lines): + if i == 0: + line = "WARNING: %s" % line + print("!!! %s" % line, file=sys.stderr) + + print("!!!\n", file=sys.stderr) + sys.stderr.flush() + + +def make_fail_app(msg): + msg = to_bytestring(msg) + + def app(environ, start_response): + start_response("500 Internal Server Error", [ + ("Content-Type", "text/plain"), + ("Content-Length", str(len(msg))) + ]) + return [msg] + + return app + + +def split_request_uri(uri): + if uri.startswith("//"): + # When the path starts with //, urlsplit considers it as a + # relative uri while the RFC says we should consider it as abs_path + # http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2 + # We use temporary dot prefix to workaround this behaviour + parts = urllib.parse.urlsplit("." + uri) + return parts._replace(path=parts.path[1:]) + + return urllib.parse.urlsplit(uri) + + +# From six.reraise +def reraise(tp, value, tb=None): + try: + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + finally: + value = None + tb = None + + +def bytes_to_str(b): + if isinstance(b, str): + return b + return str(b, 'latin1') + + +def unquote_to_wsgi_str(string): + return urllib.parse.unquote_to_bytes(string).decode('latin-1') diff --git a/venv/lib/python3.11/site-packages/gunicorn/workers/__init__.py b/venv/lib/python3.11/site-packages/gunicorn/workers/__init__.py new file mode 100644 index 0000000..ae753e1 --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/workers/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +# supported gunicorn workers. 
+SUPPORTED_WORKERS = { + "sync": "gunicorn.workers.sync.SyncWorker", + "eventlet": "gunicorn.workers.geventlet.EventletWorker", + "gevent": "gunicorn.workers.ggevent.GeventWorker", + "gevent_wsgi": "gunicorn.workers.ggevent.GeventPyWSGIWorker", + "gevent_pywsgi": "gunicorn.workers.ggevent.GeventPyWSGIWorker", + "tornado": "gunicorn.workers.gtornado.TornadoWorker", + "gthread": "gunicorn.workers.gthread.ThreadWorker", +} diff --git a/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..b4fb48d Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/base.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/base.cpython-311.pyc new file mode 100644 index 0000000..d62c866 Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/base.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/base_async.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/base_async.cpython-311.pyc new file mode 100644 index 0000000..908fa48 Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/base_async.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/geventlet.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/geventlet.cpython-311.pyc new file mode 100644 index 0000000..305e36d Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/geventlet.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/ggevent.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/ggevent.cpython-311.pyc new file mode 100644 index 0000000..220d7bd Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/ggevent.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/gthread.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/gthread.cpython-311.pyc new file mode 100644 index 0000000..f982c30 Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/gthread.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/gtornado.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/gtornado.cpython-311.pyc new file mode 100644 index 0000000..848742c Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/gtornado.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/sync.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/sync.cpython-311.pyc new file mode 100644 index 0000000..98963aa Binary files /dev/null and b/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/sync.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/workertmp.cpython-311.pyc b/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/workertmp.cpython-311.pyc new file mode 100644 index 0000000..57daac3 Binary files /dev/null and 
b/venv/lib/python3.11/site-packages/gunicorn/workers/__pycache__/workertmp.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/gunicorn/workers/base.py b/venv/lib/python3.11/site-packages/gunicorn/workers/base.py new file mode 100644 index 0000000..f321dd2 --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/workers/base.py @@ -0,0 +1,275 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +import io +import os +import signal +import sys +import time +import traceback +from datetime import datetime +from random import randint +from ssl import SSLError + +from gunicorn import util +from gunicorn.http.errors import ( + ForbiddenProxyRequest, InvalidHeader, + InvalidHeaderName, InvalidHTTPVersion, + InvalidProxyLine, InvalidRequestLine, + InvalidRequestMethod, InvalidSchemeHeaders, + LimitRequestHeaders, LimitRequestLine, +) +from gunicorn.http.wsgi import Response, default_environ +from gunicorn.reloader import reloader_engines +from gunicorn.workers.workertmp import WorkerTmp + + +class Worker(object): + + SIGNALS = [getattr(signal, "SIG%s" % x) for x in ( + "ABRT HUP QUIT INT TERM USR1 USR2 WINCH CHLD".split() + )] + + PIPE = [] + + def __init__(self, age, ppid, sockets, app, timeout, cfg, log): + """\ + This is called pre-fork so it shouldn't do anything to the + current process. If there's a need to make process wide + changes you'll want to do that in ``self.init_process()``. + """ + self.age = age + self.pid = "[booting]" + self.ppid = ppid + self.sockets = sockets + self.app = app + self.timeout = timeout + self.cfg = cfg + self.booted = False + self.aborted = False + self.reloader = None + + self.nr = 0 + + if cfg.max_requests > 0: + jitter = randint(0, cfg.max_requests_jitter) + self.max_requests = cfg.max_requests + jitter + else: + self.max_requests = sys.maxsize + + self.alive = True + self.log = log + self.tmp = WorkerTmp(cfg) + + def __str__(self): + return "<Worker %s>" % self.pid + + def notify(self): + """\ + Your worker subclass must arrange to have this method called + once every ``self.timeout`` seconds. If you fail in accomplishing + this task, the master process will murder your workers. + """ + self.tmp.notify() + + def run(self): + """\ + This is the mainloop of a worker process. You should override + this method in a subclass to provide the intended behaviour + for your particular evil schemes. + """ + raise NotImplementedError() + + def init_process(self): + """\ + If you override this method in a subclass, the last statement + in the function should be to call this method with + super().init_process() so that the ``run()`` loop is initiated.
+ """ + + # set environment' variables + if self.cfg.env: + for k, v in self.cfg.env.items(): + os.environ[k] = v + + util.set_owner_process(self.cfg.uid, self.cfg.gid, + initgroups=self.cfg.initgroups) + + # Reseed the random number generator + util.seed() + + # For waking ourselves up + self.PIPE = os.pipe() + for p in self.PIPE: + util.set_non_blocking(p) + util.close_on_exec(p) + + # Prevent fd inheritance + for s in self.sockets: + util.close_on_exec(s) + util.close_on_exec(self.tmp.fileno()) + + self.wait_fds = self.sockets + [self.PIPE[0]] + + self.log.close_on_exec() + + self.init_signals() + + # start the reloader + if self.cfg.reload: + def changed(fname): + self.log.info("Worker reloading: %s modified", fname) + self.alive = False + os.write(self.PIPE[1], b"1") + self.cfg.worker_int(self) + time.sleep(0.1) + sys.exit(0) + + reloader_cls = reloader_engines[self.cfg.reload_engine] + self.reloader = reloader_cls(extra_files=self.cfg.reload_extra_files, + callback=changed) + + self.load_wsgi() + if self.reloader: + self.reloader.start() + + self.cfg.post_worker_init(self) + + # Enter main run loop + self.booted = True + self.run() + + def load_wsgi(self): + try: + self.wsgi = self.app.wsgi() + except SyntaxError as e: + if not self.cfg.reload: + raise + + self.log.exception(e) + + # fix from PR #1228 + # storing the traceback into exc_tb will create a circular reference. + # per https://docs.python.org/2/library/sys.html#sys.exc_info warning, + # delete the traceback after use. + try: + _, exc_val, exc_tb = sys.exc_info() + self.reloader.add_extra_file(exc_val.filename) + + tb_string = io.StringIO() + traceback.print_tb(exc_tb, file=tb_string) + self.wsgi = util.make_fail_app(tb_string.getvalue()) + finally: + del exc_tb + + def init_signals(self): + # reset signaling + for s in self.SIGNALS: + signal.signal(s, signal.SIG_DFL) + # init new signaling + signal.signal(signal.SIGQUIT, self.handle_quit) + signal.signal(signal.SIGTERM, self.handle_exit) + signal.signal(signal.SIGINT, self.handle_quit) + signal.signal(signal.SIGWINCH, self.handle_winch) + signal.signal(signal.SIGUSR1, self.handle_usr1) + signal.signal(signal.SIGABRT, self.handle_abort) + + # Don't let SIGTERM and SIGUSR1 disturb active requests + # by interrupting system calls + signal.siginterrupt(signal.SIGTERM, False) + signal.siginterrupt(signal.SIGUSR1, False) + + if hasattr(signal, 'set_wakeup_fd'): + signal.set_wakeup_fd(self.PIPE[1]) + + def handle_usr1(self, sig, frame): + self.log.reopen_files() + + def handle_exit(self, sig, frame): + self.alive = False + + def handle_quit(self, sig, frame): + self.alive = False + # worker_int callback + self.cfg.worker_int(self) + time.sleep(0.1) + sys.exit(0) + + def handle_abort(self, sig, frame): + self.alive = False + self.cfg.worker_abort(self) + sys.exit(1) + + def handle_error(self, req, client, addr, exc): + request_start = datetime.now() + addr = addr or ('', -1) # unix socket case + if isinstance(exc, ( + InvalidRequestLine, InvalidRequestMethod, + InvalidHTTPVersion, InvalidHeader, InvalidHeaderName, + LimitRequestLine, LimitRequestHeaders, + InvalidProxyLine, ForbiddenProxyRequest, + InvalidSchemeHeaders, + SSLError, + )): + + status_int = 400 + reason = "Bad Request" + + if isinstance(exc, InvalidRequestLine): + mesg = "Invalid Request Line '%s'" % str(exc) + elif isinstance(exc, InvalidRequestMethod): + mesg = "Invalid Method '%s'" % str(exc) + elif isinstance(exc, InvalidHTTPVersion): + mesg = "Invalid HTTP Version '%s'" % str(exc) + elif isinstance(exc, 
(InvalidHeaderName, InvalidHeader,)): + mesg = "%s" % str(exc) + if not req and hasattr(exc, "req"): + req = exc.req # for access log + elif isinstance(exc, LimitRequestLine): + mesg = "%s" % str(exc) + elif isinstance(exc, LimitRequestHeaders): + reason = "Request Header Fields Too Large" + mesg = "Error parsing headers: '%s'" % str(exc) + status_int = 431 + elif isinstance(exc, InvalidProxyLine): + mesg = "'%s'" % str(exc) + elif isinstance(exc, ForbiddenProxyRequest): + reason = "Forbidden" + mesg = "Request forbidden" + status_int = 403 + elif isinstance(exc, InvalidSchemeHeaders): + mesg = "%s" % str(exc) + elif isinstance(exc, SSLError): + reason = "Forbidden" + mesg = "'%s'" % str(exc) + status_int = 403 + + msg = "Invalid request from ip={ip}: {error}" + self.log.warning(msg.format(ip=addr[0], error=str(exc))) + else: + if hasattr(req, "uri"): + self.log.exception("Error handling request %s", req.uri) + status_int = 500 + reason = "Internal Server Error" + mesg = "" + + if req is not None: + request_time = datetime.now() - request_start + environ = default_environ(req, client, self.cfg) + environ['REMOTE_ADDR'] = addr[0] + environ['REMOTE_PORT'] = str(addr[1]) + resp = Response(req, client, self.cfg) + resp.status = "%s %s" % (status_int, reason) + resp.response_length = len(mesg) + self.log.access(resp, req, environ, request_time) + + try: + util.write_error(client, status_int, reason, mesg) + except Exception: + self.log.debug("Failed to send error message.") + + def handle_winch(self, sig, fname): + # Ignore SIGWINCH in worker. Fixes a crash on OpenBSD. + self.log.debug("worker: SIGWINCH ignored.") diff --git a/venv/lib/python3.11/site-packages/gunicorn/workers/base_async.py b/venv/lib/python3.11/site-packages/gunicorn/workers/base_async.py new file mode 100644 index 0000000..b059a7c --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/workers/base_async.py @@ -0,0 +1,148 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +from datetime import datetime +import errno +import socket +import ssl +import sys + +from gunicorn import http +from gunicorn.http import wsgi +from gunicorn import util +from gunicorn.workers import base + +ALREADY_HANDLED = object() + + +class AsyncWorker(base.Worker): + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.worker_connections = self.cfg.worker_connections + + def timeout_ctx(self): + raise NotImplementedError() + + def is_already_handled(self, respiter): + # some workers will need to overload this function to raise a StopIteration + return respiter == ALREADY_HANDLED + + def handle(self, listener, client, addr): + req = None + try: + parser = http.RequestParser(self.cfg, client, addr) + try: + listener_name = listener.getsockname() + if not self.cfg.keepalive: + req = next(parser) + self.handle_request(listener_name, req, client, addr) + else: + # keepalive loop + proxy_protocol_info = {} + while True: + req = None + with self.timeout_ctx(): + req = next(parser) + if not req: + break + if req.proxy_protocol_info: + proxy_protocol_info = req.proxy_protocol_info + else: + req.proxy_protocol_info = proxy_protocol_info + self.handle_request(listener_name, req, client, addr) + except http.errors.NoMoreData as e: + self.log.debug("Ignored premature client disconnection. %s", e) + except StopIteration as e: + self.log.debug("Closing connection. 
%s", e) + except ssl.SSLError: + # pass to next try-except level + util.reraise(*sys.exc_info()) + except EnvironmentError: + # pass to next try-except level + util.reraise(*sys.exc_info()) + except Exception as e: + self.handle_error(req, client, addr, e) + except ssl.SSLError as e: + if e.args[0] == ssl.SSL_ERROR_EOF: + self.log.debug("ssl connection closed") + client.close() + else: + self.log.debug("Error processing SSL request.") + self.handle_error(req, client, addr, e) + except EnvironmentError as e: + if e.errno not in (errno.EPIPE, errno.ECONNRESET, errno.ENOTCONN): + self.log.exception("Socket error processing request.") + else: + if e.errno == errno.ECONNRESET: + self.log.debug("Ignoring connection reset") + elif e.errno == errno.ENOTCONN: + self.log.debug("Ignoring socket not connected") + else: + self.log.debug("Ignoring EPIPE") + except Exception as e: + self.handle_error(req, client, addr, e) + finally: + util.close(client) + + def handle_request(self, listener_name, req, sock, addr): + request_start = datetime.now() + environ = {} + resp = None + try: + self.cfg.pre_request(self, req) + resp, environ = wsgi.create(req, sock, addr, + listener_name, self.cfg) + environ["wsgi.multithread"] = True + self.nr += 1 + if self.nr >= self.max_requests: + if self.alive: + self.log.info("Autorestarting worker after current request.") + self.alive = False + + if not self.alive or not self.cfg.keepalive: + resp.force_close() + + respiter = self.wsgi(environ, resp.start_response) + if self.is_already_handled(respiter): + return False + try: + if isinstance(respiter, environ['wsgi.file_wrapper']): + resp.write_file(respiter) + else: + for item in respiter: + resp.write(item) + resp.close() + finally: + request_time = datetime.now() - request_start + self.log.access(resp, req, environ, request_time) + if hasattr(respiter, "close"): + respiter.close() + if resp.should_close(): + raise StopIteration() + except StopIteration: + raise + except EnvironmentError: + # If the original exception was a socket.error we delegate + # handling it to the caller (where handle() might ignore it) + util.reraise(*sys.exc_info()) + except Exception: + if resp and resp.headers_sent: + # If the requests have already been sent, we should close the + # connection to indicate the error. + self.log.exception("Error handling request") + try: + sock.shutdown(socket.SHUT_RDWR) + sock.close() + except EnvironmentError: + pass + raise StopIteration() + raise + finally: + try: + self.cfg.post_request(self, req, environ, resp) + except Exception: + self.log.exception("Exception in post_request hook") + return True diff --git a/venv/lib/python3.11/site-packages/gunicorn/workers/geventlet.py b/venv/lib/python3.11/site-packages/gunicorn/workers/geventlet.py new file mode 100644 index 0000000..c42ed11 --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/workers/geventlet.py @@ -0,0 +1,187 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. 
+ +from functools import partial +import sys + +try: + import eventlet +except ImportError: + raise RuntimeError("eventlet worker requires eventlet 0.24.1 or higher") +else: + from packaging.version import parse as parse_version + if parse_version(eventlet.__version__) < parse_version('0.24.1'): + raise RuntimeError("eventlet worker requires eventlet 0.24.1 or higher") + +from eventlet import hubs, greenthread +from eventlet.greenio import GreenSocket +import eventlet.wsgi +import greenlet + +from gunicorn.workers.base_async import AsyncWorker +from gunicorn.sock import ssl_wrap_socket + +# ALREADY_HANDLED is removed in 0.30.3+ now it's `WSGI_LOCAL.already_handled: bool` +# https://github.com/eventlet/eventlet/pull/544 +EVENTLET_WSGI_LOCAL = getattr(eventlet.wsgi, "WSGI_LOCAL", None) +EVENTLET_ALREADY_HANDLED = getattr(eventlet.wsgi, "ALREADY_HANDLED", None) + + +def _eventlet_socket_sendfile(self, file, offset=0, count=None): + # Based on the implementation in gevent which in turn is slightly + # modified from the standard library implementation. + if self.gettimeout() == 0: + raise ValueError("non-blocking sockets are not supported") + if offset: + file.seek(offset) + blocksize = min(count, 8192) if count else 8192 + total_sent = 0 + # localize variable access to minimize overhead + file_read = file.read + sock_send = self.send + try: + while True: + if count: + blocksize = min(count - total_sent, blocksize) + if blocksize <= 0: + break + data = memoryview(file_read(blocksize)) + if not data: + break # EOF + while True: + try: + sent = sock_send(data) + except BlockingIOError: + continue + else: + total_sent += sent + if sent < len(data): + data = data[sent:] + else: + break + return total_sent + finally: + if total_sent > 0 and hasattr(file, 'seek'): + file.seek(offset + total_sent) + + +def _eventlet_serve(sock, handle, concurrency): + """ + Serve requests forever. + + This code is nearly identical to ``eventlet.convenience.serve`` except + that it attempts to join the pool at the end, which allows for gunicorn + graceful shutdowns. + """ + pool = eventlet.greenpool.GreenPool(concurrency) + server_gt = eventlet.greenthread.getcurrent() + + while True: + try: + conn, addr = sock.accept() + gt = pool.spawn(handle, conn, addr) + gt.link(_eventlet_stop, server_gt, conn) + conn, addr, gt = None, None, None + except eventlet.StopServe: + sock.close() + pool.waitall() + return + + +def _eventlet_stop(client, server, conn): + """ + Stop a greenlet handling a request and close its connection. + + This code is lifted from eventlet so as not to depend on undocumented + functions in the library. + """ + try: + try: + client.wait() + finally: + conn.close() + except greenlet.GreenletExit: + pass + except Exception: + greenthread.kill(server, *sys.exc_info()) + + +def patch_sendfile(): + # As of eventlet 0.25.1, GreenSocket.sendfile doesn't exist, + # meaning the native implementations of socket.sendfile will be used. + # If os.sendfile exists, it will attempt to use that, failing explicitly + # if the socket is in non-blocking mode, which the underlying + # socket object /is/. Even the regular _sendfile_use_send will + # fail in that way; plus, it would use the underlying socket.send which isn't + # properly cooperative. So we have to monkey-patch a working socket.sendfile() + # into GreenSocket; in this method, `self.send` will be the GreenSocket's + # send method which is properly cooperative. 
+ if not hasattr(GreenSocket, 'sendfile'): + GreenSocket.sendfile = _eventlet_socket_sendfile + + +class EventletWorker(AsyncWorker): + + def patch(self): + hubs.use_hub() + eventlet.monkey_patch() + patch_sendfile() + + def is_already_handled(self, respiter): + # eventlet >= 0.30.3 + if getattr(EVENTLET_WSGI_LOCAL, "already_handled", None): + raise StopIteration() + # eventlet < 0.30.3 + if respiter == EVENTLET_ALREADY_HANDLED: + raise StopIteration() + return super().is_already_handled(respiter) + + def init_process(self): + self.patch() + super().init_process() + + def handle_quit(self, sig, frame): + eventlet.spawn(super().handle_quit, sig, frame) + + def handle_usr1(self, sig, frame): + eventlet.spawn(super().handle_usr1, sig, frame) + + def timeout_ctx(self): + return eventlet.Timeout(self.cfg.keepalive or None, False) + + def handle(self, listener, client, addr): + if self.cfg.is_ssl: + client = ssl_wrap_socket(client, self.cfg) + super().handle(listener, client, addr) + + def run(self): + acceptors = [] + for sock in self.sockets: + gsock = GreenSocket(sock) + gsock.setblocking(1) + hfun = partial(self.handle, gsock) + acceptor = eventlet.spawn(_eventlet_serve, gsock, hfun, + self.worker_connections) + + acceptors.append(acceptor) + eventlet.sleep(0.0) + + while self.alive: + self.notify() + eventlet.sleep(1.0) + + self.notify() + t = None + try: + with eventlet.Timeout(self.cfg.graceful_timeout) as t: + for a in acceptors: + a.kill(eventlet.StopServe()) + for a in acceptors: + a.wait() + except eventlet.Timeout as te: + if te != t: + raise + for a in acceptors: + a.kill() diff --git a/venv/lib/python3.11/site-packages/gunicorn/workers/ggevent.py b/venv/lib/python3.11/site-packages/gunicorn/workers/ggevent.py new file mode 100644 index 0000000..2125a32 --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/workers/ggevent.py @@ -0,0 +1,190 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. 
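Both the eventlet worker above and the gevent worker that follows cap concurrency with a green-thread pool sized by `worker_connections` (the GreenPool handed to `_eventlet_serve`, or the gevent `Pool` passed to the server). A hedged configuration sketch (all settings are standard gunicorn options; the values are illustrative):

    # gunicorn.conf.py -- sketch for the async workers
    worker_class = "eventlet"   # or "gevent" / "gevent_pywsgi"
    workers = 2                 # each worker runs its own accept loop and pool
    worker_connections = 1000   # green-thread pool size
    keepalive = 2               # seconds; consumed by timeout_ctx()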
+ +import os +import sys +from datetime import datetime +from functools import partial +import time + +try: + import gevent +except ImportError: + raise RuntimeError("gevent worker requires gevent 1.4 or higher") +else: + from packaging.version import parse as parse_version + if parse_version(gevent.__version__) < parse_version('1.4'): + raise RuntimeError("gevent worker requires gevent 1.4 or higher") + +from gevent.pool import Pool +from gevent.server import StreamServer +from gevent import hub, monkey, socket, pywsgi + +import gunicorn +from gunicorn.http.wsgi import base_environ +from gunicorn.sock import ssl_context +from gunicorn.workers.base_async import AsyncWorker + +VERSION = "gevent/%s gunicorn/%s" % (gevent.__version__, gunicorn.__version__) + + +class GeventWorker(AsyncWorker): + + server_class = None + wsgi_handler = None + + def patch(self): + monkey.patch_all() + + # patch sockets + sockets = [] + for s in self.sockets: + sockets.append(socket.socket(s.FAMILY, socket.SOCK_STREAM, + fileno=s.sock.fileno())) + self.sockets = sockets + + def notify(self): + super().notify() + if self.ppid != os.getppid(): + self.log.info("Parent changed, shutting down: %s", self) + sys.exit(0) + + def timeout_ctx(self): + return gevent.Timeout(self.cfg.keepalive, False) + + def run(self): + servers = [] + ssl_args = {} + + if self.cfg.is_ssl: + ssl_args = {"ssl_context": ssl_context(self.cfg)} + + for s in self.sockets: + s.setblocking(1) + pool = Pool(self.worker_connections) + if self.server_class is not None: + environ = base_environ(self.cfg) + environ.update({ + "wsgi.multithread": True, + "SERVER_SOFTWARE": VERSION, + }) + server = self.server_class( + s, application=self.wsgi, spawn=pool, log=self.log, + handler_class=self.wsgi_handler, environ=environ, + **ssl_args) + else: + hfun = partial(self.handle, s) + server = StreamServer(s, handle=hfun, spawn=pool, **ssl_args) + if self.cfg.workers > 1: + server.max_accept = 1 + + server.start() + servers.append(server) + + while self.alive: + self.notify() + gevent.sleep(1.0) + + try: + # Stop accepting requests + for server in servers: + if hasattr(server, 'close'): # gevent 1.0 + server.close() + if hasattr(server, 'kill'): # gevent < 1.0 + server.kill() + + # Handle current requests until graceful_timeout + ts = time.time() + while time.time() - ts <= self.cfg.graceful_timeout: + accepting = 0 + for server in servers: + if server.pool.free_count() != server.pool.size: + accepting += 1 + + # if no server is accepting a connection, we can exit + if not accepting: + return + + self.notify() + gevent.sleep(1.0) + + # Force kill all active the handlers + self.log.warning("Worker graceful timeout (pid:%s)", self.pid) + for server in servers: + server.stop(timeout=1) + except Exception: + pass + + def handle(self, listener, client, addr): + # Connected socket timeout defaults to socket.getdefaulttimeout(). + # This forces to blocking mode. + client.setblocking(1) + super().handle(listener, client, addr) + + def handle_request(self, listener_name, req, sock, addr): + try: + super().handle_request(listener_name, req, sock, addr) + except gevent.GreenletExit: + pass + except SystemExit: + pass + + def handle_quit(self, sig, frame): + # Move this out of the signal handler so we can use + # blocking calls. See #1126 + gevent.spawn(super().handle_quit, sig, frame) + + def handle_usr1(self, sig, frame): + # Make the gevent workers handle the usr1 signal + # by deferring to a new greenlet. 
See #1645 + gevent.spawn(super().handle_usr1, sig, frame) + + def init_process(self): + self.patch() + hub.reinit() + super().init_process() + + +class GeventResponse(object): + + status = None + headers = None + sent = None + + def __init__(self, status, headers, clength): + self.status = status + self.headers = headers + self.sent = clength + + +class PyWSGIHandler(pywsgi.WSGIHandler): + + def log_request(self): + start = datetime.fromtimestamp(self.time_start) + finish = datetime.fromtimestamp(self.time_finish) + response_time = finish - start + resp_headers = getattr(self, 'response_headers', {}) + resp = GeventResponse(self.status, resp_headers, self.response_length) + if hasattr(self, 'headers'): + req_headers = self.headers.items() + else: + req_headers = [] + self.server.log.access(resp, req_headers, self.environ, response_time) + + def get_environ(self): + env = super().get_environ() + env['gunicorn.sock'] = self.socket + env['RAW_URI'] = self.path + return env + + +class PyWSGIServer(pywsgi.WSGIServer): + pass + + +class GeventPyWSGIWorker(GeventWorker): + "The Gevent StreamServer based workers." + server_class = PyWSGIServer + wsgi_handler = PyWSGIHandler diff --git a/venv/lib/python3.11/site-packages/gunicorn/workers/gthread.py b/venv/lib/python3.11/site-packages/gunicorn/workers/gthread.py new file mode 100644 index 0000000..c9c4234 --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/workers/gthread.py @@ -0,0 +1,373 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +# design: +# A threaded worker accepts connections in the main loop, accepted +# connections are added to the thread pool as a connection job. +# Keepalive connections are put back in the loop waiting for an event. +# If no event happen after the keep alive timeout, the connection is +# closed. +# pylint: disable=no-else-break + +from concurrent import futures +import errno +import os +import selectors +import socket +import ssl +import sys +import time +from collections import deque +from datetime import datetime +from functools import partial +from threading import RLock + +from . import base +from .. import http +from .. import util +from .. 
import sock +from ..http import wsgi + + +class TConn(object): + + def __init__(self, cfg, sock, client, server): + self.cfg = cfg + self.sock = sock + self.client = client + self.server = server + + self.timeout = None + self.parser = None + self.initialized = False + + # set the socket to non blocking + self.sock.setblocking(False) + + def init(self): + self.initialized = True + self.sock.setblocking(True) + + if self.parser is None: + # wrap the socket if needed + if self.cfg.is_ssl: + self.sock = sock.ssl_wrap_socket(self.sock, self.cfg) + + # initialize the parser + self.parser = http.RequestParser(self.cfg, self.sock, self.client) + + def set_timeout(self): + # set the timeout + self.timeout = time.time() + self.cfg.keepalive + + def close(self): + util.close(self.sock) + + +class ThreadWorker(base.Worker): + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.worker_connections = self.cfg.worker_connections + self.max_keepalived = self.cfg.worker_connections - self.cfg.threads + # initialise the pool + self.tpool = None + self.poller = None + self._lock = None + self.futures = deque() + self._keep = deque() + self.nr_conns = 0 + + @classmethod + def check_config(cls, cfg, log): + max_keepalived = cfg.worker_connections - cfg.threads + + if max_keepalived <= 0 and cfg.keepalive: + log.warning("No keepalived connections can be handled. " + + "Check the number of worker connections and threads.") + + def init_process(self): + self.tpool = self.get_thread_pool() + self.poller = selectors.DefaultSelector() + self._lock = RLock() + super().init_process() + + def get_thread_pool(self): + """Override this method to customize how the thread pool is created""" + return futures.ThreadPoolExecutor(max_workers=self.cfg.threads) + + def handle_quit(self, sig, frame): + self.alive = False + # worker_int callback + self.cfg.worker_int(self) + self.tpool.shutdown(False) + time.sleep(0.1) + sys.exit(0) + + def _wrap_future(self, fs, conn): + fs.conn = conn + self.futures.append(fs) + fs.add_done_callback(self.finish_request) + + def enqueue_req(self, conn): + conn.init() + # submit the connection to a worker + fs = self.tpool.submit(self.handle, conn) + self._wrap_future(fs, conn) + + def accept(self, server, listener): + try: + sock, client = listener.accept() + # initialize the connection object + conn = TConn(self.cfg, sock, client, server) + + self.nr_conns += 1 + # wait until socket is readable + with self._lock: + self.poller.register(conn.sock, selectors.EVENT_READ, + partial(self.on_client_socket_readable, conn)) + except EnvironmentError as e: + if e.errno not in (errno.EAGAIN, errno.ECONNABORTED, + errno.EWOULDBLOCK): + raise + + def on_client_socket_readable(self, conn, client): + with self._lock: + # unregister the client from the poller + self.poller.unregister(client) + + if conn.initialized: + # remove the connection from keepalive + try: + self._keep.remove(conn) + except ValueError: + # race condition + return + + # submit the connection to a worker + self.enqueue_req(conn) + + def murder_keepalived(self): + now = time.time() + while True: + with self._lock: + try: + # remove the connection from the queue + conn = self._keep.popleft() + except IndexError: + break + + delta = conn.timeout - now + if delta > 0: + # add the connection back to the queue + with self._lock: + self._keep.appendleft(conn) + break + else: + self.nr_conns -= 1 + # remove the socket from the poller + with self._lock: + try: + self.poller.unregister(conn.sock) + except 
EnvironmentError as e: + if e.errno != errno.EBADF: + raise + except KeyError: + # already removed by the system, continue + pass + except ValueError: + # already removed by the system continue + pass + + # close the socket + conn.close() + + def is_parent_alive(self): + # If our parent changed then we shut down. + if self.ppid != os.getppid(): + self.log.info("Parent changed, shutting down: %s", self) + return False + return True + + def run(self): + # init listeners, add them to the event loop + for sock in self.sockets: + sock.setblocking(False) + # a race condition during graceful shutdown may make the listener + # name unavailable in the request handler so capture it once here + server = sock.getsockname() + acceptor = partial(self.accept, server) + self.poller.register(sock, selectors.EVENT_READ, acceptor) + + while self.alive: + # notify the arbiter we are alive + self.notify() + + # can we accept more connections? + if self.nr_conns < self.worker_connections: + # wait for an event + events = self.poller.select(1.0) + for key, _ in events: + callback = key.data + callback(key.fileobj) + + # check (but do not wait) for finished requests + result = futures.wait(self.futures, timeout=0, + return_when=futures.FIRST_COMPLETED) + else: + # wait for a request to finish + result = futures.wait(self.futures, timeout=1.0, + return_when=futures.FIRST_COMPLETED) + + # clean up finished requests + for fut in result.done: + self.futures.remove(fut) + + if not self.is_parent_alive(): + break + + # handle keepalive timeouts + self.murder_keepalived() + + self.tpool.shutdown(False) + self.poller.close() + + for s in self.sockets: + s.close() + + futures.wait(self.futures, timeout=self.cfg.graceful_timeout) + + def finish_request(self, fs): + if fs.cancelled(): + self.nr_conns -= 1 + fs.conn.close() + return + + try: + (keepalive, conn) = fs.result() + # if the connection should be kept alived add it + # to the eventloop and record it + if keepalive and self.alive: + # flag the socket as non blocked + conn.sock.setblocking(False) + + # register the connection + conn.set_timeout() + with self._lock: + self._keep.append(conn) + + # add the socket to the event loop + self.poller.register(conn.sock, selectors.EVENT_READ, + partial(self.on_client_socket_readable, conn)) + else: + self.nr_conns -= 1 + conn.close() + except Exception: + # an exception happened, make sure to close the + # socket. + self.nr_conns -= 1 + fs.conn.close() + + def handle(self, conn): + keepalive = False + req = None + try: + req = next(conn.parser) + if not req: + return (False, conn) + + # handle the request + keepalive = self.handle_request(req, conn) + if keepalive: + return (keepalive, conn) + except http.errors.NoMoreData as e: + self.log.debug("Ignored premature client disconnection. %s", e) + + except StopIteration as e: + self.log.debug("Closing connection. 
%s", e) + except ssl.SSLError as e: + if e.args[0] == ssl.SSL_ERROR_EOF: + self.log.debug("ssl connection closed") + conn.sock.close() + else: + self.log.debug("Error processing SSL request.") + self.handle_error(req, conn.sock, conn.client, e) + + except EnvironmentError as e: + if e.errno not in (errno.EPIPE, errno.ECONNRESET, errno.ENOTCONN): + self.log.exception("Socket error processing request.") + else: + if e.errno == errno.ECONNRESET: + self.log.debug("Ignoring connection reset") + elif e.errno == errno.ENOTCONN: + self.log.debug("Ignoring socket not connected") + else: + self.log.debug("Ignoring connection epipe") + except Exception as e: + self.handle_error(req, conn.sock, conn.client, e) + + return (False, conn) + + def handle_request(self, req, conn): + environ = {} + resp = None + try: + self.cfg.pre_request(self, req) + request_start = datetime.now() + resp, environ = wsgi.create(req, conn.sock, conn.client, + conn.server, self.cfg) + environ["wsgi.multithread"] = True + self.nr += 1 + if self.nr >= self.max_requests: + if self.alive: + self.log.info("Autorestarting worker after current request.") + self.alive = False + resp.force_close() + + if not self.alive or not self.cfg.keepalive: + resp.force_close() + elif len(self._keep) >= self.max_keepalived: + resp.force_close() + + respiter = self.wsgi(environ, resp.start_response) + try: + if isinstance(respiter, environ['wsgi.file_wrapper']): + resp.write_file(respiter) + else: + for item in respiter: + resp.write(item) + + resp.close() + finally: + request_time = datetime.now() - request_start + self.log.access(resp, req, environ, request_time) + if hasattr(respiter, "close"): + respiter.close() + + if resp.should_close(): + self.log.debug("Closing connection.") + return False + except EnvironmentError: + # pass to next try-except level + util.reraise(*sys.exc_info()) + except Exception: + if resp and resp.headers_sent: + # If the requests have already been sent, we should close the + # connection to indicate the error. + self.log.exception("Error handling request") + try: + conn.sock.shutdown(socket.SHUT_RDWR) + conn.sock.close() + except EnvironmentError: + pass + raise StopIteration() + raise + finally: + try: + self.cfg.post_request(self, req, environ, resp) + except Exception: + self.log.exception("Exception in post_request hook") + + return True diff --git a/venv/lib/python3.11/site-packages/gunicorn/workers/gtornado.py b/venv/lib/python3.11/site-packages/gunicorn/workers/gtornado.py new file mode 100644 index 0000000..2850611 --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/workers/gtornado.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +import os +import sys + +try: + import tornado +except ImportError: + raise RuntimeError("You need tornado installed to use this worker.") +import tornado.web +import tornado.httpserver +from tornado.ioloop import IOLoop, PeriodicCallback +from tornado.wsgi import WSGIContainer +from gunicorn.workers.base import Worker +from gunicorn import __version__ as gversion +from gunicorn.sock import ssl_context + + +# Tornado 5.0 updated its IOLoop, and the `io_loop` arguments to many +# Tornado functions have been removed in Tornado 5.0. Also, they no +# longer store PeriodCallbacks in ioloop._callbacks. Instead we store +# them on our side, and use stop() on them when stopping the worker. 
+# See https://www.tornadoweb.org/en/stable/releases/v5.0.0.html#backwards-compatibility-notes +# for more details. +TORNADO5 = tornado.version_info >= (5, 0, 0) + + +class TornadoWorker(Worker): + + @classmethod + def setup(cls): + web = sys.modules.pop("tornado.web") + old_clear = web.RequestHandler.clear + + def clear(self): + old_clear(self) + if "Gunicorn" not in self._headers["Server"]: + self._headers["Server"] += " (Gunicorn/%s)" % gversion + web.RequestHandler.clear = clear + sys.modules["tornado.web"] = web + + def handle_exit(self, sig, frame): + if self.alive: + super().handle_exit(sig, frame) + + def handle_request(self): + self.nr += 1 + if self.alive and self.nr >= self.max_requests: + self.log.info("Autorestarting worker after current request.") + self.alive = False + + def watchdog(self): + if self.alive: + self.notify() + + if self.ppid != os.getppid(): + self.log.info("Parent changed, shutting down: %s", self) + self.alive = False + + def heartbeat(self): + if not self.alive: + if self.server_alive: + if hasattr(self, 'server'): + try: + self.server.stop() + except Exception: + pass + self.server_alive = False + else: + if TORNADO5: + for callback in self.callbacks: + callback.stop() + self.ioloop.stop() + else: + if not self.ioloop._callbacks: + self.ioloop.stop() + + def init_process(self): + # IOLoop cannot survive a fork or be shared across processes + # in any way. When multiple processes are being used, each process + # should create its own IOLoop. We should clear current IOLoop + # if exists before os.fork. + IOLoop.clear_current() + super().init_process() + + def run(self): + self.ioloop = IOLoop.instance() + self.alive = True + self.server_alive = False + + if TORNADO5: + self.callbacks = [] + self.callbacks.append(PeriodicCallback(self.watchdog, 1000)) + self.callbacks.append(PeriodicCallback(self.heartbeat, 1000)) + for callback in self.callbacks: + callback.start() + else: + PeriodicCallback(self.watchdog, 1000, io_loop=self.ioloop).start() + PeriodicCallback(self.heartbeat, 1000, io_loop=self.ioloop).start() + + # Assume the app is a WSGI callable if its not an + # instance of tornado.web.Application or is an + # instance of tornado.wsgi.WSGIApplication + app = self.wsgi + + if tornado.version_info[0] < 6: + if not isinstance(app, tornado.web.Application) or \ + isinstance(app, tornado.wsgi.WSGIApplication): + app = WSGIContainer(app) + elif not isinstance(app, WSGIContainer) and \ + not isinstance(app, tornado.web.Application): + app = WSGIContainer(app) + + # Monkey-patching HTTPConnection.finish to count the + # number of requests being handled by Tornado. This + # will help gunicorn shutdown the worker if max_requests + # is exceeded. 
+ httpserver = sys.modules["tornado.httpserver"] + if hasattr(httpserver, 'HTTPConnection'): + old_connection_finish = httpserver.HTTPConnection.finish + + def finish(other): + self.handle_request() + old_connection_finish(other) + httpserver.HTTPConnection.finish = finish + sys.modules["tornado.httpserver"] = httpserver + + server_class = tornado.httpserver.HTTPServer + else: + + class _HTTPServer(tornado.httpserver.HTTPServer): + + def on_close(instance, server_conn): + self.handle_request() + super(_HTTPServer, instance).on_close(server_conn) + + server_class = _HTTPServer + + if self.cfg.is_ssl: + if TORNADO5: + server = server_class(app, ssl_options=ssl_context(self.cfg)) + else: + server = server_class(app, io_loop=self.ioloop, + ssl_options=ssl_context(self.cfg)) + else: + if TORNADO5: + server = server_class(app) + else: + server = server_class(app, io_loop=self.ioloop) + + self.server = server + self.server_alive = True + + for s in self.sockets: + s.setblocking(0) + if hasattr(server, "add_socket"): # tornado > 2.0 + server.add_socket(s) + elif hasattr(server, "_sockets"): # tornado 2.0 + server._sockets[s.fileno()] = s + + server.no_keep_alive = self.cfg.keepalive <= 0 + server.start(num_processes=1) + + self.ioloop.start() diff --git a/venv/lib/python3.11/site-packages/gunicorn/workers/sync.py b/venv/lib/python3.11/site-packages/gunicorn/workers/sync.py new file mode 100644 index 0000000..39a209f --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/workers/sync.py @@ -0,0 +1,210 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. +# + +from datetime import datetime +import errno +import os +import select +import socket +import ssl +import sys + +from gunicorn import http +from gunicorn.http import wsgi +from gunicorn import sock +from gunicorn import util +from gunicorn.workers import base + + +class StopWaiting(Exception): + """ exception raised to stop waiting for a connection """ + + +class SyncWorker(base.Worker): + + def accept(self, listener): + client, addr = listener.accept() + client.setblocking(1) + util.close_on_exec(client) + self.handle(listener, client, addr) + + def wait(self, timeout): + try: + self.notify() + ret = select.select(self.wait_fds, [], [], timeout) + if ret[0]: + if self.PIPE[0] in ret[0]: + os.read(self.PIPE[0], 1) + return ret[0] + + except select.error as e: + if e.args[0] == errno.EINTR: + return self.sockets + if e.args[0] == errno.EBADF: + if self.nr < 0: + return self.sockets + else: + raise StopWaiting + raise + + def is_parent_alive(self): + # If our parent changed then we shut down. + if self.ppid != os.getppid(): + self.log.info("Parent changed, shutting down: %s", self) + return False + return True + + def run_for_one(self, timeout): + listener = self.sockets[0] + while self.alive: + self.notify() + + # Accept a connection. If we get an error telling us + # that no connection is waiting we fall down to the + # select which is where we'll wait for a bit for new + # workers to come give us some love. + try: + self.accept(listener) + # Keep processing clients until no one is waiting. This + # prevents the need to select() for every client that we + # process. 
+ continue + + except EnvironmentError as e: + if e.errno not in (errno.EAGAIN, errno.ECONNABORTED, + errno.EWOULDBLOCK): + raise + + if not self.is_parent_alive(): + return + + try: + self.wait(timeout) + except StopWaiting: + return + + def run_for_multiple(self, timeout): + while self.alive: + self.notify() + + try: + ready = self.wait(timeout) + except StopWaiting: + return + + if ready is not None: + for listener in ready: + if listener == self.PIPE[0]: + continue + + try: + self.accept(listener) + except EnvironmentError as e: + if e.errno not in (errno.EAGAIN, errno.ECONNABORTED, + errno.EWOULDBLOCK): + raise + + if not self.is_parent_alive(): + return + + def run(self): + # if no timeout is given the worker will never wait and will + # use the CPU for nothing. This minimal timeout prevent it. + timeout = self.timeout or 0.5 + + # self.socket appears to lose its blocking status after + # we fork in the arbiter. Reset it here. + for s in self.sockets: + s.setblocking(0) + + if len(self.sockets) > 1: + self.run_for_multiple(timeout) + else: + self.run_for_one(timeout) + + def handle(self, listener, client, addr): + req = None + try: + if self.cfg.is_ssl: + client = sock.ssl_wrap_socket(client, self.cfg) + parser = http.RequestParser(self.cfg, client, addr) + req = next(parser) + self.handle_request(listener, req, client, addr) + except http.errors.NoMoreData as e: + self.log.debug("Ignored premature client disconnection. %s", e) + except StopIteration as e: + self.log.debug("Closing connection. %s", e) + except ssl.SSLError as e: + if e.args[0] == ssl.SSL_ERROR_EOF: + self.log.debug("ssl connection closed") + client.close() + else: + self.log.debug("Error processing SSL request.") + self.handle_error(req, client, addr, e) + except EnvironmentError as e: + if e.errno not in (errno.EPIPE, errno.ECONNRESET, errno.ENOTCONN): + self.log.exception("Socket error processing request.") + else: + if e.errno == errno.ECONNRESET: + self.log.debug("Ignoring connection reset") + elif e.errno == errno.ENOTCONN: + self.log.debug("Ignoring socket not connected") + else: + self.log.debug("Ignoring EPIPE") + except Exception as e: + self.handle_error(req, client, addr, e) + finally: + util.close(client) + + def handle_request(self, listener, req, client, addr): + environ = {} + resp = None + try: + self.cfg.pre_request(self, req) + request_start = datetime.now() + resp, environ = wsgi.create(req, client, addr, + listener.getsockname(), self.cfg) + # Force the connection closed until someone shows + # a buffering proxy that supports Keep-Alive to + # the backend. + resp.force_close() + self.nr += 1 + if self.nr >= self.max_requests: + self.log.info("Autorestarting worker after current request.") + self.alive = False + respiter = self.wsgi(environ, resp.start_response) + try: + if isinstance(respiter, environ['wsgi.file_wrapper']): + resp.write_file(respiter) + else: + for item in respiter: + resp.write(item) + resp.close() + finally: + request_time = datetime.now() - request_start + self.log.access(resp, req, environ, request_time) + if hasattr(respiter, "close"): + respiter.close() + except EnvironmentError: + # pass to next try-except level + util.reraise(*sys.exc_info()) + except Exception: + if resp and resp.headers_sent: + # If the requests have already been sent, we should close the + # connection to indicate the error. 
+ self.log.exception("Error handling request") + try: + client.shutdown(socket.SHUT_RDWR) + client.close() + except EnvironmentError: + pass + raise StopIteration() + raise + finally: + try: + self.cfg.post_request(self, req, environ, resp) + except Exception: + self.log.exception("Exception in post_request hook") diff --git a/venv/lib/python3.11/site-packages/gunicorn/workers/workertmp.py b/venv/lib/python3.11/site-packages/gunicorn/workers/workertmp.py new file mode 100644 index 0000000..cc79ecd --- /dev/null +++ b/venv/lib/python3.11/site-packages/gunicorn/workers/workertmp.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + +import os +import platform +import tempfile + +from gunicorn import util + +PLATFORM = platform.system() +IS_CYGWIN = PLATFORM.startswith('CYGWIN') + + +class WorkerTmp(object): + + def __init__(self, cfg): + old_umask = os.umask(cfg.umask) + fdir = cfg.worker_tmp_dir + if fdir and not os.path.isdir(fdir): + raise RuntimeError("%s doesn't exist. Can't create workertmp." % fdir) + fd, name = tempfile.mkstemp(prefix="wgunicorn-", dir=fdir) + os.umask(old_umask) + + # change the owner and group of the file if the worker will run as + # a different user or group, so that the worker can modify the file + if cfg.uid != os.geteuid() or cfg.gid != os.getegid(): + util.chown(name, cfg.uid, cfg.gid) + + # unlink the file so we don't leak temporary files + try: + if not IS_CYGWIN: + util.unlink(name) + # In Python 3.8, open() emits RuntimeWarning if buffering=1 for binary mode. + # Because we never write to this file, pass 0 to switch buffering off. + self._tmp = os.fdopen(fd, 'w+b', 0) + except Exception: + os.close(fd) + raise + + self.spinner = 0 + + def notify(self): + self.spinner = (self.spinner + 1) % 2 + os.fchmod(self._tmp.fileno(), self.spinner) + + def last_update(self): + return os.fstat(self._tmp.fileno()).st_ctime + + def fileno(self): + return self._tmp.fileno() + + def close(self): + return self._tmp.close() diff --git a/venv/lib/python3.11/site-packages/h11-0.16.0.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/h11-0.16.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.11/site-packages/h11-0.16.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.11/site-packages/h11-0.16.0.dist-info/METADATA b/venv/lib/python3.11/site-packages/h11-0.16.0.dist-info/METADATA new file mode 100644 index 0000000..8a2f639 --- /dev/null +++ b/venv/lib/python3.11/site-packages/h11-0.16.0.dist-info/METADATA @@ -0,0 +1,202 @@ +Metadata-Version: 2.4 +Name: h11 +Version: 0.16.0 +Summary: A pure-Python, bring-your-own-I/O implementation of HTTP/1.1 +Home-page: https://github.com/python-hyper/h11 +Author: Nathaniel J. 
Smith +Author-email: njs@pobox.com +License: MIT +Classifier: Development Status :: 3 - Alpha +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: System :: Networking +Requires-Python: >=3.8 +License-File: LICENSE.txt +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: home-page +Dynamic: license +Dynamic: license-file +Dynamic: requires-python +Dynamic: summary + +h11 +=== + +.. image:: https://travis-ci.org/python-hyper/h11.svg?branch=master + :target: https://travis-ci.org/python-hyper/h11 + :alt: Automated test status + +.. image:: https://codecov.io/gh/python-hyper/h11/branch/master/graph/badge.svg + :target: https://codecov.io/gh/python-hyper/h11 + :alt: Test coverage + +.. image:: https://readthedocs.org/projects/h11/badge/?version=latest + :target: http://h11.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + +This is a little HTTP/1.1 library written from scratch in Python, +heavily inspired by `hyper-h2 `_. + +It's a "bring-your-own-I/O" library; h11 contains no IO code +whatsoever. This means you can hook h11 up to your favorite network +API, and that could be anything you want: synchronous, threaded, +asynchronous, or your own implementation of `RFC 6214 +`_ -- h11 won't judge you. +(Compare this to the current state of the art, where every time a `new +network API `_ comes along then someone +gets to start over reimplementing the entire HTTP protocol from +scratch.) Cory Benfield made an `excellent blog post describing the +benefits of this approach +`_, or if you like video +then here's his `PyCon 2016 talk on the same theme +`_. + +This also means that h11 is not immediately useful out of the box: +it's a toolkit for building programs that speak HTTP, not something +that could directly replace ``requests`` or ``twisted.web`` or +whatever. But h11 makes it much easier to implement something like +``requests`` or ``twisted.web``. + +At a high level, working with h11 goes like this: + +1) First, create an ``h11.Connection`` object to track the state of a + single HTTP/1.1 connection. + +2) When you read data off the network, pass it to + ``conn.receive_data(...)``; you'll get back a list of objects + representing high-level HTTP "events". + +3) When you want to send a high-level HTTP event, create the + corresponding "event" object and pass it to ``conn.send(...)``; + this will give you back some bytes that you can then push out + through the network. + +For example, a client might instantiate and then send a +``h11.Request`` object, then zero or more ``h11.Data`` objects for the +request body (e.g., if this is a POST), and then a +``h11.EndOfMessage`` to indicate the end of the message. Then the +server would then send back a ``h11.Response``, some ``h11.Data``, and +its own ``h11.EndOfMessage``. If either side violates the protocol, +you'll get a ``h11.ProtocolError`` exception. 
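A minimal sketch of the client side of that exchange (the names are h11's public API as described above; the request and reply bytes are illustrative, and the socket plumbing is left out):

.. code-block:: python

    import h11

    conn = h11.Connection(our_role=h11.CLIENT)
    data = conn.send(h11.Request(method="GET", target="/",
                                 headers=[("Host", "example.com"),
                                          ("Connection", "close")]))
    data += conn.send(h11.EndOfMessage())
    # ...write `data` to your socket, then feed whatever you read back in:
    reply = b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\nConnection: close\r\n\r\nhi"
    conn.receive_data(reply)
    while True:
        event = conn.next_event()
        print(event)  # Response, then Data, then EndOfMessage
        if type(event) is h11.EndOfMessage:
            break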
+ +h11 is suitable for implementing both servers and clients, and has a +pleasantly symmetric API: the events you send as a client are exactly +the ones that you receive as a server and vice-versa. + +`Here's an example of a tiny HTTP client +`_ + +It also has `a fine manual `_. + +FAQ +--- + +*Whyyyyy?* + +I wanted to play with HTTP in `Curio +`__ and `Trio +`__, which at the time didn't have any +HTTP libraries. So I thought, no big deal, Python has, like, a dozen +different implementations of HTTP, surely I can find one that's +reusable. I didn't find one, but I did find Cory's call-to-arms +blog-post. So I figured, well, fine, if I have to implement HTTP from +scratch, at least I can make sure no-one *else* has to ever again. + +*Should I use it?* + +Maybe. You should be aware that it's a very young project. But, it's +feature complete and has an exhaustive test-suite and complete docs, +so the next step is for people to try using it and see how it goes +:-). If you do then please let us know -- if nothing else we'll want +to talk to you before making any incompatible changes! + +*What are the features/limitations?* + +Roughly speaking, it's trying to be a robust, complete, and non-hacky +implementation of the first "chapter" of the HTTP/1.1 spec: `RFC 7230: +HTTP/1.1 Message Syntax and Routing +`_. That is, it mostly focuses on +implementing HTTP at the level of taking bytes on and off the wire, +and the headers related to that, and tries to be anal about spec +conformance. It doesn't know about higher-level concerns like URL +routing, conditional GETs, cross-origin cookie policies, or content +negotiation. But it does know how to take care of framing, +cross-version differences in keep-alive handling, and the "obsolete +line folding" rule, so you can focus your energies on the hard / +interesting parts for your application, and it tries to support the +full specification in the sense that any useful HTTP/1.1 conformant +application should be able to use h11. + +It's pure Python, and has no dependencies outside of the standard +library. + +It has a test suite with 100.0% coverage for both statements and +branches. + +Currently it supports Python 3 (testing on 3.8-3.12) and PyPy 3. +The last Python 2-compatible version was h11 0.11.x. +(Originally it had a Cython wrapper for `http-parser +`_ and a beautiful nested state +machine implemented with ``yield from`` to postprocess the output. But +I had to take these out -- the new *parser* needs fewer lines-of-code +than the old *parser wrapper*, is written in pure Python, uses no +exotic language syntax, and has more features. It's sad, really; that +old state machine was really slick. I just need a few sentences here +to mourn that.) + +I don't know how fast it is. I haven't benchmarked or profiled it yet, +so it's probably got a few pointless hot spots, and I've been trying +to err on the side of simplicity and robustness instead of +micro-optimization. But at the architectural level I tried hard to +avoid fundamentally bad decisions, e.g., I believe that all the +parsing algorithms remain linear-time even in the face of pathological +input like slowloris, and there are no byte-by-byte loops. (I also +believe that it maintains bounded memory usage in the face of +arbitrary/pathological input.) + +The whole library is ~800 lines-of-code. You can read and understand +the whole thing in less than an hour. 
Most of the energy invested in +this so far has been spent on trying to keep things simple by +minimizing special-cases and ad hoc state manipulation; even though it +is now quite small and simple, I'm still annoyed that I haven't +figured out how to make it even smaller and simpler. (Unfortunately, +HTTP does not lend itself to simplicity.) + +The API is ~feature complete and I don't expect the general outlines +to change much, but you can't judge an API's ergonomics until you +actually document and use it, so I'd expect some changes in the +details. + +*How do I try it?* + +.. code-block:: sh + + $ pip install h11 + $ git clone git@github.com:python-hyper/h11 + $ cd h11/examples + $ python basic-client.py + +and go from there. + +*License?* + +MIT + +*Code of conduct?* + +Contributors are requested to follow our `code of conduct +`_ in +all project spaces. diff --git a/venv/lib/python3.11/site-packages/h11-0.16.0.dist-info/RECORD b/venv/lib/python3.11/site-packages/h11-0.16.0.dist-info/RECORD new file mode 100644 index 0000000..aa15b9a --- /dev/null +++ b/venv/lib/python3.11/site-packages/h11-0.16.0.dist-info/RECORD @@ -0,0 +1,29 @@ +h11-0.16.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +h11-0.16.0.dist-info/METADATA,sha256=KPMmCYrAn8unm48YD5YIfIQf4kViFct7hyqcfVzRnWQ,8348 +h11-0.16.0.dist-info/RECORD,, +h11-0.16.0.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91 +h11-0.16.0.dist-info/licenses/LICENSE.txt,sha256=N9tbuFkm2yikJ6JYZ_ELEjIAOuob5pzLhRE4rbjm82E,1124 +h11-0.16.0.dist-info/top_level.txt,sha256=F7dC4jl3zeh8TGHEPaWJrMbeuoWbS379Gwdi-Yvdcis,4 +h11/__init__.py,sha256=iO1KzkSO42yZ6ffg-VMgbx_ZVTWGUY00nRYEWn-s3kY,1507 +h11/__pycache__/__init__.cpython-311.pyc,, +h11/__pycache__/_abnf.cpython-311.pyc,, +h11/__pycache__/_connection.cpython-311.pyc,, +h11/__pycache__/_events.cpython-311.pyc,, +h11/__pycache__/_headers.cpython-311.pyc,, +h11/__pycache__/_readers.cpython-311.pyc,, +h11/__pycache__/_receivebuffer.cpython-311.pyc,, +h11/__pycache__/_state.cpython-311.pyc,, +h11/__pycache__/_util.cpython-311.pyc,, +h11/__pycache__/_version.cpython-311.pyc,, +h11/__pycache__/_writers.cpython-311.pyc,, +h11/_abnf.py,sha256=ybixr0xsupnkA6GFAyMubuXF6Tc1lb_hF890NgCsfNc,4815 +h11/_connection.py,sha256=k9YRVf6koZqbttBW36xSWaJpWdZwa-xQVU9AHEo9DuI,26863 +h11/_events.py,sha256=I97aXoal1Wu7dkL548BANBUCkOIbe-x5CioYA9IBY14,11792 +h11/_headers.py,sha256=P7D-lBNxHwdLZPLimmYwrPG-9ZkjElvvJZJdZAgSP-4,10412 +h11/_readers.py,sha256=a4RypORUCC3d0q_kxPuBIM7jTD8iLt5X91TH0FsduN4,8590 +h11/_receivebuffer.py,sha256=xrspsdsNgWFxRfQcTXxR8RrdjRXXTK0Io5cQYWpJ1Ws,5252 +h11/_state.py,sha256=_5LG_BGR8FCcFQeBPH-TMHgm_-B-EUcWCnQof_9XjFE,13231 +h11/_util.py,sha256=LWkkjXyJaFlAy6Lt39w73UStklFT5ovcvo0TkY7RYuk,4888 +h11/_version.py,sha256=GVSsbPSPDcOuF6ptfIiXnVJoaEm3ygXbMnqlr_Giahw,686 +h11/_writers.py,sha256=oFKm6PtjeHfbj4RLX7VB7KDc1gIY53gXG3_HR9ltmTA,5081 +h11/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7 diff --git a/venv/lib/python3.11/site-packages/h11-0.16.0.dist-info/WHEEL b/venv/lib/python3.11/site-packages/h11-0.16.0.dist-info/WHEEL new file mode 100644 index 0000000..1eb3c49 --- /dev/null +++ b/venv/lib/python3.11/site-packages/h11-0.16.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (78.1.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.11/site-packages/h11-0.16.0.dist-info/licenses/LICENSE.txt b/venv/lib/python3.11/site-packages/h11-0.16.0.dist-info/licenses/LICENSE.txt new file mode 100644 index 
0000000..8f080ea --- /dev/null +++ b/venv/lib/python3.11/site-packages/h11-0.16.0.dist-info/licenses/LICENSE.txt @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2016 Nathaniel J. Smith and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/venv/lib/python3.11/site-packages/h11-0.16.0.dist-info/top_level.txt b/venv/lib/python3.11/site-packages/h11-0.16.0.dist-info/top_level.txt new file mode 100644 index 0000000..0d24def --- /dev/null +++ b/venv/lib/python3.11/site-packages/h11-0.16.0.dist-info/top_level.txt @@ -0,0 +1 @@ +h11 diff --git a/venv/lib/python3.11/site-packages/h11/__init__.py b/venv/lib/python3.11/site-packages/h11/__init__.py new file mode 100644 index 0000000..989e92c --- /dev/null +++ b/venv/lib/python3.11/site-packages/h11/__init__.py @@ -0,0 +1,62 @@ +# A highish-level implementation of the HTTP/1.1 wire protocol (RFC 7230), +# containing no networking code at all, loosely modelled on hyper-h2's generic +# implementation of HTTP/2 (and in particular the h2.connection.H2Connection +# class). There's still a bunch of subtle details you need to get right if you +# want to make this actually useful, because it doesn't implement all the +# semantics to check that what you're asking to write to the wire is sensible, +# but at least it gets you out of dealing with the wire itself. 
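The module's own header comment above sums up the design; as a companion to the client walkthrough in the package README, here is a hedged sketch of the server role (only names re-exported by this module are used; the request bytes are a made-up example):

    import h11

    server = h11.Connection(our_role=h11.SERVER)
    server.receive_data(
        b"GET / HTTP/1.1\r\nHost: example.com\r\nConnection: close\r\n\r\n"
    )
    while True:
        event = server.next_event()
        print(type(event).__name__)  # Request, then EndOfMessage
        if type(event) is h11.EndOfMessage:
            break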
+ +from h11._connection import Connection, NEED_DATA, PAUSED +from h11._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from h11._state import ( + CLIENT, + CLOSED, + DONE, + ERROR, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from h11._util import LocalProtocolError, ProtocolError, RemoteProtocolError +from h11._version import __version__ + +PRODUCT_ID = "python-h11/" + __version__ + + +__all__ = ( + "Connection", + "NEED_DATA", + "PAUSED", + "ConnectionClosed", + "Data", + "EndOfMessage", + "Event", + "InformationalResponse", + "Request", + "Response", + "CLIENT", + "CLOSED", + "DONE", + "ERROR", + "IDLE", + "MUST_CLOSE", + "SEND_BODY", + "SEND_RESPONSE", + "SERVER", + "SWITCHED_PROTOCOL", + "ProtocolError", + "LocalProtocolError", + "RemoteProtocolError", +) diff --git a/venv/lib/python3.11/site-packages/h11/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/h11/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..1b731c1 Binary files /dev/null and b/venv/lib/python3.11/site-packages/h11/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/h11/__pycache__/_abnf.cpython-311.pyc b/venv/lib/python3.11/site-packages/h11/__pycache__/_abnf.cpython-311.pyc new file mode 100644 index 0000000..b61c7bf Binary files /dev/null and b/venv/lib/python3.11/site-packages/h11/__pycache__/_abnf.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/h11/__pycache__/_connection.cpython-311.pyc b/venv/lib/python3.11/site-packages/h11/__pycache__/_connection.cpython-311.pyc new file mode 100644 index 0000000..b87c271 Binary files /dev/null and b/venv/lib/python3.11/site-packages/h11/__pycache__/_connection.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/h11/__pycache__/_events.cpython-311.pyc b/venv/lib/python3.11/site-packages/h11/__pycache__/_events.cpython-311.pyc new file mode 100644 index 0000000..2a90985 Binary files /dev/null and b/venv/lib/python3.11/site-packages/h11/__pycache__/_events.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/h11/__pycache__/_headers.cpython-311.pyc b/venv/lib/python3.11/site-packages/h11/__pycache__/_headers.cpython-311.pyc new file mode 100644 index 0000000..ec6d832 Binary files /dev/null and b/venv/lib/python3.11/site-packages/h11/__pycache__/_headers.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/h11/__pycache__/_readers.cpython-311.pyc b/venv/lib/python3.11/site-packages/h11/__pycache__/_readers.cpython-311.pyc new file mode 100644 index 0000000..0d2c4ee Binary files /dev/null and b/venv/lib/python3.11/site-packages/h11/__pycache__/_readers.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/h11/__pycache__/_receivebuffer.cpython-311.pyc b/venv/lib/python3.11/site-packages/h11/__pycache__/_receivebuffer.cpython-311.pyc new file mode 100644 index 0000000..1c6ecbc Binary files /dev/null and b/venv/lib/python3.11/site-packages/h11/__pycache__/_receivebuffer.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/h11/__pycache__/_state.cpython-311.pyc b/venv/lib/python3.11/site-packages/h11/__pycache__/_state.cpython-311.pyc new file mode 100644 index 0000000..89577d0 Binary files /dev/null and b/venv/lib/python3.11/site-packages/h11/__pycache__/_state.cpython-311.pyc differ diff --git 
a/venv/lib/python3.11/site-packages/h11/__pycache__/_util.cpython-311.pyc b/venv/lib/python3.11/site-packages/h11/__pycache__/_util.cpython-311.pyc new file mode 100644 index 0000000..bcfcfb8 Binary files /dev/null and b/venv/lib/python3.11/site-packages/h11/__pycache__/_util.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/h11/__pycache__/_version.cpython-311.pyc b/venv/lib/python3.11/site-packages/h11/__pycache__/_version.cpython-311.pyc new file mode 100644 index 0000000..5dafc12 Binary files /dev/null and b/venv/lib/python3.11/site-packages/h11/__pycache__/_version.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/h11/__pycache__/_writers.cpython-311.pyc b/venv/lib/python3.11/site-packages/h11/__pycache__/_writers.cpython-311.pyc new file mode 100644 index 0000000..6c2c0e0 Binary files /dev/null and b/venv/lib/python3.11/site-packages/h11/__pycache__/_writers.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/h11/_abnf.py b/venv/lib/python3.11/site-packages/h11/_abnf.py new file mode 100644 index 0000000..933587f --- /dev/null +++ b/venv/lib/python3.11/site-packages/h11/_abnf.py @@ -0,0 +1,132 @@ +# We use native strings for all the re patterns, to take advantage of string +# formatting, and then convert to bytestrings when compiling the final re +# objects. + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#whitespace +# OWS = *( SP / HTAB ) +# ; optional whitespace +OWS = r"[ \t]*" + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.token.separators +# token = 1*tchar +# +# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" +# / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" +# / DIGIT / ALPHA +# ; any VCHAR, except delimiters +token = r"[-!#$%&'*+.^_`|~0-9a-zA-Z]+" + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#header.fields +# field-name = token +field_name = token + +# The standard says: +# +# field-value = *( field-content / obs-fold ) +# field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] +# field-vchar = VCHAR / obs-text +# obs-fold = CRLF 1*( SP / HTAB ) +# ; obsolete line folding +# ; see Section 3.2.4 +# +# https://tools.ietf.org/html/rfc5234#appendix-B.1 +# +# VCHAR = %x21-7E +# ; visible (printing) characters +# +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.quoted-string +# obs-text = %x80-FF +# +# However, the standard definition of field-content is WRONG! It disallows +# fields containing a single visible character surrounded by whitespace, +# e.g. "foo a bar". +# +# See: https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189 +# +# So our definition of field_content attempts to fix it up... +# +# Also, we allow lots of control characters, because apparently people assume +# that they're legal in practice (e.g., google analytics makes cookies with +# \x01 in them!): +# https://github.com/python-hyper/h11/issues/57 +# We still don't allow NUL or whitespace, because those are often treated as +# meta-characters and letting them through can lead to nasty issues like SSRF. +vchar = r"[\x21-\x7e]" +vchar_or_obs_text = r"[^\x00\s]" +field_vchar = vchar_or_obs_text +field_content = r"{field_vchar}+(?:[ \t]+{field_vchar}+)*".format(**globals()) + +# We handle obs-fold at a different level, and our fixed-up field_content +# already grows to swallow the whole value, so ? 
instead of *
+field_value = r"({field_content})?".format(**globals())
+
+# header-field = field-name ":" OWS field-value OWS
+header_field = (
+    r"(?P<field_name>{field_name})"
+    r":"
+    r"{OWS}"
+    r"(?P<field_value>{field_value})"
+    r"{OWS}".format(**globals())
+)
+
+# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#request.line
+#
+# request-line = method SP request-target SP HTTP-version CRLF
+# method = token
+# HTTP-version = HTTP-name "/" DIGIT "." DIGIT
+# HTTP-name = %x48.54.54.50 ; "HTTP", case-sensitive
+#
+# request-target is complicated (see RFC 7230 sec 5.3) -- could be path, full
+# URL, host+port (for connect), or even "*", but in any case we are guaranteed
+# that it consists of the visible printing characters.
+method = token
+request_target = r"{vchar}+".format(**globals())
+http_version = r"HTTP/(?P<http_version>[0-9]\.[0-9])"
+request_line = (
+    r"(?P<method>{method})"
+    r" "
+    r"(?P<target>{request_target})"
+    r" "
+    r"{http_version}".format(**globals())
+)
+
+# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#status.line
+#
+# status-line = HTTP-version SP status-code SP reason-phrase CRLF
+# status-code = 3DIGIT
+# reason-phrase = *( HTAB / SP / VCHAR / obs-text )
+status_code = r"[0-9]{3}"
+reason_phrase = r"([ \t]|{vchar_or_obs_text})*".format(**globals())
+status_line = (
+    r"{http_version}"
+    r" "
+    r"(?P<status_code>{status_code})"
+    # However, there are apparently a few too many servers out there that just
+    # leave out the reason phrase:
+    # https://github.com/scrapy/scrapy/issues/345#issuecomment-281756036
+    # https://github.com/seanmonstar/httparse/issues/29
+    # so make it optional. ?: is a non-capturing group.
+    r"(?: (?P<reason>{reason_phrase}))?".format(**globals())
+)
+
+HEXDIG = r"[0-9A-Fa-f]"
+# Actually
+#
+# chunk-size = 1*HEXDIG
+#
+# but we impose an upper-limit to avoid ridiculosity. len(str(2**64)) == 20
+chunk_size = r"({HEXDIG}){{1,20}}".format(**globals())
+# Actually
+#
+# chunk-ext = *( ";" chunk-ext-name [ "=" chunk-ext-val ] )
+#
+# but we aren't parsing the things so we don't really care.
+chunk_ext = r";.*"
+chunk_header = (
+    r"(?P<chunk_size>{chunk_size})"
+    r"(?P<chunk_ext>{chunk_ext})?"
+    r"{OWS}\r\n".format(
+        **globals()
+    )  # Even though the specification does not allow for extra whitespaces,
+    # we are lenient with trailing whitespaces because some servers in the wild use them.
+)
diff --git a/venv/lib/python3.11/site-packages/h11/_connection.py b/venv/lib/python3.11/site-packages/h11/_connection.py
new file mode 100644
index 0000000..e37d82a
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/h11/_connection.py
@@ -0,0 +1,659 @@
+# This contains the main Connection class. Everything in h11 revolves around
+# this.
+from typing import (
+    Any,
+    Callable,
+    cast,
+    Dict,
+    List,
+    Optional,
+    overload,
+    Tuple,
+    Type,
+    Union,
+)
+
+from ._events import (
+    ConnectionClosed,
+    Data,
+    EndOfMessage,
+    Event,
+    InformationalResponse,
+    Request,
+    Response,
+)
+from ._headers import get_comma_header, has_expect_100_continue, set_comma_header
+from ._readers import READERS, ReadersType
+from ._receivebuffer import ReceiveBuffer
+from ._state import (
+    _SWITCH_CONNECT,
+    _SWITCH_UPGRADE,
+    CLIENT,
+    ConnectionState,
+    DONE,
+    ERROR,
+    MIGHT_SWITCH_PROTOCOL,
+    SEND_BODY,
+    SERVER,
+    SWITCHED_PROTOCOL,
+)
+from ._util import (  # Import the internal things we need
+    LocalProtocolError,
+    RemoteProtocolError,
+    Sentinel,
+)
+from ._writers import WRITERS, WritersType
+
+# Everything in __all__ gets re-exported as part of the h11 public API.
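Before the `__all__` export list below, one aside on the `_abnf` patterns defined above: they are native strings that h11's readers encode and compile. A hedged sketch of what the named groups capture (compiling the pattern directly is only for illustration; `_readers.py` does its own compiling and validation):

    import re

    from h11._abnf import request_line

    request_line_re = re.compile(request_line.encode("ascii"))
    m = request_line_re.fullmatch(b"GET /index.html HTTP/1.1")
    print(m.group("method"), m.group("target"), m.group("http_version"))
    # b'GET' b'/index.html' b'1.1'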
+__all__ = ["Connection", "NEED_DATA", "PAUSED"] + + +class NEED_DATA(Sentinel, metaclass=Sentinel): + pass + + +class PAUSED(Sentinel, metaclass=Sentinel): + pass + + +# If we ever have this much buffered without it making a complete parseable +# event, we error out. The only time we really buffer is when reading the +# request/response line + headers together, so this is effectively the limit on +# the size of that. +# +# Some precedents for defaults: +# - node.js: 80 * 1024 +# - tomcat: 8 * 1024 +# - IIS: 16 * 1024 +# - Apache: <8 KiB per line> +DEFAULT_MAX_INCOMPLETE_EVENT_SIZE = 16 * 1024 + + +# RFC 7230's rules for connection lifecycles: +# - If either side says they want to close the connection, then the connection +# must close. +# - HTTP/1.1 defaults to keep-alive unless someone says Connection: close +# - HTTP/1.0 defaults to close unless both sides say Connection: keep-alive +# (and even this is a mess -- e.g. if you're implementing a proxy then +# sending Connection: keep-alive is forbidden). +# +# We simplify life by simply not supporting keep-alive with HTTP/1.0 peers. So +# our rule is: +# - If someone says Connection: close, we will close +# - If someone uses HTTP/1.0, we will close. +def _keep_alive(event: Union[Request, Response]) -> bool: + connection = get_comma_header(event.headers, b"connection") + if b"close" in connection: + return False + if getattr(event, "http_version", b"1.1") < b"1.1": + return False + return True + + +def _body_framing( + request_method: bytes, event: Union[Request, Response] +) -> Tuple[str, Union[Tuple[()], Tuple[int]]]: + # Called when we enter SEND_BODY to figure out framing information for + # this body. + # + # These are the only two events that can trigger a SEND_BODY state: + assert type(event) in (Request, Response) + # Returns one of: + # + # ("content-length", count) + # ("chunked", ()) + # ("http/1.0", ()) + # + # which are (lookup key, *args) for constructing body reader/writer + # objects. + # + # Reference: https://tools.ietf.org/html/rfc7230#section-3.3.3 + # + # Step 1: some responses always have an empty body, regardless of what the + # headers say. + if type(event) is Response: + if ( + event.status_code in (204, 304) + or request_method == b"HEAD" + or (request_method == b"CONNECT" and 200 <= event.status_code < 300) + ): + return ("content-length", (0,)) + # Section 3.3.3 also lists another case -- responses with status_code + # < 200. For us these are InformationalResponses, not Responses, so + # they can't get into this function in the first place. + assert event.status_code >= 200 + + # Step 2: check for Transfer-Encoding (T-E beats C-L): + transfer_encodings = get_comma_header(event.headers, b"transfer-encoding") + if transfer_encodings: + assert transfer_encodings == [b"chunked"] + return ("chunked", ()) + + # Step 3: check for Content-Length + content_lengths = get_comma_header(event.headers, b"content-length") + if content_lengths: + return ("content-length", (int(content_lengths[0]),)) + + # Step 4: no applicable headers; fallback/default depends on type + if type(event) is Request: + return ("content-length", (0,)) + else: + return ("http/1.0", ()) + + +################################################################ +# +# The main Connection class +# +################################################################ + + +class Connection: + """An object encapsulating the state of an HTTP connection. + + Args: + our_role: If you're implementing a client, pass :data:`h11.CLIENT`. 
If
+        you're implementing a server, pass :data:`h11.SERVER`.
+
+        max_incomplete_event_size (int):
+            The maximum number of bytes we're willing to buffer of an
+            incomplete event. In practice this mostly sets a limit on the
+            maximum size of the request/response line + headers. If this is
+            exceeded, then :meth:`next_event` will raise
+            :exc:`RemoteProtocolError`.
+
+    """
+
+    def __init__(
+        self,
+        our_role: Type[Sentinel],
+        max_incomplete_event_size: int = DEFAULT_MAX_INCOMPLETE_EVENT_SIZE,
+    ) -> None:
+        self._max_incomplete_event_size = max_incomplete_event_size
+        # State and role tracking
+        if our_role not in (CLIENT, SERVER):
+            raise ValueError(f"expected CLIENT or SERVER, not {our_role!r}")
+        self.our_role = our_role
+        self.their_role: Type[Sentinel]
+        if our_role is CLIENT:
+            self.their_role = SERVER
+        else:
+            self.their_role = CLIENT
+        self._cstate = ConnectionState()
+
+        # Callables for converting data->events or vice-versa given the
+        # current state
+        self._writer = self._get_io_object(self.our_role, None, WRITERS)
+        self._reader = self._get_io_object(self.their_role, None, READERS)
+
+        # Holds any unprocessed received data
+        self._receive_buffer = ReceiveBuffer()
+        # If this is true, then it indicates that the incoming connection was
+        # closed *after* the end of whatever's in self._receive_buffer:
+        self._receive_buffer_closed = False
+
+        # Extra bits of state that don't fit into the state machine.
+        #
+        # These two are only used to interpret framing headers for figuring
+        # out how to read/write response bodies. their_http_version is also
+        # made available as a convenient public API.
+        self.their_http_version: Optional[bytes] = None
+        self._request_method: Optional[bytes] = None
+        # This is pure flow-control and doesn't at all affect the set of legal
+        # transitions, so no need to bother ConnectionState with it:
+        self.client_is_waiting_for_100_continue = False
+
+    @property
+    def states(self) -> Dict[Type[Sentinel], Type[Sentinel]]:
+        """A dictionary like::
+
+           {CLIENT: <client state>, SERVER: <server state>}
+
+        See :ref:`state-machine` for details.
+
+        """
+        return dict(self._cstate.states)
+
+    @property
+    def our_state(self) -> Type[Sentinel]:
+        """The current state of whichever role we are playing. See
+        :ref:`state-machine` for details.
+        """
+        return self._cstate.states[self.our_role]
+
+    @property
+    def their_state(self) -> Type[Sentinel]:
+        """The current state of whichever role we are NOT playing. See
+        :ref:`state-machine` for details.
+        """
+        return self._cstate.states[self.their_role]
+
+    @property
+    def they_are_waiting_for_100_continue(self) -> bool:
+        return self.their_role is CLIENT and self.client_is_waiting_for_100_continue
+
+    def start_next_cycle(self) -> None:
+        """Attempt to reset our connection state for a new request/response
+        cycle.
+
+        If both client and server are in :data:`DONE` state, then resets them
+        both to :data:`IDLE` state in preparation for a new request/response
+        cycle on this same connection. Otherwise, raises a
+        :exc:`LocalProtocolError`.
+
+        See :ref:`keepalive-and-pipelining`.
+ + """ + old_states = dict(self._cstate.states) + self._cstate.start_next_cycle() + self._request_method = None + # self.their_http_version gets left alone, since it presumably lasts + # beyond a single request/response cycle + assert not self.client_is_waiting_for_100_continue + self._respond_to_state_changes(old_states) + + def _process_error(self, role: Type[Sentinel]) -> None: + old_states = dict(self._cstate.states) + self._cstate.process_error(role) + self._respond_to_state_changes(old_states) + + def _server_switch_event(self, event: Event) -> Optional[Type[Sentinel]]: + if type(event) is InformationalResponse and event.status_code == 101: + return _SWITCH_UPGRADE + if type(event) is Response: + if ( + _SWITCH_CONNECT in self._cstate.pending_switch_proposals + and 200 <= event.status_code < 300 + ): + return _SWITCH_CONNECT + return None + + # All events go through here + def _process_event(self, role: Type[Sentinel], event: Event) -> None: + # First, pass the event through the state machine to make sure it + # succeeds. + old_states = dict(self._cstate.states) + if role is CLIENT and type(event) is Request: + if event.method == b"CONNECT": + self._cstate.process_client_switch_proposal(_SWITCH_CONNECT) + if get_comma_header(event.headers, b"upgrade"): + self._cstate.process_client_switch_proposal(_SWITCH_UPGRADE) + server_switch_event = None + if role is SERVER: + server_switch_event = self._server_switch_event(event) + self._cstate.process_event(role, type(event), server_switch_event) + + # Then perform the updates triggered by it. + + if type(event) is Request: + self._request_method = event.method + + if role is self.their_role and type(event) in ( + Request, + Response, + InformationalResponse, + ): + event = cast(Union[Request, Response, InformationalResponse], event) + self.their_http_version = event.http_version + + # Keep alive handling + # + # RFC 7230 doesn't really say what one should do if Connection: close + # shows up on a 1xx InformationalResponse. I think the idea is that + # this is not supposed to happen. In any case, if it does happen, we + # ignore it. + if type(event) in (Request, Response) and not _keep_alive( + cast(Union[Request, Response], event) + ): + self._cstate.process_keep_alive_disabled() + + # 100-continue + if type(event) is Request and has_expect_100_continue(event): + self.client_is_waiting_for_100_continue = True + if type(event) in (InformationalResponse, Response): + self.client_is_waiting_for_100_continue = False + if role is CLIENT and type(event) in (Data, EndOfMessage): + self.client_is_waiting_for_100_continue = False + + self._respond_to_state_changes(old_states, event) + + def _get_io_object( + self, + role: Type[Sentinel], + event: Optional[Event], + io_dict: Union[ReadersType, WritersType], + ) -> Optional[Callable[..., Any]]: + # event may be None; it's only used when entering SEND_BODY + state = self._cstate.states[role] + if state is SEND_BODY: + # Special case: the io_dict has a dict of reader/writer factories + # that depend on the request/response framing. + framing_type, args = _body_framing( + cast(bytes, self._request_method), cast(Union[Request, Response], event) + ) + return io_dict[SEND_BODY][framing_type](*args) # type: ignore[index] + else: + # General case: the io_dict just has the appropriate reader/writer + # for this state + return io_dict.get((role, state)) # type: ignore[return-value] + + # This must be called after any action that might have caused + # self._cstate.states to change. 
+ def _respond_to_state_changes( + self, + old_states: Dict[Type[Sentinel], Type[Sentinel]], + event: Optional[Event] = None, + ) -> None: + # Update reader/writer + if self.our_state != old_states[self.our_role]: + self._writer = self._get_io_object(self.our_role, event, WRITERS) + if self.their_state != old_states[self.their_role]: + self._reader = self._get_io_object(self.their_role, event, READERS) + + @property + def trailing_data(self) -> Tuple[bytes, bool]: + """Data that has been received, but not yet processed, represented as + a tuple with two elements, where the first is a byte-string containing + the unprocessed data itself, and the second is a bool that is True if + the receive connection was closed. + + See :ref:`switching-protocols` for discussion of why you'd want this. + """ + return (bytes(self._receive_buffer), self._receive_buffer_closed) + + def receive_data(self, data: bytes) -> None: + """Add data to our internal receive buffer. + + This does not actually do any processing on the data, just stores + it. To trigger processing, you have to call :meth:`next_event`. + + Args: + data (:term:`bytes-like object`): + The new data that was just received. + + Special case: If *data* is an empty byte-string like ``b""``, + then this indicates that the remote side has closed the + connection (end of file). Normally this is convenient, because + standard Python APIs like :meth:`file.read` or + :meth:`socket.recv` use ``b""`` to indicate end-of-file, while + other failures to read are indicated using other mechanisms + like raising :exc:`TimeoutError`. When using such an API you + can just blindly pass through whatever you get from ``read`` + to :meth:`receive_data`, and everything will work. + + But, if you have an API where reading an empty string is a + valid non-EOF condition, then you need to be aware of this and + make sure to check for such strings and avoid passing them to + :meth:`receive_data`. + + Returns: + Nothing, but after calling this you should call :meth:`next_event` + to parse the newly received data. + + Raises: + RuntimeError: + Raised if you pass an empty *data*, indicating EOF, and then + pass a non-empty *data*, indicating more data that somehow + arrived after the EOF. + + (Calling ``receive_data(b"")`` multiple times is fine, + and equivalent to calling it once.) + + """ + if data: + if self._receive_buffer_closed: + raise RuntimeError("received close, then received more data?") + self._receive_buffer += data + else: + self._receive_buffer_closed = True + + def _extract_next_receive_event( + self, + ) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]: + state = self.their_state + # We don't pause immediately when they enter DONE, because even in + # DONE state we can still process a ConnectionClosed() event. But + # if we have data in our buffer, then we definitely aren't getting + # a ConnectionClosed() immediately and we need to pause. + if state is DONE and self._receive_buffer: + return PAUSED + if state is MIGHT_SWITCH_PROTOCOL or state is SWITCHED_PROTOCOL: + return PAUSED + assert self._reader is not None + event = self._reader(self._receive_buffer) + if event is None: + if not self._receive_buffer and self._receive_buffer_closed: + # In some unusual cases (basically just HTTP/1.0 bodies), EOF + # triggers an actual protocol event; in that case, we want to + # return that event, and then the state will change and we'll + # get called again to generate the actual ConnectionClosed(). 
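The ``b""``-means-EOF convention plays out like this; a sketch where the peer closes cleanly after a complete response (wire bytes again hand-written):

```python
import h11

conn = h11.Connection(our_role=h11.CLIENT)
conn.send(h11.Request(method="GET", target="/", headers=[("Host", "example.com")]))
conn.send(h11.EndOfMessage())

conn.receive_data(b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n")
conn.receive_data(b"")  # socket.recv() returned b"": the peer closed

assert type(conn.next_event()) is h11.Response
assert type(conn.next_event()) is h11.EndOfMessage
# With the buffer drained and the receive side closed, the close itself
# now surfaces as an event:
assert type(conn.next_event()) is h11.ConnectionClosed
```

Had the close arrived mid-message (say, after a partial body), `next_event` would instead raise a :exc:`RemoteProtocolError`.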
+ if hasattr(self._reader, "read_eof"): + event = self._reader.read_eof() + else: + event = ConnectionClosed() + if event is None: + event = NEED_DATA + return event # type: ignore[no-any-return] + + def next_event(self) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]: + """Parse the next event out of our receive buffer, update our internal + state, and return it. + + This is a mutating operation -- think of it like calling :func:`next` + on an iterator. + + Returns: + : One of three things: + + 1) An event object -- see :ref:`events`. + + 2) The special constant :data:`NEED_DATA`, which indicates that + you need to read more data from your socket and pass it to + :meth:`receive_data` before this method will be able to return + any more events. + + 3) The special constant :data:`PAUSED`, which indicates that we + are not in a state where we can process incoming data (usually + because the peer has finished their part of the current + request/response cycle, and you have not yet called + :meth:`start_next_cycle`). See :ref:`flow-control` for details. + + Raises: + RemoteProtocolError: + The peer has misbehaved. You should close the connection + (possibly after sending some kind of 4xx response). + + Once this method returns :class:`ConnectionClosed`, all + subsequent calls will also return :class:`ConnectionClosed`. + + If this method raises any exception besides :exc:`RemoteProtocolError` + then that's a bug -- if it happens please file a bug report! + + If this method raises any exception then it also sets + :attr:`Connection.their_state` to :data:`ERROR` -- see + :ref:`error-handling` for discussion. + + """ + + if self.their_state is ERROR: + raise RemoteProtocolError("Can't receive data when peer state is ERROR") + try: + event = self._extract_next_receive_event() + if event not in [NEED_DATA, PAUSED]: + self._process_event(self.their_role, cast(Event, event)) + if event is NEED_DATA: + if len(self._receive_buffer) > self._max_incomplete_event_size: + # 431 is "Request header fields too large" which is pretty + # much the only situation where we can get here + raise RemoteProtocolError( + "Receive buffer too long", error_status_hint=431 + ) + if self._receive_buffer_closed: + # We're still trying to complete some event, but that's + # never going to happen because no more data is coming + raise RemoteProtocolError("peer unexpectedly closed connection") + return event + except BaseException as exc: + self._process_error(self.their_role) + if isinstance(exc, LocalProtocolError): + exc._reraise_as_remote_protocol_error() + else: + raise + + @overload + def send(self, event: ConnectionClosed) -> None: + ... + + @overload + def send( + self, event: Union[Request, InformationalResponse, Response, Data, EndOfMessage] + ) -> bytes: + ... + + @overload + def send(self, event: Event) -> Optional[bytes]: + ... + + def send(self, event: Event) -> Optional[bytes]: + """Convert a high-level event into bytes that can be sent to the peer, + while updating our internal state machine. + + Args: + event: The :ref:`event <events>` to send. + + Returns: + If ``type(event) is ConnectionClosed``, then returns + ``None``. Otherwise, returns a :term:`bytes-like object`. + + Raises: + LocalProtocolError: + Sending this event at this time would violate our + understanding of the HTTP/1.1 protocol. + + If this method raises any exception then it also sets + :attr:`Connection.our_state` to :data:`ERROR` -- see + :ref:`error-handling` for discussion.
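And the mirror image on the server side; a sketch of driving the writers through `send()` (request bytes hand-written):

```python
import h11

server = h11.Connection(our_role=h11.SERVER)
server.receive_data(
    b"GET / HTTP/1.1\r\nHost: example.com\r\nConnection: close\r\n\r\n"
)
assert type(server.next_event()) is h11.Request
assert type(server.next_event()) is h11.EndOfMessage

wire = server.send(h11.Response(status_code=200, headers=[("Content-Length", "2")]))
wire += server.send(h11.Data(data=b"ok"))
wire += server.send(h11.EndOfMessage())

# The client asked for Connection: close, so the cycle can't be reused;
# a ConnectionClosed event produces no bytes, just a state change:
assert server.send(h11.ConnectionClosed()) is None
```

Note that if the response had omitted ``Content-Length``, `_clean_up_response_headers_for_sending` (below) would have chosen framing for us: ``Transfer-Encoding: chunked`` for this HTTP/1.1 peer, or close-delimited framing for an HTTP/1.0 one.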
+ + """ + data_list = self.send_with_data_passthrough(event) + if data_list is None: + return None + else: + return b"".join(data_list) + + def send_with_data_passthrough(self, event: Event) -> Optional[List[bytes]]: + """Identical to :meth:`send`, except that in situations where + :meth:`send` returns a single :term:`bytes-like object`, this instead + returns a list of them -- and when sending a :class:`Data` event, this + list is guaranteed to contain the exact object you passed in as + :attr:`Data.data`. See :ref:`sendfile` for discussion. + + """ + if self.our_state is ERROR: + raise LocalProtocolError("Can't send data when our state is ERROR") + try: + if type(event) is Response: + event = self._clean_up_response_headers_for_sending(event) + # We want to call _process_event before calling the writer, + # because if someone tries to do something invalid then this will + # give a sensible error message, while our writers all just assume + # they will only receive valid events. But, _process_event might + # change self._writer. So we have to do a little dance: + writer = self._writer + self._process_event(self.our_role, event) + if type(event) is ConnectionClosed: + return None + else: + # In any situation where writer is None, process_event should + # have raised ProtocolError + assert writer is not None + data_list: List[bytes] = [] + writer(event, data_list.append) + return data_list + except: + self._process_error(self.our_role) + raise + + def send_failed(self) -> None: + """Notify the state machine that we failed to send the data it gave + us. + + This causes :attr:`Connection.our_state` to immediately become + :data:`ERROR` -- see :ref:`error-handling` for discussion. + + """ + self._process_error(self.our_role) + + # When sending a Response, we take responsibility for a few things: + # + # - Sometimes you MUST set Connection: close. We take care of those + # times. (You can also set it yourself if you want, and if you do then + # we'll respect that and close the connection at the right time. But you + # don't have to worry about that unless you want to.) + # + # - The user has to set Content-Length if they want it. Otherwise, for + # responses that have bodies (e.g. not HEAD), then we will automatically + # select the right mechanism for streaming a body of unknown length, + # which depends on depending on the peer's HTTP version. + # + # This function's *only* responsibility is making sure headers are set up + # right -- everything downstream just looks at the headers. There are no + # side channels. + def _clean_up_response_headers_for_sending(self, response: Response) -> Response: + assert type(response) is Response + + headers = response.headers + need_close = False + + # HEAD requests need some special handling: they always act like they + # have Content-Length: 0, and that's how _body_framing treats + # them. But their headers are supposed to match what we would send if + # the request was a GET. (Technically there is one deviation allowed: + # we're allowed to leave out the framing headers -- see + # https://tools.ietf.org/html/rfc7231#section-4.3.2 . But it's just as + # easy to get them right.) + method_for_choosing_headers = cast(bytes, self._request_method) + if method_for_choosing_headers == b"HEAD": + method_for_choosing_headers = b"GET" + framing_type, _ = _body_framing(method_for_choosing_headers, response) + if framing_type in ("chunked", "http/1.0"): + # This response has a body of unknown length. 
+ # If our peer is HTTP/1.1, we use Transfer-Encoding: chunked + # If our peer is HTTP/1.0, we use no framing headers, and close the + # connection afterwards. + # + # Make sure to clear Content-Length (in principle user could have + # set both and then we ignored Content-Length b/c + # Transfer-Encoding overwrote it -- this would be naughty of them, + # but the HTTP spec says that if our peer does this then we have + # to fix it instead of erroring out, so we'll accord the user the + # same respect). + headers = set_comma_header(headers, b"content-length", []) + if self.their_http_version is None or self.their_http_version < b"1.1": + # Either we never got a valid request and are sending back an + # error (their_http_version is None), so we assume the worst; + # or else we did get a valid HTTP/1.0 request, so we know that + # they don't understand chunked encoding. + headers = set_comma_header(headers, b"transfer-encoding", []) + # This is actually redundant ATM, since currently we + # unconditionally disable keep-alive when talking to HTTP/1.0 + # peers. But let's be defensive just in case we add + # Connection: keep-alive support later: + if self._request_method != b"HEAD": + need_close = True + else: + headers = set_comma_header(headers, b"transfer-encoding", [b"chunked"]) + + if not self._cstate.keep_alive or need_close: + # Make sure Connection: close is set + connection = set(get_comma_header(headers, b"connection")) + connection.discard(b"keep-alive") + connection.add(b"close") + headers = set_comma_header(headers, b"connection", sorted(connection)) + + return Response( + headers=headers, + status_code=response.status_code, + http_version=response.http_version, + reason=response.reason, + ) diff --git a/venv/lib/python3.11/site-packages/h11/_events.py b/venv/lib/python3.11/site-packages/h11/_events.py new file mode 100644 index 0000000..ca1c3ad --- /dev/null +++ b/venv/lib/python3.11/site-packages/h11/_events.py @@ -0,0 +1,369 @@ +# High level events that make up HTTP/1.1 conversations. Loosely inspired by +# the corresponding events in hyper-h2: +# +# http://python-hyper.org/h2/en/stable/api.html#events +# +# Don't subclass these. Stuff will break. + +import re +from abc import ABC +from dataclasses import dataclass +from typing import List, Tuple, Union + +from ._abnf import method, request_target +from ._headers import Headers, normalize_and_validate +from ._util import bytesify, LocalProtocolError, validate + +# Everything in __all__ gets re-exported as part of the h11 public API. +__all__ = [ + "Event", + "Request", + "InformationalResponse", + "Response", + "Data", + "EndOfMessage", + "ConnectionClosed", +] + +method_re = re.compile(method.encode("ascii")) +request_target_re = re.compile(request_target.encode("ascii")) + + +class Event(ABC): + """ + Base class for h11 events. + """ + + __slots__ = () + + +@dataclass(init=False, frozen=True) +class Request(Event): + """The beginning of an HTTP request. + + Fields: + + .. attribute:: method + + An HTTP method, e.g. ``b"GET"`` or ``b"POST"``. Always a byte + string. :term:`Bytes-like objects <bytes-like object>` and native + strings containing only ascii characters will be automatically + converted to byte strings. + + .. attribute:: target + + The target of an HTTP request, e.g. ``b"/index.html"``, or one of the + more exotic formats described in `RFC 7230, section 5.3 + <https://tools.ietf.org/html/rfc7230#section-5.3>`_. Always a byte + string. :term:`Bytes-like objects <bytes-like object>` and native + strings containing only ascii characters will be automatically + converted to byte strings. + + ..
attribute:: headers + + Request headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules <headers-format>` for details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules + <http_version-format>` for details. + + """ + + __slots__ = ("method", "headers", "target", "http_version") + + method: bytes + headers: Headers + target: bytes + http_version: bytes + + def __init__( + self, + *, + method: Union[bytes, str], + headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]], + target: Union[bytes, str], + http_version: Union[bytes, str] = b"1.1", + _parsed: bool = False, + ) -> None: + super().__init__() + if isinstance(headers, Headers): + object.__setattr__(self, "headers", headers) + else: + object.__setattr__( + self, "headers", normalize_and_validate(headers, _parsed=_parsed) + ) + if not _parsed: + object.__setattr__(self, "method", bytesify(method)) + object.__setattr__(self, "target", bytesify(target)) + object.__setattr__(self, "http_version", bytesify(http_version)) + else: + object.__setattr__(self, "method", method) + object.__setattr__(self, "target", target) + object.__setattr__(self, "http_version", http_version) + + # "A server MUST respond with a 400 (Bad Request) status code to any + # HTTP/1.1 request message that lacks a Host header field and to any + # request message that contains more than one Host header field or a + # Host header field with an invalid field-value." + # -- https://tools.ietf.org/html/rfc7230#section-5.4 + host_count = 0 + for name, value in self.headers: + if name == b"host": + host_count += 1 + if self.http_version == b"1.1" and host_count == 0: + raise LocalProtocolError("Missing mandatory Host: header") + if host_count > 1: + raise LocalProtocolError("Found multiple Host: headers") + + validate(method_re, self.method, "Illegal method characters") + validate(request_target_re, self.target, "Illegal target characters") + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class _ResponseBase(Event): + __slots__ = ("headers", "http_version", "reason", "status_code") + + headers: Headers + http_version: bytes + reason: bytes + status_code: int + + def __init__( + self, + *, + headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]], + status_code: int, + http_version: Union[bytes, str] = b"1.1", + reason: Union[bytes, str] = b"", + _parsed: bool = False, + ) -> None: + super().__init__() + if isinstance(headers, Headers): + object.__setattr__(self, "headers", headers) + else: + object.__setattr__( + self, "headers", normalize_and_validate(headers, _parsed=_parsed) + ) + if not _parsed: + object.__setattr__(self, "reason", bytesify(reason)) + object.__setattr__(self, "http_version", bytesify(http_version)) + if not isinstance(status_code, int): + raise LocalProtocolError("status code must be integer") + # Because IntEnum objects are instances of int, but aren't + # duck-compatible (sigh), see gh-72. + object.__setattr__(self, "status_code", int(status_code)) + else: + object.__setattr__(self, "reason", reason) + object.__setattr__(self, "http_version", http_version) + object.__setattr__(self, "status_code", status_code) + + self.__post_init__() + + def __post_init__(self) -> None: + pass + + # This is an unhashable type.
+ __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class InformationalResponse(_ResponseBase): + """An HTTP informational response. + + Fields: + + .. attribute:: status_code + + The status code of this response, as an integer. For an + :class:`InformationalResponse`, this is always in the range [100, + 200). + + .. attribute:: headers + + Response headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules <headers-format>` for + details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules + <http_version-format>` for details. + + .. attribute:: reason + + The reason phrase of this response, as a byte string. For example: + ``b"OK"``, or ``b"Not Found"``. + + """ + + def __post_init__(self) -> None: + if not (100 <= self.status_code < 200): + raise LocalProtocolError( + "InformationalResponse status_code should be in range " + "[100, 200), not {}".format(self.status_code) + ) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class Response(_ResponseBase): + """The beginning of an HTTP response. + + Fields: + + .. attribute:: status_code + + The status code of this response, as an integer. For a + :class:`Response`, this is always in the range [200, + 1000). + + .. attribute:: headers + + Response headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules <headers-format>` for details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules + <http_version-format>` for details. + + .. attribute:: reason + + The reason phrase of this response, as a byte string. For example: + ``b"OK"``, or ``b"Not Found"``. + + """ + + def __post_init__(self) -> None: + if not (200 <= self.status_code < 1000): + raise LocalProtocolError( + "Response status_code should be in range [200, 1000), not {}".format( + self.status_code + ) + ) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class Data(Event): + """Part of an HTTP message body. + + Fields: + + .. attribute:: data + + A :term:`bytes-like object` containing part of a message body. Or, if + using the ``combine=False`` argument to :meth:`Connection.send`, then + any object that your socket writing code knows what to do with, and for + which calling :func:`len` returns the number of bytes that will be + written -- see :ref:`sendfile` for details. + + .. attribute:: chunk_start + + A marker that indicates whether this data object is from the start of a + chunked transfer encoding chunk. This field is ignored when a Data + event is provided to :meth:`Connection.send`: it is only valid on + events emitted from :meth:`Connection.next_event`. You probably + shouldn't use this attribute at all; see + :ref:`chunk-delimiters-are-bad` for details. + + .. attribute:: chunk_end + + A marker that indicates whether this data object is the last for a + given chunked transfer encoding chunk. This field is ignored when + a Data event is provided to :meth:`Connection.send`: it is only valid + on events emitted from :meth:`Connection.next_event`. You probably + shouldn't use this attribute at all; see + :ref:`chunk-delimiters-are-bad` for details.
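Since all of these constructors validate eagerly, malformed events fail at construction time rather than leaking onto the wire; a small sketch:

```python
import h11

h11.Response(status_code=200, headers=[])               # fine
h11.InformationalResponse(status_code=101, headers=[])  # fine

try:
    h11.InformationalResponse(status_code=200, headers=[])
except h11.LocalProtocolError:
    pass  # status_code must be in [100, 200)

try:
    h11.Request(method="GET", target="/", headers=[])
except h11.LocalProtocolError:
    pass  # HTTP/1.1 requests must carry a Host: header
```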
+ + """ + + __slots__ = ("data", "chunk_start", "chunk_end") + + data: bytes + chunk_start: bool + chunk_end: bool + + def __init__( + self, data: bytes, chunk_start: bool = False, chunk_end: bool = False + ) -> None: + object.__setattr__(self, "data", data) + object.__setattr__(self, "chunk_start", chunk_start) + object.__setattr__(self, "chunk_end", chunk_end) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +# XX FIXME: "A recipient MUST ignore (or consider as an error) any fields that +# are forbidden to be sent in a trailer, since processing them as if they were +# present in the header section might bypass external security filters." +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#chunked.trailer.part +# Unfortunately, the list of forbidden fields is long and vague :-/ +@dataclass(init=False, frozen=True) +class EndOfMessage(Event): + """The end of an HTTP message. + + Fields: + + .. attribute:: headers + + Default value: ``[]`` + + Any trailing headers attached to this message, represented as a list of + (name, value) pairs. See :ref:`the header normalization rules + ` for details. + + Must be empty unless ``Transfer-Encoding: chunked`` is in use. + + """ + + __slots__ = ("headers",) + + headers: Headers + + def __init__( + self, + *, + headers: Union[ + Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]], None + ] = None, + _parsed: bool = False, + ) -> None: + super().__init__() + if headers is None: + headers = Headers([]) + elif not isinstance(headers, Headers): + headers = normalize_and_validate(headers, _parsed=_parsed) + + object.__setattr__(self, "headers", headers) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(frozen=True) +class ConnectionClosed(Event): + """This event indicates that the sender has closed their outgoing + connection. + + Note that this does not necessarily mean that they can't *receive* further + data, because TCP connections are composed to two one-way channels which + can be closed independently. See :ref:`closing` for details. + + No fields. + """ + + pass diff --git a/venv/lib/python3.11/site-packages/h11/_headers.py b/venv/lib/python3.11/site-packages/h11/_headers.py new file mode 100644 index 0000000..31da3e2 --- /dev/null +++ b/venv/lib/python3.11/site-packages/h11/_headers.py @@ -0,0 +1,282 @@ +import re +from typing import AnyStr, cast, List, overload, Sequence, Tuple, TYPE_CHECKING, Union + +from ._abnf import field_name, field_value +from ._util import bytesify, LocalProtocolError, validate + +if TYPE_CHECKING: + from ._events import Request + +try: + from typing import Literal +except ImportError: + from typing_extensions import Literal # type: ignore + +CONTENT_LENGTH_MAX_DIGITS = 20 # allow up to 1 billion TB - 1 + + +# Facts +# ----- +# +# Headers are: +# keys: case-insensitive ascii +# values: mixture of ascii and raw bytes +# +# "Historically, HTTP has allowed field content with text in the ISO-8859-1 +# charset [ISO-8859-1], supporting other charsets only through use of +# [RFC2047] encoding. In practice, most HTTP header field values use only a +# subset of the US-ASCII charset [USASCII]. Newly defined header fields SHOULD +# limit their field values to US-ASCII octets. A recipient SHOULD treat other +# octets in field content (obs-text) as opaque data." 
+# And it deprecates all non-ascii values +# +# Leading/trailing whitespace in header names is forbidden +# +# Values get leading/trailing whitespace stripped +# +# Content-Disposition actually needs to contain unicode semantically; to +# accomplish this it has a terrifically weird way of encoding the filename +# itself as ascii (and even this still has lots of cross-browser +# incompatibilities) +# +# Order is important: +# "a proxy MUST NOT change the order of these field values when forwarding a +# message" +# (and there are several headers where the order indicates a preference) +# +# Multiple occurrences of the same header: +# "A sender MUST NOT generate multiple header fields with the same field name +# in a message unless either the entire field value for that header field is +# defined as a comma-separated list [or the header is Set-Cookie which gets a +# special exception]" - RFC 7230. (cookies are in RFC 6265) +# +# So every header aside from Set-Cookie can be merged by b", ".join if it +# occurs repeatedly. But, of course, they can't necessarily be split by +# .split(b","), because quoting. +# +# Given all this mess (case insensitive, duplicates allowed, order is +# important, ...), there doesn't appear to be any standard way to handle +# headers in Python -- they're almost like dicts, but... actually just +# aren't. For now we punt and just use a super simple representation: headers +# are a list of pairs +# +# [(name1, value1), (name2, value2), ...] +# +# where all entries are bytestrings, names are lowercase and have no +# leading/trailing whitespace, and values are bytestrings with no +# leading/trailing whitespace. Searching and updating are done via naive O(n) +# methods. +# +# Maybe a dict-of-lists would be better? + +_content_length_re = re.compile(rb"[0-9]+") +_field_name_re = re.compile(field_name.encode("ascii")) +_field_value_re = re.compile(field_value.encode("ascii")) + + +class Headers(Sequence[Tuple[bytes, bytes]]): + """ + A list-like interface that allows iterating over headers as byte-pairs + of (lowercased-name, value). + + Internally we actually store the representation as three-tuples, + including both the raw original casing, in order to preserve casing + over-the-wire, and the lowercased name, for case-insensitive comparisons.
+ + r = Request( + method="GET", + target="/", + headers=[("Host", "example.org"), ("Connection", "keep-alive")], + http_version="1.1", + ) + assert r.headers == [ + (b"host", b"example.org"), + (b"connection", b"keep-alive") + ] + assert r.headers.raw_items() == [ + (b"Host", b"example.org"), + (b"Connection", b"keep-alive") + ] + """ + + __slots__ = "_full_items" + + def __init__(self, full_items: List[Tuple[bytes, bytes, bytes]]) -> None: + self._full_items = full_items + + def __bool__(self) -> bool: + return bool(self._full_items) + + def __eq__(self, other: object) -> bool: + return list(self) == list(other) # type: ignore + + def __len__(self) -> int: + return len(self._full_items) + + def __repr__(self) -> str: + return "<Headers(%s)>" % repr(list(self)) + + def __getitem__(self, idx: int) -> Tuple[bytes, bytes]: # type: ignore[override] + _, name, value = self._full_items[idx] + return (name, value) + + def raw_items(self) -> List[Tuple[bytes, bytes]]: + return [(raw_name, value) for raw_name, _, value in self._full_items] + + +HeaderTypes = Union[ + List[Tuple[bytes, bytes]], + List[Tuple[bytes, str]], + List[Tuple[str, bytes]], + List[Tuple[str, str]], +] + + +@overload +def normalize_and_validate(headers: Headers, _parsed: Literal[True]) -> Headers: + ... + + +@overload +def normalize_and_validate(headers: HeaderTypes, _parsed: Literal[False]) -> Headers: + ... + + +@overload +def normalize_and_validate( + headers: Union[Headers, HeaderTypes], _parsed: bool = False +) -> Headers: + ... + + +def normalize_and_validate( + headers: Union[Headers, HeaderTypes], _parsed: bool = False +) -> Headers: + new_headers = [] + seen_content_length = None + saw_transfer_encoding = False + for name, value in headers: + # For headers coming out of the parser, we can safely skip some steps, + # because it always returns bytes and has already run these regexes + # over the data: + if not _parsed: + name = bytesify(name) + value = bytesify(value) + validate(_field_name_re, name, "Illegal header name {!r}", name) + validate(_field_value_re, value, "Illegal header value {!r}", value) + assert isinstance(name, bytes) + assert isinstance(value, bytes) + + raw_name = name + name = name.lower() + if name == b"content-length": + lengths = {length.strip() for length in value.split(b",")} + if len(lengths) != 1: + raise LocalProtocolError("conflicting Content-Length headers") + value = lengths.pop() + validate(_content_length_re, value, "bad Content-Length") + if len(value) > CONTENT_LENGTH_MAX_DIGITS: + raise LocalProtocolError("bad Content-Length") + if seen_content_length is None: + seen_content_length = value + new_headers.append((raw_name, name, value)) + elif seen_content_length != value: + raise LocalProtocolError("conflicting Content-Length headers") + elif name == b"transfer-encoding": + # "A server that receives a request message with a transfer coding + # it does not understand SHOULD respond with 501 (Not + # Implemented)."
+ # https://tools.ietf.org/html/rfc7230#section-3.3.1 + if saw_transfer_encoding: + raise LocalProtocolError( + "multiple Transfer-Encoding headers", error_status_hint=501 + ) + # "All transfer-coding names are case-insensitive" + # -- https://tools.ietf.org/html/rfc7230#section-4 + value = value.lower() + if value != b"chunked": + raise LocalProtocolError( + "Only Transfer-Encoding: chunked is supported", + error_status_hint=501, + ) + saw_transfer_encoding = True + new_headers.append((raw_name, name, value)) + else: + new_headers.append((raw_name, name, value)) + return Headers(new_headers) + + +def get_comma_header(headers: Headers, name: bytes) -> List[bytes]: + # Should only be used for headers whose value is a list of + # comma-separated, case-insensitive values. + # + # The header name `name` is expected to be lower-case bytes. + # + # Connection: meets these criteria (including case insensitivity). + # + # Content-Length: technically is just a single value (1*DIGIT), but the + # standard makes reference to implementations that do multiple values, and + # using this doesn't hurt. Ditto, case insensitivity doesn't hurt either + # way. + # + # Transfer-Encoding: is more complex (allows for quoted strings), so + # splitting on , is actually wrong. For example, this is legal: + # + # Transfer-Encoding: foo; options="1,2", chunked + # + # and should be parsed as + # + # foo; options="1,2" + # chunked + # + # but this naive function will parse it as + # + # foo; options="1 + # 2" + # chunked + # + # However, this is okay because the only thing we are going to do with + # any Transfer-Encoding is reject ones that aren't just "chunked", so + # both of these will be treated the same anyway. + # + # Expect: the only legal value is the literal string + # "100-continue". Splitting on commas is harmless. Case insensitive. + # + out: List[bytes] = [] + for _, found_name, found_raw_value in headers._full_items: + if found_name == name: + found_raw_value = found_raw_value.lower() + for found_split_value in found_raw_value.split(b","): + found_split_value = found_split_value.strip() + if found_split_value: + out.append(found_split_value) + return out + + +def set_comma_header(headers: Headers, name: bytes, new_values: List[bytes]) -> Headers: + # The header name `name` is expected to be lower-case bytes. + # + # Note that when we store the header we use title casing for the header + # names, in order to match the conventional HTTP header style. + # + # Simply calling `.title()` is a blunt approach, but it's correct + # here given the cases where we're using `set_comma_header`... + # + # Connection, Content-Length, Transfer-Encoding. + new_headers: List[Tuple[bytes, bytes]] = [] + for found_raw_name, found_name, found_raw_value in headers._full_items: + if found_name != name: + new_headers.append((found_raw_name, found_raw_value)) + for new_value in new_values: + new_headers.append((name.title(), new_value)) + return normalize_and_validate(new_headers) + + +def has_expect_100_continue(request: "Request") -> bool: + # https://tools.ietf.org/html/rfc7231#section-5.1.1 + # "A server that receives a 100-continue expectation in an HTTP/1.0 request + # MUST ignore that expectation."
+ if request.http_version < b"1.1": + return False + expect = get_comma_header(request.headers, b"expect") + return b"100-continue" in expect diff --git a/venv/lib/python3.11/site-packages/h11/_readers.py b/venv/lib/python3.11/site-packages/h11/_readers.py new file mode 100644 index 0000000..576804c --- /dev/null +++ b/venv/lib/python3.11/site-packages/h11/_readers.py @@ -0,0 +1,250 @@ +# Code to read HTTP data +# +# Strategy: each reader is a callable which takes a ReceiveBuffer object, and +# either: +# 1) consumes some of it and returns an Event +# 2) raises a LocalProtocolError (for consistency -- e.g. we call validate() +# and it might raise a LocalProtocolError, so simpler just to always use +# this) +# 3) returns None, meaning "I need more data" +# +# If they have a .read_eof attribute, then this will be called if an EOF is +# received -- but this is optional. Either way, the actual ConnectionClosed +# event will be generated afterwards. +# +# READERS is a dict describing how to pick a reader. It maps states to either: +# - a reader +# - or, for body readers, a dict of per-framing reader factories + +import re +from typing import Any, Callable, Dict, Iterable, NoReturn, Optional, Tuple, Type, Union + +from ._abnf import chunk_header, header_field, request_line, status_line +from ._events import Data, EndOfMessage, InformationalResponse, Request, Response +from ._receivebuffer import ReceiveBuffer +from ._state import ( + CLIENT, + CLOSED, + DONE, + IDLE, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, +) +from ._util import LocalProtocolError, RemoteProtocolError, Sentinel, validate + +__all__ = ["READERS"] + +header_field_re = re.compile(header_field.encode("ascii")) +obs_fold_re = re.compile(rb"[ \t]+") + + +def _obsolete_line_fold(lines: Iterable[bytes]) -> Iterable[bytes]: + it = iter(lines) + last: Optional[bytes] = None + for line in it: + match = obs_fold_re.match(line) + if match: + if last is None: + raise LocalProtocolError("continuation line at start of headers") + if not isinstance(last, bytearray): + # Cast to a mutable type, avoiding copy on append to ensure O(n) time + last = bytearray(last) + last += b" " + last += line[match.end() :] + else: + if last is not None: + yield last + last = line + if last is not None: + yield last + + +def _decode_header_lines( + lines: Iterable[bytes], +) -> Iterable[Tuple[bytes, bytes]]: + for line in _obsolete_line_fold(lines): + matches = validate(header_field_re, line, "illegal header line: {!r}", line) + yield (matches["field_name"], matches["field_value"]) + + +request_line_re = re.compile(request_line.encode("ascii")) + + +def maybe_read_from_IDLE_client(buf: ReceiveBuffer) -> Optional[Request]: + lines = buf.maybe_extract_lines() + if lines is None: + if buf.is_next_line_obviously_invalid_request_line(): + raise LocalProtocolError("illegal request line") + return None + if not lines: + raise LocalProtocolError("no request line received") + matches = validate( + request_line_re, lines[0], "illegal request line: {!r}", lines[0] + ) + return Request( + headers=list(_decode_header_lines(lines[1:])), _parsed=True, **matches + ) + + +status_line_re = re.compile(status_line.encode("ascii")) + + +def maybe_read_from_SEND_RESPONSE_server( + buf: ReceiveBuffer, +) -> Union[InformationalResponse, Response, None]: + lines = buf.maybe_extract_lines() + if lines is None: + if buf.is_next_line_obviously_invalid_request_line(): + raise LocalProtocolError("illegal request line") + return None + if not lines: + raise LocalProtocolError("no 
response line received") + matches = validate(status_line_re, lines[0], "illegal status line: {!r}", lines[0]) + http_version = ( + b"1.1" if matches["http_version"] is None else matches["http_version"] + ) + reason = b"" if matches["reason"] is None else matches["reason"] + status_code = int(matches["status_code"]) + class_: Union[Type[InformationalResponse], Type[Response]] = ( + InformationalResponse if status_code < 200 else Response + ) + return class_( + headers=list(_decode_header_lines(lines[1:])), + _parsed=True, + status_code=status_code, + reason=reason, + http_version=http_version, + ) + + +class ContentLengthReader: + def __init__(self, length: int) -> None: + self._length = length + self._remaining = length + + def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: + if self._remaining == 0: + return EndOfMessage() + data = buf.maybe_extract_at_most(self._remaining) + if data is None: + return None + self._remaining -= len(data) + return Data(data=data) + + def read_eof(self) -> NoReturn: + raise RemoteProtocolError( + "peer closed connection without sending complete message body " + "(received {} bytes, expected {})".format( + self._length - self._remaining, self._length + ) + ) + + +chunk_header_re = re.compile(chunk_header.encode("ascii")) + + +class ChunkedReader: + def __init__(self) -> None: + self._bytes_in_chunk = 0 + # After reading a chunk, we have to throw away the trailing \r\n. + # This tracks the bytes that we need to match and throw away. + self._bytes_to_discard = b"" + self._reading_trailer = False + + def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: + if self._reading_trailer: + lines = buf.maybe_extract_lines() + if lines is None: + return None + return EndOfMessage(headers=list(_decode_header_lines(lines))) + if self._bytes_to_discard: + data = buf.maybe_extract_at_most(len(self._bytes_to_discard)) + if data is None: + return None + if data != self._bytes_to_discard[: len(data)]: + raise LocalProtocolError( + f"malformed chunk footer: {data!r} (expected {self._bytes_to_discard!r})" + ) + self._bytes_to_discard = self._bytes_to_discard[len(data) :] + if self._bytes_to_discard: + return None + # else, fall through and read some more + assert self._bytes_to_discard == b"" + if self._bytes_in_chunk == 0: + # We need to refill our chunk count + chunk_header = buf.maybe_extract_next_line() + if chunk_header is None: + return None + matches = validate( + chunk_header_re, + chunk_header, + "illegal chunk header: {!r}", + chunk_header, + ) + # XX FIXME: we discard chunk extensions. Does anyone care? 
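+ # (For example, a chunk header of b"5;name=value\r\n" yields + # chunk_size b"5" -- five body bytes follow -- while the ";name=value" + # extension is dropped on the floor.)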
+ self._bytes_in_chunk = int(matches["chunk_size"], base=16) + if self._bytes_in_chunk == 0: + self._reading_trailer = True + return self(buf) + chunk_start = True + else: + chunk_start = False + assert self._bytes_in_chunk > 0 + data = buf.maybe_extract_at_most(self._bytes_in_chunk) + if data is None: + return None + self._bytes_in_chunk -= len(data) + if self._bytes_in_chunk == 0: + self._bytes_to_discard = b"\r\n" + chunk_end = True + else: + chunk_end = False + return Data(data=data, chunk_start=chunk_start, chunk_end=chunk_end) + + def read_eof(self) -> NoReturn: + raise RemoteProtocolError( + "peer closed connection without sending complete message body " + "(incomplete chunked read)" + ) + + +class Http10Reader: + def __call__(self, buf: ReceiveBuffer) -> Optional[Data]: + data = buf.maybe_extract_at_most(999999999) + if data is None: + return None + return Data(data=data) + + def read_eof(self) -> EndOfMessage: + return EndOfMessage() + + +def expect_nothing(buf: ReceiveBuffer) -> None: + if buf: + raise LocalProtocolError("Got data when expecting EOF") + return None + + +ReadersType = Dict[ + Union[Type[Sentinel], Tuple[Type[Sentinel], Type[Sentinel]]], + Union[Callable[..., Any], Dict[str, Callable[..., Any]]], +] + +READERS: ReadersType = { + (CLIENT, IDLE): maybe_read_from_IDLE_client, + (SERVER, IDLE): maybe_read_from_SEND_RESPONSE_server, + (SERVER, SEND_RESPONSE): maybe_read_from_SEND_RESPONSE_server, + (CLIENT, DONE): expect_nothing, + (CLIENT, MUST_CLOSE): expect_nothing, + (CLIENT, CLOSED): expect_nothing, + (SERVER, DONE): expect_nothing, + (SERVER, MUST_CLOSE): expect_nothing, + (SERVER, CLOSED): expect_nothing, + SEND_BODY: { + "chunked": ChunkedReader, + "content-length": ContentLengthReader, + "http/1.0": Http10Reader, + }, +} diff --git a/venv/lib/python3.11/site-packages/h11/_receivebuffer.py b/venv/lib/python3.11/site-packages/h11/_receivebuffer.py new file mode 100644 index 0000000..e5c4e08 --- /dev/null +++ b/venv/lib/python3.11/site-packages/h11/_receivebuffer.py @@ -0,0 +1,153 @@ +import re +import sys +from typing import List, Optional, Union + +__all__ = ["ReceiveBuffer"] + + +# Operations we want to support: +# - find next \r\n or \r\n\r\n (\n or \n\n are also acceptable), +# or wait until there is one +# - read at-most-N bytes +# Goals: +# - on average, do this fast +# - worst case, do this in O(n) where n is the number of bytes processed +# Plan: +# - store bytearray, offset, how far we've searched for a separator token +# - use the how-far-we've-searched data to avoid rescanning +# - while doing a stream of uninterrupted processing, advance offset instead +# of constantly copying +# WARNING: +# - I haven't benchmarked or profiled any of this yet. +# +# Note that starting in Python 3.4, deleting the initial n bytes from a +# bytearray is amortized O(n), thanks to some excellent work by Antoine +# Martin: +# +# https://bugs.python.org/issue19087 +# +# This means that if we only supported 3.4+, we could get rid of the code here +# involving self._start and self.compress, because it's doing exactly the same +# thing that bytearray now does internally. +# +# BUT unfortunately, we still support 2.7, and reading short segments out of a +# long buffer MUST be O(bytes read) to avoid DoS issues, so we can't actually +# delete this code. 
Yet: +# +# https://pythonclock.org/ +# +# (Two things to double-check first though: make sure PyPy also has the +# optimization, and benchmark to make sure it's a win, since we do have a +# slightly clever thing where we delay calling compress() until we've +# processed a whole event, which could in theory be slightly more efficient +# than the internal bytearray support.) +blank_line_regex = re.compile(b"\n\r?\n", re.MULTILINE) + + +class ReceiveBuffer: + def __init__(self) -> None: + self._data = bytearray() + self._next_line_search = 0 + self._multiple_lines_search = 0 + + def __iadd__(self, byteslike: Union[bytes, bytearray]) -> "ReceiveBuffer": + self._data += byteslike + return self + + def __bool__(self) -> bool: + return bool(len(self)) + + def __len__(self) -> int: + return len(self._data) + + # for @property unprocessed_data + def __bytes__(self) -> bytes: + return bytes(self._data) + + def _extract(self, count: int) -> bytearray: + # Extract an initial slice of the data buffer and return it + out = self._data[:count] + del self._data[:count] + + self._next_line_search = 0 + self._multiple_lines_search = 0 + + return out + + def maybe_extract_at_most(self, count: int) -> Optional[bytearray]: + """ + Extract a fixed number of bytes from the buffer. + """ + out = self._data[:count] + if not out: + return None + + return self._extract(count) + + def maybe_extract_next_line(self) -> Optional[bytearray]: + """ + Extract the first line, if it is completed in the buffer. + """ + # Only search in buffer space that we've not already looked at. + search_start_index = max(0, self._next_line_search - 1) + partial_idx = self._data.find(b"\r\n", search_start_index) + + if partial_idx == -1: + self._next_line_search = len(self._data) + return None + + # + 2 is to compensate for len(b"\r\n") + idx = partial_idx + 2 + + return self._extract(idx) + + def maybe_extract_lines(self) -> Optional[List[bytearray]]: + """ + Extract everything up to the first blank line, and return a list of lines. + """ + # Handle the case where we have an immediate empty line. + if self._data[:1] == b"\n": + self._extract(1) + return [] + + if self._data[:2] == b"\r\n": + self._extract(2) + return [] + + # Only search in buffer space that we've not already looked at. + match = blank_line_regex.search(self._data, self._multiple_lines_search) + if match is None: + self._multiple_lines_search = max(0, len(self._data) - 2) + return None + + # Truncate the buffer and return it. + idx = match.span(0)[-1] + out = self._extract(idx) + lines = out.split(b"\n") + + for line in lines: + if line.endswith(b"\r"): + del line[-1] + + assert lines[-2] == lines[-1] == b"" + + del lines[-2:] + + return lines + + # In theory we should wait until `\r\n` before starting to validate + # incoming data. However, it's useful to detect (very) invalid data + # early, given that it might not contain `\r\n` at all (in which case + # only a timeout will get rid of it). + # This is not a 100% effective detection but more of a cheap sanity check + # allowing for early abort in some useful cases. + # This is especially useful when a peer has gotten confused about HTTPS + # and sent us a TLS stream where we were expecting plain HTTP, given that + # all versions of TLS so far start their handshake with a 0x16 message + # type code.
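A sketch of that early-abort check in action; the first byte of a TLS ClientHello (0x16) can never start a legal request line:

```python
import h11

conn = h11.Connection(our_role=h11.SERVER)
conn.receive_data(b"\x16\x03\x01\x02\x00")  # TLS record header, not HTTP
try:
    conn.next_event()
except h11.RemoteProtocolError as exc:
    print(exc)  # illegal request line
```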
+ def is_next_line_obviously_invalid_request_line(self) -> bool: + try: + # HTTP header line must not contain non-printable characters + # and should not start with a space + return self._data[0] < 0x21 + except IndexError: + return False diff --git a/venv/lib/python3.11/site-packages/h11/_state.py b/venv/lib/python3.11/site-packages/h11/_state.py new file mode 100644 index 0000000..3ad444b --- /dev/null +++ b/venv/lib/python3.11/site-packages/h11/_state.py @@ -0,0 +1,365 @@ +################################################################ +# The core state machine +################################################################ +# +# Rule 1: everything that affects the state machine and state transitions must +# live here in this file. As much as possible goes into the table-based +# representation, but for the bits that don't quite fit, the actual code and +# state must nonetheless live here. +# +# Rule 2: this file does not know about what role we're playing; it only knows +# about HTTP request/response cycles in the abstract. This ensures that we +# don't cheat and apply different rules to local and remote parties. +# +# +# Theory of operation +# =================== +# +# Possibly the simplest way to think about this is that we actually have 5 +# different state machines here. Yes, 5. These are: +# +# 1) The client state, with its complicated automaton (see the docs) +# 2) The server state, with its complicated automaton (see the docs) +# 3) The keep-alive state, with possible states {True, False} +# 4) The SWITCH_CONNECT state, with possible states {False, True} +# 5) The SWITCH_UPGRADE state, with possible states {False, True} +# +# For (3)-(5), the first state listed is the initial state. +# +# (1)-(3) are stored explicitly in member variables. The last +# two are stored implicitly in the pending_switch_proposals set as: +# (state of 4) == (_SWITCH_CONNECT in pending_switch_proposals) +# (state of 5) == (_SWITCH_UPGRADE in pending_switch_proposals) +# +# And each of these machines has two different kinds of transitions: +# +# a) Event-triggered +# b) State-triggered +# +# Event triggered is the obvious thing that you'd think it is: some event +# happens, and if it's the right event at the right time then a transition +# happens. But there are somewhat complicated rules for which machines can +# "see" which events. (As a rule of thumb, if a machine "sees" an event, this +# means two things: the event can affect the machine, and if the machine is +# not in a state where it expects that event then it's an error.) These rules +# are: +# +# 1) The client machine sees all h11.events objects emitted by the client. +# +# 2) The server machine sees all h11.events objects emitted by the server. +# +# It also sees the client's Request event. +# +# And sometimes, server events are annotated with a _SWITCH_* event. For +# example, we can have a (Response, _SWITCH_CONNECT) event, which is +# different from a regular Response event. +# +# 3) The keep-alive machine sees the process_keep_alive_disabled() event +# (which is derived from Request/Response events), and this event +# transitions it from True -> False, or from False -> False. There's no way +# to transition back. +# +# 4&5) The _SWITCH_* machines transition from False->True when we get a +# Request that proposes the relevant type of switch (via +# process_client_switch_proposals), and they go from True->False when we +# get a Response that has no _SWITCH_* annotation. +# +# So that's event-triggered transitions. 
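A sketch of one such event-triggered transition, using the internal `ConnectionState` class and the tables defined below (internal machinery, shown purely for illustration):

```python
import h11
from h11._state import ConnectionState  # internal, not public API

cs = ConnectionState()
# The client's Request is seen by *both* machines: the client moves
# IDLE -> SEND_BODY, and the server moves IDLE -> SEND_RESPONSE.
cs.process_event(h11.CLIENT, h11.Request)
assert cs.states == {h11.CLIENT: h11.SEND_BODY, h11.SERVER: h11.SEND_RESPONSE}

cs.process_event(h11.CLIENT, h11.EndOfMessage)
assert cs.states == {h11.CLIENT: h11.DONE, h11.SERVER: h11.SEND_RESPONSE}
```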
+# +# State-triggered transitions are less standard. What they do here is couple +# the machines together. The way this works is, when certain *joint* +# configurations of states are achieved, then we automatically transition to a +# new *joint* state. So, for example, if we're ever in a joint state with +# +# client: DONE +# keep-alive: False +# +# then the client state immediately transitions to: +# +# client: MUST_CLOSE +# +# This is fundamentally different from an event-based transition, because it +# doesn't matter how we arrived at the {client: DONE, keep-alive: False} state +# -- maybe the client transitioned SEND_BODY -> DONE, or keep-alive +# transitioned True -> False. Either way, once this precondition is satisfied, +# this transition is immediately triggered. +# +# What if two conflicting state-based transitions get enabled at the same +# time? In practice there's only one case where this arises (client DONE -> +# MIGHT_SWITCH_PROTOCOL versus DONE -> MUST_CLOSE), and we resolve it by +# explicitly prioritizing the DONE -> MIGHT_SWITCH_PROTOCOL transition. +# +# Implementation +# -------------- +# +# The event-triggered transitions for the server and client machines are all +# stored explicitly in a table. Ditto for the state-triggered transitions that +# involve just the server and client state. +# +# The transitions for the other machines, and the state-triggered transitions +# that involve the other machines, are written out as explicit Python code. +# +# It'd be nice if there were some cleaner way to do all this. This isn't +# *too* terrible, but I feel like it could probably be better. +# +# WARNING +# ------- +# +# The script that generates the state machine diagrams for the docs knows how +# to read out the EVENT_TRIGGERED_TRANSITIONS and STATE_TRIGGERED_TRANSITIONS +# tables. But it can't automatically read the transitions that are written +# directly in Python code. So if you touch those, you need to also update the +# script to keep it in sync! +from typing import cast, Dict, Optional, Set, Tuple, Type, Union + +from ._events import * +from ._util import LocalProtocolError, Sentinel + +# Everything in __all__ gets re-exported as part of the h11 public API. 
+__all__ = [ + "CLIENT", + "SERVER", + "IDLE", + "SEND_RESPONSE", + "SEND_BODY", + "DONE", + "MUST_CLOSE", + "CLOSED", + "MIGHT_SWITCH_PROTOCOL", + "SWITCHED_PROTOCOL", + "ERROR", +] + + +class CLIENT(Sentinel, metaclass=Sentinel): + pass + + +class SERVER(Sentinel, metaclass=Sentinel): + pass + + +# States +class IDLE(Sentinel, metaclass=Sentinel): + pass + + +class SEND_RESPONSE(Sentinel, metaclass=Sentinel): + pass + + +class SEND_BODY(Sentinel, metaclass=Sentinel): + pass + + +class DONE(Sentinel, metaclass=Sentinel): + pass + + +class MUST_CLOSE(Sentinel, metaclass=Sentinel): + pass + + +class CLOSED(Sentinel, metaclass=Sentinel): + pass + + +class ERROR(Sentinel, metaclass=Sentinel): + pass + + +# Switch types +class MIGHT_SWITCH_PROTOCOL(Sentinel, metaclass=Sentinel): + pass + + +class SWITCHED_PROTOCOL(Sentinel, metaclass=Sentinel): + pass + + +class _SWITCH_UPGRADE(Sentinel, metaclass=Sentinel): + pass + + +class _SWITCH_CONNECT(Sentinel, metaclass=Sentinel): + pass + + +EventTransitionType = Dict[ + Type[Sentinel], + Dict[ + Type[Sentinel], + Dict[Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], Type[Sentinel]], + ], +] + +EVENT_TRIGGERED_TRANSITIONS: EventTransitionType = { + CLIENT: { + IDLE: {Request: SEND_BODY, ConnectionClosed: CLOSED}, + SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, + DONE: {ConnectionClosed: CLOSED}, + MUST_CLOSE: {ConnectionClosed: CLOSED}, + CLOSED: {ConnectionClosed: CLOSED}, + MIGHT_SWITCH_PROTOCOL: {}, + SWITCHED_PROTOCOL: {}, + ERROR: {}, + }, + SERVER: { + IDLE: { + ConnectionClosed: CLOSED, + Response: SEND_BODY, + # Special case: server sees client Request events, in this form + (Request, CLIENT): SEND_RESPONSE, + }, + SEND_RESPONSE: { + InformationalResponse: SEND_RESPONSE, + Response: SEND_BODY, + (InformationalResponse, _SWITCH_UPGRADE): SWITCHED_PROTOCOL, + (Response, _SWITCH_CONNECT): SWITCHED_PROTOCOL, + }, + SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, + DONE: {ConnectionClosed: CLOSED}, + MUST_CLOSE: {ConnectionClosed: CLOSED}, + CLOSED: {ConnectionClosed: CLOSED}, + SWITCHED_PROTOCOL: {}, + ERROR: {}, + }, +} + +StateTransitionType = Dict[ + Tuple[Type[Sentinel], Type[Sentinel]], Dict[Type[Sentinel], Type[Sentinel]] +] + +# NB: there are also some special-case state-triggered transitions hard-coded +# into _fire_state_triggered_transitions below. +STATE_TRIGGERED_TRANSITIONS: StateTransitionType = { + # (Client state, Server state) -> new states + # Protocol negotiation + (MIGHT_SWITCH_PROTOCOL, SWITCHED_PROTOCOL): {CLIENT: SWITCHED_PROTOCOL}, + # Socket shutdown + (CLOSED, DONE): {SERVER: MUST_CLOSE}, + (CLOSED, IDLE): {SERVER: MUST_CLOSE}, + (ERROR, DONE): {SERVER: MUST_CLOSE}, + (DONE, CLOSED): {CLIENT: MUST_CLOSE}, + (IDLE, CLOSED): {CLIENT: MUST_CLOSE}, + (DONE, ERROR): {CLIENT: MUST_CLOSE}, +} + + +class ConnectionState: + def __init__(self) -> None: + # Extra bits of state that don't quite fit into the state model. + + # If this is False then it enables the automatic DONE -> MUST_CLOSE + # transition. Don't set this directly; call .keep_alive_disabled() + self.keep_alive = True + + # This is a subset of {UPGRADE, CONNECT}, containing the proposals + # made by the client for switching protocols. 
+ self.pending_switch_proposals: Set[Type[Sentinel]] = set() + + self.states: Dict[Type[Sentinel], Type[Sentinel]] = {CLIENT: IDLE, SERVER: IDLE} + + def process_error(self, role: Type[Sentinel]) -> None: + self.states[role] = ERROR + self._fire_state_triggered_transitions() + + def process_keep_alive_disabled(self) -> None: + self.keep_alive = False + self._fire_state_triggered_transitions() + + def process_client_switch_proposal(self, switch_event: Type[Sentinel]) -> None: + self.pending_switch_proposals.add(switch_event) + self._fire_state_triggered_transitions() + + def process_event( + self, + role: Type[Sentinel], + event_type: Type[Event], + server_switch_event: Optional[Type[Sentinel]] = None, + ) -> None: + _event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]] = event_type + if server_switch_event is not None: + assert role is SERVER + if server_switch_event not in self.pending_switch_proposals: + raise LocalProtocolError( + "Received server _SWITCH_UPGRADE event without a pending proposal" + ) + _event_type = (event_type, server_switch_event) + if server_switch_event is None and _event_type is Response: + self.pending_switch_proposals = set() + self._fire_event_triggered_transitions(role, _event_type) + # Special case: the server state does get to see Request + # events. + if _event_type is Request: + assert role is CLIENT + self._fire_event_triggered_transitions(SERVER, (Request, CLIENT)) + self._fire_state_triggered_transitions() + + def _fire_event_triggered_transitions( + self, + role: Type[Sentinel], + event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], + ) -> None: + state = self.states[role] + try: + new_state = EVENT_TRIGGERED_TRANSITIONS[role][state][event_type] + except KeyError: + event_type = cast(Type[Event], event_type) + raise LocalProtocolError( + "can't handle event type {} when role={} and state={}".format( + event_type.__name__, role, self.states[role] + ) + ) from None + self.states[role] = new_state + + def _fire_state_triggered_transitions(self) -> None: + # We apply these rules repeatedly until converging on a fixed point + while True: + start_states = dict(self.states) + + # It could happen that both these special-case transitions are + # enabled at the same time: + # + # DONE -> MIGHT_SWITCH_PROTOCOL + # DONE -> MUST_CLOSE + # + # For example, this will always be true of a HTTP/1.0 client + # requesting CONNECT. If this happens, the protocol switch takes + # priority. From there the client will either go to + # SWITCHED_PROTOCOL, in which case it's none of our business when + # they close the connection, or else the server will deny the + # request, in which case the client will go back to DONE and then + # from there to MUST_CLOSE. + if self.pending_switch_proposals: + if self.states[CLIENT] is DONE: + self.states[CLIENT] = MIGHT_SWITCH_PROTOCOL + + if not self.pending_switch_proposals: + if self.states[CLIENT] is MIGHT_SWITCH_PROTOCOL: + self.states[CLIENT] = DONE + + if not self.keep_alive: + for role in (CLIENT, SERVER): + if self.states[role] is DONE: + self.states[role] = MUST_CLOSE + + # Tabular state-triggered transitions + joint_state = (self.states[CLIENT], self.states[SERVER]) + changes = STATE_TRIGGERED_TRANSITIONS.get(joint_state, {}) + self.states.update(changes) + + if self.states == start_states: + # Fixed point reached + return + + def start_next_cycle(self) -> None: + if self.states != {CLIENT: DONE, SERVER: DONE}: + raise LocalProtocolError( + f"not in a reusable state. 
self.states={self.states}"
+            )
+        # Can't reach DONE/DONE with any of these active, but still, let's be
+        # sure.
+        assert self.keep_alive
+        assert not self.pending_switch_proposals
+        self.states = {CLIENT: IDLE, SERVER: IDLE}
diff --git a/venv/lib/python3.11/site-packages/h11/_util.py b/venv/lib/python3.11/site-packages/h11/_util.py
new file mode 100644
index 0000000..6718445
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/h11/_util.py
@@ -0,0 +1,135 @@
+from typing import Any, Dict, NoReturn, Pattern, Tuple, Type, TypeVar, Union
+
+__all__ = [
+    "ProtocolError",
+    "LocalProtocolError",
+    "RemoteProtocolError",
+    "validate",
+    "bytesify",
+]
+
+
+class ProtocolError(Exception):
+    """Exception indicating a violation of the HTTP/1.1 protocol.
+
+    This is an abstract base class, with two concrete subclasses:
+    :exc:`LocalProtocolError`, which indicates that you tried to do something
+    that HTTP/1.1 says is illegal, and :exc:`RemoteProtocolError`, which
+    indicates that the remote peer tried to do something that HTTP/1.1 says is
+    illegal. See :ref:`error-handling` for details.
+
+    In addition to the normal :exc:`Exception` features, it has one attribute:
+
+    .. attribute:: error_status_hint
+
+       This gives a suggestion as to what status code a server might use if
+       this error occurred as part of a request.
+
+       For a :exc:`RemoteProtocolError`, this is useful as a suggestion for
+       how you might want to respond to a misbehaving peer, if you're
+       implementing a server.
+
+       For a :exc:`LocalProtocolError`, this can be taken as a suggestion for
+       how your peer might have responded to *you* if h11 had allowed you to
+       continue.
+
+       The default is 400 Bad Request, a generic catch-all for protocol
+       violations.
+
+    """
+
+    def __init__(self, msg: str, error_status_hint: int = 400) -> None:
+        if type(self) is ProtocolError:
+            raise TypeError("tried to directly instantiate ProtocolError")
+        Exception.__init__(self, msg)
+        self.error_status_hint = error_status_hint
+
+
+# Strategy: there are a number of public APIs where a LocalProtocolError can
+# be raised (send(), all the different event constructors, ...), and only one
+# public API where RemoteProtocolError can be raised
+# (receive_data()). Therefore we always raise LocalProtocolError internally,
+# and then receive_data will translate this into a RemoteProtocolError.
+#
+# Internally:
+#   LocalProtocolError is the generic "ProtocolError".
+# Externally:
+#   LocalProtocolError is for local errors and RemoteProtocolError is for
+#   remote errors.
+class LocalProtocolError(ProtocolError):
+    def _reraise_as_remote_protocol_error(self) -> NoReturn:
+        # After catching a LocalProtocolError, use this method to re-raise it
+        # as a RemoteProtocolError. This method must be called from inside an
+        # except: block.
+        #
+        # An easy way to get an equivalent RemoteProtocolError is just to
+        # modify 'self' in place.
+        self.__class__ = RemoteProtocolError  # type: ignore
+        # But the re-raising is somewhat non-trivial -- you might think that
+        # now that we've modified the in-flight exception object, that just
+        # doing 'raise' to re-raise it would be enough. But it turns out that
+        # this doesn't work, because Python tracks the exception type
+        # (exc_info[0]) separately from the exception object (exc_info[1]),
+        # and we only modified the latter. So we really do need to re-raise
+        # the new type explicitly.
+        # On py3, the traceback is part of the exception object, so our
+        # in-place modification preserved it and we can just re-raise:
+        raise self
+
+
+class RemoteProtocolError(ProtocolError):
+    pass
+
+
+def validate(
+    regex: Pattern[bytes], data: bytes, msg: str = "malformed data", *format_args: Any
+) -> Dict[str, bytes]:
+    match = regex.fullmatch(data)
+    if not match:
+        if format_args:
+            msg = msg.format(*format_args)
+        raise LocalProtocolError(msg)
+    return match.groupdict()
+
+
+# Sentinel values
+#
+# - Inherit identity-based comparison and hashing from object
+# - Have a nice repr
+# - Have a *bonus property*: type(sentinel) is sentinel
+#
+# The bonus property is useful if you want to take the return value from
+# next_event() and do some sort of dispatch based on type(event).
+
+_T_Sentinel = TypeVar("_T_Sentinel", bound="Sentinel")
+
+
+class Sentinel(type):
+    def __new__(
+        cls: Type[_T_Sentinel],
+        name: str,
+        bases: Tuple[type, ...],
+        namespace: Dict[str, Any],
+        **kwds: Any
+    ) -> _T_Sentinel:
+        assert bases == (Sentinel,)
+        v = super().__new__(cls, name, bases, namespace, **kwds)
+        v.__class__ = v  # type: ignore
+        return v
+
+    def __repr__(self) -> str:
+        return self.__name__
+
+
+# Used for methods, request targets, HTTP versions, header names, and header
+# values. Accepts ascii-strings, or bytes/bytearray/memoryview/..., and always
+# returns bytes.
+def bytesify(s: Union[bytes, bytearray, memoryview, int, str]) -> bytes:
+    # Fast-path:
+    if type(s) is bytes:
+        return s
+    if isinstance(s, str):
+        s = s.encode("ascii")
+    if isinstance(s, int):
+        raise TypeError("expected bytes-like object, not int")
+    return bytes(s)
diff --git a/venv/lib/python3.11/site-packages/h11/_version.py b/venv/lib/python3.11/site-packages/h11/_version.py
new file mode 100644
index 0000000..76e7327
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/h11/_version.py
@@ -0,0 +1,16 @@
+# This file must be kept very simple, because it is consumed from several
+# places -- it is imported by h11/__init__.py, execfile'd by setup.py, etc.
+
+# We use a simple scheme:
+#   1.0.0 -> 1.0.0+dev -> 1.1.0 -> 1.1.0+dev
+# where the +dev versions are never released into the wild, they're just what
+# we stick into the VCS in between releases.
+#
+# This is compatible with PEP 440:
+#   http://legacy.python.org/dev/peps/pep-0440/
+# via the use of the "local suffix" "+dev", which is disallowed on index
+# servers and causes 1.0.0+dev to sort after plain 1.0.0, which is what we
+# want. (Contrast with the special suffix 1.0.0.dev, which sorts *before*
+# 1.0.0.)
+
+__version__ = "0.16.0"
diff --git a/venv/lib/python3.11/site-packages/h11/_writers.py b/venv/lib/python3.11/site-packages/h11/_writers.py
new file mode 100644
index 0000000..939cdb9
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/h11/_writers.py
@@ -0,0 +1,145 @@
+# Code to write HTTP data
+#
+# Strategy: each writer takes an event + a write-some-bytes function, which is
+# how it actually performs writes.
+#
+# WRITERS is a dict describing how to pick a writer. It maps states to either:
+# - a writer
+# - or, for body writers, a dict of framing-dependent writer factories
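To make that "event + write-some-bytes callback" strategy concrete, here is an editor's sketch (not part of the vendored file) that points a writer at a plain `list.append` as its write function:

```python
# Editor's illustration (not part of h11): render a request head with the
# writers defined below.
from h11._events import Request
from h11._writers import write_request

chunks: list[bytes] = []
request = Request(
    method="GET",
    target="/",
    headers=[("Host", "example.com")],  # h11 requires Host for HTTP/1.1
)
write_request(request, chunks.append)  # the writer just calls write(some_bytes)
print(b"".join(chunks))
# b'GET / HTTP/1.1\r\nHost: example.com\r\n\r\n'
```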
+
+from typing import Any, Callable, Dict, List, Tuple, Type, Union
+
+from ._events import Data, EndOfMessage, Event, InformationalResponse, Request, Response
+from ._headers import Headers
+from ._state import CLIENT, IDLE, SEND_BODY, SEND_RESPONSE, SERVER
+from ._util import LocalProtocolError, Sentinel
+
+__all__ = ["WRITERS"]
+
+Writer = Callable[[bytes], Any]
+
+
+def write_headers(headers: Headers, write: Writer) -> None:
+    # "Since the Host field-value is critical information for handling a
+    # request, a user agent SHOULD generate Host as the first header field
+    # following the request-line." - RFC 7230
+    raw_items = headers._full_items
+    for raw_name, name, value in raw_items:
+        if name == b"host":
+            write(b"%s: %s\r\n" % (raw_name, value))
+    for raw_name, name, value in raw_items:
+        if name != b"host":
+            write(b"%s: %s\r\n" % (raw_name, value))
+    write(b"\r\n")
+
+
+def write_request(request: Request, write: Writer) -> None:
+    if request.http_version != b"1.1":
+        raise LocalProtocolError("I only send HTTP/1.1")
+    write(b"%s %s HTTP/1.1\r\n" % (request.method, request.target))
+    write_headers(request.headers, write)
+
+
+# Shared between InformationalResponse and Response
+def write_any_response(
+    response: Union[InformationalResponse, Response], write: Writer
+) -> None:
+    if response.http_version != b"1.1":
+        raise LocalProtocolError("I only send HTTP/1.1")
+    status_bytes = str(response.status_code).encode("ascii")
+    # We don't bother sending ascii status messages like "OK"; they're
+    # optional and ignored by the protocol. (But the space after the numeric
+    # status code is mandatory.)
+    #
+    # XX FIXME: could at least make an effort to pull out the status message
+    # from stdlib's http.HTTPStatus table. Or maybe just steal their enums
+    # (either by import or copy/paste). We already accept them as status codes
+    # since they're of type IntEnum < int.
+    write(b"HTTP/1.1 %s %s\r\n" % (status_bytes, response.reason))
+    write_headers(response.headers, write)
+
+
+class BodyWriter:
+    def __call__(self, event: Event, write: Writer) -> None:
+        if type(event) is Data:
+            self.send_data(event.data, write)
+        elif type(event) is EndOfMessage:
+            self.send_eom(event.headers, write)
+        else:  # pragma: no cover
+            assert False
+
+    def send_data(self, data: bytes, write: Writer) -> None:
+        pass
+
+    def send_eom(self, headers: Headers, write: Writer) -> None:
+        pass
+
+
+#
+# These are all careful not to do anything to 'data' except call len(data) and
+# write(data). This allows us to transparently pass-through funny objects,
+# like placeholder objects referring to files on disk that will be sent via
+# sendfile(2).
+#
+class ContentLengthWriter(BodyWriter):
+    def __init__(self, length: int) -> None:
+        self._length = length
+
+    def send_data(self, data: bytes, write: Writer) -> None:
+        self._length -= len(data)
+        if self._length < 0:
+            raise LocalProtocolError("Too much data for declared Content-Length")
+        write(data)
+
+    def send_eom(self, headers: Headers, write: Writer) -> None:
+        if self._length != 0:
+            raise LocalProtocolError("Too little data for declared Content-Length")
+        if headers:
+            raise LocalProtocolError("Content-Length and trailers don't mix")
+
+
+class ChunkedWriter(BodyWriter):
+    def send_data(self, data: bytes, write: Writer) -> None:
+        # if we encoded 0-length data in the naive way, it would look like an
+        # end-of-message.
+ if not data: + return + write(b"%x\r\n" % len(data)) + write(data) + write(b"\r\n") + + def send_eom(self, headers: Headers, write: Writer) -> None: + write(b"0\r\n") + write_headers(headers, write) + + +class Http10Writer(BodyWriter): + def send_data(self, data: bytes, write: Writer) -> None: + write(data) + + def send_eom(self, headers: Headers, write: Writer) -> None: + if headers: + raise LocalProtocolError("can't send trailers to HTTP/1.0 client") + # no need to close the socket ourselves, that will be taken care of by + # Connection: close machinery + + +WritersType = Dict[ + Union[Tuple[Type[Sentinel], Type[Sentinel]], Type[Sentinel]], + Union[ + Dict[str, Type[BodyWriter]], + Callable[[Union[InformationalResponse, Response], Writer], None], + Callable[[Request, Writer], None], + ], +] + +WRITERS: WritersType = { + (CLIENT, IDLE): write_request, + (SERVER, IDLE): write_any_response, + (SERVER, SEND_RESPONSE): write_any_response, + SEND_BODY: { + "chunked": ChunkedWriter, + "content-length": ContentLengthWriter, + "http/1.0": Http10Writer, + }, +} diff --git a/venv/lib/python3.11/site-packages/h11/py.typed b/venv/lib/python3.11/site-packages/h11/py.typed new file mode 100644 index 0000000..f5642f7 --- /dev/null +++ b/venv/lib/python3.11/site-packages/h11/py.typed @@ -0,0 +1 @@ +Marker diff --git a/venv/lib/python3.11/site-packages/httpcore-1.0.9.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/httpcore-1.0.9.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore-1.0.9.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.11/site-packages/httpcore-1.0.9.dist-info/METADATA b/venv/lib/python3.11/site-packages/httpcore-1.0.9.dist-info/METADATA new file mode 100644 index 0000000..8056834 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore-1.0.9.dist-info/METADATA @@ -0,0 +1,625 @@ +Metadata-Version: 2.4 +Name: httpcore +Version: 1.0.9 +Summary: A minimal low-level HTTP client. 
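Before continuing with the `httpcore` package metadata, one last editor's sketch (not part of the vendored files) showing the wire framing produced by the h11 body writers defined above; `EndOfMessage().headers` is used only as a convenient empty trailer set:

```python
# Editor's illustration (not part of h11): drive ChunkedWriter by hand to
# see the chunked transfer-coding framing it emits.
from h11._events import EndOfMessage
from h11._writers import ChunkedWriter

frames: list[bytes] = []
writer = ChunkedWriter()
writer.send_data(b"hello", frames.append)             # hex length, payload, CRLF
writer.send_eom(EndOfMessage().headers, frames.append)  # zero-length final chunk
print(b"".join(frames))
# b'5\r\nhello\r\n0\r\n\r\n'
```

By contrast, `ContentLengthWriter(5)` would pass `b"hello"` through unchanged and raise `LocalProtocolError` if the running total ever disagreed with the declared Content-Length.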
+Project-URL: Documentation, https://www.encode.io/httpcore
+Project-URL: Homepage, https://www.encode.io/httpcore/
+Project-URL: Source, https://github.com/encode/httpcore
+Author-email: Tom Christie <tom@tomchristie.com>
+License-Expression: BSD-3-Clause
+License-File: LICENSE.md
+Classifier: Development Status :: 3 - Alpha
+Classifier: Environment :: Web Environment
+Classifier: Framework :: AsyncIO
+Classifier: Framework :: Trio
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Topic :: Internet :: WWW/HTTP
+Requires-Python: >=3.8
+Requires-Dist: certifi
+Requires-Dist: h11>=0.16
+Provides-Extra: asyncio
+Requires-Dist: anyio<5.0,>=4.0; extra == 'asyncio'
+Provides-Extra: http2
+Requires-Dist: h2<5,>=3; extra == 'http2'
+Provides-Extra: socks
+Requires-Dist: socksio==1.*; extra == 'socks'
+Provides-Extra: trio
+Requires-Dist: trio<1.0,>=0.22.0; extra == 'trio'
+Description-Content-Type: text/markdown
+
+# HTTP Core
+
+[![Test Suite](https://github.com/encode/httpcore/workflows/Test%20Suite/badge.svg)](https://github.com/encode/httpcore/actions)
+[![Package version](https://badge.fury.io/py/httpcore.svg)](https://pypi.org/project/httpcore/)
+
+> *Do one thing, and do it well.*
+
+The HTTP Core package provides a minimal low-level HTTP client, which does
+one thing only: sending HTTP requests.
+
+It does not provide any high level model abstractions over the API,
+does not handle redirects, multipart uploads, building authentication headers,
+transparent HTTP caching, URL parsing, session cookie handling,
+content or charset decoding, handling JSON, environment based configuration
+defaults, or any of that Jazz.
+
+Some things HTTP Core does do:
+
+* Sending HTTP requests.
+* Thread-safe / task-safe connection pooling.
+* HTTP(S) proxy & SOCKS proxy support.
+* Supports HTTP/1.1 and HTTP/2.
+* Provides both sync and async interfaces.
+* Async backend support for `asyncio` and `trio`.
+
+## Requirements
+
+Python 3.8+
+
+## Installation
+
+For HTTP/1.1 only support, install with:
+
+```shell
+$ pip install httpcore
+```
+
+There are also a number of optional extras available...
+
+```shell
+$ pip install httpcore['asyncio,trio,http2,socks']
+```
+
+## Sending requests
+
+Send an HTTP request:
+
+```python
+import httpcore
+
+response = httpcore.request("GET", "https://www.example.com/")
+
+print(response)
+# <Response [200]>
+print(response.status)
+# 200
+print(response.headers)
+# [(b'Accept-Ranges', b'bytes'), (b'Age', b'557328'), (b'Cache-Control', b'max-age=604800'), ...]
+print(response.content)
+# b'<!doctype html>\n<html>\n<head>\n<title>Example Domain</title>\n</head>\n\n<body>\n ...'
+```
+
+The top-level `httpcore.request()` function is provided for convenience. In practice whenever you're working with `httpcore` you'll want to use the connection pooling functionality that it provides.
+
+```python
+import httpcore
+
+http = httpcore.ConnectionPool()
+response = http.request("GET", "https://www.example.com/")
+```
+
+Once you're ready to get going, [head over to the documentation](https://www.encode.io/httpcore/).
+
+## Motivation
+
+You *probably* don't want to be using HTTP Core directly.
+It might make sense if you're writing something like a proxy service in Python,
+and you just want something at the lowest possible level, but more typically
+you'll want to use a higher level client library, such as `httpx`.
+
+The motivation for `httpcore` is:
+
+* To provide a reusable low-level client library, that other packages can then build on top of.
+* To provide a *really clear interface split* between the networking code and client logic,
+  so that each is easier to understand and reason about in isolation.
+
+## Dependencies
+
+The `httpcore` package has the following dependencies...
+
+* `h11`
+* `certifi`
+
+And the following optional extras...
+
+* `anyio` - Required by `pip install httpcore['asyncio']`.
+* `trio` - Required by `pip install httpcore['trio']`.
+* `h2` - Required by `pip install httpcore['http2']`.
+* `socksio` - Required by `pip install httpcore['socks']`.
+
+## Versioning
+
+We use [SEMVER for our versioning policy](https://semver.org/).
+
+For changes between package versions please see our [project changelog](CHANGELOG.md).
+
+We recommend pinning your requirements either to the most recent major version, or a more specific version range:
+
+```shell
+pip install 'httpcore==1.*'
+```
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
+
+## Version 1.0.9 (April 24th, 2025)
+
+- Resolve https://github.com/advisories/GHSA-vqfr-h8mv-ghfj with h11 dependency update. (#1008)
+
+## Version 1.0.8 (April 11th, 2025)
+
+- Fix `AttributeError` when importing on Python 3.14. (#1005)
+
+## Version 1.0.7 (November 15th, 2024)
+
+- Support `proxy=…` configuration on `ConnectionPool()`. (#974)
+
+## Version 1.0.6 (October 1st, 2024)
+
+- Relax `trio` dependency pinning. (#956)
+- Handle `trio` raising `NotImplementedError` on unsupported platforms. (#955)
+- Handle mapping `ssl.SSLError` to `httpcore.ConnectError`. (#918)
+
+## 1.0.5 (March 27th, 2024)
+
+- Handle `EndOfStream` exception for anyio backend. (#899)
+- Allow trio `0.25.*` series in package dependencies. (#903)
+
+## 1.0.4 (February 21st, 2024)
+
+- Add `target` request extension. (#888)
+- Fix support for connection `Upgrade` and `CONNECT` when some data in the stream has been read. (#882)
+
+## 1.0.3 (February 13th, 2024)
+
+- Fix support for async cancellations. (#880)
+- Fix trace extension when used with socks proxy. (#849)
+- Fix SSL context for connections using the "wss" scheme (#869)
+
+## 1.0.2 (November 10th, 2023)
+
+- Fix `float("inf")` timeouts in `Event.wait` function. (#846)
+
+## 1.0.1 (November 3rd, 2023)
+
+- Fix pool timeout to account for the total time spent retrying. (#823)
+- Raise a neater RuntimeError when the correct async deps are not installed. (#826)
+- Add support for synchronous TLS-in-TLS streams. (#840)
+
+## 1.0.0 (October 6th, 2023)
+
+From version 1.0 our async support is now optional, as the package has minimal dependencies by default.
+
+For async support use either `pip install 'httpcore[asyncio]'` or `pip install 'httpcore[trio]'`.
+
+The project versioning policy is now explicitly governed by SEMVER. See https://semver.org/.
+
+- Async support becomes fully optional. (#809)
+- Add support for Python 3.12. (#807)
+
+## 0.18.0 (September 8th, 2023)
+
+- Add support for HTTPS proxies. (#745, #786)
+- Drop Python 3.7 support. (#727)
+- Handle `sni_hostname` extension with SOCKS proxy. (#774)
+- Handle HTTP/1.1 half-closed connections gracefully. (#641)
+- Change the type of `Extensions` from `Mapping[str, Any]` to `MutableMapping[str, Any]`. (#762)
+
+## 0.17.3 (July 5th, 2023)
+
+- Support async cancellations, ensuring that the connection pool is left in a clean state when cancellations occur. (#726)
+- The networking backend interface has [been added to the public API](https://www.encode.io/httpcore/network-backends). Some classes which were previously a private implementation detail are now part of the top-level public API. (#699)
+- Graceful handling of HTTP/2 GoAway frames, with requests being transparently retried on a new connection. (#730)
+- Add exceptions when a synchronous `trace callback` is passed to an asynchronous request or an asynchronous `trace callback` is passed to a synchronous request. (#717)
+- Drop Python 3.7 support. (#727)
+
+## 0.17.2 (May 23rd, 2023)
+
+- Add `socket_options` argument to `ConnectionPool` and `HTTPProxy` classes. (#668)
+- Improve logging with per-module logger names. (#690)
+- Add `sni_hostname` request extension. (#696)
+- Resolve race condition during import of `anyio` package. (#692)
+- Enable TCP_NODELAY for all synchronous sockets. (#651)
+
+## 0.17.1 (May 17th, 2023)
+
+- If 'retries' is set, then allow retries if an SSL handshake error occurs. (#669)
+- Improve correctness of tracebacks on network exceptions, by raising properly chained exceptions. (#678)
+- Prevent connection-hanging behaviour when HTTP/2 connections are closed by a server-sent 'GoAway' frame. (#679)
+- Fix edge-case exception when removing requests from the connection pool. (#680)
+- Fix pool timeout edge-case. (#688)
+
+## 0.17.0 (March 16th, 2023)
+
+- Add DEBUG level logging. (#648)
+- Respect HTTP/2 max concurrent streams when settings updates are sent by server. (#652)
+- Increase the allowable HTTP header size to 100kB. (#647)
+- Add `retries` option to SOCKS proxy classes. (#643)
+
+## 0.16.3 (December 20th, 2022)
+
+- Allow `ws` and `wss` schemes. Allows us to properly support websocket upgrade connections. (#625)
+- Forwarding HTTP proxies use a connection-per-remote-host. Required by some proxy implementations. (#637)
+- Don't raise `RuntimeError` when closing a connection pool with active connections. Removes some error cases when cancellations are used. (#631)
+- Lazy import `anyio`, so that it's no longer a hard dependency, and isn't imported if unused. (#639)
+
+## 0.16.2 (November 25th, 2022)
+
+- Revert 'Fix async cancellation behaviour', which introduced race conditions. (#627)
+- Raise `RuntimeError` if attempting to use UNIX domain sockets on Windows. (#619)
+
+## 0.16.1 (November 17th, 2022)
+
+- Fix HTTP/1.1 interim informational responses, such as "100 Continue". (#605)
+
+## 0.16.0 (October 11th, 2022)
+
+- Support HTTP/1.1 informational responses. (#581)
+- Fix async cancellation behaviour. (#580)
+- Support `h11` 0.14. (#579)
+
+## 0.15.0 (May 17th, 2022)
+
+- Drop Python 3.6 support (#535)
+- Ensure HTTP proxy CONNECT requests include `timeout` configuration. (#506)
+- Switch to explicit `typing.Optional` for type hints. (#513)
+- For `trio` map OSError exceptions to `ConnectError`. (#543)
+
+## 0.14.7 (February 4th, 2022)
+
+- Requests which raise a PoolTimeout need to be removed from the pool queue. (#502)
+- Fix AttributeError that happened when `Socks5Connection` instances were terminated. (#501)
+
+## 0.14.6 (February 1st, 2022)
+
+- Fix SOCKS support for `http://` URLs. (#492)
+- Resolve race condition around exceptions during streaming a response. (#491)
+
+## 0.14.5 (January 18th, 2022)
+
+- SOCKS proxy support. (#478)
+- Add proxy_auth argument to HTTPProxy. (#481)
+- Improve error message on 'RemoteProtocolError' exception when server disconnects without sending a response. (#479)
+
+## 0.14.4 (January 5th, 2022)
+
+- Support HTTP/2 on HTTPS tunnelling proxies. (#468)
+- Fix proxy headers missing on HTTP forwarding. (#456)
+- Only instantiate SSL context if required. (#457)
+- More robust HTTP/2 handling. (#253, #439, #440, #441)
+
+## 0.14.3 (November 17th, 2021)
+
+- Fix race condition when removing closed connections from the pool. (#437)
+
+## 0.14.2 (November 16th, 2021)
+
+- Failed connections no longer remain in the pool. (Pull #433)
+
+## 0.14.1 (November 12th, 2021)
+
+- `max_connections` becomes optional. (Pull #429)
+- `certifi` is now included in the install dependencies. (Pull #428)
+- `h2` is now strictly optional. (Pull #428)
+
+## 0.14.0 (November 11th, 2021)
+
+The 0.14 release is a complete reworking of `httpcore`, comprehensively addressing some underlying issues in the connection pooling, as well as substantially redesigning the API to be more user friendly.
+
+Some of the lower-level API design also makes the components more easily testable in isolation, and the package now has 100% test coverage.
+
+See [discussion #419](https://github.com/encode/httpcore/discussions/419) for a little more background.
+
+There are some other neat bits in there too, such as the "trace" extension, which gives a hook into inspecting the internal events that occur during the request/response cycle. This extension is needed for the HTTPX cli, in order to...
+
+* Log the point at which the connection is established, and the IP/port on which it is made.
+* Determine if the outgoing request should log as HTTP/1.1 or HTTP/2, rather than having to assume it's HTTP/2 if the --http2 flag was passed. (Which may not actually be true.)
+* Log SSL version info / certificate info.
+
+Note that `curio` support is not currently available in 0.14.0. If you're using `httpcore` with `curio` please get in touch, so we can assess if we ought to prioritize it as a feature or not.
+
+## 0.13.7 (September 13th, 2021)
+
+- Fix broken error messaging when URL scheme is missing, or a non-HTTP(S) scheme is used. (Pull #403)
+
+## 0.13.6 (June 15th, 2021)
+
+### Fixed
+
+- Close sockets when read or write timeouts occur. (Pull #365)
+
+## 0.13.5 (June 14th, 2021)
+
+### Fixed
+
+- Resolved niggles with AnyIO EOF behaviours. (Pull #358, #362)
+
+## 0.13.4 (June 9th, 2021)
+
+### Added
+
+- Improved error messaging when URL scheme is missing, or a non-HTTP(S) scheme is used. (Pull #354)
+
+### Fixed
+
+- Switched to `anyio` as the default backend implementation when running with `asyncio`. Resolves some awkward [TLS timeout issues](https://github.com/encode/httpx/discussions/1511).
+
+## 0.13.3 (May 6th, 2021)
+
+### Added
+
+- Support HTTP/2 prior knowledge, using `httpcore.SyncConnectionPool(http1=False)`. (Pull #333)
+
+### Fixed
+
+- Handle cases where environment does not provide `select.poll` support. (Pull #331)
+
+## 0.13.2 (April 29th, 2021)
+
+### Added
+
+- Improve error message for specific case of `RemoteProtocolError` where server disconnects without sending a response. (Pull #313)
+
+## 0.13.1 (April 28th, 2021)
+
+### Fixed
+
+- More resilient testing for closed connections. (Pull #311)
+- Don't raise exceptions on ungraceful connection closes. (Pull #310)
+
+## 0.13.0 (April 21st, 2021)
+
+The 0.13 release updates the core API in order to match the HTTPX Transport API,
+introduced in HTTPX 0.18 onwards.
+
+An example of making requests with the new interface is:
+
+```python
+with httpcore.SyncConnectionPool() as http:
+    status_code, headers, stream, extensions = http.handle_request(
+        method=b'GET',
+        url=(b'https', b'example.org', 443, b'/'),
+        headers=[(b'host', b'example.org'), (b'user-agent', b'httpcore')],
+        stream=httpcore.ByteStream(b''),
+        extensions={}
+    )
+    body = stream.read()
+    print(status_code, body)
+```
+
+### Changed
+
+- The `.request()` method is now `handle_request()`. (Pull #296)
+- The `.arequest()` method is now `.handle_async_request()`. (Pull #296)
+- The `headers` argument is no longer optional. (Pull #296)
+- The `stream` argument is no longer optional. (Pull #296)
+- The `ext` argument is now named `extensions`, and is no longer optional. (Pull #296)
+- The `"reason"` extension keyword is now named `"reason_phrase"`. (Pull #296)
+- The `"reason_phrase"` and `"http_version"` extensions now use byte strings for their values. (Pull #296)
+- The `httpcore.PlainByteStream()` class becomes `httpcore.ByteStream()`. (Pull #296)
+
+### Added
+
+- Streams now support a `.read()` interface. (Pull #296)
+
+### Fixed
+
+- Task cancellation no longer leaks connections from the connection pool. (Pull #305)
+
+## 0.12.3 (December 7th, 2020)
+
+### Fixed
+
+- Abort SSL connections on close rather than waiting for remote EOF when using `asyncio`. (Pull #167)
+- Fix exception raised in case of connect timeouts when using the `anyio` backend. (Pull #236)
+- Fix `Host` header precedence for `:authority` in HTTP/2. (Pull #241, #243)
+- Handle extra edge case when detecting for socket readability when using `asyncio`. (Pull #242, #244)
+- Fix `asyncio` SSL warning when using proxy tunneling. (Pull #249)
+
+## 0.12.2 (November 20th, 2020)
+
+### Fixed
+
+- Properly wrap connect errors on the asyncio backend. (Pull #235)
+- Fix `ImportError` occurring on Python 3.9 when using the HTTP/1.1 sync client in a multithreaded context. (Pull #237)
+
+## 0.12.1 (November 7th, 2020)
+
+### Added
+
+- Add connect retries. (Pull #221)
+
+### Fixed
+
+- Tweak detection of dropped connections, resolving an issue with open files limits on Linux. (Pull #185)
+- Avoid leaking connections when establishing an HTTP tunnel to a proxy has failed. (Pull #223)
+- Properly wrap OS errors when using `trio`. (Pull #225)
+
+## 0.12.0 (October 6th, 2020)
+
+### Changed
+
+- HTTP header casing is now preserved, rather than always sent in lowercase. (#216 and python-hyper/h11#104)
+
+### Added
+
+- Add Python 3.9 to officially supported versions.
+
+### Fixed
+
+- Gracefully handle a stdlib asyncio bug when a connection is closed while it is in a paused-for-reading state. (#201)
+
+## 0.11.1 (September 28th, 2020)
+
+### Fixed
+
+- Add await to async semaphore release() coroutine (#197)
+- Drop incorrect curio classifier (#192)
+
+## 0.11.0 (September 22nd, 2020)
+
+The Transport API with 0.11.0 has a couple of significant changes.
+
+Firstly we've changed the request interface in order to allow extensions, which will later enable us to support features
+such as trailing headers, HTTP/2 server push, and CONNECT/Upgrade connections.
+
+The interface changes from:
+
+```python
+def request(method, url, headers, stream, timeout):
+    return (http_version, status_code, reason, headers, stream)
+```
+
+To instead include an optional dictionary of extensions on the request and response:
+
+```python
+def request(method, url, headers, stream, ext):
+    return (status_code, headers, stream, ext)
+```
+
+Having an open-ended extensions point will allow us to add later support for various optional features that wouldn't otherwise be supported without these API changes.
+
+In particular:
+
+* Trailing headers support.
+* HTTP/2 Server Push.
+* sendfile.
+* Exposing raw connection on CONNECT, Upgrade, HTTP/2 bi-di streaming.
+* Exposing debug information out of the API, including template name, template context.
+
+Currently extensions are limited to:
+
+* request: `timeout` - Optional. Timeout dictionary.
+* response: `http_version` - Optional. Include the HTTP version used on the response.
+* response: `reason` - Optional. Include the reason phrase used on the response. Only valid with HTTP/1.*.
+
+See https://github.com/encode/httpx/issues/1274#issuecomment-694884553 for the history behind this.
+
+Secondly, the async version of `request` is now namespaced as `arequest`.
+
+This allows concrete transports to support both sync and async implementations on the same class.
+
+### Added
+
+- Add curio support. (Pull #168)
+- Add anyio support, with `backend="anyio"`. (Pull #169)
+
+### Changed
+
+- Update the Transport API to use 'ext' for optional extensions. (Pull #190)
+- Update the Transport API to use `.request` and `.arequest` so implementations can support both sync and async. (Pull #189)
+
+## 0.10.2 (August 20th, 2020)
+
+### Added
+
+- Added Unix Domain Socket support. (Pull #139)
+
+### Fixed
+
+- Always include the port on proxy CONNECT requests. (Pull #154)
+- Fix `max_keepalive_connections` configuration. (Pull #153)
+- Fixes behaviour in HTTP/1.1 where server disconnects can be used to signal the end of the response body. (Pull #164)
+
+## 0.10.1 (August 7th, 2020)
+
+- Include `max_keepalive_connections` on `AsyncHTTPProxy`/`SyncHTTPProxy` classes.
+
+## 0.10.0 (August 7th, 2020)
+
+The most notable change in the 0.10.0 release is that HTTP/2 support is now fully optional.
+
+Use either `pip install httpcore` for HTTP/1.1 support only, or `pip install httpcore[http2]` for HTTP/1.1 and HTTP/2 support.
+
+### Added
+
+- HTTP/2 support becomes optional. (Pull #121, #130)
+- Add `local_address=...` support. (Pull #100, #134)
+- Add `PlainByteStream`, `IteratorByteStream`, `AsyncIteratorByteStream`. The `AsyncByteStream` and `SyncByteStream` classes are now pure interface classes. (#133)
+- Add `LocalProtocolError`, `RemoteProtocolError` exceptions. (Pull #129)
+- Add `UnsupportedProtocol` exception. (Pull #128)
+- Add `.get_connection_info()` method. (Pull #102, #137)
+- Add better TRACE logs. (Pull #101)
+
+### Changed
+
+- `max_keepalive` is deprecated in favour of `max_keepalive_connections`. (Pull #140)
+
+### Fixed
+
+- Improve handling of server disconnects. (Pull #112)
+
+## 0.9.1 (May 27th, 2020)
+
+### Fixed
+
+- Proper host resolution for sync case, including IPv6 support. (Pull #97)
+- Close outstanding connections when connection pool is closed. (Pull #98)
+
+## 0.9.0 (May 21st, 2020)
+
+### Changed
+
+- URL port becomes an `Optional[int]` instead of `int`. (Pull #92)
+
+### Fixed
+
+- Honor HTTP/2 max concurrent streams settings. (Pull #89, #90)
+- Remove incorrect debug log. (Pull #83)
+
+## 0.8.4 (May 11th, 2020)
+
+### Added
+
+- Logging via HTTPCORE_LOG_LEVEL and HTTPX_LOG_LEVEL environment variables
+and TRACE level logging. (Pull #79)
+
+### Fixed
+
+- Reuse of connections on HTTP/2 in close concurrency situations. (Pull #81)
+
+## 0.8.3 (May 6th, 2020)
+
+### Fixed
+
+- Include `Host` and `Accept` headers on proxy "CONNECT" requests.
+- De-duplicate any headers also contained in proxy_headers.
+- HTTP/2 flag not being passed down to proxy connections.
+
+## 0.8.2 (May 3rd, 2020)
+
+### Fixed
+
+- Fix connections using proxy forwarding requests not being added to the
+connection pool properly. (Pull #70)
+
+## 0.8.1 (April 30th, 2020)
+
+### Changed
+
+- Allow inheritance of both `httpcore.AsyncByteStream`, `httpcore.SyncByteStream` without type conflicts.
+
+## 0.8.0 (April 30th, 2020)
+
+### Fixed
+
+- Fixed tunnel proxy support.
+
+### Added
+
+- New `TimeoutException` base class.
+
+## 0.7.0 (March 5th, 2020)
+
+- First integration with HTTPX.
diff --git a/venv/lib/python3.11/site-packages/httpcore-1.0.9.dist-info/RECORD b/venv/lib/python3.11/site-packages/httpcore-1.0.9.dist-info/RECORD
new file mode 100644
index 0000000..75e36f8
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/httpcore-1.0.9.dist-info/RECORD
@@ -0,0 +1,68 @@
+httpcore-1.0.9.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+httpcore-1.0.9.dist-info/METADATA,sha256=_i1P2mGZEol4d54M8n88BFxTGGP83Zh-rMdPOhjUHCE,21529
+httpcore-1.0.9.dist-info/RECORD,,
+httpcore-1.0.9.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+httpcore-1.0.9.dist-info/licenses/LICENSE.md,sha256=_ctZFUx0y6uhahEkL3dAvqnyPW_rVUeRfYxflKgDkqU,1518
+httpcore/__init__.py,sha256=9kT_kqChCCJUTHww24ZmR_ezcdbpRYWksD-gYNzkZP8,3445
+httpcore/__pycache__/__init__.cpython-311.pyc,,
+httpcore/__pycache__/_api.cpython-311.pyc,,
+httpcore/__pycache__/_exceptions.cpython-311.pyc,,
+httpcore/__pycache__/_models.cpython-311.pyc,,
+httpcore/__pycache__/_ssl.cpython-311.pyc,,
+httpcore/__pycache__/_synchronization.cpython-311.pyc,,
+httpcore/__pycache__/_trace.cpython-311.pyc,,
+httpcore/__pycache__/_utils.cpython-311.pyc,,
+httpcore/_api.py,sha256=unZmeDschBWCGCPCwkS3Wot9euK6bg_kKxLtGTxw214,3146
+httpcore/_async/__init__.py,sha256=EWdl2v4thnAHzJpqjU4h2a8DUiGAvNiWrkii9pfhTf0,1221
+httpcore/_async/__pycache__/__init__.cpython-311.pyc,,
+httpcore/_async/__pycache__/connection.cpython-311.pyc,,
+httpcore/_async/__pycache__/connection_pool.cpython-311.pyc,,
+httpcore/_async/__pycache__/http11.cpython-311.pyc,,
+httpcore/_async/__pycache__/http2.cpython-311.pyc,,
+httpcore/_async/__pycache__/http_proxy.cpython-311.pyc,,
+httpcore/_async/__pycache__/interfaces.cpython-311.pyc,,
+httpcore/_async/__pycache__/socks_proxy.cpython-311.pyc,,
+httpcore/_async/connection.py,sha256=6OcPXqMEfc0BU38_-iHUNDd1vKSTc2UVT09XqNb_BOk,8449
+httpcore/_async/connection_pool.py,sha256=DOIQ2s2ZCf9qfwxhzMprTPLqCL8OxGXiKF6qRHxvVyY,17307
+httpcore/_async/http11.py,sha256=-qM9bV7PjSQF5vxs37-eUXOIFwbIjPcZbNliuX9TtBw,13880
+httpcore/_async/http2.py,sha256=azX1fcmtXaIwjputFlZ4vd92J8xwjGOa9ax9QIv4394,23936
+httpcore/_async/http_proxy.py,sha256=2zVkrlv-Ds-rWGaqaXlrhEJiAQFPo23BT3Gq_sWoBXU,14701
+httpcore/_async/interfaces.py,sha256=jTiaWL83pgpGC9ziv90ZfwaKNMmHwmOalzaKiuTxATo,4455
+httpcore/_async/socks_proxy.py,sha256=lLKgLlggPfhFlqi0ODeBkOWvt9CghBBUyqsnsU1tx6Q,13841
+httpcore/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+httpcore/_backends/__pycache__/__init__.cpython-311.pyc,,
+httpcore/_backends/__pycache__/anyio.cpython-311.pyc,, +httpcore/_backends/__pycache__/auto.cpython-311.pyc,, +httpcore/_backends/__pycache__/base.cpython-311.pyc,, +httpcore/_backends/__pycache__/mock.cpython-311.pyc,, +httpcore/_backends/__pycache__/sync.cpython-311.pyc,, +httpcore/_backends/__pycache__/trio.cpython-311.pyc,, +httpcore/_backends/anyio.py,sha256=x8PgEhXRC8bVqsdzk_YJx8Y6d9Tub06CuUSwnbmtqoY,5252 +httpcore/_backends/auto.py,sha256=zO136PKZmsaTDK-HRk84eA-MUg8_2wJf4NvmK432Aio,1662 +httpcore/_backends/base.py,sha256=aShgRdZnMmRhFWHetjumlM73f8Kz1YOAyCUP_4kHslA,3042 +httpcore/_backends/mock.py,sha256=er9T436uSe7NLrfiLa4x6Nuqg5ivQ693CxWYCWsgbH4,4077 +httpcore/_backends/sync.py,sha256=bhE4d9iK9Umxdsdsgm2EfKnXaBms2WggGYU-7jmUujU,7977 +httpcore/_backends/trio.py,sha256=LHu4_Mr5MswQmmT3yE4oLgf9b_JJfeVS4BjDxeJc7Ro,5996 +httpcore/_exceptions.py,sha256=looCKga3_YVYu3s-d3L9RMPRJyhsY7fiuuGxvkOD0c0,1184 +httpcore/_models.py,sha256=IO2CcXcdpovRcLTdGFGB6RyBZdEm2h_TOmoCc4rEKho,17623 +httpcore/_ssl.py,sha256=srqmSNU4iOUvWF-SrJvb8G_YEbHFELOXQOwdDIBTS9c,187 +httpcore/_sync/__init__.py,sha256=JBDIgXt5la1LCJ1sLQeKhjKFpLnpNr8Svs6z2ni3fgg,1141 +httpcore/_sync/__pycache__/__init__.cpython-311.pyc,, +httpcore/_sync/__pycache__/connection.cpython-311.pyc,, +httpcore/_sync/__pycache__/connection_pool.cpython-311.pyc,, +httpcore/_sync/__pycache__/http11.cpython-311.pyc,, +httpcore/_sync/__pycache__/http2.cpython-311.pyc,, +httpcore/_sync/__pycache__/http_proxy.cpython-311.pyc,, +httpcore/_sync/__pycache__/interfaces.cpython-311.pyc,, +httpcore/_sync/__pycache__/socks_proxy.cpython-311.pyc,, +httpcore/_sync/connection.py,sha256=9exGOb3PB-Mp2T1-sckSeL2t-tJ_9-NXomV8ihmWCgU,8238 +httpcore/_sync/connection_pool.py,sha256=a-T8LTsUxc7r0Ww1atfHSDoWPjQ0fA8Ul7S3-F0Mj70,16955 +httpcore/_sync/http11.py,sha256=IFobD1Md5JFlJGKWnh1_Q3epikUryI8qo09v8MiJIEA,13476 +httpcore/_sync/http2.py,sha256=AxU4yhcq68Bn5vqdJYtiXKYUj7nvhYbxz3v4rT4xnvA,23400 +httpcore/_sync/http_proxy.py,sha256=_al_6crKuEZu2wyvu493RZImJdBJnj5oGKNjLOJL2Zo,14463 +httpcore/_sync/interfaces.py,sha256=snXON42vUDHO5JBJvo8D4VWk2Wat44z2OXXHDrjbl94,4344 +httpcore/_sync/socks_proxy.py,sha256=zegZW9Snqj2_992DFJa8_CppOVBkVL4AgwduRkStakQ,13614 +httpcore/_synchronization.py,sha256=zSi13mAColBnknjZBknUC6hKNDQT4C6ijnezZ-r0T2s,9434 +httpcore/_trace.py,sha256=ck6ZoIzYTkdNAIfq5MGeKqBXDtqjOX-qfYwmZFbrGco,3952 +httpcore/_utils.py,sha256=_RLgXYOAYC350ikALV59GZ68IJrdocRZxPs9PjmzdFY,1537 +httpcore/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.11/site-packages/httpcore-1.0.9.dist-info/WHEEL b/venv/lib/python3.11/site-packages/httpcore-1.0.9.dist-info/WHEEL new file mode 100644 index 0000000..12228d4 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore-1.0.9.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.27.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.11/site-packages/httpcore-1.0.9.dist-info/licenses/LICENSE.md b/venv/lib/python3.11/site-packages/httpcore-1.0.9.dist-info/licenses/LICENSE.md new file mode 100644 index 0000000..311b2b5 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore-1.0.9.dist-info/licenses/LICENSE.md @@ -0,0 +1,27 @@ +Copyright © 2020, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.11/site-packages/httpcore/__init__.py b/venv/lib/python3.11/site-packages/httpcore/__init__.py new file mode 100644 index 0000000..9a92dc4 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore/__init__.py @@ -0,0 +1,141 @@ +from ._api import request, stream +from ._async import ( + AsyncConnectionInterface, + AsyncConnectionPool, + AsyncHTTP2Connection, + AsyncHTTP11Connection, + AsyncHTTPConnection, + AsyncHTTPProxy, + AsyncSOCKSProxy, +) +from ._backends.base import ( + SOCKET_OPTION, + AsyncNetworkBackend, + AsyncNetworkStream, + NetworkBackend, + NetworkStream, +) +from ._backends.mock import AsyncMockBackend, AsyncMockStream, MockBackend, MockStream +from ._backends.sync import SyncBackend +from ._exceptions import ( + ConnectError, + ConnectionNotAvailable, + ConnectTimeout, + LocalProtocolError, + NetworkError, + PoolTimeout, + ProtocolError, + ProxyError, + ReadError, + ReadTimeout, + RemoteProtocolError, + TimeoutException, + UnsupportedProtocol, + WriteError, + WriteTimeout, +) +from ._models import URL, Origin, Proxy, Request, Response +from ._ssl import default_ssl_context +from ._sync import ( + ConnectionInterface, + ConnectionPool, + HTTP2Connection, + HTTP11Connection, + HTTPConnection, + HTTPProxy, + SOCKSProxy, +) + +# The 'httpcore.AnyIOBackend' class is conditional on 'anyio' being installed. +try: + from ._backends.anyio import AnyIOBackend +except ImportError: # pragma: nocover + + class AnyIOBackend: # type: ignore + def __init__(self, *args, **kwargs): # type: ignore + msg = ( + "Attempted to use 'httpcore.AnyIOBackend' but 'anyio' is not installed." + ) + raise RuntimeError(msg) + + +# The 'httpcore.TrioBackend' class is conditional on 'trio' being installed. +try: + from ._backends.trio import TrioBackend +except ImportError: # pragma: nocover + + class TrioBackend: # type: ignore + def __init__(self, *args, **kwargs): # type: ignore + msg = "Attempted to use 'httpcore.TrioBackend' but 'trio' is not installed." 
+ raise RuntimeError(msg) + + +__all__ = [ + # top-level requests + "request", + "stream", + # models + "Origin", + "URL", + "Request", + "Response", + "Proxy", + # async + "AsyncHTTPConnection", + "AsyncConnectionPool", + "AsyncHTTPProxy", + "AsyncHTTP11Connection", + "AsyncHTTP2Connection", + "AsyncConnectionInterface", + "AsyncSOCKSProxy", + # sync + "HTTPConnection", + "ConnectionPool", + "HTTPProxy", + "HTTP11Connection", + "HTTP2Connection", + "ConnectionInterface", + "SOCKSProxy", + # network backends, implementations + "SyncBackend", + "AnyIOBackend", + "TrioBackend", + # network backends, mock implementations + "AsyncMockBackend", + "AsyncMockStream", + "MockBackend", + "MockStream", + # network backends, interface + "AsyncNetworkStream", + "AsyncNetworkBackend", + "NetworkStream", + "NetworkBackend", + # util + "default_ssl_context", + "SOCKET_OPTION", + # exceptions + "ConnectionNotAvailable", + "ProxyError", + "ProtocolError", + "LocalProtocolError", + "RemoteProtocolError", + "UnsupportedProtocol", + "TimeoutException", + "PoolTimeout", + "ConnectTimeout", + "ReadTimeout", + "WriteTimeout", + "NetworkError", + "ConnectError", + "ReadError", + "WriteError", +] + +__version__ = "1.0.9" + + +__locals = locals() +for __name in __all__: + # Exclude SOCKET_OPTION, it causes AttributeError on Python 3.14 + if not __name.startswith(("__", "SOCKET_OPTION")): + setattr(__locals[__name], "__module__", "httpcore") # noqa diff --git a/venv/lib/python3.11/site-packages/httpcore/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpcore/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..244e000 Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpcore/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpcore/__pycache__/_api.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpcore/__pycache__/_api.cpython-311.pyc new file mode 100644 index 0000000..877535d Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpcore/__pycache__/_api.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpcore/__pycache__/_exceptions.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpcore/__pycache__/_exceptions.cpython-311.pyc new file mode 100644 index 0000000..a53cfab Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpcore/__pycache__/_exceptions.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpcore/__pycache__/_models.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpcore/__pycache__/_models.cpython-311.pyc new file mode 100644 index 0000000..ff1ed5c Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpcore/__pycache__/_models.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpcore/__pycache__/_ssl.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpcore/__pycache__/_ssl.cpython-311.pyc new file mode 100644 index 0000000..3e6ce65 Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpcore/__pycache__/_ssl.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpcore/__pycache__/_synchronization.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpcore/__pycache__/_synchronization.cpython-311.pyc new file mode 100644 index 0000000..c769d71 Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpcore/__pycache__/_synchronization.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpcore/__pycache__/_trace.cpython-311.pyc 
b/venv/lib/python3.11/site-packages/httpcore/__pycache__/_trace.cpython-311.pyc
new file mode 100644
index 0000000..3b494ec
Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpcore/__pycache__/_trace.cpython-311.pyc differ
diff --git a/venv/lib/python3.11/site-packages/httpcore/__pycache__/_utils.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpcore/__pycache__/_utils.cpython-311.pyc
new file mode 100644
index 0000000..4095c06
Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpcore/__pycache__/_utils.cpython-311.pyc differ
diff --git a/venv/lib/python3.11/site-packages/httpcore/_api.py b/venv/lib/python3.11/site-packages/httpcore/_api.py
new file mode 100644
index 0000000..38b961d
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/httpcore/_api.py
@@ -0,0 +1,94 @@
+from __future__ import annotations
+
+import contextlib
+import typing
+
+from ._models import URL, Extensions, HeaderTypes, Response
+from ._sync.connection_pool import ConnectionPool
+
+
+def request(
+    method: bytes | str,
+    url: URL | bytes | str,
+    *,
+    headers: HeaderTypes = None,
+    content: bytes | typing.Iterator[bytes] | None = None,
+    extensions: Extensions | None = None,
+) -> Response:
+    """
+    Sends an HTTP request, returning the response.
+
+    ```
+    response = httpcore.request("GET", "https://www.example.com/")
+    ```
+
+    Arguments:
+        method: The HTTP method for the request. Typically one of `"GET"`,
+            `"OPTIONS"`, `"HEAD"`, `"POST"`, `"PUT"`, `"PATCH"`, or `"DELETE"`.
+        url: The URL of the HTTP request. Either as an instance of `httpcore.URL`,
+            or as str/bytes.
+        headers: The HTTP request headers. Either as a dictionary of str/bytes,
+            or as a list of two-tuples of str/bytes.
+        content: The content of the request body. Either as bytes,
+            or as a bytes iterator.
+        extensions: A dictionary of optional extra information included on the request.
+            Possible keys include `"timeout"`.
+
+    Returns:
+        An instance of `httpcore.Response`.
+    """
+    with ConnectionPool() as pool:
+        return pool.request(
+            method=method,
+            url=url,
+            headers=headers,
+            content=content,
+            extensions=extensions,
+        )
+
+
+@contextlib.contextmanager
+def stream(
+    method: bytes | str,
+    url: URL | bytes | str,
+    *,
+    headers: HeaderTypes = None,
+    content: bytes | typing.Iterator[bytes] | None = None,
+    extensions: Extensions | None = None,
+) -> typing.Iterator[Response]:
+    """
+    Sends an HTTP request, returning the response within a context manager.
+
+    ```
+    with httpcore.stream("GET", "https://www.example.com/") as response:
+        ...
+    ```
+
+    When using the `stream()` function, the body of the response will not be
+    automatically read. If you want to access the response body you should
+    either use `content = response.read()`, or `for chunk in response.iter_stream()`.
+
+    Arguments:
+        method: The HTTP method for the request. Typically one of `"GET"`,
+            `"OPTIONS"`, `"HEAD"`, `"POST"`, `"PUT"`, `"PATCH"`, or `"DELETE"`.
+        url: The URL of the HTTP request. Either as an instance of `httpcore.URL`,
+            or as str/bytes.
+        headers: The HTTP request headers. Either as a dictionary of str/bytes,
+            or as a list of two-tuples of str/bytes.
+        content: The content of the request body. Either as bytes,
+            or as a bytes iterator.
+        extensions: A dictionary of optional extra information included on the request.
+            Possible keys include `"timeout"`.
+
+    Returns:
+        An instance of `httpcore.Response`.
+    """
+    with ConnectionPool() as pool:
+        with pool.stream(
+            method=method,
+            url=url,
+            headers=headers,
+            content=content,
+            extensions=extensions,
+        ) as response:
+            yield response
diff --git a/venv/lib/python3.11/site-packages/httpcore/_async/__init__.py b/venv/lib/python3.11/site-packages/httpcore/_async/__init__.py
new file mode 100644
index 0000000..88dc7f0
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/httpcore/_async/__init__.py
@@ -0,0 +1,39 @@
+from .connection import AsyncHTTPConnection
+from .connection_pool import AsyncConnectionPool
+from .http11 import AsyncHTTP11Connection
+from .http_proxy import AsyncHTTPProxy
+from .interfaces import AsyncConnectionInterface
+
+try:
+    from .http2 import AsyncHTTP2Connection
+except ImportError:  # pragma: nocover
+
+    class AsyncHTTP2Connection:  # type: ignore
+        def __init__(self, *args, **kwargs) -> None:  # type: ignore
+            raise RuntimeError(
+                "Attempted to use http2 support, but the `h2` package is not "
+                "installed. Use 'pip install httpcore[http2]'."
+            )
+
+
+try:
+    from .socks_proxy import AsyncSOCKSProxy
+except ImportError:  # pragma: nocover
+
+    class AsyncSOCKSProxy:  # type: ignore
+        def __init__(self, *args, **kwargs) -> None:  # type: ignore
+            raise RuntimeError(
+                "Attempted to use SOCKS support, but the `socksio` package is not "
+                "installed. Use 'pip install httpcore[socks]'."
+            )
+
+
+__all__ = [
+    "AsyncHTTPConnection",
+    "AsyncConnectionPool",
+    "AsyncHTTPProxy",
+    "AsyncHTTP11Connection",
+    "AsyncHTTP2Connection",
+    "AsyncConnectionInterface",
+    "AsyncSOCKSProxy",
+]
diff --git a/venv/lib/python3.11/site-packages/httpcore/_async/connection.py b/venv/lib/python3.11/site-packages/httpcore/_async/connection.py
new file mode 100644
index 0000000..b42581d
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/httpcore/_async/connection.py
@@ -0,0 +1,222 @@
+from __future__ import annotations
+
+import itertools
+import logging
+import ssl
+import types
+import typing
+
+from .._backends.auto import AutoBackend
+from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream
+from .._exceptions import ConnectError, ConnectTimeout
+from .._models import Origin, Request, Response
+from .._ssl import default_ssl_context
+from .._synchronization import AsyncLock
+from .._trace import Trace
+from .http11 import AsyncHTTP11Connection
+from .interfaces import AsyncConnectionInterface
+
+RETRIES_BACKOFF_FACTOR = 0.5  # 0s, 0.5s, 1s, 2s, 4s, etc.
+
+
+logger = logging.getLogger("httpcore.connection")
+
+
+def exponential_backoff(factor: float) -> typing.Iterator[float]:
+    """
+    Generate a geometric sequence that has a ratio of 2 and starts with 0.
+ + For example: + - `factor = 2`: `0, 2, 4, 8, 16, 32, 64, ...` + - `factor = 3`: `0, 3, 6, 12, 24, 48, 96, ...` + """ + yield 0 + for n in itertools.count(): + yield factor * 2**n + + +class AsyncHTTPConnection(AsyncConnectionInterface): + def __init__( + self, + origin: Origin, + ssl_context: ssl.SSLContext | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: str | None = None, + uds: str | None = None, + network_backend: AsyncNetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + self._origin = origin + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._retries = retries + self._local_address = local_address + self._uds = uds + + self._network_backend: AsyncNetworkBackend = ( + AutoBackend() if network_backend is None else network_backend + ) + self._connection: AsyncConnectionInterface | None = None + self._connect_failed: bool = False + self._request_lock = AsyncLock() + self._socket_options = socket_options + + async def handle_async_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection to {self._origin}" + ) + + try: + async with self._request_lock: + if self._connection is None: + stream = await self._connect(request) + + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import AsyncHTTP2Connection + + self._connection = AsyncHTTP2Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = AsyncHTTP11Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except BaseException as exc: + self._connect_failed = True + raise exc + + return await self._connection.handle_async_request(request) + + async def _connect(self, request: Request) -> AsyncNetworkStream: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + retries_left = self._retries + delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) + + while True: + try: + if self._uds is None: + kwargs = { + "host": self._origin.host.decode("ascii"), + "port": self._origin.port, + "local_address": self._local_address, + "timeout": timeout, + "socket_options": self._socket_options, + } + async with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = await self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + else: + kwargs = { + "path": self._uds, + "timeout": timeout, + "socket_options": self._socket_options, + } + async with Trace( + "connect_unix_socket", logger, request, kwargs + ) as trace: + stream = await self._network_backend.connect_unix_socket( + **kwargs + ) + trace.return_value = stream + + if self._origin.scheme in (b"https", b"wss"): + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or 
self._origin.host.decode("ascii"), + "timeout": timeout, + } + async with Trace("start_tls", logger, request, kwargs) as trace: + stream = await stream.start_tls(**kwargs) + trace.return_value = stream + return stream + except (ConnectError, ConnectTimeout): + if retries_left <= 0: + raise + retries_left -= 1 + delay = next(delays) + async with Trace("retry", logger, request, kwargs) as trace: + await self._network_backend.sleep(delay) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + async def aclose(self) -> None: + if self._connection is not None: + async with Trace("close", logger, None, {}): + await self._connection.aclose() + + def is_available(self) -> bool: + if self._connection is None: + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
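    # --- Editor's notes (illustrative sketches, not part of httpcore) ---
    #
    # 1. The retry loop in _connect() pairs exponential_backoff() with a
    #    retry counter; with RETRIES_BACKOFF_FACTOR the first delays are:
    #
    #        import itertools
    #        list(itertools.islice(exponential_backoff(0.5), 5))
    #        # -> [0, 0.5, 1.0, 2.0, 4.0]
    #
    # 2. A minimal sketch of working with a connection instance directly,
    #    assuming the public `httpcore` package exports used below:
    #
    #        import asyncio
    #        import httpcore
    #
    #        async def main() -> None:
    #            origin = httpcore.Origin(b"https", b"example.com", 443)
    #            async with httpcore.AsyncHTTPConnection(origin=origin, retries=3) as conn:
    #                response = await conn.request(b"GET", "https://example.com/")
    #                print(response.status)
    #
    #        asyncio.run(main())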
+ + async def __aenter__(self) -> AsyncHTTPConnection: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + await self.aclose() diff --git a/venv/lib/python3.11/site-packages/httpcore/_async/connection_pool.py b/venv/lib/python3.11/site-packages/httpcore/_async/connection_pool.py new file mode 100644 index 0000000..96e973d --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore/_async/connection_pool.py @@ -0,0 +1,420 @@ +from __future__ import annotations + +import ssl +import sys +import types +import typing + +from .._backends.auto import AutoBackend +from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend +from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol +from .._models import Origin, Proxy, Request, Response +from .._synchronization import AsyncEvent, AsyncShieldCancellation, AsyncThreadLock +from .connection import AsyncHTTPConnection +from .interfaces import AsyncConnectionInterface, AsyncRequestInterface + + +class AsyncPoolRequest: + def __init__(self, request: Request) -> None: + self.request = request + self.connection: AsyncConnectionInterface | None = None + self._connection_acquired = AsyncEvent() + + def assign_to_connection(self, connection: AsyncConnectionInterface | None) -> None: + self.connection = connection + self._connection_acquired.set() + + def clear_connection(self) -> None: + self.connection = None + self._connection_acquired = AsyncEvent() + + async def wait_for_connection( + self, timeout: float | None = None + ) -> AsyncConnectionInterface: + if self.connection is None: + await self._connection_acquired.wait(timeout=timeout) + assert self.connection is not None + return self.connection + + def is_queued(self) -> bool: + return self.connection is None + + +class AsyncConnectionPool(AsyncRequestInterface): + """ + A connection pool for making HTTP requests. + """ + + def __init__( + self, + ssl_context: ssl.SSLContext | None = None, + proxy: Proxy | None = None, + max_connections: int | None = 10, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: str | None = None, + uds: str | None = None, + network_backend: AsyncNetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish a + connection. + local_address: Local address to connect from. 
Can also be used to connect + using a particular address family. Using `local_address="0.0.0.0"` + will connect using an `AF_INET` address (IPv4), while using + `local_address="::"` will connect using an `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. + socket_options: Socket options that have to be included + in the TCP socket when the connection was established. + """ + self._ssl_context = ssl_context + self._proxy = proxy + self._max_connections = ( + sys.maxsize if max_connections is None else max_connections + ) + self._max_keepalive_connections = ( + sys.maxsize + if max_keepalive_connections is None + else max_keepalive_connections + ) + self._max_keepalive_connections = min( + self._max_connections, self._max_keepalive_connections + ) + + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._retries = retries + self._local_address = local_address + self._uds = uds + + self._network_backend = ( + AutoBackend() if network_backend is None else network_backend + ) + self._socket_options = socket_options + + # The mutable state on a connection pool is the queue of incoming requests, + # and the set of connections that are servicing those requests. + self._connections: list[AsyncConnectionInterface] = [] + self._requests: list[AsyncPoolRequest] = [] + + # We only mutate the state of the connection pool within an 'optional_thread_lock' + # context. This holds a threading lock unless we're running in async mode, + # in which case it is a no-op. + self._optional_thread_lock = AsyncThreadLock() + + def create_connection(self, origin: Origin) -> AsyncConnectionInterface: + if self._proxy is not None: + if self._proxy.url.scheme in (b"socks5", b"socks5h"): + from .socks_proxy import AsyncSocks5Connection + + return AsyncSocks5Connection( + proxy_origin=self._proxy.url.origin, + proxy_auth=self._proxy.auth, + remote_origin=origin, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + elif origin.scheme == b"http": + from .http_proxy import AsyncForwardHTTPConnection + + return AsyncForwardHTTPConnection( + proxy_origin=self._proxy.url.origin, + proxy_headers=self._proxy.headers, + proxy_ssl_context=self._proxy.ssl_context, + remote_origin=origin, + keepalive_expiry=self._keepalive_expiry, + network_backend=self._network_backend, + ) + from .http_proxy import AsyncTunnelHTTPConnection + + return AsyncTunnelHTTPConnection( + proxy_origin=self._proxy.url.origin, + proxy_headers=self._proxy.headers, + proxy_ssl_context=self._proxy.ssl_context, + remote_origin=origin, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + return AsyncHTTPConnection( + origin=origin, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + retries=self._retries, + local_address=self._local_address, + uds=self._uds, + network_backend=self._network_backend, + socket_options=self._socket_options, + ) + + @property + def connections(self) -> list[AsyncConnectionInterface]: + """ + Return a list of the connections currently in the pool. 
+
+        For example:
+
+        ```python
+        >>> pool.connections
+        [
+            <AsyncHTTPConnection ['https://example.com:443', HTTP/1.1, ACTIVE, Request Count: 6]>,
+            <AsyncHTTPConnection ['https://example.com:443', HTTP/1.1, IDLE, Request Count: 9]>,
+            <AsyncHTTPConnection ['http://example.com:80', HTTP/1.1, IDLE, Request Count: 1]>,
+        ]
+        ```
+        """
+        return list(self._connections)
+
+    async def handle_async_request(self, request: Request) -> Response:
+        """
+        Send an HTTP request, and return an HTTP response.
+
+        This is the core implementation that is called into by `.request()` or `.stream()`.
+        """
+        scheme = request.url.scheme.decode()
+        if scheme == "":
+            raise UnsupportedProtocol(
+                "Request URL is missing an 'http://' or 'https://' protocol."
+            )
+        if scheme not in ("http", "https", "ws", "wss"):
+            raise UnsupportedProtocol(
+                f"Request URL has an unsupported protocol '{scheme}://'."
+            )
+
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("pool", None)
+
+        with self._optional_thread_lock:
+            # Add the incoming request to our request queue.
+            pool_request = AsyncPoolRequest(request)
+            self._requests.append(pool_request)
+
+        try:
+            while True:
+                with self._optional_thread_lock:
+                    # Assign incoming requests to available connections,
+                    # closing or creating new connections as required.
+                    closing = self._assign_requests_to_connections()
+                await self._close_connections(closing)
+
+                # Wait until this request has an assigned connection.
+                connection = await pool_request.wait_for_connection(timeout=timeout)
+
+                try:
+                    # Send the request on the assigned connection.
+                    response = await connection.handle_async_request(
+                        pool_request.request
+                    )
+                except ConnectionNotAvailable:
+                    # In some cases a connection may initially be available to
+                    # handle a request, but then become unavailable.
+                    #
+                    # In this case we clear the connection and try again.
+                    pool_request.clear_connection()
+                else:
+                    break  # pragma: nocover
+
+        except BaseException as exc:
+            with self._optional_thread_lock:
+                # For any exception or cancellation we remove the request from
+                # the queue, and then re-assign requests to connections.
+                self._requests.remove(pool_request)
+                closing = self._assign_requests_to_connections()
+
+            await self._close_connections(closing)
+            raise exc from None
+
+        # Return the response. Note that in this case we still have to manage
+        # the point at which the response is closed.
+        assert isinstance(response.stream, typing.AsyncIterable)
+        return Response(
+            status=response.status,
+            headers=response.headers,
+            content=PoolByteStream(
+                stream=response.stream, pool_request=pool_request, pool=self
+            ),
+            extensions=response.extensions,
+        )
+
+    def _assign_requests_to_connections(self) -> list[AsyncConnectionInterface]:
+        """
+        Manage the state of the connection pool, assigning incoming
+        requests to connections as available.
+
+        Called whenever a new request is added or removed from the pool.
+
+        Any closing connections are returned, allowing the I/O for closing
+        those connections to be handled separately.
+        """
+        closing_connections = []
+
+        # First we handle cleaning up any connections that are closed,
+        # have expired their keep-alive, or surplus idle connections.
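        # --- Editor's note (illustrative, not httpcore source): with, say,
        # max_keepalive_connections=2 and five idle connections in the pool,
        # the pass below closes the three surplus ones:
        #
        #     pool = AsyncConnectionPool(max_connections=10,
        #                                max_keepalive_connections=2)
        #
        # The connections to close are returned to the caller, which performs
        # the actual close I/O outside of the pool lock.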
+ for connection in list(self._connections): + if connection.is_closed(): + # log: "removing closed connection" + self._connections.remove(connection) + elif connection.has_expired(): + # log: "closing expired connection" + self._connections.remove(connection) + closing_connections.append(connection) + elif ( + connection.is_idle() + and len([connection.is_idle() for connection in self._connections]) + > self._max_keepalive_connections + ): + # log: "closing idle connection" + self._connections.remove(connection) + closing_connections.append(connection) + + # Assign queued requests to connections. + queued_requests = [request for request in self._requests if request.is_queued()] + for pool_request in queued_requests: + origin = pool_request.request.url.origin + available_connections = [ + connection + for connection in self._connections + if connection.can_handle_request(origin) and connection.is_available() + ] + idle_connections = [ + connection for connection in self._connections if connection.is_idle() + ] + + # There are three cases for how we may be able to handle the request: + # + # 1. There is an existing connection that can handle the request. + # 2. We can create a new connection to handle the request. + # 3. We can close an idle connection and then create a new connection + # to handle the request. + if available_connections: + # log: "reusing existing connection" + connection = available_connections[0] + pool_request.assign_to_connection(connection) + elif len(self._connections) < self._max_connections: + # log: "creating new connection" + connection = self.create_connection(origin) + self._connections.append(connection) + pool_request.assign_to_connection(connection) + elif idle_connections: + # log: "closing idle connection" + connection = idle_connections[0] + self._connections.remove(connection) + closing_connections.append(connection) + # log: "creating new connection" + connection = self.create_connection(origin) + self._connections.append(connection) + pool_request.assign_to_connection(connection) + + return closing_connections + + async def _close_connections(self, closing: list[AsyncConnectionInterface]) -> None: + # Close connections which have been removed from the pool. + with AsyncShieldCancellation(): + for connection in closing: + await connection.aclose() + + async def aclose(self) -> None: + # Explicitly close the connection pool. + # Clears all existing requests and connections. 
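        # --- Editor's note: a minimal pool usage sketch (hypothetical, not
        # part of httpcore); the async context-manager form guarantees that
        # aclose() runs even on error:
        #
        #     import asyncio
        #     import httpcore
        #
        #     async def main() -> None:
        #         async with httpcore.AsyncConnectionPool() as pool:
        #             response = await pool.request(b"GET", "https://example.com/")
        #             print(response.status, pool.connections)
        #
        #     asyncio.run(main())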
+ with self._optional_thread_lock: + closing_connections = list(self._connections) + self._connections = [] + await self._close_connections(closing_connections) + + async def __aenter__(self) -> AsyncConnectionPool: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + await self.aclose() + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + with self._optional_thread_lock: + request_is_queued = [request.is_queued() for request in self._requests] + connection_is_idle = [ + connection.is_idle() for connection in self._connections + ] + + num_active_requests = request_is_queued.count(False) + num_queued_requests = request_is_queued.count(True) + num_active_connections = connection_is_idle.count(False) + num_idle_connections = connection_is_idle.count(True) + + requests_info = ( + f"Requests: {num_active_requests} active, {num_queued_requests} queued" + ) + connection_info = ( + f"Connections: {num_active_connections} active, {num_idle_connections} idle" + ) + + return f"<{class_name} [{requests_info} | {connection_info}]>" + + +class PoolByteStream: + def __init__( + self, + stream: typing.AsyncIterable[bytes], + pool_request: AsyncPoolRequest, + pool: AsyncConnectionPool, + ) -> None: + self._stream = stream + self._pool_request = pool_request + self._pool = pool + self._closed = False + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + try: + async for part in self._stream: + yield part + except BaseException as exc: + await self.aclose() + raise exc from None + + async def aclose(self) -> None: + if not self._closed: + self._closed = True + with AsyncShieldCancellation(): + if hasattr(self._stream, "aclose"): + await self._stream.aclose() + + with self._pool._optional_thread_lock: + self._pool._requests.remove(self._pool_request) + closing = self._pool._assign_requests_to_connections() + + await self._pool._close_connections(closing) diff --git a/venv/lib/python3.11/site-packages/httpcore/_async/http11.py b/venv/lib/python3.11/site-packages/httpcore/_async/http11.py new file mode 100644 index 0000000..e6d6d70 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore/_async/http11.py @@ -0,0 +1,379 @@ +from __future__ import annotations + +import enum +import logging +import ssl +import time +import types +import typing + +import h11 + +from .._backends.base import AsyncNetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, + WriteError, + map_exceptions, +) +from .._models import Origin, Request, Response +from .._synchronization import AsyncLock, AsyncShieldCancellation +from .._trace import Trace +from .interfaces import AsyncConnectionInterface + +logger = logging.getLogger("httpcore.http11") + + +# A subset of `h11.Event` types supported by `_send_event` +H11SendEvent = typing.Union[ + h11.Request, + h11.Data, + h11.EndOfMessage, +] + + +class HTTPConnectionState(enum.IntEnum): + NEW = 0 + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class AsyncHTTP11Connection(AsyncConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024 + + def __init__( + self, + origin: Origin, + stream: AsyncNetworkStream, + keepalive_expiry: float | None = None, + ) -> None: + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: float | None = keepalive_expiry + self._expire_at: float | None = None + self._state = 
HTTPConnectionState.NEW
+        self._state_lock = AsyncLock()
+        self._request_count = 0
+        self._h11_state = h11.Connection(
+            our_role=h11.CLIENT,
+            max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE,
+        )
+
+    async def handle_async_request(self, request: Request) -> Response:
+        if not self.can_handle_request(request.url.origin):
+            raise RuntimeError(
+                f"Attempted to send request to {request.url.origin} on connection "
+                f"to {self._origin}"
+            )
+
+        async with self._state_lock:
+            if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE):
+                self._request_count += 1
+                self._state = HTTPConnectionState.ACTIVE
+                self._expire_at = None
+            else:
+                raise ConnectionNotAvailable()
+
+        try:
+            kwargs = {"request": request}
+            try:
+                async with Trace(
+                    "send_request_headers", logger, request, kwargs
+                ) as trace:
+                    await self._send_request_headers(**kwargs)
+                async with Trace("send_request_body", logger, request, kwargs) as trace:
+                    await self._send_request_body(**kwargs)
+            except WriteError:
+                # If we get a write error while we're writing the request,
+                # then we suppress this error and move on to attempting to
+                # read the response. Servers can sometimes close the request
+                # pre-emptively and then respond with a well-formed HTTP
+                # error response.
+                pass
+
+            async with Trace(
+                "receive_response_headers", logger, request, kwargs
+            ) as trace:
+                (
+                    http_version,
+                    status,
+                    reason_phrase,
+                    headers,
+                    trailing_data,
+                ) = await self._receive_response_headers(**kwargs)
+                trace.return_value = (
+                    http_version,
+                    status,
+                    reason_phrase,
+                    headers,
+                )
+
+            network_stream = self._network_stream
+
+            # CONNECT or Upgrade request
+            if (status == 101) or (
+                (request.method == b"CONNECT") and (200 <= status < 300)
+            ):
+                network_stream = AsyncHTTP11UpgradeStream(network_stream, trailing_data)
+
+            return Response(
+                status=status,
+                headers=headers,
+                content=HTTP11ConnectionByteStream(self, request),
+                extensions={
+                    "http_version": http_version,
+                    "reason_phrase": reason_phrase,
+                    "network_stream": network_stream,
+                },
+            )
+        except BaseException as exc:
+            with AsyncShieldCancellation():
+                async with Trace("response_closed", logger, request) as trace:
+                    await self._response_closed()
+            raise exc
+
+    # Sending the request...
+
+    async def _send_request_headers(self, request: Request) -> None:
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("write", None)
+
+        with map_exceptions({h11.LocalProtocolError: LocalProtocolError}):
+            event = h11.Request(
+                method=request.method,
+                target=request.url.target,
+                headers=request.headers,
+            )
+        await self._send_event(event, timeout=timeout)
+
+    async def _send_request_body(self, request: Request) -> None:
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("write", None)
+
+        assert isinstance(request.stream, typing.AsyncIterable)
+        async for chunk in request.stream:
+            event = h11.Data(data=chunk)
+            await self._send_event(event, timeout=timeout)
+
+        await self._send_event(h11.EndOfMessage(), timeout=timeout)
+
+    async def _send_event(self, event: h11.Event, timeout: float | None = None) -> None:
+        bytes_to_send = self._h11_state.send(event)
+        if bytes_to_send is not None:
+            await self._network_stream.write(bytes_to_send, timeout=timeout)
+
+    # Receiving the response...
+
+    async def _receive_response_headers(
+        self, request: Request
+    ) -> tuple[bytes, int, bytes, list[tuple[bytes, bytes]], bytes]:
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("read", None)
+
+        while True:
+            event = await self._receive_event(timeout=timeout)
+            if isinstance(event, h11.Response):
+                break
+            if (
+                isinstance(event, h11.InformationalResponse)
+                and event.status_code == 101
+            ):
+                break
+
+        http_version = b"HTTP/" + event.http_version
+
+        # h11 version 0.11+ supports a `raw_items` interface to get the
+        # raw header casing, rather than the enforced lowercase headers.
+        headers = event.headers.raw_items()
+
+        trailing_data, _ = self._h11_state.trailing_data
+
+        return http_version, event.status_code, event.reason, headers, trailing_data
+
+    async def _receive_response_body(
+        self, request: Request
+    ) -> typing.AsyncIterator[bytes]:
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("read", None)
+
+        while True:
+            event = await self._receive_event(timeout=timeout)
+            if isinstance(event, h11.Data):
+                yield bytes(event.data)
+            elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)):
+                break
+
+    async def _receive_event(
+        self, timeout: float | None = None
+    ) -> h11.Event | type[h11.PAUSED]:
+        while True:
+            with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}):
+                event = self._h11_state.next_event()
+
+            if event is h11.NEED_DATA:
+                data = await self._network_stream.read(
+                    self.READ_NUM_BYTES, timeout=timeout
+                )
+
+                # If we feed this case through h11 we'll raise an exception like:
+                #
+                #     httpcore.RemoteProtocolError: can't handle event type
+                #     ConnectionClosed when role=SERVER and state=SEND_RESPONSE
+                #
+                # Which is accurate, but not very informative from an end-user
+                # perspective. Instead we handle this case distinctly and treat
+                # it as a ConnectError.
+                if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE:
+                    msg = "Server disconnected without sending a response."
+                    raise RemoteProtocolError(msg)
+
+                self._h11_state.receive_data(data)
+            else:
+                # mypy fails to narrow the type in the if statement above
+                return event  # type: ignore[return-value]
+
+    async def _response_closed(self) -> None:
+        async with self._state_lock:
+            if (
+                self._h11_state.our_state is h11.DONE
+                and self._h11_state.their_state is h11.DONE
+            ):
+                self._state = HTTPConnectionState.IDLE
+                self._h11_state.start_next_cycle()
+                if self._keepalive_expiry is not None:
+                    now = time.monotonic()
+                    self._expire_at = now + self._keepalive_expiry
+            else:
+                await self.aclose()
+
+    # Once the connection is no longer required...
+
+    async def aclose(self) -> None:
+        # Note that this method unilaterally closes the connection, and does
+        # not have any kind of locking in place around it.
+        self._state = HTTPConnectionState.CLOSED
+        await self._network_stream.aclose()
+
+    # The AsyncConnectionInterface methods provide information about the state of
+    # the connection, allowing for a connection pooling implementation to
+    # determine when to reuse and when to close the connection...
+
+    def can_handle_request(self, origin: Origin) -> bool:
+        return origin == self._origin
+
+    def is_available(self) -> bool:
+        # Note that HTTP/1.1 connections in the "NEW" state are not treated as
+        # being "available". The control flow which created the connection will
+        # be able to send an outgoing request, but the connection will not be
+        # acquired from the connection pool for any other request.
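        # (Editor's note, illustrative: the lifecycle is NEW -> ACTIVE ->
        # IDLE -> ACTIVE -> ... -> CLOSED, so a brand-new connection serves
        # its creator's request once before the pool will reuse it.)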
+ return self._state == HTTPConnectionState.IDLE + + def has_expired(self) -> bool: + now = time.monotonic() + keepalive_expired = self._expire_at is not None and now > self._expire_at + + # If the HTTP connection is idle but the socket is readable, then the + # only valid state is that the socket is about to return b"", indicating + # a server-initiated disconnect. + server_disconnected = ( + self._state == HTTPConnectionState.IDLE + and self._network_stream.get_extra_info("is_readable") + ) + + return keepalive_expired or server_disconnected + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/1.1, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. + + async def __aenter__(self) -> AsyncHTTP11Connection: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + await self.aclose() + + +class HTTP11ConnectionByteStream: + def __init__(self, connection: AsyncHTTP11Connection, request: Request) -> None: + self._connection = connection + self._request = request + self._closed = False + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + kwargs = {"request": self._request} + try: + async with Trace("receive_response_body", logger, self._request, kwargs): + async for chunk in self._connection._receive_response_body(**kwargs): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. 
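            # --- Editor's example (hypothetical usage, not httpcore source):
            # this byte stream is what backs the top-level streaming API, e.g.
            #
            #     async with httpcore.AsyncConnectionPool() as pool:
            #         async with pool.stream(b"GET", "https://example.com/") as response:
            #             async for chunk in response.aiter_stream():
            #                 ...  # consume the body incrementally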
+ with AsyncShieldCancellation(): + await self.aclose() + raise exc + + async def aclose(self) -> None: + if not self._closed: + self._closed = True + async with Trace("response_closed", logger, self._request): + await self._connection._response_closed() + + +class AsyncHTTP11UpgradeStream(AsyncNetworkStream): + def __init__(self, stream: AsyncNetworkStream, leading_data: bytes) -> None: + self._stream = stream + self._leading_data = leading_data + + async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + if self._leading_data: + buffer = self._leading_data[:max_bytes] + self._leading_data = self._leading_data[max_bytes:] + return buffer + else: + return await self._stream.read(max_bytes, timeout) + + async def write(self, buffer: bytes, timeout: float | None = None) -> None: + await self._stream.write(buffer, timeout) + + async def aclose(self) -> None: + await self._stream.aclose() + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> AsyncNetworkStream: + return await self._stream.start_tls(ssl_context, server_hostname, timeout) + + def get_extra_info(self, info: str) -> typing.Any: + return self._stream.get_extra_info(info) diff --git a/venv/lib/python3.11/site-packages/httpcore/_async/http2.py b/venv/lib/python3.11/site-packages/httpcore/_async/http2.py new file mode 100644 index 0000000..dbd0bee --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore/_async/http2.py @@ -0,0 +1,592 @@ +from __future__ import annotations + +import enum +import logging +import time +import types +import typing + +import h2.config +import h2.connection +import h2.events +import h2.exceptions +import h2.settings + +from .._backends.base import AsyncNetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, +) +from .._models import Origin, Request, Response +from .._synchronization import AsyncLock, AsyncSemaphore, AsyncShieldCancellation +from .._trace import Trace +from .interfaces import AsyncConnectionInterface + +logger = logging.getLogger("httpcore.http2") + + +def has_body_headers(request: Request) -> bool: + return any( + k.lower() == b"content-length" or k.lower() == b"transfer-encoding" + for k, v in request.headers + ) + + +class HTTPConnectionState(enum.IntEnum): + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class AsyncHTTP2Connection(AsyncConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + CONFIG = h2.config.H2Configuration(validate_inbound_headers=False) + + def __init__( + self, + origin: Origin, + stream: AsyncNetworkStream, + keepalive_expiry: float | None = None, + ): + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: float | None = keepalive_expiry + self._h2_state = h2.connection.H2Connection(config=self.CONFIG) + self._state = HTTPConnectionState.IDLE + self._expire_at: float | None = None + self._request_count = 0 + self._init_lock = AsyncLock() + self._state_lock = AsyncLock() + self._read_lock = AsyncLock() + self._write_lock = AsyncLock() + self._sent_connection_init = False + self._used_all_stream_ids = False + self._connection_error = False + + # Mapping from stream ID to response stream events. + self._events: dict[ + int, + list[ + h2.events.ResponseReceived + | h2.events.DataReceived + | h2.events.StreamEnded + | h2.events.StreamReset, + ], + ] = {} + + # Connection terminated events are stored as state since + # we need to handle them for all streams. 
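        # (Editor's note, illustrative: self._events maps each h2 stream ID
        # to its pending events; after a full response on stream 1 it might
        # hold {1: [ResponseReceived, DataReceived, StreamEnded]}. A
        # ConnectionTerminated event instead affects every stream, hence the
        # dedicated field below.)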
+ self._connection_terminated: h2.events.ConnectionTerminated | None = None + + self._read_exception: Exception | None = None + self._write_exception: Exception | None = None + + async def handle_async_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + # This cannot occur in normal operation, since the connection pool + # will only send requests on connections that handle them. + # It's in place simply for resilience as a guard against incorrect + # usage, for anyone working directly with httpcore connections. + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection " + f"to {self._origin}" + ) + + async with self._state_lock: + if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE): + self._request_count += 1 + self._expire_at = None + self._state = HTTPConnectionState.ACTIVE + else: + raise ConnectionNotAvailable() + + async with self._init_lock: + if not self._sent_connection_init: + try: + sci_kwargs = {"request": request} + async with Trace( + "send_connection_init", logger, request, sci_kwargs + ): + await self._send_connection_init(**sci_kwargs) + except BaseException as exc: + with AsyncShieldCancellation(): + await self.aclose() + raise exc + + self._sent_connection_init = True + + # Initially start with just 1 until the remote server provides + # its max_concurrent_streams value + self._max_streams = 1 + + local_settings_max_streams = ( + self._h2_state.local_settings.max_concurrent_streams + ) + self._max_streams_semaphore = AsyncSemaphore(local_settings_max_streams) + + for _ in range(local_settings_max_streams - self._max_streams): + await self._max_streams_semaphore.acquire() + + await self._max_streams_semaphore.acquire() + + try: + stream_id = self._h2_state.get_next_available_stream_id() + self._events[stream_id] = [] + except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover + self._used_all_stream_ids = True + self._request_count -= 1 + raise ConnectionNotAvailable() + + try: + kwargs = {"request": request, "stream_id": stream_id} + async with Trace("send_request_headers", logger, request, kwargs): + await self._send_request_headers(request=request, stream_id=stream_id) + async with Trace("send_request_body", logger, request, kwargs): + await self._send_request_body(request=request, stream_id=stream_id) + async with Trace( + "receive_response_headers", logger, request, kwargs + ) as trace: + status, headers = await self._receive_response( + request=request, stream_id=stream_id + ) + trace.return_value = (status, headers) + + return Response( + status=status, + headers=headers, + content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id), + extensions={ + "http_version": b"HTTP/2", + "network_stream": self._network_stream, + "stream_id": stream_id, + }, + ) + except BaseException as exc: # noqa: PIE786 + with AsyncShieldCancellation(): + kwargs = {"stream_id": stream_id} + async with Trace("response_closed", logger, request, kwargs): + await self._response_closed(stream_id=stream_id) + + if isinstance(exc, h2.exceptions.ProtocolError): + # One case where h2 can raise a protocol error is when a + # closed frame has been seen by the state machine. + # + # This happens when one stream is reading, and encounters + # a GOAWAY event. Other flows of control may then raise + # a protocol error at any point they interact with the 'h2_state'. + # + # In this case we'll have stored the event, and should raise + # it as a RemoteProtocolError. 
+ if self._connection_terminated: # pragma: nocover + raise RemoteProtocolError(self._connection_terminated) + # If h2 raises a protocol error in some other state then we + # must somehow have made a protocol violation. + raise LocalProtocolError(exc) # pragma: nocover + + raise exc + + async def _send_connection_init(self, request: Request) -> None: + """ + The HTTP/2 connection requires some initial setup before we can start + using individual request/response streams on it. + """ + # Need to set these manually here instead of manipulating via + # __setitem__() otherwise the H2Connection will emit SettingsUpdate + # frames in addition to sending the undesired defaults. + self._h2_state.local_settings = h2.settings.Settings( + client=True, + initial_values={ + # Disable PUSH_PROMISE frames from the server since we don't do anything + # with them for now. Maybe when we support caching? + h2.settings.SettingCodes.ENABLE_PUSH: 0, + # These two are taken from h2 for safe defaults + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100, + h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536, + }, + ) + + # Some websites (*cough* Yahoo *cough*) balk at this setting being + # present in the initial handshake since it's not defined in the original + # RFC despite the RFC mandating ignoring settings you don't know about. + del self._h2_state.local_settings[ + h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL + ] + + self._h2_state.initiate_connection() + self._h2_state.increment_flow_control_window(2**24) + await self._write_outgoing_data(request) + + # Sending the request... + + async def _send_request_headers(self, request: Request, stream_id: int) -> None: + """ + Send the request headers to a given stream ID. + """ + end_stream = not has_body_headers(request) + + # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'. + # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require + # HTTP/1.1 style headers, and map them appropriately if we end up on + # an HTTP/2 connection. + authority = [v for k, v in request.headers if k.lower() == b"host"][0] + + headers = [ + (b":method", request.method), + (b":authority", authority), + (b":scheme", request.url.scheme), + (b":path", request.url.target), + ] + [ + (k.lower(), v) + for k, v in request.headers + if k.lower() + not in ( + b"host", + b"transfer-encoding", + ) + ] + + self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) + self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id) + await self._write_outgoing_data(request) + + async def _send_request_body(self, request: Request, stream_id: int) -> None: + """ + Iterate over the request body sending it to a given stream ID. + """ + if not has_body_headers(request): + return + + assert isinstance(request.stream, typing.AsyncIterable) + async for data in request.stream: + await self._send_stream_data(request, stream_id, data) + await self._send_end_stream(request, stream_id) + + async def _send_stream_data( + self, request: Request, stream_id: int, data: bytes + ) -> None: + """ + Send a single chunk of data in one or more data frames. 
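        (Editor's note, illustrative: each frame is capped at
        min(stream flow-control window, max outbound frame size), so a large
        chunk becomes several DATA frames, pausing on a zero window until the
        peer sends WINDOW_UPDATE.)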
+        """
+        while data:
+            max_flow = await self._wait_for_outgoing_flow(request, stream_id)
+            chunk_size = min(len(data), max_flow)
+            chunk, data = data[:chunk_size], data[chunk_size:]
+            self._h2_state.send_data(stream_id, chunk)
+            await self._write_outgoing_data(request)
+
+    async def _send_end_stream(self, request: Request, stream_id: int) -> None:
+        """
+        Send an empty data frame on a given stream ID with the END_STREAM flag set.
+        """
+        self._h2_state.end_stream(stream_id)
+        await self._write_outgoing_data(request)
+
+    # Receiving the response...
+
+    async def _receive_response(
+        self, request: Request, stream_id: int
+    ) -> tuple[int, list[tuple[bytes, bytes]]]:
+        """
+        Return the response status code and headers for a given stream ID.
+        """
+        while True:
+            event = await self._receive_stream_event(request, stream_id)
+            if isinstance(event, h2.events.ResponseReceived):
+                break
+
+        status_code = 200
+        headers = []
+        assert event.headers is not None
+        for k, v in event.headers:
+            if k == b":status":
+                status_code = int(v.decode("ascii", errors="ignore"))
+            elif not k.startswith(b":"):
+                headers.append((k, v))
+
+        return (status_code, headers)
+
+    async def _receive_response_body(
+        self, request: Request, stream_id: int
+    ) -> typing.AsyncIterator[bytes]:
+        """
+        Iterator that returns the bytes of the response body for a given stream ID.
+        """
+        while True:
+            event = await self._receive_stream_event(request, stream_id)
+            if isinstance(event, h2.events.DataReceived):
+                assert event.flow_controlled_length is not None
+                assert event.data is not None
+                amount = event.flow_controlled_length
+                self._h2_state.acknowledge_received_data(amount, stream_id)
+                await self._write_outgoing_data(request)
+                yield event.data
+            elif isinstance(event, h2.events.StreamEnded):
+                break
+
+    async def _receive_stream_event(
+        self, request: Request, stream_id: int
+    ) -> h2.events.ResponseReceived | h2.events.DataReceived | h2.events.StreamEnded:
+        """
+        Return the next available event for a given stream ID.
+
+        Will read more data from the network if required.
+        """
+        while not self._events.get(stream_id):
+            await self._receive_events(request, stream_id)
+        event = self._events[stream_id].pop(0)
+        if isinstance(event, h2.events.StreamReset):
+            raise RemoteProtocolError(event)
+        return event
+
+    async def _receive_events(
+        self, request: Request, stream_id: int | None = None
+    ) -> None:
+        """
+        Read some data from the network until we see one or more events
+        for a given stream ID.
+        """
+        async with self._read_lock:
+            if self._connection_terminated is not None:
+                last_stream_id = self._connection_terminated.last_stream_id
+                if stream_id and last_stream_id and stream_id > last_stream_id:
+                    self._request_count -= 1
+                    raise ConnectionNotAvailable()
+                raise RemoteProtocolError(self._connection_terminated)
+
+            # This conditional is a bit icky. We don't want to block reading if we've
+            # actually got an event to return for a given stream. We need to do that
+            # check *within* the atomic read lock. Though it also needs to be optional,
+            # because when we call it from `_wait_for_outgoing_flow` we *do* want to
+            # block until we have available flow control, even when we have events
+            # pending for the stream ID we're attempting to send on.
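            # (Editor's note: concretely, _receive_stream_event passes a
            # stream_id and returns once that stream has a pending event,
            # while _wait_for_outgoing_flow calls _receive_events with
            # stream_id=None and keeps reading until WINDOW_UPDATE frames
            # lift the send window above zero.)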
+ if stream_id is None or not self._events.get(stream_id): + events = await self._read_incoming_data(request) + for event in events: + if isinstance(event, h2.events.RemoteSettingsChanged): + async with Trace( + "receive_remote_settings", logger, request + ) as trace: + await self._receive_remote_settings_change(event) + trace.return_value = event + + elif isinstance( + event, + ( + h2.events.ResponseReceived, + h2.events.DataReceived, + h2.events.StreamEnded, + h2.events.StreamReset, + ), + ): + if event.stream_id in self._events: + self._events[event.stream_id].append(event) + + elif isinstance(event, h2.events.ConnectionTerminated): + self._connection_terminated = event + + await self._write_outgoing_data(request) + + async def _receive_remote_settings_change( + self, event: h2.events.RemoteSettingsChanged + ) -> None: + max_concurrent_streams = event.changed_settings.get( + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS + ) + if max_concurrent_streams: + new_max_streams = min( + max_concurrent_streams.new_value, + self._h2_state.local_settings.max_concurrent_streams, + ) + if new_max_streams and new_max_streams != self._max_streams: + while new_max_streams > self._max_streams: + await self._max_streams_semaphore.release() + self._max_streams += 1 + while new_max_streams < self._max_streams: + await self._max_streams_semaphore.acquire() + self._max_streams -= 1 + + async def _response_closed(self, stream_id: int) -> None: + await self._max_streams_semaphore.release() + del self._events[stream_id] + async with self._state_lock: + if self._connection_terminated and not self._events: + await self.aclose() + + elif self._state == HTTPConnectionState.ACTIVE and not self._events: + self._state = HTTPConnectionState.IDLE + if self._keepalive_expiry is not None: + now = time.monotonic() + self._expire_at = now + self._keepalive_expiry + if self._used_all_stream_ids: # pragma: nocover + await self.aclose() + + async def aclose(self) -> None: + # Note that this method unilaterally closes the connection, and does + # not have any kind of locking in place around it. + self._h2_state.close_connection() + self._state = HTTPConnectionState.CLOSED + await self._network_stream.aclose() + + # Wrappers around network read/write operations... + + async def _read_incoming_data(self, request: Request) -> list[h2.events.Event]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + if self._read_exception is not None: + raise self._read_exception # pragma: nocover + + try: + data = await self._network_stream.read(self.READ_NUM_BYTES, timeout) + if data == b"": + raise RemoteProtocolError("Server disconnected") + except Exception as exc: + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future reads. + # (For example, this means that a single read timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. 
+ self._read_exception = exc + self._connection_error = True + raise exc + + events: list[h2.events.Event] = self._h2_state.receive_data(data) + + return events + + async def _write_outgoing_data(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + async with self._write_lock: + data_to_send = self._h2_state.data_to_send() + + if self._write_exception is not None: + raise self._write_exception # pragma: nocover + + try: + await self._network_stream.write(data_to_send, timeout) + except Exception as exc: # pragma: nocover + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future write. + # (For example, this means that a single write timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. + self._write_exception = exc + self._connection_error = True + raise exc + + # Flow control... + + async def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int: + """ + Returns the maximum allowable outgoing flow for a given stream. + + If the allowable flow is zero, then waits on the network until + WindowUpdated frames have increased the flow rate. + https://tools.ietf.org/html/rfc7540#section-6.9 + """ + local_flow: int = self._h2_state.local_flow_control_window(stream_id) + max_frame_size: int = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + while flow == 0: + await self._receive_events(request) + local_flow = self._h2_state.local_flow_control_window(stream_id) + max_frame_size = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + return flow + + # Interface for connection pooling... + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def is_available(self) -> bool: + return ( + self._state != HTTPConnectionState.CLOSED + and not self._connection_error + and not self._used_all_stream_ids + and not ( + self._h2_state.state_machine.state + == h2.connection.ConnectionState.CLOSED + ) + ) + + def has_expired(self) -> bool: + now = time.monotonic() + return self._expire_at is not None and now > self._expire_at + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/2, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
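    # --- Editor's example (hypothetical usage, not httpcore source): the
    # connection code selects this class only when ALPN negotiates "h2" or
    # HTTP/1.1 is disabled, and HTTP/2 must be enabled at the pool level:
    #
    #     async with httpcore.AsyncConnectionPool(http2=True) as pool:
    #         response = await pool.request(b"GET", "https://www.example.com/")
    #         print(response.extensions["http_version"])  # b"HTTP/2" if negotiated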
+ + async def __aenter__(self) -> AsyncHTTP2Connection: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + await self.aclose() + + +class HTTP2ConnectionByteStream: + def __init__( + self, connection: AsyncHTTP2Connection, request: Request, stream_id: int + ) -> None: + self._connection = connection + self._request = request + self._stream_id = stream_id + self._closed = False + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + kwargs = {"request": self._request, "stream_id": self._stream_id} + try: + async with Trace("receive_response_body", logger, self._request, kwargs): + async for chunk in self._connection._receive_response_body( + request=self._request, stream_id=self._stream_id + ): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. + with AsyncShieldCancellation(): + await self.aclose() + raise exc + + async def aclose(self) -> None: + if not self._closed: + self._closed = True + kwargs = {"stream_id": self._stream_id} + async with Trace("response_closed", logger, self._request, kwargs): + await self._connection._response_closed(stream_id=self._stream_id) diff --git a/venv/lib/python3.11/site-packages/httpcore/_async/http_proxy.py b/venv/lib/python3.11/site-packages/httpcore/_async/http_proxy.py new file mode 100644 index 0000000..cc9d920 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore/_async/http_proxy.py @@ -0,0 +1,367 @@ +from __future__ import annotations + +import base64 +import logging +import ssl +import typing + +from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend +from .._exceptions import ProxyError +from .._models import ( + URL, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, +) +from .._ssl import default_ssl_context +from .._synchronization import AsyncLock +from .._trace import Trace +from .connection import AsyncHTTPConnection +from .connection_pool import AsyncConnectionPool +from .http11 import AsyncHTTP11Connection +from .interfaces import AsyncConnectionInterface + +ByteOrStr = typing.Union[bytes, str] +HeadersAsSequence = typing.Sequence[typing.Tuple[ByteOrStr, ByteOrStr]] +HeadersAsMapping = typing.Mapping[ByteOrStr, ByteOrStr] + + +logger = logging.getLogger("httpcore.proxy") + + +def merge_headers( + default_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, + override_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, +) -> list[tuple[bytes, bytes]]: + """ + Append default_headers and override_headers, de-duplicating if a key exists + in both cases. + """ + default_headers = [] if default_headers is None else list(default_headers) + override_headers = [] if override_headers is None else list(override_headers) + has_override = set(key.lower() for key, value in override_headers) + default_headers = [ + (key, value) + for key, value in default_headers + if key.lower() not in has_override + ] + return default_headers + override_headers + + +class AsyncHTTPProxy(AsyncConnectionPool): # pragma: nocover + """ + A connection pool that sends requests via an HTTP proxy. 
+    """
+
+    def __init__(
+        self,
+        proxy_url: URL | bytes | str,
+        proxy_auth: tuple[bytes | str, bytes | str] | None = None,
+        proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None,
+        ssl_context: ssl.SSLContext | None = None,
+        proxy_ssl_context: ssl.SSLContext | None = None,
+        max_connections: int | None = 10,
+        max_keepalive_connections: int | None = None,
+        keepalive_expiry: float | None = None,
+        http1: bool = True,
+        http2: bool = False,
+        retries: int = 0,
+        local_address: str | None = None,
+        uds: str | None = None,
+        network_backend: AsyncNetworkBackend | None = None,
+        socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
+    ) -> None:
+        """
+        A connection pool for making HTTP requests.
+
+        Parameters:
+            proxy_url: The URL to use when connecting to the proxy server.
+                For example `"http://127.0.0.1:8080/"`.
+            proxy_auth: Any proxy authentication as a two-tuple of
+                (username, password). May be either bytes or ascii-only str.
+            proxy_headers: Any HTTP headers to use for the proxy requests.
+                For example `{"Proxy-Authorization": "Basic <username>:<password>"}`.
+            ssl_context: An SSL context to use for verifying connections.
+                If not specified, the default `httpcore.default_ssl_context()`
+                will be used.
+            proxy_ssl_context: The same as `ssl_context`, but for a proxy server rather than a remote origin.
+            max_connections: The maximum number of concurrent HTTP connections that
+                the pool should allow. Any attempt to send a request on a pool that
+                would exceed this amount will block until a connection is available.
+            max_keepalive_connections: The maximum number of idle HTTP connections
+                that will be maintained in the pool.
+            keepalive_expiry: The duration in seconds that an idle HTTP connection
+                may be maintained for before being expired from the pool.
+            http1: A boolean indicating if HTTP/1.1 requests should be supported
+                by the connection pool. Defaults to True.
+            http2: A boolean indicating if HTTP/2 requests should be supported by
+                the connection pool. Defaults to False.
+            retries: The maximum number of retries when trying to establish
+                a connection.
+            local_address: Local address to connect from. Can also be used to
+                connect using a particular address family. Using
+                `local_address="0.0.0.0"` will connect using an `AF_INET` address
+                (IPv4), while using `local_address="::"` will connect using an
+                `AF_INET6` address (IPv6).
+            uds: Path to a Unix Domain Socket to use instead of TCP sockets.
+            network_backend: A backend instance to use for handling network I/O.
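        (Editor's example, hypothetical: plain-HTTP requests are forwarded
        through the proxy, while HTTPS is tunnelled via CONNECT:

            proxy = AsyncHTTPProxy(proxy_url="http://127.0.0.1:8080/")
            response = await proxy.request(b"GET", "https://example.com/")
        )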
+ """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + local_address=local_address, + uds=uds, + socket_options=socket_options, + ) + + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if ( + self._proxy_url.scheme == b"http" and proxy_ssl_context is not None + ): # pragma: no cover + raise RuntimeError( + "The `proxy_ssl_context` argument is not allowed for the http scheme" + ) + + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + if proxy_auth is not None: + username = enforce_bytes(proxy_auth[0], name="proxy_auth") + password = enforce_bytes(proxy_auth[1], name="proxy_auth") + userpass = username + b":" + password + authorization = b"Basic " + base64.b64encode(userpass) + self._proxy_headers = [ + (b"Proxy-Authorization", authorization) + ] + self._proxy_headers + + def create_connection(self, origin: Origin) -> AsyncConnectionInterface: + if origin.scheme == b"http": + return AsyncForwardHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + keepalive_expiry=self._keepalive_expiry, + network_backend=self._network_backend, + proxy_ssl_context=self._proxy_ssl_context, + ) + return AsyncTunnelHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + ssl_context=self._ssl_context, + proxy_ssl_context=self._proxy_ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class AsyncForwardHTTPConnection(AsyncConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None, + keepalive_expiry: float | None = None, + network_backend: AsyncNetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + ) -> None: + self._connection = AsyncHTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._remote_origin = remote_origin + + async def handle_async_request(self, request: Request) -> Response: + headers = merge_headers(self._proxy_headers, request.headers) + url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=bytes(request.url), + ) + proxy_request = Request( + method=request.method, + url=url, + headers=headers, + content=request.stream, + extensions=request.extensions, + ) + return await self._connection.handle_async_request(proxy_request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + async def aclose(self) -> None: + await self._connection.aclose() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return 
self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + +class AsyncTunnelHTTPConnection(AsyncConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + ssl_context: ssl.SSLContext | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + proxy_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + network_backend: AsyncNetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + self._connection: AsyncConnectionInterface = AsyncHTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._connect_lock = AsyncLock() + self._connected = False + + async def handle_async_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("connect", None) + + async with self._connect_lock: + if not self._connected: + target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port) + + connect_url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=target, + ) + connect_headers = merge_headers( + [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers + ) + connect_request = Request( + method=b"CONNECT", + url=connect_url, + headers=connect_headers, + extensions=request.extensions, + ) + connect_response = await self._connection.handle_async_request( + connect_request + ) + + if connect_response.status < 200 or connect_response.status > 299: + reason_bytes = connect_response.extensions.get("reason_phrase", b"") + reason_str = reason_bytes.decode("ascii", errors="ignore") + msg = "%d %s" % (connect_response.status, reason_str) + await self._connection.aclose() + raise ProxyError(msg) + + stream = connect_response.extensions["network_stream"] + + # Upgrade the stream to SSL + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + async with Trace("start_tls", logger, request, kwargs) as trace: + stream = await stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import AsyncHTTP2Connection + + self._connection = AsyncHTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = AsyncHTTP11Connection( + 
origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + + self._connected = True + return await self._connection.handle_async_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + async def aclose(self) -> None: + await self._connection.aclose() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/venv/lib/python3.11/site-packages/httpcore/_async/interfaces.py b/venv/lib/python3.11/site-packages/httpcore/_async/interfaces.py new file mode 100644 index 0000000..361583b --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore/_async/interfaces.py @@ -0,0 +1,137 @@ +from __future__ import annotations + +import contextlib +import typing + +from .._models import ( + URL, + Extensions, + HeaderTypes, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, + include_request_headers, +) + + +class AsyncRequestInterface: + async def request( + self, + method: bytes | str, + url: URL | bytes | str, + *, + headers: HeaderTypes = None, + content: bytes | typing.AsyncIterator[bytes] | None = None, + extensions: Extensions | None = None, + ) -> Response: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. + headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = await self.handle_async_request(request) + try: + await response.aread() + finally: + await response.aclose() + return response + + @contextlib.asynccontextmanager + async def stream( + self, + method: bytes | str, + url: URL | bytes | str, + *, + headers: HeaderTypes = None, + content: bytes | typing.AsyncIterator[bytes] | None = None, + extensions: Extensions | None = None, + ) -> typing.AsyncIterator[Response]: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. 
+ headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = await self.handle_async_request(request) + try: + yield response + finally: + await response.aclose() + + async def handle_async_request(self, request: Request) -> Response: + raise NotImplementedError() # pragma: nocover + + +class AsyncConnectionInterface(AsyncRequestInterface): + async def aclose(self) -> None: + raise NotImplementedError() # pragma: nocover + + def info(self) -> str: + raise NotImplementedError() # pragma: nocover + + def can_handle_request(self, origin: Origin) -> bool: + raise NotImplementedError() # pragma: nocover + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an + outgoing request. + + An HTTP/1.1 connection will only be available if it is currently idle. + + An HTTP/2 connection will be available so long as the stream ID space is + not yet exhausted, and the connection is not in an error state. + + While the connection is being established we may not yet know if it is going + to result in an HTTP/1.1 or HTTP/2 connection. The connection should be + treated as being available, but might ultimately raise `NewConnectionRequired` + exceptions if multiple requests are attempted over a connection + that ends up being established as HTTP/1.1. + """ + raise NotImplementedError() # pragma: nocover + + def has_expired(self) -> bool: + """ + Return `True` if the connection is in a state where it should be closed. + + This either means that the connection is idle and it has passed the + expiry time on its keep-alive, or that the server has sent an EOF. + """ + raise NotImplementedError() # pragma: nocover + + def is_idle(self) -> bool: + """ + Return `True` if the connection is currently idle. + """ + raise NotImplementedError() # pragma: nocover + + def is_closed(self) -> bool: + """ + Return `True` if the connection has been closed. + + Used when a response is closed to determine if the connection may be + returned to the connection pool or not.
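`AsyncRequestInterface` above offers two calling conventions: `.request()` buffers the body and closes the response for you, while `.stream()` is a context manager that lets the body be iterated incrementally and releases the connection on exit. A small sketch against a connection pool; the URL is a placeholder:

```python
import asyncio

import httpcore

async def main() -> None:
    async with httpcore.AsyncConnectionPool() as pool:
        # Buffered: the body is fully read before the response is returned.
        response = await pool.request("GET", "https://www.example.com/")
        print(response.status, len(response.content))

        # Streaming: chunks are consumed as they arrive, and the response
        # is closed when the context manager exits.
        async with pool.stream("GET", "https://www.example.com/") as response:
            async for chunk in response.aiter_stream():
                print(len(chunk))

asyncio.run(main())
```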
+ """ + raise NotImplementedError() # pragma: nocover diff --git a/venv/lib/python3.11/site-packages/httpcore/_async/socks_proxy.py b/venv/lib/python3.11/site-packages/httpcore/_async/socks_proxy.py new file mode 100644 index 0000000..b363f55 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore/_async/socks_proxy.py @@ -0,0 +1,341 @@ +from __future__ import annotations + +import logging +import ssl + +import socksio + +from .._backends.auto import AutoBackend +from .._backends.base import AsyncNetworkBackend, AsyncNetworkStream +from .._exceptions import ConnectionNotAvailable, ProxyError +from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url +from .._ssl import default_ssl_context +from .._synchronization import AsyncLock +from .._trace import Trace +from .connection_pool import AsyncConnectionPool +from .http11 import AsyncHTTP11Connection +from .interfaces import AsyncConnectionInterface + +logger = logging.getLogger("httpcore.socks") + + +AUTH_METHODS = { + b"\x00": "NO AUTHENTICATION REQUIRED", + b"\x01": "GSSAPI", + b"\x02": "USERNAME/PASSWORD", + b"\xff": "NO ACCEPTABLE METHODS", +} + +REPLY_CODES = { + b"\x00": "Succeeded", + b"\x01": "General SOCKS server failure", + b"\x02": "Connection not allowed by ruleset", + b"\x03": "Network unreachable", + b"\x04": "Host unreachable", + b"\x05": "Connection refused", + b"\x06": "TTL expired", + b"\x07": "Command not supported", + b"\x08": "Address type not supported", +} + + +async def _init_socks5_connection( + stream: AsyncNetworkStream, + *, + host: bytes, + port: int, + auth: tuple[bytes, bytes] | None = None, +) -> None: + conn = socksio.socks5.SOCKS5Connection() + + # Auth method request + auth_method = ( + socksio.socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED + if auth is None + else socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD + ) + conn.send(socksio.socks5.SOCKS5AuthMethodsRequest([auth_method])) + outgoing_bytes = conn.data_to_send() + await stream.write(outgoing_bytes) + + # Auth method response + incoming_bytes = await stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socksio.socks5.SOCKS5AuthReply) + if response.method != auth_method: + requested = AUTH_METHODS.get(auth_method, "UNKNOWN") + responded = AUTH_METHODS.get(response.method, "UNKNOWN") + raise ProxyError( + f"Requested {requested} from proxy server, but got {responded}." 
+ ) + + if response.method == socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD: + # Username/password request + assert auth is not None + username, password = auth + conn.send(socksio.socks5.SOCKS5UsernamePasswordRequest(username, password)) + outgoing_bytes = conn.data_to_send() + await stream.write(outgoing_bytes) + + # Username/password response + incoming_bytes = await stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socksio.socks5.SOCKS5UsernamePasswordReply) + if not response.success: + raise ProxyError("Invalid username/password") + + # Connect request + conn.send( + socksio.socks5.SOCKS5CommandRequest.from_address( + socksio.socks5.SOCKS5Command.CONNECT, (host, port) + ) + ) + outgoing_bytes = conn.data_to_send() + await stream.write(outgoing_bytes) + + # Connect response + incoming_bytes = await stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socksio.socks5.SOCKS5Reply) + if response.reply_code != socksio.socks5.SOCKS5ReplyCode.SUCCEEDED: + reply_code = REPLY_CODES.get(response.reply_code, "UNKNOWN") + raise ProxyError(f"Proxy Server could not connect: {reply_code}.") + + +class AsyncSOCKSProxy(AsyncConnectionPool): # pragma: nocover + """ + A connection pool that sends requests via a SOCKS proxy. + """ + + def __init__( + self, + proxy_url: URL | bytes | str, + proxy_auth: tuple[bytes | str, bytes | str] | None = None, + ssl_context: ssl.SSLContext | None = None, + max_connections: int | None = 10, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + network_backend: AsyncNetworkBackend | None = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + proxy_url: The URL to use when connecting to the proxy server. + For example `"socks5://127.0.0.1:1080/"`. + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish + a connection. + network_backend: A backend instance to use for handling network I/O.
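A hedged usage sketch for the `AsyncSOCKSProxy` pool above; the proxy address and credentials are placeholders, and the `socksio` package imported at the top of this module must be installed:

```python
import asyncio

import httpcore

async def main() -> None:
    async with httpcore.AsyncSOCKSProxy(
        proxy_url="socks5://127.0.0.1:1080/",  # placeholder SOCKS5 proxy
        proxy_auth=(b"user", b"pass"),  # optional username/password auth
    ) as proxy:
        response = await proxy.request("GET", "https://www.example.com/")
        print(response.status)

asyncio.run(main())
```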
+ """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + ) + self._ssl_context = ssl_context + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if proxy_auth is not None: + username, password = proxy_auth + username_bytes = enforce_bytes(username, name="proxy_auth") + password_bytes = enforce_bytes(password, name="proxy_auth") + self._proxy_auth: tuple[bytes, bytes] | None = ( + username_bytes, + password_bytes, + ) + else: + self._proxy_auth = None + + def create_connection(self, origin: Origin) -> AsyncConnectionInterface: + return AsyncSocks5Connection( + proxy_origin=self._proxy_url.origin, + remote_origin=origin, + proxy_auth=self._proxy_auth, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class AsyncSocks5Connection(AsyncConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_auth: tuple[bytes, bytes] | None = None, + ssl_context: ssl.SSLContext | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + network_backend: AsyncNetworkBackend | None = None, + ) -> None: + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._proxy_auth = proxy_auth + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + + self._network_backend: AsyncNetworkBackend = ( + AutoBackend() if network_backend is None else network_backend + ) + self._connect_lock = AsyncLock() + self._connection: AsyncConnectionInterface | None = None + self._connect_failed = False + + async def handle_async_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + async with self._connect_lock: + if self._connection is None: + try: + # Connect to the proxy + kwargs = { + "host": self._proxy_origin.host.decode("ascii"), + "port": self._proxy_origin.port, + "timeout": timeout, + } + async with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = await self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + + # Connect to the remote host using socks5 + kwargs = { + "stream": stream, + "host": self._remote_origin.host.decode("ascii"), + "port": self._remote_origin.port, + "auth": self._proxy_auth, + } + async with Trace( + "setup_socks5_connection", logger, request, kwargs + ) as trace: + await _init_socks5_connection(**kwargs) + trace.return_value = stream + + # Upgrade the stream to SSL + if self._remote_origin.scheme == b"https": + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ( + ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ) + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + async with Trace("start_tls", logger, request, kwargs) as trace: + stream = await stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = 
stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or ( + self._http2 and not self._http1 + ): # pragma: nocover + from .http2 import AsyncHTTP2Connection + + self._connection = AsyncHTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = AsyncHTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except Exception as exc: + self._connect_failed = True + raise exc + elif not self._connection.is_available(): # pragma: nocover + raise ConnectionNotAvailable() + + return await self._connection.handle_async_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + async def aclose(self) -> None: + if self._connection is not None: + await self._connection.aclose() + + def is_available(self) -> bool: + if self._connection is None: # pragma: nocover + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._remote_origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: # pragma: nocover + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/venv/lib/python3.11/site-packages/httpcore/_backends/__init__.py b/venv/lib/python3.11/site-packages/httpcore/_backends/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.11/site-packages/httpcore/_backends/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpcore/_backends/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..7b2e90d Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpcore/_backends/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpcore/_backends/__pycache__/anyio.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpcore/_backends/__pycache__/anyio.cpython-311.pyc new file mode 100644 index 0000000..a9e971c Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpcore/_backends/__pycache__/anyio.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpcore/_backends/__pycache__/auto.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpcore/_backends/__pycache__/auto.cpython-311.pyc new file mode 100644 index 0000000..e288d5f Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpcore/_backends/__pycache__/auto.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpcore/_backends/__pycache__/base.cpython-311.pyc 
b/venv/lib/python3.11/site-packages/httpcore/_backends/__pycache__/base.cpython-311.pyc new file mode 100644 index 0000000..5b20f83 Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpcore/_backends/__pycache__/base.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpcore/_backends/__pycache__/mock.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpcore/_backends/__pycache__/mock.cpython-311.pyc new file mode 100644 index 0000000..f475653 Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpcore/_backends/__pycache__/mock.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpcore/_backends/__pycache__/sync.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpcore/_backends/__pycache__/sync.cpython-311.pyc new file mode 100644 index 0000000..2b3abe1 Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpcore/_backends/__pycache__/sync.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpcore/_backends/__pycache__/trio.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpcore/_backends/__pycache__/trio.cpython-311.pyc new file mode 100644 index 0000000..3d28784 Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpcore/_backends/__pycache__/trio.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpcore/_backends/anyio.py b/venv/lib/python3.11/site-packages/httpcore/_backends/anyio.py new file mode 100644 index 0000000..a140095 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore/_backends/anyio.py @@ -0,0 +1,146 @@ +from __future__ import annotations + +import ssl +import typing + +import anyio + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .._utils import is_socket_readable +from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream + + +class AnyIOStream(AsyncNetworkStream): + def __init__(self, stream: anyio.abc.ByteStream) -> None: + self._stream = stream + + async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + exc_map = { + TimeoutError: ReadTimeout, + anyio.BrokenResourceError: ReadError, + anyio.ClosedResourceError: ReadError, + anyio.EndOfStream: ReadError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + try: + return await self._stream.receive(max_bytes=max_bytes) + except anyio.EndOfStream: # pragma: nocover + return b"" + + async def write(self, buffer: bytes, timeout: float | None = None) -> None: + if not buffer: + return + + exc_map = { + TimeoutError: WriteTimeout, + anyio.BrokenResourceError: WriteError, + anyio.ClosedResourceError: WriteError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + await self._stream.send(item=buffer) + + async def aclose(self) -> None: + await self._stream.aclose() + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> AsyncNetworkStream: + exc_map = { + TimeoutError: ConnectTimeout, + anyio.BrokenResourceError: ConnectError, + anyio.EndOfStream: ConnectError, + ssl.SSLError: ConnectError, + } + with map_exceptions(exc_map): + try: + with anyio.fail_after(timeout): + ssl_stream = await anyio.streams.tls.TLSStream.wrap( + self._stream, + ssl_context=ssl_context, + hostname=server_hostname, + standard_compatible=False, + server_side=False, + ) + except Exception as exc: # pragma: nocover + await self.aclose() + raise exc + 
return AnyIOStream(ssl_stream) + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object": + return self._stream.extra(anyio.streams.tls.TLSAttribute.ssl_object, None) + if info == "client_addr": + return self._stream.extra(anyio.abc.SocketAttribute.local_address, None) + if info == "server_addr": + return self._stream.extra(anyio.abc.SocketAttribute.remote_address, None) + if info == "socket": + return self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) + if info == "is_readable": + sock = self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) + return is_socket_readable(sock) + return None + + +class AnyIOBackend(AsyncNetworkBackend): + async def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: # pragma: nocover + if socket_options is None: + socket_options = [] + exc_map = { + TimeoutError: ConnectTimeout, + OSError: ConnectError, + anyio.BrokenResourceError: ConnectError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + stream: anyio.abc.ByteStream = await anyio.connect_tcp( + remote_host=host, + remote_port=port, + local_host=local_address, + ) + # By default TCP sockets opened in `asyncio` include TCP_NODELAY. + for option in socket_options: + stream._raw_socket.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return AnyIOStream(stream) + + async def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: # pragma: nocover + if socket_options is None: + socket_options = [] + exc_map = { + TimeoutError: ConnectTimeout, + OSError: ConnectError, + anyio.BrokenResourceError: ConnectError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + stream: anyio.abc.ByteStream = await anyio.connect_unix(path) + for option in socket_options: + stream._raw_socket.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return AnyIOStream(stream) + + async def sleep(self, seconds: float) -> None: + await anyio.sleep(seconds) # pragma: nocover diff --git a/venv/lib/python3.11/site-packages/httpcore/_backends/auto.py b/venv/lib/python3.11/site-packages/httpcore/_backends/auto.py new file mode 100644 index 0000000..49f0e69 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore/_backends/auto.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +import typing + +from .._synchronization import current_async_library +from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream + + +class AutoBackend(AsyncNetworkBackend): + async def _init_backend(self) -> None: + if not (hasattr(self, "_backend")): + backend = current_async_library() + if backend == "trio": + from .trio import TrioBackend + + self._backend: AsyncNetworkBackend = TrioBackend() + else: + from .anyio import AnyIOBackend + + self._backend = AnyIOBackend() + + async def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: + await self._init_backend() + return await self._backend.connect_tcp( + host, + port, + timeout=timeout, + local_address=local_address, + socket_options=socket_options, + ) + + async def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + 
socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: # pragma: nocover + await self._init_backend() + return await self._backend.connect_unix_socket( + path, timeout=timeout, socket_options=socket_options + ) + + async def sleep(self, seconds: float) -> None: # pragma: nocover + await self._init_backend() + return await self._backend.sleep(seconds) diff --git a/venv/lib/python3.11/site-packages/httpcore/_backends/base.py b/venv/lib/python3.11/site-packages/httpcore/_backends/base.py new file mode 100644 index 0000000..cf55c8b --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore/_backends/base.py @@ -0,0 +1,101 @@ +from __future__ import annotations + +import ssl +import time +import typing + +SOCKET_OPTION = typing.Union[ + typing.Tuple[int, int, int], + typing.Tuple[int, int, typing.Union[bytes, bytearray]], + typing.Tuple[int, int, None, int], +] + + +class NetworkStream: + def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + raise NotImplementedError() # pragma: nocover + + def write(self, buffer: bytes, timeout: float | None = None) -> None: + raise NotImplementedError() # pragma: nocover + + def close(self) -> None: + raise NotImplementedError() # pragma: nocover + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> NetworkStream: + raise NotImplementedError() # pragma: nocover + + def get_extra_info(self, info: str) -> typing.Any: + return None # pragma: nocover + + +class NetworkBackend: + def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> NetworkStream: + raise NotImplementedError() # pragma: nocover + + def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> NetworkStream: + raise NotImplementedError() # pragma: nocover + + def sleep(self, seconds: float) -> None: + time.sleep(seconds) # pragma: nocover + + +class AsyncNetworkStream: + async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + raise NotImplementedError() # pragma: nocover + + async def write(self, buffer: bytes, timeout: float | None = None) -> None: + raise NotImplementedError() # pragma: nocover + + async def aclose(self) -> None: + raise NotImplementedError() # pragma: nocover + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> AsyncNetworkStream: + raise NotImplementedError() # pragma: nocover + + def get_extra_info(self, info: str) -> typing.Any: + return None # pragma: nocover + + +class AsyncNetworkBackend: + async def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: + raise NotImplementedError() # pragma: nocover + + async def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: + raise NotImplementedError() # pragma: nocover + + async def sleep(self, seconds: float) -> None: + raise NotImplementedError() # pragma: nocover diff --git a/venv/lib/python3.11/site-packages/httpcore/_backends/mock.py b/venv/lib/python3.11/site-packages/httpcore/_backends/mock.py new 
file mode 100644 index 0000000..9b6edca --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore/_backends/mock.py @@ -0,0 +1,143 @@ +from __future__ import annotations + +import ssl +import typing + +from .._exceptions import ReadError +from .base import ( + SOCKET_OPTION, + AsyncNetworkBackend, + AsyncNetworkStream, + NetworkBackend, + NetworkStream, +) + + +class MockSSLObject: + def __init__(self, http2: bool): + self._http2 = http2 + + def selected_alpn_protocol(self) -> str: + return "h2" if self._http2 else "http/1.1" + + +class MockStream(NetworkStream): + def __init__(self, buffer: list[bytes], http2: bool = False) -> None: + self._buffer = buffer + self._http2 = http2 + self._closed = False + + def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + if self._closed: + raise ReadError("Connection closed") + if not self._buffer: + return b"" + return self._buffer.pop(0) + + def write(self, buffer: bytes, timeout: float | None = None) -> None: + pass + + def close(self) -> None: + self._closed = True + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> NetworkStream: + return self + + def get_extra_info(self, info: str) -> typing.Any: + return MockSSLObject(http2=self._http2) if info == "ssl_object" else None + + def __repr__(self) -> str: + return "<httpcore.MockStream>" + + +class MockBackend(NetworkBackend): + def __init__(self, buffer: list[bytes], http2: bool = False) -> None: + self._buffer = buffer + self._http2 = http2 + + def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> NetworkStream: + return MockStream(list(self._buffer), http2=self._http2) + + def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> NetworkStream: + return MockStream(list(self._buffer), http2=self._http2) + + def sleep(self, seconds: float) -> None: + pass + + +class AsyncMockStream(AsyncNetworkStream): + def __init__(self, buffer: list[bytes], http2: bool = False) -> None: + self._buffer = buffer + self._http2 = http2 + self._closed = False + + async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + if self._closed: + raise ReadError("Connection closed") + if not self._buffer: + return b"" + return self._buffer.pop(0) + + async def write(self, buffer: bytes, timeout: float | None = None) -> None: + pass + + async def aclose(self) -> None: + self._closed = True + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> AsyncNetworkStream: + return self + + def get_extra_info(self, info: str) -> typing.Any: + return MockSSLObject(http2=self._http2) if info == "ssl_object" else None + + def __repr__(self) -> str: + return "<httpcore.AsyncMockStream>" + + +class AsyncMockBackend(AsyncNetworkBackend): + def __init__(self, buffer: list[bytes], http2: bool = False) -> None: + self._buffer = buffer + self._http2 = http2 + + async def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: + return AsyncMockStream(list(self._buffer), http2=self._http2) + + async def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options:
typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: + return AsyncMockStream(list(self._buffer), http2=self._http2) + + async def sleep(self, seconds: float) -> None: + pass diff --git a/venv/lib/python3.11/site-packages/httpcore/_backends/sync.py b/venv/lib/python3.11/site-packages/httpcore/_backends/sync.py new file mode 100644 index 0000000..4018a09 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore/_backends/sync.py @@ -0,0 +1,241 @@ +from __future__ import annotations + +import functools +import socket +import ssl +import sys +import typing + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ExceptionMapping, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .._utils import is_socket_readable +from .base import SOCKET_OPTION, NetworkBackend, NetworkStream + + +class TLSinTLSStream(NetworkStream): # pragma: no cover + """ + Because the standard `SSLContext.wrap_socket` method does + not work for `SSLSocket` objects, we need this class + to implement TLS stream using an underlying `SSLObject` + instance in order to support TLS on top of TLS. + """ + + # Defined in RFC 8449 + TLS_RECORD_SIZE = 16384 + + def __init__( + self, + sock: socket.socket, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ): + self._sock = sock + self._incoming = ssl.MemoryBIO() + self._outgoing = ssl.MemoryBIO() + + self.ssl_obj = ssl_context.wrap_bio( + incoming=self._incoming, + outgoing=self._outgoing, + server_hostname=server_hostname, + ) + + self._sock.settimeout(timeout) + self._perform_io(self.ssl_obj.do_handshake) + + def _perform_io( + self, + func: typing.Callable[..., typing.Any], + ) -> typing.Any: + ret = None + + while True: + errno = None + try: + ret = func() + except (ssl.SSLWantReadError, ssl.SSLWantWriteError) as e: + errno = e.errno + + self._sock.sendall(self._outgoing.read()) + + if errno == ssl.SSL_ERROR_WANT_READ: + buf = self._sock.recv(self.TLS_RECORD_SIZE) + + if buf: + self._incoming.write(buf) + else: + self._incoming.write_eof() + if errno is None: + return ret + + def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} + with map_exceptions(exc_map): + self._sock.settimeout(timeout) + return typing.cast( + bytes, self._perform_io(functools.partial(self.ssl_obj.read, max_bytes)) + ) + + def write(self, buffer: bytes, timeout: float | None = None) -> None: + exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError} + with map_exceptions(exc_map): + self._sock.settimeout(timeout) + while buffer: + nsent = self._perform_io(functools.partial(self.ssl_obj.write, buffer)) + buffer = buffer[nsent:] + + def close(self) -> None: + self._sock.close() + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> NetworkStream: + raise NotImplementedError() + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object": + return self.ssl_obj + if info == "client_addr": + return self._sock.getsockname() + if info == "server_addr": + return self._sock.getpeername() + if info == "socket": + return self._sock + if info == "is_readable": + return is_socket_readable(self._sock) + return None + + +class SyncStream(NetworkStream): + def __init__(self, sock: socket.socket) -> None: + self._sock = sock + + def read(self, max_bytes: int, timeout: float | None = None) 
-> bytes: + exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} + with map_exceptions(exc_map): + self._sock.settimeout(timeout) + return self._sock.recv(max_bytes) + + def write(self, buffer: bytes, timeout: float | None = None) -> None: + if not buffer: + return + + exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError} + with map_exceptions(exc_map): + while buffer: + self._sock.settimeout(timeout) + n = self._sock.send(buffer) + buffer = buffer[n:] + + def close(self) -> None: + self._sock.close() + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> NetworkStream: + exc_map: ExceptionMapping = { + socket.timeout: ConnectTimeout, + OSError: ConnectError, + } + with map_exceptions(exc_map): + try: + if isinstance(self._sock, ssl.SSLSocket): # pragma: no cover + # If the underlying socket has already been upgraded + # to the TLS layer (i.e. is an instance of SSLSocket), + # we need some additional smarts to support TLS-in-TLS. + return TLSinTLSStream( + self._sock, ssl_context, server_hostname, timeout + ) + else: + self._sock.settimeout(timeout) + sock = ssl_context.wrap_socket( + self._sock, server_hostname=server_hostname + ) + except Exception as exc: # pragma: nocover + self.close() + raise exc + return SyncStream(sock) + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object" and isinstance(self._sock, ssl.SSLSocket): + return self._sock._sslobj # type: ignore + if info == "client_addr": + return self._sock.getsockname() + if info == "server_addr": + return self._sock.getpeername() + if info == "socket": + return self._sock + if info == "is_readable": + return is_socket_readable(self._sock) + return None + + +class SyncBackend(NetworkBackend): + def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> NetworkStream: + # Note that we automatically include `TCP_NODELAY` + # in addition to any other custom socket options. + if socket_options is None: + socket_options = [] # pragma: no cover + address = (host, port) + source_address = None if local_address is None else (local_address, 0) + exc_map: ExceptionMapping = { + socket.timeout: ConnectTimeout, + OSError: ConnectError, + } + + with map_exceptions(exc_map): + sock = socket.create_connection( + address, + timeout, + source_address=source_address, + ) + for option in socket_options: + sock.setsockopt(*option) # pragma: no cover + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + return SyncStream(sock) + + def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> NetworkStream: # pragma: nocover + if sys.platform == "win32": + raise RuntimeError( + "Attempted to connect to a UNIX socket on a Windows system." 
+ ) + if socket_options is None: + socket_options = [] + + exc_map: ExceptionMapping = { + socket.timeout: ConnectTimeout, + OSError: ConnectError, + } + with map_exceptions(exc_map): + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + for option in socket_options: + sock.setsockopt(*option) + sock.settimeout(timeout) + sock.connect(path) + return SyncStream(sock) diff --git a/venv/lib/python3.11/site-packages/httpcore/_backends/trio.py b/venv/lib/python3.11/site-packages/httpcore/_backends/trio.py new file mode 100644 index 0000000..6f53f5f --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore/_backends/trio.py @@ -0,0 +1,159 @@ +from __future__ import annotations + +import ssl +import typing + +import trio + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ExceptionMapping, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream + + +class TrioStream(AsyncNetworkStream): + def __init__(self, stream: trio.abc.Stream) -> None: + self._stream = stream + + async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ReadTimeout, + trio.BrokenResourceError: ReadError, + trio.ClosedResourceError: ReadError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + data: bytes = await self._stream.receive_some(max_bytes=max_bytes) + return data + + async def write(self, buffer: bytes, timeout: float | None = None) -> None: + if not buffer: + return + + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: WriteTimeout, + trio.BrokenResourceError: WriteError, + trio.ClosedResourceError: WriteError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + await self._stream.send_all(data=buffer) + + async def aclose(self) -> None: + await self._stream.aclose() + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> AsyncNetworkStream: + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + } + ssl_stream = trio.SSLStream( + self._stream, + ssl_context=ssl_context, + server_hostname=server_hostname, + https_compatible=True, + server_side=False, + ) + with map_exceptions(exc_map): + try: + with trio.fail_after(timeout_or_inf): + await ssl_stream.do_handshake() + except Exception as exc: # pragma: nocover + await self.aclose() + raise exc + return TrioStream(ssl_stream) + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object" and isinstance(self._stream, trio.SSLStream): + # Type checkers cannot see `_ssl_object` attribute because trio._ssl.SSLStream uses __getattr__/__setattr__. 
+ # Tracked at https://github.com/python-trio/trio/issues/542 + return self._stream._ssl_object # type: ignore[attr-defined] + if info == "client_addr": + return self._get_socket_stream().socket.getsockname() + if info == "server_addr": + return self._get_socket_stream().socket.getpeername() + if info == "socket": + stream = self._stream + while isinstance(stream, trio.SSLStream): + stream = stream.transport_stream + assert isinstance(stream, trio.SocketStream) + return stream.socket + if info == "is_readable": + socket = self.get_extra_info("socket") + return socket.is_readable() + return None + + def _get_socket_stream(self) -> trio.SocketStream: + stream = self._stream + while isinstance(stream, trio.SSLStream): + stream = stream.transport_stream + assert isinstance(stream, trio.SocketStream) + return stream + + +class TrioBackend(AsyncNetworkBackend): + async def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: + # By default for TCP sockets, trio enables TCP_NODELAY. + # https://trio.readthedocs.io/en/stable/reference-io.html#trio.SocketStream + if socket_options is None: + socket_options = [] # pragma: no cover + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + OSError: ConnectError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + stream: trio.abc.Stream = await trio.open_tcp_stream( + host=host, port=port, local_address=local_address + ) + for option in socket_options: + stream.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return TrioStream(stream) + + async def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: # pragma: nocover + if socket_options is None: + socket_options = [] + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + OSError: ConnectError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + stream: trio.abc.Stream = await trio.open_unix_socket(path) + for option in socket_options: + stream.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return TrioStream(stream) + + async def sleep(self, seconds: float) -> None: + await trio.sleep(seconds) # pragma: nocover diff --git a/venv/lib/python3.11/site-packages/httpcore/_exceptions.py b/venv/lib/python3.11/site-packages/httpcore/_exceptions.py new file mode 100644 index 0000000..bc28d44 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore/_exceptions.py @@ -0,0 +1,81 @@ +import contextlib +import typing + +ExceptionMapping = typing.Mapping[typing.Type[Exception], typing.Type[Exception]] + + +@contextlib.contextmanager +def map_exceptions(map: ExceptionMapping) -> typing.Iterator[None]: + try: + yield + except Exception as exc: # noqa: PIE786 + for from_exc, to_exc in map.items(): + if isinstance(exc, from_exc): + raise to_exc(exc) from exc + raise # pragma: nocover + + +class ConnectionNotAvailable(Exception): + pass + + +class ProxyError(Exception): + pass + + +class UnsupportedProtocol(Exception): + pass + + +class ProtocolError(Exception): + pass + + +class RemoteProtocolError(ProtocolError): + pass + 
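The `map_exceptions()` context manager at the top of this file is the mechanism by which low-level backend errors (socket timeouts, `OSError`, anyio/trio resource errors) surface as the httpcore exception hierarchy defined here. A minimal sketch of its behavior; note that `map_exceptions` is a private helper, while the exception classes are public re-exports:

```python
import socket

from httpcore import ConnectError, ConnectTimeout
from httpcore._exceptions import map_exceptions  # private helper shown above

exc_map = {socket.timeout: ConnectTimeout, OSError: ConnectError}

try:
    with map_exceptions(exc_map):
        raise OSError("connection refused")  # simulate a low-level failure
except ConnectError as exc:
    # The first matching entry wins, and the original error is
    # chained as the cause via `raise to_exc(exc) from exc`.
    assert isinstance(exc.__cause__, OSError)
```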
+ +class LocalProtocolError(ProtocolError): + pass + + +# Timeout errors + + +class TimeoutException(Exception): + pass + + +class PoolTimeout(TimeoutException): + pass + + +class ConnectTimeout(TimeoutException): + pass + + +class ReadTimeout(TimeoutException): + pass + + +class WriteTimeout(TimeoutException): + pass + + +# Network errors + + +class NetworkError(Exception): + pass + + +class ConnectError(NetworkError): + pass + + +class ReadError(NetworkError): + pass + + +class WriteError(NetworkError): + pass diff --git a/venv/lib/python3.11/site-packages/httpcore/_models.py b/venv/lib/python3.11/site-packages/httpcore/_models.py new file mode 100644 index 0000000..8a65f13 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore/_models.py @@ -0,0 +1,516 @@ +from __future__ import annotations + +import base64 +import ssl +import typing +import urllib.parse + +# Functions for typechecking... + + +ByteOrStr = typing.Union[bytes, str] +HeadersAsSequence = typing.Sequence[typing.Tuple[ByteOrStr, ByteOrStr]] +HeadersAsMapping = typing.Mapping[ByteOrStr, ByteOrStr] +HeaderTypes = typing.Union[HeadersAsSequence, HeadersAsMapping, None] + +Extensions = typing.MutableMapping[str, typing.Any] + + +def enforce_bytes(value: bytes | str, *, name: str) -> bytes: + """ + Any arguments that are ultimately represented as bytes can be specified + either as bytes or as strings. + + However we enforce that any string arguments must only contain characters in + the plain ASCII range. chr(0)...chr(127). If you need to use characters + outside that range then be precise, and use a byte-wise argument. + """ + if isinstance(value, str): + try: + return value.encode("ascii") + except UnicodeEncodeError: + raise TypeError(f"{name} strings may not include unicode characters.") + elif isinstance(value, bytes): + return value + + seen_type = type(value).__name__ + raise TypeError(f"{name} must be bytes or str, but got {seen_type}.") + + +def enforce_url(value: URL | bytes | str, *, name: str) -> URL: + """ + Type check for URL parameters. + """ + if isinstance(value, (bytes, str)): + return URL(value) + elif isinstance(value, URL): + return value + + seen_type = type(value).__name__ + raise TypeError(f"{name} must be a URL, bytes, or str, but got {seen_type}.") + + +def enforce_headers( + value: HeadersAsMapping | HeadersAsSequence | None = None, *, name: str +) -> list[tuple[bytes, bytes]]: + """ + Convenience function that ensures all items in request or response headers + are either bytes or strings in the plain ASCII range. + """ + if value is None: + return [] + elif isinstance(value, typing.Mapping): + return [ + ( + enforce_bytes(k, name="header name"), + enforce_bytes(v, name="header value"), + ) + for k, v in value.items() + ] + elif isinstance(value, typing.Sequence): + return [ + ( + enforce_bytes(k, name="header name"), + enforce_bytes(v, name="header value"), + ) + for k, v in value + ] + + seen_type = type(value).__name__ + raise TypeError( + f"{name} must be a mapping or sequence of two-tuples, but got {seen_type}."
+ ) + + +def enforce_stream( + value: bytes | typing.Iterable[bytes] | typing.AsyncIterable[bytes] | None, + *, + name: str, +) -> typing.Iterable[bytes] | typing.AsyncIterable[bytes]: + if value is None: + return ByteStream(b"") + elif isinstance(value, bytes): + return ByteStream(value) + return value + + +# * https://tools.ietf.org/html/rfc3986#section-3.2.3 +# * https://url.spec.whatwg.org/#url-miscellaneous +# * https://url.spec.whatwg.org/#scheme-state +DEFAULT_PORTS = { + b"ftp": 21, + b"http": 80, + b"https": 443, + b"ws": 80, + b"wss": 443, +} + + +def include_request_headers( + headers: list[tuple[bytes, bytes]], + *, + url: "URL", + content: None | bytes | typing.Iterable[bytes] | typing.AsyncIterable[bytes], +) -> list[tuple[bytes, bytes]]: + headers_set = set(k.lower() for k, v in headers) + + if b"host" not in headers_set: + default_port = DEFAULT_PORTS.get(url.scheme) + if url.port is None or url.port == default_port: + header_value = url.host + else: + header_value = b"%b:%d" % (url.host, url.port) + headers = [(b"Host", header_value)] + headers + + if ( + content is not None + and b"content-length" not in headers_set + and b"transfer-encoding" not in headers_set + ): + if isinstance(content, bytes): + content_length = str(len(content)).encode("ascii") + headers += [(b"Content-Length", content_length)] + else: + headers += [(b"Transfer-Encoding", b"chunked")] # pragma: nocover + + return headers + + +# Interfaces for byte streams... + + +class ByteStream: + """ + A container for non-streaming content, which supports both sync and async + stream iteration. + """ + + def __init__(self, content: bytes) -> None: + self._content = content + + def __iter__(self) -> typing.Iterator[bytes]: + yield self._content + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + yield self._content + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{len(self._content)} bytes]>" + + +class Origin: + def __init__(self, scheme: bytes, host: bytes, port: int) -> None: + self.scheme = scheme + self.host = host + self.port = port + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, Origin) + and self.scheme == other.scheme + and self.host == other.host + and self.port == other.port + ) + + def __str__(self) -> str: + scheme = self.scheme.decode("ascii") + host = self.host.decode("ascii") + port = str(self.port) + return f"{scheme}://{host}:{port}" + + +class URL: + """ + Represents the URL against which an HTTP request may be made. + + The URL may either be specified as a plain string, for convenience: + + ```python + url = httpcore.URL("https://www.example.com/") + ``` + + Or be constructed with explicitly pre-parsed components: + + ```python + url = httpcore.URL(scheme=b'https', host=b'www.example.com', port=None, target=b'/') + ``` + + Using this second more explicit style allows integrations that are using + `httpcore` to pass through URLs that have already been parsed in order to use + libraries such as `rfc-3986` rather than relying on the stdlib. It also ensures + that URL parsing is treated identically at both the networking level and at any + higher layers of abstraction. + + The four components are important here, as they allow the URL to be precisely + specified in a pre-parsed format. They also allow certain types of request to + be created that could not otherwise be expressed. + + For example, an HTTP request to `http://www.example.com/` forwarded via a proxy + at `http://localhost:8080`...
+
+    ```python
+    # Constructs an HTTP request with a complete URL as the target:
+    # GET https://www.example.com/ HTTP/1.1
+    url = httpcore.URL(
+        scheme=b'http',
+        host=b'localhost',
+        port=8080,
+        target=b'https://www.example.com/'
+    )
+    request = httpcore.Request(
+        method="GET",
+        url=url
+    )
+    ```
+
+    Another example is constructing an `OPTIONS *` request...
+
+    ```python
+    # Constructs an 'OPTIONS *' HTTP request:
+    # OPTIONS * HTTP/1.1
+    url = httpcore.URL(scheme=b'https', host=b'www.example.com', target=b'*')
+    request = httpcore.Request(method="OPTIONS", url=url)
+    ```
+
+    This kind of request is not possible to formulate with a URL string,
+    because the `/` delimiter is always used to demarcate the target from the
+    host/port portion of the URL.
+
+    For convenience, string-like arguments may be specified either as strings or
+    as bytes. However, once a request is being issued over-the-wire, the URL
+    components are always ultimately required to be a bytewise representation.
+
+    In order to avoid any ambiguity over character encodings, when strings are used
+    as arguments, they must be strictly limited to the ASCII range `chr(0)`-`chr(127)`.
+    If you require a bytewise representation that is outside this range you must
+    handle the character encoding directly, and pass a bytes instance.
+    """
+
+    def __init__(
+        self,
+        url: bytes | str = "",
+        *,
+        scheme: bytes | str = b"",
+        host: bytes | str = b"",
+        port: int | None = None,
+        target: bytes | str = b"",
+    ) -> None:
+        """
+        Parameters:
+            url: The complete URL as a string or bytes.
+            scheme: The URL scheme as a string or bytes.
+                Typically either `"http"` or `"https"`.
+            host: The URL host as a string or bytes. Such as `"www.example.com"`.
+            port: The port to connect to. Either an integer or `None`.
+            target: The target of the HTTP request. Such as `"/items?search=red"`.
+        """
+        if url:
+            parsed = urllib.parse.urlparse(enforce_bytes(url, name="url"))
+            self.scheme = parsed.scheme
+            self.host = parsed.hostname or b""
+            self.port = parsed.port
+            self.target = (parsed.path or b"/") + (
+                b"?" + parsed.query if parsed.query else b""
+            )
+        else:
+            self.scheme = enforce_bytes(scheme, name="scheme")
+            self.host = enforce_bytes(host, name="host")
+            self.port = port
+            self.target = enforce_bytes(target, name="target")
+
+    @property
+    def origin(self) -> Origin:
+        default_port = {
+            b"http": 80,
+            b"https": 443,
+            b"ws": 80,
+            b"wss": 443,
+            b"socks5": 1080,
+            b"socks5h": 1080,
+        }[self.scheme]
+        return Origin(
+            scheme=self.scheme, host=self.host, port=self.port or default_port
+        )
+
+    def __eq__(self, other: typing.Any) -> bool:
+        return (
+            isinstance(other, URL)
+            and other.scheme == self.scheme
+            and other.host == self.host
+            and other.port == self.port
+            and other.target == self.target
+        )
+
+    def __bytes__(self) -> bytes:
+        if self.port is None:
+            return b"%b://%b%b" % (self.scheme, self.host, self.target)
+        return b"%b://%b:%d%b" % (self.scheme, self.host, self.port, self.target)
+
+    def __repr__(self) -> str:
+        return (
+            f"{self.__class__.__name__}(scheme={self.scheme!r}, "
+            f"host={self.host!r}, port={self.port!r}, target={self.target!r})"
+        )
+
+
+class Request:
+    """
+    An HTTP request.
+    """
+
+    def __init__(
+        self,
+        method: bytes | str,
+        url: URL | bytes | str,
+        *,
+        headers: HeaderTypes = None,
+        content: bytes
+        | typing.Iterable[bytes]
+        | typing.AsyncIterable[bytes]
+        | None = None,
+        extensions: Extensions | None = None,
+    ) -> None:
+        """
+        Parameters:
+            method: The HTTP request method, either as a string or bytes.
+                For example: `GET`.
+            url: The request URL, either as a `URL` instance, or as a string or bytes.
+                For example: `"https://www.example.com"`.
+            headers: The HTTP request headers.
+            content: The content of the request body.
+            extensions: A dictionary of optional extra information included on
+                the request. Possible keys include `"timeout"`, and `"trace"`.
+        """
+        self.method: bytes = enforce_bytes(method, name="method")
+        self.url: URL = enforce_url(url, name="url")
+        self.headers: list[tuple[bytes, bytes]] = enforce_headers(
+            headers, name="headers"
+        )
+        self.stream: typing.Iterable[bytes] | typing.AsyncIterable[bytes] = (
+            enforce_stream(content, name="content")
+        )
+        self.extensions = {} if extensions is None else extensions
+
+        if "target" in self.extensions:
+            self.url = URL(
+                scheme=self.url.scheme,
+                host=self.url.host,
+                port=self.url.port,
+                target=self.extensions["target"],
+            )
+
+    def __repr__(self) -> str:
+        return f"<{self.__class__.__name__} [{self.method!r}]>"
+
+
+class Response:
+    """
+    An HTTP response.
+    """
+
+    def __init__(
+        self,
+        status: int,
+        *,
+        headers: HeaderTypes = None,
+        content: bytes
+        | typing.Iterable[bytes]
+        | typing.AsyncIterable[bytes]
+        | None = None,
+        extensions: Extensions | None = None,
+    ) -> None:
+        """
+        Parameters:
+            status: The HTTP status code of the response. For example `200`.
+            headers: The HTTP response headers.
+            content: The content of the response body.
+            extensions: A dictionary of optional extra information included on
+                the response. Possible keys include `"http_version"`,
+                `"reason_phrase"`, and `"network_stream"`.
+        """
+        self.status: int = status
+        self.headers: list[tuple[bytes, bytes]] = enforce_headers(
+            headers, name="headers"
+        )
+        self.stream: typing.Iterable[bytes] | typing.AsyncIterable[bytes] = (
+            enforce_stream(content, name="content")
+        )
+        self.extensions = {} if extensions is None else extensions
+
+        self._stream_consumed = False
+
+    @property
+    def content(self) -> bytes:
+        if not hasattr(self, "_content"):
+            if isinstance(self.stream, typing.Iterable):
+                raise RuntimeError(
+                    "Attempted to access 'response.content' on a streaming response. "
+                    "Call 'response.read()' first."
+                )
+            else:
+                raise RuntimeError(
+                    "Attempted to access 'response.content' on a streaming response. "
+                    "Call 'await response.aread()' first."
+                )
+        return self._content
+
+    def __repr__(self) -> str:
+        return f"<{self.__class__.__name__} [{self.status}]>"
+
+    # Sync interface...
+
+    def read(self) -> bytes:
+        if not isinstance(self.stream, typing.Iterable):  # pragma: nocover
+            raise RuntimeError(
+                "Attempted to read an asynchronous response using 'response.read()'. "
+                "You should use 'await response.aread()' instead."
+            )
+        if not hasattr(self, "_content"):
+            self._content = b"".join([part for part in self.iter_stream()])
+        return self._content
+
+    def iter_stream(self) -> typing.Iterator[bytes]:
+        if not isinstance(self.stream, typing.Iterable):  # pragma: nocover
+            raise RuntimeError(
+                "Attempted to stream an asynchronous response using 'for ... in "
+                "response.iter_stream()'. "
+                "You should use 'async for ... in response.aiter_stream()' instead."
+            )
+        if self._stream_consumed:
+            raise RuntimeError(
+                "Attempted to call 'for ... in response.iter_stream()' more than once."
+            )
+        self._stream_consumed = True
+        for chunk in self.stream:
+            yield chunk
+
+    def close(self) -> None:
+        if not isinstance(self.stream, typing.Iterable):  # pragma: nocover
+            raise RuntimeError(
+                "Attempted to close an asynchronous response using 'response.close()'. "
+                "You should use 'await response.aclose()' instead."
+            )
+        if hasattr(self.stream, "close"):
+            self.stream.close()
+
+    # Async interface...
+
+    async def aread(self) -> bytes:
+        if not isinstance(self.stream, typing.AsyncIterable):  # pragma: nocover
+            raise RuntimeError(
+                "Attempted to read a synchronous response using "
+                "'await response.aread()'. "
+                "You should use 'response.read()' instead."
+            )
+        if not hasattr(self, "_content"):
+            self._content = b"".join([part async for part in self.aiter_stream()])
+        return self._content
+
+    async def aiter_stream(self) -> typing.AsyncIterator[bytes]:
+        if not isinstance(self.stream, typing.AsyncIterable):  # pragma: nocover
+            raise RuntimeError(
+                "Attempted to stream a synchronous response using 'async for ... in "
+                "response.aiter_stream()'. "
+                "You should use 'for ... in response.iter_stream()' instead."
+            )
+        if self._stream_consumed:
+            raise RuntimeError(
+                "Attempted to call 'async for ... in response.aiter_stream()' "
+                "more than once."
+            )
+        self._stream_consumed = True
+        async for chunk in self.stream:
+            yield chunk
+
+    async def aclose(self) -> None:
+        if not isinstance(self.stream, typing.AsyncIterable):  # pragma: nocover
+            raise RuntimeError(
+                "Attempted to close a synchronous response using "
+                "'await response.aclose()'. "
+                "You should use 'response.close()' instead."
+            )
+        if hasattr(self.stream, "aclose"):
+            await self.stream.aclose()
+
+
+class Proxy:
+    def __init__(
+        self,
+        url: URL | bytes | str,
+        auth: tuple[bytes | str, bytes | str] | None = None,
+        headers: HeadersAsMapping | HeadersAsSequence | None = None,
+        ssl_context: ssl.SSLContext | None = None,
+    ):
+        self.url = enforce_url(url, name="url")
+        self.headers = enforce_headers(headers, name="headers")
+        self.ssl_context = ssl_context
+
+        if auth is not None:
+            username = enforce_bytes(auth[0], name="auth")
+            password = enforce_bytes(auth[1], name="auth")
+            userpass = username + b":" + password
+            authorization = b"Basic " + base64.b64encode(userpass)
+            self.auth: tuple[bytes, bytes] | None = (username, password)
+            self.headers = [(b"Proxy-Authorization", authorization)] + self.headers
+        else:
+            self.auth = None
diff --git a/venv/lib/python3.11/site-packages/httpcore/_ssl.py b/venv/lib/python3.11/site-packages/httpcore/_ssl.py
new file mode 100644
index 0000000..c99c5a6
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/httpcore/_ssl.py
@@ -0,0 +1,9 @@
+import ssl
+
+import certifi
+
+
+def default_ssl_context() -> ssl.SSLContext:
+    context = ssl.create_default_context()
+    context.load_verify_locations(certifi.where())
+    return context
diff --git a/venv/lib/python3.11/site-packages/httpcore/_sync/__init__.py b/venv/lib/python3.11/site-packages/httpcore/_sync/__init__.py
new file mode 100644
index 0000000..b476d76
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/httpcore/_sync/__init__.py
@@ -0,0 +1,39 @@
+from .connection import HTTPConnection
+from .connection_pool import ConnectionPool
+from .http11 import HTTP11Connection
+from .http_proxy import HTTPProxy
+from .interfaces import ConnectionInterface
+
+try:
+    from .http2 import HTTP2Connection
+except ImportError:  # pragma: nocover
+
+    class HTTP2Connection:  # type: ignore
+        def __init__(self, *args, **kwargs) -> None:  # type: ignore
+            raise RuntimeError(
+                "Attempted to use http2 support, but the `h2` package is not "
+                "installed. Use 'pip install httpcore[http2]'."
+            )
+
+
+try:
+    from .socks_proxy import SOCKSProxy
+except ImportError:  # pragma: nocover
+
+    class SOCKSProxy:  # type: ignore
+        def __init__(self, *args, **kwargs) -> None:  # type: ignore
+            raise RuntimeError(
+                "Attempted to use SOCKS support, but the `socksio` package is not "
+                "installed. Use 'pip install httpcore[socks]'."
+            )
+
+
+__all__ = [
+    "HTTPConnection",
+    "ConnectionPool",
+    "HTTPProxy",
+    "HTTP11Connection",
+    "HTTP2Connection",
+    "ConnectionInterface",
+    "SOCKSProxy",
+]
diff --git
a/venv/lib/python3.11/site-packages/httpcore/_sync/connection.py b/venv/lib/python3.11/site-packages/httpcore/_sync/connection.py new file mode 100644 index 0000000..363f8be --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore/_sync/connection.py @@ -0,0 +1,222 @@ +from __future__ import annotations + +import itertools +import logging +import ssl +import types +import typing + +from .._backends.sync import SyncBackend +from .._backends.base import SOCKET_OPTION, NetworkBackend, NetworkStream +from .._exceptions import ConnectError, ConnectTimeout +from .._models import Origin, Request, Response +from .._ssl import default_ssl_context +from .._synchronization import Lock +from .._trace import Trace +from .http11 import HTTP11Connection +from .interfaces import ConnectionInterface + +RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. + + +logger = logging.getLogger("httpcore.connection") + + +def exponential_backoff(factor: float) -> typing.Iterator[float]: + """ + Generate a geometric sequence that has a ratio of 2 and starts with 0. + + For example: + - `factor = 2`: `0, 2, 4, 8, 16, 32, 64, ...` + - `factor = 3`: `0, 3, 6, 12, 24, 48, 96, ...` + """ + yield 0 + for n in itertools.count(): + yield factor * 2**n + + +class HTTPConnection(ConnectionInterface): + def __init__( + self, + origin: Origin, + ssl_context: ssl.SSLContext | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: str | None = None, + uds: str | None = None, + network_backend: NetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + self._origin = origin + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._retries = retries + self._local_address = local_address + self._uds = uds + + self._network_backend: NetworkBackend = ( + SyncBackend() if network_backend is None else network_backend + ) + self._connection: ConnectionInterface | None = None + self._connect_failed: bool = False + self._request_lock = Lock() + self._socket_options = socket_options + + def handle_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection to {self._origin}" + ) + + try: + with self._request_lock: + if self._connection is None: + stream = self._connect(request) + + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import HTTP2Connection + + self._connection = HTTP2Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = HTTP11Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except BaseException as exc: + self._connect_failed = True + raise exc + + return self._connection.handle_request(request) + + def _connect(self, request: Request) -> NetworkStream: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + retries_left = self._retries + delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) + + while True: + try: + if self._uds is None: + kwargs = { + "host": 
self._origin.host.decode("ascii"), + "port": self._origin.port, + "local_address": self._local_address, + "timeout": timeout, + "socket_options": self._socket_options, + } + with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + else: + kwargs = { + "path": self._uds, + "timeout": timeout, + "socket_options": self._socket_options, + } + with Trace( + "connect_unix_socket", logger, request, kwargs + ) as trace: + stream = self._network_backend.connect_unix_socket( + **kwargs + ) + trace.return_value = stream + + if self._origin.scheme in (b"https", b"wss"): + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or self._origin.host.decode("ascii"), + "timeout": timeout, + } + with Trace("start_tls", logger, request, kwargs) as trace: + stream = stream.start_tls(**kwargs) + trace.return_value = stream + return stream + except (ConnectError, ConnectTimeout): + if retries_left <= 0: + raise + retries_left -= 1 + delay = next(delays) + with Trace("retry", logger, request, kwargs) as trace: + self._network_backend.sleep(delay) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def close(self) -> None: + if self._connection is not None: + with Trace("close", logger, None, {}): + self._connection.close() + + def is_available(self) -> bool: + if self._connection is None: + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
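+    #
+    # A minimal usage sketch (illustrative only; the origin and URL below are
+    # hypothetical, and assume the default SyncBackend with a reachable host):
+    #
+    #     origin = httpcore.Origin(b"https", b"example.com", 443)
+    #     with httpcore.HTTPConnection(origin=origin) as conn:
+    #         response = conn.request("GET", "https://example.com/")
+    #         print(response.status)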
+ + def __enter__(self) -> HTTPConnection: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + self.close() diff --git a/venv/lib/python3.11/site-packages/httpcore/_sync/connection_pool.py b/venv/lib/python3.11/site-packages/httpcore/_sync/connection_pool.py new file mode 100644 index 0000000..9ccfa53 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore/_sync/connection_pool.py @@ -0,0 +1,420 @@ +from __future__ import annotations + +import ssl +import sys +import types +import typing + +from .._backends.sync import SyncBackend +from .._backends.base import SOCKET_OPTION, NetworkBackend +from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol +from .._models import Origin, Proxy, Request, Response +from .._synchronization import Event, ShieldCancellation, ThreadLock +from .connection import HTTPConnection +from .interfaces import ConnectionInterface, RequestInterface + + +class PoolRequest: + def __init__(self, request: Request) -> None: + self.request = request + self.connection: ConnectionInterface | None = None + self._connection_acquired = Event() + + def assign_to_connection(self, connection: ConnectionInterface | None) -> None: + self.connection = connection + self._connection_acquired.set() + + def clear_connection(self) -> None: + self.connection = None + self._connection_acquired = Event() + + def wait_for_connection( + self, timeout: float | None = None + ) -> ConnectionInterface: + if self.connection is None: + self._connection_acquired.wait(timeout=timeout) + assert self.connection is not None + return self.connection + + def is_queued(self) -> bool: + return self.connection is None + + +class ConnectionPool(RequestInterface): + """ + A connection pool for making HTTP requests. + """ + + def __init__( + self, + ssl_context: ssl.SSLContext | None = None, + proxy: Proxy | None = None, + max_connections: int | None = 10, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: str | None = None, + uds: str | None = None, + network_backend: NetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish a + connection. + local_address: Local address to connect from. Can also be used to connect + using a particular address family. 
Using `local_address="0.0.0.0"` will connect using an `AF_INET` address (IPv4),
+                while using `local_address="::"` will connect using an `AF_INET6` address (IPv6).
+            uds: Path to a Unix Domain Socket to use instead of TCP sockets.
+            network_backend: A backend instance to use for handling network I/O.
+            socket_options: Socket options to set on the TCP socket when the
+                connection is established.
+        """
+        self._ssl_context = ssl_context
+        self._proxy = proxy
+        self._max_connections = (
+            sys.maxsize if max_connections is None else max_connections
+        )
+        self._max_keepalive_connections = (
+            sys.maxsize
+            if max_keepalive_connections is None
+            else max_keepalive_connections
+        )
+        self._max_keepalive_connections = min(
+            self._max_connections, self._max_keepalive_connections
+        )
+
+        self._keepalive_expiry = keepalive_expiry
+        self._http1 = http1
+        self._http2 = http2
+        self._retries = retries
+        self._local_address = local_address
+        self._uds = uds
+
+        self._network_backend = (
+            SyncBackend() if network_backend is None else network_backend
+        )
+        self._socket_options = socket_options
+
+        # The mutable state on a connection pool is the queue of incoming requests,
+        # and the set of connections that are servicing those requests.
+        self._connections: list[ConnectionInterface] = []
+        self._requests: list[PoolRequest] = []
+
+        # We only mutate the state of the connection pool within an
+        # 'optional_thread_lock' context. This holds a threading lock unless we're
+        # running in async mode, in which case it is a no-op.
+        self._optional_thread_lock = ThreadLock()
+
+    def create_connection(self, origin: Origin) -> ConnectionInterface:
+        if self._proxy is not None:
+            if self._proxy.url.scheme in (b"socks5", b"socks5h"):
+                from .socks_proxy import Socks5Connection
+
+                return Socks5Connection(
+                    proxy_origin=self._proxy.url.origin,
+                    proxy_auth=self._proxy.auth,
+                    remote_origin=origin,
+                    ssl_context=self._ssl_context,
+                    keepalive_expiry=self._keepalive_expiry,
+                    http1=self._http1,
+                    http2=self._http2,
+                    network_backend=self._network_backend,
+                )
+            elif origin.scheme == b"http":
+                from .http_proxy import ForwardHTTPConnection
+
+                return ForwardHTTPConnection(
+                    proxy_origin=self._proxy.url.origin,
+                    proxy_headers=self._proxy.headers,
+                    proxy_ssl_context=self._proxy.ssl_context,
+                    remote_origin=origin,
+                    keepalive_expiry=self._keepalive_expiry,
+                    network_backend=self._network_backend,
+                )
+            from .http_proxy import TunnelHTTPConnection
+
+            return TunnelHTTPConnection(
+                proxy_origin=self._proxy.url.origin,
+                proxy_headers=self._proxy.headers,
+                proxy_ssl_context=self._proxy.ssl_context,
+                remote_origin=origin,
+                ssl_context=self._ssl_context,
+                keepalive_expiry=self._keepalive_expiry,
+                http1=self._http1,
+                http2=self._http2,
+                network_backend=self._network_backend,
+            )
+
+        return HTTPConnection(
+            origin=origin,
+            ssl_context=self._ssl_context,
+            keepalive_expiry=self._keepalive_expiry,
+            http1=self._http1,
+            http2=self._http2,
+            retries=self._retries,
+            local_address=self._local_address,
+            uds=self._uds,
+            network_backend=self._network_backend,
+            socket_options=self._socket_options,
+        )
+
+    @property
+    def connections(self) -> list[ConnectionInterface]:
+        """
+        Return a list of the connections currently in the pool.
+
+        For example:
+
+        ```python
+        >>> pool.connections
+        [
+            <HTTPConnection ['https://example.com:443', HTTP/1.1, ACTIVE, Request Count: 6]>,
+            <HTTPConnection ['https://example.com:443', HTTP/1.1, IDLE, Request Count: 9]>,
+            <HTTPConnection ['http://example.com:80', HTTP/1.1, IDLE, Request Count: 1]>,
+        ]
+        ```
+        """
+        return list(self._connections)
+
+    def handle_request(self, request: Request) -> Response:
+        """
+        Send an HTTP request, and return an HTTP response.
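+
+        A short illustrative example, going through the higher-level `.request()`
+        helper (the URL is just a placeholder):
+
+        ```python
+        with httpcore.ConnectionPool() as pool:
+            response = pool.request("GET", "https://www.example.com/")
+            print(response.status)
+        ```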
+
+        This is the core implementation that is called into by `.request()` or `.stream()`.
+        """
+        scheme = request.url.scheme.decode()
+        if scheme == "":
+            raise UnsupportedProtocol(
+                "Request URL is missing an 'http://' or 'https://' protocol."
+            )
+        if scheme not in ("http", "https", "ws", "wss"):
+            raise UnsupportedProtocol(
+                f"Request URL has an unsupported protocol '{scheme}://'."
+            )
+
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("pool", None)
+
+        with self._optional_thread_lock:
+            # Add the incoming request to our request queue.
+            pool_request = PoolRequest(request)
+            self._requests.append(pool_request)
+
+        try:
+            while True:
+                with self._optional_thread_lock:
+                    # Assign incoming requests to available connections,
+                    # closing or creating new connections as required.
+                    closing = self._assign_requests_to_connections()
+                self._close_connections(closing)
+
+                # Wait until this request has an assigned connection.
+                connection = pool_request.wait_for_connection(timeout=timeout)
+
+                try:
+                    # Send the request on the assigned connection.
+                    response = connection.handle_request(
+                        pool_request.request
+                    )
+                except ConnectionNotAvailable:
+                    # In some cases a connection may initially be available to
+                    # handle a request, but then become unavailable.
+                    #
+                    # In this case we clear the connection and try again.
+                    pool_request.clear_connection()
+                else:
+                    break  # pragma: nocover
+
+        except BaseException as exc:
+            with self._optional_thread_lock:
+                # For any exception or cancellation we remove the request from
+                # the queue, and then re-assign requests to connections.
+                self._requests.remove(pool_request)
+                closing = self._assign_requests_to_connections()
+
+            self._close_connections(closing)
+            raise exc from None
+
+        # Return the response. Note that in this case we still have to manage
+        # the point at which the response is closed.
+        assert isinstance(response.stream, typing.Iterable)
+        return Response(
+            status=response.status,
+            headers=response.headers,
+            content=PoolByteStream(
+                stream=response.stream, pool_request=pool_request, pool=self
+            ),
+            extensions=response.extensions,
+        )
+
+    def _assign_requests_to_connections(self) -> list[ConnectionInterface]:
+        """
+        Manage the state of the connection pool, assigning incoming
+        requests to connections as available.
+
+        Called whenever a new request is added or removed from the pool.
+
+        Any closing connections are returned, allowing the I/O for closing
+        those connections to be handled separately.
+        """
+        closing_connections = []
+
+        # First we handle cleaning up any connections that are closed,
+        # have expired their keep-alive, or surplus idle connections.
+        for connection in list(self._connections):
+            if connection.is_closed():
+                # log: "removing closed connection"
+                self._connections.remove(connection)
+            elif connection.has_expired():
+                # log: "closing expired connection"
+                self._connections.remove(connection)
+                closing_connections.append(connection)
+            elif (
+                connection.is_idle()
+                and len([connection.is_idle() for connection in self._connections])
+                > self._max_keepalive_connections
+            ):
+                # log: "closing idle connection"
+                self._connections.remove(connection)
+                closing_connections.append(connection)
+
+        # Assign queued requests to connections.
+ queued_requests = [request for request in self._requests if request.is_queued()] + for pool_request in queued_requests: + origin = pool_request.request.url.origin + available_connections = [ + connection + for connection in self._connections + if connection.can_handle_request(origin) and connection.is_available() + ] + idle_connections = [ + connection for connection in self._connections if connection.is_idle() + ] + + # There are three cases for how we may be able to handle the request: + # + # 1. There is an existing connection that can handle the request. + # 2. We can create a new connection to handle the request. + # 3. We can close an idle connection and then create a new connection + # to handle the request. + if available_connections: + # log: "reusing existing connection" + connection = available_connections[0] + pool_request.assign_to_connection(connection) + elif len(self._connections) < self._max_connections: + # log: "creating new connection" + connection = self.create_connection(origin) + self._connections.append(connection) + pool_request.assign_to_connection(connection) + elif idle_connections: + # log: "closing idle connection" + connection = idle_connections[0] + self._connections.remove(connection) + closing_connections.append(connection) + # log: "creating new connection" + connection = self.create_connection(origin) + self._connections.append(connection) + pool_request.assign_to_connection(connection) + + return closing_connections + + def _close_connections(self, closing: list[ConnectionInterface]) -> None: + # Close connections which have been removed from the pool. + with ShieldCancellation(): + for connection in closing: + connection.close() + + def close(self) -> None: + # Explicitly close the connection pool. + # Clears all existing requests and connections. 
+ with self._optional_thread_lock: + closing_connections = list(self._connections) + self._connections = [] + self._close_connections(closing_connections) + + def __enter__(self) -> ConnectionPool: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + self.close() + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + with self._optional_thread_lock: + request_is_queued = [request.is_queued() for request in self._requests] + connection_is_idle = [ + connection.is_idle() for connection in self._connections + ] + + num_active_requests = request_is_queued.count(False) + num_queued_requests = request_is_queued.count(True) + num_active_connections = connection_is_idle.count(False) + num_idle_connections = connection_is_idle.count(True) + + requests_info = ( + f"Requests: {num_active_requests} active, {num_queued_requests} queued" + ) + connection_info = ( + f"Connections: {num_active_connections} active, {num_idle_connections} idle" + ) + + return f"<{class_name} [{requests_info} | {connection_info}]>" + + +class PoolByteStream: + def __init__( + self, + stream: typing.Iterable[bytes], + pool_request: PoolRequest, + pool: ConnectionPool, + ) -> None: + self._stream = stream + self._pool_request = pool_request + self._pool = pool + self._closed = False + + def __iter__(self) -> typing.Iterator[bytes]: + try: + for part in self._stream: + yield part + except BaseException as exc: + self.close() + raise exc from None + + def close(self) -> None: + if not self._closed: + self._closed = True + with ShieldCancellation(): + if hasattr(self._stream, "close"): + self._stream.close() + + with self._pool._optional_thread_lock: + self._pool._requests.remove(self._pool_request) + closing = self._pool._assign_requests_to_connections() + + self._pool._close_connections(closing) diff --git a/venv/lib/python3.11/site-packages/httpcore/_sync/http11.py b/venv/lib/python3.11/site-packages/httpcore/_sync/http11.py new file mode 100644 index 0000000..ebd3a97 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore/_sync/http11.py @@ -0,0 +1,379 @@ +from __future__ import annotations + +import enum +import logging +import ssl +import time +import types +import typing + +import h11 + +from .._backends.base import NetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, + WriteError, + map_exceptions, +) +from .._models import Origin, Request, Response +from .._synchronization import Lock, ShieldCancellation +from .._trace import Trace +from .interfaces import ConnectionInterface + +logger = logging.getLogger("httpcore.http11") + + +# A subset of `h11.Event` types supported by `_send_event` +H11SendEvent = typing.Union[ + h11.Request, + h11.Data, + h11.EndOfMessage, +] + + +class HTTPConnectionState(enum.IntEnum): + NEW = 0 + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class HTTP11Connection(ConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024 + + def __init__( + self, + origin: Origin, + stream: NetworkStream, + keepalive_expiry: float | None = None, + ) -> None: + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: float | None = keepalive_expiry + self._expire_at: float | None = None + self._state = HTTPConnectionState.NEW + self._state_lock = Lock() + self._request_count = 0 + self._h11_state = h11.Connection( + our_role=h11.CLIENT, + 
+            max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE,
+        )
+
+    def handle_request(self, request: Request) -> Response:
+        if not self.can_handle_request(request.url.origin):
+            raise RuntimeError(
+                f"Attempted to send request to {request.url.origin} on connection "
+                f"to {self._origin}"
+            )
+
+        with self._state_lock:
+            if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE):
+                self._request_count += 1
+                self._state = HTTPConnectionState.ACTIVE
+                self._expire_at = None
+            else:
+                raise ConnectionNotAvailable()
+
+        try:
+            kwargs = {"request": request}
+            try:
+                with Trace(
+                    "send_request_headers", logger, request, kwargs
+                ) as trace:
+                    self._send_request_headers(**kwargs)
+                with Trace("send_request_body", logger, request, kwargs) as trace:
+                    self._send_request_body(**kwargs)
+            except WriteError:
+                # If we get a write error while we're writing the request,
+                # then we suppress this error and move on to attempting to
+                # read the response. Servers can sometimes close the request
+                # pre-emptively and then respond with a well-formed HTTP
+                # error response.
+                pass
+
+            with Trace(
+                "receive_response_headers", logger, request, kwargs
+            ) as trace:
+                (
+                    http_version,
+                    status,
+                    reason_phrase,
+                    headers,
+                    trailing_data,
+                ) = self._receive_response_headers(**kwargs)
+                trace.return_value = (
+                    http_version,
+                    status,
+                    reason_phrase,
+                    headers,
+                )
+
+            network_stream = self._network_stream
+
+            # CONNECT or Upgrade request
+            if (status == 101) or (
+                (request.method == b"CONNECT") and (200 <= status < 300)
+            ):
+                network_stream = HTTP11UpgradeStream(network_stream, trailing_data)
+
+            return Response(
+                status=status,
+                headers=headers,
+                content=HTTP11ConnectionByteStream(self, request),
+                extensions={
+                    "http_version": http_version,
+                    "reason_phrase": reason_phrase,
+                    "network_stream": network_stream,
+                },
+            )
+        except BaseException as exc:
+            with ShieldCancellation():
+                with Trace("response_closed", logger, request) as trace:
+                    self._response_closed()
+            raise exc
+
+    # Sending the request...
+
+    def _send_request_headers(self, request: Request) -> None:
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("write", None)
+
+        with map_exceptions({h11.LocalProtocolError: LocalProtocolError}):
+            event = h11.Request(
+                method=request.method,
+                target=request.url.target,
+                headers=request.headers,
+            )
+        self._send_event(event, timeout=timeout)
+
+    def _send_request_body(self, request: Request) -> None:
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("write", None)
+
+        assert isinstance(request.stream, typing.Iterable)
+        for chunk in request.stream:
+            event = h11.Data(data=chunk)
+            self._send_event(event, timeout=timeout)
+
+        self._send_event(h11.EndOfMessage(), timeout=timeout)
+
+    def _send_event(self, event: h11.Event, timeout: float | None = None) -> None:
+        bytes_to_send = self._h11_state.send(event)
+        if bytes_to_send is not None:
+            self._network_stream.write(bytes_to_send, timeout=timeout)
+
+    # Receiving the response...
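+    #
+    # The methods below drive h11's event-based parser: `_receive_event()`
+    # repeatedly calls `next_event()` on the h11 connection and feeds it more
+    # bytes from the network whenever it returns NEED_DATA. Roughly (an
+    # illustrative sketch of the core loop, not additional behaviour):
+    #
+    #     event = self._h11_state.next_event()
+    #     if event is h11.NEED_DATA:
+    #         self._h11_state.receive_data(self._network_stream.read(...))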
+
+    def _receive_response_headers(
+        self, request: Request
+    ) -> tuple[bytes, int, bytes, list[tuple[bytes, bytes]], bytes]:
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("read", None)
+
+        while True:
+            event = self._receive_event(timeout=timeout)
+            if isinstance(event, h11.Response):
+                break
+            if (
+                isinstance(event, h11.InformationalResponse)
+                and event.status_code == 101
+            ):
+                break
+
+        http_version = b"HTTP/" + event.http_version
+
+        # h11 version 0.11+ supports a `raw_items` interface to get the
+        # raw header casing, rather than the enforced lowercase headers.
+        headers = event.headers.raw_items()
+
+        trailing_data, _ = self._h11_state.trailing_data
+
+        return http_version, event.status_code, event.reason, headers, trailing_data
+
+    def _receive_response_body(
+        self, request: Request
+    ) -> typing.Iterator[bytes]:
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("read", None)
+
+        while True:
+            event = self._receive_event(timeout=timeout)
+            if isinstance(event, h11.Data):
+                yield bytes(event.data)
+            elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)):
+                break
+
+    def _receive_event(
+        self, timeout: float | None = None
+    ) -> h11.Event | type[h11.PAUSED]:
+        while True:
+            with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}):
+                event = self._h11_state.next_event()
+
+            if event is h11.NEED_DATA:
+                data = self._network_stream.read(
+                    self.READ_NUM_BYTES, timeout=timeout
+                )
+
+                # If we feed this case through h11 we'll raise an exception like:
+                #
+                #     httpcore.RemoteProtocolError: can't handle event type
+                #     ConnectionClosed when role=SERVER and state=SEND_RESPONSE
+                #
+                # Which is accurate, but not very informative from an end-user
+                # perspective. Instead we handle this case distinctly and treat
+                # it as a ConnectError.
+                if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE:
+                    msg = "Server disconnected without sending a response."
+                    raise RemoteProtocolError(msg)
+
+                self._h11_state.receive_data(data)
+            else:
+                # mypy fails to narrow the type in the above if statement
+                return event  # type: ignore[return-value]
+
+    def _response_closed(self) -> None:
+        with self._state_lock:
+            if (
+                self._h11_state.our_state is h11.DONE
+                and self._h11_state.their_state is h11.DONE
+            ):
+                self._state = HTTPConnectionState.IDLE
+                self._h11_state.start_next_cycle()
+                if self._keepalive_expiry is not None:
+                    now = time.monotonic()
+                    self._expire_at = now + self._keepalive_expiry
+            else:
+                self.close()
+
+    # Once the connection is no longer required...
+
+    def close(self) -> None:
+        # Note that this method unilaterally closes the connection, and does
+        # not have any kind of locking in place around it.
+        self._state = HTTPConnectionState.CLOSED
+        self._network_stream.close()
+
+    # The ConnectionInterface methods provide information about the state of
+    # the connection, allowing for a connection pooling implementation to
+    # determine when to reuse and when to close the connection...
+
+    def can_handle_request(self, origin: Origin) -> bool:
+        return origin == self._origin
+
+    def is_available(self) -> bool:
+        # Note that HTTP/1.1 connections in the "NEW" state are not treated as
+        # being "available". The control flow which created the connection will
+        # be able to send an outgoing request, but the connection will not be
+        # acquired from the connection pool for any other request.
+ return self._state == HTTPConnectionState.IDLE + + def has_expired(self) -> bool: + now = time.monotonic() + keepalive_expired = self._expire_at is not None and now > self._expire_at + + # If the HTTP connection is idle but the socket is readable, then the + # only valid state is that the socket is about to return b"", indicating + # a server-initiated disconnect. + server_disconnected = ( + self._state == HTTPConnectionState.IDLE + and self._network_stream.get_extra_info("is_readable") + ) + + return keepalive_expired or server_disconnected + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/1.1, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. + + def __enter__(self) -> HTTP11Connection: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + self.close() + + +class HTTP11ConnectionByteStream: + def __init__(self, connection: HTTP11Connection, request: Request) -> None: + self._connection = connection + self._request = request + self._closed = False + + def __iter__(self) -> typing.Iterator[bytes]: + kwargs = {"request": self._request} + try: + with Trace("receive_response_body", logger, self._request, kwargs): + for chunk in self._connection._receive_response_body(**kwargs): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. 
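+            # (ShieldCancellation ensures the cleanup below is not itself
+            # interrupted; in this sync code path it is effectively a no-op.)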
+ with ShieldCancellation(): + self.close() + raise exc + + def close(self) -> None: + if not self._closed: + self._closed = True + with Trace("response_closed", logger, self._request): + self._connection._response_closed() + + +class HTTP11UpgradeStream(NetworkStream): + def __init__(self, stream: NetworkStream, leading_data: bytes) -> None: + self._stream = stream + self._leading_data = leading_data + + def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + if self._leading_data: + buffer = self._leading_data[:max_bytes] + self._leading_data = self._leading_data[max_bytes:] + return buffer + else: + return self._stream.read(max_bytes, timeout) + + def write(self, buffer: bytes, timeout: float | None = None) -> None: + self._stream.write(buffer, timeout) + + def close(self) -> None: + self._stream.close() + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> NetworkStream: + return self._stream.start_tls(ssl_context, server_hostname, timeout) + + def get_extra_info(self, info: str) -> typing.Any: + return self._stream.get_extra_info(info) diff --git a/venv/lib/python3.11/site-packages/httpcore/_sync/http2.py b/venv/lib/python3.11/site-packages/httpcore/_sync/http2.py new file mode 100644 index 0000000..ddcc189 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore/_sync/http2.py @@ -0,0 +1,592 @@ +from __future__ import annotations + +import enum +import logging +import time +import types +import typing + +import h2.config +import h2.connection +import h2.events +import h2.exceptions +import h2.settings + +from .._backends.base import NetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, +) +from .._models import Origin, Request, Response +from .._synchronization import Lock, Semaphore, ShieldCancellation +from .._trace import Trace +from .interfaces import ConnectionInterface + +logger = logging.getLogger("httpcore.http2") + + +def has_body_headers(request: Request) -> bool: + return any( + k.lower() == b"content-length" or k.lower() == b"transfer-encoding" + for k, v in request.headers + ) + + +class HTTPConnectionState(enum.IntEnum): + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class HTTP2Connection(ConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + CONFIG = h2.config.H2Configuration(validate_inbound_headers=False) + + def __init__( + self, + origin: Origin, + stream: NetworkStream, + keepalive_expiry: float | None = None, + ): + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: float | None = keepalive_expiry + self._h2_state = h2.connection.H2Connection(config=self.CONFIG) + self._state = HTTPConnectionState.IDLE + self._expire_at: float | None = None + self._request_count = 0 + self._init_lock = Lock() + self._state_lock = Lock() + self._read_lock = Lock() + self._write_lock = Lock() + self._sent_connection_init = False + self._used_all_stream_ids = False + self._connection_error = False + + # Mapping from stream ID to response stream events. + self._events: dict[ + int, + list[ + h2.events.ResponseReceived + | h2.events.DataReceived + | h2.events.StreamEnded + | h2.events.StreamReset, + ], + ] = {} + + # Connection terminated events are stored as state since + # we need to handle them for all streams. 
+ self._connection_terminated: h2.events.ConnectionTerminated | None = None + + self._read_exception: Exception | None = None + self._write_exception: Exception | None = None + + def handle_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + # This cannot occur in normal operation, since the connection pool + # will only send requests on connections that handle them. + # It's in place simply for resilience as a guard against incorrect + # usage, for anyone working directly with httpcore connections. + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection " + f"to {self._origin}" + ) + + with self._state_lock: + if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE): + self._request_count += 1 + self._expire_at = None + self._state = HTTPConnectionState.ACTIVE + else: + raise ConnectionNotAvailable() + + with self._init_lock: + if not self._sent_connection_init: + try: + sci_kwargs = {"request": request} + with Trace( + "send_connection_init", logger, request, sci_kwargs + ): + self._send_connection_init(**sci_kwargs) + except BaseException as exc: + with ShieldCancellation(): + self.close() + raise exc + + self._sent_connection_init = True + + # Initially start with just 1 until the remote server provides + # its max_concurrent_streams value + self._max_streams = 1 + + local_settings_max_streams = ( + self._h2_state.local_settings.max_concurrent_streams + ) + self._max_streams_semaphore = Semaphore(local_settings_max_streams) + + for _ in range(local_settings_max_streams - self._max_streams): + self._max_streams_semaphore.acquire() + + self._max_streams_semaphore.acquire() + + try: + stream_id = self._h2_state.get_next_available_stream_id() + self._events[stream_id] = [] + except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover + self._used_all_stream_ids = True + self._request_count -= 1 + raise ConnectionNotAvailable() + + try: + kwargs = {"request": request, "stream_id": stream_id} + with Trace("send_request_headers", logger, request, kwargs): + self._send_request_headers(request=request, stream_id=stream_id) + with Trace("send_request_body", logger, request, kwargs): + self._send_request_body(request=request, stream_id=stream_id) + with Trace( + "receive_response_headers", logger, request, kwargs + ) as trace: + status, headers = self._receive_response( + request=request, stream_id=stream_id + ) + trace.return_value = (status, headers) + + return Response( + status=status, + headers=headers, + content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id), + extensions={ + "http_version": b"HTTP/2", + "network_stream": self._network_stream, + "stream_id": stream_id, + }, + ) + except BaseException as exc: # noqa: PIE786 + with ShieldCancellation(): + kwargs = {"stream_id": stream_id} + with Trace("response_closed", logger, request, kwargs): + self._response_closed(stream_id=stream_id) + + if isinstance(exc, h2.exceptions.ProtocolError): + # One case where h2 can raise a protocol error is when a + # closed frame has been seen by the state machine. + # + # This happens when one stream is reading, and encounters + # a GOAWAY event. Other flows of control may then raise + # a protocol error at any point they interact with the 'h2_state'. + # + # In this case we'll have stored the event, and should raise + # it as a RemoteProtocolError. 
+                if self._connection_terminated:  # pragma: nocover
+                    raise RemoteProtocolError(self._connection_terminated)
+                # If h2 raises a protocol error in some other state then we
+                # must somehow have made a protocol violation.
+                raise LocalProtocolError(exc)  # pragma: nocover
+
+            raise exc
+
+    def _send_connection_init(self, request: Request) -> None:
+        """
+        The HTTP/2 connection requires some initial setup before we can start
+        using individual request/response streams on it.
+        """
+        # Need to set these manually here instead of manipulating via
+        # __setitem__() otherwise the H2Connection will emit SettingsUpdate
+        # frames in addition to sending the undesired defaults.
+        self._h2_state.local_settings = h2.settings.Settings(
+            client=True,
+            initial_values={
+                # Disable PUSH_PROMISE frames from the server since we don't do anything
+                # with them for now. Maybe when we support caching?
+                h2.settings.SettingCodes.ENABLE_PUSH: 0,
+                # These two are taken from h2 for safe defaults
+                h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100,
+                h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536,
+            },
+        )
+
+        # Some websites (*cough* Yahoo *cough*) balk at this setting being
+        # present in the initial handshake since it's not defined in the original
+        # RFC despite the RFC mandating ignoring settings you don't know about.
+        del self._h2_state.local_settings[
+            h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL
+        ]
+
+        self._h2_state.initiate_connection()
+        self._h2_state.increment_flow_control_window(2**24)
+        self._write_outgoing_data(request)
+
+    # Sending the request...
+
+    def _send_request_headers(self, request: Request, stream_id: int) -> None:
+        """
+        Send the request headers to a given stream ID.
+        """
+        end_stream = not has_body_headers(request)
+
+        # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'.
+        # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require
+        # HTTP/1.1 style headers, and map them appropriately if we end up on
+        # an HTTP/2 connection.
+        authority = [v for k, v in request.headers if k.lower() == b"host"][0]
+
+        headers = [
+            (b":method", request.method),
+            (b":authority", authority),
+            (b":scheme", request.url.scheme),
+            (b":path", request.url.target),
+        ] + [
+            (k.lower(), v)
+            for k, v in request.headers
+            if k.lower()
+            not in (
+                b"host",
+                b"transfer-encoding",
+            )
+        ]
+
+        self._h2_state.send_headers(stream_id, headers, end_stream=end_stream)
+        self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id)
+        self._write_outgoing_data(request)
+
+    def _send_request_body(self, request: Request, stream_id: int) -> None:
+        """
+        Iterate over the request body sending it to a given stream ID.
+        """
+        if not has_body_headers(request):
+            return
+
+        assert isinstance(request.stream, typing.Iterable)
+        for data in request.stream:
+            self._send_stream_data(request, stream_id, data)
+        self._send_end_stream(request, stream_id)
+
+    def _send_stream_data(
+        self, request: Request, stream_id: int, data: bytes
+    ) -> None:
+        """
+        Send a single chunk of data in one or more data frames.
+        """
+        while data:
+            max_flow = self._wait_for_outgoing_flow(request, stream_id)
+            chunk_size = min(len(data), max_flow)
+            chunk, data = data[:chunk_size], data[chunk_size:]
+            self._h2_state.send_data(stream_id, chunk)
+            self._write_outgoing_data(request)
+
+    def _send_end_stream(self, request: Request, stream_id: int) -> None:
+        """
+        Send an empty data frame on a given stream ID with the END_STREAM flag set.
+ """ + self._h2_state.end_stream(stream_id) + self._write_outgoing_data(request) + + # Receiving the response... + + def _receive_response( + self, request: Request, stream_id: int + ) -> tuple[int, list[tuple[bytes, bytes]]]: + """ + Return the response status code and headers for a given stream ID. + """ + while True: + event = self._receive_stream_event(request, stream_id) + if isinstance(event, h2.events.ResponseReceived): + break + + status_code = 200 + headers = [] + assert event.headers is not None + for k, v in event.headers: + if k == b":status": + status_code = int(v.decode("ascii", errors="ignore")) + elif not k.startswith(b":"): + headers.append((k, v)) + + return (status_code, headers) + + def _receive_response_body( + self, request: Request, stream_id: int + ) -> typing.Iterator[bytes]: + """ + Iterator that returns the bytes of the response body for a given stream ID. + """ + while True: + event = self._receive_stream_event(request, stream_id) + if isinstance(event, h2.events.DataReceived): + assert event.flow_controlled_length is not None + assert event.data is not None + amount = event.flow_controlled_length + self._h2_state.acknowledge_received_data(amount, stream_id) + self._write_outgoing_data(request) + yield event.data + elif isinstance(event, h2.events.StreamEnded): + break + + def _receive_stream_event( + self, request: Request, stream_id: int + ) -> h2.events.ResponseReceived | h2.events.DataReceived | h2.events.StreamEnded: + """ + Return the next available event for a given stream ID. + + Will read more data from the network if required. + """ + while not self._events.get(stream_id): + self._receive_events(request, stream_id) + event = self._events[stream_id].pop(0) + if isinstance(event, h2.events.StreamReset): + raise RemoteProtocolError(event) + return event + + def _receive_events( + self, request: Request, stream_id: int | None = None + ) -> None: + """ + Read some data from the network until we see one or more events + for a given stream ID. + """ + with self._read_lock: + if self._connection_terminated is not None: + last_stream_id = self._connection_terminated.last_stream_id + if stream_id and last_stream_id and stream_id > last_stream_id: + self._request_count -= 1 + raise ConnectionNotAvailable() + raise RemoteProtocolError(self._connection_terminated) + + # This conditional is a bit icky. We don't want to block reading if we've + # actually got an event to return for a given stream. We need to do that + # check *within* the atomic read lock. Though it also need to be optional, + # because when we call it from `_wait_for_outgoing_flow` we *do* want to + # block until we've available flow control, event when we have events + # pending for the stream ID we're attempting to send on. 
+ if stream_id is None or not self._events.get(stream_id): + events = self._read_incoming_data(request) + for event in events: + if isinstance(event, h2.events.RemoteSettingsChanged): + with Trace( + "receive_remote_settings", logger, request + ) as trace: + self._receive_remote_settings_change(event) + trace.return_value = event + + elif isinstance( + event, + ( + h2.events.ResponseReceived, + h2.events.DataReceived, + h2.events.StreamEnded, + h2.events.StreamReset, + ), + ): + if event.stream_id in self._events: + self._events[event.stream_id].append(event) + + elif isinstance(event, h2.events.ConnectionTerminated): + self._connection_terminated = event + + self._write_outgoing_data(request) + + def _receive_remote_settings_change( + self, event: h2.events.RemoteSettingsChanged + ) -> None: + max_concurrent_streams = event.changed_settings.get( + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS + ) + if max_concurrent_streams: + new_max_streams = min( + max_concurrent_streams.new_value, + self._h2_state.local_settings.max_concurrent_streams, + ) + if new_max_streams and new_max_streams != self._max_streams: + while new_max_streams > self._max_streams: + self._max_streams_semaphore.release() + self._max_streams += 1 + while new_max_streams < self._max_streams: + self._max_streams_semaphore.acquire() + self._max_streams -= 1 + + def _response_closed(self, stream_id: int) -> None: + self._max_streams_semaphore.release() + del self._events[stream_id] + with self._state_lock: + if self._connection_terminated and not self._events: + self.close() + + elif self._state == HTTPConnectionState.ACTIVE and not self._events: + self._state = HTTPConnectionState.IDLE + if self._keepalive_expiry is not None: + now = time.monotonic() + self._expire_at = now + self._keepalive_expiry + if self._used_all_stream_ids: # pragma: nocover + self.close() + + def close(self) -> None: + # Note that this method unilaterally closes the connection, and does + # not have any kind of locking in place around it. + self._h2_state.close_connection() + self._state = HTTPConnectionState.CLOSED + self._network_stream.close() + + # Wrappers around network read/write operations... + + def _read_incoming_data(self, request: Request) -> list[h2.events.Event]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + if self._read_exception is not None: + raise self._read_exception # pragma: nocover + + try: + data = self._network_stream.read(self.READ_NUM_BYTES, timeout) + if data == b"": + raise RemoteProtocolError("Server disconnected") + except Exception as exc: + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future reads. + # (For example, this means that a single read timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. 
+ self._read_exception = exc + self._connection_error = True + raise exc + + events: list[h2.events.Event] = self._h2_state.receive_data(data) + + return events + + def _write_outgoing_data(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + with self._write_lock: + data_to_send = self._h2_state.data_to_send() + + if self._write_exception is not None: + raise self._write_exception # pragma: nocover + + try: + self._network_stream.write(data_to_send, timeout) + except Exception as exc: # pragma: nocover + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future write. + # (For example, this means that a single write timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. + self._write_exception = exc + self._connection_error = True + raise exc + + # Flow control... + + def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int: + """ + Returns the maximum allowable outgoing flow for a given stream. + + If the allowable flow is zero, then waits on the network until + WindowUpdated frames have increased the flow rate. + https://tools.ietf.org/html/rfc7540#section-6.9 + """ + local_flow: int = self._h2_state.local_flow_control_window(stream_id) + max_frame_size: int = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + while flow == 0: + self._receive_events(request) + local_flow = self._h2_state.local_flow_control_window(stream_id) + max_frame_size = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + return flow + + # Interface for connection pooling... + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def is_available(self) -> bool: + return ( + self._state != HTTPConnectionState.CLOSED + and not self._connection_error + and not self._used_all_stream_ids + and not ( + self._h2_state.state_machine.state + == h2.connection.ConnectionState.CLOSED + ) + ) + + def has_expired(self) -> bool: + now = time.monotonic() + return self._expire_at is not None and now > self._expire_at + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/2, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
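+    #
+    # A minimal usage sketch (assuming an already-established `origin` and
+    # network `stream`; `.request()` is inherited from `RequestInterface`):
+    #
+    #     with HTTP2Connection(origin=origin, stream=stream) as conn:
+    #         response = conn.request("GET", "https://example.com/")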
+ + def __enter__(self) -> HTTP2Connection: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + self.close() + + +class HTTP2ConnectionByteStream: + def __init__( + self, connection: HTTP2Connection, request: Request, stream_id: int + ) -> None: + self._connection = connection + self._request = request + self._stream_id = stream_id + self._closed = False + + def __iter__(self) -> typing.Iterator[bytes]: + kwargs = {"request": self._request, "stream_id": self._stream_id} + try: + with Trace("receive_response_body", logger, self._request, kwargs): + for chunk in self._connection._receive_response_body( + request=self._request, stream_id=self._stream_id + ): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. + with ShieldCancellation(): + self.close() + raise exc + + def close(self) -> None: + if not self._closed: + self._closed = True + kwargs = {"stream_id": self._stream_id} + with Trace("response_closed", logger, self._request, kwargs): + self._connection._response_closed(stream_id=self._stream_id) diff --git a/venv/lib/python3.11/site-packages/httpcore/_sync/http_proxy.py b/venv/lib/python3.11/site-packages/httpcore/_sync/http_proxy.py new file mode 100644 index 0000000..ecca88f --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore/_sync/http_proxy.py @@ -0,0 +1,367 @@ +from __future__ import annotations + +import base64 +import logging +import ssl +import typing + +from .._backends.base import SOCKET_OPTION, NetworkBackend +from .._exceptions import ProxyError +from .._models import ( + URL, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, +) +from .._ssl import default_ssl_context +from .._synchronization import Lock +from .._trace import Trace +from .connection import HTTPConnection +from .connection_pool import ConnectionPool +from .http11 import HTTP11Connection +from .interfaces import ConnectionInterface + +ByteOrStr = typing.Union[bytes, str] +HeadersAsSequence = typing.Sequence[typing.Tuple[ByteOrStr, ByteOrStr]] +HeadersAsMapping = typing.Mapping[ByteOrStr, ByteOrStr] + + +logger = logging.getLogger("httpcore.proxy") + + +def merge_headers( + default_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, + override_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, +) -> list[tuple[bytes, bytes]]: + """ + Append default_headers and override_headers, de-duplicating if a key exists + in both cases. + """ + default_headers = [] if default_headers is None else list(default_headers) + override_headers = [] if override_headers is None else list(override_headers) + has_override = set(key.lower() for key, value in override_headers) + default_headers = [ + (key, value) + for key, value in default_headers + if key.lower() not in has_override + ] + return default_headers + override_headers + + +class HTTPProxy(ConnectionPool): # pragma: nocover + """ + A connection pool that sends requests via an HTTP proxy. 
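+
+    A minimal usage sketch (assuming a proxy listening on `127.0.0.1:8080`;
+    `.request()` is inherited from `RequestInterface`):
+
+        with HTTPProxy(proxy_url="http://127.0.0.1:8080/") as proxy:
+            response = proxy.request("GET", "https://example.com/")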
+ """ + + def __init__( + self, + proxy_url: URL | bytes | str, + proxy_auth: tuple[bytes | str, bytes | str] | None = None, + proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None, + ssl_context: ssl.SSLContext | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + max_connections: int | None = 10, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: str | None = None, + uds: str | None = None, + network_backend: NetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + proxy_url: The URL to use when connecting to the proxy server. + For example `"http://127.0.0.1:8080/"`. + proxy_auth: Any proxy authentication as a two-tuple of + (username, password). May be either bytes or ascii-only str. + proxy_headers: Any HTTP headers to use for the proxy requests. + For example `{"Proxy-Authorization": "Basic :"}`. + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + proxy_ssl_context: The same as `ssl_context`, but for a proxy server rather than a remote origin. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish + a connection. + local_address: Local address to connect from. Can also be used to + connect using a particular address family. Using + `local_address="0.0.0.0"` will connect using an `AF_INET` address + (IPv4), while using `local_address="::"` will connect using an + `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. 
+ """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + local_address=local_address, + uds=uds, + socket_options=socket_options, + ) + + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if ( + self._proxy_url.scheme == b"http" and proxy_ssl_context is not None + ): # pragma: no cover + raise RuntimeError( + "The `proxy_ssl_context` argument is not allowed for the http scheme" + ) + + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + if proxy_auth is not None: + username = enforce_bytes(proxy_auth[0], name="proxy_auth") + password = enforce_bytes(proxy_auth[1], name="proxy_auth") + userpass = username + b":" + password + authorization = b"Basic " + base64.b64encode(userpass) + self._proxy_headers = [ + (b"Proxy-Authorization", authorization) + ] + self._proxy_headers + + def create_connection(self, origin: Origin) -> ConnectionInterface: + if origin.scheme == b"http": + return ForwardHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + keepalive_expiry=self._keepalive_expiry, + network_backend=self._network_backend, + proxy_ssl_context=self._proxy_ssl_context, + ) + return TunnelHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + ssl_context=self._ssl_context, + proxy_ssl_context=self._proxy_ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class ForwardHTTPConnection(ConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None, + keepalive_expiry: float | None = None, + network_backend: NetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + ) -> None: + self._connection = HTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._remote_origin = remote_origin + + def handle_request(self, request: Request) -> Response: + headers = merge_headers(self._proxy_headers, request.headers) + url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=bytes(request.url), + ) + proxy_request = Request( + method=request.method, + url=url, + headers=headers, + content=request.stream, + extensions=request.extensions, + ) + return self._connection.handle_request(proxy_request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + def close(self) -> None: + self._connection.close() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() 
+ + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + +class TunnelHTTPConnection(ConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + ssl_context: ssl.SSLContext | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + proxy_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + network_backend: NetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + self._connection: ConnectionInterface = HTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._connect_lock = Lock() + self._connected = False + + def handle_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("connect", None) + + with self._connect_lock: + if not self._connected: + target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port) + + connect_url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=target, + ) + connect_headers = merge_headers( + [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers + ) + connect_request = Request( + method=b"CONNECT", + url=connect_url, + headers=connect_headers, + extensions=request.extensions, + ) + connect_response = self._connection.handle_request( + connect_request + ) + + if connect_response.status < 200 or connect_response.status > 299: + reason_bytes = connect_response.extensions.get("reason_phrase", b"") + reason_str = reason_bytes.decode("ascii", errors="ignore") + msg = "%d %s" % (connect_response.status, reason_str) + self._connection.close() + raise ProxyError(msg) + + stream = connect_response.extensions["network_stream"] + + # Upgrade the stream to SSL + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + with Trace("start_tls", logger, request, kwargs) as trace: + stream = stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import HTTP2Connection + + self._connection = HTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = HTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + + self._connected = True + return self._connection.handle_request(request) + + def can_handle_request(self, 
origin: Origin) -> bool: + return origin == self._remote_origin + + def close(self) -> None: + self._connection.close() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/venv/lib/python3.11/site-packages/httpcore/_sync/interfaces.py b/venv/lib/python3.11/site-packages/httpcore/_sync/interfaces.py new file mode 100644 index 0000000..e673d4c --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore/_sync/interfaces.py @@ -0,0 +1,137 @@ +from __future__ import annotations + +import contextlib +import typing + +from .._models import ( + URL, + Extensions, + HeaderTypes, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, + include_request_headers, +) + + +class RequestInterface: + def request( + self, + method: bytes | str, + url: URL | bytes | str, + *, + headers: HeaderTypes = None, + content: bytes | typing.Iterator[bytes] | None = None, + extensions: Extensions | None = None, + ) -> Response: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. + headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = self.handle_request(request) + try: + response.read() + finally: + response.close() + return response + + @contextlib.contextmanager + def stream( + self, + method: bytes | str, + url: URL | bytes | str, + *, + headers: HeaderTypes = None, + content: bytes | typing.Iterator[bytes] | None = None, + extensions: Extensions | None = None, + ) -> typing.Iterator[Response]: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. + headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = self.handle_request(request) + try: + yield response + finally: + response.close() + + def handle_request(self, request: Request) -> Response: + raise NotImplementedError() # pragma: nocover + + +class ConnectionInterface(RequestInterface): + def close(self) -> None: + raise NotImplementedError() # pragma: nocover + + def info(self) -> str: + raise NotImplementedError() # pragma: nocover + + def can_handle_request(self, origin: Origin) -> bool: + raise NotImplementedError() # pragma: nocover + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an + outgoing request. + + An HTTP/1.1 connection will only be available if it is currently idle. + + An HTTP/2 connection will be available so long as the stream ID space is + not yet exhausted, and the connection is not in an error state. 
+
+        While the connection is being established we may not yet know if it is going
+        to result in an HTTP/1.1 or HTTP/2 connection. The connection should be
+        treated as being available, but might ultimately raise `NewConnectionRequired`
+        exceptions if multiple requests are attempted over a connection
+        that ends up being established as HTTP/1.1.
+        """
+        raise NotImplementedError()  # pragma: nocover
+
+    def has_expired(self) -> bool:
+        """
+        Return `True` if the connection is in a state where it should be closed.
+
+        This either means that the connection is idle and it has passed the
+        expiry time on its keep-alive, or that the server has sent an EOF.
+        """
+        raise NotImplementedError()  # pragma: nocover
+
+    def is_idle(self) -> bool:
+        """
+        Return `True` if the connection is currently idle.
+        """
+        raise NotImplementedError()  # pragma: nocover
+
+    def is_closed(self) -> bool:
+        """
+        Return `True` if the connection has been closed.
+
+        Used when a response is closed to determine if the connection may be
+        returned to the connection pool or not.
+        """
+        raise NotImplementedError()  # pragma: nocover
diff --git a/venv/lib/python3.11/site-packages/httpcore/_sync/socks_proxy.py b/venv/lib/python3.11/site-packages/httpcore/_sync/socks_proxy.py
new file mode 100644
index 0000000..0ca96dd
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/httpcore/_sync/socks_proxy.py
@@ -0,0 +1,341 @@
+from __future__ import annotations
+
+import logging
+import ssl
+
+import socksio
+
+from .._backends.sync import SyncBackend
+from .._backends.base import NetworkBackend, NetworkStream
+from .._exceptions import ConnectionNotAvailable, ProxyError
+from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url
+from .._ssl import default_ssl_context
+from .._synchronization import Lock
+from .._trace import Trace
+from .connection_pool import ConnectionPool
+from .http11 import HTTP11Connection
+from .interfaces import ConnectionInterface
+
+logger = logging.getLogger("httpcore.socks")
+
+
+AUTH_METHODS = {
+    b"\x00": "NO AUTHENTICATION REQUIRED",
+    b"\x01": "GSSAPI",
+    b"\x02": "USERNAME/PASSWORD",
+    b"\xff": "NO ACCEPTABLE METHODS",
+}
+
+REPLY_CODES = {
+    b"\x00": "Succeeded",
+    b"\x01": "General SOCKS server failure",
+    b"\x02": "Connection not allowed by ruleset",
+    b"\x03": "Network unreachable",
+    b"\x04": "Host unreachable",
+    b"\x05": "Connection refused",
+    b"\x06": "TTL expired",
+    b"\x07": "Command not supported",
+    b"\x08": "Address type not supported",
+}
+
+
+def _init_socks5_connection(
+    stream: NetworkStream,
+    *,
+    host: bytes,
+    port: int,
+    auth: tuple[bytes, bytes] | None = None,
+) -> None:
+    conn = socksio.socks5.SOCKS5Connection()
+
+    # Auth method request
+    auth_method = (
+        socksio.socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED
+        if auth is None
+        else socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD
+    )
+    conn.send(socksio.socks5.SOCKS5AuthMethodsRequest([auth_method]))
+    outgoing_bytes = conn.data_to_send()
+    stream.write(outgoing_bytes)
+
+    # Auth method response
+    incoming_bytes = stream.read(max_bytes=4096)
+    response = conn.receive_data(incoming_bytes)
+    assert isinstance(response, socksio.socks5.SOCKS5AuthReply)
+    if response.method != auth_method:
+        requested = AUTH_METHODS.get(auth_method, "UNKNOWN")
+        responded = AUTH_METHODS.get(response.method, "UNKNOWN")
+        raise ProxyError(
+            f"Requested {requested} from proxy server, but got {responded}."
+        )
+
+    if response.method == socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD:
+        # Username/password request
+        assert auth is not None
+        username, password = auth
+        conn.send(socksio.socks5.SOCKS5UsernamePasswordRequest(username, password))
+        outgoing_bytes = conn.data_to_send()
+        stream.write(outgoing_bytes)
+
+        # Username/password response
+        incoming_bytes = stream.read(max_bytes=4096)
+        response = conn.receive_data(incoming_bytes)
+        assert isinstance(response, socksio.socks5.SOCKS5UsernamePasswordReply)
+        if not response.success:
+            raise ProxyError("Invalid username/password")
+
+    # Connect request
+    conn.send(
+        socksio.socks5.SOCKS5CommandRequest.from_address(
+            socksio.socks5.SOCKS5Command.CONNECT, (host, port)
+        )
+    )
+    outgoing_bytes = conn.data_to_send()
+    stream.write(outgoing_bytes)
+
+    # Connect response
+    incoming_bytes = stream.read(max_bytes=4096)
+    response = conn.receive_data(incoming_bytes)
+    assert isinstance(response, socksio.socks5.SOCKS5Reply)
+    if response.reply_code != socksio.socks5.SOCKS5ReplyCode.SUCCEEDED:
+        reply_code = REPLY_CODES.get(response.reply_code, "UNKNOWN")
+        raise ProxyError(f"Proxy Server could not connect: {reply_code}.")
+
+
+class SOCKSProxy(ConnectionPool):  # pragma: nocover
+    """
+    A connection pool that sends requests via a SOCKS proxy.
+    """
+
+    def __init__(
+        self,
+        proxy_url: URL | bytes | str,
+        proxy_auth: tuple[bytes | str, bytes | str] | None = None,
+        ssl_context: ssl.SSLContext | None = None,
+        max_connections: int | None = 10,
+        max_keepalive_connections: int | None = None,
+        keepalive_expiry: float | None = None,
+        http1: bool = True,
+        http2: bool = False,
+        retries: int = 0,
+        network_backend: NetworkBackend | None = None,
+    ) -> None:
+        """
+        A connection pool for making HTTP requests.
+
+        Parameters:
+            proxy_url: The URL to use when connecting to the proxy server.
+                For example `"http://127.0.0.1:8080/"`.
+            proxy_auth: Any proxy authentication as a two-tuple of
+                (username, password). May be either bytes or ascii-only str.
+            ssl_context: An SSL context to use for verifying connections.
+                If not specified, the default `httpcore.default_ssl_context()`
+                will be used.
+            max_connections: The maximum number of concurrent HTTP connections that
+                the pool should allow. Any attempt to send a request on a pool that
+                would exceed this amount will block until a connection is available.
+            max_keepalive_connections: The maximum number of idle HTTP connections
+                that will be maintained in the pool.
+            keepalive_expiry: The duration in seconds that an idle HTTP connection
+                may be maintained for before being expired from the pool.
+            http1: A boolean indicating if HTTP/1.1 requests should be supported
+                by the connection pool. Defaults to True.
+            http2: A boolean indicating if HTTP/2 requests should be supported by
+                the connection pool. Defaults to False.
+            retries: The maximum number of retries when trying to establish
+                a connection.
+            network_backend: A backend instance to use for handling network I/O.
+ """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + ) + self._ssl_context = ssl_context + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if proxy_auth is not None: + username, password = proxy_auth + username_bytes = enforce_bytes(username, name="proxy_auth") + password_bytes = enforce_bytes(password, name="proxy_auth") + self._proxy_auth: tuple[bytes, bytes] | None = ( + username_bytes, + password_bytes, + ) + else: + self._proxy_auth = None + + def create_connection(self, origin: Origin) -> ConnectionInterface: + return Socks5Connection( + proxy_origin=self._proxy_url.origin, + remote_origin=origin, + proxy_auth=self._proxy_auth, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class Socks5Connection(ConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_auth: tuple[bytes, bytes] | None = None, + ssl_context: ssl.SSLContext | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + network_backend: NetworkBackend | None = None, + ) -> None: + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._proxy_auth = proxy_auth + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + + self._network_backend: NetworkBackend = ( + SyncBackend() if network_backend is None else network_backend + ) + self._connect_lock = Lock() + self._connection: ConnectionInterface | None = None + self._connect_failed = False + + def handle_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + with self._connect_lock: + if self._connection is None: + try: + # Connect to the proxy + kwargs = { + "host": self._proxy_origin.host.decode("ascii"), + "port": self._proxy_origin.port, + "timeout": timeout, + } + with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + + # Connect to the remote host using socks5 + kwargs = { + "stream": stream, + "host": self._remote_origin.host.decode("ascii"), + "port": self._remote_origin.port, + "auth": self._proxy_auth, + } + with Trace( + "setup_socks5_connection", logger, request, kwargs + ) as trace: + _init_socks5_connection(**kwargs) + trace.return_value = stream + + # Upgrade the stream to SSL + if self._remote_origin.scheme == b"https": + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ( + ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ) + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + with Trace("start_tls", logger, request, kwargs) as trace: + stream = stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and 
ssl_object.selected_alpn_protocol() == "h2"
+                    )
+
+                    # Create the HTTP/1.1 or HTTP/2 connection
+                    if http2_negotiated or (
+                        self._http2 and not self._http1
+                    ):  # pragma: nocover
+                        from .http2 import HTTP2Connection
+
+                        self._connection = HTTP2Connection(
+                            origin=self._remote_origin,
+                            stream=stream,
+                            keepalive_expiry=self._keepalive_expiry,
+                        )
+                    else:
+                        self._connection = HTTP11Connection(
+                            origin=self._remote_origin,
+                            stream=stream,
+                            keepalive_expiry=self._keepalive_expiry,
+                        )
+                except Exception as exc:
+                    self._connect_failed = True
+                    raise exc
+            elif not self._connection.is_available():  # pragma: nocover
+                raise ConnectionNotAvailable()
+
+        return self._connection.handle_request(request)
+
+    def can_handle_request(self, origin: Origin) -> bool:
+        return origin == self._remote_origin
+
+    def close(self) -> None:
+        if self._connection is not None:
+            self._connection.close()
+
+    def is_available(self) -> bool:
+        if self._connection is None:  # pragma: nocover
+            # If HTTP/2 support is enabled, and the resulting connection could
+            # end up as HTTP/2 then we should indicate the connection as being
+            # available to service multiple requests.
+            return (
+                self._http2
+                and (self._remote_origin.scheme == b"https" or not self._http1)
+                and not self._connect_failed
+            )
+        return self._connection.is_available()
+
+    def has_expired(self) -> bool:
+        if self._connection is None:  # pragma: nocover
+            return self._connect_failed
+        return self._connection.has_expired()
+
+    def is_idle(self) -> bool:
+        if self._connection is None:  # pragma: nocover
+            return self._connect_failed
+        return self._connection.is_idle()
+
+    def is_closed(self) -> bool:
+        if self._connection is None:  # pragma: nocover
+            return self._connect_failed
+        return self._connection.is_closed()
+
+    def info(self) -> str:
+        if self._connection is None:  # pragma: nocover
+            return "CONNECTION FAILED" if self._connect_failed else "CONNECTING"
+        return self._connection.info()
+
+    def __repr__(self) -> str:
+        return f"<{self.__class__.__name__} [{self.info()}]>"
diff --git a/venv/lib/python3.11/site-packages/httpcore/_synchronization.py b/venv/lib/python3.11/site-packages/httpcore/_synchronization.py
new file mode 100644
index 0000000..2ecc9e9
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/httpcore/_synchronization.py
@@ -0,0 +1,318 @@
+from __future__ import annotations
+
+import threading
+import types
+
+from ._exceptions import ExceptionMapping, PoolTimeout, map_exceptions
+
+# Our async synchronization primitives use either 'anyio' or 'trio' depending
+# on whether they're running under asyncio or trio.
+
+try:
+    import trio
+except (ImportError, NotImplementedError):  # pragma: nocover
+    trio = None  # type: ignore
+
+try:
+    import anyio
+except ImportError:  # pragma: nocover
+    anyio = None  # type: ignore
+
+
+def current_async_library() -> str:
+    # Determine if we're running under trio or asyncio.
+    # See https://sniffio.readthedocs.io/en/latest/
+    try:
+        import sniffio
+    except ImportError:  # pragma: nocover
+        environment = "asyncio"
+    else:
+        environment = sniffio.current_async_library()
+
+    if environment not in ("asyncio", "trio"):  # pragma: nocover
+        raise RuntimeError("Running under an unsupported async environment.")
+
+    if environment == "asyncio" and anyio is None:  # pragma: nocover
+        raise RuntimeError(
+            "Running with asyncio requires installation of 'httpcore[asyncio]'."
+        )
+
+    if environment == "trio" and trio is None:  # pragma: nocover
+        raise RuntimeError(
+            "Running with trio requires installation of 'httpcore[trio]'."
+        )
+
+    return environment
+
+
+class AsyncLock:
+    """
+    This is a standard lock.
+
+    In the sync case `Lock` provides thread locking.
+    In the async case `AsyncLock` provides async locking.
+    """
+
+    def __init__(self) -> None:
+        self._backend = ""
+
+    def setup(self) -> None:
+        """
+        Detect if we're running under 'asyncio' or 'trio' and create
+        a lock with the correct implementation.
+        """
+        self._backend = current_async_library()
+        if self._backend == "trio":
+            self._trio_lock = trio.Lock()
+        elif self._backend == "asyncio":
+            self._anyio_lock = anyio.Lock()
+
+    async def __aenter__(self) -> AsyncLock:
+        if not self._backend:
+            self.setup()
+
+        if self._backend == "trio":
+            await self._trio_lock.acquire()
+        elif self._backend == "asyncio":
+            await self._anyio_lock.acquire()
+
+        return self
+
+    async def __aexit__(
+        self,
+        exc_type: type[BaseException] | None = None,
+        exc_value: BaseException | None = None,
+        traceback: types.TracebackType | None = None,
+    ) -> None:
+        if self._backend == "trio":
+            self._trio_lock.release()
+        elif self._backend == "asyncio":
+            self._anyio_lock.release()
+
+
+class AsyncThreadLock:
+    """
+    This is a threading-only lock for no-I/O contexts.
+
+    In the sync case `ThreadLock` provides thread locking.
+    In the async case `AsyncThreadLock` is a no-op.
+    """
+
+    def __enter__(self) -> AsyncThreadLock:
+        return self
+
+    def __exit__(
+        self,
+        exc_type: type[BaseException] | None = None,
+        exc_value: BaseException | None = None,
+        traceback: types.TracebackType | None = None,
+    ) -> None:
+        pass
+
+
+class AsyncEvent:
+    def __init__(self) -> None:
+        self._backend = ""
+
+    def setup(self) -> None:
+        """
+        Detect if we're running under 'asyncio' or 'trio' and create
+        an event with the correct implementation.
+        """
+        self._backend = current_async_library()
+        if self._backend == "trio":
+            self._trio_event = trio.Event()
+        elif self._backend == "asyncio":
+            self._anyio_event = anyio.Event()
+
+    def set(self) -> None:
+        if not self._backend:
+            self.setup()
+
+        if self._backend == "trio":
+            self._trio_event.set()
+        elif self._backend == "asyncio":
+            self._anyio_event.set()
+
+    async def wait(self, timeout: float | None = None) -> None:
+        if not self._backend:
+            self.setup()
+
+        if self._backend == "trio":
+            trio_exc_map: ExceptionMapping = {trio.TooSlowError: PoolTimeout}
+            timeout_or_inf = float("inf") if timeout is None else timeout
+            with map_exceptions(trio_exc_map):
+                with trio.fail_after(timeout_or_inf):
+                    await self._trio_event.wait()
+        elif self._backend == "asyncio":
+            anyio_exc_map: ExceptionMapping = {TimeoutError: PoolTimeout}
+            with map_exceptions(anyio_exc_map):
+                with anyio.fail_after(timeout):
+                    await self._anyio_event.wait()
+
+
+class AsyncSemaphore:
+    def __init__(self, bound: int) -> None:
+        self._bound = bound
+        self._backend = ""
+
+    def setup(self) -> None:
+        """
+        Detect if we're running under 'asyncio' or 'trio' and create
+        a semaphore with the correct implementation.
+ """ + self._backend = current_async_library() + if self._backend == "trio": + self._trio_semaphore = trio.Semaphore( + initial_value=self._bound, max_value=self._bound + ) + elif self._backend == "asyncio": + self._anyio_semaphore = anyio.Semaphore( + initial_value=self._bound, max_value=self._bound + ) + + async def acquire(self) -> None: + if not self._backend: + self.setup() + + if self._backend == "trio": + await self._trio_semaphore.acquire() + elif self._backend == "asyncio": + await self._anyio_semaphore.acquire() + + async def release(self) -> None: + if self._backend == "trio": + self._trio_semaphore.release() + elif self._backend == "asyncio": + self._anyio_semaphore.release() + + +class AsyncShieldCancellation: + # For certain portions of our codebase where we're dealing with + # closing connections during exception handling we want to shield + # the operation from being cancelled. + # + # with AsyncShieldCancellation(): + # ... # clean-up operations, shielded from cancellation. + + def __init__(self) -> None: + """ + Detect if we're running under 'asyncio' or 'trio' and create + a shielded scope with the correct implementation. + """ + self._backend = current_async_library() + + if self._backend == "trio": + self._trio_shield = trio.CancelScope(shield=True) + elif self._backend == "asyncio": + self._anyio_shield = anyio.CancelScope(shield=True) + + def __enter__(self) -> AsyncShieldCancellation: + if self._backend == "trio": + self._trio_shield.__enter__() + elif self._backend == "asyncio": + self._anyio_shield.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + if self._backend == "trio": + self._trio_shield.__exit__(exc_type, exc_value, traceback) + elif self._backend == "asyncio": + self._anyio_shield.__exit__(exc_type, exc_value, traceback) + + +# Our thread-based synchronization primitives... + + +class Lock: + """ + This is a standard lock. + + In the sync case `Lock` provides thread locking. + In the async case `AsyncLock` provides async locking. + """ + + def __init__(self) -> None: + self._lock = threading.Lock() + + def __enter__(self) -> Lock: + self._lock.acquire() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + self._lock.release() + + +class ThreadLock: + """ + This is a threading-only lock for no-I/O contexts. + + In the sync case `ThreadLock` provides thread locking. + In the async case `AsyncThreadLock` is a no-op. 
+ """ + + def __init__(self) -> None: + self._lock = threading.Lock() + + def __enter__(self) -> ThreadLock: + self._lock.acquire() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + self._lock.release() + + +class Event: + def __init__(self) -> None: + self._event = threading.Event() + + def set(self) -> None: + self._event.set() + + def wait(self, timeout: float | None = None) -> None: + if timeout == float("inf"): # pragma: no cover + timeout = None + if not self._event.wait(timeout=timeout): + raise PoolTimeout() # pragma: nocover + + +class Semaphore: + def __init__(self, bound: int) -> None: + self._semaphore = threading.Semaphore(value=bound) + + def acquire(self) -> None: + self._semaphore.acquire() + + def release(self) -> None: + self._semaphore.release() + + +class ShieldCancellation: + # Thread-synchronous codebases don't support cancellation semantics. + # We have this class because we need to mirror the async and sync + # cases within our package, but it's just a no-op. + def __enter__(self) -> ShieldCancellation: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + pass diff --git a/venv/lib/python3.11/site-packages/httpcore/_trace.py b/venv/lib/python3.11/site-packages/httpcore/_trace.py new file mode 100644 index 0000000..5f1cd7c --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpcore/_trace.py @@ -0,0 +1,107 @@ +from __future__ import annotations + +import inspect +import logging +import types +import typing + +from ._models import Request + + +class Trace: + def __init__( + self, + name: str, + logger: logging.Logger, + request: Request | None = None, + kwargs: dict[str, typing.Any] | None = None, + ) -> None: + self.name = name + self.logger = logger + self.trace_extension = ( + None if request is None else request.extensions.get("trace") + ) + self.debug = self.logger.isEnabledFor(logging.DEBUG) + self.kwargs = kwargs or {} + self.return_value: typing.Any = None + self.should_trace = self.debug or self.trace_extension is not None + self.prefix = self.logger.name.split(".")[-1] + + def trace(self, name: str, info: dict[str, typing.Any]) -> None: + if self.trace_extension is not None: + prefix_and_name = f"{self.prefix}.{name}" + ret = self.trace_extension(prefix_and_name, info) + if inspect.iscoroutine(ret): # pragma: no cover + raise TypeError( + "If you are using a synchronous interface, " + "the callback of the `trace` extension should " + "be a normal function instead of an asynchronous function." 
+            )
+
+        if self.debug:
+            if not info or "return_value" in info and info["return_value"] is None:
+                message = name
+            else:
+                args = " ".join([f"{key}={value!r}" for key, value in info.items()])
+                message = f"{name} {args}"
+            self.logger.debug(message)
+
+    def __enter__(self) -> Trace:
+        if self.should_trace:
+            info = self.kwargs
+            self.trace(f"{self.name}.started", info)
+        return self
+
+    def __exit__(
+        self,
+        exc_type: type[BaseException] | None = None,
+        exc_value: BaseException | None = None,
+        traceback: types.TracebackType | None = None,
+    ) -> None:
+        if self.should_trace:
+            if exc_value is None:
+                info = {"return_value": self.return_value}
+                self.trace(f"{self.name}.complete", info)
+            else:
+                info = {"exception": exc_value}
+                self.trace(f"{self.name}.failed", info)
+
+    async def atrace(self, name: str, info: dict[str, typing.Any]) -> None:
+        if self.trace_extension is not None:
+            prefix_and_name = f"{self.prefix}.{name}"
+            coro = self.trace_extension(prefix_and_name, info)
+            if not inspect.iscoroutine(coro):  # pragma: no cover
+                raise TypeError(
+                    "If you're using an asynchronous interface, "
+                    "the callback of the `trace` extension should "
+                    "be an asynchronous function rather than a normal function."
+                )
+            await coro
+
+        if self.debug:
+            if not info or "return_value" in info and info["return_value"] is None:
+                message = name
+            else:
+                args = " ".join([f"{key}={value!r}" for key, value in info.items()])
+                message = f"{name} {args}"
+            self.logger.debug(message)
+
+    async def __aenter__(self) -> Trace:
+        if self.should_trace:
+            info = self.kwargs
+            await self.atrace(f"{self.name}.started", info)
+        return self
+
+    async def __aexit__(
+        self,
+        exc_type: type[BaseException] | None = None,
+        exc_value: BaseException | None = None,
+        traceback: types.TracebackType | None = None,
+    ) -> None:
+        if self.should_trace:
+            if exc_value is None:
+                info = {"return_value": self.return_value}
+                await self.atrace(f"{self.name}.complete", info)
+            else:
+                info = {"exception": exc_value}
+                await self.atrace(f"{self.name}.failed", info)
diff --git a/venv/lib/python3.11/site-packages/httpcore/_utils.py b/venv/lib/python3.11/site-packages/httpcore/_utils.py
new file mode 100644
index 0000000..c44ff93
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/httpcore/_utils.py
@@ -0,0 +1,37 @@
+from __future__ import annotations
+
+import select
+import socket
+import sys
+
+
+def is_socket_readable(sock: socket.socket | None) -> bool:
+    """
+    Return whether a socket, as identified by its file descriptor, is readable.
+    "A socket is readable" means that the read buffer isn't empty, i.e. that calling
+    .recv() on it would immediately return some data.
+    """
+    # NOTE: we want to check for readability without actually attempting to read,
+    # because we don't want to block forever if it's not readable.
+
+    # In the case that the socket no longer exists, or cannot return a file
+    # descriptor, we treat it as being readable, as if the next read operation
+    # on it is ready to return the terminating `b""`.
+    sock_fd = None if sock is None else sock.fileno()
+    if sock_fd is None or sock_fd < 0:  # pragma: nocover
+        return True
+
+    # The implementation below was stolen from:
+    # https://github.com/python-trio/trio/blob/20ee2b1b7376db637435d80e266212a35837ddcc/trio/_socket.py#L471-L478
+    # See also: https://github.com/encode/httpcore/pull/193#issuecomment-703129316
+
+    # Use select.select on Windows and when poll is unavailable, and select.poll
+    # everywhere else. (E.g. When eventlet is in use.
See #327) + if ( + sys.platform == "win32" or getattr(select, "poll", None) is None + ): # pragma: nocover + rready, _, _ = select.select([sock_fd], [], [], 0) + return bool(rready) + p = select.poll() + p.register(sock_fd, select.POLLIN) + return bool(p.poll(0)) diff --git a/venv/lib/python3.11/site-packages/httpcore/py.typed b/venv/lib/python3.11/site-packages/httpcore/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/METADATA b/venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/METADATA new file mode 100644 index 0000000..b0d2b19 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/METADATA @@ -0,0 +1,203 @@ +Metadata-Version: 2.3 +Name: httpx +Version: 0.28.1 +Summary: The next generation HTTP client. +Project-URL: Changelog, https://github.com/encode/httpx/blob/master/CHANGELOG.md +Project-URL: Documentation, https://www.python-httpx.org +Project-URL: Homepage, https://github.com/encode/httpx +Project-URL: Source, https://github.com/encode/httpx +Author-email: Tom Christie +License: BSD-3-Clause +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Web Environment +Classifier: Framework :: AsyncIO +Classifier: Framework :: Trio +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Internet :: WWW/HTTP +Requires-Python: >=3.8 +Requires-Dist: anyio +Requires-Dist: certifi +Requires-Dist: httpcore==1.* +Requires-Dist: idna +Provides-Extra: brotli +Requires-Dist: brotli; (platform_python_implementation == 'CPython') and extra == 'brotli' +Requires-Dist: brotlicffi; (platform_python_implementation != 'CPython') and extra == 'brotli' +Provides-Extra: cli +Requires-Dist: click==8.*; extra == 'cli' +Requires-Dist: pygments==2.*; extra == 'cli' +Requires-Dist: rich<14,>=10; extra == 'cli' +Provides-Extra: http2 +Requires-Dist: h2<5,>=3; extra == 'http2' +Provides-Extra: socks +Requires-Dist: socksio==1.*; extra == 'socks' +Provides-Extra: zstd +Requires-Dist: zstandard>=0.18.0; extra == 'zstd' +Description-Content-Type: text/markdown + +

+[Logo: HTTPX]
+
+HTTPX - A next-generation HTTP client for Python.
+
+[Badges: Test Suite | Package version]
+

+    $ httpx --help
+
+Sending a request...
+

+    $ httpx http://httpbin.org/json
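+
+The same request can be made from Python with the `httpx.get` call shown in the
+quickstart above (a minimal sketch; the `200` assumes `httpbin.org` responds
+normally):
+
+```pycon
+>>> import httpx
+>>> r = httpx.get('http://httpbin.org/json')
+>>> r.status_code
+200
+```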

+
+## Features
+
+HTTPX builds on the well-established usability of `requests`, and gives you:
+
+* A broadly [requests-compatible API](https://www.python-httpx.org/compatibility/).
+* An integrated command-line client.
+* HTTP/1.1 [and HTTP/2 support](https://www.python-httpx.org/http2/).
+* Standard synchronous interface, but with [async support if you need it](https://www.python-httpx.org/async/) (see the sketch after the Dependencies section).
+* Ability to make requests directly to [WSGI applications](https://www.python-httpx.org/advanced/transports/#wsgi-transport) or [ASGI applications](https://www.python-httpx.org/advanced/transports/#asgi-transport).
+* Strict timeouts everywhere.
+* Fully type annotated.
+* 100% test coverage.
+
+Plus all the standard features of `requests`...
+
+* International Domains and URLs
+* Keep-Alive & Connection Pooling
+* Sessions with Cookie Persistence
+* Browser-style SSL Verification
+* Basic/Digest Authentication
+* Elegant Key/Value Cookies
+* Automatic Decompression
+* Automatic Content Decoding
+* Unicode Response Bodies
+* Multipart File Uploads
+* HTTP(S) Proxy Support
+* Connection Timeouts
+* Streaming Downloads
+* .netrc Support
+* Chunked Requests
+
+## Installation
+
+Install with pip:
+
+```shell
+$ pip install httpx
+```
+
+Or, to include the optional HTTP/2 support, use:
+
+```shell
+$ pip install httpx[http2]
+```
+
+HTTPX requires Python 3.8+.
+
+## Documentation
+
+Project documentation is available at [https://www.python-httpx.org/](https://www.python-httpx.org/).
+
+For a run-through of all the basics, head over to the [QuickStart](https://www.python-httpx.org/quickstart/).
+
+For more advanced topics, see the [Advanced Usage](https://www.python-httpx.org/advanced/) section, the [async support](https://www.python-httpx.org/async/) section, or the [HTTP/2](https://www.python-httpx.org/http2/) section.
+
+The [Developer Interface](https://www.python-httpx.org/api/) provides a comprehensive API reference.
+
+To find out about tools that integrate with HTTPX, see [Third Party Packages](https://www.python-httpx.org/third_party_packages/).
+
+## Contribute
+
+If you want to contribute to HTTPX, check out the [Contributing Guide](https://www.python-httpx.org/contributing/) to learn how to start.
+
+## Dependencies
+
+The HTTPX project relies on these excellent libraries:
+
+* `httpcore` - The underlying transport implementation for `httpx`.
+    * `h11` - HTTP/1.1 support.
+* `certifi` - SSL certificates.
+* `idna` - Internationalized domain name support.
+* `sniffio` - Async library autodetection.
+
+As well as these optional installs:
+
+* `h2` - HTTP/2 support. *(Optional, with `httpx[http2]`)*
+* `socksio` - SOCKS proxy support. *(Optional, with `httpx[socks]`)*
+* `rich` - Rich terminal support. *(Optional, with `httpx[cli]`)*
+* `click` - Command line client support. *(Optional, with `httpx[cli]`)*
+* `brotli` or `brotlicffi` - Decoding for "brotli" compressed responses. *(Optional, with `httpx[brotli]`)*
+* `zstandard` - Decoding for "zstd" compressed responses. *(Optional, with `httpx[zstd]`)*
+
+A huge amount of credit is due to `requests` for the API layout that
+much of this work follows, as well as to `urllib3` for plenty of design
+inspiration around the lower-level networking details.
+
+---
+
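+As a quick illustration of the async support mentioned in the feature list
+above (a minimal sketch; `AsyncClient` is part of the public API re-exported
+in `httpx/__init__.py` further below):
+
+```python
+import asyncio
+
+import httpx
+
+
+async def main() -> None:
+    # AsyncClient mirrors the synchronous Client API.
+    async with httpx.AsyncClient() as client:
+        r = await client.get("https://www.example.org/")
+        print(r.status_code)
+
+
+asyncio.run(main())
+```
+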

+*HTTPX is BSD licensed code. Designed & crafted with care.*
+
+— 🦋 —
+
+ +## Release Information + +### Fixed + +* Reintroduced supposedly-private `URLTypes` shortcut. (#2673) + + +--- + +[Full changelog](https://github.com/encode/httpx/blob/master/CHANGELOG.md) diff --git a/venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/RECORD b/venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/RECORD new file mode 100644 index 0000000..b43810f --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/RECORD @@ -0,0 +1,54 @@ +../../../bin/httpx,sha256=csIxQAvwxJq_FcPaz4sC99T4bVkf2zYNfDFrYqJ340s,230 +httpx-0.28.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +httpx-0.28.1.dist-info/METADATA,sha256=_rubD48-gNV8gZnDBPNcQzboWB0dGNeYPJJ2a4J5OyU,7052 +httpx-0.28.1.dist-info/RECORD,, +httpx-0.28.1.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87 +httpx-0.28.1.dist-info/entry_points.txt,sha256=2lVkdQmxLA1pNMgSN2eV89o90HCZezhmNwsy6ryKDSA,37 +httpx-0.28.1.dist-info/licenses/LICENSE.md,sha256=TsWdVE8StfU5o6cW_TIaxYzNgDC0ZSIfLIgCAM3yjY0,1508 +httpx/__init__.py,sha256=CsaZe6yZj0rHg6322AWKWHGTMVr9txgEfD5P3_Rrz60,2171 +httpx/__pycache__/__init__.cpython-311.pyc,, +httpx/__pycache__/__version__.cpython-311.pyc,, +httpx/__pycache__/_api.cpython-311.pyc,, +httpx/__pycache__/_auth.cpython-311.pyc,, +httpx/__pycache__/_client.cpython-311.pyc,, +httpx/__pycache__/_config.cpython-311.pyc,, +httpx/__pycache__/_content.cpython-311.pyc,, +httpx/__pycache__/_decoders.cpython-311.pyc,, +httpx/__pycache__/_exceptions.cpython-311.pyc,, +httpx/__pycache__/_main.cpython-311.pyc,, +httpx/__pycache__/_models.cpython-311.pyc,, +httpx/__pycache__/_multipart.cpython-311.pyc,, +httpx/__pycache__/_status_codes.cpython-311.pyc,, +httpx/__pycache__/_types.cpython-311.pyc,, +httpx/__pycache__/_urlparse.cpython-311.pyc,, +httpx/__pycache__/_urls.cpython-311.pyc,, +httpx/__pycache__/_utils.cpython-311.pyc,, +httpx/__version__.py,sha256=LoUyYeOXTieGzuP_64UL0wxdtxjuu_QbOvE7NOg-IqU,108 +httpx/_api.py,sha256=r_Zgs4jIpcPJLqK5dbbSayqo_iVMKFaxZCd-oOHxLEs,11743 +httpx/_auth.py,sha256=Yr3QwaUSK17rGYx-7j-FdicFIzz4Y9FFV-1F4-7RXX4,11891 +httpx/_client.py,sha256=xD-UG67-WMkeltAAOeGGj-cZ2RRTAm19sWRxlFY7_40,65714 +httpx/_config.py,sha256=pPp2U-wicfcKsF-KYRE1LYdt3e6ERGeIoXZ8Gjo3LWc,8547 +httpx/_content.py,sha256=LGGzrJTR3OvN4Mb1GVVNLXkXJH-6oKlwAttO9p5w_yg,8161 +httpx/_decoders.py,sha256=p0dX8I0NEHexs3UGp4SsZutiMhsXrrWl6-GnqVb0iKM,12041 +httpx/_exceptions.py,sha256=bxW7fxzgVMAdNTbwT0Vnq04gJDW1_gI_GFiQPuMyjL0,8527 +httpx/_main.py,sha256=Cg9GMabiTT_swaDfUgIRitSwxLRMSwUDOm7LdSGqlA4,15626 +httpx/_models.py,sha256=4__Guyv1gLxuZChwim8kfQNiIOcJ9acreFOSurvZfms,44700 +httpx/_multipart.py,sha256=KOHEZZl6oohg9mPaKyyu345qq1rJLg35TUG3YAzXB3Y,9843 +httpx/_status_codes.py,sha256=DYn-2ufBgMeXy5s8x3_TB7wjAuAAMewTakPrm5rXEsc,5639 +httpx/_transports/__init__.py,sha256=GbUoBSAOp7z-l-9j5YhMhR3DMIcn6FVLhj072O3Nnno,275 +httpx/_transports/__pycache__/__init__.cpython-311.pyc,, +httpx/_transports/__pycache__/asgi.cpython-311.pyc,, +httpx/_transports/__pycache__/base.cpython-311.pyc,, +httpx/_transports/__pycache__/default.cpython-311.pyc,, +httpx/_transports/__pycache__/mock.cpython-311.pyc,, +httpx/_transports/__pycache__/wsgi.cpython-311.pyc,, +httpx/_transports/asgi.py,sha256=HRfiDYMPt4wQH2gFgHZg4c-i3sblo6bL5GTqcET-xz8,5501 +httpx/_transports/base.py,sha256=kZS_VMbViYfF570pogUCJ1bulz-ybfL51Pqs9yktebU,2523 +httpx/_transports/default.py,sha256=AzeaRUyVwCccTyyNJexDf0n1dFfzzydpdIQgvw7PLnk,13983 +httpx/_transports/mock.py,sha256=PTo0d567RITXxGrki6kN7_67wwAxfwiMDcuXJiZCjEo,1232 
+httpx/_transports/wsgi.py,sha256=NcPX3Xap_EwCFZWO_OaSyQNuInCYx1QMNbO8GAei6jY,4825 +httpx/_types.py,sha256=Jyh41GQq7AOev8IOWKDAg7zCbvHAfufmW5g_PiTtErY,2965 +httpx/_urlparse.py,sha256=ZAmH47ONfkxrrj-PPYhGeiHjb6AjKCS-ANWIN4OL_KY,18546 +httpx/_urls.py,sha256=dX99VR1DSOHpgo9Aq7PzYO4FKdxqKjwyNp8grf8dHN0,21550 +httpx/_utils.py,sha256=_TVeqAKvxJkKHdz7dFeb4s0LZqQXgeFkXSgfiHBK_1o,8285 +httpx/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/WHEEL b/venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/WHEEL new file mode 100644 index 0000000..21aaa72 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.26.3 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/entry_points.txt b/venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/entry_points.txt new file mode 100644 index 0000000..8ae9600 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +httpx = httpx:main diff --git a/venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/licenses/LICENSE.md b/venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/licenses/LICENSE.md new file mode 100644 index 0000000..ab79d16 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/licenses/LICENSE.md @@ -0,0 +1,12 @@ +Copyright © 2019, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
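The RECORD file above pairs each installed path with a `sha256=` digest and a size in bytes; per the wheel RECORD convention the digest is the urlsafe-base64 encoding of the SHA-256 hash with the trailing `=` padding stripped, and entries such as `RECORD` itself and the `__pycache__/*.pyc` files carry no hash. A minimal verification sketch, assuming the venv layout used in this diff (the `site` path below is illustrative):

```
import base64
import csv
import hashlib
from pathlib import Path

def record_digest(path: Path) -> str:
    # RECORD-style digest: urlsafe base64 of the SHA-256 digest, '=' padding stripped.
    raw = hashlib.sha256(path.read_bytes()).digest()
    return base64.urlsafe_b64encode(raw).rstrip(b"=").decode("ascii")

site = Path("venv/lib/python3.11/site-packages")  # adjust to the actual venv root
record = site / "httpx-0.28.1.dist-info" / "RECORD"

with record.open(newline="") as f:
    for name, hash_spec, _size in csv.reader(f):
        if not hash_spec:  # RECORD itself and compiled .pyc files are unhashed
            continue
        algo, _, expected = hash_spec.partition("=")
        assert algo == "sha256", name
        assert record_digest(site / name) == expected, f"digest mismatch: {name}"
```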
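`entry_points.txt` is what wires the `httpx` console script (the `bin/httpx` launcher listed in RECORD) to `httpx:main`, i.e. the `main` callable exported by `httpx/__init__.py` in the next hunk, which falls back to an error stub when the optional `httpx[cli]` dependencies are missing. A short sketch of resolving that mapping with the standard library, assuming the package is installed in the active environment:

```
from importlib.metadata import entry_points

# Look up the console script declared in httpx's entry_points.txt.
(script,) = entry_points(group="console_scripts", name="httpx")
print(script.value)   # -> "httpx:main"

main = script.load()  # same callable as `from httpx import main`
```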
diff --git a/venv/lib/python3.11/site-packages/httpx/__init__.py b/venv/lib/python3.11/site-packages/httpx/__init__.py new file mode 100644 index 0000000..e9addde --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx/__init__.py @@ -0,0 +1,105 @@ +from .__version__ import __description__, __title__, __version__ +from ._api import * +from ._auth import * +from ._client import * +from ._config import * +from ._content import * +from ._exceptions import * +from ._models import * +from ._status_codes import * +from ._transports import * +from ._types import * +from ._urls import * + +try: + from ._main import main +except ImportError: # pragma: no cover + + def main() -> None: # type: ignore + import sys + + print( + "The httpx command line client could not run because the required " + "dependencies were not installed.\nMake sure you've installed " + "everything with: pip install 'httpx[cli]'" + ) + sys.exit(1) + + +__all__ = [ + "__description__", + "__title__", + "__version__", + "ASGITransport", + "AsyncBaseTransport", + "AsyncByteStream", + "AsyncClient", + "AsyncHTTPTransport", + "Auth", + "BaseTransport", + "BasicAuth", + "ByteStream", + "Client", + "CloseError", + "codes", + "ConnectError", + "ConnectTimeout", + "CookieConflict", + "Cookies", + "create_ssl_context", + "DecodingError", + "delete", + "DigestAuth", + "get", + "head", + "Headers", + "HTTPError", + "HTTPStatusError", + "HTTPTransport", + "InvalidURL", + "Limits", + "LocalProtocolError", + "main", + "MockTransport", + "NetRCAuth", + "NetworkError", + "options", + "patch", + "PoolTimeout", + "post", + "ProtocolError", + "Proxy", + "ProxyError", + "put", + "QueryParams", + "ReadError", + "ReadTimeout", + "RemoteProtocolError", + "request", + "Request", + "RequestError", + "RequestNotRead", + "Response", + "ResponseNotRead", + "stream", + "StreamClosed", + "StreamConsumed", + "StreamError", + "SyncByteStream", + "Timeout", + "TimeoutException", + "TooManyRedirects", + "TransportError", + "UnsupportedProtocol", + "URL", + "USE_CLIENT_DEFAULT", + "WriteError", + "WriteTimeout", + "WSGITransport", +] + + +__locals = locals() +for __name in __all__: + if not __name.startswith("__"): + setattr(__locals[__name], "__module__", "httpx") # noqa diff --git a/venv/lib/python3.11/site-packages/httpx/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpx/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..cf17629 Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpx/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpx/__pycache__/__version__.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpx/__pycache__/__version__.cpython-311.pyc new file mode 100644 index 0000000..08533a3 Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpx/__pycache__/__version__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpx/__pycache__/_api.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpx/__pycache__/_api.cpython-311.pyc new file mode 100644 index 0000000..a70ec09 Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpx/__pycache__/_api.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpx/__pycache__/_auth.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpx/__pycache__/_auth.cpython-311.pyc new file mode 100644 index 0000000..f9f493c Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpx/__pycache__/_auth.cpython-311.pyc differ diff --git 
a/venv/lib/python3.11/site-packages/httpx/__pycache__/_client.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpx/__pycache__/_client.cpython-311.pyc new file mode 100644 index 0000000..82bb506 Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpx/__pycache__/_client.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpx/__pycache__/_config.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpx/__pycache__/_config.cpython-311.pyc new file mode 100644 index 0000000..2a9c615 Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpx/__pycache__/_config.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpx/__pycache__/_content.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpx/__pycache__/_content.cpython-311.pyc new file mode 100644 index 0000000..69b2824 Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpx/__pycache__/_content.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpx/__pycache__/_decoders.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpx/__pycache__/_decoders.cpython-311.pyc new file mode 100644 index 0000000..5c120ad Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpx/__pycache__/_decoders.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpx/__pycache__/_exceptions.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpx/__pycache__/_exceptions.cpython-311.pyc new file mode 100644 index 0000000..7a3a91a Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpx/__pycache__/_exceptions.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpx/__pycache__/_main.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpx/__pycache__/_main.cpython-311.pyc new file mode 100644 index 0000000..732631c Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpx/__pycache__/_main.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpx/__pycache__/_models.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpx/__pycache__/_models.cpython-311.pyc new file mode 100644 index 0000000..ca8ef52 Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpx/__pycache__/_models.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpx/__pycache__/_multipart.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpx/__pycache__/_multipart.cpython-311.pyc new file mode 100644 index 0000000..9756718 Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpx/__pycache__/_multipart.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpx/__pycache__/_status_codes.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpx/__pycache__/_status_codes.cpython-311.pyc new file mode 100644 index 0000000..87855ce Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpx/__pycache__/_status_codes.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpx/__pycache__/_types.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpx/__pycache__/_types.cpython-311.pyc new file mode 100644 index 0000000..8f6c900 Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpx/__pycache__/_types.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpx/__pycache__/_urlparse.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpx/__pycache__/_urlparse.cpython-311.pyc new file mode 100644 index 0000000..f9b62d6 Binary files /dev/null and 
b/venv/lib/python3.11/site-packages/httpx/__pycache__/_urlparse.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpx/__pycache__/_urls.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpx/__pycache__/_urls.cpython-311.pyc new file mode 100644 index 0000000..600f534 Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpx/__pycache__/_urls.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpx/__pycache__/_utils.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpx/__pycache__/_utils.cpython-311.pyc new file mode 100644 index 0000000..c63c934 Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpx/__pycache__/_utils.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpx/__version__.py b/venv/lib/python3.11/site-packages/httpx/__version__.py new file mode 100644 index 0000000..801bfac --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx/__version__.py @@ -0,0 +1,3 @@ +__title__ = "httpx" +__description__ = "A next generation HTTP client, for Python 3." +__version__ = "0.28.1" diff --git a/venv/lib/python3.11/site-packages/httpx/_api.py b/venv/lib/python3.11/site-packages/httpx/_api.py new file mode 100644 index 0000000..c3cda1e --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx/_api.py @@ -0,0 +1,438 @@ +from __future__ import annotations + +import typing +from contextlib import contextmanager + +from ._client import Client +from ._config import DEFAULT_TIMEOUT_CONFIG +from ._models import Response +from ._types import ( + AuthTypes, + CookieTypes, + HeaderTypes, + ProxyTypes, + QueryParamTypes, + RequestContent, + RequestData, + RequestFiles, + TimeoutTypes, +) +from ._urls import URL + +if typing.TYPE_CHECKING: + import ssl # pragma: no cover + + +__all__ = [ + "delete", + "get", + "head", + "options", + "patch", + "post", + "put", + "request", + "stream", +] + + +def request( + method: str, + url: URL | str, + *, + params: QueryParamTypes | None = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + verify: ssl.SSLContext | str | bool = True, + trust_env: bool = True, +) -> Response: + """ + Sends an HTTP request. + + **Parameters:** + + * **method** - HTTP method for the new `Request` object: `GET`, `OPTIONS`, + `HEAD`, `POST`, `PUT`, `PATCH`, or `DELETE`. + * **url** - URL for the new `Request` object. + * **params** - *(optional)* Query parameters to include in the URL, as a + string, dictionary, or sequence of two-tuples. + * **content** - *(optional)* Binary content to include in the body of the + request, as bytes or a byte iterator. + * **data** - *(optional)* Form data to include in the body of the request, + as a dictionary. + * **files** - *(optional)* A dictionary of upload files to include in the + body of the request. + * **json** - *(optional)* A JSON serializable object to include in the body + of the request. + * **headers** - *(optional)* Dictionary of HTTP headers to include in the + request. + * **cookies** - *(optional)* Dictionary of Cookie items to include in the + request. + * **auth** - *(optional)* An authentication class to use when sending the + request. 
+ * **proxy** - *(optional)* A proxy URL where all the traffic should be routed. + * **timeout** - *(optional)* The timeout configuration to use when sending + the request. + * **follow_redirects** - *(optional)* Enables or disables HTTP redirects. + * **verify** - *(optional)* Either `True` to use an SSL context with the + default CA bundle, `False` to disable verification, or an instance of + `ssl.SSLContext` to use a custom context. + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. + + **Returns:** `Response` + + Usage: + + ``` + >>> import httpx + >>> response = httpx.request('GET', 'https://httpbin.org/get') + >>> response + + ``` + """ + with Client( + cookies=cookies, + proxy=proxy, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) as client: + return client.request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + auth=auth, + follow_redirects=follow_redirects, + ) + + +@contextmanager +def stream( + method: str, + url: URL | str, + *, + params: QueryParamTypes | None = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + verify: ssl.SSLContext | str | bool = True, + trust_env: bool = True, +) -> typing.Iterator[Response]: + """ + Alternative to `httpx.request()` that streams the response body + instead of loading it into memory at once. + + **Parameters**: See `httpx.request`. + + See also: [Streaming Responses][0] + + [0]: /quickstart#streaming-responses + """ + with Client( + cookies=cookies, + proxy=proxy, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) as client: + with client.stream( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + auth=auth, + follow_redirects=follow_redirects, + ) as response: + yield response + + +def get( + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + follow_redirects: bool = False, + verify: ssl.SSLContext | str | bool = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `GET` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `GET` requests should not include a request body. + """ + return request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + follow_redirects=follow_redirects, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def options( + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + follow_redirects: bool = False, + verify: ssl.SSLContext | str | bool = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends an `OPTIONS` request. + + **Parameters**: See `httpx.request`. 
+ + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `OPTIONS` requests should not include a request body. + """ + return request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + follow_redirects=follow_redirects, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def head( + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + follow_redirects: bool = False, + verify: ssl.SSLContext | str | bool = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `HEAD` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `HEAD` requests should not include a request body. + """ + return request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + follow_redirects=follow_redirects, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def post( + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + follow_redirects: bool = False, + verify: ssl.SSLContext | str | bool = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `POST` request. + + **Parameters**: See `httpx.request`. + """ + return request( + "POST", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + follow_redirects=follow_redirects, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def put( + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + follow_redirects: bool = False, + verify: ssl.SSLContext | str | bool = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `PUT` request. + + **Parameters**: See `httpx.request`. 
+ """ + return request( + "PUT", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + follow_redirects=follow_redirects, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def patch( + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + follow_redirects: bool = False, + verify: ssl.SSLContext | str | bool = True, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + trust_env: bool = True, +) -> Response: + """ + Sends a `PATCH` request. + + **Parameters**: See `httpx.request`. + """ + return request( + "PATCH", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + follow_redirects=follow_redirects, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) + + +def delete( + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | None = None, + proxy: ProxyTypes | None = None, + follow_redirects: bool = False, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + verify: ssl.SSLContext | str | bool = True, + trust_env: bool = True, +) -> Response: + """ + Sends a `DELETE` request. + + **Parameters**: See `httpx.request`. + + Note that the `data`, `files`, `json` and `content` parameters are not available + on this function, as `DELETE` requests should not include a request body. + """ + return request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + proxy=proxy, + follow_redirects=follow_redirects, + verify=verify, + timeout=timeout, + trust_env=trust_env, + ) diff --git a/venv/lib/python3.11/site-packages/httpx/_auth.py b/venv/lib/python3.11/site-packages/httpx/_auth.py new file mode 100644 index 0000000..b03971a --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx/_auth.py @@ -0,0 +1,348 @@ +from __future__ import annotations + +import hashlib +import os +import re +import time +import typing +from base64 import b64encode +from urllib.request import parse_http_list + +from ._exceptions import ProtocolError +from ._models import Cookies, Request, Response +from ._utils import to_bytes, to_str, unquote + +if typing.TYPE_CHECKING: # pragma: no cover + from hashlib import _Hash + + +__all__ = ["Auth", "BasicAuth", "DigestAuth", "NetRCAuth"] + + +class Auth: + """ + Base class for all authentication schemes. + + To implement a custom authentication scheme, subclass `Auth` and override + the `.auth_flow()` method. + + If the authentication scheme does I/O such as disk access or network calls, or uses + synchronization primitives such as locks, you should override `.sync_auth_flow()` + and/or `.async_auth_flow()` instead of `.auth_flow()` to provide specialized + implementations that will be used by `Client` and `AsyncClient` respectively. + """ + + requires_request_body = False + requires_response_body = False + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + """ + Execute the authentication flow. 
+ + To dispatch a request, `yield` it: + + ``` + yield request + ``` + + The client will `.send()` the response back into the flow generator. You can + access it like so: + + ``` + response = yield request + ``` + + A `return` (or reaching the end of the generator) will result in the + client returning the last response obtained from the server. + + You can dispatch as many requests as is necessary. + """ + yield request + + def sync_auth_flow( + self, request: Request + ) -> typing.Generator[Request, Response, None]: + """ + Execute the authentication flow synchronously. + + By default, this defers to `.auth_flow()`. You should override this method + when the authentication scheme does I/O and/or uses concurrency primitives. + """ + if self.requires_request_body: + request.read() + + flow = self.auth_flow(request) + request = next(flow) + + while True: + response = yield request + if self.requires_response_body: + response.read() + + try: + request = flow.send(response) + except StopIteration: + break + + async def async_auth_flow( + self, request: Request + ) -> typing.AsyncGenerator[Request, Response]: + """ + Execute the authentication flow asynchronously. + + By default, this defers to `.auth_flow()`. You should override this method + when the authentication scheme does I/O and/or uses concurrency primitives. + """ + if self.requires_request_body: + await request.aread() + + flow = self.auth_flow(request) + request = next(flow) + + while True: + response = yield request + if self.requires_response_body: + await response.aread() + + try: + request = flow.send(response) + except StopIteration: + break + + +class FunctionAuth(Auth): + """ + Allows the 'auth' argument to be passed as a simple callable function, + that takes the request, and returns a new, modified request. + """ + + def __init__(self, func: typing.Callable[[Request], Request]) -> None: + self._func = func + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + yield self._func(request) + + +class BasicAuth(Auth): + """ + Allows the 'auth' argument to be passed as a (username, password) pair, + and uses HTTP Basic authentication. + """ + + def __init__(self, username: str | bytes, password: str | bytes) -> None: + self._auth_header = self._build_auth_header(username, password) + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + request.headers["Authorization"] = self._auth_header + yield request + + def _build_auth_header(self, username: str | bytes, password: str | bytes) -> str: + userpass = b":".join((to_bytes(username), to_bytes(password))) + token = b64encode(userpass).decode() + return f"Basic {token}" + + +class NetRCAuth(Auth): + """ + Use a 'netrc' file to lookup basic auth credentials based on the url host. + """ + + def __init__(self, file: str | None = None) -> None: + # Lazily import 'netrc'. + # There's no need for us to load this module unless 'NetRCAuth' is being used. + import netrc + + self._netrc_info = netrc.netrc(file) + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + auth_info = self._netrc_info.authenticators(request.url.host) + if auth_info is None or not auth_info[2]: + # The netrc file did not have authentication credentials for this host. + yield request + else: + # Build a basic auth header with credentials from the netrc file. 
+ request.headers["Authorization"] = self._build_auth_header( + username=auth_info[0], password=auth_info[2] + ) + yield request + + def _build_auth_header(self, username: str | bytes, password: str | bytes) -> str: + userpass = b":".join((to_bytes(username), to_bytes(password))) + token = b64encode(userpass).decode() + return f"Basic {token}" + + +class DigestAuth(Auth): + _ALGORITHM_TO_HASH_FUNCTION: dict[str, typing.Callable[[bytes], _Hash]] = { + "MD5": hashlib.md5, + "MD5-SESS": hashlib.md5, + "SHA": hashlib.sha1, + "SHA-SESS": hashlib.sha1, + "SHA-256": hashlib.sha256, + "SHA-256-SESS": hashlib.sha256, + "SHA-512": hashlib.sha512, + "SHA-512-SESS": hashlib.sha512, + } + + def __init__(self, username: str | bytes, password: str | bytes) -> None: + self._username = to_bytes(username) + self._password = to_bytes(password) + self._last_challenge: _DigestAuthChallenge | None = None + self._nonce_count = 1 + + def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: + if self._last_challenge: + request.headers["Authorization"] = self._build_auth_header( + request, self._last_challenge + ) + + response = yield request + + if response.status_code != 401 or "www-authenticate" not in response.headers: + # If the response is not a 401 then we don't + # need to build an authenticated request. + return + + for auth_header in response.headers.get_list("www-authenticate"): + if auth_header.lower().startswith("digest "): + break + else: + # If the response does not include a 'WWW-Authenticate: Digest ...' + # header, then we don't need to build an authenticated request. + return + + self._last_challenge = self._parse_challenge(request, response, auth_header) + self._nonce_count = 1 + + request.headers["Authorization"] = self._build_auth_header( + request, self._last_challenge + ) + if response.cookies: + Cookies(response.cookies).set_cookie_header(request=request) + yield request + + def _parse_challenge( + self, request: Request, response: Response, auth_header: str + ) -> _DigestAuthChallenge: + """ + Returns a challenge from a Digest WWW-Authenticate header. + These take the form of: + `Digest realm="realm@host.com",qop="auth,auth-int",nonce="abc",opaque="xyz"` + """ + scheme, _, fields = auth_header.partition(" ") + + # This method should only ever have been called with a Digest auth header. 
+ assert scheme.lower() == "digest" + + header_dict: dict[str, str] = {} + for field in parse_http_list(fields): + key, value = field.strip().split("=", 1) + header_dict[key] = unquote(value) + + try: + realm = header_dict["realm"].encode() + nonce = header_dict["nonce"].encode() + algorithm = header_dict.get("algorithm", "MD5") + opaque = header_dict["opaque"].encode() if "opaque" in header_dict else None + qop = header_dict["qop"].encode() if "qop" in header_dict else None + return _DigestAuthChallenge( + realm=realm, nonce=nonce, algorithm=algorithm, opaque=opaque, qop=qop + ) + except KeyError as exc: + message = "Malformed Digest WWW-Authenticate header" + raise ProtocolError(message, request=request) from exc + + def _build_auth_header( + self, request: Request, challenge: _DigestAuthChallenge + ) -> str: + hash_func = self._ALGORITHM_TO_HASH_FUNCTION[challenge.algorithm.upper()] + + def digest(data: bytes) -> bytes: + return hash_func(data).hexdigest().encode() + + A1 = b":".join((self._username, challenge.realm, self._password)) + + path = request.url.raw_path + A2 = b":".join((request.method.encode(), path)) + # TODO: implement auth-int + HA2 = digest(A2) + + nc_value = b"%08x" % self._nonce_count + cnonce = self._get_client_nonce(self._nonce_count, challenge.nonce) + self._nonce_count += 1 + + HA1 = digest(A1) + if challenge.algorithm.lower().endswith("-sess"): + HA1 = digest(b":".join((HA1, challenge.nonce, cnonce))) + + qop = self._resolve_qop(challenge.qop, request=request) + if qop is None: + # Following RFC 2069 + digest_data = [HA1, challenge.nonce, HA2] + else: + # Following RFC 2617/7616 + digest_data = [HA1, challenge.nonce, nc_value, cnonce, qop, HA2] + + format_args = { + "username": self._username, + "realm": challenge.realm, + "nonce": challenge.nonce, + "uri": path, + "response": digest(b":".join(digest_data)), + "algorithm": challenge.algorithm.encode(), + } + if challenge.opaque: + format_args["opaque"] = challenge.opaque + if qop: + format_args["qop"] = b"auth" + format_args["nc"] = nc_value + format_args["cnonce"] = cnonce + + return "Digest " + self._get_header_value(format_args) + + def _get_client_nonce(self, nonce_count: int, nonce: bytes) -> bytes: + s = str(nonce_count).encode() + s += nonce + s += time.ctime().encode() + s += os.urandom(8) + + return hashlib.sha1(s).hexdigest()[:16].encode() + + def _get_header_value(self, header_fields: dict[str, bytes]) -> str: + NON_QUOTED_FIELDS = ("algorithm", "qop", "nc") + QUOTED_TEMPLATE = '{}="{}"' + NON_QUOTED_TEMPLATE = "{}={}" + + header_value = "" + for i, (field, value) in enumerate(header_fields.items()): + if i > 0: + header_value += ", " + template = ( + QUOTED_TEMPLATE + if field not in NON_QUOTED_FIELDS + else NON_QUOTED_TEMPLATE + ) + header_value += template.format(field, to_str(value)) + + return header_value + + def _resolve_qop(self, qop: bytes | None, request: Request) -> bytes | None: + if qop is None: + return None + qops = re.split(b", ?", qop) + if b"auth" in qops: + return b"auth" + + if qops == [b"auth-int"]: + raise NotImplementedError("Digest auth-int support is not yet implemented") + + message = f'Unexpected qop value "{qop!r}" in digest auth' + raise ProtocolError(message, request=request) + + +class _DigestAuthChallenge(typing.NamedTuple): + realm: bytes + nonce: bytes + algorithm: str + opaque: bytes | None + qop: bytes | None diff --git a/venv/lib/python3.11/site-packages/httpx/_client.py b/venv/lib/python3.11/site-packages/httpx/_client.py new file mode 100644 index 0000000..2249231 
--- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx/_client.py @@ -0,0 +1,2019 @@ +from __future__ import annotations + +import datetime +import enum +import logging +import time +import typing +import warnings +from contextlib import asynccontextmanager, contextmanager +from types import TracebackType + +from .__version__ import __version__ +from ._auth import Auth, BasicAuth, FunctionAuth +from ._config import ( + DEFAULT_LIMITS, + DEFAULT_MAX_REDIRECTS, + DEFAULT_TIMEOUT_CONFIG, + Limits, + Proxy, + Timeout, +) +from ._decoders import SUPPORTED_DECODERS +from ._exceptions import ( + InvalidURL, + RemoteProtocolError, + TooManyRedirects, + request_context, +) +from ._models import Cookies, Headers, Request, Response +from ._status_codes import codes +from ._transports.base import AsyncBaseTransport, BaseTransport +from ._transports.default import AsyncHTTPTransport, HTTPTransport +from ._types import ( + AsyncByteStream, + AuthTypes, + CertTypes, + CookieTypes, + HeaderTypes, + ProxyTypes, + QueryParamTypes, + RequestContent, + RequestData, + RequestExtensions, + RequestFiles, + SyncByteStream, + TimeoutTypes, +) +from ._urls import URL, QueryParams +from ._utils import URLPattern, get_environment_proxies + +if typing.TYPE_CHECKING: + import ssl # pragma: no cover + +__all__ = ["USE_CLIENT_DEFAULT", "AsyncClient", "Client"] + +# The type annotation for @classmethod and context managers here follows PEP 484 +# https://www.python.org/dev/peps/pep-0484/#annotating-instance-and-class-methods +T = typing.TypeVar("T", bound="Client") +U = typing.TypeVar("U", bound="AsyncClient") + + +def _is_https_redirect(url: URL, location: URL) -> bool: + """ + Return 'True' if 'location' is a HTTPS upgrade of 'url' + """ + if url.host != location.host: + return False + + return ( + url.scheme == "http" + and _port_or_default(url) == 80 + and location.scheme == "https" + and _port_or_default(location) == 443 + ) + + +def _port_or_default(url: URL) -> int | None: + if url.port is not None: + return url.port + return {"http": 80, "https": 443}.get(url.scheme) + + +def _same_origin(url: URL, other: URL) -> bool: + """ + Return 'True' if the given URLs share the same origin. + """ + return ( + url.scheme == other.scheme + and url.host == other.host + and _port_or_default(url) == _port_or_default(other) + ) + + +class UseClientDefault: + """ + For some parameters such as `auth=...` and `timeout=...` we need to be able + to indicate the default "unset" state, in a way that is distinctly different + to using `None`. + + The default "unset" state indicates that whatever default is set on the + client should be used. This is different to setting `None`, which + explicitly disables the parameter, possibly overriding a client default. + + For example we use `timeout=USE_CLIENT_DEFAULT` in the `request()` signature. + Omitting the `timeout` parameter will send a request using whatever default + timeout has been configured on the client. Including `timeout=None` will + ensure no timeout is used. + + Note that user code shouldn't need to use the `USE_CLIENT_DEFAULT` constant, + but it is used internally when a parameter is not included. 
+ """ + + +USE_CLIENT_DEFAULT = UseClientDefault() + + +logger = logging.getLogger("httpx") + +USER_AGENT = f"python-httpx/{__version__}" +ACCEPT_ENCODING = ", ".join( + [key for key in SUPPORTED_DECODERS.keys() if key != "identity"] +) + + +class ClientState(enum.Enum): + # UNOPENED: + # The client has been instantiated, but has not been used to send a request, + # or been opened by entering the context of a `with` block. + UNOPENED = 1 + # OPENED: + # The client has either sent a request, or is within a `with` block. + OPENED = 2 + # CLOSED: + # The client has either exited the `with` block, or `close()` has + # been called explicitly. + CLOSED = 3 + + +class BoundSyncStream(SyncByteStream): + """ + A byte stream that is bound to a given response instance, and that + ensures the `response.elapsed` is set once the response is closed. + """ + + def __init__( + self, stream: SyncByteStream, response: Response, start: float + ) -> None: + self._stream = stream + self._response = response + self._start = start + + def __iter__(self) -> typing.Iterator[bytes]: + for chunk in self._stream: + yield chunk + + def close(self) -> None: + elapsed = time.perf_counter() - self._start + self._response.elapsed = datetime.timedelta(seconds=elapsed) + self._stream.close() + + +class BoundAsyncStream(AsyncByteStream): + """ + An async byte stream that is bound to a given response instance, and that + ensures the `response.elapsed` is set once the response is closed. + """ + + def __init__( + self, stream: AsyncByteStream, response: Response, start: float + ) -> None: + self._stream = stream + self._response = response + self._start = start + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + async for chunk in self._stream: + yield chunk + + async def aclose(self) -> None: + elapsed = time.perf_counter() - self._start + self._response.elapsed = datetime.timedelta(seconds=elapsed) + await self._stream.aclose() + + +EventHook = typing.Callable[..., typing.Any] + + +class BaseClient: + def __init__( + self, + *, + auth: AuthTypes | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None, + base_url: URL | str = "", + trust_env: bool = True, + default_encoding: str | typing.Callable[[bytes], str] = "utf-8", + ) -> None: + event_hooks = {} if event_hooks is None else event_hooks + + self._base_url = self._enforce_trailing_slash(URL(base_url)) + + self._auth = self._build_auth(auth) + self._params = QueryParams(params) + self.headers = Headers(headers) + self._cookies = Cookies(cookies) + self._timeout = Timeout(timeout) + self.follow_redirects = follow_redirects + self.max_redirects = max_redirects + self._event_hooks = { + "request": list(event_hooks.get("request", [])), + "response": list(event_hooks.get("response", [])), + } + self._trust_env = trust_env + self._default_encoding = default_encoding + self._state = ClientState.UNOPENED + + @property + def is_closed(self) -> bool: + """ + Check if the client being closed + """ + return self._state == ClientState.CLOSED + + @property + def trust_env(self) -> bool: + return self._trust_env + + def _enforce_trailing_slash(self, url: URL) -> URL: + if url.raw_path.endswith(b"/"): + return url + return url.copy_with(raw_path=url.raw_path + b"/") + + def _get_proxy_map( + self, proxy: 
ProxyTypes | None, allow_env_proxies: bool + ) -> dict[str, Proxy | None]: + if proxy is None: + if allow_env_proxies: + return { + key: None if url is None else Proxy(url=url) + for key, url in get_environment_proxies().items() + } + return {} + else: + proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy + return {"all://": proxy} + + @property + def timeout(self) -> Timeout: + return self._timeout + + @timeout.setter + def timeout(self, timeout: TimeoutTypes) -> None: + self._timeout = Timeout(timeout) + + @property + def event_hooks(self) -> dict[str, list[EventHook]]: + return self._event_hooks + + @event_hooks.setter + def event_hooks(self, event_hooks: dict[str, list[EventHook]]) -> None: + self._event_hooks = { + "request": list(event_hooks.get("request", [])), + "response": list(event_hooks.get("response", [])), + } + + @property + def auth(self) -> Auth | None: + """ + Authentication class used when none is passed at the request-level. + + See also [Authentication][0]. + + [0]: /quickstart/#authentication + """ + return self._auth + + @auth.setter + def auth(self, auth: AuthTypes) -> None: + self._auth = self._build_auth(auth) + + @property + def base_url(self) -> URL: + """ + Base URL to use when sending requests with relative URLs. + """ + return self._base_url + + @base_url.setter + def base_url(self, url: URL | str) -> None: + self._base_url = self._enforce_trailing_slash(URL(url)) + + @property + def headers(self) -> Headers: + """ + HTTP headers to include when sending requests. + """ + return self._headers + + @headers.setter + def headers(self, headers: HeaderTypes) -> None: + client_headers = Headers( + { + b"Accept": b"*/*", + b"Accept-Encoding": ACCEPT_ENCODING.encode("ascii"), + b"Connection": b"keep-alive", + b"User-Agent": USER_AGENT.encode("ascii"), + } + ) + client_headers.update(headers) + self._headers = client_headers + + @property + def cookies(self) -> Cookies: + """ + Cookie values to include when sending requests. + """ + return self._cookies + + @cookies.setter + def cookies(self, cookies: CookieTypes) -> None: + self._cookies = Cookies(cookies) + + @property + def params(self) -> QueryParams: + """ + Query parameters to include in the URL when sending requests. + """ + return self._params + + @params.setter + def params(self, params: QueryParamTypes) -> None: + self._params = QueryParams(params) + + def build_request( + self, + method: str, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Request: + """ + Build and return a request instance. + + * The `params`, `headers` and `cookies` arguments + are merged with any values set on the client. + * The `url` argument is merged with any `base_url` set on the client. 
+ + See also: [Request instances][0] + + [0]: /advanced/clients/#request-instances + """ + url = self._merge_url(url) + headers = self._merge_headers(headers) + cookies = self._merge_cookies(cookies) + params = self._merge_queryparams(params) + extensions = {} if extensions is None else extensions + if "timeout" not in extensions: + timeout = ( + self.timeout + if isinstance(timeout, UseClientDefault) + else Timeout(timeout) + ) + extensions = dict(**extensions, timeout=timeout.as_dict()) + return Request( + method, + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + extensions=extensions, + ) + + def _merge_url(self, url: URL | str) -> URL: + """ + Merge a URL argument together with any 'base_url' on the client, + to create the URL used for the outgoing request. + """ + merge_url = URL(url) + if merge_url.is_relative_url: + # To merge URLs we always append to the base URL. To get this + # behaviour correct we always ensure the base URL ends in a '/' + # separator, and strip any leading '/' from the merge URL. + # + # So, eg... + # + # >>> client = Client(base_url="https://www.example.com/subpath") + # >>> client.base_url + # URL('https://www.example.com/subpath/') + # >>> client.build_request("GET", "/path").url + # URL('https://www.example.com/subpath/path') + merge_raw_path = self.base_url.raw_path + merge_url.raw_path.lstrip(b"/") + return self.base_url.copy_with(raw_path=merge_raw_path) + return merge_url + + def _merge_cookies(self, cookies: CookieTypes | None = None) -> CookieTypes | None: + """ + Merge a cookies argument together with any cookies on the client, + to create the cookies used for the outgoing request. + """ + if cookies or self.cookies: + merged_cookies = Cookies(self.cookies) + merged_cookies.update(cookies) + return merged_cookies + return cookies + + def _merge_headers(self, headers: HeaderTypes | None = None) -> HeaderTypes | None: + """ + Merge a headers argument together with any headers on the client, + to create the headers used for the outgoing request. + """ + merged_headers = Headers(self.headers) + merged_headers.update(headers) + return merged_headers + + def _merge_queryparams( + self, params: QueryParamTypes | None = None + ) -> QueryParamTypes | None: + """ + Merge a queryparams argument together with any queryparams on the client, + to create the queryparams used for the outgoing request. 
+ """ + if params or self.params: + merged_queryparams = QueryParams(self.params) + return merged_queryparams.merge(params) + return params + + def _build_auth(self, auth: AuthTypes | None) -> Auth | None: + if auth is None: + return None + elif isinstance(auth, tuple): + return BasicAuth(username=auth[0], password=auth[1]) + elif isinstance(auth, Auth): + return auth + elif callable(auth): + return FunctionAuth(func=auth) + else: + raise TypeError(f'Invalid "auth" argument: {auth!r}') + + def _build_request_auth( + self, + request: Request, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + ) -> Auth: + auth = ( + self._auth if isinstance(auth, UseClientDefault) else self._build_auth(auth) + ) + + if auth is not None: + return auth + + username, password = request.url.username, request.url.password + if username or password: + return BasicAuth(username=username, password=password) + + return Auth() + + def _build_redirect_request(self, request: Request, response: Response) -> Request: + """ + Given a request and a redirect response, return a new request that + should be used to effect the redirect. + """ + method = self._redirect_method(request, response) + url = self._redirect_url(request, response) + headers = self._redirect_headers(request, url, method) + stream = self._redirect_stream(request, method) + cookies = Cookies(self.cookies) + return Request( + method=method, + url=url, + headers=headers, + cookies=cookies, + stream=stream, + extensions=request.extensions, + ) + + def _redirect_method(self, request: Request, response: Response) -> str: + """ + When being redirected we may want to change the method of the request + based on certain specs or browser behavior. + """ + method = request.method + + # https://tools.ietf.org/html/rfc7231#section-6.4.4 + if response.status_code == codes.SEE_OTHER and method != "HEAD": + method = "GET" + + # Do what the browsers do, despite standards... + # Turn 302s into GETs. + if response.status_code == codes.FOUND and method != "HEAD": + method = "GET" + + # If a POST is responded to with a 301, turn it into a GET. + # This bizarre behaviour is explained in 'requests' issue 1704. + if response.status_code == codes.MOVED_PERMANENTLY and method == "POST": + method = "GET" + + return method + + def _redirect_url(self, request: Request, response: Response) -> URL: + """ + Return the URL for the redirect to follow. + """ + location = response.headers["Location"] + + try: + url = URL(location) + except InvalidURL as exc: + raise RemoteProtocolError( + f"Invalid URL in location header: {exc}.", request=request + ) from None + + # Handle malformed 'Location' headers that are "absolute" form, have no host. + # See: https://github.com/encode/httpx/issues/771 + if url.scheme and not url.host: + url = url.copy_with(host=request.url.host) + + # Facilitate relative 'Location' headers, as allowed by RFC 7231. + # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') + if url.is_relative_url: + url = request.url.join(url) + + # Attach previous fragment if needed (RFC 7231 7.1.2) + if request.url.fragment and not url.fragment: + url = url.copy_with(fragment=request.url.fragment) + + return url + + def _redirect_headers(self, request: Request, url: URL, method: str) -> Headers: + """ + Return the headers that should be used for the redirect request. 
+ """ + headers = Headers(request.headers) + + if not _same_origin(url, request.url): + if not _is_https_redirect(request.url, url): + # Strip Authorization headers when responses are redirected + # away from the origin. (Except for direct HTTP to HTTPS redirects.) + headers.pop("Authorization", None) + + # Update the Host header. + headers["Host"] = url.netloc.decode("ascii") + + if method != request.method and method == "GET": + # If we've switch to a 'GET' request, then strip any headers which + # are only relevant to the request body. + headers.pop("Content-Length", None) + headers.pop("Transfer-Encoding", None) + + # We should use the client cookie store to determine any cookie header, + # rather than whatever was on the original outgoing request. + headers.pop("Cookie", None) + + return headers + + def _redirect_stream( + self, request: Request, method: str + ) -> SyncByteStream | AsyncByteStream | None: + """ + Return the body that should be used for the redirect request. + """ + if method != request.method and method == "GET": + return None + + return request.stream + + def _set_timeout(self, request: Request) -> None: + if "timeout" not in request.extensions: + timeout = ( + self.timeout + if isinstance(self.timeout, UseClientDefault) + else Timeout(self.timeout) + ) + request.extensions = dict(**request.extensions, timeout=timeout.as_dict()) + + +class Client(BaseClient): + """ + An HTTP client, with connection pooling, HTTP/2, redirects, cookie persistence, etc. + + It can be shared between threads. + + Usage: + + ```python + >>> client = httpx.Client() + >>> response = client.get('https://example.org') + ``` + + **Parameters:** + + * **auth** - *(optional)* An authentication class to use when sending + requests. + * **params** - *(optional)* Query parameters to include in request URLs, as + a string, dictionary, or sequence of two-tuples. + * **headers** - *(optional)* Dictionary of HTTP headers to include when + sending requests. + * **cookies** - *(optional)* Dictionary of Cookie items to include when + sending requests. + * **verify** - *(optional)* Either `True` to use an SSL context with the + default CA bundle, `False` to disable verification, or an instance of + `ssl.SSLContext` to use a custom context. + * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be + enabled. Defaults to `False`. + * **proxy** - *(optional)* A proxy URL where all the traffic should be routed. + * **timeout** - *(optional)* The timeout configuration to use when sending + requests. + * **limits** - *(optional)* The limits configuration to use. + * **max_redirects** - *(optional)* The maximum number of redirect responses + that should be followed. + * **base_url** - *(optional)* A URL to use as the base when building + request URLs. + * **transport** - *(optional)* A transport class to use for sending requests + over the network. + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. + * **default_encoding** - *(optional)* The default encoding to use for decoding + response text, if no charset information is included in a response Content-Type + header. Set to a callable for automatic character set detection. Default: "utf-8". 
+ """ + + def __init__( + self, + *, + auth: AuthTypes | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + verify: ssl.SSLContext | str | bool = True, + cert: CertTypes | None = None, + trust_env: bool = True, + http1: bool = True, + http2: bool = False, + proxy: ProxyTypes | None = None, + mounts: None | (typing.Mapping[str, BaseTransport | None]) = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + limits: Limits = DEFAULT_LIMITS, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None, + base_url: URL | str = "", + transport: BaseTransport | None = None, + default_encoding: str | typing.Callable[[bytes], str] = "utf-8", + ) -> None: + super().__init__( + auth=auth, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + follow_redirects=follow_redirects, + max_redirects=max_redirects, + event_hooks=event_hooks, + base_url=base_url, + trust_env=trust_env, + default_encoding=default_encoding, + ) + + if http2: + try: + import h2 # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using http2=True, but the 'h2' package is not installed. " + "Make sure to install httpx using `pip install httpx[http2]`." + ) from None + + allow_env_proxies = trust_env and transport is None + proxy_map = self._get_proxy_map(proxy, allow_env_proxies) + + self._transport = self._init_transport( + verify=verify, + cert=cert, + trust_env=trust_env, + http1=http1, + http2=http2, + limits=limits, + transport=transport, + ) + self._mounts: dict[URLPattern, BaseTransport | None] = { + URLPattern(key): None + if proxy is None + else self._init_proxy_transport( + proxy, + verify=verify, + cert=cert, + trust_env=trust_env, + http1=http1, + http2=http2, + limits=limits, + ) + for key, proxy in proxy_map.items() + } + if mounts is not None: + self._mounts.update( + {URLPattern(key): transport for key, transport in mounts.items()} + ) + + self._mounts = dict(sorted(self._mounts.items())) + + def _init_transport( + self, + verify: ssl.SSLContext | str | bool = True, + cert: CertTypes | None = None, + trust_env: bool = True, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + transport: BaseTransport | None = None, + ) -> BaseTransport: + if transport is not None: + return transport + + return HTTPTransport( + verify=verify, + cert=cert, + trust_env=trust_env, + http1=http1, + http2=http2, + limits=limits, + ) + + def _init_proxy_transport( + self, + proxy: Proxy, + verify: ssl.SSLContext | str | bool = True, + cert: CertTypes | None = None, + trust_env: bool = True, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + ) -> BaseTransport: + return HTTPTransport( + verify=verify, + cert=cert, + trust_env=trust_env, + http1=http1, + http2=http2, + limits=limits, + proxy=proxy, + ) + + def _transport_for_url(self, url: URL) -> BaseTransport: + """ + Returns the transport instance that should be used for a given URL. + This will either be the standard connection pool, or a proxy. 
+ """ + for pattern, transport in self._mounts.items(): + if pattern.matches(url): + return self._transport if transport is None else transport + + return self._transport + + def request( + self, + method: str, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Build and send a request. + + Equivalent to: + + ```python + request = client.build_request(...) + response = client.send(request, ...) + ``` + + See `Client.build_request()`, `Client.send()` and + [Merging of configuration][0] for how the various parameters + are merged with client-level configuration. + + [0]: /advanced/clients/#merging-of-configuration + """ + if cookies is not None: + message = ( + "Setting per-request cookies=<...> is being deprecated, because " + "the expected behaviour on cookie persistence is ambiguous. Set " + "cookies directly on the client instance instead." + ) + warnings.warn(message, DeprecationWarning, stacklevel=2) + + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + return self.send(request, auth=auth, follow_redirects=follow_redirects) + + @contextmanager + def stream( + self, + method: str, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> typing.Iterator[Response]: + """ + Alternative to `httpx.request()` that streams the response body + instead of loading it into memory at once. + + **Parameters**: See `httpx.request`. + + See also: [Streaming Responses][0] + + [0]: /quickstart#streaming-responses + """ + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + response = self.send( + request=request, + auth=auth, + follow_redirects=follow_redirects, + stream=True, + ) + try: + yield response + finally: + response.close() + + def send( + self, + request: Request, + *, + stream: bool = False, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a request. + + The request is sent as-is, unmodified. + + Typically you'll want to build one with `Client.build_request()` + so that any client-level configuration is merged into the request, + but passing an explicit `httpx.Request()` is supported as well. 
+ + See also: [Request instances][0] + + [0]: /advanced/clients/#request-instances + """ + if self._state == ClientState.CLOSED: + raise RuntimeError("Cannot send a request, as the client has been closed.") + + self._state = ClientState.OPENED + follow_redirects = ( + self.follow_redirects + if isinstance(follow_redirects, UseClientDefault) + else follow_redirects + ) + + self._set_timeout(request) + + auth = self._build_request_auth(request, auth) + + response = self._send_handling_auth( + request, + auth=auth, + follow_redirects=follow_redirects, + history=[], + ) + try: + if not stream: + response.read() + + return response + + except BaseException as exc: + response.close() + raise exc + + def _send_handling_auth( + self, + request: Request, + auth: Auth, + follow_redirects: bool, + history: list[Response], + ) -> Response: + auth_flow = auth.sync_auth_flow(request) + try: + request = next(auth_flow) + + while True: + response = self._send_handling_redirects( + request, + follow_redirects=follow_redirects, + history=history, + ) + try: + try: + next_request = auth_flow.send(response) + except StopIteration: + return response + + response.history = list(history) + response.read() + request = next_request + history.append(response) + + except BaseException as exc: + response.close() + raise exc + finally: + auth_flow.close() + + def _send_handling_redirects( + self, + request: Request, + follow_redirects: bool, + history: list[Response], + ) -> Response: + while True: + if len(history) > self.max_redirects: + raise TooManyRedirects( + "Exceeded maximum allowed redirects.", request=request + ) + + for hook in self._event_hooks["request"]: + hook(request) + + response = self._send_single_request(request) + try: + for hook in self._event_hooks["response"]: + hook(response) + response.history = list(history) + + if not response.has_redirect_location: + return response + + request = self._build_redirect_request(request, response) + history = history + [response] + + if follow_redirects: + response.read() + else: + response.next_request = request + return response + + except BaseException as exc: + response.close() + raise exc + + def _send_single_request(self, request: Request) -> Response: + """ + Sends a single request, without handling any redirections. + """ + transport = self._transport_for_url(request.url) + start = time.perf_counter() + + if not isinstance(request.stream, SyncByteStream): + raise RuntimeError( + "Attempted to send an async request with a sync Client instance." + ) + + with request_context(request=request): + response = transport.handle_request(request) + + assert isinstance(response.stream, SyncByteStream) + + response.request = request + response.stream = BoundSyncStream( + response.stream, response=response, start=start + ) + self.cookies.extract_cookies(response) + response.default_encoding = self._default_encoding + + logger.info( + 'HTTP Request: %s %s "%s %d %s"', + request.method, + request.url, + response.http_version, + response.status_code, + response.reason_phrase, + ) + + return response + + def get( + self, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `GET` request. 
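+
+        For example (the URL and query parameters are illustrative):
+
+        ```python
+        response = client.get("https://example.org", params={"page": "1"})
+        ```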
+ + **Parameters**: See `httpx.request`. + """ + return self.request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def options( + self, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send an `OPTIONS` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def head( + self, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `HEAD` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def post( + self, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `POST` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "POST", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def put( + self, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `PUT` request. + + **Parameters**: See `httpx.request`. 
+ """ + return self.request( + "PUT", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def patch( + self, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `PATCH` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "PATCH", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def delete( + self, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `DELETE` request. + + **Parameters**: See `httpx.request`. + """ + return self.request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + def close(self) -> None: + """ + Close transport and proxies. + """ + if self._state != ClientState.CLOSED: + self._state = ClientState.CLOSED + + self._transport.close() + for transport in self._mounts.values(): + if transport is not None: + transport.close() + + def __enter__(self: T) -> T: + if self._state != ClientState.UNOPENED: + msg = { + ClientState.OPENED: "Cannot open a client instance more than once.", + ClientState.CLOSED: ( + "Cannot reopen a client instance, once it has been closed." + ), + }[self._state] + raise RuntimeError(msg) + + self._state = ClientState.OPENED + + self._transport.__enter__() + for transport in self._mounts.values(): + if transport is not None: + transport.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + self._state = ClientState.CLOSED + + self._transport.__exit__(exc_type, exc_value, traceback) + for transport in self._mounts.values(): + if transport is not None: + transport.__exit__(exc_type, exc_value, traceback) + + +class AsyncClient(BaseClient): + """ + An asynchronous HTTP client, with connection pooling, HTTP/2, redirects, + cookie persistence, etc. + + It can be shared between tasks. + + Usage: + + ```python + >>> async with httpx.AsyncClient() as client: + >>> response = await client.get('https://example.org') + ``` + + **Parameters:** + + * **auth** - *(optional)* An authentication class to use when sending + requests. + * **params** - *(optional)* Query parameters to include in request URLs, as + a string, dictionary, or sequence of two-tuples. 
+ * **headers** - *(optional)* Dictionary of HTTP headers to include when + sending requests. + * **cookies** - *(optional)* Dictionary of Cookie items to include when + sending requests. + * **verify** - *(optional)* Either `True` to use an SSL context with the + default CA bundle, `False` to disable verification, or an instance of + `ssl.SSLContext` to use a custom context. + * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be + enabled. Defaults to `False`. + * **proxy** - *(optional)* A proxy URL where all the traffic should be routed. + * **timeout** - *(optional)* The timeout configuration to use when sending + requests. + * **limits** - *(optional)* The limits configuration to use. + * **max_redirects** - *(optional)* The maximum number of redirect responses + that should be followed. + * **base_url** - *(optional)* A URL to use as the base when building + request URLs. + * **transport** - *(optional)* A transport class to use for sending requests + over the network. + * **trust_env** - *(optional)* Enables or disables usage of environment + variables for configuration. + * **default_encoding** - *(optional)* The default encoding to use for decoding + response text, if no charset information is included in a response Content-Type + header. Set to a callable for automatic character set detection. Default: "utf-8". + """ + + def __init__( + self, + *, + auth: AuthTypes | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + verify: ssl.SSLContext | str | bool = True, + cert: CertTypes | None = None, + http1: bool = True, + http2: bool = False, + proxy: ProxyTypes | None = None, + mounts: None | (typing.Mapping[str, AsyncBaseTransport | None]) = None, + timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, + follow_redirects: bool = False, + limits: Limits = DEFAULT_LIMITS, + max_redirects: int = DEFAULT_MAX_REDIRECTS, + event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None, + base_url: URL | str = "", + transport: AsyncBaseTransport | None = None, + trust_env: bool = True, + default_encoding: str | typing.Callable[[bytes], str] = "utf-8", + ) -> None: + super().__init__( + auth=auth, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + follow_redirects=follow_redirects, + max_redirects=max_redirects, + event_hooks=event_hooks, + base_url=base_url, + trust_env=trust_env, + default_encoding=default_encoding, + ) + + if http2: + try: + import h2 # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using http2=True, but the 'h2' package is not installed. " + "Make sure to install httpx using `pip install httpx[http2]`." 
+ ) from None + + allow_env_proxies = trust_env and transport is None + proxy_map = self._get_proxy_map(proxy, allow_env_proxies) + + self._transport = self._init_transport( + verify=verify, + cert=cert, + trust_env=trust_env, + http1=http1, + http2=http2, + limits=limits, + transport=transport, + ) + + self._mounts: dict[URLPattern, AsyncBaseTransport | None] = { + URLPattern(key): None + if proxy is None + else self._init_proxy_transport( + proxy, + verify=verify, + cert=cert, + trust_env=trust_env, + http1=http1, + http2=http2, + limits=limits, + ) + for key, proxy in proxy_map.items() + } + if mounts is not None: + self._mounts.update( + {URLPattern(key): transport for key, transport in mounts.items()} + ) + self._mounts = dict(sorted(self._mounts.items())) + + def _init_transport( + self, + verify: ssl.SSLContext | str | bool = True, + cert: CertTypes | None = None, + trust_env: bool = True, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + transport: AsyncBaseTransport | None = None, + ) -> AsyncBaseTransport: + if transport is not None: + return transport + + return AsyncHTTPTransport( + verify=verify, + cert=cert, + trust_env=trust_env, + http1=http1, + http2=http2, + limits=limits, + ) + + def _init_proxy_transport( + self, + proxy: Proxy, + verify: ssl.SSLContext | str | bool = True, + cert: CertTypes | None = None, + trust_env: bool = True, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + ) -> AsyncBaseTransport: + return AsyncHTTPTransport( + verify=verify, + cert=cert, + trust_env=trust_env, + http1=http1, + http2=http2, + limits=limits, + proxy=proxy, + ) + + def _transport_for_url(self, url: URL) -> AsyncBaseTransport: + """ + Returns the transport instance that should be used for a given URL. + This will either be the standard connection pool, or a proxy. + """ + for pattern, transport in self._mounts.items(): + if pattern.matches(url): + return self._transport if transport is None else transport + + return self._transport + + async def request( + self, + method: str, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Build and send a request. + + Equivalent to: + + ```python + request = client.build_request(...) + response = await client.send(request, ...) + ``` + + See `AsyncClient.build_request()`, `AsyncClient.send()` + and [Merging of configuration][0] for how the various parameters + are merged with client-level configuration. + + [0]: /advanced/clients/#merging-of-configuration + """ + + if cookies is not None: # pragma: no cover + message = ( + "Setting per-request cookies=<...> is being deprecated, because " + "the expected behaviour on cookie persistence is ambiguous. Set " + "cookies directly on the client instance instead." 
+ ) + warnings.warn(message, DeprecationWarning, stacklevel=2) + + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + return await self.send(request, auth=auth, follow_redirects=follow_redirects) + + @asynccontextmanager + async def stream( + self, + method: str, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> typing.AsyncIterator[Response]: + """ + Alternative to `httpx.request()` that streams the response body + instead of loading it into memory at once. + + **Parameters**: See `httpx.request`. + + See also: [Streaming Responses][0] + + [0]: /quickstart#streaming-responses + """ + request = self.build_request( + method=method, + url=url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + response = await self.send( + request=request, + auth=auth, + follow_redirects=follow_redirects, + stream=True, + ) + try: + yield response + finally: + await response.aclose() + + async def send( + self, + request: Request, + *, + stream: bool = False, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + ) -> Response: + """ + Send a request. + + The request is sent as-is, unmodified. + + Typically you'll want to build one with `AsyncClient.build_request()` + so that any client-level configuration is merged into the request, + but passing an explicit `httpx.Request()` is supported as well. 
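+
+        For example, the same build-then-send pattern in async code (the URL
+        is illustrative):
+
+        ```python
+        async with httpx.AsyncClient() as client:
+            request = client.build_request("GET", "https://example.org")
+            response = await client.send(request)
+        ```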
+
+        See also: [Request instances][0]
+
+        [0]: /advanced/clients/#request-instances
+        """
+        if self._state == ClientState.CLOSED:
+            raise RuntimeError("Cannot send a request, as the client has been closed.")
+
+        self._state = ClientState.OPENED
+        follow_redirects = (
+            self.follow_redirects
+            if isinstance(follow_redirects, UseClientDefault)
+            else follow_redirects
+        )
+
+        self._set_timeout(request)
+
+        auth = self._build_request_auth(request, auth)
+
+        response = await self._send_handling_auth(
+            request,
+            auth=auth,
+            follow_redirects=follow_redirects,
+            history=[],
+        )
+        try:
+            if not stream:
+                await response.aread()
+
+            return response
+
+        except BaseException as exc:
+            await response.aclose()
+            raise exc
+
+    async def _send_handling_auth(
+        self,
+        request: Request,
+        auth: Auth,
+        follow_redirects: bool,
+        history: list[Response],
+    ) -> Response:
+        auth_flow = auth.async_auth_flow(request)
+        try:
+            request = await auth_flow.__anext__()
+
+            while True:
+                response = await self._send_handling_redirects(
+                    request,
+                    follow_redirects=follow_redirects,
+                    history=history,
+                )
+                try:
+                    try:
+                        next_request = await auth_flow.asend(response)
+                    except StopAsyncIteration:
+                        return response
+
+                    response.history = list(history)
+                    await response.aread()
+                    request = next_request
+                    history.append(response)
+
+                except BaseException as exc:
+                    await response.aclose()
+                    raise exc
+        finally:
+            await auth_flow.aclose()
+
+    async def _send_handling_redirects(
+        self,
+        request: Request,
+        follow_redirects: bool,
+        history: list[Response],
+    ) -> Response:
+        while True:
+            if len(history) > self.max_redirects:
+                raise TooManyRedirects(
+                    "Exceeded maximum allowed redirects.", request=request
+                )
+
+            for hook in self._event_hooks["request"]:
+                await hook(request)
+
+            response = await self._send_single_request(request)
+            try:
+                for hook in self._event_hooks["response"]:
+                    await hook(response)
+
+                response.history = list(history)
+
+                if not response.has_redirect_location:
+                    return response
+
+                request = self._build_redirect_request(request, response)
+                history = history + [response]
+
+                if follow_redirects:
+                    await response.aread()
+                else:
+                    response.next_request = request
+                    return response
+
+            except BaseException as exc:
+                await response.aclose()
+                raise exc
+
+    async def _send_single_request(self, request: Request) -> Response:
+        """
+        Sends a single request, without handling any redirections.
+        """
+        transport = self._transport_for_url(request.url)
+        start = time.perf_counter()
+
+        if not isinstance(request.stream, AsyncByteStream):
+            raise RuntimeError(
+                "Attempted to send a sync request with an AsyncClient instance."
+ ) + + with request_context(request=request): + response = await transport.handle_async_request(request) + + assert isinstance(response.stream, AsyncByteStream) + response.request = request + response.stream = BoundAsyncStream( + response.stream, response=response, start=start + ) + self.cookies.extract_cookies(response) + response.default_encoding = self._default_encoding + + logger.info( + 'HTTP Request: %s %s "%s %d %s"', + request.method, + request.url, + response.http_version, + response.status_code, + response.reason_phrase, + ) + + return response + + async def get( + self, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `GET` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "GET", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def options( + self, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send an `OPTIONS` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "OPTIONS", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def head( + self, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `HEAD` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "HEAD", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def post( + self, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `POST` request. + + **Parameters**: See `httpx.request`. 
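+
+        For example (the URL and JSON payload are illustrative):
+
+        ```python
+        response = await client.post("https://example.org", json={"key": "value"})
+        ```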
+ """ + return await self.request( + "POST", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def put( + self, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `PUT` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "PUT", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def patch( + self, + url: URL | str, + *, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `PATCH` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "PATCH", + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def delete( + self, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, + follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, + timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, + extensions: RequestExtensions | None = None, + ) -> Response: + """ + Send a `DELETE` request. + + **Parameters**: See `httpx.request`. + """ + return await self.request( + "DELETE", + url, + params=params, + headers=headers, + cookies=cookies, + auth=auth, + follow_redirects=follow_redirects, + timeout=timeout, + extensions=extensions, + ) + + async def aclose(self) -> None: + """ + Close transport and proxies. + """ + if self._state != ClientState.CLOSED: + self._state = ClientState.CLOSED + + await self._transport.aclose() + for proxy in self._mounts.values(): + if proxy is not None: + await proxy.aclose() + + async def __aenter__(self: U) -> U: + if self._state != ClientState.UNOPENED: + msg = { + ClientState.OPENED: "Cannot open a client instance more than once.", + ClientState.CLOSED: ( + "Cannot reopen a client instance, once it has been closed." 
+            ),
+        }[self._state]
+        raise RuntimeError(msg)
+
+        self._state = ClientState.OPENED
+
+        await self._transport.__aenter__()
+        for proxy in self._mounts.values():
+            if proxy is not None:
+                await proxy.__aenter__()
+        return self
+
+    async def __aexit__(
+        self,
+        exc_type: type[BaseException] | None = None,
+        exc_value: BaseException | None = None,
+        traceback: TracebackType | None = None,
+    ) -> None:
+        self._state = ClientState.CLOSED
+
+        await self._transport.__aexit__(exc_type, exc_value, traceback)
+        for proxy in self._mounts.values():
+            if proxy is not None:
+                await proxy.__aexit__(exc_type, exc_value, traceback)
diff --git a/venv/lib/python3.11/site-packages/httpx/_config.py b/venv/lib/python3.11/site-packages/httpx/_config.py
new file mode 100644
index 0000000..467a6c9
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/httpx/_config.py
@@ -0,0 +1,248 @@
+from __future__ import annotations
+
+import os
+import typing
+
+from ._models import Headers
+from ._types import CertTypes, HeaderTypes, TimeoutTypes
+from ._urls import URL
+
+if typing.TYPE_CHECKING:
+    import ssl  # pragma: no cover
+
+__all__ = ["Limits", "Proxy", "Timeout", "create_ssl_context"]
+
+
+class UnsetType:
+    pass  # pragma: no cover
+
+
+UNSET = UnsetType()
+
+
+def create_ssl_context(
+    verify: ssl.SSLContext | str | bool = True,
+    cert: CertTypes | None = None,
+    trust_env: bool = True,
+) -> ssl.SSLContext:
+    import ssl
+    import warnings
+
+    import certifi
+
+    if verify is True:
+        if trust_env and os.environ.get("SSL_CERT_FILE"):  # pragma: nocover
+            ctx = ssl.create_default_context(cafile=os.environ["SSL_CERT_FILE"])
+        elif trust_env and os.environ.get("SSL_CERT_DIR"):  # pragma: nocover
+            ctx = ssl.create_default_context(capath=os.environ["SSL_CERT_DIR"])
+        else:
+            # Default case...
+            ctx = ssl.create_default_context(cafile=certifi.where())
+    elif verify is False:
+        ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+        ctx.check_hostname = False
+        ctx.verify_mode = ssl.CERT_NONE
+    elif isinstance(verify, str):  # pragma: nocover
+        message = (
+            "`verify=<str>` is deprecated. "
+            "Use `verify=ssl.create_default_context(cafile=...)` "
+            "or `verify=ssl.create_default_context(capath=...)` instead."
+        )
+        warnings.warn(message, DeprecationWarning)
+        if os.path.isdir(verify):
+            return ssl.create_default_context(capath=verify)
+        return ssl.create_default_context(cafile=verify)
+    else:
+        ctx = verify
+
+    if cert:  # pragma: nocover
+        message = (
+            "`cert=...` is deprecated. Use `verify=<ssl_context>` instead, "
+            "with `.load_cert_chain()` to configure the certificate chain."
+        )
+        warnings.warn(message, DeprecationWarning)
+        if isinstance(cert, str):
+            ctx.load_cert_chain(cert)
+        else:
+            ctx.load_cert_chain(*cert)
+
+    return ctx
+
+
+class Timeout:
+    """
+    Timeout configuration.
+
+    **Usage**:
+
+    Timeout(None)               # No timeouts.
+    Timeout(5.0)                # 5s timeout on all operations.
+    Timeout(None, connect=5.0)  # 5s timeout on connect, no other timeouts.
+    Timeout(5.0, connect=10.0)  # 10s timeout on connect. 5s timeout elsewhere.
+    Timeout(5.0, pool=None)     # No timeout on acquiring connection from pool.
+                                # 5s timeout elsewhere.
+    """
+
+    def __init__(
+        self,
+        timeout: TimeoutTypes | UnsetType = UNSET,
+        *,
+        connect: None | float | UnsetType = UNSET,
+        read: None | float | UnsetType = UNSET,
+        write: None | float | UnsetType = UNSET,
+        pool: None | float | UnsetType = UNSET,
+    ) -> None:
+        if isinstance(timeout, Timeout):
+            # Passed as a single explicit Timeout.
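+            # All four fields are copied verbatim from the given instance.
+            # The asserts below reject mixing a Timeout instance with
+            # per-field keyword overrides, e.g. Timeout(Timeout(5.0), read=10.0).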
+ assert connect is UNSET + assert read is UNSET + assert write is UNSET + assert pool is UNSET + self.connect = timeout.connect # type: typing.Optional[float] + self.read = timeout.read # type: typing.Optional[float] + self.write = timeout.write # type: typing.Optional[float] + self.pool = timeout.pool # type: typing.Optional[float] + elif isinstance(timeout, tuple): + # Passed as a tuple. + self.connect = timeout[0] + self.read = timeout[1] + self.write = None if len(timeout) < 3 else timeout[2] + self.pool = None if len(timeout) < 4 else timeout[3] + elif not ( + isinstance(connect, UnsetType) + or isinstance(read, UnsetType) + or isinstance(write, UnsetType) + or isinstance(pool, UnsetType) + ): + self.connect = connect + self.read = read + self.write = write + self.pool = pool + else: + if isinstance(timeout, UnsetType): + raise ValueError( + "httpx.Timeout must either include a default, or set all " + "four parameters explicitly." + ) + self.connect = timeout if isinstance(connect, UnsetType) else connect + self.read = timeout if isinstance(read, UnsetType) else read + self.write = timeout if isinstance(write, UnsetType) else write + self.pool = timeout if isinstance(pool, UnsetType) else pool + + def as_dict(self) -> dict[str, float | None]: + return { + "connect": self.connect, + "read": self.read, + "write": self.write, + "pool": self.pool, + } + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, self.__class__) + and self.connect == other.connect + and self.read == other.read + and self.write == other.write + and self.pool == other.pool + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + if len({self.connect, self.read, self.write, self.pool}) == 1: + return f"{class_name}(timeout={self.connect})" + return ( + f"{class_name}(connect={self.connect}, " + f"read={self.read}, write={self.write}, pool={self.pool})" + ) + + +class Limits: + """ + Configuration for limits to various client behaviors. + + **Parameters:** + + * **max_connections** - The maximum number of concurrent connections that may be + established. + * **max_keepalive_connections** - Allow the connection pool to maintain + keep-alive connections below this point. Should be less than or equal + to `max_connections`. + * **keepalive_expiry** - Time limit on idle keep-alive connections in seconds. 
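+
+    For example, a sketch using the same values as the `DEFAULT_LIMITS`
+    defined at the bottom of this module:
+
+        Limits(max_connections=100, max_keepalive_connections=20)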
+ """ + + def __init__( + self, + *, + max_connections: int | None = None, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = 5.0, + ) -> None: + self.max_connections = max_connections + self.max_keepalive_connections = max_keepalive_connections + self.keepalive_expiry = keepalive_expiry + + def __eq__(self, other: typing.Any) -> bool: + return ( + isinstance(other, self.__class__) + and self.max_connections == other.max_connections + and self.max_keepalive_connections == other.max_keepalive_connections + and self.keepalive_expiry == other.keepalive_expiry + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + return ( + f"{class_name}(max_connections={self.max_connections}, " + f"max_keepalive_connections={self.max_keepalive_connections}, " + f"keepalive_expiry={self.keepalive_expiry})" + ) + + +class Proxy: + def __init__( + self, + url: URL | str, + *, + ssl_context: ssl.SSLContext | None = None, + auth: tuple[str, str] | None = None, + headers: HeaderTypes | None = None, + ) -> None: + url = URL(url) + headers = Headers(headers) + + if url.scheme not in ("http", "https", "socks5", "socks5h"): + raise ValueError(f"Unknown scheme for proxy URL {url!r}") + + if url.username or url.password: + # Remove any auth credentials from the URL. + auth = (url.username, url.password) + url = url.copy_with(username=None, password=None) + + self.url = url + self.auth = auth + self.headers = headers + self.ssl_context = ssl_context + + @property + def raw_auth(self) -> tuple[bytes, bytes] | None: + # The proxy authentication as raw bytes. + return ( + None + if self.auth is None + else (self.auth[0].encode("utf-8"), self.auth[1].encode("utf-8")) + ) + + def __repr__(self) -> str: + # The authentication is represented with the password component masked. + auth = (self.auth[0], "********") if self.auth else None + + # Build a nice concise representation. 
+ url_str = f"{str(self.url)!r}" + auth_str = f", auth={auth!r}" if auth else "" + headers_str = f", headers={dict(self.headers)!r}" if self.headers else "" + return f"Proxy({url_str}{auth_str}{headers_str})" + + +DEFAULT_TIMEOUT_CONFIG = Timeout(timeout=5.0) +DEFAULT_LIMITS = Limits(max_connections=100, max_keepalive_connections=20) +DEFAULT_MAX_REDIRECTS = 20 diff --git a/venv/lib/python3.11/site-packages/httpx/_content.py b/venv/lib/python3.11/site-packages/httpx/_content.py new file mode 100644 index 0000000..6f479a0 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx/_content.py @@ -0,0 +1,240 @@ +from __future__ import annotations + +import inspect +import warnings +from json import dumps as json_dumps +from typing import ( + Any, + AsyncIterable, + AsyncIterator, + Iterable, + Iterator, + Mapping, +) +from urllib.parse import urlencode + +from ._exceptions import StreamClosed, StreamConsumed +from ._multipart import MultipartStream +from ._types import ( + AsyncByteStream, + RequestContent, + RequestData, + RequestFiles, + ResponseContent, + SyncByteStream, +) +from ._utils import peek_filelike_length, primitive_value_to_str + +__all__ = ["ByteStream"] + + +class ByteStream(AsyncByteStream, SyncByteStream): + def __init__(self, stream: bytes) -> None: + self._stream = stream + + def __iter__(self) -> Iterator[bytes]: + yield self._stream + + async def __aiter__(self) -> AsyncIterator[bytes]: + yield self._stream + + +class IteratorByteStream(SyncByteStream): + CHUNK_SIZE = 65_536 + + def __init__(self, stream: Iterable[bytes]) -> None: + self._stream = stream + self._is_stream_consumed = False + self._is_generator = inspect.isgenerator(stream) + + def __iter__(self) -> Iterator[bytes]: + if self._is_stream_consumed and self._is_generator: + raise StreamConsumed() + + self._is_stream_consumed = True + if hasattr(self._stream, "read"): + # File-like interfaces should use 'read' directly. + chunk = self._stream.read(self.CHUNK_SIZE) + while chunk: + yield chunk + chunk = self._stream.read(self.CHUNK_SIZE) + else: + # Otherwise iterate. + for part in self._stream: + yield part + + +class AsyncIteratorByteStream(AsyncByteStream): + CHUNK_SIZE = 65_536 + + def __init__(self, stream: AsyncIterable[bytes]) -> None: + self._stream = stream + self._is_stream_consumed = False + self._is_generator = inspect.isasyncgen(stream) + + async def __aiter__(self) -> AsyncIterator[bytes]: + if self._is_stream_consumed and self._is_generator: + raise StreamConsumed() + + self._is_stream_consumed = True + if hasattr(self._stream, "aread"): + # File-like interfaces should use 'aread' directly. + chunk = await self._stream.aread(self.CHUNK_SIZE) + while chunk: + yield chunk + chunk = await self._stream.aread(self.CHUNK_SIZE) + else: + # Otherwise iterate. + async for part in self._stream: + yield part + + +class UnattachedStream(AsyncByteStream, SyncByteStream): + """ + If a request or response is serialized using pickle, then it is no longer + attached to a stream for I/O purposes. Any stream operations should result + in `httpx.StreamClosed`. 
+ """ + + def __iter__(self) -> Iterator[bytes]: + raise StreamClosed() + + async def __aiter__(self) -> AsyncIterator[bytes]: + raise StreamClosed() + yield b"" # pragma: no cover + + +def encode_content( + content: str | bytes | Iterable[bytes] | AsyncIterable[bytes], +) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]: + if isinstance(content, (bytes, str)): + body = content.encode("utf-8") if isinstance(content, str) else content + content_length = len(body) + headers = {"Content-Length": str(content_length)} if body else {} + return headers, ByteStream(body) + + elif isinstance(content, Iterable) and not isinstance(content, dict): + # `not isinstance(content, dict)` is a bit oddly specific, but it + # catches a case that's easy for users to make in error, and would + # otherwise pass through here, like any other bytes-iterable, + # because `dict` happens to be iterable. See issue #2491. + content_length_or_none = peek_filelike_length(content) + + if content_length_or_none is None: + headers = {"Transfer-Encoding": "chunked"} + else: + headers = {"Content-Length": str(content_length_or_none)} + return headers, IteratorByteStream(content) # type: ignore + + elif isinstance(content, AsyncIterable): + headers = {"Transfer-Encoding": "chunked"} + return headers, AsyncIteratorByteStream(content) + + raise TypeError(f"Unexpected type for 'content', {type(content)!r}") + + +def encode_urlencoded_data( + data: RequestData, +) -> tuple[dict[str, str], ByteStream]: + plain_data = [] + for key, value in data.items(): + if isinstance(value, (list, tuple)): + plain_data.extend([(key, primitive_value_to_str(item)) for item in value]) + else: + plain_data.append((key, primitive_value_to_str(value))) + body = urlencode(plain_data, doseq=True).encode("utf-8") + content_length = str(len(body)) + content_type = "application/x-www-form-urlencoded" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_multipart_data( + data: RequestData, files: RequestFiles, boundary: bytes | None +) -> tuple[dict[str, str], MultipartStream]: + multipart = MultipartStream(data=data, files=files, boundary=boundary) + headers = multipart.get_headers() + return headers, multipart + + +def encode_text(text: str) -> tuple[dict[str, str], ByteStream]: + body = text.encode("utf-8") + content_length = str(len(body)) + content_type = "text/plain; charset=utf-8" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_html(html: str) -> tuple[dict[str, str], ByteStream]: + body = html.encode("utf-8") + content_length = str(len(body)) + content_type = "text/html; charset=utf-8" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_json(json: Any) -> tuple[dict[str, str], ByteStream]: + body = json_dumps( + json, ensure_ascii=False, separators=(",", ":"), allow_nan=False + ).encode("utf-8") + content_length = str(len(body)) + content_type = "application/json" + headers = {"Content-Length": content_length, "Content-Type": content_type} + return headers, ByteStream(body) + + +def encode_request( + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: Any | None = None, + boundary: bytes | None = None, +) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]: + """ + Handles encoding the given `content`, `data`, `files`, and `json`, + returning 
a two-tuple of (<headers>, <stream>).
+    """
+    if data is not None and not isinstance(data, Mapping):
+        # We prefer to separate `content=<bytes|str|byte iterator|bytes aiterator>`
+        # for raw request content, and `data=<form data>` for url encoded or
+        # multipart form content.
+        #
+        # However for compat with requests, we *do* still support
+        # `data=<bytes...>` usages. We deal with that case here, treating it
+        # as if `content=<...>` had been supplied instead.
+        message = "Use 'content=<...>' to upload raw bytes/text content."
+        warnings.warn(message, DeprecationWarning, stacklevel=2)
+        return encode_content(data)
+
+    if content is not None:
+        return encode_content(content)
+    elif files:
+        return encode_multipart_data(data or {}, files, boundary)
+    elif data:
+        return encode_urlencoded_data(data)
+    elif json is not None:
+        return encode_json(json)
+
+    return {}, ByteStream(b"")
+
+
+def encode_response(
+    content: ResponseContent | None = None,
+    text: str | None = None,
+    html: str | None = None,
+    json: Any | None = None,
+) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]:
+    """
+    Handles encoding the given `content`, returning a two-tuple of
+    (<headers>, <stream>).
+    """
+    if content is not None:
+        return encode_content(content)
+    elif text is not None:
+        return encode_text(text)
+    elif html is not None:
+        return encode_html(html)
+    elif json is not None:
+        return encode_json(json)
+
+    return {}, ByteStream(b"")
diff --git a/venv/lib/python3.11/site-packages/httpx/_decoders.py b/venv/lib/python3.11/site-packages/httpx/_decoders.py
new file mode 100644
index 0000000..899dfad
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/httpx/_decoders.py
@@ -0,0 +1,393 @@
+"""
+Handlers for Content-Encoding.
+
+See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding
+"""
+
+from __future__ import annotations
+
+import codecs
+import io
+import typing
+import zlib
+
+from ._exceptions import DecodingError
+
+# Brotli support is optional
+try:
+    # The C bindings in `brotli` are recommended for CPython.
+    import brotli
+except ImportError:  # pragma: no cover
+    try:
+        # The CFFI bindings in `brotlicffi` are recommended for PyPy
+        # and other environments.
+        import brotlicffi as brotli
+    except ImportError:
+        brotli = None
+
+
+# Zstandard support is optional
+try:
+    import zstandard
+except ImportError:  # pragma: no cover
+    zstandard = None  # type: ignore
+
+
+class ContentDecoder:
+    def decode(self, data: bytes) -> bytes:
+        raise NotImplementedError()  # pragma: no cover
+
+    def flush(self) -> bytes:
+        raise NotImplementedError()  # pragma: no cover
+
+
+class IdentityDecoder(ContentDecoder):
+    """
+    Handle unencoded data.
+    """
+
+    def decode(self, data: bytes) -> bytes:
+        return data
+
+    def flush(self) -> bytes:
+        return b""
+
+
+class DeflateDecoder(ContentDecoder):
+    """
+    Handle 'deflate' decoding.
+
+    See: https://stackoverflow.com/questions/1838699
+    """
+
+    def __init__(self) -> None:
+        self.first_attempt = True
+        self.decompressor = zlib.decompressobj()
+
+    def decode(self, data: bytes) -> bytes:
+        was_first_attempt = self.first_attempt
+        self.first_attempt = False
+        try:
+            return self.decompressor.decompress(data)
+        except zlib.error as exc:
+            if was_first_attempt:
+                self.decompressor = zlib.decompressobj(-zlib.MAX_WBITS)
+                return self.decode(data)
+            raise DecodingError(str(exc)) from exc
+
+    def flush(self) -> bytes:
+        try:
+            return self.decompressor.flush()
+        except zlib.error as exc:  # pragma: no cover
+            raise DecodingError(str(exc)) from exc
+
+
+class GZipDecoder(ContentDecoder):
+    """
+    Handle 'gzip' decoding.
+ + See: https://stackoverflow.com/questions/1838699 + """ + + def __init__(self) -> None: + self.decompressor = zlib.decompressobj(zlib.MAX_WBITS | 16) + + def decode(self, data: bytes) -> bytes: + try: + return self.decompressor.decompress(data) + except zlib.error as exc: + raise DecodingError(str(exc)) from exc + + def flush(self) -> bytes: + try: + return self.decompressor.flush() + except zlib.error as exc: # pragma: no cover + raise DecodingError(str(exc)) from exc + + +class BrotliDecoder(ContentDecoder): + """ + Handle 'brotli' decoding. + + Requires `pip install brotlipy`. See: https://brotlipy.readthedocs.io/ + or `pip install brotli`. See https://github.com/google/brotli + Supports both 'brotlipy' and 'Brotli' packages since they share an import + name. The top branches are for 'brotlipy' and bottom branches for 'Brotli' + """ + + def __init__(self) -> None: + if brotli is None: # pragma: no cover + raise ImportError( + "Using 'BrotliDecoder', but neither of the 'brotlicffi' or 'brotli' " + "packages have been installed. " + "Make sure to install httpx using `pip install httpx[brotli]`." + ) from None + + self.decompressor = brotli.Decompressor() + self.seen_data = False + self._decompress: typing.Callable[[bytes], bytes] + if hasattr(self.decompressor, "decompress"): + # The 'brotlicffi' package. + self._decompress = self.decompressor.decompress # pragma: no cover + else: + # The 'brotli' package. + self._decompress = self.decompressor.process # pragma: no cover + + def decode(self, data: bytes) -> bytes: + if not data: + return b"" + self.seen_data = True + try: + return self._decompress(data) + except brotli.error as exc: + raise DecodingError(str(exc)) from exc + + def flush(self) -> bytes: + if not self.seen_data: + return b"" + try: + if hasattr(self.decompressor, "finish"): + # Only available in the 'brotlicffi' package. + + # As the decompressor decompresses eagerly, this + # will never actually emit any data. However, it will potentially throw + # errors if a truncated or damaged data stream has been used. + self.decompressor.finish() # pragma: no cover + return b"" + except brotli.error as exc: # pragma: no cover + raise DecodingError(str(exc)) from exc + + +class ZStandardDecoder(ContentDecoder): + """ + Handle 'zstd' RFC 8878 decoding. + + Requires `pip install zstandard`. + Can be installed as a dependency of httpx using `pip install httpx[zstd]`. + """ + + # inspired by the ZstdDecoder implementation in urllib3 + def __init__(self) -> None: + if zstandard is None: # pragma: no cover + raise ImportError( + "Using 'ZStandardDecoder', ..." + "Make sure to install httpx using `pip install httpx[zstd]`." 
+ ) from None + + self.decompressor = zstandard.ZstdDecompressor().decompressobj() + self.seen_data = False + + def decode(self, data: bytes) -> bytes: + assert zstandard is not None + self.seen_data = True + output = io.BytesIO() + try: + output.write(self.decompressor.decompress(data)) + while self.decompressor.eof and self.decompressor.unused_data: + unused_data = self.decompressor.unused_data + self.decompressor = zstandard.ZstdDecompressor().decompressobj() + output.write(self.decompressor.decompress(unused_data)) + except zstandard.ZstdError as exc: + raise DecodingError(str(exc)) from exc + return output.getvalue() + + def flush(self) -> bytes: + if not self.seen_data: + return b"" + ret = self.decompressor.flush() # note: this is a no-op + if not self.decompressor.eof: + raise DecodingError("Zstandard data is incomplete") # pragma: no cover + return bytes(ret) + + +class MultiDecoder(ContentDecoder): + """ + Handle the case where multiple encodings have been applied. + """ + + def __init__(self, children: typing.Sequence[ContentDecoder]) -> None: + """ + 'children' should be a sequence of decoders in the order in which + each was applied. + """ + # Note that we reverse the order for decoding. + self.children = list(reversed(children)) + + def decode(self, data: bytes) -> bytes: + for child in self.children: + data = child.decode(data) + return data + + def flush(self) -> bytes: + data = b"" + for child in self.children: + data = child.decode(data) + child.flush() + return data + + +class ByteChunker: + """ + Handles returning byte content in fixed-size chunks. + """ + + def __init__(self, chunk_size: int | None = None) -> None: + self._buffer = io.BytesIO() + self._chunk_size = chunk_size + + def decode(self, content: bytes) -> list[bytes]: + if self._chunk_size is None: + return [content] if content else [] + + self._buffer.write(content) + if self._buffer.tell() >= self._chunk_size: + value = self._buffer.getvalue() + chunks = [ + value[i : i + self._chunk_size] + for i in range(0, len(value), self._chunk_size) + ] + if len(chunks[-1]) == self._chunk_size: + self._buffer.seek(0) + self._buffer.truncate() + return chunks + else: + self._buffer.seek(0) + self._buffer.write(chunks[-1]) + self._buffer.truncate() + return chunks[:-1] + else: + return [] + + def flush(self) -> list[bytes]: + value = self._buffer.getvalue() + self._buffer.seek(0) + self._buffer.truncate() + return [value] if value else [] + + +class TextChunker: + """ + Handles returning text content in fixed-size chunks. 
+ """ + + def __init__(self, chunk_size: int | None = None) -> None: + self._buffer = io.StringIO() + self._chunk_size = chunk_size + + def decode(self, content: str) -> list[str]: + if self._chunk_size is None: + return [content] if content else [] + + self._buffer.write(content) + if self._buffer.tell() >= self._chunk_size: + value = self._buffer.getvalue() + chunks = [ + value[i : i + self._chunk_size] + for i in range(0, len(value), self._chunk_size) + ] + if len(chunks[-1]) == self._chunk_size: + self._buffer.seek(0) + self._buffer.truncate() + return chunks + else: + self._buffer.seek(0) + self._buffer.write(chunks[-1]) + self._buffer.truncate() + return chunks[:-1] + else: + return [] + + def flush(self) -> list[str]: + value = self._buffer.getvalue() + self._buffer.seek(0) + self._buffer.truncate() + return [value] if value else [] + + +class TextDecoder: + """ + Handles incrementally decoding bytes into text + """ + + def __init__(self, encoding: str = "utf-8") -> None: + self.decoder = codecs.getincrementaldecoder(encoding)(errors="replace") + + def decode(self, data: bytes) -> str: + return self.decoder.decode(data) + + def flush(self) -> str: + return self.decoder.decode(b"", True) + + +class LineDecoder: + """ + Handles incrementally reading lines from text. + + Has the same behaviour as the stdllib splitlines, + but handling the input iteratively. + """ + + def __init__(self) -> None: + self.buffer: list[str] = [] + self.trailing_cr: bool = False + + def decode(self, text: str) -> list[str]: + # See https://docs.python.org/3/library/stdtypes.html#str.splitlines + NEWLINE_CHARS = "\n\r\x0b\x0c\x1c\x1d\x1e\x85\u2028\u2029" + + # We always push a trailing `\r` into the next decode iteration. + if self.trailing_cr: + text = "\r" + text + self.trailing_cr = False + if text.endswith("\r"): + self.trailing_cr = True + text = text[:-1] + + if not text: + # NOTE: the edge case input of empty text doesn't occur in practice, + # because other httpx internals filter out this value + return [] # pragma: no cover + + trailing_newline = text[-1] in NEWLINE_CHARS + lines = text.splitlines() + + if len(lines) == 1 and not trailing_newline: + # No new lines, buffer the input and continue. + self.buffer.append(lines[0]) + return [] + + if self.buffer: + # Include any existing buffer in the first portion of the + # splitlines result. + lines = ["".join(self.buffer) + lines[0]] + lines[1:] + self.buffer = [] + + if not trailing_newline: + # If the last segment of splitlines is not newline terminated, + # then drop it from our output and start a new buffer. 
+ self.buffer = [lines.pop()] + + return lines + + def flush(self) -> list[str]: + if not self.buffer and not self.trailing_cr: + return [] + + lines = ["".join(self.buffer)] + self.buffer = [] + self.trailing_cr = False + return lines + + +SUPPORTED_DECODERS = { + "identity": IdentityDecoder, + "gzip": GZipDecoder, + "deflate": DeflateDecoder, + "br": BrotliDecoder, + "zstd": ZStandardDecoder, +} + + +if brotli is None: + SUPPORTED_DECODERS.pop("br") # pragma: no cover +if zstandard is None: + SUPPORTED_DECODERS.pop("zstd") # pragma: no cover diff --git a/venv/lib/python3.11/site-packages/httpx/_exceptions.py b/venv/lib/python3.11/site-packages/httpx/_exceptions.py new file mode 100644 index 0000000..77f45a6 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx/_exceptions.py @@ -0,0 +1,379 @@ +""" +Our exception hierarchy: + +* HTTPError + x RequestError + + TransportError + - TimeoutException + · ConnectTimeout + · ReadTimeout + · WriteTimeout + · PoolTimeout + - NetworkError + · ConnectError + · ReadError + · WriteError + · CloseError + - ProtocolError + · LocalProtocolError + · RemoteProtocolError + - ProxyError + - UnsupportedProtocol + + DecodingError + + TooManyRedirects + x HTTPStatusError +* InvalidURL +* CookieConflict +* StreamError + x StreamConsumed + x StreamClosed + x ResponseNotRead + x RequestNotRead +""" + +from __future__ import annotations + +import contextlib +import typing + +if typing.TYPE_CHECKING: + from ._models import Request, Response # pragma: no cover + +__all__ = [ + "CloseError", + "ConnectError", + "ConnectTimeout", + "CookieConflict", + "DecodingError", + "HTTPError", + "HTTPStatusError", + "InvalidURL", + "LocalProtocolError", + "NetworkError", + "PoolTimeout", + "ProtocolError", + "ProxyError", + "ReadError", + "ReadTimeout", + "RemoteProtocolError", + "RequestError", + "RequestNotRead", + "ResponseNotRead", + "StreamClosed", + "StreamConsumed", + "StreamError", + "TimeoutException", + "TooManyRedirects", + "TransportError", + "UnsupportedProtocol", + "WriteError", + "WriteTimeout", +] + + +class HTTPError(Exception): + """ + Base class for `RequestError` and `HTTPStatusError`. + + Useful for `try...except` blocks when issuing a request, + and then calling `.raise_for_status()`. + + For example: + + ``` + try: + response = httpx.get("https://www.example.com") + response.raise_for_status() + except httpx.HTTPError as exc: + print(f"HTTP Exception for {exc.request.url} - {exc}") + ``` + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + self._request: Request | None = None + + @property + def request(self) -> Request: + if self._request is None: + raise RuntimeError("The .request property has not been set.") + return self._request + + @request.setter + def request(self, request: Request) -> None: + self._request = request + + +class RequestError(HTTPError): + """ + Base class for all exceptions that may occur when issuing a `.request()`. + """ + + def __init__(self, message: str, *, request: Request | None = None) -> None: + super().__init__(message) + # At the point an exception is raised we won't typically have a request + # instance to associate it with. + # + # The 'request_context' context manager is used within the Client and + # Response methods in order to ensure that any raised exceptions + # have a `.request` property set on them. + self._request = request + + +class TransportError(RequestError): + """ + Base class for all exceptions that occur at the level of the Transport API. 
+ """ + + +# Timeout exceptions... + + +class TimeoutException(TransportError): + """ + The base class for timeout errors. + + An operation has timed out. + """ + + +class ConnectTimeout(TimeoutException): + """ + Timed out while connecting to the host. + """ + + +class ReadTimeout(TimeoutException): + """ + Timed out while receiving data from the host. + """ + + +class WriteTimeout(TimeoutException): + """ + Timed out while sending data to the host. + """ + + +class PoolTimeout(TimeoutException): + """ + Timed out waiting to acquire a connection from the pool. + """ + + +# Core networking exceptions... + + +class NetworkError(TransportError): + """ + The base class for network-related errors. + + An error occurred while interacting with the network. + """ + + +class ReadError(NetworkError): + """ + Failed to receive data from the network. + """ + + +class WriteError(NetworkError): + """ + Failed to send data through the network. + """ + + +class ConnectError(NetworkError): + """ + Failed to establish a connection. + """ + + +class CloseError(NetworkError): + """ + Failed to close a connection. + """ + + +# Other transport exceptions... + + +class ProxyError(TransportError): + """ + An error occurred while establishing a proxy connection. + """ + + +class UnsupportedProtocol(TransportError): + """ + Attempted to make a request to an unsupported protocol. + + For example issuing a request to `ftp://www.example.com`. + """ + + +class ProtocolError(TransportError): + """ + The protocol was violated. + """ + + +class LocalProtocolError(ProtocolError): + """ + A protocol was violated by the client. + + For example if the user instantiated a `Request` instance explicitly, + failed to include the mandatory `Host:` header, and then issued it directly + using `client.send()`. + """ + + +class RemoteProtocolError(ProtocolError): + """ + The protocol was violated by the server. + + For example, returning malformed HTTP. + """ + + +# Other request exceptions... + + +class DecodingError(RequestError): + """ + Decoding of the response failed, due to a malformed encoding. + """ + + +class TooManyRedirects(RequestError): + """ + Too many redirects. + """ + + +# Client errors + + +class HTTPStatusError(HTTPError): + """ + The response had an error HTTP status of 4xx or 5xx. + + May be raised when calling `response.raise_for_status()` + """ + + def __init__(self, message: str, *, request: Request, response: Response) -> None: + super().__init__(message) + self.request = request + self.response = response + + +class InvalidURL(Exception): + """ + URL is improperly formed or cannot be parsed. + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +class CookieConflict(Exception): + """ + Attempted to lookup a cookie by name, but multiple cookies existed. + + Can occur when calling `response.cookies.get(...)`. + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +# Stream exceptions... + +# These may occur as the result of a programming error, by accessing +# the request/response stream in an invalid manner. + + +class StreamError(RuntimeError): + """ + The base class for stream exceptions. + + The developer made an error in accessing the request stream in + an invalid way. + """ + + def __init__(self, message: str) -> None: + super().__init__(message) + + +class StreamConsumed(StreamError): + """ + Attempted to read or stream content, but the content has already + been streamed. 
+ """ + + def __init__(self) -> None: + message = ( + "Attempted to read or stream some content, but the content has " + "already been streamed. For requests, this could be due to passing " + "a generator as request content, and then receiving a redirect " + "response or a secondary request as part of an authentication flow." + "For responses, this could be due to attempting to stream the response " + "content more than once." + ) + super().__init__(message) + + +class StreamClosed(StreamError): + """ + Attempted to read or stream response content, but the request has been + closed. + """ + + def __init__(self) -> None: + message = ( + "Attempted to read or stream content, but the stream has " "been closed." + ) + super().__init__(message) + + +class ResponseNotRead(StreamError): + """ + Attempted to access streaming response content, without having called `read()`. + """ + + def __init__(self) -> None: + message = ( + "Attempted to access streaming response content," + " without having called `read()`." + ) + super().__init__(message) + + +class RequestNotRead(StreamError): + """ + Attempted to access streaming request content, without having called `read()`. + """ + + def __init__(self) -> None: + message = ( + "Attempted to access streaming request content," + " without having called `read()`." + ) + super().__init__(message) + + +@contextlib.contextmanager +def request_context( + request: Request | None = None, +) -> typing.Iterator[None]: + """ + A context manager that can be used to attach the given request context + to any `RequestError` exceptions that are raised within the block. + """ + try: + yield + except RequestError as exc: + if request is not None: + exc.request = request + raise exc diff --git a/venv/lib/python3.11/site-packages/httpx/_main.py b/venv/lib/python3.11/site-packages/httpx/_main.py new file mode 100644 index 0000000..cffa4bb --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx/_main.py @@ -0,0 +1,506 @@ +from __future__ import annotations + +import functools +import json +import sys +import typing + +import click +import pygments.lexers +import pygments.util +import rich.console +import rich.markup +import rich.progress +import rich.syntax +import rich.table + +from ._client import Client +from ._exceptions import RequestError +from ._models import Response +from ._status_codes import codes + +if typing.TYPE_CHECKING: + import httpcore # pragma: no cover + + +def print_help() -> None: + console = rich.console.Console() + + console.print("[bold]HTTPX :butterfly:", justify="center") + console.print() + console.print("A next generation HTTP client.", justify="center") + console.print() + console.print( + "Usage: [bold]httpx[/bold] [cyan] [OPTIONS][/cyan] ", justify="left" + ) + console.print() + + table = rich.table.Table.grid(padding=1, pad_edge=True) + table.add_column("Parameter", no_wrap=True, justify="left", style="bold") + table.add_column("Description") + table.add_row( + "-m, --method [cyan]METHOD", + "Request method, such as GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD.\n" + "[Default: GET, or POST if a request body is included]", + ) + table.add_row( + "-p, --params [cyan] ...", + "Query parameters to include in the request URL.", + ) + table.add_row( + "-c, --content [cyan]TEXT", "Byte content to include in the request body." + ) + table.add_row( + "-d, --data [cyan] ...", "Form data to include in the request body." 
+    )
+    table.add_row(
+        "-f, --files [cyan]<NAME FILENAME> ...",
+        "Form files to include in the request body.",
+    )
+    table.add_row("-j, --json [cyan]TEXT", "JSON data to include in the request body.")
+    table.add_row(
+        "-h, --headers [cyan]<NAME VALUE> ...",
+        "Include additional HTTP headers in the request.",
+    )
+    table.add_row(
+        "--cookies [cyan]<NAME VALUE> ...", "Cookies to include in the request."
+    )
+    table.add_row(
+        "--auth [cyan]<USER PASS>",
+        "Username and password to include in the request. Specify '-' for the password"
+        " to use a password prompt. Note that using --verbose/-v will expose"
+        " the Authorization header, including the password encoding"
+        " in a trivially reversible format.",
+    )
+
+    table.add_row(
+        "--proxy [cyan]URL",
+        "Send the request via a proxy. Should be the URL giving the proxy address.",
+    )
+
+    table.add_row(
+        "--timeout [cyan]FLOAT",
+        "Timeout value to use for network operations, such as establishing the"
+        " connection, reading some data, etc... [Default: 5.0]",
+    )
+
+    table.add_row("--follow-redirects", "Automatically follow redirects.")
+    table.add_row("--no-verify", "Disable SSL verification.")
+    table.add_row(
+        "--http2", "Send the request using HTTP/2, if the remote server supports it."
+    )
+
+    table.add_row(
+        "--download [cyan]FILE",
+        "Save the response content as a file, rather than displaying it.",
+    )
+
+    table.add_row("-v, --verbose", "Verbose output. Show request as well as response.")
+    table.add_row("--help", "Show this message and exit.")
+    console.print(table)
+
+
+def get_lexer_for_response(response: Response) -> str:
+    content_type = response.headers.get("Content-Type")
+    if content_type is not None:
+        mime_type, _, _ = content_type.partition(";")
+        try:
+            return typing.cast(
+                str, pygments.lexers.get_lexer_for_mimetype(mime_type.strip()).name
+            )
+        except pygments.util.ClassNotFound:  # pragma: no cover
+            pass
+    return ""  # pragma: no cover
+
+
+def format_request_headers(request: httpcore.Request, http2: bool = False) -> str:
+    version = "HTTP/2" if http2 else "HTTP/1.1"
+    headers = [
+        (name.lower() if http2 else name, value) for name, value in request.headers
+    ]
+    method = request.method.decode("ascii")
+    target = request.url.target.decode("ascii")
+    lines = [f"{method} {target} {version}"] + [
+        f"{name.decode('ascii')}: {value.decode('ascii')}" for name, value in headers
+    ]
+    return "\n".join(lines)
+
+
+def format_response_headers(
+    http_version: bytes,
+    status: int,
+    reason_phrase: bytes | None,
+    headers: list[tuple[bytes, bytes]],
+) -> str:
+    version = http_version.decode("ascii")
+    reason = (
+        codes.get_reason_phrase(status)
+        if reason_phrase is None
+        else reason_phrase.decode("ascii")
+    )
+    lines = [f"{version} {status} {reason}"] + [
+        f"{name.decode('ascii')}: {value.decode('ascii')}" for name, value in headers
+    ]
+    return "\n".join(lines)
+
+
+def print_request_headers(request: httpcore.Request, http2: bool = False) -> None:
+    console = rich.console.Console()
+    http_text = format_request_headers(request, http2=http2)
+    syntax = rich.syntax.Syntax(http_text, "http", theme="ansi_dark", word_wrap=True)
+    console.print(syntax)
+    syntax = rich.syntax.Syntax("", "http", theme="ansi_dark", word_wrap=True)
+    console.print(syntax)
+
+
+def print_response_headers(
+    http_version: bytes,
+    status: int,
+    reason_phrase: bytes | None,
+    headers: list[tuple[bytes, bytes]],
+) -> None:
+    console = rich.console.Console()
+    http_text = format_response_headers(http_version, status, reason_phrase, headers)
+    syntax = rich.syntax.Syntax(http_text, "http",
theme="ansi_dark", word_wrap=True) + console.print(syntax) + syntax = rich.syntax.Syntax("", "http", theme="ansi_dark", word_wrap=True) + console.print(syntax) + + +def print_response(response: Response) -> None: + console = rich.console.Console() + lexer_name = get_lexer_for_response(response) + if lexer_name: + if lexer_name.lower() == "json": + try: + data = response.json() + text = json.dumps(data, indent=4) + except ValueError: # pragma: no cover + text = response.text + else: + text = response.text + + syntax = rich.syntax.Syntax(text, lexer_name, theme="ansi_dark", word_wrap=True) + console.print(syntax) + else: + console.print(f"<{len(response.content)} bytes of binary data>") + + +_PCTRTT = typing.Tuple[typing.Tuple[str, str], ...] +_PCTRTTT = typing.Tuple[_PCTRTT, ...] +_PeerCertRetDictType = typing.Dict[str, typing.Union[str, _PCTRTTT, _PCTRTT]] + + +def format_certificate(cert: _PeerCertRetDictType) -> str: # pragma: no cover + lines = [] + for key, value in cert.items(): + if isinstance(value, (list, tuple)): + lines.append(f"* {key}:") + for item in value: + if key in ("subject", "issuer"): + for sub_item in item: + lines.append(f"* {sub_item[0]}: {sub_item[1]!r}") + elif isinstance(item, tuple) and len(item) == 2: + lines.append(f"* {item[0]}: {item[1]!r}") + else: + lines.append(f"* {item!r}") + else: + lines.append(f"* {key}: {value!r}") + return "\n".join(lines) + + +def trace( + name: str, info: typing.Mapping[str, typing.Any], verbose: bool = False +) -> None: + console = rich.console.Console() + if name == "connection.connect_tcp.started" and verbose: + host = info["host"] + console.print(f"* Connecting to {host!r}") + elif name == "connection.connect_tcp.complete" and verbose: + stream = info["return_value"] + server_addr = stream.get_extra_info("server_addr") + console.print(f"* Connected to {server_addr[0]!r} on port {server_addr[1]}") + elif name == "connection.start_tls.complete" and verbose: # pragma: no cover + stream = info["return_value"] + ssl_object = stream.get_extra_info("ssl_object") + version = ssl_object.version() + cipher = ssl_object.cipher() + server_cert = ssl_object.getpeercert() + alpn = ssl_object.selected_alpn_protocol() + console.print(f"* SSL established using {version!r} / {cipher[0]!r}") + console.print(f"* Selected ALPN protocol: {alpn!r}") + if server_cert: + console.print("* Server certificate:") + console.print(format_certificate(server_cert)) + elif name == "http11.send_request_headers.started" and verbose: + request = info["request"] + print_request_headers(request, http2=False) + elif name == "http2.send_request_headers.started" and verbose: # pragma: no cover + request = info["request"] + print_request_headers(request, http2=True) + elif name == "http11.receive_response_headers.complete": + http_version, status, reason_phrase, headers = info["return_value"] + print_response_headers(http_version, status, reason_phrase, headers) + elif name == "http2.receive_response_headers.complete": # pragma: no cover + status, headers = info["return_value"] + http_version = b"HTTP/2" + reason_phrase = None + print_response_headers(http_version, status, reason_phrase, headers) + + +def download_response(response: Response, download: typing.BinaryIO) -> None: + console = rich.console.Console() + console.print() + content_length = response.headers.get("Content-Length") + with rich.progress.Progress( + "[progress.description]{task.description}", + "[progress.percentage]{task.percentage:>3.0f}%", + rich.progress.BarColumn(bar_width=None), + 
rich.progress.DownloadColumn(), + rich.progress.TransferSpeedColumn(), + ) as progress: + description = f"Downloading [bold]{rich.markup.escape(download.name)}" + download_task = progress.add_task( + description, + total=int(content_length or 0), + start=content_length is not None, + ) + for chunk in response.iter_bytes(): + download.write(chunk) + progress.update(download_task, completed=response.num_bytes_downloaded) + + +def validate_json( + ctx: click.Context, + param: click.Option | click.Parameter, + value: typing.Any, +) -> typing.Any: + if value is None: + return None + + try: + return json.loads(value) + except json.JSONDecodeError: # pragma: no cover + raise click.BadParameter("Not valid JSON") + + +def validate_auth( + ctx: click.Context, + param: click.Option | click.Parameter, + value: typing.Any, +) -> typing.Any: + if value == (None, None): + return None + + username, password = value + if password == "-": # pragma: no cover + password = click.prompt("Password", hide_input=True) + return (username, password) + + +def handle_help( + ctx: click.Context, + param: click.Option | click.Parameter, + value: typing.Any, +) -> None: + if not value or ctx.resilient_parsing: + return + + print_help() + ctx.exit() + + +@click.command(add_help_option=False) +@click.argument("url", type=str) +@click.option( + "--method", + "-m", + "method", + type=str, + help=( + "Request method, such as GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD. " + "[Default: GET, or POST if a request body is included]" + ), +) +@click.option( + "--params", + "-p", + "params", + type=(str, str), + multiple=True, + help="Query parameters to include in the request URL.", +) +@click.option( + "--content", + "-c", + "content", + type=str, + help="Byte content to include in the request body.", +) +@click.option( + "--data", + "-d", + "data", + type=(str, str), + multiple=True, + help="Form data to include in the request body.", +) +@click.option( + "--files", + "-f", + "files", + type=(str, click.File(mode="rb")), + multiple=True, + help="Form files to include in the request body.", +) +@click.option( + "--json", + "-j", + "json", + type=str, + callback=validate_json, + help="JSON data to include in the request body.", +) +@click.option( + "--headers", + "-h", + "headers", + type=(str, str), + multiple=True, + help="Include additional HTTP headers in the request.", +) +@click.option( + "--cookies", + "cookies", + type=(str, str), + multiple=True, + help="Cookies to include in the request.", +) +@click.option( + "--auth", + "auth", + type=(str, str), + default=(None, None), + callback=validate_auth, + help=( + "Username and password to include in the request. " + "Specify '-' for the password to use a password prompt. " + "Note that using --verbose/-v will expose the Authorization header, " + "including the password encoding in a trivially reversible format." + ), +) +@click.option( + "--proxy", + "proxy", + type=str, + default=None, + help="Send the request via a proxy. Should be the URL giving the proxy address.", +) +@click.option( + "--timeout", + "timeout", + type=float, + default=5.0, + help=( + "Timeout value to use for network operations, such as establishing the " + "connection, reading some data, etc... 
[Default: 5.0]" + ), +) +@click.option( + "--follow-redirects", + "follow_redirects", + is_flag=True, + default=False, + help="Automatically follow redirects.", +) +@click.option( + "--no-verify", + "verify", + is_flag=True, + default=True, + help="Disable SSL verification.", +) +@click.option( + "--http2", + "http2", + type=bool, + is_flag=True, + default=False, + help="Send the request using HTTP/2, if the remote server supports it.", +) +@click.option( + "--download", + type=click.File("wb"), + help="Save the response content as a file, rather than displaying it.", +) +@click.option( + "--verbose", + "-v", + type=bool, + is_flag=True, + default=False, + help="Verbose. Show request as well as response.", +) +@click.option( + "--help", + is_flag=True, + is_eager=True, + expose_value=False, + callback=handle_help, + help="Show this message and exit.", +) +def main( + url: str, + method: str, + params: list[tuple[str, str]], + content: str, + data: list[tuple[str, str]], + files: list[tuple[str, click.File]], + json: str, + headers: list[tuple[str, str]], + cookies: list[tuple[str, str]], + auth: tuple[str, str] | None, + proxy: str, + timeout: float, + follow_redirects: bool, + verify: bool, + http2: bool, + download: typing.BinaryIO | None, + verbose: bool, +) -> None: + """ + An HTTP command line client. + Sends a request and displays the response. + """ + if not method: + method = "POST" if content or data or files or json else "GET" + + try: + with Client(proxy=proxy, timeout=timeout, http2=http2, verify=verify) as client: + with client.stream( + method, + url, + params=list(params), + content=content, + data=dict(data), + files=files, # type: ignore + json=json, + headers=headers, + cookies=dict(cookies), + auth=auth, + follow_redirects=follow_redirects, + extensions={"trace": functools.partial(trace, verbose=verbose)}, + ) as response: + if download is not None: + download_response(response, download) + else: + response.read() + if response.content: + print_response(response) + + except RequestError as exc: + console = rich.console.Console() + console.print(f"[red]{type(exc).__name__}[/red]: {exc}") + sys.exit(1) + + sys.exit(0 if response.is_success else 1) diff --git a/venv/lib/python3.11/site-packages/httpx/_models.py b/venv/lib/python3.11/site-packages/httpx/_models.py new file mode 100644 index 0000000..67d74bf --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx/_models.py @@ -0,0 +1,1277 @@ +from __future__ import annotations + +import codecs +import datetime +import email.message +import json as jsonlib +import re +import typing +import urllib.request +from collections.abc import Mapping +from http.cookiejar import Cookie, CookieJar + +from ._content import ByteStream, UnattachedStream, encode_request, encode_response +from ._decoders import ( + SUPPORTED_DECODERS, + ByteChunker, + ContentDecoder, + IdentityDecoder, + LineDecoder, + MultiDecoder, + TextChunker, + TextDecoder, +) +from ._exceptions import ( + CookieConflict, + HTTPStatusError, + RequestNotRead, + ResponseNotRead, + StreamClosed, + StreamConsumed, + request_context, +) +from ._multipart import get_multipart_boundary_from_content_type +from ._status_codes import codes +from ._types import ( + AsyncByteStream, + CookieTypes, + HeaderTypes, + QueryParamTypes, + RequestContent, + RequestData, + RequestExtensions, + RequestFiles, + ResponseContent, + ResponseExtensions, + SyncByteStream, +) +from ._urls import URL +from ._utils import to_bytes_or_str, to_str + +__all__ = ["Cookies", "Headers", "Request", 
"Response"] + +SENSITIVE_HEADERS = {"authorization", "proxy-authorization"} + + +def _is_known_encoding(encoding: str) -> bool: + """ + Return `True` if `encoding` is a known codec. + """ + try: + codecs.lookup(encoding) + except LookupError: + return False + return True + + +def _normalize_header_key(key: str | bytes, encoding: str | None = None) -> bytes: + """ + Coerce str/bytes into a strictly byte-wise HTTP header key. + """ + return key if isinstance(key, bytes) else key.encode(encoding or "ascii") + + +def _normalize_header_value(value: str | bytes, encoding: str | None = None) -> bytes: + """ + Coerce str/bytes into a strictly byte-wise HTTP header value. + """ + if isinstance(value, bytes): + return value + if not isinstance(value, str): + raise TypeError(f"Header value must be str or bytes, not {type(value)}") + return value.encode(encoding or "ascii") + + +def _parse_content_type_charset(content_type: str) -> str | None: + # We used to use `cgi.parse_header()` here, but `cgi` became a dead battery. + # See: https://peps.python.org/pep-0594/#cgi + msg = email.message.Message() + msg["content-type"] = content_type + return msg.get_content_charset(failobj=None) + + +def _parse_header_links(value: str) -> list[dict[str, str]]: + """ + Returns a list of parsed link headers, for more info see: + https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Link + The generic syntax of those is: + Link: < uri-reference >; param1=value1; param2="value2" + So for instance: + Link; '; type="image/jpeg",;' + would return + [ + {"url": "http:/.../front.jpeg", "type": "image/jpeg"}, + {"url": "http://.../back.jpeg"}, + ] + :param value: HTTP Link entity-header field + :return: list of parsed link headers + """ + links: list[dict[str, str]] = [] + replace_chars = " '\"" + value = value.strip(replace_chars) + if not value: + return links + for val in re.split(", *<", value): + try: + url, params = val.split(";", 1) + except ValueError: + url, params = val, "" + link = {"url": url.strip("<> '\"")} + for param in params.split(";"): + try: + key, value = param.split("=") + except ValueError: + break + link[key.strip(replace_chars)] = value.strip(replace_chars) + links.append(link) + return links + + +def _obfuscate_sensitive_headers( + items: typing.Iterable[tuple[typing.AnyStr, typing.AnyStr]], +) -> typing.Iterator[tuple[typing.AnyStr, typing.AnyStr]]: + for k, v in items: + if to_str(k.lower()) in SENSITIVE_HEADERS: + v = to_bytes_or_str("[secure]", match_type_of=v) + yield k, v + + +class Headers(typing.MutableMapping[str, str]): + """ + HTTP headers, as a case-insensitive multi-dict. + """ + + def __init__( + self, + headers: HeaderTypes | None = None, + encoding: str | None = None, + ) -> None: + self._list = [] # type: typing.List[typing.Tuple[bytes, bytes, bytes]] + + if isinstance(headers, Headers): + self._list = list(headers._list) + elif isinstance(headers, Mapping): + for k, v in headers.items(): + bytes_key = _normalize_header_key(k, encoding) + bytes_value = _normalize_header_value(v, encoding) + self._list.append((bytes_key, bytes_key.lower(), bytes_value)) + elif headers is not None: + for k, v in headers: + bytes_key = _normalize_header_key(k, encoding) + bytes_value = _normalize_header_value(v, encoding) + self._list.append((bytes_key, bytes_key.lower(), bytes_value)) + + self._encoding = encoding + + @property + def encoding(self) -> str: + """ + Header encoding is mandated as ascii, but we allow fallbacks to utf-8 + or iso-8859-1. 
+ """ + if self._encoding is None: + for encoding in ["ascii", "utf-8"]: + for key, value in self.raw: + try: + key.decode(encoding) + value.decode(encoding) + except UnicodeDecodeError: + break + else: + # The else block runs if 'break' did not occur, meaning + # all values fitted the encoding. + self._encoding = encoding + break + else: + # The ISO-8859-1 encoding covers all 256 code points in a byte, + # so will never raise decode errors. + self._encoding = "iso-8859-1" + return self._encoding + + @encoding.setter + def encoding(self, value: str) -> None: + self._encoding = value + + @property + def raw(self) -> list[tuple[bytes, bytes]]: + """ + Returns a list of the raw header items, as byte pairs. + """ + return [(raw_key, value) for raw_key, _, value in self._list] + + def keys(self) -> typing.KeysView[str]: + return {key.decode(self.encoding): None for _, key, value in self._list}.keys() + + def values(self) -> typing.ValuesView[str]: + values_dict: dict[str, str] = {} + for _, key, value in self._list: + str_key = key.decode(self.encoding) + str_value = value.decode(self.encoding) + if str_key in values_dict: + values_dict[str_key] += f", {str_value}" + else: + values_dict[str_key] = str_value + return values_dict.values() + + def items(self) -> typing.ItemsView[str, str]: + """ + Return `(key, value)` items of headers. Concatenate headers + into a single comma separated value when a key occurs multiple times. + """ + values_dict: dict[str, str] = {} + for _, key, value in self._list: + str_key = key.decode(self.encoding) + str_value = value.decode(self.encoding) + if str_key in values_dict: + values_dict[str_key] += f", {str_value}" + else: + values_dict[str_key] = str_value + return values_dict.items() + + def multi_items(self) -> list[tuple[str, str]]: + """ + Return a list of `(key, value)` pairs of headers. Allow multiple + occurrences of the same key without concatenating into a single + comma separated value. + """ + return [ + (key.decode(self.encoding), value.decode(self.encoding)) + for _, key, value in self._list + ] + + def get(self, key: str, default: typing.Any = None) -> typing.Any: + """ + Return a header value. If multiple occurrences of the header occur + then concatenate them together with commas. + """ + try: + return self[key] + except KeyError: + return default + + def get_list(self, key: str, split_commas: bool = False) -> list[str]: + """ + Return a list of all header values for a given key. + If `split_commas=True` is passed, then any comma separated header + values are split into multiple return strings. + """ + get_header_key = key.lower().encode(self.encoding) + + values = [ + item_value.decode(self.encoding) + for _, item_key, item_value in self._list + if item_key.lower() == get_header_key + ] + + if not split_commas: + return values + + split_values = [] + for value in values: + split_values.extend([item.strip() for item in value.split(",")]) + return split_values + + def update(self, headers: HeaderTypes | None = None) -> None: # type: ignore + headers = Headers(headers) + for key in headers.keys(): + if key in self: + self.pop(key) + self._list.extend(headers._list) + + def copy(self) -> Headers: + return Headers(self, encoding=self.encoding) + + def __getitem__(self, key: str) -> str: + """ + Return a single header value. + + If there are multiple headers with the same key, then we concatenate + them with commas. 
See: https://tools.ietf.org/html/rfc7230#section-3.2.2 + """ + normalized_key = key.lower().encode(self.encoding) + + items = [ + header_value.decode(self.encoding) + for _, header_key, header_value in self._list + if header_key == normalized_key + ] + + if items: + return ", ".join(items) + + raise KeyError(key) + + def __setitem__(self, key: str, value: str) -> None: + """ + Set the header `key` to `value`, removing any duplicate entries. + Retains insertion order. + """ + set_key = key.encode(self._encoding or "utf-8") + set_value = value.encode(self._encoding or "utf-8") + lookup_key = set_key.lower() + + found_indexes = [ + idx + for idx, (_, item_key, _) in enumerate(self._list) + if item_key == lookup_key + ] + + for idx in reversed(found_indexes[1:]): + del self._list[idx] + + if found_indexes: + idx = found_indexes[0] + self._list[idx] = (set_key, lookup_key, set_value) + else: + self._list.append((set_key, lookup_key, set_value)) + + def __delitem__(self, key: str) -> None: + """ + Remove the header `key`. + """ + del_key = key.lower().encode(self.encoding) + + pop_indexes = [ + idx + for idx, (_, item_key, _) in enumerate(self._list) + if item_key.lower() == del_key + ] + + if not pop_indexes: + raise KeyError(key) + + for idx in reversed(pop_indexes): + del self._list[idx] + + def __contains__(self, key: typing.Any) -> bool: + header_key = key.lower().encode(self.encoding) + return header_key in [key for _, key, _ in self._list] + + def __iter__(self) -> typing.Iterator[typing.Any]: + return iter(self.keys()) + + def __len__(self) -> int: + return len(self._list) + + def __eq__(self, other: typing.Any) -> bool: + try: + other_headers = Headers(other) + except ValueError: + return False + + self_list = [(key, value) for _, key, value in self._list] + other_list = [(key, value) for _, key, value in other_headers._list] + return sorted(self_list) == sorted(other_list) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + + encoding_str = "" + if self.encoding != "ascii": + encoding_str = f", encoding={self.encoding!r}" + + as_list = list(_obfuscate_sensitive_headers(self.multi_items())) + as_dict = dict(as_list) + + no_duplicate_keys = len(as_dict) == len(as_list) + if no_duplicate_keys: + return f"{class_name}({as_dict!r}{encoding_str})" + return f"{class_name}({as_list!r}{encoding_str})" + + +class Request: + def __init__( + self, + method: str, + url: URL | str, + *, + params: QueryParamTypes | None = None, + headers: HeaderTypes | None = None, + cookies: CookieTypes | None = None, + content: RequestContent | None = None, + data: RequestData | None = None, + files: RequestFiles | None = None, + json: typing.Any | None = None, + stream: SyncByteStream | AsyncByteStream | None = None, + extensions: RequestExtensions | None = None, + ) -> None: + self.method = method.upper() + self.url = URL(url) if params is None else URL(url, params=params) + self.headers = Headers(headers) + self.extensions = {} if extensions is None else dict(extensions) + + if cookies: + Cookies(cookies).set_cookie_header(self) + + if stream is None: + content_type: str | None = self.headers.get("content-type") + headers, stream = encode_request( + content=content, + data=data, + files=files, + json=json, + boundary=get_multipart_boundary_from_content_type( + content_type=content_type.encode(self.headers.encoding) + if content_type + else None + ), + ) + self._prepare(headers) + self.stream = stream + # Load the request body, except for streaming content. 
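# Illustrative sketch, not part of the vendored file: bytes content is read
# eagerly, iterator content only when `.read()` is called. URLs are placeholders.
import httpx

eager = httpx.Request("POST", "https://example.org", content=b"ping")
assert eager.content == b"ping"  # loaded during construction

lazy = httpx.Request("POST", "https://example.org", content=iter([b"ping"]))
lazy.read()  # drains the iterator and replaces the stream with raw bytes
assert lazy.content == b"ping"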
+ if isinstance(stream, ByteStream): + self.read() + else: + # There's an important distinction between `Request(content=...)`, + # and `Request(stream=...)`. + # + # Using `content=...` implies automatically populated `Host` and content + # headers, of either `Content-Length: ...` or `Transfer-Encoding: chunked`. + # + # Using `stream=...` will not automatically include *any* + # auto-populated headers. + # + # As an end-user you don't really need `stream=...`. It's only + # useful when: + # + # * Preserving the request stream when copying requests, eg for redirects. + # * Creating request instances on the *server-side* of the transport API. + self.stream = stream + + def _prepare(self, default_headers: dict[str, str]) -> None: + for key, value in default_headers.items(): + # Ignore Transfer-Encoding if the Content-Length has been set explicitly. + if key.lower() == "transfer-encoding" and "Content-Length" in self.headers: + continue + self.headers.setdefault(key, value) + + auto_headers: list[tuple[bytes, bytes]] = [] + + has_host = "Host" in self.headers + has_content_length = ( + "Content-Length" in self.headers or "Transfer-Encoding" in self.headers + ) + + if not has_host and self.url.host: + auto_headers.append((b"Host", self.url.netloc)) + if not has_content_length and self.method in ("POST", "PUT", "PATCH"): + auto_headers.append((b"Content-Length", b"0")) + + self.headers = Headers(auto_headers + self.headers.raw) + + @property + def content(self) -> bytes: + if not hasattr(self, "_content"): + raise RequestNotRead() + return self._content + + def read(self) -> bytes: + """ + Read and return the request content. + """ + if not hasattr(self, "_content"): + assert isinstance(self.stream, typing.Iterable) + self._content = b"".join(self.stream) + if not isinstance(self.stream, ByteStream): + # If a streaming request has been read entirely into memory, then + # we can replace the stream with a raw bytes implementation, + # to ensure that any non-replayable streams can still be used. + self.stream = ByteStream(self._content) + return self._content + + async def aread(self) -> bytes: + """ + Read and return the request content. + """ + if not hasattr(self, "_content"): + assert isinstance(self.stream, typing.AsyncIterable) + self._content = b"".join([part async for part in self.stream]) + if not isinstance(self.stream, ByteStream): + # If a streaming request has been read entirely into memory, then + # we can replace the stream with a raw bytes implementation, + # to ensure that any non-replayable streams can still be used. 
+ self.stream = ByteStream(self._content) + return self._content + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + url = str(self.url) + return f"<{class_name}({self.method!r}, {url!r})>" + + def __getstate__(self) -> dict[str, typing.Any]: + return { + name: value + for name, value in self.__dict__.items() + if name not in ["extensions", "stream"] + } + + def __setstate__(self, state: dict[str, typing.Any]) -> None: + for name, value in state.items(): + setattr(self, name, value) + self.extensions = {} + self.stream = UnattachedStream() + + +class Response: + def __init__( + self, + status_code: int, + *, + headers: HeaderTypes | None = None, + content: ResponseContent | None = None, + text: str | None = None, + html: str | None = None, + json: typing.Any = None, + stream: SyncByteStream | AsyncByteStream | None = None, + request: Request | None = None, + extensions: ResponseExtensions | None = None, + history: list[Response] | None = None, + default_encoding: str | typing.Callable[[bytes], str] = "utf-8", + ) -> None: + self.status_code = status_code + self.headers = Headers(headers) + + self._request: Request | None = request + + # When follow_redirects=False and a redirect is received, + # the client will set `response.next_request`. + self.next_request: Request | None = None + + self.extensions = {} if extensions is None else dict(extensions) + self.history = [] if history is None else list(history) + + self.is_closed = False + self.is_stream_consumed = False + + self.default_encoding = default_encoding + + if stream is None: + headers, stream = encode_response(content, text, html, json) + self._prepare(headers) + self.stream = stream + if isinstance(stream, ByteStream): + # Load the response body, except for streaming content. + self.read() + else: + # There's an important distinction between `Response(content=...)`, + # and `Response(stream=...)`. + # + # Using `content=...` implies automatically populated content headers, + # of either `Content-Length: ...` or `Transfer-Encoding: chunked`. + # + # Using `stream=...` will not automatically include any content headers. + # + # As an end-user you don't really need `stream=...`. It's only + # useful when creating response instances having received a stream + # from the transport API. + self.stream = stream + + self._num_bytes_downloaded = 0 + + def _prepare(self, default_headers: dict[str, str]) -> None: + for key, value in default_headers.items(): + # Ignore Transfer-Encoding if the Content-Length has been set explicitly. + if key.lower() == "transfer-encoding" and "content-length" in self.headers: + continue + self.headers.setdefault(key, value) + + @property + def elapsed(self) -> datetime.timedelta: + """ + Returns the time taken for the complete request/response + cycle to complete. + """ + if not hasattr(self, "_elapsed"): + raise RuntimeError( + "'.elapsed' may only be accessed after the response " + "has been read or closed." + ) + return self._elapsed + + @elapsed.setter + def elapsed(self, elapsed: datetime.timedelta) -> None: + self._elapsed = elapsed + + @property + def request(self) -> Request: + """ + Returns the request instance associated to the current response. + """ + if self._request is None: + raise RuntimeError( + "The request instance has not been set on this response." 
+ ) + return self._request + + @request.setter + def request(self, value: Request) -> None: + self._request = value + + @property + def http_version(self) -> str: + try: + http_version: bytes = self.extensions["http_version"] + except KeyError: + return "HTTP/1.1" + else: + return http_version.decode("ascii", errors="ignore") + + @property + def reason_phrase(self) -> str: + try: + reason_phrase: bytes = self.extensions["reason_phrase"] + except KeyError: + return codes.get_reason_phrase(self.status_code) + else: + return reason_phrase.decode("ascii", errors="ignore") + + @property + def url(self) -> URL: + """ + Returns the URL for which the request was made. + """ + return self.request.url + + @property + def content(self) -> bytes: + if not hasattr(self, "_content"): + raise ResponseNotRead() + return self._content + + @property + def text(self) -> str: + if not hasattr(self, "_text"): + content = self.content + if not content: + self._text = "" + else: + decoder = TextDecoder(encoding=self.encoding or "utf-8") + self._text = "".join([decoder.decode(self.content), decoder.flush()]) + return self._text + + @property + def encoding(self) -> str | None: + """ + Return an encoding to use for decoding the byte content into text. + The priority for determining this is given by... + + * `.encoding = <>` has been set explicitly. + * The encoding as specified by the charset parameter in the Content-Type header. + * The encoding as determined by `default_encoding`, which may either be + a string like "utf-8" indicating the encoding to use, or may be a callable + which enables charset autodetection. + """ + if not hasattr(self, "_encoding"): + encoding = self.charset_encoding + if encoding is None or not _is_known_encoding(encoding): + if isinstance(self.default_encoding, str): + encoding = self.default_encoding + elif hasattr(self, "_content"): + encoding = self.default_encoding(self._content) + self._encoding = encoding or "utf-8" + return self._encoding + + @encoding.setter + def encoding(self, value: str) -> None: + """ + Set the encoding to use for decoding the byte content into text. + + If the `text` attribute has been accessed, attempting to set the + encoding will throw a ValueError. + """ + if hasattr(self, "_text"): + raise ValueError( + "Setting encoding after `text` has been accessed is not allowed." + ) + self._encoding = value + + @property + def charset_encoding(self) -> str | None: + """ + Return the encoding, as specified by the Content-Type header. + """ + content_type = self.headers.get("Content-Type") + if content_type is None: + return None + + return _parse_content_type_charset(content_type) + + def _get_content_decoder(self) -> ContentDecoder: + """ + Returns a decoder instance which can be used to decode the raw byte + content, depending on the Content-Encoding used in the response. + """ + if not hasattr(self, "_decoder"): + decoders: list[ContentDecoder] = [] + values = self.headers.get_list("content-encoding", split_commas=True) + for value in values: + value = value.strip().lower() + try: + decoder_cls = SUPPORTED_DECODERS[value] + decoders.append(decoder_cls()) + except KeyError: + continue + + if len(decoders) == 1: + self._decoder = decoders[0] + elif len(decoders) > 1: + self._decoder = MultiDecoder(children=decoders) + else: + self._decoder = IdentityDecoder() + + return self._decoder + + @property + def is_informational(self) -> bool: + """ + A property which is `True` for 1xx status codes, `False` otherwise. 
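# Illustrative sketch, not part of the vendored file: these properties are
# thin range checks over `status_code`.
import httpx

r = httpx.Response(503)
assert r.is_server_error and r.is_error and not r.is_success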
+ """ + return codes.is_informational(self.status_code) + + @property + def is_success(self) -> bool: + """ + A property which is `True` for 2xx status codes, `False` otherwise. + """ + return codes.is_success(self.status_code) + + @property + def is_redirect(self) -> bool: + """ + A property which is `True` for 3xx status codes, `False` otherwise. + + Note that not all responses with a 3xx status code indicate a URL redirect. + + Use `response.has_redirect_location` to determine responses with a properly + formed URL redirection. + """ + return codes.is_redirect(self.status_code) + + @property + def is_client_error(self) -> bool: + """ + A property which is `True` for 4xx status codes, `False` otherwise. + """ + return codes.is_client_error(self.status_code) + + @property + def is_server_error(self) -> bool: + """ + A property which is `True` for 5xx status codes, `False` otherwise. + """ + return codes.is_server_error(self.status_code) + + @property + def is_error(self) -> bool: + """ + A property which is `True` for 4xx and 5xx status codes, `False` otherwise. + """ + return codes.is_error(self.status_code) + + @property + def has_redirect_location(self) -> bool: + """ + Returns True for 3xx responses with a properly formed URL redirection, + `False` otherwise. + """ + return ( + self.status_code + in ( + # 301 (Cacheable redirect. Method may change to GET.) + codes.MOVED_PERMANENTLY, + # 302 (Uncacheable redirect. Method may change to GET.) + codes.FOUND, + # 303 (Client should make a GET or HEAD request.) + codes.SEE_OTHER, + # 307 (Equiv. 302, but retain method) + codes.TEMPORARY_REDIRECT, + # 308 (Equiv. 301, but retain method) + codes.PERMANENT_REDIRECT, + ) + and "Location" in self.headers + ) + + def raise_for_status(self) -> Response: + """ + Raise the `HTTPStatusError` if one occurred. + """ + request = self._request + if request is None: + raise RuntimeError( + "Cannot call `raise_for_status` as the request " + "instance has not been set on this response." 
+        )
+
+        if self.is_success:
+            return self
+
+        if self.has_redirect_location:
+            message = (
+                "{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n"
+                "Redirect location: '{0.headers[location]}'\n"
+                "For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}"
+            )
+        else:
+            message = (
+                "{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n"
+                "For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}"
+            )
+
+        status_class = self.status_code // 100
+        error_types = {
+            1: "Informational response",
+            3: "Redirect response",
+            4: "Client error",
+            5: "Server error",
+        }
+        error_type = error_types.get(status_class, "Invalid status code")
+        message = message.format(self, error_type=error_type)
+        raise HTTPStatusError(message, request=request, response=self)
+
+    def json(self, **kwargs: typing.Any) -> typing.Any:
+        return jsonlib.loads(self.content, **kwargs)
+
+    @property
+    def cookies(self) -> Cookies:
+        if not hasattr(self, "_cookies"):
+            self._cookies = Cookies()
+            self._cookies.extract_cookies(self)
+        return self._cookies
+
+    @property
+    def links(self) -> dict[str | None, dict[str, str]]:
+        """
+        Returns the parsed header links of the response, if any
+        """
+        header = self.headers.get("link")
+        if header is None:
+            return {}
+
+        return {
+            (link.get("rel") or link.get("url")): link
+            for link in _parse_header_links(header)
+        }
+
+    @property
+    def num_bytes_downloaded(self) -> int:
+        return self._num_bytes_downloaded
+
+    def __repr__(self) -> str:
+        return f"<Response [{self.status_code} {self.reason_phrase}]>"
+
+    def __getstate__(self) -> dict[str, typing.Any]:
+        return {
+            name: value
+            for name, value in self.__dict__.items()
+            if name not in ["extensions", "stream", "is_closed", "_decoder"]
+        }
+
+    def __setstate__(self, state: dict[str, typing.Any]) -> None:
+        for name, value in state.items():
+            setattr(self, name, value)
+        self.is_closed = True
+        self.extensions = {}
+        self.stream = UnattachedStream()
+
+    def read(self) -> bytes:
+        """
+        Read and return the response content.
+        """
+        if not hasattr(self, "_content"):
+            self._content = b"".join(self.iter_bytes())
+        return self._content
+
+    def iter_bytes(self, chunk_size: int | None = None) -> typing.Iterator[bytes]:
+        """
+        A byte-iterator over the decoded response content.
+        This allows us to handle gzip, deflate, brotli, and zstd encoded responses.
+        """
+        if hasattr(self, "_content"):
+            chunk_size = len(self._content) if chunk_size is None else chunk_size
+            for i in range(0, len(self._content), max(chunk_size, 1)):
+                yield self._content[i : i + chunk_size]
+        else:
+            decoder = self._get_content_decoder()
+            chunker = ByteChunker(chunk_size=chunk_size)
+            with request_context(request=self._request):
+                for raw_bytes in self.iter_raw():
+                    decoded = decoder.decode(raw_bytes)
+                    for chunk in chunker.decode(decoded):
+                        yield chunk
+                decoded = decoder.flush()
+                for chunk in chunker.decode(decoded):
+                    yield chunk  # pragma: no cover
+                for chunk in chunker.flush():
+                    yield chunk
+
+    def iter_text(self, chunk_size: int | None = None) -> typing.Iterator[str]:
+        """
+        A str-iterator over the decoded response content
+        that handles both gzip, deflate, etc but also detects the content's
+        string encoding.
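# Illustrative sketch, not part of the vendored file: streaming decoded text
# line by line. The URL is a placeholder.
import httpx

with httpx.stream("GET", "https://www.example.com") as response:
    for line in response.iter_lines():
        print(line)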
+ """ + decoder = TextDecoder(encoding=self.encoding or "utf-8") + chunker = TextChunker(chunk_size=chunk_size) + with request_context(request=self._request): + for byte_content in self.iter_bytes(): + text_content = decoder.decode(byte_content) + for chunk in chunker.decode(text_content): + yield chunk + text_content = decoder.flush() + for chunk in chunker.decode(text_content): + yield chunk # pragma: no cover + for chunk in chunker.flush(): + yield chunk + + def iter_lines(self) -> typing.Iterator[str]: + decoder = LineDecoder() + with request_context(request=self._request): + for text in self.iter_text(): + for line in decoder.decode(text): + yield line + for line in decoder.flush(): + yield line + + def iter_raw(self, chunk_size: int | None = None) -> typing.Iterator[bytes]: + """ + A byte-iterator over the raw response content. + """ + if self.is_stream_consumed: + raise StreamConsumed() + if self.is_closed: + raise StreamClosed() + if not isinstance(self.stream, SyncByteStream): + raise RuntimeError("Attempted to call a sync iterator on an async stream.") + + self.is_stream_consumed = True + self._num_bytes_downloaded = 0 + chunker = ByteChunker(chunk_size=chunk_size) + + with request_context(request=self._request): + for raw_stream_bytes in self.stream: + self._num_bytes_downloaded += len(raw_stream_bytes) + for chunk in chunker.decode(raw_stream_bytes): + yield chunk + + for chunk in chunker.flush(): + yield chunk + + self.close() + + def close(self) -> None: + """ + Close the response and release the connection. + Automatically called if the response body is read to completion. + """ + if not isinstance(self.stream, SyncByteStream): + raise RuntimeError("Attempted to call an sync close on an async stream.") + + if not self.is_closed: + self.is_closed = True + with request_context(request=self._request): + self.stream.close() + + async def aread(self) -> bytes: + """ + Read and return the response content. + """ + if not hasattr(self, "_content"): + self._content = b"".join([part async for part in self.aiter_bytes()]) + return self._content + + async def aiter_bytes( + self, chunk_size: int | None = None + ) -> typing.AsyncIterator[bytes]: + """ + A byte-iterator over the decoded response content. + This allows us to handle gzip, deflate, brotli, and zstd encoded responses. + """ + if hasattr(self, "_content"): + chunk_size = len(self._content) if chunk_size is None else chunk_size + for i in range(0, len(self._content), max(chunk_size, 1)): + yield self._content[i : i + chunk_size] + else: + decoder = self._get_content_decoder() + chunker = ByteChunker(chunk_size=chunk_size) + with request_context(request=self._request): + async for raw_bytes in self.aiter_raw(): + decoded = decoder.decode(raw_bytes) + for chunk in chunker.decode(decoded): + yield chunk + decoded = decoder.flush() + for chunk in chunker.decode(decoded): + yield chunk # pragma: no cover + for chunk in chunker.flush(): + yield chunk + + async def aiter_text( + self, chunk_size: int | None = None + ) -> typing.AsyncIterator[str]: + """ + A str-iterator over the decoded response content + that handles both gzip, deflate, etc but also detects the content's + string encoding. 
+ """ + decoder = TextDecoder(encoding=self.encoding or "utf-8") + chunker = TextChunker(chunk_size=chunk_size) + with request_context(request=self._request): + async for byte_content in self.aiter_bytes(): + text_content = decoder.decode(byte_content) + for chunk in chunker.decode(text_content): + yield chunk + text_content = decoder.flush() + for chunk in chunker.decode(text_content): + yield chunk # pragma: no cover + for chunk in chunker.flush(): + yield chunk + + async def aiter_lines(self) -> typing.AsyncIterator[str]: + decoder = LineDecoder() + with request_context(request=self._request): + async for text in self.aiter_text(): + for line in decoder.decode(text): + yield line + for line in decoder.flush(): + yield line + + async def aiter_raw( + self, chunk_size: int | None = None + ) -> typing.AsyncIterator[bytes]: + """ + A byte-iterator over the raw response content. + """ + if self.is_stream_consumed: + raise StreamConsumed() + if self.is_closed: + raise StreamClosed() + if not isinstance(self.stream, AsyncByteStream): + raise RuntimeError("Attempted to call an async iterator on an sync stream.") + + self.is_stream_consumed = True + self._num_bytes_downloaded = 0 + chunker = ByteChunker(chunk_size=chunk_size) + + with request_context(request=self._request): + async for raw_stream_bytes in self.stream: + self._num_bytes_downloaded += len(raw_stream_bytes) + for chunk in chunker.decode(raw_stream_bytes): + yield chunk + + for chunk in chunker.flush(): + yield chunk + + await self.aclose() + + async def aclose(self) -> None: + """ + Close the response and release the connection. + Automatically called if the response body is read to completion. + """ + if not isinstance(self.stream, AsyncByteStream): + raise RuntimeError("Attempted to call an async close on an sync stream.") + + if not self.is_closed: + self.is_closed = True + with request_context(request=self._request): + await self.stream.aclose() + + +class Cookies(typing.MutableMapping[str, str]): + """ + HTTP Cookies, as a mutable mapping. + """ + + def __init__(self, cookies: CookieTypes | None = None) -> None: + if cookies is None or isinstance(cookies, dict): + self.jar = CookieJar() + if isinstance(cookies, dict): + for key, value in cookies.items(): + self.set(key, value) + elif isinstance(cookies, list): + self.jar = CookieJar() + for key, value in cookies: + self.set(key, value) + elif isinstance(cookies, Cookies): + self.jar = CookieJar() + for cookie in cookies.jar: + self.jar.set_cookie(cookie) + else: + self.jar = cookies + + def extract_cookies(self, response: Response) -> None: + """ + Loads any cookies based on the response `Set-Cookie` headers. + """ + urllib_response = self._CookieCompatResponse(response) + urllib_request = self._CookieCompatRequest(response.request) + + self.jar.extract_cookies(urllib_response, urllib_request) # type: ignore + + def set_cookie_header(self, request: Request) -> None: + """ + Sets an appropriate 'Cookie:' HTTP header on the `Request`. + """ + urllib_request = self._CookieCompatRequest(request) + self.jar.add_cookie_header(urllib_request) + + def set(self, name: str, value: str, domain: str = "", path: str = "/") -> None: + """ + Set a cookie value by name. May optionally include domain and path. 
+ """ + kwargs = { + "version": 0, + "name": name, + "value": value, + "port": None, + "port_specified": False, + "domain": domain, + "domain_specified": bool(domain), + "domain_initial_dot": domain.startswith("."), + "path": path, + "path_specified": bool(path), + "secure": False, + "expires": None, + "discard": True, + "comment": None, + "comment_url": None, + "rest": {"HttpOnly": None}, + "rfc2109": False, + } + cookie = Cookie(**kwargs) # type: ignore + self.jar.set_cookie(cookie) + + def get( # type: ignore + self, + name: str, + default: str | None = None, + domain: str | None = None, + path: str | None = None, + ) -> str | None: + """ + Get a cookie by name. May optionally include domain and path + in order to specify exactly which cookie to retrieve. + """ + value = None + for cookie in self.jar: + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + if value is not None: + message = f"Multiple cookies exist with name={name}" + raise CookieConflict(message) + value = cookie.value + + if value is None: + return default + return value + + def delete( + self, + name: str, + domain: str | None = None, + path: str | None = None, + ) -> None: + """ + Delete a cookie by name. May optionally include domain and path + in order to specify exactly which cookie to delete. + """ + if domain is not None and path is not None: + return self.jar.clear(domain, path, name) + + remove = [ + cookie + for cookie in self.jar + if cookie.name == name + and (domain is None or cookie.domain == domain) + and (path is None or cookie.path == path) + ] + + for cookie in remove: + self.jar.clear(cookie.domain, cookie.path, cookie.name) + + def clear(self, domain: str | None = None, path: str | None = None) -> None: + """ + Delete all cookies. Optionally include a domain and path in + order to only delete a subset of all the cookies. + """ + args = [] + if domain is not None: + args.append(domain) + if path is not None: + assert domain is not None + args.append(path) + self.jar.clear(*args) + + def update(self, cookies: CookieTypes | None = None) -> None: # type: ignore + cookies = Cookies(cookies) + for cookie in cookies.jar: + self.jar.set_cookie(cookie) + + def __setitem__(self, name: str, value: str) -> None: + return self.set(name, value) + + def __getitem__(self, name: str) -> str: + value = self.get(name) + if value is None: + raise KeyError(name) + return value + + def __delitem__(self, name: str) -> None: + return self.delete(name) + + def __len__(self) -> int: + return len(self.jar) + + def __iter__(self) -> typing.Iterator[str]: + return (cookie.name for cookie in self.jar) + + def __bool__(self) -> bool: + for _ in self.jar: + return True + return False + + def __repr__(self) -> str: + cookies_repr = ", ".join( + [ + f"" + for cookie in self.jar + ] + ) + + return f"" + + class _CookieCompatRequest(urllib.request.Request): + """ + Wraps a `Request` instance up in a compatibility interface suitable + for use with `CookieJar` operations. + """ + + def __init__(self, request: Request) -> None: + super().__init__( + url=str(request.url), + headers=dict(request.headers), + method=request.method, + ) + self.request = request + + def add_unredirected_header(self, key: str, value: str) -> None: + super().add_unredirected_header(key, value) + self.request.headers[key] = value + + class _CookieCompatResponse: + """ + Wraps a `Request` instance up in a compatibility interface suitable + for use with `CookieJar` operations. 
+ """ + + def __init__(self, response: Response) -> None: + self.response = response + + def info(self) -> email.message.Message: + info = email.message.Message() + for key, value in self.response.headers.multi_items(): + # Note that setting `info[key]` here is an "append" operation, + # not a "replace" operation. + # https://docs.python.org/3/library/email.compat32-message.html#email.message.Message.__setitem__ + info[key] = value + return info diff --git a/venv/lib/python3.11/site-packages/httpx/_multipart.py b/venv/lib/python3.11/site-packages/httpx/_multipart.py new file mode 100644 index 0000000..b4761af9 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx/_multipart.py @@ -0,0 +1,300 @@ +from __future__ import annotations + +import io +import mimetypes +import os +import re +import typing +from pathlib import Path + +from ._types import ( + AsyncByteStream, + FileContent, + FileTypes, + RequestData, + RequestFiles, + SyncByteStream, +) +from ._utils import ( + peek_filelike_length, + primitive_value_to_str, + to_bytes, +) + +_HTML5_FORM_ENCODING_REPLACEMENTS = {'"': "%22", "\\": "\\\\"} +_HTML5_FORM_ENCODING_REPLACEMENTS.update( + {chr(c): "%{:02X}".format(c) for c in range(0x1F + 1) if c != 0x1B} +) +_HTML5_FORM_ENCODING_RE = re.compile( + r"|".join([re.escape(c) for c in _HTML5_FORM_ENCODING_REPLACEMENTS.keys()]) +) + + +def _format_form_param(name: str, value: str) -> bytes: + """ + Encode a name/value pair within a multipart form. + """ + + def replacer(match: typing.Match[str]) -> str: + return _HTML5_FORM_ENCODING_REPLACEMENTS[match.group(0)] + + value = _HTML5_FORM_ENCODING_RE.sub(replacer, value) + return f'{name}="{value}"'.encode() + + +def _guess_content_type(filename: str | None) -> str | None: + """ + Guesses the mimetype based on a filename. Defaults to `application/octet-stream`. + + Returns `None` if `filename` is `None` or empty. + """ + if filename: + return mimetypes.guess_type(filename)[0] or "application/octet-stream" + return None + + +def get_multipart_boundary_from_content_type( + content_type: bytes | None, +) -> bytes | None: + if not content_type or not content_type.startswith(b"multipart/form-data"): + return None + # parse boundary according to + # https://www.rfc-editor.org/rfc/rfc2046#section-5.1.1 + if b";" in content_type: + for section in content_type.split(b";"): + if section.strip().lower().startswith(b"boundary="): + return section.strip()[len(b"boundary=") :].strip(b'"') + return None + + +class DataField: + """ + A single form field item, within a multipart form field. + """ + + def __init__(self, name: str, value: str | bytes | int | float | None) -> None: + if not isinstance(name, str): + raise TypeError( + f"Invalid type for name. Expected str, got {type(name)}: {name!r}" + ) + if value is not None and not isinstance(value, (str, bytes, int, float)): + raise TypeError( + "Invalid type for value. 
Expected primitive type," + f" got {type(value)}: {value!r}" + ) + self.name = name + self.value: str | bytes = ( + value if isinstance(value, bytes) else primitive_value_to_str(value) + ) + + def render_headers(self) -> bytes: + if not hasattr(self, "_headers"): + name = _format_form_param("name", self.name) + self._headers = b"".join( + [b"Content-Disposition: form-data; ", name, b"\r\n\r\n"] + ) + + return self._headers + + def render_data(self) -> bytes: + if not hasattr(self, "_data"): + self._data = to_bytes(self.value) + + return self._data + + def get_length(self) -> int: + headers = self.render_headers() + data = self.render_data() + return len(headers) + len(data) + + def render(self) -> typing.Iterator[bytes]: + yield self.render_headers() + yield self.render_data() + + +class FileField: + """ + A single file field item, within a multipart form field. + """ + + CHUNK_SIZE = 64 * 1024 + + def __init__(self, name: str, value: FileTypes) -> None: + self.name = name + + fileobj: FileContent + + headers: dict[str, str] = {} + content_type: str | None = None + + # This large tuple based API largely mirror's requests' API + # It would be good to think of better APIs for this that we could + # include in httpx 2.0 since variable length tuples(especially of 4 elements) + # are quite unwieldly + if isinstance(value, tuple): + if len(value) == 2: + # neither the 3rd parameter (content_type) nor the 4th (headers) + # was included + filename, fileobj = value + elif len(value) == 3: + filename, fileobj, content_type = value + else: + # all 4 parameters included + filename, fileobj, content_type, headers = value # type: ignore + else: + filename = Path(str(getattr(value, "name", "upload"))).name + fileobj = value + + if content_type is None: + content_type = _guess_content_type(filename) + + has_content_type_header = any("content-type" in key.lower() for key in headers) + if content_type is not None and not has_content_type_header: + # note that unlike requests, we ignore the content_type provided in the 3rd + # tuple element if it is also included in the headers requests does + # the opposite (it overwrites the headerwith the 3rd tuple element) + headers["Content-Type"] = content_type + + if isinstance(fileobj, io.StringIO): + raise TypeError( + "Multipart file uploads require 'io.BytesIO', not 'io.StringIO'." + ) + if isinstance(fileobj, io.TextIOBase): + raise TypeError( + "Multipart file uploads must be opened in binary mode, not text mode." + ) + + self.filename = filename + self.file = fileobj + self.headers = headers + + def get_length(self) -> int | None: + headers = self.render_headers() + + if isinstance(self.file, (str, bytes)): + return len(headers) + len(to_bytes(self.file)) + + file_length = peek_filelike_length(self.file) + + # If we can't determine the filesize without reading it into memory, + # then return `None` here, to indicate an unknown file length. 
+ if file_length is None: + return None + + return len(headers) + file_length + + def render_headers(self) -> bytes: + if not hasattr(self, "_headers"): + parts = [ + b"Content-Disposition: form-data; ", + _format_form_param("name", self.name), + ] + if self.filename: + filename = _format_form_param("filename", self.filename) + parts.extend([b"; ", filename]) + for header_name, header_value in self.headers.items(): + key, val = f"\r\n{header_name}: ".encode(), header_value.encode() + parts.extend([key, val]) + parts.append(b"\r\n\r\n") + self._headers = b"".join(parts) + + return self._headers + + def render_data(self) -> typing.Iterator[bytes]: + if isinstance(self.file, (str, bytes)): + yield to_bytes(self.file) + return + + if hasattr(self.file, "seek"): + try: + self.file.seek(0) + except io.UnsupportedOperation: + pass + + chunk = self.file.read(self.CHUNK_SIZE) + while chunk: + yield to_bytes(chunk) + chunk = self.file.read(self.CHUNK_SIZE) + + def render(self) -> typing.Iterator[bytes]: + yield self.render_headers() + yield from self.render_data() + + +class MultipartStream(SyncByteStream, AsyncByteStream): + """ + Request content as streaming multipart encoded form data. + """ + + def __init__( + self, + data: RequestData, + files: RequestFiles, + boundary: bytes | None = None, + ) -> None: + if boundary is None: + boundary = os.urandom(16).hex().encode("ascii") + + self.boundary = boundary + self.content_type = "multipart/form-data; boundary=%s" % boundary.decode( + "ascii" + ) + self.fields = list(self._iter_fields(data, files)) + + def _iter_fields( + self, data: RequestData, files: RequestFiles + ) -> typing.Iterator[FileField | DataField]: + for name, value in data.items(): + if isinstance(value, (tuple, list)): + for item in value: + yield DataField(name=name, value=item) + else: + yield DataField(name=name, value=value) + + file_items = files.items() if isinstance(files, typing.Mapping) else files + for name, value in file_items: + yield FileField(name=name, value=value) + + def iter_chunks(self) -> typing.Iterator[bytes]: + for field in self.fields: + yield b"--%s\r\n" % self.boundary + yield from field.render() + yield b"\r\n" + yield b"--%s--\r\n" % self.boundary + + def get_content_length(self) -> int | None: + """ + Return the length of the multipart encoded content, or `None` if + any of the files have a length that cannot be determined upfront. + """ + boundary_length = len(self.boundary) + length = 0 + + for field in self.fields: + field_length = field.get_length() + if field_length is None: + return None + + length += 2 + boundary_length + 2 # b"--{boundary}\r\n" + length += field_length + length += 2 # b"\r\n" + + length += 2 + boundary_length + 4 # b"--{boundary}--\r\n" + return length + + # Content stream interface. 
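+ # get_headers() advertises the boundary via the Content-Type header; + # __iter__/__aiter__ below replay iter_chunks() for sync and async clients alike.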
+ + def get_headers(self) -> dict[str, str]: + content_length = self.get_content_length() + content_type = self.content_type + if content_length is None: + return {"Transfer-Encoding": "chunked", "Content-Type": content_type} + return {"Content-Length": str(content_length), "Content-Type": content_type} + + def __iter__(self) -> typing.Iterator[bytes]: + for chunk in self.iter_chunks(): + yield chunk + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + for chunk in self.iter_chunks(): + yield chunk diff --git a/venv/lib/python3.11/site-packages/httpx/_status_codes.py b/venv/lib/python3.11/site-packages/httpx/_status_codes.py new file mode 100644 index 0000000..133a623 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx/_status_codes.py @@ -0,0 +1,162 @@ +from __future__ import annotations + +from enum import IntEnum + +__all__ = ["codes"] + + +class codes(IntEnum): + """HTTP status codes and reason phrases + + Status codes from the following RFCs are all observed: + + * RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616 + * RFC 6585: Additional HTTP Status Codes + * RFC 3229: Delta encoding in HTTP + * RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518 + * RFC 5842: Binding Extensions to WebDAV + * RFC 7238: Permanent Redirect + * RFC 2295: Transparent Content Negotiation in HTTP + * RFC 2774: An HTTP Extension Framework + * RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2) + * RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0) + * RFC 7725: An HTTP Status Code to Report Legal Obstacles + * RFC 8297: An HTTP Status Code for Indicating Hints + * RFC 8470: Using Early Data in HTTP + """ + + def __new__(cls, value: int, phrase: str = "") -> codes: + obj = int.__new__(cls, value) + obj._value_ = value + + obj.phrase = phrase # type: ignore[attr-defined] + return obj + + def __str__(self) -> str: + return str(self.value) + + @classmethod + def get_reason_phrase(cls, value: int) -> str: + try: + return codes(value).phrase # type: ignore + except ValueError: + return "" + + @classmethod + def is_informational(cls, value: int) -> bool: + """ + Returns `True` for 1xx status codes, `False` otherwise. + """ + return 100 <= value <= 199 + + @classmethod + def is_success(cls, value: int) -> bool: + """ + Returns `True` for 2xx status codes, `False` otherwise. + """ + return 200 <= value <= 299 + + @classmethod + def is_redirect(cls, value: int) -> bool: + """ + Returns `True` for 3xx status codes, `False` otherwise. + """ + return 300 <= value <= 399 + + @classmethod + def is_client_error(cls, value: int) -> bool: + """ + Returns `True` for 4xx status codes, `False` otherwise. + """ + return 400 <= value <= 499 + + @classmethod + def is_server_error(cls, value: int) -> bool: + """ + Returns `True` for 5xx status codes, `False` otherwise. + """ + return 500 <= value <= 599 + + @classmethod + def is_error(cls, value: int) -> bool: + """ + Returns `True` for 4xx or 5xx status codes, `False` otherwise. 
+ """ + return 400 <= value <= 599 + + # informational + CONTINUE = 100, "Continue" + SWITCHING_PROTOCOLS = 101, "Switching Protocols" + PROCESSING = 102, "Processing" + EARLY_HINTS = 103, "Early Hints" + + # success + OK = 200, "OK" + CREATED = 201, "Created" + ACCEPTED = 202, "Accepted" + NON_AUTHORITATIVE_INFORMATION = 203, "Non-Authoritative Information" + NO_CONTENT = 204, "No Content" + RESET_CONTENT = 205, "Reset Content" + PARTIAL_CONTENT = 206, "Partial Content" + MULTI_STATUS = 207, "Multi-Status" + ALREADY_REPORTED = 208, "Already Reported" + IM_USED = 226, "IM Used" + + # redirection + MULTIPLE_CHOICES = 300, "Multiple Choices" + MOVED_PERMANENTLY = 301, "Moved Permanently" + FOUND = 302, "Found" + SEE_OTHER = 303, "See Other" + NOT_MODIFIED = 304, "Not Modified" + USE_PROXY = 305, "Use Proxy" + TEMPORARY_REDIRECT = 307, "Temporary Redirect" + PERMANENT_REDIRECT = 308, "Permanent Redirect" + + # client error + BAD_REQUEST = 400, "Bad Request" + UNAUTHORIZED = 401, "Unauthorized" + PAYMENT_REQUIRED = 402, "Payment Required" + FORBIDDEN = 403, "Forbidden" + NOT_FOUND = 404, "Not Found" + METHOD_NOT_ALLOWED = 405, "Method Not Allowed" + NOT_ACCEPTABLE = 406, "Not Acceptable" + PROXY_AUTHENTICATION_REQUIRED = 407, "Proxy Authentication Required" + REQUEST_TIMEOUT = 408, "Request Timeout" + CONFLICT = 409, "Conflict" + GONE = 410, "Gone" + LENGTH_REQUIRED = 411, "Length Required" + PRECONDITION_FAILED = 412, "Precondition Failed" + REQUEST_ENTITY_TOO_LARGE = 413, "Request Entity Too Large" + REQUEST_URI_TOO_LONG = 414, "Request-URI Too Long" + UNSUPPORTED_MEDIA_TYPE = 415, "Unsupported Media Type" + REQUESTED_RANGE_NOT_SATISFIABLE = 416, "Requested Range Not Satisfiable" + EXPECTATION_FAILED = 417, "Expectation Failed" + IM_A_TEAPOT = 418, "I'm a teapot" + MISDIRECTED_REQUEST = 421, "Misdirected Request" + UNPROCESSABLE_ENTITY = 422, "Unprocessable Entity" + LOCKED = 423, "Locked" + FAILED_DEPENDENCY = 424, "Failed Dependency" + TOO_EARLY = 425, "Too Early" + UPGRADE_REQUIRED = 426, "Upgrade Required" + PRECONDITION_REQUIRED = 428, "Precondition Required" + TOO_MANY_REQUESTS = 429, "Too Many Requests" + REQUEST_HEADER_FIELDS_TOO_LARGE = 431, "Request Header Fields Too Large" + UNAVAILABLE_FOR_LEGAL_REASONS = 451, "Unavailable For Legal Reasons" + + # server errors + INTERNAL_SERVER_ERROR = 500, "Internal Server Error" + NOT_IMPLEMENTED = 501, "Not Implemented" + BAD_GATEWAY = 502, "Bad Gateway" + SERVICE_UNAVAILABLE = 503, "Service Unavailable" + GATEWAY_TIMEOUT = 504, "Gateway Timeout" + HTTP_VERSION_NOT_SUPPORTED = 505, "HTTP Version Not Supported" + VARIANT_ALSO_NEGOTIATES = 506, "Variant Also Negotiates" + INSUFFICIENT_STORAGE = 507, "Insufficient Storage" + LOOP_DETECTED = 508, "Loop Detected" + NOT_EXTENDED = 510, "Not Extended" + NETWORK_AUTHENTICATION_REQUIRED = 511, "Network Authentication Required" + + +# Include lower-case styles for `requests` compatibility. 
+for code in codes: + setattr(codes, code._name_.lower(), int(code)) diff --git a/venv/lib/python3.11/site-packages/httpx/_transports/__init__.py b/venv/lib/python3.11/site-packages/httpx/_transports/__init__.py new file mode 100644 index 0000000..7a32105 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx/_transports/__init__.py @@ -0,0 +1,15 @@ +from .asgi import * +from .base import * +from .default import * +from .mock import * +from .wsgi import * + +__all__ = [ + "ASGITransport", + "AsyncBaseTransport", + "BaseTransport", + "AsyncHTTPTransport", + "HTTPTransport", + "MockTransport", + "WSGITransport", +] diff --git a/venv/lib/python3.11/site-packages/httpx/_transports/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpx/_transports/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..ce95f43 Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpx/_transports/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpx/_transports/__pycache__/asgi.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpx/_transports/__pycache__/asgi.cpython-311.pyc new file mode 100644 index 0000000..cdf79ee Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpx/_transports/__pycache__/asgi.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpx/_transports/__pycache__/base.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpx/_transports/__pycache__/base.cpython-311.pyc new file mode 100644 index 0000000..eba97d7 Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpx/_transports/__pycache__/base.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpx/_transports/__pycache__/default.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpx/_transports/__pycache__/default.cpython-311.pyc new file mode 100644 index 0000000..699ed85 Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpx/_transports/__pycache__/default.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpx/_transports/__pycache__/mock.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpx/_transports/__pycache__/mock.cpython-311.pyc new file mode 100644 index 0000000..2aa7954 Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpx/_transports/__pycache__/mock.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpx/_transports/__pycache__/wsgi.cpython-311.pyc b/venv/lib/python3.11/site-packages/httpx/_transports/__pycache__/wsgi.cpython-311.pyc new file mode 100644 index 0000000..44e4801 Binary files /dev/null and b/venv/lib/python3.11/site-packages/httpx/_transports/__pycache__/wsgi.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/httpx/_transports/asgi.py b/venv/lib/python3.11/site-packages/httpx/_transports/asgi.py new file mode 100644 index 0000000..2bc4efa --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx/_transports/asgi.py @@ -0,0 +1,187 @@ +from __future__ import annotations + +import typing + +from .._models import Request, Response +from .._types import AsyncByteStream +from .base import AsyncBaseTransport + +if typing.TYPE_CHECKING: # pragma: no cover + import asyncio + + import trio + + Event = typing.Union[asyncio.Event, trio.Event] + + +_Message = typing.MutableMapping[str, typing.Any] +_Receive = typing.Callable[[], typing.Awaitable[_Message]] +_Send = typing.Callable[ + [typing.MutableMapping[str, typing.Any]], typing.Awaitable[None] +] +_ASGIApp = 
typing.Callable[ + [typing.MutableMapping[str, typing.Any], _Receive, _Send], typing.Awaitable[None] +] + +__all__ = ["ASGITransport"] + + +def is_running_trio() -> bool: + try: + # sniffio is a dependency of trio. + + # See https://github.com/python-trio/trio/issues/2802 + import sniffio + + if sniffio.current_async_library() == "trio": + return True + except ImportError: # pragma: nocover + pass + + return False + + +def create_event() -> Event: + if is_running_trio(): + import trio + + return trio.Event() + + import asyncio + + return asyncio.Event() + + +class ASGIResponseStream(AsyncByteStream): + def __init__(self, body: list[bytes]) -> None: + self._body = body + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + yield b"".join(self._body) + + +class ASGITransport(AsyncBaseTransport): + """ + A custom AsyncTransport that handles sending requests directly to an ASGI app. + + ```python + transport = httpx.ASGITransport( + app=app, + root_path="/submount", + client=("1.2.3.4", 123) + ) + client = httpx.AsyncClient(transport=transport) + ``` + + Arguments: + + * `app` - The ASGI application. + * `raise_app_exceptions` - Boolean indicating if exceptions in the application + should be raised. Defaults to `True`. Can be set to `False` for use cases + such as testing the content of a client 500 response. + * `root_path` - The root path on which the ASGI application should be mounted. + * `client` - A two-tuple indicating the client IP and port of incoming requests. + """ + + def __init__( + self, + app: _ASGIApp, + raise_app_exceptions: bool = True, + root_path: str = "", + client: tuple[str, int] = ("127.0.0.1", 123), + ) -> None: + self.app = app + self.raise_app_exceptions = raise_app_exceptions + self.root_path = root_path + self.client = client + + async def handle_async_request( + self, + request: Request, + ) -> Response: + assert isinstance(request.stream, AsyncByteStream) + + # ASGI scope. + scope = { + "type": "http", + "asgi": {"version": "3.0"}, + "http_version": "1.1", + "method": request.method, + "headers": [(k.lower(), v) for (k, v) in request.headers.raw], + "scheme": request.url.scheme, + "path": request.url.path, + "raw_path": request.url.raw_path.split(b"?")[0], + "query_string": request.url.query, + "server": (request.url.host, request.url.port), + "client": self.client, + "root_path": self.root_path, + } + + # Request. + request_body_chunks = request.stream.__aiter__() + request_complete = False + + # Response. + status_code = None + response_headers = None + body_parts = [] + response_started = False + response_complete = create_event() + + # ASGI callables.
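+ # `receive` replays the buffered request body into the app; `send` records + # the "http.response.start" and "http.response.body" messages it emits.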
+ + async def receive() -> dict[str, typing.Any]: + nonlocal request_complete + + if request_complete: + await response_complete.wait() + return {"type": "http.disconnect"} + + try: + body = await request_body_chunks.__anext__() + except StopAsyncIteration: + request_complete = True + return {"type": "http.request", "body": b"", "more_body": False} + return {"type": "http.request", "body": body, "more_body": True} + + async def send(message: typing.MutableMapping[str, typing.Any]) -> None: + nonlocal status_code, response_headers, response_started + + if message["type"] == "http.response.start": + assert not response_started + + status_code = message["status"] + response_headers = message.get("headers", []) + response_started = True + + elif message["type"] == "http.response.body": + assert not response_complete.is_set() + body = message.get("body", b"") + more_body = message.get("more_body", False) + + if body and request.method != "HEAD": + body_parts.append(body) + + if not more_body: + response_complete.set() + + try: + await self.app(scope, receive, send) + except Exception: # noqa: PIE-786 + if self.raise_app_exceptions: + raise + + response_complete.set() + if status_code is None: + status_code = 500 + if response_headers is None: + response_headers = {} + + assert response_complete.is_set() + assert status_code is not None + assert response_headers is not None + + stream = ASGIResponseStream(body_parts) + + return Response(status_code, headers=response_headers, stream=stream) diff --git a/venv/lib/python3.11/site-packages/httpx/_transports/base.py b/venv/lib/python3.11/site-packages/httpx/_transports/base.py new file mode 100644 index 0000000..66fd99d --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx/_transports/base.py @@ -0,0 +1,86 @@ +from __future__ import annotations + +import typing +from types import TracebackType + +from .._models import Request, Response + +T = typing.TypeVar("T", bound="BaseTransport") +A = typing.TypeVar("A", bound="AsyncBaseTransport") + +__all__ = ["AsyncBaseTransport", "BaseTransport"] + + +class BaseTransport: + def __enter__(self: T) -> T: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + self.close() + + def handle_request(self, request: Request) -> Response: + """ + Send a single HTTP request and return a response. + + Developers shouldn't typically ever need to call into this API directly, + since the Client class provides all the higher level user-facing API + niceties. + + In order to properly release any network resources, the response + stream should *either* be consumed immediately, with a call to + `response.stream.read()`, or else the `handle_request` call should + be followed with a try/finally block to ensure the stream is + always closed. + + Example usage: + + with httpx.HTTPTransport() as transport: + req = httpx.Request( + method=b"GET", + url=(b"https", b"www.example.com", 443, b"/"), + headers=[(b"Host", b"www.example.com")], + ) + resp = transport.handle_request(req) + body = resp.stream.read() + print(resp.status_code, resp.headers, body) + + + Takes a `Request` instance as the only argument. + + Returns a `Response` instance. + """ + raise NotImplementedError( + "The 'handle_request' method must be implemented."
+ ) # pragma: no cover + + def close(self) -> None: + pass + + +class AsyncBaseTransport: + async def __aenter__(self: A) -> A: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + await self.aclose() + + async def handle_async_request( + self, + request: Request, + ) -> Response: + raise NotImplementedError( + "The 'handle_async_request' method must be implemented." + ) # pragma: no cover + + async def aclose(self) -> None: + pass diff --git a/venv/lib/python3.11/site-packages/httpx/_transports/default.py b/venv/lib/python3.11/site-packages/httpx/_transports/default.py new file mode 100644 index 0000000..d5aa05f --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx/_transports/default.py @@ -0,0 +1,406 @@ +""" +Custom transports, with nicely configured defaults. + +The following additional keyword arguments are currently supported by httpcore... + +* uds: str +* local_address: str +* retries: int + +Example usages... + +# Disable HTTP/2 on a single specific domain. +mounts = { + "all://": httpx.HTTPTransport(http2=True), + "all://*example.org": httpx.HTTPTransport() +} + +# Using advanced httpcore configuration, with connection retries. +transport = httpx.HTTPTransport(retries=1) +client = httpx.Client(transport=transport) + +# Using advanced httpcore configuration, with unix domain sockets. +transport = httpx.HTTPTransport(uds="socket.uds") +client = httpx.Client(transport=transport) +""" + +from __future__ import annotations + +import contextlib +import typing +from types import TracebackType + +if typing.TYPE_CHECKING: + import ssl # pragma: no cover + + import httpx # pragma: no cover + +from .._config import DEFAULT_LIMITS, Limits, Proxy, create_ssl_context +from .._exceptions import ( + ConnectError, + ConnectTimeout, + LocalProtocolError, + NetworkError, + PoolTimeout, + ProtocolError, + ProxyError, + ReadError, + ReadTimeout, + RemoteProtocolError, + TimeoutException, + UnsupportedProtocol, + WriteError, + WriteTimeout, +) +from .._models import Request, Response +from .._types import AsyncByteStream, CertTypes, ProxyTypes, SyncByteStream +from .._urls import URL +from .base import AsyncBaseTransport, BaseTransport + +T = typing.TypeVar("T", bound="HTTPTransport") +A = typing.TypeVar("A", bound="AsyncHTTPTransport") + +SOCKET_OPTION = typing.Union[ + typing.Tuple[int, int, int], + typing.Tuple[int, int, typing.Union[bytes, bytearray]], + typing.Tuple[int, int, None, int], +] + +__all__ = ["AsyncHTTPTransport", "HTTPTransport"] + +HTTPCORE_EXC_MAP: dict[type[Exception], type[httpx.HTTPError]] = {} + + +def _load_httpcore_exceptions() -> dict[type[Exception], type[httpx.HTTPError]]: + import httpcore + + return { + httpcore.TimeoutException: TimeoutException, + httpcore.ConnectTimeout: ConnectTimeout, + httpcore.ReadTimeout: ReadTimeout, + httpcore.WriteTimeout: WriteTimeout, + httpcore.PoolTimeout: PoolTimeout, + httpcore.NetworkError: NetworkError, + httpcore.ConnectError: ConnectError, + httpcore.ReadError: ReadError, + httpcore.WriteError: WriteError, + httpcore.ProxyError: ProxyError, + httpcore.UnsupportedProtocol: UnsupportedProtocol, + httpcore.ProtocolError: ProtocolError, + httpcore.LocalProtocolError: LocalProtocolError, + httpcore.RemoteProtocolError: RemoteProtocolError, + } + + +@contextlib.contextmanager +def map_httpcore_exceptions() -> typing.Iterator[None]: + global HTTPCORE_EXC_MAP + if len(HTTPCORE_EXC_MAP) == 0: + 
HTTPCORE_EXC_MAP = _load_httpcore_exceptions() + try: + yield + except Exception as exc: + mapped_exc = None + + for from_exc, to_exc in HTTPCORE_EXC_MAP.items(): + if not isinstance(exc, from_exc): + continue + # We want to map to the most specific exception we can find. + # Eg if `exc` is an `httpcore.ReadTimeout`, we want to map to + # `httpx.ReadTimeout`, not just `httpx.TimeoutException`. + if mapped_exc is None or issubclass(to_exc, mapped_exc): + mapped_exc = to_exc + + if mapped_exc is None: # pragma: no cover + raise + + message = str(exc) + raise mapped_exc(message) from exc + + +class ResponseStream(SyncByteStream): + def __init__(self, httpcore_stream: typing.Iterable[bytes]) -> None: + self._httpcore_stream = httpcore_stream + + def __iter__(self) -> typing.Iterator[bytes]: + with map_httpcore_exceptions(): + for part in self._httpcore_stream: + yield part + + def close(self) -> None: + if hasattr(self._httpcore_stream, "close"): + self._httpcore_stream.close() + + +class HTTPTransport(BaseTransport): + def __init__( + self, + verify: ssl.SSLContext | str | bool = True, + cert: CertTypes | None = None, + trust_env: bool = True, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + proxy: ProxyTypes | None = None, + uds: str | None = None, + local_address: str | None = None, + retries: int = 0, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + import httpcore + + proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy + ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) + + if proxy is None: + self._pool = httpcore.ConnectionPool( + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + uds=uds, + local_address=local_address, + retries=retries, + socket_options=socket_options, + ) + elif proxy.url.scheme in ("http", "https"): + self._pool = httpcore.HTTPProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + proxy_headers=proxy.headers.raw, + ssl_context=ssl_context, + proxy_ssl_context=proxy.ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + socket_options=socket_options, + ) + elif proxy.url.scheme in ("socks5", "socks5h"): + try: + import socksio # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using SOCKS proxy, but the 'socksio' package is not installed. " + "Make sure to install httpx using `pip install httpx[socks]`." + ) from None + + self._pool = httpcore.SOCKSProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + ) + else: # pragma: no cover + raise ValueError( + "Proxy protocol must be either 'http', 'https', 'socks5', or 'socks5h'," + f" but got {proxy.url.scheme!r}." + ) + + def __enter__(self: T) -> T: # Use generics for subclass support. 
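+ # Entering the transport opens the underlying httpcore connection pool.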
+ self._pool.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + with map_httpcore_exceptions(): + self._pool.__exit__(exc_type, exc_value, traceback) + + def handle_request( + self, + request: Request, + ) -> Response: + assert isinstance(request.stream, SyncByteStream) + import httpcore + + req = httpcore.Request( + method=request.method, + url=httpcore.URL( + scheme=request.url.raw_scheme, + host=request.url.raw_host, + port=request.url.port, + target=request.url.raw_path, + ), + headers=request.headers.raw, + content=request.stream, + extensions=request.extensions, + ) + with map_httpcore_exceptions(): + resp = self._pool.handle_request(req) + + assert isinstance(resp.stream, typing.Iterable) + + return Response( + status_code=resp.status, + headers=resp.headers, + stream=ResponseStream(resp.stream), + extensions=resp.extensions, + ) + + def close(self) -> None: + self._pool.close() + + +class AsyncResponseStream(AsyncByteStream): + def __init__(self, httpcore_stream: typing.AsyncIterable[bytes]) -> None: + self._httpcore_stream = httpcore_stream + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + with map_httpcore_exceptions(): + async for part in self._httpcore_stream: + yield part + + async def aclose(self) -> None: + if hasattr(self._httpcore_stream, "aclose"): + await self._httpcore_stream.aclose() + + +class AsyncHTTPTransport(AsyncBaseTransport): + def __init__( + self, + verify: ssl.SSLContext | str | bool = True, + cert: CertTypes | None = None, + trust_env: bool = True, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + proxy: ProxyTypes | None = None, + uds: str | None = None, + local_address: str | None = None, + retries: int = 0, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + import httpcore + + proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy + ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) + + if proxy is None: + self._pool = httpcore.AsyncConnectionPool( + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + uds=uds, + local_address=local_address, + retries=retries, + socket_options=socket_options, + ) + elif proxy.url.scheme in ("http", "https"): + self._pool = httpcore.AsyncHTTPProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + proxy_headers=proxy.headers.raw, + proxy_ssl_context=proxy.ssl_context, + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + socket_options=socket_options, + ) + elif proxy.url.scheme in ("socks5", "socks5h"): + try: + import socksio # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using SOCKS proxy, but the 'socksio' package is not installed. " + "Make sure to install httpx using `pip install httpx[socks]`." 
+ ) from None + + self._pool = httpcore.AsyncSOCKSProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + ) + else: # pragma: no cover + raise ValueError( + "Proxy protocol must be either 'http', 'https', 'socks5', or 'socks5h'," + f" but got {proxy.url.scheme!r}." + ) + + async def __aenter__(self: A) -> A: # Use generics for subclass support. + await self._pool.__aenter__() + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + with map_httpcore_exceptions(): + await self._pool.__aexit__(exc_type, exc_value, traceback) + + async def handle_async_request( + self, + request: Request, + ) -> Response: + assert isinstance(request.stream, AsyncByteStream) + import httpcore + + req = httpcore.Request( + method=request.method, + url=httpcore.URL( + scheme=request.url.raw_scheme, + host=request.url.raw_host, + port=request.url.port, + target=request.url.raw_path, + ), + headers=request.headers.raw, + content=request.stream, + extensions=request.extensions, + ) + with map_httpcore_exceptions(): + resp = await self._pool.handle_async_request(req) + + assert isinstance(resp.stream, typing.AsyncIterable) + + return Response( + status_code=resp.status, + headers=resp.headers, + stream=AsyncResponseStream(resp.stream), + extensions=resp.extensions, + ) + + async def aclose(self) -> None: + await self._pool.aclose() diff --git a/venv/lib/python3.11/site-packages/httpx/_transports/mock.py b/venv/lib/python3.11/site-packages/httpx/_transports/mock.py new file mode 100644 index 0000000..8c418f5 --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx/_transports/mock.py @@ -0,0 +1,43 @@ +from __future__ import annotations + +import typing + +from .._models import Request, Response +from .base import AsyncBaseTransport, BaseTransport + +SyncHandler = typing.Callable[[Request], Response] +AsyncHandler = typing.Callable[[Request], typing.Coroutine[None, None, Response]] + + +__all__ = ["MockTransport"] + + +class MockTransport(AsyncBaseTransport, BaseTransport): + def __init__(self, handler: SyncHandler | AsyncHandler) -> None: + self.handler = handler + + def handle_request( + self, + request: Request, + ) -> Response: + request.read() + response = self.handler(request) + if not isinstance(response, Response): # pragma: no cover + raise TypeError("Cannot use an async handler in a sync Client") + return response + + async def handle_async_request( + self, + request: Request, + ) -> Response: + await request.aread() + response = self.handler(request) + + # Allow handler to *optionally* be an `async` function. + # If it is, then the `response` variable needs to be awaited to actually + # return the result.
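+ # A plain Response returned by a sync handler passes through unchanged.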
+ + if not isinstance(response, Response): + response = await response + + return response diff --git a/venv/lib/python3.11/site-packages/httpx/_transports/wsgi.py b/venv/lib/python3.11/site-packages/httpx/_transports/wsgi.py new file mode 100644 index 0000000..8592ffe --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx/_transports/wsgi.py @@ -0,0 +1,149 @@ +from __future__ import annotations + +import io +import itertools +import sys +import typing + +from .._models import Request, Response +from .._types import SyncByteStream +from .base import BaseTransport + +if typing.TYPE_CHECKING: + from _typeshed import OptExcInfo # pragma: no cover + from _typeshed.wsgi import WSGIApplication # pragma: no cover + +_T = typing.TypeVar("_T") + + +__all__ = ["WSGITransport"] + + +def _skip_leading_empty_chunks(body: typing.Iterable[_T]) -> typing.Iterable[_T]: + body = iter(body) + for chunk in body: + if chunk: + return itertools.chain([chunk], body) + return [] + + +class WSGIByteStream(SyncByteStream): + def __init__(self, result: typing.Iterable[bytes]) -> None: + self._close = getattr(result, "close", None) + self._result = _skip_leading_empty_chunks(result) + + def __iter__(self) -> typing.Iterator[bytes]: + for part in self._result: + yield part + + def close(self) -> None: + if self._close is not None: + self._close() + + +class WSGITransport(BaseTransport): + """ + A custom transport that handles sending requests directly to a WSGI app. + The simplest way to use this functionality is to use the `app` argument. + + ``` + client = httpx.Client(app=app) + ``` + + Alternatively, you can set up the transport instance explicitly. + This allows you to include any additional configuration arguments specific + to the WSGITransport class: + + ``` + transport = httpx.WSGITransport( + app=app, + script_name="/submount", + remote_addr="1.2.3.4" + ) + client = httpx.Client(transport=transport) + ``` + + Arguments: + + * `app` - The WSGI application. + * `raise_app_exceptions` - Boolean indicating if exceptions in the application + should be raised. Defaults to `True`. Can be set to `False` for use cases + such as testing the content of a client 500 response. + * `script_name` - The root path on which the WSGI application should be mounted. + * `remote_addr` - A string indicating the client IP of incoming requests.
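+ * `wsgi_errors` - A text stream used for the app's `wsgi.errors`; defaults to `sys.stderr`.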
+ """ + + def __init__( + self, + app: WSGIApplication, + raise_app_exceptions: bool = True, + script_name: str = "", + remote_addr: str = "127.0.0.1", + wsgi_errors: typing.TextIO | None = None, + ) -> None: + self.app = app + self.raise_app_exceptions = raise_app_exceptions + self.script_name = script_name + self.remote_addr = remote_addr + self.wsgi_errors = wsgi_errors + + def handle_request(self, request: Request) -> Response: + request.read() + wsgi_input = io.BytesIO(request.content) + + port = request.url.port or {"http": 80, "https": 443}[request.url.scheme] + environ = { + "wsgi.version": (1, 0), + "wsgi.url_scheme": request.url.scheme, + "wsgi.input": wsgi_input, + "wsgi.errors": self.wsgi_errors or sys.stderr, + "wsgi.multithread": True, + "wsgi.multiprocess": False, + "wsgi.run_once": False, + "REQUEST_METHOD": request.method, + "SCRIPT_NAME": self.script_name, + "PATH_INFO": request.url.path, + "QUERY_STRING": request.url.query.decode("ascii"), + "SERVER_NAME": request.url.host, + "SERVER_PORT": str(port), + "SERVER_PROTOCOL": "HTTP/1.1", + "REMOTE_ADDR": self.remote_addr, + } + for header_key, header_value in request.headers.raw: + key = header_key.decode("ascii").upper().replace("-", "_") + if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"): + key = "HTTP_" + key + environ[key] = header_value.decode("ascii") + + seen_status = None + seen_response_headers = None + seen_exc_info = None + + def start_response( + status: str, + response_headers: list[tuple[str, str]], + exc_info: OptExcInfo | None = None, + ) -> typing.Callable[[bytes], typing.Any]: + nonlocal seen_status, seen_response_headers, seen_exc_info + seen_status = status + seen_response_headers = response_headers + seen_exc_info = exc_info + return lambda _: None + + result = self.app(environ, start_response) + + stream = WSGIByteStream(result) + + assert seen_status is not None + assert seen_response_headers is not None + if seen_exc_info and seen_exc_info[0] and self.raise_app_exceptions: + raise seen_exc_info[1] + + status_code = int(seen_status.split()[0]) + headers = [ + (key.encode("ascii"), value.encode("ascii")) + for key, value in seen_response_headers + ] + + return Response(status_code, headers=headers, stream=stream) diff --git a/venv/lib/python3.11/site-packages/httpx/_types.py b/venv/lib/python3.11/site-packages/httpx/_types.py new file mode 100644 index 0000000..704dfdf --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx/_types.py @@ -0,0 +1,114 @@ +""" +Type definitions for type checking purposes.
+""" + +from http.cookiejar import CookieJar +from typing import ( + IO, + TYPE_CHECKING, + Any, + AsyncIterable, + AsyncIterator, + Callable, + Dict, + Iterable, + Iterator, + List, + Mapping, + Optional, + Sequence, + Tuple, + Union, +) + +if TYPE_CHECKING: # pragma: no cover + from ._auth import Auth # noqa: F401 + from ._config import Proxy, Timeout # noqa: F401 + from ._models import Cookies, Headers, Request # noqa: F401 + from ._urls import URL, QueryParams # noqa: F401 + + +PrimitiveData = Optional[Union[str, int, float, bool]] + +URLTypes = Union["URL", str] + +QueryParamTypes = Union[ + "QueryParams", + Mapping[str, Union[PrimitiveData, Sequence[PrimitiveData]]], + List[Tuple[str, PrimitiveData]], + Tuple[Tuple[str, PrimitiveData], ...], + str, + bytes, +] + +HeaderTypes = Union[ + "Headers", + Mapping[str, str], + Mapping[bytes, bytes], + Sequence[Tuple[str, str]], + Sequence[Tuple[bytes, bytes]], +] + +CookieTypes = Union["Cookies", CookieJar, Dict[str, str], List[Tuple[str, str]]] + +TimeoutTypes = Union[ + Optional[float], + Tuple[Optional[float], Optional[float], Optional[float], Optional[float]], + "Timeout", +] +ProxyTypes = Union["URL", str, "Proxy"] +CertTypes = Union[str, Tuple[str, str], Tuple[str, str, str]] + +AuthTypes = Union[ + Tuple[Union[str, bytes], Union[str, bytes]], + Callable[["Request"], "Request"], + "Auth", +] + +RequestContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] +ResponseContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] +ResponseExtensions = Mapping[str, Any] + +RequestData = Mapping[str, Any] + +FileContent = Union[IO[bytes], bytes, str] +FileTypes = Union[ + # file (or bytes) + FileContent, + # (filename, file (or bytes)) + Tuple[Optional[str], FileContent], + # (filename, file (or bytes), content_type) + Tuple[Optional[str], FileContent, Optional[str]], + # (filename, file (or bytes), content_type, headers) + Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], +] +RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]] + +RequestExtensions = Mapping[str, Any] + +__all__ = ["AsyncByteStream", "SyncByteStream"] + + +class SyncByteStream: + def __iter__(self) -> Iterator[bytes]: + raise NotImplementedError( + "The '__iter__' method must be implemented." + ) # pragma: no cover + yield b"" # pragma: no cover + + def close(self) -> None: + """ + Subclasses can override this method to release any network resources + after a request/response cycle is complete. + """ + + +class AsyncByteStream: + async def __aiter__(self) -> AsyncIterator[bytes]: + raise NotImplementedError( + "The '__aiter__' method must be implemented." + ) # pragma: no cover + yield b"" # pragma: no cover + + async def aclose(self) -> None: + pass diff --git a/venv/lib/python3.11/site-packages/httpx/_urlparse.py b/venv/lib/python3.11/site-packages/httpx/_urlparse.py new file mode 100644 index 0000000..bf190fd --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx/_urlparse.py @@ -0,0 +1,527 @@ +""" +An implementation of `urlparse` that provides URL validation and normalization +as described by RFC3986. + +We rely on this implementation rather than the one in Python's stdlib, because: + +* It provides more complete URL validation. +* It properly differentiates between an empty querystring and an absent querystring, + to distinguish URLs with a trailing '?'. +* It handles scheme, hostname, port, and path normalization. +* It supports IDNA hostnames, normalizing them to their encoded form. 
+* The API supports passing individual components, as well as the complete URL string. + +Previously we relied on the excellent `rfc3986` package to handle URL parsing and +validation, but this module provides a simpler alternative, with less indirection +required. +""" + +from __future__ import annotations + +import ipaddress +import re +import typing + +import idna + +from ._exceptions import InvalidURL + +MAX_URL_LENGTH = 65536 + +# https://datatracker.ietf.org/doc/html/rfc3986.html#section-2.3 +UNRESERVED_CHARACTERS = ( + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~" +) +SUB_DELIMS = "!$&'()*+,;=" + +PERCENT_ENCODED_REGEX = re.compile("%[A-Fa-f0-9]{2}") + +# https://url.spec.whatwg.org/#percent-encoded-bytes + +# The fragment percent-encode set is the C0 control percent-encode set +# and U+0020 SPACE, U+0022 ("), U+003C (<), U+003E (>), and U+0060 (`). +FRAG_SAFE = "".join( + [chr(i) for i in range(0x20, 0x7F) if i not in (0x20, 0x22, 0x3C, 0x3E, 0x60)] +) + +# The query percent-encode set is the C0 control percent-encode set +# and U+0020 SPACE, U+0022 ("), U+0023 (#), U+003C (<), and U+003E (>). +QUERY_SAFE = "".join( + [chr(i) for i in range(0x20, 0x7F) if i not in (0x20, 0x22, 0x23, 0x3C, 0x3E)] +) + +# The path percent-encode set is the query percent-encode set +# and U+003F (?), U+0060 (`), U+007B ({), and U+007D (}). +PATH_SAFE = "".join( + [ + chr(i) + for i in range(0x20, 0x7F) + if i not in (0x20, 0x22, 0x23, 0x3C, 0x3E) + (0x3F, 0x60, 0x7B, 0x7D) + ] +) + +# The userinfo percent-encode set is the path percent-encode set +# and U+002F (/), U+003A (:), U+003B (;), U+003D (=), U+0040 (@), +# U+005B ([) to U+005E (^), inclusive, and U+007C (|). +USERNAME_SAFE = "".join( + [ + chr(i) + for i in range(0x20, 0x7F) + if i + not in (0x20, 0x22, 0x23, 0x3C, 0x3E) + + (0x3F, 0x60, 0x7B, 0x7D) + + (0x2F, 0x3A, 0x3B, 0x3D, 0x40, 0x5B, 0x5C, 0x5D, 0x5E, 0x7C) + ] +) +PASSWORD_SAFE = "".join( + [ + chr(i) + for i in range(0x20, 0x7F) + if i + not in (0x20, 0x22, 0x23, 0x3C, 0x3E) + + (0x3F, 0x60, 0x7B, 0x7D) + + (0x2F, 0x3A, 0x3B, 0x3D, 0x40, 0x5B, 0x5C, 0x5D, 0x5E, 0x7C) + ] +) +# Note... The terminology 'userinfo' percent-encode set in the WHATWG document +# is used for the username and password quoting. For the joint userinfo component +# we remove U+003A (:) from the safe set. +USERINFO_SAFE = "".join( + [ + chr(i) + for i in range(0x20, 0x7F) + if i + not in (0x20, 0x22, 0x23, 0x3C, 0x3E) + + (0x3F, 0x60, 0x7B, 0x7D) + + (0x2F, 0x3B, 0x3D, 0x40, 0x5B, 0x5C, 0x5D, 0x5E, 0x7C) + ] +) + + +# {scheme}: (optional) +# //{authority} (optional) +# {path} +# ?{query} (optional) +# #{fragment} (optional) +URL_REGEX = re.compile( + ( + r"(?:(?P<scheme>{scheme}):)?" + r"(?://(?P<authority>{authority}))?" + r"(?P<path>{path})" + r"(?:\?(?P<query>{query}))?" + r"(?:#(?P<fragment>{fragment}))?" + ).format( + scheme="([a-zA-Z][a-zA-Z0-9+.-]*)?", + authority="[^/?#]*", + path="[^?#]*", + query="[^#]*", + fragment=".*", + ) +) + +# {userinfo}@ (optional) +# {host} +# :{port} (optional) +AUTHORITY_REGEX = re.compile( + ( + r"(?:(?P<userinfo>{userinfo})@)?" r"(?P<host>{host})" r":?(?P<port>{port})?" + ).format( + userinfo=".*", # Any character sequence. + host="(\\[.*\\]|[^:@]*)", # Either any character sequence excluding ':' or '@', + # or an IPv6 address enclosed within square brackets. + port=".*", # Any character sequence. + ) +) + + +# If we call urlparse with an individual component, then we need to regex +# validate that component individually. +# Note that we're duplicating the same strings as above. Shock! Horror!!
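+# Each individually-supplied component must fullmatch its regex (enforced in urlparse below).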
+COMPONENT_REGEX = { + "scheme": re.compile("([a-zA-Z][a-zA-Z0-9+.-]*)?"), + "authority": re.compile("[^/?#]*"), + "path": re.compile("[^?#]*"), + "query": re.compile("[^#]*"), + "fragment": re.compile(".*"), + "userinfo": re.compile("[^@]*"), + "host": re.compile("(\\[.*\\]|[^:]*)"), + "port": re.compile(".*"), +} + + +# We use these simple regexes as a first pass before handing off to +# the stdlib 'ipaddress' module for IP address validation. +IPv4_STYLE_HOSTNAME = re.compile(r"^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$") +IPv6_STYLE_HOSTNAME = re.compile(r"^\[.*\]$") + + +class ParseResult(typing.NamedTuple): + scheme: str + userinfo: str + host: str + port: int | None + path: str + query: str | None + fragment: str | None + + @property + def authority(self) -> str: + return "".join( + [ + f"{self.userinfo}@" if self.userinfo else "", + f"[{self.host}]" if ":" in self.host else self.host, + f":{self.port}" if self.port is not None else "", + ] + ) + + @property + def netloc(self) -> str: + return "".join( + [ + f"[{self.host}]" if ":" in self.host else self.host, + f":{self.port}" if self.port is not None else "", + ] + ) + + def copy_with(self, **kwargs: str | None) -> ParseResult: + if not kwargs: + return self + + defaults = { + "scheme": self.scheme, + "authority": self.authority, + "path": self.path, + "query": self.query, + "fragment": self.fragment, + } + defaults.update(kwargs) + return urlparse("", **defaults) + + def __str__(self) -> str: + authority = self.authority + return "".join( + [ + f"{self.scheme}:" if self.scheme else "", + f"//{authority}" if authority else "", + self.path, + f"?{self.query}" if self.query is not None else "", + f"#{self.fragment}" if self.fragment is not None else "", + ] + ) + + +def urlparse(url: str = "", **kwargs: str | None) -> ParseResult: + # Initial basic checks on allowable URLs. + # --------------------------------------- + + # Hard limit the maximum allowable URL length. + if len(url) > MAX_URL_LENGTH: + raise InvalidURL("URL too long") + + # If a URL includes any ASCII control characters including \t, \r, \n, + # then treat it as invalid. + if any(char.isascii() and not char.isprintable() for char in url): + char = next(char for char in url if char.isascii() and not char.isprintable()) + idx = url.find(char) + error = ( + f"Invalid non-printable ASCII character in URL, {char!r} at position {idx}." + ) + raise InvalidURL(error) + + # Some keyword arguments require special handling. + # ------------------------------------------------ + + # Coerce "port" to a string, if it is provided as an integer. + if "port" in kwargs: + port = kwargs["port"] + kwargs["port"] = str(port) if isinstance(port, int) else port + + # Replace "netloc" with "host" and "port". + if "netloc" in kwargs: + netloc = kwargs.pop("netloc") or "" + kwargs["host"], _, kwargs["port"] = netloc.partition(":") + + # Replace "username" and/or "password" with "userinfo". + if "username" in kwargs or "password" in kwargs: + username = quote(kwargs.pop("username", "") or "", safe=USERNAME_SAFE) + password = quote(kwargs.pop("password", "") or "", safe=PASSWORD_SAFE) + kwargs["userinfo"] = f"{username}:{password}" if password else username + + # Replace "raw_path" with "path" and "query". + if "raw_path" in kwargs: + raw_path = kwargs.pop("raw_path") or "" + kwargs["path"], separator, kwargs["query"] = raw_path.partition("?") + if not separator: + kwargs["query"] = None + + # Ensure that IPv6 "host" addresses are always escaped with "[...]".
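+ # e.g. host="::1" is rewritten to host="[::1]" so the authority parses unambiguously.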
+ if "host" in kwargs: + host = kwargs.get("host") or "" + if ":" in host and not (host.startswith("[") and host.endswith("]")): + kwargs["host"] = f"[{host}]" + + # If any keyword arguments are provided, ensure they are valid. + # ------------------------------------------------------------- + + for key, value in kwargs.items(): + if value is not None: + if len(value) > MAX_URL_LENGTH: + raise InvalidURL(f"URL component '{key}' too long") + + # If a component includes any ASCII control characters including \t, \r, \n, + # then treat it as invalid. + if any(char.isascii() and not char.isprintable() for char in value): + char = next( + char for char in value if char.isascii() and not char.isprintable() + ) + idx = value.find(char) + error = ( + f"Invalid non-printable ASCII character in URL {key} component, " + f"{char!r} at position {idx}." + ) + raise InvalidURL(error) + + # Ensure that keyword arguments match as a valid regex. + if not COMPONENT_REGEX[key].fullmatch(value): + raise InvalidURL(f"Invalid URL component '{key}'") + + # The URL_REGEX will always match, but may have empty components. + url_match = URL_REGEX.match(url) + assert url_match is not None + url_dict = url_match.groupdict() + + # * 'scheme', 'authority', and 'path' may be empty strings. + # * 'query' may be 'None', indicating no trailing "?" portion. + # Any string including the empty string, indicates a trailing "?". + # * 'fragment' may be 'None', indicating no trailing "#" portion. + # Any string including the empty string, indicates a trailing "#". + scheme = kwargs.get("scheme", url_dict["scheme"]) or "" + authority = kwargs.get("authority", url_dict["authority"]) or "" + path = kwargs.get("path", url_dict["path"]) or "" + query = kwargs.get("query", url_dict["query"]) + frag = kwargs.get("fragment", url_dict["fragment"]) + + # The AUTHORITY_REGEX will always match, but may have empty components. + authority_match = AUTHORITY_REGEX.match(authority) + assert authority_match is not None + authority_dict = authority_match.groupdict() + + # * 'userinfo' and 'host' may be empty strings. + # * 'port' may be 'None'. + userinfo = kwargs.get("userinfo", authority_dict["userinfo"]) or "" + host = kwargs.get("host", authority_dict["host"]) or "" + port = kwargs.get("port", authority_dict["port"]) + + # Normalize and validate each component. + # We end up with a parsed representation of the URL, + # with components that are plain ASCII bytestrings. + parsed_scheme: str = scheme.lower() + parsed_userinfo: str = quote(userinfo, safe=USERINFO_SAFE) + parsed_host: str = encode_host(host) + parsed_port: int | None = normalize_port(port, scheme) + + has_scheme = parsed_scheme != "" + has_authority = ( + parsed_userinfo != "" or parsed_host != "" or parsed_port is not None + ) + validate_path(path, has_scheme=has_scheme, has_authority=has_authority) + if has_scheme or has_authority: + path = normalize_path(path) + + parsed_path: str = quote(path, safe=PATH_SAFE) + parsed_query: str | None = None if query is None else quote(query, safe=QUERY_SAFE) + parsed_frag: str | None = None if frag is None else quote(frag, safe=FRAG_SAFE) + + # The parsed ASCII bytestrings are our canonical form. + # All properties of the URL are derived from these. 
+ return ParseResult( + parsed_scheme, + parsed_userinfo, + parsed_host, + parsed_port, + parsed_path, + parsed_query, + parsed_frag, + ) + + +def encode_host(host: str) -> str: + if not host: + return "" + + elif IPv4_STYLE_HOSTNAME.match(host): + # Validate IPv4 hostnames like #.#.#.# + # + # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 + # + # IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet + try: + ipaddress.IPv4Address(host) + except ipaddress.AddressValueError: + raise InvalidURL(f"Invalid IPv4 address: {host!r}") + return host + + elif IPv6_STYLE_HOSTNAME.match(host): + # Validate IPv6 hostnames like [...] + # + # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 + # + # "A host identified by an Internet Protocol literal address, version 6 + # [RFC3513] or later, is distinguished by enclosing the IP literal + # within square brackets ("[" and "]"). This is the only place where + # square bracket characters are allowed in the URI syntax." + try: + ipaddress.IPv6Address(host[1:-1]) + except ipaddress.AddressValueError: + raise InvalidURL(f"Invalid IPv6 address: {host!r}") + return host[1:-1] + + elif host.isascii(): + # Regular ASCII hostnames + # + # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 + # + # reg-name = *( unreserved / pct-encoded / sub-delims ) + WHATWG_SAFE = '"`{}%|\\' + return quote(host.lower(), safe=SUB_DELIMS + WHATWG_SAFE) + + # IDNA hostnames + try: + return idna.encode(host.lower()).decode("ascii") + except idna.IDNAError: + raise InvalidURL(f"Invalid IDNA hostname: {host!r}") + + +def normalize_port(port: str | int | None, scheme: str) -> int | None: + # From https://tools.ietf.org/html/rfc3986#section-3.2.3 + # + # "A scheme may define a default port. For example, the "http" scheme + # defines a default port of "80", corresponding to its reserved TCP + # port number. The type of port designated by the port number (e.g., + # TCP, UDP, SCTP) is defined by the URI scheme. URI producers and + # normalizers should omit the port component and its ":" delimiter if + # port is empty or if its value would be the same as that of the + # scheme's default." + if port is None or port == "": + return None + + try: + port_as_int = int(port) + except ValueError: + raise InvalidURL(f"Invalid port: {port!r}") + + # See https://url.spec.whatwg.org/#url-miscellaneous + default_port = {"ftp": 21, "http": 80, "https": 443, "ws": 80, "wss": 443}.get( + scheme + ) + if port_as_int == default_port: + return None + return port_as_int + + +def validate_path(path: str, has_scheme: bool, has_authority: bool) -> None: + """ + Path validation rules that depend on if the URL contains + a scheme or authority component. + + See https://datatracker.ietf.org/doc/html/rfc3986.html#section-3.3 + """ + if has_authority: + # If a URI contains an authority component, then the path component + # must either be empty or begin with a slash ("/") character." + if path and not path.startswith("/"): + raise InvalidURL("For absolute URLs, path must be empty or begin with '/'") + + if not has_scheme and not has_authority: + # If a URI does not contain an authority component, then the path cannot begin + # with two slash characters ("//"). + if path.startswith("//"): + raise InvalidURL("Relative URLs cannot have a path starting with '//'") + + # In addition, a URI reference (Section 4.1) may be a relative-path reference, + # in which case the first path segment cannot contain a colon (":") character. 
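+ # A leading ":" would make the first segment look like a URL scheme.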
+ if path.startswith(":"): + raise InvalidURL("Relative URLs cannot have a path starting with ':'") + + +def normalize_path(path: str) -> str: + """ + Drop "." and ".." segments from a URL path. + + For example: + + normalize_path("/path/./to/somewhere/..") == "/path/to" + """ + # Fast return when no '.' characters in the path. + if "." not in path: + return path + + components = path.split("/") + + # Fast return when no '.' or '..' components in the path. + if "." not in components and ".." not in components: + return path + + # https://datatracker.ietf.org/doc/html/rfc3986#section-5.2.4 + output: list[str] = [] + for component in components: + if component == ".": + pass + elif component == "..": + if output and output != [""]: + output.pop() + else: + output.append(component) + return "/".join(output) + + +def PERCENT(string: str) -> str: + return "".join([f"%{byte:02X}" for byte in string.encode("utf-8")]) + + +def percent_encoded(string: str, safe: str) -> str: + """ + Use percent-encoding to quote a string. + """ + NON_ESCAPED_CHARS = UNRESERVED_CHARACTERS + safe + + # Fast path for strings that don't need escaping. + if not string.rstrip(NON_ESCAPED_CHARS): + return string + + return "".join( + [char if char in NON_ESCAPED_CHARS else PERCENT(char) for char in string] + ) + + +def quote(string: str, safe: str) -> str: + """ + Use percent-encoding to quote a string, omitting existing '%xx' escape sequences. + + See: https://www.rfc-editor.org/rfc/rfc3986#section-2.1 + + * `string`: The string to be percent-escaped. + * `safe`: A string containing characters that may be treated as safe, and do not + need to be escaped. Unreserved characters are always treated as safe. + See: https://www.rfc-editor.org/rfc/rfc3986#section-2.3 + """ + parts = [] + current_position = 0 + for match in re.finditer(PERCENT_ENCODED_REGEX, string): + start_position, end_position = match.start(), match.end() + matched_text = match.group(0) + # Add any text up to the '%xx' escape sequence. + if start_position != current_position: + leading_text = string[current_position:start_position] + parts.append(percent_encoded(leading_text, safe=safe)) + + # Add the '%xx' escape sequence. + parts.append(matched_text) + current_position = end_position + + # Add any text after the final '%xx' escape sequence. 
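+ # (Matched '%xx' sequences were already copied through verbatim by the loop.)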
+ if current_position != len(string): + trailing_text = string[current_position:] + parts.append(percent_encoded(trailing_text, safe=safe)) + + return "".join(parts) diff --git a/venv/lib/python3.11/site-packages/httpx/_urls.py b/venv/lib/python3.11/site-packages/httpx/_urls.py new file mode 100644 index 0000000..147a8fa --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx/_urls.py @@ -0,0 +1,641 @@ +from __future__ import annotations + +import typing +from urllib.parse import parse_qs, unquote, urlencode + +import idna + +from ._types import QueryParamTypes +from ._urlparse import urlparse +from ._utils import primitive_value_to_str + +__all__ = ["URL", "QueryParams"] + + +class URL: + """ + url = httpx.URL("HTTPS://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink") + + assert url.scheme == "https" + assert url.username == "jo@email.com" + assert url.password == "a secret" + assert url.userinfo == b"jo%40email.com:a%20secret" + assert url.host == "müller.de" + assert url.raw_host == b"xn--mller-kva.de" + assert url.port == 1234 + assert url.netloc == b"xn--mller-kva.de:1234" + assert url.path == "/pa th" + assert url.query == b"?search=ab" + assert url.raw_path == b"/pa%20th?search=ab" + assert url.fragment == "anchorlink" + + The components of a URL are broken down like this: + + https://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink + [scheme] [ username ] [password] [ host ][port][ path ] [ query ] [fragment] + [ userinfo ] [ netloc ][ raw_path ] + + Note that: + + * `url.scheme` is normalized to always be lowercased. + + * `url.host` is normalized to always be lowercased. Internationalized domain + names are represented in unicode, without IDNA encoding applied. For instance: + + url = httpx.URL("http://中国.icom.museum") + assert url.host == "中国.icom.museum" + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.host == "中国.icom.museum" + + * `url.raw_host` is normalized to always be lowercased, and is IDNA encoded. + + url = httpx.URL("http://中国.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + + * `url.port` is either None or an integer. URLs that include the default port for + "http", "https", "ws", "wss", and "ftp" schemes have their port + normalized to `None`. + + assert httpx.URL("http://example.com") == httpx.URL("http://example.com:80") + assert httpx.URL("http://example.com").port is None + assert httpx.URL("http://example.com:80").port is None + + * `url.userinfo` is raw bytes, without URL escaping. Usually you'll want to work + with `url.username` and `url.password` instead, which handle the URL escaping. + + * `url.raw_path` is raw bytes of both the path and query, without URL escaping. + This portion is used as the target when constructing HTTP requests. Usually you'll + want to work with `url.path` instead. + + * `url.query` is raw bytes, without URL escaping. A URL query string portion can + only be properly URL escaped when decoding the parameter names and values + themselves. + """ + + def __init__(self, url: URL | str = "", **kwargs: typing.Any) -> None: + if kwargs: + allowed = { + "scheme": str, + "username": str, + "password": str, + "userinfo": bytes, + "host": str, + "port": int, + "netloc": bytes, + "path": str, + "query": bytes, + "raw_path": bytes, + "fragment": str, + "params": object, + } + + # Perform type checking for all supported keyword arguments. 
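+ # bytes values are only accepted for ASCII components and are decoded below.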
+ for key, value in kwargs.items(): + if key not in allowed: + message = f"{key!r} is an invalid keyword argument for URL()" + raise TypeError(message) + if value is not None and not isinstance(value, allowed[key]): + expected = allowed[key].__name__ + seen = type(value).__name__ + message = f"Argument {key!r} must be {expected} but got {seen}" + raise TypeError(message) + if isinstance(value, bytes): + kwargs[key] = value.decode("ascii") + + if "params" in kwargs: + # Replace any "params" keyword with the raw "query" instead. + # + # Ensure that empty params use `kwargs["query"] = None` rather + # than `kwargs["query"] = ""`, so that generated URLs do not + # include an empty trailing "?". + params = kwargs.pop("params") + kwargs["query"] = None if not params else str(QueryParams(params)) + + if isinstance(url, str): + self._uri_reference = urlparse(url, **kwargs) + elif isinstance(url, URL): + self._uri_reference = url._uri_reference.copy_with(**kwargs) + else: + raise TypeError( + "Invalid type for url. Expected str or httpx.URL," + f" got {type(url)}: {url!r}" + ) + + @property + def scheme(self) -> str: + """ + The URL scheme, such as "http", "https". + Always normalised to lowercase. + """ + return self._uri_reference.scheme + + @property + def raw_scheme(self) -> bytes: + """ + The raw bytes representation of the URL scheme, such as b"http", b"https". + Always normalised to lowercase. + """ + return self._uri_reference.scheme.encode("ascii") + + @property + def userinfo(self) -> bytes: + """ + The URL userinfo as a raw bytestring. + For example: b"jo%40email.com:a%20secret". + """ + return self._uri_reference.userinfo.encode("ascii") + + @property + def username(self) -> str: + """ + The URL username as a string, with URL decoding applied. + For example: "jo@email.com" + """ + userinfo = self._uri_reference.userinfo + return unquote(userinfo.partition(":")[0]) + + @property + def password(self) -> str: + """ + The URL password as a string, with URL decoding applied. + For example: "a secret" + """ + userinfo = self._uri_reference.userinfo + return unquote(userinfo.partition(":")[2]) + + @property + def host(self) -> str: + """ + The URL host as a string. + Always normalized to lowercase, with IDNA hosts decoded into unicode. + + Examples: + + url = httpx.URL("http://www.EXAMPLE.org") + assert url.host == "www.example.org" + + url = httpx.URL("http://中国.icom.museum") + assert url.host == "中国.icom.museum" + + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.host == "中国.icom.museum" + + url = httpx.URL("https://[::ffff:192.168.0.1]") + assert url.host == "::ffff:192.168.0.1" + """ + host: str = self._uri_reference.host + + if host.startswith("xn--"): + host = idna.decode(host) + + return host + + @property + def raw_host(self) -> bytes: + """ + The raw bytes representation of the URL host. + Always normalized to lowercase, and IDNA encoded. + + Examples: + + url = httpx.URL("http://www.EXAMPLE.org") + assert url.raw_host == b"www.example.org" + + url = httpx.URL("http://中国.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + + url = httpx.URL("http://xn--fiqs8s.icom.museum") + assert url.raw_host == b"xn--fiqs8s.icom.museum" + + url = httpx.URL("https://[::ffff:192.168.0.1]") + assert url.raw_host == b"::ffff:192.168.0.1" + """ + return self._uri_reference.host.encode("ascii") + + @property + def port(self) -> int | None: + """ + The URL port as an integer. + + Note that the URL class performs port normalization as per the WHATWG spec. 
+ Default ports for "http", "https", "ws", "wss", and "ftp" schemes are always + treated as `None`. + + For example: + + assert httpx.URL("http://www.example.com") == httpx.URL("http://www.example.com:80") + assert httpx.URL("http://www.example.com:80").port is None + """ + return self._uri_reference.port + + @property + def netloc(self) -> bytes: + """ + Either `<host>` or `<host>:<port>` as bytes. + Always normalized to lowercase, and IDNA encoded. + + This property may be used for generating the value of a request + "Host" header. + """ + return self._uri_reference.netloc.encode("ascii") + + @property + def path(self) -> str: + """ + The URL path as a string, excluding the query string, and URL decoded. + + For example: + + url = httpx.URL("https://example.com/pa%20th") + assert url.path == "/pa th" + """ + path = self._uri_reference.path or "/" + return unquote(path) + + @property + def query(self) -> bytes: + """ + The URL query string, as raw bytes, excluding the leading b"?". + + This is necessarily a bytewise interface, because we cannot + perform URL decoding of this representation until we've parsed + the keys and values into a QueryParams instance. + + For example: + + url = httpx.URL("https://example.com/?filter=some%20search%20terms") + assert url.query == b"filter=some%20search%20terms" + """ + query = self._uri_reference.query or "" + return query.encode("ascii") + + @property + def params(self) -> QueryParams: + """ + The URL query parameters, neatly parsed and packaged into an immutable + multidict representation. + """ + return QueryParams(self._uri_reference.query) + + @property + def raw_path(self) -> bytes: + """ + The complete URL path and query string as raw bytes. + Used as the target when constructing HTTP requests. + + For example: + + GET /users?search=some%20text HTTP/1.1 + Host: www.example.org + Connection: close + """ + path = self._uri_reference.path or "/" + if self._uri_reference.query is not None: + path += "?" + self._uri_reference.query + return path.encode("ascii") + + @property + def fragment(self) -> str: + """ + The URL fragment, as used in HTML anchors. + As a string, without the leading '#'. + """ + return unquote(self._uri_reference.fragment or "") + + @property + def is_absolute_url(self) -> bool: + """ + Return `True` for absolute URLs such as 'http://example.com/path', + and `False` for relative URLs such as '/path'. + """ + # We don't use `.is_absolute` from `rfc3986` because it treats + # URLs with a fragment portion as not absolute. + # What we actually care about is if the URL provides + # a scheme and hostname to which connections should be made. + return bool(self._uri_reference.scheme and self._uri_reference.host) + + @property + def is_relative_url(self) -> bool: + """ + Return `False` for absolute URLs such as 'http://example.com/path', + and `True` for relative URLs such as '/path'. + """ + return not self.is_absolute_url + + def copy_with(self, **kwargs: typing.Any) -> URL: + """ + Copy this URL, returning a new URL with some components altered. + Accepts the same set of parameters as the components that are made + available via properties on the `URL` class.
+ + For example: + + url = httpx.URL("https://www.example.com").copy_with( + username="jo@email.com", password="a secret" + ) + assert url == "https://jo%40email.com:a%20secret@www.example.com" + """ + return URL(self, **kwargs) + + def copy_set_param(self, key: str, value: typing.Any = None) -> URL: + return self.copy_with(params=self.params.set(key, value)) + + def copy_add_param(self, key: str, value: typing.Any = None) -> URL: + return self.copy_with(params=self.params.add(key, value)) + + def copy_remove_param(self, key: str) -> URL: + return self.copy_with(params=self.params.remove(key)) + + def copy_merge_params(self, params: QueryParamTypes) -> URL: + return self.copy_with(params=self.params.merge(params)) + + def join(self, url: URL | str) -> URL: + """ + Return an absolute URL, using this URL as the base. + + Eg. + + url = httpx.URL("https://www.example.com/test") + url = url.join("/new/path") + assert url == "https://www.example.com/new/path" + """ + from urllib.parse import urljoin + + return URL(urljoin(str(self), str(URL(url)))) + + def __hash__(self) -> int: + return hash(str(self)) + + def __eq__(self, other: typing.Any) -> bool: + return isinstance(other, (URL, str)) and str(self) == str(URL(other)) + + def __str__(self) -> str: + return str(self._uri_reference) + + def __repr__(self) -> str: + scheme, userinfo, host, port, path, query, fragment = self._uri_reference + + if ":" in userinfo: + # Mask any password component. + userinfo = f'{userinfo.split(":")[0]}:[secure]' + + authority = "".join( + [ + f"{userinfo}@" if userinfo else "", + f"[{host}]" if ":" in host else host, + f":{port}" if port is not None else "", + ] + ) + url = "".join( + [ + f"{self.scheme}:" if scheme else "", + f"//{authority}" if authority else "", + path, + f"?{query}" if query is not None else "", + f"#{fragment}" if fragment is not None else "", + ] + ) + + return f"{self.__class__.__name__}({url!r})" + + @property + def raw(self) -> tuple[bytes, bytes, int, bytes]: # pragma: nocover + import collections + import warnings + + warnings.warn("URL.raw is deprecated.") + RawURL = collections.namedtuple( + "RawURL", ["raw_scheme", "raw_host", "port", "raw_path"] + ) + return RawURL( + raw_scheme=self.raw_scheme, + raw_host=self.raw_host, + port=self.port, + raw_path=self.raw_path, + ) + + +class QueryParams(typing.Mapping[str, str]): + """ + URL query parameters, as a multi-dict. + """ + + def __init__(self, *args: QueryParamTypes | None, **kwargs: typing.Any) -> None: + assert len(args) < 2, "Too many arguments." + assert not (args and kwargs), "Cannot mix named and unnamed arguments."
+ + value = args[0] if args else kwargs + + if value is None or isinstance(value, (str, bytes)): + value = value.decode("ascii") if isinstance(value, bytes) else value + self._dict = parse_qs(value, keep_blank_values=True) + elif isinstance(value, QueryParams): + self._dict = {k: list(v) for k, v in value._dict.items()} + else: + dict_value: dict[typing.Any, list[typing.Any]] = {} + if isinstance(value, (list, tuple)): + # Convert list inputs like: + # [("a", "123"), ("a", "456"), ("b", "789")] + # To a dict representation, like: + # {"a": ["123", "456"], "b": ["789"]} + for item in value: + dict_value.setdefault(item[0], []).append(item[1]) + else: + # Convert dict inputs like: + # {"a": "123", "b": ["456", "789"]} + # To dict inputs where values are always lists, like: + # {"a": ["123"], "b": ["456", "789"]} + dict_value = { + k: list(v) if isinstance(v, (list, tuple)) else [v] + for k, v in value.items() + } + + # Ensure that keys and values are neatly coerced to strings. + # We coerce values `True` and `False` to JSON-like "true" and "false" + # representations, and coerce `None` values to the empty string. + self._dict = { + str(k): [primitive_value_to_str(item) for item in v] + for k, v in dict_value.items() + } + + def keys(self) -> typing.KeysView[str]: + """ + Return all the keys in the query params. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.keys()) == ["a", "b"] + """ + return self._dict.keys() + + def values(self) -> typing.ValuesView[str]: + """ + Return all the values in the query params. If a key occurs more than once + only the first item for that key is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.values()) == ["123", "789"] + """ + return {k: v[0] for k, v in self._dict.items()}.values() + + def items(self) -> typing.ItemsView[str, str]: + """ + Return all items in the query params. If a key occurs more than once + only the first item for that key is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.items()) == [("a", "123"), ("b", "789")] + """ + return {k: v[0] for k, v in self._dict.items()}.items() + + def multi_items(self) -> list[tuple[str, str]]: + """ + Return all items in the query params. Allow duplicate keys to occur. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert list(q.multi_items()) == [("a", "123"), ("a", "456"), ("b", "789")] + """ + multi_items: list[tuple[str, str]] = [] + for k, v in self._dict.items(): + multi_items.extend([(k, i) for i in v]) + return multi_items + + def get(self, key: typing.Any, default: typing.Any = None) -> typing.Any: + """ + Get a value from the query param for a given key. If the key occurs + more than once, then only the first value is returned. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert q.get("a") == "123" + """ + if key in self._dict: + return self._dict[str(key)][0] + return default + + def get_list(self, key: str) -> list[str]: + """ + Get all values from the query param for a given key. + + Usage: + + q = httpx.QueryParams("a=123&a=456&b=789") + assert q.get_list("a") == ["123", "456"] + """ + return list(self._dict.get(str(key), [])) + + def set(self, key: str, value: typing.Any = None) -> QueryParams: + """ + Return a new QueryParams instance, setting the value of a key. 
+ + Usage: + + q = httpx.QueryParams("a=123") + q = q.set("a", "456") + assert q == httpx.QueryParams("a=456") + """ + q = QueryParams() + q._dict = dict(self._dict) + q._dict[str(key)] = [primitive_value_to_str(value)] + return q + + def add(self, key: str, value: typing.Any = None) -> QueryParams: + """ + Return a new QueryParams instance, setting or appending the value of a key. + + Usage: + + q = httpx.QueryParams("a=123") + q = q.add("a", "456") + assert q == httpx.QueryParams("a=123&a=456") + """ + q = QueryParams() + q._dict = dict(self._dict) + q._dict[str(key)] = q.get_list(key) + [primitive_value_to_str(value)] + return q + + def remove(self, key: str) -> QueryParams: + """ + Return a new QueryParams instance, removing the value of a key. + + Usage: + + q = httpx.QueryParams("a=123") + q = q.remove("a") + assert q == httpx.QueryParams("") + """ + q = QueryParams() + q._dict = dict(self._dict) + q._dict.pop(str(key), None) + return q + + def merge(self, params: QueryParamTypes | None = None) -> QueryParams: + """ + Return a new QueryParams instance, updated with the given params. + + Usage: + + q = httpx.QueryParams("a=123") + q = q.merge({"b": "456"}) + assert q == httpx.QueryParams("a=123&b=456") + + q = httpx.QueryParams("a=123") + q = q.merge({"a": "456", "b": "789"}) + assert q == httpx.QueryParams("a=456&b=789") + """ + q = QueryParams(params) + q._dict = {**self._dict, **q._dict} + return q + + def __getitem__(self, key: typing.Any) -> str: + return self._dict[key][0] + + def __contains__(self, key: typing.Any) -> bool: + return key in self._dict + + def __iter__(self) -> typing.Iterator[typing.Any]: + return iter(self.keys()) + + def __len__(self) -> int: + return len(self._dict) + + def __bool__(self) -> bool: + return bool(self._dict) + + def __hash__(self) -> int: + return hash(str(self)) + + def __eq__(self, other: typing.Any) -> bool: + if not isinstance(other, self.__class__): + return False + return sorted(self.multi_items()) == sorted(other.multi_items()) + + def __str__(self) -> str: + return urlencode(self.multi_items()) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + query_string = str(self) + return f"{class_name}({query_string!r})" + + def update(self, params: QueryParamTypes | None = None) -> None: + raise RuntimeError( + "QueryParams are immutable since 0.18.0. " + "Use `q = q.merge(...)` to create an updated copy." + ) + + def __setitem__(self, key: str, value: str) -> None: + raise RuntimeError( + "QueryParams are immutable since 0.18.0. " + "Use `q = q.set(key, value)` to create an updated copy." + ) diff --git a/venv/lib/python3.11/site-packages/httpx/_utils.py b/venv/lib/python3.11/site-packages/httpx/_utils.py new file mode 100644 index 0000000..7fe827d --- /dev/null +++ b/venv/lib/python3.11/site-packages/httpx/_utils.py @@ -0,0 +1,242 @@ +from __future__ import annotations + +import ipaddress +import os +import re +import typing +from urllib.request import getproxies + +from ._types import PrimitiveData + +if typing.TYPE_CHECKING: # pragma: no cover + from ._urls import URL + + +def primitive_value_to_str(value: PrimitiveData) -> str: + """ + Coerce a primitive data type into a string value. + + Note that we prefer JSON-style 'true'/'false' for boolean values here.
+ """ + if value is True: + return "true" + elif value is False: + return "false" + elif value is None: + return "" + return str(value) + + +def get_environment_proxies() -> dict[str, str | None]: + """Gets proxy information from the environment""" + + # urllib.request.getproxies() falls back on System + # Registry and Config for proxies on Windows and macOS. + # We don't want to propagate non-HTTP proxies into + # our configuration such as 'TRAVIS_APT_PROXY'. + proxy_info = getproxies() + mounts: dict[str, str | None] = {} + + for scheme in ("http", "https", "all"): + if proxy_info.get(scheme): + hostname = proxy_info[scheme] + mounts[f"{scheme}://"] = ( + hostname if "://" in hostname else f"http://{hostname}" + ) + + no_proxy_hosts = [host.strip() for host in proxy_info.get("no", "").split(",")] + for hostname in no_proxy_hosts: + # See https://curl.haxx.se/libcurl/c/CURLOPT_NOPROXY.html for details + # on how names in `NO_PROXY` are handled. + if hostname == "*": + # If NO_PROXY=* is used or if "*" occurs as any one of the comma + # separated hostnames, then we should just bypass any information + # from HTTP_PROXY, HTTPS_PROXY, ALL_PROXY, and always ignore + # proxies. + return {} + elif hostname: + # NO_PROXY=.google.com is marked as "all://*.google.com, + # which disables "www.google.com" but not "google.com" + # NO_PROXY=google.com is marked as "all://*google.com, + # which disables "www.google.com" and "google.com". + # (But not "wwwgoogle.com") + # NO_PROXY can include domains, IPv6, IPv4 addresses and "localhost" + # NO_PROXY=example.com,::1,localhost,192.168.0.0/16 + if "://" in hostname: + mounts[hostname] = None + elif is_ipv4_hostname(hostname): + mounts[f"all://{hostname}"] = None + elif is_ipv6_hostname(hostname): + mounts[f"all://[{hostname}]"] = None + elif hostname.lower() == "localhost": + mounts[f"all://{hostname}"] = None + else: + mounts[f"all://*{hostname}"] = None + + return mounts + + +def to_bytes(value: str | bytes, encoding: str = "utf-8") -> bytes: + return value.encode(encoding) if isinstance(value, str) else value + + +def to_str(value: str | bytes, encoding: str = "utf-8") -> str: + return value if isinstance(value, str) else value.decode(encoding) + + +def to_bytes_or_str(value: str, match_type_of: typing.AnyStr) -> typing.AnyStr: + return value if isinstance(match_type_of, str) else value.encode() + + +def unquote(value: str) -> str: + return value[1:-1] if value[0] == value[-1] == '"' else value + + +def peek_filelike_length(stream: typing.Any) -> int | None: + """ + Given a file-like stream object, return its length in number of bytes + without reading it into memory. + """ + try: + # Is it an actual file? + fd = stream.fileno() + # Yup, seems to be an actual file. + length = os.fstat(fd).st_size + except (AttributeError, OSError): + # No... Maybe it's something that supports random access, like `io.BytesIO`? + try: + # Assuming so, go to end of stream to figure out its length, + # then put it back in place. + offset = stream.tell() + length = stream.seek(0, os.SEEK_END) + stream.seek(offset) + except (AttributeError, OSError): + # Not even that? Sorry, we're doomed... + return None + + return length + + +class URLPattern: + """ + A utility class currently used for making lookups against proxy keys... + + # Wildcard matching... + >>> pattern = URLPattern("all://") + >>> pattern.matches(httpx.URL("http://example.com")) + True + + # Witch scheme matching... 
+ >>> pattern = URLPattern("https://") + >>> pattern.matches(httpx.URL("https://example.com")) + True + >>> pattern.matches(httpx.URL("http://example.com")) + False + + # With domain matching... + >>> pattern = URLPattern("https://example.com") + >>> pattern.matches(httpx.URL("https://example.com")) + True + >>> pattern.matches(httpx.URL("http://example.com")) + False + >>> pattern.matches(httpx.URL("https://other.com")) + False + + # Wildcard scheme, with domain matching... + >>> pattern = URLPattern("all://example.com") + >>> pattern.matches(httpx.URL("https://example.com")) + True + >>> pattern.matches(httpx.URL("http://example.com")) + True + >>> pattern.matches(httpx.URL("https://other.com")) + False + + # With port matching... + >>> pattern = URLPattern("https://example.com:1234") + >>> pattern.matches(httpx.URL("https://example.com:1234")) + True + >>> pattern.matches(httpx.URL("https://example.com")) + False + """ + + def __init__(self, pattern: str) -> None: + from ._urls import URL + + if pattern and ":" not in pattern: + raise ValueError( + f"Proxy keys should use proper URL forms rather " + f"than plain scheme strings. " + f'Instead of "{pattern}", use "{pattern}://"' + ) + + url = URL(pattern) + self.pattern = pattern + self.scheme = "" if url.scheme == "all" else url.scheme + self.host = "" if url.host == "*" else url.host + self.port = url.port + if not url.host or url.host == "*": + self.host_regex: typing.Pattern[str] | None = None + elif url.host.startswith("*."): + # *.example.com should match "www.example.com", but not "example.com" + domain = re.escape(url.host[2:]) + self.host_regex = re.compile(f"^.+\\.{domain}$") + elif url.host.startswith("*"): + # *example.com should match "www.example.com" and "example.com" + domain = re.escape(url.host[1:]) + self.host_regex = re.compile(f"^(.+\\.)?{domain}$") + else: + # example.com should match "example.com" but not "www.example.com" + domain = re.escape(url.host) + self.host_regex = re.compile(f"^{domain}$") + + def matches(self, other: URL) -> bool: + if self.scheme and self.scheme != other.scheme: + return False + if ( + self.host + and self.host_regex is not None + and not self.host_regex.match(other.host) + ): + return False + if self.port is not None and self.port != other.port: + return False + return True + + @property + def priority(self) -> tuple[int, int, int]: + """ + The priority allows URLPattern instances to be sortable, so that + we can match from most specific to least specific. + """ + # URLs with a port should take priority over URLs without a port. + port_priority = 0 if self.port is not None else 1 + # Longer hostnames should match first. + host_priority = -len(self.host) + # Longer schemes should match first. 
+ scheme_priority = -len(self.scheme) + return (port_priority, host_priority, scheme_priority) + + def __hash__(self) -> int: + return hash(self.pattern) + + def __lt__(self, other: URLPattern) -> bool: + return self.priority < other.priority + + def __eq__(self, other: typing.Any) -> bool: + return isinstance(other, URLPattern) and self.pattern == other.pattern + + +def is_ipv4_hostname(hostname: str) -> bool: + try: + ipaddress.IPv4Address(hostname.split("/")[0]) + except Exception: + return False + return True + + +def is_ipv6_hostname(hostname: str) -> bool: + try: + ipaddress.IPv6Address(hostname.split("/")[0]) + except Exception: + return False + return True diff --git a/venv/lib/python3.11/site-packages/httpx/py.typed b/venv/lib/python3.11/site-packages/httpx/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.11/site-packages/idna-3.10.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/idna-3.10.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.11/site-packages/idna-3.10.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.11/site-packages/idna-3.10.dist-info/LICENSE.md b/venv/lib/python3.11/site-packages/idna-3.10.dist-info/LICENSE.md new file mode 100644 index 0000000..19b6b45 --- /dev/null +++ b/venv/lib/python3.11/site-packages/idna-3.10.dist-info/LICENSE.md @@ -0,0 +1,31 @@ +BSD 3-Clause License + +Copyright (c) 2013-2024, Kim Davies and contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
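The `URLPattern` class and its `priority` ordering above are the building blocks httpx uses for proxy-mount selection. The following is a minimal sketch of that idea rather than httpx's actual client code: it assumes only the private `httpx._utils.URLPattern` class shown in this diff, and the proxy names are illustrative.

.. code-block:: python

    import httpx
    from httpx._utils import URLPattern  # private API, defined above

    # Candidate mounts, keyed by pattern. sorted() uses URLPattern.__lt__,
    # i.e. the (port, host, scheme) priority tuple defined above, so the
    # most specific pattern is tried first.
    mounts = {
        URLPattern("all://"): "default-proxy",
        URLPattern("https://"): "https-proxy",
        URLPattern("https://example.com"): "example-proxy",
    }

    url = httpx.URL("https://example.com/path")
    for pattern in sorted(mounts):
        if pattern.matches(url):
            print(mounts[pattern])  # -> example-proxy
            break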
diff --git a/venv/lib/python3.11/site-packages/idna-3.10.dist-info/METADATA b/venv/lib/python3.11/site-packages/idna-3.10.dist-info/METADATA new file mode 100644 index 0000000..c42623e --- /dev/null +++ b/venv/lib/python3.11/site-packages/idna-3.10.dist-info/METADATA @@ -0,0 +1,250 @@ +Metadata-Version: 2.1 +Name: idna +Version: 3.10 +Summary: Internationalized Domain Names in Applications (IDNA) +Author-email: Kim Davies +Requires-Python: >=3.6 +Description-Content-Type: text/x-rst +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: Name Service (DNS) +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Utilities +Requires-Dist: ruff >= 0.6.2 ; extra == "all" +Requires-Dist: mypy >= 1.11.2 ; extra == "all" +Requires-Dist: pytest >= 8.3.2 ; extra == "all" +Requires-Dist: flake8 >= 7.1.1 ; extra == "all" +Project-URL: Changelog, https://github.com/kjd/idna/blob/master/HISTORY.rst +Project-URL: Issue tracker, https://github.com/kjd/idna/issues +Project-URL: Source, https://github.com/kjd/idna +Provides-Extra: all + +Internationalized Domain Names in Applications (IDNA) +===================================================== + +Support for the Internationalized Domain Names in +Applications (IDNA) protocol as specified in `RFC 5891 +`_. This is the latest version of +the protocol and is sometimes referred to as “IDNA 2008”. + +This library also provides support for Unicode Technical +Standard 46, `Unicode IDNA Compatibility Processing +`_. + +This acts as a suitable replacement for the “encodings.idna” +module that comes with the Python standard library, but which +only supports the older superseded IDNA specification (`RFC 3490 +`_). + +Basic functions are simply executed: + +.. code-block:: pycon + + >>> import idna + >>> idna.encode('ドメイン.テスト') + b'xn--eckwd4c7c.xn--zckzah' + >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) + ドメイン.テスト + + +Installation +------------ + +This package is available for installation from PyPI: + +.. code-block:: bash + + $ python3 -m pip install idna + + +Usage +----- + +For typical usage, the ``encode`` and ``decode`` functions will take a +domain name argument and perform a conversion to A-labels or U-labels +respectively. + +.. code-block:: pycon + + >>> import idna + >>> idna.encode('ドメイン.テスト') + b'xn--eckwd4c7c.xn--zckzah' + >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) + ドメイン.テスト + +You may use the codec encoding and decoding methods using the +``idna.codec`` module: + +.. 
code-block:: pycon + + >>> import idna.codec + >>> print('домен.испытание'.encode('idna2008')) + b'xn--d1acufc.xn--80akhbyknj4f' + >>> print(b'xn--d1acufc.xn--80akhbyknj4f'.decode('idna2008')) + домен.испытание + +Conversions can be applied at a per-label basis using the ``ulabel`` or +``alabel`` functions if necessary: + +.. code-block:: pycon + + >>> idna.alabel('测试') + b'xn--0zwm56d' + +Compatibility Mapping (UTS #46) ++++++++++++++++++++++++++++++++ + +As described in `RFC 5895 `_, the +IDNA specification does not normalize input from different potential +ways a user may input a domain name. This functionality, known as +a “mapping”, is considered by the specification to be a local +user-interface issue distinct from IDNA conversion functionality. + +This library provides one such mapping that was developed by the +Unicode Consortium. Known as `Unicode IDNA Compatibility Processing +`_, it provides for both a regular +mapping for typical applications, as well as a transitional mapping to +help migrate from older IDNA 2003 applications. Strings are +preprocessed according to Section 4.4 “Preprocessing for IDNA2008” +prior to the IDNA operations. + +For example, “Königsgäßchen” is not a permissible label as *LATIN +CAPITAL LETTER K* is not allowed (nor are capital letters in general). +UTS 46 will convert this into lower case prior to applying the IDNA +conversion. + +.. code-block:: pycon + + >>> import idna + >>> idna.encode('Königsgäßchen') + ... + idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed + >>> idna.encode('Königsgäßchen', uts46=True) + b'xn--knigsgchen-b4a3dun' + >>> print(idna.decode('xn--knigsgchen-b4a3dun')) + königsgäßchen + +Transitional processing provides conversions to help transition from +the older 2003 standard to the current standard. For example, in the +original IDNA specification, the *LATIN SMALL LETTER SHARP S* (ß) was +converted into two *LATIN SMALL LETTER S* (ss), whereas in the current +IDNA specification this conversion is not performed. + +.. code-block:: pycon + + >>> idna.encode('Königsgäßchen', uts46=True, transitional=True) + 'xn--knigsgsschen-lcb0w' + +Implementers should use transitional processing with caution, only in +rare cases where conversion from legacy labels to current labels must be +performed (i.e. IDNA implementations that pre-date 2008). For typical +applications that just need to convert labels, transitional processing +is unlikely to be beneficial and could produce unexpected incompatible +results. + +``encodings.idna`` Compatibility +++++++++++++++++++++++++++++++++ + +Function calls from the Python built-in ``encodings.idna`` module are +mapped to their IDNA 2008 equivalents using the ``idna.compat`` module. +Simply substitute the ``import`` clause in your code to refer to the new +module name. + +Exceptions +---------- + +All errors raised during the conversion following the specification +should raise an exception derived from the ``idna.IDNAError`` base +class. + +More specific exceptions that may be generated as ``idna.IDNABidiError`` +when the error reflects an illegal combination of left-to-right and +right-to-left characters in a label; ``idna.InvalidCodepoint`` when +a specific codepoint is an illegal character in an IDN label (i.e. +INVALID); and ``idna.InvalidCodepointContext`` when the codepoint is +illegal based on its positional context (i.e. it is CONTEXTO or CONTEXTJ +but the contextual requirements are not satisfied.) 
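A short sketch of how this hierarchy is used in practice: since the specific
exceptions all derive from ``idna.IDNAError``, catching the base class handles
every failure mode, while the exception type still identifies the cause. The
sample domains below are illustrative.

.. code-block:: python

    import idna

    for domain in ("ドメイン.テスト", "Königsgäßchen", "a..b"):
        try:
            print(idna.encode(domain))
        except idna.IDNAError as exc:
            # IDNABidiError, InvalidCodepoint and InvalidCodepointContext
            # all subclass IDNAError, so one handler covers them all.
            print(f"{domain!r}: {type(exc).__name__}: {exc}")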
+ +Building and Diagnostics +------------------------ + +The IDNA and UTS 46 functionality relies upon pre-calculated lookup +tables for performance. These tables are derived from computing against +eligibility criteria in the respective standards. These tables are +computed using the command-line script ``tools/idna-data``. + +This tool will fetch relevant codepoint data from the Unicode repository +and perform the required calculations to identify eligibility. There are +three main modes: + +* ``idna-data make-libdata``. Generates ``idnadata.py`` and + ``uts46data.py``, the pre-calculated lookup tables used for IDNA and + UTS 46 conversions. Implementers who wish to track this library against + a different Unicode version may use this tool to manually generate a + different version of the ``idnadata.py`` and ``uts46data.py`` files. + +* ``idna-data make-table``. Generate a table of the IDNA disposition + (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix + B.1 of RFC 5892 and the pre-computed tables published by `IANA + `_. + +* ``idna-data U+0061``. Prints debugging output on the various + properties associated with an individual Unicode codepoint (in this + case, U+0061), that are used to assess the IDNA and UTS 46 status of a + codepoint. This is helpful in debugging or analysis. + +The tool accepts a number of arguments, described using ``idna-data +-h``. Most notably, the ``--version`` argument allows the specification +of the version of Unicode to be used in computing the table data. For +example, ``idna-data --version 9.0.0 make-libdata`` will generate +library data against Unicode 9.0.0. + + +Additional Notes +---------------- + +* **Packages**. The latest tagged release version is published in the + `Python Package Index `_. + +* **Version support**. This library supports Python 3.6 and higher. + As this library serves as a low-level toolkit for a variety of + applications, many of which strive for broad compatibility with older + Python versions, there is no rush to remove older interpreter support. + Removing support for older versions should be well justified in that the + maintenance burden has become too high. + +* **Python 2**. Python 2 is supported by version 2.x of this library. + Use "idna<3" in your requirements file if you need this library for + a Python 2 application. Be advised that these versions are no longer + actively developed. + +* **Testing**. The library has a test suite based on each rule of the + IDNA specification, as well as tests that are provided as part of the + Unicode Technical Standard 46, `Unicode IDNA Compatibility Processing + `_. + +* **Emoji**. It is an occasional request to support emoji domains in + this library. Encoding of symbols like emoji is expressly prohibited by + the technical standard IDNA 2008 and emoji domains are broadly phased + out across the domain industry due to associated security risks. For + now, applications that need to support these non-compliant labels + may wish to consider trying the encode/decode operation in this library + first, and then falling back to using `encodings.idna`. See `the Github + project `_ for more discussion. 
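As a rough sketch of the fallback suggested in the emoji note above, an
application might attempt the strict IDNA 2008 conversion first and only then
fall back to the legacy ``encodings.idna`` codec. The ``to_ascii`` helper name
is chosen here purely for illustration, and real applications should weigh the
security caveats mentioned in that note.

.. code-block:: python

    import encodings.idna
    import idna

    def to_ascii(domain: str) -> bytes:
        try:
            # Strict, standards-compliant IDNA 2008 path.
            return idna.encode(domain, uts46=True)
        except idna.IDNAError:
            # Legacy IDNA 2003 fallback; ToASCII converts one label at a time.
            return b".".join(
                encodings.idna.ToASCII(label) for label in domain.split(".")
            )

    print(to_ascii("example.com"))  # b'example.com'
    print(to_ascii("i❤.ws"))        # emoji label, handled by the legacy path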
+ diff --git a/venv/lib/python3.11/site-packages/idna-3.10.dist-info/RECORD b/venv/lib/python3.11/site-packages/idna-3.10.dist-info/RECORD new file mode 100644 index 0000000..9ef9fa8 --- /dev/null +++ b/venv/lib/python3.11/site-packages/idna-3.10.dist-info/RECORD @@ -0,0 +1,22 @@ +idna-3.10.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +idna-3.10.dist-info/LICENSE.md,sha256=pZ8LDvNjWHQQmkRhykT_enDVBpboFHZ7-vch1Mmw2w8,1541 +idna-3.10.dist-info/METADATA,sha256=URR5ZyDfQ1PCEGhkYoojqfi2Ra0tau2--lhwG4XSfjI,10158 +idna-3.10.dist-info/RECORD,, +idna-3.10.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 +idna/__init__.py,sha256=MPqNDLZbXqGaNdXxAFhiqFPKEQXju2jNQhCey6-5eJM,868 +idna/__pycache__/__init__.cpython-311.pyc,, +idna/__pycache__/codec.cpython-311.pyc,, +idna/__pycache__/compat.cpython-311.pyc,, +idna/__pycache__/core.cpython-311.pyc,, +idna/__pycache__/idnadata.cpython-311.pyc,, +idna/__pycache__/intranges.cpython-311.pyc,, +idna/__pycache__/package_data.cpython-311.pyc,, +idna/__pycache__/uts46data.cpython-311.pyc,, +idna/codec.py,sha256=PEew3ItwzjW4hymbasnty2N2OXvNcgHB-JjrBuxHPYY,3422 +idna/compat.py,sha256=RzLy6QQCdl9784aFhb2EX9EKGCJjg0P3PilGdeXXcx8,316 +idna/core.py,sha256=YJYyAMnwiQEPjVC4-Fqu_p4CJ6yKKuDGmppBNQNQpFs,13239 +idna/idnadata.py,sha256=W30GcIGvtOWYwAjZj4ZjuouUutC6ffgNuyjJy7fZ-lo,78306 +idna/intranges.py,sha256=amUtkdhYcQG8Zr-CoMM_kVRacxkivC1WgxN1b63KKdU,1898 +idna/package_data.py,sha256=q59S3OXsc5VI8j6vSD0sGBMyk6zZ4vWFREE88yCJYKs,21 +idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +idna/uts46data.py,sha256=rt90K9J40gUSwppDPCrhjgi5AA6pWM65dEGRSf6rIhM,239289 diff --git a/venv/lib/python3.11/site-packages/idna-3.10.dist-info/WHEEL b/venv/lib/python3.11/site-packages/idna-3.10.dist-info/WHEEL new file mode 100644 index 0000000..3b5e64b --- /dev/null +++ b/venv/lib/python3.11/site-packages/idna-3.10.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.9.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.11/site-packages/idna/__init__.py b/venv/lib/python3.11/site-packages/idna/__init__.py new file mode 100644 index 0000000..cfdc030 --- /dev/null +++ b/venv/lib/python3.11/site-packages/idna/__init__.py @@ -0,0 +1,45 @@ +from .core import ( + IDNABidiError, + IDNAError, + InvalidCodepoint, + InvalidCodepointContext, + alabel, + check_bidi, + check_hyphen_ok, + check_initial_combiner, + check_label, + check_nfc, + decode, + encode, + ulabel, + uts46_remap, + valid_contextj, + valid_contexto, + valid_label_length, + valid_string_length, +) +from .intranges import intranges_contain +from .package_data import __version__ + +__all__ = [ + "__version__", + "IDNABidiError", + "IDNAError", + "InvalidCodepoint", + "InvalidCodepointContext", + "alabel", + "check_bidi", + "check_hyphen_ok", + "check_initial_combiner", + "check_label", + "check_nfc", + "decode", + "encode", + "intranges_contain", + "ulabel", + "uts46_remap", + "valid_contextj", + "valid_contexto", + "valid_label_length", + "valid_string_length", +] diff --git a/venv/lib/python3.11/site-packages/idna/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/idna/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..3917f03 Binary files /dev/null and b/venv/lib/python3.11/site-packages/idna/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/idna/__pycache__/codec.cpython-311.pyc 
b/venv/lib/python3.11/site-packages/idna/__pycache__/codec.cpython-311.pyc new file mode 100644 index 0000000..93206de Binary files /dev/null and b/venv/lib/python3.11/site-packages/idna/__pycache__/codec.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/idna/__pycache__/compat.cpython-311.pyc b/venv/lib/python3.11/site-packages/idna/__pycache__/compat.cpython-311.pyc new file mode 100644 index 0000000..0a24e68 Binary files /dev/null and b/venv/lib/python3.11/site-packages/idna/__pycache__/compat.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/idna/__pycache__/core.cpython-311.pyc b/venv/lib/python3.11/site-packages/idna/__pycache__/core.cpython-311.pyc new file mode 100644 index 0000000..b7980ef Binary files /dev/null and b/venv/lib/python3.11/site-packages/idna/__pycache__/core.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/idna/__pycache__/idnadata.cpython-311.pyc b/venv/lib/python3.11/site-packages/idna/__pycache__/idnadata.cpython-311.pyc new file mode 100644 index 0000000..320a81b Binary files /dev/null and b/venv/lib/python3.11/site-packages/idna/__pycache__/idnadata.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/idna/__pycache__/intranges.cpython-311.pyc b/venv/lib/python3.11/site-packages/idna/__pycache__/intranges.cpython-311.pyc new file mode 100644 index 0000000..ccb9f59 Binary files /dev/null and b/venv/lib/python3.11/site-packages/idna/__pycache__/intranges.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/idna/__pycache__/package_data.cpython-311.pyc b/venv/lib/python3.11/site-packages/idna/__pycache__/package_data.cpython-311.pyc new file mode 100644 index 0000000..ad89381 Binary files /dev/null and b/venv/lib/python3.11/site-packages/idna/__pycache__/package_data.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/idna/__pycache__/uts46data.cpython-311.pyc b/venv/lib/python3.11/site-packages/idna/__pycache__/uts46data.cpython-311.pyc new file mode 100644 index 0000000..aee1df9 Binary files /dev/null and b/venv/lib/python3.11/site-packages/idna/__pycache__/uts46data.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/idna/codec.py b/venv/lib/python3.11/site-packages/idna/codec.py new file mode 100644 index 0000000..913abfd --- /dev/null +++ b/venv/lib/python3.11/site-packages/idna/codec.py @@ -0,0 +1,122 @@ +import codecs +import re +from typing import Any, Optional, Tuple + +from .core import IDNAError, alabel, decode, encode, ulabel + +_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]") + + +class Codec(codecs.Codec): + def encode(self, data: str, errors: str = "strict") -> Tuple[bytes, int]: + if errors != "strict": + raise IDNAError('Unsupported error handling "{}"'.format(errors)) + + if not data: + return b"", 0 + + return encode(data), len(data) + + def decode(self, data: bytes, errors: str = "strict") -> Tuple[str, int]: + if errors != "strict": + raise IDNAError('Unsupported error handling "{}"'.format(errors)) + + if not data: + return "", 0 + + return decode(data), len(data) + + +class IncrementalEncoder(codecs.BufferedIncrementalEncoder): + def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[bytes, int]: + if errors != "strict": + raise IDNAError('Unsupported error handling "{}"'.format(errors)) + + if not data: + return b"", 0 + + labels = _unicode_dots_re.split(data) + trailing_dot = b"" + if labels: + if not labels[-1]: + trailing_dot = b"." 
+ del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = b"." + + result = [] + size = 0 + for label in labels: + result.append(alabel(label)) + if size: + size += 1 + size += len(label) + + # Join with U+002E + result_bytes = b".".join(result) + trailing_dot + size += len(trailing_dot) + return result_bytes, size + + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, data: Any, errors: str, final: bool) -> Tuple[str, int]: + if errors != "strict": + raise IDNAError('Unsupported error handling "{}"'.format(errors)) + + if not data: + return ("", 0) + + if not isinstance(data, str): + data = str(data, "ascii") + + labels = _unicode_dots_re.split(data) + trailing_dot = "" + if labels: + if not labels[-1]: + trailing_dot = "." + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = "." + + result = [] + size = 0 + for label in labels: + result.append(ulabel(label)) + if size: + size += 1 + size += len(label) + + result_str = ".".join(result) + trailing_dot + size += len(trailing_dot) + return (result_str, size) + + +class StreamWriter(Codec, codecs.StreamWriter): + pass + + +class StreamReader(Codec, codecs.StreamReader): + pass + + +def search_function(name: str) -> Optional[codecs.CodecInfo]: + if name != "idna2008": + return None + return codecs.CodecInfo( + name=name, + encode=Codec().encode, + decode=Codec().decode, + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamwriter=StreamWriter, + streamreader=StreamReader, + ) + + +codecs.register(search_function) diff --git a/venv/lib/python3.11/site-packages/idna/compat.py b/venv/lib/python3.11/site-packages/idna/compat.py new file mode 100644 index 0000000..1df9f2a --- /dev/null +++ b/venv/lib/python3.11/site-packages/idna/compat.py @@ -0,0 +1,15 @@ +from typing import Any, Union + +from .core import decode, encode + + +def ToASCII(label: str) -> bytes: + return encode(label) + + +def ToUnicode(label: Union[bytes, bytearray]) -> str: + return decode(label) + + +def nameprep(s: Any) -> None: + raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol") diff --git a/venv/lib/python3.11/site-packages/idna/core.py b/venv/lib/python3.11/site-packages/idna/core.py new file mode 100644 index 0000000..9115f12 --- /dev/null +++ b/venv/lib/python3.11/site-packages/idna/core.py @@ -0,0 +1,437 @@ +import bisect +import re +import unicodedata +from typing import Optional, Union + +from . 
import idnadata +from .intranges import intranges_contain + +_virama_combining_class = 9 +_alabel_prefix = b"xn--" +_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]") + + +class IDNAError(UnicodeError): + """Base exception for all IDNA-encoding related problems""" + + pass + + +class IDNABidiError(IDNAError): + """Exception when bidirectional requirements are not satisfied""" + + pass + + +class InvalidCodepoint(IDNAError): + """Exception when a disallowed or unallocated codepoint is used""" + + pass + + +class InvalidCodepointContext(IDNAError): + """Exception when the codepoint is not valid in the context it is used""" + + pass + + +def _combining_class(cp: int) -> int: + v = unicodedata.combining(chr(cp)) + if v == 0: + if not unicodedata.name(chr(cp)): + raise ValueError("Unknown character in unicodedata") + return v + + +def _is_script(cp: str, script: str) -> bool: + return intranges_contain(ord(cp), idnadata.scripts[script]) + + +def _punycode(s: str) -> bytes: + return s.encode("punycode") + + +def _unot(s: int) -> str: + return "U+{:04X}".format(s) + + +def valid_label_length(label: Union[bytes, str]) -> bool: + if len(label) > 63: + return False + return True + + +def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool: + if len(label) > (254 if trailing_dot else 253): + return False + return True + + +def check_bidi(label: str, check_ltr: bool = False) -> bool: + # Bidi rules should only be applied if string contains RTL characters + bidi_label = False + for idx, cp in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + if direction == "": + # String likely comes from a newer version of Unicode + raise IDNABidiError("Unknown directionality in label {} at position {}".format(repr(label), idx)) + if direction in ["R", "AL", "AN"]: + bidi_label = True + if not bidi_label and not check_ltr: + return True + + # Bidi rule 1 + direction = unicodedata.bidirectional(label[0]) + if direction in ["R", "AL"]: + rtl = True + elif direction == "L": + rtl = False + else: + raise IDNABidiError("First codepoint in label {} must be directionality L, R or AL".format(repr(label))) + + valid_ending = False + number_type: Optional[str] = None + for idx, cp in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + + if rtl: + # Bidi rule 2 + if direction not in [ + "R", + "AL", + "AN", + "EN", + "ES", + "CS", + "ET", + "ON", + "BN", + "NSM", + ]: + raise IDNABidiError("Invalid direction for codepoint at position {} in a right-to-left label".format(idx)) + # Bidi rule 3 + if direction in ["R", "AL", "EN", "AN"]: + valid_ending = True + elif direction != "NSM": + valid_ending = False + # Bidi rule 4 + if direction in ["AN", "EN"]: + if not number_type: + number_type = direction + else: + if number_type != direction: + raise IDNABidiError("Can not mix numeral types in a right-to-left label") + else: + # Bidi rule 5 + if direction not in ["L", "EN", "ES", "CS", "ET", "ON", "BN", "NSM"]: + raise IDNABidiError("Invalid direction for codepoint at position {} in a left-to-right label".format(idx)) + # Bidi rule 6 + if direction in ["L", "EN"]: + valid_ending = True + elif direction != "NSM": + valid_ending = False + + if not valid_ending: + raise IDNABidiError("Label ends with illegal codepoint directionality") + + return True + + +def check_initial_combiner(label: str) -> bool: + if unicodedata.category(label[0])[0] == "M": + raise IDNAError("Label begins with an illegal combining character") + return True + + +def check_hyphen_ok(label: str) -> bool: 
+ if label[2:4] == "--": + raise IDNAError("Label has disallowed hyphens in 3rd and 4th position") + if label[0] == "-" or label[-1] == "-": + raise IDNAError("Label must not start or end with a hyphen") + return True + + +def check_nfc(label: str) -> None: + if unicodedata.normalize("NFC", label) != label: + raise IDNAError("Label must be in Normalization Form C") + + +def valid_contextj(label: str, pos: int) -> bool: + cp_value = ord(label[pos]) + + if cp_value == 0x200C: + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + + ok = False + for i in range(pos - 1, -1, -1): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord("T"): + continue + elif joining_type in [ord("L"), ord("D")]: + ok = True + break + else: + break + + if not ok: + return False + + ok = False + for i in range(pos + 1, len(label)): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord("T"): + continue + elif joining_type in [ord("R"), ord("D")]: + ok = True + break + else: + break + return ok + + if cp_value == 0x200D: + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + return False + + else: + return False + + +def valid_contexto(label: str, pos: int, exception: bool = False) -> bool: + cp_value = ord(label[pos]) + + if cp_value == 0x00B7: + if 0 < pos < len(label) - 1: + if ord(label[pos - 1]) == 0x006C and ord(label[pos + 1]) == 0x006C: + return True + return False + + elif cp_value == 0x0375: + if pos < len(label) - 1 and len(label) > 1: + return _is_script(label[pos + 1], "Greek") + return False + + elif cp_value == 0x05F3 or cp_value == 0x05F4: + if pos > 0: + return _is_script(label[pos - 1], "Hebrew") + return False + + elif cp_value == 0x30FB: + for cp in label: + if cp == "\u30fb": + continue + if _is_script(cp, "Hiragana") or _is_script(cp, "Katakana") or _is_script(cp, "Han"): + return True + return False + + elif 0x660 <= cp_value <= 0x669: + for cp in label: + if 0x6F0 <= ord(cp) <= 0x06F9: + return False + return True + + elif 0x6F0 <= cp_value <= 0x6F9: + for cp in label: + if 0x660 <= ord(cp) <= 0x0669: + return False + return True + + return False + + +def check_label(label: Union[str, bytes, bytearray]) -> None: + if isinstance(label, (bytes, bytearray)): + label = label.decode("utf-8") + if len(label) == 0: + raise IDNAError("Empty Label") + + check_nfc(label) + check_hyphen_ok(label) + check_initial_combiner(label) + + for pos, cp in enumerate(label): + cp_value = ord(cp) + if intranges_contain(cp_value, idnadata.codepoint_classes["PVALID"]): + continue + elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTJ"]): + try: + if not valid_contextj(label, pos): + raise InvalidCodepointContext( + "Joiner {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label)) + ) + except ValueError: + raise IDNAError( + "Unknown codepoint adjacent to joiner {} at position {} in {}".format( + _unot(cp_value), pos + 1, repr(label) + ) + ) + elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTO"]): + if not valid_contexto(label, pos): + raise InvalidCodepointContext( + "Codepoint {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label)) + ) + else: + raise InvalidCodepoint( + "Codepoint {} at position {} of {} not allowed".format(_unot(cp_value), pos + 1, repr(label)) + ) + + check_bidi(label) + + +def alabel(label: str) -> bytes: + try: + label_bytes = label.encode("ascii") + 
ulabel(label_bytes) + if not valid_label_length(label_bytes): + raise IDNAError("Label too long") + return label_bytes + except UnicodeEncodeError: + pass + + check_label(label) + label_bytes = _alabel_prefix + _punycode(label) + + if not valid_label_length(label_bytes): + raise IDNAError("Label too long") + + return label_bytes + + +def ulabel(label: Union[str, bytes, bytearray]) -> str: + if not isinstance(label, (bytes, bytearray)): + try: + label_bytes = label.encode("ascii") + except UnicodeEncodeError: + check_label(label) + return label + else: + label_bytes = label + + label_bytes = label_bytes.lower() + if label_bytes.startswith(_alabel_prefix): + label_bytes = label_bytes[len(_alabel_prefix) :] + if not label_bytes: + raise IDNAError("Malformed A-label, no Punycode eligible content found") + if label_bytes.decode("ascii")[-1] == "-": + raise IDNAError("A-label must not end with a hyphen") + else: + check_label(label_bytes) + return label_bytes.decode("ascii") + + try: + label = label_bytes.decode("punycode") + except UnicodeError: + raise IDNAError("Invalid A-label") + check_label(label) + return label + + +def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str: + """Re-map the characters in the string according to UTS46 processing.""" + from .uts46data import uts46data + + output = "" + + for pos, char in enumerate(domain): + code_point = ord(char) + try: + uts46row = uts46data[code_point if code_point < 256 else bisect.bisect_left(uts46data, (code_point, "Z")) - 1] + status = uts46row[1] + replacement: Optional[str] = None + if len(uts46row) == 3: + replacement = uts46row[2] + if ( + status == "V" + or (status == "D" and not transitional) + or (status == "3" and not std3_rules and replacement is None) + ): + output += char + elif replacement is not None and ( + status == "M" or (status == "3" and not std3_rules) or (status == "D" and transitional) + ): + output += replacement + elif status != "I": + raise IndexError() + except IndexError: + raise InvalidCodepoint( + "Codepoint {} not allowed at position {} in {}".format(_unot(code_point), pos + 1, repr(domain)) + ) + + return unicodedata.normalize("NFC", output) + + +def encode( + s: Union[str, bytes, bytearray], + strict: bool = False, + uts46: bool = False, + std3_rules: bool = False, + transitional: bool = False, +) -> bytes: + if not isinstance(s, str): + try: + s = str(s, "ascii") + except UnicodeDecodeError: + raise IDNAError("should pass a unicode string to the function rather than a byte string.") + if uts46: + s = uts46_remap(s, std3_rules, transitional) + trailing_dot = False + result = [] + if strict: + labels = s.split(".") + else: + labels = _unicode_dots_re.split(s) + if not labels or labels == [""]: + raise IDNAError("Empty domain") + if labels[-1] == "": + del labels[-1] + trailing_dot = True + for label in labels: + s = alabel(label) + if s: + result.append(s) + else: + raise IDNAError("Empty label") + if trailing_dot: + result.append(b"") + s = b".".join(result) + if not valid_string_length(s, trailing_dot): + raise IDNAError("Domain too long") + return s + + +def decode( + s: Union[str, bytes, bytearray], + strict: bool = False, + uts46: bool = False, + std3_rules: bool = False, +) -> str: + try: + if not isinstance(s, str): + s = str(s, "ascii") + except UnicodeDecodeError: + raise IDNAError("Invalid ASCII in A-label") + if uts46: + s = uts46_remap(s, std3_rules, False) + trailing_dot = False + result = [] + if not strict: + labels = _unicode_dots_re.split(s) + else: + 
labels = s.split(".") + if not labels or labels == [""]: + raise IDNAError("Empty domain") + if not labels[-1]: + del labels[-1] + trailing_dot = True + for label in labels: + s = ulabel(label) + if s: + result.append(s) + else: + raise IDNAError("Empty label") + if trailing_dot: + result.append("") + return ".".join(result) diff --git a/venv/lib/python3.11/site-packages/idna/idnadata.py b/venv/lib/python3.11/site-packages/idna/idnadata.py new file mode 100644 index 0000000..4be6004 --- /dev/null +++ b/venv/lib/python3.11/site-packages/idna/idnadata.py @@ -0,0 +1,4243 @@ +# This file is automatically generated by tools/idna-data + +__version__ = "15.1.0" +scripts = { + "Greek": ( + 0x37000000374, + 0x37500000378, + 0x37A0000037E, + 0x37F00000380, + 0x38400000385, + 0x38600000387, + 0x3880000038B, + 0x38C0000038D, + 0x38E000003A2, + 0x3A3000003E2, + 0x3F000000400, + 0x1D2600001D2B, + 0x1D5D00001D62, + 0x1D6600001D6B, + 0x1DBF00001DC0, + 0x1F0000001F16, + 0x1F1800001F1E, + 0x1F2000001F46, + 0x1F4800001F4E, + 0x1F5000001F58, + 0x1F5900001F5A, + 0x1F5B00001F5C, + 0x1F5D00001F5E, + 0x1F5F00001F7E, + 0x1F8000001FB5, + 0x1FB600001FC5, + 0x1FC600001FD4, + 0x1FD600001FDC, + 0x1FDD00001FF0, + 0x1FF200001FF5, + 0x1FF600001FFF, + 0x212600002127, + 0xAB650000AB66, + 0x101400001018F, + 0x101A0000101A1, + 0x1D2000001D246, + ), + "Han": ( + 0x2E8000002E9A, + 0x2E9B00002EF4, + 0x2F0000002FD6, + 0x300500003006, + 0x300700003008, + 0x30210000302A, + 0x30380000303C, + 0x340000004DC0, + 0x4E000000A000, + 0xF9000000FA6E, + 0xFA700000FADA, + 0x16FE200016FE4, + 0x16FF000016FF2, + 0x200000002A6E0, + 0x2A7000002B73A, + 0x2B7400002B81E, + 0x2B8200002CEA2, + 0x2CEB00002EBE1, + 0x2EBF00002EE5E, + 0x2F8000002FA1E, + 0x300000003134B, + 0x31350000323B0, + ), + "Hebrew": ( + 0x591000005C8, + 0x5D0000005EB, + 0x5EF000005F5, + 0xFB1D0000FB37, + 0xFB380000FB3D, + 0xFB3E0000FB3F, + 0xFB400000FB42, + 0xFB430000FB45, + 0xFB460000FB50, + ), + "Hiragana": ( + 0x304100003097, + 0x309D000030A0, + 0x1B0010001B120, + 0x1B1320001B133, + 0x1B1500001B153, + 0x1F2000001F201, + ), + "Katakana": ( + 0x30A1000030FB, + 0x30FD00003100, + 0x31F000003200, + 0x32D0000032FF, + 0x330000003358, + 0xFF660000FF70, + 0xFF710000FF9E, + 0x1AFF00001AFF4, + 0x1AFF50001AFFC, + 0x1AFFD0001AFFF, + 0x1B0000001B001, + 0x1B1200001B123, + 0x1B1550001B156, + 0x1B1640001B168, + ), +} +joining_types = { + 0xAD: 84, + 0x300: 84, + 0x301: 84, + 0x302: 84, + 0x303: 84, + 0x304: 84, + 0x305: 84, + 0x306: 84, + 0x307: 84, + 0x308: 84, + 0x309: 84, + 0x30A: 84, + 0x30B: 84, + 0x30C: 84, + 0x30D: 84, + 0x30E: 84, + 0x30F: 84, + 0x310: 84, + 0x311: 84, + 0x312: 84, + 0x313: 84, + 0x314: 84, + 0x315: 84, + 0x316: 84, + 0x317: 84, + 0x318: 84, + 0x319: 84, + 0x31A: 84, + 0x31B: 84, + 0x31C: 84, + 0x31D: 84, + 0x31E: 84, + 0x31F: 84, + 0x320: 84, + 0x321: 84, + 0x322: 84, + 0x323: 84, + 0x324: 84, + 0x325: 84, + 0x326: 84, + 0x327: 84, + 0x328: 84, + 0x329: 84, + 0x32A: 84, + 0x32B: 84, + 0x32C: 84, + 0x32D: 84, + 0x32E: 84, + 0x32F: 84, + 0x330: 84, + 0x331: 84, + 0x332: 84, + 0x333: 84, + 0x334: 84, + 0x335: 84, + 0x336: 84, + 0x337: 84, + 0x338: 84, + 0x339: 84, + 0x33A: 84, + 0x33B: 84, + 0x33C: 84, + 0x33D: 84, + 0x33E: 84, + 0x33F: 84, + 0x340: 84, + 0x341: 84, + 0x342: 84, + 0x343: 84, + 0x344: 84, + 0x345: 84, + 0x346: 84, + 0x347: 84, + 0x348: 84, + 0x349: 84, + 0x34A: 84, + 0x34B: 84, + 0x34C: 84, + 0x34D: 84, + 0x34E: 84, + 0x34F: 84, + 0x350: 84, + 0x351: 84, + 0x352: 84, + 0x353: 84, + 0x354: 84, + 0x355: 84, + 0x356: 84, + 0x357: 84, + 0x358: 84, + 0x359: 84, + 
0x35A: 84, + 0x35B: 84, + 0x35C: 84, + 0x35D: 84, + 0x35E: 84, + 0x35F: 84, + 0x360: 84, + 0x361: 84, + 0x362: 84, + 0x363: 84, + 0x364: 84, + 0x365: 84, + 0x366: 84, + 0x367: 84, + 0x368: 84, + 0x369: 84, + 0x36A: 84, + 0x36B: 84, + 0x36C: 84, + 0x36D: 84, + 0x36E: 84, + 0x36F: 84, + 0x483: 84, + 0x484: 84, + 0x485: 84, + 0x486: 84, + 0x487: 84, + 0x488: 84, + 0x489: 84, + 0x591: 84, + 0x592: 84, + 0x593: 84, + 0x594: 84, + 0x595: 84, + 0x596: 84, + 0x597: 84, + 0x598: 84, + 0x599: 84, + 0x59A: 84, + 0x59B: 84, + 0x59C: 84, + 0x59D: 84, + 0x59E: 84, + 0x59F: 84, + 0x5A0: 84, + 0x5A1: 84, + 0x5A2: 84, + 0x5A3: 84, + 0x5A4: 84, + 0x5A5: 84, + 0x5A6: 84, + 0x5A7: 84, + 0x5A8: 84, + 0x5A9: 84, + 0x5AA: 84, + 0x5AB: 84, + 0x5AC: 84, + 0x5AD: 84, + 0x5AE: 84, + 0x5AF: 84, + 0x5B0: 84, + 0x5B1: 84, + 0x5B2: 84, + 0x5B3: 84, + 0x5B4: 84, + 0x5B5: 84, + 0x5B6: 84, + 0x5B7: 84, + 0x5B8: 84, + 0x5B9: 84, + 0x5BA: 84, + 0x5BB: 84, + 0x5BC: 84, + 0x5BD: 84, + 0x5BF: 84, + 0x5C1: 84, + 0x5C2: 84, + 0x5C4: 84, + 0x5C5: 84, + 0x5C7: 84, + 0x610: 84, + 0x611: 84, + 0x612: 84, + 0x613: 84, + 0x614: 84, + 0x615: 84, + 0x616: 84, + 0x617: 84, + 0x618: 84, + 0x619: 84, + 0x61A: 84, + 0x61C: 84, + 0x620: 68, + 0x622: 82, + 0x623: 82, + 0x624: 82, + 0x625: 82, + 0x626: 68, + 0x627: 82, + 0x628: 68, + 0x629: 82, + 0x62A: 68, + 0x62B: 68, + 0x62C: 68, + 0x62D: 68, + 0x62E: 68, + 0x62F: 82, + 0x630: 82, + 0x631: 82, + 0x632: 82, + 0x633: 68, + 0x634: 68, + 0x635: 68, + 0x636: 68, + 0x637: 68, + 0x638: 68, + 0x639: 68, + 0x63A: 68, + 0x63B: 68, + 0x63C: 68, + 0x63D: 68, + 0x63E: 68, + 0x63F: 68, + 0x640: 67, + 0x641: 68, + 0x642: 68, + 0x643: 68, + 0x644: 68, + 0x645: 68, + 0x646: 68, + 0x647: 68, + 0x648: 82, + 0x649: 68, + 0x64A: 68, + 0x64B: 84, + 0x64C: 84, + 0x64D: 84, + 0x64E: 84, + 0x64F: 84, + 0x650: 84, + 0x651: 84, + 0x652: 84, + 0x653: 84, + 0x654: 84, + 0x655: 84, + 0x656: 84, + 0x657: 84, + 0x658: 84, + 0x659: 84, + 0x65A: 84, + 0x65B: 84, + 0x65C: 84, + 0x65D: 84, + 0x65E: 84, + 0x65F: 84, + 0x66E: 68, + 0x66F: 68, + 0x670: 84, + 0x671: 82, + 0x672: 82, + 0x673: 82, + 0x675: 82, + 0x676: 82, + 0x677: 82, + 0x678: 68, + 0x679: 68, + 0x67A: 68, + 0x67B: 68, + 0x67C: 68, + 0x67D: 68, + 0x67E: 68, + 0x67F: 68, + 0x680: 68, + 0x681: 68, + 0x682: 68, + 0x683: 68, + 0x684: 68, + 0x685: 68, + 0x686: 68, + 0x687: 68, + 0x688: 82, + 0x689: 82, + 0x68A: 82, + 0x68B: 82, + 0x68C: 82, + 0x68D: 82, + 0x68E: 82, + 0x68F: 82, + 0x690: 82, + 0x691: 82, + 0x692: 82, + 0x693: 82, + 0x694: 82, + 0x695: 82, + 0x696: 82, + 0x697: 82, + 0x698: 82, + 0x699: 82, + 0x69A: 68, + 0x69B: 68, + 0x69C: 68, + 0x69D: 68, + 0x69E: 68, + 0x69F: 68, + 0x6A0: 68, + 0x6A1: 68, + 0x6A2: 68, + 0x6A3: 68, + 0x6A4: 68, + 0x6A5: 68, + 0x6A6: 68, + 0x6A7: 68, + 0x6A8: 68, + 0x6A9: 68, + 0x6AA: 68, + 0x6AB: 68, + 0x6AC: 68, + 0x6AD: 68, + 0x6AE: 68, + 0x6AF: 68, + 0x6B0: 68, + 0x6B1: 68, + 0x6B2: 68, + 0x6B3: 68, + 0x6B4: 68, + 0x6B5: 68, + 0x6B6: 68, + 0x6B7: 68, + 0x6B8: 68, + 0x6B9: 68, + 0x6BA: 68, + 0x6BB: 68, + 0x6BC: 68, + 0x6BD: 68, + 0x6BE: 68, + 0x6BF: 68, + 0x6C0: 82, + 0x6C1: 68, + 0x6C2: 68, + 0x6C3: 82, + 0x6C4: 82, + 0x6C5: 82, + 0x6C6: 82, + 0x6C7: 82, + 0x6C8: 82, + 0x6C9: 82, + 0x6CA: 82, + 0x6CB: 82, + 0x6CC: 68, + 0x6CD: 82, + 0x6CE: 68, + 0x6CF: 82, + 0x6D0: 68, + 0x6D1: 68, + 0x6D2: 82, + 0x6D3: 82, + 0x6D5: 82, + 0x6D6: 84, + 0x6D7: 84, + 0x6D8: 84, + 0x6D9: 84, + 0x6DA: 84, + 0x6DB: 84, + 0x6DC: 84, + 0x6DF: 84, + 0x6E0: 84, + 0x6E1: 84, + 0x6E2: 84, + 0x6E3: 84, + 0x6E4: 84, + 0x6E7: 84, + 0x6E8: 84, + 0x6EA: 84, + 
0x6EB: 84, + 0x6EC: 84, + 0x6ED: 84, + 0x6EE: 82, + 0x6EF: 82, + 0x6FA: 68, + 0x6FB: 68, + 0x6FC: 68, + 0x6FF: 68, + 0x70F: 84, + 0x710: 82, + 0x711: 84, + 0x712: 68, + 0x713: 68, + 0x714: 68, + 0x715: 82, + 0x716: 82, + 0x717: 82, + 0x718: 82, + 0x719: 82, + 0x71A: 68, + 0x71B: 68, + 0x71C: 68, + 0x71D: 68, + 0x71E: 82, + 0x71F: 68, + 0x720: 68, + 0x721: 68, + 0x722: 68, + 0x723: 68, + 0x724: 68, + 0x725: 68, + 0x726: 68, + 0x727: 68, + 0x728: 82, + 0x729: 68, + 0x72A: 82, + 0x72B: 68, + 0x72C: 82, + 0x72D: 68, + 0x72E: 68, + 0x72F: 82, + 0x730: 84, + 0x731: 84, + 0x732: 84, + 0x733: 84, + 0x734: 84, + 0x735: 84, + 0x736: 84, + 0x737: 84, + 0x738: 84, + 0x739: 84, + 0x73A: 84, + 0x73B: 84, + 0x73C: 84, + 0x73D: 84, + 0x73E: 84, + 0x73F: 84, + 0x740: 84, + 0x741: 84, + 0x742: 84, + 0x743: 84, + 0x744: 84, + 0x745: 84, + 0x746: 84, + 0x747: 84, + 0x748: 84, + 0x749: 84, + 0x74A: 84, + 0x74D: 82, + 0x74E: 68, + 0x74F: 68, + 0x750: 68, + 0x751: 68, + 0x752: 68, + 0x753: 68, + 0x754: 68, + 0x755: 68, + 0x756: 68, + 0x757: 68, + 0x758: 68, + 0x759: 82, + 0x75A: 82, + 0x75B: 82, + 0x75C: 68, + 0x75D: 68, + 0x75E: 68, + 0x75F: 68, + 0x760: 68, + 0x761: 68, + 0x762: 68, + 0x763: 68, + 0x764: 68, + 0x765: 68, + 0x766: 68, + 0x767: 68, + 0x768: 68, + 0x769: 68, + 0x76A: 68, + 0x76B: 82, + 0x76C: 82, + 0x76D: 68, + 0x76E: 68, + 0x76F: 68, + 0x770: 68, + 0x771: 82, + 0x772: 68, + 0x773: 82, + 0x774: 82, + 0x775: 68, + 0x776: 68, + 0x777: 68, + 0x778: 82, + 0x779: 82, + 0x77A: 68, + 0x77B: 68, + 0x77C: 68, + 0x77D: 68, + 0x77E: 68, + 0x77F: 68, + 0x7A6: 84, + 0x7A7: 84, + 0x7A8: 84, + 0x7A9: 84, + 0x7AA: 84, + 0x7AB: 84, + 0x7AC: 84, + 0x7AD: 84, + 0x7AE: 84, + 0x7AF: 84, + 0x7B0: 84, + 0x7CA: 68, + 0x7CB: 68, + 0x7CC: 68, + 0x7CD: 68, + 0x7CE: 68, + 0x7CF: 68, + 0x7D0: 68, + 0x7D1: 68, + 0x7D2: 68, + 0x7D3: 68, + 0x7D4: 68, + 0x7D5: 68, + 0x7D6: 68, + 0x7D7: 68, + 0x7D8: 68, + 0x7D9: 68, + 0x7DA: 68, + 0x7DB: 68, + 0x7DC: 68, + 0x7DD: 68, + 0x7DE: 68, + 0x7DF: 68, + 0x7E0: 68, + 0x7E1: 68, + 0x7E2: 68, + 0x7E3: 68, + 0x7E4: 68, + 0x7E5: 68, + 0x7E6: 68, + 0x7E7: 68, + 0x7E8: 68, + 0x7E9: 68, + 0x7EA: 68, + 0x7EB: 84, + 0x7EC: 84, + 0x7ED: 84, + 0x7EE: 84, + 0x7EF: 84, + 0x7F0: 84, + 0x7F1: 84, + 0x7F2: 84, + 0x7F3: 84, + 0x7FA: 67, + 0x7FD: 84, + 0x816: 84, + 0x817: 84, + 0x818: 84, + 0x819: 84, + 0x81B: 84, + 0x81C: 84, + 0x81D: 84, + 0x81E: 84, + 0x81F: 84, + 0x820: 84, + 0x821: 84, + 0x822: 84, + 0x823: 84, + 0x825: 84, + 0x826: 84, + 0x827: 84, + 0x829: 84, + 0x82A: 84, + 0x82B: 84, + 0x82C: 84, + 0x82D: 84, + 0x840: 82, + 0x841: 68, + 0x842: 68, + 0x843: 68, + 0x844: 68, + 0x845: 68, + 0x846: 82, + 0x847: 82, + 0x848: 68, + 0x849: 82, + 0x84A: 68, + 0x84B: 68, + 0x84C: 68, + 0x84D: 68, + 0x84E: 68, + 0x84F: 68, + 0x850: 68, + 0x851: 68, + 0x852: 68, + 0x853: 68, + 0x854: 82, + 0x855: 68, + 0x856: 82, + 0x857: 82, + 0x858: 82, + 0x859: 84, + 0x85A: 84, + 0x85B: 84, + 0x860: 68, + 0x862: 68, + 0x863: 68, + 0x864: 68, + 0x865: 68, + 0x867: 82, + 0x868: 68, + 0x869: 82, + 0x86A: 82, + 0x870: 82, + 0x871: 82, + 0x872: 82, + 0x873: 82, + 0x874: 82, + 0x875: 82, + 0x876: 82, + 0x877: 82, + 0x878: 82, + 0x879: 82, + 0x87A: 82, + 0x87B: 82, + 0x87C: 82, + 0x87D: 82, + 0x87E: 82, + 0x87F: 82, + 0x880: 82, + 0x881: 82, + 0x882: 82, + 0x883: 67, + 0x884: 67, + 0x885: 67, + 0x886: 68, + 0x889: 68, + 0x88A: 68, + 0x88B: 68, + 0x88C: 68, + 0x88D: 68, + 0x88E: 82, + 0x898: 84, + 0x899: 84, + 0x89A: 84, + 0x89B: 84, + 0x89C: 84, + 0x89D: 84, + 0x89E: 84, + 0x89F: 84, + 0x8A0: 68, + 0x8A1: 68, + 0x8A2: 68, + 
0x8A3: 68, + 0x8A4: 68, + 0x8A5: 68, + 0x8A6: 68, + 0x8A7: 68, + 0x8A8: 68, + 0x8A9: 68, + 0x8AA: 82, + 0x8AB: 82, + 0x8AC: 82, + 0x8AE: 82, + 0x8AF: 68, + 0x8B0: 68, + 0x8B1: 82, + 0x8B2: 82, + 0x8B3: 68, + 0x8B4: 68, + 0x8B5: 68, + 0x8B6: 68, + 0x8B7: 68, + 0x8B8: 68, + 0x8B9: 82, + 0x8BA: 68, + 0x8BB: 68, + 0x8BC: 68, + 0x8BD: 68, + 0x8BE: 68, + 0x8BF: 68, + 0x8C0: 68, + 0x8C1: 68, + 0x8C2: 68, + 0x8C3: 68, + 0x8C4: 68, + 0x8C5: 68, + 0x8C6: 68, + 0x8C7: 68, + 0x8C8: 68, + 0x8CA: 84, + 0x8CB: 84, + 0x8CC: 84, + 0x8CD: 84, + 0x8CE: 84, + 0x8CF: 84, + 0x8D0: 84, + 0x8D1: 84, + 0x8D2: 84, + 0x8D3: 84, + 0x8D4: 84, + 0x8D5: 84, + 0x8D6: 84, + 0x8D7: 84, + 0x8D8: 84, + 0x8D9: 84, + 0x8DA: 84, + 0x8DB: 84, + 0x8DC: 84, + 0x8DD: 84, + 0x8DE: 84, + 0x8DF: 84, + 0x8E0: 84, + 0x8E1: 84, + 0x8E3: 84, + 0x8E4: 84, + 0x8E5: 84, + 0x8E6: 84, + 0x8E7: 84, + 0x8E8: 84, + 0x8E9: 84, + 0x8EA: 84, + 0x8EB: 84, + 0x8EC: 84, + 0x8ED: 84, + 0x8EE: 84, + 0x8EF: 84, + 0x8F0: 84, + 0x8F1: 84, + 0x8F2: 84, + 0x8F3: 84, + 0x8F4: 84, + 0x8F5: 84, + 0x8F6: 84, + 0x8F7: 84, + 0x8F8: 84, + 0x8F9: 84, + 0x8FA: 84, + 0x8FB: 84, + 0x8FC: 84, + 0x8FD: 84, + 0x8FE: 84, + 0x8FF: 84, + 0x900: 84, + 0x901: 84, + 0x902: 84, + 0x93A: 84, + 0x93C: 84, + 0x941: 84, + 0x942: 84, + 0x943: 84, + 0x944: 84, + 0x945: 84, + 0x946: 84, + 0x947: 84, + 0x948: 84, + 0x94D: 84, + 0x951: 84, + 0x952: 84, + 0x953: 84, + 0x954: 84, + 0x955: 84, + 0x956: 84, + 0x957: 84, + 0x962: 84, + 0x963: 84, + 0x981: 84, + 0x9BC: 84, + 0x9C1: 84, + 0x9C2: 84, + 0x9C3: 84, + 0x9C4: 84, + 0x9CD: 84, + 0x9E2: 84, + 0x9E3: 84, + 0x9FE: 84, + 0xA01: 84, + 0xA02: 84, + 0xA3C: 84, + 0xA41: 84, + 0xA42: 84, + 0xA47: 84, + 0xA48: 84, + 0xA4B: 84, + 0xA4C: 84, + 0xA4D: 84, + 0xA51: 84, + 0xA70: 84, + 0xA71: 84, + 0xA75: 84, + 0xA81: 84, + 0xA82: 84, + 0xABC: 84, + 0xAC1: 84, + 0xAC2: 84, + 0xAC3: 84, + 0xAC4: 84, + 0xAC5: 84, + 0xAC7: 84, + 0xAC8: 84, + 0xACD: 84, + 0xAE2: 84, + 0xAE3: 84, + 0xAFA: 84, + 0xAFB: 84, + 0xAFC: 84, + 0xAFD: 84, + 0xAFE: 84, + 0xAFF: 84, + 0xB01: 84, + 0xB3C: 84, + 0xB3F: 84, + 0xB41: 84, + 0xB42: 84, + 0xB43: 84, + 0xB44: 84, + 0xB4D: 84, + 0xB55: 84, + 0xB56: 84, + 0xB62: 84, + 0xB63: 84, + 0xB82: 84, + 0xBC0: 84, + 0xBCD: 84, + 0xC00: 84, + 0xC04: 84, + 0xC3C: 84, + 0xC3E: 84, + 0xC3F: 84, + 0xC40: 84, + 0xC46: 84, + 0xC47: 84, + 0xC48: 84, + 0xC4A: 84, + 0xC4B: 84, + 0xC4C: 84, + 0xC4D: 84, + 0xC55: 84, + 0xC56: 84, + 0xC62: 84, + 0xC63: 84, + 0xC81: 84, + 0xCBC: 84, + 0xCBF: 84, + 0xCC6: 84, + 0xCCC: 84, + 0xCCD: 84, + 0xCE2: 84, + 0xCE3: 84, + 0xD00: 84, + 0xD01: 84, + 0xD3B: 84, + 0xD3C: 84, + 0xD41: 84, + 0xD42: 84, + 0xD43: 84, + 0xD44: 84, + 0xD4D: 84, + 0xD62: 84, + 0xD63: 84, + 0xD81: 84, + 0xDCA: 84, + 0xDD2: 84, + 0xDD3: 84, + 0xDD4: 84, + 0xDD6: 84, + 0xE31: 84, + 0xE34: 84, + 0xE35: 84, + 0xE36: 84, + 0xE37: 84, + 0xE38: 84, + 0xE39: 84, + 0xE3A: 84, + 0xE47: 84, + 0xE48: 84, + 0xE49: 84, + 0xE4A: 84, + 0xE4B: 84, + 0xE4C: 84, + 0xE4D: 84, + 0xE4E: 84, + 0xEB1: 84, + 0xEB4: 84, + 0xEB5: 84, + 0xEB6: 84, + 0xEB7: 84, + 0xEB8: 84, + 0xEB9: 84, + 0xEBA: 84, + 0xEBB: 84, + 0xEBC: 84, + 0xEC8: 84, + 0xEC9: 84, + 0xECA: 84, + 0xECB: 84, + 0xECC: 84, + 0xECD: 84, + 0xECE: 84, + 0xF18: 84, + 0xF19: 84, + 0xF35: 84, + 0xF37: 84, + 0xF39: 84, + 0xF71: 84, + 0xF72: 84, + 0xF73: 84, + 0xF74: 84, + 0xF75: 84, + 0xF76: 84, + 0xF77: 84, + 0xF78: 84, + 0xF79: 84, + 0xF7A: 84, + 0xF7B: 84, + 0xF7C: 84, + 0xF7D: 84, + 0xF7E: 84, + 0xF80: 84, + 0xF81: 84, + 0xF82: 84, + 0xF83: 84, + 0xF84: 84, + 0xF86: 84, + 0xF87: 84, + 0xF8D: 84, + 
0xF8E: 84, + 0xF8F: 84, + 0xF90: 84, + 0xF91: 84, + 0xF92: 84, + 0xF93: 84, + 0xF94: 84, + 0xF95: 84, + 0xF96: 84, + 0xF97: 84, + 0xF99: 84, + 0xF9A: 84, + 0xF9B: 84, + 0xF9C: 84, + 0xF9D: 84, + 0xF9E: 84, + 0xF9F: 84, + 0xFA0: 84, + 0xFA1: 84, + 0xFA2: 84, + 0xFA3: 84, + 0xFA4: 84, + 0xFA5: 84, + 0xFA6: 84, + 0xFA7: 84, + 0xFA8: 84, + 0xFA9: 84, + 0xFAA: 84, + 0xFAB: 84, + 0xFAC: 84, + 0xFAD: 84, + 0xFAE: 84, + 0xFAF: 84, + 0xFB0: 84, + 0xFB1: 84, + 0xFB2: 84, + 0xFB3: 84, + 0xFB4: 84, + 0xFB5: 84, + 0xFB6: 84, + 0xFB7: 84, + 0xFB8: 84, + 0xFB9: 84, + 0xFBA: 84, + 0xFBB: 84, + 0xFBC: 84, + 0xFC6: 84, + 0x102D: 84, + 0x102E: 84, + 0x102F: 84, + 0x1030: 84, + 0x1032: 84, + 0x1033: 84, + 0x1034: 84, + 0x1035: 84, + 0x1036: 84, + 0x1037: 84, + 0x1039: 84, + 0x103A: 84, + 0x103D: 84, + 0x103E: 84, + 0x1058: 84, + 0x1059: 84, + 0x105E: 84, + 0x105F: 84, + 0x1060: 84, + 0x1071: 84, + 0x1072: 84, + 0x1073: 84, + 0x1074: 84, + 0x1082: 84, + 0x1085: 84, + 0x1086: 84, + 0x108D: 84, + 0x109D: 84, + 0x135D: 84, + 0x135E: 84, + 0x135F: 84, + 0x1712: 84, + 0x1713: 84, + 0x1714: 84, + 0x1732: 84, + 0x1733: 84, + 0x1752: 84, + 0x1753: 84, + 0x1772: 84, + 0x1773: 84, + 0x17B4: 84, + 0x17B5: 84, + 0x17B7: 84, + 0x17B8: 84, + 0x17B9: 84, + 0x17BA: 84, + 0x17BB: 84, + 0x17BC: 84, + 0x17BD: 84, + 0x17C6: 84, + 0x17C9: 84, + 0x17CA: 84, + 0x17CB: 84, + 0x17CC: 84, + 0x17CD: 84, + 0x17CE: 84, + 0x17CF: 84, + 0x17D0: 84, + 0x17D1: 84, + 0x17D2: 84, + 0x17D3: 84, + 0x17DD: 84, + 0x1807: 68, + 0x180A: 67, + 0x180B: 84, + 0x180C: 84, + 0x180D: 84, + 0x180F: 84, + 0x1820: 68, + 0x1821: 68, + 0x1822: 68, + 0x1823: 68, + 0x1824: 68, + 0x1825: 68, + 0x1826: 68, + 0x1827: 68, + 0x1828: 68, + 0x1829: 68, + 0x182A: 68, + 0x182B: 68, + 0x182C: 68, + 0x182D: 68, + 0x182E: 68, + 0x182F: 68, + 0x1830: 68, + 0x1831: 68, + 0x1832: 68, + 0x1833: 68, + 0x1834: 68, + 0x1835: 68, + 0x1836: 68, + 0x1837: 68, + 0x1838: 68, + 0x1839: 68, + 0x183A: 68, + 0x183B: 68, + 0x183C: 68, + 0x183D: 68, + 0x183E: 68, + 0x183F: 68, + 0x1840: 68, + 0x1841: 68, + 0x1842: 68, + 0x1843: 68, + 0x1844: 68, + 0x1845: 68, + 0x1846: 68, + 0x1847: 68, + 0x1848: 68, + 0x1849: 68, + 0x184A: 68, + 0x184B: 68, + 0x184C: 68, + 0x184D: 68, + 0x184E: 68, + 0x184F: 68, + 0x1850: 68, + 0x1851: 68, + 0x1852: 68, + 0x1853: 68, + 0x1854: 68, + 0x1855: 68, + 0x1856: 68, + 0x1857: 68, + 0x1858: 68, + 0x1859: 68, + 0x185A: 68, + 0x185B: 68, + 0x185C: 68, + 0x185D: 68, + 0x185E: 68, + 0x185F: 68, + 0x1860: 68, + 0x1861: 68, + 0x1862: 68, + 0x1863: 68, + 0x1864: 68, + 0x1865: 68, + 0x1866: 68, + 0x1867: 68, + 0x1868: 68, + 0x1869: 68, + 0x186A: 68, + 0x186B: 68, + 0x186C: 68, + 0x186D: 68, + 0x186E: 68, + 0x186F: 68, + 0x1870: 68, + 0x1871: 68, + 0x1872: 68, + 0x1873: 68, + 0x1874: 68, + 0x1875: 68, + 0x1876: 68, + 0x1877: 68, + 0x1878: 68, + 0x1885: 84, + 0x1886: 84, + 0x1887: 68, + 0x1888: 68, + 0x1889: 68, + 0x188A: 68, + 0x188B: 68, + 0x188C: 68, + 0x188D: 68, + 0x188E: 68, + 0x188F: 68, + 0x1890: 68, + 0x1891: 68, + 0x1892: 68, + 0x1893: 68, + 0x1894: 68, + 0x1895: 68, + 0x1896: 68, + 0x1897: 68, + 0x1898: 68, + 0x1899: 68, + 0x189A: 68, + 0x189B: 68, + 0x189C: 68, + 0x189D: 68, + 0x189E: 68, + 0x189F: 68, + 0x18A0: 68, + 0x18A1: 68, + 0x18A2: 68, + 0x18A3: 68, + 0x18A4: 68, + 0x18A5: 68, + 0x18A6: 68, + 0x18A7: 68, + 0x18A8: 68, + 0x18A9: 84, + 0x18AA: 68, + 0x1920: 84, + 0x1921: 84, + 0x1922: 84, + 0x1927: 84, + 0x1928: 84, + 0x1932: 84, + 0x1939: 84, + 0x193A: 84, + 0x193B: 84, + 0x1A17: 84, + 0x1A18: 84, + 0x1A1B: 84, + 0x1A56: 84, + 0x1A58: 84, + 0x1A59: 84, + 
0x1A5A: 84, + 0x1A5B: 84, + 0x1A5C: 84, + 0x1A5D: 84, + 0x1A5E: 84, + 0x1A60: 84, + 0x1A62: 84, + 0x1A65: 84, + 0x1A66: 84, + 0x1A67: 84, + 0x1A68: 84, + 0x1A69: 84, + 0x1A6A: 84, + 0x1A6B: 84, + 0x1A6C: 84, + 0x1A73: 84, + 0x1A74: 84, + 0x1A75: 84, + 0x1A76: 84, + 0x1A77: 84, + 0x1A78: 84, + 0x1A79: 84, + 0x1A7A: 84, + 0x1A7B: 84, + 0x1A7C: 84, + 0x1A7F: 84, + 0x1AB0: 84, + 0x1AB1: 84, + 0x1AB2: 84, + 0x1AB3: 84, + 0x1AB4: 84, + 0x1AB5: 84, + 0x1AB6: 84, + 0x1AB7: 84, + 0x1AB8: 84, + 0x1AB9: 84, + 0x1ABA: 84, + 0x1ABB: 84, + 0x1ABC: 84, + 0x1ABD: 84, + 0x1ABE: 84, + 0x1ABF: 84, + 0x1AC0: 84, + 0x1AC1: 84, + 0x1AC2: 84, + 0x1AC3: 84, + 0x1AC4: 84, + 0x1AC5: 84, + 0x1AC6: 84, + 0x1AC7: 84, + 0x1AC8: 84, + 0x1AC9: 84, + 0x1ACA: 84, + 0x1ACB: 84, + 0x1ACC: 84, + 0x1ACD: 84, + 0x1ACE: 84, + 0x1B00: 84, + 0x1B01: 84, + 0x1B02: 84, + 0x1B03: 84, + 0x1B34: 84, + 0x1B36: 84, + 0x1B37: 84, + 0x1B38: 84, + 0x1B39: 84, + 0x1B3A: 84, + 0x1B3C: 84, + 0x1B42: 84, + 0x1B6B: 84, + 0x1B6C: 84, + 0x1B6D: 84, + 0x1B6E: 84, + 0x1B6F: 84, + 0x1B70: 84, + 0x1B71: 84, + 0x1B72: 84, + 0x1B73: 84, + 0x1B80: 84, + 0x1B81: 84, + 0x1BA2: 84, + 0x1BA3: 84, + 0x1BA4: 84, + 0x1BA5: 84, + 0x1BA8: 84, + 0x1BA9: 84, + 0x1BAB: 84, + 0x1BAC: 84, + 0x1BAD: 84, + 0x1BE6: 84, + 0x1BE8: 84, + 0x1BE9: 84, + 0x1BED: 84, + 0x1BEF: 84, + 0x1BF0: 84, + 0x1BF1: 84, + 0x1C2C: 84, + 0x1C2D: 84, + 0x1C2E: 84, + 0x1C2F: 84, + 0x1C30: 84, + 0x1C31: 84, + 0x1C32: 84, + 0x1C33: 84, + 0x1C36: 84, + 0x1C37: 84, + 0x1CD0: 84, + 0x1CD1: 84, + 0x1CD2: 84, + 0x1CD4: 84, + 0x1CD5: 84, + 0x1CD6: 84, + 0x1CD7: 84, + 0x1CD8: 84, + 0x1CD9: 84, + 0x1CDA: 84, + 0x1CDB: 84, + 0x1CDC: 84, + 0x1CDD: 84, + 0x1CDE: 84, + 0x1CDF: 84, + 0x1CE0: 84, + 0x1CE2: 84, + 0x1CE3: 84, + 0x1CE4: 84, + 0x1CE5: 84, + 0x1CE6: 84, + 0x1CE7: 84, + 0x1CE8: 84, + 0x1CED: 84, + 0x1CF4: 84, + 0x1CF8: 84, + 0x1CF9: 84, + 0x1DC0: 84, + 0x1DC1: 84, + 0x1DC2: 84, + 0x1DC3: 84, + 0x1DC4: 84, + 0x1DC5: 84, + 0x1DC6: 84, + 0x1DC7: 84, + 0x1DC8: 84, + 0x1DC9: 84, + 0x1DCA: 84, + 0x1DCB: 84, + 0x1DCC: 84, + 0x1DCD: 84, + 0x1DCE: 84, + 0x1DCF: 84, + 0x1DD0: 84, + 0x1DD1: 84, + 0x1DD2: 84, + 0x1DD3: 84, + 0x1DD4: 84, + 0x1DD5: 84, + 0x1DD6: 84, + 0x1DD7: 84, + 0x1DD8: 84, + 0x1DD9: 84, + 0x1DDA: 84, + 0x1DDB: 84, + 0x1DDC: 84, + 0x1DDD: 84, + 0x1DDE: 84, + 0x1DDF: 84, + 0x1DE0: 84, + 0x1DE1: 84, + 0x1DE2: 84, + 0x1DE3: 84, + 0x1DE4: 84, + 0x1DE5: 84, + 0x1DE6: 84, + 0x1DE7: 84, + 0x1DE8: 84, + 0x1DE9: 84, + 0x1DEA: 84, + 0x1DEB: 84, + 0x1DEC: 84, + 0x1DED: 84, + 0x1DEE: 84, + 0x1DEF: 84, + 0x1DF0: 84, + 0x1DF1: 84, + 0x1DF2: 84, + 0x1DF3: 84, + 0x1DF4: 84, + 0x1DF5: 84, + 0x1DF6: 84, + 0x1DF7: 84, + 0x1DF8: 84, + 0x1DF9: 84, + 0x1DFA: 84, + 0x1DFB: 84, + 0x1DFC: 84, + 0x1DFD: 84, + 0x1DFE: 84, + 0x1DFF: 84, + 0x200B: 84, + 0x200D: 67, + 0x200E: 84, + 0x200F: 84, + 0x202A: 84, + 0x202B: 84, + 0x202C: 84, + 0x202D: 84, + 0x202E: 84, + 0x2060: 84, + 0x2061: 84, + 0x2062: 84, + 0x2063: 84, + 0x2064: 84, + 0x206A: 84, + 0x206B: 84, + 0x206C: 84, + 0x206D: 84, + 0x206E: 84, + 0x206F: 84, + 0x20D0: 84, + 0x20D1: 84, + 0x20D2: 84, + 0x20D3: 84, + 0x20D4: 84, + 0x20D5: 84, + 0x20D6: 84, + 0x20D7: 84, + 0x20D8: 84, + 0x20D9: 84, + 0x20DA: 84, + 0x20DB: 84, + 0x20DC: 84, + 0x20DD: 84, + 0x20DE: 84, + 0x20DF: 84, + 0x20E0: 84, + 0x20E1: 84, + 0x20E2: 84, + 0x20E3: 84, + 0x20E4: 84, + 0x20E5: 84, + 0x20E6: 84, + 0x20E7: 84, + 0x20E8: 84, + 0x20E9: 84, + 0x20EA: 84, + 0x20EB: 84, + 0x20EC: 84, + 0x20ED: 84, + 0x20EE: 84, + 0x20EF: 84, + 0x20F0: 84, + 0x2CEF: 84, + 0x2CF0: 84, + 0x2CF1: 84, + 0x2D7F: 84, 
+ 0x2DE0: 84, + 0x2DE1: 84, + 0x2DE2: 84, + 0x2DE3: 84, + 0x2DE4: 84, + 0x2DE5: 84, + 0x2DE6: 84, + 0x2DE7: 84, + 0x2DE8: 84, + 0x2DE9: 84, + 0x2DEA: 84, + 0x2DEB: 84, + 0x2DEC: 84, + 0x2DED: 84, + 0x2DEE: 84, + 0x2DEF: 84, + 0x2DF0: 84, + 0x2DF1: 84, + 0x2DF2: 84, + 0x2DF3: 84, + 0x2DF4: 84, + 0x2DF5: 84, + 0x2DF6: 84, + 0x2DF7: 84, + 0x2DF8: 84, + 0x2DF9: 84, + 0x2DFA: 84, + 0x2DFB: 84, + 0x2DFC: 84, + 0x2DFD: 84, + 0x2DFE: 84, + 0x2DFF: 84, + 0x302A: 84, + 0x302B: 84, + 0x302C: 84, + 0x302D: 84, + 0x3099: 84, + 0x309A: 84, + 0xA66F: 84, + 0xA670: 84, + 0xA671: 84, + 0xA672: 84, + 0xA674: 84, + 0xA675: 84, + 0xA676: 84, + 0xA677: 84, + 0xA678: 84, + 0xA679: 84, + 0xA67A: 84, + 0xA67B: 84, + 0xA67C: 84, + 0xA67D: 84, + 0xA69E: 84, + 0xA69F: 84, + 0xA6F0: 84, + 0xA6F1: 84, + 0xA802: 84, + 0xA806: 84, + 0xA80B: 84, + 0xA825: 84, + 0xA826: 84, + 0xA82C: 84, + 0xA840: 68, + 0xA841: 68, + 0xA842: 68, + 0xA843: 68, + 0xA844: 68, + 0xA845: 68, + 0xA846: 68, + 0xA847: 68, + 0xA848: 68, + 0xA849: 68, + 0xA84A: 68, + 0xA84B: 68, + 0xA84C: 68, + 0xA84D: 68, + 0xA84E: 68, + 0xA84F: 68, + 0xA850: 68, + 0xA851: 68, + 0xA852: 68, + 0xA853: 68, + 0xA854: 68, + 0xA855: 68, + 0xA856: 68, + 0xA857: 68, + 0xA858: 68, + 0xA859: 68, + 0xA85A: 68, + 0xA85B: 68, + 0xA85C: 68, + 0xA85D: 68, + 0xA85E: 68, + 0xA85F: 68, + 0xA860: 68, + 0xA861: 68, + 0xA862: 68, + 0xA863: 68, + 0xA864: 68, + 0xA865: 68, + 0xA866: 68, + 0xA867: 68, + 0xA868: 68, + 0xA869: 68, + 0xA86A: 68, + 0xA86B: 68, + 0xA86C: 68, + 0xA86D: 68, + 0xA86E: 68, + 0xA86F: 68, + 0xA870: 68, + 0xA871: 68, + 0xA872: 76, + 0xA8C4: 84, + 0xA8C5: 84, + 0xA8E0: 84, + 0xA8E1: 84, + 0xA8E2: 84, + 0xA8E3: 84, + 0xA8E4: 84, + 0xA8E5: 84, + 0xA8E6: 84, + 0xA8E7: 84, + 0xA8E8: 84, + 0xA8E9: 84, + 0xA8EA: 84, + 0xA8EB: 84, + 0xA8EC: 84, + 0xA8ED: 84, + 0xA8EE: 84, + 0xA8EF: 84, + 0xA8F0: 84, + 0xA8F1: 84, + 0xA8FF: 84, + 0xA926: 84, + 0xA927: 84, + 0xA928: 84, + 0xA929: 84, + 0xA92A: 84, + 0xA92B: 84, + 0xA92C: 84, + 0xA92D: 84, + 0xA947: 84, + 0xA948: 84, + 0xA949: 84, + 0xA94A: 84, + 0xA94B: 84, + 0xA94C: 84, + 0xA94D: 84, + 0xA94E: 84, + 0xA94F: 84, + 0xA950: 84, + 0xA951: 84, + 0xA980: 84, + 0xA981: 84, + 0xA982: 84, + 0xA9B3: 84, + 0xA9B6: 84, + 0xA9B7: 84, + 0xA9B8: 84, + 0xA9B9: 84, + 0xA9BC: 84, + 0xA9BD: 84, + 0xA9E5: 84, + 0xAA29: 84, + 0xAA2A: 84, + 0xAA2B: 84, + 0xAA2C: 84, + 0xAA2D: 84, + 0xAA2E: 84, + 0xAA31: 84, + 0xAA32: 84, + 0xAA35: 84, + 0xAA36: 84, + 0xAA43: 84, + 0xAA4C: 84, + 0xAA7C: 84, + 0xAAB0: 84, + 0xAAB2: 84, + 0xAAB3: 84, + 0xAAB4: 84, + 0xAAB7: 84, + 0xAAB8: 84, + 0xAABE: 84, + 0xAABF: 84, + 0xAAC1: 84, + 0xAAEC: 84, + 0xAAED: 84, + 0xAAF6: 84, + 0xABE5: 84, + 0xABE8: 84, + 0xABED: 84, + 0xFB1E: 84, + 0xFE00: 84, + 0xFE01: 84, + 0xFE02: 84, + 0xFE03: 84, + 0xFE04: 84, + 0xFE05: 84, + 0xFE06: 84, + 0xFE07: 84, + 0xFE08: 84, + 0xFE09: 84, + 0xFE0A: 84, + 0xFE0B: 84, + 0xFE0C: 84, + 0xFE0D: 84, + 0xFE0E: 84, + 0xFE0F: 84, + 0xFE20: 84, + 0xFE21: 84, + 0xFE22: 84, + 0xFE23: 84, + 0xFE24: 84, + 0xFE25: 84, + 0xFE26: 84, + 0xFE27: 84, + 0xFE28: 84, + 0xFE29: 84, + 0xFE2A: 84, + 0xFE2B: 84, + 0xFE2C: 84, + 0xFE2D: 84, + 0xFE2E: 84, + 0xFE2F: 84, + 0xFEFF: 84, + 0xFFF9: 84, + 0xFFFA: 84, + 0xFFFB: 84, + 0x101FD: 84, + 0x102E0: 84, + 0x10376: 84, + 0x10377: 84, + 0x10378: 84, + 0x10379: 84, + 0x1037A: 84, + 0x10A01: 84, + 0x10A02: 84, + 0x10A03: 84, + 0x10A05: 84, + 0x10A06: 84, + 0x10A0C: 84, + 0x10A0D: 84, + 0x10A0E: 84, + 0x10A0F: 84, + 0x10A38: 84, + 0x10A39: 84, + 0x10A3A: 84, + 0x10A3F: 84, + 0x10AC0: 68, + 0x10AC1: 68, + 0x10AC2: 68, + 
0x10AC3: 68, + 0x10AC4: 68, + 0x10AC5: 82, + 0x10AC7: 82, + 0x10AC9: 82, + 0x10ACA: 82, + 0x10ACD: 76, + 0x10ACE: 82, + 0x10ACF: 82, + 0x10AD0: 82, + 0x10AD1: 82, + 0x10AD2: 82, + 0x10AD3: 68, + 0x10AD4: 68, + 0x10AD5: 68, + 0x10AD6: 68, + 0x10AD7: 76, + 0x10AD8: 68, + 0x10AD9: 68, + 0x10ADA: 68, + 0x10ADB: 68, + 0x10ADC: 68, + 0x10ADD: 82, + 0x10ADE: 68, + 0x10ADF: 68, + 0x10AE0: 68, + 0x10AE1: 82, + 0x10AE4: 82, + 0x10AE5: 84, + 0x10AE6: 84, + 0x10AEB: 68, + 0x10AEC: 68, + 0x10AED: 68, + 0x10AEE: 68, + 0x10AEF: 82, + 0x10B80: 68, + 0x10B81: 82, + 0x10B82: 68, + 0x10B83: 82, + 0x10B84: 82, + 0x10B85: 82, + 0x10B86: 68, + 0x10B87: 68, + 0x10B88: 68, + 0x10B89: 82, + 0x10B8A: 68, + 0x10B8B: 68, + 0x10B8C: 82, + 0x10B8D: 68, + 0x10B8E: 82, + 0x10B8F: 82, + 0x10B90: 68, + 0x10B91: 82, + 0x10BA9: 82, + 0x10BAA: 82, + 0x10BAB: 82, + 0x10BAC: 82, + 0x10BAD: 68, + 0x10BAE: 68, + 0x10D00: 76, + 0x10D01: 68, + 0x10D02: 68, + 0x10D03: 68, + 0x10D04: 68, + 0x10D05: 68, + 0x10D06: 68, + 0x10D07: 68, + 0x10D08: 68, + 0x10D09: 68, + 0x10D0A: 68, + 0x10D0B: 68, + 0x10D0C: 68, + 0x10D0D: 68, + 0x10D0E: 68, + 0x10D0F: 68, + 0x10D10: 68, + 0x10D11: 68, + 0x10D12: 68, + 0x10D13: 68, + 0x10D14: 68, + 0x10D15: 68, + 0x10D16: 68, + 0x10D17: 68, + 0x10D18: 68, + 0x10D19: 68, + 0x10D1A: 68, + 0x10D1B: 68, + 0x10D1C: 68, + 0x10D1D: 68, + 0x10D1E: 68, + 0x10D1F: 68, + 0x10D20: 68, + 0x10D21: 68, + 0x10D22: 82, + 0x10D23: 68, + 0x10D24: 84, + 0x10D25: 84, + 0x10D26: 84, + 0x10D27: 84, + 0x10EAB: 84, + 0x10EAC: 84, + 0x10EFD: 84, + 0x10EFE: 84, + 0x10EFF: 84, + 0x10F30: 68, + 0x10F31: 68, + 0x10F32: 68, + 0x10F33: 82, + 0x10F34: 68, + 0x10F35: 68, + 0x10F36: 68, + 0x10F37: 68, + 0x10F38: 68, + 0x10F39: 68, + 0x10F3A: 68, + 0x10F3B: 68, + 0x10F3C: 68, + 0x10F3D: 68, + 0x10F3E: 68, + 0x10F3F: 68, + 0x10F40: 68, + 0x10F41: 68, + 0x10F42: 68, + 0x10F43: 68, + 0x10F44: 68, + 0x10F46: 84, + 0x10F47: 84, + 0x10F48: 84, + 0x10F49: 84, + 0x10F4A: 84, + 0x10F4B: 84, + 0x10F4C: 84, + 0x10F4D: 84, + 0x10F4E: 84, + 0x10F4F: 84, + 0x10F50: 84, + 0x10F51: 68, + 0x10F52: 68, + 0x10F53: 68, + 0x10F54: 82, + 0x10F70: 68, + 0x10F71: 68, + 0x10F72: 68, + 0x10F73: 68, + 0x10F74: 82, + 0x10F75: 82, + 0x10F76: 68, + 0x10F77: 68, + 0x10F78: 68, + 0x10F79: 68, + 0x10F7A: 68, + 0x10F7B: 68, + 0x10F7C: 68, + 0x10F7D: 68, + 0x10F7E: 68, + 0x10F7F: 68, + 0x10F80: 68, + 0x10F81: 68, + 0x10F82: 84, + 0x10F83: 84, + 0x10F84: 84, + 0x10F85: 84, + 0x10FB0: 68, + 0x10FB2: 68, + 0x10FB3: 68, + 0x10FB4: 82, + 0x10FB5: 82, + 0x10FB6: 82, + 0x10FB8: 68, + 0x10FB9: 82, + 0x10FBA: 82, + 0x10FBB: 68, + 0x10FBC: 68, + 0x10FBD: 82, + 0x10FBE: 68, + 0x10FBF: 68, + 0x10FC1: 68, + 0x10FC2: 82, + 0x10FC3: 82, + 0x10FC4: 68, + 0x10FC9: 82, + 0x10FCA: 68, + 0x10FCB: 76, + 0x11001: 84, + 0x11038: 84, + 0x11039: 84, + 0x1103A: 84, + 0x1103B: 84, + 0x1103C: 84, + 0x1103D: 84, + 0x1103E: 84, + 0x1103F: 84, + 0x11040: 84, + 0x11041: 84, + 0x11042: 84, + 0x11043: 84, + 0x11044: 84, + 0x11045: 84, + 0x11046: 84, + 0x11070: 84, + 0x11073: 84, + 0x11074: 84, + 0x1107F: 84, + 0x11080: 84, + 0x11081: 84, + 0x110B3: 84, + 0x110B4: 84, + 0x110B5: 84, + 0x110B6: 84, + 0x110B9: 84, + 0x110BA: 84, + 0x110C2: 84, + 0x11100: 84, + 0x11101: 84, + 0x11102: 84, + 0x11127: 84, + 0x11128: 84, + 0x11129: 84, + 0x1112A: 84, + 0x1112B: 84, + 0x1112D: 84, + 0x1112E: 84, + 0x1112F: 84, + 0x11130: 84, + 0x11131: 84, + 0x11132: 84, + 0x11133: 84, + 0x11134: 84, + 0x11173: 84, + 0x11180: 84, + 0x11181: 84, + 0x111B6: 84, + 0x111B7: 84, + 0x111B8: 84, + 0x111B9: 84, + 0x111BA: 84, + 0x111BB: 84, + 
0x111BC: 84, + 0x111BD: 84, + 0x111BE: 84, + 0x111C9: 84, + 0x111CA: 84, + 0x111CB: 84, + 0x111CC: 84, + 0x111CF: 84, + 0x1122F: 84, + 0x11230: 84, + 0x11231: 84, + 0x11234: 84, + 0x11236: 84, + 0x11237: 84, + 0x1123E: 84, + 0x11241: 84, + 0x112DF: 84, + 0x112E3: 84, + 0x112E4: 84, + 0x112E5: 84, + 0x112E6: 84, + 0x112E7: 84, + 0x112E8: 84, + 0x112E9: 84, + 0x112EA: 84, + 0x11300: 84, + 0x11301: 84, + 0x1133B: 84, + 0x1133C: 84, + 0x11340: 84, + 0x11366: 84, + 0x11367: 84, + 0x11368: 84, + 0x11369: 84, + 0x1136A: 84, + 0x1136B: 84, + 0x1136C: 84, + 0x11370: 84, + 0x11371: 84, + 0x11372: 84, + 0x11373: 84, + 0x11374: 84, + 0x11438: 84, + 0x11439: 84, + 0x1143A: 84, + 0x1143B: 84, + 0x1143C: 84, + 0x1143D: 84, + 0x1143E: 84, + 0x1143F: 84, + 0x11442: 84, + 0x11443: 84, + 0x11444: 84, + 0x11446: 84, + 0x1145E: 84, + 0x114B3: 84, + 0x114B4: 84, + 0x114B5: 84, + 0x114B6: 84, + 0x114B7: 84, + 0x114B8: 84, + 0x114BA: 84, + 0x114BF: 84, + 0x114C0: 84, + 0x114C2: 84, + 0x114C3: 84, + 0x115B2: 84, + 0x115B3: 84, + 0x115B4: 84, + 0x115B5: 84, + 0x115BC: 84, + 0x115BD: 84, + 0x115BF: 84, + 0x115C0: 84, + 0x115DC: 84, + 0x115DD: 84, + 0x11633: 84, + 0x11634: 84, + 0x11635: 84, + 0x11636: 84, + 0x11637: 84, + 0x11638: 84, + 0x11639: 84, + 0x1163A: 84, + 0x1163D: 84, + 0x1163F: 84, + 0x11640: 84, + 0x116AB: 84, + 0x116AD: 84, + 0x116B0: 84, + 0x116B1: 84, + 0x116B2: 84, + 0x116B3: 84, + 0x116B4: 84, + 0x116B5: 84, + 0x116B7: 84, + 0x1171D: 84, + 0x1171E: 84, + 0x1171F: 84, + 0x11722: 84, + 0x11723: 84, + 0x11724: 84, + 0x11725: 84, + 0x11727: 84, + 0x11728: 84, + 0x11729: 84, + 0x1172A: 84, + 0x1172B: 84, + 0x1182F: 84, + 0x11830: 84, + 0x11831: 84, + 0x11832: 84, + 0x11833: 84, + 0x11834: 84, + 0x11835: 84, + 0x11836: 84, + 0x11837: 84, + 0x11839: 84, + 0x1183A: 84, + 0x1193B: 84, + 0x1193C: 84, + 0x1193E: 84, + 0x11943: 84, + 0x119D4: 84, + 0x119D5: 84, + 0x119D6: 84, + 0x119D7: 84, + 0x119DA: 84, + 0x119DB: 84, + 0x119E0: 84, + 0x11A01: 84, + 0x11A02: 84, + 0x11A03: 84, + 0x11A04: 84, + 0x11A05: 84, + 0x11A06: 84, + 0x11A07: 84, + 0x11A08: 84, + 0x11A09: 84, + 0x11A0A: 84, + 0x11A33: 84, + 0x11A34: 84, + 0x11A35: 84, + 0x11A36: 84, + 0x11A37: 84, + 0x11A38: 84, + 0x11A3B: 84, + 0x11A3C: 84, + 0x11A3D: 84, + 0x11A3E: 84, + 0x11A47: 84, + 0x11A51: 84, + 0x11A52: 84, + 0x11A53: 84, + 0x11A54: 84, + 0x11A55: 84, + 0x11A56: 84, + 0x11A59: 84, + 0x11A5A: 84, + 0x11A5B: 84, + 0x11A8A: 84, + 0x11A8B: 84, + 0x11A8C: 84, + 0x11A8D: 84, + 0x11A8E: 84, + 0x11A8F: 84, + 0x11A90: 84, + 0x11A91: 84, + 0x11A92: 84, + 0x11A93: 84, + 0x11A94: 84, + 0x11A95: 84, + 0x11A96: 84, + 0x11A98: 84, + 0x11A99: 84, + 0x11C30: 84, + 0x11C31: 84, + 0x11C32: 84, + 0x11C33: 84, + 0x11C34: 84, + 0x11C35: 84, + 0x11C36: 84, + 0x11C38: 84, + 0x11C39: 84, + 0x11C3A: 84, + 0x11C3B: 84, + 0x11C3C: 84, + 0x11C3D: 84, + 0x11C3F: 84, + 0x11C92: 84, + 0x11C93: 84, + 0x11C94: 84, + 0x11C95: 84, + 0x11C96: 84, + 0x11C97: 84, + 0x11C98: 84, + 0x11C99: 84, + 0x11C9A: 84, + 0x11C9B: 84, + 0x11C9C: 84, + 0x11C9D: 84, + 0x11C9E: 84, + 0x11C9F: 84, + 0x11CA0: 84, + 0x11CA1: 84, + 0x11CA2: 84, + 0x11CA3: 84, + 0x11CA4: 84, + 0x11CA5: 84, + 0x11CA6: 84, + 0x11CA7: 84, + 0x11CAA: 84, + 0x11CAB: 84, + 0x11CAC: 84, + 0x11CAD: 84, + 0x11CAE: 84, + 0x11CAF: 84, + 0x11CB0: 84, + 0x11CB2: 84, + 0x11CB3: 84, + 0x11CB5: 84, + 0x11CB6: 84, + 0x11D31: 84, + 0x11D32: 84, + 0x11D33: 84, + 0x11D34: 84, + 0x11D35: 84, + 0x11D36: 84, + 0x11D3A: 84, + 0x11D3C: 84, + 0x11D3D: 84, + 0x11D3F: 84, + 0x11D40: 84, + 0x11D41: 84, + 0x11D42: 84, + 0x11D43: 84, + 0x11D44: 84, + 
0x11D45: 84, + 0x11D47: 84, + 0x11D90: 84, + 0x11D91: 84, + 0x11D95: 84, + 0x11D97: 84, + 0x11EF3: 84, + 0x11EF4: 84, + 0x11F00: 84, + 0x11F01: 84, + 0x11F36: 84, + 0x11F37: 84, + 0x11F38: 84, + 0x11F39: 84, + 0x11F3A: 84, + 0x11F40: 84, + 0x11F42: 84, + 0x13430: 84, + 0x13431: 84, + 0x13432: 84, + 0x13433: 84, + 0x13434: 84, + 0x13435: 84, + 0x13436: 84, + 0x13437: 84, + 0x13438: 84, + 0x13439: 84, + 0x1343A: 84, + 0x1343B: 84, + 0x1343C: 84, + 0x1343D: 84, + 0x1343E: 84, + 0x1343F: 84, + 0x13440: 84, + 0x13447: 84, + 0x13448: 84, + 0x13449: 84, + 0x1344A: 84, + 0x1344B: 84, + 0x1344C: 84, + 0x1344D: 84, + 0x1344E: 84, + 0x1344F: 84, + 0x13450: 84, + 0x13451: 84, + 0x13452: 84, + 0x13453: 84, + 0x13454: 84, + 0x13455: 84, + 0x16AF0: 84, + 0x16AF1: 84, + 0x16AF2: 84, + 0x16AF3: 84, + 0x16AF4: 84, + 0x16B30: 84, + 0x16B31: 84, + 0x16B32: 84, + 0x16B33: 84, + 0x16B34: 84, + 0x16B35: 84, + 0x16B36: 84, + 0x16F4F: 84, + 0x16F8F: 84, + 0x16F90: 84, + 0x16F91: 84, + 0x16F92: 84, + 0x16FE4: 84, + 0x1BC9D: 84, + 0x1BC9E: 84, + 0x1BCA0: 84, + 0x1BCA1: 84, + 0x1BCA2: 84, + 0x1BCA3: 84, + 0x1CF00: 84, + 0x1CF01: 84, + 0x1CF02: 84, + 0x1CF03: 84, + 0x1CF04: 84, + 0x1CF05: 84, + 0x1CF06: 84, + 0x1CF07: 84, + 0x1CF08: 84, + 0x1CF09: 84, + 0x1CF0A: 84, + 0x1CF0B: 84, + 0x1CF0C: 84, + 0x1CF0D: 84, + 0x1CF0E: 84, + 0x1CF0F: 84, + 0x1CF10: 84, + 0x1CF11: 84, + 0x1CF12: 84, + 0x1CF13: 84, + 0x1CF14: 84, + 0x1CF15: 84, + 0x1CF16: 84, + 0x1CF17: 84, + 0x1CF18: 84, + 0x1CF19: 84, + 0x1CF1A: 84, + 0x1CF1B: 84, + 0x1CF1C: 84, + 0x1CF1D: 84, + 0x1CF1E: 84, + 0x1CF1F: 84, + 0x1CF20: 84, + 0x1CF21: 84, + 0x1CF22: 84, + 0x1CF23: 84, + 0x1CF24: 84, + 0x1CF25: 84, + 0x1CF26: 84, + 0x1CF27: 84, + 0x1CF28: 84, + 0x1CF29: 84, + 0x1CF2A: 84, + 0x1CF2B: 84, + 0x1CF2C: 84, + 0x1CF2D: 84, + 0x1CF30: 84, + 0x1CF31: 84, + 0x1CF32: 84, + 0x1CF33: 84, + 0x1CF34: 84, + 0x1CF35: 84, + 0x1CF36: 84, + 0x1CF37: 84, + 0x1CF38: 84, + 0x1CF39: 84, + 0x1CF3A: 84, + 0x1CF3B: 84, + 0x1CF3C: 84, + 0x1CF3D: 84, + 0x1CF3E: 84, + 0x1CF3F: 84, + 0x1CF40: 84, + 0x1CF41: 84, + 0x1CF42: 84, + 0x1CF43: 84, + 0x1CF44: 84, + 0x1CF45: 84, + 0x1CF46: 84, + 0x1D167: 84, + 0x1D168: 84, + 0x1D169: 84, + 0x1D173: 84, + 0x1D174: 84, + 0x1D175: 84, + 0x1D176: 84, + 0x1D177: 84, + 0x1D178: 84, + 0x1D179: 84, + 0x1D17A: 84, + 0x1D17B: 84, + 0x1D17C: 84, + 0x1D17D: 84, + 0x1D17E: 84, + 0x1D17F: 84, + 0x1D180: 84, + 0x1D181: 84, + 0x1D182: 84, + 0x1D185: 84, + 0x1D186: 84, + 0x1D187: 84, + 0x1D188: 84, + 0x1D189: 84, + 0x1D18A: 84, + 0x1D18B: 84, + 0x1D1AA: 84, + 0x1D1AB: 84, + 0x1D1AC: 84, + 0x1D1AD: 84, + 0x1D242: 84, + 0x1D243: 84, + 0x1D244: 84, + 0x1DA00: 84, + 0x1DA01: 84, + 0x1DA02: 84, + 0x1DA03: 84, + 0x1DA04: 84, + 0x1DA05: 84, + 0x1DA06: 84, + 0x1DA07: 84, + 0x1DA08: 84, + 0x1DA09: 84, + 0x1DA0A: 84, + 0x1DA0B: 84, + 0x1DA0C: 84, + 0x1DA0D: 84, + 0x1DA0E: 84, + 0x1DA0F: 84, + 0x1DA10: 84, + 0x1DA11: 84, + 0x1DA12: 84, + 0x1DA13: 84, + 0x1DA14: 84, + 0x1DA15: 84, + 0x1DA16: 84, + 0x1DA17: 84, + 0x1DA18: 84, + 0x1DA19: 84, + 0x1DA1A: 84, + 0x1DA1B: 84, + 0x1DA1C: 84, + 0x1DA1D: 84, + 0x1DA1E: 84, + 0x1DA1F: 84, + 0x1DA20: 84, + 0x1DA21: 84, + 0x1DA22: 84, + 0x1DA23: 84, + 0x1DA24: 84, + 0x1DA25: 84, + 0x1DA26: 84, + 0x1DA27: 84, + 0x1DA28: 84, + 0x1DA29: 84, + 0x1DA2A: 84, + 0x1DA2B: 84, + 0x1DA2C: 84, + 0x1DA2D: 84, + 0x1DA2E: 84, + 0x1DA2F: 84, + 0x1DA30: 84, + 0x1DA31: 84, + 0x1DA32: 84, + 0x1DA33: 84, + 0x1DA34: 84, + 0x1DA35: 84, + 0x1DA36: 84, + 0x1DA3B: 84, + 0x1DA3C: 84, + 0x1DA3D: 84, + 0x1DA3E: 84, + 0x1DA3F: 84, + 0x1DA40: 84, + 0x1DA41: 84, + 
0x1DA42: 84, + 0x1DA43: 84, + 0x1DA44: 84, + 0x1DA45: 84, + 0x1DA46: 84, + 0x1DA47: 84, + 0x1DA48: 84, + 0x1DA49: 84, + 0x1DA4A: 84, + 0x1DA4B: 84, + 0x1DA4C: 84, + 0x1DA4D: 84, + 0x1DA4E: 84, + 0x1DA4F: 84, + 0x1DA50: 84, + 0x1DA51: 84, + 0x1DA52: 84, + 0x1DA53: 84, + 0x1DA54: 84, + 0x1DA55: 84, + 0x1DA56: 84, + 0x1DA57: 84, + 0x1DA58: 84, + 0x1DA59: 84, + 0x1DA5A: 84, + 0x1DA5B: 84, + 0x1DA5C: 84, + 0x1DA5D: 84, + 0x1DA5E: 84, + 0x1DA5F: 84, + 0x1DA60: 84, + 0x1DA61: 84, + 0x1DA62: 84, + 0x1DA63: 84, + 0x1DA64: 84, + 0x1DA65: 84, + 0x1DA66: 84, + 0x1DA67: 84, + 0x1DA68: 84, + 0x1DA69: 84, + 0x1DA6A: 84, + 0x1DA6B: 84, + 0x1DA6C: 84, + 0x1DA75: 84, + 0x1DA84: 84, + 0x1DA9B: 84, + 0x1DA9C: 84, + 0x1DA9D: 84, + 0x1DA9E: 84, + 0x1DA9F: 84, + 0x1DAA1: 84, + 0x1DAA2: 84, + 0x1DAA3: 84, + 0x1DAA4: 84, + 0x1DAA5: 84, + 0x1DAA6: 84, + 0x1DAA7: 84, + 0x1DAA8: 84, + 0x1DAA9: 84, + 0x1DAAA: 84, + 0x1DAAB: 84, + 0x1DAAC: 84, + 0x1DAAD: 84, + 0x1DAAE: 84, + 0x1DAAF: 84, + 0x1E000: 84, + 0x1E001: 84, + 0x1E002: 84, + 0x1E003: 84, + 0x1E004: 84, + 0x1E005: 84, + 0x1E006: 84, + 0x1E008: 84, + 0x1E009: 84, + 0x1E00A: 84, + 0x1E00B: 84, + 0x1E00C: 84, + 0x1E00D: 84, + 0x1E00E: 84, + 0x1E00F: 84, + 0x1E010: 84, + 0x1E011: 84, + 0x1E012: 84, + 0x1E013: 84, + 0x1E014: 84, + 0x1E015: 84, + 0x1E016: 84, + 0x1E017: 84, + 0x1E018: 84, + 0x1E01B: 84, + 0x1E01C: 84, + 0x1E01D: 84, + 0x1E01E: 84, + 0x1E01F: 84, + 0x1E020: 84, + 0x1E021: 84, + 0x1E023: 84, + 0x1E024: 84, + 0x1E026: 84, + 0x1E027: 84, + 0x1E028: 84, + 0x1E029: 84, + 0x1E02A: 84, + 0x1E08F: 84, + 0x1E130: 84, + 0x1E131: 84, + 0x1E132: 84, + 0x1E133: 84, + 0x1E134: 84, + 0x1E135: 84, + 0x1E136: 84, + 0x1E2AE: 84, + 0x1E2EC: 84, + 0x1E2ED: 84, + 0x1E2EE: 84, + 0x1E2EF: 84, + 0x1E4EC: 84, + 0x1E4ED: 84, + 0x1E4EE: 84, + 0x1E4EF: 84, + 0x1E8D0: 84, + 0x1E8D1: 84, + 0x1E8D2: 84, + 0x1E8D3: 84, + 0x1E8D4: 84, + 0x1E8D5: 84, + 0x1E8D6: 84, + 0x1E900: 68, + 0x1E901: 68, + 0x1E902: 68, + 0x1E903: 68, + 0x1E904: 68, + 0x1E905: 68, + 0x1E906: 68, + 0x1E907: 68, + 0x1E908: 68, + 0x1E909: 68, + 0x1E90A: 68, + 0x1E90B: 68, + 0x1E90C: 68, + 0x1E90D: 68, + 0x1E90E: 68, + 0x1E90F: 68, + 0x1E910: 68, + 0x1E911: 68, + 0x1E912: 68, + 0x1E913: 68, + 0x1E914: 68, + 0x1E915: 68, + 0x1E916: 68, + 0x1E917: 68, + 0x1E918: 68, + 0x1E919: 68, + 0x1E91A: 68, + 0x1E91B: 68, + 0x1E91C: 68, + 0x1E91D: 68, + 0x1E91E: 68, + 0x1E91F: 68, + 0x1E920: 68, + 0x1E921: 68, + 0x1E922: 68, + 0x1E923: 68, + 0x1E924: 68, + 0x1E925: 68, + 0x1E926: 68, + 0x1E927: 68, + 0x1E928: 68, + 0x1E929: 68, + 0x1E92A: 68, + 0x1E92B: 68, + 0x1E92C: 68, + 0x1E92D: 68, + 0x1E92E: 68, + 0x1E92F: 68, + 0x1E930: 68, + 0x1E931: 68, + 0x1E932: 68, + 0x1E933: 68, + 0x1E934: 68, + 0x1E935: 68, + 0x1E936: 68, + 0x1E937: 68, + 0x1E938: 68, + 0x1E939: 68, + 0x1E93A: 68, + 0x1E93B: 68, + 0x1E93C: 68, + 0x1E93D: 68, + 0x1E93E: 68, + 0x1E93F: 68, + 0x1E940: 68, + 0x1E941: 68, + 0x1E942: 68, + 0x1E943: 68, + 0x1E944: 84, + 0x1E945: 84, + 0x1E946: 84, + 0x1E947: 84, + 0x1E948: 84, + 0x1E949: 84, + 0x1E94A: 84, + 0x1E94B: 84, + 0xE0001: 84, + 0xE0020: 84, + 0xE0021: 84, + 0xE0022: 84, + 0xE0023: 84, + 0xE0024: 84, + 0xE0025: 84, + 0xE0026: 84, + 0xE0027: 84, + 0xE0028: 84, + 0xE0029: 84, + 0xE002A: 84, + 0xE002B: 84, + 0xE002C: 84, + 0xE002D: 84, + 0xE002E: 84, + 0xE002F: 84, + 0xE0030: 84, + 0xE0031: 84, + 0xE0032: 84, + 0xE0033: 84, + 0xE0034: 84, + 0xE0035: 84, + 0xE0036: 84, + 0xE0037: 84, + 0xE0038: 84, + 0xE0039: 84, + 0xE003A: 84, + 0xE003B: 84, + 0xE003C: 84, + 0xE003D: 84, + 0xE003E: 84, + 0xE003F: 84, + 0xE0040: 84, + 
0xE0041: 84, + 0xE0042: 84, + 0xE0043: 84, + 0xE0044: 84, + 0xE0045: 84, + 0xE0046: 84, + 0xE0047: 84, + 0xE0048: 84, + 0xE0049: 84, + 0xE004A: 84, + 0xE004B: 84, + 0xE004C: 84, + 0xE004D: 84, + 0xE004E: 84, + 0xE004F: 84, + 0xE0050: 84, + 0xE0051: 84, + 0xE0052: 84, + 0xE0053: 84, + 0xE0054: 84, + 0xE0055: 84, + 0xE0056: 84, + 0xE0057: 84, + 0xE0058: 84, + 0xE0059: 84, + 0xE005A: 84, + 0xE005B: 84, + 0xE005C: 84, + 0xE005D: 84, + 0xE005E: 84, + 0xE005F: 84, + 0xE0060: 84, + 0xE0061: 84, + 0xE0062: 84, + 0xE0063: 84, + 0xE0064: 84, + 0xE0065: 84, + 0xE0066: 84, + 0xE0067: 84, + 0xE0068: 84, + 0xE0069: 84, + 0xE006A: 84, + 0xE006B: 84, + 0xE006C: 84, + 0xE006D: 84, + 0xE006E: 84, + 0xE006F: 84, + 0xE0070: 84, + 0xE0071: 84, + 0xE0072: 84, + 0xE0073: 84, + 0xE0074: 84, + 0xE0075: 84, + 0xE0076: 84, + 0xE0077: 84, + 0xE0078: 84, + 0xE0079: 84, + 0xE007A: 84, + 0xE007B: 84, + 0xE007C: 84, + 0xE007D: 84, + 0xE007E: 84, + 0xE007F: 84, + 0xE0100: 84, + 0xE0101: 84, + 0xE0102: 84, + 0xE0103: 84, + 0xE0104: 84, + 0xE0105: 84, + 0xE0106: 84, + 0xE0107: 84, + 0xE0108: 84, + 0xE0109: 84, + 0xE010A: 84, + 0xE010B: 84, + 0xE010C: 84, + 0xE010D: 84, + 0xE010E: 84, + 0xE010F: 84, + 0xE0110: 84, + 0xE0111: 84, + 0xE0112: 84, + 0xE0113: 84, + 0xE0114: 84, + 0xE0115: 84, + 0xE0116: 84, + 0xE0117: 84, + 0xE0118: 84, + 0xE0119: 84, + 0xE011A: 84, + 0xE011B: 84, + 0xE011C: 84, + 0xE011D: 84, + 0xE011E: 84, + 0xE011F: 84, + 0xE0120: 84, + 0xE0121: 84, + 0xE0122: 84, + 0xE0123: 84, + 0xE0124: 84, + 0xE0125: 84, + 0xE0126: 84, + 0xE0127: 84, + 0xE0128: 84, + 0xE0129: 84, + 0xE012A: 84, + 0xE012B: 84, + 0xE012C: 84, + 0xE012D: 84, + 0xE012E: 84, + 0xE012F: 84, + 0xE0130: 84, + 0xE0131: 84, + 0xE0132: 84, + 0xE0133: 84, + 0xE0134: 84, + 0xE0135: 84, + 0xE0136: 84, + 0xE0137: 84, + 0xE0138: 84, + 0xE0139: 84, + 0xE013A: 84, + 0xE013B: 84, + 0xE013C: 84, + 0xE013D: 84, + 0xE013E: 84, + 0xE013F: 84, + 0xE0140: 84, + 0xE0141: 84, + 0xE0142: 84, + 0xE0143: 84, + 0xE0144: 84, + 0xE0145: 84, + 0xE0146: 84, + 0xE0147: 84, + 0xE0148: 84, + 0xE0149: 84, + 0xE014A: 84, + 0xE014B: 84, + 0xE014C: 84, + 0xE014D: 84, + 0xE014E: 84, + 0xE014F: 84, + 0xE0150: 84, + 0xE0151: 84, + 0xE0152: 84, + 0xE0153: 84, + 0xE0154: 84, + 0xE0155: 84, + 0xE0156: 84, + 0xE0157: 84, + 0xE0158: 84, + 0xE0159: 84, + 0xE015A: 84, + 0xE015B: 84, + 0xE015C: 84, + 0xE015D: 84, + 0xE015E: 84, + 0xE015F: 84, + 0xE0160: 84, + 0xE0161: 84, + 0xE0162: 84, + 0xE0163: 84, + 0xE0164: 84, + 0xE0165: 84, + 0xE0166: 84, + 0xE0167: 84, + 0xE0168: 84, + 0xE0169: 84, + 0xE016A: 84, + 0xE016B: 84, + 0xE016C: 84, + 0xE016D: 84, + 0xE016E: 84, + 0xE016F: 84, + 0xE0170: 84, + 0xE0171: 84, + 0xE0172: 84, + 0xE0173: 84, + 0xE0174: 84, + 0xE0175: 84, + 0xE0176: 84, + 0xE0177: 84, + 0xE0178: 84, + 0xE0179: 84, + 0xE017A: 84, + 0xE017B: 84, + 0xE017C: 84, + 0xE017D: 84, + 0xE017E: 84, + 0xE017F: 84, + 0xE0180: 84, + 0xE0181: 84, + 0xE0182: 84, + 0xE0183: 84, + 0xE0184: 84, + 0xE0185: 84, + 0xE0186: 84, + 0xE0187: 84, + 0xE0188: 84, + 0xE0189: 84, + 0xE018A: 84, + 0xE018B: 84, + 0xE018C: 84, + 0xE018D: 84, + 0xE018E: 84, + 0xE018F: 84, + 0xE0190: 84, + 0xE0191: 84, + 0xE0192: 84, + 0xE0193: 84, + 0xE0194: 84, + 0xE0195: 84, + 0xE0196: 84, + 0xE0197: 84, + 0xE0198: 84, + 0xE0199: 84, + 0xE019A: 84, + 0xE019B: 84, + 0xE019C: 84, + 0xE019D: 84, + 0xE019E: 84, + 0xE019F: 84, + 0xE01A0: 84, + 0xE01A1: 84, + 0xE01A2: 84, + 0xE01A3: 84, + 0xE01A4: 84, + 0xE01A5: 84, + 0xE01A6: 84, + 0xE01A7: 84, + 0xE01A8: 84, + 0xE01A9: 84, + 0xE01AA: 84, + 0xE01AB: 84, + 0xE01AC: 84, + 0xE01AD: 84, + 
0xE01AE: 84, + 0xE01AF: 84, + 0xE01B0: 84, + 0xE01B1: 84, + 0xE01B2: 84, + 0xE01B3: 84, + 0xE01B4: 84, + 0xE01B5: 84, + 0xE01B6: 84, + 0xE01B7: 84, + 0xE01B8: 84, + 0xE01B9: 84, + 0xE01BA: 84, + 0xE01BB: 84, + 0xE01BC: 84, + 0xE01BD: 84, + 0xE01BE: 84, + 0xE01BF: 84, + 0xE01C0: 84, + 0xE01C1: 84, + 0xE01C2: 84, + 0xE01C3: 84, + 0xE01C4: 84, + 0xE01C5: 84, + 0xE01C6: 84, + 0xE01C7: 84, + 0xE01C8: 84, + 0xE01C9: 84, + 0xE01CA: 84, + 0xE01CB: 84, + 0xE01CC: 84, + 0xE01CD: 84, + 0xE01CE: 84, + 0xE01CF: 84, + 0xE01D0: 84, + 0xE01D1: 84, + 0xE01D2: 84, + 0xE01D3: 84, + 0xE01D4: 84, + 0xE01D5: 84, + 0xE01D6: 84, + 0xE01D7: 84, + 0xE01D8: 84, + 0xE01D9: 84, + 0xE01DA: 84, + 0xE01DB: 84, + 0xE01DC: 84, + 0xE01DD: 84, + 0xE01DE: 84, + 0xE01DF: 84, + 0xE01E0: 84, + 0xE01E1: 84, + 0xE01E2: 84, + 0xE01E3: 84, + 0xE01E4: 84, + 0xE01E5: 84, + 0xE01E6: 84, + 0xE01E7: 84, + 0xE01E8: 84, + 0xE01E9: 84, + 0xE01EA: 84, + 0xE01EB: 84, + 0xE01EC: 84, + 0xE01ED: 84, + 0xE01EE: 84, + 0xE01EF: 84, +} +codepoint_classes = { + "PVALID": ( + 0x2D0000002E, + 0x300000003A, + 0x610000007B, + 0xDF000000F7, + 0xF800000100, + 0x10100000102, + 0x10300000104, + 0x10500000106, + 0x10700000108, + 0x1090000010A, + 0x10B0000010C, + 0x10D0000010E, + 0x10F00000110, + 0x11100000112, + 0x11300000114, + 0x11500000116, + 0x11700000118, + 0x1190000011A, + 0x11B0000011C, + 0x11D0000011E, + 0x11F00000120, + 0x12100000122, + 0x12300000124, + 0x12500000126, + 0x12700000128, + 0x1290000012A, + 0x12B0000012C, + 0x12D0000012E, + 0x12F00000130, + 0x13100000132, + 0x13500000136, + 0x13700000139, + 0x13A0000013B, + 0x13C0000013D, + 0x13E0000013F, + 0x14200000143, + 0x14400000145, + 0x14600000147, + 0x14800000149, + 0x14B0000014C, + 0x14D0000014E, + 0x14F00000150, + 0x15100000152, + 0x15300000154, + 0x15500000156, + 0x15700000158, + 0x1590000015A, + 0x15B0000015C, + 0x15D0000015E, + 0x15F00000160, + 0x16100000162, + 0x16300000164, + 0x16500000166, + 0x16700000168, + 0x1690000016A, + 0x16B0000016C, + 0x16D0000016E, + 0x16F00000170, + 0x17100000172, + 0x17300000174, + 0x17500000176, + 0x17700000178, + 0x17A0000017B, + 0x17C0000017D, + 0x17E0000017F, + 0x18000000181, + 0x18300000184, + 0x18500000186, + 0x18800000189, + 0x18C0000018E, + 0x19200000193, + 0x19500000196, + 0x1990000019C, + 0x19E0000019F, + 0x1A1000001A2, + 0x1A3000001A4, + 0x1A5000001A6, + 0x1A8000001A9, + 0x1AA000001AC, + 0x1AD000001AE, + 0x1B0000001B1, + 0x1B4000001B5, + 0x1B6000001B7, + 0x1B9000001BC, + 0x1BD000001C4, + 0x1CE000001CF, + 0x1D0000001D1, + 0x1D2000001D3, + 0x1D4000001D5, + 0x1D6000001D7, + 0x1D8000001D9, + 0x1DA000001DB, + 0x1DC000001DE, + 0x1DF000001E0, + 0x1E1000001E2, + 0x1E3000001E4, + 0x1E5000001E6, + 0x1E7000001E8, + 0x1E9000001EA, + 0x1EB000001EC, + 0x1ED000001EE, + 0x1EF000001F1, + 0x1F5000001F6, + 0x1F9000001FA, + 0x1FB000001FC, + 0x1FD000001FE, + 0x1FF00000200, + 0x20100000202, + 0x20300000204, + 0x20500000206, + 0x20700000208, + 0x2090000020A, + 0x20B0000020C, + 0x20D0000020E, + 0x20F00000210, + 0x21100000212, + 0x21300000214, + 0x21500000216, + 0x21700000218, + 0x2190000021A, + 0x21B0000021C, + 0x21D0000021E, + 0x21F00000220, + 0x22100000222, + 0x22300000224, + 0x22500000226, + 0x22700000228, + 0x2290000022A, + 0x22B0000022C, + 0x22D0000022E, + 0x22F00000230, + 0x23100000232, + 0x2330000023A, + 0x23C0000023D, + 0x23F00000241, + 0x24200000243, + 0x24700000248, + 0x2490000024A, + 0x24B0000024C, + 0x24D0000024E, + 0x24F000002B0, + 0x2B9000002C2, + 0x2C6000002D2, + 0x2EC000002ED, + 0x2EE000002EF, + 0x30000000340, + 0x34200000343, + 0x3460000034F, + 
0x35000000370, + 0x37100000372, + 0x37300000374, + 0x37700000378, + 0x37B0000037E, + 0x39000000391, + 0x3AC000003CF, + 0x3D7000003D8, + 0x3D9000003DA, + 0x3DB000003DC, + 0x3DD000003DE, + 0x3DF000003E0, + 0x3E1000003E2, + 0x3E3000003E4, + 0x3E5000003E6, + 0x3E7000003E8, + 0x3E9000003EA, + 0x3EB000003EC, + 0x3ED000003EE, + 0x3EF000003F0, + 0x3F3000003F4, + 0x3F8000003F9, + 0x3FB000003FD, + 0x43000000460, + 0x46100000462, + 0x46300000464, + 0x46500000466, + 0x46700000468, + 0x4690000046A, + 0x46B0000046C, + 0x46D0000046E, + 0x46F00000470, + 0x47100000472, + 0x47300000474, + 0x47500000476, + 0x47700000478, + 0x4790000047A, + 0x47B0000047C, + 0x47D0000047E, + 0x47F00000480, + 0x48100000482, + 0x48300000488, + 0x48B0000048C, + 0x48D0000048E, + 0x48F00000490, + 0x49100000492, + 0x49300000494, + 0x49500000496, + 0x49700000498, + 0x4990000049A, + 0x49B0000049C, + 0x49D0000049E, + 0x49F000004A0, + 0x4A1000004A2, + 0x4A3000004A4, + 0x4A5000004A6, + 0x4A7000004A8, + 0x4A9000004AA, + 0x4AB000004AC, + 0x4AD000004AE, + 0x4AF000004B0, + 0x4B1000004B2, + 0x4B3000004B4, + 0x4B5000004B6, + 0x4B7000004B8, + 0x4B9000004BA, + 0x4BB000004BC, + 0x4BD000004BE, + 0x4BF000004C0, + 0x4C2000004C3, + 0x4C4000004C5, + 0x4C6000004C7, + 0x4C8000004C9, + 0x4CA000004CB, + 0x4CC000004CD, + 0x4CE000004D0, + 0x4D1000004D2, + 0x4D3000004D4, + 0x4D5000004D6, + 0x4D7000004D8, + 0x4D9000004DA, + 0x4DB000004DC, + 0x4DD000004DE, + 0x4DF000004E0, + 0x4E1000004E2, + 0x4E3000004E4, + 0x4E5000004E6, + 0x4E7000004E8, + 0x4E9000004EA, + 0x4EB000004EC, + 0x4ED000004EE, + 0x4EF000004F0, + 0x4F1000004F2, + 0x4F3000004F4, + 0x4F5000004F6, + 0x4F7000004F8, + 0x4F9000004FA, + 0x4FB000004FC, + 0x4FD000004FE, + 0x4FF00000500, + 0x50100000502, + 0x50300000504, + 0x50500000506, + 0x50700000508, + 0x5090000050A, + 0x50B0000050C, + 0x50D0000050E, + 0x50F00000510, + 0x51100000512, + 0x51300000514, + 0x51500000516, + 0x51700000518, + 0x5190000051A, + 0x51B0000051C, + 0x51D0000051E, + 0x51F00000520, + 0x52100000522, + 0x52300000524, + 0x52500000526, + 0x52700000528, + 0x5290000052A, + 0x52B0000052C, + 0x52D0000052E, + 0x52F00000530, + 0x5590000055A, + 0x56000000587, + 0x58800000589, + 0x591000005BE, + 0x5BF000005C0, + 0x5C1000005C3, + 0x5C4000005C6, + 0x5C7000005C8, + 0x5D0000005EB, + 0x5EF000005F3, + 0x6100000061B, + 0x62000000640, + 0x64100000660, + 0x66E00000675, + 0x679000006D4, + 0x6D5000006DD, + 0x6DF000006E9, + 0x6EA000006F0, + 0x6FA00000700, + 0x7100000074B, + 0x74D000007B2, + 0x7C0000007F6, + 0x7FD000007FE, + 0x8000000082E, + 0x8400000085C, + 0x8600000086B, + 0x87000000888, + 0x8890000088F, + 0x898000008E2, + 0x8E300000958, + 0x96000000964, + 0x96600000970, + 0x97100000984, + 0x9850000098D, + 0x98F00000991, + 0x993000009A9, + 0x9AA000009B1, + 0x9B2000009B3, + 0x9B6000009BA, + 0x9BC000009C5, + 0x9C7000009C9, + 0x9CB000009CF, + 0x9D7000009D8, + 0x9E0000009E4, + 0x9E6000009F2, + 0x9FC000009FD, + 0x9FE000009FF, + 0xA0100000A04, + 0xA0500000A0B, + 0xA0F00000A11, + 0xA1300000A29, + 0xA2A00000A31, + 0xA3200000A33, + 0xA3500000A36, + 0xA3800000A3A, + 0xA3C00000A3D, + 0xA3E00000A43, + 0xA4700000A49, + 0xA4B00000A4E, + 0xA5100000A52, + 0xA5C00000A5D, + 0xA6600000A76, + 0xA8100000A84, + 0xA8500000A8E, + 0xA8F00000A92, + 0xA9300000AA9, + 0xAAA00000AB1, + 0xAB200000AB4, + 0xAB500000ABA, + 0xABC00000AC6, + 0xAC700000ACA, + 0xACB00000ACE, + 0xAD000000AD1, + 0xAE000000AE4, + 0xAE600000AF0, + 0xAF900000B00, + 0xB0100000B04, + 0xB0500000B0D, + 0xB0F00000B11, + 0xB1300000B29, + 0xB2A00000B31, + 0xB3200000B34, + 0xB3500000B3A, + 0xB3C00000B45, + 0xB4700000B49, + 
0xB4B00000B4E, + 0xB5500000B58, + 0xB5F00000B64, + 0xB6600000B70, + 0xB7100000B72, + 0xB8200000B84, + 0xB8500000B8B, + 0xB8E00000B91, + 0xB9200000B96, + 0xB9900000B9B, + 0xB9C00000B9D, + 0xB9E00000BA0, + 0xBA300000BA5, + 0xBA800000BAB, + 0xBAE00000BBA, + 0xBBE00000BC3, + 0xBC600000BC9, + 0xBCA00000BCE, + 0xBD000000BD1, + 0xBD700000BD8, + 0xBE600000BF0, + 0xC0000000C0D, + 0xC0E00000C11, + 0xC1200000C29, + 0xC2A00000C3A, + 0xC3C00000C45, + 0xC4600000C49, + 0xC4A00000C4E, + 0xC5500000C57, + 0xC5800000C5B, + 0xC5D00000C5E, + 0xC6000000C64, + 0xC6600000C70, + 0xC8000000C84, + 0xC8500000C8D, + 0xC8E00000C91, + 0xC9200000CA9, + 0xCAA00000CB4, + 0xCB500000CBA, + 0xCBC00000CC5, + 0xCC600000CC9, + 0xCCA00000CCE, + 0xCD500000CD7, + 0xCDD00000CDF, + 0xCE000000CE4, + 0xCE600000CF0, + 0xCF100000CF4, + 0xD0000000D0D, + 0xD0E00000D11, + 0xD1200000D45, + 0xD4600000D49, + 0xD4A00000D4F, + 0xD5400000D58, + 0xD5F00000D64, + 0xD6600000D70, + 0xD7A00000D80, + 0xD8100000D84, + 0xD8500000D97, + 0xD9A00000DB2, + 0xDB300000DBC, + 0xDBD00000DBE, + 0xDC000000DC7, + 0xDCA00000DCB, + 0xDCF00000DD5, + 0xDD600000DD7, + 0xDD800000DE0, + 0xDE600000DF0, + 0xDF200000DF4, + 0xE0100000E33, + 0xE3400000E3B, + 0xE4000000E4F, + 0xE5000000E5A, + 0xE8100000E83, + 0xE8400000E85, + 0xE8600000E8B, + 0xE8C00000EA4, + 0xEA500000EA6, + 0xEA700000EB3, + 0xEB400000EBE, + 0xEC000000EC5, + 0xEC600000EC7, + 0xEC800000ECF, + 0xED000000EDA, + 0xEDE00000EE0, + 0xF0000000F01, + 0xF0B00000F0C, + 0xF1800000F1A, + 0xF2000000F2A, + 0xF3500000F36, + 0xF3700000F38, + 0xF3900000F3A, + 0xF3E00000F43, + 0xF4400000F48, + 0xF4900000F4D, + 0xF4E00000F52, + 0xF5300000F57, + 0xF5800000F5C, + 0xF5D00000F69, + 0xF6A00000F6D, + 0xF7100000F73, + 0xF7400000F75, + 0xF7A00000F81, + 0xF8200000F85, + 0xF8600000F93, + 0xF9400000F98, + 0xF9900000F9D, + 0xF9E00000FA2, + 0xFA300000FA7, + 0xFA800000FAC, + 0xFAD00000FB9, + 0xFBA00000FBD, + 0xFC600000FC7, + 0x10000000104A, + 0x10500000109E, + 0x10D0000010FB, + 0x10FD00001100, + 0x120000001249, + 0x124A0000124E, + 0x125000001257, + 0x125800001259, + 0x125A0000125E, + 0x126000001289, + 0x128A0000128E, + 0x1290000012B1, + 0x12B2000012B6, + 0x12B8000012BF, + 0x12C0000012C1, + 0x12C2000012C6, + 0x12C8000012D7, + 0x12D800001311, + 0x131200001316, + 0x13180000135B, + 0x135D00001360, + 0x138000001390, + 0x13A0000013F6, + 0x14010000166D, + 0x166F00001680, + 0x16810000169B, + 0x16A0000016EB, + 0x16F1000016F9, + 0x170000001716, + 0x171F00001735, + 0x174000001754, + 0x17600000176D, + 0x176E00001771, + 0x177200001774, + 0x1780000017B4, + 0x17B6000017D4, + 0x17D7000017D8, + 0x17DC000017DE, + 0x17E0000017EA, + 0x18100000181A, + 0x182000001879, + 0x1880000018AB, + 0x18B0000018F6, + 0x19000000191F, + 0x19200000192C, + 0x19300000193C, + 0x19460000196E, + 0x197000001975, + 0x1980000019AC, + 0x19B0000019CA, + 0x19D0000019DA, + 0x1A0000001A1C, + 0x1A2000001A5F, + 0x1A6000001A7D, + 0x1A7F00001A8A, + 0x1A9000001A9A, + 0x1AA700001AA8, + 0x1AB000001ABE, + 0x1ABF00001ACF, + 0x1B0000001B4D, + 0x1B5000001B5A, + 0x1B6B00001B74, + 0x1B8000001BF4, + 0x1C0000001C38, + 0x1C4000001C4A, + 0x1C4D00001C7E, + 0x1CD000001CD3, + 0x1CD400001CFB, + 0x1D0000001D2C, + 0x1D2F00001D30, + 0x1D3B00001D3C, + 0x1D4E00001D4F, + 0x1D6B00001D78, + 0x1D7900001D9B, + 0x1DC000001E00, + 0x1E0100001E02, + 0x1E0300001E04, + 0x1E0500001E06, + 0x1E0700001E08, + 0x1E0900001E0A, + 0x1E0B00001E0C, + 0x1E0D00001E0E, + 0x1E0F00001E10, + 0x1E1100001E12, + 0x1E1300001E14, + 0x1E1500001E16, + 0x1E1700001E18, + 0x1E1900001E1A, + 0x1E1B00001E1C, + 0x1E1D00001E1E, + 0x1E1F00001E20, + 
0x1E2100001E22, + 0x1E2300001E24, + 0x1E2500001E26, + 0x1E2700001E28, + 0x1E2900001E2A, + 0x1E2B00001E2C, + 0x1E2D00001E2E, + 0x1E2F00001E30, + 0x1E3100001E32, + 0x1E3300001E34, + 0x1E3500001E36, + 0x1E3700001E38, + 0x1E3900001E3A, + 0x1E3B00001E3C, + 0x1E3D00001E3E, + 0x1E3F00001E40, + 0x1E4100001E42, + 0x1E4300001E44, + 0x1E4500001E46, + 0x1E4700001E48, + 0x1E4900001E4A, + 0x1E4B00001E4C, + 0x1E4D00001E4E, + 0x1E4F00001E50, + 0x1E5100001E52, + 0x1E5300001E54, + 0x1E5500001E56, + 0x1E5700001E58, + 0x1E5900001E5A, + 0x1E5B00001E5C, + 0x1E5D00001E5E, + 0x1E5F00001E60, + 0x1E6100001E62, + 0x1E6300001E64, + 0x1E6500001E66, + 0x1E6700001E68, + 0x1E6900001E6A, + 0x1E6B00001E6C, + 0x1E6D00001E6E, + 0x1E6F00001E70, + 0x1E7100001E72, + 0x1E7300001E74, + 0x1E7500001E76, + 0x1E7700001E78, + 0x1E7900001E7A, + 0x1E7B00001E7C, + 0x1E7D00001E7E, + 0x1E7F00001E80, + 0x1E8100001E82, + 0x1E8300001E84, + 0x1E8500001E86, + 0x1E8700001E88, + 0x1E8900001E8A, + 0x1E8B00001E8C, + 0x1E8D00001E8E, + 0x1E8F00001E90, + 0x1E9100001E92, + 0x1E9300001E94, + 0x1E9500001E9A, + 0x1E9C00001E9E, + 0x1E9F00001EA0, + 0x1EA100001EA2, + 0x1EA300001EA4, + 0x1EA500001EA6, + 0x1EA700001EA8, + 0x1EA900001EAA, + 0x1EAB00001EAC, + 0x1EAD00001EAE, + 0x1EAF00001EB0, + 0x1EB100001EB2, + 0x1EB300001EB4, + 0x1EB500001EB6, + 0x1EB700001EB8, + 0x1EB900001EBA, + 0x1EBB00001EBC, + 0x1EBD00001EBE, + 0x1EBF00001EC0, + 0x1EC100001EC2, + 0x1EC300001EC4, + 0x1EC500001EC6, + 0x1EC700001EC8, + 0x1EC900001ECA, + 0x1ECB00001ECC, + 0x1ECD00001ECE, + 0x1ECF00001ED0, + 0x1ED100001ED2, + 0x1ED300001ED4, + 0x1ED500001ED6, + 0x1ED700001ED8, + 0x1ED900001EDA, + 0x1EDB00001EDC, + 0x1EDD00001EDE, + 0x1EDF00001EE0, + 0x1EE100001EE2, + 0x1EE300001EE4, + 0x1EE500001EE6, + 0x1EE700001EE8, + 0x1EE900001EEA, + 0x1EEB00001EEC, + 0x1EED00001EEE, + 0x1EEF00001EF0, + 0x1EF100001EF2, + 0x1EF300001EF4, + 0x1EF500001EF6, + 0x1EF700001EF8, + 0x1EF900001EFA, + 0x1EFB00001EFC, + 0x1EFD00001EFE, + 0x1EFF00001F08, + 0x1F1000001F16, + 0x1F2000001F28, + 0x1F3000001F38, + 0x1F4000001F46, + 0x1F5000001F58, + 0x1F6000001F68, + 0x1F7000001F71, + 0x1F7200001F73, + 0x1F7400001F75, + 0x1F7600001F77, + 0x1F7800001F79, + 0x1F7A00001F7B, + 0x1F7C00001F7D, + 0x1FB000001FB2, + 0x1FB600001FB7, + 0x1FC600001FC7, + 0x1FD000001FD3, + 0x1FD600001FD8, + 0x1FE000001FE3, + 0x1FE400001FE8, + 0x1FF600001FF7, + 0x214E0000214F, + 0x218400002185, + 0x2C3000002C60, + 0x2C6100002C62, + 0x2C6500002C67, + 0x2C6800002C69, + 0x2C6A00002C6B, + 0x2C6C00002C6D, + 0x2C7100002C72, + 0x2C7300002C75, + 0x2C7600002C7C, + 0x2C8100002C82, + 0x2C8300002C84, + 0x2C8500002C86, + 0x2C8700002C88, + 0x2C8900002C8A, + 0x2C8B00002C8C, + 0x2C8D00002C8E, + 0x2C8F00002C90, + 0x2C9100002C92, + 0x2C9300002C94, + 0x2C9500002C96, + 0x2C9700002C98, + 0x2C9900002C9A, + 0x2C9B00002C9C, + 0x2C9D00002C9E, + 0x2C9F00002CA0, + 0x2CA100002CA2, + 0x2CA300002CA4, + 0x2CA500002CA6, + 0x2CA700002CA8, + 0x2CA900002CAA, + 0x2CAB00002CAC, + 0x2CAD00002CAE, + 0x2CAF00002CB0, + 0x2CB100002CB2, + 0x2CB300002CB4, + 0x2CB500002CB6, + 0x2CB700002CB8, + 0x2CB900002CBA, + 0x2CBB00002CBC, + 0x2CBD00002CBE, + 0x2CBF00002CC0, + 0x2CC100002CC2, + 0x2CC300002CC4, + 0x2CC500002CC6, + 0x2CC700002CC8, + 0x2CC900002CCA, + 0x2CCB00002CCC, + 0x2CCD00002CCE, + 0x2CCF00002CD0, + 0x2CD100002CD2, + 0x2CD300002CD4, + 0x2CD500002CD6, + 0x2CD700002CD8, + 0x2CD900002CDA, + 0x2CDB00002CDC, + 0x2CDD00002CDE, + 0x2CDF00002CE0, + 0x2CE100002CE2, + 0x2CE300002CE5, + 0x2CEC00002CED, + 0x2CEE00002CF2, + 0x2CF300002CF4, + 0x2D0000002D26, + 0x2D2700002D28, + 0x2D2D00002D2E, + 
0x2D3000002D68, + 0x2D7F00002D97, + 0x2DA000002DA7, + 0x2DA800002DAF, + 0x2DB000002DB7, + 0x2DB800002DBF, + 0x2DC000002DC7, + 0x2DC800002DCF, + 0x2DD000002DD7, + 0x2DD800002DDF, + 0x2DE000002E00, + 0x2E2F00002E30, + 0x300500003008, + 0x302A0000302E, + 0x303C0000303D, + 0x304100003097, + 0x30990000309B, + 0x309D0000309F, + 0x30A1000030FB, + 0x30FC000030FF, + 0x310500003130, + 0x31A0000031C0, + 0x31F000003200, + 0x340000004DC0, + 0x4E000000A48D, + 0xA4D00000A4FE, + 0xA5000000A60D, + 0xA6100000A62C, + 0xA6410000A642, + 0xA6430000A644, + 0xA6450000A646, + 0xA6470000A648, + 0xA6490000A64A, + 0xA64B0000A64C, + 0xA64D0000A64E, + 0xA64F0000A650, + 0xA6510000A652, + 0xA6530000A654, + 0xA6550000A656, + 0xA6570000A658, + 0xA6590000A65A, + 0xA65B0000A65C, + 0xA65D0000A65E, + 0xA65F0000A660, + 0xA6610000A662, + 0xA6630000A664, + 0xA6650000A666, + 0xA6670000A668, + 0xA6690000A66A, + 0xA66B0000A66C, + 0xA66D0000A670, + 0xA6740000A67E, + 0xA67F0000A680, + 0xA6810000A682, + 0xA6830000A684, + 0xA6850000A686, + 0xA6870000A688, + 0xA6890000A68A, + 0xA68B0000A68C, + 0xA68D0000A68E, + 0xA68F0000A690, + 0xA6910000A692, + 0xA6930000A694, + 0xA6950000A696, + 0xA6970000A698, + 0xA6990000A69A, + 0xA69B0000A69C, + 0xA69E0000A6E6, + 0xA6F00000A6F2, + 0xA7170000A720, + 0xA7230000A724, + 0xA7250000A726, + 0xA7270000A728, + 0xA7290000A72A, + 0xA72B0000A72C, + 0xA72D0000A72E, + 0xA72F0000A732, + 0xA7330000A734, + 0xA7350000A736, + 0xA7370000A738, + 0xA7390000A73A, + 0xA73B0000A73C, + 0xA73D0000A73E, + 0xA73F0000A740, + 0xA7410000A742, + 0xA7430000A744, + 0xA7450000A746, + 0xA7470000A748, + 0xA7490000A74A, + 0xA74B0000A74C, + 0xA74D0000A74E, + 0xA74F0000A750, + 0xA7510000A752, + 0xA7530000A754, + 0xA7550000A756, + 0xA7570000A758, + 0xA7590000A75A, + 0xA75B0000A75C, + 0xA75D0000A75E, + 0xA75F0000A760, + 0xA7610000A762, + 0xA7630000A764, + 0xA7650000A766, + 0xA7670000A768, + 0xA7690000A76A, + 0xA76B0000A76C, + 0xA76D0000A76E, + 0xA76F0000A770, + 0xA7710000A779, + 0xA77A0000A77B, + 0xA77C0000A77D, + 0xA77F0000A780, + 0xA7810000A782, + 0xA7830000A784, + 0xA7850000A786, + 0xA7870000A789, + 0xA78C0000A78D, + 0xA78E0000A790, + 0xA7910000A792, + 0xA7930000A796, + 0xA7970000A798, + 0xA7990000A79A, + 0xA79B0000A79C, + 0xA79D0000A79E, + 0xA79F0000A7A0, + 0xA7A10000A7A2, + 0xA7A30000A7A4, + 0xA7A50000A7A6, + 0xA7A70000A7A8, + 0xA7A90000A7AA, + 0xA7AF0000A7B0, + 0xA7B50000A7B6, + 0xA7B70000A7B8, + 0xA7B90000A7BA, + 0xA7BB0000A7BC, + 0xA7BD0000A7BE, + 0xA7BF0000A7C0, + 0xA7C10000A7C2, + 0xA7C30000A7C4, + 0xA7C80000A7C9, + 0xA7CA0000A7CB, + 0xA7D10000A7D2, + 0xA7D30000A7D4, + 0xA7D50000A7D6, + 0xA7D70000A7D8, + 0xA7D90000A7DA, + 0xA7F60000A7F8, + 0xA7FA0000A828, + 0xA82C0000A82D, + 0xA8400000A874, + 0xA8800000A8C6, + 0xA8D00000A8DA, + 0xA8E00000A8F8, + 0xA8FB0000A8FC, + 0xA8FD0000A92E, + 0xA9300000A954, + 0xA9800000A9C1, + 0xA9CF0000A9DA, + 0xA9E00000A9FF, + 0xAA000000AA37, + 0xAA400000AA4E, + 0xAA500000AA5A, + 0xAA600000AA77, + 0xAA7A0000AAC3, + 0xAADB0000AADE, + 0xAAE00000AAF0, + 0xAAF20000AAF7, + 0xAB010000AB07, + 0xAB090000AB0F, + 0xAB110000AB17, + 0xAB200000AB27, + 0xAB280000AB2F, + 0xAB300000AB5B, + 0xAB600000AB69, + 0xABC00000ABEB, + 0xABEC0000ABEE, + 0xABF00000ABFA, + 0xAC000000D7A4, + 0xFA0E0000FA10, + 0xFA110000FA12, + 0xFA130000FA15, + 0xFA1F0000FA20, + 0xFA210000FA22, + 0xFA230000FA25, + 0xFA270000FA2A, + 0xFB1E0000FB1F, + 0xFE200000FE30, + 0xFE730000FE74, + 0x100000001000C, + 0x1000D00010027, + 0x100280001003B, + 0x1003C0001003E, + 0x1003F0001004E, + 0x100500001005E, + 0x10080000100FB, + 0x101FD000101FE, + 0x102800001029D, + 
0x102A0000102D1, + 0x102E0000102E1, + 0x1030000010320, + 0x1032D00010341, + 0x103420001034A, + 0x103500001037B, + 0x103800001039E, + 0x103A0000103C4, + 0x103C8000103D0, + 0x104280001049E, + 0x104A0000104AA, + 0x104D8000104FC, + 0x1050000010528, + 0x1053000010564, + 0x10597000105A2, + 0x105A3000105B2, + 0x105B3000105BA, + 0x105BB000105BD, + 0x1060000010737, + 0x1074000010756, + 0x1076000010768, + 0x1078000010781, + 0x1080000010806, + 0x1080800010809, + 0x1080A00010836, + 0x1083700010839, + 0x1083C0001083D, + 0x1083F00010856, + 0x1086000010877, + 0x108800001089F, + 0x108E0000108F3, + 0x108F4000108F6, + 0x1090000010916, + 0x109200001093A, + 0x10980000109B8, + 0x109BE000109C0, + 0x10A0000010A04, + 0x10A0500010A07, + 0x10A0C00010A14, + 0x10A1500010A18, + 0x10A1900010A36, + 0x10A3800010A3B, + 0x10A3F00010A40, + 0x10A6000010A7D, + 0x10A8000010A9D, + 0x10AC000010AC8, + 0x10AC900010AE7, + 0x10B0000010B36, + 0x10B4000010B56, + 0x10B6000010B73, + 0x10B8000010B92, + 0x10C0000010C49, + 0x10CC000010CF3, + 0x10D0000010D28, + 0x10D3000010D3A, + 0x10E8000010EAA, + 0x10EAB00010EAD, + 0x10EB000010EB2, + 0x10EFD00010F1D, + 0x10F2700010F28, + 0x10F3000010F51, + 0x10F7000010F86, + 0x10FB000010FC5, + 0x10FE000010FF7, + 0x1100000011047, + 0x1106600011076, + 0x1107F000110BB, + 0x110C2000110C3, + 0x110D0000110E9, + 0x110F0000110FA, + 0x1110000011135, + 0x1113600011140, + 0x1114400011148, + 0x1115000011174, + 0x1117600011177, + 0x11180000111C5, + 0x111C9000111CD, + 0x111CE000111DB, + 0x111DC000111DD, + 0x1120000011212, + 0x1121300011238, + 0x1123E00011242, + 0x1128000011287, + 0x1128800011289, + 0x1128A0001128E, + 0x1128F0001129E, + 0x1129F000112A9, + 0x112B0000112EB, + 0x112F0000112FA, + 0x1130000011304, + 0x113050001130D, + 0x1130F00011311, + 0x1131300011329, + 0x1132A00011331, + 0x1133200011334, + 0x113350001133A, + 0x1133B00011345, + 0x1134700011349, + 0x1134B0001134E, + 0x1135000011351, + 0x1135700011358, + 0x1135D00011364, + 0x113660001136D, + 0x1137000011375, + 0x114000001144B, + 0x114500001145A, + 0x1145E00011462, + 0x11480000114C6, + 0x114C7000114C8, + 0x114D0000114DA, + 0x11580000115B6, + 0x115B8000115C1, + 0x115D8000115DE, + 0x1160000011641, + 0x1164400011645, + 0x116500001165A, + 0x11680000116B9, + 0x116C0000116CA, + 0x117000001171B, + 0x1171D0001172C, + 0x117300001173A, + 0x1174000011747, + 0x118000001183B, + 0x118C0000118EA, + 0x118FF00011907, + 0x119090001190A, + 0x1190C00011914, + 0x1191500011917, + 0x1191800011936, + 0x1193700011939, + 0x1193B00011944, + 0x119500001195A, + 0x119A0000119A8, + 0x119AA000119D8, + 0x119DA000119E2, + 0x119E3000119E5, + 0x11A0000011A3F, + 0x11A4700011A48, + 0x11A5000011A9A, + 0x11A9D00011A9E, + 0x11AB000011AF9, + 0x11C0000011C09, + 0x11C0A00011C37, + 0x11C3800011C41, + 0x11C5000011C5A, + 0x11C7200011C90, + 0x11C9200011CA8, + 0x11CA900011CB7, + 0x11D0000011D07, + 0x11D0800011D0A, + 0x11D0B00011D37, + 0x11D3A00011D3B, + 0x11D3C00011D3E, + 0x11D3F00011D48, + 0x11D5000011D5A, + 0x11D6000011D66, + 0x11D6700011D69, + 0x11D6A00011D8F, + 0x11D9000011D92, + 0x11D9300011D99, + 0x11DA000011DAA, + 0x11EE000011EF7, + 0x11F0000011F11, + 0x11F1200011F3B, + 0x11F3E00011F43, + 0x11F5000011F5A, + 0x11FB000011FB1, + 0x120000001239A, + 0x1248000012544, + 0x12F9000012FF1, + 0x1300000013430, + 0x1344000013456, + 0x1440000014647, + 0x1680000016A39, + 0x16A4000016A5F, + 0x16A6000016A6A, + 0x16A7000016ABF, + 0x16AC000016ACA, + 0x16AD000016AEE, + 0x16AF000016AF5, + 0x16B0000016B37, + 0x16B4000016B44, + 0x16B5000016B5A, + 0x16B6300016B78, + 0x16B7D00016B90, + 0x16E6000016E80, + 0x16F0000016F4B, + 
0x16F4F00016F88, + 0x16F8F00016FA0, + 0x16FE000016FE2, + 0x16FE300016FE5, + 0x16FF000016FF2, + 0x17000000187F8, + 0x1880000018CD6, + 0x18D0000018D09, + 0x1AFF00001AFF4, + 0x1AFF50001AFFC, + 0x1AFFD0001AFFF, + 0x1B0000001B123, + 0x1B1320001B133, + 0x1B1500001B153, + 0x1B1550001B156, + 0x1B1640001B168, + 0x1B1700001B2FC, + 0x1BC000001BC6B, + 0x1BC700001BC7D, + 0x1BC800001BC89, + 0x1BC900001BC9A, + 0x1BC9D0001BC9F, + 0x1CF000001CF2E, + 0x1CF300001CF47, + 0x1DA000001DA37, + 0x1DA3B0001DA6D, + 0x1DA750001DA76, + 0x1DA840001DA85, + 0x1DA9B0001DAA0, + 0x1DAA10001DAB0, + 0x1DF000001DF1F, + 0x1DF250001DF2B, + 0x1E0000001E007, + 0x1E0080001E019, + 0x1E01B0001E022, + 0x1E0230001E025, + 0x1E0260001E02B, + 0x1E08F0001E090, + 0x1E1000001E12D, + 0x1E1300001E13E, + 0x1E1400001E14A, + 0x1E14E0001E14F, + 0x1E2900001E2AF, + 0x1E2C00001E2FA, + 0x1E4D00001E4FA, + 0x1E7E00001E7E7, + 0x1E7E80001E7EC, + 0x1E7ED0001E7EF, + 0x1E7F00001E7FF, + 0x1E8000001E8C5, + 0x1E8D00001E8D7, + 0x1E9220001E94C, + 0x1E9500001E95A, + 0x200000002A6E0, + 0x2A7000002B73A, + 0x2B7400002B81E, + 0x2B8200002CEA2, + 0x2CEB00002EBE1, + 0x2EBF00002EE5E, + 0x300000003134B, + 0x31350000323B0, + ), + "CONTEXTJ": (0x200C0000200E,), + "CONTEXTO": ( + 0xB7000000B8, + 0x37500000376, + 0x5F3000005F5, + 0x6600000066A, + 0x6F0000006FA, + 0x30FB000030FC, + ), +} diff --git a/venv/lib/python3.11/site-packages/idna/intranges.py b/venv/lib/python3.11/site-packages/idna/intranges.py new file mode 100644 index 0000000..7bfaa8d --- /dev/null +++ b/venv/lib/python3.11/site-packages/idna/intranges.py @@ -0,0 +1,57 @@ +""" +Given a list of integers, made up of (hopefully) a small number of long runs +of consecutive integers, compute a representation of the form +((start1, end1), (start2, end2) ...). Then answer the question "was x present +in the original list?" in time O(log(# runs)). +""" + +import bisect +from typing import List, Tuple + + +def intranges_from_list(list_: List[int]) -> Tuple[int, ...]: + """Represent a list of integers as a sequence of ranges: + ((start_0, end_0), (start_1, end_1), ...), such that the original + integers are exactly those x such that start_i <= x < end_i for some i. + + Ranges are encoded as single integers (start << 32 | end), not as tuples. 
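+
+    A small illustrative example (hypothetical input values; `_decode_range`
+    and `intranges_contain` are defined below in this module):
+
+        >>> ranges = intranges_from_list([1, 2, 3, 9])
+        >>> [_decode_range(r) for r in ranges]
+        [(1, 4), (9, 10)]
+        >>> intranges_contain(3, ranges)
+        True
+        >>> intranges_contain(4, ranges)
+        False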
+ """ + + sorted_list = sorted(list_) + ranges = [] + last_write = -1 + for i in range(len(sorted_list)): + if i + 1 < len(sorted_list): + if sorted_list[i] == sorted_list[i + 1] - 1: + continue + current_range = sorted_list[last_write + 1 : i + 1] + ranges.append(_encode_range(current_range[0], current_range[-1] + 1)) + last_write = i + + return tuple(ranges) + + +def _encode_range(start: int, end: int) -> int: + return (start << 32) | end + + +def _decode_range(r: int) -> Tuple[int, int]: + return (r >> 32), (r & ((1 << 32) - 1)) + + +def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool: + """Determine if `int_` falls into one of the ranges in `ranges`.""" + tuple_ = _encode_range(int_, 0) + pos = bisect.bisect_left(ranges, tuple_) + # we could be immediately ahead of a tuple (start, end) + # with start < int_ <= end + if pos > 0: + left, right = _decode_range(ranges[pos - 1]) + if left <= int_ < right: + return True + # or we could be immediately behind a tuple (int_, end) + if pos < len(ranges): + left, _ = _decode_range(ranges[pos]) + if left == int_: + return True + return False diff --git a/venv/lib/python3.11/site-packages/idna/package_data.py b/venv/lib/python3.11/site-packages/idna/package_data.py new file mode 100644 index 0000000..514ff7e --- /dev/null +++ b/venv/lib/python3.11/site-packages/idna/package_data.py @@ -0,0 +1 @@ +__version__ = "3.10" diff --git a/venv/lib/python3.11/site-packages/idna/py.typed b/venv/lib/python3.11/site-packages/idna/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.11/site-packages/idna/uts46data.py b/venv/lib/python3.11/site-packages/idna/uts46data.py new file mode 100644 index 0000000..eb89432 --- /dev/null +++ b/venv/lib/python3.11/site-packages/idna/uts46data.py @@ -0,0 +1,8681 @@ +# This file is automatically generated by tools/idna-data +# vim: set fileencoding=utf-8 : + +from typing import List, Tuple, Union + +"""IDNA Mapping Table from UTS46.""" + + +__version__ = "15.1.0" + + +def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x0, "3"), + (0x1, "3"), + (0x2, "3"), + (0x3, "3"), + (0x4, "3"), + (0x5, "3"), + (0x6, "3"), + (0x7, "3"), + (0x8, "3"), + (0x9, "3"), + (0xA, "3"), + (0xB, "3"), + (0xC, "3"), + (0xD, "3"), + (0xE, "3"), + (0xF, "3"), + (0x10, "3"), + (0x11, "3"), + (0x12, "3"), + (0x13, "3"), + (0x14, "3"), + (0x15, "3"), + (0x16, "3"), + (0x17, "3"), + (0x18, "3"), + (0x19, "3"), + (0x1A, "3"), + (0x1B, "3"), + (0x1C, "3"), + (0x1D, "3"), + (0x1E, "3"), + (0x1F, "3"), + (0x20, "3"), + (0x21, "3"), + (0x22, "3"), + (0x23, "3"), + (0x24, "3"), + (0x25, "3"), + (0x26, "3"), + (0x27, "3"), + (0x28, "3"), + (0x29, "3"), + (0x2A, "3"), + (0x2B, "3"), + (0x2C, "3"), + (0x2D, "V"), + (0x2E, "V"), + (0x2F, "3"), + (0x30, "V"), + (0x31, "V"), + (0x32, "V"), + (0x33, "V"), + (0x34, "V"), + (0x35, "V"), + (0x36, "V"), + (0x37, "V"), + (0x38, "V"), + (0x39, "V"), + (0x3A, "3"), + (0x3B, "3"), + (0x3C, "3"), + (0x3D, "3"), + (0x3E, "3"), + (0x3F, "3"), + (0x40, "3"), + (0x41, "M", "a"), + (0x42, "M", "b"), + (0x43, "M", "c"), + (0x44, "M", "d"), + (0x45, "M", "e"), + (0x46, "M", "f"), + (0x47, "M", "g"), + (0x48, "M", "h"), + (0x49, "M", "i"), + (0x4A, "M", "j"), + (0x4B, "M", "k"), + (0x4C, "M", "l"), + (0x4D, "M", "m"), + (0x4E, "M", "n"), + (0x4F, "M", "o"), + (0x50, "M", "p"), + (0x51, "M", "q"), + (0x52, "M", "r"), + (0x53, "M", "s"), + (0x54, "M", "t"), + (0x55, "M", "u"), + (0x56, "M", "v"), + (0x57, "M", "w"), + (0x58, "M", "x"), + (0x59, "M", "y"), + 
(0x5A, "M", "z"), + (0x5B, "3"), + (0x5C, "3"), + (0x5D, "3"), + (0x5E, "3"), + (0x5F, "3"), + (0x60, "3"), + (0x61, "V"), + (0x62, "V"), + (0x63, "V"), + ] + + +def _seg_1() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x64, "V"), + (0x65, "V"), + (0x66, "V"), + (0x67, "V"), + (0x68, "V"), + (0x69, "V"), + (0x6A, "V"), + (0x6B, "V"), + (0x6C, "V"), + (0x6D, "V"), + (0x6E, "V"), + (0x6F, "V"), + (0x70, "V"), + (0x71, "V"), + (0x72, "V"), + (0x73, "V"), + (0x74, "V"), + (0x75, "V"), + (0x76, "V"), + (0x77, "V"), + (0x78, "V"), + (0x79, "V"), + (0x7A, "V"), + (0x7B, "3"), + (0x7C, "3"), + (0x7D, "3"), + (0x7E, "3"), + (0x7F, "3"), + (0x80, "X"), + (0x81, "X"), + (0x82, "X"), + (0x83, "X"), + (0x84, "X"), + (0x85, "X"), + (0x86, "X"), + (0x87, "X"), + (0x88, "X"), + (0x89, "X"), + (0x8A, "X"), + (0x8B, "X"), + (0x8C, "X"), + (0x8D, "X"), + (0x8E, "X"), + (0x8F, "X"), + (0x90, "X"), + (0x91, "X"), + (0x92, "X"), + (0x93, "X"), + (0x94, "X"), + (0x95, "X"), + (0x96, "X"), + (0x97, "X"), + (0x98, "X"), + (0x99, "X"), + (0x9A, "X"), + (0x9B, "X"), + (0x9C, "X"), + (0x9D, "X"), + (0x9E, "X"), + (0x9F, "X"), + (0xA0, "3", " "), + (0xA1, "V"), + (0xA2, "V"), + (0xA3, "V"), + (0xA4, "V"), + (0xA5, "V"), + (0xA6, "V"), + (0xA7, "V"), + (0xA8, "3", " ̈"), + (0xA9, "V"), + (0xAA, "M", "a"), + (0xAB, "V"), + (0xAC, "V"), + (0xAD, "I"), + (0xAE, "V"), + (0xAF, "3", " ̄"), + (0xB0, "V"), + (0xB1, "V"), + (0xB2, "M", "2"), + (0xB3, "M", "3"), + (0xB4, "3", " ́"), + (0xB5, "M", "μ"), + (0xB6, "V"), + (0xB7, "V"), + (0xB8, "3", " ̧"), + (0xB9, "M", "1"), + (0xBA, "M", "o"), + (0xBB, "V"), + (0xBC, "M", "1⁄4"), + (0xBD, "M", "1⁄2"), + (0xBE, "M", "3⁄4"), + (0xBF, "V"), + (0xC0, "M", "à"), + (0xC1, "M", "á"), + (0xC2, "M", "â"), + (0xC3, "M", "ã"), + (0xC4, "M", "ä"), + (0xC5, "M", "å"), + (0xC6, "M", "æ"), + (0xC7, "M", "ç"), + ] + + +def _seg_2() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xC8, "M", "è"), + (0xC9, "M", "é"), + (0xCA, "M", "ê"), + (0xCB, "M", "ë"), + (0xCC, "M", "ì"), + (0xCD, "M", "í"), + (0xCE, "M", "î"), + (0xCF, "M", "ï"), + (0xD0, "M", "ð"), + (0xD1, "M", "ñ"), + (0xD2, "M", "ò"), + (0xD3, "M", "ó"), + (0xD4, "M", "ô"), + (0xD5, "M", "õ"), + (0xD6, "M", "ö"), + (0xD7, "V"), + (0xD8, "M", "ø"), + (0xD9, "M", "ù"), + (0xDA, "M", "ú"), + (0xDB, "M", "û"), + (0xDC, "M", "ü"), + (0xDD, "M", "ý"), + (0xDE, "M", "þ"), + (0xDF, "D", "ss"), + (0xE0, "V"), + (0xE1, "V"), + (0xE2, "V"), + (0xE3, "V"), + (0xE4, "V"), + (0xE5, "V"), + (0xE6, "V"), + (0xE7, "V"), + (0xE8, "V"), + (0xE9, "V"), + (0xEA, "V"), + (0xEB, "V"), + (0xEC, "V"), + (0xED, "V"), + (0xEE, "V"), + (0xEF, "V"), + (0xF0, "V"), + (0xF1, "V"), + (0xF2, "V"), + (0xF3, "V"), + (0xF4, "V"), + (0xF5, "V"), + (0xF6, "V"), + (0xF7, "V"), + (0xF8, "V"), + (0xF9, "V"), + (0xFA, "V"), + (0xFB, "V"), + (0xFC, "V"), + (0xFD, "V"), + (0xFE, "V"), + (0xFF, "V"), + (0x100, "M", "ā"), + (0x101, "V"), + (0x102, "M", "ă"), + (0x103, "V"), + (0x104, "M", "ą"), + (0x105, "V"), + (0x106, "M", "ć"), + (0x107, "V"), + (0x108, "M", "ĉ"), + (0x109, "V"), + (0x10A, "M", "ċ"), + (0x10B, "V"), + (0x10C, "M", "č"), + (0x10D, "V"), + (0x10E, "M", "ď"), + (0x10F, "V"), + (0x110, "M", "đ"), + (0x111, "V"), + (0x112, "M", "ē"), + (0x113, "V"), + (0x114, "M", "ĕ"), + (0x115, "V"), + (0x116, "M", "ė"), + (0x117, "V"), + (0x118, "M", "ę"), + (0x119, "V"), + (0x11A, "M", "ě"), + (0x11B, "V"), + (0x11C, "M", "ĝ"), + (0x11D, "V"), + (0x11E, "M", "ğ"), + (0x11F, "V"), + (0x120, "M", "ġ"), + (0x121, "V"), + (0x122, "M", "ģ"), 
+ (0x123, "V"), + (0x124, "M", "ĥ"), + (0x125, "V"), + (0x126, "M", "ħ"), + (0x127, "V"), + (0x128, "M", "ĩ"), + (0x129, "V"), + (0x12A, "M", "ī"), + (0x12B, "V"), + ] + + +def _seg_3() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x12C, "M", "ĭ"), + (0x12D, "V"), + (0x12E, "M", "į"), + (0x12F, "V"), + (0x130, "M", "i̇"), + (0x131, "V"), + (0x132, "M", "ij"), + (0x134, "M", "ĵ"), + (0x135, "V"), + (0x136, "M", "ķ"), + (0x137, "V"), + (0x139, "M", "ĺ"), + (0x13A, "V"), + (0x13B, "M", "ļ"), + (0x13C, "V"), + (0x13D, "M", "ľ"), + (0x13E, "V"), + (0x13F, "M", "l·"), + (0x141, "M", "ł"), + (0x142, "V"), + (0x143, "M", "ń"), + (0x144, "V"), + (0x145, "M", "ņ"), + (0x146, "V"), + (0x147, "M", "ň"), + (0x148, "V"), + (0x149, "M", "ʼn"), + (0x14A, "M", "ŋ"), + (0x14B, "V"), + (0x14C, "M", "ō"), + (0x14D, "V"), + (0x14E, "M", "ŏ"), + (0x14F, "V"), + (0x150, "M", "ő"), + (0x151, "V"), + (0x152, "M", "œ"), + (0x153, "V"), + (0x154, "M", "ŕ"), + (0x155, "V"), + (0x156, "M", "ŗ"), + (0x157, "V"), + (0x158, "M", "ř"), + (0x159, "V"), + (0x15A, "M", "ś"), + (0x15B, "V"), + (0x15C, "M", "ŝ"), + (0x15D, "V"), + (0x15E, "M", "ş"), + (0x15F, "V"), + (0x160, "M", "š"), + (0x161, "V"), + (0x162, "M", "ţ"), + (0x163, "V"), + (0x164, "M", "ť"), + (0x165, "V"), + (0x166, "M", "ŧ"), + (0x167, "V"), + (0x168, "M", "ũ"), + (0x169, "V"), + (0x16A, "M", "ū"), + (0x16B, "V"), + (0x16C, "M", "ŭ"), + (0x16D, "V"), + (0x16E, "M", "ů"), + (0x16F, "V"), + (0x170, "M", "ű"), + (0x171, "V"), + (0x172, "M", "ų"), + (0x173, "V"), + (0x174, "M", "ŵ"), + (0x175, "V"), + (0x176, "M", "ŷ"), + (0x177, "V"), + (0x178, "M", "ÿ"), + (0x179, "M", "ź"), + (0x17A, "V"), + (0x17B, "M", "ż"), + (0x17C, "V"), + (0x17D, "M", "ž"), + (0x17E, "V"), + (0x17F, "M", "s"), + (0x180, "V"), + (0x181, "M", "ɓ"), + (0x182, "M", "ƃ"), + (0x183, "V"), + (0x184, "M", "ƅ"), + (0x185, "V"), + (0x186, "M", "ɔ"), + (0x187, "M", "ƈ"), + (0x188, "V"), + (0x189, "M", "ɖ"), + (0x18A, "M", "ɗ"), + (0x18B, "M", "ƌ"), + (0x18C, "V"), + (0x18E, "M", "ǝ"), + (0x18F, "M", "ə"), + (0x190, "M", "ɛ"), + (0x191, "M", "ƒ"), + (0x192, "V"), + (0x193, "M", "ɠ"), + ] + + +def _seg_4() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x194, "M", "ɣ"), + (0x195, "V"), + (0x196, "M", "ɩ"), + (0x197, "M", "ɨ"), + (0x198, "M", "ƙ"), + (0x199, "V"), + (0x19C, "M", "ɯ"), + (0x19D, "M", "ɲ"), + (0x19E, "V"), + (0x19F, "M", "ɵ"), + (0x1A0, "M", "ơ"), + (0x1A1, "V"), + (0x1A2, "M", "ƣ"), + (0x1A3, "V"), + (0x1A4, "M", "ƥ"), + (0x1A5, "V"), + (0x1A6, "M", "ʀ"), + (0x1A7, "M", "ƨ"), + (0x1A8, "V"), + (0x1A9, "M", "ʃ"), + (0x1AA, "V"), + (0x1AC, "M", "ƭ"), + (0x1AD, "V"), + (0x1AE, "M", "ʈ"), + (0x1AF, "M", "ư"), + (0x1B0, "V"), + (0x1B1, "M", "ʊ"), + (0x1B2, "M", "ʋ"), + (0x1B3, "M", "ƴ"), + (0x1B4, "V"), + (0x1B5, "M", "ƶ"), + (0x1B6, "V"), + (0x1B7, "M", "ʒ"), + (0x1B8, "M", "ƹ"), + (0x1B9, "V"), + (0x1BC, "M", "ƽ"), + (0x1BD, "V"), + (0x1C4, "M", "dž"), + (0x1C7, "M", "lj"), + (0x1CA, "M", "nj"), + (0x1CD, "M", "ǎ"), + (0x1CE, "V"), + (0x1CF, "M", "ǐ"), + (0x1D0, "V"), + (0x1D1, "M", "ǒ"), + (0x1D2, "V"), + (0x1D3, "M", "ǔ"), + (0x1D4, "V"), + (0x1D5, "M", "ǖ"), + (0x1D6, "V"), + (0x1D7, "M", "ǘ"), + (0x1D8, "V"), + (0x1D9, "M", "ǚ"), + (0x1DA, "V"), + (0x1DB, "M", "ǜ"), + (0x1DC, "V"), + (0x1DE, "M", "ǟ"), + (0x1DF, "V"), + (0x1E0, "M", "ǡ"), + (0x1E1, "V"), + (0x1E2, "M", "ǣ"), + (0x1E3, "V"), + (0x1E4, "M", "ǥ"), + (0x1E5, "V"), + (0x1E6, "M", "ǧ"), + (0x1E7, "V"), + (0x1E8, "M", "ǩ"), + (0x1E9, "V"), + (0x1EA, "M", "ǫ"), + (0x1EB, "V"), + 
(0x1EC, "M", "ǭ"), + (0x1ED, "V"), + (0x1EE, "M", "ǯ"), + (0x1EF, "V"), + (0x1F1, "M", "dz"), + (0x1F4, "M", "ǵ"), + (0x1F5, "V"), + (0x1F6, "M", "ƕ"), + (0x1F7, "M", "ƿ"), + (0x1F8, "M", "ǹ"), + (0x1F9, "V"), + (0x1FA, "M", "ǻ"), + (0x1FB, "V"), + (0x1FC, "M", "ǽ"), + (0x1FD, "V"), + (0x1FE, "M", "ǿ"), + (0x1FF, "V"), + (0x200, "M", "ȁ"), + (0x201, "V"), + (0x202, "M", "ȃ"), + (0x203, "V"), + (0x204, "M", "ȅ"), + (0x205, "V"), + (0x206, "M", "ȇ"), + (0x207, "V"), + (0x208, "M", "ȉ"), + (0x209, "V"), + (0x20A, "M", "ȋ"), + (0x20B, "V"), + (0x20C, "M", "ȍ"), + ] + + +def _seg_5() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x20D, "V"), + (0x20E, "M", "ȏ"), + (0x20F, "V"), + (0x210, "M", "ȑ"), + (0x211, "V"), + (0x212, "M", "ȓ"), + (0x213, "V"), + (0x214, "M", "ȕ"), + (0x215, "V"), + (0x216, "M", "ȗ"), + (0x217, "V"), + (0x218, "M", "ș"), + (0x219, "V"), + (0x21A, "M", "ț"), + (0x21B, "V"), + (0x21C, "M", "ȝ"), + (0x21D, "V"), + (0x21E, "M", "ȟ"), + (0x21F, "V"), + (0x220, "M", "ƞ"), + (0x221, "V"), + (0x222, "M", "ȣ"), + (0x223, "V"), + (0x224, "M", "ȥ"), + (0x225, "V"), + (0x226, "M", "ȧ"), + (0x227, "V"), + (0x228, "M", "ȩ"), + (0x229, "V"), + (0x22A, "M", "ȫ"), + (0x22B, "V"), + (0x22C, "M", "ȭ"), + (0x22D, "V"), + (0x22E, "M", "ȯ"), + (0x22F, "V"), + (0x230, "M", "ȱ"), + (0x231, "V"), + (0x232, "M", "ȳ"), + (0x233, "V"), + (0x23A, "M", "ⱥ"), + (0x23B, "M", "ȼ"), + (0x23C, "V"), + (0x23D, "M", "ƚ"), + (0x23E, "M", "ⱦ"), + (0x23F, "V"), + (0x241, "M", "ɂ"), + (0x242, "V"), + (0x243, "M", "ƀ"), + (0x244, "M", "ʉ"), + (0x245, "M", "ʌ"), + (0x246, "M", "ɇ"), + (0x247, "V"), + (0x248, "M", "ɉ"), + (0x249, "V"), + (0x24A, "M", "ɋ"), + (0x24B, "V"), + (0x24C, "M", "ɍ"), + (0x24D, "V"), + (0x24E, "M", "ɏ"), + (0x24F, "V"), + (0x2B0, "M", "h"), + (0x2B1, "M", "ɦ"), + (0x2B2, "M", "j"), + (0x2B3, "M", "r"), + (0x2B4, "M", "ɹ"), + (0x2B5, "M", "ɻ"), + (0x2B6, "M", "ʁ"), + (0x2B7, "M", "w"), + (0x2B8, "M", "y"), + (0x2B9, "V"), + (0x2D8, "3", " ̆"), + (0x2D9, "3", " ̇"), + (0x2DA, "3", " ̊"), + (0x2DB, "3", " ̨"), + (0x2DC, "3", " ̃"), + (0x2DD, "3", " ̋"), + (0x2DE, "V"), + (0x2E0, "M", "ɣ"), + (0x2E1, "M", "l"), + (0x2E2, "M", "s"), + (0x2E3, "M", "x"), + (0x2E4, "M", "ʕ"), + (0x2E5, "V"), + (0x340, "M", "̀"), + (0x341, "M", "́"), + (0x342, "V"), + (0x343, "M", "̓"), + (0x344, "M", "̈́"), + (0x345, "M", "ι"), + (0x346, "V"), + (0x34F, "I"), + (0x350, "V"), + (0x370, "M", "ͱ"), + (0x371, "V"), + (0x372, "M", "ͳ"), + (0x373, "V"), + (0x374, "M", "ʹ"), + (0x375, "V"), + (0x376, "M", "ͷ"), + (0x377, "V"), + ] + + +def _seg_6() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x378, "X"), + (0x37A, "3", " ι"), + (0x37B, "V"), + (0x37E, "3", ";"), + (0x37F, "M", "ϳ"), + (0x380, "X"), + (0x384, "3", " ́"), + (0x385, "3", " ̈́"), + (0x386, "M", "ά"), + (0x387, "M", "·"), + (0x388, "M", "έ"), + (0x389, "M", "ή"), + (0x38A, "M", "ί"), + (0x38B, "X"), + (0x38C, "M", "ό"), + (0x38D, "X"), + (0x38E, "M", "ύ"), + (0x38F, "M", "ώ"), + (0x390, "V"), + (0x391, "M", "α"), + (0x392, "M", "β"), + (0x393, "M", "γ"), + (0x394, "M", "δ"), + (0x395, "M", "ε"), + (0x396, "M", "ζ"), + (0x397, "M", "η"), + (0x398, "M", "θ"), + (0x399, "M", "ι"), + (0x39A, "M", "κ"), + (0x39B, "M", "λ"), + (0x39C, "M", "μ"), + (0x39D, "M", "ν"), + (0x39E, "M", "ξ"), + (0x39F, "M", "ο"), + (0x3A0, "M", "π"), + (0x3A1, "M", "ρ"), + (0x3A2, "X"), + (0x3A3, "M", "σ"), + (0x3A4, "M", "τ"), + (0x3A5, "M", "υ"), + (0x3A6, "M", "φ"), + (0x3A7, "M", "χ"), + (0x3A8, "M", "ψ"), + (0x3A9, "M", "ω"), + (0x3AA, 
"M", "ϊ"), + (0x3AB, "M", "ϋ"), + (0x3AC, "V"), + (0x3C2, "D", "σ"), + (0x3C3, "V"), + (0x3CF, "M", "ϗ"), + (0x3D0, "M", "β"), + (0x3D1, "M", "θ"), + (0x3D2, "M", "υ"), + (0x3D3, "M", "ύ"), + (0x3D4, "M", "ϋ"), + (0x3D5, "M", "φ"), + (0x3D6, "M", "π"), + (0x3D7, "V"), + (0x3D8, "M", "ϙ"), + (0x3D9, "V"), + (0x3DA, "M", "ϛ"), + (0x3DB, "V"), + (0x3DC, "M", "ϝ"), + (0x3DD, "V"), + (0x3DE, "M", "ϟ"), + (0x3DF, "V"), + (0x3E0, "M", "ϡ"), + (0x3E1, "V"), + (0x3E2, "M", "ϣ"), + (0x3E3, "V"), + (0x3E4, "M", "ϥ"), + (0x3E5, "V"), + (0x3E6, "M", "ϧ"), + (0x3E7, "V"), + (0x3E8, "M", "ϩ"), + (0x3E9, "V"), + (0x3EA, "M", "ϫ"), + (0x3EB, "V"), + (0x3EC, "M", "ϭ"), + (0x3ED, "V"), + (0x3EE, "M", "ϯ"), + (0x3EF, "V"), + (0x3F0, "M", "κ"), + (0x3F1, "M", "ρ"), + (0x3F2, "M", "σ"), + (0x3F3, "V"), + (0x3F4, "M", "θ"), + (0x3F5, "M", "ε"), + (0x3F6, "V"), + (0x3F7, "M", "ϸ"), + (0x3F8, "V"), + (0x3F9, "M", "σ"), + (0x3FA, "M", "ϻ"), + (0x3FB, "V"), + (0x3FD, "M", "ͻ"), + (0x3FE, "M", "ͼ"), + (0x3FF, "M", "ͽ"), + (0x400, "M", "ѐ"), + (0x401, "M", "ё"), + (0x402, "M", "ђ"), + ] + + +def _seg_7() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x403, "M", "ѓ"), + (0x404, "M", "є"), + (0x405, "M", "ѕ"), + (0x406, "M", "і"), + (0x407, "M", "ї"), + (0x408, "M", "ј"), + (0x409, "M", "љ"), + (0x40A, "M", "њ"), + (0x40B, "M", "ћ"), + (0x40C, "M", "ќ"), + (0x40D, "M", "ѝ"), + (0x40E, "M", "ў"), + (0x40F, "M", "џ"), + (0x410, "M", "а"), + (0x411, "M", "б"), + (0x412, "M", "в"), + (0x413, "M", "г"), + (0x414, "M", "д"), + (0x415, "M", "е"), + (0x416, "M", "ж"), + (0x417, "M", "з"), + (0x418, "M", "и"), + (0x419, "M", "й"), + (0x41A, "M", "к"), + (0x41B, "M", "л"), + (0x41C, "M", "м"), + (0x41D, "M", "н"), + (0x41E, "M", "о"), + (0x41F, "M", "п"), + (0x420, "M", "р"), + (0x421, "M", "с"), + (0x422, "M", "т"), + (0x423, "M", "у"), + (0x424, "M", "ф"), + (0x425, "M", "х"), + (0x426, "M", "ц"), + (0x427, "M", "ч"), + (0x428, "M", "ш"), + (0x429, "M", "щ"), + (0x42A, "M", "ъ"), + (0x42B, "M", "ы"), + (0x42C, "M", "ь"), + (0x42D, "M", "э"), + (0x42E, "M", "ю"), + (0x42F, "M", "я"), + (0x430, "V"), + (0x460, "M", "ѡ"), + (0x461, "V"), + (0x462, "M", "ѣ"), + (0x463, "V"), + (0x464, "M", "ѥ"), + (0x465, "V"), + (0x466, "M", "ѧ"), + (0x467, "V"), + (0x468, "M", "ѩ"), + (0x469, "V"), + (0x46A, "M", "ѫ"), + (0x46B, "V"), + (0x46C, "M", "ѭ"), + (0x46D, "V"), + (0x46E, "M", "ѯ"), + (0x46F, "V"), + (0x470, "M", "ѱ"), + (0x471, "V"), + (0x472, "M", "ѳ"), + (0x473, "V"), + (0x474, "M", "ѵ"), + (0x475, "V"), + (0x476, "M", "ѷ"), + (0x477, "V"), + (0x478, "M", "ѹ"), + (0x479, "V"), + (0x47A, "M", "ѻ"), + (0x47B, "V"), + (0x47C, "M", "ѽ"), + (0x47D, "V"), + (0x47E, "M", "ѿ"), + (0x47F, "V"), + (0x480, "M", "ҁ"), + (0x481, "V"), + (0x48A, "M", "ҋ"), + (0x48B, "V"), + (0x48C, "M", "ҍ"), + (0x48D, "V"), + (0x48E, "M", "ҏ"), + (0x48F, "V"), + (0x490, "M", "ґ"), + (0x491, "V"), + (0x492, "M", "ғ"), + (0x493, "V"), + (0x494, "M", "ҕ"), + (0x495, "V"), + (0x496, "M", "җ"), + (0x497, "V"), + (0x498, "M", "ҙ"), + (0x499, "V"), + (0x49A, "M", "қ"), + (0x49B, "V"), + (0x49C, "M", "ҝ"), + (0x49D, "V"), + ] + + +def _seg_8() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x49E, "M", "ҟ"), + (0x49F, "V"), + (0x4A0, "M", "ҡ"), + (0x4A1, "V"), + (0x4A2, "M", "ң"), + (0x4A3, "V"), + (0x4A4, "M", "ҥ"), + (0x4A5, "V"), + (0x4A6, "M", "ҧ"), + (0x4A7, "V"), + (0x4A8, "M", "ҩ"), + (0x4A9, "V"), + (0x4AA, "M", "ҫ"), + (0x4AB, "V"), + (0x4AC, "M", "ҭ"), + (0x4AD, "V"), + (0x4AE, "M", "ү"), + (0x4AF, "V"), + (0x4B0, "M", 
"ұ"), + (0x4B1, "V"), + (0x4B2, "M", "ҳ"), + (0x4B3, "V"), + (0x4B4, "M", "ҵ"), + (0x4B5, "V"), + (0x4B6, "M", "ҷ"), + (0x4B7, "V"), + (0x4B8, "M", "ҹ"), + (0x4B9, "V"), + (0x4BA, "M", "һ"), + (0x4BB, "V"), + (0x4BC, "M", "ҽ"), + (0x4BD, "V"), + (0x4BE, "M", "ҿ"), + (0x4BF, "V"), + (0x4C0, "X"), + (0x4C1, "M", "ӂ"), + (0x4C2, "V"), + (0x4C3, "M", "ӄ"), + (0x4C4, "V"), + (0x4C5, "M", "ӆ"), + (0x4C6, "V"), + (0x4C7, "M", "ӈ"), + (0x4C8, "V"), + (0x4C9, "M", "ӊ"), + (0x4CA, "V"), + (0x4CB, "M", "ӌ"), + (0x4CC, "V"), + (0x4CD, "M", "ӎ"), + (0x4CE, "V"), + (0x4D0, "M", "ӑ"), + (0x4D1, "V"), + (0x4D2, "M", "ӓ"), + (0x4D3, "V"), + (0x4D4, "M", "ӕ"), + (0x4D5, "V"), + (0x4D6, "M", "ӗ"), + (0x4D7, "V"), + (0x4D8, "M", "ә"), + (0x4D9, "V"), + (0x4DA, "M", "ӛ"), + (0x4DB, "V"), + (0x4DC, "M", "ӝ"), + (0x4DD, "V"), + (0x4DE, "M", "ӟ"), + (0x4DF, "V"), + (0x4E0, "M", "ӡ"), + (0x4E1, "V"), + (0x4E2, "M", "ӣ"), + (0x4E3, "V"), + (0x4E4, "M", "ӥ"), + (0x4E5, "V"), + (0x4E6, "M", "ӧ"), + (0x4E7, "V"), + (0x4E8, "M", "ө"), + (0x4E9, "V"), + (0x4EA, "M", "ӫ"), + (0x4EB, "V"), + (0x4EC, "M", "ӭ"), + (0x4ED, "V"), + (0x4EE, "M", "ӯ"), + (0x4EF, "V"), + (0x4F0, "M", "ӱ"), + (0x4F1, "V"), + (0x4F2, "M", "ӳ"), + (0x4F3, "V"), + (0x4F4, "M", "ӵ"), + (0x4F5, "V"), + (0x4F6, "M", "ӷ"), + (0x4F7, "V"), + (0x4F8, "M", "ӹ"), + (0x4F9, "V"), + (0x4FA, "M", "ӻ"), + (0x4FB, "V"), + (0x4FC, "M", "ӽ"), + (0x4FD, "V"), + (0x4FE, "M", "ӿ"), + (0x4FF, "V"), + (0x500, "M", "ԁ"), + (0x501, "V"), + (0x502, "M", "ԃ"), + ] + + +def _seg_9() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x503, "V"), + (0x504, "M", "ԅ"), + (0x505, "V"), + (0x506, "M", "ԇ"), + (0x507, "V"), + (0x508, "M", "ԉ"), + (0x509, "V"), + (0x50A, "M", "ԋ"), + (0x50B, "V"), + (0x50C, "M", "ԍ"), + (0x50D, "V"), + (0x50E, "M", "ԏ"), + (0x50F, "V"), + (0x510, "M", "ԑ"), + (0x511, "V"), + (0x512, "M", "ԓ"), + (0x513, "V"), + (0x514, "M", "ԕ"), + (0x515, "V"), + (0x516, "M", "ԗ"), + (0x517, "V"), + (0x518, "M", "ԙ"), + (0x519, "V"), + (0x51A, "M", "ԛ"), + (0x51B, "V"), + (0x51C, "M", "ԝ"), + (0x51D, "V"), + (0x51E, "M", "ԟ"), + (0x51F, "V"), + (0x520, "M", "ԡ"), + (0x521, "V"), + (0x522, "M", "ԣ"), + (0x523, "V"), + (0x524, "M", "ԥ"), + (0x525, "V"), + (0x526, "M", "ԧ"), + (0x527, "V"), + (0x528, "M", "ԩ"), + (0x529, "V"), + (0x52A, "M", "ԫ"), + (0x52B, "V"), + (0x52C, "M", "ԭ"), + (0x52D, "V"), + (0x52E, "M", "ԯ"), + (0x52F, "V"), + (0x530, "X"), + (0x531, "M", "ա"), + (0x532, "M", "բ"), + (0x533, "M", "գ"), + (0x534, "M", "դ"), + (0x535, "M", "ե"), + (0x536, "M", "զ"), + (0x537, "M", "է"), + (0x538, "M", "ը"), + (0x539, "M", "թ"), + (0x53A, "M", "ժ"), + (0x53B, "M", "ի"), + (0x53C, "M", "լ"), + (0x53D, "M", "խ"), + (0x53E, "M", "ծ"), + (0x53F, "M", "կ"), + (0x540, "M", "հ"), + (0x541, "M", "ձ"), + (0x542, "M", "ղ"), + (0x543, "M", "ճ"), + (0x544, "M", "մ"), + (0x545, "M", "յ"), + (0x546, "M", "ն"), + (0x547, "M", "շ"), + (0x548, "M", "ո"), + (0x549, "M", "չ"), + (0x54A, "M", "պ"), + (0x54B, "M", "ջ"), + (0x54C, "M", "ռ"), + (0x54D, "M", "ս"), + (0x54E, "M", "վ"), + (0x54F, "M", "տ"), + (0x550, "M", "ր"), + (0x551, "M", "ց"), + (0x552, "M", "ւ"), + (0x553, "M", "փ"), + (0x554, "M", "ք"), + (0x555, "M", "օ"), + (0x556, "M", "ֆ"), + (0x557, "X"), + (0x559, "V"), + (0x587, "M", "եւ"), + (0x588, "V"), + (0x58B, "X"), + (0x58D, "V"), + (0x590, "X"), + (0x591, "V"), + (0x5C8, "X"), + (0x5D0, "V"), + (0x5EB, "X"), + (0x5EF, "V"), + (0x5F5, "X"), + (0x606, "V"), + (0x61C, "X"), + (0x61D, "V"), + ] + + +def _seg_10() -> List[Union[Tuple[int, str], 
Tuple[int, str, str]]]: + return [ + (0x675, "M", "اٴ"), + (0x676, "M", "وٴ"), + (0x677, "M", "ۇٴ"), + (0x678, "M", "يٴ"), + (0x679, "V"), + (0x6DD, "X"), + (0x6DE, "V"), + (0x70E, "X"), + (0x710, "V"), + (0x74B, "X"), + (0x74D, "V"), + (0x7B2, "X"), + (0x7C0, "V"), + (0x7FB, "X"), + (0x7FD, "V"), + (0x82E, "X"), + (0x830, "V"), + (0x83F, "X"), + (0x840, "V"), + (0x85C, "X"), + (0x85E, "V"), + (0x85F, "X"), + (0x860, "V"), + (0x86B, "X"), + (0x870, "V"), + (0x88F, "X"), + (0x898, "V"), + (0x8E2, "X"), + (0x8E3, "V"), + (0x958, "M", "क़"), + (0x959, "M", "ख़"), + (0x95A, "M", "ग़"), + (0x95B, "M", "ज़"), + (0x95C, "M", "ड़"), + (0x95D, "M", "ढ़"), + (0x95E, "M", "फ़"), + (0x95F, "M", "य़"), + (0x960, "V"), + (0x984, "X"), + (0x985, "V"), + (0x98D, "X"), + (0x98F, "V"), + (0x991, "X"), + (0x993, "V"), + (0x9A9, "X"), + (0x9AA, "V"), + (0x9B1, "X"), + (0x9B2, "V"), + (0x9B3, "X"), + (0x9B6, "V"), + (0x9BA, "X"), + (0x9BC, "V"), + (0x9C5, "X"), + (0x9C7, "V"), + (0x9C9, "X"), + (0x9CB, "V"), + (0x9CF, "X"), + (0x9D7, "V"), + (0x9D8, "X"), + (0x9DC, "M", "ড়"), + (0x9DD, "M", "ঢ়"), + (0x9DE, "X"), + (0x9DF, "M", "য়"), + (0x9E0, "V"), + (0x9E4, "X"), + (0x9E6, "V"), + (0x9FF, "X"), + (0xA01, "V"), + (0xA04, "X"), + (0xA05, "V"), + (0xA0B, "X"), + (0xA0F, "V"), + (0xA11, "X"), + (0xA13, "V"), + (0xA29, "X"), + (0xA2A, "V"), + (0xA31, "X"), + (0xA32, "V"), + (0xA33, "M", "ਲ਼"), + (0xA34, "X"), + (0xA35, "V"), + (0xA36, "M", "ਸ਼"), + (0xA37, "X"), + (0xA38, "V"), + (0xA3A, "X"), + (0xA3C, "V"), + (0xA3D, "X"), + (0xA3E, "V"), + (0xA43, "X"), + (0xA47, "V"), + (0xA49, "X"), + (0xA4B, "V"), + (0xA4E, "X"), + (0xA51, "V"), + (0xA52, "X"), + (0xA59, "M", "ਖ਼"), + (0xA5A, "M", "ਗ਼"), + (0xA5B, "M", "ਜ਼"), + (0xA5C, "V"), + (0xA5D, "X"), + ] + + +def _seg_11() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA5E, "M", "ਫ਼"), + (0xA5F, "X"), + (0xA66, "V"), + (0xA77, "X"), + (0xA81, "V"), + (0xA84, "X"), + (0xA85, "V"), + (0xA8E, "X"), + (0xA8F, "V"), + (0xA92, "X"), + (0xA93, "V"), + (0xAA9, "X"), + (0xAAA, "V"), + (0xAB1, "X"), + (0xAB2, "V"), + (0xAB4, "X"), + (0xAB5, "V"), + (0xABA, "X"), + (0xABC, "V"), + (0xAC6, "X"), + (0xAC7, "V"), + (0xACA, "X"), + (0xACB, "V"), + (0xACE, "X"), + (0xAD0, "V"), + (0xAD1, "X"), + (0xAE0, "V"), + (0xAE4, "X"), + (0xAE6, "V"), + (0xAF2, "X"), + (0xAF9, "V"), + (0xB00, "X"), + (0xB01, "V"), + (0xB04, "X"), + (0xB05, "V"), + (0xB0D, "X"), + (0xB0F, "V"), + (0xB11, "X"), + (0xB13, "V"), + (0xB29, "X"), + (0xB2A, "V"), + (0xB31, "X"), + (0xB32, "V"), + (0xB34, "X"), + (0xB35, "V"), + (0xB3A, "X"), + (0xB3C, "V"), + (0xB45, "X"), + (0xB47, "V"), + (0xB49, "X"), + (0xB4B, "V"), + (0xB4E, "X"), + (0xB55, "V"), + (0xB58, "X"), + (0xB5C, "M", "ଡ଼"), + (0xB5D, "M", "ଢ଼"), + (0xB5E, "X"), + (0xB5F, "V"), + (0xB64, "X"), + (0xB66, "V"), + (0xB78, "X"), + (0xB82, "V"), + (0xB84, "X"), + (0xB85, "V"), + (0xB8B, "X"), + (0xB8E, "V"), + (0xB91, "X"), + (0xB92, "V"), + (0xB96, "X"), + (0xB99, "V"), + (0xB9B, "X"), + (0xB9C, "V"), + (0xB9D, "X"), + (0xB9E, "V"), + (0xBA0, "X"), + (0xBA3, "V"), + (0xBA5, "X"), + (0xBA8, "V"), + (0xBAB, "X"), + (0xBAE, "V"), + (0xBBA, "X"), + (0xBBE, "V"), + (0xBC3, "X"), + (0xBC6, "V"), + (0xBC9, "X"), + (0xBCA, "V"), + (0xBCE, "X"), + (0xBD0, "V"), + (0xBD1, "X"), + (0xBD7, "V"), + (0xBD8, "X"), + (0xBE6, "V"), + (0xBFB, "X"), + (0xC00, "V"), + (0xC0D, "X"), + (0xC0E, "V"), + (0xC11, "X"), + (0xC12, "V"), + (0xC29, "X"), + (0xC2A, "V"), + ] + + +def _seg_12() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + 
(0xC3A, "X"), + (0xC3C, "V"), + (0xC45, "X"), + (0xC46, "V"), + (0xC49, "X"), + (0xC4A, "V"), + (0xC4E, "X"), + (0xC55, "V"), + (0xC57, "X"), + (0xC58, "V"), + (0xC5B, "X"), + (0xC5D, "V"), + (0xC5E, "X"), + (0xC60, "V"), + (0xC64, "X"), + (0xC66, "V"), + (0xC70, "X"), + (0xC77, "V"), + (0xC8D, "X"), + (0xC8E, "V"), + (0xC91, "X"), + (0xC92, "V"), + (0xCA9, "X"), + (0xCAA, "V"), + (0xCB4, "X"), + (0xCB5, "V"), + (0xCBA, "X"), + (0xCBC, "V"), + (0xCC5, "X"), + (0xCC6, "V"), + (0xCC9, "X"), + (0xCCA, "V"), + (0xCCE, "X"), + (0xCD5, "V"), + (0xCD7, "X"), + (0xCDD, "V"), + (0xCDF, "X"), + (0xCE0, "V"), + (0xCE4, "X"), + (0xCE6, "V"), + (0xCF0, "X"), + (0xCF1, "V"), + (0xCF4, "X"), + (0xD00, "V"), + (0xD0D, "X"), + (0xD0E, "V"), + (0xD11, "X"), + (0xD12, "V"), + (0xD45, "X"), + (0xD46, "V"), + (0xD49, "X"), + (0xD4A, "V"), + (0xD50, "X"), + (0xD54, "V"), + (0xD64, "X"), + (0xD66, "V"), + (0xD80, "X"), + (0xD81, "V"), + (0xD84, "X"), + (0xD85, "V"), + (0xD97, "X"), + (0xD9A, "V"), + (0xDB2, "X"), + (0xDB3, "V"), + (0xDBC, "X"), + (0xDBD, "V"), + (0xDBE, "X"), + (0xDC0, "V"), + (0xDC7, "X"), + (0xDCA, "V"), + (0xDCB, "X"), + (0xDCF, "V"), + (0xDD5, "X"), + (0xDD6, "V"), + (0xDD7, "X"), + (0xDD8, "V"), + (0xDE0, "X"), + (0xDE6, "V"), + (0xDF0, "X"), + (0xDF2, "V"), + (0xDF5, "X"), + (0xE01, "V"), + (0xE33, "M", "ํา"), + (0xE34, "V"), + (0xE3B, "X"), + (0xE3F, "V"), + (0xE5C, "X"), + (0xE81, "V"), + (0xE83, "X"), + (0xE84, "V"), + (0xE85, "X"), + (0xE86, "V"), + (0xE8B, "X"), + (0xE8C, "V"), + (0xEA4, "X"), + (0xEA5, "V"), + (0xEA6, "X"), + (0xEA7, "V"), + (0xEB3, "M", "ໍາ"), + (0xEB4, "V"), + ] + + +def _seg_13() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xEBE, "X"), + (0xEC0, "V"), + (0xEC5, "X"), + (0xEC6, "V"), + (0xEC7, "X"), + (0xEC8, "V"), + (0xECF, "X"), + (0xED0, "V"), + (0xEDA, "X"), + (0xEDC, "M", "ຫນ"), + (0xEDD, "M", "ຫມ"), + (0xEDE, "V"), + (0xEE0, "X"), + (0xF00, "V"), + (0xF0C, "M", "་"), + (0xF0D, "V"), + (0xF43, "M", "གྷ"), + (0xF44, "V"), + (0xF48, "X"), + (0xF49, "V"), + (0xF4D, "M", "ཌྷ"), + (0xF4E, "V"), + (0xF52, "M", "དྷ"), + (0xF53, "V"), + (0xF57, "M", "བྷ"), + (0xF58, "V"), + (0xF5C, "M", "ཛྷ"), + (0xF5D, "V"), + (0xF69, "M", "ཀྵ"), + (0xF6A, "V"), + (0xF6D, "X"), + (0xF71, "V"), + (0xF73, "M", "ཱི"), + (0xF74, "V"), + (0xF75, "M", "ཱུ"), + (0xF76, "M", "ྲྀ"), + (0xF77, "M", "ྲཱྀ"), + (0xF78, "M", "ླྀ"), + (0xF79, "M", "ླཱྀ"), + (0xF7A, "V"), + (0xF81, "M", "ཱྀ"), + (0xF82, "V"), + (0xF93, "M", "ྒྷ"), + (0xF94, "V"), + (0xF98, "X"), + (0xF99, "V"), + (0xF9D, "M", "ྜྷ"), + (0xF9E, "V"), + (0xFA2, "M", "ྡྷ"), + (0xFA3, "V"), + (0xFA7, "M", "ྦྷ"), + (0xFA8, "V"), + (0xFAC, "M", "ྫྷ"), + (0xFAD, "V"), + (0xFB9, "M", "ྐྵ"), + (0xFBA, "V"), + (0xFBD, "X"), + (0xFBE, "V"), + (0xFCD, "X"), + (0xFCE, "V"), + (0xFDB, "X"), + (0x1000, "V"), + (0x10A0, "X"), + (0x10C7, "M", "ⴧ"), + (0x10C8, "X"), + (0x10CD, "M", "ⴭ"), + (0x10CE, "X"), + (0x10D0, "V"), + (0x10FC, "M", "ნ"), + (0x10FD, "V"), + (0x115F, "X"), + (0x1161, "V"), + (0x1249, "X"), + (0x124A, "V"), + (0x124E, "X"), + (0x1250, "V"), + (0x1257, "X"), + (0x1258, "V"), + (0x1259, "X"), + (0x125A, "V"), + (0x125E, "X"), + (0x1260, "V"), + (0x1289, "X"), + (0x128A, "V"), + (0x128E, "X"), + (0x1290, "V"), + (0x12B1, "X"), + (0x12B2, "V"), + (0x12B6, "X"), + (0x12B8, "V"), + (0x12BF, "X"), + (0x12C0, "V"), + (0x12C1, "X"), + (0x12C2, "V"), + (0x12C6, "X"), + (0x12C8, "V"), + (0x12D7, "X"), + (0x12D8, "V"), + (0x1311, "X"), + (0x1312, "V"), + ] + + +def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, 
str, str]]]: + return [ + (0x1316, "X"), + (0x1318, "V"), + (0x135B, "X"), + (0x135D, "V"), + (0x137D, "X"), + (0x1380, "V"), + (0x139A, "X"), + (0x13A0, "V"), + (0x13F6, "X"), + (0x13F8, "M", "Ᏸ"), + (0x13F9, "M", "Ᏹ"), + (0x13FA, "M", "Ᏺ"), + (0x13FB, "M", "Ᏻ"), + (0x13FC, "M", "Ᏼ"), + (0x13FD, "M", "Ᏽ"), + (0x13FE, "X"), + (0x1400, "V"), + (0x1680, "X"), + (0x1681, "V"), + (0x169D, "X"), + (0x16A0, "V"), + (0x16F9, "X"), + (0x1700, "V"), + (0x1716, "X"), + (0x171F, "V"), + (0x1737, "X"), + (0x1740, "V"), + (0x1754, "X"), + (0x1760, "V"), + (0x176D, "X"), + (0x176E, "V"), + (0x1771, "X"), + (0x1772, "V"), + (0x1774, "X"), + (0x1780, "V"), + (0x17B4, "X"), + (0x17B6, "V"), + (0x17DE, "X"), + (0x17E0, "V"), + (0x17EA, "X"), + (0x17F0, "V"), + (0x17FA, "X"), + (0x1800, "V"), + (0x1806, "X"), + (0x1807, "V"), + (0x180B, "I"), + (0x180E, "X"), + (0x180F, "I"), + (0x1810, "V"), + (0x181A, "X"), + (0x1820, "V"), + (0x1879, "X"), + (0x1880, "V"), + (0x18AB, "X"), + (0x18B0, "V"), + (0x18F6, "X"), + (0x1900, "V"), + (0x191F, "X"), + (0x1920, "V"), + (0x192C, "X"), + (0x1930, "V"), + (0x193C, "X"), + (0x1940, "V"), + (0x1941, "X"), + (0x1944, "V"), + (0x196E, "X"), + (0x1970, "V"), + (0x1975, "X"), + (0x1980, "V"), + (0x19AC, "X"), + (0x19B0, "V"), + (0x19CA, "X"), + (0x19D0, "V"), + (0x19DB, "X"), + (0x19DE, "V"), + (0x1A1C, "X"), + (0x1A1E, "V"), + (0x1A5F, "X"), + (0x1A60, "V"), + (0x1A7D, "X"), + (0x1A7F, "V"), + (0x1A8A, "X"), + (0x1A90, "V"), + (0x1A9A, "X"), + (0x1AA0, "V"), + (0x1AAE, "X"), + (0x1AB0, "V"), + (0x1ACF, "X"), + (0x1B00, "V"), + (0x1B4D, "X"), + (0x1B50, "V"), + (0x1B7F, "X"), + (0x1B80, "V"), + (0x1BF4, "X"), + (0x1BFC, "V"), + (0x1C38, "X"), + (0x1C3B, "V"), + (0x1C4A, "X"), + (0x1C4D, "V"), + (0x1C80, "M", "в"), + ] + + +def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1C81, "M", "д"), + (0x1C82, "M", "о"), + (0x1C83, "M", "с"), + (0x1C84, "M", "т"), + (0x1C86, "M", "ъ"), + (0x1C87, "M", "ѣ"), + (0x1C88, "M", "ꙋ"), + (0x1C89, "X"), + (0x1C90, "M", "ა"), + (0x1C91, "M", "ბ"), + (0x1C92, "M", "გ"), + (0x1C93, "M", "დ"), + (0x1C94, "M", "ე"), + (0x1C95, "M", "ვ"), + (0x1C96, "M", "ზ"), + (0x1C97, "M", "თ"), + (0x1C98, "M", "ი"), + (0x1C99, "M", "კ"), + (0x1C9A, "M", "ლ"), + (0x1C9B, "M", "მ"), + (0x1C9C, "M", "ნ"), + (0x1C9D, "M", "ო"), + (0x1C9E, "M", "პ"), + (0x1C9F, "M", "ჟ"), + (0x1CA0, "M", "რ"), + (0x1CA1, "M", "ს"), + (0x1CA2, "M", "ტ"), + (0x1CA3, "M", "უ"), + (0x1CA4, "M", "ფ"), + (0x1CA5, "M", "ქ"), + (0x1CA6, "M", "ღ"), + (0x1CA7, "M", "ყ"), + (0x1CA8, "M", "შ"), + (0x1CA9, "M", "ჩ"), + (0x1CAA, "M", "ც"), + (0x1CAB, "M", "ძ"), + (0x1CAC, "M", "წ"), + (0x1CAD, "M", "ჭ"), + (0x1CAE, "M", "ხ"), + (0x1CAF, "M", "ჯ"), + (0x1CB0, "M", "ჰ"), + (0x1CB1, "M", "ჱ"), + (0x1CB2, "M", "ჲ"), + (0x1CB3, "M", "ჳ"), + (0x1CB4, "M", "ჴ"), + (0x1CB5, "M", "ჵ"), + (0x1CB6, "M", "ჶ"), + (0x1CB7, "M", "ჷ"), + (0x1CB8, "M", "ჸ"), + (0x1CB9, "M", "ჹ"), + (0x1CBA, "M", "ჺ"), + (0x1CBB, "X"), + (0x1CBD, "M", "ჽ"), + (0x1CBE, "M", "ჾ"), + (0x1CBF, "M", "ჿ"), + (0x1CC0, "V"), + (0x1CC8, "X"), + (0x1CD0, "V"), + (0x1CFB, "X"), + (0x1D00, "V"), + (0x1D2C, "M", "a"), + (0x1D2D, "M", "æ"), + (0x1D2E, "M", "b"), + (0x1D2F, "V"), + (0x1D30, "M", "d"), + (0x1D31, "M", "e"), + (0x1D32, "M", "ǝ"), + (0x1D33, "M", "g"), + (0x1D34, "M", "h"), + (0x1D35, "M", "i"), + (0x1D36, "M", "j"), + (0x1D37, "M", "k"), + (0x1D38, "M", "l"), + (0x1D39, "M", "m"), + (0x1D3A, "M", "n"), + (0x1D3B, "V"), + (0x1D3C, "M", "o"), + (0x1D3D, "M", "ȣ"), + (0x1D3E, "M", "p"), + (0x1D3F, 
"M", "r"), + (0x1D40, "M", "t"), + (0x1D41, "M", "u"), + (0x1D42, "M", "w"), + (0x1D43, "M", "a"), + (0x1D44, "M", "ɐ"), + (0x1D45, "M", "ɑ"), + (0x1D46, "M", "ᴂ"), + (0x1D47, "M", "b"), + (0x1D48, "M", "d"), + (0x1D49, "M", "e"), + (0x1D4A, "M", "ə"), + (0x1D4B, "M", "ɛ"), + (0x1D4C, "M", "ɜ"), + (0x1D4D, "M", "g"), + (0x1D4E, "V"), + (0x1D4F, "M", "k"), + (0x1D50, "M", "m"), + (0x1D51, "M", "ŋ"), + (0x1D52, "M", "o"), + (0x1D53, "M", "ɔ"), + ] + + +def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D54, "M", "ᴖ"), + (0x1D55, "M", "ᴗ"), + (0x1D56, "M", "p"), + (0x1D57, "M", "t"), + (0x1D58, "M", "u"), + (0x1D59, "M", "ᴝ"), + (0x1D5A, "M", "ɯ"), + (0x1D5B, "M", "v"), + (0x1D5C, "M", "ᴥ"), + (0x1D5D, "M", "β"), + (0x1D5E, "M", "γ"), + (0x1D5F, "M", "δ"), + (0x1D60, "M", "φ"), + (0x1D61, "M", "χ"), + (0x1D62, "M", "i"), + (0x1D63, "M", "r"), + (0x1D64, "M", "u"), + (0x1D65, "M", "v"), + (0x1D66, "M", "β"), + (0x1D67, "M", "γ"), + (0x1D68, "M", "ρ"), + (0x1D69, "M", "φ"), + (0x1D6A, "M", "χ"), + (0x1D6B, "V"), + (0x1D78, "M", "н"), + (0x1D79, "V"), + (0x1D9B, "M", "ɒ"), + (0x1D9C, "M", "c"), + (0x1D9D, "M", "ɕ"), + (0x1D9E, "M", "ð"), + (0x1D9F, "M", "ɜ"), + (0x1DA0, "M", "f"), + (0x1DA1, "M", "ɟ"), + (0x1DA2, "M", "ɡ"), + (0x1DA3, "M", "ɥ"), + (0x1DA4, "M", "ɨ"), + (0x1DA5, "M", "ɩ"), + (0x1DA6, "M", "ɪ"), + (0x1DA7, "M", "ᵻ"), + (0x1DA8, "M", "ʝ"), + (0x1DA9, "M", "ɭ"), + (0x1DAA, "M", "ᶅ"), + (0x1DAB, "M", "ʟ"), + (0x1DAC, "M", "ɱ"), + (0x1DAD, "M", "ɰ"), + (0x1DAE, "M", "ɲ"), + (0x1DAF, "M", "ɳ"), + (0x1DB0, "M", "ɴ"), + (0x1DB1, "M", "ɵ"), + (0x1DB2, "M", "ɸ"), + (0x1DB3, "M", "ʂ"), + (0x1DB4, "M", "ʃ"), + (0x1DB5, "M", "ƫ"), + (0x1DB6, "M", "ʉ"), + (0x1DB7, "M", "ʊ"), + (0x1DB8, "M", "ᴜ"), + (0x1DB9, "M", "ʋ"), + (0x1DBA, "M", "ʌ"), + (0x1DBB, "M", "z"), + (0x1DBC, "M", "ʐ"), + (0x1DBD, "M", "ʑ"), + (0x1DBE, "M", "ʒ"), + (0x1DBF, "M", "θ"), + (0x1DC0, "V"), + (0x1E00, "M", "ḁ"), + (0x1E01, "V"), + (0x1E02, "M", "ḃ"), + (0x1E03, "V"), + (0x1E04, "M", "ḅ"), + (0x1E05, "V"), + (0x1E06, "M", "ḇ"), + (0x1E07, "V"), + (0x1E08, "M", "ḉ"), + (0x1E09, "V"), + (0x1E0A, "M", "ḋ"), + (0x1E0B, "V"), + (0x1E0C, "M", "ḍ"), + (0x1E0D, "V"), + (0x1E0E, "M", "ḏ"), + (0x1E0F, "V"), + (0x1E10, "M", "ḑ"), + (0x1E11, "V"), + (0x1E12, "M", "ḓ"), + (0x1E13, "V"), + (0x1E14, "M", "ḕ"), + (0x1E15, "V"), + (0x1E16, "M", "ḗ"), + (0x1E17, "V"), + (0x1E18, "M", "ḙ"), + (0x1E19, "V"), + (0x1E1A, "M", "ḛ"), + (0x1E1B, "V"), + (0x1E1C, "M", "ḝ"), + (0x1E1D, "V"), + (0x1E1E, "M", "ḟ"), + (0x1E1F, "V"), + (0x1E20, "M", "ḡ"), + (0x1E21, "V"), + (0x1E22, "M", "ḣ"), + (0x1E23, "V"), + ] + + +def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E24, "M", "ḥ"), + (0x1E25, "V"), + (0x1E26, "M", "ḧ"), + (0x1E27, "V"), + (0x1E28, "M", "ḩ"), + (0x1E29, "V"), + (0x1E2A, "M", "ḫ"), + (0x1E2B, "V"), + (0x1E2C, "M", "ḭ"), + (0x1E2D, "V"), + (0x1E2E, "M", "ḯ"), + (0x1E2F, "V"), + (0x1E30, "M", "ḱ"), + (0x1E31, "V"), + (0x1E32, "M", "ḳ"), + (0x1E33, "V"), + (0x1E34, "M", "ḵ"), + (0x1E35, "V"), + (0x1E36, "M", "ḷ"), + (0x1E37, "V"), + (0x1E38, "M", "ḹ"), + (0x1E39, "V"), + (0x1E3A, "M", "ḻ"), + (0x1E3B, "V"), + (0x1E3C, "M", "ḽ"), + (0x1E3D, "V"), + (0x1E3E, "M", "ḿ"), + (0x1E3F, "V"), + (0x1E40, "M", "ṁ"), + (0x1E41, "V"), + (0x1E42, "M", "ṃ"), + (0x1E43, "V"), + (0x1E44, "M", "ṅ"), + (0x1E45, "V"), + (0x1E46, "M", "ṇ"), + (0x1E47, "V"), + (0x1E48, "M", "ṉ"), + (0x1E49, "V"), + (0x1E4A, "M", "ṋ"), + (0x1E4B, "V"), + (0x1E4C, "M", "ṍ"), + (0x1E4D, "V"), + (0x1E4E, "M", 
"ṏ"), + (0x1E4F, "V"), + (0x1E50, "M", "ṑ"), + (0x1E51, "V"), + (0x1E52, "M", "ṓ"), + (0x1E53, "V"), + (0x1E54, "M", "ṕ"), + (0x1E55, "V"), + (0x1E56, "M", "ṗ"), + (0x1E57, "V"), + (0x1E58, "M", "ṙ"), + (0x1E59, "V"), + (0x1E5A, "M", "ṛ"), + (0x1E5B, "V"), + (0x1E5C, "M", "ṝ"), + (0x1E5D, "V"), + (0x1E5E, "M", "ṟ"), + (0x1E5F, "V"), + (0x1E60, "M", "ṡ"), + (0x1E61, "V"), + (0x1E62, "M", "ṣ"), + (0x1E63, "V"), + (0x1E64, "M", "ṥ"), + (0x1E65, "V"), + (0x1E66, "M", "ṧ"), + (0x1E67, "V"), + (0x1E68, "M", "ṩ"), + (0x1E69, "V"), + (0x1E6A, "M", "ṫ"), + (0x1E6B, "V"), + (0x1E6C, "M", "ṭ"), + (0x1E6D, "V"), + (0x1E6E, "M", "ṯ"), + (0x1E6F, "V"), + (0x1E70, "M", "ṱ"), + (0x1E71, "V"), + (0x1E72, "M", "ṳ"), + (0x1E73, "V"), + (0x1E74, "M", "ṵ"), + (0x1E75, "V"), + (0x1E76, "M", "ṷ"), + (0x1E77, "V"), + (0x1E78, "M", "ṹ"), + (0x1E79, "V"), + (0x1E7A, "M", "ṻ"), + (0x1E7B, "V"), + (0x1E7C, "M", "ṽ"), + (0x1E7D, "V"), + (0x1E7E, "M", "ṿ"), + (0x1E7F, "V"), + (0x1E80, "M", "ẁ"), + (0x1E81, "V"), + (0x1E82, "M", "ẃ"), + (0x1E83, "V"), + (0x1E84, "M", "ẅ"), + (0x1E85, "V"), + (0x1E86, "M", "ẇ"), + (0x1E87, "V"), + ] + + +def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E88, "M", "ẉ"), + (0x1E89, "V"), + (0x1E8A, "M", "ẋ"), + (0x1E8B, "V"), + (0x1E8C, "M", "ẍ"), + (0x1E8D, "V"), + (0x1E8E, "M", "ẏ"), + (0x1E8F, "V"), + (0x1E90, "M", "ẑ"), + (0x1E91, "V"), + (0x1E92, "M", "ẓ"), + (0x1E93, "V"), + (0x1E94, "M", "ẕ"), + (0x1E95, "V"), + (0x1E9A, "M", "aʾ"), + (0x1E9B, "M", "ṡ"), + (0x1E9C, "V"), + (0x1E9E, "M", "ß"), + (0x1E9F, "V"), + (0x1EA0, "M", "ạ"), + (0x1EA1, "V"), + (0x1EA2, "M", "ả"), + (0x1EA3, "V"), + (0x1EA4, "M", "ấ"), + (0x1EA5, "V"), + (0x1EA6, "M", "ầ"), + (0x1EA7, "V"), + (0x1EA8, "M", "ẩ"), + (0x1EA9, "V"), + (0x1EAA, "M", "ẫ"), + (0x1EAB, "V"), + (0x1EAC, "M", "ậ"), + (0x1EAD, "V"), + (0x1EAE, "M", "ắ"), + (0x1EAF, "V"), + (0x1EB0, "M", "ằ"), + (0x1EB1, "V"), + (0x1EB2, "M", "ẳ"), + (0x1EB3, "V"), + (0x1EB4, "M", "ẵ"), + (0x1EB5, "V"), + (0x1EB6, "M", "ặ"), + (0x1EB7, "V"), + (0x1EB8, "M", "ẹ"), + (0x1EB9, "V"), + (0x1EBA, "M", "ẻ"), + (0x1EBB, "V"), + (0x1EBC, "M", "ẽ"), + (0x1EBD, "V"), + (0x1EBE, "M", "ế"), + (0x1EBF, "V"), + (0x1EC0, "M", "ề"), + (0x1EC1, "V"), + (0x1EC2, "M", "ể"), + (0x1EC3, "V"), + (0x1EC4, "M", "ễ"), + (0x1EC5, "V"), + (0x1EC6, "M", "ệ"), + (0x1EC7, "V"), + (0x1EC8, "M", "ỉ"), + (0x1EC9, "V"), + (0x1ECA, "M", "ị"), + (0x1ECB, "V"), + (0x1ECC, "M", "ọ"), + (0x1ECD, "V"), + (0x1ECE, "M", "ỏ"), + (0x1ECF, "V"), + (0x1ED0, "M", "ố"), + (0x1ED1, "V"), + (0x1ED2, "M", "ồ"), + (0x1ED3, "V"), + (0x1ED4, "M", "ổ"), + (0x1ED5, "V"), + (0x1ED6, "M", "ỗ"), + (0x1ED7, "V"), + (0x1ED8, "M", "ộ"), + (0x1ED9, "V"), + (0x1EDA, "M", "ớ"), + (0x1EDB, "V"), + (0x1EDC, "M", "ờ"), + (0x1EDD, "V"), + (0x1EDE, "M", "ở"), + (0x1EDF, "V"), + (0x1EE0, "M", "ỡ"), + (0x1EE1, "V"), + (0x1EE2, "M", "ợ"), + (0x1EE3, "V"), + (0x1EE4, "M", "ụ"), + (0x1EE5, "V"), + (0x1EE6, "M", "ủ"), + (0x1EE7, "V"), + (0x1EE8, "M", "ứ"), + (0x1EE9, "V"), + (0x1EEA, "M", "ừ"), + (0x1EEB, "V"), + (0x1EEC, "M", "ử"), + (0x1EED, "V"), + (0x1EEE, "M", "ữ"), + (0x1EEF, "V"), + (0x1EF0, "M", "ự"), + ] + + +def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EF1, "V"), + (0x1EF2, "M", "ỳ"), + (0x1EF3, "V"), + (0x1EF4, "M", "ỵ"), + (0x1EF5, "V"), + (0x1EF6, "M", "ỷ"), + (0x1EF7, "V"), + (0x1EF8, "M", "ỹ"), + (0x1EF9, "V"), + (0x1EFA, "M", "ỻ"), + (0x1EFB, "V"), + (0x1EFC, "M", "ỽ"), + (0x1EFD, "V"), + (0x1EFE, "M", "ỿ"), + (0x1EFF, "V"), + (0x1F08, "M", 
"ἀ"), + (0x1F09, "M", "ἁ"), + (0x1F0A, "M", "ἂ"), + (0x1F0B, "M", "ἃ"), + (0x1F0C, "M", "ἄ"), + (0x1F0D, "M", "ἅ"), + (0x1F0E, "M", "ἆ"), + (0x1F0F, "M", "ἇ"), + (0x1F10, "V"), + (0x1F16, "X"), + (0x1F18, "M", "ἐ"), + (0x1F19, "M", "ἑ"), + (0x1F1A, "M", "ἒ"), + (0x1F1B, "M", "ἓ"), + (0x1F1C, "M", "ἔ"), + (0x1F1D, "M", "ἕ"), + (0x1F1E, "X"), + (0x1F20, "V"), + (0x1F28, "M", "ἠ"), + (0x1F29, "M", "ἡ"), + (0x1F2A, "M", "ἢ"), + (0x1F2B, "M", "ἣ"), + (0x1F2C, "M", "ἤ"), + (0x1F2D, "M", "ἥ"), + (0x1F2E, "M", "ἦ"), + (0x1F2F, "M", "ἧ"), + (0x1F30, "V"), + (0x1F38, "M", "ἰ"), + (0x1F39, "M", "ἱ"), + (0x1F3A, "M", "ἲ"), + (0x1F3B, "M", "ἳ"), + (0x1F3C, "M", "ἴ"), + (0x1F3D, "M", "ἵ"), + (0x1F3E, "M", "ἶ"), + (0x1F3F, "M", "ἷ"), + (0x1F40, "V"), + (0x1F46, "X"), + (0x1F48, "M", "ὀ"), + (0x1F49, "M", "ὁ"), + (0x1F4A, "M", "ὂ"), + (0x1F4B, "M", "ὃ"), + (0x1F4C, "M", "ὄ"), + (0x1F4D, "M", "ὅ"), + (0x1F4E, "X"), + (0x1F50, "V"), + (0x1F58, "X"), + (0x1F59, "M", "ὑ"), + (0x1F5A, "X"), + (0x1F5B, "M", "ὓ"), + (0x1F5C, "X"), + (0x1F5D, "M", "ὕ"), + (0x1F5E, "X"), + (0x1F5F, "M", "ὗ"), + (0x1F60, "V"), + (0x1F68, "M", "ὠ"), + (0x1F69, "M", "ὡ"), + (0x1F6A, "M", "ὢ"), + (0x1F6B, "M", "ὣ"), + (0x1F6C, "M", "ὤ"), + (0x1F6D, "M", "ὥ"), + (0x1F6E, "M", "ὦ"), + (0x1F6F, "M", "ὧ"), + (0x1F70, "V"), + (0x1F71, "M", "ά"), + (0x1F72, "V"), + (0x1F73, "M", "έ"), + (0x1F74, "V"), + (0x1F75, "M", "ή"), + (0x1F76, "V"), + (0x1F77, "M", "ί"), + (0x1F78, "V"), + (0x1F79, "M", "ό"), + (0x1F7A, "V"), + (0x1F7B, "M", "ύ"), + (0x1F7C, "V"), + (0x1F7D, "M", "ώ"), + (0x1F7E, "X"), + (0x1F80, "M", "ἀι"), + (0x1F81, "M", "ἁι"), + (0x1F82, "M", "ἂι"), + (0x1F83, "M", "ἃι"), + (0x1F84, "M", "ἄι"), + (0x1F85, "M", "ἅι"), + (0x1F86, "M", "ἆι"), + (0x1F87, "M", "ἇι"), + ] + + +def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F88, "M", "ἀι"), + (0x1F89, "M", "ἁι"), + (0x1F8A, "M", "ἂι"), + (0x1F8B, "M", "ἃι"), + (0x1F8C, "M", "ἄι"), + (0x1F8D, "M", "ἅι"), + (0x1F8E, "M", "ἆι"), + (0x1F8F, "M", "ἇι"), + (0x1F90, "M", "ἠι"), + (0x1F91, "M", "ἡι"), + (0x1F92, "M", "ἢι"), + (0x1F93, "M", "ἣι"), + (0x1F94, "M", "ἤι"), + (0x1F95, "M", "ἥι"), + (0x1F96, "M", "ἦι"), + (0x1F97, "M", "ἧι"), + (0x1F98, "M", "ἠι"), + (0x1F99, "M", "ἡι"), + (0x1F9A, "M", "ἢι"), + (0x1F9B, "M", "ἣι"), + (0x1F9C, "M", "ἤι"), + (0x1F9D, "M", "ἥι"), + (0x1F9E, "M", "ἦι"), + (0x1F9F, "M", "ἧι"), + (0x1FA0, "M", "ὠι"), + (0x1FA1, "M", "ὡι"), + (0x1FA2, "M", "ὢι"), + (0x1FA3, "M", "ὣι"), + (0x1FA4, "M", "ὤι"), + (0x1FA5, "M", "ὥι"), + (0x1FA6, "M", "ὦι"), + (0x1FA7, "M", "ὧι"), + (0x1FA8, "M", "ὠι"), + (0x1FA9, "M", "ὡι"), + (0x1FAA, "M", "ὢι"), + (0x1FAB, "M", "ὣι"), + (0x1FAC, "M", "ὤι"), + (0x1FAD, "M", "ὥι"), + (0x1FAE, "M", "ὦι"), + (0x1FAF, "M", "ὧι"), + (0x1FB0, "V"), + (0x1FB2, "M", "ὰι"), + (0x1FB3, "M", "αι"), + (0x1FB4, "M", "άι"), + (0x1FB5, "X"), + (0x1FB6, "V"), + (0x1FB7, "M", "ᾶι"), + (0x1FB8, "M", "ᾰ"), + (0x1FB9, "M", "ᾱ"), + (0x1FBA, "M", "ὰ"), + (0x1FBB, "M", "ά"), + (0x1FBC, "M", "αι"), + (0x1FBD, "3", " ̓"), + (0x1FBE, "M", "ι"), + (0x1FBF, "3", " ̓"), + (0x1FC0, "3", " ͂"), + (0x1FC1, "3", " ̈͂"), + (0x1FC2, "M", "ὴι"), + (0x1FC3, "M", "ηι"), + (0x1FC4, "M", "ήι"), + (0x1FC5, "X"), + (0x1FC6, "V"), + (0x1FC7, "M", "ῆι"), + (0x1FC8, "M", "ὲ"), + (0x1FC9, "M", "έ"), + (0x1FCA, "M", "ὴ"), + (0x1FCB, "M", "ή"), + (0x1FCC, "M", "ηι"), + (0x1FCD, "3", " ̓̀"), + (0x1FCE, "3", " ̓́"), + (0x1FCF, "3", " ̓͂"), + (0x1FD0, "V"), + (0x1FD3, "M", "ΐ"), + (0x1FD4, "X"), + (0x1FD6, "V"), + (0x1FD8, "M", "ῐ"), + (0x1FD9, "M", 
"ῑ"), + (0x1FDA, "M", "ὶ"), + (0x1FDB, "M", "ί"), + (0x1FDC, "X"), + (0x1FDD, "3", " ̔̀"), + (0x1FDE, "3", " ̔́"), + (0x1FDF, "3", " ̔͂"), + (0x1FE0, "V"), + (0x1FE3, "M", "ΰ"), + (0x1FE4, "V"), + (0x1FE8, "M", "ῠ"), + (0x1FE9, "M", "ῡ"), + (0x1FEA, "M", "ὺ"), + (0x1FEB, "M", "ύ"), + (0x1FEC, "M", "ῥ"), + (0x1FED, "3", " ̈̀"), + (0x1FEE, "3", " ̈́"), + (0x1FEF, "3", "`"), + (0x1FF0, "X"), + (0x1FF2, "M", "ὼι"), + (0x1FF3, "M", "ωι"), + (0x1FF4, "M", "ώι"), + (0x1FF5, "X"), + (0x1FF6, "V"), + ] + + +def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1FF7, "M", "ῶι"), + (0x1FF8, "M", "ὸ"), + (0x1FF9, "M", "ό"), + (0x1FFA, "M", "ὼ"), + (0x1FFB, "M", "ώ"), + (0x1FFC, "M", "ωι"), + (0x1FFD, "3", " ́"), + (0x1FFE, "3", " ̔"), + (0x1FFF, "X"), + (0x2000, "3", " "), + (0x200B, "I"), + (0x200C, "D", ""), + (0x200E, "X"), + (0x2010, "V"), + (0x2011, "M", "‐"), + (0x2012, "V"), + (0x2017, "3", " ̳"), + (0x2018, "V"), + (0x2024, "X"), + (0x2027, "V"), + (0x2028, "X"), + (0x202F, "3", " "), + (0x2030, "V"), + (0x2033, "M", "′′"), + (0x2034, "M", "′′′"), + (0x2035, "V"), + (0x2036, "M", "‵‵"), + (0x2037, "M", "‵‵‵"), + (0x2038, "V"), + (0x203C, "3", "!!"), + (0x203D, "V"), + (0x203E, "3", " ̅"), + (0x203F, "V"), + (0x2047, "3", "??"), + (0x2048, "3", "?!"), + (0x2049, "3", "!?"), + (0x204A, "V"), + (0x2057, "M", "′′′′"), + (0x2058, "V"), + (0x205F, "3", " "), + (0x2060, "I"), + (0x2061, "X"), + (0x2064, "I"), + (0x2065, "X"), + (0x2070, "M", "0"), + (0x2071, "M", "i"), + (0x2072, "X"), + (0x2074, "M", "4"), + (0x2075, "M", "5"), + (0x2076, "M", "6"), + (0x2077, "M", "7"), + (0x2078, "M", "8"), + (0x2079, "M", "9"), + (0x207A, "3", "+"), + (0x207B, "M", "−"), + (0x207C, "3", "="), + (0x207D, "3", "("), + (0x207E, "3", ")"), + (0x207F, "M", "n"), + (0x2080, "M", "0"), + (0x2081, "M", "1"), + (0x2082, "M", "2"), + (0x2083, "M", "3"), + (0x2084, "M", "4"), + (0x2085, "M", "5"), + (0x2086, "M", "6"), + (0x2087, "M", "7"), + (0x2088, "M", "8"), + (0x2089, "M", "9"), + (0x208A, "3", "+"), + (0x208B, "M", "−"), + (0x208C, "3", "="), + (0x208D, "3", "("), + (0x208E, "3", ")"), + (0x208F, "X"), + (0x2090, "M", "a"), + (0x2091, "M", "e"), + (0x2092, "M", "o"), + (0x2093, "M", "x"), + (0x2094, "M", "ə"), + (0x2095, "M", "h"), + (0x2096, "M", "k"), + (0x2097, "M", "l"), + (0x2098, "M", "m"), + (0x2099, "M", "n"), + (0x209A, "M", "p"), + (0x209B, "M", "s"), + (0x209C, "M", "t"), + (0x209D, "X"), + (0x20A0, "V"), + (0x20A8, "M", "rs"), + (0x20A9, "V"), + (0x20C1, "X"), + (0x20D0, "V"), + (0x20F1, "X"), + (0x2100, "3", "a/c"), + (0x2101, "3", "a/s"), + (0x2102, "M", "c"), + (0x2103, "M", "°c"), + (0x2104, "V"), + ] + + +def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2105, "3", "c/o"), + (0x2106, "3", "c/u"), + (0x2107, "M", "ɛ"), + (0x2108, "V"), + (0x2109, "M", "°f"), + (0x210A, "M", "g"), + (0x210B, "M", "h"), + (0x210F, "M", "ħ"), + (0x2110, "M", "i"), + (0x2112, "M", "l"), + (0x2114, "V"), + (0x2115, "M", "n"), + (0x2116, "M", "no"), + (0x2117, "V"), + (0x2119, "M", "p"), + (0x211A, "M", "q"), + (0x211B, "M", "r"), + (0x211E, "V"), + (0x2120, "M", "sm"), + (0x2121, "M", "tel"), + (0x2122, "M", "tm"), + (0x2123, "V"), + (0x2124, "M", "z"), + (0x2125, "V"), + (0x2126, "M", "ω"), + (0x2127, "V"), + (0x2128, "M", "z"), + (0x2129, "V"), + (0x212A, "M", "k"), + (0x212B, "M", "å"), + (0x212C, "M", "b"), + (0x212D, "M", "c"), + (0x212E, "V"), + (0x212F, "M", "e"), + (0x2131, "M", "f"), + (0x2132, "X"), + (0x2133, "M", "m"), + (0x2134, "M", "o"), + 
(0x2135, "M", "א"), + (0x2136, "M", "ב"), + (0x2137, "M", "ג"), + (0x2138, "M", "ד"), + (0x2139, "M", "i"), + (0x213A, "V"), + (0x213B, "M", "fax"), + (0x213C, "M", "π"), + (0x213D, "M", "γ"), + (0x213F, "M", "π"), + (0x2140, "M", "∑"), + (0x2141, "V"), + (0x2145, "M", "d"), + (0x2147, "M", "e"), + (0x2148, "M", "i"), + (0x2149, "M", "j"), + (0x214A, "V"), + (0x2150, "M", "1⁄7"), + (0x2151, "M", "1⁄9"), + (0x2152, "M", "1⁄10"), + (0x2153, "M", "1⁄3"), + (0x2154, "M", "2⁄3"), + (0x2155, "M", "1⁄5"), + (0x2156, "M", "2⁄5"), + (0x2157, "M", "3⁄5"), + (0x2158, "M", "4⁄5"), + (0x2159, "M", "1⁄6"), + (0x215A, "M", "5⁄6"), + (0x215B, "M", "1⁄8"), + (0x215C, "M", "3⁄8"), + (0x215D, "M", "5⁄8"), + (0x215E, "M", "7⁄8"), + (0x215F, "M", "1⁄"), + (0x2160, "M", "i"), + (0x2161, "M", "ii"), + (0x2162, "M", "iii"), + (0x2163, "M", "iv"), + (0x2164, "M", "v"), + (0x2165, "M", "vi"), + (0x2166, "M", "vii"), + (0x2167, "M", "viii"), + (0x2168, "M", "ix"), + (0x2169, "M", "x"), + (0x216A, "M", "xi"), + (0x216B, "M", "xii"), + (0x216C, "M", "l"), + (0x216D, "M", "c"), + (0x216E, "M", "d"), + (0x216F, "M", "m"), + (0x2170, "M", "i"), + (0x2171, "M", "ii"), + (0x2172, "M", "iii"), + (0x2173, "M", "iv"), + (0x2174, "M", "v"), + (0x2175, "M", "vi"), + (0x2176, "M", "vii"), + (0x2177, "M", "viii"), + (0x2178, "M", "ix"), + (0x2179, "M", "x"), + (0x217A, "M", "xi"), + (0x217B, "M", "xii"), + (0x217C, "M", "l"), + ] + + +def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x217D, "M", "c"), + (0x217E, "M", "d"), + (0x217F, "M", "m"), + (0x2180, "V"), + (0x2183, "X"), + (0x2184, "V"), + (0x2189, "M", "0⁄3"), + (0x218A, "V"), + (0x218C, "X"), + (0x2190, "V"), + (0x222C, "M", "∫∫"), + (0x222D, "M", "∫∫∫"), + (0x222E, "V"), + (0x222F, "M", "∮∮"), + (0x2230, "M", "∮∮∮"), + (0x2231, "V"), + (0x2329, "M", "〈"), + (0x232A, "M", "〉"), + (0x232B, "V"), + (0x2427, "X"), + (0x2440, "V"), + (0x244B, "X"), + (0x2460, "M", "1"), + (0x2461, "M", "2"), + (0x2462, "M", "3"), + (0x2463, "M", "4"), + (0x2464, "M", "5"), + (0x2465, "M", "6"), + (0x2466, "M", "7"), + (0x2467, "M", "8"), + (0x2468, "M", "9"), + (0x2469, "M", "10"), + (0x246A, "M", "11"), + (0x246B, "M", "12"), + (0x246C, "M", "13"), + (0x246D, "M", "14"), + (0x246E, "M", "15"), + (0x246F, "M", "16"), + (0x2470, "M", "17"), + (0x2471, "M", "18"), + (0x2472, "M", "19"), + (0x2473, "M", "20"), + (0x2474, "3", "(1)"), + (0x2475, "3", "(2)"), + (0x2476, "3", "(3)"), + (0x2477, "3", "(4)"), + (0x2478, "3", "(5)"), + (0x2479, "3", "(6)"), + (0x247A, "3", "(7)"), + (0x247B, "3", "(8)"), + (0x247C, "3", "(9)"), + (0x247D, "3", "(10)"), + (0x247E, "3", "(11)"), + (0x247F, "3", "(12)"), + (0x2480, "3", "(13)"), + (0x2481, "3", "(14)"), + (0x2482, "3", "(15)"), + (0x2483, "3", "(16)"), + (0x2484, "3", "(17)"), + (0x2485, "3", "(18)"), + (0x2486, "3", "(19)"), + (0x2487, "3", "(20)"), + (0x2488, "X"), + (0x249C, "3", "(a)"), + (0x249D, "3", "(b)"), + (0x249E, "3", "(c)"), + (0x249F, "3", "(d)"), + (0x24A0, "3", "(e)"), + (0x24A1, "3", "(f)"), + (0x24A2, "3", "(g)"), + (0x24A3, "3", "(h)"), + (0x24A4, "3", "(i)"), + (0x24A5, "3", "(j)"), + (0x24A6, "3", "(k)"), + (0x24A7, "3", "(l)"), + (0x24A8, "3", "(m)"), + (0x24A9, "3", "(n)"), + (0x24AA, "3", "(o)"), + (0x24AB, "3", "(p)"), + (0x24AC, "3", "(q)"), + (0x24AD, "3", "(r)"), + (0x24AE, "3", "(s)"), + (0x24AF, "3", "(t)"), + (0x24B0, "3", "(u)"), + (0x24B1, "3", "(v)"), + (0x24B2, "3", "(w)"), + (0x24B3, "3", "(x)"), + (0x24B4, "3", "(y)"), + (0x24B5, "3", "(z)"), + (0x24B6, "M", "a"), + (0x24B7, "M", 
"b"), + (0x24B8, "M", "c"), + (0x24B9, "M", "d"), + (0x24BA, "M", "e"), + (0x24BB, "M", "f"), + (0x24BC, "M", "g"), + (0x24BD, "M", "h"), + (0x24BE, "M", "i"), + (0x24BF, "M", "j"), + (0x24C0, "M", "k"), + ] + + +def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x24C1, "M", "l"), + (0x24C2, "M", "m"), + (0x24C3, "M", "n"), + (0x24C4, "M", "o"), + (0x24C5, "M", "p"), + (0x24C6, "M", "q"), + (0x24C7, "M", "r"), + (0x24C8, "M", "s"), + (0x24C9, "M", "t"), + (0x24CA, "M", "u"), + (0x24CB, "M", "v"), + (0x24CC, "M", "w"), + (0x24CD, "M", "x"), + (0x24CE, "M", "y"), + (0x24CF, "M", "z"), + (0x24D0, "M", "a"), + (0x24D1, "M", "b"), + (0x24D2, "M", "c"), + (0x24D3, "M", "d"), + (0x24D4, "M", "e"), + (0x24D5, "M", "f"), + (0x24D6, "M", "g"), + (0x24D7, "M", "h"), + (0x24D8, "M", "i"), + (0x24D9, "M", "j"), + (0x24DA, "M", "k"), + (0x24DB, "M", "l"), + (0x24DC, "M", "m"), + (0x24DD, "M", "n"), + (0x24DE, "M", "o"), + (0x24DF, "M", "p"), + (0x24E0, "M", "q"), + (0x24E1, "M", "r"), + (0x24E2, "M", "s"), + (0x24E3, "M", "t"), + (0x24E4, "M", "u"), + (0x24E5, "M", "v"), + (0x24E6, "M", "w"), + (0x24E7, "M", "x"), + (0x24E8, "M", "y"), + (0x24E9, "M", "z"), + (0x24EA, "M", "0"), + (0x24EB, "V"), + (0x2A0C, "M", "∫∫∫∫"), + (0x2A0D, "V"), + (0x2A74, "3", "::="), + (0x2A75, "3", "=="), + (0x2A76, "3", "==="), + (0x2A77, "V"), + (0x2ADC, "M", "⫝̸"), + (0x2ADD, "V"), + (0x2B74, "X"), + (0x2B76, "V"), + (0x2B96, "X"), + (0x2B97, "V"), + (0x2C00, "M", "ⰰ"), + (0x2C01, "M", "ⰱ"), + (0x2C02, "M", "ⰲ"), + (0x2C03, "M", "ⰳ"), + (0x2C04, "M", "ⰴ"), + (0x2C05, "M", "ⰵ"), + (0x2C06, "M", "ⰶ"), + (0x2C07, "M", "ⰷ"), + (0x2C08, "M", "ⰸ"), + (0x2C09, "M", "ⰹ"), + (0x2C0A, "M", "ⰺ"), + (0x2C0B, "M", "ⰻ"), + (0x2C0C, "M", "ⰼ"), + (0x2C0D, "M", "ⰽ"), + (0x2C0E, "M", "ⰾ"), + (0x2C0F, "M", "ⰿ"), + (0x2C10, "M", "ⱀ"), + (0x2C11, "M", "ⱁ"), + (0x2C12, "M", "ⱂ"), + (0x2C13, "M", "ⱃ"), + (0x2C14, "M", "ⱄ"), + (0x2C15, "M", "ⱅ"), + (0x2C16, "M", "ⱆ"), + (0x2C17, "M", "ⱇ"), + (0x2C18, "M", "ⱈ"), + (0x2C19, "M", "ⱉ"), + (0x2C1A, "M", "ⱊ"), + (0x2C1B, "M", "ⱋ"), + (0x2C1C, "M", "ⱌ"), + (0x2C1D, "M", "ⱍ"), + (0x2C1E, "M", "ⱎ"), + (0x2C1F, "M", "ⱏ"), + (0x2C20, "M", "ⱐ"), + (0x2C21, "M", "ⱑ"), + (0x2C22, "M", "ⱒ"), + (0x2C23, "M", "ⱓ"), + (0x2C24, "M", "ⱔ"), + (0x2C25, "M", "ⱕ"), + (0x2C26, "M", "ⱖ"), + (0x2C27, "M", "ⱗ"), + (0x2C28, "M", "ⱘ"), + (0x2C29, "M", "ⱙ"), + (0x2C2A, "M", "ⱚ"), + (0x2C2B, "M", "ⱛ"), + (0x2C2C, "M", "ⱜ"), + ] + + +def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2C2D, "M", "ⱝ"), + (0x2C2E, "M", "ⱞ"), + (0x2C2F, "M", "ⱟ"), + (0x2C30, "V"), + (0x2C60, "M", "ⱡ"), + (0x2C61, "V"), + (0x2C62, "M", "ɫ"), + (0x2C63, "M", "ᵽ"), + (0x2C64, "M", "ɽ"), + (0x2C65, "V"), + (0x2C67, "M", "ⱨ"), + (0x2C68, "V"), + (0x2C69, "M", "ⱪ"), + (0x2C6A, "V"), + (0x2C6B, "M", "ⱬ"), + (0x2C6C, "V"), + (0x2C6D, "M", "ɑ"), + (0x2C6E, "M", "ɱ"), + (0x2C6F, "M", "ɐ"), + (0x2C70, "M", "ɒ"), + (0x2C71, "V"), + (0x2C72, "M", "ⱳ"), + (0x2C73, "V"), + (0x2C75, "M", "ⱶ"), + (0x2C76, "V"), + (0x2C7C, "M", "j"), + (0x2C7D, "M", "v"), + (0x2C7E, "M", "ȿ"), + (0x2C7F, "M", "ɀ"), + (0x2C80, "M", "ⲁ"), + (0x2C81, "V"), + (0x2C82, "M", "ⲃ"), + (0x2C83, "V"), + (0x2C84, "M", "ⲅ"), + (0x2C85, "V"), + (0x2C86, "M", "ⲇ"), + (0x2C87, "V"), + (0x2C88, "M", "ⲉ"), + (0x2C89, "V"), + (0x2C8A, "M", "ⲋ"), + (0x2C8B, "V"), + (0x2C8C, "M", "ⲍ"), + (0x2C8D, "V"), + (0x2C8E, "M", "ⲏ"), + (0x2C8F, "V"), + (0x2C90, "M", "ⲑ"), + (0x2C91, "V"), + (0x2C92, "M", "ⲓ"), + (0x2C93, "V"), + (0x2C94, "M", 
"ⲕ"), + (0x2C95, "V"), + (0x2C96, "M", "ⲗ"), + (0x2C97, "V"), + (0x2C98, "M", "ⲙ"), + (0x2C99, "V"), + (0x2C9A, "M", "ⲛ"), + (0x2C9B, "V"), + (0x2C9C, "M", "ⲝ"), + (0x2C9D, "V"), + (0x2C9E, "M", "ⲟ"), + (0x2C9F, "V"), + (0x2CA0, "M", "ⲡ"), + (0x2CA1, "V"), + (0x2CA2, "M", "ⲣ"), + (0x2CA3, "V"), + (0x2CA4, "M", "ⲥ"), + (0x2CA5, "V"), + (0x2CA6, "M", "ⲧ"), + (0x2CA7, "V"), + (0x2CA8, "M", "ⲩ"), + (0x2CA9, "V"), + (0x2CAA, "M", "ⲫ"), + (0x2CAB, "V"), + (0x2CAC, "M", "ⲭ"), + (0x2CAD, "V"), + (0x2CAE, "M", "ⲯ"), + (0x2CAF, "V"), + (0x2CB0, "M", "ⲱ"), + (0x2CB1, "V"), + (0x2CB2, "M", "ⲳ"), + (0x2CB3, "V"), + (0x2CB4, "M", "ⲵ"), + (0x2CB5, "V"), + (0x2CB6, "M", "ⲷ"), + (0x2CB7, "V"), + (0x2CB8, "M", "ⲹ"), + (0x2CB9, "V"), + (0x2CBA, "M", "ⲻ"), + (0x2CBB, "V"), + (0x2CBC, "M", "ⲽ"), + (0x2CBD, "V"), + (0x2CBE, "M", "ⲿ"), + (0x2CBF, "V"), + (0x2CC0, "M", "ⳁ"), + (0x2CC1, "V"), + (0x2CC2, "M", "ⳃ"), + (0x2CC3, "V"), + (0x2CC4, "M", "ⳅ"), + (0x2CC5, "V"), + (0x2CC6, "M", "ⳇ"), + ] + + +def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2CC7, "V"), + (0x2CC8, "M", "ⳉ"), + (0x2CC9, "V"), + (0x2CCA, "M", "ⳋ"), + (0x2CCB, "V"), + (0x2CCC, "M", "ⳍ"), + (0x2CCD, "V"), + (0x2CCE, "M", "ⳏ"), + (0x2CCF, "V"), + (0x2CD0, "M", "ⳑ"), + (0x2CD1, "V"), + (0x2CD2, "M", "ⳓ"), + (0x2CD3, "V"), + (0x2CD4, "M", "ⳕ"), + (0x2CD5, "V"), + (0x2CD6, "M", "ⳗ"), + (0x2CD7, "V"), + (0x2CD8, "M", "ⳙ"), + (0x2CD9, "V"), + (0x2CDA, "M", "ⳛ"), + (0x2CDB, "V"), + (0x2CDC, "M", "ⳝ"), + (0x2CDD, "V"), + (0x2CDE, "M", "ⳟ"), + (0x2CDF, "V"), + (0x2CE0, "M", "ⳡ"), + (0x2CE1, "V"), + (0x2CE2, "M", "ⳣ"), + (0x2CE3, "V"), + (0x2CEB, "M", "ⳬ"), + (0x2CEC, "V"), + (0x2CED, "M", "ⳮ"), + (0x2CEE, "V"), + (0x2CF2, "M", "ⳳ"), + (0x2CF3, "V"), + (0x2CF4, "X"), + (0x2CF9, "V"), + (0x2D26, "X"), + (0x2D27, "V"), + (0x2D28, "X"), + (0x2D2D, "V"), + (0x2D2E, "X"), + (0x2D30, "V"), + (0x2D68, "X"), + (0x2D6F, "M", "ⵡ"), + (0x2D70, "V"), + (0x2D71, "X"), + (0x2D7F, "V"), + (0x2D97, "X"), + (0x2DA0, "V"), + (0x2DA7, "X"), + (0x2DA8, "V"), + (0x2DAF, "X"), + (0x2DB0, "V"), + (0x2DB7, "X"), + (0x2DB8, "V"), + (0x2DBF, "X"), + (0x2DC0, "V"), + (0x2DC7, "X"), + (0x2DC8, "V"), + (0x2DCF, "X"), + (0x2DD0, "V"), + (0x2DD7, "X"), + (0x2DD8, "V"), + (0x2DDF, "X"), + (0x2DE0, "V"), + (0x2E5E, "X"), + (0x2E80, "V"), + (0x2E9A, "X"), + (0x2E9B, "V"), + (0x2E9F, "M", "母"), + (0x2EA0, "V"), + (0x2EF3, "M", "龟"), + (0x2EF4, "X"), + (0x2F00, "M", "一"), + (0x2F01, "M", "丨"), + (0x2F02, "M", "丶"), + (0x2F03, "M", "丿"), + (0x2F04, "M", "乙"), + (0x2F05, "M", "亅"), + (0x2F06, "M", "二"), + (0x2F07, "M", "亠"), + (0x2F08, "M", "人"), + (0x2F09, "M", "儿"), + (0x2F0A, "M", "入"), + (0x2F0B, "M", "八"), + (0x2F0C, "M", "冂"), + (0x2F0D, "M", "冖"), + (0x2F0E, "M", "冫"), + (0x2F0F, "M", "几"), + (0x2F10, "M", "凵"), + (0x2F11, "M", "刀"), + (0x2F12, "M", "力"), + (0x2F13, "M", "勹"), + (0x2F14, "M", "匕"), + (0x2F15, "M", "匚"), + (0x2F16, "M", "匸"), + (0x2F17, "M", "十"), + (0x2F18, "M", "卜"), + (0x2F19, "M", "卩"), + ] + + +def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F1A, "M", "厂"), + (0x2F1B, "M", "厶"), + (0x2F1C, "M", "又"), + (0x2F1D, "M", "口"), + (0x2F1E, "M", "囗"), + (0x2F1F, "M", "土"), + (0x2F20, "M", "士"), + (0x2F21, "M", "夂"), + (0x2F22, "M", "夊"), + (0x2F23, "M", "夕"), + (0x2F24, "M", "大"), + (0x2F25, "M", "女"), + (0x2F26, "M", "子"), + (0x2F27, "M", "宀"), + (0x2F28, "M", "寸"), + (0x2F29, "M", "小"), + (0x2F2A, "M", "尢"), + (0x2F2B, "M", "尸"), + (0x2F2C, "M", "屮"), + (0x2F2D, "M", "山"), + (0x2F2E, "M", "巛"), + 
(0x2F2F, "M", "工"), + (0x2F30, "M", "己"), + (0x2F31, "M", "巾"), + (0x2F32, "M", "干"), + (0x2F33, "M", "幺"), + (0x2F34, "M", "广"), + (0x2F35, "M", "廴"), + (0x2F36, "M", "廾"), + (0x2F37, "M", "弋"), + (0x2F38, "M", "弓"), + (0x2F39, "M", "彐"), + (0x2F3A, "M", "彡"), + (0x2F3B, "M", "彳"), + (0x2F3C, "M", "心"), + (0x2F3D, "M", "戈"), + (0x2F3E, "M", "戶"), + (0x2F3F, "M", "手"), + (0x2F40, "M", "支"), + (0x2F41, "M", "攴"), + (0x2F42, "M", "文"), + (0x2F43, "M", "斗"), + (0x2F44, "M", "斤"), + (0x2F45, "M", "方"), + (0x2F46, "M", "无"), + (0x2F47, "M", "日"), + (0x2F48, "M", "曰"), + (0x2F49, "M", "月"), + (0x2F4A, "M", "木"), + (0x2F4B, "M", "欠"), + (0x2F4C, "M", "止"), + (0x2F4D, "M", "歹"), + (0x2F4E, "M", "殳"), + (0x2F4F, "M", "毋"), + (0x2F50, "M", "比"), + (0x2F51, "M", "毛"), + (0x2F52, "M", "氏"), + (0x2F53, "M", "气"), + (0x2F54, "M", "水"), + (0x2F55, "M", "火"), + (0x2F56, "M", "爪"), + (0x2F57, "M", "父"), + (0x2F58, "M", "爻"), + (0x2F59, "M", "爿"), + (0x2F5A, "M", "片"), + (0x2F5B, "M", "牙"), + (0x2F5C, "M", "牛"), + (0x2F5D, "M", "犬"), + (0x2F5E, "M", "玄"), + (0x2F5F, "M", "玉"), + (0x2F60, "M", "瓜"), + (0x2F61, "M", "瓦"), + (0x2F62, "M", "甘"), + (0x2F63, "M", "生"), + (0x2F64, "M", "用"), + (0x2F65, "M", "田"), + (0x2F66, "M", "疋"), + (0x2F67, "M", "疒"), + (0x2F68, "M", "癶"), + (0x2F69, "M", "白"), + (0x2F6A, "M", "皮"), + (0x2F6B, "M", "皿"), + (0x2F6C, "M", "目"), + (0x2F6D, "M", "矛"), + (0x2F6E, "M", "矢"), + (0x2F6F, "M", "石"), + (0x2F70, "M", "示"), + (0x2F71, "M", "禸"), + (0x2F72, "M", "禾"), + (0x2F73, "M", "穴"), + (0x2F74, "M", "立"), + (0x2F75, "M", "竹"), + (0x2F76, "M", "米"), + (0x2F77, "M", "糸"), + (0x2F78, "M", "缶"), + (0x2F79, "M", "网"), + (0x2F7A, "M", "羊"), + (0x2F7B, "M", "羽"), + (0x2F7C, "M", "老"), + (0x2F7D, "M", "而"), + ] + + +def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F7E, "M", "耒"), + (0x2F7F, "M", "耳"), + (0x2F80, "M", "聿"), + (0x2F81, "M", "肉"), + (0x2F82, "M", "臣"), + (0x2F83, "M", "自"), + (0x2F84, "M", "至"), + (0x2F85, "M", "臼"), + (0x2F86, "M", "舌"), + (0x2F87, "M", "舛"), + (0x2F88, "M", "舟"), + (0x2F89, "M", "艮"), + (0x2F8A, "M", "色"), + (0x2F8B, "M", "艸"), + (0x2F8C, "M", "虍"), + (0x2F8D, "M", "虫"), + (0x2F8E, "M", "血"), + (0x2F8F, "M", "行"), + (0x2F90, "M", "衣"), + (0x2F91, "M", "襾"), + (0x2F92, "M", "見"), + (0x2F93, "M", "角"), + (0x2F94, "M", "言"), + (0x2F95, "M", "谷"), + (0x2F96, "M", "豆"), + (0x2F97, "M", "豕"), + (0x2F98, "M", "豸"), + (0x2F99, "M", "貝"), + (0x2F9A, "M", "赤"), + (0x2F9B, "M", "走"), + (0x2F9C, "M", "足"), + (0x2F9D, "M", "身"), + (0x2F9E, "M", "車"), + (0x2F9F, "M", "辛"), + (0x2FA0, "M", "辰"), + (0x2FA1, "M", "辵"), + (0x2FA2, "M", "邑"), + (0x2FA3, "M", "酉"), + (0x2FA4, "M", "釆"), + (0x2FA5, "M", "里"), + (0x2FA6, "M", "金"), + (0x2FA7, "M", "長"), + (0x2FA8, "M", "門"), + (0x2FA9, "M", "阜"), + (0x2FAA, "M", "隶"), + (0x2FAB, "M", "隹"), + (0x2FAC, "M", "雨"), + (0x2FAD, "M", "靑"), + (0x2FAE, "M", "非"), + (0x2FAF, "M", "面"), + (0x2FB0, "M", "革"), + (0x2FB1, "M", "韋"), + (0x2FB2, "M", "韭"), + (0x2FB3, "M", "音"), + (0x2FB4, "M", "頁"), + (0x2FB5, "M", "風"), + (0x2FB6, "M", "飛"), + (0x2FB7, "M", "食"), + (0x2FB8, "M", "首"), + (0x2FB9, "M", "香"), + (0x2FBA, "M", "馬"), + (0x2FBB, "M", "骨"), + (0x2FBC, "M", "高"), + (0x2FBD, "M", "髟"), + (0x2FBE, "M", "鬥"), + (0x2FBF, "M", "鬯"), + (0x2FC0, "M", "鬲"), + (0x2FC1, "M", "鬼"), + (0x2FC2, "M", "魚"), + (0x2FC3, "M", "鳥"), + (0x2FC4, "M", "鹵"), + (0x2FC5, "M", "鹿"), + (0x2FC6, "M", "麥"), + (0x2FC7, "M", "麻"), + (0x2FC8, "M", "黃"), + (0x2FC9, "M", "黍"), + (0x2FCA, "M", "黑"), + (0x2FCB, "M", "黹"), + (0x2FCC, 
"M", "黽"), + (0x2FCD, "M", "鼎"), + (0x2FCE, "M", "鼓"), + (0x2FCF, "M", "鼠"), + (0x2FD0, "M", "鼻"), + (0x2FD1, "M", "齊"), + (0x2FD2, "M", "齒"), + (0x2FD3, "M", "龍"), + (0x2FD4, "M", "龜"), + (0x2FD5, "M", "龠"), + (0x2FD6, "X"), + (0x3000, "3", " "), + (0x3001, "V"), + (0x3002, "M", "."), + (0x3003, "V"), + (0x3036, "M", "〒"), + (0x3037, "V"), + (0x3038, "M", "十"), + (0x3039, "M", "卄"), + (0x303A, "M", "卅"), + (0x303B, "V"), + (0x3040, "X"), + ] + + +def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3041, "V"), + (0x3097, "X"), + (0x3099, "V"), + (0x309B, "3", " ゙"), + (0x309C, "3", " ゚"), + (0x309D, "V"), + (0x309F, "M", "より"), + (0x30A0, "V"), + (0x30FF, "M", "コト"), + (0x3100, "X"), + (0x3105, "V"), + (0x3130, "X"), + (0x3131, "M", "ᄀ"), + (0x3132, "M", "ᄁ"), + (0x3133, "M", "ᆪ"), + (0x3134, "M", "ᄂ"), + (0x3135, "M", "ᆬ"), + (0x3136, "M", "ᆭ"), + (0x3137, "M", "ᄃ"), + (0x3138, "M", "ᄄ"), + (0x3139, "M", "ᄅ"), + (0x313A, "M", "ᆰ"), + (0x313B, "M", "ᆱ"), + (0x313C, "M", "ᆲ"), + (0x313D, "M", "ᆳ"), + (0x313E, "M", "ᆴ"), + (0x313F, "M", "ᆵ"), + (0x3140, "M", "ᄚ"), + (0x3141, "M", "ᄆ"), + (0x3142, "M", "ᄇ"), + (0x3143, "M", "ᄈ"), + (0x3144, "M", "ᄡ"), + (0x3145, "M", "ᄉ"), + (0x3146, "M", "ᄊ"), + (0x3147, "M", "ᄋ"), + (0x3148, "M", "ᄌ"), + (0x3149, "M", "ᄍ"), + (0x314A, "M", "ᄎ"), + (0x314B, "M", "ᄏ"), + (0x314C, "M", "ᄐ"), + (0x314D, "M", "ᄑ"), + (0x314E, "M", "ᄒ"), + (0x314F, "M", "ᅡ"), + (0x3150, "M", "ᅢ"), + (0x3151, "M", "ᅣ"), + (0x3152, "M", "ᅤ"), + (0x3153, "M", "ᅥ"), + (0x3154, "M", "ᅦ"), + (0x3155, "M", "ᅧ"), + (0x3156, "M", "ᅨ"), + (0x3157, "M", "ᅩ"), + (0x3158, "M", "ᅪ"), + (0x3159, "M", "ᅫ"), + (0x315A, "M", "ᅬ"), + (0x315B, "M", "ᅭ"), + (0x315C, "M", "ᅮ"), + (0x315D, "M", "ᅯ"), + (0x315E, "M", "ᅰ"), + (0x315F, "M", "ᅱ"), + (0x3160, "M", "ᅲ"), + (0x3161, "M", "ᅳ"), + (0x3162, "M", "ᅴ"), + (0x3163, "M", "ᅵ"), + (0x3164, "X"), + (0x3165, "M", "ᄔ"), + (0x3166, "M", "ᄕ"), + (0x3167, "M", "ᇇ"), + (0x3168, "M", "ᇈ"), + (0x3169, "M", "ᇌ"), + (0x316A, "M", "ᇎ"), + (0x316B, "M", "ᇓ"), + (0x316C, "M", "ᇗ"), + (0x316D, "M", "ᇙ"), + (0x316E, "M", "ᄜ"), + (0x316F, "M", "ᇝ"), + (0x3170, "M", "ᇟ"), + (0x3171, "M", "ᄝ"), + (0x3172, "M", "ᄞ"), + (0x3173, "M", "ᄠ"), + (0x3174, "M", "ᄢ"), + (0x3175, "M", "ᄣ"), + (0x3176, "M", "ᄧ"), + (0x3177, "M", "ᄩ"), + (0x3178, "M", "ᄫ"), + (0x3179, "M", "ᄬ"), + (0x317A, "M", "ᄭ"), + (0x317B, "M", "ᄮ"), + (0x317C, "M", "ᄯ"), + (0x317D, "M", "ᄲ"), + (0x317E, "M", "ᄶ"), + (0x317F, "M", "ᅀ"), + (0x3180, "M", "ᅇ"), + (0x3181, "M", "ᅌ"), + (0x3182, "M", "ᇱ"), + (0x3183, "M", "ᇲ"), + (0x3184, "M", "ᅗ"), + (0x3185, "M", "ᅘ"), + (0x3186, "M", "ᅙ"), + (0x3187, "M", "ᆄ"), + (0x3188, "M", "ᆅ"), + ] + + +def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3189, "M", "ᆈ"), + (0x318A, "M", "ᆑ"), + (0x318B, "M", "ᆒ"), + (0x318C, "M", "ᆔ"), + (0x318D, "M", "ᆞ"), + (0x318E, "M", "ᆡ"), + (0x318F, "X"), + (0x3190, "V"), + (0x3192, "M", "一"), + (0x3193, "M", "二"), + (0x3194, "M", "三"), + (0x3195, "M", "四"), + (0x3196, "M", "上"), + (0x3197, "M", "中"), + (0x3198, "M", "下"), + (0x3199, "M", "甲"), + (0x319A, "M", "乙"), + (0x319B, "M", "丙"), + (0x319C, "M", "丁"), + (0x319D, "M", "天"), + (0x319E, "M", "地"), + (0x319F, "M", "人"), + (0x31A0, "V"), + (0x31E4, "X"), + (0x31F0, "V"), + (0x3200, "3", "(ᄀ)"), + (0x3201, "3", "(ᄂ)"), + (0x3202, "3", "(ᄃ)"), + (0x3203, "3", "(ᄅ)"), + (0x3204, "3", "(ᄆ)"), + (0x3205, "3", "(ᄇ)"), + (0x3206, "3", "(ᄉ)"), + (0x3207, "3", "(ᄋ)"), + (0x3208, "3", "(ᄌ)"), + (0x3209, "3", "(ᄎ)"), + 
(0x320A, "3", "(ᄏ)"), + (0x320B, "3", "(ᄐ)"), + (0x320C, "3", "(ᄑ)"), + (0x320D, "3", "(ᄒ)"), + (0x320E, "3", "(가)"), + (0x320F, "3", "(나)"), + (0x3210, "3", "(다)"), + (0x3211, "3", "(라)"), + (0x3212, "3", "(마)"), + (0x3213, "3", "(바)"), + (0x3214, "3", "(사)"), + (0x3215, "3", "(아)"), + (0x3216, "3", "(자)"), + (0x3217, "3", "(차)"), + (0x3218, "3", "(카)"), + (0x3219, "3", "(타)"), + (0x321A, "3", "(파)"), + (0x321B, "3", "(하)"), + (0x321C, "3", "(주)"), + (0x321D, "3", "(오전)"), + (0x321E, "3", "(오후)"), + (0x321F, "X"), + (0x3220, "3", "(一)"), + (0x3221, "3", "(二)"), + (0x3222, "3", "(三)"), + (0x3223, "3", "(四)"), + (0x3224, "3", "(五)"), + (0x3225, "3", "(六)"), + (0x3226, "3", "(七)"), + (0x3227, "3", "(八)"), + (0x3228, "3", "(九)"), + (0x3229, "3", "(十)"), + (0x322A, "3", "(月)"), + (0x322B, "3", "(火)"), + (0x322C, "3", "(水)"), + (0x322D, "3", "(木)"), + (0x322E, "3", "(金)"), + (0x322F, "3", "(土)"), + (0x3230, "3", "(日)"), + (0x3231, "3", "(株)"), + (0x3232, "3", "(有)"), + (0x3233, "3", "(社)"), + (0x3234, "3", "(名)"), + (0x3235, "3", "(特)"), + (0x3236, "3", "(財)"), + (0x3237, "3", "(祝)"), + (0x3238, "3", "(労)"), + (0x3239, "3", "(代)"), + (0x323A, "3", "(呼)"), + (0x323B, "3", "(学)"), + (0x323C, "3", "(監)"), + (0x323D, "3", "(企)"), + (0x323E, "3", "(資)"), + (0x323F, "3", "(協)"), + (0x3240, "3", "(祭)"), + (0x3241, "3", "(休)"), + (0x3242, "3", "(自)"), + (0x3243, "3", "(至)"), + (0x3244, "M", "問"), + (0x3245, "M", "幼"), + (0x3246, "M", "文"), + (0x3247, "M", "箏"), + (0x3248, "V"), + (0x3250, "M", "pte"), + (0x3251, "M", "21"), + ] + + +def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3252, "M", "22"), + (0x3253, "M", "23"), + (0x3254, "M", "24"), + (0x3255, "M", "25"), + (0x3256, "M", "26"), + (0x3257, "M", "27"), + (0x3258, "M", "28"), + (0x3259, "M", "29"), + (0x325A, "M", "30"), + (0x325B, "M", "31"), + (0x325C, "M", "32"), + (0x325D, "M", "33"), + (0x325E, "M", "34"), + (0x325F, "M", "35"), + (0x3260, "M", "ᄀ"), + (0x3261, "M", "ᄂ"), + (0x3262, "M", "ᄃ"), + (0x3263, "M", "ᄅ"), + (0x3264, "M", "ᄆ"), + (0x3265, "M", "ᄇ"), + (0x3266, "M", "ᄉ"), + (0x3267, "M", "ᄋ"), + (0x3268, "M", "ᄌ"), + (0x3269, "M", "ᄎ"), + (0x326A, "M", "ᄏ"), + (0x326B, "M", "ᄐ"), + (0x326C, "M", "ᄑ"), + (0x326D, "M", "ᄒ"), + (0x326E, "M", "가"), + (0x326F, "M", "나"), + (0x3270, "M", "다"), + (0x3271, "M", "라"), + (0x3272, "M", "마"), + (0x3273, "M", "바"), + (0x3274, "M", "사"), + (0x3275, "M", "아"), + (0x3276, "M", "자"), + (0x3277, "M", "차"), + (0x3278, "M", "카"), + (0x3279, "M", "타"), + (0x327A, "M", "파"), + (0x327B, "M", "하"), + (0x327C, "M", "참고"), + (0x327D, "M", "주의"), + (0x327E, "M", "우"), + (0x327F, "V"), + (0x3280, "M", "一"), + (0x3281, "M", "二"), + (0x3282, "M", "三"), + (0x3283, "M", "四"), + (0x3284, "M", "五"), + (0x3285, "M", "六"), + (0x3286, "M", "七"), + (0x3287, "M", "八"), + (0x3288, "M", "九"), + (0x3289, "M", "十"), + (0x328A, "M", "月"), + (0x328B, "M", "火"), + (0x328C, "M", "水"), + (0x328D, "M", "木"), + (0x328E, "M", "金"), + (0x328F, "M", "土"), + (0x3290, "M", "日"), + (0x3291, "M", "株"), + (0x3292, "M", "有"), + (0x3293, "M", "社"), + (0x3294, "M", "名"), + (0x3295, "M", "特"), + (0x3296, "M", "財"), + (0x3297, "M", "祝"), + (0x3298, "M", "労"), + (0x3299, "M", "秘"), + (0x329A, "M", "男"), + (0x329B, "M", "女"), + (0x329C, "M", "適"), + (0x329D, "M", "優"), + (0x329E, "M", "印"), + (0x329F, "M", "注"), + (0x32A0, "M", "項"), + (0x32A1, "M", "休"), + (0x32A2, "M", "写"), + (0x32A3, "M", "正"), + (0x32A4, "M", "上"), + (0x32A5, "M", "中"), + (0x32A6, "M", "下"), + (0x32A7, "M", "左"), + (0x32A8, "M", "右"), + 
(0x32A9, "M", "医"), + (0x32AA, "M", "宗"), + (0x32AB, "M", "学"), + (0x32AC, "M", "監"), + (0x32AD, "M", "企"), + (0x32AE, "M", "資"), + (0x32AF, "M", "協"), + (0x32B0, "M", "夜"), + (0x32B1, "M", "36"), + (0x32B2, "M", "37"), + (0x32B3, "M", "38"), + (0x32B4, "M", "39"), + (0x32B5, "M", "40"), + ] + + +def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x32B6, "M", "41"), + (0x32B7, "M", "42"), + (0x32B8, "M", "43"), + (0x32B9, "M", "44"), + (0x32BA, "M", "45"), + (0x32BB, "M", "46"), + (0x32BC, "M", "47"), + (0x32BD, "M", "48"), + (0x32BE, "M", "49"), + (0x32BF, "M", "50"), + (0x32C0, "M", "1月"), + (0x32C1, "M", "2月"), + (0x32C2, "M", "3月"), + (0x32C3, "M", "4月"), + (0x32C4, "M", "5月"), + (0x32C5, "M", "6月"), + (0x32C6, "M", "7月"), + (0x32C7, "M", "8月"), + (0x32C8, "M", "9月"), + (0x32C9, "M", "10月"), + (0x32CA, "M", "11月"), + (0x32CB, "M", "12月"), + (0x32CC, "M", "hg"), + (0x32CD, "M", "erg"), + (0x32CE, "M", "ev"), + (0x32CF, "M", "ltd"), + (0x32D0, "M", "ア"), + (0x32D1, "M", "イ"), + (0x32D2, "M", "ウ"), + (0x32D3, "M", "エ"), + (0x32D4, "M", "オ"), + (0x32D5, "M", "カ"), + (0x32D6, "M", "キ"), + (0x32D7, "M", "ク"), + (0x32D8, "M", "ケ"), + (0x32D9, "M", "コ"), + (0x32DA, "M", "サ"), + (0x32DB, "M", "シ"), + (0x32DC, "M", "ス"), + (0x32DD, "M", "セ"), + (0x32DE, "M", "ソ"), + (0x32DF, "M", "タ"), + (0x32E0, "M", "チ"), + (0x32E1, "M", "ツ"), + (0x32E2, "M", "テ"), + (0x32E3, "M", "ト"), + (0x32E4, "M", "ナ"), + (0x32E5, "M", "ニ"), + (0x32E6, "M", "ヌ"), + (0x32E7, "M", "ネ"), + (0x32E8, "M", "ノ"), + (0x32E9, "M", "ハ"), + (0x32EA, "M", "ヒ"), + (0x32EB, "M", "フ"), + (0x32EC, "M", "ヘ"), + (0x32ED, "M", "ホ"), + (0x32EE, "M", "マ"), + (0x32EF, "M", "ミ"), + (0x32F0, "M", "ム"), + (0x32F1, "M", "メ"), + (0x32F2, "M", "モ"), + (0x32F3, "M", "ヤ"), + (0x32F4, "M", "ユ"), + (0x32F5, "M", "ヨ"), + (0x32F6, "M", "ラ"), + (0x32F7, "M", "リ"), + (0x32F8, "M", "ル"), + (0x32F9, "M", "レ"), + (0x32FA, "M", "ロ"), + (0x32FB, "M", "ワ"), + (0x32FC, "M", "ヰ"), + (0x32FD, "M", "ヱ"), + (0x32FE, "M", "ヲ"), + (0x32FF, "M", "令和"), + (0x3300, "M", "アパート"), + (0x3301, "M", "アルファ"), + (0x3302, "M", "アンペア"), + (0x3303, "M", "アール"), + (0x3304, "M", "イニング"), + (0x3305, "M", "インチ"), + (0x3306, "M", "ウォン"), + (0x3307, "M", "エスクード"), + (0x3308, "M", "エーカー"), + (0x3309, "M", "オンス"), + (0x330A, "M", "オーム"), + (0x330B, "M", "カイリ"), + (0x330C, "M", "カラット"), + (0x330D, "M", "カロリー"), + (0x330E, "M", "ガロン"), + (0x330F, "M", "ガンマ"), + (0x3310, "M", "ギガ"), + (0x3311, "M", "ギニー"), + (0x3312, "M", "キュリー"), + (0x3313, "M", "ギルダー"), + (0x3314, "M", "キロ"), + (0x3315, "M", "キログラム"), + (0x3316, "M", "キロメートル"), + (0x3317, "M", "キロワット"), + (0x3318, "M", "グラム"), + (0x3319, "M", "グラムトン"), + ] + + +def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x331A, "M", "クルゼイロ"), + (0x331B, "M", "クローネ"), + (0x331C, "M", "ケース"), + (0x331D, "M", "コルナ"), + (0x331E, "M", "コーポ"), + (0x331F, "M", "サイクル"), + (0x3320, "M", "サンチーム"), + (0x3321, "M", "シリング"), + (0x3322, "M", "センチ"), + (0x3323, "M", "セント"), + (0x3324, "M", "ダース"), + (0x3325, "M", "デシ"), + (0x3326, "M", "ドル"), + (0x3327, "M", "トン"), + (0x3328, "M", "ナノ"), + (0x3329, "M", "ノット"), + (0x332A, "M", "ハイツ"), + (0x332B, "M", "パーセント"), + (0x332C, "M", "パーツ"), + (0x332D, "M", "バーレル"), + (0x332E, "M", "ピアストル"), + (0x332F, "M", "ピクル"), + (0x3330, "M", "ピコ"), + (0x3331, "M", "ビル"), + (0x3332, "M", "ファラッド"), + (0x3333, "M", "フィート"), + (0x3334, "M", "ブッシェル"), + (0x3335, "M", "フラン"), + (0x3336, "M", "ヘクタール"), + (0x3337, "M", "ペソ"), + (0x3338, "M", "ペニヒ"), + (0x3339, "M", "ヘルツ"), + 
(0x333A, "M", "ペンス"), + (0x333B, "M", "ページ"), + (0x333C, "M", "ベータ"), + (0x333D, "M", "ポイント"), + (0x333E, "M", "ボルト"), + (0x333F, "M", "ホン"), + (0x3340, "M", "ポンド"), + (0x3341, "M", "ホール"), + (0x3342, "M", "ホーン"), + (0x3343, "M", "マイクロ"), + (0x3344, "M", "マイル"), + (0x3345, "M", "マッハ"), + (0x3346, "M", "マルク"), + (0x3347, "M", "マンション"), + (0x3348, "M", "ミクロン"), + (0x3349, "M", "ミリ"), + (0x334A, "M", "ミリバール"), + (0x334B, "M", "メガ"), + (0x334C, "M", "メガトン"), + (0x334D, "M", "メートル"), + (0x334E, "M", "ヤード"), + (0x334F, "M", "ヤール"), + (0x3350, "M", "ユアン"), + (0x3351, "M", "リットル"), + (0x3352, "M", "リラ"), + (0x3353, "M", "ルピー"), + (0x3354, "M", "ルーブル"), + (0x3355, "M", "レム"), + (0x3356, "M", "レントゲン"), + (0x3357, "M", "ワット"), + (0x3358, "M", "0点"), + (0x3359, "M", "1点"), + (0x335A, "M", "2点"), + (0x335B, "M", "3点"), + (0x335C, "M", "4点"), + (0x335D, "M", "5点"), + (0x335E, "M", "6点"), + (0x335F, "M", "7点"), + (0x3360, "M", "8点"), + (0x3361, "M", "9点"), + (0x3362, "M", "10点"), + (0x3363, "M", "11点"), + (0x3364, "M", "12点"), + (0x3365, "M", "13点"), + (0x3366, "M", "14点"), + (0x3367, "M", "15点"), + (0x3368, "M", "16点"), + (0x3369, "M", "17点"), + (0x336A, "M", "18点"), + (0x336B, "M", "19点"), + (0x336C, "M", "20点"), + (0x336D, "M", "21点"), + (0x336E, "M", "22点"), + (0x336F, "M", "23点"), + (0x3370, "M", "24点"), + (0x3371, "M", "hpa"), + (0x3372, "M", "da"), + (0x3373, "M", "au"), + (0x3374, "M", "bar"), + (0x3375, "M", "ov"), + (0x3376, "M", "pc"), + (0x3377, "M", "dm"), + (0x3378, "M", "dm2"), + (0x3379, "M", "dm3"), + (0x337A, "M", "iu"), + (0x337B, "M", "平成"), + (0x337C, "M", "昭和"), + (0x337D, "M", "大正"), + ] + + +def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x337E, "M", "明治"), + (0x337F, "M", "株式会社"), + (0x3380, "M", "pa"), + (0x3381, "M", "na"), + (0x3382, "M", "μa"), + (0x3383, "M", "ma"), + (0x3384, "M", "ka"), + (0x3385, "M", "kb"), + (0x3386, "M", "mb"), + (0x3387, "M", "gb"), + (0x3388, "M", "cal"), + (0x3389, "M", "kcal"), + (0x338A, "M", "pf"), + (0x338B, "M", "nf"), + (0x338C, "M", "μf"), + (0x338D, "M", "μg"), + (0x338E, "M", "mg"), + (0x338F, "M", "kg"), + (0x3390, "M", "hz"), + (0x3391, "M", "khz"), + (0x3392, "M", "mhz"), + (0x3393, "M", "ghz"), + (0x3394, "M", "thz"), + (0x3395, "M", "μl"), + (0x3396, "M", "ml"), + (0x3397, "M", "dl"), + (0x3398, "M", "kl"), + (0x3399, "M", "fm"), + (0x339A, "M", "nm"), + (0x339B, "M", "μm"), + (0x339C, "M", "mm"), + (0x339D, "M", "cm"), + (0x339E, "M", "km"), + (0x339F, "M", "mm2"), + (0x33A0, "M", "cm2"), + (0x33A1, "M", "m2"), + (0x33A2, "M", "km2"), + (0x33A3, "M", "mm3"), + (0x33A4, "M", "cm3"), + (0x33A5, "M", "m3"), + (0x33A6, "M", "km3"), + (0x33A7, "M", "m∕s"), + (0x33A8, "M", "m∕s2"), + (0x33A9, "M", "pa"), + (0x33AA, "M", "kpa"), + (0x33AB, "M", "mpa"), + (0x33AC, "M", "gpa"), + (0x33AD, "M", "rad"), + (0x33AE, "M", "rad∕s"), + (0x33AF, "M", "rad∕s2"), + (0x33B0, "M", "ps"), + (0x33B1, "M", "ns"), + (0x33B2, "M", "μs"), + (0x33B3, "M", "ms"), + (0x33B4, "M", "pv"), + (0x33B5, "M", "nv"), + (0x33B6, "M", "μv"), + (0x33B7, "M", "mv"), + (0x33B8, "M", "kv"), + (0x33B9, "M", "mv"), + (0x33BA, "M", "pw"), + (0x33BB, "M", "nw"), + (0x33BC, "M", "μw"), + (0x33BD, "M", "mw"), + (0x33BE, "M", "kw"), + (0x33BF, "M", "mw"), + (0x33C0, "M", "kω"), + (0x33C1, "M", "mω"), + (0x33C2, "X"), + (0x33C3, "M", "bq"), + (0x33C4, "M", "cc"), + (0x33C5, "M", "cd"), + (0x33C6, "M", "c∕kg"), + (0x33C7, "X"), + (0x33C8, "M", "db"), + (0x33C9, "M", "gy"), + (0x33CA, "M", "ha"), + (0x33CB, "M", "hp"), + (0x33CC, "M", "in"), + (0x33CD, 
"M", "kk"), + (0x33CE, "M", "km"), + (0x33CF, "M", "kt"), + (0x33D0, "M", "lm"), + (0x33D1, "M", "ln"), + (0x33D2, "M", "log"), + (0x33D3, "M", "lx"), + (0x33D4, "M", "mb"), + (0x33D5, "M", "mil"), + (0x33D6, "M", "mol"), + (0x33D7, "M", "ph"), + (0x33D8, "X"), + (0x33D9, "M", "ppm"), + (0x33DA, "M", "pr"), + (0x33DB, "M", "sr"), + (0x33DC, "M", "sv"), + (0x33DD, "M", "wb"), + (0x33DE, "M", "v∕m"), + (0x33DF, "M", "a∕m"), + (0x33E0, "M", "1日"), + (0x33E1, "M", "2日"), + ] + + +def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x33E2, "M", "3日"), + (0x33E3, "M", "4日"), + (0x33E4, "M", "5日"), + (0x33E5, "M", "6日"), + (0x33E6, "M", "7日"), + (0x33E7, "M", "8日"), + (0x33E8, "M", "9日"), + (0x33E9, "M", "10日"), + (0x33EA, "M", "11日"), + (0x33EB, "M", "12日"), + (0x33EC, "M", "13日"), + (0x33ED, "M", "14日"), + (0x33EE, "M", "15日"), + (0x33EF, "M", "16日"), + (0x33F0, "M", "17日"), + (0x33F1, "M", "18日"), + (0x33F2, "M", "19日"), + (0x33F3, "M", "20日"), + (0x33F4, "M", "21日"), + (0x33F5, "M", "22日"), + (0x33F6, "M", "23日"), + (0x33F7, "M", "24日"), + (0x33F8, "M", "25日"), + (0x33F9, "M", "26日"), + (0x33FA, "M", "27日"), + (0x33FB, "M", "28日"), + (0x33FC, "M", "29日"), + (0x33FD, "M", "30日"), + (0x33FE, "M", "31日"), + (0x33FF, "M", "gal"), + (0x3400, "V"), + (0xA48D, "X"), + (0xA490, "V"), + (0xA4C7, "X"), + (0xA4D0, "V"), + (0xA62C, "X"), + (0xA640, "M", "ꙁ"), + (0xA641, "V"), + (0xA642, "M", "ꙃ"), + (0xA643, "V"), + (0xA644, "M", "ꙅ"), + (0xA645, "V"), + (0xA646, "M", "ꙇ"), + (0xA647, "V"), + (0xA648, "M", "ꙉ"), + (0xA649, "V"), + (0xA64A, "M", "ꙋ"), + (0xA64B, "V"), + (0xA64C, "M", "ꙍ"), + (0xA64D, "V"), + (0xA64E, "M", "ꙏ"), + (0xA64F, "V"), + (0xA650, "M", "ꙑ"), + (0xA651, "V"), + (0xA652, "M", "ꙓ"), + (0xA653, "V"), + (0xA654, "M", "ꙕ"), + (0xA655, "V"), + (0xA656, "M", "ꙗ"), + (0xA657, "V"), + (0xA658, "M", "ꙙ"), + (0xA659, "V"), + (0xA65A, "M", "ꙛ"), + (0xA65B, "V"), + (0xA65C, "M", "ꙝ"), + (0xA65D, "V"), + (0xA65E, "M", "ꙟ"), + (0xA65F, "V"), + (0xA660, "M", "ꙡ"), + (0xA661, "V"), + (0xA662, "M", "ꙣ"), + (0xA663, "V"), + (0xA664, "M", "ꙥ"), + (0xA665, "V"), + (0xA666, "M", "ꙧ"), + (0xA667, "V"), + (0xA668, "M", "ꙩ"), + (0xA669, "V"), + (0xA66A, "M", "ꙫ"), + (0xA66B, "V"), + (0xA66C, "M", "ꙭ"), + (0xA66D, "V"), + (0xA680, "M", "ꚁ"), + (0xA681, "V"), + (0xA682, "M", "ꚃ"), + (0xA683, "V"), + (0xA684, "M", "ꚅ"), + (0xA685, "V"), + (0xA686, "M", "ꚇ"), + (0xA687, "V"), + (0xA688, "M", "ꚉ"), + (0xA689, "V"), + (0xA68A, "M", "ꚋ"), + (0xA68B, "V"), + (0xA68C, "M", "ꚍ"), + (0xA68D, "V"), + (0xA68E, "M", "ꚏ"), + (0xA68F, "V"), + (0xA690, "M", "ꚑ"), + (0xA691, "V"), + ] + + +def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA692, "M", "ꚓ"), + (0xA693, "V"), + (0xA694, "M", "ꚕ"), + (0xA695, "V"), + (0xA696, "M", "ꚗ"), + (0xA697, "V"), + (0xA698, "M", "ꚙ"), + (0xA699, "V"), + (0xA69A, "M", "ꚛ"), + (0xA69B, "V"), + (0xA69C, "M", "ъ"), + (0xA69D, "M", "ь"), + (0xA69E, "V"), + (0xA6F8, "X"), + (0xA700, "V"), + (0xA722, "M", "ꜣ"), + (0xA723, "V"), + (0xA724, "M", "ꜥ"), + (0xA725, "V"), + (0xA726, "M", "ꜧ"), + (0xA727, "V"), + (0xA728, "M", "ꜩ"), + (0xA729, "V"), + (0xA72A, "M", "ꜫ"), + (0xA72B, "V"), + (0xA72C, "M", "ꜭ"), + (0xA72D, "V"), + (0xA72E, "M", "ꜯ"), + (0xA72F, "V"), + (0xA732, "M", "ꜳ"), + (0xA733, "V"), + (0xA734, "M", "ꜵ"), + (0xA735, "V"), + (0xA736, "M", "ꜷ"), + (0xA737, "V"), + (0xA738, "M", "ꜹ"), + (0xA739, "V"), + (0xA73A, "M", "ꜻ"), + (0xA73B, "V"), + (0xA73C, "M", "ꜽ"), + (0xA73D, "V"), + (0xA73E, "M", "ꜿ"), + (0xA73F, "V"), + 
(0xA740, "M", "ꝁ"), + (0xA741, "V"), + (0xA742, "M", "ꝃ"), + (0xA743, "V"), + (0xA744, "M", "ꝅ"), + (0xA745, "V"), + (0xA746, "M", "ꝇ"), + (0xA747, "V"), + (0xA748, "M", "ꝉ"), + (0xA749, "V"), + (0xA74A, "M", "ꝋ"), + (0xA74B, "V"), + (0xA74C, "M", "ꝍ"), + (0xA74D, "V"), + (0xA74E, "M", "ꝏ"), + (0xA74F, "V"), + (0xA750, "M", "ꝑ"), + (0xA751, "V"), + (0xA752, "M", "ꝓ"), + (0xA753, "V"), + (0xA754, "M", "ꝕ"), + (0xA755, "V"), + (0xA756, "M", "ꝗ"), + (0xA757, "V"), + (0xA758, "M", "ꝙ"), + (0xA759, "V"), + (0xA75A, "M", "ꝛ"), + (0xA75B, "V"), + (0xA75C, "M", "ꝝ"), + (0xA75D, "V"), + (0xA75E, "M", "ꝟ"), + (0xA75F, "V"), + (0xA760, "M", "ꝡ"), + (0xA761, "V"), + (0xA762, "M", "ꝣ"), + (0xA763, "V"), + (0xA764, "M", "ꝥ"), + (0xA765, "V"), + (0xA766, "M", "ꝧ"), + (0xA767, "V"), + (0xA768, "M", "ꝩ"), + (0xA769, "V"), + (0xA76A, "M", "ꝫ"), + (0xA76B, "V"), + (0xA76C, "M", "ꝭ"), + (0xA76D, "V"), + (0xA76E, "M", "ꝯ"), + (0xA76F, "V"), + (0xA770, "M", "ꝯ"), + (0xA771, "V"), + (0xA779, "M", "ꝺ"), + (0xA77A, "V"), + (0xA77B, "M", "ꝼ"), + (0xA77C, "V"), + (0xA77D, "M", "ᵹ"), + (0xA77E, "M", "ꝿ"), + (0xA77F, "V"), + ] + + +def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA780, "M", "ꞁ"), + (0xA781, "V"), + (0xA782, "M", "ꞃ"), + (0xA783, "V"), + (0xA784, "M", "ꞅ"), + (0xA785, "V"), + (0xA786, "M", "ꞇ"), + (0xA787, "V"), + (0xA78B, "M", "ꞌ"), + (0xA78C, "V"), + (0xA78D, "M", "ɥ"), + (0xA78E, "V"), + (0xA790, "M", "ꞑ"), + (0xA791, "V"), + (0xA792, "M", "ꞓ"), + (0xA793, "V"), + (0xA796, "M", "ꞗ"), + (0xA797, "V"), + (0xA798, "M", "ꞙ"), + (0xA799, "V"), + (0xA79A, "M", "ꞛ"), + (0xA79B, "V"), + (0xA79C, "M", "ꞝ"), + (0xA79D, "V"), + (0xA79E, "M", "ꞟ"), + (0xA79F, "V"), + (0xA7A0, "M", "ꞡ"), + (0xA7A1, "V"), + (0xA7A2, "M", "ꞣ"), + (0xA7A3, "V"), + (0xA7A4, "M", "ꞥ"), + (0xA7A5, "V"), + (0xA7A6, "M", "ꞧ"), + (0xA7A7, "V"), + (0xA7A8, "M", "ꞩ"), + (0xA7A9, "V"), + (0xA7AA, "M", "ɦ"), + (0xA7AB, "M", "ɜ"), + (0xA7AC, "M", "ɡ"), + (0xA7AD, "M", "ɬ"), + (0xA7AE, "M", "ɪ"), + (0xA7AF, "V"), + (0xA7B0, "M", "ʞ"), + (0xA7B1, "M", "ʇ"), + (0xA7B2, "M", "ʝ"), + (0xA7B3, "M", "ꭓ"), + (0xA7B4, "M", "ꞵ"), + (0xA7B5, "V"), + (0xA7B6, "M", "ꞷ"), + (0xA7B7, "V"), + (0xA7B8, "M", "ꞹ"), + (0xA7B9, "V"), + (0xA7BA, "M", "ꞻ"), + (0xA7BB, "V"), + (0xA7BC, "M", "ꞽ"), + (0xA7BD, "V"), + (0xA7BE, "M", "ꞿ"), + (0xA7BF, "V"), + (0xA7C0, "M", "ꟁ"), + (0xA7C1, "V"), + (0xA7C2, "M", "ꟃ"), + (0xA7C3, "V"), + (0xA7C4, "M", "ꞔ"), + (0xA7C5, "M", "ʂ"), + (0xA7C6, "M", "ᶎ"), + (0xA7C7, "M", "ꟈ"), + (0xA7C8, "V"), + (0xA7C9, "M", "ꟊ"), + (0xA7CA, "V"), + (0xA7CB, "X"), + (0xA7D0, "M", "ꟑ"), + (0xA7D1, "V"), + (0xA7D2, "X"), + (0xA7D3, "V"), + (0xA7D4, "X"), + (0xA7D5, "V"), + (0xA7D6, "M", "ꟗ"), + (0xA7D7, "V"), + (0xA7D8, "M", "ꟙ"), + (0xA7D9, "V"), + (0xA7DA, "X"), + (0xA7F2, "M", "c"), + (0xA7F3, "M", "f"), + (0xA7F4, "M", "q"), + (0xA7F5, "M", "ꟶ"), + (0xA7F6, "V"), + (0xA7F8, "M", "ħ"), + (0xA7F9, "M", "œ"), + (0xA7FA, "V"), + (0xA82D, "X"), + (0xA830, "V"), + (0xA83A, "X"), + (0xA840, "V"), + (0xA878, "X"), + (0xA880, "V"), + (0xA8C6, "X"), + (0xA8CE, "V"), + (0xA8DA, "X"), + (0xA8E0, "V"), + (0xA954, "X"), + ] + + +def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA95F, "V"), + (0xA97D, "X"), + (0xA980, "V"), + (0xA9CE, "X"), + (0xA9CF, "V"), + (0xA9DA, "X"), + (0xA9DE, "V"), + (0xA9FF, "X"), + (0xAA00, "V"), + (0xAA37, "X"), + (0xAA40, "V"), + (0xAA4E, "X"), + (0xAA50, "V"), + (0xAA5A, "X"), + (0xAA5C, "V"), + (0xAAC3, "X"), + (0xAADB, "V"), + (0xAAF7, "X"), + (0xAB01, 
"V"), + (0xAB07, "X"), + (0xAB09, "V"), + (0xAB0F, "X"), + (0xAB11, "V"), + (0xAB17, "X"), + (0xAB20, "V"), + (0xAB27, "X"), + (0xAB28, "V"), + (0xAB2F, "X"), + (0xAB30, "V"), + (0xAB5C, "M", "ꜧ"), + (0xAB5D, "M", "ꬷ"), + (0xAB5E, "M", "ɫ"), + (0xAB5F, "M", "ꭒ"), + (0xAB60, "V"), + (0xAB69, "M", "ʍ"), + (0xAB6A, "V"), + (0xAB6C, "X"), + (0xAB70, "M", "Ꭰ"), + (0xAB71, "M", "Ꭱ"), + (0xAB72, "M", "Ꭲ"), + (0xAB73, "M", "Ꭳ"), + (0xAB74, "M", "Ꭴ"), + (0xAB75, "M", "Ꭵ"), + (0xAB76, "M", "Ꭶ"), + (0xAB77, "M", "Ꭷ"), + (0xAB78, "M", "Ꭸ"), + (0xAB79, "M", "Ꭹ"), + (0xAB7A, "M", "Ꭺ"), + (0xAB7B, "M", "Ꭻ"), + (0xAB7C, "M", "Ꭼ"), + (0xAB7D, "M", "Ꭽ"), + (0xAB7E, "M", "Ꭾ"), + (0xAB7F, "M", "Ꭿ"), + (0xAB80, "M", "Ꮀ"), + (0xAB81, "M", "Ꮁ"), + (0xAB82, "M", "Ꮂ"), + (0xAB83, "M", "Ꮃ"), + (0xAB84, "M", "Ꮄ"), + (0xAB85, "M", "Ꮅ"), + (0xAB86, "M", "Ꮆ"), + (0xAB87, "M", "Ꮇ"), + (0xAB88, "M", "Ꮈ"), + (0xAB89, "M", "Ꮉ"), + (0xAB8A, "M", "Ꮊ"), + (0xAB8B, "M", "Ꮋ"), + (0xAB8C, "M", "Ꮌ"), + (0xAB8D, "M", "Ꮍ"), + (0xAB8E, "M", "Ꮎ"), + (0xAB8F, "M", "Ꮏ"), + (0xAB90, "M", "Ꮐ"), + (0xAB91, "M", "Ꮑ"), + (0xAB92, "M", "Ꮒ"), + (0xAB93, "M", "Ꮓ"), + (0xAB94, "M", "Ꮔ"), + (0xAB95, "M", "Ꮕ"), + (0xAB96, "M", "Ꮖ"), + (0xAB97, "M", "Ꮗ"), + (0xAB98, "M", "Ꮘ"), + (0xAB99, "M", "Ꮙ"), + (0xAB9A, "M", "Ꮚ"), + (0xAB9B, "M", "Ꮛ"), + (0xAB9C, "M", "Ꮜ"), + (0xAB9D, "M", "Ꮝ"), + (0xAB9E, "M", "Ꮞ"), + (0xAB9F, "M", "Ꮟ"), + (0xABA0, "M", "Ꮠ"), + (0xABA1, "M", "Ꮡ"), + (0xABA2, "M", "Ꮢ"), + (0xABA3, "M", "Ꮣ"), + (0xABA4, "M", "Ꮤ"), + (0xABA5, "M", "Ꮥ"), + (0xABA6, "M", "Ꮦ"), + (0xABA7, "M", "Ꮧ"), + (0xABA8, "M", "Ꮨ"), + (0xABA9, "M", "Ꮩ"), + (0xABAA, "M", "Ꮪ"), + (0xABAB, "M", "Ꮫ"), + (0xABAC, "M", "Ꮬ"), + (0xABAD, "M", "Ꮭ"), + (0xABAE, "M", "Ꮮ"), + ] + + +def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xABAF, "M", "Ꮯ"), + (0xABB0, "M", "Ꮰ"), + (0xABB1, "M", "Ꮱ"), + (0xABB2, "M", "Ꮲ"), + (0xABB3, "M", "Ꮳ"), + (0xABB4, "M", "Ꮴ"), + (0xABB5, "M", "Ꮵ"), + (0xABB6, "M", "Ꮶ"), + (0xABB7, "M", "Ꮷ"), + (0xABB8, "M", "Ꮸ"), + (0xABB9, "M", "Ꮹ"), + (0xABBA, "M", "Ꮺ"), + (0xABBB, "M", "Ꮻ"), + (0xABBC, "M", "Ꮼ"), + (0xABBD, "M", "Ꮽ"), + (0xABBE, "M", "Ꮾ"), + (0xABBF, "M", "Ꮿ"), + (0xABC0, "V"), + (0xABEE, "X"), + (0xABF0, "V"), + (0xABFA, "X"), + (0xAC00, "V"), + (0xD7A4, "X"), + (0xD7B0, "V"), + (0xD7C7, "X"), + (0xD7CB, "V"), + (0xD7FC, "X"), + (0xF900, "M", "豈"), + (0xF901, "M", "更"), + (0xF902, "M", "車"), + (0xF903, "M", "賈"), + (0xF904, "M", "滑"), + (0xF905, "M", "串"), + (0xF906, "M", "句"), + (0xF907, "M", "龜"), + (0xF909, "M", "契"), + (0xF90A, "M", "金"), + (0xF90B, "M", "喇"), + (0xF90C, "M", "奈"), + (0xF90D, "M", "懶"), + (0xF90E, "M", "癩"), + (0xF90F, "M", "羅"), + (0xF910, "M", "蘿"), + (0xF911, "M", "螺"), + (0xF912, "M", "裸"), + (0xF913, "M", "邏"), + (0xF914, "M", "樂"), + (0xF915, "M", "洛"), + (0xF916, "M", "烙"), + (0xF917, "M", "珞"), + (0xF918, "M", "落"), + (0xF919, "M", "酪"), + (0xF91A, "M", "駱"), + (0xF91B, "M", "亂"), + (0xF91C, "M", "卵"), + (0xF91D, "M", "欄"), + (0xF91E, "M", "爛"), + (0xF91F, "M", "蘭"), + (0xF920, "M", "鸞"), + (0xF921, "M", "嵐"), + (0xF922, "M", "濫"), + (0xF923, "M", "藍"), + (0xF924, "M", "襤"), + (0xF925, "M", "拉"), + (0xF926, "M", "臘"), + (0xF927, "M", "蠟"), + (0xF928, "M", "廊"), + (0xF929, "M", "朗"), + (0xF92A, "M", "浪"), + (0xF92B, "M", "狼"), + (0xF92C, "M", "郎"), + (0xF92D, "M", "來"), + (0xF92E, "M", "冷"), + (0xF92F, "M", "勞"), + (0xF930, "M", "擄"), + (0xF931, "M", "櫓"), + (0xF932, "M", "爐"), + (0xF933, "M", "盧"), + (0xF934, "M", "老"), + (0xF935, "M", "蘆"), + (0xF936, "M", "虜"), + (0xF937, 
"M", "路"), + (0xF938, "M", "露"), + (0xF939, "M", "魯"), + (0xF93A, "M", "鷺"), + (0xF93B, "M", "碌"), + (0xF93C, "M", "祿"), + (0xF93D, "M", "綠"), + (0xF93E, "M", "菉"), + (0xF93F, "M", "錄"), + (0xF940, "M", "鹿"), + (0xF941, "M", "論"), + (0xF942, "M", "壟"), + (0xF943, "M", "弄"), + (0xF944, "M", "籠"), + (0xF945, "M", "聾"), + (0xF946, "M", "牢"), + (0xF947, "M", "磊"), + (0xF948, "M", "賂"), + (0xF949, "M", "雷"), + ] + + +def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xF94A, "M", "壘"), + (0xF94B, "M", "屢"), + (0xF94C, "M", "樓"), + (0xF94D, "M", "淚"), + (0xF94E, "M", "漏"), + (0xF94F, "M", "累"), + (0xF950, "M", "縷"), + (0xF951, "M", "陋"), + (0xF952, "M", "勒"), + (0xF953, "M", "肋"), + (0xF954, "M", "凜"), + (0xF955, "M", "凌"), + (0xF956, "M", "稜"), + (0xF957, "M", "綾"), + (0xF958, "M", "菱"), + (0xF959, "M", "陵"), + (0xF95A, "M", "讀"), + (0xF95B, "M", "拏"), + (0xF95C, "M", "樂"), + (0xF95D, "M", "諾"), + (0xF95E, "M", "丹"), + (0xF95F, "M", "寧"), + (0xF960, "M", "怒"), + (0xF961, "M", "率"), + (0xF962, "M", "異"), + (0xF963, "M", "北"), + (0xF964, "M", "磻"), + (0xF965, "M", "便"), + (0xF966, "M", "復"), + (0xF967, "M", "不"), + (0xF968, "M", "泌"), + (0xF969, "M", "數"), + (0xF96A, "M", "索"), + (0xF96B, "M", "參"), + (0xF96C, "M", "塞"), + (0xF96D, "M", "省"), + (0xF96E, "M", "葉"), + (0xF96F, "M", "說"), + (0xF970, "M", "殺"), + (0xF971, "M", "辰"), + (0xF972, "M", "沈"), + (0xF973, "M", "拾"), + (0xF974, "M", "若"), + (0xF975, "M", "掠"), + (0xF976, "M", "略"), + (0xF977, "M", "亮"), + (0xF978, "M", "兩"), + (0xF979, "M", "凉"), + (0xF97A, "M", "梁"), + (0xF97B, "M", "糧"), + (0xF97C, "M", "良"), + (0xF97D, "M", "諒"), + (0xF97E, "M", "量"), + (0xF97F, "M", "勵"), + (0xF980, "M", "呂"), + (0xF981, "M", "女"), + (0xF982, "M", "廬"), + (0xF983, "M", "旅"), + (0xF984, "M", "濾"), + (0xF985, "M", "礪"), + (0xF986, "M", "閭"), + (0xF987, "M", "驪"), + (0xF988, "M", "麗"), + (0xF989, "M", "黎"), + (0xF98A, "M", "力"), + (0xF98B, "M", "曆"), + (0xF98C, "M", "歷"), + (0xF98D, "M", "轢"), + (0xF98E, "M", "年"), + (0xF98F, "M", "憐"), + (0xF990, "M", "戀"), + (0xF991, "M", "撚"), + (0xF992, "M", "漣"), + (0xF993, "M", "煉"), + (0xF994, "M", "璉"), + (0xF995, "M", "秊"), + (0xF996, "M", "練"), + (0xF997, "M", "聯"), + (0xF998, "M", "輦"), + (0xF999, "M", "蓮"), + (0xF99A, "M", "連"), + (0xF99B, "M", "鍊"), + (0xF99C, "M", "列"), + (0xF99D, "M", "劣"), + (0xF99E, "M", "咽"), + (0xF99F, "M", "烈"), + (0xF9A0, "M", "裂"), + (0xF9A1, "M", "說"), + (0xF9A2, "M", "廉"), + (0xF9A3, "M", "念"), + (0xF9A4, "M", "捻"), + (0xF9A5, "M", "殮"), + (0xF9A6, "M", "簾"), + (0xF9A7, "M", "獵"), + (0xF9A8, "M", "令"), + (0xF9A9, "M", "囹"), + (0xF9AA, "M", "寧"), + (0xF9AB, "M", "嶺"), + (0xF9AC, "M", "怜"), + (0xF9AD, "M", "玲"), + ] + + +def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xF9AE, "M", "瑩"), + (0xF9AF, "M", "羚"), + (0xF9B0, "M", "聆"), + (0xF9B1, "M", "鈴"), + (0xF9B2, "M", "零"), + (0xF9B3, "M", "靈"), + (0xF9B4, "M", "領"), + (0xF9B5, "M", "例"), + (0xF9B6, "M", "禮"), + (0xF9B7, "M", "醴"), + (0xF9B8, "M", "隸"), + (0xF9B9, "M", "惡"), + (0xF9BA, "M", "了"), + (0xF9BB, "M", "僚"), + (0xF9BC, "M", "寮"), + (0xF9BD, "M", "尿"), + (0xF9BE, "M", "料"), + (0xF9BF, "M", "樂"), + (0xF9C0, "M", "燎"), + (0xF9C1, "M", "療"), + (0xF9C2, "M", "蓼"), + (0xF9C3, "M", "遼"), + (0xF9C4, "M", "龍"), + (0xF9C5, "M", "暈"), + (0xF9C6, "M", "阮"), + (0xF9C7, "M", "劉"), + (0xF9C8, "M", "杻"), + (0xF9C9, "M", "柳"), + (0xF9CA, "M", "流"), + (0xF9CB, "M", "溜"), + (0xF9CC, "M", "琉"), + (0xF9CD, "M", "留"), + (0xF9CE, "M", "硫"), + (0xF9CF, "M", "紐"), + (0xF9D0, "M", "類"), 
+ (0xF9D1, "M", "六"), + (0xF9D2, "M", "戮"), + (0xF9D3, "M", "陸"), + (0xF9D4, "M", "倫"), + (0xF9D5, "M", "崙"), + (0xF9D6, "M", "淪"), + (0xF9D7, "M", "輪"), + (0xF9D8, "M", "律"), + (0xF9D9, "M", "慄"), + (0xF9DA, "M", "栗"), + (0xF9DB, "M", "率"), + (0xF9DC, "M", "隆"), + (0xF9DD, "M", "利"), + (0xF9DE, "M", "吏"), + (0xF9DF, "M", "履"), + (0xF9E0, "M", "易"), + (0xF9E1, "M", "李"), + (0xF9E2, "M", "梨"), + (0xF9E3, "M", "泥"), + (0xF9E4, "M", "理"), + (0xF9E5, "M", "痢"), + (0xF9E6, "M", "罹"), + (0xF9E7, "M", "裏"), + (0xF9E8, "M", "裡"), + (0xF9E9, "M", "里"), + (0xF9EA, "M", "離"), + (0xF9EB, "M", "匿"), + (0xF9EC, "M", "溺"), + (0xF9ED, "M", "吝"), + (0xF9EE, "M", "燐"), + (0xF9EF, "M", "璘"), + (0xF9F0, "M", "藺"), + (0xF9F1, "M", "隣"), + (0xF9F2, "M", "鱗"), + (0xF9F3, "M", "麟"), + (0xF9F4, "M", "林"), + (0xF9F5, "M", "淋"), + (0xF9F6, "M", "臨"), + (0xF9F7, "M", "立"), + (0xF9F8, "M", "笠"), + (0xF9F9, "M", "粒"), + (0xF9FA, "M", "狀"), + (0xF9FB, "M", "炙"), + (0xF9FC, "M", "識"), + (0xF9FD, "M", "什"), + (0xF9FE, "M", "茶"), + (0xF9FF, "M", "刺"), + (0xFA00, "M", "切"), + (0xFA01, "M", "度"), + (0xFA02, "M", "拓"), + (0xFA03, "M", "糖"), + (0xFA04, "M", "宅"), + (0xFA05, "M", "洞"), + (0xFA06, "M", "暴"), + (0xFA07, "M", "輻"), + (0xFA08, "M", "行"), + (0xFA09, "M", "降"), + (0xFA0A, "M", "見"), + (0xFA0B, "M", "廓"), + (0xFA0C, "M", "兀"), + (0xFA0D, "M", "嗀"), + (0xFA0E, "V"), + (0xFA10, "M", "塚"), + (0xFA11, "V"), + (0xFA12, "M", "晴"), + ] + + +def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFA13, "V"), + (0xFA15, "M", "凞"), + (0xFA16, "M", "猪"), + (0xFA17, "M", "益"), + (0xFA18, "M", "礼"), + (0xFA19, "M", "神"), + (0xFA1A, "M", "祥"), + (0xFA1B, "M", "福"), + (0xFA1C, "M", "靖"), + (0xFA1D, "M", "精"), + (0xFA1E, "M", "羽"), + (0xFA1F, "V"), + (0xFA20, "M", "蘒"), + (0xFA21, "V"), + (0xFA22, "M", "諸"), + (0xFA23, "V"), + (0xFA25, "M", "逸"), + (0xFA26, "M", "都"), + (0xFA27, "V"), + (0xFA2A, "M", "飯"), + (0xFA2B, "M", "飼"), + (0xFA2C, "M", "館"), + (0xFA2D, "M", "鶴"), + (0xFA2E, "M", "郞"), + (0xFA2F, "M", "隷"), + (0xFA30, "M", "侮"), + (0xFA31, "M", "僧"), + (0xFA32, "M", "免"), + (0xFA33, "M", "勉"), + (0xFA34, "M", "勤"), + (0xFA35, "M", "卑"), + (0xFA36, "M", "喝"), + (0xFA37, "M", "嘆"), + (0xFA38, "M", "器"), + (0xFA39, "M", "塀"), + (0xFA3A, "M", "墨"), + (0xFA3B, "M", "層"), + (0xFA3C, "M", "屮"), + (0xFA3D, "M", "悔"), + (0xFA3E, "M", "慨"), + (0xFA3F, "M", "憎"), + (0xFA40, "M", "懲"), + (0xFA41, "M", "敏"), + (0xFA42, "M", "既"), + (0xFA43, "M", "暑"), + (0xFA44, "M", "梅"), + (0xFA45, "M", "海"), + (0xFA46, "M", "渚"), + (0xFA47, "M", "漢"), + (0xFA48, "M", "煮"), + (0xFA49, "M", "爫"), + (0xFA4A, "M", "琢"), + (0xFA4B, "M", "碑"), + (0xFA4C, "M", "社"), + (0xFA4D, "M", "祉"), + (0xFA4E, "M", "祈"), + (0xFA4F, "M", "祐"), + (0xFA50, "M", "祖"), + (0xFA51, "M", "祝"), + (0xFA52, "M", "禍"), + (0xFA53, "M", "禎"), + (0xFA54, "M", "穀"), + (0xFA55, "M", "突"), + (0xFA56, "M", "節"), + (0xFA57, "M", "練"), + (0xFA58, "M", "縉"), + (0xFA59, "M", "繁"), + (0xFA5A, "M", "署"), + (0xFA5B, "M", "者"), + (0xFA5C, "M", "臭"), + (0xFA5D, "M", "艹"), + (0xFA5F, "M", "著"), + (0xFA60, "M", "褐"), + (0xFA61, "M", "視"), + (0xFA62, "M", "謁"), + (0xFA63, "M", "謹"), + (0xFA64, "M", "賓"), + (0xFA65, "M", "贈"), + (0xFA66, "M", "辶"), + (0xFA67, "M", "逸"), + (0xFA68, "M", "難"), + (0xFA69, "M", "響"), + (0xFA6A, "M", "頻"), + (0xFA6B, "M", "恵"), + (0xFA6C, "M", "𤋮"), + (0xFA6D, "M", "舘"), + (0xFA6E, "X"), + (0xFA70, "M", "並"), + (0xFA71, "M", "况"), + (0xFA72, "M", "全"), + (0xFA73, "M", "侀"), + (0xFA74, "M", "充"), + (0xFA75, "M", "冀"), + (0xFA76, "M", "勇"), + 
(0xFA77, "M", "勺"), + (0xFA78, "M", "喝"), + (0xFA79, "M", "啕"), + (0xFA7A, "M", "喙"), + (0xFA7B, "M", "嗢"), + (0xFA7C, "M", "塚"), + ] + + +def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFA7D, "M", "墳"), + (0xFA7E, "M", "奄"), + (0xFA7F, "M", "奔"), + (0xFA80, "M", "婢"), + (0xFA81, "M", "嬨"), + (0xFA82, "M", "廒"), + (0xFA83, "M", "廙"), + (0xFA84, "M", "彩"), + (0xFA85, "M", "徭"), + (0xFA86, "M", "惘"), + (0xFA87, "M", "慎"), + (0xFA88, "M", "愈"), + (0xFA89, "M", "憎"), + (0xFA8A, "M", "慠"), + (0xFA8B, "M", "懲"), + (0xFA8C, "M", "戴"), + (0xFA8D, "M", "揄"), + (0xFA8E, "M", "搜"), + (0xFA8F, "M", "摒"), + (0xFA90, "M", "敖"), + (0xFA91, "M", "晴"), + (0xFA92, "M", "朗"), + (0xFA93, "M", "望"), + (0xFA94, "M", "杖"), + (0xFA95, "M", "歹"), + (0xFA96, "M", "殺"), + (0xFA97, "M", "流"), + (0xFA98, "M", "滛"), + (0xFA99, "M", "滋"), + (0xFA9A, "M", "漢"), + (0xFA9B, "M", "瀞"), + (0xFA9C, "M", "煮"), + (0xFA9D, "M", "瞧"), + (0xFA9E, "M", "爵"), + (0xFA9F, "M", "犯"), + (0xFAA0, "M", "猪"), + (0xFAA1, "M", "瑱"), + (0xFAA2, "M", "甆"), + (0xFAA3, "M", "画"), + (0xFAA4, "M", "瘝"), + (0xFAA5, "M", "瘟"), + (0xFAA6, "M", "益"), + (0xFAA7, "M", "盛"), + (0xFAA8, "M", "直"), + (0xFAA9, "M", "睊"), + (0xFAAA, "M", "着"), + (0xFAAB, "M", "磌"), + (0xFAAC, "M", "窱"), + (0xFAAD, "M", "節"), + (0xFAAE, "M", "类"), + (0xFAAF, "M", "絛"), + (0xFAB0, "M", "練"), + (0xFAB1, "M", "缾"), + (0xFAB2, "M", "者"), + (0xFAB3, "M", "荒"), + (0xFAB4, "M", "華"), + (0xFAB5, "M", "蝹"), + (0xFAB6, "M", "襁"), + (0xFAB7, "M", "覆"), + (0xFAB8, "M", "視"), + (0xFAB9, "M", "調"), + (0xFABA, "M", "諸"), + (0xFABB, "M", "請"), + (0xFABC, "M", "謁"), + (0xFABD, "M", "諾"), + (0xFABE, "M", "諭"), + (0xFABF, "M", "謹"), + (0xFAC0, "M", "變"), + (0xFAC1, "M", "贈"), + (0xFAC2, "M", "輸"), + (0xFAC3, "M", "遲"), + (0xFAC4, "M", "醙"), + (0xFAC5, "M", "鉶"), + (0xFAC6, "M", "陼"), + (0xFAC7, "M", "難"), + (0xFAC8, "M", "靖"), + (0xFAC9, "M", "韛"), + (0xFACA, "M", "響"), + (0xFACB, "M", "頋"), + (0xFACC, "M", "頻"), + (0xFACD, "M", "鬒"), + (0xFACE, "M", "龜"), + (0xFACF, "M", "𢡊"), + (0xFAD0, "M", "𢡄"), + (0xFAD1, "M", "𣏕"), + (0xFAD2, "M", "㮝"), + (0xFAD3, "M", "䀘"), + (0xFAD4, "M", "䀹"), + (0xFAD5, "M", "𥉉"), + (0xFAD6, "M", "𥳐"), + (0xFAD7, "M", "𧻓"), + (0xFAD8, "M", "齃"), + (0xFAD9, "M", "龎"), + (0xFADA, "X"), + (0xFB00, "M", "ff"), + (0xFB01, "M", "fi"), + (0xFB02, "M", "fl"), + (0xFB03, "M", "ffi"), + (0xFB04, "M", "ffl"), + (0xFB05, "M", "st"), + ] + + +def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFB07, "X"), + (0xFB13, "M", "մն"), + (0xFB14, "M", "մե"), + (0xFB15, "M", "մի"), + (0xFB16, "M", "վն"), + (0xFB17, "M", "մխ"), + (0xFB18, "X"), + (0xFB1D, "M", "יִ"), + (0xFB1E, "V"), + (0xFB1F, "M", "ײַ"), + (0xFB20, "M", "ע"), + (0xFB21, "M", "א"), + (0xFB22, "M", "ד"), + (0xFB23, "M", "ה"), + (0xFB24, "M", "כ"), + (0xFB25, "M", "ל"), + (0xFB26, "M", "ם"), + (0xFB27, "M", "ר"), + (0xFB28, "M", "ת"), + (0xFB29, "3", "+"), + (0xFB2A, "M", "שׁ"), + (0xFB2B, "M", "שׂ"), + (0xFB2C, "M", "שּׁ"), + (0xFB2D, "M", "שּׂ"), + (0xFB2E, "M", "אַ"), + (0xFB2F, "M", "אָ"), + (0xFB30, "M", "אּ"), + (0xFB31, "M", "בּ"), + (0xFB32, "M", "גּ"), + (0xFB33, "M", "דּ"), + (0xFB34, "M", "הּ"), + (0xFB35, "M", "וּ"), + (0xFB36, "M", "זּ"), + (0xFB37, "X"), + (0xFB38, "M", "טּ"), + (0xFB39, "M", "יּ"), + (0xFB3A, "M", "ךּ"), + (0xFB3B, "M", "כּ"), + (0xFB3C, "M", "לּ"), + (0xFB3D, "X"), + (0xFB3E, "M", "מּ"), + (0xFB3F, "X"), + (0xFB40, "M", "נּ"), + (0xFB41, "M", "סּ"), + (0xFB42, "X"), + (0xFB43, "M", "ףּ"), + (0xFB44, "M", "פּ"), + (0xFB45, 
"X"), + (0xFB46, "M", "צּ"), + (0xFB47, "M", "קּ"), + (0xFB48, "M", "רּ"), + (0xFB49, "M", "שּ"), + (0xFB4A, "M", "תּ"), + (0xFB4B, "M", "וֹ"), + (0xFB4C, "M", "בֿ"), + (0xFB4D, "M", "כֿ"), + (0xFB4E, "M", "פֿ"), + (0xFB4F, "M", "אל"), + (0xFB50, "M", "ٱ"), + (0xFB52, "M", "ٻ"), + (0xFB56, "M", "پ"), + (0xFB5A, "M", "ڀ"), + (0xFB5E, "M", "ٺ"), + (0xFB62, "M", "ٿ"), + (0xFB66, "M", "ٹ"), + (0xFB6A, "M", "ڤ"), + (0xFB6E, "M", "ڦ"), + (0xFB72, "M", "ڄ"), + (0xFB76, "M", "ڃ"), + (0xFB7A, "M", "چ"), + (0xFB7E, "M", "ڇ"), + (0xFB82, "M", "ڍ"), + (0xFB84, "M", "ڌ"), + (0xFB86, "M", "ڎ"), + (0xFB88, "M", "ڈ"), + (0xFB8A, "M", "ژ"), + (0xFB8C, "M", "ڑ"), + (0xFB8E, "M", "ک"), + (0xFB92, "M", "گ"), + (0xFB96, "M", "ڳ"), + (0xFB9A, "M", "ڱ"), + (0xFB9E, "M", "ں"), + (0xFBA0, "M", "ڻ"), + (0xFBA4, "M", "ۀ"), + (0xFBA6, "M", "ہ"), + (0xFBAA, "M", "ھ"), + (0xFBAE, "M", "ے"), + (0xFBB0, "M", "ۓ"), + (0xFBB2, "V"), + (0xFBC3, "X"), + (0xFBD3, "M", "ڭ"), + (0xFBD7, "M", "ۇ"), + (0xFBD9, "M", "ۆ"), + (0xFBDB, "M", "ۈ"), + (0xFBDD, "M", "ۇٴ"), + (0xFBDE, "M", "ۋ"), + (0xFBE0, "M", "ۅ"), + (0xFBE2, "M", "ۉ"), + (0xFBE4, "M", "ې"), + (0xFBE8, "M", "ى"), + ] + + +def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFBEA, "M", "ئا"), + (0xFBEC, "M", "ئە"), + (0xFBEE, "M", "ئو"), + (0xFBF0, "M", "ئۇ"), + (0xFBF2, "M", "ئۆ"), + (0xFBF4, "M", "ئۈ"), + (0xFBF6, "M", "ئې"), + (0xFBF9, "M", "ئى"), + (0xFBFC, "M", "ی"), + (0xFC00, "M", "ئج"), + (0xFC01, "M", "ئح"), + (0xFC02, "M", "ئم"), + (0xFC03, "M", "ئى"), + (0xFC04, "M", "ئي"), + (0xFC05, "M", "بج"), + (0xFC06, "M", "بح"), + (0xFC07, "M", "بخ"), + (0xFC08, "M", "بم"), + (0xFC09, "M", "بى"), + (0xFC0A, "M", "بي"), + (0xFC0B, "M", "تج"), + (0xFC0C, "M", "تح"), + (0xFC0D, "M", "تخ"), + (0xFC0E, "M", "تم"), + (0xFC0F, "M", "تى"), + (0xFC10, "M", "تي"), + (0xFC11, "M", "ثج"), + (0xFC12, "M", "ثم"), + (0xFC13, "M", "ثى"), + (0xFC14, "M", "ثي"), + (0xFC15, "M", "جح"), + (0xFC16, "M", "جم"), + (0xFC17, "M", "حج"), + (0xFC18, "M", "حم"), + (0xFC19, "M", "خج"), + (0xFC1A, "M", "خح"), + (0xFC1B, "M", "خم"), + (0xFC1C, "M", "سج"), + (0xFC1D, "M", "سح"), + (0xFC1E, "M", "سخ"), + (0xFC1F, "M", "سم"), + (0xFC20, "M", "صح"), + (0xFC21, "M", "صم"), + (0xFC22, "M", "ضج"), + (0xFC23, "M", "ضح"), + (0xFC24, "M", "ضخ"), + (0xFC25, "M", "ضم"), + (0xFC26, "M", "طح"), + (0xFC27, "M", "طم"), + (0xFC28, "M", "ظم"), + (0xFC29, "M", "عج"), + (0xFC2A, "M", "عم"), + (0xFC2B, "M", "غج"), + (0xFC2C, "M", "غم"), + (0xFC2D, "M", "فج"), + (0xFC2E, "M", "فح"), + (0xFC2F, "M", "فخ"), + (0xFC30, "M", "فم"), + (0xFC31, "M", "فى"), + (0xFC32, "M", "في"), + (0xFC33, "M", "قح"), + (0xFC34, "M", "قم"), + (0xFC35, "M", "قى"), + (0xFC36, "M", "قي"), + (0xFC37, "M", "كا"), + (0xFC38, "M", "كج"), + (0xFC39, "M", "كح"), + (0xFC3A, "M", "كخ"), + (0xFC3B, "M", "كل"), + (0xFC3C, "M", "كم"), + (0xFC3D, "M", "كى"), + (0xFC3E, "M", "كي"), + (0xFC3F, "M", "لج"), + (0xFC40, "M", "لح"), + (0xFC41, "M", "لخ"), + (0xFC42, "M", "لم"), + (0xFC43, "M", "لى"), + (0xFC44, "M", "لي"), + (0xFC45, "M", "مج"), + (0xFC46, "M", "مح"), + (0xFC47, "M", "مخ"), + (0xFC48, "M", "مم"), + (0xFC49, "M", "مى"), + (0xFC4A, "M", "مي"), + (0xFC4B, "M", "نج"), + (0xFC4C, "M", "نح"), + (0xFC4D, "M", "نخ"), + (0xFC4E, "M", "نم"), + (0xFC4F, "M", "نى"), + (0xFC50, "M", "ني"), + (0xFC51, "M", "هج"), + (0xFC52, "M", "هم"), + (0xFC53, "M", "هى"), + (0xFC54, "M", "هي"), + (0xFC55, "M", "يج"), + (0xFC56, "M", "يح"), + (0xFC57, "M", "يخ"), + (0xFC58, "M", "يم"), + (0xFC59, "M", "يى"), + (0xFC5A, "M", "يي"), + ] + + +def 
_seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFC5B, "M", "ذٰ"), + (0xFC5C, "M", "رٰ"), + (0xFC5D, "M", "ىٰ"), + (0xFC5E, "3", " ٌّ"), + (0xFC5F, "3", " ٍّ"), + (0xFC60, "3", " َّ"), + (0xFC61, "3", " ُّ"), + (0xFC62, "3", " ِّ"), + (0xFC63, "3", " ّٰ"), + (0xFC64, "M", "ئر"), + (0xFC65, "M", "ئز"), + (0xFC66, "M", "ئم"), + (0xFC67, "M", "ئن"), + (0xFC68, "M", "ئى"), + (0xFC69, "M", "ئي"), + (0xFC6A, "M", "بر"), + (0xFC6B, "M", "بز"), + (0xFC6C, "M", "بم"), + (0xFC6D, "M", "بن"), + (0xFC6E, "M", "بى"), + (0xFC6F, "M", "بي"), + (0xFC70, "M", "تر"), + (0xFC71, "M", "تز"), + (0xFC72, "M", "تم"), + (0xFC73, "M", "تن"), + (0xFC74, "M", "تى"), + (0xFC75, "M", "تي"), + (0xFC76, "M", "ثر"), + (0xFC77, "M", "ثز"), + (0xFC78, "M", "ثم"), + (0xFC79, "M", "ثن"), + (0xFC7A, "M", "ثى"), + (0xFC7B, "M", "ثي"), + (0xFC7C, "M", "فى"), + (0xFC7D, "M", "في"), + (0xFC7E, "M", "قى"), + (0xFC7F, "M", "قي"), + (0xFC80, "M", "كا"), + (0xFC81, "M", "كل"), + (0xFC82, "M", "كم"), + (0xFC83, "M", "كى"), + (0xFC84, "M", "كي"), + (0xFC85, "M", "لم"), + (0xFC86, "M", "لى"), + (0xFC87, "M", "لي"), + (0xFC88, "M", "ما"), + (0xFC89, "M", "مم"), + (0xFC8A, "M", "نر"), + (0xFC8B, "M", "نز"), + (0xFC8C, "M", "نم"), + (0xFC8D, "M", "نن"), + (0xFC8E, "M", "نى"), + (0xFC8F, "M", "ني"), + (0xFC90, "M", "ىٰ"), + (0xFC91, "M", "ير"), + (0xFC92, "M", "يز"), + (0xFC93, "M", "يم"), + (0xFC94, "M", "ين"), + (0xFC95, "M", "يى"), + (0xFC96, "M", "يي"), + (0xFC97, "M", "ئج"), + (0xFC98, "M", "ئح"), + (0xFC99, "M", "ئخ"), + (0xFC9A, "M", "ئم"), + (0xFC9B, "M", "ئه"), + (0xFC9C, "M", "بج"), + (0xFC9D, "M", "بح"), + (0xFC9E, "M", "بخ"), + (0xFC9F, "M", "بم"), + (0xFCA0, "M", "به"), + (0xFCA1, "M", "تج"), + (0xFCA2, "M", "تح"), + (0xFCA3, "M", "تخ"), + (0xFCA4, "M", "تم"), + (0xFCA5, "M", "ته"), + (0xFCA6, "M", "ثم"), + (0xFCA7, "M", "جح"), + (0xFCA8, "M", "جم"), + (0xFCA9, "M", "حج"), + (0xFCAA, "M", "حم"), + (0xFCAB, "M", "خج"), + (0xFCAC, "M", "خم"), + (0xFCAD, "M", "سج"), + (0xFCAE, "M", "سح"), + (0xFCAF, "M", "سخ"), + (0xFCB0, "M", "سم"), + (0xFCB1, "M", "صح"), + (0xFCB2, "M", "صخ"), + (0xFCB3, "M", "صم"), + (0xFCB4, "M", "ضج"), + (0xFCB5, "M", "ضح"), + (0xFCB6, "M", "ضخ"), + (0xFCB7, "M", "ضم"), + (0xFCB8, "M", "طح"), + (0xFCB9, "M", "ظم"), + (0xFCBA, "M", "عج"), + (0xFCBB, "M", "عم"), + (0xFCBC, "M", "غج"), + (0xFCBD, "M", "غم"), + (0xFCBE, "M", "فج"), + ] + + +def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFCBF, "M", "فح"), + (0xFCC0, "M", "فخ"), + (0xFCC1, "M", "فم"), + (0xFCC2, "M", "قح"), + (0xFCC3, "M", "قم"), + (0xFCC4, "M", "كج"), + (0xFCC5, "M", "كح"), + (0xFCC6, "M", "كخ"), + (0xFCC7, "M", "كل"), + (0xFCC8, "M", "كم"), + (0xFCC9, "M", "لج"), + (0xFCCA, "M", "لح"), + (0xFCCB, "M", "لخ"), + (0xFCCC, "M", "لم"), + (0xFCCD, "M", "له"), + (0xFCCE, "M", "مج"), + (0xFCCF, "M", "مح"), + (0xFCD0, "M", "مخ"), + (0xFCD1, "M", "مم"), + (0xFCD2, "M", "نج"), + (0xFCD3, "M", "نح"), + (0xFCD4, "M", "نخ"), + (0xFCD5, "M", "نم"), + (0xFCD6, "M", "نه"), + (0xFCD7, "M", "هج"), + (0xFCD8, "M", "هم"), + (0xFCD9, "M", "هٰ"), + (0xFCDA, "M", "يج"), + (0xFCDB, "M", "يح"), + (0xFCDC, "M", "يخ"), + (0xFCDD, "M", "يم"), + (0xFCDE, "M", "يه"), + (0xFCDF, "M", "ئم"), + (0xFCE0, "M", "ئه"), + (0xFCE1, "M", "بم"), + (0xFCE2, "M", "به"), + (0xFCE3, "M", "تم"), + (0xFCE4, "M", "ته"), + (0xFCE5, "M", "ثم"), + (0xFCE6, "M", "ثه"), + (0xFCE7, "M", "سم"), + (0xFCE8, "M", "سه"), + (0xFCE9, "M", "شم"), + (0xFCEA, "M", "شه"), + (0xFCEB, "M", "كل"), + (0xFCEC, "M", "كم"), + (0xFCED, "M", "لم"), + 
(0xFCEE, "M", "نم"), + (0xFCEF, "M", "نه"), + (0xFCF0, "M", "يم"), + (0xFCF1, "M", "يه"), + (0xFCF2, "M", "ـَّ"), + (0xFCF3, "M", "ـُّ"), + (0xFCF4, "M", "ـِّ"), + (0xFCF5, "M", "طى"), + (0xFCF6, "M", "طي"), + (0xFCF7, "M", "عى"), + (0xFCF8, "M", "عي"), + (0xFCF9, "M", "غى"), + (0xFCFA, "M", "غي"), + (0xFCFB, "M", "سى"), + (0xFCFC, "M", "سي"), + (0xFCFD, "M", "شى"), + (0xFCFE, "M", "شي"), + (0xFCFF, "M", "حى"), + (0xFD00, "M", "حي"), + (0xFD01, "M", "جى"), + (0xFD02, "M", "جي"), + (0xFD03, "M", "خى"), + (0xFD04, "M", "خي"), + (0xFD05, "M", "صى"), + (0xFD06, "M", "صي"), + (0xFD07, "M", "ضى"), + (0xFD08, "M", "ضي"), + (0xFD09, "M", "شج"), + (0xFD0A, "M", "شح"), + (0xFD0B, "M", "شخ"), + (0xFD0C, "M", "شم"), + (0xFD0D, "M", "شر"), + (0xFD0E, "M", "سر"), + (0xFD0F, "M", "صر"), + (0xFD10, "M", "ضر"), + (0xFD11, "M", "طى"), + (0xFD12, "M", "طي"), + (0xFD13, "M", "عى"), + (0xFD14, "M", "عي"), + (0xFD15, "M", "غى"), + (0xFD16, "M", "غي"), + (0xFD17, "M", "سى"), + (0xFD18, "M", "سي"), + (0xFD19, "M", "شى"), + (0xFD1A, "M", "شي"), + (0xFD1B, "M", "حى"), + (0xFD1C, "M", "حي"), + (0xFD1D, "M", "جى"), + (0xFD1E, "M", "جي"), + (0xFD1F, "M", "خى"), + (0xFD20, "M", "خي"), + (0xFD21, "M", "صى"), + (0xFD22, "M", "صي"), + ] + + +def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFD23, "M", "ضى"), + (0xFD24, "M", "ضي"), + (0xFD25, "M", "شج"), + (0xFD26, "M", "شح"), + (0xFD27, "M", "شخ"), + (0xFD28, "M", "شم"), + (0xFD29, "M", "شر"), + (0xFD2A, "M", "سر"), + (0xFD2B, "M", "صر"), + (0xFD2C, "M", "ضر"), + (0xFD2D, "M", "شج"), + (0xFD2E, "M", "شح"), + (0xFD2F, "M", "شخ"), + (0xFD30, "M", "شم"), + (0xFD31, "M", "سه"), + (0xFD32, "M", "شه"), + (0xFD33, "M", "طم"), + (0xFD34, "M", "سج"), + (0xFD35, "M", "سح"), + (0xFD36, "M", "سخ"), + (0xFD37, "M", "شج"), + (0xFD38, "M", "شح"), + (0xFD39, "M", "شخ"), + (0xFD3A, "M", "طم"), + (0xFD3B, "M", "ظم"), + (0xFD3C, "M", "اً"), + (0xFD3E, "V"), + (0xFD50, "M", "تجم"), + (0xFD51, "M", "تحج"), + (0xFD53, "M", "تحم"), + (0xFD54, "M", "تخم"), + (0xFD55, "M", "تمج"), + (0xFD56, "M", "تمح"), + (0xFD57, "M", "تمخ"), + (0xFD58, "M", "جمح"), + (0xFD5A, "M", "حمي"), + (0xFD5B, "M", "حمى"), + (0xFD5C, "M", "سحج"), + (0xFD5D, "M", "سجح"), + (0xFD5E, "M", "سجى"), + (0xFD5F, "M", "سمح"), + (0xFD61, "M", "سمج"), + (0xFD62, "M", "سمم"), + (0xFD64, "M", "صحح"), + (0xFD66, "M", "صمم"), + (0xFD67, "M", "شحم"), + (0xFD69, "M", "شجي"), + (0xFD6A, "M", "شمخ"), + (0xFD6C, "M", "شمم"), + (0xFD6E, "M", "ضحى"), + (0xFD6F, "M", "ضخم"), + (0xFD71, "M", "طمح"), + (0xFD73, "M", "طمم"), + (0xFD74, "M", "طمي"), + (0xFD75, "M", "عجم"), + (0xFD76, "M", "عمم"), + (0xFD78, "M", "عمى"), + (0xFD79, "M", "غمم"), + (0xFD7A, "M", "غمي"), + (0xFD7B, "M", "غمى"), + (0xFD7C, "M", "فخم"), + (0xFD7E, "M", "قمح"), + (0xFD7F, "M", "قمم"), + (0xFD80, "M", "لحم"), + (0xFD81, "M", "لحي"), + (0xFD82, "M", "لحى"), + (0xFD83, "M", "لجج"), + (0xFD85, "M", "لخم"), + (0xFD87, "M", "لمح"), + (0xFD89, "M", "محج"), + (0xFD8A, "M", "محم"), + (0xFD8B, "M", "محي"), + (0xFD8C, "M", "مجح"), + (0xFD8D, "M", "مجم"), + (0xFD8E, "M", "مخج"), + (0xFD8F, "M", "مخم"), + (0xFD90, "X"), + (0xFD92, "M", "مجخ"), + (0xFD93, "M", "همج"), + (0xFD94, "M", "همم"), + (0xFD95, "M", "نحم"), + (0xFD96, "M", "نحى"), + (0xFD97, "M", "نجم"), + (0xFD99, "M", "نجى"), + (0xFD9A, "M", "نمي"), + (0xFD9B, "M", "نمى"), + (0xFD9C, "M", "يمم"), + (0xFD9E, "M", "بخي"), + (0xFD9F, "M", "تجي"), + (0xFDA0, "M", "تجى"), + (0xFDA1, "M", "تخي"), + (0xFDA2, "M", "تخى"), + (0xFDA3, "M", "تمي"), + (0xFDA4, "M", "تمى"), + (0xFDA5, "M", "جمي"), + 
(0xFDA6, "M", "جحى"), + (0xFDA7, "M", "جمى"), + (0xFDA8, "M", "سخى"), + (0xFDA9, "M", "صحي"), + (0xFDAA, "M", "شحي"), + ] + + +def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFDAB, "M", "ضحي"), + (0xFDAC, "M", "لجي"), + (0xFDAD, "M", "لمي"), + (0xFDAE, "M", "يحي"), + (0xFDAF, "M", "يجي"), + (0xFDB0, "M", "يمي"), + (0xFDB1, "M", "ممي"), + (0xFDB2, "M", "قمي"), + (0xFDB3, "M", "نحي"), + (0xFDB4, "M", "قمح"), + (0xFDB5, "M", "لحم"), + (0xFDB6, "M", "عمي"), + (0xFDB7, "M", "كمي"), + (0xFDB8, "M", "نجح"), + (0xFDB9, "M", "مخي"), + (0xFDBA, "M", "لجم"), + (0xFDBB, "M", "كمم"), + (0xFDBC, "M", "لجم"), + (0xFDBD, "M", "نجح"), + (0xFDBE, "M", "جحي"), + (0xFDBF, "M", "حجي"), + (0xFDC0, "M", "مجي"), + (0xFDC1, "M", "فمي"), + (0xFDC2, "M", "بحي"), + (0xFDC3, "M", "كمم"), + (0xFDC4, "M", "عجم"), + (0xFDC5, "M", "صمم"), + (0xFDC6, "M", "سخي"), + (0xFDC7, "M", "نجي"), + (0xFDC8, "X"), + (0xFDCF, "V"), + (0xFDD0, "X"), + (0xFDF0, "M", "صلے"), + (0xFDF1, "M", "قلے"), + (0xFDF2, "M", "الله"), + (0xFDF3, "M", "اكبر"), + (0xFDF4, "M", "محمد"), + (0xFDF5, "M", "صلعم"), + (0xFDF6, "M", "رسول"), + (0xFDF7, "M", "عليه"), + (0xFDF8, "M", "وسلم"), + (0xFDF9, "M", "صلى"), + (0xFDFA, "3", "صلى الله عليه وسلم"), + (0xFDFB, "3", "جل جلاله"), + (0xFDFC, "M", "ریال"), + (0xFDFD, "V"), + (0xFE00, "I"), + (0xFE10, "3", ","), + (0xFE11, "M", "、"), + (0xFE12, "X"), + (0xFE13, "3", ":"), + (0xFE14, "3", ";"), + (0xFE15, "3", "!"), + (0xFE16, "3", "?"), + (0xFE17, "M", "〖"), + (0xFE18, "M", "〗"), + (0xFE19, "X"), + (0xFE20, "V"), + (0xFE30, "X"), + (0xFE31, "M", "—"), + (0xFE32, "M", "–"), + (0xFE33, "3", "_"), + (0xFE35, "3", "("), + (0xFE36, "3", ")"), + (0xFE37, "3", "{"), + (0xFE38, "3", "}"), + (0xFE39, "M", "〔"), + (0xFE3A, "M", "〕"), + (0xFE3B, "M", "【"), + (0xFE3C, "M", "】"), + (0xFE3D, "M", "《"), + (0xFE3E, "M", "》"), + (0xFE3F, "M", "〈"), + (0xFE40, "M", "〉"), + (0xFE41, "M", "「"), + (0xFE42, "M", "」"), + (0xFE43, "M", "『"), + (0xFE44, "M", "』"), + (0xFE45, "V"), + (0xFE47, "3", "["), + (0xFE48, "3", "]"), + (0xFE49, "3", " ̅"), + (0xFE4D, "3", "_"), + (0xFE50, "3", ","), + (0xFE51, "M", "、"), + (0xFE52, "X"), + (0xFE54, "3", ";"), + (0xFE55, "3", ":"), + (0xFE56, "3", "?"), + (0xFE57, "3", "!"), + (0xFE58, "M", "—"), + (0xFE59, "3", "("), + (0xFE5A, "3", ")"), + (0xFE5B, "3", "{"), + (0xFE5C, "3", "}"), + (0xFE5D, "M", "〔"), + (0xFE5E, "M", "〕"), + (0xFE5F, "3", "#"), + (0xFE60, "3", "&"), + (0xFE61, "3", "*"), + ] + + +def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFE62, "3", "+"), + (0xFE63, "M", "-"), + (0xFE64, "3", "<"), + (0xFE65, "3", ">"), + (0xFE66, "3", "="), + (0xFE67, "X"), + (0xFE68, "3", "\\"), + (0xFE69, "3", "$"), + (0xFE6A, "3", "%"), + (0xFE6B, "3", "@"), + (0xFE6C, "X"), + (0xFE70, "3", " ً"), + (0xFE71, "M", "ـً"), + (0xFE72, "3", " ٌ"), + (0xFE73, "V"), + (0xFE74, "3", " ٍ"), + (0xFE75, "X"), + (0xFE76, "3", " َ"), + (0xFE77, "M", "ـَ"), + (0xFE78, "3", " ُ"), + (0xFE79, "M", "ـُ"), + (0xFE7A, "3", " ِ"), + (0xFE7B, "M", "ـِ"), + (0xFE7C, "3", " ّ"), + (0xFE7D, "M", "ـّ"), + (0xFE7E, "3", " ْ"), + (0xFE7F, "M", "ـْ"), + (0xFE80, "M", "ء"), + (0xFE81, "M", "آ"), + (0xFE83, "M", "أ"), + (0xFE85, "M", "ؤ"), + (0xFE87, "M", "إ"), + (0xFE89, "M", "ئ"), + (0xFE8D, "M", "ا"), + (0xFE8F, "M", "ب"), + (0xFE93, "M", "ة"), + (0xFE95, "M", "ت"), + (0xFE99, "M", "ث"), + (0xFE9D, "M", "ج"), + (0xFEA1, "M", "ح"), + (0xFEA5, "M", "خ"), + (0xFEA9, "M", "د"), + (0xFEAB, "M", "ذ"), + (0xFEAD, "M", "ر"), + (0xFEAF, "M", "ز"), + (0xFEB1, "M", 
"س"), + (0xFEB5, "M", "ش"), + (0xFEB9, "M", "ص"), + (0xFEBD, "M", "ض"), + (0xFEC1, "M", "ط"), + (0xFEC5, "M", "ظ"), + (0xFEC9, "M", "ع"), + (0xFECD, "M", "غ"), + (0xFED1, "M", "ف"), + (0xFED5, "M", "ق"), + (0xFED9, "M", "ك"), + (0xFEDD, "M", "ل"), + (0xFEE1, "M", "م"), + (0xFEE5, "M", "ن"), + (0xFEE9, "M", "ه"), + (0xFEED, "M", "و"), + (0xFEEF, "M", "ى"), + (0xFEF1, "M", "ي"), + (0xFEF5, "M", "لآ"), + (0xFEF7, "M", "لأ"), + (0xFEF9, "M", "لإ"), + (0xFEFB, "M", "لا"), + (0xFEFD, "X"), + (0xFEFF, "I"), + (0xFF00, "X"), + (0xFF01, "3", "!"), + (0xFF02, "3", '"'), + (0xFF03, "3", "#"), + (0xFF04, "3", "$"), + (0xFF05, "3", "%"), + (0xFF06, "3", "&"), + (0xFF07, "3", "'"), + (0xFF08, "3", "("), + (0xFF09, "3", ")"), + (0xFF0A, "3", "*"), + (0xFF0B, "3", "+"), + (0xFF0C, "3", ","), + (0xFF0D, "M", "-"), + (0xFF0E, "M", "."), + (0xFF0F, "3", "/"), + (0xFF10, "M", "0"), + (0xFF11, "M", "1"), + (0xFF12, "M", "2"), + (0xFF13, "M", "3"), + (0xFF14, "M", "4"), + (0xFF15, "M", "5"), + (0xFF16, "M", "6"), + (0xFF17, "M", "7"), + (0xFF18, "M", "8"), + (0xFF19, "M", "9"), + (0xFF1A, "3", ":"), + (0xFF1B, "3", ";"), + (0xFF1C, "3", "<"), + (0xFF1D, "3", "="), + (0xFF1E, "3", ">"), + ] + + +def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFF1F, "3", "?"), + (0xFF20, "3", "@"), + (0xFF21, "M", "a"), + (0xFF22, "M", "b"), + (0xFF23, "M", "c"), + (0xFF24, "M", "d"), + (0xFF25, "M", "e"), + (0xFF26, "M", "f"), + (0xFF27, "M", "g"), + (0xFF28, "M", "h"), + (0xFF29, "M", "i"), + (0xFF2A, "M", "j"), + (0xFF2B, "M", "k"), + (0xFF2C, "M", "l"), + (0xFF2D, "M", "m"), + (0xFF2E, "M", "n"), + (0xFF2F, "M", "o"), + (0xFF30, "M", "p"), + (0xFF31, "M", "q"), + (0xFF32, "M", "r"), + (0xFF33, "M", "s"), + (0xFF34, "M", "t"), + (0xFF35, "M", "u"), + (0xFF36, "M", "v"), + (0xFF37, "M", "w"), + (0xFF38, "M", "x"), + (0xFF39, "M", "y"), + (0xFF3A, "M", "z"), + (0xFF3B, "3", "["), + (0xFF3C, "3", "\\"), + (0xFF3D, "3", "]"), + (0xFF3E, "3", "^"), + (0xFF3F, "3", "_"), + (0xFF40, "3", "`"), + (0xFF41, "M", "a"), + (0xFF42, "M", "b"), + (0xFF43, "M", "c"), + (0xFF44, "M", "d"), + (0xFF45, "M", "e"), + (0xFF46, "M", "f"), + (0xFF47, "M", "g"), + (0xFF48, "M", "h"), + (0xFF49, "M", "i"), + (0xFF4A, "M", "j"), + (0xFF4B, "M", "k"), + (0xFF4C, "M", "l"), + (0xFF4D, "M", "m"), + (0xFF4E, "M", "n"), + (0xFF4F, "M", "o"), + (0xFF50, "M", "p"), + (0xFF51, "M", "q"), + (0xFF52, "M", "r"), + (0xFF53, "M", "s"), + (0xFF54, "M", "t"), + (0xFF55, "M", "u"), + (0xFF56, "M", "v"), + (0xFF57, "M", "w"), + (0xFF58, "M", "x"), + (0xFF59, "M", "y"), + (0xFF5A, "M", "z"), + (0xFF5B, "3", "{"), + (0xFF5C, "3", "|"), + (0xFF5D, "3", "}"), + (0xFF5E, "3", "~"), + (0xFF5F, "M", "⦅"), + (0xFF60, "M", "⦆"), + (0xFF61, "M", "."), + (0xFF62, "M", "「"), + (0xFF63, "M", "」"), + (0xFF64, "M", "、"), + (0xFF65, "M", "・"), + (0xFF66, "M", "ヲ"), + (0xFF67, "M", "ァ"), + (0xFF68, "M", "ィ"), + (0xFF69, "M", "ゥ"), + (0xFF6A, "M", "ェ"), + (0xFF6B, "M", "ォ"), + (0xFF6C, "M", "ャ"), + (0xFF6D, "M", "ュ"), + (0xFF6E, "M", "ョ"), + (0xFF6F, "M", "ッ"), + (0xFF70, "M", "ー"), + (0xFF71, "M", "ア"), + (0xFF72, "M", "イ"), + (0xFF73, "M", "ウ"), + (0xFF74, "M", "エ"), + (0xFF75, "M", "オ"), + (0xFF76, "M", "カ"), + (0xFF77, "M", "キ"), + (0xFF78, "M", "ク"), + (0xFF79, "M", "ケ"), + (0xFF7A, "M", "コ"), + (0xFF7B, "M", "サ"), + (0xFF7C, "M", "シ"), + (0xFF7D, "M", "ス"), + (0xFF7E, "M", "セ"), + (0xFF7F, "M", "ソ"), + (0xFF80, "M", "タ"), + (0xFF81, "M", "チ"), + (0xFF82, "M", "ツ"), + ] + + +def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + 
return [ + (0xFF83, "M", "テ"), + (0xFF84, "M", "ト"), + (0xFF85, "M", "ナ"), + (0xFF86, "M", "ニ"), + (0xFF87, "M", "ヌ"), + (0xFF88, "M", "ネ"), + (0xFF89, "M", "ノ"), + (0xFF8A, "M", "ハ"), + (0xFF8B, "M", "ヒ"), + (0xFF8C, "M", "フ"), + (0xFF8D, "M", "ヘ"), + (0xFF8E, "M", "ホ"), + (0xFF8F, "M", "マ"), + (0xFF90, "M", "ミ"), + (0xFF91, "M", "ム"), + (0xFF92, "M", "メ"), + (0xFF93, "M", "モ"), + (0xFF94, "M", "ヤ"), + (0xFF95, "M", "ユ"), + (0xFF96, "M", "ヨ"), + (0xFF97, "M", "ラ"), + (0xFF98, "M", "リ"), + (0xFF99, "M", "ル"), + (0xFF9A, "M", "レ"), + (0xFF9B, "M", "ロ"), + (0xFF9C, "M", "ワ"), + (0xFF9D, "M", "ン"), + (0xFF9E, "M", "゙"), + (0xFF9F, "M", "゚"), + (0xFFA0, "X"), + (0xFFA1, "M", "ᄀ"), + (0xFFA2, "M", "ᄁ"), + (0xFFA3, "M", "ᆪ"), + (0xFFA4, "M", "ᄂ"), + (0xFFA5, "M", "ᆬ"), + (0xFFA6, "M", "ᆭ"), + (0xFFA7, "M", "ᄃ"), + (0xFFA8, "M", "ᄄ"), + (0xFFA9, "M", "ᄅ"), + (0xFFAA, "M", "ᆰ"), + (0xFFAB, "M", "ᆱ"), + (0xFFAC, "M", "ᆲ"), + (0xFFAD, "M", "ᆳ"), + (0xFFAE, "M", "ᆴ"), + (0xFFAF, "M", "ᆵ"), + (0xFFB0, "M", "ᄚ"), + (0xFFB1, "M", "ᄆ"), + (0xFFB2, "M", "ᄇ"), + (0xFFB3, "M", "ᄈ"), + (0xFFB4, "M", "ᄡ"), + (0xFFB5, "M", "ᄉ"), + (0xFFB6, "M", "ᄊ"), + (0xFFB7, "M", "ᄋ"), + (0xFFB8, "M", "ᄌ"), + (0xFFB9, "M", "ᄍ"), + (0xFFBA, "M", "ᄎ"), + (0xFFBB, "M", "ᄏ"), + (0xFFBC, "M", "ᄐ"), + (0xFFBD, "M", "ᄑ"), + (0xFFBE, "M", "ᄒ"), + (0xFFBF, "X"), + (0xFFC2, "M", "ᅡ"), + (0xFFC3, "M", "ᅢ"), + (0xFFC4, "M", "ᅣ"), + (0xFFC5, "M", "ᅤ"), + (0xFFC6, "M", "ᅥ"), + (0xFFC7, "M", "ᅦ"), + (0xFFC8, "X"), + (0xFFCA, "M", "ᅧ"), + (0xFFCB, "M", "ᅨ"), + (0xFFCC, "M", "ᅩ"), + (0xFFCD, "M", "ᅪ"), + (0xFFCE, "M", "ᅫ"), + (0xFFCF, "M", "ᅬ"), + (0xFFD0, "X"), + (0xFFD2, "M", "ᅭ"), + (0xFFD3, "M", "ᅮ"), + (0xFFD4, "M", "ᅯ"), + (0xFFD5, "M", "ᅰ"), + (0xFFD6, "M", "ᅱ"), + (0xFFD7, "M", "ᅲ"), + (0xFFD8, "X"), + (0xFFDA, "M", "ᅳ"), + (0xFFDB, "M", "ᅴ"), + (0xFFDC, "M", "ᅵ"), + (0xFFDD, "X"), + (0xFFE0, "M", "¢"), + (0xFFE1, "M", "£"), + (0xFFE2, "M", "¬"), + (0xFFE3, "3", " ̄"), + (0xFFE4, "M", "¦"), + (0xFFE5, "M", "¥"), + (0xFFE6, "M", "₩"), + (0xFFE7, "X"), + (0xFFE8, "M", "│"), + (0xFFE9, "M", "←"), + (0xFFEA, "M", "↑"), + (0xFFEB, "M", "→"), + (0xFFEC, "M", "↓"), + (0xFFED, "M", "■"), + ] + + +def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFFEE, "M", "○"), + (0xFFEF, "X"), + (0x10000, "V"), + (0x1000C, "X"), + (0x1000D, "V"), + (0x10027, "X"), + (0x10028, "V"), + (0x1003B, "X"), + (0x1003C, "V"), + (0x1003E, "X"), + (0x1003F, "V"), + (0x1004E, "X"), + (0x10050, "V"), + (0x1005E, "X"), + (0x10080, "V"), + (0x100FB, "X"), + (0x10100, "V"), + (0x10103, "X"), + (0x10107, "V"), + (0x10134, "X"), + (0x10137, "V"), + (0x1018F, "X"), + (0x10190, "V"), + (0x1019D, "X"), + (0x101A0, "V"), + (0x101A1, "X"), + (0x101D0, "V"), + (0x101FE, "X"), + (0x10280, "V"), + (0x1029D, "X"), + (0x102A0, "V"), + (0x102D1, "X"), + (0x102E0, "V"), + (0x102FC, "X"), + (0x10300, "V"), + (0x10324, "X"), + (0x1032D, "V"), + (0x1034B, "X"), + (0x10350, "V"), + (0x1037B, "X"), + (0x10380, "V"), + (0x1039E, "X"), + (0x1039F, "V"), + (0x103C4, "X"), + (0x103C8, "V"), + (0x103D6, "X"), + (0x10400, "M", "𐐨"), + (0x10401, "M", "𐐩"), + (0x10402, "M", "𐐪"), + (0x10403, "M", "𐐫"), + (0x10404, "M", "𐐬"), + (0x10405, "M", "𐐭"), + (0x10406, "M", "𐐮"), + (0x10407, "M", "𐐯"), + (0x10408, "M", "𐐰"), + (0x10409, "M", "𐐱"), + (0x1040A, "M", "𐐲"), + (0x1040B, "M", "𐐳"), + (0x1040C, "M", "𐐴"), + (0x1040D, "M", "𐐵"), + (0x1040E, "M", "𐐶"), + (0x1040F, "M", "𐐷"), + (0x10410, "M", "𐐸"), + (0x10411, "M", "𐐹"), + (0x10412, "M", "𐐺"), + (0x10413, "M", "𐐻"), 
+ (0x10414, "M", "𐐼"), + (0x10415, "M", "𐐽"), + (0x10416, "M", "𐐾"), + (0x10417, "M", "𐐿"), + (0x10418, "M", "𐑀"), + (0x10419, "M", "𐑁"), + (0x1041A, "M", "𐑂"), + (0x1041B, "M", "𐑃"), + (0x1041C, "M", "𐑄"), + (0x1041D, "M", "𐑅"), + (0x1041E, "M", "𐑆"), + (0x1041F, "M", "𐑇"), + (0x10420, "M", "𐑈"), + (0x10421, "M", "𐑉"), + (0x10422, "M", "𐑊"), + (0x10423, "M", "𐑋"), + (0x10424, "M", "𐑌"), + (0x10425, "M", "𐑍"), + (0x10426, "M", "𐑎"), + (0x10427, "M", "𐑏"), + (0x10428, "V"), + (0x1049E, "X"), + (0x104A0, "V"), + (0x104AA, "X"), + (0x104B0, "M", "𐓘"), + (0x104B1, "M", "𐓙"), + (0x104B2, "M", "𐓚"), + (0x104B3, "M", "𐓛"), + (0x104B4, "M", "𐓜"), + (0x104B5, "M", "𐓝"), + (0x104B6, "M", "𐓞"), + (0x104B7, "M", "𐓟"), + (0x104B8, "M", "𐓠"), + (0x104B9, "M", "𐓡"), + ] + + +def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x104BA, "M", "𐓢"), + (0x104BB, "M", "𐓣"), + (0x104BC, "M", "𐓤"), + (0x104BD, "M", "𐓥"), + (0x104BE, "M", "𐓦"), + (0x104BF, "M", "𐓧"), + (0x104C0, "M", "𐓨"), + (0x104C1, "M", "𐓩"), + (0x104C2, "M", "𐓪"), + (0x104C3, "M", "𐓫"), + (0x104C4, "M", "𐓬"), + (0x104C5, "M", "𐓭"), + (0x104C6, "M", "𐓮"), + (0x104C7, "M", "𐓯"), + (0x104C8, "M", "𐓰"), + (0x104C9, "M", "𐓱"), + (0x104CA, "M", "𐓲"), + (0x104CB, "M", "𐓳"), + (0x104CC, "M", "𐓴"), + (0x104CD, "M", "𐓵"), + (0x104CE, "M", "𐓶"), + (0x104CF, "M", "𐓷"), + (0x104D0, "M", "𐓸"), + (0x104D1, "M", "𐓹"), + (0x104D2, "M", "𐓺"), + (0x104D3, "M", "𐓻"), + (0x104D4, "X"), + (0x104D8, "V"), + (0x104FC, "X"), + (0x10500, "V"), + (0x10528, "X"), + (0x10530, "V"), + (0x10564, "X"), + (0x1056F, "V"), + (0x10570, "M", "𐖗"), + (0x10571, "M", "𐖘"), + (0x10572, "M", "𐖙"), + (0x10573, "M", "𐖚"), + (0x10574, "M", "𐖛"), + (0x10575, "M", "𐖜"), + (0x10576, "M", "𐖝"), + (0x10577, "M", "𐖞"), + (0x10578, "M", "𐖟"), + (0x10579, "M", "𐖠"), + (0x1057A, "M", "𐖡"), + (0x1057B, "X"), + (0x1057C, "M", "𐖣"), + (0x1057D, "M", "𐖤"), + (0x1057E, "M", "𐖥"), + (0x1057F, "M", "𐖦"), + (0x10580, "M", "𐖧"), + (0x10581, "M", "𐖨"), + (0x10582, "M", "𐖩"), + (0x10583, "M", "𐖪"), + (0x10584, "M", "𐖫"), + (0x10585, "M", "𐖬"), + (0x10586, "M", "𐖭"), + (0x10587, "M", "𐖮"), + (0x10588, "M", "𐖯"), + (0x10589, "M", "𐖰"), + (0x1058A, "M", "𐖱"), + (0x1058B, "X"), + (0x1058C, "M", "𐖳"), + (0x1058D, "M", "𐖴"), + (0x1058E, "M", "𐖵"), + (0x1058F, "M", "𐖶"), + (0x10590, "M", "𐖷"), + (0x10591, "M", "𐖸"), + (0x10592, "M", "𐖹"), + (0x10593, "X"), + (0x10594, "M", "𐖻"), + (0x10595, "M", "𐖼"), + (0x10596, "X"), + (0x10597, "V"), + (0x105A2, "X"), + (0x105A3, "V"), + (0x105B2, "X"), + (0x105B3, "V"), + (0x105BA, "X"), + (0x105BB, "V"), + (0x105BD, "X"), + (0x10600, "V"), + (0x10737, "X"), + (0x10740, "V"), + (0x10756, "X"), + (0x10760, "V"), + (0x10768, "X"), + (0x10780, "V"), + (0x10781, "M", "ː"), + (0x10782, "M", "ˑ"), + (0x10783, "M", "æ"), + (0x10784, "M", "ʙ"), + (0x10785, "M", "ɓ"), + (0x10786, "X"), + (0x10787, "M", "ʣ"), + (0x10788, "M", "ꭦ"), + (0x10789, "M", "ʥ"), + (0x1078A, "M", "ʤ"), + (0x1078B, "M", "ɖ"), + (0x1078C, "M", "ɗ"), + ] + + +def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1078D, "M", "ᶑ"), + (0x1078E, "M", "ɘ"), + (0x1078F, "M", "ɞ"), + (0x10790, "M", "ʩ"), + (0x10791, "M", "ɤ"), + (0x10792, "M", "ɢ"), + (0x10793, "M", "ɠ"), + (0x10794, "M", "ʛ"), + (0x10795, "M", "ħ"), + (0x10796, "M", "ʜ"), + (0x10797, "M", "ɧ"), + (0x10798, "M", "ʄ"), + (0x10799, "M", "ʪ"), + (0x1079A, "M", "ʫ"), + (0x1079B, "M", "ɬ"), + (0x1079C, "M", "𝼄"), + (0x1079D, "M", "ꞎ"), + (0x1079E, "M", "ɮ"), + (0x1079F, "M", "𝼅"), + (0x107A0, "M", 
"ʎ"), + (0x107A1, "M", "𝼆"), + (0x107A2, "M", "ø"), + (0x107A3, "M", "ɶ"), + (0x107A4, "M", "ɷ"), + (0x107A5, "M", "q"), + (0x107A6, "M", "ɺ"), + (0x107A7, "M", "𝼈"), + (0x107A8, "M", "ɽ"), + (0x107A9, "M", "ɾ"), + (0x107AA, "M", "ʀ"), + (0x107AB, "M", "ʨ"), + (0x107AC, "M", "ʦ"), + (0x107AD, "M", "ꭧ"), + (0x107AE, "M", "ʧ"), + (0x107AF, "M", "ʈ"), + (0x107B0, "M", "ⱱ"), + (0x107B1, "X"), + (0x107B2, "M", "ʏ"), + (0x107B3, "M", "ʡ"), + (0x107B4, "M", "ʢ"), + (0x107B5, "M", "ʘ"), + (0x107B6, "M", "ǀ"), + (0x107B7, "M", "ǁ"), + (0x107B8, "M", "ǂ"), + (0x107B9, "M", "𝼊"), + (0x107BA, "M", "𝼞"), + (0x107BB, "X"), + (0x10800, "V"), + (0x10806, "X"), + (0x10808, "V"), + (0x10809, "X"), + (0x1080A, "V"), + (0x10836, "X"), + (0x10837, "V"), + (0x10839, "X"), + (0x1083C, "V"), + (0x1083D, "X"), + (0x1083F, "V"), + (0x10856, "X"), + (0x10857, "V"), + (0x1089F, "X"), + (0x108A7, "V"), + (0x108B0, "X"), + (0x108E0, "V"), + (0x108F3, "X"), + (0x108F4, "V"), + (0x108F6, "X"), + (0x108FB, "V"), + (0x1091C, "X"), + (0x1091F, "V"), + (0x1093A, "X"), + (0x1093F, "V"), + (0x10940, "X"), + (0x10980, "V"), + (0x109B8, "X"), + (0x109BC, "V"), + (0x109D0, "X"), + (0x109D2, "V"), + (0x10A04, "X"), + (0x10A05, "V"), + (0x10A07, "X"), + (0x10A0C, "V"), + (0x10A14, "X"), + (0x10A15, "V"), + (0x10A18, "X"), + (0x10A19, "V"), + (0x10A36, "X"), + (0x10A38, "V"), + (0x10A3B, "X"), + (0x10A3F, "V"), + (0x10A49, "X"), + (0x10A50, "V"), + (0x10A59, "X"), + (0x10A60, "V"), + (0x10AA0, "X"), + (0x10AC0, "V"), + (0x10AE7, "X"), + (0x10AEB, "V"), + (0x10AF7, "X"), + (0x10B00, "V"), + ] + + +def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x10B36, "X"), + (0x10B39, "V"), + (0x10B56, "X"), + (0x10B58, "V"), + (0x10B73, "X"), + (0x10B78, "V"), + (0x10B92, "X"), + (0x10B99, "V"), + (0x10B9D, "X"), + (0x10BA9, "V"), + (0x10BB0, "X"), + (0x10C00, "V"), + (0x10C49, "X"), + (0x10C80, "M", "𐳀"), + (0x10C81, "M", "𐳁"), + (0x10C82, "M", "𐳂"), + (0x10C83, "M", "𐳃"), + (0x10C84, "M", "𐳄"), + (0x10C85, "M", "𐳅"), + (0x10C86, "M", "𐳆"), + (0x10C87, "M", "𐳇"), + (0x10C88, "M", "𐳈"), + (0x10C89, "M", "𐳉"), + (0x10C8A, "M", "𐳊"), + (0x10C8B, "M", "𐳋"), + (0x10C8C, "M", "𐳌"), + (0x10C8D, "M", "𐳍"), + (0x10C8E, "M", "𐳎"), + (0x10C8F, "M", "𐳏"), + (0x10C90, "M", "𐳐"), + (0x10C91, "M", "𐳑"), + (0x10C92, "M", "𐳒"), + (0x10C93, "M", "𐳓"), + (0x10C94, "M", "𐳔"), + (0x10C95, "M", "𐳕"), + (0x10C96, "M", "𐳖"), + (0x10C97, "M", "𐳗"), + (0x10C98, "M", "𐳘"), + (0x10C99, "M", "𐳙"), + (0x10C9A, "M", "𐳚"), + (0x10C9B, "M", "𐳛"), + (0x10C9C, "M", "𐳜"), + (0x10C9D, "M", "𐳝"), + (0x10C9E, "M", "𐳞"), + (0x10C9F, "M", "𐳟"), + (0x10CA0, "M", "𐳠"), + (0x10CA1, "M", "𐳡"), + (0x10CA2, "M", "𐳢"), + (0x10CA3, "M", "𐳣"), + (0x10CA4, "M", "𐳤"), + (0x10CA5, "M", "𐳥"), + (0x10CA6, "M", "𐳦"), + (0x10CA7, "M", "𐳧"), + (0x10CA8, "M", "𐳨"), + (0x10CA9, "M", "𐳩"), + (0x10CAA, "M", "𐳪"), + (0x10CAB, "M", "𐳫"), + (0x10CAC, "M", "𐳬"), + (0x10CAD, "M", "𐳭"), + (0x10CAE, "M", "𐳮"), + (0x10CAF, "M", "𐳯"), + (0x10CB0, "M", "𐳰"), + (0x10CB1, "M", "𐳱"), + (0x10CB2, "M", "𐳲"), + (0x10CB3, "X"), + (0x10CC0, "V"), + (0x10CF3, "X"), + (0x10CFA, "V"), + (0x10D28, "X"), + (0x10D30, "V"), + (0x10D3A, "X"), + (0x10E60, "V"), + (0x10E7F, "X"), + (0x10E80, "V"), + (0x10EAA, "X"), + (0x10EAB, "V"), + (0x10EAE, "X"), + (0x10EB0, "V"), + (0x10EB2, "X"), + (0x10EFD, "V"), + (0x10F28, "X"), + (0x10F30, "V"), + (0x10F5A, "X"), + (0x10F70, "V"), + (0x10F8A, "X"), + (0x10FB0, "V"), + (0x10FCC, "X"), + (0x10FE0, "V"), + (0x10FF7, "X"), + (0x11000, "V"), + (0x1104E, "X"), + 
(0x11052, "V"), + (0x11076, "X"), + (0x1107F, "V"), + (0x110BD, "X"), + (0x110BE, "V"), + (0x110C3, "X"), + (0x110D0, "V"), + (0x110E9, "X"), + (0x110F0, "V"), + ] + + +def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x110FA, "X"), + (0x11100, "V"), + (0x11135, "X"), + (0x11136, "V"), + (0x11148, "X"), + (0x11150, "V"), + (0x11177, "X"), + (0x11180, "V"), + (0x111E0, "X"), + (0x111E1, "V"), + (0x111F5, "X"), + (0x11200, "V"), + (0x11212, "X"), + (0x11213, "V"), + (0x11242, "X"), + (0x11280, "V"), + (0x11287, "X"), + (0x11288, "V"), + (0x11289, "X"), + (0x1128A, "V"), + (0x1128E, "X"), + (0x1128F, "V"), + (0x1129E, "X"), + (0x1129F, "V"), + (0x112AA, "X"), + (0x112B0, "V"), + (0x112EB, "X"), + (0x112F0, "V"), + (0x112FA, "X"), + (0x11300, "V"), + (0x11304, "X"), + (0x11305, "V"), + (0x1130D, "X"), + (0x1130F, "V"), + (0x11311, "X"), + (0x11313, "V"), + (0x11329, "X"), + (0x1132A, "V"), + (0x11331, "X"), + (0x11332, "V"), + (0x11334, "X"), + (0x11335, "V"), + (0x1133A, "X"), + (0x1133B, "V"), + (0x11345, "X"), + (0x11347, "V"), + (0x11349, "X"), + (0x1134B, "V"), + (0x1134E, "X"), + (0x11350, "V"), + (0x11351, "X"), + (0x11357, "V"), + (0x11358, "X"), + (0x1135D, "V"), + (0x11364, "X"), + (0x11366, "V"), + (0x1136D, "X"), + (0x11370, "V"), + (0x11375, "X"), + (0x11400, "V"), + (0x1145C, "X"), + (0x1145D, "V"), + (0x11462, "X"), + (0x11480, "V"), + (0x114C8, "X"), + (0x114D0, "V"), + (0x114DA, "X"), + (0x11580, "V"), + (0x115B6, "X"), + (0x115B8, "V"), + (0x115DE, "X"), + (0x11600, "V"), + (0x11645, "X"), + (0x11650, "V"), + (0x1165A, "X"), + (0x11660, "V"), + (0x1166D, "X"), + (0x11680, "V"), + (0x116BA, "X"), + (0x116C0, "V"), + (0x116CA, "X"), + (0x11700, "V"), + (0x1171B, "X"), + (0x1171D, "V"), + (0x1172C, "X"), + (0x11730, "V"), + (0x11747, "X"), + (0x11800, "V"), + (0x1183C, "X"), + (0x118A0, "M", "𑣀"), + (0x118A1, "M", "𑣁"), + (0x118A2, "M", "𑣂"), + (0x118A3, "M", "𑣃"), + (0x118A4, "M", "𑣄"), + (0x118A5, "M", "𑣅"), + (0x118A6, "M", "𑣆"), + (0x118A7, "M", "𑣇"), + (0x118A8, "M", "𑣈"), + (0x118A9, "M", "𑣉"), + (0x118AA, "M", "𑣊"), + ] + + +def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x118AB, "M", "𑣋"), + (0x118AC, "M", "𑣌"), + (0x118AD, "M", "𑣍"), + (0x118AE, "M", "𑣎"), + (0x118AF, "M", "𑣏"), + (0x118B0, "M", "𑣐"), + (0x118B1, "M", "𑣑"), + (0x118B2, "M", "𑣒"), + (0x118B3, "M", "𑣓"), + (0x118B4, "M", "𑣔"), + (0x118B5, "M", "𑣕"), + (0x118B6, "M", "𑣖"), + (0x118B7, "M", "𑣗"), + (0x118B8, "M", "𑣘"), + (0x118B9, "M", "𑣙"), + (0x118BA, "M", "𑣚"), + (0x118BB, "M", "𑣛"), + (0x118BC, "M", "𑣜"), + (0x118BD, "M", "𑣝"), + (0x118BE, "M", "𑣞"), + (0x118BF, "M", "𑣟"), + (0x118C0, "V"), + (0x118F3, "X"), + (0x118FF, "V"), + (0x11907, "X"), + (0x11909, "V"), + (0x1190A, "X"), + (0x1190C, "V"), + (0x11914, "X"), + (0x11915, "V"), + (0x11917, "X"), + (0x11918, "V"), + (0x11936, "X"), + (0x11937, "V"), + (0x11939, "X"), + (0x1193B, "V"), + (0x11947, "X"), + (0x11950, "V"), + (0x1195A, "X"), + (0x119A0, "V"), + (0x119A8, "X"), + (0x119AA, "V"), + (0x119D8, "X"), + (0x119DA, "V"), + (0x119E5, "X"), + (0x11A00, "V"), + (0x11A48, "X"), + (0x11A50, "V"), + (0x11AA3, "X"), + (0x11AB0, "V"), + (0x11AF9, "X"), + (0x11B00, "V"), + (0x11B0A, "X"), + (0x11C00, "V"), + (0x11C09, "X"), + (0x11C0A, "V"), + (0x11C37, "X"), + (0x11C38, "V"), + (0x11C46, "X"), + (0x11C50, "V"), + (0x11C6D, "X"), + (0x11C70, "V"), + (0x11C90, "X"), + (0x11C92, "V"), + (0x11CA8, "X"), + (0x11CA9, "V"), + (0x11CB7, "X"), + (0x11D00, "V"), + (0x11D07, "X"), + (0x11D08, 
"V"), + (0x11D0A, "X"), + (0x11D0B, "V"), + (0x11D37, "X"), + (0x11D3A, "V"), + (0x11D3B, "X"), + (0x11D3C, "V"), + (0x11D3E, "X"), + (0x11D3F, "V"), + (0x11D48, "X"), + (0x11D50, "V"), + (0x11D5A, "X"), + (0x11D60, "V"), + (0x11D66, "X"), + (0x11D67, "V"), + (0x11D69, "X"), + (0x11D6A, "V"), + (0x11D8F, "X"), + (0x11D90, "V"), + (0x11D92, "X"), + (0x11D93, "V"), + (0x11D99, "X"), + (0x11DA0, "V"), + (0x11DAA, "X"), + (0x11EE0, "V"), + (0x11EF9, "X"), + (0x11F00, "V"), + (0x11F11, "X"), + (0x11F12, "V"), + (0x11F3B, "X"), + (0x11F3E, "V"), + ] + + +def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x11F5A, "X"), + (0x11FB0, "V"), + (0x11FB1, "X"), + (0x11FC0, "V"), + (0x11FF2, "X"), + (0x11FFF, "V"), + (0x1239A, "X"), + (0x12400, "V"), + (0x1246F, "X"), + (0x12470, "V"), + (0x12475, "X"), + (0x12480, "V"), + (0x12544, "X"), + (0x12F90, "V"), + (0x12FF3, "X"), + (0x13000, "V"), + (0x13430, "X"), + (0x13440, "V"), + (0x13456, "X"), + (0x14400, "V"), + (0x14647, "X"), + (0x16800, "V"), + (0x16A39, "X"), + (0x16A40, "V"), + (0x16A5F, "X"), + (0x16A60, "V"), + (0x16A6A, "X"), + (0x16A6E, "V"), + (0x16ABF, "X"), + (0x16AC0, "V"), + (0x16ACA, "X"), + (0x16AD0, "V"), + (0x16AEE, "X"), + (0x16AF0, "V"), + (0x16AF6, "X"), + (0x16B00, "V"), + (0x16B46, "X"), + (0x16B50, "V"), + (0x16B5A, "X"), + (0x16B5B, "V"), + (0x16B62, "X"), + (0x16B63, "V"), + (0x16B78, "X"), + (0x16B7D, "V"), + (0x16B90, "X"), + (0x16E40, "M", "𖹠"), + (0x16E41, "M", "𖹡"), + (0x16E42, "M", "𖹢"), + (0x16E43, "M", "𖹣"), + (0x16E44, "M", "𖹤"), + (0x16E45, "M", "𖹥"), + (0x16E46, "M", "𖹦"), + (0x16E47, "M", "𖹧"), + (0x16E48, "M", "𖹨"), + (0x16E49, "M", "𖹩"), + (0x16E4A, "M", "𖹪"), + (0x16E4B, "M", "𖹫"), + (0x16E4C, "M", "𖹬"), + (0x16E4D, "M", "𖹭"), + (0x16E4E, "M", "𖹮"), + (0x16E4F, "M", "𖹯"), + (0x16E50, "M", "𖹰"), + (0x16E51, "M", "𖹱"), + (0x16E52, "M", "𖹲"), + (0x16E53, "M", "𖹳"), + (0x16E54, "M", "𖹴"), + (0x16E55, "M", "𖹵"), + (0x16E56, "M", "𖹶"), + (0x16E57, "M", "𖹷"), + (0x16E58, "M", "𖹸"), + (0x16E59, "M", "𖹹"), + (0x16E5A, "M", "𖹺"), + (0x16E5B, "M", "𖹻"), + (0x16E5C, "M", "𖹼"), + (0x16E5D, "M", "𖹽"), + (0x16E5E, "M", "𖹾"), + (0x16E5F, "M", "𖹿"), + (0x16E60, "V"), + (0x16E9B, "X"), + (0x16F00, "V"), + (0x16F4B, "X"), + (0x16F4F, "V"), + (0x16F88, "X"), + (0x16F8F, "V"), + (0x16FA0, "X"), + (0x16FE0, "V"), + (0x16FE5, "X"), + (0x16FF0, "V"), + (0x16FF2, "X"), + (0x17000, "V"), + (0x187F8, "X"), + (0x18800, "V"), + (0x18CD6, "X"), + (0x18D00, "V"), + (0x18D09, "X"), + (0x1AFF0, "V"), + (0x1AFF4, "X"), + (0x1AFF5, "V"), + (0x1AFFC, "X"), + (0x1AFFD, "V"), + ] + + +def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1AFFF, "X"), + (0x1B000, "V"), + (0x1B123, "X"), + (0x1B132, "V"), + (0x1B133, "X"), + (0x1B150, "V"), + (0x1B153, "X"), + (0x1B155, "V"), + (0x1B156, "X"), + (0x1B164, "V"), + (0x1B168, "X"), + (0x1B170, "V"), + (0x1B2FC, "X"), + (0x1BC00, "V"), + (0x1BC6B, "X"), + (0x1BC70, "V"), + (0x1BC7D, "X"), + (0x1BC80, "V"), + (0x1BC89, "X"), + (0x1BC90, "V"), + (0x1BC9A, "X"), + (0x1BC9C, "V"), + (0x1BCA0, "I"), + (0x1BCA4, "X"), + (0x1CF00, "V"), + (0x1CF2E, "X"), + (0x1CF30, "V"), + (0x1CF47, "X"), + (0x1CF50, "V"), + (0x1CFC4, "X"), + (0x1D000, "V"), + (0x1D0F6, "X"), + (0x1D100, "V"), + (0x1D127, "X"), + (0x1D129, "V"), + (0x1D15E, "M", "𝅗𝅥"), + (0x1D15F, "M", "𝅘𝅥"), + (0x1D160, "M", "𝅘𝅥𝅮"), + (0x1D161, "M", "𝅘𝅥𝅯"), + (0x1D162, "M", "𝅘𝅥𝅰"), + (0x1D163, "M", "𝅘𝅥𝅱"), + (0x1D164, "M", "𝅘𝅥𝅲"), + (0x1D165, "V"), + (0x1D173, "X"), + (0x1D17B, "V"), + (0x1D1BB, 
"M", "𝆹𝅥"), + (0x1D1BC, "M", "𝆺𝅥"), + (0x1D1BD, "M", "𝆹𝅥𝅮"), + (0x1D1BE, "M", "𝆺𝅥𝅮"), + (0x1D1BF, "M", "𝆹𝅥𝅯"), + (0x1D1C0, "M", "𝆺𝅥𝅯"), + (0x1D1C1, "V"), + (0x1D1EB, "X"), + (0x1D200, "V"), + (0x1D246, "X"), + (0x1D2C0, "V"), + (0x1D2D4, "X"), + (0x1D2E0, "V"), + (0x1D2F4, "X"), + (0x1D300, "V"), + (0x1D357, "X"), + (0x1D360, "V"), + (0x1D379, "X"), + (0x1D400, "M", "a"), + (0x1D401, "M", "b"), + (0x1D402, "M", "c"), + (0x1D403, "M", "d"), + (0x1D404, "M", "e"), + (0x1D405, "M", "f"), + (0x1D406, "M", "g"), + (0x1D407, "M", "h"), + (0x1D408, "M", "i"), + (0x1D409, "M", "j"), + (0x1D40A, "M", "k"), + (0x1D40B, "M", "l"), + (0x1D40C, "M", "m"), + (0x1D40D, "M", "n"), + (0x1D40E, "M", "o"), + (0x1D40F, "M", "p"), + (0x1D410, "M", "q"), + (0x1D411, "M", "r"), + (0x1D412, "M", "s"), + (0x1D413, "M", "t"), + (0x1D414, "M", "u"), + (0x1D415, "M", "v"), + (0x1D416, "M", "w"), + (0x1D417, "M", "x"), + (0x1D418, "M", "y"), + (0x1D419, "M", "z"), + (0x1D41A, "M", "a"), + (0x1D41B, "M", "b"), + (0x1D41C, "M", "c"), + (0x1D41D, "M", "d"), + (0x1D41E, "M", "e"), + (0x1D41F, "M", "f"), + (0x1D420, "M", "g"), + (0x1D421, "M", "h"), + (0x1D422, "M", "i"), + (0x1D423, "M", "j"), + (0x1D424, "M", "k"), + ] + + +def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D425, "M", "l"), + (0x1D426, "M", "m"), + (0x1D427, "M", "n"), + (0x1D428, "M", "o"), + (0x1D429, "M", "p"), + (0x1D42A, "M", "q"), + (0x1D42B, "M", "r"), + (0x1D42C, "M", "s"), + (0x1D42D, "M", "t"), + (0x1D42E, "M", "u"), + (0x1D42F, "M", "v"), + (0x1D430, "M", "w"), + (0x1D431, "M", "x"), + (0x1D432, "M", "y"), + (0x1D433, "M", "z"), + (0x1D434, "M", "a"), + (0x1D435, "M", "b"), + (0x1D436, "M", "c"), + (0x1D437, "M", "d"), + (0x1D438, "M", "e"), + (0x1D439, "M", "f"), + (0x1D43A, "M", "g"), + (0x1D43B, "M", "h"), + (0x1D43C, "M", "i"), + (0x1D43D, "M", "j"), + (0x1D43E, "M", "k"), + (0x1D43F, "M", "l"), + (0x1D440, "M", "m"), + (0x1D441, "M", "n"), + (0x1D442, "M", "o"), + (0x1D443, "M", "p"), + (0x1D444, "M", "q"), + (0x1D445, "M", "r"), + (0x1D446, "M", "s"), + (0x1D447, "M", "t"), + (0x1D448, "M", "u"), + (0x1D449, "M", "v"), + (0x1D44A, "M", "w"), + (0x1D44B, "M", "x"), + (0x1D44C, "M", "y"), + (0x1D44D, "M", "z"), + (0x1D44E, "M", "a"), + (0x1D44F, "M", "b"), + (0x1D450, "M", "c"), + (0x1D451, "M", "d"), + (0x1D452, "M", "e"), + (0x1D453, "M", "f"), + (0x1D454, "M", "g"), + (0x1D455, "X"), + (0x1D456, "M", "i"), + (0x1D457, "M", "j"), + (0x1D458, "M", "k"), + (0x1D459, "M", "l"), + (0x1D45A, "M", "m"), + (0x1D45B, "M", "n"), + (0x1D45C, "M", "o"), + (0x1D45D, "M", "p"), + (0x1D45E, "M", "q"), + (0x1D45F, "M", "r"), + (0x1D460, "M", "s"), + (0x1D461, "M", "t"), + (0x1D462, "M", "u"), + (0x1D463, "M", "v"), + (0x1D464, "M", "w"), + (0x1D465, "M", "x"), + (0x1D466, "M", "y"), + (0x1D467, "M", "z"), + (0x1D468, "M", "a"), + (0x1D469, "M", "b"), + (0x1D46A, "M", "c"), + (0x1D46B, "M", "d"), + (0x1D46C, "M", "e"), + (0x1D46D, "M", "f"), + (0x1D46E, "M", "g"), + (0x1D46F, "M", "h"), + (0x1D470, "M", "i"), + (0x1D471, "M", "j"), + (0x1D472, "M", "k"), + (0x1D473, "M", "l"), + (0x1D474, "M", "m"), + (0x1D475, "M", "n"), + (0x1D476, "M", "o"), + (0x1D477, "M", "p"), + (0x1D478, "M", "q"), + (0x1D479, "M", "r"), + (0x1D47A, "M", "s"), + (0x1D47B, "M", "t"), + (0x1D47C, "M", "u"), + (0x1D47D, "M", "v"), + (0x1D47E, "M", "w"), + (0x1D47F, "M", "x"), + (0x1D480, "M", "y"), + (0x1D481, "M", "z"), + (0x1D482, "M", "a"), + (0x1D483, "M", "b"), + (0x1D484, "M", "c"), + (0x1D485, "M", "d"), + (0x1D486, "M", "e"), + (0x1D487, 
"M", "f"), + (0x1D488, "M", "g"), + ] + + +def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D489, "M", "h"), + (0x1D48A, "M", "i"), + (0x1D48B, "M", "j"), + (0x1D48C, "M", "k"), + (0x1D48D, "M", "l"), + (0x1D48E, "M", "m"), + (0x1D48F, "M", "n"), + (0x1D490, "M", "o"), + (0x1D491, "M", "p"), + (0x1D492, "M", "q"), + (0x1D493, "M", "r"), + (0x1D494, "M", "s"), + (0x1D495, "M", "t"), + (0x1D496, "M", "u"), + (0x1D497, "M", "v"), + (0x1D498, "M", "w"), + (0x1D499, "M", "x"), + (0x1D49A, "M", "y"), + (0x1D49B, "M", "z"), + (0x1D49C, "M", "a"), + (0x1D49D, "X"), + (0x1D49E, "M", "c"), + (0x1D49F, "M", "d"), + (0x1D4A0, "X"), + (0x1D4A2, "M", "g"), + (0x1D4A3, "X"), + (0x1D4A5, "M", "j"), + (0x1D4A6, "M", "k"), + (0x1D4A7, "X"), + (0x1D4A9, "M", "n"), + (0x1D4AA, "M", "o"), + (0x1D4AB, "M", "p"), + (0x1D4AC, "M", "q"), + (0x1D4AD, "X"), + (0x1D4AE, "M", "s"), + (0x1D4AF, "M", "t"), + (0x1D4B0, "M", "u"), + (0x1D4B1, "M", "v"), + (0x1D4B2, "M", "w"), + (0x1D4B3, "M", "x"), + (0x1D4B4, "M", "y"), + (0x1D4B5, "M", "z"), + (0x1D4B6, "M", "a"), + (0x1D4B7, "M", "b"), + (0x1D4B8, "M", "c"), + (0x1D4B9, "M", "d"), + (0x1D4BA, "X"), + (0x1D4BB, "M", "f"), + (0x1D4BC, "X"), + (0x1D4BD, "M", "h"), + (0x1D4BE, "M", "i"), + (0x1D4BF, "M", "j"), + (0x1D4C0, "M", "k"), + (0x1D4C1, "M", "l"), + (0x1D4C2, "M", "m"), + (0x1D4C3, "M", "n"), + (0x1D4C4, "X"), + (0x1D4C5, "M", "p"), + (0x1D4C6, "M", "q"), + (0x1D4C7, "M", "r"), + (0x1D4C8, "M", "s"), + (0x1D4C9, "M", "t"), + (0x1D4CA, "M", "u"), + (0x1D4CB, "M", "v"), + (0x1D4CC, "M", "w"), + (0x1D4CD, "M", "x"), + (0x1D4CE, "M", "y"), + (0x1D4CF, "M", "z"), + (0x1D4D0, "M", "a"), + (0x1D4D1, "M", "b"), + (0x1D4D2, "M", "c"), + (0x1D4D3, "M", "d"), + (0x1D4D4, "M", "e"), + (0x1D4D5, "M", "f"), + (0x1D4D6, "M", "g"), + (0x1D4D7, "M", "h"), + (0x1D4D8, "M", "i"), + (0x1D4D9, "M", "j"), + (0x1D4DA, "M", "k"), + (0x1D4DB, "M", "l"), + (0x1D4DC, "M", "m"), + (0x1D4DD, "M", "n"), + (0x1D4DE, "M", "o"), + (0x1D4DF, "M", "p"), + (0x1D4E0, "M", "q"), + (0x1D4E1, "M", "r"), + (0x1D4E2, "M", "s"), + (0x1D4E3, "M", "t"), + (0x1D4E4, "M", "u"), + (0x1D4E5, "M", "v"), + (0x1D4E6, "M", "w"), + (0x1D4E7, "M", "x"), + (0x1D4E8, "M", "y"), + (0x1D4E9, "M", "z"), + (0x1D4EA, "M", "a"), + (0x1D4EB, "M", "b"), + (0x1D4EC, "M", "c"), + (0x1D4ED, "M", "d"), + (0x1D4EE, "M", "e"), + (0x1D4EF, "M", "f"), + ] + + +def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D4F0, "M", "g"), + (0x1D4F1, "M", "h"), + (0x1D4F2, "M", "i"), + (0x1D4F3, "M", "j"), + (0x1D4F4, "M", "k"), + (0x1D4F5, "M", "l"), + (0x1D4F6, "M", "m"), + (0x1D4F7, "M", "n"), + (0x1D4F8, "M", "o"), + (0x1D4F9, "M", "p"), + (0x1D4FA, "M", "q"), + (0x1D4FB, "M", "r"), + (0x1D4FC, "M", "s"), + (0x1D4FD, "M", "t"), + (0x1D4FE, "M", "u"), + (0x1D4FF, "M", "v"), + (0x1D500, "M", "w"), + (0x1D501, "M", "x"), + (0x1D502, "M", "y"), + (0x1D503, "M", "z"), + (0x1D504, "M", "a"), + (0x1D505, "M", "b"), + (0x1D506, "X"), + (0x1D507, "M", "d"), + (0x1D508, "M", "e"), + (0x1D509, "M", "f"), + (0x1D50A, "M", "g"), + (0x1D50B, "X"), + (0x1D50D, "M", "j"), + (0x1D50E, "M", "k"), + (0x1D50F, "M", "l"), + (0x1D510, "M", "m"), + (0x1D511, "M", "n"), + (0x1D512, "M", "o"), + (0x1D513, "M", "p"), + (0x1D514, "M", "q"), + (0x1D515, "X"), + (0x1D516, "M", "s"), + (0x1D517, "M", "t"), + (0x1D518, "M", "u"), + (0x1D519, "M", "v"), + (0x1D51A, "M", "w"), + (0x1D51B, "M", "x"), + (0x1D51C, "M", "y"), + (0x1D51D, "X"), + (0x1D51E, "M", "a"), + (0x1D51F, "M", "b"), + (0x1D520, "M", 
"c"), + (0x1D521, "M", "d"), + (0x1D522, "M", "e"), + (0x1D523, "M", "f"), + (0x1D524, "M", "g"), + (0x1D525, "M", "h"), + (0x1D526, "M", "i"), + (0x1D527, "M", "j"), + (0x1D528, "M", "k"), + (0x1D529, "M", "l"), + (0x1D52A, "M", "m"), + (0x1D52B, "M", "n"), + (0x1D52C, "M", "o"), + (0x1D52D, "M", "p"), + (0x1D52E, "M", "q"), + (0x1D52F, "M", "r"), + (0x1D530, "M", "s"), + (0x1D531, "M", "t"), + (0x1D532, "M", "u"), + (0x1D533, "M", "v"), + (0x1D534, "M", "w"), + (0x1D535, "M", "x"), + (0x1D536, "M", "y"), + (0x1D537, "M", "z"), + (0x1D538, "M", "a"), + (0x1D539, "M", "b"), + (0x1D53A, "X"), + (0x1D53B, "M", "d"), + (0x1D53C, "M", "e"), + (0x1D53D, "M", "f"), + (0x1D53E, "M", "g"), + (0x1D53F, "X"), + (0x1D540, "M", "i"), + (0x1D541, "M", "j"), + (0x1D542, "M", "k"), + (0x1D543, "M", "l"), + (0x1D544, "M", "m"), + (0x1D545, "X"), + (0x1D546, "M", "o"), + (0x1D547, "X"), + (0x1D54A, "M", "s"), + (0x1D54B, "M", "t"), + (0x1D54C, "M", "u"), + (0x1D54D, "M", "v"), + (0x1D54E, "M", "w"), + (0x1D54F, "M", "x"), + (0x1D550, "M", "y"), + (0x1D551, "X"), + (0x1D552, "M", "a"), + (0x1D553, "M", "b"), + (0x1D554, "M", "c"), + (0x1D555, "M", "d"), + (0x1D556, "M", "e"), + ] + + +def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D557, "M", "f"), + (0x1D558, "M", "g"), + (0x1D559, "M", "h"), + (0x1D55A, "M", "i"), + (0x1D55B, "M", "j"), + (0x1D55C, "M", "k"), + (0x1D55D, "M", "l"), + (0x1D55E, "M", "m"), + (0x1D55F, "M", "n"), + (0x1D560, "M", "o"), + (0x1D561, "M", "p"), + (0x1D562, "M", "q"), + (0x1D563, "M", "r"), + (0x1D564, "M", "s"), + (0x1D565, "M", "t"), + (0x1D566, "M", "u"), + (0x1D567, "M", "v"), + (0x1D568, "M", "w"), + (0x1D569, "M", "x"), + (0x1D56A, "M", "y"), + (0x1D56B, "M", "z"), + (0x1D56C, "M", "a"), + (0x1D56D, "M", "b"), + (0x1D56E, "M", "c"), + (0x1D56F, "M", "d"), + (0x1D570, "M", "e"), + (0x1D571, "M", "f"), + (0x1D572, "M", "g"), + (0x1D573, "M", "h"), + (0x1D574, "M", "i"), + (0x1D575, "M", "j"), + (0x1D576, "M", "k"), + (0x1D577, "M", "l"), + (0x1D578, "M", "m"), + (0x1D579, "M", "n"), + (0x1D57A, "M", "o"), + (0x1D57B, "M", "p"), + (0x1D57C, "M", "q"), + (0x1D57D, "M", "r"), + (0x1D57E, "M", "s"), + (0x1D57F, "M", "t"), + (0x1D580, "M", "u"), + (0x1D581, "M", "v"), + (0x1D582, "M", "w"), + (0x1D583, "M", "x"), + (0x1D584, "M", "y"), + (0x1D585, "M", "z"), + (0x1D586, "M", "a"), + (0x1D587, "M", "b"), + (0x1D588, "M", "c"), + (0x1D589, "M", "d"), + (0x1D58A, "M", "e"), + (0x1D58B, "M", "f"), + (0x1D58C, "M", "g"), + (0x1D58D, "M", "h"), + (0x1D58E, "M", "i"), + (0x1D58F, "M", "j"), + (0x1D590, "M", "k"), + (0x1D591, "M", "l"), + (0x1D592, "M", "m"), + (0x1D593, "M", "n"), + (0x1D594, "M", "o"), + (0x1D595, "M", "p"), + (0x1D596, "M", "q"), + (0x1D597, "M", "r"), + (0x1D598, "M", "s"), + (0x1D599, "M", "t"), + (0x1D59A, "M", "u"), + (0x1D59B, "M", "v"), + (0x1D59C, "M", "w"), + (0x1D59D, "M", "x"), + (0x1D59E, "M", "y"), + (0x1D59F, "M", "z"), + (0x1D5A0, "M", "a"), + (0x1D5A1, "M", "b"), + (0x1D5A2, "M", "c"), + (0x1D5A3, "M", "d"), + (0x1D5A4, "M", "e"), + (0x1D5A5, "M", "f"), + (0x1D5A6, "M", "g"), + (0x1D5A7, "M", "h"), + (0x1D5A8, "M", "i"), + (0x1D5A9, "M", "j"), + (0x1D5AA, "M", "k"), + (0x1D5AB, "M", "l"), + (0x1D5AC, "M", "m"), + (0x1D5AD, "M", "n"), + (0x1D5AE, "M", "o"), + (0x1D5AF, "M", "p"), + (0x1D5B0, "M", "q"), + (0x1D5B1, "M", "r"), + (0x1D5B2, "M", "s"), + (0x1D5B3, "M", "t"), + (0x1D5B4, "M", "u"), + (0x1D5B5, "M", "v"), + (0x1D5B6, "M", "w"), + (0x1D5B7, "M", "x"), + (0x1D5B8, "M", "y"), + (0x1D5B9, "M", "z"), + (0x1D5BA, 
"M", "a"), + ] + + +def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D5BB, "M", "b"), + (0x1D5BC, "M", "c"), + (0x1D5BD, "M", "d"), + (0x1D5BE, "M", "e"), + (0x1D5BF, "M", "f"), + (0x1D5C0, "M", "g"), + (0x1D5C1, "M", "h"), + (0x1D5C2, "M", "i"), + (0x1D5C3, "M", "j"), + (0x1D5C4, "M", "k"), + (0x1D5C5, "M", "l"), + (0x1D5C6, "M", "m"), + (0x1D5C7, "M", "n"), + (0x1D5C8, "M", "o"), + (0x1D5C9, "M", "p"), + (0x1D5CA, "M", "q"), + (0x1D5CB, "M", "r"), + (0x1D5CC, "M", "s"), + (0x1D5CD, "M", "t"), + (0x1D5CE, "M", "u"), + (0x1D5CF, "M", "v"), + (0x1D5D0, "M", "w"), + (0x1D5D1, "M", "x"), + (0x1D5D2, "M", "y"), + (0x1D5D3, "M", "z"), + (0x1D5D4, "M", "a"), + (0x1D5D5, "M", "b"), + (0x1D5D6, "M", "c"), + (0x1D5D7, "M", "d"), + (0x1D5D8, "M", "e"), + (0x1D5D9, "M", "f"), + (0x1D5DA, "M", "g"), + (0x1D5DB, "M", "h"), + (0x1D5DC, "M", "i"), + (0x1D5DD, "M", "j"), + (0x1D5DE, "M", "k"), + (0x1D5DF, "M", "l"), + (0x1D5E0, "M", "m"), + (0x1D5E1, "M", "n"), + (0x1D5E2, "M", "o"), + (0x1D5E3, "M", "p"), + (0x1D5E4, "M", "q"), + (0x1D5E5, "M", "r"), + (0x1D5E6, "M", "s"), + (0x1D5E7, "M", "t"), + (0x1D5E8, "M", "u"), + (0x1D5E9, "M", "v"), + (0x1D5EA, "M", "w"), + (0x1D5EB, "M", "x"), + (0x1D5EC, "M", "y"), + (0x1D5ED, "M", "z"), + (0x1D5EE, "M", "a"), + (0x1D5EF, "M", "b"), + (0x1D5F0, "M", "c"), + (0x1D5F1, "M", "d"), + (0x1D5F2, "M", "e"), + (0x1D5F3, "M", "f"), + (0x1D5F4, "M", "g"), + (0x1D5F5, "M", "h"), + (0x1D5F6, "M", "i"), + (0x1D5F7, "M", "j"), + (0x1D5F8, "M", "k"), + (0x1D5F9, "M", "l"), + (0x1D5FA, "M", "m"), + (0x1D5FB, "M", "n"), + (0x1D5FC, "M", "o"), + (0x1D5FD, "M", "p"), + (0x1D5FE, "M", "q"), + (0x1D5FF, "M", "r"), + (0x1D600, "M", "s"), + (0x1D601, "M", "t"), + (0x1D602, "M", "u"), + (0x1D603, "M", "v"), + (0x1D604, "M", "w"), + (0x1D605, "M", "x"), + (0x1D606, "M", "y"), + (0x1D607, "M", "z"), + (0x1D608, "M", "a"), + (0x1D609, "M", "b"), + (0x1D60A, "M", "c"), + (0x1D60B, "M", "d"), + (0x1D60C, "M", "e"), + (0x1D60D, "M", "f"), + (0x1D60E, "M", "g"), + (0x1D60F, "M", "h"), + (0x1D610, "M", "i"), + (0x1D611, "M", "j"), + (0x1D612, "M", "k"), + (0x1D613, "M", "l"), + (0x1D614, "M", "m"), + (0x1D615, "M", "n"), + (0x1D616, "M", "o"), + (0x1D617, "M", "p"), + (0x1D618, "M", "q"), + (0x1D619, "M", "r"), + (0x1D61A, "M", "s"), + (0x1D61B, "M", "t"), + (0x1D61C, "M", "u"), + (0x1D61D, "M", "v"), + (0x1D61E, "M", "w"), + ] + + +def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D61F, "M", "x"), + (0x1D620, "M", "y"), + (0x1D621, "M", "z"), + (0x1D622, "M", "a"), + (0x1D623, "M", "b"), + (0x1D624, "M", "c"), + (0x1D625, "M", "d"), + (0x1D626, "M", "e"), + (0x1D627, "M", "f"), + (0x1D628, "M", "g"), + (0x1D629, "M", "h"), + (0x1D62A, "M", "i"), + (0x1D62B, "M", "j"), + (0x1D62C, "M", "k"), + (0x1D62D, "M", "l"), + (0x1D62E, "M", "m"), + (0x1D62F, "M", "n"), + (0x1D630, "M", "o"), + (0x1D631, "M", "p"), + (0x1D632, "M", "q"), + (0x1D633, "M", "r"), + (0x1D634, "M", "s"), + (0x1D635, "M", "t"), + (0x1D636, "M", "u"), + (0x1D637, "M", "v"), + (0x1D638, "M", "w"), + (0x1D639, "M", "x"), + (0x1D63A, "M", "y"), + (0x1D63B, "M", "z"), + (0x1D63C, "M", "a"), + (0x1D63D, "M", "b"), + (0x1D63E, "M", "c"), + (0x1D63F, "M", "d"), + (0x1D640, "M", "e"), + (0x1D641, "M", "f"), + (0x1D642, "M", "g"), + (0x1D643, "M", "h"), + (0x1D644, "M", "i"), + (0x1D645, "M", "j"), + (0x1D646, "M", "k"), + (0x1D647, "M", "l"), + (0x1D648, "M", "m"), + (0x1D649, "M", "n"), + (0x1D64A, "M", "o"), + (0x1D64B, "M", "p"), + (0x1D64C, "M", "q"), + 
(0x1D64D, "M", "r"), + (0x1D64E, "M", "s"), + (0x1D64F, "M", "t"), + (0x1D650, "M", "u"), + (0x1D651, "M", "v"), + (0x1D652, "M", "w"), + (0x1D653, "M", "x"), + (0x1D654, "M", "y"), + (0x1D655, "M", "z"), + (0x1D656, "M", "a"), + (0x1D657, "M", "b"), + (0x1D658, "M", "c"), + (0x1D659, "M", "d"), + (0x1D65A, "M", "e"), + (0x1D65B, "M", "f"), + (0x1D65C, "M", "g"), + (0x1D65D, "M", "h"), + (0x1D65E, "M", "i"), + (0x1D65F, "M", "j"), + (0x1D660, "M", "k"), + (0x1D661, "M", "l"), + (0x1D662, "M", "m"), + (0x1D663, "M", "n"), + (0x1D664, "M", "o"), + (0x1D665, "M", "p"), + (0x1D666, "M", "q"), + (0x1D667, "M", "r"), + (0x1D668, "M", "s"), + (0x1D669, "M", "t"), + (0x1D66A, "M", "u"), + (0x1D66B, "M", "v"), + (0x1D66C, "M", "w"), + (0x1D66D, "M", "x"), + (0x1D66E, "M", "y"), + (0x1D66F, "M", "z"), + (0x1D670, "M", "a"), + (0x1D671, "M", "b"), + (0x1D672, "M", "c"), + (0x1D673, "M", "d"), + (0x1D674, "M", "e"), + (0x1D675, "M", "f"), + (0x1D676, "M", "g"), + (0x1D677, "M", "h"), + (0x1D678, "M", "i"), + (0x1D679, "M", "j"), + (0x1D67A, "M", "k"), + (0x1D67B, "M", "l"), + (0x1D67C, "M", "m"), + (0x1D67D, "M", "n"), + (0x1D67E, "M", "o"), + (0x1D67F, "M", "p"), + (0x1D680, "M", "q"), + (0x1D681, "M", "r"), + (0x1D682, "M", "s"), + ] + + +def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D683, "M", "t"), + (0x1D684, "M", "u"), + (0x1D685, "M", "v"), + (0x1D686, "M", "w"), + (0x1D687, "M", "x"), + (0x1D688, "M", "y"), + (0x1D689, "M", "z"), + (0x1D68A, "M", "a"), + (0x1D68B, "M", "b"), + (0x1D68C, "M", "c"), + (0x1D68D, "M", "d"), + (0x1D68E, "M", "e"), + (0x1D68F, "M", "f"), + (0x1D690, "M", "g"), + (0x1D691, "M", "h"), + (0x1D692, "M", "i"), + (0x1D693, "M", "j"), + (0x1D694, "M", "k"), + (0x1D695, "M", "l"), + (0x1D696, "M", "m"), + (0x1D697, "M", "n"), + (0x1D698, "M", "o"), + (0x1D699, "M", "p"), + (0x1D69A, "M", "q"), + (0x1D69B, "M", "r"), + (0x1D69C, "M", "s"), + (0x1D69D, "M", "t"), + (0x1D69E, "M", "u"), + (0x1D69F, "M", "v"), + (0x1D6A0, "M", "w"), + (0x1D6A1, "M", "x"), + (0x1D6A2, "M", "y"), + (0x1D6A3, "M", "z"), + (0x1D6A4, "M", "ı"), + (0x1D6A5, "M", "ȷ"), + (0x1D6A6, "X"), + (0x1D6A8, "M", "α"), + (0x1D6A9, "M", "β"), + (0x1D6AA, "M", "γ"), + (0x1D6AB, "M", "δ"), + (0x1D6AC, "M", "ε"), + (0x1D6AD, "M", "ζ"), + (0x1D6AE, "M", "η"), + (0x1D6AF, "M", "θ"), + (0x1D6B0, "M", "ι"), + (0x1D6B1, "M", "κ"), + (0x1D6B2, "M", "λ"), + (0x1D6B3, "M", "μ"), + (0x1D6B4, "M", "ν"), + (0x1D6B5, "M", "ξ"), + (0x1D6B6, "M", "ο"), + (0x1D6B7, "M", "π"), + (0x1D6B8, "M", "ρ"), + (0x1D6B9, "M", "θ"), + (0x1D6BA, "M", "σ"), + (0x1D6BB, "M", "τ"), + (0x1D6BC, "M", "υ"), + (0x1D6BD, "M", "φ"), + (0x1D6BE, "M", "χ"), + (0x1D6BF, "M", "ψ"), + (0x1D6C0, "M", "ω"), + (0x1D6C1, "M", "∇"), + (0x1D6C2, "M", "α"), + (0x1D6C3, "M", "β"), + (0x1D6C4, "M", "γ"), + (0x1D6C5, "M", "δ"), + (0x1D6C6, "M", "ε"), + (0x1D6C7, "M", "ζ"), + (0x1D6C8, "M", "η"), + (0x1D6C9, "M", "θ"), + (0x1D6CA, "M", "ι"), + (0x1D6CB, "M", "κ"), + (0x1D6CC, "M", "λ"), + (0x1D6CD, "M", "μ"), + (0x1D6CE, "M", "ν"), + (0x1D6CF, "M", "ξ"), + (0x1D6D0, "M", "ο"), + (0x1D6D1, "M", "π"), + (0x1D6D2, "M", "ρ"), + (0x1D6D3, "M", "σ"), + (0x1D6D5, "M", "τ"), + (0x1D6D6, "M", "υ"), + (0x1D6D7, "M", "φ"), + (0x1D6D8, "M", "χ"), + (0x1D6D9, "M", "ψ"), + (0x1D6DA, "M", "ω"), + (0x1D6DB, "M", "∂"), + (0x1D6DC, "M", "ε"), + (0x1D6DD, "M", "θ"), + (0x1D6DE, "M", "κ"), + (0x1D6DF, "M", "φ"), + (0x1D6E0, "M", "ρ"), + (0x1D6E1, "M", "π"), + (0x1D6E2, "M", "α"), + (0x1D6E3, "M", "β"), + (0x1D6E4, "M", "γ"), + (0x1D6E5, "M", "δ"), 
+ (0x1D6E6, "M", "ε"), + (0x1D6E7, "M", "ζ"), + (0x1D6E8, "M", "η"), + ] + + +def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D6E9, "M", "θ"), + (0x1D6EA, "M", "ι"), + (0x1D6EB, "M", "κ"), + (0x1D6EC, "M", "λ"), + (0x1D6ED, "M", "μ"), + (0x1D6EE, "M", "ν"), + (0x1D6EF, "M", "ξ"), + (0x1D6F0, "M", "ο"), + (0x1D6F1, "M", "π"), + (0x1D6F2, "M", "ρ"), + (0x1D6F3, "M", "θ"), + (0x1D6F4, "M", "σ"), + (0x1D6F5, "M", "τ"), + (0x1D6F6, "M", "υ"), + (0x1D6F7, "M", "φ"), + (0x1D6F8, "M", "χ"), + (0x1D6F9, "M", "ψ"), + (0x1D6FA, "M", "ω"), + (0x1D6FB, "M", "∇"), + (0x1D6FC, "M", "α"), + (0x1D6FD, "M", "β"), + (0x1D6FE, "M", "γ"), + (0x1D6FF, "M", "δ"), + (0x1D700, "M", "ε"), + (0x1D701, "M", "ζ"), + (0x1D702, "M", "η"), + (0x1D703, "M", "θ"), + (0x1D704, "M", "ι"), + (0x1D705, "M", "κ"), + (0x1D706, "M", "λ"), + (0x1D707, "M", "μ"), + (0x1D708, "M", "ν"), + (0x1D709, "M", "ξ"), + (0x1D70A, "M", "ο"), + (0x1D70B, "M", "π"), + (0x1D70C, "M", "ρ"), + (0x1D70D, "M", "σ"), + (0x1D70F, "M", "τ"), + (0x1D710, "M", "υ"), + (0x1D711, "M", "φ"), + (0x1D712, "M", "χ"), + (0x1D713, "M", "ψ"), + (0x1D714, "M", "ω"), + (0x1D715, "M", "∂"), + (0x1D716, "M", "ε"), + (0x1D717, "M", "θ"), + (0x1D718, "M", "κ"), + (0x1D719, "M", "φ"), + (0x1D71A, "M", "ρ"), + (0x1D71B, "M", "π"), + (0x1D71C, "M", "α"), + (0x1D71D, "M", "β"), + (0x1D71E, "M", "γ"), + (0x1D71F, "M", "δ"), + (0x1D720, "M", "ε"), + (0x1D721, "M", "ζ"), + (0x1D722, "M", "η"), + (0x1D723, "M", "θ"), + (0x1D724, "M", "ι"), + (0x1D725, "M", "κ"), + (0x1D726, "M", "λ"), + (0x1D727, "M", "μ"), + (0x1D728, "M", "ν"), + (0x1D729, "M", "ξ"), + (0x1D72A, "M", "ο"), + (0x1D72B, "M", "π"), + (0x1D72C, "M", "ρ"), + (0x1D72D, "M", "θ"), + (0x1D72E, "M", "σ"), + (0x1D72F, "M", "τ"), + (0x1D730, "M", "υ"), + (0x1D731, "M", "φ"), + (0x1D732, "M", "χ"), + (0x1D733, "M", "ψ"), + (0x1D734, "M", "ω"), + (0x1D735, "M", "∇"), + (0x1D736, "M", "α"), + (0x1D737, "M", "β"), + (0x1D738, "M", "γ"), + (0x1D739, "M", "δ"), + (0x1D73A, "M", "ε"), + (0x1D73B, "M", "ζ"), + (0x1D73C, "M", "η"), + (0x1D73D, "M", "θ"), + (0x1D73E, "M", "ι"), + (0x1D73F, "M", "κ"), + (0x1D740, "M", "λ"), + (0x1D741, "M", "μ"), + (0x1D742, "M", "ν"), + (0x1D743, "M", "ξ"), + (0x1D744, "M", "ο"), + (0x1D745, "M", "π"), + (0x1D746, "M", "ρ"), + (0x1D747, "M", "σ"), + (0x1D749, "M", "τ"), + (0x1D74A, "M", "υ"), + (0x1D74B, "M", "φ"), + (0x1D74C, "M", "χ"), + (0x1D74D, "M", "ψ"), + (0x1D74E, "M", "ω"), + ] + + +def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D74F, "M", "∂"), + (0x1D750, "M", "ε"), + (0x1D751, "M", "θ"), + (0x1D752, "M", "κ"), + (0x1D753, "M", "φ"), + (0x1D754, "M", "ρ"), + (0x1D755, "M", "π"), + (0x1D756, "M", "α"), + (0x1D757, "M", "β"), + (0x1D758, "M", "γ"), + (0x1D759, "M", "δ"), + (0x1D75A, "M", "ε"), + (0x1D75B, "M", "ζ"), + (0x1D75C, "M", "η"), + (0x1D75D, "M", "θ"), + (0x1D75E, "M", "ι"), + (0x1D75F, "M", "κ"), + (0x1D760, "M", "λ"), + (0x1D761, "M", "μ"), + (0x1D762, "M", "ν"), + (0x1D763, "M", "ξ"), + (0x1D764, "M", "ο"), + (0x1D765, "M", "π"), + (0x1D766, "M", "ρ"), + (0x1D767, "M", "θ"), + (0x1D768, "M", "σ"), + (0x1D769, "M", "τ"), + (0x1D76A, "M", "υ"), + (0x1D76B, "M", "φ"), + (0x1D76C, "M", "χ"), + (0x1D76D, "M", "ψ"), + (0x1D76E, "M", "ω"), + (0x1D76F, "M", "∇"), + (0x1D770, "M", "α"), + (0x1D771, "M", "β"), + (0x1D772, "M", "γ"), + (0x1D773, "M", "δ"), + (0x1D774, "M", "ε"), + (0x1D775, "M", "ζ"), + (0x1D776, "M", "η"), + (0x1D777, "M", "θ"), + (0x1D778, "M", "ι"), + (0x1D779, "M", "κ"), + (0x1D77A, "M", 
"λ"), + (0x1D77B, "M", "μ"), + (0x1D77C, "M", "ν"), + (0x1D77D, "M", "ξ"), + (0x1D77E, "M", "ο"), + (0x1D77F, "M", "π"), + (0x1D780, "M", "ρ"), + (0x1D781, "M", "σ"), + (0x1D783, "M", "τ"), + (0x1D784, "M", "υ"), + (0x1D785, "M", "φ"), + (0x1D786, "M", "χ"), + (0x1D787, "M", "ψ"), + (0x1D788, "M", "ω"), + (0x1D789, "M", "∂"), + (0x1D78A, "M", "ε"), + (0x1D78B, "M", "θ"), + (0x1D78C, "M", "κ"), + (0x1D78D, "M", "φ"), + (0x1D78E, "M", "ρ"), + (0x1D78F, "M", "π"), + (0x1D790, "M", "α"), + (0x1D791, "M", "β"), + (0x1D792, "M", "γ"), + (0x1D793, "M", "δ"), + (0x1D794, "M", "ε"), + (0x1D795, "M", "ζ"), + (0x1D796, "M", "η"), + (0x1D797, "M", "θ"), + (0x1D798, "M", "ι"), + (0x1D799, "M", "κ"), + (0x1D79A, "M", "λ"), + (0x1D79B, "M", "μ"), + (0x1D79C, "M", "ν"), + (0x1D79D, "M", "ξ"), + (0x1D79E, "M", "ο"), + (0x1D79F, "M", "π"), + (0x1D7A0, "M", "ρ"), + (0x1D7A1, "M", "θ"), + (0x1D7A2, "M", "σ"), + (0x1D7A3, "M", "τ"), + (0x1D7A4, "M", "υ"), + (0x1D7A5, "M", "φ"), + (0x1D7A6, "M", "χ"), + (0x1D7A7, "M", "ψ"), + (0x1D7A8, "M", "ω"), + (0x1D7A9, "M", "∇"), + (0x1D7AA, "M", "α"), + (0x1D7AB, "M", "β"), + (0x1D7AC, "M", "γ"), + (0x1D7AD, "M", "δ"), + (0x1D7AE, "M", "ε"), + (0x1D7AF, "M", "ζ"), + (0x1D7B0, "M", "η"), + (0x1D7B1, "M", "θ"), + (0x1D7B2, "M", "ι"), + (0x1D7B3, "M", "κ"), + ] + + +def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D7B4, "M", "λ"), + (0x1D7B5, "M", "μ"), + (0x1D7B6, "M", "ν"), + (0x1D7B7, "M", "ξ"), + (0x1D7B8, "M", "ο"), + (0x1D7B9, "M", "π"), + (0x1D7BA, "M", "ρ"), + (0x1D7BB, "M", "σ"), + (0x1D7BD, "M", "τ"), + (0x1D7BE, "M", "υ"), + (0x1D7BF, "M", "φ"), + (0x1D7C0, "M", "χ"), + (0x1D7C1, "M", "ψ"), + (0x1D7C2, "M", "ω"), + (0x1D7C3, "M", "∂"), + (0x1D7C4, "M", "ε"), + (0x1D7C5, "M", "θ"), + (0x1D7C6, "M", "κ"), + (0x1D7C7, "M", "φ"), + (0x1D7C8, "M", "ρ"), + (0x1D7C9, "M", "π"), + (0x1D7CA, "M", "ϝ"), + (0x1D7CC, "X"), + (0x1D7CE, "M", "0"), + (0x1D7CF, "M", "1"), + (0x1D7D0, "M", "2"), + (0x1D7D1, "M", "3"), + (0x1D7D2, "M", "4"), + (0x1D7D3, "M", "5"), + (0x1D7D4, "M", "6"), + (0x1D7D5, "M", "7"), + (0x1D7D6, "M", "8"), + (0x1D7D7, "M", "9"), + (0x1D7D8, "M", "0"), + (0x1D7D9, "M", "1"), + (0x1D7DA, "M", "2"), + (0x1D7DB, "M", "3"), + (0x1D7DC, "M", "4"), + (0x1D7DD, "M", "5"), + (0x1D7DE, "M", "6"), + (0x1D7DF, "M", "7"), + (0x1D7E0, "M", "8"), + (0x1D7E1, "M", "9"), + (0x1D7E2, "M", "0"), + (0x1D7E3, "M", "1"), + (0x1D7E4, "M", "2"), + (0x1D7E5, "M", "3"), + (0x1D7E6, "M", "4"), + (0x1D7E7, "M", "5"), + (0x1D7E8, "M", "6"), + (0x1D7E9, "M", "7"), + (0x1D7EA, "M", "8"), + (0x1D7EB, "M", "9"), + (0x1D7EC, "M", "0"), + (0x1D7ED, "M", "1"), + (0x1D7EE, "M", "2"), + (0x1D7EF, "M", "3"), + (0x1D7F0, "M", "4"), + (0x1D7F1, "M", "5"), + (0x1D7F2, "M", "6"), + (0x1D7F3, "M", "7"), + (0x1D7F4, "M", "8"), + (0x1D7F5, "M", "9"), + (0x1D7F6, "M", "0"), + (0x1D7F7, "M", "1"), + (0x1D7F8, "M", "2"), + (0x1D7F9, "M", "3"), + (0x1D7FA, "M", "4"), + (0x1D7FB, "M", "5"), + (0x1D7FC, "M", "6"), + (0x1D7FD, "M", "7"), + (0x1D7FE, "M", "8"), + (0x1D7FF, "M", "9"), + (0x1D800, "V"), + (0x1DA8C, "X"), + (0x1DA9B, "V"), + (0x1DAA0, "X"), + (0x1DAA1, "V"), + (0x1DAB0, "X"), + (0x1DF00, "V"), + (0x1DF1F, "X"), + (0x1DF25, "V"), + (0x1DF2B, "X"), + (0x1E000, "V"), + (0x1E007, "X"), + (0x1E008, "V"), + (0x1E019, "X"), + (0x1E01B, "V"), + (0x1E022, "X"), + (0x1E023, "V"), + (0x1E025, "X"), + (0x1E026, "V"), + (0x1E02B, "X"), + (0x1E030, "M", "а"), + (0x1E031, "M", "б"), + (0x1E032, "M", "в"), + (0x1E033, "M", "г"), + (0x1E034, "M", "д"), + (0x1E035, "M", "е"), 
+ (0x1E036, "M", "ж"), + ] + + +def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E037, "M", "з"), + (0x1E038, "M", "и"), + (0x1E039, "M", "к"), + (0x1E03A, "M", "л"), + (0x1E03B, "M", "м"), + (0x1E03C, "M", "о"), + (0x1E03D, "M", "п"), + (0x1E03E, "M", "р"), + (0x1E03F, "M", "с"), + (0x1E040, "M", "т"), + (0x1E041, "M", "у"), + (0x1E042, "M", "ф"), + (0x1E043, "M", "х"), + (0x1E044, "M", "ц"), + (0x1E045, "M", "ч"), + (0x1E046, "M", "ш"), + (0x1E047, "M", "ы"), + (0x1E048, "M", "э"), + (0x1E049, "M", "ю"), + (0x1E04A, "M", "ꚉ"), + (0x1E04B, "M", "ә"), + (0x1E04C, "M", "і"), + (0x1E04D, "M", "ј"), + (0x1E04E, "M", "ө"), + (0x1E04F, "M", "ү"), + (0x1E050, "M", "ӏ"), + (0x1E051, "M", "а"), + (0x1E052, "M", "б"), + (0x1E053, "M", "в"), + (0x1E054, "M", "г"), + (0x1E055, "M", "д"), + (0x1E056, "M", "е"), + (0x1E057, "M", "ж"), + (0x1E058, "M", "з"), + (0x1E059, "M", "и"), + (0x1E05A, "M", "к"), + (0x1E05B, "M", "л"), + (0x1E05C, "M", "о"), + (0x1E05D, "M", "п"), + (0x1E05E, "M", "с"), + (0x1E05F, "M", "у"), + (0x1E060, "M", "ф"), + (0x1E061, "M", "х"), + (0x1E062, "M", "ц"), + (0x1E063, "M", "ч"), + (0x1E064, "M", "ш"), + (0x1E065, "M", "ъ"), + (0x1E066, "M", "ы"), + (0x1E067, "M", "ґ"), + (0x1E068, "M", "і"), + (0x1E069, "M", "ѕ"), + (0x1E06A, "M", "џ"), + (0x1E06B, "M", "ҫ"), + (0x1E06C, "M", "ꙑ"), + (0x1E06D, "M", "ұ"), + (0x1E06E, "X"), + (0x1E08F, "V"), + (0x1E090, "X"), + (0x1E100, "V"), + (0x1E12D, "X"), + (0x1E130, "V"), + (0x1E13E, "X"), + (0x1E140, "V"), + (0x1E14A, "X"), + (0x1E14E, "V"), + (0x1E150, "X"), + (0x1E290, "V"), + (0x1E2AF, "X"), + (0x1E2C0, "V"), + (0x1E2FA, "X"), + (0x1E2FF, "V"), + (0x1E300, "X"), + (0x1E4D0, "V"), + (0x1E4FA, "X"), + (0x1E7E0, "V"), + (0x1E7E7, "X"), + (0x1E7E8, "V"), + (0x1E7EC, "X"), + (0x1E7ED, "V"), + (0x1E7EF, "X"), + (0x1E7F0, "V"), + (0x1E7FF, "X"), + (0x1E800, "V"), + (0x1E8C5, "X"), + (0x1E8C7, "V"), + (0x1E8D7, "X"), + (0x1E900, "M", "𞤢"), + (0x1E901, "M", "𞤣"), + (0x1E902, "M", "𞤤"), + (0x1E903, "M", "𞤥"), + (0x1E904, "M", "𞤦"), + (0x1E905, "M", "𞤧"), + (0x1E906, "M", "𞤨"), + (0x1E907, "M", "𞤩"), + (0x1E908, "M", "𞤪"), + (0x1E909, "M", "𞤫"), + (0x1E90A, "M", "𞤬"), + (0x1E90B, "M", "𞤭"), + (0x1E90C, "M", "𞤮"), + (0x1E90D, "M", "𞤯"), + ] + + +def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E90E, "M", "𞤰"), + (0x1E90F, "M", "𞤱"), + (0x1E910, "M", "𞤲"), + (0x1E911, "M", "𞤳"), + (0x1E912, "M", "𞤴"), + (0x1E913, "M", "𞤵"), + (0x1E914, "M", "𞤶"), + (0x1E915, "M", "𞤷"), + (0x1E916, "M", "𞤸"), + (0x1E917, "M", "𞤹"), + (0x1E918, "M", "𞤺"), + (0x1E919, "M", "𞤻"), + (0x1E91A, "M", "𞤼"), + (0x1E91B, "M", "𞤽"), + (0x1E91C, "M", "𞤾"), + (0x1E91D, "M", "𞤿"), + (0x1E91E, "M", "𞥀"), + (0x1E91F, "M", "𞥁"), + (0x1E920, "M", "𞥂"), + (0x1E921, "M", "𞥃"), + (0x1E922, "V"), + (0x1E94C, "X"), + (0x1E950, "V"), + (0x1E95A, "X"), + (0x1E95E, "V"), + (0x1E960, "X"), + (0x1EC71, "V"), + (0x1ECB5, "X"), + (0x1ED01, "V"), + (0x1ED3E, "X"), + (0x1EE00, "M", "ا"), + (0x1EE01, "M", "ب"), + (0x1EE02, "M", "ج"), + (0x1EE03, "M", "د"), + (0x1EE04, "X"), + (0x1EE05, "M", "و"), + (0x1EE06, "M", "ز"), + (0x1EE07, "M", "ح"), + (0x1EE08, "M", "ط"), + (0x1EE09, "M", "ي"), + (0x1EE0A, "M", "ك"), + (0x1EE0B, "M", "ل"), + (0x1EE0C, "M", "م"), + (0x1EE0D, "M", "ن"), + (0x1EE0E, "M", "س"), + (0x1EE0F, "M", "ع"), + (0x1EE10, "M", "ف"), + (0x1EE11, "M", "ص"), + (0x1EE12, "M", "ق"), + (0x1EE13, "M", "ر"), + (0x1EE14, "M", "ش"), + (0x1EE15, "M", "ت"), + (0x1EE16, "M", "ث"), + (0x1EE17, "M", "خ"), + (0x1EE18, "M", 
"ذ"), + (0x1EE19, "M", "ض"), + (0x1EE1A, "M", "ظ"), + (0x1EE1B, "M", "غ"), + (0x1EE1C, "M", "ٮ"), + (0x1EE1D, "M", "ں"), + (0x1EE1E, "M", "ڡ"), + (0x1EE1F, "M", "ٯ"), + (0x1EE20, "X"), + (0x1EE21, "M", "ب"), + (0x1EE22, "M", "ج"), + (0x1EE23, "X"), + (0x1EE24, "M", "ه"), + (0x1EE25, "X"), + (0x1EE27, "M", "ح"), + (0x1EE28, "X"), + (0x1EE29, "M", "ي"), + (0x1EE2A, "M", "ك"), + (0x1EE2B, "M", "ل"), + (0x1EE2C, "M", "م"), + (0x1EE2D, "M", "ن"), + (0x1EE2E, "M", "س"), + (0x1EE2F, "M", "ع"), + (0x1EE30, "M", "ف"), + (0x1EE31, "M", "ص"), + (0x1EE32, "M", "ق"), + (0x1EE33, "X"), + (0x1EE34, "M", "ش"), + (0x1EE35, "M", "ت"), + (0x1EE36, "M", "ث"), + (0x1EE37, "M", "خ"), + (0x1EE38, "X"), + (0x1EE39, "M", "ض"), + (0x1EE3A, "X"), + (0x1EE3B, "M", "غ"), + (0x1EE3C, "X"), + (0x1EE42, "M", "ج"), + (0x1EE43, "X"), + (0x1EE47, "M", "ح"), + (0x1EE48, "X"), + (0x1EE49, "M", "ي"), + (0x1EE4A, "X"), + (0x1EE4B, "M", "ل"), + (0x1EE4C, "X"), + (0x1EE4D, "M", "ن"), + (0x1EE4E, "M", "س"), + ] + + +def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EE4F, "M", "ع"), + (0x1EE50, "X"), + (0x1EE51, "M", "ص"), + (0x1EE52, "M", "ق"), + (0x1EE53, "X"), + (0x1EE54, "M", "ش"), + (0x1EE55, "X"), + (0x1EE57, "M", "خ"), + (0x1EE58, "X"), + (0x1EE59, "M", "ض"), + (0x1EE5A, "X"), + (0x1EE5B, "M", "غ"), + (0x1EE5C, "X"), + (0x1EE5D, "M", "ں"), + (0x1EE5E, "X"), + (0x1EE5F, "M", "ٯ"), + (0x1EE60, "X"), + (0x1EE61, "M", "ب"), + (0x1EE62, "M", "ج"), + (0x1EE63, "X"), + (0x1EE64, "M", "ه"), + (0x1EE65, "X"), + (0x1EE67, "M", "ح"), + (0x1EE68, "M", "ط"), + (0x1EE69, "M", "ي"), + (0x1EE6A, "M", "ك"), + (0x1EE6B, "X"), + (0x1EE6C, "M", "م"), + (0x1EE6D, "M", "ن"), + (0x1EE6E, "M", "س"), + (0x1EE6F, "M", "ع"), + (0x1EE70, "M", "ف"), + (0x1EE71, "M", "ص"), + (0x1EE72, "M", "ق"), + (0x1EE73, "X"), + (0x1EE74, "M", "ش"), + (0x1EE75, "M", "ت"), + (0x1EE76, "M", "ث"), + (0x1EE77, "M", "خ"), + (0x1EE78, "X"), + (0x1EE79, "M", "ض"), + (0x1EE7A, "M", "ظ"), + (0x1EE7B, "M", "غ"), + (0x1EE7C, "M", "ٮ"), + (0x1EE7D, "X"), + (0x1EE7E, "M", "ڡ"), + (0x1EE7F, "X"), + (0x1EE80, "M", "ا"), + (0x1EE81, "M", "ب"), + (0x1EE82, "M", "ج"), + (0x1EE83, "M", "د"), + (0x1EE84, "M", "ه"), + (0x1EE85, "M", "و"), + (0x1EE86, "M", "ز"), + (0x1EE87, "M", "ح"), + (0x1EE88, "M", "ط"), + (0x1EE89, "M", "ي"), + (0x1EE8A, "X"), + (0x1EE8B, "M", "ل"), + (0x1EE8C, "M", "م"), + (0x1EE8D, "M", "ن"), + (0x1EE8E, "M", "س"), + (0x1EE8F, "M", "ع"), + (0x1EE90, "M", "ف"), + (0x1EE91, "M", "ص"), + (0x1EE92, "M", "ق"), + (0x1EE93, "M", "ر"), + (0x1EE94, "M", "ش"), + (0x1EE95, "M", "ت"), + (0x1EE96, "M", "ث"), + (0x1EE97, "M", "خ"), + (0x1EE98, "M", "ذ"), + (0x1EE99, "M", "ض"), + (0x1EE9A, "M", "ظ"), + (0x1EE9B, "M", "غ"), + (0x1EE9C, "X"), + (0x1EEA1, "M", "ب"), + (0x1EEA2, "M", "ج"), + (0x1EEA3, "M", "د"), + (0x1EEA4, "X"), + (0x1EEA5, "M", "و"), + (0x1EEA6, "M", "ز"), + (0x1EEA7, "M", "ح"), + (0x1EEA8, "M", "ط"), + (0x1EEA9, "M", "ي"), + (0x1EEAA, "X"), + (0x1EEAB, "M", "ل"), + (0x1EEAC, "M", "م"), + (0x1EEAD, "M", "ن"), + (0x1EEAE, "M", "س"), + (0x1EEAF, "M", "ع"), + (0x1EEB0, "M", "ف"), + (0x1EEB1, "M", "ص"), + (0x1EEB2, "M", "ق"), + (0x1EEB3, "M", "ر"), + (0x1EEB4, "M", "ش"), + (0x1EEB5, "M", "ت"), + (0x1EEB6, "M", "ث"), + (0x1EEB7, "M", "خ"), + (0x1EEB8, "M", "ذ"), + ] + + +def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EEB9, "M", "ض"), + (0x1EEBA, "M", "ظ"), + (0x1EEBB, "M", "غ"), + (0x1EEBC, "X"), + (0x1EEF0, "V"), + (0x1EEF2, "X"), + (0x1F000, "V"), + (0x1F02C, "X"), + (0x1F030, "V"), + (0x1F094, 
"X"), + (0x1F0A0, "V"), + (0x1F0AF, "X"), + (0x1F0B1, "V"), + (0x1F0C0, "X"), + (0x1F0C1, "V"), + (0x1F0D0, "X"), + (0x1F0D1, "V"), + (0x1F0F6, "X"), + (0x1F101, "3", "0,"), + (0x1F102, "3", "1,"), + (0x1F103, "3", "2,"), + (0x1F104, "3", "3,"), + (0x1F105, "3", "4,"), + (0x1F106, "3", "5,"), + (0x1F107, "3", "6,"), + (0x1F108, "3", "7,"), + (0x1F109, "3", "8,"), + (0x1F10A, "3", "9,"), + (0x1F10B, "V"), + (0x1F110, "3", "(a)"), + (0x1F111, "3", "(b)"), + (0x1F112, "3", "(c)"), + (0x1F113, "3", "(d)"), + (0x1F114, "3", "(e)"), + (0x1F115, "3", "(f)"), + (0x1F116, "3", "(g)"), + (0x1F117, "3", "(h)"), + (0x1F118, "3", "(i)"), + (0x1F119, "3", "(j)"), + (0x1F11A, "3", "(k)"), + (0x1F11B, "3", "(l)"), + (0x1F11C, "3", "(m)"), + (0x1F11D, "3", "(n)"), + (0x1F11E, "3", "(o)"), + (0x1F11F, "3", "(p)"), + (0x1F120, "3", "(q)"), + (0x1F121, "3", "(r)"), + (0x1F122, "3", "(s)"), + (0x1F123, "3", "(t)"), + (0x1F124, "3", "(u)"), + (0x1F125, "3", "(v)"), + (0x1F126, "3", "(w)"), + (0x1F127, "3", "(x)"), + (0x1F128, "3", "(y)"), + (0x1F129, "3", "(z)"), + (0x1F12A, "M", "〔s〕"), + (0x1F12B, "M", "c"), + (0x1F12C, "M", "r"), + (0x1F12D, "M", "cd"), + (0x1F12E, "M", "wz"), + (0x1F12F, "V"), + (0x1F130, "M", "a"), + (0x1F131, "M", "b"), + (0x1F132, "M", "c"), + (0x1F133, "M", "d"), + (0x1F134, "M", "e"), + (0x1F135, "M", "f"), + (0x1F136, "M", "g"), + (0x1F137, "M", "h"), + (0x1F138, "M", "i"), + (0x1F139, "M", "j"), + (0x1F13A, "M", "k"), + (0x1F13B, "M", "l"), + (0x1F13C, "M", "m"), + (0x1F13D, "M", "n"), + (0x1F13E, "M", "o"), + (0x1F13F, "M", "p"), + (0x1F140, "M", "q"), + (0x1F141, "M", "r"), + (0x1F142, "M", "s"), + (0x1F143, "M", "t"), + (0x1F144, "M", "u"), + (0x1F145, "M", "v"), + (0x1F146, "M", "w"), + (0x1F147, "M", "x"), + (0x1F148, "M", "y"), + (0x1F149, "M", "z"), + (0x1F14A, "M", "hv"), + (0x1F14B, "M", "mv"), + (0x1F14C, "M", "sd"), + (0x1F14D, "M", "ss"), + (0x1F14E, "M", "ppv"), + (0x1F14F, "M", "wc"), + (0x1F150, "V"), + (0x1F16A, "M", "mc"), + (0x1F16B, "M", "md"), + (0x1F16C, "M", "mr"), + (0x1F16D, "V"), + (0x1F190, "M", "dj"), + (0x1F191, "V"), + ] + + +def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F1AE, "X"), + (0x1F1E6, "V"), + (0x1F200, "M", "ほか"), + (0x1F201, "M", "ココ"), + (0x1F202, "M", "サ"), + (0x1F203, "X"), + (0x1F210, "M", "手"), + (0x1F211, "M", "字"), + (0x1F212, "M", "双"), + (0x1F213, "M", "デ"), + (0x1F214, "M", "二"), + (0x1F215, "M", "多"), + (0x1F216, "M", "解"), + (0x1F217, "M", "天"), + (0x1F218, "M", "交"), + (0x1F219, "M", "映"), + (0x1F21A, "M", "無"), + (0x1F21B, "M", "料"), + (0x1F21C, "M", "前"), + (0x1F21D, "M", "後"), + (0x1F21E, "M", "再"), + (0x1F21F, "M", "新"), + (0x1F220, "M", "初"), + (0x1F221, "M", "終"), + (0x1F222, "M", "生"), + (0x1F223, "M", "販"), + (0x1F224, "M", "声"), + (0x1F225, "M", "吹"), + (0x1F226, "M", "演"), + (0x1F227, "M", "投"), + (0x1F228, "M", "捕"), + (0x1F229, "M", "一"), + (0x1F22A, "M", "三"), + (0x1F22B, "M", "遊"), + (0x1F22C, "M", "左"), + (0x1F22D, "M", "中"), + (0x1F22E, "M", "右"), + (0x1F22F, "M", "指"), + (0x1F230, "M", "走"), + (0x1F231, "M", "打"), + (0x1F232, "M", "禁"), + (0x1F233, "M", "空"), + (0x1F234, "M", "合"), + (0x1F235, "M", "満"), + (0x1F236, "M", "有"), + (0x1F237, "M", "月"), + (0x1F238, "M", "申"), + (0x1F239, "M", "割"), + (0x1F23A, "M", "営"), + (0x1F23B, "M", "配"), + (0x1F23C, "X"), + (0x1F240, "M", "〔本〕"), + (0x1F241, "M", "〔三〕"), + (0x1F242, "M", "〔二〕"), + (0x1F243, "M", "〔安〕"), + (0x1F244, "M", "〔点〕"), + (0x1F245, "M", "〔打〕"), + (0x1F246, "M", "〔盗〕"), + (0x1F247, "M", "〔勝〕"), + (0x1F248, "M", 
"〔敗〕"), + (0x1F249, "X"), + (0x1F250, "M", "得"), + (0x1F251, "M", "可"), + (0x1F252, "X"), + (0x1F260, "V"), + (0x1F266, "X"), + (0x1F300, "V"), + (0x1F6D8, "X"), + (0x1F6DC, "V"), + (0x1F6ED, "X"), + (0x1F6F0, "V"), + (0x1F6FD, "X"), + (0x1F700, "V"), + (0x1F777, "X"), + (0x1F77B, "V"), + (0x1F7DA, "X"), + (0x1F7E0, "V"), + (0x1F7EC, "X"), + (0x1F7F0, "V"), + (0x1F7F1, "X"), + (0x1F800, "V"), + (0x1F80C, "X"), + (0x1F810, "V"), + (0x1F848, "X"), + (0x1F850, "V"), + (0x1F85A, "X"), + (0x1F860, "V"), + (0x1F888, "X"), + (0x1F890, "V"), + (0x1F8AE, "X"), + (0x1F8B0, "V"), + (0x1F8B2, "X"), + (0x1F900, "V"), + (0x1FA54, "X"), + (0x1FA60, "V"), + (0x1FA6E, "X"), + (0x1FA70, "V"), + (0x1FA7D, "X"), + (0x1FA80, "V"), + (0x1FA89, "X"), + ] + + +def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1FA90, "V"), + (0x1FABE, "X"), + (0x1FABF, "V"), + (0x1FAC6, "X"), + (0x1FACE, "V"), + (0x1FADC, "X"), + (0x1FAE0, "V"), + (0x1FAE9, "X"), + (0x1FAF0, "V"), + (0x1FAF9, "X"), + (0x1FB00, "V"), + (0x1FB93, "X"), + (0x1FB94, "V"), + (0x1FBCB, "X"), + (0x1FBF0, "M", "0"), + (0x1FBF1, "M", "1"), + (0x1FBF2, "M", "2"), + (0x1FBF3, "M", "3"), + (0x1FBF4, "M", "4"), + (0x1FBF5, "M", "5"), + (0x1FBF6, "M", "6"), + (0x1FBF7, "M", "7"), + (0x1FBF8, "M", "8"), + (0x1FBF9, "M", "9"), + (0x1FBFA, "X"), + (0x20000, "V"), + (0x2A6E0, "X"), + (0x2A700, "V"), + (0x2B73A, "X"), + (0x2B740, "V"), + (0x2B81E, "X"), + (0x2B820, "V"), + (0x2CEA2, "X"), + (0x2CEB0, "V"), + (0x2EBE1, "X"), + (0x2EBF0, "V"), + (0x2EE5E, "X"), + (0x2F800, "M", "丽"), + (0x2F801, "M", "丸"), + (0x2F802, "M", "乁"), + (0x2F803, "M", "𠄢"), + (0x2F804, "M", "你"), + (0x2F805, "M", "侮"), + (0x2F806, "M", "侻"), + (0x2F807, "M", "倂"), + (0x2F808, "M", "偺"), + (0x2F809, "M", "備"), + (0x2F80A, "M", "僧"), + (0x2F80B, "M", "像"), + (0x2F80C, "M", "㒞"), + (0x2F80D, "M", "𠘺"), + (0x2F80E, "M", "免"), + (0x2F80F, "M", "兔"), + (0x2F810, "M", "兤"), + (0x2F811, "M", "具"), + (0x2F812, "M", "𠔜"), + (0x2F813, "M", "㒹"), + (0x2F814, "M", "內"), + (0x2F815, "M", "再"), + (0x2F816, "M", "𠕋"), + (0x2F817, "M", "冗"), + (0x2F818, "M", "冤"), + (0x2F819, "M", "仌"), + (0x2F81A, "M", "冬"), + (0x2F81B, "M", "况"), + (0x2F81C, "M", "𩇟"), + (0x2F81D, "M", "凵"), + (0x2F81E, "M", "刃"), + (0x2F81F, "M", "㓟"), + (0x2F820, "M", "刻"), + (0x2F821, "M", "剆"), + (0x2F822, "M", "割"), + (0x2F823, "M", "剷"), + (0x2F824, "M", "㔕"), + (0x2F825, "M", "勇"), + (0x2F826, "M", "勉"), + (0x2F827, "M", "勤"), + (0x2F828, "M", "勺"), + (0x2F829, "M", "包"), + (0x2F82A, "M", "匆"), + (0x2F82B, "M", "北"), + (0x2F82C, "M", "卉"), + (0x2F82D, "M", "卑"), + (0x2F82E, "M", "博"), + (0x2F82F, "M", "即"), + (0x2F830, "M", "卽"), + (0x2F831, "M", "卿"), + (0x2F834, "M", "𠨬"), + (0x2F835, "M", "灰"), + (0x2F836, "M", "及"), + (0x2F837, "M", "叟"), + (0x2F838, "M", "𠭣"), + (0x2F839, "M", "叫"), + (0x2F83A, "M", "叱"), + (0x2F83B, "M", "吆"), + (0x2F83C, "M", "咞"), + (0x2F83D, "M", "吸"), + (0x2F83E, "M", "呈"), + (0x2F83F, "M", "周"), + (0x2F840, "M", "咢"), + ] + + +def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F841, "M", "哶"), + (0x2F842, "M", "唐"), + (0x2F843, "M", "啓"), + (0x2F844, "M", "啣"), + (0x2F845, "M", "善"), + (0x2F847, "M", "喙"), + (0x2F848, "M", "喫"), + (0x2F849, "M", "喳"), + (0x2F84A, "M", "嗂"), + (0x2F84B, "M", "圖"), + (0x2F84C, "M", "嘆"), + (0x2F84D, "M", "圗"), + (0x2F84E, "M", "噑"), + (0x2F84F, "M", "噴"), + (0x2F850, "M", "切"), + (0x2F851, "M", "壮"), + (0x2F852, "M", "城"), + (0x2F853, "M", "埴"), + (0x2F854, "M", "堍"), + (0x2F855, "M", "型"), + (0x2F856, 
"M", "堲"), + (0x2F857, "M", "報"), + (0x2F858, "M", "墬"), + (0x2F859, "M", "𡓤"), + (0x2F85A, "M", "売"), + (0x2F85B, "M", "壷"), + (0x2F85C, "M", "夆"), + (0x2F85D, "M", "多"), + (0x2F85E, "M", "夢"), + (0x2F85F, "M", "奢"), + (0x2F860, "M", "𡚨"), + (0x2F861, "M", "𡛪"), + (0x2F862, "M", "姬"), + (0x2F863, "M", "娛"), + (0x2F864, "M", "娧"), + (0x2F865, "M", "姘"), + (0x2F866, "M", "婦"), + (0x2F867, "M", "㛮"), + (0x2F868, "X"), + (0x2F869, "M", "嬈"), + (0x2F86A, "M", "嬾"), + (0x2F86C, "M", "𡧈"), + (0x2F86D, "M", "寃"), + (0x2F86E, "M", "寘"), + (0x2F86F, "M", "寧"), + (0x2F870, "M", "寳"), + (0x2F871, "M", "𡬘"), + (0x2F872, "M", "寿"), + (0x2F873, "M", "将"), + (0x2F874, "X"), + (0x2F875, "M", "尢"), + (0x2F876, "M", "㞁"), + (0x2F877, "M", "屠"), + (0x2F878, "M", "屮"), + (0x2F879, "M", "峀"), + (0x2F87A, "M", "岍"), + (0x2F87B, "M", "𡷤"), + (0x2F87C, "M", "嵃"), + (0x2F87D, "M", "𡷦"), + (0x2F87E, "M", "嵮"), + (0x2F87F, "M", "嵫"), + (0x2F880, "M", "嵼"), + (0x2F881, "M", "巡"), + (0x2F882, "M", "巢"), + (0x2F883, "M", "㠯"), + (0x2F884, "M", "巽"), + (0x2F885, "M", "帨"), + (0x2F886, "M", "帽"), + (0x2F887, "M", "幩"), + (0x2F888, "M", "㡢"), + (0x2F889, "M", "𢆃"), + (0x2F88A, "M", "㡼"), + (0x2F88B, "M", "庰"), + (0x2F88C, "M", "庳"), + (0x2F88D, "M", "庶"), + (0x2F88E, "M", "廊"), + (0x2F88F, "M", "𪎒"), + (0x2F890, "M", "廾"), + (0x2F891, "M", "𢌱"), + (0x2F893, "M", "舁"), + (0x2F894, "M", "弢"), + (0x2F896, "M", "㣇"), + (0x2F897, "M", "𣊸"), + (0x2F898, "M", "𦇚"), + (0x2F899, "M", "形"), + (0x2F89A, "M", "彫"), + (0x2F89B, "M", "㣣"), + (0x2F89C, "M", "徚"), + (0x2F89D, "M", "忍"), + (0x2F89E, "M", "志"), + (0x2F89F, "M", "忹"), + (0x2F8A0, "M", "悁"), + (0x2F8A1, "M", "㤺"), + (0x2F8A2, "M", "㤜"), + (0x2F8A3, "M", "悔"), + (0x2F8A4, "M", "𢛔"), + (0x2F8A5, "M", "惇"), + (0x2F8A6, "M", "慈"), + (0x2F8A7, "M", "慌"), + (0x2F8A8, "M", "慎"), + ] + + +def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F8A9, "M", "慌"), + (0x2F8AA, "M", "慺"), + (0x2F8AB, "M", "憎"), + (0x2F8AC, "M", "憲"), + (0x2F8AD, "M", "憤"), + (0x2F8AE, "M", "憯"), + (0x2F8AF, "M", "懞"), + (0x2F8B0, "M", "懲"), + (0x2F8B1, "M", "懶"), + (0x2F8B2, "M", "成"), + (0x2F8B3, "M", "戛"), + (0x2F8B4, "M", "扝"), + (0x2F8B5, "M", "抱"), + (0x2F8B6, "M", "拔"), + (0x2F8B7, "M", "捐"), + (0x2F8B8, "M", "𢬌"), + (0x2F8B9, "M", "挽"), + (0x2F8BA, "M", "拼"), + (0x2F8BB, "M", "捨"), + (0x2F8BC, "M", "掃"), + (0x2F8BD, "M", "揤"), + (0x2F8BE, "M", "𢯱"), + (0x2F8BF, "M", "搢"), + (0x2F8C0, "M", "揅"), + (0x2F8C1, "M", "掩"), + (0x2F8C2, "M", "㨮"), + (0x2F8C3, "M", "摩"), + (0x2F8C4, "M", "摾"), + (0x2F8C5, "M", "撝"), + (0x2F8C6, "M", "摷"), + (0x2F8C7, "M", "㩬"), + (0x2F8C8, "M", "敏"), + (0x2F8C9, "M", "敬"), + (0x2F8CA, "M", "𣀊"), + (0x2F8CB, "M", "旣"), + (0x2F8CC, "M", "書"), + (0x2F8CD, "M", "晉"), + (0x2F8CE, "M", "㬙"), + (0x2F8CF, "M", "暑"), + (0x2F8D0, "M", "㬈"), + (0x2F8D1, "M", "㫤"), + (0x2F8D2, "M", "冒"), + (0x2F8D3, "M", "冕"), + (0x2F8D4, "M", "最"), + (0x2F8D5, "M", "暜"), + (0x2F8D6, "M", "肭"), + (0x2F8D7, "M", "䏙"), + (0x2F8D8, "M", "朗"), + (0x2F8D9, "M", "望"), + (0x2F8DA, "M", "朡"), + (0x2F8DB, "M", "杞"), + (0x2F8DC, "M", "杓"), + (0x2F8DD, "M", "𣏃"), + (0x2F8DE, "M", "㭉"), + (0x2F8DF, "M", "柺"), + (0x2F8E0, "M", "枅"), + (0x2F8E1, "M", "桒"), + (0x2F8E2, "M", "梅"), + (0x2F8E3, "M", "𣑭"), + (0x2F8E4, "M", "梎"), + (0x2F8E5, "M", "栟"), + (0x2F8E6, "M", "椔"), + (0x2F8E7, "M", "㮝"), + (0x2F8E8, "M", "楂"), + (0x2F8E9, "M", "榣"), + (0x2F8EA, "M", "槪"), + (0x2F8EB, "M", "檨"), + (0x2F8EC, "M", "𣚣"), + (0x2F8ED, "M", "櫛"), + (0x2F8EE, "M", "㰘"), + (0x2F8EF, "M", "次"), + (0x2F8F0, 
"M", "𣢧"), + (0x2F8F1, "M", "歔"), + (0x2F8F2, "M", "㱎"), + (0x2F8F3, "M", "歲"), + (0x2F8F4, "M", "殟"), + (0x2F8F5, "M", "殺"), + (0x2F8F6, "M", "殻"), + (0x2F8F7, "M", "𣪍"), + (0x2F8F8, "M", "𡴋"), + (0x2F8F9, "M", "𣫺"), + (0x2F8FA, "M", "汎"), + (0x2F8FB, "M", "𣲼"), + (0x2F8FC, "M", "沿"), + (0x2F8FD, "M", "泍"), + (0x2F8FE, "M", "汧"), + (0x2F8FF, "M", "洖"), + (0x2F900, "M", "派"), + (0x2F901, "M", "海"), + (0x2F902, "M", "流"), + (0x2F903, "M", "浩"), + (0x2F904, "M", "浸"), + (0x2F905, "M", "涅"), + (0x2F906, "M", "𣴞"), + (0x2F907, "M", "洴"), + (0x2F908, "M", "港"), + (0x2F909, "M", "湮"), + (0x2F90A, "M", "㴳"), + (0x2F90B, "M", "滋"), + (0x2F90C, "M", "滇"), + ] + + +def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F90D, "M", "𣻑"), + (0x2F90E, "M", "淹"), + (0x2F90F, "M", "潮"), + (0x2F910, "M", "𣽞"), + (0x2F911, "M", "𣾎"), + (0x2F912, "M", "濆"), + (0x2F913, "M", "瀹"), + (0x2F914, "M", "瀞"), + (0x2F915, "M", "瀛"), + (0x2F916, "M", "㶖"), + (0x2F917, "M", "灊"), + (0x2F918, "M", "災"), + (0x2F919, "M", "灷"), + (0x2F91A, "M", "炭"), + (0x2F91B, "M", "𠔥"), + (0x2F91C, "M", "煅"), + (0x2F91D, "M", "𤉣"), + (0x2F91E, "M", "熜"), + (0x2F91F, "X"), + (0x2F920, "M", "爨"), + (0x2F921, "M", "爵"), + (0x2F922, "M", "牐"), + (0x2F923, "M", "𤘈"), + (0x2F924, "M", "犀"), + (0x2F925, "M", "犕"), + (0x2F926, "M", "𤜵"), + (0x2F927, "M", "𤠔"), + (0x2F928, "M", "獺"), + (0x2F929, "M", "王"), + (0x2F92A, "M", "㺬"), + (0x2F92B, "M", "玥"), + (0x2F92C, "M", "㺸"), + (0x2F92E, "M", "瑇"), + (0x2F92F, "M", "瑜"), + (0x2F930, "M", "瑱"), + (0x2F931, "M", "璅"), + (0x2F932, "M", "瓊"), + (0x2F933, "M", "㼛"), + (0x2F934, "M", "甤"), + (0x2F935, "M", "𤰶"), + (0x2F936, "M", "甾"), + (0x2F937, "M", "𤲒"), + (0x2F938, "M", "異"), + (0x2F939, "M", "𢆟"), + (0x2F93A, "M", "瘐"), + (0x2F93B, "M", "𤾡"), + (0x2F93C, "M", "𤾸"), + (0x2F93D, "M", "𥁄"), + (0x2F93E, "M", "㿼"), + (0x2F93F, "M", "䀈"), + (0x2F940, "M", "直"), + (0x2F941, "M", "𥃳"), + (0x2F942, "M", "𥃲"), + (0x2F943, "M", "𥄙"), + (0x2F944, "M", "𥄳"), + (0x2F945, "M", "眞"), + (0x2F946, "M", "真"), + (0x2F948, "M", "睊"), + (0x2F949, "M", "䀹"), + (0x2F94A, "M", "瞋"), + (0x2F94B, "M", "䁆"), + (0x2F94C, "M", "䂖"), + (0x2F94D, "M", "𥐝"), + (0x2F94E, "M", "硎"), + (0x2F94F, "M", "碌"), + (0x2F950, "M", "磌"), + (0x2F951, "M", "䃣"), + (0x2F952, "M", "𥘦"), + (0x2F953, "M", "祖"), + (0x2F954, "M", "𥚚"), + (0x2F955, "M", "𥛅"), + (0x2F956, "M", "福"), + (0x2F957, "M", "秫"), + (0x2F958, "M", "䄯"), + (0x2F959, "M", "穀"), + (0x2F95A, "M", "穊"), + (0x2F95B, "M", "穏"), + (0x2F95C, "M", "𥥼"), + (0x2F95D, "M", "𥪧"), + (0x2F95F, "X"), + (0x2F960, "M", "䈂"), + (0x2F961, "M", "𥮫"), + (0x2F962, "M", "篆"), + (0x2F963, "M", "築"), + (0x2F964, "M", "䈧"), + (0x2F965, "M", "𥲀"), + (0x2F966, "M", "糒"), + (0x2F967, "M", "䊠"), + (0x2F968, "M", "糨"), + (0x2F969, "M", "糣"), + (0x2F96A, "M", "紀"), + (0x2F96B, "M", "𥾆"), + (0x2F96C, "M", "絣"), + (0x2F96D, "M", "䌁"), + (0x2F96E, "M", "緇"), + (0x2F96F, "M", "縂"), + (0x2F970, "M", "繅"), + (0x2F971, "M", "䌴"), + (0x2F972, "M", "𦈨"), + (0x2F973, "M", "𦉇"), + ] + + +def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F974, "M", "䍙"), + (0x2F975, "M", "𦋙"), + (0x2F976, "M", "罺"), + (0x2F977, "M", "𦌾"), + (0x2F978, "M", "羕"), + (0x2F979, "M", "翺"), + (0x2F97A, "M", "者"), + (0x2F97B, "M", "𦓚"), + (0x2F97C, "M", "𦔣"), + (0x2F97D, "M", "聠"), + (0x2F97E, "M", "𦖨"), + (0x2F97F, "M", "聰"), + (0x2F980, "M", "𣍟"), + (0x2F981, "M", "䏕"), + (0x2F982, "M", "育"), + (0x2F983, "M", "脃"), + (0x2F984, "M", "䐋"), + (0x2F985, "M", "脾"), + (0x2F986, "M", 
"媵"), + (0x2F987, "M", "𦞧"), + (0x2F988, "M", "𦞵"), + (0x2F989, "M", "𣎓"), + (0x2F98A, "M", "𣎜"), + (0x2F98B, "M", "舁"), + (0x2F98C, "M", "舄"), + (0x2F98D, "M", "辞"), + (0x2F98E, "M", "䑫"), + (0x2F98F, "M", "芑"), + (0x2F990, "M", "芋"), + (0x2F991, "M", "芝"), + (0x2F992, "M", "劳"), + (0x2F993, "M", "花"), + (0x2F994, "M", "芳"), + (0x2F995, "M", "芽"), + (0x2F996, "M", "苦"), + (0x2F997, "M", "𦬼"), + (0x2F998, "M", "若"), + (0x2F999, "M", "茝"), + (0x2F99A, "M", "荣"), + (0x2F99B, "M", "莭"), + (0x2F99C, "M", "茣"), + (0x2F99D, "M", "莽"), + (0x2F99E, "M", "菧"), + (0x2F99F, "M", "著"), + (0x2F9A0, "M", "荓"), + (0x2F9A1, "M", "菊"), + (0x2F9A2, "M", "菌"), + (0x2F9A3, "M", "菜"), + (0x2F9A4, "M", "𦰶"), + (0x2F9A5, "M", "𦵫"), + (0x2F9A6, "M", "𦳕"), + (0x2F9A7, "M", "䔫"), + (0x2F9A8, "M", "蓱"), + (0x2F9A9, "M", "蓳"), + (0x2F9AA, "M", "蔖"), + (0x2F9AB, "M", "𧏊"), + (0x2F9AC, "M", "蕤"), + (0x2F9AD, "M", "𦼬"), + (0x2F9AE, "M", "䕝"), + (0x2F9AF, "M", "䕡"), + (0x2F9B0, "M", "𦾱"), + (0x2F9B1, "M", "𧃒"), + (0x2F9B2, "M", "䕫"), + (0x2F9B3, "M", "虐"), + (0x2F9B4, "M", "虜"), + (0x2F9B5, "M", "虧"), + (0x2F9B6, "M", "虩"), + (0x2F9B7, "M", "蚩"), + (0x2F9B8, "M", "蚈"), + (0x2F9B9, "M", "蜎"), + (0x2F9BA, "M", "蛢"), + (0x2F9BB, "M", "蝹"), + (0x2F9BC, "M", "蜨"), + (0x2F9BD, "M", "蝫"), + (0x2F9BE, "M", "螆"), + (0x2F9BF, "X"), + (0x2F9C0, "M", "蟡"), + (0x2F9C1, "M", "蠁"), + (0x2F9C2, "M", "䗹"), + (0x2F9C3, "M", "衠"), + (0x2F9C4, "M", "衣"), + (0x2F9C5, "M", "𧙧"), + (0x2F9C6, "M", "裗"), + (0x2F9C7, "M", "裞"), + (0x2F9C8, "M", "䘵"), + (0x2F9C9, "M", "裺"), + (0x2F9CA, "M", "㒻"), + (0x2F9CB, "M", "𧢮"), + (0x2F9CC, "M", "𧥦"), + (0x2F9CD, "M", "䚾"), + (0x2F9CE, "M", "䛇"), + (0x2F9CF, "M", "誠"), + (0x2F9D0, "M", "諭"), + (0x2F9D1, "M", "變"), + (0x2F9D2, "M", "豕"), + (0x2F9D3, "M", "𧲨"), + (0x2F9D4, "M", "貫"), + (0x2F9D5, "M", "賁"), + (0x2F9D6, "M", "贛"), + (0x2F9D7, "M", "起"), + ] + + +def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F9D8, "M", "𧼯"), + (0x2F9D9, "M", "𠠄"), + (0x2F9DA, "M", "跋"), + (0x2F9DB, "M", "趼"), + (0x2F9DC, "M", "跰"), + (0x2F9DD, "M", "𠣞"), + (0x2F9DE, "M", "軔"), + (0x2F9DF, "M", "輸"), + (0x2F9E0, "M", "𨗒"), + (0x2F9E1, "M", "𨗭"), + (0x2F9E2, "M", "邔"), + (0x2F9E3, "M", "郱"), + (0x2F9E4, "M", "鄑"), + (0x2F9E5, "M", "𨜮"), + (0x2F9E6, "M", "鄛"), + (0x2F9E7, "M", "鈸"), + (0x2F9E8, "M", "鋗"), + (0x2F9E9, "M", "鋘"), + (0x2F9EA, "M", "鉼"), + (0x2F9EB, "M", "鏹"), + (0x2F9EC, "M", "鐕"), + (0x2F9ED, "M", "𨯺"), + (0x2F9EE, "M", "開"), + (0x2F9EF, "M", "䦕"), + (0x2F9F0, "M", "閷"), + (0x2F9F1, "M", "𨵷"), + (0x2F9F2, "M", "䧦"), + (0x2F9F3, "M", "雃"), + (0x2F9F4, "M", "嶲"), + (0x2F9F5, "M", "霣"), + (0x2F9F6, "M", "𩅅"), + (0x2F9F7, "M", "𩈚"), + (0x2F9F8, "M", "䩮"), + (0x2F9F9, "M", "䩶"), + (0x2F9FA, "M", "韠"), + (0x2F9FB, "M", "𩐊"), + (0x2F9FC, "M", "䪲"), + (0x2F9FD, "M", "𩒖"), + (0x2F9FE, "M", "頋"), + (0x2FA00, "M", "頩"), + (0x2FA01, "M", "𩖶"), + (0x2FA02, "M", "飢"), + (0x2FA03, "M", "䬳"), + (0x2FA04, "M", "餩"), + (0x2FA05, "M", "馧"), + (0x2FA06, "M", "駂"), + (0x2FA07, "M", "駾"), + (0x2FA08, "M", "䯎"), + (0x2FA09, "M", "𩬰"), + (0x2FA0A, "M", "鬒"), + (0x2FA0B, "M", "鱀"), + (0x2FA0C, "M", "鳽"), + (0x2FA0D, "M", "䳎"), + (0x2FA0E, "M", "䳭"), + (0x2FA0F, "M", "鵧"), + (0x2FA10, "M", "𪃎"), + (0x2FA11, "M", "䳸"), + (0x2FA12, "M", "𪄅"), + (0x2FA13, "M", "𪈎"), + (0x2FA14, "M", "𪊑"), + (0x2FA15, "M", "麻"), + (0x2FA16, "M", "䵖"), + (0x2FA17, "M", "黹"), + (0x2FA18, "M", "黾"), + (0x2FA19, "M", "鼅"), + (0x2FA1A, "M", "鼏"), + (0x2FA1B, "M", "鼖"), + (0x2FA1C, "M", "鼻"), + (0x2FA1D, "M", "𪘀"), + (0x2FA1E, 
"X"), + (0x30000, "V"), + (0x3134B, "X"), + (0x31350, "V"), + (0x323B0, "X"), + (0xE0100, "I"), + (0xE01F0, "X"), + ] + + +uts46data = tuple( + _seg_0() + + _seg_1() + + _seg_2() + + _seg_3() + + _seg_4() + + _seg_5() + + _seg_6() + + _seg_7() + + _seg_8() + + _seg_9() + + _seg_10() + + _seg_11() + + _seg_12() + + _seg_13() + + _seg_14() + + _seg_15() + + _seg_16() + + _seg_17() + + _seg_18() + + _seg_19() + + _seg_20() + + _seg_21() + + _seg_22() + + _seg_23() + + _seg_24() + + _seg_25() + + _seg_26() + + _seg_27() + + _seg_28() + + _seg_29() + + _seg_30() + + _seg_31() + + _seg_32() + + _seg_33() + + _seg_34() + + _seg_35() + + _seg_36() + + _seg_37() + + _seg_38() + + _seg_39() + + _seg_40() + + _seg_41() + + _seg_42() + + _seg_43() + + _seg_44() + + _seg_45() + + _seg_46() + + _seg_47() + + _seg_48() + + _seg_49() + + _seg_50() + + _seg_51() + + _seg_52() + + _seg_53() + + _seg_54() + + _seg_55() + + _seg_56() + + _seg_57() + + _seg_58() + + _seg_59() + + _seg_60() + + _seg_61() + + _seg_62() + + _seg_63() + + _seg_64() + + _seg_65() + + _seg_66() + + _seg_67() + + _seg_68() + + _seg_69() + + _seg_70() + + _seg_71() + + _seg_72() + + _seg_73() + + _seg_74() + + _seg_75() + + _seg_76() + + _seg_77() + + _seg_78() + + _seg_79() + + _seg_80() + + _seg_81() +) # type: Tuple[Union[Tuple[int, str], Tuple[int, str, str]], ...] diff --git a/venv/lib/python3.11/site-packages/iniconfig-2.1.0.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/iniconfig-2.1.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.11/site-packages/iniconfig-2.1.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.11/site-packages/iniconfig-2.1.0.dist-info/METADATA b/venv/lib/python3.11/site-packages/iniconfig-2.1.0.dist-info/METADATA new file mode 100644 index 0000000..3a8ef46 --- /dev/null +++ b/venv/lib/python3.11/site-packages/iniconfig-2.1.0.dist-info/METADATA @@ -0,0 +1,81 @@ +Metadata-Version: 2.4 +Name: iniconfig +Version: 2.1.0 +Summary: brain-dead simple config-ini parsing +Project-URL: Homepage, https://github.com/pytest-dev/iniconfig +Author-email: Ronny Pfannschmidt , Holger Krekel +License-Expression: MIT +License-File: LICENSE +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Utilities +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst + +iniconfig: brain-dead simple parsing of ini files +======================================================= + +iniconfig is a small and simple INI-file parser module +having a unique set of features: + +* maintains order of sections and entries +* supports multi-line values with or without line-continuations +* supports "#" comments everywhere +* raises errors with proper line-numbers +* no bells and whistles like automatic substitutions +* iniconfig raises an Error if two 
sections have the same name. + +If you encounter issues or have feature wishes please report them to: + + https://github.com/RonnyPfannschmidt/iniconfig/issues + +Basic Example +=================================== + +If you have an ini file like this: + +.. code-block:: ini + + # content of example.ini + [section1] # comment + name1=value1 # comment + name1b=value1,value2 # comment + + [section2] + name2= + line1 + line2 + +then you can do: + +.. code-block:: pycon + + >>> import iniconfig + >>> ini = iniconfig.IniConfig("example.ini") + >>> ini['section1']['name1'] # raises KeyError if not exists + 'value1' + >>> ini.get('section1', 'name1b', [], lambda x: x.split(",")) + ['value1', 'value2'] + >>> ini.get('section1', 'notexist', [], lambda x: x.split(",")) + [] + >>> [x.name for x in list(ini)] + ['section1', 'section2'] + >>> list(list(ini)[0].items()) + [('name1', 'value1'), ('name1b', 'value1,value2')] + >>> 'section1' in ini + True + >>> 'inexistendsection' in ini + False diff --git a/venv/lib/python3.11/site-packages/iniconfig-2.1.0.dist-info/RECORD b/venv/lib/python3.11/site-packages/iniconfig-2.1.0.dist-info/RECORD new file mode 100644 index 0000000..f3fefc2 --- /dev/null +++ b/venv/lib/python3.11/site-packages/iniconfig-2.1.0.dist-info/RECORD @@ -0,0 +1,14 @@ +iniconfig-2.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +iniconfig-2.1.0.dist-info/METADATA,sha256=uS-Ec4h2hMZZFTrbd_4EGKcxBQHnQ3CfwSYjzQPn5cs,2651 +iniconfig-2.1.0.dist-info/RECORD,, +iniconfig-2.1.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87 +iniconfig-2.1.0.dist-info/licenses/LICENSE,sha256=NAn6kfes5VeJRjJnZlbjImT-XvdYFTVyXcmiN3RVG9Q,1098 +iniconfig/__init__.py,sha256=H1UqjEmX-GytGCsqCafTLG-q1CPc_okvCKGairRFMq0,5462 +iniconfig/__pycache__/__init__.cpython-311.pyc,, +iniconfig/__pycache__/_parse.cpython-311.pyc,, +iniconfig/__pycache__/_version.cpython-311.pyc,, +iniconfig/__pycache__/exceptions.cpython-311.pyc,, +iniconfig/_parse.py,sha256=OWGLbmE8GjxcoMWTvnGbck1RoNsTm5bt5ficIRZqWJ8,2436 +iniconfig/_version.py,sha256=dseuoOPG9WZ1Ezr1SC3wS9_hczkX-b1NdE4TQPHFJso,511 +iniconfig/exceptions.py,sha256=BJguifCkPayz-n0hI2D5ym1USoAWYNIdi05Jc4r2r4o,490 +iniconfig/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.11/site-packages/iniconfig-2.1.0.dist-info/WHEEL b/venv/lib/python3.11/site-packages/iniconfig-2.1.0.dist-info/WHEEL new file mode 100644 index 0000000..12228d4 --- /dev/null +++ b/venv/lib/python3.11/site-packages/iniconfig-2.1.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.27.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.11/site-packages/iniconfig-2.1.0.dist-info/licenses/LICENSE b/venv/lib/python3.11/site-packages/iniconfig-2.1.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000..46f4b28 --- /dev/null +++ b/venv/lib/python3.11/site-packages/iniconfig-2.1.0.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2010 - 2023 Holger Krekel and others + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this 
permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/venv/lib/python3.11/site-packages/iniconfig/__init__.py b/venv/lib/python3.11/site-packages/iniconfig/__init__.py new file mode 100644 index 0000000..3c40bc9 --- /dev/null +++ b/venv/lib/python3.11/site-packages/iniconfig/__init__.py @@ -0,0 +1,216 @@ +""" brain-dead simple parser for ini-style files. +(C) Ronny Pfannschmidt, Holger Krekel -- MIT licensed +""" +from __future__ import annotations +from typing import ( + Callable, + Iterator, + Mapping, + Optional, + Tuple, + TypeVar, + Union, + TYPE_CHECKING, + NoReturn, + NamedTuple, + overload, + cast, +) + +import os + +if TYPE_CHECKING: + from typing import Final + +__all__ = ["IniConfig", "ParseError", "COMMENTCHARS", "iscommentline"] + +from .exceptions import ParseError +from . import _parse +from ._parse import COMMENTCHARS, iscommentline + +_D = TypeVar("_D") +_T = TypeVar("_T") + + +class SectionWrapper: + config: Final[IniConfig] + name: Final[str] + + def __init__(self, config: IniConfig, name: str) -> None: + self.config = config + self.name = name + + def lineof(self, name: str) -> int | None: + return self.config.lineof(self.name, name) + + @overload + def get(self, key: str) -> str | None: + ... + + @overload + def get( + self, + key: str, + convert: Callable[[str], _T], + ) -> _T | None: + ... + + @overload + def get( + self, + key: str, + default: None, + convert: Callable[[str], _T], + ) -> _T | None: + ... + + @overload + def get(self, key: str, default: _D, convert: None = None) -> str | _D: + ... + + @overload + def get( + self, + key: str, + default: _D, + convert: Callable[[str], _T], + ) -> _T | _D: + ... 
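+
+    # Why the overloads above: they let a type checker infer the return type
+    # of ``get`` from which arguments are passed. A minimal sketch, assuming
+    # the ``example.ini`` from the METADATA above has been parsed as
+    # ``ini = IniConfig("example.ini")``:
+    #
+    #     section = ini["section1"]
+    #     section.get("name1")                                    # str | None
+    #     section.get("missing", default="fallback")              # str
+    #     section.get("name1b", convert=lambda s: s.split(","))   # list[str] | None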
+ + # TODO: investigate possible mypy bug wrt matching the passed over data + def get( # type: ignore [misc] + self, + key: str, + default: _D | None = None, + convert: Callable[[str], _T] | None = None, + ) -> _D | _T | str | None: + return self.config.get(self.name, key, convert=convert, default=default) + + def __getitem__(self, key: str) -> str: + return self.config.sections[self.name][key] + + def __iter__(self) -> Iterator[str]: + section: Mapping[str, str] = self.config.sections.get(self.name, {}) + + def lineof(key: str) -> int: + return self.config.lineof(self.name, key) # type: ignore[return-value] + + yield from sorted(section, key=lineof) + + def items(self) -> Iterator[tuple[str, str]]: + for name in self: + yield name, self[name] + + +class IniConfig: + path: Final[str] + sections: Final[Mapping[str, Mapping[str, str]]] + + def __init__( + self, + path: str | os.PathLike[str], + data: str | None = None, + encoding: str = "utf-8", + ) -> None: + self.path = os.fspath(path) + if data is None: + with open(self.path, encoding=encoding) as fp: + data = fp.read() + + tokens = _parse.parse_lines(self.path, data.splitlines(True)) + + self._sources = {} + sections_data: dict[str, dict[str, str]] + self.sections = sections_data = {} + + for lineno, section, name, value in tokens: + if section is None: + raise ParseError(self.path, lineno, "no section header defined") + self._sources[section, name] = lineno + if name is None: + if section in self.sections: + raise ParseError( + self.path, lineno, f"duplicate section {section!r}" + ) + sections_data[section] = {} + else: + if name in self.sections[section]: + raise ParseError(self.path, lineno, f"duplicate name {name!r}") + assert value is not None + sections_data[section][name] = value + + def lineof(self, section: str, name: str | None = None) -> int | None: + lineno = self._sources.get((section, name)) + return None if lineno is None else lineno + 1 + + @overload + def get( + self, + section: str, + name: str, + ) -> str | None: + ... + + @overload + def get( + self, + section: str, + name: str, + convert: Callable[[str], _T], + ) -> _T | None: + ... + + @overload + def get( + self, + section: str, + name: str, + default: None, + convert: Callable[[str], _T], + ) -> _T | None: + ... + + @overload + def get( + self, section: str, name: str, default: _D, convert: None = None + ) -> str | _D: + ... + + @overload + def get( + self, + section: str, + name: str, + default: _D, + convert: Callable[[str], _T], + ) -> _T | _D: + ... 
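+
+    # Same pattern as SectionWrapper.get above, one level up: the overloads
+    # drive the inference for section+name lookups. A minimal sketch, again
+    # assuming the ``example.ini`` shown in the METADATA above:
+    #
+    #     ini.get("section1", "name1")                                 # 'value1'
+    #     ini.get("section1", "name1b", [], lambda x: x.split(","))    # ['value1', 'value2']
+    #     ini.get("section1", "notexist", [], lambda x: x.split(","))  # []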
+ + def get( # type: ignore + self, + section: str, + name: str, + default: _D | None = None, + convert: Callable[[str], _T] | None = None, + ) -> _D | _T | str | None: + try: + value: str = self.sections[section][name] + except KeyError: + return default + else: + if convert is not None: + return convert(value) + else: + return value + + def __getitem__(self, name: str) -> SectionWrapper: + if name not in self.sections: + raise KeyError(name) + return SectionWrapper(self, name) + + def __iter__(self) -> Iterator[SectionWrapper]: + for name in sorted(self.sections, key=self.lineof): # type: ignore + yield SectionWrapper(self, name) + + def __contains__(self, arg: str) -> bool: + return arg in self.sections diff --git a/venv/lib/python3.11/site-packages/iniconfig/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/iniconfig/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..04d3b12 Binary files /dev/null and b/venv/lib/python3.11/site-packages/iniconfig/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/iniconfig/__pycache__/_parse.cpython-311.pyc b/venv/lib/python3.11/site-packages/iniconfig/__pycache__/_parse.cpython-311.pyc new file mode 100644 index 0000000..9badec9 Binary files /dev/null and b/venv/lib/python3.11/site-packages/iniconfig/__pycache__/_parse.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/iniconfig/__pycache__/_version.cpython-311.pyc b/venv/lib/python3.11/site-packages/iniconfig/__pycache__/_version.cpython-311.pyc new file mode 100644 index 0000000..5d58063 Binary files /dev/null and b/venv/lib/python3.11/site-packages/iniconfig/__pycache__/_version.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/iniconfig/__pycache__/exceptions.cpython-311.pyc b/venv/lib/python3.11/site-packages/iniconfig/__pycache__/exceptions.cpython-311.pyc new file mode 100644 index 0000000..4c7f125 Binary files /dev/null and b/venv/lib/python3.11/site-packages/iniconfig/__pycache__/exceptions.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/iniconfig/_parse.py b/venv/lib/python3.11/site-packages/iniconfig/_parse.py new file mode 100644 index 0000000..2d03437 --- /dev/null +++ b/venv/lib/python3.11/site-packages/iniconfig/_parse.py @@ -0,0 +1,82 @@ +from __future__ import annotations +from .exceptions import ParseError + +from typing import NamedTuple + + +COMMENTCHARS = "#;" + + +class _ParsedLine(NamedTuple): + lineno: int + section: str | None + name: str | None + value: str | None + + +def parse_lines(path: str, line_iter: list[str]) -> list[_ParsedLine]: + result: list[_ParsedLine] = [] + section = None + for lineno, line in enumerate(line_iter): + name, data = _parseline(path, line, lineno) + # new value + if name is not None and data is not None: + result.append(_ParsedLine(lineno, section, name, data)) + # new section + elif name is not None and data is None: + if not name: + raise ParseError(path, lineno, "empty section name") + section = name + result.append(_ParsedLine(lineno, section, None, None)) + # continuation + elif name is None and data is not None: + if not result: + raise ParseError(path, lineno, "unexpected value continuation") + last = result.pop() + if last.name is None: + raise ParseError(path, lineno, "unexpected value continuation") + + if last.value: + last = last._replace(value=f"{last.value}\n{data}") + else: + last = last._replace(value=data) + result.append(last) + return result + + +def _parseline(path: str, line: str, 
lineno: int) -> tuple[str | None, str | None]: + # blank lines + if iscommentline(line): + line = "" + else: + line = line.rstrip() + if not line: + return None, None + # section + if line[0] == "[": + realline = line + for c in COMMENTCHARS: + line = line.split(c)[0].rstrip() + if line[-1] == "]": + return line[1:-1], None + return None, realline.strip() + # value + elif not line[0].isspace(): + try: + name, value = line.split("=", 1) + if ":" in name: + raise ValueError() + except ValueError: + try: + name, value = line.split(":", 1) + except ValueError: + raise ParseError(path, lineno, "unexpected line: %r" % line) + return name.strip(), value.strip() + # continuation + else: + return None, line.strip() + + +def iscommentline(line: str) -> bool: + c = line.lstrip()[:1] + return c in COMMENTCHARS diff --git a/venv/lib/python3.11/site-packages/iniconfig/_version.py b/venv/lib/python3.11/site-packages/iniconfig/_version.py new file mode 100644 index 0000000..e058e2c --- /dev/null +++ b/venv/lib/python3.11/site-packages/iniconfig/_version.py @@ -0,0 +1,21 @@ +# file generated by setuptools-scm +# don't change, don't track in version control + +__all__ = ["__version__", "__version_tuple__", "version", "version_tuple"] + +TYPE_CHECKING = False +if TYPE_CHECKING: + from typing import Tuple + from typing import Union + + VERSION_TUPLE = Tuple[Union[int, str], ...] +else: + VERSION_TUPLE = object + +version: str +__version__: str +__version_tuple__: VERSION_TUPLE +version_tuple: VERSION_TUPLE + +__version__ = version = '2.1.0' +__version_tuple__ = version_tuple = (2, 1, 0) diff --git a/venv/lib/python3.11/site-packages/iniconfig/exceptions.py b/venv/lib/python3.11/site-packages/iniconfig/exceptions.py new file mode 100644 index 0000000..8c4dc9a --- /dev/null +++ b/venv/lib/python3.11/site-packages/iniconfig/exceptions.py @@ -0,0 +1,20 @@ +from __future__ import annotations +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Final + + +class ParseError(Exception): + path: Final[str] + lineno: Final[int] + msg: Final[str] + + def __init__(self, path: str, lineno: int, msg: str) -> None: + super().__init__(path, lineno, msg) + self.path = path + self.lineno = lineno + self.msg = msg + + def __str__(self) -> str: + return f"{self.path}:{self.lineno + 1}: {self.msg}" diff --git a/venv/lib/python3.11/site-packages/iniconfig/py.typed b/venv/lib/python3.11/site-packages/iniconfig/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.11/site-packages/itsdangerous-2.2.0.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/itsdangerous-2.2.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.11/site-packages/itsdangerous-2.2.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.11/site-packages/itsdangerous-2.2.0.dist-info/LICENSE.txt b/venv/lib/python3.11/site-packages/itsdangerous-2.2.0.dist-info/LICENSE.txt new file mode 100644 index 0000000..7b190ca --- /dev/null +++ b/venv/lib/python3.11/site-packages/itsdangerous-2.2.0.dist-info/LICENSE.txt @@ -0,0 +1,28 @@ +Copyright 2011 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. 
Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.11/site-packages/itsdangerous-2.2.0.dist-info/METADATA b/venv/lib/python3.11/site-packages/itsdangerous-2.2.0.dist-info/METADATA new file mode 100644 index 0000000..ddf5464 --- /dev/null +++ b/venv/lib/python3.11/site-packages/itsdangerous-2.2.0.dist-info/METADATA @@ -0,0 +1,60 @@ +Metadata-Version: 2.1 +Name: itsdangerous +Version: 2.2.0 +Summary: Safely pass data to untrusted environments and back. +Maintainer-email: Pallets +Requires-Python: >=3.8 +Description-Content-Type: text/markdown +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Typing :: Typed +Project-URL: Changes, https://itsdangerous.palletsprojects.com/changes/ +Project-URL: Chat, https://discord.gg/pallets +Project-URL: Documentation, https://itsdangerous.palletsprojects.com/ +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Source, https://github.com/pallets/itsdangerous/ + +# ItsDangerous + +... so better sign this + +Various helpers to pass data to untrusted environments and to get it +back safe and sound. Data is cryptographically signed to ensure that a +token has not been tampered with. + +It's possible to customize how data is serialized. Data is compressed as +needed. A timestamp can be added and verified automatically while +loading a token. + + +## A Simple Example + +Here's how you could generate a token for transmitting a user's id and +name between web requests. + +```python +from itsdangerous import URLSafeSerializer +auth_s = URLSafeSerializer("secret key", "auth") +token = auth_s.dumps({"id": 5, "name": "itsdangerous"}) + +print(token) +# eyJpZCI6NSwibmFtZSI6Iml0c2Rhbmdlcm91cyJ9.6YP6T0BaO67XP--9UzTrmurXSmg + +data = auth_s.loads(token) +print(data["name"]) +# itsdangerous +``` + + +## Donate + +The Pallets organization develops and supports ItsDangerous and other +popular packages. In order to grow the community of contributors and +users, and allow the maintainers to devote more time to the projects, +[please donate today][]. 
+ +[please donate today]: https://palletsprojects.com/donate + diff --git a/venv/lib/python3.11/site-packages/itsdangerous-2.2.0.dist-info/RECORD b/venv/lib/python3.11/site-packages/itsdangerous-2.2.0.dist-info/RECORD new file mode 100644 index 0000000..2857bf9 --- /dev/null +++ b/venv/lib/python3.11/site-packages/itsdangerous-2.2.0.dist-info/RECORD @@ -0,0 +1,22 @@ +itsdangerous-2.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +itsdangerous-2.2.0.dist-info/LICENSE.txt,sha256=Y68JiRtr6K0aQlLtQ68PTvun_JSOIoNnvtfzxa4LCdc,1475 +itsdangerous-2.2.0.dist-info/METADATA,sha256=0rk0-1ZwihuU5DnwJVwPWoEI4yWOyCexih3JyZHblhE,1924 +itsdangerous-2.2.0.dist-info/RECORD,, +itsdangerous-2.2.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 +itsdangerous/__init__.py,sha256=4SK75sCe29xbRgQE1ZQtMHnKUuZYAf3bSpZOrff1IAY,1427 +itsdangerous/__pycache__/__init__.cpython-311.pyc,, +itsdangerous/__pycache__/_json.cpython-311.pyc,, +itsdangerous/__pycache__/encoding.cpython-311.pyc,, +itsdangerous/__pycache__/exc.cpython-311.pyc,, +itsdangerous/__pycache__/serializer.cpython-311.pyc,, +itsdangerous/__pycache__/signer.cpython-311.pyc,, +itsdangerous/__pycache__/timed.cpython-311.pyc,, +itsdangerous/__pycache__/url_safe.cpython-311.pyc,, +itsdangerous/_json.py,sha256=wPQGmge2yZ9328EHKF6gadGeyGYCJQKxtU-iLKE6UnA,473 +itsdangerous/encoding.py,sha256=wwTz5q_3zLcaAdunk6_vSoStwGqYWe307Zl_U87aRFM,1409 +itsdangerous/exc.py,sha256=Rr3exo0MRFEcPZltwecyK16VV1bE2K9_F1-d-ljcUn4,3201 +itsdangerous/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +itsdangerous/serializer.py,sha256=PmdwADLqkSyQLZ0jOKAgDsAW4k_H0TlA71Ei3z0C5aI,15601 +itsdangerous/signer.py,sha256=YO0CV7NBvHA6j549REHJFUjUojw2pHqwcUpQnU7yNYQ,9647 +itsdangerous/timed.py,sha256=6RvDMqNumGMxf0-HlpaZdN9PUQQmRvrQGplKhxuivUs,8083 +itsdangerous/url_safe.py,sha256=az4e5fXi_vs-YbWj8YZwn4wiVKfeD--GEKRT5Ueu4P4,2505 diff --git a/venv/lib/python3.11/site-packages/itsdangerous-2.2.0.dist-info/WHEEL b/venv/lib/python3.11/site-packages/itsdangerous-2.2.0.dist-info/WHEEL new file mode 100644 index 0000000..3b5e64b --- /dev/null +++ b/venv/lib/python3.11/site-packages/itsdangerous-2.2.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.9.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.11/site-packages/itsdangerous/__init__.py b/venv/lib/python3.11/site-packages/itsdangerous/__init__.py new file mode 100644 index 0000000..ea55256 --- /dev/null +++ b/venv/lib/python3.11/site-packages/itsdangerous/__init__.py @@ -0,0 +1,38 @@ +from __future__ import annotations + +import typing as t + +from .encoding import base64_decode as base64_decode +from .encoding import base64_encode as base64_encode +from .encoding import want_bytes as want_bytes +from .exc import BadData as BadData +from .exc import BadHeader as BadHeader +from .exc import BadPayload as BadPayload +from .exc import BadSignature as BadSignature +from .exc import BadTimeSignature as BadTimeSignature +from .exc import SignatureExpired as SignatureExpired +from .serializer import Serializer as Serializer +from .signer import HMACAlgorithm as HMACAlgorithm +from .signer import NoneAlgorithm as NoneAlgorithm +from .signer import Signer as Signer +from .timed import TimedSerializer as TimedSerializer +from .timed import TimestampSigner as TimestampSigner +from .url_safe import URLSafeSerializer as URLSafeSerializer +from .url_safe import URLSafeTimedSerializer as URLSafeTimedSerializer + + +def __getattr__(name: str) -> t.Any: + if 
name == "__version__": + import importlib.metadata + import warnings + + warnings.warn( + "The '__version__' attribute is deprecated and will be removed in" + " ItsDangerous 2.3. Use feature detection or" + " 'importlib.metadata.version(\"itsdangerous\")' instead.", + DeprecationWarning, + stacklevel=2, + ) + return importlib.metadata.version("itsdangerous") + + raise AttributeError(name) diff --git a/venv/lib/python3.11/site-packages/itsdangerous/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/itsdangerous/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..6f11d07 Binary files /dev/null and b/venv/lib/python3.11/site-packages/itsdangerous/__pycache__/__init__.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/itsdangerous/__pycache__/_json.cpython-311.pyc b/venv/lib/python3.11/site-packages/itsdangerous/__pycache__/_json.cpython-311.pyc new file mode 100644 index 0000000..997ebad Binary files /dev/null and b/venv/lib/python3.11/site-packages/itsdangerous/__pycache__/_json.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/itsdangerous/__pycache__/encoding.cpython-311.pyc b/venv/lib/python3.11/site-packages/itsdangerous/__pycache__/encoding.cpython-311.pyc new file mode 100644 index 0000000..94693e8 Binary files /dev/null and b/venv/lib/python3.11/site-packages/itsdangerous/__pycache__/encoding.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/itsdangerous/__pycache__/exc.cpython-311.pyc b/venv/lib/python3.11/site-packages/itsdangerous/__pycache__/exc.cpython-311.pyc new file mode 100644 index 0000000..d033abe Binary files /dev/null and b/venv/lib/python3.11/site-packages/itsdangerous/__pycache__/exc.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/itsdangerous/__pycache__/serializer.cpython-311.pyc b/venv/lib/python3.11/site-packages/itsdangerous/__pycache__/serializer.cpython-311.pyc new file mode 100644 index 0000000..0e5bd03 Binary files /dev/null and b/venv/lib/python3.11/site-packages/itsdangerous/__pycache__/serializer.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/itsdangerous/__pycache__/signer.cpython-311.pyc b/venv/lib/python3.11/site-packages/itsdangerous/__pycache__/signer.cpython-311.pyc new file mode 100644 index 0000000..64ad39a Binary files /dev/null and b/venv/lib/python3.11/site-packages/itsdangerous/__pycache__/signer.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/itsdangerous/__pycache__/timed.cpython-311.pyc b/venv/lib/python3.11/site-packages/itsdangerous/__pycache__/timed.cpython-311.pyc new file mode 100644 index 0000000..e731422 Binary files /dev/null and b/venv/lib/python3.11/site-packages/itsdangerous/__pycache__/timed.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/itsdangerous/__pycache__/url_safe.cpython-311.pyc b/venv/lib/python3.11/site-packages/itsdangerous/__pycache__/url_safe.cpython-311.pyc new file mode 100644 index 0000000..5d97e25 Binary files /dev/null and b/venv/lib/python3.11/site-packages/itsdangerous/__pycache__/url_safe.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/itsdangerous/_json.py b/venv/lib/python3.11/site-packages/itsdangerous/_json.py new file mode 100644 index 0000000..fc23fea --- /dev/null +++ b/venv/lib/python3.11/site-packages/itsdangerous/_json.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +import json as _json +import typing as t + + +class _CompactJSON: + """Wrapper around json module that strips 
whitespace.""" + + @staticmethod + def loads(payload: str | bytes) -> t.Any: + return _json.loads(payload) + + @staticmethod + def dumps(obj: t.Any, **kwargs: t.Any) -> str: + kwargs.setdefault("ensure_ascii", False) + kwargs.setdefault("separators", (",", ":")) + return _json.dumps(obj, **kwargs) diff --git a/venv/lib/python3.11/site-packages/itsdangerous/encoding.py b/venv/lib/python3.11/site-packages/itsdangerous/encoding.py new file mode 100644 index 0000000..f5ca80f --- /dev/null +++ b/venv/lib/python3.11/site-packages/itsdangerous/encoding.py @@ -0,0 +1,54 @@ +from __future__ import annotations + +import base64 +import string +import struct +import typing as t + +from .exc import BadData + + +def want_bytes( + s: str | bytes, encoding: str = "utf-8", errors: str = "strict" +) -> bytes: + if isinstance(s, str): + s = s.encode(encoding, errors) + + return s + + +def base64_encode(string: str | bytes) -> bytes: + """Base64 encode a string of bytes or text. The resulting bytes are + safe to use in URLs. + """ + string = want_bytes(string) + return base64.urlsafe_b64encode(string).rstrip(b"=") + + +def base64_decode(string: str | bytes) -> bytes: + """Base64 decode a URL-safe string of bytes or text. The result is + bytes. + """ + string = want_bytes(string, encoding="ascii", errors="ignore") + string += b"=" * (-len(string) % 4) + + try: + return base64.urlsafe_b64decode(string) + except (TypeError, ValueError) as e: + raise BadData("Invalid base64-encoded data") from e + + +# The alphabet used by base64.urlsafe_* +_base64_alphabet = f"{string.ascii_letters}{string.digits}-_=".encode("ascii") + +_int64_struct = struct.Struct(">Q") +_int_to_bytes = _int64_struct.pack +_bytes_to_int = t.cast("t.Callable[[bytes], tuple[int]]", _int64_struct.unpack) + + +def int_to_bytes(num: int) -> bytes: + return _int_to_bytes(num).lstrip(b"\x00") + + +def bytes_to_int(bytestr: bytes) -> int: + return _bytes_to_int(bytestr.rjust(8, b"\x00"))[0] diff --git a/venv/lib/python3.11/site-packages/itsdangerous/exc.py b/venv/lib/python3.11/site-packages/itsdangerous/exc.py new file mode 100644 index 0000000..a75adcd --- /dev/null +++ b/venv/lib/python3.11/site-packages/itsdangerous/exc.py @@ -0,0 +1,106 @@ +from __future__ import annotations + +import typing as t +from datetime import datetime + + +class BadData(Exception): + """Raised if bad data of any sort was encountered. This is the base + for all exceptions that ItsDangerous defines. + + .. versionadded:: 0.15 + """ + + def __init__(self, message: str): + super().__init__(message) + self.message = message + + def __str__(self) -> str: + return self.message + + +class BadSignature(BadData): + """Raised if a signature does not match.""" + + def __init__(self, message: str, payload: t.Any | None = None): + super().__init__(message) + + #: The payload that failed the signature test. In some + #: situations you might still want to inspect this, even if + #: you know it was tampered with. + #: + #: .. versionadded:: 0.14 + self.payload: t.Any | None = payload + + +class BadTimeSignature(BadSignature): + """Raised if a time-based signature is invalid. This is a subclass + of :class:`BadSignature`. + """ + + def __init__( + self, + message: str, + payload: t.Any | None = None, + date_signed: datetime | None = None, + ): + super().__init__(message, payload) + + #: If the signature expired this exposes the date of when the + #: signature was created. This can be helpful in order to + #: tell the user how long a link has been gone stale. + #: + #: .. 
versionchanged:: 2.0 + #: The datetime value is timezone-aware rather than naive. + #: + #: .. versionadded:: 0.14 + self.date_signed = date_signed + + +class SignatureExpired(BadTimeSignature): + """Raised if a signature timestamp is older than ``max_age``. This + is a subclass of :exc:`BadTimeSignature`. + """ + + +class BadHeader(BadSignature): + """Raised if a signed header is invalid in some form. This only + happens for serializers that have a header that goes with the + signature. + + .. versionadded:: 0.24 + """ + + def __init__( + self, + message: str, + payload: t.Any | None = None, + header: t.Any | None = None, + original_error: Exception | None = None, + ): + super().__init__(message, payload) + + #: If the header is actually available but just malformed it + #: might be stored here. + self.header: t.Any | None = header + + #: If available, the error that indicates why the payload was + #: not valid. This might be ``None``. + self.original_error: Exception | None = original_error + + +class BadPayload(BadData): + """Raised if a payload is invalid. This could happen if the payload + is loaded despite an invalid signature, or if there is a mismatch + between the serializer and deserializer. The original exception + that occurred during loading is stored on as :attr:`original_error`. + + .. versionadded:: 0.15 + """ + + def __init__(self, message: str, original_error: Exception | None = None): + super().__init__(message) + + #: If available, the error that indicates why the payload was + #: not valid. This might be ``None``. + self.original_error: Exception | None = original_error diff --git a/venv/lib/python3.11/site-packages/itsdangerous/py.typed b/venv/lib/python3.11/site-packages/itsdangerous/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.11/site-packages/itsdangerous/serializer.py b/venv/lib/python3.11/site-packages/itsdangerous/serializer.py new file mode 100644 index 0000000..5ddf387 --- /dev/null +++ b/venv/lib/python3.11/site-packages/itsdangerous/serializer.py @@ -0,0 +1,406 @@ +from __future__ import annotations + +import collections.abc as cabc +import json +import typing as t + +from .encoding import want_bytes +from .exc import BadPayload +from .exc import BadSignature +from .signer import _make_keys_list +from .signer import Signer + +if t.TYPE_CHECKING: + import typing_extensions as te + + # This should be either be str or bytes. To avoid having to specify the + # bound type, it falls back to a union if structural matching fails. + _TSerialized = te.TypeVar( + "_TSerialized", bound=t.Union[str, bytes], default=t.Union[str, bytes] + ) +else: + # Still available at runtime on Python < 3.13, but without the default. + _TSerialized = t.TypeVar("_TSerialized", bound=t.Union[str, bytes]) + + +class _PDataSerializer(t.Protocol[_TSerialized]): + def loads(self, payload: _TSerialized, /) -> t.Any: ... + # A signature with additional arguments is not handled correctly by type + # checkers right now, so an overload is used below for serializers that + # don't match this strict protocol. + def dumps(self, obj: t.Any, /) -> _TSerialized: ... + + +# Use TypeIs once it's available in typing_extensions or 3.13. 
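+# (The check below runs once per Serializer, at construction time; for
+# instance ``is_text_serializer(json)`` is True because ``json.dumps({})``
+# returns the text ``"{}"`` rather than bytes.)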
+def is_text_serializer( + serializer: _PDataSerializer[t.Any], +) -> te.TypeGuard[_PDataSerializer[str]]: + """Checks whether a serializer generates text or binary.""" + return isinstance(serializer.dumps({}), str) + + +class Serializer(t.Generic[_TSerialized]): + """A serializer wraps a :class:`~itsdangerous.signer.Signer` to + enable serializing and securely signing data other than bytes. It + can unsign to verify that the data hasn't been changed. + + The serializer provides :meth:`dumps` and :meth:`loads`, similar to + :mod:`json`, and by default uses :mod:`json` internally to serialize + the data to bytes. + + The secret key should be a random string of ``bytes`` and should not + be saved to code or version control. Different salts should be used + to distinguish signing in different contexts. See :doc:`/concepts` + for information about the security of the secret key and salt. + + :param secret_key: The secret key to sign and verify with. Can be a + list of keys, oldest to newest, to support key rotation. + :param salt: Extra key to combine with ``secret_key`` to distinguish + signatures in different contexts. + :param serializer: An object that provides ``dumps`` and ``loads`` + methods for serializing data to a string. Defaults to + :attr:`default_serializer`, which defaults to :mod:`json`. + :param serializer_kwargs: Keyword arguments to pass when calling + ``serializer.dumps``. + :param signer: A ``Signer`` class to instantiate when signing data. + Defaults to :attr:`default_signer`, which defaults to + :class:`~itsdangerous.signer.Signer`. + :param signer_kwargs: Keyword arguments to pass when instantiating + the ``Signer`` class. + :param fallback_signers: List of signer parameters to try when + unsigning with the default signer fails. Each item can be a dict + of ``signer_kwargs``, a ``Signer`` class, or a tuple of + ``(signer, signer_kwargs)``. Defaults to + :attr:`default_fallback_signers`. + + .. versionchanged:: 2.0 + Added support for key rotation by passing a list to + ``secret_key``. + + .. versionchanged:: 2.0 + Removed the default SHA-512 fallback signer from + ``default_fallback_signers``. + + .. versionchanged:: 1.1 + Added support for ``fallback_signers`` and configured a default + SHA-512 fallback. This fallback is for users who used the yanked + 1.0.0 release which defaulted to SHA-512. + + .. versionchanged:: 0.14 + The ``signer`` and ``signer_kwargs`` parameters were added to + the constructor. + """ + + #: The default serialization module to use to serialize data to a + #: string internally. The default is :mod:`json`, but can be changed + #: to any object that provides ``dumps`` and ``loads`` methods. + default_serializer: _PDataSerializer[t.Any] = json + + #: The default ``Signer`` class to instantiate when signing data. + #: The default is :class:`itsdangerous.signer.Signer`. + default_signer: type[Signer] = Signer + + #: The default fallback signers to try when unsigning fails. + default_fallback_signers: list[ + dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] + ] = [] + + # Serializer[str] if no data serializer is provided, or if it returns str. 
+ @t.overload + def __init__( + self: Serializer[str], + secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], + salt: str | bytes | None = b"itsdangerous", + serializer: None | _PDataSerializer[str] = None, + serializer_kwargs: dict[str, t.Any] | None = None, + signer: type[Signer] | None = None, + signer_kwargs: dict[str, t.Any] | None = None, + fallback_signers: list[ + dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] + ] + | None = None, + ): ... + + # Serializer[bytes] with a bytes data serializer positional argument. + @t.overload + def __init__( + self: Serializer[bytes], + secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], + salt: str | bytes | None, + serializer: _PDataSerializer[bytes], + serializer_kwargs: dict[str, t.Any] | None = None, + signer: type[Signer] | None = None, + signer_kwargs: dict[str, t.Any] | None = None, + fallback_signers: list[ + dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] + ] + | None = None, + ): ... + + # Serializer[bytes] with a bytes data serializer keyword argument. + @t.overload + def __init__( + self: Serializer[bytes], + secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], + salt: str | bytes | None = b"itsdangerous", + *, + serializer: _PDataSerializer[bytes], + serializer_kwargs: dict[str, t.Any] | None = None, + signer: type[Signer] | None = None, + signer_kwargs: dict[str, t.Any] | None = None, + fallback_signers: list[ + dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] + ] + | None = None, + ): ... + + # Fall back with a positional argument. If the strict signature of + # _PDataSerializer doesn't match, fall back to a union, requiring the user + # to specify the type. + @t.overload + def __init__( + self, + secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], + salt: str | bytes | None, + serializer: t.Any, + serializer_kwargs: dict[str, t.Any] | None = None, + signer: type[Signer] | None = None, + signer_kwargs: dict[str, t.Any] | None = None, + fallback_signers: list[ + dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] + ] + | None = None, + ): ... + + # Fall back with a keyword argument. + @t.overload + def __init__( + self, + secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], + salt: str | bytes | None = b"itsdangerous", + *, + serializer: t.Any, + serializer_kwargs: dict[str, t.Any] | None = None, + signer: type[Signer] | None = None, + signer_kwargs: dict[str, t.Any] | None = None, + fallback_signers: list[ + dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] + ] + | None = None, + ): ... + + def __init__( + self, + secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], + salt: str | bytes | None = b"itsdangerous", + serializer: t.Any | None = None, + serializer_kwargs: dict[str, t.Any] | None = None, + signer: type[Signer] | None = None, + signer_kwargs: dict[str, t.Any] | None = None, + fallback_signers: list[ + dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] + ] + | None = None, + ): + #: The list of secret keys to try for verifying signatures, from + #: oldest to newest. The newest (last) key is used for signing. + #: + #: This allows a key rotation system to keep a list of allowed + #: keys and remove expired ones. 
+ self.secret_keys: list[bytes] = _make_keys_list(secret_key) + + if salt is not None: + salt = want_bytes(salt) + # if salt is None then the signer's default is used + + self.salt = salt + + if serializer is None: + serializer = self.default_serializer + + self.serializer: _PDataSerializer[_TSerialized] = serializer + self.is_text_serializer: bool = is_text_serializer(serializer) + + if signer is None: + signer = self.default_signer + + self.signer: type[Signer] = signer + self.signer_kwargs: dict[str, t.Any] = signer_kwargs or {} + + if fallback_signers is None: + fallback_signers = list(self.default_fallback_signers) + + self.fallback_signers: list[ + dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] + ] = fallback_signers + self.serializer_kwargs: dict[str, t.Any] = serializer_kwargs or {} + + @property + def secret_key(self) -> bytes: + """The newest (last) entry in the :attr:`secret_keys` list. This + is for compatibility from before key rotation support was added. + """ + return self.secret_keys[-1] + + def load_payload( + self, payload: bytes, serializer: _PDataSerializer[t.Any] | None = None + ) -> t.Any: + """Loads the encoded object. This function raises + :class:`.BadPayload` if the payload is not valid. The + ``serializer`` parameter can be used to override the serializer + stored on the class. The encoded ``payload`` should always be + bytes. + """ + if serializer is None: + use_serializer = self.serializer + is_text = self.is_text_serializer + else: + use_serializer = serializer + is_text = is_text_serializer(serializer) + + try: + if is_text: + return use_serializer.loads(payload.decode("utf-8")) # type: ignore[arg-type] + + return use_serializer.loads(payload) # type: ignore[arg-type] + except Exception as e: + raise BadPayload( + "Could not load the payload because an exception" + " occurred on unserializing the data.", + original_error=e, + ) from e + + def dump_payload(self, obj: t.Any) -> bytes: + """Dumps the encoded object. The return value is always bytes. + If the internal serializer returns text, the value will be + encoded as UTF-8. + """ + return want_bytes(self.serializer.dumps(obj, **self.serializer_kwargs)) + + def make_signer(self, salt: str | bytes | None = None) -> Signer: + """Creates a new instance of the signer to be used. The default + implementation uses the :class:`.Signer` base class. + """ + if salt is None: + salt = self.salt + + return self.signer(self.secret_keys, salt=salt, **self.signer_kwargs) + + def iter_unsigners(self, salt: str | bytes | None = None) -> cabc.Iterator[Signer]: + """Iterates over all signers to be tried for unsigning. Starts + with the configured signer, then constructs each signer + specified in ``fallback_signers``. + """ + if salt is None: + salt = self.salt + + yield self.make_signer(salt) + + for fallback in self.fallback_signers: + if isinstance(fallback, dict): + kwargs = fallback + fallback = self.signer + elif isinstance(fallback, tuple): + fallback, kwargs = fallback + else: + kwargs = self.signer_kwargs + + for secret_key in self.secret_keys: + yield fallback(secret_key, salt=salt, **kwargs) + + def dumps(self, obj: t.Any, salt: str | bytes | None = None) -> _TSerialized: + """Returns a signed string serialized with the internal + serializer. The return value can be either a byte or unicode + string depending on the format of the internal serializer. 
+ """ + payload = want_bytes(self.dump_payload(obj)) + rv = self.make_signer(salt).sign(payload) + + if self.is_text_serializer: + return rv.decode("utf-8") # type: ignore[return-value] + + return rv # type: ignore[return-value] + + def dump(self, obj: t.Any, f: t.IO[t.Any], salt: str | bytes | None = None) -> None: + """Like :meth:`dumps` but dumps into a file. The file handle has + to be compatible with what the internal serializer expects. + """ + f.write(self.dumps(obj, salt)) + + def loads( + self, s: str | bytes, salt: str | bytes | None = None, **kwargs: t.Any + ) -> t.Any: + """Reverse of :meth:`dumps`. Raises :exc:`.BadSignature` if the + signature validation fails. + """ + s = want_bytes(s) + last_exception = None + + for signer in self.iter_unsigners(salt): + try: + return self.load_payload(signer.unsign(s)) + except BadSignature as err: + last_exception = err + + raise t.cast(BadSignature, last_exception) + + def load(self, f: t.IO[t.Any], salt: str | bytes | None = None) -> t.Any: + """Like :meth:`loads` but loads from a file.""" + return self.loads(f.read(), salt) + + def loads_unsafe( + self, s: str | bytes, salt: str | bytes | None = None + ) -> tuple[bool, t.Any]: + """Like :meth:`loads` but without verifying the signature. This + is potentially very dangerous to use depending on how your + serializer works. The return value is ``(signature_valid, + payload)`` instead of just the payload. The first item will be a + boolean that indicates if the signature is valid. This function + never fails. + + Use it for debugging only and if you know that your serializer + module is not exploitable (for example, do not use it with a + pickle serializer). + + .. versionadded:: 0.15 + """ + return self._loads_unsafe_impl(s, salt) + + def _loads_unsafe_impl( + self, + s: str | bytes, + salt: str | bytes | None, + load_kwargs: dict[str, t.Any] | None = None, + load_payload_kwargs: dict[str, t.Any] | None = None, + ) -> tuple[bool, t.Any]: + """Low level helper function to implement :meth:`loads_unsafe` + in serializer subclasses. + """ + if load_kwargs is None: + load_kwargs = {} + + try: + return True, self.loads(s, salt=salt, **load_kwargs) + except BadSignature as e: + if e.payload is None: + return False, None + + if load_payload_kwargs is None: + load_payload_kwargs = {} + + try: + return ( + False, + self.load_payload(e.payload, **load_payload_kwargs), + ) + except BadPayload: + return False, None + + def load_unsafe( + self, f: t.IO[t.Any], salt: str | bytes | None = None + ) -> tuple[bool, t.Any]: + """Like :meth:`loads_unsafe` but loads from a file. + + .. versionadded:: 0.15 + """ + return self.loads_unsafe(f.read(), salt=salt) diff --git a/venv/lib/python3.11/site-packages/itsdangerous/signer.py b/venv/lib/python3.11/site-packages/itsdangerous/signer.py new file mode 100644 index 0000000..e324dc0 --- /dev/null +++ b/venv/lib/python3.11/site-packages/itsdangerous/signer.py @@ -0,0 +1,266 @@ +from __future__ import annotations + +import collections.abc as cabc +import hashlib +import hmac +import typing as t + +from .encoding import _base64_alphabet +from .encoding import base64_decode +from .encoding import base64_encode +from .encoding import want_bytes +from .exc import BadSignature + + +class SigningAlgorithm: + """Subclasses must implement :meth:`get_signature` to provide + signature generation functionality. 
+ """ + + def get_signature(self, key: bytes, value: bytes) -> bytes: + """Returns the signature for the given key and value.""" + raise NotImplementedError() + + def verify_signature(self, key: bytes, value: bytes, sig: bytes) -> bool: + """Verifies the given signature matches the expected + signature. + """ + return hmac.compare_digest(sig, self.get_signature(key, value)) + + +class NoneAlgorithm(SigningAlgorithm): + """Provides an algorithm that does not perform any signing and + returns an empty signature. + """ + + def get_signature(self, key: bytes, value: bytes) -> bytes: + return b"" + + +def _lazy_sha1(string: bytes = b"") -> t.Any: + """Don't access ``hashlib.sha1`` until runtime. FIPS builds may not include + SHA-1, in which case the import and use as a default would fail before the + developer can configure something else. + """ + return hashlib.sha1(string) + + +class HMACAlgorithm(SigningAlgorithm): + """Provides signature generation using HMACs.""" + + #: The digest method to use with the MAC algorithm. This defaults to + #: SHA1, but can be changed to any other function in the hashlib + #: module. + default_digest_method: t.Any = staticmethod(_lazy_sha1) + + def __init__(self, digest_method: t.Any = None): + if digest_method is None: + digest_method = self.default_digest_method + + self.digest_method: t.Any = digest_method + + def get_signature(self, key: bytes, value: bytes) -> bytes: + mac = hmac.new(key, msg=value, digestmod=self.digest_method) + return mac.digest() + + +def _make_keys_list( + secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], +) -> list[bytes]: + if isinstance(secret_key, (str, bytes)): + return [want_bytes(secret_key)] + + return [want_bytes(s) for s in secret_key] # pyright: ignore + + +class Signer: + """A signer securely signs bytes, then unsigns them to verify that + the value hasn't been changed. + + The secret key should be a random string of ``bytes`` and should not + be saved to code or version control. Different salts should be used + to distinguish signing in different contexts. See :doc:`/concepts` + for information about the security of the secret key and salt. + + :param secret_key: The secret key to sign and verify with. Can be a + list of keys, oldest to newest, to support key rotation. + :param salt: Extra key to combine with ``secret_key`` to distinguish + signatures in different contexts. + :param sep: Separator between the signature and value. + :param key_derivation: How to derive the signing key from the secret + key and salt. Possible values are ``concat``, ``django-concat``, + or ``hmac``. Defaults to :attr:`default_key_derivation`, which + defaults to ``django-concat``. + :param digest_method: Hash function to use when generating the HMAC + signature. Defaults to :attr:`default_digest_method`, which + defaults to :func:`hashlib.sha1`. Note that the security of the + hash alone doesn't apply when used intermediately in HMAC. + :param algorithm: A :class:`SigningAlgorithm` instance to use + instead of building a default :class:`HMACAlgorithm` with the + ``digest_method``. + + .. versionchanged:: 2.0 + Added support for key rotation by passing a list to + ``secret_key``. + + .. versionchanged:: 0.18 + ``algorithm`` was added as an argument to the class constructor. + + .. versionchanged:: 0.14 + ``key_derivation`` and ``digest_method`` were added as arguments + to the class constructor. + """ + + #: The default digest method to use for the signer. 
The default is + #: :func:`hashlib.sha1`, but can be changed to any :mod:`hashlib` or + #: compatible object. Note that the security of the hash alone + #: doesn't apply when used intermediately in HMAC. + #: + #: .. versionadded:: 0.14 + default_digest_method: t.Any = staticmethod(_lazy_sha1) + + #: The default scheme to use to derive the signing key from the + #: secret key and salt. The default is ``django-concat``. Possible + #: values are ``concat``, ``django-concat``, and ``hmac``. + #: + #: .. versionadded:: 0.14 + default_key_derivation: str = "django-concat" + + def __init__( + self, + secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], + salt: str | bytes | None = b"itsdangerous.Signer", + sep: str | bytes = b".", + key_derivation: str | None = None, + digest_method: t.Any | None = None, + algorithm: SigningAlgorithm | None = None, + ): + #: The list of secret keys to try for verifying signatures, from + #: oldest to newest. The newest (last) key is used for signing. + #: + #: This allows a key rotation system to keep a list of allowed + #: keys and remove expired ones. + self.secret_keys: list[bytes] = _make_keys_list(secret_key) + self.sep: bytes = want_bytes(sep) + + if self.sep in _base64_alphabet: + raise ValueError( + "The given separator cannot be used because it may be" + " contained in the signature itself. ASCII letters," + " digits, and '-_=' must not be used." + ) + + if salt is not None: + salt = want_bytes(salt) + else: + salt = b"itsdangerous.Signer" + + self.salt = salt + + if key_derivation is None: + key_derivation = self.default_key_derivation + + self.key_derivation: str = key_derivation + + if digest_method is None: + digest_method = self.default_digest_method + + self.digest_method: t.Any = digest_method + + if algorithm is None: + algorithm = HMACAlgorithm(self.digest_method) + + self.algorithm: SigningAlgorithm = algorithm + + @property + def secret_key(self) -> bytes: + """The newest (last) entry in the :attr:`secret_keys` list. This + is for compatibility from before key rotation support was added. + """ + return self.secret_keys[-1] + + def derive_key(self, secret_key: str | bytes | None = None) -> bytes: + """This method is called to derive the key. The default key + derivation choices can be overridden here. Key derivation is not + intended to be used as a security method to make a complex key + out of a short password. Instead you should use large random + secret keys. + + :param secret_key: A specific secret key to derive from. + Defaults to the last item in :attr:`secret_keys`. + + .. versionchanged:: 2.0 + Added the ``secret_key`` parameter. 
+ """ + if secret_key is None: + secret_key = self.secret_keys[-1] + else: + secret_key = want_bytes(secret_key) + + if self.key_derivation == "concat": + return t.cast(bytes, self.digest_method(self.salt + secret_key).digest()) + elif self.key_derivation == "django-concat": + return t.cast( + bytes, self.digest_method(self.salt + b"signer" + secret_key).digest() + ) + elif self.key_derivation == "hmac": + mac = hmac.new(secret_key, digestmod=self.digest_method) + mac.update(self.salt) + return mac.digest() + elif self.key_derivation == "none": + return secret_key + else: + raise TypeError("Unknown key derivation method") + + def get_signature(self, value: str | bytes) -> bytes: + """Returns the signature for the given value.""" + value = want_bytes(value) + key = self.derive_key() + sig = self.algorithm.get_signature(key, value) + return base64_encode(sig) + + def sign(self, value: str | bytes) -> bytes: + """Signs the given string.""" + value = want_bytes(value) + return value + self.sep + self.get_signature(value) + + def verify_signature(self, value: str | bytes, sig: str | bytes) -> bool: + """Verifies the signature for the given value.""" + try: + sig = base64_decode(sig) + except Exception: + return False + + value = want_bytes(value) + + for secret_key in reversed(self.secret_keys): + key = self.derive_key(secret_key) + + if self.algorithm.verify_signature(key, value, sig): + return True + + return False + + def unsign(self, signed_value: str | bytes) -> bytes: + """Unsigns the given string.""" + signed_value = want_bytes(signed_value) + + if self.sep not in signed_value: + raise BadSignature(f"No {self.sep!r} found in value") + + value, sig = signed_value.rsplit(self.sep, 1) + + if self.verify_signature(value, sig): + return value + + raise BadSignature(f"Signature {sig!r} does not match", payload=value) + + def validate(self, signed_value: str | bytes) -> bool: + """Only validates the given signed value. Returns ``True`` if + the signature exists and is valid. + """ + try: + self.unsign(signed_value) + return True + except BadSignature: + return False diff --git a/venv/lib/python3.11/site-packages/itsdangerous/timed.py b/venv/lib/python3.11/site-packages/itsdangerous/timed.py new file mode 100644 index 0000000..7384375 --- /dev/null +++ b/venv/lib/python3.11/site-packages/itsdangerous/timed.py @@ -0,0 +1,228 @@ +from __future__ import annotations + +import collections.abc as cabc +import time +import typing as t +from datetime import datetime +from datetime import timezone + +from .encoding import base64_decode +from .encoding import base64_encode +from .encoding import bytes_to_int +from .encoding import int_to_bytes +from .encoding import want_bytes +from .exc import BadSignature +from .exc import BadTimeSignature +from .exc import SignatureExpired +from .serializer import _TSerialized +from .serializer import Serializer +from .signer import Signer + + +class TimestampSigner(Signer): + """Works like the regular :class:`.Signer` but also records the time + of the signing and can be used to expire signatures. The + :meth:`unsign` method can raise :exc:`.SignatureExpired` if the + unsigning failed because the signature is expired. + """ + + def get_timestamp(self) -> int: + """Returns the current timestamp. The function must return an + integer. + """ + return int(time.time()) + + def timestamp_to_datetime(self, ts: int) -> datetime: + """Convert the timestamp from :meth:`get_timestamp` into an + aware :class`datetime.datetime` in UTC. + + .. 
versionchanged:: 2.0 + The timestamp is returned as a timezone-aware ``datetime`` + in UTC rather than a naive ``datetime`` assumed to be UTC. + """ + return datetime.fromtimestamp(ts, tz=timezone.utc) + + def sign(self, value: str | bytes) -> bytes: + """Signs the given string and also attaches time information.""" + value = want_bytes(value) + timestamp = base64_encode(int_to_bytes(self.get_timestamp())) + sep = want_bytes(self.sep) + value = value + sep + timestamp + return value + sep + self.get_signature(value) + + # Ignore overlapping signatures check, return_timestamp is the only + # parameter that affects the return type. + + @t.overload + def unsign( # type: ignore[overload-overlap] + self, + signed_value: str | bytes, + max_age: int | None = None, + return_timestamp: t.Literal[False] = False, + ) -> bytes: ... + + @t.overload + def unsign( + self, + signed_value: str | bytes, + max_age: int | None = None, + return_timestamp: t.Literal[True] = True, + ) -> tuple[bytes, datetime]: ... + + def unsign( + self, + signed_value: str | bytes, + max_age: int | None = None, + return_timestamp: bool = False, + ) -> tuple[bytes, datetime] | bytes: + """Works like the regular :meth:`.Signer.unsign` but can also + validate the time. See the base docstring of the class for + the general behavior. If ``return_timestamp`` is ``True`` the + timestamp of the signature will be returned as an aware + :class:`datetime.datetime` object in UTC. + + .. versionchanged:: 2.0 + The timestamp is returned as a timezone-aware ``datetime`` + in UTC rather than a naive ``datetime`` assumed to be UTC. + """ + try: + result = super().unsign(signed_value) + sig_error = None + except BadSignature as e: + sig_error = e + result = e.payload or b"" + + sep = want_bytes(self.sep) + + # If there is no timestamp in the result there is something + # seriously wrong. In case there was a signature error, we raise + # that one directly, otherwise we have a weird situation in + # which we shouldn't have come except someone uses a time-based + # serializer on non-timestamp data, so catch that. + if sep not in result: + if sig_error: + raise sig_error + + raise BadTimeSignature("timestamp missing", payload=result) + + value, ts_bytes = result.rsplit(sep, 1) + ts_int: int | None = None + ts_dt: datetime | None = None + + try: + ts_int = bytes_to_int(base64_decode(ts_bytes)) + except Exception: + pass + + # Signature is *not* okay. Raise a proper error now that we have + # split the value and the timestamp. + if sig_error is not None: + if ts_int is not None: + try: + ts_dt = self.timestamp_to_datetime(ts_int) + except (ValueError, OSError, OverflowError) as exc: + # Windows raises OSError + # 32-bit raises OverflowError + raise BadTimeSignature( + "Malformed timestamp", payload=value + ) from exc + + raise BadTimeSignature(str(sig_error), payload=value, date_signed=ts_dt) + + # Signature was okay but the timestamp is actually not there or + # malformed. Should not happen, but we handle it anyway. 
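+        # As a caller-side sketch (the key below is a placeholder), the
+        # checks that follow are what turn
+        #
+        #     TimestampSigner("secret-key").unsign(token, max_age=60)
+        #
+        # into SignatureExpired once the token is more than 60 seconds
+        # old, and into BadTimeSignature when the timestamp is missing
+        # or malformed.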
+ if ts_int is None: + raise BadTimeSignature("Malformed timestamp", payload=value) + + # Check timestamp is not older than max_age + if max_age is not None: + age = self.get_timestamp() - ts_int + + if age > max_age: + raise SignatureExpired( + f"Signature age {age} > {max_age} seconds", + payload=value, + date_signed=self.timestamp_to_datetime(ts_int), + ) + + if age < 0: + raise SignatureExpired( + f"Signature age {age} < 0 seconds", + payload=value, + date_signed=self.timestamp_to_datetime(ts_int), + ) + + if return_timestamp: + return value, self.timestamp_to_datetime(ts_int) + + return value + + def validate(self, signed_value: str | bytes, max_age: int | None = None) -> bool: + """Only validates the given signed value. Returns ``True`` if + the signature exists and is valid.""" + try: + self.unsign(signed_value, max_age=max_age) + return True + except BadSignature: + return False + + +class TimedSerializer(Serializer[_TSerialized]): + """Uses :class:`TimestampSigner` instead of the default + :class:`.Signer`. + """ + + default_signer: type[TimestampSigner] = TimestampSigner + + def iter_unsigners( + self, salt: str | bytes | None = None + ) -> cabc.Iterator[TimestampSigner]: + return t.cast("cabc.Iterator[TimestampSigner]", super().iter_unsigners(salt)) + + # TODO: Signature is incompatible because parameters were added + # before salt. + + def loads( # type: ignore[override] + self, + s: str | bytes, + max_age: int | None = None, + return_timestamp: bool = False, + salt: str | bytes | None = None, + ) -> t.Any: + """Reverse of :meth:`dumps`, raises :exc:`.BadSignature` if the + signature validation fails. If a ``max_age`` is provided it will + ensure the signature is not older than that time in seconds. In + case the signature is outdated, :exc:`.SignatureExpired` is + raised. All arguments are forwarded to the signer's + :meth:`~TimestampSigner.unsign` method. + """ + s = want_bytes(s) + last_exception = None + + for signer in self.iter_unsigners(salt): + try: + base64d, timestamp = signer.unsign( + s, max_age=max_age, return_timestamp=True + ) + payload = self.load_payload(base64d) + + if return_timestamp: + return payload, timestamp + + return payload + except SignatureExpired: + # The signature was unsigned successfully but was + # expired. Do not try the next signer. + raise + except BadSignature as err: + last_exception = err + + raise t.cast(BadSignature, last_exception) + + def loads_unsafe( # type: ignore[override] + self, + s: str | bytes, + max_age: int | None = None, + salt: str | bytes | None = None, + ) -> tuple[bool, t.Any]: + return self._loads_unsafe_impl(s, salt, load_kwargs={"max_age": max_age}) diff --git a/venv/lib/python3.11/site-packages/itsdangerous/url_safe.py b/venv/lib/python3.11/site-packages/itsdangerous/url_safe.py new file mode 100644 index 0000000..56a0793 --- /dev/null +++ b/venv/lib/python3.11/site-packages/itsdangerous/url_safe.py @@ -0,0 +1,83 @@ +from __future__ import annotations + +import typing as t +import zlib + +from ._json import _CompactJSON +from .encoding import base64_decode +from .encoding import base64_encode +from .exc import BadPayload +from .serializer import _PDataSerializer +from .serializer import Serializer +from .timed import TimedSerializer + + +class URLSafeSerializerMixin(Serializer[str]): + """Mixed in with a regular serializer it will attempt to zlib + compress the string to make it shorter if necessary. It will also + base64 encode the string so that it can safely be placed in a URL. 
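+
+    A short usage sketch (the secret key shown is a placeholder)::
+
+        s = URLSafeSerializer("secret-key")
+        token = s.dumps({"id": 5})  # compact, URL safe text
+        s.loads(token)              # {'id': 5}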
+ """ + + default_serializer: _PDataSerializer[str] = _CompactJSON + + def load_payload( + self, + payload: bytes, + *args: t.Any, + serializer: t.Any | None = None, + **kwargs: t.Any, + ) -> t.Any: + decompress = False + + if payload.startswith(b"."): + payload = payload[1:] + decompress = True + + try: + json = base64_decode(payload) + except Exception as e: + raise BadPayload( + "Could not base64 decode the payload because of an exception", + original_error=e, + ) from e + + if decompress: + try: + json = zlib.decompress(json) + except Exception as e: + raise BadPayload( + "Could not zlib decompress the payload before decoding the payload", + original_error=e, + ) from e + + return super().load_payload(json, *args, **kwargs) + + def dump_payload(self, obj: t.Any) -> bytes: + json = super().dump_payload(obj) + is_compressed = False + compressed = zlib.compress(json) + + if len(compressed) < (len(json) - 1): + json = compressed + is_compressed = True + + base64d = base64_encode(json) + + if is_compressed: + base64d = b"." + base64d + + return base64d + + +class URLSafeSerializer(URLSafeSerializerMixin, Serializer[str]): + """Works like :class:`.Serializer` but dumps and loads into a URL + safe string consisting of the upper and lowercase character of the + alphabet as well as ``'_'``, ``'-'`` and ``'.'``. + """ + + +class URLSafeTimedSerializer(URLSafeSerializerMixin, TimedSerializer[str]): + """Works like :class:`.TimedSerializer` but dumps and loads into a + URL safe string consisting of the upper and lowercase character of + the alphabet as well as ``'_'``, ``'-'`` and ``'.'``. + """ diff --git a/venv/lib/python3.11/site-packages/jinja2-3.1.6.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/jinja2-3.1.6.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.11/site-packages/jinja2-3.1.6.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.11/site-packages/jinja2-3.1.6.dist-info/METADATA b/venv/lib/python3.11/site-packages/jinja2-3.1.6.dist-info/METADATA new file mode 100644 index 0000000..ffef2ff --- /dev/null +++ b/venv/lib/python3.11/site-packages/jinja2-3.1.6.dist-info/METADATA @@ -0,0 +1,84 @@ +Metadata-Version: 2.4 +Name: Jinja2 +Version: 3.1.6 +Summary: A very fast and expressive template engine. +Maintainer-email: Pallets +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Text Processing :: Markup :: HTML +Classifier: Typing :: Typed +License-File: LICENSE.txt +Requires-Dist: MarkupSafe>=2.0 +Requires-Dist: Babel>=2.7 ; extra == "i18n" +Project-URL: Changes, https://jinja.palletsprojects.com/changes/ +Project-URL: Chat, https://discord.gg/pallets +Project-URL: Documentation, https://jinja.palletsprojects.com/ +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Source, https://github.com/pallets/jinja/ +Provides-Extra: i18n + +# Jinja + +Jinja is a fast, expressive, extensible templating engine. Special +placeholders in the template allow writing code similar to Python +syntax. Then the template is passed data to render the final document. + +It includes: + +- Template inheritance and inclusion. 
+- Define and import macros within templates.
+- HTML templates can use autoescaping to prevent XSS from untrusted
+  user input.
+- A sandboxed environment can safely render untrusted templates.
+- AsyncIO support for generating templates and calling async
+  functions.
+- I18N support with Babel.
+- Templates are compiled to optimized Python code just-in-time and
+  cached, or can be compiled ahead-of-time.
+- Exceptions point to the correct line in templates to make debugging
+  easier.
+- Extensible filters, tests, functions, and even syntax.
+
+Jinja's philosophy is that while application logic belongs in Python if
+possible, it shouldn't make the template designer's job difficult by
+restricting functionality too much.
+
+
+## In A Nutshell
+
+```jinja
+{% extends "base.html" %}
+{% block title %}Members{% endblock %}
+{% block content %}
+  <ul>
+  {% for user in users %}
+    <li><a href="{{ user.url }}">{{ user.username }}</a></li>
+  {% endfor %}
+  </ul>
+{% endblock %}
+```
+
+## Donate
+
+The Pallets organization develops and supports Jinja and other popular
+packages. In order to grow the community of contributors and users, and
+allow the maintainers to devote more time to the projects, [please
+donate today][].
+
+[please donate today]: https://palletsprojects.com/donate
+
+## Contributing
+
+See our [detailed contributing documentation][contrib] for many ways to
+contribute, including reporting issues, requesting features, asking or answering
+questions, and making PRs.
+
+[contrib]: https://palletsprojects.com/contributing/
+
diff --git a/venv/lib/python3.11/site-packages/jinja2-3.1.6.dist-info/RECORD b/venv/lib/python3.11/site-packages/jinja2-3.1.6.dist-info/RECORD
new file mode 100644
index 0000000..4278e04
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/jinja2-3.1.6.dist-info/RECORD
@@ -0,0 +1,57 @@
+jinja2-3.1.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jinja2-3.1.6.dist-info/METADATA,sha256=aMVUj7Z8QTKhOJjZsx7FDGvqKr3ZFdkh8hQ1XDpkmcg,2871
+jinja2-3.1.6.dist-info/RECORD,,
+jinja2-3.1.6.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82
+jinja2-3.1.6.dist-info/entry_points.txt,sha256=OL85gYU1eD8cuPlikifFngXpeBjaxl6rIJ8KkC_3r-I,58
+jinja2-3.1.6.dist-info/licenses/LICENSE.txt,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475
+jinja2/__init__.py,sha256=xxepO9i7DHsqkQrgBEduLtfoz2QCuT6_gbL4XSN1hbU,1928
+jinja2/__pycache__/__init__.cpython-311.pyc,,
+jinja2/__pycache__/_identifier.cpython-311.pyc,,
+jinja2/__pycache__/async_utils.cpython-311.pyc,,
+jinja2/__pycache__/bccache.cpython-311.pyc,,
+jinja2/__pycache__/compiler.cpython-311.pyc,,
+jinja2/__pycache__/constants.cpython-311.pyc,,
+jinja2/__pycache__/debug.cpython-311.pyc,,
+jinja2/__pycache__/defaults.cpython-311.pyc,,
+jinja2/__pycache__/environment.cpython-311.pyc,,
+jinja2/__pycache__/exceptions.cpython-311.pyc,,
+jinja2/__pycache__/ext.cpython-311.pyc,,
+jinja2/__pycache__/filters.cpython-311.pyc,,
+jinja2/__pycache__/idtracking.cpython-311.pyc,,
+jinja2/__pycache__/lexer.cpython-311.pyc,,
+jinja2/__pycache__/loaders.cpython-311.pyc,,
+jinja2/__pycache__/meta.cpython-311.pyc,,
+jinja2/__pycache__/nativetypes.cpython-311.pyc,,
+jinja2/__pycache__/nodes.cpython-311.pyc,,
+jinja2/__pycache__/optimizer.cpython-311.pyc,,
+jinja2/__pycache__/parser.cpython-311.pyc,,
+jinja2/__pycache__/runtime.cpython-311.pyc,,
+jinja2/__pycache__/sandbox.cpython-311.pyc,,
+jinja2/__pycache__/tests.cpython-311.pyc,,
+jinja2/__pycache__/utils.cpython-311.pyc,,
+jinja2/__pycache__/visitor.cpython-311.pyc,,
+jinja2/_identifier.py,sha256=_zYctNKzRqlk_murTNlzrju1FFJL7Va_Ijqqd7ii2lU,1958
+jinja2/async_utils.py,sha256=vK-PdsuorOMnWSnEkT3iUJRIkTnYgO2T6MnGxDgHI5o,2834 +jinja2/bccache.py,sha256=gh0qs9rulnXo0PhX5jTJy2UHzI8wFnQ63o_vw7nhzRg,14061 +jinja2/compiler.py,sha256=9RpCQl5X88BHllJiPsHPh295Hh0uApvwFJNQuutULeM,74131 +jinja2/constants.py,sha256=GMoFydBF_kdpaRKPoM5cl5MviquVRLVyZtfp5-16jg0,1433 +jinja2/debug.py,sha256=CnHqCDHd-BVGvti_8ZsTolnXNhA3ECsY-6n_2pwU8Hw,6297 +jinja2/defaults.py,sha256=boBcSw78h-lp20YbaXSJsqkAI2uN_mD_TtCydpeq5wU,1267 +jinja2/environment.py,sha256=9nhrP7Ch-NbGX00wvyr4yy-uhNHq2OCc60ggGrni_fk,61513 +jinja2/exceptions.py,sha256=ioHeHrWwCWNaXX1inHmHVblvc4haO7AXsjCp3GfWvx0,5071 +jinja2/ext.py,sha256=5PF5eHfh8mXAIxXHHRB2xXbXohi8pE3nHSOxa66uS7E,31875 +jinja2/filters.py,sha256=PQ_Egd9n9jSgtnGQYyF4K5j2nYwhUIulhPnyimkdr-k,55212 +jinja2/idtracking.py,sha256=-ll5lIp73pML3ErUYiIJj7tdmWxcH_IlDv3yA_hiZYo,10555 +jinja2/lexer.py,sha256=LYiYio6br-Tep9nPcupWXsPEtjluw3p1mU-lNBVRUfk,29786 +jinja2/loaders.py,sha256=wIrnxjvcbqh5VwW28NSkfotiDq8qNCxIOSFbGUiSLB4,24055 +jinja2/meta.py,sha256=OTDPkaFvU2Hgvx-6akz7154F8BIWaRmvJcBFvwopHww,4397 +jinja2/nativetypes.py,sha256=7GIGALVJgdyL80oZJdQUaUfwSt5q2lSSZbXt0dNf_M4,4210 +jinja2/nodes.py,sha256=m1Duzcr6qhZI8JQ6VyJgUNinjAf5bQzijSmDnMsvUx8,34579 +jinja2/optimizer.py,sha256=rJnCRlQ7pZsEEmMhsQDgC_pKyDHxP5TPS6zVPGsgcu8,1651 +jinja2/parser.py,sha256=lLOFy3sEmHc5IaEHRiH1sQVnId2moUQzhyeJZTtdY30,40383 +jinja2/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +jinja2/runtime.py,sha256=gDk-GvdriJXqgsGbHgrcKTP0Yp6zPXzhzrIpCFH3jAU,34249 +jinja2/sandbox.py,sha256=Mw2aitlY2I8la7FYhcX2YG9BtUYcLnD0Gh3d29cDWrY,15009 +jinja2/tests.py,sha256=VLsBhVFnWg-PxSBz1MhRnNWgP1ovXk3neO1FLQMeC9Q,5926 +jinja2/utils.py,sha256=rRp3o9e7ZKS4fyrWRbELyLcpuGVTFcnooaOa1qx_FIk,24129 +jinja2/visitor.py,sha256=EcnL1PIwf_4RVCOMxsRNuR8AXHbS1qfAdMOE2ngKJz4,3557 diff --git a/venv/lib/python3.11/site-packages/jinja2-3.1.6.dist-info/WHEEL b/venv/lib/python3.11/site-packages/jinja2-3.1.6.dist-info/WHEEL new file mode 100644 index 0000000..23d2d7e --- /dev/null +++ b/venv/lib/python3.11/site-packages/jinja2-3.1.6.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.11.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.11/site-packages/jinja2-3.1.6.dist-info/entry_points.txt b/venv/lib/python3.11/site-packages/jinja2-3.1.6.dist-info/entry_points.txt new file mode 100644 index 0000000..abc3eae --- /dev/null +++ b/venv/lib/python3.11/site-packages/jinja2-3.1.6.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[babel.extractors] +jinja2=jinja2.ext:babel_extract[i18n] + diff --git a/venv/lib/python3.11/site-packages/jinja2-3.1.6.dist-info/licenses/LICENSE.txt b/venv/lib/python3.11/site-packages/jinja2-3.1.6.dist-info/licenses/LICENSE.txt new file mode 100644 index 0000000..c37cae4 --- /dev/null +++ b/venv/lib/python3.11/site-packages/jinja2-3.1.6.dist-info/licenses/LICENSE.txt @@ -0,0 +1,28 @@ +Copyright 2007 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. 
Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.11/site-packages/jinja2/__init__.py b/venv/lib/python3.11/site-packages/jinja2/__init__.py new file mode 100644 index 0000000..1a423a3 --- /dev/null +++ b/venv/lib/python3.11/site-packages/jinja2/__init__.py @@ -0,0 +1,38 @@ +"""Jinja is a template engine written in pure Python. It provides a +non-XML syntax that supports inline expressions and an optional +sandboxed environment. +""" + +from .bccache import BytecodeCache as BytecodeCache +from .bccache import FileSystemBytecodeCache as FileSystemBytecodeCache +from .bccache import MemcachedBytecodeCache as MemcachedBytecodeCache +from .environment import Environment as Environment +from .environment import Template as Template +from .exceptions import TemplateAssertionError as TemplateAssertionError +from .exceptions import TemplateError as TemplateError +from .exceptions import TemplateNotFound as TemplateNotFound +from .exceptions import TemplateRuntimeError as TemplateRuntimeError +from .exceptions import TemplatesNotFound as TemplatesNotFound +from .exceptions import TemplateSyntaxError as TemplateSyntaxError +from .exceptions import UndefinedError as UndefinedError +from .loaders import BaseLoader as BaseLoader +from .loaders import ChoiceLoader as ChoiceLoader +from .loaders import DictLoader as DictLoader +from .loaders import FileSystemLoader as FileSystemLoader +from .loaders import FunctionLoader as FunctionLoader +from .loaders import ModuleLoader as ModuleLoader +from .loaders import PackageLoader as PackageLoader +from .loaders import PrefixLoader as PrefixLoader +from .runtime import ChainableUndefined as ChainableUndefined +from .runtime import DebugUndefined as DebugUndefined +from .runtime import make_logging_undefined as make_logging_undefined +from .runtime import StrictUndefined as StrictUndefined +from .runtime import Undefined as Undefined +from .utils import clear_caches as clear_caches +from .utils import is_undefined as is_undefined +from .utils import pass_context as pass_context +from .utils import pass_environment as pass_environment +from .utils import pass_eval_context as pass_eval_context +from .utils import select_autoescape as select_autoescape + +__version__ = "3.1.6" diff --git a/venv/lib/python3.11/site-packages/jinja2/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..c228d57 Binary files /dev/null and b/venv/lib/python3.11/site-packages/jinja2/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/venv/lib/python3.11/site-packages/jinja2/__pycache__/_identifier.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/_identifier.cpython-311.pyc new file mode 100644 index 0000000..682d261 Binary files /dev/null and b/venv/lib/python3.11/site-packages/jinja2/__pycache__/_identifier.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/jinja2/__pycache__/async_utils.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/async_utils.cpython-311.pyc new file mode 100644 index 0000000..4c2465a Binary files /dev/null and b/venv/lib/python3.11/site-packages/jinja2/__pycache__/async_utils.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/jinja2/__pycache__/bccache.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/bccache.cpython-311.pyc new file mode 100644 index 0000000..1c2b693 Binary files /dev/null and b/venv/lib/python3.11/site-packages/jinja2/__pycache__/bccache.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/jinja2/__pycache__/compiler.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/compiler.cpython-311.pyc new file mode 100644 index 0000000..505a217 Binary files /dev/null and b/venv/lib/python3.11/site-packages/jinja2/__pycache__/compiler.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/jinja2/__pycache__/constants.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/constants.cpython-311.pyc new file mode 100644 index 0000000..dde64ba Binary files /dev/null and b/venv/lib/python3.11/site-packages/jinja2/__pycache__/constants.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/jinja2/__pycache__/debug.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/debug.cpython-311.pyc new file mode 100644 index 0000000..4532c6c Binary files /dev/null and b/venv/lib/python3.11/site-packages/jinja2/__pycache__/debug.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/jinja2/__pycache__/defaults.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/defaults.cpython-311.pyc new file mode 100644 index 0000000..00c7042 Binary files /dev/null and b/venv/lib/python3.11/site-packages/jinja2/__pycache__/defaults.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/jinja2/__pycache__/environment.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/environment.cpython-311.pyc new file mode 100644 index 0000000..05c65e7 Binary files /dev/null and b/venv/lib/python3.11/site-packages/jinja2/__pycache__/environment.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/jinja2/__pycache__/exceptions.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/exceptions.cpython-311.pyc new file mode 100644 index 0000000..7fca386 Binary files /dev/null and b/venv/lib/python3.11/site-packages/jinja2/__pycache__/exceptions.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/jinja2/__pycache__/ext.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/ext.cpython-311.pyc new file mode 100644 index 0000000..1d1fbcd Binary files /dev/null and b/venv/lib/python3.11/site-packages/jinja2/__pycache__/ext.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/jinja2/__pycache__/filters.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/filters.cpython-311.pyc new file mode 100644 index 0000000..0bff570 Binary files /dev/null and 
b/venv/lib/python3.11/site-packages/jinja2/__pycache__/filters.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/jinja2/__pycache__/idtracking.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/idtracking.cpython-311.pyc new file mode 100644 index 0000000..b074f9c Binary files /dev/null and b/venv/lib/python3.11/site-packages/jinja2/__pycache__/idtracking.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/jinja2/__pycache__/lexer.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/lexer.cpython-311.pyc new file mode 100644 index 0000000..323b214 Binary files /dev/null and b/venv/lib/python3.11/site-packages/jinja2/__pycache__/lexer.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/jinja2/__pycache__/loaders.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/loaders.cpython-311.pyc new file mode 100644 index 0000000..c11bd83 Binary files /dev/null and b/venv/lib/python3.11/site-packages/jinja2/__pycache__/loaders.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/jinja2/__pycache__/meta.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/meta.cpython-311.pyc new file mode 100644 index 0000000..d5668c5 Binary files /dev/null and b/venv/lib/python3.11/site-packages/jinja2/__pycache__/meta.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/jinja2/__pycache__/nativetypes.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/nativetypes.cpython-311.pyc new file mode 100644 index 0000000..259283e Binary files /dev/null and b/venv/lib/python3.11/site-packages/jinja2/__pycache__/nativetypes.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/jinja2/__pycache__/nodes.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/nodes.cpython-311.pyc new file mode 100644 index 0000000..c0ac4ce Binary files /dev/null and b/venv/lib/python3.11/site-packages/jinja2/__pycache__/nodes.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/jinja2/__pycache__/optimizer.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/optimizer.cpython-311.pyc new file mode 100644 index 0000000..0112c38 Binary files /dev/null and b/venv/lib/python3.11/site-packages/jinja2/__pycache__/optimizer.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/jinja2/__pycache__/parser.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/parser.cpython-311.pyc new file mode 100644 index 0000000..36a854b Binary files /dev/null and b/venv/lib/python3.11/site-packages/jinja2/__pycache__/parser.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/jinja2/__pycache__/runtime.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/runtime.cpython-311.pyc new file mode 100644 index 0000000..0605d56 Binary files /dev/null and b/venv/lib/python3.11/site-packages/jinja2/__pycache__/runtime.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/jinja2/__pycache__/sandbox.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/sandbox.cpython-311.pyc new file mode 100644 index 0000000..e5cbc2c Binary files /dev/null and b/venv/lib/python3.11/site-packages/jinja2/__pycache__/sandbox.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/jinja2/__pycache__/tests.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/tests.cpython-311.pyc new file mode 100644 index 
0000000..93cb3ba Binary files /dev/null and b/venv/lib/python3.11/site-packages/jinja2/__pycache__/tests.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/jinja2/__pycache__/utils.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/utils.cpython-311.pyc new file mode 100644 index 0000000..b1a30f3 Binary files /dev/null and b/venv/lib/python3.11/site-packages/jinja2/__pycache__/utils.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/jinja2/__pycache__/visitor.cpython-311.pyc b/venv/lib/python3.11/site-packages/jinja2/__pycache__/visitor.cpython-311.pyc new file mode 100644 index 0000000..93e8724 Binary files /dev/null and b/venv/lib/python3.11/site-packages/jinja2/__pycache__/visitor.cpython-311.pyc differ diff --git a/venv/lib/python3.11/site-packages/jinja2/_identifier.py b/venv/lib/python3.11/site-packages/jinja2/_identifier.py new file mode 100644 index 0000000..928c150 --- /dev/null +++ b/venv/lib/python3.11/site-packages/jinja2/_identifier.py @@ -0,0 +1,6 @@ +import re + +# generated by scripts/generate_identifier_pattern.py +pattern = re.compile( + r"[\w·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߽߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛࣓-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣ৾ਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣૺ-૿ଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఄా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഀ-ഃ഻഼ാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳷-᳹᷀-᷹᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꣿꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𐴤-𐽆𐴧-𐽐𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑄴𑅅𑅆𑅳𑆀-𑆂𑆳-𑇀𑇉-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌻𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑑞𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑠬-𑠺𑨁-𑨊𑨳-𑨹𑨻-𑨾𑩇𑩑-𑩛𑪊-𑪙𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𑴱-𑴶𑴺𑴼𑴽𑴿-𑵅𑵇𑶊-𑶎𑶐𑶑𑶓-𑶗𑻳-𑻶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯]+" # noqa: B950 +) diff --git a/venv/lib/python3.11/site-packages/jinja2/async_utils.py b/venv/lib/python3.11/site-packages/jinja2/async_utils.py new file mode 100644 index 0000000..f0c1402 --- /dev/null +++ b/venv/lib/python3.11/site-packages/jinja2/async_utils.py @@ -0,0 +1,99 @@ +import inspect +import typing as t +from functools import WRAPPER_ASSIGNMENTS +from functools import wraps + +from .utils import _PassArg +from .utils import pass_eval_context + +if t.TYPE_CHECKING: + import typing_extensions as te + +V = t.TypeVar("V") + + +def async_variant(normal_func): # type: ignore + def decorator(async_func): # type: ignore + pass_arg = _PassArg.from_obj(normal_func) + need_eval_context = pass_arg is None + + if pass_arg is _PassArg.environment: + + def is_async(args: t.Any) -> bool: + return t.cast(bool, args[0].is_async) + + else: + + def is_async(args: t.Any) -> bool: + return t.cast(bool, args[0].environment.is_async) + + # Take the doc and annotations from the sync function, but the + # name from the async function. Pallets-Sphinx-Themes + # build_function_directive expects __wrapped__ to point to the + # sync function. 
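+        # On Python 3.11, functools.WRAPPER_ASSIGNMENTS is ("__module__",
+        # "__name__", "__qualname__", "__annotations__", "__doc__"), so
+        # the split below copies the doc and annotations from the sync
+        # function and the module and naming attributes from the async
+        # one.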
+ async_func_attrs = ("__module__", "__name__", "__qualname__") + normal_func_attrs = tuple(set(WRAPPER_ASSIGNMENTS).difference(async_func_attrs)) + + @wraps(normal_func, assigned=normal_func_attrs) + @wraps(async_func, assigned=async_func_attrs, updated=()) + def wrapper(*args, **kwargs): # type: ignore + b = is_async(args) + + if need_eval_context: + args = args[1:] + + if b: + return async_func(*args, **kwargs) + + return normal_func(*args, **kwargs) + + if need_eval_context: + wrapper = pass_eval_context(wrapper) + + wrapper.jinja_async_variant = True # type: ignore[attr-defined] + return wrapper + + return decorator + + +_common_primitives = {int, float, bool, str, list, dict, tuple, type(None)} + + +async def auto_await(value: t.Union[t.Awaitable["V"], "V"]) -> "V": + # Avoid a costly call to isawaitable + if type(value) in _common_primitives: + return t.cast("V", value) + + if inspect.isawaitable(value): + return await t.cast("t.Awaitable[V]", value) + + return value + + +class _IteratorToAsyncIterator(t.Generic[V]): + def __init__(self, iterator: "t.Iterator[V]"): + self._iterator = iterator + + def __aiter__(self) -> "te.Self": + return self + + async def __anext__(self) -> V: + try: + return next(self._iterator) + except StopIteration as e: + raise StopAsyncIteration(e.value) from e + + +def auto_aiter( + iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", +) -> "t.AsyncIterator[V]": + if hasattr(iterable, "__aiter__"): + return iterable.__aiter__() + else: + return _IteratorToAsyncIterator(iter(iterable)) + + +async def auto_to_list( + value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", +) -> t.List["V"]: + return [x async for x in auto_aiter(value)] diff --git a/venv/lib/python3.11/site-packages/jinja2/bccache.py b/venv/lib/python3.11/site-packages/jinja2/bccache.py new file mode 100644 index 0000000..ada8b09 --- /dev/null +++ b/venv/lib/python3.11/site-packages/jinja2/bccache.py @@ -0,0 +1,408 @@ +"""The optional bytecode cache system. This is useful if you have very +complex template situations and the compilation of all those templates +slows down your application too much. + +Situations where this is useful are often forking web applications that +are initialized on the first request. +""" + +import errno +import fnmatch +import marshal +import os +import pickle +import stat +import sys +import tempfile +import typing as t +from hashlib import sha1 +from io import BytesIO +from types import CodeType + +if t.TYPE_CHECKING: + import typing_extensions as te + + from .environment import Environment + + class _MemcachedClient(te.Protocol): + def get(self, key: str) -> bytes: ... + + def set( + self, key: str, value: bytes, timeout: t.Optional[int] = None + ) -> None: ... + + +bc_version = 5 +# Magic bytes to identify Jinja bytecode cache files. Contains the +# Python major and minor version to avoid loading incompatible bytecode +# if a project upgrades its Python version. +bc_magic = ( + b"j2" + + pickle.dumps(bc_version, 2) + + pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1], 2) +) + + +class Bucket: + """Buckets are used to store the bytecode for one template. It's created + and initialized by the bytecode cache and passed to the loading functions. + + The buckets get an internal checksum from the cache assigned and use this + to automatically reject outdated cache material. Individual bytecode + cache subclasses don't have to care about cache invalidation. 
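+
+    As an illustrative sketch, a custom cache backend only needs the
+    byte-string round trip::
+
+        blob = bucket.bytecode_to_string()  # bytes to store anywhere
+        bucket.bytecode_from_string(blob)   # restore; sets bucket.code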
+ """ + + def __init__(self, environment: "Environment", key: str, checksum: str) -> None: + self.environment = environment + self.key = key + self.checksum = checksum + self.reset() + + def reset(self) -> None: + """Resets the bucket (unloads the bytecode).""" + self.code: t.Optional[CodeType] = None + + def load_bytecode(self, f: t.BinaryIO) -> None: + """Loads bytecode from a file or file like object.""" + # make sure the magic header is correct + magic = f.read(len(bc_magic)) + if magic != bc_magic: + self.reset() + return + # the source code of the file changed, we need to reload + checksum = pickle.load(f) + if self.checksum != checksum: + self.reset() + return + # if marshal_load fails then we need to reload + try: + self.code = marshal.load(f) + except (EOFError, ValueError, TypeError): + self.reset() + return + + def write_bytecode(self, f: t.IO[bytes]) -> None: + """Dump the bytecode into the file or file like object passed.""" + if self.code is None: + raise TypeError("can't write empty bucket") + f.write(bc_magic) + pickle.dump(self.checksum, f, 2) + marshal.dump(self.code, f) + + def bytecode_from_string(self, string: bytes) -> None: + """Load bytecode from bytes.""" + self.load_bytecode(BytesIO(string)) + + def bytecode_to_string(self) -> bytes: + """Return the bytecode as bytes.""" + out = BytesIO() + self.write_bytecode(out) + return out.getvalue() + + +class BytecodeCache: + """To implement your own bytecode cache you have to subclass this class + and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of + these methods are passed a :class:`~jinja2.bccache.Bucket`. + + A very basic bytecode cache that saves the bytecode on the file system:: + + from os import path + + class MyCache(BytecodeCache): + + def __init__(self, directory): + self.directory = directory + + def load_bytecode(self, bucket): + filename = path.join(self.directory, bucket.key) + if path.exists(filename): + with open(filename, 'rb') as f: + bucket.load_bytecode(f) + + def dump_bytecode(self, bucket): + filename = path.join(self.directory, bucket.key) + with open(filename, 'wb') as f: + bucket.write_bytecode(f) + + A more advanced version of a filesystem based bytecode cache is part of + Jinja. + """ + + def load_bytecode(self, bucket: Bucket) -> None: + """Subclasses have to override this method to load bytecode into a + bucket. If they are not able to find code in the cache for the + bucket, it must not do anything. + """ + raise NotImplementedError() + + def dump_bytecode(self, bucket: Bucket) -> None: + """Subclasses have to override this method to write the bytecode + from a bucket back to the cache. If it unable to do so it must not + fail silently but raise an exception. + """ + raise NotImplementedError() + + def clear(self) -> None: + """Clears the cache. This method is not used by Jinja but should be + implemented to allow applications to clear the bytecode cache used + by a particular environment. 
+ """ + + def get_cache_key( + self, name: str, filename: t.Optional[t.Union[str]] = None + ) -> str: + """Returns the unique hash key for this template name.""" + hash = sha1(name.encode("utf-8")) + + if filename is not None: + hash.update(f"|{filename}".encode()) + + return hash.hexdigest() + + def get_source_checksum(self, source: str) -> str: + """Returns a checksum for the source.""" + return sha1(source.encode("utf-8")).hexdigest() + + def get_bucket( + self, + environment: "Environment", + name: str, + filename: t.Optional[str], + source: str, + ) -> Bucket: + """Return a cache bucket for the given template. All arguments are + mandatory but filename may be `None`. + """ + key = self.get_cache_key(name, filename) + checksum = self.get_source_checksum(source) + bucket = Bucket(environment, key, checksum) + self.load_bytecode(bucket) + return bucket + + def set_bucket(self, bucket: Bucket) -> None: + """Put the bucket into the cache.""" + self.dump_bytecode(bucket) + + +class FileSystemBytecodeCache(BytecodeCache): + """A bytecode cache that stores bytecode on the filesystem. It accepts + two arguments: The directory where the cache items are stored and a + pattern string that is used to build the filename. + + If no directory is specified a default cache directory is selected. On + Windows the user's temp directory is used, on UNIX systems a directory + is created for the user in the system temp directory. + + The pattern can be used to have multiple separate caches operate on the + same directory. The default pattern is ``'__jinja2_%s.cache'``. ``%s`` + is replaced with the cache key. + + >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache') + + This bytecode cache supports clearing of the cache using the clear method. + """ + + def __init__( + self, directory: t.Optional[str] = None, pattern: str = "__jinja2_%s.cache" + ) -> None: + if directory is None: + directory = self._get_default_cache_dir() + self.directory = directory + self.pattern = pattern + + def _get_default_cache_dir(self) -> str: + def _unsafe_dir() -> "te.NoReturn": + raise RuntimeError( + "Cannot determine safe temp directory. You " + "need to explicitly provide one." + ) + + tmpdir = tempfile.gettempdir() + + # On windows the temporary directory is used specific unless + # explicitly forced otherwise. We can just use that. + if os.name == "nt": + return tmpdir + if not hasattr(os, "getuid"): + _unsafe_dir() + + dirname = f"_jinja2-cache-{os.getuid()}" + actual_dir = os.path.join(tmpdir, dirname) + + try: + os.mkdir(actual_dir, stat.S_IRWXU) + except OSError as e: + if e.errno != errno.EEXIST: + raise + try: + os.chmod(actual_dir, stat.S_IRWXU) + actual_dir_stat = os.lstat(actual_dir) + if ( + actual_dir_stat.st_uid != os.getuid() + or not stat.S_ISDIR(actual_dir_stat.st_mode) + or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU + ): + _unsafe_dir() + except OSError as e: + if e.errno != errno.EEXIST: + raise + + actual_dir_stat = os.lstat(actual_dir) + if ( + actual_dir_stat.st_uid != os.getuid() + or not stat.S_ISDIR(actual_dir_stat.st_mode) + or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU + ): + _unsafe_dir() + + return actual_dir + + def _get_cache_filename(self, bucket: Bucket) -> str: + return os.path.join(self.directory, self.pattern % (bucket.key,)) + + def load_bytecode(self, bucket: Bucket) -> None: + filename = self._get_cache_filename(bucket) + + # Don't test for existence before opening the file, since the + # file could disappear after the test before the open. 
+ try: + f = open(filename, "rb") + except (FileNotFoundError, IsADirectoryError, PermissionError): + # PermissionError can occur on Windows when an operation is + # in progress, such as calling clear(). + return + + with f: + bucket.load_bytecode(f) + + def dump_bytecode(self, bucket: Bucket) -> None: + # Write to a temporary file, then rename to the real name after + # writing. This avoids another process reading the file before + # it is fully written. + name = self._get_cache_filename(bucket) + f = tempfile.NamedTemporaryFile( + mode="wb", + dir=os.path.dirname(name), + prefix=os.path.basename(name), + suffix=".tmp", + delete=False, + ) + + def remove_silent() -> None: + try: + os.remove(f.name) + except OSError: + # Another process may have called clear(). On Windows, + # another program may be holding the file open. + pass + + try: + with f: + bucket.write_bytecode(f) + except BaseException: + remove_silent() + raise + + try: + os.replace(f.name, name) + except OSError: + # Another process may have called clear(). On Windows, + # another program may be holding the file open. + remove_silent() + except BaseException: + remove_silent() + raise + + def clear(self) -> None: + # imported lazily here because google app-engine doesn't support + # write access on the file system and the function does not exist + # normally. + from os import remove + + files = fnmatch.filter(os.listdir(self.directory), self.pattern % ("*",)) + for filename in files: + try: + remove(os.path.join(self.directory, filename)) + except OSError: + pass + + +class MemcachedBytecodeCache(BytecodeCache): + """This class implements a bytecode cache that uses a memcache cache for + storing the information. It does not enforce a specific memcache library + (tummy's memcache or cmemcache) but will accept any class that provides + the minimal interface required. + + Libraries compatible with this class: + + - `cachelib `_ + - `python-memcached `_ + + (Unfortunately the django cache interface is not compatible because it + does not support storing binary data, only text. You can however pass + the underlying cache client to the bytecode cache which is available + as `django.core.cache.cache._client`.) + + The minimal interface for the client passed to the constructor is this: + + .. class:: MinimalClientInterface + + .. method:: set(key, value[, timeout]) + + Stores the bytecode in the cache. `value` is a string and + `timeout` the timeout of the key. If timeout is not provided + a default timeout or no timeout should be assumed, if it's + provided it's an integer with the number of seconds the cache + item should exist. + + .. method:: get(key) + + Returns the value for the cache key. If the item does not + exist in the cache the return value must be `None`. + + The other arguments to the constructor are the prefix for all keys that + is added before the actual cache key and the timeout for the bytecode in + the cache system. We recommend a high (or no) timeout. + + This bytecode cache does not support clearing of used items in the cache. + The clear method is a no-operation function. + + .. versionadded:: 2.7 + Added support for ignoring memcache errors through the + `ignore_memcache_errors` parameter. 
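+
+    An illustrative setup (the client library and server address are
+    placeholders; any object with a compatible ``get``/``set`` pair
+    works)::
+
+        import memcache
+
+        client = memcache.Client(["127.0.0.1:11211"])
+        bcc = MemcachedBytecodeCache(client, prefix="jinja2/bytecode/")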
+ """ + + def __init__( + self, + client: "_MemcachedClient", + prefix: str = "jinja2/bytecode/", + timeout: t.Optional[int] = None, + ignore_memcache_errors: bool = True, + ): + self.client = client + self.prefix = prefix + self.timeout = timeout + self.ignore_memcache_errors = ignore_memcache_errors + + def load_bytecode(self, bucket: Bucket) -> None: + try: + code = self.client.get(self.prefix + bucket.key) + except Exception: + if not self.ignore_memcache_errors: + raise + else: + bucket.bytecode_from_string(code) + + def dump_bytecode(self, bucket: Bucket) -> None: + key = self.prefix + bucket.key + value = bucket.bytecode_to_string() + + try: + if self.timeout is not None: + self.client.set(key, value, self.timeout) + else: + self.client.set(key, value) + except Exception: + if not self.ignore_memcache_errors: + raise diff --git a/venv/lib/python3.11/site-packages/jinja2/compiler.py b/venv/lib/python3.11/site-packages/jinja2/compiler.py new file mode 100644 index 0000000..a4ff6a1 --- /dev/null +++ b/venv/lib/python3.11/site-packages/jinja2/compiler.py @@ -0,0 +1,1998 @@ +"""Compiles nodes from the parser into Python code.""" + +import typing as t +from contextlib import contextmanager +from functools import update_wrapper +from io import StringIO +from itertools import chain +from keyword import iskeyword as is_python_keyword + +from markupsafe import escape +from markupsafe import Markup + +from . import nodes +from .exceptions import TemplateAssertionError +from .idtracking import Symbols +from .idtracking import VAR_LOAD_ALIAS +from .idtracking import VAR_LOAD_PARAMETER +from .idtracking import VAR_LOAD_RESOLVE +from .idtracking import VAR_LOAD_UNDEFINED +from .nodes import EvalContext +from .optimizer import Optimizer +from .utils import _PassArg +from .utils import concat +from .visitor import NodeVisitor + +if t.TYPE_CHECKING: + import typing_extensions as te + + from .environment import Environment + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) + +operators = { + "eq": "==", + "ne": "!=", + "gt": ">", + "gteq": ">=", + "lt": "<", + "lteq": "<=", + "in": "in", + "notin": "not in", +} + + +def optimizeconst(f: F) -> F: + def new_func( + self: "CodeGenerator", node: nodes.Expr, frame: "Frame", **kwargs: t.Any + ) -> t.Any: + # Only optimize if the frame is not volatile + if self.optimizer is not None and not frame.eval_ctx.volatile: + new_node = self.optimizer.visit(node, frame.eval_ctx) + + if new_node != node: + return self.visit(new_node, frame) + + return f(self, node, frame, **kwargs) + + return update_wrapper(new_func, f) # type: ignore[return-value] + + +def _make_binop(op: str) -> t.Callable[["CodeGenerator", nodes.BinExpr, "Frame"], None]: + @optimizeconst + def visitor(self: "CodeGenerator", node: nodes.BinExpr, frame: Frame) -> None: + if ( + self.environment.sandboxed and op in self.environment.intercepted_binops # type: ignore + ): + self.write(f"environment.call_binop(context, {op!r}, ") + self.visit(node.left, frame) + self.write(", ") + self.visit(node.right, frame) + else: + self.write("(") + self.visit(node.left, frame) + self.write(f" {op} ") + self.visit(node.right, frame) + + self.write(")") + + return visitor + + +def _make_unop( + op: str, +) -> t.Callable[["CodeGenerator", nodes.UnaryExpr, "Frame"], None]: + @optimizeconst + def visitor(self: "CodeGenerator", node: nodes.UnaryExpr, frame: Frame) -> None: + if ( + self.environment.sandboxed and op in self.environment.intercepted_unops # type: ignore + ): + self.write(f"environment.call_unop(context, 
{op!r}, ") + self.visit(node.node, frame) + else: + self.write("(" + op) + self.visit(node.node, frame) + + self.write(")") + + return visitor + + +def generate( + node: nodes.Template, + environment: "Environment", + name: t.Optional[str], + filename: t.Optional[str], + stream: t.Optional[t.TextIO] = None, + defer_init: bool = False, + optimized: bool = True, +) -> t.Optional[str]: + """Generate the python source for a node tree.""" + if not isinstance(node, nodes.Template): + raise TypeError("Can't compile non template nodes") + + generator = environment.code_generator_class( + environment, name, filename, stream, defer_init, optimized + ) + generator.visit(node) + + if stream is None: + return generator.stream.getvalue() # type: ignore + + return None + + +def has_safe_repr(value: t.Any) -> bool: + """Does the node have a safe representation?""" + if value is None or value is NotImplemented or value is Ellipsis: + return True + + if type(value) in {bool, int, float, complex, range, str, Markup}: + return True + + if type(value) in {tuple, list, set, frozenset}: + return all(has_safe_repr(v) for v in value) + + if type(value) is dict: # noqa E721 + return all(has_safe_repr(k) and has_safe_repr(v) for k, v in value.items()) + + return False + + +def find_undeclared( + nodes: t.Iterable[nodes.Node], names: t.Iterable[str] +) -> t.Set[str]: + """Check if the names passed are accessed undeclared. The return value + is a set of all the undeclared names from the sequence of names found. + """ + visitor = UndeclaredNameVisitor(names) + try: + for node in nodes: + visitor.visit(node) + except VisitorExit: + pass + return visitor.undeclared + + +class MacroRef: + def __init__(self, node: t.Union[nodes.Macro, nodes.CallBlock]) -> None: + self.node = node + self.accesses_caller = False + self.accesses_kwargs = False + self.accesses_varargs = False + + +class Frame: + """Holds compile time information for us.""" + + def __init__( + self, + eval_ctx: EvalContext, + parent: t.Optional["Frame"] = None, + level: t.Optional[int] = None, + ) -> None: + self.eval_ctx = eval_ctx + + # the parent of this frame + self.parent = parent + + if parent is None: + self.symbols = Symbols(level=level) + + # in some dynamic inheritance situations the compiler needs to add + # write tests around output statements. + self.require_output_check = False + + # inside some tags we are using a buffer rather than yield statements. + # this for example affects {% filter %} or {% macro %}. If a frame + # is buffered this variable points to the name of the list used as + # buffer. + self.buffer: t.Optional[str] = None + + # the name of the block we're in, otherwise None. + self.block: t.Optional[str] = None + + else: + self.symbols = Symbols(parent.symbols, level=level) + self.require_output_check = parent.require_output_check + self.buffer = parent.buffer + self.block = parent.block + + # a toplevel frame is the root + soft frames such as if conditions. + self.toplevel = False + + # the root frame is basically just the outermost frame, so no if + # conditions. This information is used to optimize inheritance + # situations. + self.rootlevel = False + + # variables set inside of loops and blocks should not affect outer frames, + # but they still needs to be kept track of as part of the active context. + self.loop_frame = False + self.block_frame = False + + # track whether the frame is being used in an if-statement or conditional + # expression as it determines which errors should be raised during runtime + # or compile time. 
+ self.soft_frame = False + + def copy(self) -> "te.Self": + """Create a copy of the current one.""" + rv = object.__new__(self.__class__) + rv.__dict__.update(self.__dict__) + rv.symbols = self.symbols.copy() + return rv + + def inner(self, isolated: bool = False) -> "Frame": + """Return an inner frame.""" + if isolated: + return Frame(self.eval_ctx, level=self.symbols.level + 1) + return Frame(self.eval_ctx, self) + + def soft(self) -> "te.Self": + """Return a soft frame. A soft frame may not be modified as + standalone thing as it shares the resources with the frame it + was created of, but it's not a rootlevel frame any longer. + + This is only used to implement if-statements and conditional + expressions. + """ + rv = self.copy() + rv.rootlevel = False + rv.soft_frame = True + return rv + + __copy__ = copy + + +class VisitorExit(RuntimeError): + """Exception used by the `UndeclaredNameVisitor` to signal a stop.""" + + +class DependencyFinderVisitor(NodeVisitor): + """A visitor that collects filter and test calls.""" + + def __init__(self) -> None: + self.filters: t.Set[str] = set() + self.tests: t.Set[str] = set() + + def visit_Filter(self, node: nodes.Filter) -> None: + self.generic_visit(node) + self.filters.add(node.name) + + def visit_Test(self, node: nodes.Test) -> None: + self.generic_visit(node) + self.tests.add(node.name) + + def visit_Block(self, node: nodes.Block) -> None: + """Stop visiting at blocks.""" + + +class UndeclaredNameVisitor(NodeVisitor): + """A visitor that checks if a name is accessed without being + declared. This is different from the frame visitor as it will + not stop at closure frames. + """ + + def __init__(self, names: t.Iterable[str]) -> None: + self.names = set(names) + self.undeclared: t.Set[str] = set() + + def visit_Name(self, node: nodes.Name) -> None: + if node.ctx == "load" and node.name in self.names: + self.undeclared.add(node.name) + if self.undeclared == self.names: + raise VisitorExit() + else: + self.names.discard(node.name) + + def visit_Block(self, node: nodes.Block) -> None: + """Stop visiting a blocks.""" + + +class CompilerExit(Exception): + """Raised if the compiler encountered a situation where it just + doesn't make sense to further process the code. Any block that + raises such an exception is not further processed. + """ + + +class CodeGenerator(NodeVisitor): + def __init__( + self, + environment: "Environment", + name: t.Optional[str], + filename: t.Optional[str], + stream: t.Optional[t.TextIO] = None, + defer_init: bool = False, + optimized: bool = True, + ) -> None: + if stream is None: + stream = StringIO() + self.environment = environment + self.name = name + self.filename = filename + self.stream = stream + self.created_block_context = False + self.defer_init = defer_init + self.optimizer: t.Optional[Optimizer] = None + + if optimized: + self.optimizer = Optimizer(environment) + + # aliases for imports + self.import_aliases: t.Dict[str, str] = {} + + # a registry for all blocks. Because blocks are moved out + # into the global python scope they are registered here + self.blocks: t.Dict[str, nodes.Block] = {} + + # the number of extends statements so far + self.extends_so_far = 0 + + # some templates have a rootlevel extends. In this case we + # can safely assume that we're a child template and do some + # more optimizations. 
+ self.has_known_extends = False + + # the current line number + self.code_lineno = 1 + + # registry of all filters and tests (global, not block local) + self.tests: t.Dict[str, str] = {} + self.filters: t.Dict[str, str] = {} + + # the debug information + self.debug_info: t.List[t.Tuple[int, int]] = [] + self._write_debug_info: t.Optional[int] = None + + # the number of new lines before the next write() + self._new_lines = 0 + + # the line number of the last written statement + self._last_line = 0 + + # true if nothing was written so far. + self._first_write = True + + # used by the `temporary_identifier` method to get new + # unique, temporary identifier + self._last_identifier = 0 + + # the current indentation + self._indentation = 0 + + # Tracks toplevel assignments + self._assign_stack: t.List[t.Set[str]] = [] + + # Tracks parameter definition blocks + self._param_def_block: t.List[t.Set[str]] = [] + + # Tracks the current context. + self._context_reference_stack = ["context"] + + @property + def optimized(self) -> bool: + return self.optimizer is not None + + # -- Various compilation helpers + + def fail(self, msg: str, lineno: int) -> "te.NoReturn": + """Fail with a :exc:`TemplateAssertionError`.""" + raise TemplateAssertionError(msg, lineno, self.name, self.filename) + + def temporary_identifier(self) -> str: + """Get a new unique identifier.""" + self._last_identifier += 1 + return f"t_{self._last_identifier}" + + def buffer(self, frame: Frame) -> None: + """Enable buffering for the frame from that point onwards.""" + frame.buffer = self.temporary_identifier() + self.writeline(f"{frame.buffer} = []") + + def return_buffer_contents( + self, frame: Frame, force_unescaped: bool = False + ) -> None: + """Return the buffer contents of the frame.""" + if not force_unescaped: + if frame.eval_ctx.volatile: + self.writeline("if context.eval_ctx.autoescape:") + self.indent() + self.writeline(f"return Markup(concat({frame.buffer}))") + self.outdent() + self.writeline("else:") + self.indent() + self.writeline(f"return concat({frame.buffer})") + self.outdent() + return + elif frame.eval_ctx.autoescape: + self.writeline(f"return Markup(concat({frame.buffer}))") + return + self.writeline(f"return concat({frame.buffer})") + + def indent(self) -> None: + """Indent by one.""" + self._indentation += 1 + + def outdent(self, step: int = 1) -> None: + """Outdent by step.""" + self._indentation -= step + + def start_write(self, frame: Frame, node: t.Optional[nodes.Node] = None) -> None: + """Yield or write into the frame buffer.""" + if frame.buffer is None: + self.writeline("yield ", node) + else: + self.writeline(f"{frame.buffer}.append(", node) + + def end_write(self, frame: Frame) -> None: + """End the writing process started by `start_write`.""" + if frame.buffer is not None: + self.write(")") + + def simple_write( + self, s: str, frame: Frame, node: t.Optional[nodes.Node] = None + ) -> None: + """Simple shortcut for start_write + write + end_write.""" + self.start_write(frame, node) + self.write(s) + self.end_write(frame) + + def blockvisit(self, nodes: t.Iterable[nodes.Node], frame: Frame) -> None: + """Visit a list of nodes as block in a frame. If the current frame + is no buffer a dummy ``if 0: yield None`` is written automatically. 
+ """ + try: + self.writeline("pass") + for node in nodes: + self.visit(node, frame) + except CompilerExit: + pass + + def write(self, x: str) -> None: + """Write a string into the output stream.""" + if self._new_lines: + if not self._first_write: + self.stream.write("\n" * self._new_lines) + self.code_lineno += self._new_lines + if self._write_debug_info is not None: + self.debug_info.append((self._write_debug_info, self.code_lineno)) + self._write_debug_info = None + self._first_write = False + self.stream.write(" " * self._indentation) + self._new_lines = 0 + self.stream.write(x) + + def writeline( + self, x: str, node: t.Optional[nodes.Node] = None, extra: int = 0 + ) -> None: + """Combination of newline and write.""" + self.newline(node, extra) + self.write(x) + + def newline(self, node: t.Optional[nodes.Node] = None, extra: int = 0) -> None: + """Add one or more newlines before the next write.""" + self._new_lines = max(self._new_lines, 1 + extra) + if node is not None and node.lineno != self._last_line: + self._write_debug_info = node.lineno + self._last_line = node.lineno + + def signature( + self, + node: t.Union[nodes.Call, nodes.Filter, nodes.Test], + frame: Frame, + extra_kwargs: t.Optional[t.Mapping[str, t.Any]] = None, + ) -> None: + """Writes a function call to the stream for the current node. + A leading comma is added automatically. The extra keyword + arguments may not include python keywords otherwise a syntax + error could occur. The extra keyword arguments should be given + as python dict. + """ + # if any of the given keyword arguments is a python keyword + # we have to make sure that no invalid call is created. + kwarg_workaround = any( + is_python_keyword(t.cast(str, k)) + for k in chain((x.key for x in node.kwargs), extra_kwargs or ()) + ) + + for arg in node.args: + self.write(", ") + self.visit(arg, frame) + + if not kwarg_workaround: + for kwarg in node.kwargs: + self.write(", ") + self.visit(kwarg, frame) + if extra_kwargs is not None: + for key, value in extra_kwargs.items(): + self.write(f", {key}={value}") + if node.dyn_args: + self.write(", *") + self.visit(node.dyn_args, frame) + + if kwarg_workaround: + if node.dyn_kwargs is not None: + self.write(", **dict({") + else: + self.write(", **{") + for kwarg in node.kwargs: + self.write(f"{kwarg.key!r}: ") + self.visit(kwarg.value, frame) + self.write(", ") + if extra_kwargs is not None: + for key, value in extra_kwargs.items(): + self.write(f"{key!r}: {value}, ") + if node.dyn_kwargs is not None: + self.write("}, **") + self.visit(node.dyn_kwargs, frame) + self.write(")") + else: + self.write("}") + + elif node.dyn_kwargs is not None: + self.write(", **") + self.visit(node.dyn_kwargs, frame) + + def pull_dependencies(self, nodes: t.Iterable[nodes.Node]) -> None: + """Find all filter and test names used in the template and + assign them to variables in the compiled namespace. Checking + that the names are registered with the environment is done when + compiling the Filter and Test nodes. If the node is in an If or + CondExpr node, the check is done at runtime instead. + + .. versionchanged:: 3.0 + Filters and tests in If and CondExpr nodes are checked at + runtime instead of compile time. 
+ """ + visitor = DependencyFinderVisitor() + + for node in nodes: + visitor.visit(node) + + for id_map, names, dependency in ( + (self.filters, visitor.filters, "filters"), + ( + self.tests, + visitor.tests, + "tests", + ), + ): + for name in sorted(names): + if name not in id_map: + id_map[name] = self.temporary_identifier() + + # add check during runtime that dependencies used inside of executed + # blocks are defined, as this step may be skipped during compile time + self.writeline("try:") + self.indent() + self.writeline(f"{id_map[name]} = environment.{dependency}[{name!r}]") + self.outdent() + self.writeline("except KeyError:") + self.indent() + self.writeline("@internalcode") + self.writeline(f"def {id_map[name]}(*unused):") + self.indent() + self.writeline( + f'raise TemplateRuntimeError("No {dependency[:-1]}' + f' named {name!r} found.")' + ) + self.outdent() + self.outdent() + + def enter_frame(self, frame: Frame) -> None: + undefs = [] + for target, (action, param) in frame.symbols.loads.items(): + if action == VAR_LOAD_PARAMETER: + pass + elif action == VAR_LOAD_RESOLVE: + self.writeline(f"{target} = {self.get_resolve_func()}({param!r})") + elif action == VAR_LOAD_ALIAS: + self.writeline(f"{target} = {param}") + elif action == VAR_LOAD_UNDEFINED: + undefs.append(target) + else: + raise NotImplementedError("unknown load instruction") + if undefs: + self.writeline(f"{' = '.join(undefs)} = missing") + + def leave_frame(self, frame: Frame, with_python_scope: bool = False) -> None: + if not with_python_scope: + undefs = [] + for target in frame.symbols.loads: + undefs.append(target) + if undefs: + self.writeline(f"{' = '.join(undefs)} = missing") + + def choose_async(self, async_value: str = "async ", sync_value: str = "") -> str: + return async_value if self.environment.is_async else sync_value + + def func(self, name: str) -> str: + return f"{self.choose_async()}def {name}" + + def macro_body( + self, node: t.Union[nodes.Macro, nodes.CallBlock], frame: Frame + ) -> t.Tuple[Frame, MacroRef]: + """Dump the function def of a macro or call block.""" + frame = frame.inner() + frame.symbols.analyze_node(node) + macro_ref = MacroRef(node) + + explicit_caller = None + skip_special_params = set() + args = [] + + for idx, arg in enumerate(node.args): + if arg.name == "caller": + explicit_caller = idx + if arg.name in ("kwargs", "varargs"): + skip_special_params.add(arg.name) + args.append(frame.symbols.ref(arg.name)) + + undeclared = find_undeclared(node.body, ("caller", "kwargs", "varargs")) + + if "caller" in undeclared: + # In older Jinja versions there was a bug that allowed caller + # to retain the special behavior even if it was mentioned in + # the argument list. However thankfully this was only really + # working if it was the last argument. So we are explicitly + # checking this now and error out if it is anywhere else in + # the argument list. 
+ if explicit_caller is not None: + try: + node.defaults[explicit_caller - len(node.args)] + except IndexError: + self.fail( + "When defining macros or call blocks the " + 'special "caller" argument must be omitted ' + "or be given a default.", + node.lineno, + ) + else: + args.append(frame.symbols.declare_parameter("caller")) + macro_ref.accesses_caller = True + if "kwargs" in undeclared and "kwargs" not in skip_special_params: + args.append(frame.symbols.declare_parameter("kwargs")) + macro_ref.accesses_kwargs = True + if "varargs" in undeclared and "varargs" not in skip_special_params: + args.append(frame.symbols.declare_parameter("varargs")) + macro_ref.accesses_varargs = True + + # macros are delayed, they never require output checks + frame.require_output_check = False + frame.symbols.analyze_node(node) + self.writeline(f"{self.func('macro')}({', '.join(args)}):", node) + self.indent() + + self.buffer(frame) + self.enter_frame(frame) + + self.push_parameter_definitions(frame) + for idx, arg in enumerate(node.args): + ref = frame.symbols.ref(arg.name) + self.writeline(f"if {ref} is missing:") + self.indent() + try: + default = node.defaults[idx - len(node.args)] + except IndexError: + self.writeline( + f'{ref} = undefined("parameter {arg.name!r} was not provided",' + f" name={arg.name!r})" + ) + else: + self.writeline(f"{ref} = ") + self.visit(default, frame) + self.mark_parameter_stored(ref) + self.outdent() + self.pop_parameter_definitions() + + self.blockvisit(node.body, frame) + self.return_buffer_contents(frame, force_unescaped=True) + self.leave_frame(frame, with_python_scope=True) + self.outdent() + + return frame, macro_ref + + def macro_def(self, macro_ref: MacroRef, frame: Frame) -> None: + """Dump the macro definition for the def created by macro_body.""" + arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args) + name = getattr(macro_ref.node, "name", None) + if len(macro_ref.node.args) == 1: + arg_tuple += "," + self.write( + f"Macro(environment, macro, {name!r}, ({arg_tuple})," + f" {macro_ref.accesses_kwargs!r}, {macro_ref.accesses_varargs!r}," + f" {macro_ref.accesses_caller!r}, context.eval_ctx.autoescape)" + ) + + def position(self, node: nodes.Node) -> str: + """Return a human readable position for the node.""" + rv = f"line {node.lineno}" + if self.name is not None: + rv = f"{rv} in {self.name!r}" + return rv + + def dump_local_context(self, frame: Frame) -> str: + items_kv = ", ".join( + f"{name!r}: {target}" + for name, target in frame.symbols.dump_stores().items() + ) + return f"{{{items_kv}}}" + + def write_commons(self) -> None: + """Writes a common preamble that is used by root and block functions. + Primarily this sets up common local helpers and enforces a generator + through a dead branch. + """ + self.writeline("resolve = context.resolve_or_missing") + self.writeline("undefined = environment.undefined") + self.writeline("concat = environment.concat") + # always use the standard Undefined class for the implicit else of + # conditional expressions + self.writeline("cond_expr_undefined = Undefined") + self.writeline("if 0: yield None") + + def push_parameter_definitions(self, frame: Frame) -> None: + """Pushes all parameter targets from the given frame into a local + stack that permits tracking of yet to be assigned parameters. In + particular this enables the optimization from `visit_Name` to skip + undefined expressions for parameters in macros as macros can reference + otherwise unbound parameters. 
+ """ + self._param_def_block.append(frame.symbols.dump_param_targets()) + + def pop_parameter_definitions(self) -> None: + """Pops the current parameter definitions set.""" + self._param_def_block.pop() + + def mark_parameter_stored(self, target: str) -> None: + """Marks a parameter in the current parameter definitions as stored. + This will skip the enforced undefined checks. + """ + if self._param_def_block: + self._param_def_block[-1].discard(target) + + def push_context_reference(self, target: str) -> None: + self._context_reference_stack.append(target) + + def pop_context_reference(self) -> None: + self._context_reference_stack.pop() + + def get_context_ref(self) -> str: + return self._context_reference_stack[-1] + + def get_resolve_func(self) -> str: + target = self._context_reference_stack[-1] + if target == "context": + return "resolve" + return f"{target}.resolve" + + def derive_context(self, frame: Frame) -> str: + return f"{self.get_context_ref()}.derived({self.dump_local_context(frame)})" + + def parameter_is_undeclared(self, target: str) -> bool: + """Checks if a given target is an undeclared parameter.""" + if not self._param_def_block: + return False + return target in self._param_def_block[-1] + + def push_assign_tracking(self) -> None: + """Pushes a new layer for assignment tracking.""" + self._assign_stack.append(set()) + + def pop_assign_tracking(self, frame: Frame) -> None: + """Pops the topmost level for assignment tracking and updates the + context variables if necessary. + """ + vars = self._assign_stack.pop() + if ( + not frame.block_frame + and not frame.loop_frame + and not frame.toplevel + or not vars + ): + return + public_names = [x for x in vars if x[:1] != "_"] + if len(vars) == 1: + name = next(iter(vars)) + ref = frame.symbols.ref(name) + if frame.loop_frame: + self.writeline(f"_loop_vars[{name!r}] = {ref}") + return + if frame.block_frame: + self.writeline(f"_block_vars[{name!r}] = {ref}") + return + self.writeline(f"context.vars[{name!r}] = {ref}") + else: + if frame.loop_frame: + self.writeline("_loop_vars.update({") + elif frame.block_frame: + self.writeline("_block_vars.update({") + else: + self.writeline("context.vars.update({") + for idx, name in enumerate(sorted(vars)): + if idx: + self.write(", ") + ref = frame.symbols.ref(name) + self.write(f"{name!r}: {ref}") + self.write("})") + if not frame.block_frame and not frame.loop_frame and public_names: + if len(public_names) == 1: + self.writeline(f"context.exported_vars.add({public_names[0]!r})") + else: + names_str = ", ".join(map(repr, sorted(public_names))) + self.writeline(f"context.exported_vars.update(({names_str}))") + + # -- Statement Visitors + + def visit_Template( + self, node: nodes.Template, frame: t.Optional[Frame] = None + ) -> None: + assert frame is None, "no root frame allowed" + eval_ctx = EvalContext(self.environment, self.name) + + from .runtime import async_exported + from .runtime import exported + + if self.environment.is_async: + exported_names = sorted(exported + async_exported) + else: + exported_names = sorted(exported) + + self.writeline("from jinja2.runtime import " + ", ".join(exported_names)) + + # if we want a deferred initialization we cannot move the + # environment into a local name + envenv = "" if self.defer_init else ", environment=environment" + + # do we have an extends tag at all? If not, we can save some + # overhead by just not processing any inheritance code. 
+ have_extends = node.find(nodes.Extends) is not None + + # find all blocks + for block in node.find_all(nodes.Block): + if block.name in self.blocks: + self.fail(f"block {block.name!r} defined twice", block.lineno) + self.blocks[block.name] = block + + # find all imports and import them + for import_ in node.find_all(nodes.ImportedName): + if import_.importname not in self.import_aliases: + imp = import_.importname + self.import_aliases[imp] = alias = self.temporary_identifier() + if "." in imp: + module, obj = imp.rsplit(".", 1) + self.writeline(f"from {module} import {obj} as {alias}") + else: + self.writeline(f"import {imp} as {alias}") + + # add the load name + self.writeline(f"name = {self.name!r}") + + # generate the root render function. + self.writeline( + f"{self.func('root')}(context, missing=missing{envenv}):", extra=1 + ) + self.indent() + self.write_commons() + + # process the root + frame = Frame(eval_ctx) + if "self" in find_undeclared(node.body, ("self",)): + ref = frame.symbols.declare_parameter("self") + self.writeline(f"{ref} = TemplateReference(context)") + frame.symbols.analyze_node(node) + frame.toplevel = frame.rootlevel = True + frame.require_output_check = have_extends and not self.has_known_extends + if have_extends: + self.writeline("parent_template = None") + self.enter_frame(frame) + self.pull_dependencies(node.body) + self.blockvisit(node.body, frame) + self.leave_frame(frame, with_python_scope=True) + self.outdent() + + # make sure that the parent root is called. + if have_extends: + if not self.has_known_extends: + self.indent() + self.writeline("if parent_template is not None:") + self.indent() + if not self.environment.is_async: + self.writeline("yield from parent_template.root_render_func(context)") + else: + self.writeline("agen = parent_template.root_render_func(context)") + self.writeline("try:") + self.indent() + self.writeline("async for event in agen:") + self.indent() + self.writeline("yield event") + self.outdent() + self.outdent() + self.writeline("finally: await agen.aclose()") + self.outdent(1 + (not self.has_known_extends)) + + # at this point we now have the blocks collected and can visit them too. + for name, block in self.blocks.items(): + self.writeline( + f"{self.func('block_' + name)}(context, missing=missing{envenv}):", + block, + 1, + ) + self.indent() + self.write_commons() + # It's important that we do not make this frame a child of the + # toplevel template. This would cause a variety of + # interesting issues with identifier tracking. 
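+            # Illustrative note (added by the editor, not in upstream Jinja2):
+            # a template containing {% block body %}...{% endblock %} gets its
+            # own top-level block_body(context, ...) generator here; this is
+            # what makes block overriding and {{ super() }} possible.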
+ block_frame = Frame(eval_ctx) + block_frame.block_frame = True + undeclared = find_undeclared(block.body, ("self", "super")) + if "self" in undeclared: + ref = block_frame.symbols.declare_parameter("self") + self.writeline(f"{ref} = TemplateReference(context)") + if "super" in undeclared: + ref = block_frame.symbols.declare_parameter("super") + self.writeline(f"{ref} = context.super({name!r}, block_{name})") + block_frame.symbols.analyze_node(block) + block_frame.block = name + self.writeline("_block_vars = {}") + self.enter_frame(block_frame) + self.pull_dependencies(block.body) + self.blockvisit(block.body, block_frame) + self.leave_frame(block_frame, with_python_scope=True) + self.outdent() + + blocks_kv_str = ", ".join(f"{x!r}: block_{x}" for x in self.blocks) + self.writeline(f"blocks = {{{blocks_kv_str}}}", extra=1) + debug_kv_str = "&".join(f"{k}={v}" for k, v in self.debug_info) + self.writeline(f"debug_info = {debug_kv_str!r}") + + def visit_Block(self, node: nodes.Block, frame: Frame) -> None: + """Call a block and register it for the template.""" + level = 0 + if frame.toplevel: + # if we know that we are a child template, there is no need to + # check if we are one + if self.has_known_extends: + return + if self.extends_so_far > 0: + self.writeline("if parent_template is None:") + self.indent() + level += 1 + + if node.scoped: + context = self.derive_context(frame) + else: + context = self.get_context_ref() + + if node.required: + self.writeline(f"if len(context.blocks[{node.name!r}]) <= 1:", node) + self.indent() + self.writeline( + f'raise TemplateRuntimeError("Required block {node.name!r} not found")', + node, + ) + self.outdent() + + if not self.environment.is_async and frame.buffer is None: + self.writeline( + f"yield from context.blocks[{node.name!r}][0]({context})", node + ) + else: + self.writeline(f"gen = context.blocks[{node.name!r}][0]({context})") + self.writeline("try:") + self.indent() + self.writeline( + f"{self.choose_async()}for event in gen:", + node, + ) + self.indent() + self.simple_write("event", frame) + self.outdent() + self.outdent() + self.writeline( + f"finally: {self.choose_async('await gen.aclose()', 'gen.close()')}" + ) + + self.outdent(level) + + def visit_Extends(self, node: nodes.Extends, frame: Frame) -> None: + """Calls the extender.""" + if not frame.toplevel: + self.fail("cannot use extend from a non top-level scope", node.lineno) + + # if the number of extends statements in general is zero so + # far, we don't have to add a check if something extended + # the template before this one. + if self.extends_so_far > 0: + # if we have a known extends we just add a template runtime + # error into the generated code. We could catch that at compile + # time too, but i welcome it not to confuse users by throwing the + # same error at different times just "because we can". + if not self.has_known_extends: + self.writeline("if parent_template is not None:") + self.indent() + self.writeline('raise TemplateRuntimeError("extended multiple times")') + + # if we have a known extends already we don't need that code here + # as we know that the template execution will end here. 
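+            # Illustrative note (added by the editor, not in upstream Jinja2):
+            # this is the check that makes a template containing two top-level
+            # tags such as {% extends "a.html" %}{% extends "b.html" %} raise
+            # "extended multiple times" at runtime.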
+ if self.has_known_extends: + raise CompilerExit() + else: + self.outdent() + + self.writeline("parent_template = environment.get_template(", node) + self.visit(node.template, frame) + self.write(f", {self.name!r})") + self.writeline("for name, parent_block in parent_template.blocks.items():") + self.indent() + self.writeline("context.blocks.setdefault(name, []).append(parent_block)") + self.outdent() + + # if this extends statement was in the root level we can take + # advantage of that information and simplify the generated code + # in the top level from this point onwards + if frame.rootlevel: + self.has_known_extends = True + + # and now we have one more + self.extends_so_far += 1 + + def visit_Include(self, node: nodes.Include, frame: Frame) -> None: + """Handles includes.""" + if node.ignore_missing: + self.writeline("try:") + self.indent() + + func_name = "get_or_select_template" + if isinstance(node.template, nodes.Const): + if isinstance(node.template.value, str): + func_name = "get_template" + elif isinstance(node.template.value, (tuple, list)): + func_name = "select_template" + elif isinstance(node.template, (nodes.Tuple, nodes.List)): + func_name = "select_template" + + self.writeline(f"template = environment.{func_name}(", node) + self.visit(node.template, frame) + self.write(f", {self.name!r})") + if node.ignore_missing: + self.outdent() + self.writeline("except TemplateNotFound:") + self.indent() + self.writeline("pass") + self.outdent() + self.writeline("else:") + self.indent() + + def loop_body() -> None: + self.indent() + self.simple_write("event", frame) + self.outdent() + + if node.with_context: + self.writeline( + f"gen = template.root_render_func(" + "template.new_context(context.get_all(), True," + f" {self.dump_local_context(frame)}))" + ) + self.writeline("try:") + self.indent() + self.writeline(f"{self.choose_async()}for event in gen:") + loop_body() + self.outdent() + self.writeline( + f"finally: {self.choose_async('await gen.aclose()', 'gen.close()')}" + ) + elif self.environment.is_async: + self.writeline( + "for event in (await template._get_default_module_async())" + "._body_stream:" + ) + loop_body() + else: + self.writeline("yield from template._get_default_module()._body_stream") + + if node.ignore_missing: + self.outdent() + + def _import_common( + self, node: t.Union[nodes.Import, nodes.FromImport], frame: Frame + ) -> None: + self.write(f"{self.choose_async('await ')}environment.get_template(") + self.visit(node.template, frame) + self.write(f", {self.name!r}).") + + if node.with_context: + f_name = f"make_module{self.choose_async('_async')}" + self.write( + f"{f_name}(context.get_all(), True, {self.dump_local_context(frame)})" + ) + else: + self.write(f"_get_default_module{self.choose_async('_async')}(context)") + + def visit_Import(self, node: nodes.Import, frame: Frame) -> None: + """Visit regular imports.""" + self.writeline(f"{frame.symbols.ref(node.target)} = ", node) + if frame.toplevel: + self.write(f"context.vars[{node.target!r}] = ") + + self._import_common(node, frame) + + if frame.toplevel and not node.target.startswith("_"): + self.writeline(f"context.exported_vars.discard({node.target!r})") + + def visit_FromImport(self, node: nodes.FromImport, frame: Frame) -> None: + """Visit named imports.""" + self.newline(node) + self.write("included_template = ") + self._import_common(node, frame) + var_names = [] + discarded_names = [] + for name in node.names: + if isinstance(name, tuple): + name, alias = name + else: + alias = name + 
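+            # Hypothetical example (added by the editor): for
+            # {% from "forms.html" import input as field %} this loop runs once
+            # with name="input" and alias="field", binding the imported macro
+            # or an undefined value if the module doesn't export that name.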
self.writeline(
+                f"{frame.symbols.ref(alias)} ="
+                f" getattr(included_template, {name!r}, missing)"
+            )
+            self.writeline(f"if {frame.symbols.ref(alias)} is missing:")
+            self.indent()
+            # The position will contain the template name, and will be formatted
+            # into a string that will be compiled into an f-string. Curly braces
+            # in the name must be replaced with escapes so that they will not be
+            # executed as part of the f-string.
+            position = self.position(node).replace("{", "{{").replace("}", "}}")
+            message = (
+                "the template {included_template.__name__!r}"
+                f" (imported on {position})"
+                f" does not export the requested name {name!r}"
+            )
+            self.writeline(
+                f"{frame.symbols.ref(alias)} = undefined(f{message!r}, name={name!r})"
+            )
+            self.outdent()
+            if frame.toplevel:
+                var_names.append(alias)
+                if not alias.startswith("_"):
+                    discarded_names.append(alias)
+
+        if var_names:
+            if len(var_names) == 1:
+                name = var_names[0]
+                self.writeline(f"context.vars[{name!r}] = {frame.symbols.ref(name)}")
+            else:
+                names_kv = ", ".join(
+                    f"{name!r}: {frame.symbols.ref(name)}" for name in var_names
+                )
+                self.writeline(f"context.vars.update({{{names_kv}}})")
+        if discarded_names:
+            if len(discarded_names) == 1:
+                self.writeline(f"context.exported_vars.discard({discarded_names[0]!r})")
+            else:
+                names_str = ", ".join(map(repr, discarded_names))
+                self.writeline(
+                    f"context.exported_vars.difference_update(({names_str}))"
+                )
+
+    def visit_For(self, node: nodes.For, frame: Frame) -> None:
+        loop_frame = frame.inner()
+        loop_frame.loop_frame = True
+        test_frame = frame.inner()
+        else_frame = frame.inner()
+
+        # try to figure out if we have an extended loop. An extended loop
+        # is necessary if the loop is in recursive mode, if the special loop
+        # variable is accessed in the body, or if the body is a scoped block.
+        extended_loop = (
+            node.recursive
+            or "loop"
+            in find_undeclared(node.iter_child_nodes(only=("body",)), ("loop",))
+            or any(block.scoped for block in node.find_all(nodes.Block))
+        )
+
+        loop_ref = None
+        if extended_loop:
+            loop_ref = loop_frame.symbols.declare_parameter("loop")
+
+        loop_frame.symbols.analyze_node(node, for_branch="body")
+        if node.else_:
+            else_frame.symbols.analyze_node(node, for_branch="else")
+
+        if node.test:
+            loop_filter_func = self.temporary_identifier()
+            test_frame.symbols.analyze_node(node, for_branch="test")
+            self.writeline(f"{self.func(loop_filter_func)}(fiter):", node.test)
+            self.indent()
+            self.enter_frame(test_frame)
+            self.writeline(self.choose_async("async for ", "for "))
+            self.visit(node.target, loop_frame)
+            self.write(" in ")
+            self.write(self.choose_async("auto_aiter(fiter)", "fiter"))
+            self.write(":")
+            self.indent()
+            self.writeline("if ", node.test)
+            self.visit(node.test, test_frame)
+            self.write(":")
+            self.indent()
+            self.writeline("yield ")
+            self.visit(node.target, loop_frame)
+            self.outdent(3)
+            self.leave_frame(test_frame, with_python_scope=True)
+
+        # if we don't have a recursive loop we have to find the shadowed
+        # variables at that point. Because loops can be nested, and the loop
+        # variable is a special one, we have to enforce aliasing for it.
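+        # Illustrative note (added by the editor, not in upstream Jinja2): a
+        # template such as {% for item in tree recursive %}...
+        # {{ loop(item.children) }}...{% endfor %} is compiled into the
+        # loop(reciter, ...) helper defined below; calling loop(...) in the
+        # body re-enters it one level deeper.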
+ if node.recursive: + self.writeline( + f"{self.func('loop')}(reciter, loop_render_func, depth=0):", node + ) + self.indent() + self.buffer(loop_frame) + + # Use the same buffer for the else frame + else_frame.buffer = loop_frame.buffer + + # make sure the loop variable is a special one and raise a template + # assertion error if a loop tries to write to loop + if extended_loop: + self.writeline(f"{loop_ref} = missing") + + for name in node.find_all(nodes.Name): + if name.ctx == "store" and name.name == "loop": + self.fail( + "Can't assign to special loop variable in for-loop target", + name.lineno, + ) + + if node.else_: + iteration_indicator = self.temporary_identifier() + self.writeline(f"{iteration_indicator} = 1") + + self.writeline(self.choose_async("async for ", "for "), node) + self.visit(node.target, loop_frame) + if extended_loop: + self.write(f", {loop_ref} in {self.choose_async('Async')}LoopContext(") + else: + self.write(" in ") + + if node.test: + self.write(f"{loop_filter_func}(") + if node.recursive: + self.write("reciter") + else: + if self.environment.is_async and not extended_loop: + self.write("auto_aiter(") + self.visit(node.iter, frame) + if self.environment.is_async and not extended_loop: + self.write(")") + if node.test: + self.write(")") + + if node.recursive: + self.write(", undefined, loop_render_func, depth):") + else: + self.write(", undefined):" if extended_loop else ":") + + self.indent() + self.enter_frame(loop_frame) + + self.writeline("_loop_vars = {}") + self.blockvisit(node.body, loop_frame) + if node.else_: + self.writeline(f"{iteration_indicator} = 0") + self.outdent() + self.leave_frame( + loop_frame, with_python_scope=node.recursive and not node.else_ + ) + + if node.else_: + self.writeline(f"if {iteration_indicator}:") + self.indent() + self.enter_frame(else_frame) + self.blockvisit(node.else_, else_frame) + self.leave_frame(else_frame) + self.outdent() + + # if the node was recursive we have to return the buffer contents + # and start the iteration code + if node.recursive: + self.return_buffer_contents(loop_frame) + self.outdent() + self.start_write(frame, node) + self.write(f"{self.choose_async('await ')}loop(") + if self.environment.is_async: + self.write("auto_aiter(") + self.visit(node.iter, frame) + if self.environment.is_async: + self.write(")") + self.write(", loop)") + self.end_write(frame) + + # at the end of the iteration, clear any assignments made in the + # loop from the top level + if self._assign_stack: + self._assign_stack[-1].difference_update(loop_frame.symbols.stores) + + def visit_If(self, node: nodes.If, frame: Frame) -> None: + if_frame = frame.soft() + self.writeline("if ", node) + self.visit(node.test, if_frame) + self.write(":") + self.indent() + self.blockvisit(node.body, if_frame) + self.outdent() + for elif_ in node.elif_: + self.writeline("elif ", elif_) + self.visit(elif_.test, if_frame) + self.write(":") + self.indent() + self.blockvisit(elif_.body, if_frame) + self.outdent() + if node.else_: + self.writeline("else:") + self.indent() + self.blockvisit(node.else_, if_frame) + self.outdent() + + def visit_Macro(self, node: nodes.Macro, frame: Frame) -> None: + macro_frame, macro_ref = self.macro_body(node, frame) + self.newline() + if frame.toplevel: + if not node.name.startswith("_"): + self.write(f"context.exported_vars.add({node.name!r})") + self.writeline(f"context.vars[{node.name!r}] = ") + self.write(f"{frame.symbols.ref(node.name)} = ") + self.macro_def(macro_ref, macro_frame) + + def visit_CallBlock(self, 
node: nodes.CallBlock, frame: Frame) -> None: + call_frame, macro_ref = self.macro_body(node, frame) + self.writeline("caller = ") + self.macro_def(macro_ref, call_frame) + self.start_write(frame, node) + self.visit_Call(node.call, frame, forward_caller=True) + self.end_write(frame) + + def visit_FilterBlock(self, node: nodes.FilterBlock, frame: Frame) -> None: + filter_frame = frame.inner() + filter_frame.symbols.analyze_node(node) + self.enter_frame(filter_frame) + self.buffer(filter_frame) + self.blockvisit(node.body, filter_frame) + self.start_write(frame, node) + self.visit_Filter(node.filter, filter_frame) + self.end_write(frame) + self.leave_frame(filter_frame) + + def visit_With(self, node: nodes.With, frame: Frame) -> None: + with_frame = frame.inner() + with_frame.symbols.analyze_node(node) + self.enter_frame(with_frame) + for target, expr in zip(node.targets, node.values): + self.newline() + self.visit(target, with_frame) + self.write(" = ") + self.visit(expr, frame) + self.blockvisit(node.body, with_frame) + self.leave_frame(with_frame) + + def visit_ExprStmt(self, node: nodes.ExprStmt, frame: Frame) -> None: + self.newline(node) + self.visit(node.node, frame) + + class _FinalizeInfo(t.NamedTuple): + const: t.Optional[t.Callable[..., str]] + src: t.Optional[str] + + @staticmethod + def _default_finalize(value: t.Any) -> t.Any: + """The default finalize function if the environment isn't + configured with one. Or, if the environment has one, this is + called on that function's output for constants. + """ + return str(value) + + _finalize: t.Optional[_FinalizeInfo] = None + + def _make_finalize(self) -> _FinalizeInfo: + """Build the finalize function to be used on constants and at + runtime. Cached so it's only created once for all output nodes. + + Returns a ``namedtuple`` with the following attributes: + + ``const`` + A function to finalize constant data at compile time. + + ``src`` + Source code to output around nodes to be evaluated at + runtime. + """ + if self._finalize is not None: + return self._finalize + + finalize: t.Optional[t.Callable[..., t.Any]] + finalize = default = self._default_finalize + src = None + + if self.environment.finalize: + src = "environment.finalize(" + env_finalize = self.environment.finalize + pass_arg = { + _PassArg.context: "context", + _PassArg.eval_context: "context.eval_ctx", + _PassArg.environment: "environment", + }.get( + _PassArg.from_obj(env_finalize) # type: ignore + ) + finalize = None + + if pass_arg is None: + + def finalize(value: t.Any) -> t.Any: # noqa: F811 + return default(env_finalize(value)) + + else: + src = f"{src}{pass_arg}, " + + if pass_arg == "environment": + + def finalize(value: t.Any) -> t.Any: # noqa: F811 + return default(env_finalize(self.environment, value)) + + self._finalize = self._FinalizeInfo(finalize, src) + return self._finalize + + def _output_const_repr(self, group: t.Iterable[t.Any]) -> str: + """Given a group of constant values converted from ``Output`` + child nodes, produce a string to write to the template module + source. + """ + return repr(concat(group)) + + def _output_child_to_const( + self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo + ) -> str: + """Try to optimize a child of an ``Output`` node by trying to + convert it to constant, finalized data at compile time. + + If :exc:`Impossible` is raised, the node is not constant and + will be evaluated at runtime. Any other exception will also be + evaluated at runtime for easier debugging. 
+ """ + const = node.as_const(frame.eval_ctx) + + if frame.eval_ctx.autoescape: + const = escape(const) + + # Template data doesn't go through finalize. + if isinstance(node, nodes.TemplateData): + return str(const) + + return finalize.const(const) # type: ignore + + def _output_child_pre( + self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo + ) -> None: + """Output extra source code before visiting a child of an + ``Output`` node. + """ + if frame.eval_ctx.volatile: + self.write("(escape if context.eval_ctx.autoescape else str)(") + elif frame.eval_ctx.autoescape: + self.write("escape(") + else: + self.write("str(") + + if finalize.src is not None: + self.write(finalize.src) + + def _output_child_post( + self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo + ) -> None: + """Output extra source code after visiting a child of an + ``Output`` node. + """ + self.write(")") + + if finalize.src is not None: + self.write(")") + + def visit_Output(self, node: nodes.Output, frame: Frame) -> None: + # If an extends is active, don't render outside a block. + if frame.require_output_check: + # A top-level extends is known to exist at compile time. + if self.has_known_extends: + return + + self.writeline("if parent_template is None:") + self.indent() + + finalize = self._make_finalize() + body: t.List[t.Union[t.List[t.Any], nodes.Expr]] = [] + + # Evaluate constants at compile time if possible. Each item in + # body will be either a list of static data or a node to be + # evaluated at runtime. + for child in node.nodes: + try: + if not ( + # If the finalize function requires runtime context, + # constants can't be evaluated at compile time. + finalize.const + # Unless it's basic template data that won't be + # finalized anyway. + or isinstance(child, nodes.TemplateData) + ): + raise nodes.Impossible() + + const = self._output_child_to_const(child, frame, finalize) + except (nodes.Impossible, Exception): + # The node was not constant and needs to be evaluated at + # runtime. Or another error was raised, which is easier + # to debug at runtime. + body.append(child) + continue + + if body and isinstance(body[-1], list): + body[-1].append(const) + else: + body.append([const]) + + if frame.buffer is not None: + if len(body) == 1: + self.writeline(f"{frame.buffer}.append(") + else: + self.writeline(f"{frame.buffer}.extend((") + + self.indent() + + for item in body: + if isinstance(item, list): + # A group of constant data to join and output. + val = self._output_const_repr(item) + + if frame.buffer is None: + self.writeline("yield " + val) + else: + self.writeline(val + ",") + else: + if frame.buffer is None: + self.writeline("yield ", item) + else: + self.newline(item) + + # A node to be evaluated at runtime. + self._output_child_pre(item, frame, finalize) + self.visit(item, frame) + self._output_child_post(item, frame, finalize) + + if frame.buffer is not None: + self.write(",") + + if frame.buffer is not None: + self.outdent() + self.writeline(")" if len(body) == 1 else "))") + + if frame.require_output_check: + self.outdent() + + def visit_Assign(self, node: nodes.Assign, frame: Frame) -> None: + self.push_assign_tracking() + + # ``a.b`` is allowed for assignment, and is parsed as an NSRef. However, + # it is only valid if it references a Namespace object. Emit a check for + # that for each ref here, before assignment code is emitted. This can't + # be done in visit_NSRef as the ref could be in the middle of a tuple. 
+ seen_refs: t.Set[str] = set() + + for nsref in node.find_all(nodes.NSRef): + if nsref.name in seen_refs: + # Only emit the check for each reference once, in case the same + # ref is used multiple times in a tuple, `ns.a, ns.b = c, d`. + continue + + seen_refs.add(nsref.name) + ref = frame.symbols.ref(nsref.name) + self.writeline(f"if not isinstance({ref}, Namespace):") + self.indent() + self.writeline( + "raise TemplateRuntimeError" + '("cannot assign attribute on non-namespace object")' + ) + self.outdent() + + self.newline(node) + self.visit(node.target, frame) + self.write(" = ") + self.visit(node.node, frame) + self.pop_assign_tracking(frame) + + def visit_AssignBlock(self, node: nodes.AssignBlock, frame: Frame) -> None: + self.push_assign_tracking() + block_frame = frame.inner() + # This is a special case. Since a set block always captures we + # will disable output checks. This way one can use set blocks + # toplevel even in extended templates. + block_frame.require_output_check = False + block_frame.symbols.analyze_node(node) + self.enter_frame(block_frame) + self.buffer(block_frame) + self.blockvisit(node.body, block_frame) + self.newline(node) + self.visit(node.target, frame) + self.write(" = (Markup if context.eval_ctx.autoescape else identity)(") + if node.filter is not None: + self.visit_Filter(node.filter, block_frame) + else: + self.write(f"concat({block_frame.buffer})") + self.write(")") + self.pop_assign_tracking(frame) + self.leave_frame(block_frame) + + # -- Expression Visitors + + def visit_Name(self, node: nodes.Name, frame: Frame) -> None: + if node.ctx == "store" and ( + frame.toplevel or frame.loop_frame or frame.block_frame + ): + if self._assign_stack: + self._assign_stack[-1].add(node.name) + ref = frame.symbols.ref(node.name) + + # If we are looking up a variable we might have to deal with the + # case where it's undefined. We can skip that case if the load + # instruction indicates a parameter which are always defined. + if node.ctx == "load": + load = frame.symbols.find_load(ref) + if not ( + load is not None + and load[0] == VAR_LOAD_PARAMETER + and not self.parameter_is_undeclared(ref) + ): + self.write( + f"(undefined(name={node.name!r}) if {ref} is missing else {ref})" + ) + return + + self.write(ref) + + def visit_NSRef(self, node: nodes.NSRef, frame: Frame) -> None: + # NSRef is a dotted assignment target a.b=c, but uses a[b]=c internally. + # visit_Assign emits code to validate that each ref is to a Namespace + # object only. That can't be emitted here as the ref could be in the + # middle of a tuple assignment. 
+ ref = frame.symbols.ref(node.name) + self.writeline(f"{ref}[{node.attr!r}]") + + def visit_Const(self, node: nodes.Const, frame: Frame) -> None: + val = node.as_const(frame.eval_ctx) + if isinstance(val, float): + self.write(str(val)) + else: + self.write(repr(val)) + + def visit_TemplateData(self, node: nodes.TemplateData, frame: Frame) -> None: + try: + self.write(repr(node.as_const(frame.eval_ctx))) + except nodes.Impossible: + self.write( + f"(Markup if context.eval_ctx.autoescape else identity)({node.data!r})" + ) + + def visit_Tuple(self, node: nodes.Tuple, frame: Frame) -> None: + self.write("(") + idx = -1 + for idx, item in enumerate(node.items): + if idx: + self.write(", ") + self.visit(item, frame) + self.write(",)" if idx == 0 else ")") + + def visit_List(self, node: nodes.List, frame: Frame) -> None: + self.write("[") + for idx, item in enumerate(node.items): + if idx: + self.write(", ") + self.visit(item, frame) + self.write("]") + + def visit_Dict(self, node: nodes.Dict, frame: Frame) -> None: + self.write("{") + for idx, item in enumerate(node.items): + if idx: + self.write(", ") + self.visit(item.key, frame) + self.write(": ") + self.visit(item.value, frame) + self.write("}") + + visit_Add = _make_binop("+") + visit_Sub = _make_binop("-") + visit_Mul = _make_binop("*") + visit_Div = _make_binop("/") + visit_FloorDiv = _make_binop("//") + visit_Pow = _make_binop("**") + visit_Mod = _make_binop("%") + visit_And = _make_binop("and") + visit_Or = _make_binop("or") + visit_Pos = _make_unop("+") + visit_Neg = _make_unop("-") + visit_Not = _make_unop("not ") + + @optimizeconst + def visit_Concat(self, node: nodes.Concat, frame: Frame) -> None: + if frame.eval_ctx.volatile: + func_name = "(markup_join if context.eval_ctx.volatile else str_join)" + elif frame.eval_ctx.autoescape: + func_name = "markup_join" + else: + func_name = "str_join" + self.write(f"{func_name}((") + for arg in node.nodes: + self.visit(arg, frame) + self.write(", ") + self.write("))") + + @optimizeconst + def visit_Compare(self, node: nodes.Compare, frame: Frame) -> None: + self.write("(") + self.visit(node.expr, frame) + for op in node.ops: + self.visit(op, frame) + self.write(")") + + def visit_Operand(self, node: nodes.Operand, frame: Frame) -> None: + self.write(f" {operators[node.op]} ") + self.visit(node.expr, frame) + + @optimizeconst + def visit_Getattr(self, node: nodes.Getattr, frame: Frame) -> None: + if self.environment.is_async: + self.write("(await auto_await(") + + self.write("environment.getattr(") + self.visit(node.node, frame) + self.write(f", {node.attr!r})") + + if self.environment.is_async: + self.write("))") + + @optimizeconst + def visit_Getitem(self, node: nodes.Getitem, frame: Frame) -> None: + # slices bypass the environment getitem method. 
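+        # Illustrative note (added by the editor, not in upstream Jinja2):
+        # {{ x[1:3] }} therefore compiles to a plain Python slice, while
+        # {{ x[key] }} goes through environment.getitem so that sandboxing
+        # and Undefined handling still apply.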
+ if isinstance(node.arg, nodes.Slice): + self.visit(node.node, frame) + self.write("[") + self.visit(node.arg, frame) + self.write("]") + else: + if self.environment.is_async: + self.write("(await auto_await(") + + self.write("environment.getitem(") + self.visit(node.node, frame) + self.write(", ") + self.visit(node.arg, frame) + self.write(")") + + if self.environment.is_async: + self.write("))") + + def visit_Slice(self, node: nodes.Slice, frame: Frame) -> None: + if node.start is not None: + self.visit(node.start, frame) + self.write(":") + if node.stop is not None: + self.visit(node.stop, frame) + if node.step is not None: + self.write(":") + self.visit(node.step, frame) + + @contextmanager + def _filter_test_common( + self, node: t.Union[nodes.Filter, nodes.Test], frame: Frame, is_filter: bool + ) -> t.Iterator[None]: + if self.environment.is_async: + self.write("(await auto_await(") + + if is_filter: + self.write(f"{self.filters[node.name]}(") + func = self.environment.filters.get(node.name) + else: + self.write(f"{self.tests[node.name]}(") + func = self.environment.tests.get(node.name) + + # When inside an If or CondExpr frame, allow the filter to be + # undefined at compile time and only raise an error if it's + # actually called at runtime. See pull_dependencies. + if func is None and not frame.soft_frame: + type_name = "filter" if is_filter else "test" + self.fail(f"No {type_name} named {node.name!r}.", node.lineno) + + pass_arg = { + _PassArg.context: "context", + _PassArg.eval_context: "context.eval_ctx", + _PassArg.environment: "environment", + }.get( + _PassArg.from_obj(func) # type: ignore + ) + + if pass_arg is not None: + self.write(f"{pass_arg}, ") + + # Back to the visitor function to handle visiting the target of + # the filter or test. 
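+        # Illustrative note (added by the editor, not in upstream Jinja2): for
+        # {{ name|upper }} this produces roughly t_1(name), where t_1 was bound
+        # to environment.filters["upper"] by pull_dependencies above.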
+ yield + + self.signature(node, frame) + self.write(")") + + if self.environment.is_async: + self.write("))") + + @optimizeconst + def visit_Filter(self, node: nodes.Filter, frame: Frame) -> None: + with self._filter_test_common(node, frame, True): + # if the filter node is None we are inside a filter block + # and want to write to the current buffer + if node.node is not None: + self.visit(node.node, frame) + elif frame.eval_ctx.volatile: + self.write( + f"(Markup(concat({frame.buffer}))" + f" if context.eval_ctx.autoescape else concat({frame.buffer}))" + ) + elif frame.eval_ctx.autoescape: + self.write(f"Markup(concat({frame.buffer}))") + else: + self.write(f"concat({frame.buffer})") + + @optimizeconst + def visit_Test(self, node: nodes.Test, frame: Frame) -> None: + with self._filter_test_common(node, frame, False): + self.visit(node.node, frame) + + @optimizeconst + def visit_CondExpr(self, node: nodes.CondExpr, frame: Frame) -> None: + frame = frame.soft() + + def write_expr2() -> None: + if node.expr2 is not None: + self.visit(node.expr2, frame) + return + + self.write( + f'cond_expr_undefined("the inline if-expression on' + f" {self.position(node)} evaluated to false and no else" + f' section was defined.")' + ) + + self.write("(") + self.visit(node.expr1, frame) + self.write(" if ") + self.visit(node.test, frame) + self.write(" else ") + write_expr2() + self.write(")") + + @optimizeconst + def visit_Call( + self, node: nodes.Call, frame: Frame, forward_caller: bool = False + ) -> None: + if self.environment.is_async: + self.write("(await auto_await(") + if self.environment.sandboxed: + self.write("environment.call(context, ") + else: + self.write("context.call(") + self.visit(node.node, frame) + extra_kwargs = {"caller": "caller"} if forward_caller else None + loop_kwargs = {"_loop_vars": "_loop_vars"} if frame.loop_frame else {} + block_kwargs = {"_block_vars": "_block_vars"} if frame.block_frame else {} + if extra_kwargs: + extra_kwargs.update(loop_kwargs, **block_kwargs) + elif loop_kwargs or block_kwargs: + extra_kwargs = dict(loop_kwargs, **block_kwargs) + self.signature(node, frame, extra_kwargs) + self.write(")") + if self.environment.is_async: + self.write("))") + + def visit_Keyword(self, node: nodes.Keyword, frame: Frame) -> None: + self.write(node.key + "=") + self.visit(node.value, frame) + + # -- Unused nodes for extensions + + def visit_MarkSafe(self, node: nodes.MarkSafe, frame: Frame) -> None: + self.write("Markup(") + self.visit(node.expr, frame) + self.write(")") + + def visit_MarkSafeIfAutoescape( + self, node: nodes.MarkSafeIfAutoescape, frame: Frame + ) -> None: + self.write("(Markup if context.eval_ctx.autoescape else identity)(") + self.visit(node.expr, frame) + self.write(")") + + def visit_EnvironmentAttribute( + self, node: nodes.EnvironmentAttribute, frame: Frame + ) -> None: + self.write("environment." 
+ node.name) + + def visit_ExtensionAttribute( + self, node: nodes.ExtensionAttribute, frame: Frame + ) -> None: + self.write(f"environment.extensions[{node.identifier!r}].{node.name}") + + def visit_ImportedName(self, node: nodes.ImportedName, frame: Frame) -> None: + self.write(self.import_aliases[node.importname]) + + def visit_InternalName(self, node: nodes.InternalName, frame: Frame) -> None: + self.write(node.name) + + def visit_ContextReference( + self, node: nodes.ContextReference, frame: Frame + ) -> None: + self.write("context") + + def visit_DerivedContextReference( + self, node: nodes.DerivedContextReference, frame: Frame + ) -> None: + self.write(self.derive_context(frame)) + + def visit_Continue(self, node: nodes.Continue, frame: Frame) -> None: + self.writeline("continue", node) + + def visit_Break(self, node: nodes.Break, frame: Frame) -> None: + self.writeline("break", node) + + def visit_Scope(self, node: nodes.Scope, frame: Frame) -> None: + scope_frame = frame.inner() + scope_frame.symbols.analyze_node(node) + self.enter_frame(scope_frame) + self.blockvisit(node.body, scope_frame) + self.leave_frame(scope_frame) + + def visit_OverlayScope(self, node: nodes.OverlayScope, frame: Frame) -> None: + ctx = self.temporary_identifier() + self.writeline(f"{ctx} = {self.derive_context(frame)}") + self.writeline(f"{ctx}.vars = ") + self.visit(node.context, frame) + self.push_context_reference(ctx) + + scope_frame = frame.inner(isolated=True) + scope_frame.symbols.analyze_node(node) + self.enter_frame(scope_frame) + self.blockvisit(node.body, scope_frame) + self.leave_frame(scope_frame) + self.pop_context_reference() + + def visit_EvalContextModifier( + self, node: nodes.EvalContextModifier, frame: Frame + ) -> None: + for keyword in node.options: + self.writeline(f"context.eval_ctx.{keyword.key} = ") + self.visit(keyword.value, frame) + try: + val = keyword.value.as_const(frame.eval_ctx) + except nodes.Impossible: + frame.eval_ctx.volatile = True + else: + setattr(frame.eval_ctx, keyword.key, val) + + def visit_ScopedEvalContextModifier( + self, node: nodes.ScopedEvalContextModifier, frame: Frame + ) -> None: + old_ctx_name = self.temporary_identifier() + saved_ctx = frame.eval_ctx.save() + self.writeline(f"{old_ctx_name} = context.eval_ctx.save()") + self.visit_EvalContextModifier(node, frame) + for child in node.body: + self.visit(child, frame) + frame.eval_ctx.revert(saved_ctx) + self.writeline(f"context.eval_ctx.revert({old_ctx_name})") diff --git a/venv/lib/python3.11/site-packages/jinja2/constants.py b/venv/lib/python3.11/site-packages/jinja2/constants.py new file mode 100644 index 0000000..41a1c23 --- /dev/null +++ b/venv/lib/python3.11/site-packages/jinja2/constants.py @@ -0,0 +1,20 @@ +#: list of lorem ipsum words used by the lipsum() helper function +LOREM_IPSUM_WORDS = """\ +a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at +auctor augue bibendum blandit class commodo condimentum congue consectetuer +consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus +diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend +elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames +faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac +hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum +justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem +luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie +mollis 
montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non +nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque +penatibus per pharetra phasellus placerat platea porta porttitor posuere +potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus +ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit +sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor +tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices +ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus +viverra volutpat vulputate""" diff --git a/venv/lib/python3.11/site-packages/jinja2/debug.py b/venv/lib/python3.11/site-packages/jinja2/debug.py new file mode 100644 index 0000000..eeeeee7 --- /dev/null +++ b/venv/lib/python3.11/site-packages/jinja2/debug.py @@ -0,0 +1,191 @@ +import sys +import typing as t +from types import CodeType +from types import TracebackType + +from .exceptions import TemplateSyntaxError +from .utils import internal_code +from .utils import missing + +if t.TYPE_CHECKING: + from .runtime import Context + + +def rewrite_traceback_stack(source: t.Optional[str] = None) -> BaseException: + """Rewrite the current exception to replace any tracebacks from + within compiled template code with tracebacks that look like they + came from the template source. + + This must be called within an ``except`` block. + + :param source: For ``TemplateSyntaxError``, the original source if + known. + :return: The original exception with the rewritten traceback. + """ + _, exc_value, tb = sys.exc_info() + exc_value = t.cast(BaseException, exc_value) + tb = t.cast(TracebackType, tb) + + if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated: + exc_value.translated = True + exc_value.source = source + # Remove the old traceback, otherwise the frames from the + # compiler still show up. + exc_value.with_traceback(None) + # Outside of runtime, so the frame isn't executing template + # code, but it still needs to point at the template. + tb = fake_traceback( + exc_value, None, exc_value.filename or "", exc_value.lineno + ) + else: + # Skip the frame for the render function. + tb = tb.tb_next + + stack = [] + + # Build the stack of traceback object, replacing any in template + # code with the source file and line information. + while tb is not None: + # Skip frames decorated with @internalcode. These are internal + # calls that aren't useful in template debugging output. + if tb.tb_frame.f_code in internal_code: + tb = tb.tb_next + continue + + template = tb.tb_frame.f_globals.get("__jinja_template__") + + if template is not None: + lineno = template.get_corresponding_lineno(tb.tb_lineno) + fake_tb = fake_traceback(exc_value, tb, template.filename, lineno) + stack.append(fake_tb) + else: + stack.append(tb) + + tb = tb.tb_next + + tb_next = None + + # Assign tb_next in reverse to avoid circular references. + for tb in reversed(stack): + tb.tb_next = tb_next + tb_next = tb + + return exc_value.with_traceback(tb_next) + + +def fake_traceback( # type: ignore + exc_value: BaseException, tb: t.Optional[TracebackType], filename: str, lineno: int +) -> TracebackType: + """Produce a new traceback object that looks like it came from the + template source instead of the compiled code. The filename, line + number, and location name will point to the template, and the local + variables will be the current template context. 
+ + :param exc_value: The original exception to be re-raised to create + the new traceback. + :param tb: The original traceback to get the local variables and + code info from. + :param filename: The template filename. + :param lineno: The line number in the template source. + """ + if tb is not None: + # Replace the real locals with the context that would be + # available at that point in the template. + locals = get_template_locals(tb.tb_frame.f_locals) + locals.pop("__jinja_exception__", None) + else: + locals = {} + + globals = { + "__name__": filename, + "__file__": filename, + "__jinja_exception__": exc_value, + } + # Raise an exception at the correct line number. + code: CodeType = compile( + "\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec" + ) + + # Build a new code object that points to the template file and + # replaces the location with a block name. + location = "template" + + if tb is not None: + function = tb.tb_frame.f_code.co_name + + if function == "root": + location = "top-level template code" + elif function.startswith("block_"): + location = f"block {function[6:]!r}" + + if sys.version_info >= (3, 8): + code = code.replace(co_name=location) + else: + code = CodeType( + code.co_argcount, + code.co_kwonlyargcount, + code.co_nlocals, + code.co_stacksize, + code.co_flags, + code.co_code, + code.co_consts, + code.co_names, + code.co_varnames, + code.co_filename, + location, + code.co_firstlineno, + code.co_lnotab, + code.co_freevars, + code.co_cellvars, + ) + + # Execute the new code, which is guaranteed to raise, and return + # the new traceback without this frame. + try: + exec(code, globals, locals) + except BaseException: + return sys.exc_info()[2].tb_next # type: ignore + + +def get_template_locals(real_locals: t.Mapping[str, t.Any]) -> t.Dict[str, t.Any]: + """Based on the runtime locals, get the context that would be + available at that point in the template. + """ + # Start with the current template context. + ctx: t.Optional[Context] = real_locals.get("context") + + if ctx is not None: + data: t.Dict[str, t.Any] = ctx.get_all().copy() + else: + data = {} + + # Might be in a derived context that only sets local variables + # rather than pushing a context. Local variables follow the scheme + # l_depth_name. Find the highest-depth local that has a value for + # each name. + local_overrides: t.Dict[str, t.Tuple[int, t.Any]] = {} + + for name, value in real_locals.items(): + if not name.startswith("l_") or value is missing: + # Not a template variable, or no longer relevant. + continue + + try: + _, depth_str, name = name.split("_", 2) + depth = int(depth_str) + except ValueError: + continue + + cur_depth = local_overrides.get(name, (-1,))[0] + + if cur_depth < depth: + local_overrides[name] = (depth, value) + + # Modify the context with any derived context. 
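+    # Hypothetical example (added by the editor): given real_locals of
+    # {"l_0_user": "a", "l_1_user": "b"}, the loop above records depth 1 for
+    # "user", so the deeper value "b" wins when the data is updated below.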
+ for name, (_, value) in local_overrides.items(): + if value is missing: + data.pop(name, None) + else: + data[name] = value + + return data diff --git a/venv/lib/python3.11/site-packages/jinja2/defaults.py b/venv/lib/python3.11/site-packages/jinja2/defaults.py new file mode 100644 index 0000000..638cad3 --- /dev/null +++ b/venv/lib/python3.11/site-packages/jinja2/defaults.py @@ -0,0 +1,48 @@ +import typing as t + +from .filters import FILTERS as DEFAULT_FILTERS # noqa: F401 +from .tests import TESTS as DEFAULT_TESTS # noqa: F401 +from .utils import Cycler +from .utils import generate_lorem_ipsum +from .utils import Joiner +from .utils import Namespace + +if t.TYPE_CHECKING: + import typing_extensions as te + +# defaults for the parser / lexer +BLOCK_START_STRING = "{%" +BLOCK_END_STRING = "%}" +VARIABLE_START_STRING = "{{" +VARIABLE_END_STRING = "}}" +COMMENT_START_STRING = "{#" +COMMENT_END_STRING = "#}" +LINE_STATEMENT_PREFIX: t.Optional[str] = None +LINE_COMMENT_PREFIX: t.Optional[str] = None +TRIM_BLOCKS = False +LSTRIP_BLOCKS = False +NEWLINE_SEQUENCE: "te.Literal['\\n', '\\r\\n', '\\r']" = "\n" +KEEP_TRAILING_NEWLINE = False + +# default filters, tests and namespace + +DEFAULT_NAMESPACE = { + "range": range, + "dict": dict, + "lipsum": generate_lorem_ipsum, + "cycler": Cycler, + "joiner": Joiner, + "namespace": Namespace, +} + +# default policies +DEFAULT_POLICIES: t.Dict[str, t.Any] = { + "compiler.ascii_str": True, + "urlize.rel": "noopener", + "urlize.target": None, + "urlize.extra_schemes": None, + "truncate.leeway": 5, + "json.dumps_function": None, + "json.dumps_kwargs": {"sort_keys": True}, + "ext.i18n.trimmed": False, +} diff --git a/venv/lib/python3.11/site-packages/jinja2/environment.py b/venv/lib/python3.11/site-packages/jinja2/environment.py new file mode 100644 index 0000000..0fc6e5b --- /dev/null +++ b/venv/lib/python3.11/site-packages/jinja2/environment.py @@ -0,0 +1,1672 @@ +"""Classes for managing templates and their runtime and compile time +options. +""" + +import os +import typing +import typing as t +import weakref +from collections import ChainMap +from functools import lru_cache +from functools import partial +from functools import reduce +from types import CodeType + +from markupsafe import Markup + +from . 
import nodes +from .compiler import CodeGenerator +from .compiler import generate +from .defaults import BLOCK_END_STRING +from .defaults import BLOCK_START_STRING +from .defaults import COMMENT_END_STRING +from .defaults import COMMENT_START_STRING +from .defaults import DEFAULT_FILTERS # type: ignore[attr-defined] +from .defaults import DEFAULT_NAMESPACE +from .defaults import DEFAULT_POLICIES +from .defaults import DEFAULT_TESTS # type: ignore[attr-defined] +from .defaults import KEEP_TRAILING_NEWLINE +from .defaults import LINE_COMMENT_PREFIX +from .defaults import LINE_STATEMENT_PREFIX +from .defaults import LSTRIP_BLOCKS +from .defaults import NEWLINE_SEQUENCE +from .defaults import TRIM_BLOCKS +from .defaults import VARIABLE_END_STRING +from .defaults import VARIABLE_START_STRING +from .exceptions import TemplateNotFound +from .exceptions import TemplateRuntimeError +from .exceptions import TemplatesNotFound +from .exceptions import TemplateSyntaxError +from .exceptions import UndefinedError +from .lexer import get_lexer +from .lexer import Lexer +from .lexer import TokenStream +from .nodes import EvalContext +from .parser import Parser +from .runtime import Context +from .runtime import new_context +from .runtime import Undefined +from .utils import _PassArg +from .utils import concat +from .utils import consume +from .utils import import_string +from .utils import internalcode +from .utils import LRUCache +from .utils import missing + +if t.TYPE_CHECKING: + import typing_extensions as te + + from .bccache import BytecodeCache + from .ext import Extension + from .loaders import BaseLoader + +_env_bound = t.TypeVar("_env_bound", bound="Environment") + + +# for direct template usage we have up to ten living environments +@lru_cache(maxsize=10) +def get_spontaneous_environment(cls: t.Type[_env_bound], *args: t.Any) -> _env_bound: + """Return a new spontaneous environment. A spontaneous environment + is used for templates created directly rather than through an + existing environment. + + :param cls: Environment class to create. + :param args: Positional arguments passed to environment. + """ + env = cls(*args) + env.shared = True + return env + + +def create_cache( + size: int, +) -> t.Optional[t.MutableMapping[t.Tuple["weakref.ref[t.Any]", str], "Template"]]: + """Return the cache class for the given size.""" + if size == 0: + return None + + if size < 0: + return {} + + return LRUCache(size) # type: ignore + + +def copy_cache( + cache: t.Optional[t.MutableMapping[t.Any, t.Any]], +) -> t.Optional[t.MutableMapping[t.Tuple["weakref.ref[t.Any]", str], "Template"]]: + """Create an empty copy of the given cache.""" + if cache is None: + return None + + if type(cache) is dict: # noqa E721 + return {} + + return LRUCache(cache.capacity) # type: ignore + + +def load_extensions( + environment: "Environment", + extensions: t.Sequence[t.Union[str, t.Type["Extension"]]], +) -> t.Dict[str, "Extension"]: + """Load the extensions from the list and bind it to the environment. + Returns a dict of instantiated extensions. + """ + result = {} + + for extension in extensions: + if isinstance(extension, str): + extension = t.cast(t.Type["Extension"], import_string(extension)) + + result[extension.identifier] = extension(environment) + + return result + + +def _environment_config_check(environment: _env_bound) -> _env_bound: + """Perform a sanity check on the environment.""" + assert issubclass( + environment.undefined, Undefined + ), "'undefined' must be a subclass of 'jinja2.Undefined'." 
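+ # Note that the chained comparison in the next assert only guarantees + # block != variable and variable != comment; identical block and comment + # start strings would still pass this sanity check.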
+ assert ( + environment.block_start_string + != environment.variable_start_string + != environment.comment_start_string + ), "block, variable and comment start strings must be different." + assert environment.newline_sequence in { + "\r", + "\r\n", + "\n", + }, "'newline_sequence' must be one of '\\n', '\\r\\n', or '\\r'." + return environment + + + class Environment: + r"""The core component of Jinja is the `Environment`. It contains + important shared variables like configuration, filters, tests, + globals and others. Instances of this class may be modified if + they are not shared and if no template was loaded so far. + Modifications on environments after the first template was loaded + will lead to surprising effects and undefined behavior. + + Here are the possible initialization parameters: + + `block_start_string` + The string marking the beginning of a block. Defaults to ``'{%'``. + + `block_end_string` + The string marking the end of a block. Defaults to ``'%}'``. + + `variable_start_string` + The string marking the beginning of a print statement. + Defaults to ``'{{'``. + + `variable_end_string` + The string marking the end of a print statement. Defaults to + ``'}}'``. + + `comment_start_string` + The string marking the beginning of a comment. Defaults to ``'{#'``. + + `comment_end_string` + The string marking the end of a comment. Defaults to ``'#}'``. + + `line_statement_prefix` + If given and a string, this will be used as prefix for line based + statements. See also :ref:`line-statements`. + + `line_comment_prefix` + If given and a string, this will be used as prefix for line based + comments. See also :ref:`line-statements`. + + .. versionadded:: 2.2 + + `trim_blocks` + If this is set to ``True`` the first newline after a block is + removed (block, not variable tag!). Defaults to `False`. + + `lstrip_blocks` + If this is set to ``True`` leading spaces and tabs are stripped + from the start of a line to a block. Defaults to `False`. + + `newline_sequence` + The sequence that starts a newline. Must be one of ``'\r'``, + ``'\n'`` or ``'\r\n'``. The default is ``'\n'`` which is a + useful default for Linux and OS X systems as well as web + applications. + + `keep_trailing_newline` + Preserve the trailing newline when rendering templates. + The default is ``False``, which causes a single newline, + if present, to be stripped from the end of the template. + + .. versionadded:: 2.7 + + `extensions` + List of Jinja extensions to use. This can either be import paths + as strings or extension classes. For more information have a + look at :ref:`the extensions documentation <jinja-extensions>`. + + `optimized` + should the optimizer be enabled? Default is ``True``. + + `undefined` + :class:`Undefined` or a subclass of it that is used to represent + undefined values in the template. + + `finalize` + A callable that can be used to process the result of a variable + expression before it is output. For example one can convert + ``None`` implicitly into an empty string here. + + `autoescape` + If set to ``True`` the XML/HTML autoescaping feature is enabled by + default. For more details about autoescaping see + :class:`~markupsafe.Markup`. As of Jinja 2.4 this can also + be a callable that is passed the template name and has to + return ``True`` or ``False`` depending on whether autoescape should be + enabled by default. + + .. versionchanged:: 2.4 + `autoescape` can now be a function + + `loader` + The template loader for this environment. + + `cache_size` + The size of the cache.
Per default this is ``400`` which means + that if more than 400 templates are loaded the loader will clean + out the least recently used template. If the cache size is set to + ``0`` templates are recompiled all the time, if the cache size is + ``-1`` the cache will not be cleaned. + + .. versionchanged:: 2.8 + The cache size was increased to 400 from a low 50. + + `auto_reload` + Some loaders load templates from locations where the template + sources may change (ie: file system or database). If + ``auto_reload`` is set to ``True`` (default) every time a template is + requested the loader checks if the source changed and if yes, it + will reload the template. For higher performance it's possible to + disable that. + + `bytecode_cache` + If set to a bytecode cache object, this object will provide a + cache for the internal Jinja bytecode so that templates don't + have to be parsed if they were not changed. + + See :ref:`bytecode-cache` for more information. + + `enable_async` + If set to true this enables async template execution which + allows using async functions and generators. + """ + + #: if this environment is sandboxed. Modifying this variable won't make + #: the environment sandboxed though. For a real sandboxed environment + #: have a look at jinja2.sandbox. This flag alone controls the code + #: generation by the compiler. + sandboxed = False + + #: True if the environment is just an overlay + overlayed = False + + #: the environment this environment is linked to if it is an overlay + linked_to: t.Optional["Environment"] = None + + #: shared environments have this set to `True`. A shared environment + #: must not be modified + shared = False + + #: the class that is used for code generation. See + #: :class:`~jinja2.compiler.CodeGenerator` for more information. + code_generator_class: t.Type["CodeGenerator"] = CodeGenerator + + concat = "".join + + #: the context class that is used for templates. See + #: :class:`~jinja2.runtime.Context` for more information. + context_class: t.Type[Context] = Context + + template_class: t.Type["Template"] + + def __init__( + self, + block_start_string: str = BLOCK_START_STRING, + block_end_string: str = BLOCK_END_STRING, + variable_start_string: str = VARIABLE_START_STRING, + variable_end_string: str = VARIABLE_END_STRING, + comment_start_string: str = COMMENT_START_STRING, + comment_end_string: str = COMMENT_END_STRING, + line_statement_prefix: t.Optional[str] = LINE_STATEMENT_PREFIX, + line_comment_prefix: t.Optional[str] = LINE_COMMENT_PREFIX, + trim_blocks: bool = TRIM_BLOCKS, + lstrip_blocks: bool = LSTRIP_BLOCKS, + newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = NEWLINE_SEQUENCE, + keep_trailing_newline: bool = KEEP_TRAILING_NEWLINE, + extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = (), + optimized: bool = True, + undefined: t.Type[Undefined] = Undefined, + finalize: t.Optional[t.Callable[..., t.Any]] = None, + autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = False, + loader: t.Optional["BaseLoader"] = None, + cache_size: int = 400, + auto_reload: bool = True, + bytecode_cache: t.Optional["BytecodeCache"] = None, + enable_async: bool = False, + ): + # !!Important notice!! + # The constructor accepts quite a few arguments that should be + # passed by keyword rather than position. 
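+ # For example, prefer Environment(loader=..., autoescape=True) over + # passing the same values positionally (illustrative call).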
However it's important to + # not change the order of arguments because it's used at least + # internally in those cases: + # - spontaneous environments (i18n extension and Template) + # - unittests + # If parameter changes are required only add parameters at the end + # and don't change the arguments (or the defaults!) of the arguments + # existing already. + + # lexer / parser information + self.block_start_string = block_start_string + self.block_end_string = block_end_string + self.variable_start_string = variable_start_string + self.variable_end_string = variable_end_string + self.comment_start_string = comment_start_string + self.comment_end_string = comment_end_string + self.line_statement_prefix = line_statement_prefix + self.line_comment_prefix = line_comment_prefix + self.trim_blocks = trim_blocks + self.lstrip_blocks = lstrip_blocks + self.newline_sequence = newline_sequence + self.keep_trailing_newline = keep_trailing_newline + + # runtime information + self.undefined: t.Type[Undefined] = undefined + self.optimized = optimized + self.finalize = finalize + self.autoescape = autoescape + + # defaults + self.filters = DEFAULT_FILTERS.copy() + self.tests = DEFAULT_TESTS.copy() + self.globals = DEFAULT_NAMESPACE.copy() + + # set the loader provided + self.loader = loader + self.cache = create_cache(cache_size) + self.bytecode_cache = bytecode_cache + self.auto_reload = auto_reload + + # configurable policies + self.policies = DEFAULT_POLICIES.copy() + + # load extensions + self.extensions = load_extensions(self, extensions) + + self.is_async = enable_async + _environment_config_check(self) + + def add_extension(self, extension: t.Union[str, t.Type["Extension"]]) -> None: + """Adds an extension after the environment was created. + + .. versionadded:: 2.5 + """ + self.extensions.update(load_extensions(self, [extension])) + + def extend(self, **attributes: t.Any) -> None: + """Add the items to the instance of the environment if they do not exist + yet. This is used by :ref:`extensions <writing-extensions>` to register + callbacks and configuration values without breaking inheritance. + """ + for key, value in attributes.items(): + if not hasattr(self, key): + setattr(self, key, value) + + def overlay( + self, + block_start_string: str = missing, + block_end_string: str = missing, + variable_start_string: str = missing, + variable_end_string: str = missing, + comment_start_string: str = missing, + comment_end_string: str = missing, + line_statement_prefix: t.Optional[str] = missing, + line_comment_prefix: t.Optional[str] = missing, + trim_blocks: bool = missing, + lstrip_blocks: bool = missing, + newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = missing, + keep_trailing_newline: bool = missing, + extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = missing, + optimized: bool = missing, + undefined: t.Type[Undefined] = missing, + finalize: t.Optional[t.Callable[..., t.Any]] = missing, + autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = missing, + loader: t.Optional["BaseLoader"] = missing, + cache_size: int = missing, + auto_reload: bool = missing, + bytecode_cache: t.Optional["BytecodeCache"] = missing, + enable_async: bool = missing, + ) -> "te.Self": + """Create a new overlay environment that shares all the data with the + current environment except for cache and the overridden attributes. + Extensions cannot be removed for an overlayed environment.
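+ For example, ``env.overlay(trim_blocks=True)`` (where ``env`` is an + existing environment) returns a linked environment differing only in + that option.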
An overlayed + environment automatically gets all the extensions of the environment it + is linked to plus optional extra extensions. + + Creating overlays should happen after the initial environment was set + up completely. Not all attributes are truly linked, some are just + copied over so modifications on the original environment may not shine + through. + + .. versionchanged:: 3.1.5 + ``enable_async`` is applied correctly. + + .. versionchanged:: 3.1.2 + Added the ``newline_sequence``, ``keep_trailing_newline``, + and ``enable_async`` parameters to match ``__init__``. + """ + args = dict(locals()) + del args["self"], args["cache_size"], args["extensions"], args["enable_async"] + + rv = object.__new__(self.__class__) + rv.__dict__.update(self.__dict__) + rv.overlayed = True + rv.linked_to = self + + for key, value in args.items(): + if value is not missing: + setattr(rv, key, value) + + if cache_size is not missing: + rv.cache = create_cache(cache_size) + else: + rv.cache = copy_cache(self.cache) + + rv.extensions = {} + for key, value in self.extensions.items(): + rv.extensions[key] = value.bind(rv) + if extensions is not missing: + rv.extensions.update(load_extensions(rv, extensions)) + + if enable_async is not missing: + rv.is_async = enable_async + + return _environment_config_check(rv) + + @property + def lexer(self) -> Lexer: + """The lexer for this environment.""" + return get_lexer(self) + + def iter_extensions(self) -> t.Iterator["Extension"]: + """Iterates over the extensions by priority.""" + return iter(sorted(self.extensions.values(), key=lambda x: x.priority)) + + def getitem( + self, obj: t.Any, argument: t.Union[str, t.Any] + ) -> t.Union[t.Any, Undefined]: + """Get an item or attribute of an object but prefer the item.""" + try: + return obj[argument] + except (AttributeError, TypeError, LookupError): + if isinstance(argument, str): + try: + attr = str(argument) + except Exception: + pass + else: + try: + return getattr(obj, attr) + except AttributeError: + pass + return self.undefined(obj=obj, name=argument) + + def getattr(self, obj: t.Any, attribute: str) -> t.Any: + """Get an item or attribute of an object but prefer the attribute. + Unlike :meth:`getitem` the attribute *must* be a string. + """ + try: + return getattr(obj, attribute) + except AttributeError: + pass + try: + return obj[attribute] + except (TypeError, LookupError, AttributeError): + return self.undefined(obj=obj, name=attribute) + + def _filter_test_common( + self, + name: t.Union[str, Undefined], + value: t.Any, + args: t.Optional[t.Sequence[t.Any]], + kwargs: t.Optional[t.Mapping[str, t.Any]], + context: t.Optional[Context], + eval_ctx: t.Optional[EvalContext], + is_filter: bool, + ) -> t.Any: + if is_filter: + env_map = self.filters + type_name = "filter" + else: + env_map = self.tests + type_name = "test" + + func = env_map.get(name) # type: ignore + + if func is None: + msg = f"No {type_name} named {name!r}." + + if isinstance(name, Undefined): + try: + name._fail_with_undefined_error() + except Exception as e: + msg = f"{msg} ({e}; did you forget to quote the callable name?)" + + raise TemplateRuntimeError(msg) + + args = [value, *(args if args is not None else ())] + kwargs = kwargs if kwargs is not None else {} + pass_arg = _PassArg.from_obj(func) + + if pass_arg is _PassArg.context: + if context is None: + raise TemplateRuntimeError( + f"Attempted to invoke a context {type_name} without context." 
+ ) + + args.insert(0, context) + elif pass_arg is _PassArg.eval_context: + if eval_ctx is None: + if context is not None: + eval_ctx = context.eval_ctx + else: + eval_ctx = EvalContext(self) + + args.insert(0, eval_ctx) + elif pass_arg is _PassArg.environment: + args.insert(0, self) + + return func(*args, **kwargs) + + def call_filter( + self, + name: str, + value: t.Any, + args: t.Optional[t.Sequence[t.Any]] = None, + kwargs: t.Optional[t.Mapping[str, t.Any]] = None, + context: t.Optional[Context] = None, + eval_ctx: t.Optional[EvalContext] = None, + ) -> t.Any: + """Invoke a filter on a value the same way the compiler does. + + This might return a coroutine if the filter is running from an + environment in async mode and the filter supports async + execution. It's your responsibility to await this if needed. + + .. versionadded:: 2.7 + """ + return self._filter_test_common( + name, value, args, kwargs, context, eval_ctx, True + ) + + def call_test( + self, + name: str, + value: t.Any, + args: t.Optional[t.Sequence[t.Any]] = None, + kwargs: t.Optional[t.Mapping[str, t.Any]] = None, + context: t.Optional[Context] = None, + eval_ctx: t.Optional[EvalContext] = None, + ) -> t.Any: + """Invoke a test on a value the same way the compiler does. + + This might return a coroutine if the test is running from an + environment in async mode and the test supports async execution. + It's your responsibility to await this if needed. + + .. versionchanged:: 3.0 + Tests support ``@pass_context``, etc. decorators. Added + the ``context`` and ``eval_ctx`` parameters. + + .. versionadded:: 2.7 + """ + return self._filter_test_common( + name, value, args, kwargs, context, eval_ctx, False + ) + + @internalcode + def parse( + self, + source: str, + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + ) -> nodes.Template: + """Parse the sourcecode and return the abstract syntax tree. This + tree of nodes is used by the compiler to convert the template into + executable source- or bytecode. This is useful for debugging or to + extract information from templates. + + If you are :ref:`developing Jinja extensions ` + this gives you a good overview of the node tree generated. + """ + try: + return self._parse(source, name, filename) + except TemplateSyntaxError: + self.handle_exception(source=source) + + def _parse( + self, source: str, name: t.Optional[str], filename: t.Optional[str] + ) -> nodes.Template: + """Internal parsing function used by `parse` and `compile`.""" + return Parser(self, source, name, filename).parse() + + def lex( + self, + source: str, + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + ) -> t.Iterator[t.Tuple[int, str, str]]: + """Lex the given sourcecode and return a generator that yields + tokens as tuples in the form ``(lineno, token_type, value)``. + This can be useful for :ref:`extension development ` + and debugging templates. + + This does not perform preprocessing. If you want the preprocessing + of the extensions to be applied you have to filter source through + the :meth:`preprocess` method. + """ + source = str(source) + try: + return self.lexer.tokeniter(source, name, filename) + except TemplateSyntaxError: + self.handle_exception(source=source) + + def preprocess( + self, + source: str, + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + ) -> str: + """Preprocesses the source with all extensions. 
This is automatically + called for all parsing and compiling methods but *not* for :meth:`lex` + because there you usually only want the actual source tokenized. + """ + return reduce( + lambda s, e: e.preprocess(s, name, filename), + self.iter_extensions(), + str(source), + ) + + def _tokenize( + self, + source: str, + name: t.Optional[str], + filename: t.Optional[str] = None, + state: t.Optional[str] = None, + ) -> TokenStream: + """Called by the parser to do the preprocessing and filtering + for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`. + """ + source = self.preprocess(source, name, filename) + stream = self.lexer.tokenize(source, name, filename, state) + + for ext in self.iter_extensions(): + stream = ext.filter_stream(stream) # type: ignore + + if not isinstance(stream, TokenStream): + stream = TokenStream(stream, name, filename) + + return stream + + def _generate( + self, + source: nodes.Template, + name: t.Optional[str], + filename: t.Optional[str], + defer_init: bool = False, + ) -> str: + """Internal hook that can be overridden to hook a different generate + method in. + + .. versionadded:: 2.5 + """ + return generate( # type: ignore + source, + self, + name, + filename, + defer_init=defer_init, + optimized=self.optimized, + ) + + def _compile(self, source: str, filename: str) -> CodeType: + """Internal hook that can be overridden to hook a different compile + method in. + + .. versionadded:: 2.5 + """ + return compile(source, filename, "exec") + + @typing.overload + def compile( + self, + source: t.Union[str, nodes.Template], + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + raw: "te.Literal[False]" = False, + defer_init: bool = False, + ) -> CodeType: ... + + @typing.overload + def compile( + self, + source: t.Union[str, nodes.Template], + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + raw: "te.Literal[True]" = ..., + defer_init: bool = False, + ) -> str: ... + + @internalcode + def compile( + self, + source: t.Union[str, nodes.Template], + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + raw: bool = False, + defer_init: bool = False, + ) -> t.Union[str, CodeType]: + """Compile a node or template source code. The `name` parameter is + the load name of the template after it was joined using + :meth:`join_path` if necessary, not the filename on the file system. + The `filename` parameter is the estimated filename of the template on + the file system. If the template came from a database or memory this + can be omitted. + + The return value of this method is a python code object. If the `raw` + parameter is `True` the return value will be a string with python + code equivalent to the bytecode returned otherwise. This method is + mainly used internally. + + `defer_init` is used internally to aid the module code generator. This + causes the generated code to be able to import without the global + environment variable to be set. + + .. versionadded:: 2.4 + `defer_init` parameter added. + """ + source_hint = None + try: + if isinstance(source, str): + source_hint = source + source = self._parse(source, name, filename) + source = self._generate(source, name, filename, defer_init=defer_init) + if raw: + return source + if filename is None: + filename = "