commit 36f25aaff437aab28c3c17eb5c1b6e6fc546c6d0 Author: Ismo Vuorinen Date: Sun Oct 19 09:45:39 2025 +0300 feat: initial commit diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..6d7fea2 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,22 @@ +# EditorConfig +# https://editorconfig.org + +root = true + +[*] +charset = utf-8 +end_of_line = lf +indent_size = 4 +indent_style = space +insert_final_newline = true +trim_trailing_whitespace = true +max_line_length = 120 + +[*.{yml,yaml,json,toml}] +indent_size = 2 + +[*.md] +trim_trailing_whitespace = false + +[Makefile] +indent_style = tab diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml new file mode 100644 index 0000000..dfc873f --- /dev/null +++ b/.github/workflows/pr.yml @@ -0,0 +1,53 @@ +--- +name: PR + +on: + pull_request: + +jobs: + test: + runs-on: ubuntu-latest + permissions: + contents: read + steps: + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + + - uses: astral-sh/setup-uv@3259c6206f993105e3a61b142c2d97bf4b9ef83d # v7.1.0 + + - name: Install dependencies + run: uv sync + + - name: Run tests + run: uv run -m pytest --cov + + - name: Lint + run: uvx ruff check . + + - name: Type check + run: uvx mypy . + + audit: + runs-on: ubuntu-latest + permissions: + contents: read + steps: + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + + - uses: astral-sh/setup-uv@3259c6206f993105e3a61b142c2d97bf4b9ef83d # v7.1.0 + + - name: Install + run: | + uv sync + uv pip install -e . + + - name: Audit workflows + run: uv run ghaw-auditor scan --repo . 
--output audit-results + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Upload results + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + if: always() + with: + name: audit-results + path: audit-results/ diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..51fd797 --- /dev/null +++ b/.gitignore @@ -0,0 +1,54 @@ +# Python +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg + +# Virtual environments +venv/ +ENV/ +env/ + +# IDEs +.vscode/ +.idea/ +*.swp +*.swo +*~ + +# Testing +.pytest_cache/ +.coverage +htmlcov/ +.tox/ + +# Type checking +.mypy_cache/ +.dmypy.json +dmypy.json + +# Ruff +.ruff_cache/ + +# Auditor output +.ghaw-auditor/ + +# Cache +.cache/ +audit-results diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..d5ce5c2 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,77 @@ +--- +# Configure pre-commit to use uv for Python hooks +# Pre-commit 3.6.0+ automatically detects and uses uv when available +default_install_hook_types: [pre-commit, commit-msg] + +repos: + - repo: https://github.com/astral-sh/uv-pre-commit + rev: 0.9.2 + hooks: + - id: uv-lock + - id: uv-sync + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v6.0.0 + hooks: + - id: requirements-txt-fixer + - id: detect-private-key + - id: destroyed-symlinks + - id: trailing-whitespace + args: [--markdown-linebreak-ext=md] + - id: check-ast + - id: check-case-conflict + - id: check-merge-conflict + - id: check-executables-have-shebangs + - id: check-shebang-scripts-are-executable + - id: check-symlinks + - id: check-toml + - id: check-xml + - id: check-yaml + args: [--allow-multiple-documents] + - id: end-of-file-fixer + - id: mixed-line-ending + args: [--fix=auto] + - id: pretty-format-json + args: [--autofix, --no-sort-keys] + + - 
repo: https://github.com/DavidAnson/markdownlint-cli2 + rev: v0.18.1 + hooks: + - id: markdownlint-cli2 + args: [--fix] + + - repo: https://github.com/adrienverge/yamllint + rev: v1.37.1 + hooks: + - id: yamllint + + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.14.0 + hooks: + # Run the linter with auto-fix + - id: ruff-check + args: [--fix] + # Run the formatter + - id: ruff-format + + - repo: https://github.com/rhysd/actionlint + rev: v1.7.8 + hooks: + - id: actionlint + args: ["-shellcheck="] + + - repo: https://github.com/renovatebot/pre-commit-hooks + rev: 41.149.2 + hooks: + - id: renovate-config-validator + + - repo: https://github.com/bridgecrewio/checkov.git + rev: "3.2.483" + hooks: + - id: checkov + args: + - "--quiet" + + - repo: https://github.com/gitleaks/gitleaks + rev: v8.28.0 + hooks: + - id: gitleaks diff --git a/.yamlignore b/.yamlignore new file mode 100644 index 0000000..3d8868d --- /dev/null +++ b/.yamlignore @@ -0,0 +1 @@ +# Ignore patterns for yamllint diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..e86e644 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,42 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+ +## [1.0.0] - 2025-10-02 + +### Added + +- Initial release +- Full workflow and action scanning +- GitHub API integration with caching and retries +- Action reference resolution (local, GitHub, Docker) +- Monorepo action support (owner/repo/path@ref) +- Diff mode with baseline comparison +- Policy validation with enforcement +- JSON and Markdown report generation +- Comprehensive metadata extraction: + - Triggers, permissions, concurrency + - Jobs, steps, actions used + - Secrets, environment variables + - Containers, services, strategies +- `scan`, `inventory`, and `validate` commands +- uv-based dependency management +- Disk caching with configurable TTL +- Parallel API calls with configurable concurrency +- Reusable workflow detection and contract parsing +- Support for empty workflow_call declarations +- Robust error handling for malformed YAML + +### Technical + +- Python 3.11+ with type hints +- Pydantic v2 models +- ruamel.yaml parser +- httpx client with tenacity retries +- Rich console output +- Typer CLI framework +- diskcache for persistent caching +- Test coverage with pytest diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..f2360e1 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,66 @@ +# Contributing + +## Setup + +```bash +uv sync +uv pip install -e . +``` + +## Development + +```bash +# Run locally +uv run ghaw-auditor scan --repo . + +# Tests +uv run -m pytest +uv run -m pytest -k test_name + +# Coverage +uv run -m pytest --cov --cov-report=html + +# Lint & format +uvx ruff check . +uvx ruff format . + +# Type check +uvx mypy . +``` + +## Code Style + +- Python 3.11+ with type hints +- Max line length: 120 characters +- Follow PEP 8 +- Use Pydantic for models +- Add docstrings to public functions + +## Testing + +- Write tests for new features +- Maintain coverage ≥ 85% +- Use pytest fixtures +- Mock external API calls + +## Pull Requests + +1. Fork and create a feature branch +2. Add tests +3. 
Ensure all checks pass +4. Update CHANGELOG.md +5. Submit PR with clear description + +## Commit Messages + +Follow conventional commits: + +- `feat:` new feature +- `fix:` bug fix +- `docs:` documentation +- `test:` tests +- `refactor:` code refactoring + +## Questions? + +Open an issue for discussion. diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..7d6d8db --- /dev/null +++ b/Makefile @@ -0,0 +1,37 @@ +.PHONY: help install test lint format check clean audit + +help: + @echo "Available targets:" + @echo " install - Install dependencies with uv" + @echo " test - Run tests with coverage" + @echo " lint - Run ruff linting" + @echo " format - Format code with ruff" + @echo " check - Run all checks (lint, format, type, test)" + @echo " clean - Remove generated files" + @echo " audit - Run auditor on current repo" + +install: + uv sync + uv pip install -e . + +test: + uv run -m pytest -v --cov=ghaw_auditor --cov-report=term-missing + +lint: + uvx ruff check . + +format: + uvx ruff format . + +typecheck: + uvx mypy . + +check: lint format typecheck test + +clean: + rm -rf .pytest_cache .mypy_cache .ruff_cache htmlcov .coverage + rm -rf build dist *.egg-info + find . -type d -name __pycache__ -exec rm -rf {} + + +audit: + uv run ghaw-auditor scan --repo . --output .ghaw-auditor diff --git a/README.md b/README.md new file mode 100644 index 0000000..9ac386a --- /dev/null +++ b/README.md @@ -0,0 +1,476 @@ +# GitHub Actions & Workflows Auditor + +A Python CLI tool for analyzing, auditing, and tracking +GitHub Actions workflows and actions. 
+ +## Features + +- **Comprehensive Scanning**: Discovers workflows (`.github/workflows/*.yml`) + and action manifests (`action.yml`) +- **Action Resolution**: Resolves GitHub action references to specific SHAs + via GitHub API +- **Monorepo Support**: Handles monorepo actions like `owner/repo/path@ref` +- **Policy Validation**: Enforces security and best practice policies +- **Diff Mode**: Compare current state against baselines to track changes + over time +- **Multiple Output Formats**: JSON and Markdown reports +- **Fast & Cached**: Uses `uv` for dependency management and disk caching + for API responses +- **Rich Analysis**: Extracts triggers, permissions, secrets, runners, + containers, services, and more + +## Usage (Recommended) + +Run directly with `uvx` without installation: + +```bash +# Scan current directory +uvx ghaw-auditor scan + +# Scan specific repository +uvx ghaw-auditor scan --repo /path/to/repo + +# With GitHub token for better rate limits +GITHUB_TOKEN=ghp_xxx uvx ghaw-auditor scan --repo /path/to/repo + +# List unique actions +uvx ghaw-auditor inventory --repo /path/to/repo + +# Validate against policy +uvx ghaw-auditor validate --policy policy.yml --enforce +``` + +> **Note:** `uvx` runs the tool directly without installation. +> For frequent use or CI pipelines, see +> [Installation](#installation-optional) below. + +## Installation (Optional) + +### Using uv (recommended) + +```bash +# Install uv if you don't have it +curl -LsSf https://astral.sh/uv/install.sh | sh + +# Clone and install +git clone +cd ghaw_auditor +uv sync + +# Install in editable mode +uv pip install -e . +``` + +### Using pipx + +```bash +pipx install . +``` + +> **When to install:** Install locally if you use the tool frequently, +> need it in CI pipelines, or want faster execution (no download on each run). + +## Commands + +> **Note:** Examples use `uvx ghaw-auditor`. +> If installed locally, use `ghaw-auditor` directly. 
+ +### `scan` - Full Analysis + +Analyzes workflows, resolves actions, generates reports. + +```bash +# Basic scan +uvx ghaw-auditor scan --repo . + +# Full scan with all options +uvx ghaw-auditor scan \ + --repo . \ + --output .audit \ + --format all \ + --token $GITHUB_TOKEN \ + --concurrency 8 \ + --write-baseline + +# Offline mode (no API calls) +uvx ghaw-auditor scan --offline --format md +``` + +**Options:** + +- `--repo ` - Repository path (default: `.`) +- `--token ` - GitHub token (env: `GITHUB_TOKEN`) +- `--output ` - Output directory (default: `.ghaw-auditor`) +- `--format ` - Output format (default: `all`) +- `--cache-dir ` - Cache directory +- `--offline` - Skip API resolution +- `--concurrency ` - API concurrency (default: 4) +- `--verbose`, `--quiet` - Logging levels + +### `inventory` - List Actions + +Print deduplicated action inventory. + +```bash +uvx ghaw-auditor inventory --repo /path/to/repo + +# Output: +# Unique Actions: 15 +# • actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 +# • actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 +# ... +``` + +### `validate` - Policy Validation + +Validate workflows against policies. + +```bash +# Validate with default policy +uvx ghaw-auditor validate --repo . + +# Validate with custom policy +uvx ghaw-auditor validate --policy policy.yml --enforce +``` + +**Options:** + +- `--policy ` - Policy file path +- `--enforce` - Exit non-zero on violations + +## Diff Mode + +Track changes over time by comparing against baselines. 
+ +```bash +# Create initial baseline +uvx ghaw-auditor scan --write-baseline --output .audit + +# Later, compare against baseline +uvx ghaw-auditor scan --diff --baseline .audit/baseline + +# Output: .audit/diff/report.diff.md +``` + +**Baseline contents:** + +- `baseline/actions.json` - Action inventory snapshot +- `baseline/workflows.json` - Workflow metadata snapshot +- `baseline/meta.json` - Auditor version, commit SHA, timestamp + +**Diff reports show:** + +- Added/removed/modified workflows +- Added/removed actions +- Changes to permissions, triggers, concurrency, secrets, etc. + +## Output + +The tool generates structured reports in the output directory: + +### JSON Files + +- **`actions.json`** - Deduplicated action inventory with manifests +- **`workflows.json`** - Complete workflow metadata +- **`violations.json`** - Policy violations + +### Markdown Report + +**`report.md`** includes: + +- Summary (workflow count, action count, violations) +- Analysis (triggers, runners, secrets, permissions) +- Per-workflow details (jobs, actions used, configuration) +- Action inventory with inputs/outputs +- Policy violations + +### Example Output + +```text +.ghaw-auditor/ +├── actions.json +├── workflows.json +├── violations.json +├── report.md +├── baseline/ +│ ├── actions.json +│ ├── workflows.json +│ └── meta.json +└── diff/ + ├── actions.diff.json + ├── workflows.diff.json + └── report.diff.md +``` + +## Policy Configuration + +Create `policy.yml` to enforce policies: + +```yaml +require_pinned_actions: true # Actions must use SHA refs +forbid_branch_refs: true # Forbid branch refs (main, master, etc.) 
+require_concurrency_on_pr: true # PR workflows must have concurrency + +allowed_actions: # Whitelist + - actions/* + - github/* + - docker/* + +denied_actions: # Blacklist + - dangerous/action + +min_permissions: true # Enforce least-privilege +``` + +**Policy rules:** + +- `require_pinned_actions` - Actions must be pinned to SHA (not tags/branches) +- `forbid_branch_refs` - Forbid branch references (main, master, develop) +- `allowed_actions` - Whitelist of allowed actions (glob patterns) +- `denied_actions` - Blacklist of forbidden actions +- `require_concurrency_on_pr` - PR workflows must set concurrency groups + +**Enforcement:** + +```bash +# Warn on violations +uvx ghaw-auditor validate --policy policy.yml + +# Fail CI on violations +uvx ghaw-auditor validate --policy policy.yml --enforce +# Exit code: 0 (pass), 1 (violations), 2 (error) +``` + +## Extracted Metadata + +### Workflows + +- Name, path, triggers (push, PR, schedule, etc.) +- Permissions (workflow & job-level) +- Concurrency groups +- Environment variables +- Reusable workflow contracts (inputs, outputs, secrets) + +### Jobs + +- Runner (`runs-on`) +- Dependencies (`needs`) +- Conditions (`if`) +- Timeouts +- Container & service configurations +- Matrix strategies +- Actions used per job + +### Actions + +- Type (GitHub, local, Docker) +- Resolved SHAs for GitHub actions +- Input/output definitions +- Runtime (composite, Docker, Node.js) +- Monorepo path support + +### Security + +- Secrets used (`${{ secrets.* }}`) +- Permissions (contents, packages, issues, etc.) 
+- Service containers (databases, caches) +- External actions (owner/repo resolution) + +## Architecture + +**Layers:** + +- `cli` - Typer-based CLI interface +- `scanner` - File discovery +- `parser` - YAML parsing (ruamel.yaml) +- `resolver` - GitHub API integration +- `analyzer` - Pattern extraction +- `policy` - Policy validation +- `renderer` - JSON/Markdown reports +- `differ` - Baseline comparison +- `cache` - Disk-based caching +- `github_client` - HTTP client with retries + +**Models (Pydantic):** + +- `ActionRef`, `ActionManifest` +- `WorkflowMeta`, `JobMeta` +- `Permissions`, `Strategy`, `Container`, `Service` +- `Policy`, `Baseline`, `DiffEntry` + +## Development + +```bash +# Install dependencies +uv sync + +# Run locally +uv run ghaw-auditor scan --repo . + +# Run tests +uv run -m pytest + +# Lint +uvx ruff check . + +# Format +uvx ruff format . + +# Type check +uvx mypy . + +# Coverage +uv run -m pytest --cov --cov-report=html +``` + +## CI Integration + +### GitHub Actions + +```yaml +- name: Audit GitHub Actions + run: | + uvx ghaw-auditor scan --output audit-results + uvx ghaw-auditor validate --policy policy.yml --enforce + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + +- name: Upload Audit Results + uses: actions/upload-artifact@v4 + with: + name: audit-results + path: audit-results/ +``` + +> **Alternative:** For faster CI runs, cache the installation: +> `pip install ghaw-auditor` then use `ghaw-auditor` directly. + +### Baseline Tracking + +```yaml +- name: Compare Against Baseline + run: | + uvx ghaw-auditor scan --diff --baseline .audit/baseline + cat .audit/diff/report.diff.md >> $GITHUB_STEP_SUMMARY +``` + +## Examples + +### Analyze a Repository + +```bash +uvx ghaw-auditor scan --repo ~/projects/myrepo +``` + +Output: + +```text +Scanning repository... +Found 7 workflows and 2 actions +Parsing workflows... +Found 15 unique action references +Resolving actions... +Analyzing workflows... +Generating reports... +✓ Audit complete! 
Reports in .ghaw-auditor +``` + +### Track Changes Over Time + +```bash +# Day 1: Create baseline +uvx ghaw-auditor scan --write-baseline + +# Day 7: Check for changes +uvx ghaw-auditor scan --diff --baseline .ghaw-auditor/baseline + +# View diff +cat .ghaw-auditor/diff/report.diff.md +``` + +### Validate Security Policies + +```bash +# Check for unpinned actions +uvx ghaw-auditor validate --enforce + +# Output: +# [ERROR] .github/workflows/ci.yml: Action actions/checkout +# is not pinned to SHA: v4 +# Policy enforcement failed: 1 errors +``` + +### Generate Inventory + +```bash +uvx ghaw-auditor inventory --repo . > actions-inventory.txt +``` + +## Performance + +- **Parallel API calls** - Configurable concurrency (default: 4) +- **Disk caching** - API responses cached with TTL +- **Fast parsing** - Efficient YAML parsing with ruamel.yaml +- **Target**: 100+ workflows in < 60 seconds (with warm cache) + +## Configuration + +Optional `auditor.yaml` in repo root: + +```yaml +exclude_paths: + - "**/node_modules/**" + - "**/vendor/**" + +cache: + dir: ~/.cache/ghaw-auditor + ttl: 3600 # 1 hour + +policies: + require_pinned_actions: true + forbid_branch_refs: true +``` + +## Troubleshooting + +### Rate Limiting + +```bash +# Set GitHub token for higher rate limits +export GITHUB_TOKEN=ghp_xxx +uvx ghaw-auditor scan +``` + +### Large Repositories + +```bash +# Increase concurrency +uvx ghaw-auditor scan --concurrency 10 + +# Use offline mode for local analysis +uvx ghaw-auditor scan --offline +``` + +### Debugging + +```bash +# Verbose output +uvx ghaw-auditor scan --verbose + +# JSON logging for CI +uvx ghaw-auditor scan --log-json +``` + +## License + +MIT + +## Contributing + +Contributions welcome! 
Please ensure: + +- Tests pass: `uv run -m pytest` +- Code formatted: `uvx ruff format .` +- Linting clean: `uvx ruff check .` +- Type hints valid: `uvx mypy .` +- Coverage ≥ 85% diff --git a/ghaw_auditor/__init__.py b/ghaw_auditor/__init__.py new file mode 100644 index 0000000..d9260f6 --- /dev/null +++ b/ghaw_auditor/__init__.py @@ -0,0 +1,3 @@ +"""GitHub Actions & Workflows Auditor.""" + +__version__ = "1.0.0" diff --git a/ghaw_auditor/analyzer.py b/ghaw_auditor/analyzer.py new file mode 100644 index 0000000..cfdecee --- /dev/null +++ b/ghaw_auditor/analyzer.py @@ -0,0 +1,95 @@ +"""Analyzer for workflows and actions.""" + +from __future__ import annotations + +import logging +from typing import Any + +from ghaw_auditor.models import ActionManifest, WorkflowMeta + +logger = logging.getLogger(__name__) + + +class Analyzer: + """Analyzes workflows and actions for patterns and risks.""" + + def __init__(self) -> None: + """Initialize analyzer.""" + pass + + def analyze_workflows( + self, workflows: dict[str, WorkflowMeta], actions: dict[str, ActionManifest] + ) -> dict[str, Any]: + """Analyze workflows for patterns and issues.""" + analysis = { + "total_workflows": len(workflows), + "total_jobs": sum(len(w.jobs) for w in workflows.values()), + "reusable_workflows": sum(1 for w in workflows.values() if w.is_reusable), + "triggers": self._analyze_triggers(workflows), + "permissions": self._analyze_permissions(workflows), + "secrets": self._analyze_secrets(workflows), + "runners": self._analyze_runners(workflows), + "containers": self._analyze_containers(workflows), + } + return analysis + + def _analyze_triggers(self, workflows: dict[str, WorkflowMeta]) -> dict[str, int]: + """Analyze workflow triggers.""" + triggers: dict[str, int] = {} + for workflow in workflows.values(): + for trigger in workflow.triggers: + triggers[trigger] = triggers.get(trigger, 0) + 1 + return triggers + + def _analyze_permissions(self, workflows: dict[str, WorkflowMeta]) -> dict[str, Any]: + 
"""Analyze permissions usage.""" + has_permissions = sum(1 for w in workflows.values() if w.permissions) + job_permissions = sum(1 for w in workflows.values() for j in w.jobs.values() if j.permissions) + return { + "workflows_with_permissions": has_permissions, + "jobs_with_permissions": job_permissions, + } + + def _analyze_secrets(self, workflows: dict[str, WorkflowMeta]) -> dict[str, Any]: + """Analyze secrets usage.""" + all_secrets: set[str] = set() + for workflow in workflows.values(): + all_secrets.update(workflow.secrets_used) + + return { + "total_unique_secrets": len(all_secrets), + "secrets": sorted(all_secrets), + } + + def _analyze_runners(self, workflows: dict[str, WorkflowMeta]) -> dict[str, int]: + """Analyze runner usage.""" + runners: dict[str, int] = {} + for workflow in workflows.values(): + for job in workflow.jobs.values(): + runner = str(job.runs_on) if isinstance(job.runs_on, list) else job.runs_on + runners[runner] = runners.get(runner, 0) + 1 + return runners + + def _analyze_containers(self, workflows: dict[str, WorkflowMeta]) -> dict[str, Any]: + """Analyze container usage.""" + jobs_with_containers = 0 + jobs_with_services = 0 + + for workflow in workflows.values(): + for job in workflow.jobs.values(): + jobs_with_containers += 1 if job.container else 0 + jobs_with_services += 1 if job.services else 0 + + return { + "jobs_with_containers": jobs_with_containers, + "jobs_with_services": jobs_with_services, + } + + def deduplicate_actions(self, all_actions: list[Any]) -> dict[str, Any]: + """Deduplicate actions by canonical key.""" + unique_actions: dict[str, Any] = {} + for action in all_actions: + key = action.canonical_key() + if key not in unique_actions: + unique_actions[key] = action + return unique_actions diff --git a/ghaw_auditor/cache.py b/ghaw_auditor/cache.py new file mode 100644 index 0000000..d69b421 --- /dev/null +++ b/ghaw_auditor/cache.py @@ -0,0 +1,47 @@ +"""Caching layer for GitHub API responses and parsed data.""" + 
+from __future__ import annotations + +import hashlib +import logging +from pathlib import Path +from typing import Any + +import diskcache +from platformdirs import user_cache_dir + +logger = logging.getLogger(__name__) + + +class Cache: + """Disk-based cache for API responses and parsed objects.""" + + def __init__(self, cache_dir: str | Path | None = None, ttl: int = 3600) -> None: + """Initialize cache.""" + if cache_dir is None: + cache_dir = Path(user_cache_dir("ghaw-auditor")) + self.cache_dir = Path(cache_dir) + self.cache_dir.mkdir(parents=True, exist_ok=True) + self.cache = diskcache.Cache(str(self.cache_dir)) + self.ttl = ttl + + def get(self, key: str) -> Any: + """Get value from cache.""" + return self.cache.get(key) + + def set(self, key: str, value: Any, ttl: int | None = None) -> None: + """Set value in cache.""" + self.cache.set(key, value, expire=ttl or self.ttl) + + def make_key(self, *parts: str) -> str: + """Generate cache key from parts.""" + combined = ":".join(str(p) for p in parts) + return hashlib.sha256(combined.encode()).hexdigest() + + def clear(self) -> None: + """Clear all cache entries.""" + self.cache.clear() + + def close(self) -> None: + """Close cache.""" + self.cache.close() diff --git a/ghaw_auditor/cli.py b/ghaw_auditor/cli.py new file mode 100644 index 0000000..56ff6bb --- /dev/null +++ b/ghaw_auditor/cli.py @@ -0,0 +1,270 @@ +"""CLI interface for GitHub Actions & Workflows Auditor.""" + +from __future__ import annotations + +import logging +from pathlib import Path +from typing import Any + +import typer +from rich.console import Console +from rich.logging import RichHandler + +from ghaw_auditor import __version__ +from ghaw_auditor.analyzer import Analyzer +from ghaw_auditor.differ import Differ +from ghaw_auditor.factory import AuditServiceFactory +from ghaw_auditor.models import Policy +from ghaw_auditor.parser import Parser +from ghaw_auditor.policy import PolicyValidator +from ghaw_auditor.renderer import Renderer +from 
ghaw_auditor.scanner import Scanner +from ghaw_auditor.services import DiffService, ScanResult + +app = typer.Typer( + name="ghaw-auditor", + help="GitHub Actions & Workflows Auditor - analyze and audit GitHub Actions ecosystem", +) +console = Console() + + +def setup_logging(verbose: bool = False, quiet: bool = False, log_json: bool = False) -> None: + """Configure logging.""" + if quiet: + level = logging.ERROR + elif verbose: + level = logging.DEBUG + else: + level = logging.INFO + + if log_json: + logging.basicConfig(level=level, format="%(message)s") + else: + logging.basicConfig( + level=level, format="%(message)s", handlers=[RichHandler(console=console, rich_tracebacks=True)] + ) + + +def _render_reports( + renderer: Renderer, + result: ScanResult, + format_type: str, +) -> None: + """Render reports based on format type.""" + console.print("[cyan]Generating reports...[/cyan]") + if format_type in ("json", "all"): + renderer.render_json(result.workflows, result.actions, result.violations) + if format_type in ("md", "all"): + renderer.render_markdown(result.workflows, result.actions, result.violations, result.analysis) + + +def _handle_diff_mode( + result: ScanResult, + baseline: Path, + output: Path, +) -> None: + """Handle diff mode comparison.""" + console.print("[cyan]Running diff...[/cyan]") + diff_service = DiffService(Differ(baseline)) + try: + workflow_diffs, action_diffs = diff_service.compare(result.workflows, result.actions) + + diff_dir = output / "diff" + diff_dir.mkdir(exist_ok=True) + diff_service.differ.render_diff_markdown(workflow_diffs, action_diffs, diff_dir / "report.diff.md") + console.print(f"[green]Diff report written to {diff_dir / 'report.diff.md'}[/green]") + except FileNotFoundError as e: + logger = logging.getLogger(__name__) + logger.error(f"Baseline not found: {e}") + + +def _write_baseline(result: ScanResult, baseline_path: Path, commit_sha: str | None = None) -> None: + """Write baseline snapshot.""" + differ = 
Differ(baseline_path) + differ.save_baseline(result.workflows, result.actions, commit_sha) + console.print(f"[green]Baseline saved to {baseline_path}[/green]") + + +def _enforce_policy(violations: list[dict[str, Any]]) -> None: + """Enforce policy and exit if errors found.""" + error_violations = [v for v in violations if v.get("severity") == "error"] + if error_violations: + console.print(f"[red]Policy enforcement failed: {len(error_violations)} errors[/red]") + raise typer.Exit(1) + + +@app.command() +def scan( + repo: str = typer.Option(".", help="Repository path or URL"), + token: str | None = typer.Option(None, help="GitHub token", envvar="GITHUB_TOKEN"), + output: Path = typer.Option(".ghaw-auditor", help="Output directory"), + format_type: str = typer.Option("all", help="Output format: json, md, or all"), + cache_dir: Path | None = typer.Option(None, help="Cache directory"), + offline: bool = typer.Option(False, help="Offline mode (no API calls)"), + concurrency: int = typer.Option(4, help="Concurrency for API calls"), + enforce: bool = typer.Option(False, help="Enforce policy (exit non-zero on violations)"), + policy_file: Path | None = typer.Option(None, help="Policy file path"), + diff: bool = typer.Option(False, help="Run in diff mode"), + baseline: Path | None = typer.Option(None, help="Baseline path for diff"), + write_baseline: bool = typer.Option(False, help="Write baseline after scan"), + verbose: bool = typer.Option(False, "--verbose", "-v", help="Verbose output"), + quiet: bool = typer.Option(False, "--quiet", "-q", help="Quiet output"), + log_json: bool = typer.Option(False, help="JSON logging"), +) -> None: + """Scan repository for GitHub Actions and workflows.""" + setup_logging(verbose, quiet, log_json) + logger = logging.getLogger(__name__) + + try: + # Validate repository path + repo_path = Path(repo).resolve() + if not repo_path.exists(): + console.print(f"[red]Repository not found: {repo_path}[/red]") + raise typer.Exit(1) + + # Load 
policy if specified + policy = None + if policy_file and policy_file.exists(): + # TODO: Load policy from YAML file + policy = Policy() + + # Create service via factory + service = AuditServiceFactory.create( + repo_path=repo_path, + token=token, + offline=offline, + cache_dir=cache_dir, + concurrency=concurrency, + policy=policy, + ) + + # Execute scan + console.print("[cyan]Scanning repository...[/cyan]") + result = service.scan(offline=offline) + + # Display summary + console.print(f"Found {result.workflow_count} workflows and {result.action_count} actions") + console.print(f"Found {result.unique_action_count} unique action references") + + if result.violations: + console.print(f"Found {len(result.violations)} policy violations") + + # Render reports + renderer = Renderer(output) + _render_reports(renderer, result, format_type) + + # Handle diff mode + if diff and baseline: + _handle_diff_mode(result, baseline, output) + + # Write baseline + if write_baseline: + baseline_path = baseline or (output / "baseline") + _write_baseline(result, baseline_path) + + console.print(f"[green]✓ Audit complete! 
Reports in {output}[/green]") + + # Enforce policy + if enforce and result.violations: + _enforce_policy(result.violations) + + except Exception as e: + logger.exception(f"Scan failed: {e}") + raise typer.Exit(2) from None + + +@app.command() +def inventory( + repo: str = typer.Option(".", help="Repository path"), + verbose: bool = typer.Option(False, "--verbose", "-v"), +) -> None: + """Print deduplicated action inventory.""" + setup_logging(verbose) + logger = logging.getLogger(__name__) + + repo_path = Path(repo).resolve() + scanner = Scanner(repo_path) + parser = Parser(repo_path) + analyzer = Analyzer() + + workflow_files = scanner.find_workflows() + all_actions = [] + + for wf_file in workflow_files: + try: + workflow = parser.parse_workflow(wf_file) + all_actions.extend(workflow.actions_used) + except Exception as e: + logger.error(f"Failed to parse {wf_file}: {e}") + if verbose: + logger.exception(e) + + unique_actions = analyzer.deduplicate_actions(all_actions) + + console.print(f"\n[cyan]Unique Actions: {len(unique_actions)}[/cyan]\n") + for key, _action in sorted(unique_actions.items()): + console.print(f" • {key}") + + +@app.command() +def validate( + repo: str = typer.Option(".", help="Repository path"), + policy_file: Path | None = typer.Option(None, help="Policy file"), + enforce: bool = typer.Option(False, help="Exit non-zero on violations"), + verbose: bool = typer.Option(False, "--verbose", "-v"), +) -> None: + """Validate workflows against policy.""" + setup_logging(verbose) + logger = logging.getLogger(__name__) + + repo_path = Path(repo).resolve() + scanner = Scanner(repo_path) + parser = Parser(repo_path) + + workflow_files = scanner.find_workflows() + workflows = {} + all_actions = [] + + for wf_file in workflow_files: + try: + workflow = parser.parse_workflow(wf_file) + rel_path = str(wf_file.relative_to(repo_path)) + workflows[rel_path] = workflow + all_actions.extend(workflow.actions_used) + except Exception as e: + logger.error(f"Failed 
to parse {wf_file}: {e}") + if verbose: + logger.exception(e) + + # Load or use default policy + policy = Policy() + if policy_file and policy_file.exists(): + # TODO: Parse YAML policy file here + pass + + validator = PolicyValidator(policy) + violations = validator.validate(workflows, all_actions) + + if violations: + console.print(f"\n[yellow]Found {len(violations)} policy violations:[/yellow]\n") + for v in violations: + severity = v.get("severity", "warning").upper() + color = "red" if severity == "ERROR" else "yellow" + console.print(f"[{color}]{severity}[/{color}] {v['workflow']}: {v['message']}") + + if enforce: + errors = [v for v in violations if v.get("severity") == "error"] + if errors: + raise typer.Exit(1) + else: + console.print("[green]✓ No policy violations found[/green]") + + +@app.command() +def version() -> None: + """Show version information.""" + console.print(f"ghaw-auditor version {__version__}") + + +if __name__ == "__main__": # pragma: no cover + app() diff --git a/ghaw_auditor/differ.py b/ghaw_auditor/differ.py new file mode 100644 index 0000000..742a8b7 --- /dev/null +++ b/ghaw_auditor/differ.py @@ -0,0 +1,248 @@ +"""Diff functionality for comparing baselines.""" + +from __future__ import annotations + +import json +import logging +from datetime import datetime +from pathlib import Path +from typing import Any + +from ghaw_auditor.models import ( + ActionDiff, + ActionManifest, + Baseline, + BaselineMeta, + DiffEntry, + WorkflowDiff, + WorkflowMeta, +) + +logger = logging.getLogger(__name__) + + +class Differ: + """Compares current state against baseline.""" + + def __init__(self, baseline_path: Path) -> None: + """Initialize differ.""" + self.baseline_path = baseline_path + + def load_baseline(self) -> Baseline: + """Load baseline from disk.""" + actions_file = self.baseline_path / "actions.json" + workflows_file = self.baseline_path / "workflows.json" + meta_file = self.baseline_path / "meta.json" + + if not actions_file.exists() or 
not workflows_file.exists(): + raise FileNotFoundError(f"Baseline not found at {self.baseline_path}") + + with open(actions_file, encoding="utf-8") as f: + actions_data = json.load(f) + + with open(workflows_file, encoding="utf-8") as f: + workflows_data = json.load(f) + + meta = BaselineMeta(auditor_version="1.0.0", commit_sha=None, timestamp=datetime.now()) + if meta_file.exists(): + with open(meta_file, encoding="utf-8") as f: + meta_data = json.load(f) + meta = BaselineMeta(**meta_data) + + # Convert dicts to model instances + actions = {k: ActionManifest(**v) for k, v in actions_data.items()} + workflows = {k: WorkflowMeta(**v) for k, v in workflows_data.items()} + + return Baseline(meta=meta, actions=actions, workflows=workflows) + + def save_baseline( + self, workflows: dict[str, WorkflowMeta], actions: dict[str, ActionManifest], commit_sha: str | None = None + ) -> None: + """Save current state as baseline.""" + self.baseline_path.mkdir(parents=True, exist_ok=True) + + # Save actions + actions_data = {k: v.model_dump(mode="json") for k, v in actions.items()} + with open(self.baseline_path / "actions.json", "w", encoding="utf-8") as f: + json.dump(actions_data, f, indent=2, default=str) + + # Save workflows + workflows_data = {k: v.model_dump(mode="json") for k, v in workflows.items()} + with open(self.baseline_path / "workflows.json", "w", encoding="utf-8") as f: + json.dump(workflows_data, f, indent=2, default=str) + + # Save metadata + meta = BaselineMeta(auditor_version="1.0.0", commit_sha=commit_sha, timestamp=datetime.now()) + with open(self.baseline_path / "meta.json", "w", encoding="utf-8") as f: + json.dump(meta.model_dump(mode="json"), f, indent=2, default=str) + + logger.info(f"Baseline saved to {self.baseline_path}") + + def diff_workflows(self, baseline: dict[str, WorkflowMeta], current: dict[str, WorkflowMeta]) -> list[WorkflowDiff]: + """Compare workflows.""" + diffs: list[WorkflowDiff] = [] + + all_paths = set(baseline.keys()) | 
set(current.keys()) + + for path in all_paths: + baseline_wf = baseline.get(path) + current_wf = current.get(path) + + if not baseline_wf and current_wf: + # Added + diffs.append(WorkflowDiff(path=path, status="added", changes=[])) + elif baseline_wf and not current_wf: + # Removed + diffs.append(WorkflowDiff(path=path, status="removed", changes=[])) + elif baseline_wf and current_wf: + # Compare + changes = self._compare_workflows(baseline_wf, current_wf) + status = "modified" if changes else "unchanged" + diffs.append(WorkflowDiff(path=path, status=status, changes=changes)) + + return diffs + + def _compare_workflows(self, old: WorkflowMeta, new: WorkflowMeta) -> list[DiffEntry]: + """Compare two workflows.""" + changes: list[DiffEntry] = [] + + # Compare triggers + if set(old.triggers) != set(new.triggers): + changes.append( + DiffEntry(field="triggers", old_value=old.triggers, new_value=new.triggers, change_type="modified") + ) + + # Compare permissions + if old.permissions != new.permissions: + changes.append( + DiffEntry( + field="permissions", + old_value=old.permissions.model_dump() if old.permissions else None, + new_value=new.permissions.model_dump() if new.permissions else None, + change_type="modified", + ) + ) + + # Compare concurrency + if old.concurrency != new.concurrency: + changes.append( + DiffEntry( + field="concurrency", old_value=old.concurrency, new_value=new.concurrency, change_type="modified" + ) + ) + + # Compare jobs + if set(old.jobs.keys()) != set(new.jobs.keys()): + changes.append( + DiffEntry( + field="jobs", + old_value=list(old.jobs.keys()), + new_value=list(new.jobs.keys()), + change_type="modified", + ) + ) + + # Compare secrets + if old.secrets_used != new.secrets_used: + changes.append( + DiffEntry( + field="secrets_used", + old_value=sorted(old.secrets_used), + new_value=sorted(new.secrets_used), + change_type="modified", + ) + ) + + return changes + + def diff_actions(self, baseline: dict[str, ActionManifest], current: 
dict[str, ActionManifest]) -> list[ActionDiff]: + """Compare actions.""" + diffs: list[ActionDiff] = [] + + all_keys = set(baseline.keys()) | set(current.keys()) + + for key in all_keys: + baseline_action = baseline.get(key) + current_action = current.get(key) + + if not baseline_action and current_action: + # Added + diffs.append(ActionDiff(key=key, status="added", changes=[])) + elif baseline_action and not current_action: + # Removed + diffs.append(ActionDiff(key=key, status="removed", changes=[])) + elif baseline_action and current_action: + # Compare (for now, just mark as unchanged) + diffs.append(ActionDiff(key=key, status="unchanged", changes=[])) + + return diffs + + def _write_workflow_changes(self, f: Any, workflow_diffs: list[WorkflowDiff]) -> None: + """Write workflow changes section to markdown file.""" + f.write("## Workflow Changes\n\n") + + added_wfs = [d for d in workflow_diffs if d.status == "added"] + removed_wfs = [d for d in workflow_diffs if d.status == "removed"] + modified_wfs = [d for d in workflow_diffs if d.status == "modified"] + + f.write(f"- **Added:** {len(added_wfs)}\n") + f.write(f"- **Removed:** {len(removed_wfs)}\n") + f.write(f"- **Modified:** {len(modified_wfs)}\n\n") + + if added_wfs: + f.write("### Added Workflows\n\n") + for diff in added_wfs: + f.write(f"- `{diff.path}`\n") + f.write("\n") + + if removed_wfs: + f.write("### Removed Workflows\n\n") + for diff in removed_wfs: + f.write(f"- `{diff.path}`\n") + f.write("\n") + + if modified_wfs: + f.write("### Modified Workflows\n\n") + for diff in modified_wfs: + f.write(f"#### {diff.path}\n\n") + for change in diff.changes: + f.write(f"- **{change.field}** changed\n") + if change.old_value is not None: + f.write(f" - Old: `{change.old_value}`\n") + if change.new_value is not None: + f.write(f" - New: `{change.new_value}`\n") + f.write("\n") + + def _write_action_changes(self, f: Any, action_diffs: list[ActionDiff]) -> None: + """Write action changes section to markdown 
file.""" + f.write("## Action Changes\n\n") + + added_actions = [d for d in action_diffs if d.status == "added"] + removed_actions = [d for d in action_diffs if d.status == "removed"] + + f.write(f"- **Added:** {len(added_actions)}\n") + f.write(f"- **Removed:** {len(removed_actions)}\n\n") + + if added_actions: + f.write("### Added Actions\n\n") + for diff in added_actions: + f.write(f"- `{diff.key}`\n") + f.write("\n") + + if removed_actions: + f.write("### Removed Actions\n\n") + for diff in removed_actions: + f.write(f"- `{diff.key}`\n") + + def render_diff_markdown( + self, workflow_diffs: list[WorkflowDiff], action_diffs: list[ActionDiff], output_path: Path + ) -> None: + """Render diff as Markdown.""" + with open(output_path, "w", encoding="utf-8") as f: + f.write("# Audit Diff Report\n\n") + f.write(f"**Generated:** {datetime.now().isoformat()}\n\n") + + self._write_workflow_changes(f, workflow_diffs) + self._write_action_changes(f, action_diffs) + + logger.info(f"Diff report written to {output_path}") diff --git a/ghaw_auditor/factory.py b/ghaw_auditor/factory.py new file mode 100644 index 0000000..ad554d0 --- /dev/null +++ b/ghaw_auditor/factory.py @@ -0,0 +1,62 @@ +"""Factory for creating audit services with dependency injection.""" + +from __future__ import annotations + +from pathlib import Path + +from ghaw_auditor.analyzer import Analyzer +from ghaw_auditor.cache import Cache +from ghaw_auditor.github_client import GitHubClient +from ghaw_auditor.models import Policy +from ghaw_auditor.parser import Parser +from ghaw_auditor.policy import PolicyValidator +from ghaw_auditor.resolver import Resolver +from ghaw_auditor.scanner import Scanner +from ghaw_auditor.services import AuditService + + +class AuditServiceFactory: + """Factory for creating audit services with configured dependencies.""" + + @staticmethod + def create( + repo_path: Path, + token: str | None = None, + offline: bool = False, + cache_dir: Path | None = None, + concurrency: int = 4, + 
policy: Policy | None = None, + exclude_patterns: list[str] | None = None, + ) -> AuditService: + """Create configured audit service. + + Args: + repo_path: Path to repository + token: GitHub API token + offline: Disable API calls + cache_dir: Cache directory path + concurrency: API concurrency level + policy: Policy configuration + exclude_patterns: File exclusion patterns + + Returns: + Configured AuditService instance + """ + # Core components (always created) + scanner = Scanner(repo_path, exclude_patterns=exclude_patterns or []) + parser = Parser(repo_path) + analyzer = Analyzer() + cache = Cache(cache_dir) + + # Optional resolver (only if not offline) + resolver = None + if not offline: + client = GitHubClient(token) + resolver = Resolver(client, cache, repo_path, concurrency) + + # Optional validator (only if policy provided) + validator = None + if policy: + validator = PolicyValidator(policy) + + return AuditService(scanner, parser, analyzer, resolver, validator) diff --git a/ghaw_auditor/github_client.py b/ghaw_auditor/github_client.py new file mode 100644 index 0000000..0f9a1d0 --- /dev/null +++ b/ghaw_auditor/github_client.py @@ -0,0 +1,128 @@ +"""GitHub API client for resolving actions and refs.""" + +from __future__ import annotations + +import logging +from typing import Any + +import httpx +from tenacity import retry, retry_if_exception, stop_after_attempt, wait_exponential + +logger = logging.getLogger(__name__) + +# Suppress httpx INFO logging (we handle logging ourselves) +logging.getLogger("httpx").setLevel(logging.WARNING) + + +def should_retry_http_error(exception: BaseException) -> bool: + """Determine if an HTTP error should be retried. 
class GitHubClient:
    """Thin GitHub REST client with retry/backoff via tenacity.

    Retries are driven by ``should_retry_http_error``: network failures,
    HTTP 5xx and 429 are retried; other 4xx responses fail immediately.
    """

    def __init__(self, token: str | None = None, base_url: str = "https://api.github.com") -> None:
        """Create the client; a token adds a Bearer Authorization header."""
        self.base_url = base_url
        headers = {
            "Accept": "application/vnd.github+json",
            "X-GitHub-Api-Version": "2022-11-28",
        }
        if token:
            headers["Authorization"] = f"Bearer {token}"
        self.headers = headers
        # follow_redirects is needed for the raw.githubusercontent.com fetches
        self.client = httpx.Client(headers=self.headers, timeout=30.0, follow_redirects=True)

    @retry(
        retry=retry_if_exception(should_retry_http_error),
        wait=wait_exponential(multiplier=1, min=2, max=30),
        stop=stop_after_attempt(3),
    )
    def get_ref_sha(self, owner: str, repo: str, ref: str) -> str:
        """Resolve a tag/branch/ref to its commit SHA via the commits API.

        Raises:
            httpx.HTTPStatusError: on any non-2xx response (after logging).
            httpx.RequestError: on network failure (retried by decorator).
        """
        url = f"{self.base_url}/repos/{owner}/{repo}/commits/{ref}"
        logger.debug(f"Fetching ref SHA: {owner}/{repo}@{ref}")

        try:
            resp = self.client.get(url)
            resp.raise_for_status()
        except httpx.HTTPStatusError as exc:
            code = exc.response.status_code
            # Auth/not-found failures get explicit, actionable log lines
            if code == 404:
                logger.error(f"Action not found: {owner}/{repo}@{ref}")
            elif code == 403:
                logger.error(f"Access denied (check token permissions): {owner}/{repo}@{ref}")
            elif code == 401:
                logger.error(f"Authentication required: {owner}/{repo}@{ref}")
            elif 400 <= code < 600:
                logger.warning(f"HTTP {code} fetching {url}")
            raise

        sha = resp.json()["sha"]
        logger.debug(f"Resolved {owner}/{repo}@{ref} -> {sha}")
        return sha

    @retry(
        retry=retry_if_exception(should_retry_http_error),
        wait=wait_exponential(multiplier=1, min=2, max=30),
        stop=stop_after_attempt(3),
    )
    def get_file_content(self, owner: str, repo: str, path: str, ref: str) -> str:
        """Fetch raw file content at a specific ref.

        Uses raw.githubusercontent.com rather than the contents API.

        Raises:
            httpx.HTTPStatusError: on any non-2xx response (after logging).
        """
        raw_url = f"https://raw.githubusercontent.com/{owner}/{repo}/{ref}/{path}"
        logger.debug(f"Fetching file: {owner}/{repo}/{path}@{ref}")

        try:
            resp = self.client.get(raw_url)
            resp.raise_for_status()
        except httpx.HTTPStatusError as exc:
            code = exc.response.status_code
            if code == 404:
                # Expected when probing action.yml before action.yaml — keep quiet
                logger.debug(f"File not found: {path}")
            elif code == 403:
                logger.error(f"Access denied (check token permissions): {owner}/{repo}/{path}")
            elif code == 401:
                logger.error(f"Authentication required: {owner}/{repo}/{path}")
            elif 400 <= code < 600:
                logger.warning(f"HTTP {code} fetching {raw_url}")
            raise

        body = resp.text
        logger.debug(f"Downloaded {path} ({len(body)} bytes)")
        return body

    def close(self) -> None:
        """Close the underlying HTTP client."""
        self.client.close()

    def __enter__(self) -> GitHubClient:
        """Context manager entry: yields self."""
        return self

    def __exit__(self, *args: Any) -> None:
        """Context manager exit: closes the HTTP client."""
        self.close()
ActionType(str, Enum): + """Type of action reference.""" + + LOCAL = "local" + GITHUB = "github" + DOCKER = "docker" + REUSABLE_WORKFLOW = "reusable_workflow" + + +class ActionRef(BaseModel): + """Reference to an action with version info.""" + + type: ActionType + owner: str | None = None + repo: str | None = None + path: str | None = None + ref: str | None = None # Tag, branch, or SHA + resolved_sha: str | None = None + source_file: str + source_line: int | None = None + + def canonical_key(self) -> str: + """Generate unique key for deduplication.""" + if self.type == ActionType.LOCAL: + return f"local:{self.path}" + elif self.type == ActionType.DOCKER: + return f"docker:{self.path}" + elif self.type == ActionType.REUSABLE_WORKFLOW: + return f"{self.owner}/{self.repo}/{self.path}@{self.resolved_sha or self.ref}" + return f"{self.owner}/{self.repo}@{self.resolved_sha or self.ref}" + + +class ActionInput(BaseModel): + """Action input definition.""" + + name: str + description: str | None = None + required: bool = False + default: str | bool | int | None = None + + +class ActionOutput(BaseModel): + """Action output definition.""" + + name: str + description: str | None = None + + +class ActionManifest(BaseModel): + """Parsed action.yml manifest.""" + + name: str + description: str | None = None + author: str | None = None + inputs: dict[str, ActionInput] = Field(default_factory=dict) + outputs: dict[str, ActionOutput] = Field(default_factory=dict) + runs: dict[str, Any] = Field(default_factory=dict) + branding: dict[str, str] | None = None + is_composite: bool = False + is_docker: bool = False + is_javascript: bool = False + + +class PermissionLevel(str, Enum): + """Permission level.""" + + NONE = "none" + READ = "read" + WRITE = "write" + + +class Permissions(BaseModel): + """Job or workflow permissions.""" + + actions: PermissionLevel | None = None + checks: PermissionLevel | None = None + contents: PermissionLevel | None = None + deployments: PermissionLevel | 
None = None + id_token: PermissionLevel | None = None + issues: PermissionLevel | None = None + packages: PermissionLevel | None = None + pages: PermissionLevel | None = None + pull_requests: PermissionLevel | None = None + repository_projects: PermissionLevel | None = None + security_events: PermissionLevel | None = None + statuses: PermissionLevel | None = None + + +class Container(BaseModel): + """Container configuration.""" + + image: str + credentials: dict[str, str] | None = None + env: dict[str, str | int | float | bool] = Field(default_factory=dict) + ports: list[int] = Field(default_factory=list) + volumes: list[str] = Field(default_factory=list) + options: str | None = None + + +class Service(BaseModel): + """Service container configuration.""" + + name: str + image: str + credentials: dict[str, str] | None = None + env: dict[str, str | int | float | bool] = Field(default_factory=dict) + ports: list[int] = Field(default_factory=list) + volumes: list[str] = Field(default_factory=list) + options: str | None = None + + +class Strategy(BaseModel): + """Job matrix strategy.""" + + matrix: dict[str, Any] = Field(default_factory=dict) + fail_fast: bool = True + max_parallel: int | None = None + + +class JobMeta(BaseModel): + """Job metadata.""" + + name: str + runs_on: str | list[str] + needs: list[str] = Field(default_factory=list) + if_condition: str | None = Field(None, alias="if") + permissions: Permissions | None = None + environment: str | dict[str, Any] | None = None + concurrency: str | dict[str, Any] | None = None + timeout_minutes: int | None = None + continue_on_error: bool = False + container: Container | None = None + services: dict[str, Service] = Field(default_factory=dict) + strategy: Strategy | None = None + # Reusable workflow fields + uses: str | None = None # Reusable workflow reference + with_inputs: dict[str, Any] = Field(default_factory=dict) # Inputs via 'with' + secrets_passed: dict[str, str] | None = None # Secrets passed to reusable 
workflow + inherit_secrets: bool = False # Whether secrets: inherit is used + outputs: dict[str, Any] = Field(default_factory=dict) # Job outputs + # Action tracking + actions_used: list[ActionRef] = Field(default_factory=list) + secrets_used: set[str] = Field(default_factory=set) + env_vars: dict[str, str | int | float | bool] = Field(default_factory=dict) + + +class ReusableContract(BaseModel): + """Reusable workflow contract.""" + + inputs: dict[str, Any] = Field(default_factory=dict) + outputs: dict[str, Any] = Field(default_factory=dict) + secrets: dict[str, Any] = Field(default_factory=dict) + + +class WorkflowMeta(BaseModel): + """Workflow metadata.""" + + name: str + path: str + triggers: list[str] = Field(default_factory=list) + permissions: Permissions | None = None + concurrency: str | dict[str, Any] | None = None + env: dict[str, str | int | float | bool] = Field(default_factory=dict) + defaults: dict[str, Any] = Field(default_factory=dict) + jobs: dict[str, JobMeta] = Field(default_factory=dict) + is_reusable: bool = False + reusable_contract: ReusableContract | None = None + secrets_used: set[str] = Field(default_factory=set) + actions_used: list[ActionRef] = Field(default_factory=list) + + +class PolicyRule(BaseModel): + """Policy rule.""" + + name: str + enabled: bool = True + severity: str = "warning" # warning, error + config: dict[str, Any] = Field(default_factory=dict) + + +class Policy(BaseModel): + """Audit policy configuration.""" + + min_permissions: bool = True + require_pinned_actions: bool = True + forbid_branch_refs: bool = False + allowed_actions: list[str] = Field(default_factory=list) + denied_actions: list[str] = Field(default_factory=list) + require_concurrency_on_pr: bool = False + custom_rules: list[PolicyRule] = Field(default_factory=list) + + +class BaselineMeta(BaseModel): + """Baseline metadata.""" + + auditor_version: str + commit_sha: str | None = None + timestamp: datetime + schema_version: str = "1.0" + + +class 
Baseline(BaseModel): + """Baseline snapshot for diff mode.""" + + meta: BaselineMeta + actions: dict[str, ActionManifest] + workflows: dict[str, WorkflowMeta] + + +class DiffEntry(BaseModel): + """Single diff entry.""" + + field: str + old_value: Any = None + new_value: Any = None + change_type: str # added, removed, modified + + +class ActionDiff(BaseModel): + """Action diff.""" + + key: str + status: str # added, removed, modified, unchanged + changes: list[DiffEntry] = Field(default_factory=list) + + +class WorkflowDiff(BaseModel): + """Workflow diff.""" + + path: str + status: str # added, removed, modified, unchanged + changes: list[DiffEntry] = Field(default_factory=list) + + +class AuditReport(BaseModel): + """Complete audit report.""" + + generated_at: datetime + repository: str + commit_sha: str | None = None + actions: dict[str, ActionManifest] + workflows: dict[str, WorkflowMeta] + policy_violations: list[dict[str, Any]] = Field(default_factory=list) diff --git a/ghaw_auditor/parser.py b/ghaw_auditor/parser.py new file mode 100644 index 0000000..fb2f546 --- /dev/null +++ b/ghaw_auditor/parser.py @@ -0,0 +1,373 @@ +"""YAML parser for workflow and action files.""" + +from __future__ import annotations + +import logging +import re +from pathlib import Path +from typing import Any + +from ruamel.yaml import YAML + +from ghaw_auditor.models import ( + ActionInput, + ActionManifest, + ActionOutput, + ActionRef, + ActionType, + Container, + JobMeta, + PermissionLevel, + Permissions, + ReusableContract, + Service, + Strategy, + WorkflowMeta, +) + +logger = logging.getLogger(__name__) + + +class Parser: + """Parse workflow and action YAML files.""" + + def __init__(self, repo_path: Path | None = None) -> None: + """Initialize parser.""" + self.yaml = YAML(typ="safe") + self.repo_path = repo_path or Path.cwd() + + def parse_workflow(self, path: Path) -> WorkflowMeta: + """Parse a workflow file.""" + with open(path, encoding="utf-8") as f: + content = f.read() + 
data = self.yaml.load(content) + + if not data: + raise ValueError(f"Empty workflow file: {path}") + + name = data.get("name", path.stem) + triggers = self._extract_triggers(data.get("on", {})) + permissions = self._parse_permissions(data.get("permissions")) + env = data.get("env", {}) + concurrency = data.get("concurrency") + defaults = data.get("defaults", {}) + + # Check if reusable workflow + is_reusable = "workflow_call" in triggers + reusable_contract = None + if is_reusable: + on_data = data.get("on", {}) + if isinstance(on_data, dict) and "workflow_call" in on_data: + call_data = on_data["workflow_call"] + if call_data is not None: + reusable_contract = ReusableContract( + inputs=call_data.get("inputs", {}), + outputs=call_data.get("outputs", {}), + secrets=call_data.get("secrets", {}), + ) + + # Parse jobs + jobs = {} + secrets_used: set[str] = set() + actions_used: list[ActionRef] = [] + + jobs_data = data.get("jobs") + if jobs_data: + for job_name, job_data in jobs_data.items(): + job_meta = self._parse_job(job_name, job_data, path, content) + jobs[job_name] = job_meta + secrets_used.update(job_meta.secrets_used) + actions_used.extend(job_meta.actions_used) + + return WorkflowMeta( + name=name, + path=str(path.relative_to(self.repo_path)), + triggers=triggers, + permissions=permissions, + concurrency=concurrency, + env=env, + defaults=defaults, + jobs=jobs, + is_reusable=is_reusable, + reusable_contract=reusable_contract, + secrets_used=secrets_used, + actions_used=actions_used, + ) + + def _extract_triggers(self, on_data: Any) -> list[str]: + """Extract trigger events from 'on' field.""" + if isinstance(on_data, str): + return [on_data] + elif isinstance(on_data, list): + return on_data + elif isinstance(on_data, dict): + return list(on_data.keys()) + return [] + + def _parse_permissions(self, perms: Any) -> Permissions | None: + """Parse permissions.""" + if perms is None: + return None + if isinstance(perms, str): + # Global read-all or write-all + 
return Permissions() + if isinstance(perms, dict): + return Permissions(**{k: PermissionLevel(v) for k, v in perms.items() if v}) + return None + + def _parse_job(self, name: str, data: dict[str, Any] | None, path: Path, content: str) -> JobMeta: + """Parse a job.""" + if data is None: + data = {} + + # Check if this is a reusable workflow call + uses = data.get("uses") + is_reusable_call = uses is not None + + # runs-on is optional for reusable workflow calls + runs_on = data.get("runs-on", "ubuntu-latest" if not is_reusable_call else "") + + needs = data.get("needs", []) + if isinstance(needs, str): + needs = [needs] + + permissions = self._parse_permissions(data.get("permissions")) + environment = data.get("environment") + concurrency = data.get("concurrency") + timeout_minutes = data.get("timeout-minutes") + continue_on_error = data.get("continue-on-error", False) + container = self._parse_container(data.get("container")) + services = self._parse_services(data.get("services", {})) + strategy = self._parse_strategy(data.get("strategy")) + + # Reusable workflow fields + with_inputs = data.get("with", {}) + outputs = data.get("outputs", {}) + + # Parse secrets for reusable workflows + secrets_passed = None + inherit_secrets = False + secrets_data = data.get("secrets") + if secrets_data == "inherit": + inherit_secrets = True + elif isinstance(secrets_data, dict): + secrets_passed = secrets_data + + # Extract actions from steps or reusable workflow + actions_used: list[ActionRef] = [] + secrets_used: set[str] = set() + + if is_reusable_call: + # Parse reusable workflow reference + workflow_ref = self._parse_reusable_workflow_ref(uses, path) + actions_used.append(workflow_ref) + else: + # Parse actions from steps + for step in data.get("steps", []): + if step is None: + continue + if "uses" in step: + action_ref = self._parse_action_ref(step["uses"], path) + actions_used.append(action_ref) + + # Extract secrets from entire job content + 
secrets_used.update(self._extract_secrets(str(data))) + + job_data = { + "name": name, + "runs_on": runs_on, + "needs": needs, + "permissions": permissions, + "environment": environment, + "concurrency": concurrency, + "timeout_minutes": timeout_minutes, + "continue_on_error": continue_on_error, + "container": container, + "services": services, + "strategy": strategy, + "uses": uses, + "with_inputs": with_inputs, + "secrets_passed": secrets_passed, + "inherit_secrets": inherit_secrets, + "outputs": outputs, + "actions_used": actions_used, + "secrets_used": secrets_used, + "env_vars": data.get("env", {}), + } + + # Use alias for 'if' field + if data.get("if") is not None: + job_data["if"] = data.get("if") + + return JobMeta(**job_data) + + def _parse_action_ref(self, uses: str, source_file: Path) -> ActionRef: + """Parse a 'uses' string into ActionRef.""" + uses = uses.strip() + + # Local action: ./path or ./.github/actions/name + if uses.startswith("./"): + return ActionRef( + type=ActionType.LOCAL, + path=uses, + source_file=str(source_file), + ) + + # Docker action: docker:// + if uses.startswith("docker://"): + return ActionRef( + type=ActionType.DOCKER, + path=uses, + source_file=str(source_file), + ) + + # GitHub action: owner/repo@ref or owner/repo/path@ref + match = re.match(r"^([^/]+)/([^/@]+)(?:/([^@]+))?@(.+)$", uses) + if match: + owner, repo, path, ref = match.groups() + return ActionRef( + type=ActionType.GITHUB, + owner=owner, + repo=repo, + path=path or "action.yml", + ref=ref, + source_file=str(source_file), + ) + + raise ValueError(f"Invalid action reference: {uses}") + + def _parse_reusable_workflow_ref(self, uses: str, source_file: Path) -> ActionRef: + """Parse a reusable workflow 'uses' string into ActionRef. 
+ + Format: owner/repo/.github/workflows/workflow.yml@ref + or: ./.github/workflows/workflow.yml (local) + """ + uses = uses.strip() + + # Local reusable workflow + if uses.startswith("./"): + return ActionRef( + type=ActionType.REUSABLE_WORKFLOW, + path=uses, + source_file=str(source_file), + ) + + # GitHub reusable workflow: owner/repo/path/to/workflow.yml@ref + match = re.match(r"^([^/]+)/([^/@]+)/(.+\.ya?ml)@(.+)$", uses) + if match: + owner, repo, path, ref = match.groups() + return ActionRef( + type=ActionType.REUSABLE_WORKFLOW, + owner=owner, + repo=repo, + path=path, + ref=ref, + source_file=str(source_file), + ) + + raise ValueError(f"Invalid reusable workflow reference: {uses}") + + def _parse_container(self, data: Any) -> Container | None: + """Parse container configuration.""" + if data is None: + return None + if isinstance(data, str): + return Container(image=data) + return Container( + image=data.get("image", ""), + credentials=data.get("credentials"), + env=data.get("env", {}), + ports=data.get("ports", []), + volumes=data.get("volumes", []), + options=data.get("options"), + ) + + def _parse_services(self, data: dict[str, Any] | None) -> dict[str, Service]: + """Parse services.""" + if data is None: + return {} + services = {} + for name, svc_data in data.items(): + if isinstance(svc_data, str): + services[name] = Service(name=name, image=svc_data) + else: + services[name] = Service( + name=name, + image=svc_data.get("image", ""), + credentials=svc_data.get("credentials"), + env=svc_data.get("env", {}), + ports=svc_data.get("ports", []), + volumes=svc_data.get("volumes", []), + options=svc_data.get("options"), + ) + return services + + def _parse_strategy(self, data: Any) -> Strategy | None: + """Parse strategy.""" + if data is None: + return None + return Strategy( + matrix=data.get("matrix", {}), + fail_fast=data.get("fail-fast", True), + max_parallel=data.get("max-parallel"), + ) + + def _extract_secrets(self, content: str) -> set[str]: + 
"""Extract secret references from content.""" + secrets = set() + # Match ${{ secrets.NAME }} + pattern = r"\$\{\{\s*secrets\.(\w+)\s*\}\}" + for match in re.finditer(pattern, content): + secrets.add(match.group(1)) + return secrets + + def parse_action(self, path: Path) -> ActionManifest: + """Parse an action.yml file.""" + with open(path, encoding="utf-8") as f: + data = self.yaml.load(f) + + if not data: + raise ValueError(f"Empty action file: {path}") + + name = data.get("name", path.parent.name) + description = data.get("description") + author = data.get("author") + + # Parse inputs + inputs = {} + for input_name, input_data in data.get("inputs", {}).items(): + if isinstance(input_data, dict): + inputs[input_name] = ActionInput( + name=input_name, + description=input_data.get("description"), + required=input_data.get("required", False), + default=input_data.get("default"), + ) + + # Parse outputs + outputs = {} + for output_name, output_data in data.get("outputs", {}).items(): + if isinstance(output_data, dict): + outputs[output_name] = ActionOutput( + name=output_name, + description=output_data.get("description"), + ) + + # Parse runs + runs = data.get("runs", {}) + is_composite = runs.get("using") == "composite" + is_docker = runs.get("using") in ("docker", "Dockerfile") + is_javascript = runs.get("using", "").startswith("node") + + return ActionManifest( + name=name, + description=description, + author=author, + inputs=inputs, + outputs=outputs, + runs=runs, + branding=data.get("branding"), + is_composite=is_composite, + is_docker=is_docker, + is_javascript=is_javascript, + ) diff --git a/ghaw_auditor/policy.py b/ghaw_auditor/policy.py new file mode 100644 index 0000000..4f3ca00 --- /dev/null +++ b/ghaw_auditor/policy.py @@ -0,0 +1,163 @@ +"""Policy validator for workflows and actions.""" + +from __future__ import annotations + +import logging +import re +from typing import Any + +from ghaw_auditor.models import ActionRef, ActionType, Policy, WorkflowMeta + 
+logger = logging.getLogger(__name__) + + +class PolicyValidator: + """Validates workflows against policy rules.""" + + def __init__(self, policy: Policy) -> None: + """Initialize validator.""" + self.policy = policy + + def validate(self, workflows: dict[str, WorkflowMeta], actions: list[ActionRef]) -> list[dict[str, Any]]: + """Validate workflows and actions against policy.""" + violations: list[dict[str, Any]] = [] + + for workflow_path, workflow in workflows.items(): + violations.extend(self._validate_workflow(workflow_path, workflow, actions)) + + return violations + + def _validate_workflow( + self, workflow_path: str, workflow: WorkflowMeta, actions: list[ActionRef] + ) -> list[dict[str, Any]]: + """Validate a single workflow.""" + violations: list[dict[str, Any]] = [] + + # Check pinned actions + if self.policy.require_pinned_actions: + violations.extend(self._check_pinned_actions(workflow_path, workflow)) + + # Check branch refs + if self.policy.forbid_branch_refs: + violations.extend(self._check_branch_refs(workflow_path, workflow)) + + # Check allowed/denied actions + violations.extend(self._check_action_allowlist(workflow_path, workflow)) + + # Check concurrency on PR + if self.policy.require_concurrency_on_pr: + violations.extend(self._check_pr_concurrency(workflow_path, workflow)) + + return violations + + def _check_pinned_actions(self, workflow_path: str, workflow: WorkflowMeta) -> list[dict[str, Any]]: + """Check if actions are pinned to SHA.""" + violations: list[dict[str, Any]] = [] + + # Check all actions in workflow + all_actions = workflow.actions_used[:] + for job in workflow.jobs.values(): + all_actions.extend(job.actions_used) + + for action in all_actions: + # Check if ref is a SHA (40 hex chars) + if action.type == ActionType.GITHUB and action.ref and not re.match(r"^[a-f0-9]{40}$", action.ref): + violations.append( + { + "workflow": workflow_path, + "rule": "require_pinned_actions", + "severity": "error", + "message": f"Action 
{action.owner}/{action.repo} is not pinned to SHA: {action.ref}", + } + ) + + return violations + + def _check_branch_refs(self, workflow_path: str, workflow: WorkflowMeta) -> list[dict[str, Any]]: + """Check for branch refs in actions.""" + violations: list[dict[str, Any]] = [] + + # Check all actions in workflow + all_actions = workflow.actions_used[:] + for job in workflow.jobs.values(): + all_actions.extend(job.actions_used) + + for action in all_actions: + # Common branch names + if action.type == ActionType.GITHUB and action.ref and action.ref in ("main", "master", "develop", "dev"): + violations.append( + { + "workflow": workflow_path, + "rule": "forbid_branch_refs", + "severity": "error", + "message": f"Action {action.owner}/{action.repo} uses branch ref: {action.ref}", + } + ) + + return violations + + def _check_action_allowlist(self, workflow_path: str, workflow: WorkflowMeta) -> list[dict[str, Any]]: + """Check allowed/denied actions.""" + violations: list[dict[str, Any]] = [] + + # Check all actions in workflow + all_actions = workflow.actions_used[:] + for job in workflow.jobs.values(): + all_actions.extend(job.actions_used) + + for action in all_actions: + if action.type == ActionType.GITHUB: + action_id = f"{action.owner}/{action.repo}" + + # Check denied list + if self.policy.denied_actions: + for denied in self.policy.denied_actions: + if self._matches_pattern(action_id, denied): + violations.append( + { + "workflow": workflow_path, + "rule": "denied_actions", + "severity": "error", + "message": f"Action {action_id} is denied by policy", + } + ) + + # Check allowed list (if specified) + if self.policy.allowed_actions: + allowed = any(self._matches_pattern(action_id, pattern) for pattern in self.policy.allowed_actions) + if not allowed: + violations.append( + { + "workflow": workflow_path, + "rule": "allowed_actions", + "severity": "error", + "message": f"Action {action_id} is not in allowed list", + } + ) + + return violations + + def 
_check_pr_concurrency(self, workflow_path: str, workflow: WorkflowMeta) -> list[dict[str, Any]]: + """Check if PR workflows have concurrency set.""" + violations: list[dict[str, Any]] = [] + + # Check if workflow is triggered by PR + pr_triggers = {"pull_request", "pull_request_target"} + has_pr_trigger = any(t in pr_triggers for t in workflow.triggers) + + if has_pr_trigger and not workflow.concurrency: + violations.append( + { + "workflow": workflow_path, + "rule": "require_concurrency_on_pr", + "severity": "warning", + "message": "PR workflow should have concurrency group to prevent resource waste", + } + ) + + return violations + + def _matches_pattern(self, action_id: str, pattern: str) -> bool: + """Check if action ID matches pattern (supports wildcards).""" + regex_pattern = pattern.replace("*", ".*") + return bool(re.match(f"^{regex_pattern}$", action_id)) diff --git a/ghaw_auditor/renderer.py b/ghaw_auditor/renderer.py new file mode 100644 index 0000000..18322bd --- /dev/null +++ b/ghaw_auditor/renderer.py @@ -0,0 +1,268 @@ +"""Renderers for JSON and Markdown reports.""" + +from __future__ import annotations + +import json +import logging +from datetime import datetime +from pathlib import Path +from typing import Any + +from ghaw_auditor.models import ActionManifest, ActionRef, ActionType, WorkflowMeta + +logger = logging.getLogger(__name__) + + +class Renderer: + """Renders audit reports in various formats.""" + + def __init__(self, output_dir: Path) -> None: + """Initialize renderer.""" + self.output_dir = output_dir + self.output_dir.mkdir(parents=True, exist_ok=True) + + @staticmethod + def _create_action_anchor(key: str) -> str: + """Create markdown-compatible anchor ID from action key. 
+ + Examples: + "actions/checkout@abc123" -> "actions-checkout" + "local:./sync-labels" -> "local-sync-labels" + "docker://alpine:3.8" -> "docker-alpine-3-8" + """ + # For GitHub actions, remove the @ref/SHA part + if "@" in key and not key.startswith("docker://"): + key = key.split("@")[0] + + # Replace special characters with dashes + anchor = key.replace("/", "-").replace(":", "-").replace(".", "-") + # Clean up multiple consecutive dashes + while "--" in anchor: + anchor = anchor.replace("--", "-") + return anchor.lower().strip("-") + + @staticmethod + def _get_action_repo_url(action_ref: ActionRef) -> str | None: + """Get GitHub repository URL for an action. + + Returns: + URL string for GitHub actions, None for local/docker actions + """ + if action_ref.type == ActionType.GITHUB and action_ref.owner and action_ref.repo: + return f"https://github.com/{action_ref.owner}/{action_ref.repo}" + return None + + def render_json( + self, workflows: dict[str, WorkflowMeta], actions: dict[str, ActionManifest], violations: list[dict[str, Any]] + ) -> None: + """Render JSON reports.""" + # Write workflows + workflows_data = {k: v.model_dump(mode="json") for k, v in workflows.items()} + workflows_file = self.output_dir / "workflows.json" + with open(workflows_file, "w", encoding="utf-8") as f: + json.dump(workflows_data, f, indent=2, default=str) + + # Write actions + actions_data = {k: v.model_dump(mode="json") for k, v in actions.items()} + actions_file = self.output_dir / "actions.json" + with open(actions_file, "w", encoding="utf-8") as f: + json.dump(actions_data, f, indent=2, default=str) + + # Write violations + violations_file = self.output_dir / "violations.json" + with open(violations_file, "w", encoding="utf-8") as f: + json.dump(violations, f, indent=2) + + logger.info(f"JSON reports written to {self.output_dir}") + + def _write_summary( + self, + f: Any, + workflows: dict[str, WorkflowMeta], + actions: dict[str, ActionManifest], + violations: list[dict[str, 
Any]], + ) -> None: + """Write summary section to markdown file.""" + f.write("## Summary\n\n") + f.write(f"- **Workflows:** {len(workflows)}\n") + f.write(f"- **Actions:** {len(actions)}\n") + f.write(f"- **Policy Violations:** {len(violations)}\n\n") + + def _write_analysis(self, f: Any, analysis: dict[str, Any]) -> None: + """Write analysis section to markdown file.""" + if not analysis: + return + + f.write("## Analysis\n\n") + f.write(f"- **Total Jobs:** {analysis.get('total_jobs', 0)}\n") + f.write(f"- **Reusable Workflows:** {analysis.get('reusable_workflows', 0)}\n") + + if "triggers" in analysis: + f.write("\n### Triggers\n\n") + for trigger, count in sorted(analysis["triggers"].items()): + f.write(f"- `{trigger}`: {count}\n") + f.write("\n") + + if "runners" in analysis: + f.write("\n### Runners\n\n") + for runner, count in sorted(analysis["runners"].items()): + f.write(f"- `{runner}`: {count}\n") + f.write("\n") + + if "secrets" in analysis: + f.write("\n### Secrets\n\n") + f.write(f"Total unique secrets: {analysis['secrets'].get('total_unique_secrets', 0)}\n\n") + secrets = analysis["secrets"].get("secrets", []) + if secrets: + for secret in sorted(secrets): + f.write(f"- `{secret}`\n") + f.write("\n") + + def _write_job_details(self, f: Any, job_name: str, job: Any) -> None: + """Write job details to markdown file.""" + f.write(f"- **{job_name}**\n") + f.write(f" - Runner: `{job.runs_on}`\n") + + if job.permissions: + active_perms = {k: v for k, v in job.permissions.model_dump(mode="json").items() if v is not None} + if active_perms: + f.write(" - Permissions:\n") + for perm_name, perm_level in sorted(active_perms.items()): + display_name = perm_name.replace("_", "-") + f.write(f" - `{display_name}`: {perm_level}\n") + + if job.actions_used: + f.write(" - Actions used:\n") + for action_ref in job.actions_used: + action_key = action_ref.canonical_key() + anchor = self._create_action_anchor(action_key) + + if action_ref.type == ActionType.GITHUB: + 
type_label = "GitHub" + display_name = f"{action_ref.owner}/{action_ref.repo}" + elif action_ref.type == ActionType.LOCAL: + type_label = "Local" + display_name = action_ref.path or "local" + elif action_ref.type == ActionType.DOCKER: + type_label = "Docker" + display_name = action_ref.path or action_key + else: + type_label = "Reusable Workflow" + display_name = action_ref.path or action_key + + f.write(f" - [{display_name}](#{anchor}) ({type_label})\n") + + def _write_workflows(self, f: Any, workflows: dict[str, WorkflowMeta]) -> None: + """Write workflows section to markdown file.""" + f.write("\n## Workflows\n\n") + for path, workflow in sorted(workflows.items()): + f.write(f"### {workflow.name}\n\n") + f.write(f"**Path:** `{path}`\n\n") + f.write(f"**Triggers:** {', '.join(f'`{t}`' for t in workflow.triggers)}\n\n") + f.write(f"**Jobs:** {len(workflow.jobs)}\n\n") + + if workflow.jobs: + f.write("#### Jobs\n\n") + for job_name, job in workflow.jobs.items(): + self._write_job_details(f, job_name, job) + f.write("\n") + + def _write_action_header( + self, f: Any, key: str, action: ActionManifest, action_ref_map: dict[str, ActionRef] + ) -> None: + """Write action header with key and repository info.""" + anchor = self._create_action_anchor(key) + f.write(f'### {action.name}\n\n') + f.write(f"**Key:** `{key}`\n\n") + + if key in action_ref_map: + repo_url = self._get_action_repo_url(action_ref_map[key]) + if repo_url: + f.write(f"**Repository:** [{action_ref_map[key].owner}/{action_ref_map[key].repo}]({repo_url})\n\n") + elif action_ref_map[key].type == ActionType.LOCAL: + f.write("**Type:** Local Action\n\n") + + if action.description: + f.write(f"{action.description}\n\n") + + def _write_workflows_using_action(self, f: Any, key: str, workflows: dict[str, WorkflowMeta]) -> None: + """Write section showing workflows that use this action.""" + workflows_using_action = [] + for workflow_path, workflow in workflows.items(): + for action_ref in workflow.actions_used: 
+                if action_ref.canonical_key() == key:
+                    workflows_using_action.append((workflow_path, workflow.name))
+                    break
+
+        if workflows_using_action:
+            f.write("<details>\n")
+            f.write("<summary>Used in Workflows</summary>\n\n")
+            for workflow_path, workflow_name in sorted(workflows_using_action):
+                workflow_anchor = workflow_name.lower().replace(" ", "-").replace(".", "-")
+                f.write(f"- [{workflow_name}](#{workflow_anchor}) (`{workflow_path}`)\n")
+            f.write("\n</details>\n\n")
+
+    def _write_action_inputs(self, f: Any, action: ActionManifest) -> None:
+        """Write action inputs section."""
+        if action.inputs:
+            f.write("<details>\n")
+            f.write("<summary>Inputs</summary>\n\n")
+            for inp in action.inputs.values():
+                req = "required" if inp.required else "optional"
+                f.write(f"- `{inp.name}` ({req}): {inp.description or 'No description'}\n")
+            f.write("\n</details>
\n\n") + else: + f.write("\n") + + def _write_actions_inventory( + self, f: Any, workflows: dict[str, WorkflowMeta], actions: dict[str, ActionManifest] + ) -> None: + """Write actions inventory section to markdown file.""" + f.write("\n## Actions Inventory\n\n") + + # Build mapping of action keys to ActionRef for repo URLs + action_ref_map: dict[str, ActionRef] = {} + for workflow in workflows.values(): + for action_ref in workflow.actions_used: + key = action_ref.canonical_key() + if key not in action_ref_map: + action_ref_map[key] = action_ref + + for key, action in sorted(actions.items()): + self._write_action_header(f, key, action, action_ref_map) + self._write_workflows_using_action(f, key, workflows) + self._write_action_inputs(f, action) + + def _write_violations(self, f: Any, violations: list[dict[str, Any]]) -> None: + """Write violations section to markdown file.""" + if not violations: + return + + f.write("\n## Policy Violations\n\n") + for violation in violations: + severity = violation.get("severity", "warning").upper() + f.write(f"### [{severity}] {violation['rule']}\n\n") + f.write(f"**Workflow:** `{violation['workflow']}`\n\n") + f.write(f"{violation['message']}\n\n") + + def render_markdown( + self, + workflows: dict[str, WorkflowMeta], + actions: dict[str, ActionManifest], + violations: list[dict[str, Any]], + analysis: dict[str, Any], + ) -> None: + """Render Markdown report.""" + report_file = self.output_dir / "report.md" + + with open(report_file, "w", encoding="utf-8") as f: + f.write("# GitHub Actions & Workflows Audit Report\n\n") + f.write(f"**Generated:** {datetime.now().isoformat()}\n\n") + + self._write_summary(f, workflows, actions, violations) + self._write_analysis(f, analysis) + self._write_workflows(f, workflows) + self._write_actions_inventory(f, workflows, actions) + self._write_violations(f, violations) + + logger.info(f"Markdown report written to {report_file}") diff --git a/ghaw_auditor/resolver.py b/ghaw_auditor/resolver.py 
new file mode 100644 index 0000000..948b59f --- /dev/null +++ b/ghaw_auditor/resolver.py @@ -0,0 +1,164 @@ +"""Action resolver for GitHub actions.""" + +from __future__ import annotations + +import logging +from concurrent.futures import ThreadPoolExecutor, as_completed +from pathlib import Path + +from ghaw_auditor.cache import Cache +from ghaw_auditor.github_client import GitHubClient +from ghaw_auditor.models import ActionManifest, ActionRef, ActionType +from ghaw_auditor.parser import Parser + +logger = logging.getLogger(__name__) + + +class Resolver: + """Resolves action references and fetches manifests.""" + + def __init__( + self, + github_client: GitHubClient, + cache: Cache, + repo_path: Path, + concurrency: int = 4, + ) -> None: + """Initialize resolver.""" + self.github_client = github_client + self.cache = cache + self.parser = Parser(repo_path) + self.repo_path = repo_path + self.concurrency = concurrency + + def resolve_actions(self, actions: list[ActionRef]) -> dict[str, ActionManifest]: + """Resolve multiple action references in parallel.""" + resolved: dict[str, ActionManifest] = {} + + with ThreadPoolExecutor(max_workers=self.concurrency) as executor: + futures = {executor.submit(self._resolve_action, action): action for action in actions} + + for future in as_completed(futures): + action = futures[future] + try: + key, manifest = future.result() + if key and manifest: + resolved[key] = manifest + except Exception as e: + logger.error(f"Failed to resolve {action.canonical_key()}: {e}") + + return resolved + + def _resolve_action(self, action: ActionRef) -> tuple[str, ActionManifest | None]: + """Resolve a single action reference.""" + if action.type == ActionType.LOCAL: + return self._resolve_local_action(action) + elif action.type == ActionType.GITHUB: + return self._resolve_github_action(action) + elif action.type == ActionType.DOCKER: + # Docker actions don't have manifests to parse + return action.canonical_key(), None + return "", None + + 
def _resolve_local_action(self, action: ActionRef) -> tuple[str, ActionManifest | None]: + """Resolve a local action.""" + if not action.path: + return "", None + + # Remove leading ./ prefix only + clean_path = action.path[2:] if action.path.startswith("./") else action.path + action_path = self.repo_path / clean_path + + # If action_path is a directory, look for action.yml/yaml inside + # If it's a file path, look in parent directory + if action_path.is_dir(): + for name in ("action.yml", "action.yaml"): + manifest_path = action_path / name + if manifest_path.exists(): + try: + manifest = self.parser.parse_action(manifest_path) + return action.canonical_key(), manifest + except Exception as e: + logger.error(f"Failed to parse local action {manifest_path}: {e}") + continue + else: + # Try as parent directory + parent = action_path.parent if action_path.name.startswith("action.") else action_path + for name in ("action.yml", "action.yaml"): + manifest_path = parent / name + if manifest_path.exists(): + try: + manifest = self.parser.parse_action(manifest_path) + return action.canonical_key(), manifest + except Exception as e: + logger.error(f"Failed to parse local action {manifest_path}: {e}") + continue + + logger.warning(f"Local action manifest not found: {action_path}") + return "", None + + def _resolve_github_action(self, action: ActionRef) -> tuple[str, ActionManifest | None]: + """Resolve a GitHub action.""" + if not action.owner or not action.repo or not action.ref: + return "", None + + # Resolve ref to SHA + cache_key = self.cache.make_key("ref", action.owner, action.repo, action.ref) + sha = self.cache.get(cache_key) + + if not sha: + try: + sha = self.github_client.get_ref_sha(action.owner, action.repo, action.ref) + self.cache.set(cache_key, sha) + except Exception as e: + logger.error(f"Failed to resolve ref {action.owner}/{action.repo}@{action.ref}: {e}") + return "", None + + action.resolved_sha = sha + + # Fetch action manifest + manifest_path = 
action.path if action.path and action.path != "action.yml" else "" + manifest_key = self.cache.make_key("manifest", action.owner, action.repo, sha, manifest_path) + manifest_content = self.cache.get(manifest_key) + + if not manifest_content: + # Try action.yml first, then action.yaml + base_path = f"{manifest_path}/" if manifest_path else "" + for name in ("action.yml", "action.yaml"): + file_path = f"{base_path}{name}" + try: + manifest_content = self.github_client.get_file_content(action.owner, action.repo, file_path, sha) + self.cache.set(manifest_key, manifest_content) + break + except Exception: + continue + + if not manifest_content: + # Only log warning if both extensions failed + if manifest_path: + logger.error( + f"Action manifest not found: {action.owner}/{action.repo}/{manifest_path} " + f"(tried action.yml and action.yaml)" + ) + else: + logger.error( + f"Action manifest not found: {action.owner}/{action.repo} (tried action.yml and action.yaml)" + ) + return action.canonical_key(), None + + # Parse manifest + try: + # Write to temp file and parse + import tempfile + + with tempfile.NamedTemporaryFile(mode="w", suffix=".yml", delete=False) as f: + f.write(manifest_content) + temp_path = Path(f.name) + + manifest = self.parser.parse_action(temp_path) + temp_path.unlink() + + return action.canonical_key(), manifest + except Exception as e: + logger.error(f"Failed to parse manifest for {action.canonical_key()}: {e}") + return "", None diff --git a/ghaw_auditor/scanner.py b/ghaw_auditor/scanner.py new file mode 100644 index 0000000..63d4cae --- /dev/null +++ b/ghaw_auditor/scanner.py @@ -0,0 +1,84 @@ +"""File scanner for discovering GitHub Actions and workflows.""" + +from __future__ import annotations + +import logging +from pathlib import Path + +logger = logging.getLogger(__name__) + + +class Scanner: + """Scans repository for workflow and action files.""" + + WORKFLOW_PATTERNS = [ + ".github/workflows/*.yml", + ".github/workflows/*.yaml", + ] + + 
ACTION_PATTERNS = [ + "**/action.yml", + "**/action.yaml", + ".github/actions/*/action.yml", + ".github/actions/*/action.yaml", + ] + + def __init__(self, repo_path: str | Path, exclude_patterns: list[str] | None = None) -> None: + """Initialize scanner.""" + self.repo_path = Path(repo_path).resolve() + self.exclude_patterns = exclude_patterns or [] + + def _should_exclude(self, path: Path) -> bool: + """Check if path should be excluded.""" + rel_path = path.relative_to(self.repo_path) + return any(rel_path.match(pattern) for pattern in self.exclude_patterns) + + def find_workflows(self) -> list[Path]: + """Find all workflow files.""" + workflows = [] + workflow_dir = self.repo_path / ".github" / "workflows" + + if not workflow_dir.exists(): + logger.warning(f"Workflow directory not found: {workflow_dir}") + return workflows + + for pattern in ["*.yml", "*.yaml"]: + for file_path in workflow_dir.glob(pattern): + if not self._should_exclude(file_path): + workflows.append(file_path) + + logger.info(f"Found {len(workflows)} workflow files") + return sorted(workflows) + + def find_actions(self) -> list[Path]: + """Find all action manifest files. + + Supports multiple action discovery patterns: + - .github/actions/*/action.yml (standard GitHub location) + - ./action-name/action.yml (monorepo root-level actions) + - Any depth: path/to/action/action.yml (recursive search) + + Excludes .github/workflows directory to avoid false positives. 
+ """ + actions = [] + + # Check .github/actions directory + actions_dir = self.repo_path / ".github" / "actions" + if actions_dir.exists(): + for action_file in actions_dir.rglob("action.y*ml"): + if action_file.name in ("action.yml", "action.yaml") and not self._should_exclude(action_file): + actions.append(action_file) + logger.debug(f"Found action: {action_file.relative_to(self.repo_path)}") + + # Check for action files in root and subdirectories (supports monorepo structure) + for name in ("action.yml", "action.yaml"): + for action_file in self.repo_path.rglob(name): + # Skip if in .github/workflows + if ".github/workflows" in str(action_file.relative_to(self.repo_path)): + continue + if not self._should_exclude(action_file) and action_file not in actions: + actions.append(action_file) + logger.debug(f"Found action: {action_file.relative_to(self.repo_path)}") + + logger.info(f"Found {len(actions)} action files") + return sorted(actions) diff --git a/ghaw_auditor/services.py b/ghaw_auditor/services.py new file mode 100644 index 0000000..3efbd2a --- /dev/null +++ b/ghaw_auditor/services.py @@ -0,0 +1,118 @@ +"""Service layer for orchestrating audit operations.""" + +from __future__ import annotations + +import logging +from dataclasses import dataclass +from typing import Any + +from ghaw_auditor.analyzer import Analyzer +from ghaw_auditor.differ import Differ +from ghaw_auditor.models import ( + ActionDiff, + ActionManifest, + WorkflowDiff, + WorkflowMeta, +) +from ghaw_auditor.parser import Parser +from ghaw_auditor.policy import PolicyValidator +from ghaw_auditor.resolver import Resolver +from ghaw_auditor.scanner import Scanner + +logger = logging.getLogger(__name__) + + +@dataclass +class ScanResult: + """Result of a scan operation.""" + + workflows: dict[str, WorkflowMeta] + actions: dict[str, ActionManifest] + violations: list[dict[str, Any]] + analysis: dict[str, Any] + workflow_count: int + action_count: int + unique_action_count: int + + +class 
AuditService: + """Orchestrates the audit workflow.""" + + def __init__( + self, + scanner: Scanner, + parser: Parser, + analyzer: Analyzer, + resolver: Resolver | None = None, + validator: PolicyValidator | None = None, + ) -> None: + """Initialize audit service.""" + self.scanner = scanner + self.parser = parser + self.analyzer = analyzer + self.resolver = resolver + self.validator = validator + + def scan(self, offline: bool = False) -> ScanResult: + """Execute scan workflow and return results.""" + # Find files + workflow_files = self.scanner.find_workflows() + action_files = self.scanner.find_actions() + + # Parse workflows + workflows = {} + all_actions = [] + + for wf_file in workflow_files: + try: + workflow = self.parser.parse_workflow(wf_file) + rel_path = str(wf_file.relative_to(self.scanner.repo_path)) + workflows[rel_path] = workflow + all_actions.extend(workflow.actions_used) + except Exception as e: + logger.error(f"Failed to parse workflow {wf_file}: {e}") + + # Deduplicate actions + unique_actions = self.analyzer.deduplicate_actions(all_actions) + + # Resolve actions + actions = {} + if not offline and self.resolver: + actions = self.resolver.resolve_actions(list(unique_actions.values())) + + # Analyze + analysis = self.analyzer.analyze_workflows(workflows, actions) + + # Validate + violations = [] + if self.validator: + violations = self.validator.validate(workflows, all_actions) + + return ScanResult( + workflows=workflows, + actions=actions, + violations=violations, + analysis=analysis, + workflow_count=len(workflow_files), + action_count=len(action_files), + unique_action_count=len(unique_actions), + ) + + +class DiffService: + """Handles baseline comparison.""" + + def __init__(self, differ: Differ) -> None: + """Initialize diff service.""" + self.differ = differ + + def compare( + self, + workflows: dict[str, WorkflowMeta], + actions: dict[str, ActionManifest], + ) -> tuple[list[WorkflowDiff], list[ActionDiff]]: + """Compare current state 
with baseline.""" + baseline = self.differ.load_baseline() + workflow_diffs = self.differ.diff_workflows(baseline.workflows, workflows) + action_diffs = self.differ.diff_actions(baseline.actions, actions) + return workflow_diffs, action_diffs diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..65fa33b --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,62 @@ +[project] +name = "ghaw-auditor" +version = "1.0.0" +description = "GitHub Actions & Workflows Auditor - analyze and audit GitHub Actions ecosystem" +readme = "README.md" +requires-python = ">=3.11" +license = {text = "MIT"} +authors = [ + {name = "Ismo Vuorinen", email = "ismo@ivuorinen.net"} +] +dependencies = [ + "typer>=0.12.0", + "rich>=13.7.0", + "httpx>=0.27.0", + "pydantic>=2.6.0", + "ruamel.yaml>=0.18.0", + "platformdirs>=4.2.0", + "diskcache>=5.6.0", + "packaging>=24.0", + "tenacity>=8.2.0", +] + +[project.optional-dependencies] +dev = [ + "pytest>=8.0.0", + "pytest-cov>=4.1.0", + "vcrpy>=6.0.0", + "mypy>=1.8.0", + "ruff>=0.3.0", + "types-PyYAML", +] + +[project.scripts] +ghaw-auditor = "ghaw_auditor.cli:app" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.ruff] +line-length = 120 +target-version = "py311" + +[tool.ruff.lint] +select = ["E", "F", "I", "N", "W", "UP", "B", "SIM", "C90"] +ignore = ["E501"] + +[tool.ruff.lint.per-file-ignores] +"ghaw_auditor/cli.py" = ["B008"] # Typer uses function calls in defaults + +[tool.mypy] +python_version = "3.11" +strict = true +warn_return_any = true +warn_unused_configs = true +disallow_untyped_defs = true + +[tool.pytest.ini_options] +testpaths = ["tests"] +python_files = "test_*.py" +python_functions = "test_*" +addopts = "-v --cov=ghaw_auditor --cov-report=term-missing --cov-fail-under=70" diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..7dabedc --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ +"""Tests for ghaw-auditor.""" diff --git 
a/tests/fixtures/action-with-defaults.yml b/tests/fixtures/action-with-defaults.yml new file mode 100644 index 0000000..bae84a5 --- /dev/null +++ b/tests/fixtures/action-with-defaults.yml @@ -0,0 +1,23 @@ +--- +name: 'Action with Various Defaults' +description: 'Tests different input default types' + +inputs: + string-input: + description: 'String input' + default: 'hello' + boolean-input: + description: 'Boolean input' + default: true + number-input: + description: 'Number input' + default: 42 + no-default: + description: 'Input without default' + required: true + +runs: + using: composite + steps: + - run: echo "test" + shell: bash diff --git a/tests/fixtures/basic-workflow.yml b/tests/fixtures/basic-workflow.yml new file mode 100644 index 0000000..ba8ad77 --- /dev/null +++ b/tests/fixtures/basic-workflow.yml @@ -0,0 +1,12 @@ +--- +name: Basic Workflow + +on: push + +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Run tests + run: echo "Testing" diff --git a/tests/fixtures/complex-workflow.yml b/tests/fixtures/complex-workflow.yml new file mode 100644 index 0000000..c7af366 --- /dev/null +++ b/tests/fixtures/complex-workflow.yml @@ -0,0 +1,89 @@ +--- +name: Complex Workflow + +on: + push: + branches: [main, develop] + pull_request: + workflow_dispatch: + +permissions: + contents: read + issues: write + pull_requests: write + +env: + NODE_ENV: production + API_URL: https://api.example.com + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +defaults: + run: + shell: bash + working-directory: ./src + +jobs: + build: + runs-on: ubuntu-latest + timeout-minutes: 30 + permissions: + contents: read + environment: + name: production + url: https://example.com + env: + BUILD_ENV: production + steps: + - uses: actions/checkout@v4 + - name: Build + run: npm run build + env: + API_KEY: ${{ secrets.API_KEY }} + TOKEN: ${{ secrets.GITHUB_TOKEN }} + + test: + runs-on: ubuntu-latest + needs: build 
+ if: github.event_name == 'pull_request' + container: + image: node:20-alpine + credentials: + username: ${{ secrets.DOCKER_USER }} + password: ${{ secrets.DOCKER_PASS }} + env: + NODE_ENV: test + ports: + - 8080 + volumes: + - /tmp:/tmp + options: --cpus 2 + services: + postgres: + image: postgres:15 + credentials: + username: ${{ secrets.DOCKER_USER }} + password: ${{ secrets.DOCKER_PASS }} + env: + POSTGRES_PASSWORD: ${{ secrets.DB_PASSWORD }} + ports: + - 5432 + options: --health-cmd pg_isready + strategy: + matrix: + node-version: [18, 20] + os: [ubuntu-latest, windows-latest] + fail-fast: false + max-parallel: 2 + continue-on-error: true + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + - name: Test + run: npm test + env: + DATABASE_URL: ${{ secrets.DATABASE_URL }} diff --git a/tests/fixtures/composite-action.yml b/tests/fixtures/composite-action.yml new file mode 100644 index 0000000..5bcab66 --- /dev/null +++ b/tests/fixtures/composite-action.yml @@ -0,0 +1,33 @@ +--- +name: 'Composite Action' +description: 'A composite action example' +author: 'Test Author' + +inputs: + message: + description: 'Message to display' + required: true + debug: + description: 'Enable debug mode' + required: false + default: 'false' + +outputs: + result: + description: 'Action result' + value: ${{ steps.output.outputs.result }} + +runs: + using: composite + steps: + - name: Display message + run: echo "${{ inputs.message }}" + shell: bash + - name: Set output + id: output + run: echo "result=success" >> $GITHUB_OUTPUT + shell: bash + +branding: + icon: 'check' + color: 'green' diff --git a/tests/fixtures/docker-action.yml b/tests/fixtures/docker-action.yml new file mode 100644 index 0000000..9919326 --- /dev/null +++ b/tests/fixtures/docker-action.yml @@ -0,0 +1,21 @@ +--- +name: 'Docker Action' +description: 'A Docker action example' + +inputs: + dockerfile: + description: 'Path to Dockerfile' + 
required: false + default: 'Dockerfile' + +outputs: + image-id: + description: 'Built image ID' + +runs: + using: docker + image: Dockerfile + args: + - ${{ inputs.dockerfile }} + env: + BUILD_ENV: production diff --git a/tests/fixtures/empty-workflow-call.yml b/tests/fixtures/empty-workflow-call.yml new file mode 100644 index 0000000..748edfc --- /dev/null +++ b/tests/fixtures/empty-workflow-call.yml @@ -0,0 +1,11 @@ +--- +name: Empty Workflow Call + +on: + workflow_call: + +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 diff --git a/tests/fixtures/invalid-action.yml b/tests/fixtures/invalid-action.yml new file mode 100644 index 0000000..66cfac1 --- /dev/null +++ b/tests/fixtures/invalid-action.yml @@ -0,0 +1 @@ +# Empty action file diff --git a/tests/fixtures/invalid-workflow.yml b/tests/fixtures/invalid-workflow.yml new file mode 100644 index 0000000..932b798 --- /dev/null +++ b/tests/fixtures/invalid-workflow.yml @@ -0,0 +1 @@ +# Empty file diff --git a/tests/fixtures/javascript-action.yml b/tests/fixtures/javascript-action.yml new file mode 100644 index 0000000..e633fcf --- /dev/null +++ b/tests/fixtures/javascript-action.yml @@ -0,0 +1,27 @@ +--- +name: 'JavaScript Action' +description: 'A Node.js action example' +author: 'GitHub' + +inputs: + token: + description: 'GitHub token' + required: true + timeout: + description: 'Timeout in seconds' + required: false + default: '60' + +outputs: + status: + description: 'Action status' + +runs: + using: node20 + main: dist/index.js + pre: dist/setup.js + post: dist/cleanup.js + +branding: + icon: 'code' + color: 'blue' diff --git a/tests/fixtures/job-with-outputs.yml b/tests/fixtures/job-with-outputs.yml new file mode 100644 index 0000000..ec3fa83 --- /dev/null +++ b/tests/fixtures/job-with-outputs.yml @@ -0,0 +1,27 @@ +--- +name: Job with Outputs + +on: push + +jobs: + build: + runs-on: ubuntu-latest + outputs: + version: ${{ steps.version.outputs.version }} + artifact-url: ${{ 
steps.upload.outputs.url }} + status: success + steps: + - uses: actions/checkout@v4 + - name: Get version + id: version + run: echo "version=1.0.0" >> $GITHUB_OUTPUT + - name: Upload artifact + id: upload + run: echo "url=https://example.com/artifact" >> $GITHUB_OUTPUT + + deploy: + runs-on: ubuntu-latest + needs: build + steps: + - name: Deploy + run: echo "Deploying version ${{ needs.build.outputs.version }}" diff --git a/tests/fixtures/reusable-workflow-caller.yml b/tests/fixtures/reusable-workflow-caller.yml new file mode 100644 index 0000000..bdf5263 --- /dev/null +++ b/tests/fixtures/reusable-workflow-caller.yml @@ -0,0 +1,26 @@ +--- +name: Reusable Workflow Caller + +on: + push: + branches: [main] + +jobs: + call-workflow: + uses: owner/repo/.github/workflows/deploy.yml@v1 + with: + environment: production + debug: false + version: 1.2.3 + secrets: + deploy-token: ${{ secrets.DEPLOY_TOKEN }} + api-key: ${{ secrets.API_KEY }} + + call-workflow-inherit: + uses: owner/repo/.github/workflows/test.yml@main + secrets: inherit + + call-local-workflow: + uses: ./.github/workflows/shared.yml + with: + config: custom diff --git a/tests/fixtures/reusable-workflow.yml b/tests/fixtures/reusable-workflow.yml new file mode 100644 index 0000000..41b8f7e --- /dev/null +++ b/tests/fixtures/reusable-workflow.yml @@ -0,0 +1,39 @@ +--- +name: Reusable Workflow + +on: + workflow_call: + inputs: + environment: + description: 'Deployment environment' + required: true + type: string + debug: + description: 'Enable debug mode' + required: false + type: boolean + default: false + outputs: + deployment-id: + description: 'Deployment ID' + value: ${{ jobs.deploy.outputs.id }} + secrets: + deploy-token: + description: 'Deployment token' + required: true + api-key: + required: false + +jobs: + deploy: + runs-on: ubuntu-latest + outputs: + id: ${{ steps.deploy.outputs.id }} + steps: + - uses: actions/checkout@v4 + - name: Deploy + id: deploy + run: echo "id=12345" >> $GITHUB_OUTPUT + env: 
+ TOKEN: ${{ secrets.deploy-token }} + API_KEY: ${{ secrets.api-key }} diff --git a/tests/golden/actions.json b/tests/golden/actions.json new file mode 100644 index 0000000..09a1d5d --- /dev/null +++ b/tests/golden/actions.json @@ -0,0 +1,30 @@ +{ + "actions/checkout@abc123": { + "name": "Checkout", + "description": "Checkout a Git repository", + "author": "GitHub", + "inputs": { + "repository": { + "name": "repository", + "description": "Repository name with owner", + "required": false, + "default": null + }, + "ref": { + "name": "ref", + "description": "The branch, tag or SHA to checkout", + "required": false, + "default": null + } + }, + "outputs": {}, + "runs": { + "using": "node20", + "main": "dist/index.js" + }, + "branding": null, + "is_composite": false, + "is_docker": false, + "is_javascript": true + } +} diff --git a/tests/golden/report.md b/tests/golden/report.md new file mode 100644 index 0000000..854ba6f --- /dev/null +++ b/tests/golden/report.md @@ -0,0 +1,57 @@ +# GitHub Actions & Workflows Audit Report + +**Generated:** 2025-10-02T00:00:00.000000 + +## Summary + +- **Workflows:** 1 +- **Actions:** 1 +- **Policy Violations:** 0 + +## Analysis + +- **Total Jobs:** 1 +- **Reusable Workflows:** 0 + +### Triggers + +- `pull_request`: 1 +- `push`: 1 + +### Runners + +- `ubuntu-latest`: 1 + +### Secrets + +Total unique secrets: 1 + +- `GITHUB_TOKEN` + +## Workflows + +### Test Workflow + +**Path:** `test.yml` + +**Triggers:** `push`, `pull_request` + +**Jobs:** 1 + +#### Jobs + +- **test** + - Runner: `ubuntu-latest` + +## Actions Inventory + +### Checkout + +**Key:** `actions/checkout@abc123` + +Checkout a Git repository + +**Inputs:** + +- `repository` (optional): Repository name with owner +- `ref` (optional): The branch, tag or SHA to checkout diff --git a/tests/golden/workflows.json b/tests/golden/workflows.json new file mode 100644 index 0000000..a4ef204 --- /dev/null +++ b/tests/golden/workflows.json @@ -0,0 +1,41 @@ +{ + "test.yml": { + "name": 
"Test Workflow", + "path": "test.yml", + "triggers": [ + "push", + "pull_request" + ], + "permissions": null, + "concurrency": null, + "env": {}, + "defaults": {}, + "jobs": { + "test": { + "name": "test", + "runs_on": "ubuntu-latest", + "needs": [], + "if_condition": null, + "permissions": null, + "environment": null, + "concurrency": null, + "timeout_minutes": null, + "continue_on_error": false, + "container": null, + "services": {}, + "strategy": null, + "actions_used": [], + "secrets_used": [ + "GITHUB_TOKEN" + ], + "env_vars": {} + } + }, + "is_reusable": false, + "reusable_contract": null, + "secrets_used": [ + "GITHUB_TOKEN" + ], + "actions_used": [] + } +} diff --git a/tests/test_analyzer.py b/tests/test_analyzer.py new file mode 100644 index 0000000..ae0d564 --- /dev/null +++ b/tests/test_analyzer.py @@ -0,0 +1,144 @@ +"""Tests for analyzer module.""" + +from ghaw_auditor.analyzer import Analyzer +from ghaw_auditor.models import ActionRef, ActionType, JobMeta, WorkflowMeta + + +def test_analyzer_initialization() -> None: + """Test analyzer can be initialized.""" + analyzer = Analyzer() + assert analyzer is not None + + +def test_deduplicate_actions() -> None: + """Test action deduplication.""" + analyzer = Analyzer() + + action1 = ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="checkout", + ref="v4", + source_file="test.yml", + ) + action2 = ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="checkout", + ref="v4", + source_file="test2.yml", + ) + action3 = ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="setup-node", + ref="v4", + source_file="test.yml", + ) + + result = analyzer.deduplicate_actions([action1, action2, action3]) + + # Should have 2 unique actions (checkout appears twice) + assert len(result) == 2 + + +def test_analyze_workflows() -> None: + """Test workflow analysis.""" + analyzer = Analyzer() + + job = JobMeta( + name="test", + runs_on="ubuntu-latest", + ) + + workflow = WorkflowMeta( + 
name="Test Workflow", + path="test.yml", + triggers=["push", "pull_request"], + jobs={"test": job}, + secrets_used={"SECRET1", "SECRET2"}, + ) + + workflows = {"test.yml": workflow} + analysis = analyzer.analyze_workflows(workflows, {}) + + assert analysis["total_workflows"] == 1 + assert analysis["total_jobs"] == 1 + assert "push" in analysis["triggers"] + assert analysis["triggers"]["push"] == 1 + assert analysis["secrets"]["total_unique_secrets"] == 2 + + +def test_analyze_runners_with_list() -> None: + """Test runner analysis with list runner.""" + from ghaw_auditor.analyzer import Analyzer + from ghaw_auditor.models import JobMeta, WorkflowMeta + + analyzer = Analyzer() + + # Job with list runner (matrix runner) + job = JobMeta( + name="test", + runs_on=["ubuntu-latest", "macos-latest"], + ) + + workflow = WorkflowMeta( + name="Test Workflow", + path="test.yml", + triggers=["push"], + jobs={"test": job}, + ) + + workflows = {"test.yml": workflow} + analysis = analyzer.analyze_workflows(workflows, {}) + + # List runner should be converted to string + assert "['ubuntu-latest', 'macos-latest']" in analysis["runners"] + + +def test_analyze_containers_and_services() -> None: + """Test container and service analysis.""" + from ghaw_auditor.analyzer import Analyzer + from ghaw_auditor.models import Container, JobMeta, Service, WorkflowMeta + + analyzer = Analyzer() + + # Job with container + job1 = JobMeta( + name="with-container", + runs_on="ubuntu-latest", + container=Container(image="node:18"), + ) + + # Job with services + job2 = JobMeta( + name="with-services", + runs_on="ubuntu-latest", + services={"postgres": Service(name="postgres", image="postgres:14")}, + ) + + # Job with both + job3 = JobMeta( + name="with-both", + runs_on="ubuntu-latest", + container=Container(image="node:18"), + services={"redis": Service(name="redis", image="redis:7")}, + ) + + workflow = WorkflowMeta( + name="Test Workflow", + path="test.yml", + triggers=["push"], + jobs={ + 
"with-container": job1, + "with-services": job2, + "with-both": job3, + }, + ) + + workflows = {"test.yml": workflow} + analysis = analyzer.analyze_workflows(workflows, {}) + + # Should count containers and services + assert analysis["containers"]["jobs_with_containers"] == 2 + assert analysis["containers"]["jobs_with_services"] == 2 diff --git a/tests/test_cache.py b/tests/test_cache.py new file mode 100644 index 0000000..f92216f --- /dev/null +++ b/tests/test_cache.py @@ -0,0 +1,58 @@ +"""Tests for cache module.""" + +from pathlib import Path + +from ghaw_auditor.cache import Cache + + +def test_cache_initialization(tmp_path: Path) -> None: + """Test cache can be initialized.""" + cache = Cache(tmp_path / "cache") + assert cache.cache_dir.exists() + cache.close() + + +def test_cache_set_get(tmp_path: Path) -> None: + """Test cache set and get.""" + cache = Cache(tmp_path / "cache") + + cache.set("test_key", "test_value") + value = cache.get("test_key") + + assert value == "test_value" + cache.close() + + +def test_cache_make_key() -> None: + """Test cache key generation.""" + cache = Cache() + + key1 = cache.make_key("part1", "part2", "part3") + key2 = cache.make_key("part1", "part2", "part3") + key3 = cache.make_key("different", "parts") + + assert key1 == key2 + assert key1 != key3 + cache.close() + + +def test_cache_clear(tmp_path: Path) -> None: + """Test cache clear.""" + cache = Cache(tmp_path / "cache") + + # Add some values + cache.set("key1", "value1") + cache.set("key2", "value2") + + # Verify they exist + assert cache.get("key1") == "value1" + assert cache.get("key2") == "value2" + + # Clear cache + cache.clear() + + # Verify values are gone + assert cache.get("key1") is None + assert cache.get("key2") is None + + cache.close() diff --git a/tests/test_cli.py b/tests/test_cli.py new file mode 100644 index 0000000..caba3b1 --- /dev/null +++ b/tests/test_cli.py @@ -0,0 +1,584 @@ +"""Integration tests for CLI commands.""" + +from pathlib import Path +from 
unittest.mock import Mock, patch + +from typer.testing import CliRunner + +from ghaw_auditor.cli import app + +runner = CliRunner() + + +def test_scan_command_basic(tmp_path: Path) -> None: + """Test basic scan command.""" + output_dir = tmp_path / "output" + + with patch("ghaw_auditor.cli.Scanner") as mock_scanner: + mock_scanner.return_value.find_workflows.return_value = [] + mock_scanner.return_value.find_actions.return_value = [] + + result = runner.invoke(app, ["scan", "--repo", str(tmp_path), "--output", str(output_dir), "--offline"]) + + assert result.exit_code == 0 + assert "Scanning repository" in result.stdout + + +def test_scan_command_with_token(tmp_path: Path) -> None: + """Test scan with GitHub token.""" + with patch("ghaw_auditor.cli.Scanner") as mock_scanner: + mock_scanner.return_value.find_workflows.return_value = [] + mock_scanner.return_value.find_actions.return_value = [] + + result = runner.invoke( + app, + ["scan", "--repo", str(tmp_path), "--token", "test_token", "--offline"], + ) + + assert result.exit_code == 0 + + +def test_inventory_command(tmp_path: Path) -> None: + """Test inventory command.""" + with patch("ghaw_auditor.cli.Scanner") as mock_scanner: + mock_scanner.return_value.find_workflows.return_value = [] + + result = runner.invoke(app, ["inventory", "--repo", str(tmp_path)]) + + assert result.exit_code == 0 + assert "Unique Actions" in result.stdout + + +def test_validate_command(tmp_path: Path) -> None: + """Test validate command.""" + with patch("ghaw_auditor.cli.Scanner") as mock_scanner: + mock_scanner.return_value.find_workflows.return_value = [] + + result = runner.invoke(app, ["validate", "--repo", str(tmp_path)]) + + assert result.exit_code == 0 + + +def test_version_command() -> None: + """Test version command.""" + result = runner.invoke(app, ["version"]) + + assert result.exit_code == 0 + assert "ghaw-auditor version" in result.stdout + + +def test_scan_command_verbose(tmp_path: Path) -> None: + """Test scan with 
verbose flag.""" + with patch("ghaw_auditor.cli.Scanner") as mock_scanner: + mock_scanner.return_value.find_workflows.return_value = [] + mock_scanner.return_value.find_actions.return_value = [] + + result = runner.invoke(app, ["scan", "--repo", str(tmp_path), "--verbose", "--offline"]) + + assert result.exit_code == 0 + + +def test_scan_command_quiet(tmp_path: Path) -> None: + """Test scan with quiet flag.""" + with patch("ghaw_auditor.cli.Scanner") as mock_scanner: + mock_scanner.return_value.find_workflows.return_value = [] + mock_scanner.return_value.find_actions.return_value = [] + + result = runner.invoke(app, ["scan", "--repo", str(tmp_path), "--quiet", "--offline"]) + + assert result.exit_code == 0 + + +def test_scan_command_nonexistent_repo() -> None: + """Test scan with nonexistent repository.""" + result = runner.invoke(app, ["scan", "--repo", "/nonexistent/path"]) + + assert result.exit_code in (1, 2) # Either repo not found or other error + assert "Repository not found" in result.stdout or result.exit_code == 2 + + +def test_scan_command_with_log_json(tmp_path: Path) -> None: + """Test scan with JSON logging.""" + with patch("ghaw_auditor.cli.Scanner") as mock_scanner: + mock_scanner.return_value.find_workflows.return_value = [] + mock_scanner.return_value.find_actions.return_value = [] + + result = runner.invoke(app, ["scan", "--repo", str(tmp_path), "--log-json", "--offline"]) + + assert result.exit_code == 0 + + +def test_scan_command_with_policy_file(tmp_path: Path) -> None: + """Test scan with policy file.""" + policy_file = tmp_path / "policy.yml" + policy_file.write_text("require_pinned_actions: true") + + with patch("ghaw_auditor.cli.Scanner") as mock_scanner: + mock_scanner.return_value.find_workflows.return_value = [] + mock_scanner.return_value.find_actions.return_value = [] + + result = runner.invoke( + app, + [ + "scan", + "--repo", + str(tmp_path), + "--policy-file", + str(policy_file), + "--offline", + ], + ) + + assert result.exit_code 
== 0 + + +def test_scan_command_with_violations(tmp_path: Path) -> None: + """Test scan with policy violations.""" + # Create workflow with unpinned action + workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + (workflows_dir / "ci.yml").write_text( + """ +name: CI +on: push +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@main +""" + ) + + policy_file = tmp_path / "policy.yml" + policy_file.write_text("require_pinned_actions: true") + + result = runner.invoke( + app, + [ + "scan", + "--repo", + str(tmp_path), + "--policy-file", + str(policy_file), + "--offline", + ], + ) + + assert result.exit_code == 0 + assert "policy violations" in result.stdout + + +def test_scan_command_with_enforcement(tmp_path: Path) -> None: + """Test scan with policy enforcement.""" + # Create workflow with unpinned action + workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + (workflows_dir / "ci.yml").write_text( + """ +name: CI +on: push +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@main +""" + ) + + policy_file = tmp_path / "policy.yml" + policy_file.write_text("require_pinned_actions: true") + + result = runner.invoke( + app, + [ + "scan", + "--repo", + str(tmp_path), + "--policy-file", + str(policy_file), + "--enforce", + "--offline", + ], + ) + + # Should exit with error due to violations + assert result.exit_code in (1, 2) # Exit code 1 from policy, or 2 from exception handling + # Check that enforcement was triggered + assert "policy violations" in result.stdout or "Policy enforcement failed" in result.stdout + + +def test_scan_command_with_diff_mode(tmp_path: Path) -> None: + """Test scan in diff mode.""" + # Create baseline + baseline_dir = tmp_path / "baseline" + baseline_dir.mkdir() + + from ghaw_auditor.differ import Differ + from ghaw_auditor.models import WorkflowMeta + + differ = Differ(baseline_dir) + workflow = WorkflowMeta(name="Test", 
path="test.yml", triggers=["push"], jobs={}) + differ.save_baseline({"test.yml": workflow}, {}) + + # Create workflow + workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + (workflows_dir / "test.yml").write_text("name: Test\non: push\njobs: {}") + + output_dir = tmp_path / "output" + + result = runner.invoke( + app, + [ + "scan", + "--repo", + str(tmp_path), + "--diff", + "--baseline", + str(baseline_dir), + "--output", + str(output_dir), + "--offline", + ], + ) + + assert result.exit_code == 0 + assert "Running diff" in result.stdout + + +def test_scan_command_with_write_baseline(tmp_path: Path) -> None: + """Test scan with baseline writing.""" + workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + (workflows_dir / "ci.yml").write_text("name: CI\non: push\njobs:\n test:\n runs-on: ubuntu-latest") + + baseline_dir = tmp_path / "baseline" + + result = runner.invoke( + app, + [ + "scan", + "--repo", + str(tmp_path), + "--write-baseline", + "--baseline", + str(baseline_dir), + "--offline", + ], + ) + + assert result.exit_code == 0 + assert "Baseline saved" in result.stdout + assert baseline_dir.exists() + + +def test_scan_command_with_format_json(tmp_path: Path) -> None: + """Test scan with JSON format only.""" + with patch("ghaw_auditor.cli.Scanner") as mock_scanner: + mock_scanner.return_value.find_workflows.return_value = [] + mock_scanner.return_value.find_actions.return_value = [] + + result = runner.invoke( + app, + ["scan", "--repo", str(tmp_path), "--format-type", "json", "--offline"], + ) + + assert result.exit_code == 0 + + +def test_scan_command_with_format_md(tmp_path: Path) -> None: + """Test scan with Markdown format only.""" + with patch("ghaw_auditor.cli.Scanner") as mock_scanner: + mock_scanner.return_value.find_workflows.return_value = [] + mock_scanner.return_value.find_actions.return_value = [] + + result = runner.invoke( + app, + ["scan", "--repo", str(tmp_path), 
"--format-type", "md", "--offline"], + ) + + assert result.exit_code == 0 + + +def test_inventory_command_with_error(tmp_path: Path) -> None: + """Test inventory command with parse error.""" + # Create invalid workflow + workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + (workflows_dir / "invalid.yml").write_text("invalid: yaml: {{{") + + result = runner.invoke(app, ["inventory", "--repo", str(tmp_path)]) + + assert result.exit_code == 0 + assert "Unique Actions" in result.stdout + + +def test_inventory_command_verbose_with_error(tmp_path: Path) -> None: + """Test inventory command verbose mode with error.""" + # Create invalid workflow + workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + (workflows_dir / "invalid.yml").write_text("invalid: yaml: {{{") + + result = runner.invoke(app, ["inventory", "--repo", str(tmp_path), "--verbose"]) + + assert result.exit_code == 0 + + +def test_validate_command_with_violations(tmp_path: Path) -> None: + """Test validate command with violations.""" + # Create workflow with unpinned action + workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + (workflows_dir / "ci.yml").write_text( + """ +name: CI +on: push +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@main +""" + ) + + result = runner.invoke(app, ["validate", "--repo", str(tmp_path)]) + + assert result.exit_code == 0 + assert "policy violations" in result.stdout + + +def test_validate_command_with_enforcement(tmp_path: Path) -> None: + """Test validate command with enforcement.""" + # Create workflow with unpinned action + workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + (workflows_dir / "ci.yml").write_text( + """ +name: CI +on: push +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@main +""" + ) + + result = runner.invoke(app, ["validate", "--repo", str(tmp_path), 
"--enforce"]) + + # Should exit with error + assert result.exit_code == 1 + + +def test_validate_command_no_violations(tmp_path: Path) -> None: + """Test validate command with no violations.""" + # Create workflow with pinned action (valid 40-char SHA) + workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + (workflows_dir / "ci.yml").write_text( + """ +name: CI +on: push +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@a81bbbf8298c0fa03ea29cdc473d45769f953675 +""" + ) + + result = runner.invoke(app, ["validate", "--repo", str(tmp_path)]) + + assert result.exit_code == 0 + assert "No policy violations found" in result.stdout + + +def test_validate_command_with_error(tmp_path: Path) -> None: + """Test validate command with parse error.""" + # Create invalid workflow + workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + (workflows_dir / "invalid.yml").write_text("invalid: yaml: {{{") + + result = runner.invoke(app, ["validate", "--repo", str(tmp_path)]) + + assert result.exit_code == 0 + + +def test_validate_command_verbose_with_error(tmp_path: Path) -> None: + """Test validate command verbose mode with error.""" + # Create invalid workflow + workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + (workflows_dir / "invalid.yml").write_text("invalid: yaml: {{{") + + result = runner.invoke(app, ["validate", "--repo", str(tmp_path), "--verbose"]) + + assert result.exit_code == 0 + + +def test_scan_command_diff_baseline_not_found(tmp_path: Path) -> None: + """Test scan with diff mode when baseline doesn't exist.""" + # Create workflow + workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + (workflows_dir / "ci.yml").write_text("name: CI\non: push\njobs:\n test:\n runs-on: ubuntu-latest") + + # Non-existent baseline + baseline_dir = tmp_path / "nonexistent_baseline" + output_dir = tmp_path / "output" + + 
result = runner.invoke( + app, + [ + "scan", + "--repo", + str(tmp_path), + "--diff", + "--baseline", + str(baseline_dir), + "--output", + str(output_dir), + "--offline", + ], + ) + + # Should complete but log error about missing baseline + assert result.exit_code == 0 + # Diff should be attempted but baseline not found is logged + + +def test_scan_command_general_exception(tmp_path: Path) -> None: + """Test scan command with general exception.""" + # Mock the factory to raise an exception + with patch("ghaw_auditor.cli.AuditServiceFactory") as mock_factory: + mock_factory.create.side_effect = RuntimeError("Factory failed") + + result = runner.invoke( + app, + ["scan", "--repo", str(tmp_path), "--offline"], + ) + + # Should exit with code 2 (exception) + assert result.exit_code == 2 + + +def test_inventory_command_parse_error_verbose(tmp_path: Path) -> None: + """Test inventory command logs exceptions in verbose mode.""" + # Create workflow that will cause parse exception + workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + (workflows_dir / "bad.yml").write_text("!!invalid yaml!!") + + result = runner.invoke( + app, + ["inventory", "--repo", str(tmp_path), "--verbose"], + ) + + # Should complete (exception is caught) + assert result.exit_code == 0 + # Check for error message in output or logs + + +def test_validate_command_parse_error_verbose(tmp_path: Path) -> None: + """Test validate command logs exceptions in verbose mode.""" + # Create workflow that will cause parse exception + workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + (workflows_dir / "bad.yml").write_text("!!invalid yaml!!") + + result = runner.invoke( + app, + ["validate", "--repo", str(tmp_path), "--verbose"], + ) + + # Should complete (exception is caught) + assert result.exit_code == 0 + + +def test_scan_command_with_resolver_exception(tmp_path: Path) -> None: + """Test scan with resolver that raises exception.""" + 
workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + (workflows_dir / "ci.yml").write_text( + """ +name: CI +on: push +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 +""" + ) + + # Mock resolver to raise exception + with patch("ghaw_auditor.cli.AuditServiceFactory") as mock_factory: + mock_service = Mock() + mock_service.scan.side_effect = Exception("Resolver error") + mock_factory.create.return_value = mock_service + + result = runner.invoke( + app, + ["scan", "--repo", str(tmp_path), "--offline"], + ) + + # Should exit with code 2 + assert result.exit_code == 2 + + +def test_inventory_command_with_actions(tmp_path: Path) -> None: + """Test inventory command with workflow that has actions.""" + # Create workflow with actions + workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + (workflows_dir / "ci.yml").write_text( + """ +name: CI +on: push +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 +""" + ) + + result = runner.invoke(app, ["inventory", "--repo", str(tmp_path)]) + + assert result.exit_code == 0 + assert "Unique Actions" in result.stdout + # Should list the actions + assert "actions/checkout" in result.stdout or "•" in result.stdout + + +def test_validate_command_with_policy_file(tmp_path: Path) -> None: + """Test validate command with policy file.""" + # Create workflow + workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + (workflows_dir / "ci.yml").write_text( + """ +name: CI +on: push +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 +""" + ) + + # Create policy file + policy_file = tmp_path / "policy.yml" + policy_file.write_text("require_pinned_actions: true") + + result = runner.invoke( + app, + ["validate", "--repo", str(tmp_path), "--policy-file", str(policy_file)], + ) + + assert result.exit_code == 0 + # Policy file 
exists, so TODO block executes diff --git a/tests/test_differ.py b/tests/test_differ.py new file mode 100644 index 0000000..d4f7dd4 --- /dev/null +++ b/tests/test_differ.py @@ -0,0 +1,376 @@ +"""Tests for differ module.""" + +from pathlib import Path + +import pytest + +from ghaw_auditor.differ import Differ +from ghaw_auditor.models import ( + ActionManifest, + JobMeta, + PermissionLevel, + Permissions, + WorkflowMeta, +) + + +def test_save_and_load_baseline(tmp_path: Path) -> None: + """Test saving and loading baseline.""" + baseline_path = tmp_path / "baseline" + differ = Differ(baseline_path) + + # Create sample data + workflows = { + "test.yml": WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + jobs={"test": JobMeta(name="test", runs_on="ubuntu-latest")}, + ) + } + actions = { + "actions/checkout@v4": ActionManifest( + name="Checkout", + description="Checkout code", + ) + } + + # Save baseline + differ.save_baseline(workflows, actions, "abc123") + assert (baseline_path / "workflows.json").exists() + assert (baseline_path / "actions.json").exists() + assert (baseline_path / "meta.json").exists() + + # Load baseline + baseline = differ.load_baseline() + assert baseline.meta.commit_sha == "abc123" + assert len(baseline.workflows) == 1 + assert len(baseline.actions) == 1 + + +def test_diff_workflows(tmp_path: Path) -> None: + """Test workflow diff.""" + baseline_path = tmp_path / "baseline" + differ = Differ(baseline_path) + + old_workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + jobs={}, + ) + + new_workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push", "pull_request"], + jobs={}, + ) + + diffs = differ.diff_workflows({"test.yml": old_workflow}, {"test.yml": new_workflow}) + + assert len(diffs) == 1 + assert diffs[0].status == "modified" + assert len(diffs[0].changes) > 0 + + +def test_diff_added_workflow(tmp_path: Path) -> None: + """Test added workflow detection.""" + baseline_path = 
tmp_path / "baseline" + differ = Differ(baseline_path) + + new_workflow = WorkflowMeta( + name="New", + path="new.yml", + triggers=["push"], + jobs={}, + ) + + diffs = differ.diff_workflows({}, {"new.yml": new_workflow}) + + assert len(diffs) == 1 + assert diffs[0].status == "added" + assert diffs[0].path == "new.yml" + + +def test_diff_removed_workflow(tmp_path: Path) -> None: + """Test removed workflow detection.""" + baseline_path = tmp_path / "baseline" + differ = Differ(baseline_path) + + old_workflow = WorkflowMeta( + name="Old", + path="old.yml", + triggers=["push"], + jobs={}, + ) + + diffs = differ.diff_workflows({"old.yml": old_workflow}, {}) + + assert len(diffs) == 1 + assert diffs[0].status == "removed" + assert diffs[0].path == "old.yml" + + +def test_load_baseline_not_found(tmp_path: Path) -> None: + """Test loading baseline when it doesn't exist.""" + baseline_path = tmp_path / "nonexistent" + differ = Differ(baseline_path) + + with pytest.raises(FileNotFoundError, match="Baseline not found"): + differ.load_baseline() + + +def test_load_baseline_without_meta(tmp_path: Path) -> None: + """Test loading baseline when meta.json doesn't exist.""" + baseline_path = tmp_path / "baseline" + baseline_path.mkdir() + + # Create only workflows.json and actions.json + (baseline_path / "workflows.json").write_text("{}") + (baseline_path / "actions.json").write_text("{}") + + differ = Differ(baseline_path) + baseline = differ.load_baseline() + + # Should still load with default meta + assert baseline.meta is not None + assert baseline.workflows == {} + assert baseline.actions == {} + + +def test_diff_workflows_permissions_change(tmp_path: Path) -> None: + """Test workflow diff with permissions changes.""" + baseline_path = tmp_path / "baseline" + differ = Differ(baseline_path) + + old_workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + permissions=Permissions(contents=PermissionLevel.READ), + jobs={}, + ) + + new_workflow = 
WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + permissions=Permissions(contents=PermissionLevel.WRITE), + jobs={}, + ) + + diffs = differ.diff_workflows({"test.yml": old_workflow}, {"test.yml": new_workflow}) + + assert len(diffs) == 1 + assert diffs[0].status == "modified" + assert any(c.field == "permissions" for c in diffs[0].changes) + + +def test_diff_workflows_concurrency_change(tmp_path: Path) -> None: + """Test workflow diff with concurrency changes.""" + baseline_path = tmp_path / "baseline" + differ = Differ(baseline_path) + + old_workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + concurrency="group1", + jobs={}, + ) + + new_workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + concurrency="group2", + jobs={}, + ) + + diffs = differ.diff_workflows({"test.yml": old_workflow}, {"test.yml": new_workflow}) + + assert len(diffs) == 1 + assert diffs[0].status == "modified" + assert any(c.field == "concurrency" for c in diffs[0].changes) + + +def test_diff_workflows_jobs_change(tmp_path: Path) -> None: + """Test workflow diff with job changes.""" + baseline_path = tmp_path / "baseline" + differ = Differ(baseline_path) + + old_workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + jobs={"build": JobMeta(name="build", runs_on="ubuntu-latest")}, + ) + + new_workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + jobs={ + "build": JobMeta(name="build", runs_on="ubuntu-latest"), + "test": JobMeta(name="test", runs_on="ubuntu-latest"), + }, + ) + + diffs = differ.diff_workflows({"test.yml": old_workflow}, {"test.yml": new_workflow}) + + assert len(diffs) == 1 + assert diffs[0].status == "modified" + assert any(c.field == "jobs" for c in diffs[0].changes) + + +def test_diff_workflows_secrets_change(tmp_path: Path) -> None: + """Test workflow diff with secrets changes.""" + baseline_path = tmp_path / "baseline" + differ = 
Differ(baseline_path) + + old_workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + jobs={}, + secrets_used={"API_KEY"}, + ) + + new_workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + jobs={}, + secrets_used={"API_KEY", "DATABASE_URL"}, + ) + + diffs = differ.diff_workflows({"test.yml": old_workflow}, {"test.yml": new_workflow}) + + assert len(diffs) == 1 + assert diffs[0].status == "modified" + assert any(c.field == "secrets_used" for c in diffs[0].changes) + + +def test_diff_workflows_unchanged(tmp_path: Path) -> None: + """Test workflow diff when unchanged.""" + baseline_path = tmp_path / "baseline" + differ = Differ(baseline_path) + + workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + jobs={}, + ) + + diffs = differ.diff_workflows({"test.yml": workflow}, {"test.yml": workflow}) + + assert len(diffs) == 1 + assert diffs[0].status == "unchanged" + assert len(diffs[0].changes) == 0 + + +def test_diff_actions_added(tmp_path: Path) -> None: + """Test action diff with added action.""" + baseline_path = tmp_path / "baseline" + differ = Differ(baseline_path) + + new_action = ActionManifest(name="New Action", description="Test") + + diffs = differ.diff_actions({}, {"actions/new@v1": new_action}) + + assert len(diffs) == 1 + assert diffs[0].status == "added" + assert diffs[0].key == "actions/new@v1" + + +def test_diff_actions_removed(tmp_path: Path) -> None: + """Test action diff with removed action.""" + baseline_path = tmp_path / "baseline" + differ = Differ(baseline_path) + + old_action = ActionManifest(name="Old Action", description="Test") + + diffs = differ.diff_actions({"actions/old@v1": old_action}, {}) + + assert len(diffs) == 1 + assert diffs[0].status == "removed" + assert diffs[0].key == "actions/old@v1" + + +def test_diff_actions_unchanged(tmp_path: Path) -> None: + """Test action diff when unchanged.""" + baseline_path = tmp_path / "baseline" + differ = 
Differ(baseline_path) + + action = ActionManifest(name="Test Action", description="Test") + + diffs = differ.diff_actions({"actions/test@v1": action}, {"actions/test@v1": action}) + + assert len(diffs) == 1 + assert diffs[0].status == "unchanged" + assert len(diffs[0].changes) == 0 + + +def test_render_diff_markdown(tmp_path: Path) -> None: + """Test rendering diff as Markdown.""" + from ghaw_auditor.models import ActionDiff, DiffEntry, WorkflowDiff + + baseline_path = tmp_path / "baseline" + differ = Differ(baseline_path) + + workflow_diffs = [ + WorkflowDiff(path="added.yml", status="added", changes=[]), + WorkflowDiff(path="removed.yml", status="removed", changes=[]), + WorkflowDiff( + path="modified.yml", + status="modified", + changes=[ + DiffEntry( + field="triggers", + old_value=["push"], + new_value=["push", "pull_request"], + change_type="modified", + ) + ], + ), + ] + + action_diffs = [ + ActionDiff(key="actions/new@v1", status="added", changes=[]), + ActionDiff(key="actions/old@v1", status="removed", changes=[]), + ] + + output_path = tmp_path / "diff.md" + differ.render_diff_markdown(workflow_diffs, action_diffs, output_path) + + assert output_path.exists() + content = output_path.read_text() + + # Check content + assert "# Audit Diff Report" in content + assert "## Workflow Changes" in content + assert "## Action Changes" in content + assert "added.yml" in content + assert "removed.yml" in content + assert "modified.yml" in content + assert "actions/new@v1" in content + assert "actions/old@v1" in content + assert "triggers" in content + + +def test_render_diff_markdown_empty(tmp_path: Path) -> None: + """Test rendering empty diff.""" + baseline_path = tmp_path / "baseline" + differ = Differ(baseline_path) + + output_path = tmp_path / "diff.md" + differ.render_diff_markdown([], [], output_path) + + assert output_path.exists() + content = output_path.read_text() + + assert "# Audit Diff Report" in content + assert "**Added:** 0" in content + assert 
"**Removed:** 0" in content diff --git a/tests/test_factory.py b/tests/test_factory.py new file mode 100644 index 0000000..6c87dbe --- /dev/null +++ b/tests/test_factory.py @@ -0,0 +1,81 @@ +"""Tests for factory module.""" + +from pathlib import Path + +from ghaw_auditor.factory import AuditServiceFactory +from ghaw_auditor.models import Policy + + +def test_factory_create_basic(tmp_path: Path) -> None: + """Test factory creates service with basic configuration.""" + service = AuditServiceFactory.create( + repo_path=tmp_path, + offline=True, + ) + + assert service.scanner is not None + assert service.parser is not None + assert service.analyzer is not None + assert service.resolver is None # Offline mode + assert service.validator is None # No policy + + +def test_factory_create_with_policy(tmp_path: Path) -> None: + """Test factory creates service with policy.""" + policy = Policy(require_pinned_actions=True) + + service = AuditServiceFactory.create( + repo_path=tmp_path, + offline=True, + policy=policy, + ) + + assert service.validator is not None + + +def test_factory_create_with_resolver(tmp_path: Path) -> None: + """Test factory creates service with resolver.""" + service = AuditServiceFactory.create( + repo_path=tmp_path, + offline=False, + token="test_token", + ) + + assert service.resolver is not None + + +def test_factory_create_with_exclude_patterns(tmp_path: Path) -> None: + """Test factory creates service with exclusion patterns.""" + service = AuditServiceFactory.create( + repo_path=tmp_path, + offline=True, + exclude_patterns=["**/node_modules/**", "**/dist/**"], + ) + + assert len(service.scanner.exclude_patterns) == 2 + + +def test_factory_create_with_cache_dir(tmp_path: Path) -> None: + """Test factory creates service with custom cache directory.""" + cache_dir = tmp_path / "custom_cache" + + service = AuditServiceFactory.create( + repo_path=tmp_path, + offline=True, + cache_dir=cache_dir, + ) + + # Service created successfully + assert service is 
not None + + +def test_factory_create_with_concurrency(tmp_path: Path) -> None: + """Test factory creates service with custom concurrency.""" + service = AuditServiceFactory.create( + repo_path=tmp_path, + offline=False, + concurrency=8, + ) + + assert service.resolver is not None + assert service.resolver.concurrency == 8 diff --git a/tests/test_github_client.py b/tests/test_github_client.py new file mode 100644 index 0000000..d0608b8 --- /dev/null +++ b/tests/test_github_client.py @@ -0,0 +1,399 @@ +"""Tests for GitHub client module.""" + +from unittest.mock import Mock, patch + +import httpx +import pytest + +from ghaw_auditor.github_client import GitHubClient, should_retry_http_error + + +def test_github_client_initialization_no_token() -> None: + """Test GitHub client initialization without token.""" + client = GitHubClient() + + assert client.base_url == "https://api.github.com" + assert "Accept" in client.headers + assert "Authorization" not in client.headers + assert client.client is not None + + client.close() + + +def test_github_client_initialization_with_token() -> None: + """Test GitHub client initialization with token.""" + client = GitHubClient(token="ghp_test123") + + assert "Authorization" in client.headers + assert client.headers["Authorization"] == "Bearer ghp_test123" + + client.close() + + +def test_github_client_custom_base_url() -> None: + """Test GitHub client with custom base URL.""" + client = GitHubClient(base_url="https://github.enterprise.com/api/v3") + + assert client.base_url == "https://github.enterprise.com/api/v3" + + client.close() + + +@patch("httpx.Client") +def test_get_ref_sha_success(mock_client_class: Mock) -> None: + """Test successful ref SHA resolution.""" + # Setup mock + mock_response = Mock() + mock_response.json.return_value = {"sha": "abc123def456"} + mock_response.raise_for_status = Mock() + + mock_http_client = Mock() + mock_http_client.get.return_value = mock_response + mock_client_class.return_value = 
mock_http_client + + # Test + client = GitHubClient(token="test") + sha = client.get_ref_sha("actions", "checkout", "v4") + + assert sha == "abc123def456" + mock_http_client.get.assert_called_once_with("https://api.github.com/repos/actions/checkout/commits/v4") + + +@patch("httpx.Client") +def test_get_ref_sha_http_error(mock_client_class: Mock) -> None: + """Test ref SHA resolution with HTTP error.""" + # Setup mock to raise HTTPStatusError + mock_error_response = Mock() + mock_error_response.status_code = 404 + + mock_response = Mock() + mock_response.raise_for_status.side_effect = httpx.HTTPStatusError( + "404 Not Found", + request=Mock(), + response=mock_error_response, + ) + + mock_http_client = Mock() + mock_http_client.get.return_value = mock_response + mock_client_class.return_value = mock_http_client + + # Test - 404 errors should not be retried, so expect HTTPStatusError + client = GitHubClient(token="test") + with pytest.raises(httpx.HTTPStatusError): + client.get_ref_sha("actions", "nonexistent", "v1") + + +@patch("httpx.Client") +def test_get_file_content_success(mock_client_class: Mock) -> None: + """Test successful file content retrieval.""" + # Setup mock + mock_response = Mock() + mock_response.text = "name: Test Action\\nruns:\\n using: node20" + mock_response.raise_for_status = Mock() + + mock_http_client = Mock() + mock_http_client.get.return_value = mock_response + mock_client_class.return_value = mock_http_client + + # Test + client = GitHubClient() + content = client.get_file_content("actions", "checkout", "action.yml", "abc123") + + assert "Test Action" in content + mock_http_client.get.assert_called_once_with("https://raw.githubusercontent.com/actions/checkout/abc123/action.yml") + + +@patch("httpx.Client") +def test_get_file_content_http_error(mock_client_class: Mock) -> None: + """Test file content retrieval with HTTP error.""" + # Setup mock to raise HTTPStatusError + mock_error_response = Mock() + mock_error_response.status_code = 404 + 
+ mock_response = Mock() + mock_response.raise_for_status.side_effect = httpx.HTTPStatusError( + "404 Not Found", + request=Mock(), + response=mock_error_response, + ) + + mock_http_client = Mock() + mock_http_client.get.return_value = mock_response + mock_client_class.return_value = mock_http_client + + # Test - 404 errors should not be retried, so expect HTTPStatusError + client = GitHubClient() + with pytest.raises(httpx.HTTPStatusError): + client.get_file_content("actions", "checkout", "missing.yml", "abc123") + + +@patch("httpx.Client") +def test_github_client_context_manager(mock_client_class: Mock) -> None: + """Test GitHub client as context manager.""" + mock_http_client = Mock() + mock_client_class.return_value = mock_http_client + + # Test context manager + with GitHubClient(token="test") as client: + assert client is not None + assert isinstance(client, GitHubClient) + + # Should have called close + mock_http_client.close.assert_called_once() + + +@patch("httpx.Client") +def test_github_client_close(mock_client_class: Mock) -> None: + """Test GitHub client close method.""" + mock_http_client = Mock() + mock_client_class.return_value = mock_http_client + + client = GitHubClient() + client.close() + + mock_http_client.close.assert_called_once() + + +@patch("httpx.Client") +def test_github_client_logs_successful_ref_sha(mock_client_class: Mock, caplog: pytest.LogCaptureFixture) -> None: + """Test that successful ref SHA requests are logged at DEBUG level.""" + import logging + + mock_http_client = Mock() + mock_response = Mock() + mock_response.json.return_value = {"sha": "abc123def"} + mock_http_client.get.return_value = mock_response + mock_client_class.return_value = mock_http_client + + with caplog.at_level(logging.DEBUG): + client = GitHubClient(token="test") + sha = client.get_ref_sha("actions", "checkout", "v4") + + assert sha == "abc123def" + assert "Fetching ref SHA: actions/checkout@v4" in caplog.text + assert "Resolved actions/checkout@v4 -> 
abc123def" in caplog.text + + +@patch("httpx.Client") +def test_github_client_logs_4xx_error(mock_client_class: Mock, caplog: pytest.LogCaptureFixture) -> None: + """Test that 404 errors are logged with user-friendly messages at ERROR level.""" + import logging + + mock_http_client = Mock() + mock_response = Mock() + mock_response.status_code = 404 + mock_response.raise_for_status.side_effect = httpx.HTTPStatusError( + "Not found", request=Mock(), response=mock_response + ) + mock_http_client.get.return_value = mock_response + mock_client_class.return_value = mock_http_client + + with caplog.at_level(logging.ERROR): + client = GitHubClient() + with pytest.raises(httpx.HTTPStatusError): + client.get_ref_sha("actions", "nonexistent", "v1") + + # Check for user-friendly error message + assert "Action not found" in caplog.text + assert "actions/nonexistent@v1" in caplog.text + + +@patch("httpx.Client") +def test_github_client_logs_successful_file_content(mock_client_class: Mock, caplog: pytest.LogCaptureFixture) -> None: + """Test that successful file content requests are logged at DEBUG level.""" + import logging + + mock_http_client = Mock() + mock_response = Mock() + mock_response.text = "name: Checkout\ndescription: Test" + mock_http_client.get.return_value = mock_response + mock_client_class.return_value = mock_http_client + + with caplog.at_level(logging.DEBUG): + client = GitHubClient(token="test") + content = client.get_file_content("actions", "checkout", "action.yml", "v4") + + assert content == "name: Checkout\ndescription: Test" + assert "Fetching file: actions/checkout/action.yml@v4" in caplog.text + assert "Downloaded action.yml" in caplog.text + assert "bytes" in caplog.text + + +@patch("httpx.Client") +def test_github_client_retries_5xx_errors(mock_client_class: Mock) -> None: + """Test that 5xx errors are retried.""" + from tenacity import RetryError + + mock_http_client = Mock() + mock_response = Mock() + mock_response.status_code = 500 + 
mock_response.raise_for_status.side_effect = httpx.HTTPStatusError( + "Server error", request=Mock(), response=mock_response + ) + mock_http_client.get.return_value = mock_response + mock_client_class.return_value = mock_http_client + + client = GitHubClient() + with pytest.raises(RetryError): + client.get_ref_sha("actions", "checkout", "v1") + + # Should have retried 3 times + assert mock_http_client.get.call_count == 3 + + +@patch("httpx.Client") +def test_github_client_logs_5xx_warning(mock_client_class: Mock, caplog: pytest.LogCaptureFixture) -> None: + """Test that 5xx errors are logged at WARNING level.""" + import logging + + from tenacity import RetryError + + mock_http_client = Mock() + mock_response = Mock() + mock_response.status_code = 503 + mock_response.raise_for_status.side_effect = httpx.HTTPStatusError( + "Service unavailable", request=Mock(), response=mock_response + ) + mock_http_client.get.return_value = mock_response + mock_client_class.return_value = mock_http_client + + with caplog.at_level(logging.WARNING): + client = GitHubClient() + with pytest.raises(RetryError): + client.get_file_content("actions", "checkout", "action.yml", "v4") + + assert "HTTP 503" in caplog.text + + +def test_should_retry_http_error_network_errors() -> None: + """Test that network errors should be retried.""" + error = httpx.RequestError("Connection failed") + assert should_retry_http_error(error) is True + + +def test_should_retry_http_error_404() -> None: + """Test that 404 errors should not be retried.""" + mock_response = Mock() + mock_response.status_code = 404 + error = httpx.HTTPStatusError("Not found", request=Mock(), response=mock_response) + assert should_retry_http_error(error) is False + + +def test_should_retry_http_error_403() -> None: + """Test that 403 errors should not be retried.""" + mock_response = Mock() + mock_response.status_code = 403 + error = httpx.HTTPStatusError("Forbidden", request=Mock(), response=mock_response) + assert 
should_retry_http_error(error) is False + + +def test_should_retry_http_error_429() -> None: + """Test that 429 rate limiting errors should be retried.""" + mock_response = Mock() + mock_response.status_code = 429 + error = httpx.HTTPStatusError("Rate limited", request=Mock(), response=mock_response) + assert should_retry_http_error(error) is True + + +def test_should_retry_http_error_500() -> None: + """Test that 500 errors should be retried.""" + mock_response = Mock() + mock_response.status_code = 500 + error = httpx.HTTPStatusError("Server error", request=Mock(), response=mock_response) + assert should_retry_http_error(error) is True + + +def test_should_retry_http_error_other() -> None: + """Test that non-HTTP errors should not be retried.""" + error = ValueError("Some other error") + assert should_retry_http_error(error) is False + + +@patch("httpx.Client") +def test_github_client_logs_403_error(mock_client_class: Mock, caplog: pytest.LogCaptureFixture) -> None: + """Test that 403 errors are logged with user-friendly messages.""" + import logging + + mock_http_client = Mock() + mock_response = Mock() + mock_response.status_code = 403 + mock_response.raise_for_status.side_effect = httpx.HTTPStatusError( + "Forbidden", request=Mock(), response=mock_response + ) + mock_http_client.get.return_value = mock_response + mock_client_class.return_value = mock_http_client + + with caplog.at_level(logging.ERROR): + client = GitHubClient() + with pytest.raises(httpx.HTTPStatusError): + client.get_ref_sha("actions", "checkout", "v1") + + assert "Access denied (check token permissions)" in caplog.text + + +@patch("httpx.Client") +def test_github_client_logs_401_error(mock_client_class: Mock, caplog: pytest.LogCaptureFixture) -> None: + """Test that 401 errors are logged with user-friendly messages.""" + import logging + + mock_http_client = Mock() + mock_response = Mock() + mock_response.status_code = 401 + mock_response.raise_for_status.side_effect = httpx.HTTPStatusError( 
+ "Unauthorized", request=Mock(), response=mock_response + ) + mock_http_client.get.return_value = mock_response + mock_client_class.return_value = mock_http_client + + with caplog.at_level(logging.ERROR): + client = GitHubClient() + with pytest.raises(httpx.HTTPStatusError): + client.get_file_content("actions", "checkout", "action.yml", "abc123") + + assert "Authentication required" in caplog.text + + +@patch("httpx.Client") +def test_github_client_logs_401_error_get_ref_sha(mock_client_class: Mock, caplog: pytest.LogCaptureFixture) -> None: + """Test that 401 errors are logged in get_ref_sha.""" + import logging + + mock_http_client = Mock() + mock_response = Mock() + mock_response.status_code = 401 + mock_response.raise_for_status.side_effect = httpx.HTTPStatusError( + "Unauthorized", request=Mock(), response=mock_response + ) + mock_http_client.get.return_value = mock_response + mock_client_class.return_value = mock_http_client + + with caplog.at_level(logging.ERROR): + client = GitHubClient() + with pytest.raises(httpx.HTTPStatusError): + client.get_ref_sha("actions", "checkout", "v1") + + assert "Authentication required" in caplog.text + + +@patch("httpx.Client") +def test_github_client_logs_403_error_get_file_content( + mock_client_class: Mock, caplog: pytest.LogCaptureFixture +) -> None: + """Test that 403 errors are logged in get_file_content.""" + import logging + + mock_http_client = Mock() + mock_response = Mock() + mock_response.status_code = 403 + mock_response.raise_for_status.side_effect = httpx.HTTPStatusError( + "Forbidden", request=Mock(), response=mock_response + ) + mock_http_client.get.return_value = mock_response + mock_client_class.return_value = mock_http_client + + with caplog.at_level(logging.ERROR): + client = GitHubClient() + with pytest.raises(httpx.HTTPStatusError): + client.get_file_content("actions", "checkout", "action.yml", "abc123") + + assert "Access denied (check token permissions)" in caplog.text diff --git 
a/tests/test_golden.py b/tests/test_golden.py new file mode 100644 index 0000000..cc0c3cf --- /dev/null +++ b/tests/test_golden.py @@ -0,0 +1,168 @@ +"""Golden file tests for reports.""" + +import json +from pathlib import Path + +from ghaw_auditor.models import ( + ActionInput, + ActionManifest, + JobMeta, + WorkflowMeta, +) +from ghaw_auditor.renderer import Renderer + + +def test_json_workflow_output(tmp_path: Path) -> None: + """Test workflow JSON matches golden file.""" + renderer = Renderer(tmp_path) + + workflows = { + "test.yml": WorkflowMeta( + name="Test Workflow", + path="test.yml", + triggers=["push", "pull_request"], + jobs={ + "test": JobMeta( + name="test", + runs_on="ubuntu-latest", + secrets_used={"GITHUB_TOKEN"}, + ) + }, + secrets_used={"GITHUB_TOKEN"}, + ) + } + + renderer.render_json(workflows, {}, []) + + # Load generated and golden files + with open(tmp_path / "workflows.json") as f: + generated = json.load(f) + + golden_path = Path(__file__).parent / "golden" / "workflows.json" + with open(golden_path) as f: + golden = json.load(f) + + # Compare structure (ignoring list order differences) + assert generated["test.yml"]["name"] == golden["test.yml"]["name"] + assert set(generated["test.yml"]["triggers"]) == set(golden["test.yml"]["triggers"]) + assert generated["test.yml"]["jobs"]["test"]["runs_on"] == golden["test.yml"]["jobs"]["test"]["runs_on"] + + +def test_json_action_output(tmp_path: Path) -> None: + """Test action JSON matches golden file.""" + renderer = Renderer(tmp_path) + + actions = { + "actions/checkout@abc123": ActionManifest( + name="Checkout", + description="Checkout a Git repository", + author="GitHub", + inputs={ + "repository": ActionInput( + name="repository", + description="Repository name with owner", + required=False, + ), + "ref": ActionInput( + name="ref", + description="The branch, tag or SHA to checkout", + required=False, + ), + }, + runs={"using": "node20", "main": "dist/index.js"}, + is_javascript=True, + ) + } + 
+ renderer.render_json({}, actions, []) + + with open(tmp_path / "actions.json") as f: + generated = json.load(f) + + golden_path = Path(__file__).parent / "golden" / "actions.json" + with open(golden_path) as f: + golden = json.load(f) + + assert generated["actions/checkout@abc123"]["name"] == golden["actions/checkout@abc123"]["name"] + assert generated["actions/checkout@abc123"]["is_javascript"] is True + + +def test_markdown_report_structure(tmp_path: Path) -> None: + """Test markdown report structure.""" + renderer = Renderer(tmp_path) + + workflows = { + "test.yml": WorkflowMeta( + name="Test Workflow", + path="test.yml", + triggers=["push", "pull_request"], + jobs={ + "test": JobMeta( + name="test", + runs_on="ubuntu-latest", + secrets_used={"GITHUB_TOKEN"}, + ) + }, + secrets_used={"GITHUB_TOKEN"}, + ) + } + + actions = { + "actions/checkout@abc123": ActionManifest( + name="Checkout", + description="Checkout a Git repository", + inputs={ + "repository": ActionInput( + name="repository", + description="Repository name with owner", + ), + "ref": ActionInput( + name="ref", + description="The branch, tag or SHA to checkout", + ), + }, + ) + } + + analysis = { + "total_jobs": 1, + "reusable_workflows": 0, + "triggers": {"push": 1, "pull_request": 1}, + "runners": {"ubuntu-latest": 1}, + "secrets": {"total_unique_secrets": 1, "secrets": ["GITHUB_TOKEN"]}, + } + + renderer.render_markdown(workflows, actions, [], analysis) + + with open(tmp_path / "report.md") as f: + content = f.read() + + # Check key sections exist + assert "# GitHub Actions & Workflows Audit Report" in content + assert "## Summary" in content + assert "## Analysis" in content + assert "## Workflows" in content + assert "## Actions Inventory" in content + + # Check specific content + assert "Test Workflow" in content + assert "Checkout" in content + assert "GITHUB_TOKEN" in content + assert "`ubuntu-latest`" in content + + +def test_empty_report_generation(tmp_path: Path) -> None: + """Test report 
generation with empty data.""" + renderer = Renderer(tmp_path) + + renderer.render_json({}, {}, []) + renderer.render_markdown({}, {}, [], {}) + + # Files should exist even with empty data + assert (tmp_path / "workflows.json").exists() + assert (tmp_path / "actions.json").exists() + assert (tmp_path / "violations.json").exists() + assert (tmp_path / "report.md").exists() + + with open(tmp_path / "workflows.json") as f: + assert json.load(f) == {} diff --git a/tests/test_models.py b/tests/test_models.py new file mode 100644 index 0000000..8f717eb --- /dev/null +++ b/tests/test_models.py @@ -0,0 +1,105 @@ +"""Tests for models.""" + +from datetime import datetime + +from ghaw_auditor.models import ( + ActionInput, + ActionManifest, + ActionRef, + ActionType, + BaselineMeta, + PermissionLevel, + Permissions, +) + + +def test_action_ref_canonical_key_github() -> None: + """Test canonical key for GitHub action.""" + ref = ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="checkout", + ref="v4", + resolved_sha="abc123", + source_file="test.yml", + ) + + key = ref.canonical_key() + assert key == "actions/checkout@abc123" + + +def test_action_ref_canonical_key_local() -> None: + """Test canonical key for local action.""" + ref = ActionRef( + type=ActionType.LOCAL, + path="./.github/actions/custom", + source_file="test.yml", + ) + + key = ref.canonical_key() + assert key == "local:./.github/actions/custom" + + +def test_action_ref_canonical_key_reusable_workflow() -> None: + """Test canonical key for reusable workflow.""" + ref = ActionRef( + type=ActionType.REUSABLE_WORKFLOW, + owner="owner", + repo="repo", + path=".github/workflows/reusable.yml", + ref="v1", + resolved_sha="abc123", + source_file="test.yml", + ) + + key = ref.canonical_key() + assert key == "owner/repo/.github/workflows/reusable.yml@abc123" + + +def test_action_ref_canonical_key_docker() -> None: + """Test canonical key for Docker action.""" + ref = ActionRef( + type=ActionType.DOCKER, + 
path="docker://alpine:3.8", + source_file="test.yml", + ) + + key = ref.canonical_key() + assert key == "docker:docker://alpine:3.8" + + +def test_permissions_model() -> None: + """Test permissions model.""" + perms = Permissions( + contents=PermissionLevel.READ, + pull_requests=PermissionLevel.WRITE, + ) + + assert perms.contents == PermissionLevel.READ + assert perms.pull_requests == PermissionLevel.WRITE + + +def test_action_manifest() -> None: + """Test action manifest model.""" + manifest = ActionManifest( + name="Test Action", + description="A test action", + inputs={"test-input": ActionInput(name="test-input", required=True)}, + ) + + assert manifest.name == "Test Action" + assert "test-input" in manifest.inputs + assert manifest.inputs["test-input"].required is True + + +def test_baseline_meta() -> None: + """Test baseline metadata model.""" + meta = BaselineMeta( + auditor_version="1.0.0", + commit_sha="abc123", + timestamp=datetime.now(), + ) + + assert meta.auditor_version == "1.0.0" + assert meta.commit_sha == "abc123" + assert meta.schema_version == "1.0" diff --git a/tests/test_parser.py b/tests/test_parser.py new file mode 100644 index 0000000..208d906 --- /dev/null +++ b/tests/test_parser.py @@ -0,0 +1,672 @@ +"""Tests for parser module.""" + +from pathlib import Path + +import pytest + +from ghaw_auditor.models import ActionType, PermissionLevel +from ghaw_auditor.parser import Parser + +FIXTURES_DIR = Path(__file__).parent / "fixtures" + + +def test_parser_initialization() -> None: + """Test parser can be initialized.""" + parser = Parser(Path.cwd()) + assert parser.yaml is not None + + +# ============================================================================ +# Workflow Parsing Tests +# ============================================================================ + + +def test_parse_basic_workflow() -> None: + """Test parsing a basic workflow.""" + parser = Parser(FIXTURES_DIR) + workflow = parser.parse_workflow(FIXTURES_DIR / 
"basic-workflow.yml") + + assert workflow.name == "Basic Workflow" + assert workflow.path == "basic-workflow.yml" + assert workflow.triggers == ["push"] + assert "test" in workflow.jobs + assert workflow.jobs["test"].runs_on == "ubuntu-latest" + assert len(workflow.jobs["test"].actions_used) == 1 + assert workflow.jobs["test"].actions_used[0].owner == "actions" + assert workflow.jobs["test"].actions_used[0].repo == "checkout" + + +def test_parse_complex_workflow() -> None: + """Test parsing a complex workflow with all features.""" + parser = Parser(FIXTURES_DIR) + workflow = parser.parse_workflow(FIXTURES_DIR / "complex-workflow.yml") + + # Basic metadata + assert workflow.name == "Complex Workflow" + assert set(workflow.triggers) == {"push", "pull_request", "workflow_dispatch"} + + # Permissions + assert workflow.permissions is not None + assert workflow.permissions.contents == PermissionLevel.READ + assert workflow.permissions.issues == PermissionLevel.WRITE + assert workflow.permissions.pull_requests == PermissionLevel.WRITE + + # Environment variables + assert workflow.env["NODE_ENV"] == "production" + assert workflow.env["API_URL"] == "https://api.example.com" + + # Concurrency + assert workflow.concurrency is not None + + # Defaults + assert workflow.defaults["run"]["shell"] == "bash" + + # Jobs + assert "build" in workflow.jobs + assert "test" in workflow.jobs + + # Build job + build = workflow.jobs["build"] + assert build.timeout_minutes == 30 + assert build.permissions is not None + assert build.environment == {"name": "production", "url": "https://example.com"} + + # Test job + test = workflow.jobs["test"] + assert test.needs == ["build"] + assert test.if_condition == "github.event_name == 'pull_request'" + assert test.container is not None + assert test.container.image == "node:20-alpine" + assert "NODE_ENV" in test.container.env + assert test.continue_on_error is True + + # Services + assert "postgres" in test.services + assert 
test.services["postgres"].image == "postgres:15" + + # Strategy + assert test.strategy is not None + assert test.strategy.fail_fast is False + assert test.strategy.max_parallel == 2 + + # Secrets extraction + assert "API_KEY" in workflow.secrets_used + assert "GITHUB_TOKEN" in workflow.secrets_used + assert "DATABASE_URL" in workflow.secrets_used + + +def test_parse_reusable_workflow() -> None: + """Test parsing a reusable workflow.""" + parser = Parser(FIXTURES_DIR) + workflow = parser.parse_workflow(FIXTURES_DIR / "reusable-workflow.yml") + + assert workflow.is_reusable is True + assert workflow.reusable_contract is not None + + # Check inputs + assert "environment" in workflow.reusable_contract.inputs + assert workflow.reusable_contract.inputs["environment"]["required"] is True + assert workflow.reusable_contract.inputs["debug"]["default"] is False + + # Check outputs + assert "deployment-id" in workflow.reusable_contract.outputs + + # Check secrets + assert "deploy-token" in workflow.reusable_contract.secrets + assert workflow.reusable_contract.secrets["deploy-token"]["required"] is True + + +def test_parse_workflow_with_empty_workflow_call() -> None: + """Test parsing workflow with empty workflow_call.""" + parser = Parser(FIXTURES_DIR) + workflow = parser.parse_workflow(FIXTURES_DIR / "empty-workflow-call.yml") + + assert workflow.is_reusable is True + # Empty workflow_call should result in None contract + assert workflow.reusable_contract is None or workflow.reusable_contract.inputs == {} + + +def test_parse_empty_workflow() -> None: + """Test parsing an empty workflow file raises error.""" + parser = Parser(FIXTURES_DIR) + + with pytest.raises(ValueError, match="Empty workflow file"): + parser.parse_workflow(FIXTURES_DIR / "invalid-workflow.yml") + + +# ============================================================================ +# Action Reference Parsing Tests +# ============================================================================ + + +def 
test_parse_action_ref_github() -> None: + """Test parsing GitHub action reference.""" + parser = Parser(Path.cwd()) + ref = parser._parse_action_ref("actions/checkout@v4", Path("test.yml")) + + assert ref.type == ActionType.GITHUB + assert ref.owner == "actions" + assert ref.repo == "checkout" + assert ref.ref == "v4" + + +def test_parse_action_ref_github_with_path() -> None: + """Test parsing GitHub action reference with path (monorepo).""" + parser = Parser(Path.cwd()) + ref = parser._parse_action_ref("owner/repo/path/to/action@v1", Path("test.yml")) + + assert ref.type == ActionType.GITHUB + assert ref.owner == "owner" + assert ref.repo == "repo" + assert ref.path == "path/to/action" + assert ref.ref == "v1" + + +def test_parse_action_ref_local() -> None: + """Test parsing local action reference.""" + parser = Parser(Path.cwd()) + ref = parser._parse_action_ref("./.github/actions/custom", Path("test.yml")) + + assert ref.type == ActionType.LOCAL + assert ref.path == "./.github/actions/custom" + + +def test_parse_action_ref_docker() -> None: + """Test parsing Docker action reference.""" + parser = Parser(Path.cwd()) + ref = parser._parse_action_ref("docker://alpine:3.8", Path("test.yml")) + + assert ref.type == ActionType.DOCKER + assert ref.path == "docker://alpine:3.8" + + +def test_parse_action_ref_invalid() -> None: + """Test parsing invalid action reference raises error.""" + parser = Parser(Path.cwd()) + + with pytest.raises(ValueError, match="Invalid action reference"): + parser._parse_action_ref("invalid-ref", Path("test.yml")) + + +def test_extract_secrets() -> None: + """Test extracting secrets from content.""" + parser = Parser(Path.cwd()) + content = """ + env: + TOKEN: ${{ secrets.GITHUB_TOKEN }} + API_KEY: ${{ secrets.API_KEY }} + """ + secrets = parser._extract_secrets(content) + + assert "GITHUB_TOKEN" in secrets + assert "API_KEY" in secrets + assert len(secrets) == 2 + + +# 
============================================================================ +# Trigger Extraction Tests +# ============================================================================ + + +def test_extract_triggers_string() -> None: + """Test extracting triggers from string.""" + parser = Parser(Path.cwd()) + triggers = parser._extract_triggers("push") + + assert triggers == ["push"] + + +def test_extract_triggers_list() -> None: + """Test extracting triggers from list.""" + parser = Parser(Path.cwd()) + triggers = parser._extract_triggers(["push", "pull_request"]) + + assert triggers == ["push", "pull_request"] + + +def test_extract_triggers_dict() -> None: + """Test extracting triggers from dict.""" + parser = Parser(Path.cwd()) + triggers = parser._extract_triggers( + { + "push": {"branches": ["main"]}, + "pull_request": None, + "workflow_dispatch": None, + } + ) + + assert set(triggers) == {"push", "pull_request", "workflow_dispatch"} + + +def test_extract_triggers_empty() -> None: + """Test extracting triggers from empty value.""" + parser = Parser(Path.cwd()) + triggers = parser._extract_triggers(None) + + assert triggers == [] + + +# ============================================================================ +# Permissions Parsing Tests +# ============================================================================ + + +def test_parse_permissions_none() -> None: + """Test parsing None permissions.""" + parser = Parser(Path.cwd()) + perms = parser._parse_permissions(None) + + assert perms is None + + +def test_parse_permissions_string() -> None: + """Test parsing string permissions (read-all/write-all).""" + parser = Parser(Path.cwd()) + perms = parser._parse_permissions("read-all") + + # Should return an empty Permissions object + assert perms is not None + + +def test_parse_permissions_dict() -> None: + """Test parsing dict permissions.""" + parser = Parser(Path.cwd()) + perms = parser._parse_permissions( + { + "contents": "read", + "issues": "write", + 
"pull_requests": "write", + } + ) + + assert perms is not None + assert perms.contents == PermissionLevel.READ + assert perms.issues == PermissionLevel.WRITE + assert perms.pull_requests == PermissionLevel.WRITE + + +# ============================================================================ +# Job Parsing Tests +# ============================================================================ + + +def test_parse_job_with_none_data() -> None: + """Test parsing job with None data.""" + parser = Parser(Path.cwd()) + job = parser._parse_job("test", None, Path("test.yml"), "") + + assert job.name == "test" + assert job.runs_on == "ubuntu-latest" # default value + + +def test_parse_job_needs_string_vs_list() -> None: + """Test parsing job needs as string vs list.""" + parser = Parser(Path.cwd()) + + # String needs + job1 = parser._parse_job("test", {"needs": "build"}, Path("test.yml"), "") + assert job1.needs == ["build"] + + # List needs + job2 = parser._parse_job("test", {"needs": ["build", "lint"]}, Path("test.yml"), "") + assert job2.needs == ["build", "lint"] + + +def test_parse_job_with_none_steps() -> None: + """Test parsing job with None steps.""" + parser = Parser(Path.cwd()) + job = parser._parse_job( + "test", + {"steps": [None, {"uses": "actions/checkout@v4"}]}, + Path("test.yml"), + "", + ) + + # Should skip None steps + assert len(job.actions_used) == 1 + assert job.actions_used[0].repo == "checkout" + + +# ============================================================================ +# Container/Services/Strategy Parsing Tests +# ============================================================================ + + +def test_parse_container_none() -> None: + """Test parsing None container.""" + parser = Parser(Path.cwd()) + container = parser._parse_container(None) + + assert container is None + + +def test_parse_container_string() -> None: + """Test parsing container from string.""" + parser = Parser(Path.cwd()) + container = 
parser._parse_container("ubuntu:latest") + + assert container is not None + assert container.image == "ubuntu:latest" + + +def test_parse_container_dict() -> None: + """Test parsing container from dict.""" + parser = Parser(Path.cwd()) + container = parser._parse_container( + { + "image": "node:20", + "credentials": {"username": "user", "password": "pass"}, + "env": {"NODE_ENV": "test"}, + "ports": [8080], + "volumes": ["/tmp:/tmp"], + "options": "--cpus 2", + } + ) + + assert container is not None + assert container.image == "node:20" + assert container.credentials == {"username": "user", "password": "pass"} + assert container.env["NODE_ENV"] == "test" + assert container.ports == [8080] + assert container.volumes == ["/tmp:/tmp"] + assert container.options == "--cpus 2" + + +def test_parse_services_none() -> None: + """Test parsing None services.""" + parser = Parser(Path.cwd()) + services = parser._parse_services(None) + + assert services == {} + + +def test_parse_services_string_image() -> None: + """Test parsing service with string image.""" + parser = Parser(Path.cwd()) + services = parser._parse_services({"postgres": "postgres:15"}) + + assert "postgres" in services + assert services["postgres"].name == "postgres" + assert services["postgres"].image == "postgres:15" + + +def test_parse_services_dict() -> None: + """Test parsing service with dict config.""" + parser = Parser(Path.cwd()) + services = parser._parse_services( + { + "redis": { + "image": "redis:alpine", + "ports": [6379], + "options": "--health-cmd 'redis-cli ping'", + } + } + ) + + assert "redis" in services + assert services["redis"].image == "redis:alpine" + assert services["redis"].ports == [6379] + + +def test_parse_strategy_none() -> None: + """Test parsing None strategy.""" + parser = Parser(Path.cwd()) + strategy = parser._parse_strategy(None) + + assert strategy is None + + +def test_parse_strategy_matrix() -> None: + """Test parsing strategy with matrix.""" + parser = Parser(Path.cwd()) 
+ strategy = parser._parse_strategy( + { + "matrix": {"node-version": [18, 20], "os": ["ubuntu-latest", "windows-latest"]}, + "fail-fast": False, + "max-parallel": 4, + } + ) + + assert strategy is not None + assert strategy.matrix == {"node-version": [18, 20], "os": ["ubuntu-latest", "windows-latest"]} + assert strategy.fail_fast is False + assert strategy.max_parallel == 4 + + +# ============================================================================ +# Action Manifest Parsing Tests +# ============================================================================ + + +def test_parse_composite_action() -> None: + """Test parsing a composite action.""" + parser = Parser(FIXTURES_DIR) + action = parser.parse_action(FIXTURES_DIR / "composite-action.yml") + + assert action.name == "Composite Action" + assert action.description == "A composite action example" + assert action.author == "Test Author" + assert action.is_composite is True + assert action.is_docker is False + assert action.is_javascript is False + + # Check inputs + assert "message" in action.inputs + assert action.inputs["message"].required is True + assert "debug" in action.inputs + assert action.inputs["debug"].required is False + assert action.inputs["debug"].default == "false" + + # Check outputs + assert "result" in action.outputs + assert action.outputs["result"].description == "Action result" + + # Check branding + assert action.branding is not None + + +def test_parse_docker_action() -> None: + """Test parsing a Docker action.""" + parser = Parser(FIXTURES_DIR) + action = parser.parse_action(FIXTURES_DIR / "docker-action.yml") + + assert action.name == "Docker Action" + assert action.is_docker is True + assert action.is_composite is False + assert action.is_javascript is False + + # Check inputs + assert "dockerfile" in action.inputs + assert action.inputs["dockerfile"].default == "Dockerfile" + + # Check outputs + assert "image-id" in action.outputs + + +def test_parse_javascript_action() -> 
None: + """Test parsing a JavaScript action.""" + parser = Parser(FIXTURES_DIR) + action = parser.parse_action(FIXTURES_DIR / "javascript-action.yml") + + assert action.name == "JavaScript Action" + assert action.is_javascript is True + assert action.is_composite is False + assert action.is_docker is False + + # Check runs config + assert action.runs["using"] == "node20" + assert action.runs["main"] == "dist/index.js" + + +def test_parse_action_with_various_defaults() -> None: + """Test parsing action with different input default types.""" + parser = Parser(FIXTURES_DIR) + action = parser.parse_action(FIXTURES_DIR / "action-with-defaults.yml") + + assert action.name == "Action with Various Defaults" + + # String default + assert action.inputs["string-input"].default == "hello" + + # Boolean default + assert action.inputs["boolean-input"].default is True + + # Number default + assert action.inputs["number-input"].default == 42 + + # No default + assert action.inputs["no-default"].required is True + + +def test_parse_action_empty_inputs_outputs() -> None: + """Test parsing action with empty inputs/outputs.""" + parser = Parser(FIXTURES_DIR) + action = parser.parse_action(FIXTURES_DIR / "composite-action.yml") + + # Even if action has inputs/outputs, the parser should handle missing ones + assert action.inputs is not None + assert action.outputs is not None + + +def test_parse_empty_action() -> None: + """Test parsing an empty action file raises error.""" + parser = Parser(FIXTURES_DIR) + + with pytest.raises(ValueError, match="Empty action file"): + parser.parse_action(FIXTURES_DIR / "invalid-action.yml") + + +# ============================================================================ +# Reusable Workflow Tests +# ============================================================================ + + +def test_parse_reusable_workflow_caller() -> None: + """Test parsing workflow that calls reusable workflows.""" + parser = Parser(FIXTURES_DIR) + workflow = 
parser.parse_workflow(FIXTURES_DIR / "reusable-workflow-caller.yml") + + assert workflow.name == "Reusable Workflow Caller" + assert "call-workflow" in workflow.jobs + assert "call-workflow-inherit" in workflow.jobs + assert "call-local-workflow" in workflow.jobs + + # Test job with explicit secrets + call_job = workflow.jobs["call-workflow"] + assert call_job.uses == "owner/repo/.github/workflows/deploy.yml@v1" + assert call_job.with_inputs["environment"] == "production" + assert call_job.with_inputs["debug"] is False + assert call_job.with_inputs["version"] == "1.2.3" + assert call_job.secrets_passed is not None + assert "deploy-token" in call_job.secrets_passed + assert call_job.inherit_secrets is False + + # Verify reusable workflow tracked as action + assert len(call_job.actions_used) == 1 + assert call_job.actions_used[0].type == ActionType.REUSABLE_WORKFLOW + assert call_job.actions_used[0].owner == "owner" + assert call_job.actions_used[0].repo == "repo" + assert call_job.actions_used[0].path == ".github/workflows/deploy.yml" + assert call_job.actions_used[0].ref == "v1" + + # Test job with inherited secrets + inherit_job = workflow.jobs["call-workflow-inherit"] + assert inherit_job.uses == "owner/repo/.github/workflows/test.yml@main" + assert inherit_job.inherit_secrets is True + assert inherit_job.secrets_passed is None + + # Test local reusable workflow + local_job = workflow.jobs["call-local-workflow"] + assert local_job.uses == "./.github/workflows/shared.yml" + assert local_job.actions_used[0].type == ActionType.REUSABLE_WORKFLOW + assert local_job.actions_used[0].path == "./.github/workflows/shared.yml" + + +def test_parse_job_with_outputs() -> None: + """Test parsing job with outputs.""" + parser = Parser(FIXTURES_DIR) + workflow = parser.parse_workflow(FIXTURES_DIR / "job-with-outputs.yml") + + assert "build" in workflow.jobs + build_job = workflow.jobs["build"] + + assert build_job.outputs is not None + assert "version" in build_job.outputs + 
assert "artifact-url" in build_job.outputs + assert "status" in build_job.outputs + assert build_job.outputs["status"] == "success" + + +def test_parse_reusable_workflow_ref_local() -> None: + """Test parsing local reusable workflow reference.""" + parser = Parser(Path.cwd()) + ref = parser._parse_reusable_workflow_ref("./.github/workflows/deploy.yml", Path("test.yml")) + + assert ref.type == ActionType.REUSABLE_WORKFLOW + assert ref.path == "./.github/workflows/deploy.yml" + + +def test_parse_reusable_workflow_ref_github() -> None: + """Test parsing GitHub reusable workflow reference.""" + parser = Parser(Path.cwd()) + ref = parser._parse_reusable_workflow_ref("actions/reusable/.github/workflows/build.yml@v1", Path("test.yml")) + + assert ref.type == ActionType.REUSABLE_WORKFLOW + assert ref.owner == "actions" + assert ref.repo == "reusable" + assert ref.path == ".github/workflows/build.yml" + assert ref.ref == "v1" + + +def test_parse_reusable_workflow_ref_invalid() -> None: + """Test parsing invalid reusable workflow reference raises error.""" + parser = Parser(Path.cwd()) + + with pytest.raises(ValueError, match="Invalid reusable workflow reference"): + parser._parse_reusable_workflow_ref("invalid-workflow-ref", Path("test.yml")) + + +def test_parse_permissions_invalid_type(tmp_path: Path) -> None: + """Test parsing permissions with invalid type.""" + parser = Parser(tmp_path) + + # Test with boolean (invalid type) + result = parser._parse_permissions(True) + assert result is None + + # Test with int (invalid type) + result = parser._parse_permissions(123) + assert result is None + + # Test with list (invalid type) + result = parser._parse_permissions(["read", "write"]) + assert result is None + + +def test_parse_workflow_with_boolean_and_number_env(tmp_path: Path) -> None: + """Test parsing workflow with boolean and number values in env.""" + workflow_file = tmp_path / "test.yml" + workflow_file.write_text( + """ +name: Test +on: push +env: + STRING_VAR: 
"hello" + BOOL_VAR: true + NUMBER_VAR: 42 + FLOAT_VAR: 3.14 +jobs: + test: + runs-on: ubuntu-latest + steps: + - run: echo test +""" + ) + + parser = Parser(tmp_path) + workflow = parser.parse_workflow(workflow_file) + + assert workflow.env["STRING_VAR"] == "hello" + assert workflow.env["BOOL_VAR"] is True + assert workflow.env["NUMBER_VAR"] == 42 + assert workflow.env["FLOAT_VAR"] == 3.14 diff --git a/tests/test_policy.py b/tests/test_policy.py new file mode 100644 index 0000000..125c03b --- /dev/null +++ b/tests/test_policy.py @@ -0,0 +1,256 @@ +"""Tests for policy validator.""" + +from ghaw_auditor.models import ActionRef, ActionType, JobMeta, Policy, WorkflowMeta +from ghaw_auditor.policy import PolicyValidator + + +def test_policy_validator_initialization() -> None: + """Test validator initialization.""" + policy = Policy() + validator = PolicyValidator(policy) + assert validator.policy == policy + + +def test_pinned_actions_validation() -> None: + """Test pinned actions policy.""" + policy = Policy(require_pinned_actions=True) + validator = PolicyValidator(policy) + + workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + jobs={ + "test": JobMeta( + name="test", + runs_on="ubuntu-latest", + actions_used=[ + ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="checkout", + ref="v4", # Not pinned to SHA + source_file="test.yml", + ) + ], + ) + }, + actions_used=[ + ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="checkout", + ref="v4", + source_file="test.yml", + ) + ], + ) + + violations = validator.validate({"test.yml": workflow}, []) + + assert len(violations) > 0 + assert violations[0]["rule"] == "require_pinned_actions" + assert violations[0]["severity"] == "error" + + +def test_pinned_actions_with_sha() -> None: + """Test pinned actions with SHA pass validation.""" + policy = Policy(require_pinned_actions=True) + validator = PolicyValidator(policy) + + workflow = WorkflowMeta( + name="Test", + 
path="test.yml", + triggers=["push"], + jobs={ + "test": JobMeta( + name="test", + runs_on="ubuntu-latest", + actions_used=[ + ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="checkout", + ref="abc123def456789012345678901234567890abcd", # SHA + source_file="test.yml", + ) + ], + ) + }, + actions_used=[], + ) + + violations = validator.validate({"test.yml": workflow}, []) + + assert len(violations) == 0 + + +def test_branch_refs_validation() -> None: + """Test forbid branch refs policy.""" + policy = Policy(require_pinned_actions=False, forbid_branch_refs=True) + validator = PolicyValidator(policy) + + workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + jobs={ + "test": JobMeta( + name="test", + runs_on="ubuntu-latest", + actions_used=[ + ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="checkout", + ref="main", + source_file="test.yml", + ) + ], + ) + }, + actions_used=[ + ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="checkout", + ref="main", + source_file="test.yml", + ) + ], + ) + + violations = validator.validate({"test.yml": workflow}, []) + + assert len(violations) > 0 + assert violations[0]["rule"] == "forbid_branch_refs" + + +def test_allowed_actions_validation() -> None: + """Test allowed actions whitelist.""" + policy = Policy(require_pinned_actions=False, allowed_actions=["actions/*", "github/*"]) + validator = PolicyValidator(policy) + + workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + jobs={ + "test": JobMeta( + name="test", + runs_on="ubuntu-latest", + actions_used=[ + ActionRef( + type=ActionType.GITHUB, + owner="thirdparty", + repo="action", + ref="v1", + source_file="test.yml", + ) + ], + ) + }, + actions_used=[ + ActionRef( + type=ActionType.GITHUB, + owner="thirdparty", + repo="action", + ref="v1", + source_file="test.yml", + ) + ], + ) + + violations = validator.validate({"test.yml": workflow}, []) + + assert len(violations) > 0 + assert 
violations[0]["rule"] == "allowed_actions" + + +def test_denied_actions_validation() -> None: + """Test denied actions blacklist.""" + policy = Policy(require_pinned_actions=False, denied_actions=["dangerous/*"]) + validator = PolicyValidator(policy) + + workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + jobs={ + "test": JobMeta( + name="test", + runs_on="ubuntu-latest", + actions_used=[ + ActionRef( + type=ActionType.GITHUB, + owner="dangerous", + repo="action", + ref="v1", + source_file="test.yml", + ) + ], + ) + }, + actions_used=[ + ActionRef( + type=ActionType.GITHUB, + owner="dangerous", + repo="action", + ref="v1", + source_file="test.yml", + ) + ], + ) + + violations = validator.validate({"test.yml": workflow}, []) + + assert len(violations) > 0 + assert violations[0]["rule"] == "denied_actions" + + +def test_pr_concurrency_validation() -> None: + """Test PR concurrency requirement.""" + policy = Policy(require_concurrency_on_pr=True) + validator = PolicyValidator(policy) + + workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["pull_request"], + concurrency=None, + jobs={}, + ) + + violations = validator.validate({"test.yml": workflow}, []) + + assert len(violations) > 0 + assert violations[0]["rule"] == "require_concurrency_on_pr" + assert violations[0]["severity"] == "warning" + + +def test_pr_concurrency_with_group() -> None: + """Test PR with concurrency group passes.""" + policy = Policy(require_concurrency_on_pr=True) + validator = PolicyValidator(policy) + + workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["pull_request"], + concurrency={"group": "${{ github.workflow }}"}, + jobs={}, + ) + + violations = validator.validate({"test.yml": workflow}, []) + + assert len(violations) == 0 + + +def test_matches_pattern() -> None: + """Test pattern matching.""" + policy = Policy() + validator = PolicyValidator(policy) + + assert validator._matches_pattern("actions/checkout", "actions/*") 
is True + assert validator._matches_pattern("github/codeql-action", "github/*") is True + assert validator._matches_pattern("thirdparty/action", "actions/*") is False diff --git a/tests/test_renderer.py b/tests/test_renderer.py new file mode 100644 index 0000000..eda77ba --- /dev/null +++ b/tests/test_renderer.py @@ -0,0 +1,755 @@ +"""Tests for renderer.""" + +import json +from pathlib import Path + +from ghaw_auditor.models import ActionManifest, JobMeta, WorkflowMeta +from ghaw_auditor.renderer import Renderer + + +def test_renderer_initialization(tmp_path: Path) -> None: + """Test renderer initialization.""" + renderer = Renderer(tmp_path) + assert renderer.output_dir == tmp_path + assert renderer.output_dir.exists() + + +def test_render_json(tmp_path: Path) -> None: + """Test JSON rendering.""" + renderer = Renderer(tmp_path) + + workflows = { + "test.yml": WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + jobs={"test": JobMeta(name="test", runs_on="ubuntu-latest")}, + ) + } + + actions = { + "actions/checkout@v4": ActionManifest( + name="Checkout", + description="Checkout code", + ) + } + + violations = [ + { + "workflow": "test.yml", + "rule": "test_rule", + "severity": "error", + "message": "Test violation", + } + ] + + renderer.render_json(workflows, actions, violations) + + # Check files exist + assert (tmp_path / "workflows.json").exists() + assert (tmp_path / "actions.json").exists() + assert (tmp_path / "violations.json").exists() + + # Verify JSON content + with open(tmp_path / "workflows.json") as f: + data = json.load(f) + assert "test.yml" in data + assert data["test.yml"]["name"] == "Test" + + with open(tmp_path / "actions.json") as f: + data = json.load(f) + assert "actions/checkout@v4" in data + + with open(tmp_path / "violations.json") as f: + data = json.load(f) + assert len(data) == 1 + assert data[0]["rule"] == "test_rule" + + +def test_render_markdown(tmp_path: Path) -> None: + """Test Markdown rendering.""" + renderer = 
Renderer(tmp_path) + + workflows = { + "test.yml": WorkflowMeta( + name="Test Workflow", + path="test.yml", + triggers=["push", "pull_request"], + jobs={"test": JobMeta(name="test", runs_on="ubuntu-latest")}, + ) + } + + actions = { + "actions/checkout@v4": ActionManifest( + name="Checkout", + description="Checkout repository", + ) + } + + violations = [ + { + "workflow": "test.yml", + "rule": "require_pinned_actions", + "severity": "error", + "message": "Action not pinned to SHA", + } + ] + + analysis = { + "total_jobs": 1, + "reusable_workflows": 0, + "triggers": {"push": 1, "pull_request": 1}, + "runners": {"ubuntu-latest": 1}, + "secrets": {"total_unique_secrets": 0, "secrets": []}, + } + + renderer.render_markdown(workflows, actions, violations, analysis) + + report_file = tmp_path / "report.md" + assert report_file.exists() + + content = report_file.read_text() + assert "# GitHub Actions & Workflows Audit Report" in content + assert "Test Workflow" in content + assert "Checkout" in content + assert "require_pinned_actions" in content + assert "push" in content + assert "pull_request" in content + + +def test_render_empty_data(tmp_path: Path) -> None: + """Test rendering with empty data.""" + renderer = Renderer(tmp_path) + + renderer.render_json({}, {}, []) + + assert (tmp_path / "workflows.json").exists() + assert (tmp_path / "actions.json").exists() + assert (tmp_path / "violations.json").exists() + + with open(tmp_path / "workflows.json") as f: + assert json.load(f) == {} + + with open(tmp_path / "violations.json") as f: + assert json.load(f) == [] + + +def test_render_markdown_with_actions_used(tmp_path: Path) -> None: + """Test Markdown rendering with job actions_used.""" + from ghaw_auditor.models import ActionRef, ActionType + + renderer = Renderer(tmp_path) + + # Create a job with actions_used + action_ref = ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="checkout", + ref="v4", + source_file="test.yml", + ) + + job = JobMeta( + 
name="test", + runs_on="ubuntu-latest", + actions_used=[action_ref], + ) + + workflows = { + "test.yml": WorkflowMeta( + name="Test Workflow", + path="test.yml", + triggers=["push"], + jobs={"test": job}, + ) + } + + renderer.render_markdown(workflows, {}, [], {}) + + report_file = tmp_path / "report.md" + assert report_file.exists() + + content = report_file.read_text() + # Should render the actions used with link + assert "Actions used:" in content + assert "[actions/checkout](#actions-checkout)" in content + + +def test_render_markdown_with_secrets(tmp_path: Path) -> None: + """Test Markdown rendering with secrets.""" + renderer = Renderer(tmp_path) + + workflows = { + "test.yml": WorkflowMeta( + name="Test Workflow", + path="test.yml", + triggers=["push"], + jobs={}, + ) + } + + analysis = { + "total_jobs": 0, + "reusable_workflows": 0, + "secrets": { + "total_unique_secrets": 2, + "secrets": ["API_KEY", "DATABASE_URL"], + }, + } + + renderer.render_markdown(workflows, {}, [], analysis) + + report_file = tmp_path / "report.md" + content = report_file.read_text() + + # Should render secrets + assert "API_KEY" in content + assert "DATABASE_URL" in content + + +def test_render_markdown_with_action_inputs(tmp_path: Path) -> None: + """Test Markdown rendering with action inputs.""" + from ghaw_auditor.models import ActionInput + + renderer = Renderer(tmp_path) + + action = ActionManifest( + name="Test Action", + description="A test action", + inputs={ + "token": ActionInput( + name="token", + description="GitHub token", + required=True, + ), + "debug": ActionInput( + name="debug", + description="Enable debug mode", + required=False, + ), + }, + ) + + renderer.render_markdown({}, {"test/action@v1": action}, [], {}) + + report_file = tmp_path / "report.md" + content = report_file.read_text() + + # Should render inputs with required/optional status + assert "token" in content + assert "required" in content + assert "debug" in content + assert "optional" in content + 
assert "GitHub token" in content + assert "Enable debug mode" in content + + +def test_render_markdown_with_action_anchors(tmp_path: Path) -> None: + """Test that action anchors are created for linking.""" + from ghaw_auditor.models import ActionRef, ActionType + + renderer = Renderer(tmp_path) + + action_ref = ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="checkout", + ref="v4", + resolved_sha="abc123", + source_file="test.yml", + ) + + workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + jobs={}, + actions_used=[action_ref], + ) + + action = ActionManifest( + name="Checkout", + description="Checkout code", + ) + + renderer.render_markdown({"test.yml": workflow}, {"actions/checkout@abc123": action}, [], {}) + + report_file = tmp_path / "report.md" + content = report_file.read_text() + + # Should have anchor tag + assert '<a id="actions-checkout"></a>' in content + + +def test_render_markdown_with_repo_urls(tmp_path: Path) -> None: + """Test that GitHub action repository URLs are included.""" + from ghaw_auditor.models import ActionRef, ActionType + + renderer = Renderer(tmp_path) + + action_ref = ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="setup-node", + ref="v4", + resolved_sha="def456", + source_file="test.yml", + ) + + workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + jobs={}, + actions_used=[action_ref], + ) + + action = ActionManifest( + name="Setup Node", + description="Setup Node.js", + ) + + renderer.render_markdown({"test.yml": workflow}, {"actions/setup-node@def456": action}, [], {}) + + report_file = tmp_path / "report.md" + content = report_file.read_text() + + # Should have repository link + assert "https://github.com/actions/setup-node" in content + assert "[actions/setup-node](https://github.com/actions/setup-node)" in content + + +def test_render_markdown_with_details_tags(tmp_path: Path) -> None: + """Test that inputs are wrapped in details tags.""" + from ghaw_auditor.models 
import ActionInput, ActionRef, ActionType + + renderer = Renderer(tmp_path) + + action_ref = ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="checkout", + ref="v4", + source_file="test.yml", + ) + + workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + jobs={}, + actions_used=[action_ref], + ) + + action = ActionManifest( + name="Checkout", + description="Checkout code", + inputs={ + "token": ActionInput( + name="token", + description="GitHub token", + required=False, + ), + }, + ) + + renderer.render_markdown({"test.yml": workflow}, {"actions/checkout@v4": action}, [], {}) + + report_file = tmp_path / "report.md" + content = report_file.read_text() + + # Should have details tags + assert "
<details>" in content + assert "Inputs" in content + assert "</details>
" in content + + +def test_render_markdown_with_job_action_links(tmp_path: Path) -> None: + """Test that job actions are linked to inventory.""" + from ghaw_auditor.models import ActionRef, ActionType + + renderer = Renderer(tmp_path) + + action_ref = ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="checkout", + ref="v4", + source_file="test.yml", + ) + + job = JobMeta( + name="test", + runs_on="ubuntu-latest", + actions_used=[action_ref], + ) + + workflow = WorkflowMeta( + name="CI", + path="ci.yml", + triggers=["push"], + jobs={"test": job}, + actions_used=[action_ref], + ) + + action = ActionManifest( + name="Checkout", + description="Checkout code", + ) + + renderer.render_markdown({"ci.yml": workflow}, {"actions/checkout@v4": action}, [], {}) + + report_file = tmp_path / "report.md" + content = report_file.read_text() + + # Should have action link in jobs section + assert "Actions used:" in content + assert "[actions/checkout](#actions-checkout) (GitHub)" in content + + +def test_create_action_anchor() -> None: + """Test anchor creation from action keys.""" + # GitHub action + assert Renderer._create_action_anchor("actions/checkout@abc123") == "actions-checkout" + + # Local action + assert Renderer._create_action_anchor("local:./sync-labels") == "local-sync-labels" + + # Docker action + assert Renderer._create_action_anchor("docker://alpine:3.8") == "docker-alpine-3-8" + + # Long SHA + assert ( + Renderer._create_action_anchor("actions/setup-node@1234567890abcdef1234567890abcdef12345678") + == "actions-setup-node" + ) + + +def test_get_action_repo_url() -> None: + """Test repository URL generation.""" + from ghaw_auditor.models import ActionRef, ActionType + + # GitHub action + github_action = ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="checkout", + ref="v4", + source_file="test.yml", + ) + assert Renderer._get_action_repo_url(github_action) == "https://github.com/actions/checkout" + + # Local action (no URL) + local_action = 
ActionRef( + type=ActionType.LOCAL, + path="./my-action", + source_file="test.yml", + ) + assert Renderer._get_action_repo_url(local_action) is None + + # Docker action (no URL) + docker_action = ActionRef( + type=ActionType.DOCKER, + path="docker://alpine:3.8", + source_file="test.yml", + ) + assert Renderer._get_action_repo_url(docker_action) is None + + +def test_render_markdown_with_docker_action(tmp_path: Path) -> None: + """Test Markdown rendering with Docker action in jobs.""" + from ghaw_auditor.models import ActionRef, ActionType + + renderer = Renderer(tmp_path) + + docker_action = ActionRef( + type=ActionType.DOCKER, + path="docker://alpine:3.8", + source_file="test.yml", + ) + + job = JobMeta( + name="test", + runs_on="ubuntu-latest", + actions_used=[docker_action], + ) + + workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + jobs={"test": job}, + ) + + renderer.render_markdown({"test.yml": workflow}, {}, [], {}) + + report_file = tmp_path / "report.md" + content = report_file.read_text() + + # Should show Docker action with correct type label + assert "Actions used:" in content + assert "(Docker)" in content + assert "docker://alpine:3.8" in content + + +def test_render_markdown_with_reusable_workflow(tmp_path: Path) -> None: + """Test Markdown rendering with reusable workflow in jobs.""" + from ghaw_auditor.models import ActionRef, ActionType + + renderer = Renderer(tmp_path) + + reusable_wf = ActionRef( + type=ActionType.REUSABLE_WORKFLOW, + owner="org", + repo="workflows", + path=".github/workflows/reusable.yml", + ref="main", + source_file="test.yml", + ) + + job = JobMeta( + name="test", + runs_on="ubuntu-latest", + actions_used=[reusable_wf], + ) + + workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + jobs={"test": job}, + ) + + renderer.render_markdown({"test.yml": workflow}, {}, [], {}) + + report_file = tmp_path / "report.md" + content = report_file.read_text() + + # Should show 
reusable workflow with correct type label + assert "Actions used:" in content + assert "(Reusable Workflow)" in content + assert ".github/workflows/reusable.yml" in content + + +def test_render_markdown_with_docker_action_in_inventory(tmp_path: Path) -> None: + """Test Markdown rendering with Docker action in inventory.""" + from ghaw_auditor.models import ActionRef, ActionType + + renderer = Renderer(tmp_path) + + docker_action_ref = ActionRef( + type=ActionType.DOCKER, + path="docker://node:18-alpine", + source_file="test.yml", + ) + + workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + jobs={}, + actions_used=[docker_action_ref], + ) + + action_manifest = ActionManifest( + name="Node Alpine", + description="Node.js on Alpine Linux", + ) + + renderer.render_markdown({"test.yml": workflow}, {"docker:docker://node:18-alpine": action_manifest}, [], {}) + + report_file = tmp_path / "report.md" + content = report_file.read_text() + + # Docker actions shouldn't have repository links or Local Action type + assert "**Repository:**" not in content or "node:18-alpine" not in content + assert "Node Alpine" in content + + +def test_render_markdown_with_local_action_without_path(tmp_path: Path) -> None: + """Test Markdown rendering with LOCAL action that has no path.""" + from ghaw_auditor.models import ActionRef, ActionType + + renderer = Renderer(tmp_path) + + local_action = ActionRef( + type=ActionType.LOCAL, + path=None, + source_file="test.yml", + ) + + job = JobMeta( + name="test", + runs_on="ubuntu-latest", + actions_used=[local_action], + ) + + workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + jobs={"test": job}, + ) + + renderer.render_markdown({"test.yml": workflow}, {}, [], {}) + + report_file = tmp_path / "report.md" + content = report_file.read_text() + + # Should show "local" as display name when path is None + assert "Actions used:" in content + assert "[local](#local-none) (Local)" in content + + 
+def test_render_markdown_with_local_action_in_inventory(tmp_path: Path) -> None: + """Test Markdown rendering with LOCAL action in inventory showing Type label.""" + from ghaw_auditor.models import ActionRef, ActionType + + renderer = Renderer(tmp_path) + + local_action_ref = ActionRef( + type=ActionType.LOCAL, + path="./my-custom-action", + source_file="test.yml", + ) + + workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + jobs={}, + actions_used=[local_action_ref], + ) + + action_manifest = ActionManifest( + name="My Custom Action", + description="A custom local action", + ) + + renderer.render_markdown({"test.yml": workflow}, {"local:./my-custom-action": action_manifest}, [], {}) + + report_file = tmp_path / "report.md" + content = report_file.read_text() + + # Local actions should have "Type: Local Action" label + assert "**Type:** Local Action" in content + assert "My Custom Action" in content + + +def test_render_markdown_with_job_permissions(tmp_path: Path) -> None: + """Test Markdown rendering with job permissions.""" + from ghaw_auditor.models import PermissionLevel, Permissions + + renderer = Renderer(tmp_path) + + job = JobMeta( + name="test", + runs_on="ubuntu-latest", + permissions=Permissions( + contents=PermissionLevel.READ, + issues=PermissionLevel.WRITE, + security_events=PermissionLevel.WRITE, + ), + ) + + workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + jobs={"test": job}, + ) + + renderer.render_markdown({"test.yml": workflow}, {}, [], {}) + + report_file = tmp_path / "report.md" + content = report_file.read_text() + + # Should show permissions + assert "Permissions:" in content + assert "`contents`: read" in content + assert "`issues`: write" in content + assert "`security-events`: write" in content + + +def test_render_markdown_without_job_permissions(tmp_path: Path) -> None: + """Test Markdown rendering with job that has no permissions set.""" + renderer = Renderer(tmp_path) + + 
job = JobMeta( + name="test", + runs_on="ubuntu-latest", + permissions=None, + ) + + workflow = WorkflowMeta( + name="Test", + path="test.yml", + triggers=["push"], + jobs={"test": job}, + ) + + renderer.render_markdown({"test.yml": workflow}, {}, [], {}) + + report_file = tmp_path / "report.md" + content = report_file.read_text() + + # Should not show permissions section + assert "Permissions:" not in content + + +def test_render_markdown_with_workflows_using_action(tmp_path: Path) -> None: + """Test that actions show which workflows use them.""" + from ghaw_auditor.models import ActionRef, ActionType + + renderer = Renderer(tmp_path) + + # Create an action reference + action_ref = ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="checkout", + ref="v4", + source_file=".github/workflows/ci.yml", + ) + + # Create two workflows that use the same action + workflow1 = WorkflowMeta( + name="CI Workflow", + path=".github/workflows/ci.yml", + triggers=["push"], + actions_used=[action_ref], + ) + + workflow2 = WorkflowMeta( + name="Deploy Workflow", + path=".github/workflows/deploy.yml", + triggers=["push"], + actions_used=[action_ref], + ) + + # Create the action manifest + action = ActionManifest( + name="Checkout", + description="Checkout repository", + ) + + workflows = { + ".github/workflows/ci.yml": workflow1, + ".github/workflows/deploy.yml": workflow2, + } + actions = {"actions/checkout@v4": action} + + renderer.render_markdown(workflows, actions, [], {}) + + report_file = tmp_path / "report.md" + content = report_file.read_text() + + # Should show "Used in Workflows" section + assert "Used in Workflows" in content + assert "CI Workflow" in content + assert "Deploy Workflow" in content + assert ".github/workflows/ci.yml" in content + assert ".github/workflows/deploy.yml" in content + # Should have links to workflow sections + assert "[CI Workflow](#ci-workflow)" in content + assert "[Deploy Workflow](#deploy-workflow)" in content diff --git 
a/tests/test_resolver.py b/tests/test_resolver.py new file mode 100644 index 0000000..c1cb440 --- /dev/null +++ b/tests/test_resolver.py @@ -0,0 +1,531 @@ +"""Tests for resolver with mocked API.""" + +from pathlib import Path +from unittest.mock import Mock, patch + +import pytest + +from ghaw_auditor.cache import Cache +from ghaw_auditor.github_client import GitHubClient +from ghaw_auditor.models import ActionRef, ActionType +from ghaw_auditor.resolver import Resolver + + +@pytest.fixture +def mock_github_client() -> Mock: + """Create mock GitHub client.""" + client = Mock(spec=GitHubClient) + client.get_ref_sha.return_value = "abc123def456" + client.get_file_content.return_value = """ +name: Test Action +description: A test action +runs: + using: node20 + main: index.js +""" + return client + + +@pytest.fixture +def temp_cache(tmp_path: Path) -> Cache: + """Create temporary cache.""" + return Cache(tmp_path / "cache") + + +def test_resolver_initialization(mock_github_client: Mock, temp_cache: Cache, tmp_path: Path) -> None: + """Test resolver initialization.""" + resolver = Resolver(mock_github_client, temp_cache, tmp_path) + assert resolver.github_client == mock_github_client + assert resolver.cache == temp_cache + assert resolver.repo_path == tmp_path + + +def test_resolve_github_action(mock_github_client: Mock, temp_cache: Cache, tmp_path: Path) -> None: + """Test resolving GitHub action.""" + resolver = Resolver(mock_github_client, temp_cache, tmp_path) + + action = ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="checkout", + ref="v4", + source_file="test.yml", + ) + + key, manifest = resolver._resolve_github_action(action) + + assert key == "actions/checkout@abc123def456" + assert manifest is not None + assert manifest.name == "Test Action" + assert action.resolved_sha == "abc123def456" + + +def test_resolve_local_action(mock_github_client: Mock, temp_cache: Cache, tmp_path: Path) -> None: + """Test resolving local action.""" + # Create local 
action + action_dir = tmp_path / ".github" / "actions" / "custom" + action_dir.mkdir(parents=True) + action_file = action_dir / "action.yml" + + # Write valid composite action YAML + action_file.write_text( + """name: Custom Action +description: Local action +runs: + using: composite + steps: + - name: Test step + run: echo test + shell: bash +""" + ) + + resolver = Resolver(mock_github_client, temp_cache, tmp_path) + + action = ActionRef( + type=ActionType.LOCAL, + path="./.github/actions/custom", # With leading ./ + source_file="test.yml", + ) + + key, manifest = resolver._resolve_local_action(action) + + assert key == "local:./.github/actions/custom" + assert manifest is not None + assert manifest.name == "Custom Action" + assert manifest.is_composite is True + + +def test_resolve_docker_action(mock_github_client: Mock, temp_cache: Cache, tmp_path: Path) -> None: + """Test resolving Docker action.""" + resolver = Resolver(mock_github_client, temp_cache, tmp_path) + + action = ActionRef( + type=ActionType.DOCKER, + path="docker://alpine:3.8", + source_file="test.yml", + ) + + key, manifest = resolver._resolve_action(action) + + assert key == "docker:docker://alpine:3.8" + assert manifest is None # Docker actions don't have manifests + + +def test_resolve_actions_parallel(mock_github_client: Mock, temp_cache: Cache, tmp_path: Path) -> None: + """Test parallel action resolution.""" + resolver = Resolver(mock_github_client, temp_cache, tmp_path, concurrency=2) + + actions = [ + ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="checkout", + ref="v4", + source_file="test.yml", + ), + ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="setup-node", + ref="v4", + source_file="test.yml", + ), + ] + + resolved = resolver.resolve_actions(actions) + + assert len(resolved) == 2 + assert mock_github_client.get_ref_sha.call_count == 2 + + +def test_resolve_action_with_cache(mock_github_client: Mock, temp_cache: Cache, tmp_path: Path) -> None: + 
"""Test action resolution with caching.""" + resolver = Resolver(mock_github_client, temp_cache, tmp_path) + + action = ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="checkout", + ref="v4", + source_file="test.yml", + ) + + # First call + key1, manifest1 = resolver._resolve_github_action(action) + + # Reset mock + mock_github_client.reset_mock() + + # Second call should use cache + key2, manifest2 = resolver._resolve_github_action(action) + + assert key1 == key2 + # Cache should reduce API calls + assert mock_github_client.get_ref_sha.call_count <= 1 + + +def test_resolve_action_api_error(mock_github_client: Mock, temp_cache: Cache, tmp_path: Path) -> None: + """Test handling API errors.""" + mock_github_client.get_ref_sha.side_effect = Exception("API Error") + + resolver = Resolver(mock_github_client, temp_cache, tmp_path) + + action = ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="checkout", + ref="v4", + source_file="test.yml", + ) + + key, manifest = resolver._resolve_github_action(action) + + assert key == "" + assert manifest is None + + +def test_resolve_monorepo_action(mock_github_client: Mock, temp_cache: Cache, tmp_path: Path) -> None: + """Test resolving monorepo action with path.""" + resolver = Resolver(mock_github_client, temp_cache, tmp_path) + + action = ActionRef( + type=ActionType.GITHUB, + owner="owner", + repo="repo", + path="subdir/action", + ref="v1", + source_file="test.yml", + ) + + key, manifest = resolver._resolve_github_action(action) + + # Should try to fetch subdir/action/action.yml + mock_github_client.get_file_content.assert_called_with("owner", "repo", "subdir/action/action.yml", "abc123def456") + + +def test_resolve_action_unknown_type(mock_github_client: Mock, temp_cache: Cache, tmp_path: Path) -> None: + """Test resolving action with unknown type returns empty.""" + from ghaw_auditor.models import ActionType + + resolver = Resolver(mock_github_client, temp_cache, tmp_path) + + # Create action with 
REUSABLE_WORKFLOW type (not handled by resolver) + action = ActionRef( + type=ActionType.REUSABLE_WORKFLOW, + owner="owner", + repo="repo", + path=".github/workflows/test.yml", + ref="v1", + source_file="test.yml", + ) + + key, manifest = resolver._resolve_action(action) + + assert key == "" + assert manifest is None + + +def test_resolve_local_action_no_path(mock_github_client: Mock, temp_cache: Cache, tmp_path: Path) -> None: + """Test resolving local action without path.""" + resolver = Resolver(mock_github_client, temp_cache, tmp_path) + + action = ActionRef( + type=ActionType.LOCAL, + path=None, + source_file="test.yml", + ) + + key, manifest = resolver._resolve_local_action(action) + + assert key == "" + assert manifest is None + + +def test_resolve_local_action_not_found(mock_github_client: Mock, temp_cache: Cache, tmp_path: Path) -> None: + """Test resolving local action that doesn't exist.""" + resolver = Resolver(mock_github_client, temp_cache, tmp_path) + + action = ActionRef( + type=ActionType.LOCAL, + path="./.github/actions/nonexistent", + source_file="test.yml", + ) + + key, manifest = resolver._resolve_local_action(action) + + assert key == "" + assert manifest is None + + +def test_resolve_local_action_invalid_yaml(mock_github_client: Mock, temp_cache: Cache, tmp_path: Path) -> None: + """Test resolving local action with invalid YAML.""" + action_dir = tmp_path / ".github" / "actions" / "broken" + action_dir.mkdir(parents=True) + action_file = action_dir / "action.yml" + action_file.write_text("invalid: yaml: content: {{{") + + resolver = Resolver(mock_github_client, temp_cache, tmp_path) + + action = ActionRef( + type=ActionType.LOCAL, + path="./.github/actions/broken", + source_file="test.yml", + ) + + key, manifest = resolver._resolve_local_action(action) + + # Should handle parse error gracefully + assert key == "" + assert manifest is None + + +def test_resolve_github_action_missing_fields(mock_github_client: Mock, temp_cache: Cache, tmp_path: 
Path) -> None: + """Test resolving GitHub action with missing required fields.""" + resolver = Resolver(mock_github_client, temp_cache, tmp_path) + + # Missing owner + action = ActionRef( + type=ActionType.GITHUB, + owner=None, + repo="checkout", + ref="v4", + source_file="test.yml", + ) + + key, manifest = resolver._resolve_github_action(action) + + assert key == "" + assert manifest is None + + +def test_resolve_github_action_manifest_not_found(mock_github_client: Mock, temp_cache: Cache, tmp_path: Path) -> None: + """Test resolving GitHub action when manifest cannot be fetched.""" + # Setup mock to fail fetching manifest + mock_github_client.get_ref_sha.return_value = "abc123" + mock_github_client.get_file_content.side_effect = Exception("404 Not Found") + + resolver = Resolver(mock_github_client, temp_cache, tmp_path) + + action = ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="missing", + ref="v1", + source_file="test.yml", + ) + + key, manifest = resolver._resolve_github_action(action) + + # Should return key but no manifest + assert "actions/missing@abc123" in key + assert manifest is None + + +def test_resolve_monorepo_action_manifest_not_found( + mock_github_client: Mock, temp_cache: Cache, tmp_path: Path, caplog: pytest.LogCaptureFixture +) -> None: + """Test resolving monorepo action when manifest cannot be fetched.""" + import logging + + # Setup mock to fail fetching manifest for both .yml and .yaml + mock_github_client.get_ref_sha.return_value = "abc123" + mock_github_client.get_file_content.side_effect = Exception("404 Not Found") + + resolver = Resolver(mock_github_client, temp_cache, tmp_path) + + action = ActionRef( + type=ActionType.GITHUB, + owner="owner", + repo="repo", + path="subdir/action", + ref="v1", + source_file="test.yml", + ) + + with caplog.at_level(logging.ERROR): + key, manifest = resolver._resolve_github_action(action) + + # Should return key but no manifest + assert "owner/repo@abc123" in key + assert manifest is 
None + # Should log error with path + assert "owner/repo/subdir/action" in caplog.text + assert "(tried action.yml and action.yaml)" in caplog.text + + +def test_resolve_github_action_invalid_manifest(mock_github_client: Mock, temp_cache: Cache, tmp_path: Path) -> None: + """Test resolving GitHub action with invalid manifest content.""" + # Setup mock to return invalid YAML + mock_github_client.get_ref_sha.return_value = "abc123" + mock_github_client.get_file_content.return_value = "invalid: yaml: {{{: bad" + + resolver = Resolver(mock_github_client, temp_cache, tmp_path) + + action = ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="broken", + ref="v1", + source_file="test.yml", + ) + + key, manifest = resolver._resolve_github_action(action) + + # Should handle parse error gracefully + assert key == "" + assert manifest is None + + +def test_resolve_actions_with_exception(mock_github_client: Mock, temp_cache: Cache, tmp_path: Path) -> None: + """Test parallel resolution handles exceptions gracefully.""" + + # Setup one action to succeed, one to fail + def side_effect_get_ref(owner: str, repo: str, ref: str) -> str: + if repo == "fail": + raise Exception("API Error") + return "abc123" + + mock_github_client.get_ref_sha.side_effect = side_effect_get_ref + + resolver = Resolver(mock_github_client, temp_cache, tmp_path, concurrency=2) + + actions = [ + ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="checkout", + ref="v4", + source_file="test.yml", + ), + ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="fail", + ref="v1", + source_file="test.yml", + ), + ] + + resolved = resolver.resolve_actions(actions) + + # Should only resolve the successful one + assert len(resolved) == 1 + assert "actions/checkout" in list(resolved.keys())[0] + + +def test_resolve_actions_logs_exception(mock_github_client: Mock, temp_cache: Cache, tmp_path: Path) -> None: + """Test that exceptions during resolution are logged.""" + resolver = 
Resolver(mock_github_client, temp_cache, tmp_path) + + # Patch _resolve_action to raise an exception directly + # This will propagate to future.result() and trigger the exception handler + with patch.object(resolver, "_resolve_action", side_effect=RuntimeError("Unexpected error")): + actions = [ + ActionRef( + type=ActionType.GITHUB, + owner="actions", + repo="broken", + ref="v1", + source_file="test.yml", + ), + ] + + resolved = resolver.resolve_actions(actions) + + # Should handle exception gracefully and log error + assert len(resolved) == 0 + + +def test_resolve_local_action_file_path_parse_error( + mock_github_client: Mock, temp_cache: Cache, tmp_path: Path +) -> None: + """Test resolving local action when file path parsing fails.""" + # Create a directory with invalid action.yml + action_dir = tmp_path / "my-action" + action_dir.mkdir() + action_file = action_dir / "action.yml" + action_file.write_text("invalid: yaml: content: {{{") + + resolver = Resolver(mock_github_client, temp_cache, tmp_path) + + # Reference a file that starts with "action." 
so parent = action_path.parent + # This triggers the else branch where we look in parent directory + action = ActionRef( + type=ActionType.LOCAL, + path="./my-action/action.custom.yml", + source_file="test.yml", + ) + + key, manifest = resolver._resolve_local_action(action) + + # Should handle parse error in file path branch (else branch) + # The code will look in parent (my-action/) for action.yml and fail to parse + assert key == "" + assert manifest is None + + +def test_resolve_action_local_type(mock_github_client: Mock, temp_cache: Cache, tmp_path: Path) -> None: + """Test _resolve_action with LOCAL action type.""" + # Create valid local action + action_dir = tmp_path / "my-action" + action_dir.mkdir() + action_file = action_dir / "action.yml" + action_file.write_text(""" +name: My Action +description: Test action +runs: + using: composite + steps: + - run: echo test + shell: bash +""") + + resolver = Resolver(mock_github_client, temp_cache, tmp_path) + + action = ActionRef( + type=ActionType.LOCAL, + path="./my-action", + source_file="test.yml", + ) + + # Call _resolve_action to hit the LOCAL branch + key, manifest = resolver._resolve_action(action) + + assert key == "local:./my-action" + assert manifest is not None + assert manifest.name == "My Action" + + +def test_resolve_local_action_file_path_success(mock_github_client: Mock, temp_cache: Cache, tmp_path: Path) -> None: + """Test resolving local action via file path (else branch) with valid YAML.""" + # Create a directory with valid action.yml + action_dir = tmp_path / "my-action" + action_dir.mkdir() + action_file = action_dir / "action.yml" + action_file.write_text(""" +name: File Path Action +description: Test action via file path +runs: + using: composite + steps: + - run: echo test + shell: bash +""") + + resolver = Resolver(mock_github_client, temp_cache, tmp_path) + + # Reference a file that starts with "action." 
to trigger else branch + # with parent = action_path.parent + action = ActionRef( + type=ActionType.LOCAL, + path="./my-action/action.yml", + source_file="test.yml", + ) + + key, manifest = resolver._resolve_local_action(action) + + # Should successfully parse from parent directory + assert key == "local:./my-action/action.yml" + assert manifest is not None + assert manifest.name == "File Path Action" diff --git a/tests/test_scanner.py b/tests/test_scanner.py new file mode 100644 index 0000000..bc9984c --- /dev/null +++ b/tests/test_scanner.py @@ -0,0 +1,205 @@ +"""Tests for scanner module.""" + +from pathlib import Path + +from ghaw_auditor.scanner import Scanner + + +def test_scanner_initialization() -> None: + """Test scanner can be initialized.""" + scanner = Scanner(".") + assert scanner.repo_path.exists() + + +def test_scanner_initialization_with_exclusions() -> None: + """Test scanner initialization with exclusion patterns.""" + scanner = Scanner(".", exclude_patterns=["**/node_modules/**", "**/dist/**"]) + assert len(scanner.exclude_patterns) == 2 + assert "**/node_modules/**" in scanner.exclude_patterns + + +def test_scanner_should_exclude(tmp_path: Path) -> None: + """Test exclusion pattern matching.""" + # Note: glob patterns need to match the full path including files + scanner = Scanner(tmp_path, exclude_patterns=["node_modules/**/*", ".git/**/*"]) + + # Create test directories and files + node_modules_path = tmp_path / "node_modules" / "test" / "action.yml" + node_modules_path.parent.mkdir(parents=True) + node_modules_path.touch() + + git_path = tmp_path / ".git" / "hooks" / "pre-commit" + git_path.parent.mkdir(parents=True) + git_path.touch() + + valid_path = tmp_path / ".github" / "actions" / "test" / "action.yml" + valid_path.parent.mkdir(parents=True) + valid_path.touch() + + # Test exclusions + assert scanner._should_exclude(node_modules_path) is True + assert scanner._should_exclude(git_path) is True + assert scanner._should_exclude(valid_path) 
is False + + +def test_find_workflows_empty_dir(tmp_path: Path) -> None: + """Test finding workflows in empty directory.""" + scanner = Scanner(tmp_path) + workflows = scanner.find_workflows() + assert len(workflows) == 0 + + +def test_find_workflows_with_files(tmp_path: Path) -> None: + """Test finding workflow files.""" + # Create workflow directory + workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + + # Create workflow files + (workflows_dir / "ci.yml").write_text("name: CI\non: push") + (workflows_dir / "release.yaml").write_text("name: Release\non: push") + (workflows_dir / "README.md").write_text("# Workflows") # Should be ignored + + scanner = Scanner(tmp_path) + workflows = scanner.find_workflows() + + assert len(workflows) == 2 + assert workflows[0].name == "ci.yml" + assert workflows[1].name == "release.yaml" + + +def test_find_workflows_with_exclusions(tmp_path: Path) -> None: + """Test finding workflows with exclusion patterns.""" + # Create workflow directory + workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + + # Create workflow files + (workflows_dir / "ci.yml").write_text("name: CI") + (workflows_dir / "test.yml").write_text("name: Test") + + scanner = Scanner(tmp_path, exclude_patterns=["**test.yml"]) + workflows = scanner.find_workflows() + + assert len(workflows) == 1 + assert workflows[0].name == "ci.yml" + + +def test_find_actions_empty_dir(tmp_path: Path) -> None: + """Test finding actions in empty directory.""" + scanner = Scanner(tmp_path) + actions = scanner.find_actions() + assert len(actions) == 0 + + +def test_find_actions_in_github_directory(tmp_path: Path) -> None: + """Test finding actions in .github/actions directory.""" + # Create actions directory + actions_dir = tmp_path / ".github" / "actions" + + # Create multiple actions + action1_dir = actions_dir / "action1" + action1_dir.mkdir(parents=True) + (action1_dir / "action.yml").write_text("name: Action 
1") + + action2_dir = actions_dir / "action2" + action2_dir.mkdir(parents=True) + (action2_dir / "action.yaml").write_text("name: Action 2") + + # Create nested action + nested_dir = actions_dir / "group" / "nested" + nested_dir.mkdir(parents=True) + (nested_dir / "action.yml").write_text("name: Nested Action") + + scanner = Scanner(tmp_path) + actions = scanner.find_actions() + + assert len(actions) == 3 + assert any("action1" in str(a) for a in actions) + assert any("action2" in str(a) for a in actions) + assert any("nested" in str(a) for a in actions) + + +def test_find_actions_in_root(tmp_path: Path) -> None: + """Test finding action in root directory.""" + # Create action in root + (tmp_path / "action.yml").write_text("name: Root Action") + + scanner = Scanner(tmp_path) + actions = scanner.find_actions() + + assert len(actions) == 1 + assert actions[0].name == "action.yml" + + +def test_find_actions_excludes_workflows_dir(tmp_path: Path) -> None: + """Test that actions in workflows directory are excluded.""" + # Create workflow directory with action file (should be ignored) + workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + (workflows_dir / "action.yml").write_text("name: Not an action") + + # Create real action + actions_dir = tmp_path / ".github" / "actions" / "real" + actions_dir.mkdir(parents=True) + (actions_dir / "action.yml").write_text("name: Real Action") + + scanner = Scanner(tmp_path) + actions = scanner.find_actions() + + # Should only find the action in .github/actions, not in workflows + assert len(actions) == 1 + assert "actions/real" in str(actions[0]) + + +def test_find_actions_with_exclusions(tmp_path: Path) -> None: + """Test finding actions with exclusion patterns.""" + # Create actions + actions_dir = tmp_path / ".github" / "actions" + + action1_dir = actions_dir / "include-me" + action1_dir.mkdir(parents=True) + (action1_dir / "action.yml").write_text("name: Include") + + action2_dir = actions_dir / 
"exclude-me" + action2_dir.mkdir(parents=True) + (action2_dir / "action.yml").write_text("name: Exclude") + + scanner = Scanner(tmp_path, exclude_patterns=["**/exclude-me/**"]) + actions = scanner.find_actions() + + assert len(actions) == 1 + assert "include-me" in str(actions[0]) + + +def test_find_actions_deduplication(tmp_path: Path) -> None: + """Test that duplicate actions are not included.""" + # Create action in .github/actions + actions_dir = tmp_path / ".github" / "actions" / "my-action" + actions_dir.mkdir(parents=True) + action_file = actions_dir / "action.yml" + action_file.write_text("name: My Action") + + scanner = Scanner(tmp_path) + actions = scanner.find_actions() + + # Should find it exactly once + assert len(actions) == 1 + assert actions[0] == action_file + + +def test_find_actions_monorepo_structure(tmp_path: Path) -> None: + """Test finding actions in monorepo with multiple root-level action directories.""" + # Create monorepo structure: ./action1/, ./action2/, etc. 
+ for name in ["sync-labels", "deploy-action", "test-action"]: + action_dir = tmp_path / name + action_dir.mkdir() + (action_dir / "action.yml").write_text(f"name: {name}\ndescription: Test action") + + scanner = Scanner(tmp_path) + actions = scanner.find_actions() + + assert len(actions) == 3 + assert any("sync-labels" in str(a) for a in actions) + assert any("deploy-action" in str(a) for a in actions) + assert any("test-action" in str(a) for a in actions) diff --git a/tests/test_services.py b/tests/test_services.py new file mode 100644 index 0000000..4b3820c --- /dev/null +++ b/tests/test_services.py @@ -0,0 +1,227 @@ +"""Tests for service layer.""" + +from pathlib import Path +from unittest.mock import Mock + +import pytest + +from ghaw_auditor.analyzer import Analyzer +from ghaw_auditor.differ import Differ +from ghaw_auditor.models import ( + ActionManifest, + Policy, + WorkflowMeta, +) +from ghaw_auditor.parser import Parser +from ghaw_auditor.policy import PolicyValidator +from ghaw_auditor.scanner import Scanner +from ghaw_auditor.services import AuditService, DiffService + + +def test_audit_service_scan_basic(tmp_path: Path) -> None: + """Test basic scan without workflows.""" + scanner = Scanner(tmp_path) + parser = Parser(tmp_path) + analyzer = Analyzer() + + service = AuditService(scanner, parser, analyzer) + result = service.scan(offline=True) + + assert result.workflow_count == 0 + assert result.action_count == 0 + assert result.unique_action_count == 0 + assert len(result.workflows) == 0 + assert len(result.actions) == 0 + assert len(result.violations) == 0 + + +def test_audit_service_scan_with_workflow(tmp_path: Path) -> None: + """Test scan with a simple workflow.""" + # Create test workflow + workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + (workflows_dir / "ci.yml").write_text( + """ +name: CI +on: push +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 +""" + ) + + scanner = 
Scanner(tmp_path) + parser = Parser(tmp_path) + analyzer = Analyzer() + + service = AuditService(scanner, parser, analyzer) + result = service.scan(offline=True) + + assert result.workflow_count == 1 + assert len(result.workflows) == 1 + assert ".github/workflows/ci.yml" in result.workflows + assert result.unique_action_count == 1 + + +def test_audit_service_scan_with_policy_violations(tmp_path: Path) -> None: + """Test scan with policy violations.""" + # Create workflow with branch ref (violates pinning policy) + workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + (workflows_dir / "ci.yml").write_text( + """ +name: CI +on: push +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@main +""" + ) + + scanner = Scanner(tmp_path) + parser = Parser(tmp_path) + analyzer = Analyzer() + policy = Policy(require_pinned_actions=True) + validator = PolicyValidator(policy) + + service = AuditService(scanner, parser, analyzer, validator=validator) + result = service.scan(offline=True) + + assert len(result.violations) > 0 + assert any("pinned" in v["message"].lower() for v in result.violations) + + +def test_audit_service_scan_parse_error(tmp_path: Path) -> None: + """Test scan handles parse errors gracefully.""" + # Create invalid workflow + workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + (workflows_dir / "invalid.yml").write_text("invalid: yaml: {{{") + + scanner = Scanner(tmp_path) + parser = Parser(tmp_path) + analyzer = Analyzer() + + service = AuditService(scanner, parser, analyzer) + result = service.scan(offline=True) + + # Should continue despite parse error + assert result.workflow_count == 1 + assert len(result.workflows) == 0 # Workflow not parsed + + +def test_audit_service_scan_with_resolver(tmp_path: Path) -> None: + """Test scan with resolver (mocked).""" + workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + (workflows_dir / 
"ci.yml").write_text( + """ +name: CI +on: push +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 +""" + ) + + scanner = Scanner(tmp_path) + parser = Parser(tmp_path) + analyzer = Analyzer() + + # Mock resolver + mock_resolver = Mock() + mock_resolver.resolve_actions.return_value = { + "actions/checkout@abc123": ActionManifest( + name="Checkout", + description="Checkout code", + ) + } + + service = AuditService(scanner, parser, analyzer, resolver=mock_resolver) + result = service.scan(offline=False) + + # Should call resolver + assert mock_resolver.resolve_actions.called + assert len(result.actions) == 1 + + +def test_audit_service_scan_analysis(tmp_path: Path) -> None: + """Test that scan includes analysis.""" + workflows_dir = tmp_path / ".github" / "workflows" + workflows_dir.mkdir(parents=True) + (workflows_dir / "ci.yml").write_text( + """ +name: CI +on: + - push + - pull_request +jobs: + test: + runs-on: ubuntu-latest + steps: + - run: echo test +""" + ) + + scanner = Scanner(tmp_path) + parser = Parser(tmp_path) + analyzer = Analyzer() + + service = AuditService(scanner, parser, analyzer) + result = service.scan(offline=True) + + # Check analysis + assert "total_workflows" in result.analysis + assert result.analysis["total_workflows"] == 1 + assert "triggers" in result.analysis + assert "push" in result.analysis["triggers"] + assert "pull_request" in result.analysis["triggers"] + + +def test_diff_service_compare(tmp_path: Path) -> None: + """Test diff service comparison.""" + baseline_dir = tmp_path / "baseline" + baseline_dir.mkdir() + + # Create baseline + differ = Differ(baseline_dir) + old_workflow = WorkflowMeta( + name="Old", + path="test.yml", + triggers=["push"], + jobs={}, + ) + differ.save_baseline({"test.yml": old_workflow}, {}) + + # Create diff service + diff_service = DiffService(differ) + + # New workflow + new_workflow = WorkflowMeta( + name="New", + path="test.yml", + triggers=["push", "pull_request"], + jobs={}, 
+ ) + + workflow_diffs, action_diffs = diff_service.compare({"test.yml": new_workflow}, {}) + + assert len(workflow_diffs) == 1 + assert workflow_diffs[0].status == "modified" + + +def test_diff_service_compare_no_baseline(tmp_path: Path) -> None: + """Test diff service with missing baseline.""" + baseline_dir = tmp_path / "nonexistent" + + differ = Differ(baseline_dir) + diff_service = DiffService(differ) + + with pytest.raises(FileNotFoundError): + diff_service.compare({}, {}) diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000..c7b0884 --- /dev/null +++ b/uv.lock @@ -0,0 +1,1079 @@ +version = 1 +revision = 2 +requires-python = ">=3.11" +resolution-markers = [ + "platform_python_implementation != 'PyPy'", + "platform_python_implementation == 'PyPy'", +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = 
"2025-09-23T09:19:12.58Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, +] + +[[package]] +name = "certifi" +version = "2025.8.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, +] + +[[package]] +name = "click" +version = "8.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "coverage" +version = "7.10.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/51/26/d22c300112504f5f9a9fd2297ce33c35f3d353e4aeb987c8419453b2a7c2/coverage-7.10.7.tar.gz", hash = "sha256:f4ab143ab113be368a3e9b795f9cd7906c5ef407d6173fe9675a902e1fffc239", size = 827704, upload-time = "2025-09-21T20:03:56.815Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/5d/c1a17867b0456f2e9ce2d8d4708a4c3a089947d0bec9c66cdf60c9e7739f/coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a609f9c93113be646f44c2a0256d6ea375ad047005d7f57a5c15f614dc1b2f59", size = 218102, upload-time = "2025-09-21T20:01:16.089Z" }, + { url = "https://files.pythonhosted.org/packages/54/f0/514dcf4b4e3698b9a9077f084429681bf3aad2b4a72578f89d7f643eb506/coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:65646bb0359386e07639c367a22cf9b5bf6304e8630b565d0626e2bdf329227a", size = 218505, upload-time = "2025-09-21T20:01:17.788Z" }, + { url = "https://files.pythonhosted.org/packages/20/f6/9626b81d17e2a4b25c63ac1b425ff307ecdeef03d67c9a147673ae40dc36/coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5f33166f0dfcce728191f520bd2692914ec70fac2713f6bf3ce59c3deacb4699", size = 248898, upload-time = "2025-09-21T20:01:19.488Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/ef/bd8e719c2f7417ba03239052e099b76ea1130ac0cbb183ee1fcaa58aaff3/coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:35f5e3f9e455bb17831876048355dca0f758b6df22f49258cb5a91da23ef437d", size = 250831, upload-time = "2025-09-21T20:01:20.817Z" }, + { url = "https://files.pythonhosted.org/packages/a5/b6/bf054de41ec948b151ae2b79a55c107f5760979538f5fb80c195f2517718/coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da86b6d62a496e908ac2898243920c7992499c1712ff7c2b6d837cc69d9467e", size = 252937, upload-time = "2025-09-21T20:01:22.171Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e5/3860756aa6f9318227443c6ce4ed7bf9e70bb7f1447a0353f45ac5c7974b/coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6b8b09c1fad947c84bbbc95eca841350fad9cbfa5a2d7ca88ac9f8d836c92e23", size = 249021, upload-time = "2025-09-21T20:01:23.907Z" }, + { url = "https://files.pythonhosted.org/packages/26/0f/bd08bd042854f7fd07b45808927ebcce99a7ed0f2f412d11629883517ac2/coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4376538f36b533b46f8971d3a3e63464f2c7905c9800db97361c43a2b14792ab", size = 250626, upload-time = "2025-09-21T20:01:25.721Z" }, + { url = "https://files.pythonhosted.org/packages/8e/a7/4777b14de4abcc2e80c6b1d430f5d51eb18ed1d75fca56cbce5f2db9b36e/coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:121da30abb574f6ce6ae09840dae322bef734480ceafe410117627aa54f76d82", size = 248682, upload-time = "2025-09-21T20:01:27.105Z" }, + { url = "https://files.pythonhosted.org/packages/34/72/17d082b00b53cd45679bad682fac058b87f011fd8b9fe31d77f5f8d3a4e4/coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:88127d40df529336a9836870436fc2751c339fbaed3a836d42c93f3e4bd1d0a2", size = 248402, upload-time = "2025-09-21T20:01:28.629Z" }, + { url = 
"https://files.pythonhosted.org/packages/81/7a/92367572eb5bdd6a84bfa278cc7e97db192f9f45b28c94a9ca1a921c3577/coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ba58bbcd1b72f136080c0bccc2400d66cc6115f3f906c499013d065ac33a4b61", size = 249320, upload-time = "2025-09-21T20:01:30.004Z" }, + { url = "https://files.pythonhosted.org/packages/2f/88/a23cc185f6a805dfc4fdf14a94016835eeb85e22ac3a0e66d5e89acd6462/coverage-7.10.7-cp311-cp311-win32.whl", hash = "sha256:972b9e3a4094b053a4e46832b4bc829fc8a8d347160eb39d03f1690316a99c14", size = 220536, upload-time = "2025-09-21T20:01:32.184Z" }, + { url = "https://files.pythonhosted.org/packages/fe/ef/0b510a399dfca17cec7bc2f05ad8bd78cf55f15c8bc9a73ab20c5c913c2e/coverage-7.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:a7b55a944a7f43892e28ad4bc0561dfd5f0d73e605d1aa5c3c976b52aea121d2", size = 221425, upload-time = "2025-09-21T20:01:33.557Z" }, + { url = "https://files.pythonhosted.org/packages/51/7f/023657f301a276e4ba1850f82749bc136f5a7e8768060c2e5d9744a22951/coverage-7.10.7-cp311-cp311-win_arm64.whl", hash = "sha256:736f227fb490f03c6488f9b6d45855f8e0fd749c007f9303ad30efab0e73c05a", size = 220103, upload-time = "2025-09-21T20:01:34.929Z" }, + { url = "https://files.pythonhosted.org/packages/13/e4/eb12450f71b542a53972d19117ea5a5cea1cab3ac9e31b0b5d498df1bd5a/coverage-7.10.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7bb3b9ddb87ef7725056572368040c32775036472d5a033679d1fa6c8dc08417", size = 218290, upload-time = "2025-09-21T20:01:36.455Z" }, + { url = "https://files.pythonhosted.org/packages/37/66/593f9be12fc19fb36711f19a5371af79a718537204d16ea1d36f16bd78d2/coverage-7.10.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:18afb24843cbc175687225cab1138c95d262337f5473512010e46831aa0c2973", size = 218515, upload-time = "2025-09-21T20:01:37.982Z" }, + { url = 
"https://files.pythonhosted.org/packages/66/80/4c49f7ae09cafdacc73fbc30949ffe77359635c168f4e9ff33c9ebb07838/coverage-7.10.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:399a0b6347bcd3822be369392932884b8216d0944049ae22925631a9b3d4ba4c", size = 250020, upload-time = "2025-09-21T20:01:39.617Z" }, + { url = "https://files.pythonhosted.org/packages/a6/90/a64aaacab3b37a17aaedd83e8000142561a29eb262cede42d94a67f7556b/coverage-7.10.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314f2c326ded3f4b09be11bc282eb2fc861184bc95748ae67b360ac962770be7", size = 252769, upload-time = "2025-09-21T20:01:41.341Z" }, + { url = "https://files.pythonhosted.org/packages/98/2e/2dda59afd6103b342e096f246ebc5f87a3363b5412609946c120f4e7750d/coverage-7.10.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c41e71c9cfb854789dee6fc51e46743a6d138b1803fab6cb860af43265b42ea6", size = 253901, upload-time = "2025-09-21T20:01:43.042Z" }, + { url = "https://files.pythonhosted.org/packages/53/dc/8d8119c9051d50f3119bb4a75f29f1e4a6ab9415cd1fa8bf22fcc3fb3b5f/coverage-7.10.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc01f57ca26269c2c706e838f6422e2a8788e41b3e3c65e2f41148212e57cd59", size = 250413, upload-time = "2025-09-21T20:01:44.469Z" }, + { url = "https://files.pythonhosted.org/packages/98/b3/edaff9c5d79ee4d4b6d3fe046f2b1d799850425695b789d491a64225d493/coverage-7.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a6442c59a8ac8b85812ce33bc4d05bde3fb22321fa8294e2a5b487c3505f611b", size = 251820, upload-time = "2025-09-21T20:01:45.915Z" }, + { url = "https://files.pythonhosted.org/packages/11/25/9a0728564bb05863f7e513e5a594fe5ffef091b325437f5430e8cfb0d530/coverage-7.10.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:78a384e49f46b80fb4c901d52d92abe098e78768ed829c673fbb53c498bef73a", size = 249941, upload-time = 
"2025-09-21T20:01:47.296Z" }, + { url = "https://files.pythonhosted.org/packages/e0/fd/ca2650443bfbef5b0e74373aac4df67b08180d2f184b482c41499668e258/coverage-7.10.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:5e1e9802121405ede4b0133aa4340ad8186a1d2526de5b7c3eca519db7bb89fb", size = 249519, upload-time = "2025-09-21T20:01:48.73Z" }, + { url = "https://files.pythonhosted.org/packages/24/79/f692f125fb4299b6f963b0745124998ebb8e73ecdfce4ceceb06a8c6bec5/coverage-7.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d41213ea25a86f69efd1575073d34ea11aabe075604ddf3d148ecfec9e1e96a1", size = 251375, upload-time = "2025-09-21T20:01:50.529Z" }, + { url = "https://files.pythonhosted.org/packages/5e/75/61b9bbd6c7d24d896bfeec57acba78e0f8deac68e6baf2d4804f7aae1f88/coverage-7.10.7-cp312-cp312-win32.whl", hash = "sha256:77eb4c747061a6af8d0f7bdb31f1e108d172762ef579166ec84542f711d90256", size = 220699, upload-time = "2025-09-21T20:01:51.941Z" }, + { url = "https://files.pythonhosted.org/packages/ca/f3/3bf7905288b45b075918d372498f1cf845b5b579b723c8fd17168018d5f5/coverage-7.10.7-cp312-cp312-win_amd64.whl", hash = "sha256:f51328ffe987aecf6d09f3cd9d979face89a617eacdaea43e7b3080777f647ba", size = 221512, upload-time = "2025-09-21T20:01:53.481Z" }, + { url = "https://files.pythonhosted.org/packages/5c/44/3e32dbe933979d05cf2dac5e697c8599cfe038aaf51223ab901e208d5a62/coverage-7.10.7-cp312-cp312-win_arm64.whl", hash = "sha256:bda5e34f8a75721c96085903c6f2197dc398c20ffd98df33f866a9c8fd95f4bf", size = 220147, upload-time = "2025-09-21T20:01:55.2Z" }, + { url = "https://files.pythonhosted.org/packages/9a/94/b765c1abcb613d103b64fcf10395f54d69b0ef8be6a0dd9c524384892cc7/coverage-7.10.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:981a651f543f2854abd3b5fcb3263aac581b18209be49863ba575de6edf4c14d", size = 218320, upload-time = "2025-09-21T20:01:56.629Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/4f/732fff31c119bb73b35236dd333030f32c4bfe909f445b423e6c7594f9a2/coverage-7.10.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:73ab1601f84dc804f7812dc297e93cd99381162da39c47040a827d4e8dafe63b", size = 218575, upload-time = "2025-09-21T20:01:58.203Z" }, + { url = "https://files.pythonhosted.org/packages/87/02/ae7e0af4b674be47566707777db1aa375474f02a1d64b9323e5813a6cdd5/coverage-7.10.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a8b6f03672aa6734e700bbcd65ff050fd19cddfec4b031cc8cf1c6967de5a68e", size = 249568, upload-time = "2025-09-21T20:01:59.748Z" }, + { url = "https://files.pythonhosted.org/packages/a2/77/8c6d22bf61921a59bce5471c2f1f7ac30cd4ac50aadde72b8c48d5727902/coverage-7.10.7-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10b6ba00ab1132a0ce4428ff68cf50a25efd6840a42cdf4239c9b99aad83be8b", size = 252174, upload-time = "2025-09-21T20:02:01.192Z" }, + { url = "https://files.pythonhosted.org/packages/b1/20/b6ea4f69bbb52dac0aebd62157ba6a9dddbfe664f5af8122dac296c3ee15/coverage-7.10.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c79124f70465a150e89340de5963f936ee97097d2ef76c869708c4248c63ca49", size = 253447, upload-time = "2025-09-21T20:02:02.701Z" }, + { url = "https://files.pythonhosted.org/packages/f9/28/4831523ba483a7f90f7b259d2018fef02cb4d5b90bc7c1505d6e5a84883c/coverage-7.10.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:69212fbccdbd5b0e39eac4067e20a4a5256609e209547d86f740d68ad4f04911", size = 249779, upload-time = "2025-09-21T20:02:04.185Z" }, + { url = "https://files.pythonhosted.org/packages/a7/9f/4331142bc98c10ca6436d2d620c3e165f31e6c58d43479985afce6f3191c/coverage-7.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7ea7c6c9d0d286d04ed3541747e6597cbe4971f22648b68248f7ddcd329207f0", size = 251604, upload-time = 
"2025-09-21T20:02:06.034Z" }, + { url = "https://files.pythonhosted.org/packages/ce/60/bda83b96602036b77ecf34e6393a3836365481b69f7ed7079ab85048202b/coverage-7.10.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b9be91986841a75042b3e3243d0b3cb0b2434252b977baaf0cd56e960fe1e46f", size = 249497, upload-time = "2025-09-21T20:02:07.619Z" }, + { url = "https://files.pythonhosted.org/packages/5f/af/152633ff35b2af63977edd835d8e6430f0caef27d171edf2fc76c270ef31/coverage-7.10.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:b281d5eca50189325cfe1f365fafade89b14b4a78d9b40b05ddd1fc7d2a10a9c", size = 249350, upload-time = "2025-09-21T20:02:10.34Z" }, + { url = "https://files.pythonhosted.org/packages/9d/71/d92105d122bd21cebba877228990e1646d862e34a98bb3374d3fece5a794/coverage-7.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:99e4aa63097ab1118e75a848a28e40d68b08a5e19ce587891ab7fd04475e780f", size = 251111, upload-time = "2025-09-21T20:02:12.122Z" }, + { url = "https://files.pythonhosted.org/packages/a2/9e/9fdb08f4bf476c912f0c3ca292e019aab6712c93c9344a1653986c3fd305/coverage-7.10.7-cp313-cp313-win32.whl", hash = "sha256:dc7c389dce432500273eaf48f410b37886be9208b2dd5710aaf7c57fd442c698", size = 220746, upload-time = "2025-09-21T20:02:13.919Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b1/a75fd25df44eab52d1931e89980d1ada46824c7a3210be0d3c88a44aaa99/coverage-7.10.7-cp313-cp313-win_amd64.whl", hash = "sha256:cac0fdca17b036af3881a9d2729a850b76553f3f716ccb0360ad4dbc06b3b843", size = 221541, upload-time = "2025-09-21T20:02:15.57Z" }, + { url = "https://files.pythonhosted.org/packages/14/3a/d720d7c989562a6e9a14b2c9f5f2876bdb38e9367126d118495b89c99c37/coverage-7.10.7-cp313-cp313-win_arm64.whl", hash = "sha256:4b6f236edf6e2f9ae8fcd1332da4e791c1b6ba0dc16a2dc94590ceccb482e546", size = 220170, upload-time = "2025-09-21T20:02:17.395Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/22/e04514bf2a735d8b0add31d2b4ab636fc02370730787c576bb995390d2d5/coverage-7.10.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a0ec07fd264d0745ee396b666d47cef20875f4ff2375d7c4f58235886cc1ef0c", size = 219029, upload-time = "2025-09-21T20:02:18.936Z" }, + { url = "https://files.pythonhosted.org/packages/11/0b/91128e099035ece15da3445d9015e4b4153a6059403452d324cbb0a575fa/coverage-7.10.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:dd5e856ebb7bfb7672b0086846db5afb4567a7b9714b8a0ebafd211ec7ce6a15", size = 219259, upload-time = "2025-09-21T20:02:20.44Z" }, + { url = "https://files.pythonhosted.org/packages/8b/51/66420081e72801536a091a0c8f8c1f88a5c4bf7b9b1bdc6222c7afe6dc9b/coverage-7.10.7-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f57b2a3c8353d3e04acf75b3fed57ba41f5c0646bbf1d10c7c282291c97936b4", size = 260592, upload-time = "2025-09-21T20:02:22.313Z" }, + { url = "https://files.pythonhosted.org/packages/5d/22/9b8d458c2881b22df3db5bb3e7369e63d527d986decb6c11a591ba2364f7/coverage-7.10.7-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ef2319dd15a0b009667301a3f84452a4dc6fddfd06b0c5c53ea472d3989fbf0", size = 262768, upload-time = "2025-09-21T20:02:24.287Z" }, + { url = "https://files.pythonhosted.org/packages/f7/08/16bee2c433e60913c610ea200b276e8eeef084b0d200bdcff69920bd5828/coverage-7.10.7-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83082a57783239717ceb0ad584de3c69cf581b2a95ed6bf81ea66034f00401c0", size = 264995, upload-time = "2025-09-21T20:02:26.133Z" }, + { url = "https://files.pythonhosted.org/packages/20/9d/e53eb9771d154859b084b90201e5221bca7674ba449a17c101a5031d4054/coverage-7.10.7-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:50aa94fb1fb9a397eaa19c0d5ec15a5edd03a47bf1a3a6111a16b36e190cff65", size = 259546, upload-time = 
"2025-09-21T20:02:27.716Z" }, + { url = "https://files.pythonhosted.org/packages/ad/b0/69bc7050f8d4e56a89fb550a1577d5d0d1db2278106f6f626464067b3817/coverage-7.10.7-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2120043f147bebb41c85b97ac45dd173595ff14f2a584f2963891cbcc3091541", size = 262544, upload-time = "2025-09-21T20:02:29.216Z" }, + { url = "https://files.pythonhosted.org/packages/ef/4b/2514b060dbd1bc0aaf23b852c14bb5818f244c664cb16517feff6bb3a5ab/coverage-7.10.7-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2fafd773231dd0378fdba66d339f84904a8e57a262f583530f4f156ab83863e6", size = 260308, upload-time = "2025-09-21T20:02:31.226Z" }, + { url = "https://files.pythonhosted.org/packages/54/78/7ba2175007c246d75e496f64c06e94122bdb914790a1285d627a918bd271/coverage-7.10.7-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:0b944ee8459f515f28b851728ad224fa2d068f1513ef6b7ff1efafeb2185f999", size = 258920, upload-time = "2025-09-21T20:02:32.823Z" }, + { url = "https://files.pythonhosted.org/packages/c0/b3/fac9f7abbc841409b9a410309d73bfa6cfb2e51c3fada738cb607ce174f8/coverage-7.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4b583b97ab2e3efe1b3e75248a9b333bd3f8b0b1b8e5b45578e05e5850dfb2c2", size = 261434, upload-time = "2025-09-21T20:02:34.86Z" }, + { url = "https://files.pythonhosted.org/packages/ee/51/a03bec00d37faaa891b3ff7387192cef20f01604e5283a5fabc95346befa/coverage-7.10.7-cp313-cp313t-win32.whl", hash = "sha256:2a78cd46550081a7909b3329e2266204d584866e8d97b898cd7fb5ac8d888b1a", size = 221403, upload-time = "2025-09-21T20:02:37.034Z" }, + { url = "https://files.pythonhosted.org/packages/53/22/3cf25d614e64bf6d8e59c7c669b20d6d940bb337bdee5900b9ca41c820bb/coverage-7.10.7-cp313-cp313t-win_amd64.whl", hash = "sha256:33a5e6396ab684cb43dc7befa386258acb2d7fae7f67330ebb85ba4ea27938eb", size = 222469, upload-time = "2025-09-21T20:02:39.011Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/a1/00164f6d30d8a01c3c9c48418a7a5be394de5349b421b9ee019f380df2a0/coverage-7.10.7-cp313-cp313t-win_arm64.whl", hash = "sha256:86b0e7308289ddde73d863b7683f596d8d21c7d8664ce1dee061d0bcf3fbb4bb", size = 220731, upload-time = "2025-09-21T20:02:40.939Z" }, + { url = "https://files.pythonhosted.org/packages/23/9c/5844ab4ca6a4dd97a1850e030a15ec7d292b5c5cb93082979225126e35dd/coverage-7.10.7-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b06f260b16ead11643a5a9f955bd4b5fd76c1a4c6796aeade8520095b75de520", size = 218302, upload-time = "2025-09-21T20:02:42.527Z" }, + { url = "https://files.pythonhosted.org/packages/f0/89/673f6514b0961d1f0e20ddc242e9342f6da21eaba3489901b565c0689f34/coverage-7.10.7-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:212f8f2e0612778f09c55dd4872cb1f64a1f2b074393d139278ce902064d5b32", size = 218578, upload-time = "2025-09-21T20:02:44.468Z" }, + { url = "https://files.pythonhosted.org/packages/05/e8/261cae479e85232828fb17ad536765c88dd818c8470aca690b0ac6feeaa3/coverage-7.10.7-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3445258bcded7d4aa630ab8296dea4d3f15a255588dd535f980c193ab6b95f3f", size = 249629, upload-time = "2025-09-21T20:02:46.503Z" }, + { url = "https://files.pythonhosted.org/packages/82/62/14ed6546d0207e6eda876434e3e8475a3e9adbe32110ce896c9e0c06bb9a/coverage-7.10.7-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb45474711ba385c46a0bfe696c695a929ae69ac636cda8f532be9e8c93d720a", size = 252162, upload-time = "2025-09-21T20:02:48.689Z" }, + { url = "https://files.pythonhosted.org/packages/ff/49/07f00db9ac6478e4358165a08fb41b469a1b053212e8a00cb02f0d27a05f/coverage-7.10.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:813922f35bd800dca9994c5971883cbc0d291128a5de6b167c7aa697fcf59360", size = 253517, upload-time = "2025-09-21T20:02:50.31Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/59/c5201c62dbf165dfbc91460f6dbbaa85a8b82cfa6131ac45d6c1bfb52deb/coverage-7.10.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:93c1b03552081b2a4423091d6fb3787265b8f86af404cff98d1b5342713bdd69", size = 249632, upload-time = "2025-09-21T20:02:51.971Z" }, + { url = "https://files.pythonhosted.org/packages/07/ae/5920097195291a51fb00b3a70b9bbd2edbfe3c84876a1762bd1ef1565ebc/coverage-7.10.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:cc87dd1b6eaf0b848eebb1c86469b9f72a1891cb42ac7adcfbce75eadb13dd14", size = 251520, upload-time = "2025-09-21T20:02:53.858Z" }, + { url = "https://files.pythonhosted.org/packages/b9/3c/a815dde77a2981f5743a60b63df31cb322c944843e57dbd579326625a413/coverage-7.10.7-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:39508ffda4f343c35f3236fe8d1a6634a51f4581226a1262769d7f970e73bffe", size = 249455, upload-time = "2025-09-21T20:02:55.807Z" }, + { url = "https://files.pythonhosted.org/packages/aa/99/f5cdd8421ea656abefb6c0ce92556709db2265c41e8f9fc6c8ae0f7824c9/coverage-7.10.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:925a1edf3d810537c5a3abe78ec5530160c5f9a26b1f4270b40e62cc79304a1e", size = 249287, upload-time = "2025-09-21T20:02:57.784Z" }, + { url = "https://files.pythonhosted.org/packages/c3/7a/e9a2da6a1fc5d007dd51fca083a663ab930a8c4d149c087732a5dbaa0029/coverage-7.10.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2c8b9a0636f94c43cd3576811e05b89aa9bc2d0a85137affc544ae5cb0e4bfbd", size = 250946, upload-time = "2025-09-21T20:02:59.431Z" }, + { url = "https://files.pythonhosted.org/packages/ef/5b/0b5799aa30380a949005a353715095d6d1da81927d6dbed5def2200a4e25/coverage-7.10.7-cp314-cp314-win32.whl", hash = "sha256:b7b8288eb7cdd268b0304632da8cb0bb93fadcfec2fe5712f7b9cc8f4d487be2", size = 221009, upload-time = "2025-09-21T20:03:01.324Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/b0/e802fbb6eb746de006490abc9bb554b708918b6774b722bb3a0e6aa1b7de/coverage-7.10.7-cp314-cp314-win_amd64.whl", hash = "sha256:1ca6db7c8807fb9e755d0379ccc39017ce0a84dcd26d14b5a03b78563776f681", size = 221804, upload-time = "2025-09-21T20:03:03.4Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e8/71d0c8e374e31f39e3389bb0bd19e527d46f00ea8571ec7ec8fd261d8b44/coverage-7.10.7-cp314-cp314-win_arm64.whl", hash = "sha256:097c1591f5af4496226d5783d036bf6fd6cd0cbc132e071b33861de756efb880", size = 220384, upload-time = "2025-09-21T20:03:05.111Z" }, + { url = "https://files.pythonhosted.org/packages/62/09/9a5608d319fa3eba7a2019addeacb8c746fb50872b57a724c9f79f146969/coverage-7.10.7-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:a62c6ef0d50e6de320c270ff91d9dd0a05e7250cac2a800b7784bae474506e63", size = 219047, upload-time = "2025-09-21T20:03:06.795Z" }, + { url = "https://files.pythonhosted.org/packages/f5/6f/f58d46f33db9f2e3647b2d0764704548c184e6f5e014bef528b7f979ef84/coverage-7.10.7-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9fa6e4dd51fe15d8738708a973470f67a855ca50002294852e9571cdbd9433f2", size = 219266, upload-time = "2025-09-21T20:03:08.495Z" }, + { url = "https://files.pythonhosted.org/packages/74/5c/183ffc817ba68e0b443b8c934c8795553eb0c14573813415bd59941ee165/coverage-7.10.7-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8fb190658865565c549b6b4706856d6a7b09302c797eb2cf8e7fe9dabb043f0d", size = 260767, upload-time = "2025-09-21T20:03:10.172Z" }, + { url = "https://files.pythonhosted.org/packages/0f/48/71a8abe9c1ad7e97548835e3cc1adbf361e743e9d60310c5f75c9e7bf847/coverage-7.10.7-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:affef7c76a9ef259187ef31599a9260330e0335a3011732c4b9effa01e1cd6e0", size = 262931, upload-time = "2025-09-21T20:03:11.861Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/fd/193a8fb132acfc0a901f72020e54be5e48021e1575bb327d8ee1097a28fd/coverage-7.10.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e16e07d85ca0cf8bafe5f5d23a0b850064e8e945d5677492b06bbe6f09cc699", size = 265186, upload-time = "2025-09-21T20:03:13.539Z" }, + { url = "https://files.pythonhosted.org/packages/b1/8f/74ecc30607dd95ad50e3034221113ccb1c6d4e8085cc761134782995daae/coverage-7.10.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:03ffc58aacdf65d2a82bbeb1ffe4d01ead4017a21bfd0454983b88ca73af94b9", size = 259470, upload-time = "2025-09-21T20:03:15.584Z" }, + { url = "https://files.pythonhosted.org/packages/0f/55/79ff53a769f20d71b07023ea115c9167c0bb56f281320520cf64c5298a96/coverage-7.10.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1b4fd784344d4e52647fd7857b2af5b3fbe6c239b0b5fa63e94eb67320770e0f", size = 262626, upload-time = "2025-09-21T20:03:17.673Z" }, + { url = "https://files.pythonhosted.org/packages/88/e2/dac66c140009b61ac3fc13af673a574b00c16efdf04f9b5c740703e953c0/coverage-7.10.7-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:0ebbaddb2c19b71912c6f2518e791aa8b9f054985a0769bdb3a53ebbc765c6a1", size = 260386, upload-time = "2025-09-21T20:03:19.36Z" }, + { url = "https://files.pythonhosted.org/packages/a2/f1/f48f645e3f33bb9ca8a496bc4a9671b52f2f353146233ebd7c1df6160440/coverage-7.10.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:a2d9a3b260cc1d1dbdb1c582e63ddcf5363426a1a68faa0f5da28d8ee3c722a0", size = 258852, upload-time = "2025-09-21T20:03:21.007Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3b/8442618972c51a7affeead957995cfa8323c0c9bcf8fa5a027421f720ff4/coverage-7.10.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a3cc8638b2480865eaa3926d192e64ce6c51e3d29c849e09d5b4ad95efae5399", size = 261534, upload-time = "2025-09-21T20:03:23.12Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/dc/101f3fa3a45146db0cb03f5b4376e24c0aac818309da23e2de0c75295a91/coverage-7.10.7-cp314-cp314t-win32.whl", hash = "sha256:67f8c5cbcd3deb7a60b3345dffc89a961a484ed0af1f6f73de91705cc6e31235", size = 221784, upload-time = "2025-09-21T20:03:24.769Z" }, + { url = "https://files.pythonhosted.org/packages/4c/a1/74c51803fc70a8a40d7346660379e144be772bab4ac7bb6e6b905152345c/coverage-7.10.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e1ed71194ef6dea7ed2d5cb5f7243d4bcd334bfb63e59878519be558078f848d", size = 222905, upload-time = "2025-09-21T20:03:26.93Z" }, + { url = "https://files.pythonhosted.org/packages/12/65/f116a6d2127df30bcafbceef0302d8a64ba87488bf6f73a6d8eebf060873/coverage-7.10.7-cp314-cp314t-win_arm64.whl", hash = "sha256:7fe650342addd8524ca63d77b2362b02345e5f1a093266787d210c70a50b471a", size = 220922, upload-time = "2025-09-21T20:03:28.672Z" }, + { url = "https://files.pythonhosted.org/packages/ec/16/114df1c291c22cac3b0c127a73e0af5c12ed7bbb6558d310429a0ae24023/coverage-7.10.7-py3-none-any.whl", hash = "sha256:f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260", size = 209952, upload-time = "2025-09-21T20:03:53.918Z" }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version <= '3.11'" }, +] + +[[package]] +name = "diskcache" +version = "5.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3f/21/1c1ffc1a039ddcc459db43cc108658f32c57d271d7289a2794e401d0fdb6/diskcache-5.6.3.tar.gz", hash = "sha256:2c3a3fa2743d8535d832ec61c2054a1641f41775aa7c556758a109941e33e4fc", size = 67916, upload-time = "2023-08-31T06:12:00.316Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/27/4570e78fc0bf5ea0ca45eb1de3818a23787af9b390c0b0a0033a1b8236f9/diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19", size = 45550, upload-time = 
"2023-08-31T06:11:58.822Z" }, +] + +[[package]] +name = "ghaw-auditor" +version = "1.0.0" +source = { editable = "." } +dependencies = [ + { name = "diskcache" }, + { name = "httpx" }, + { name = "packaging" }, + { name = "platformdirs" }, + { name = "pydantic" }, + { name = "rich" }, + { name = "ruamel-yaml" }, + { name = "tenacity" }, + { name = "typer" }, +] + +[package.optional-dependencies] +dev = [ + { name = "mypy" }, + { name = "pytest" }, + { name = "pytest-cov" }, + { name = "ruff" }, + { name = "types-pyyaml" }, + { name = "vcrpy" }, +] + +[package.metadata] +requires-dist = [ + { name = "diskcache", specifier = ">=5.6.0" }, + { name = "httpx", specifier = ">=0.27.0" }, + { name = "mypy", marker = "extra == 'dev'", specifier = ">=1.8.0" }, + { name = "packaging", specifier = ">=24.0" }, + { name = "platformdirs", specifier = ">=4.2.0" }, + { name = "pydantic", specifier = ">=2.6.0" }, + { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.0.0" }, + { name = "pytest-cov", marker = "extra == 'dev'", specifier = ">=4.1.0" }, + { name = "rich", specifier = ">=13.7.0" }, + { name = "ruamel-yaml", specifier = ">=0.18.0" }, + { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.3.0" }, + { name = "tenacity", specifier = ">=8.2.0" }, + { name = "typer", specifier = ">=0.12.0" }, + { name = "types-pyyaml", marker = "extra == 'dev'" }, + { name = "vcrpy", marker = "extra == 'dev'", specifier = ">=6.0.0" }, +] +provides-extras = ["dev"] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = 
"2025-08-11T12:57:51.923Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "multidict" +version = "6.6.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/69/7f/0652e6ed47ab288e3756ea9c0df8b14950781184d4bd7883f4d87dd41245/multidict-6.6.4.tar.gz", hash = "sha256:d2d4e4787672911b48350df02ed3fa3fffdc2f2e8ca06dd6afdf34189b76a9dd", size = 101843, upload-time = "2025-08-11T12:08:48.217Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/7f/90a7f01e2d005d6653c689039977f6856718c75c5579445effb7e60923d1/multidict-6.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c7a0e9b561e6460484318a7612e725df1145d46b0ef57c6b9866441bf6e27e0c", size = 76472, upload-time = "2025-08-11T12:06:29.006Z" }, + { url = "https://files.pythonhosted.org/packages/54/a3/bed07bc9e2bb302ce752f1dabc69e884cd6a676da44fb0e501b246031fdd/multidict-6.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6bf2f10f70acc7a2446965ffbc726e5fc0b272c97a90b485857e5c70022213eb", size = 44634, upload-time = "2025-08-11T12:06:30.374Z" }, + { url = "https://files.pythonhosted.org/packages/a7/4b/ceeb4f8f33cf81277da464307afeaf164fb0297947642585884f5cad4f28/multidict-6.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:66247d72ed62d5dd29752ffc1d3b88f135c6a8de8b5f63b7c14e973ef5bda19e", size = 44282, upload-time = "2025-08-11T12:06:31.958Z" }, + { url = "https://files.pythonhosted.org/packages/03/35/436a5da8702b06866189b69f655ffdb8f70796252a8772a77815f1812679/multidict-6.6.4-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:105245cc6b76f51e408451a844a54e6823bbd5a490ebfe5bdfc79798511ceded", size = 229696, upload-time = "2025-08-11T12:06:33.087Z" }, + { url = "https://files.pythonhosted.org/packages/b6/0e/915160be8fecf1fca35f790c08fb74ca684d752fcba62c11daaf3d92c216/multidict-6.6.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cbbc54e58b34c3bae389ef00046be0961f30fef7cb0dd9c7756aee376a4f7683", size = 246665, upload-time = "2025-08-11T12:06:34.448Z" }, + { url = "https://files.pythonhosted.org/packages/08/ee/2f464330acd83f77dcc346f0b1a0eaae10230291450887f96b204b8ac4d3/multidict-6.6.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:56c6b3652f945c9bc3ac6c8178cd93132b8d82dd581fcbc3a00676c51302bc1a", size = 225485, upload-time = "2025-08-11T12:06:35.672Z" }, + { url = "https://files.pythonhosted.org/packages/71/cc/9a117f828b4d7fbaec6adeed2204f211e9caf0a012692a1ee32169f846ae/multidict-6.6.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b95494daf857602eccf4c18ca33337dd2be705bccdb6dddbfc9d513e6addb9d9", size = 257318, upload-time = "2025-08-11T12:06:36.98Z" }, + { url = "https://files.pythonhosted.org/packages/25/77/62752d3dbd70e27fdd68e86626c1ae6bccfebe2bb1f84ae226363e112f5a/multidict-6.6.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e5b1413361cef15340ab9dc61523e653d25723e82d488ef7d60a12878227ed50", size = 254689, upload-time = "2025-08-11T12:06:38.233Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/6e/fac58b1072a6fc59af5e7acb245e8754d3e1f97f4f808a6559951f72a0d4/multidict-6.6.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e167bf899c3d724f9662ef00b4f7fef87a19c22b2fead198a6f68b263618df52", size = 246709, upload-time = "2025-08-11T12:06:39.517Z" }, + { url = "https://files.pythonhosted.org/packages/01/ef/4698d6842ef5e797c6db7744b0081e36fb5de3d00002cc4c58071097fac3/multidict-6.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:aaea28ba20a9026dfa77f4b80369e51cb767c61e33a2d4043399c67bd95fb7c6", size = 243185, upload-time = "2025-08-11T12:06:40.796Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c9/d82e95ae1d6e4ef396934e9b0e942dfc428775f9554acf04393cce66b157/multidict-6.6.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8c91cdb30809a96d9ecf442ec9bc45e8cfaa0f7f8bdf534e082c2443a196727e", size = 237838, upload-time = "2025-08-11T12:06:42.595Z" }, + { url = "https://files.pythonhosted.org/packages/57/cf/f94af5c36baaa75d44fab9f02e2a6bcfa0cd90acb44d4976a80960759dbc/multidict-6.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1a0ccbfe93ca114c5d65a2471d52d8829e56d467c97b0e341cf5ee45410033b3", size = 246368, upload-time = "2025-08-11T12:06:44.304Z" }, + { url = "https://files.pythonhosted.org/packages/4a/fe/29f23460c3d995f6a4b678cb2e9730e7277231b981f0b234702f0177818a/multidict-6.6.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:55624b3f321d84c403cb7d8e6e982f41ae233d85f85db54ba6286f7295dc8a9c", size = 253339, upload-time = "2025-08-11T12:06:45.597Z" }, + { url = "https://files.pythonhosted.org/packages/29/b6/fd59449204426187b82bf8a75f629310f68c6adc9559dc922d5abe34797b/multidict-6.6.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:4a1fb393a2c9d202cb766c76208bd7945bc194eba8ac920ce98c6e458f0b524b", size = 246933, upload-time = "2025-08-11T12:06:46.841Z" }, + { url = 
"https://files.pythonhosted.org/packages/19/52/d5d6b344f176a5ac3606f7a61fb44dc746e04550e1a13834dff722b8d7d6/multidict-6.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:43868297a5759a845fa3a483fb4392973a95fb1de891605a3728130c52b8f40f", size = 242225, upload-time = "2025-08-11T12:06:48.588Z" }, + { url = "https://files.pythonhosted.org/packages/ec/d3/5b2281ed89ff4d5318d82478a2a2450fcdfc3300da48ff15c1778280ad26/multidict-6.6.4-cp311-cp311-win32.whl", hash = "sha256:ed3b94c5e362a8a84d69642dbeac615452e8af9b8eb825b7bc9f31a53a1051e2", size = 41306, upload-time = "2025-08-11T12:06:49.95Z" }, + { url = "https://files.pythonhosted.org/packages/74/7d/36b045c23a1ab98507aefd44fd8b264ee1dd5e5010543c6fccf82141ccef/multidict-6.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:d8c112f7a90d8ca5d20213aa41eac690bb50a76da153e3afb3886418e61cb22e", size = 46029, upload-time = "2025-08-11T12:06:51.082Z" }, + { url = "https://files.pythonhosted.org/packages/0f/5e/553d67d24432c5cd52b49047f2d248821843743ee6d29a704594f656d182/multidict-6.6.4-cp311-cp311-win_arm64.whl", hash = "sha256:3bb0eae408fa1996d87247ca0d6a57b7fc1dcf83e8a5c47ab82c558c250d4adf", size = 43017, upload-time = "2025-08-11T12:06:52.243Z" }, + { url = "https://files.pythonhosted.org/packages/05/f6/512ffd8fd8b37fb2680e5ac35d788f1d71bbaf37789d21a820bdc441e565/multidict-6.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0ffb87be160942d56d7b87b0fdf098e81ed565add09eaa1294268c7f3caac4c8", size = 76516, upload-time = "2025-08-11T12:06:53.393Z" }, + { url = "https://files.pythonhosted.org/packages/99/58/45c3e75deb8855c36bd66cc1658007589662ba584dbf423d01df478dd1c5/multidict-6.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d191de6cbab2aff5de6c5723101705fd044b3e4c7cfd587a1929b5028b9714b3", size = 45394, upload-time = "2025-08-11T12:06:54.555Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/ca/e8c4472a93a26e4507c0b8e1f0762c0d8a32de1328ef72fd704ef9cc5447/multidict-6.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38a0956dd92d918ad5feff3db8fcb4a5eb7dba114da917e1a88475619781b57b", size = 43591, upload-time = "2025-08-11T12:06:55.672Z" }, + { url = "https://files.pythonhosted.org/packages/05/51/edf414f4df058574a7265034d04c935aa84a89e79ce90fcf4df211f47b16/multidict-6.6.4-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:6865f6d3b7900ae020b495d599fcf3765653bc927951c1abb959017f81ae8287", size = 237215, upload-time = "2025-08-11T12:06:57.213Z" }, + { url = "https://files.pythonhosted.org/packages/c8/45/8b3d6dbad8cf3252553cc41abea09ad527b33ce47a5e199072620b296902/multidict-6.6.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a2088c126b6f72db6c9212ad827d0ba088c01d951cee25e758c450da732c138", size = 258299, upload-time = "2025-08-11T12:06:58.946Z" }, + { url = "https://files.pythonhosted.org/packages/3c/e8/8ca2e9a9f5a435fc6db40438a55730a4bf4956b554e487fa1b9ae920f825/multidict-6.6.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0f37bed7319b848097085d7d48116f545985db988e2256b2e6f00563a3416ee6", size = 242357, upload-time = "2025-08-11T12:07:00.301Z" }, + { url = "https://files.pythonhosted.org/packages/0f/84/80c77c99df05a75c28490b2af8f7cba2a12621186e0a8b0865d8e745c104/multidict-6.6.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:01368e3c94032ba6ca0b78e7ccb099643466cf24f8dc8eefcfdc0571d56e58f9", size = 268369, upload-time = "2025-08-11T12:07:01.638Z" }, + { url = "https://files.pythonhosted.org/packages/0d/e9/920bfa46c27b05fb3e1ad85121fd49f441492dca2449c5bcfe42e4565d8a/multidict-6.6.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:8fe323540c255db0bffee79ad7f048c909f2ab0edb87a597e1c17da6a54e493c", size = 269341, upload-time = "2025-08-11T12:07:02.943Z" }, + { url = "https://files.pythonhosted.org/packages/af/65/753a2d8b05daf496f4a9c367fe844e90a1b2cac78e2be2c844200d10cc4c/multidict-6.6.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8eb3025f17b0a4c3cd08cda49acf312a19ad6e8a4edd9dbd591e6506d999402", size = 256100, upload-time = "2025-08-11T12:07:04.564Z" }, + { url = "https://files.pythonhosted.org/packages/09/54/655be13ae324212bf0bc15d665a4e34844f34c206f78801be42f7a0a8aaa/multidict-6.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bbc14f0365534d35a06970d6a83478b249752e922d662dc24d489af1aa0d1be7", size = 253584, upload-time = "2025-08-11T12:07:05.914Z" }, + { url = "https://files.pythonhosted.org/packages/5c/74/ab2039ecc05264b5cec73eb018ce417af3ebb384ae9c0e9ed42cb33f8151/multidict-6.6.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:75aa52fba2d96bf972e85451b99d8e19cc37ce26fd016f6d4aa60da9ab2b005f", size = 251018, upload-time = "2025-08-11T12:07:08.301Z" }, + { url = "https://files.pythonhosted.org/packages/af/0a/ccbb244ac848e56c6427f2392741c06302bbfba49c0042f1eb3c5b606497/multidict-6.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4fefd4a815e362d4f011919d97d7b4a1e566f1dde83dc4ad8cfb5b41de1df68d", size = 251477, upload-time = "2025-08-11T12:07:10.248Z" }, + { url = "https://files.pythonhosted.org/packages/0e/b0/0ed49bba775b135937f52fe13922bc64a7eaf0a3ead84a36e8e4e446e096/multidict-6.6.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:db9801fe021f59a5b375ab778973127ca0ac52429a26e2fd86aa9508f4d26eb7", size = 263575, upload-time = "2025-08-11T12:07:11.928Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d9/7fb85a85e14de2e44dfb6a24f03c41e2af8697a6df83daddb0e9b7569f73/multidict-6.6.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:a650629970fa21ac1fb06ba25dabfc5b8a2054fcbf6ae97c758aa956b8dba802", size = 259649, upload-time = "2025-08-11T12:07:13.244Z" }, + { url = "https://files.pythonhosted.org/packages/03/9e/b3a459bcf9b6e74fa461a5222a10ff9b544cb1cd52fd482fb1b75ecda2a2/multidict-6.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:452ff5da78d4720d7516a3a2abd804957532dd69296cb77319c193e3ffb87e24", size = 251505, upload-time = "2025-08-11T12:07:14.57Z" }, + { url = "https://files.pythonhosted.org/packages/86/a2/8022f78f041dfe6d71e364001a5cf987c30edfc83c8a5fb7a3f0974cff39/multidict-6.6.4-cp312-cp312-win32.whl", hash = "sha256:8c2fcb12136530ed19572bbba61b407f655e3953ba669b96a35036a11a485793", size = 41888, upload-time = "2025-08-11T12:07:15.904Z" }, + { url = "https://files.pythonhosted.org/packages/c7/eb/d88b1780d43a56db2cba24289fa744a9d216c1a8546a0dc3956563fd53ea/multidict-6.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:047d9425860a8c9544fed1b9584f0c8bcd31bcde9568b047c5e567a1025ecd6e", size = 46072, upload-time = "2025-08-11T12:07:17.045Z" }, + { url = "https://files.pythonhosted.org/packages/9f/16/b929320bf5750e2d9d4931835a4c638a19d2494a5b519caaaa7492ebe105/multidict-6.6.4-cp312-cp312-win_arm64.whl", hash = "sha256:14754eb72feaa1e8ae528468f24250dd997b8e2188c3d2f593f9eba259e4b364", size = 43222, upload-time = "2025-08-11T12:07:18.328Z" }, + { url = "https://files.pythonhosted.org/packages/3a/5d/e1db626f64f60008320aab00fbe4f23fc3300d75892a3381275b3d284580/multidict-6.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f46a6e8597f9bd71b31cc708195d42b634c8527fecbcf93febf1052cacc1f16e", size = 75848, upload-time = "2025-08-11T12:07:19.912Z" }, + { url = "https://files.pythonhosted.org/packages/4c/aa/8b6f548d839b6c13887253af4e29c939af22a18591bfb5d0ee6f1931dae8/multidict-6.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:22e38b2bc176c5eb9c0a0e379f9d188ae4cd8b28c0f53b52bce7ab0a9e534657", size = 45060, upload-time = "2025-08-11T12:07:21.163Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/c6/f5e97e5d99a729bc2aa58eb3ebfa9f1e56a9b517cc38c60537c81834a73f/multidict-6.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5df8afd26f162da59e218ac0eefaa01b01b2e6cd606cffa46608f699539246da", size = 43269, upload-time = "2025-08-11T12:07:22.392Z" }, + { url = "https://files.pythonhosted.org/packages/dc/31/d54eb0c62516776f36fe67f84a732f97e0b0e12f98d5685bebcc6d396910/multidict-6.6.4-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:49517449b58d043023720aa58e62b2f74ce9b28f740a0b5d33971149553d72aa", size = 237158, upload-time = "2025-08-11T12:07:23.636Z" }, + { url = "https://files.pythonhosted.org/packages/c4/1c/8a10c1c25b23156e63b12165a929d8eb49a6ed769fdbefb06e6f07c1e50d/multidict-6.6.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae9408439537c5afdca05edd128a63f56a62680f4b3c234301055d7a2000220f", size = 257076, upload-time = "2025-08-11T12:07:25.049Z" }, + { url = "https://files.pythonhosted.org/packages/ad/86/90e20b5771d6805a119e483fd3d1e8393e745a11511aebca41f0da38c3e2/multidict-6.6.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:87a32d20759dc52a9e850fe1061b6e41ab28e2998d44168a8a341b99ded1dba0", size = 240694, upload-time = "2025-08-11T12:07:26.458Z" }, + { url = "https://files.pythonhosted.org/packages/e7/49/484d3e6b535bc0555b52a0a26ba86e4d8d03fd5587d4936dc59ba7583221/multidict-6.6.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:52e3c8d43cdfff587ceedce9deb25e6ae77daba560b626e97a56ddcad3756879", size = 266350, upload-time = "2025-08-11T12:07:27.94Z" }, + { url = "https://files.pythonhosted.org/packages/bf/b4/aa4c5c379b11895083d50021e229e90c408d7d875471cb3abf721e4670d6/multidict-6.6.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:ad8850921d3a8d8ff6fbef790e773cecfc260bbfa0566998980d3fa8f520bc4a", size = 267250, upload-time = "2025-08-11T12:07:29.303Z" }, + { url = "https://files.pythonhosted.org/packages/80/e5/5e22c5bf96a64bdd43518b1834c6d95a4922cc2066b7d8e467dae9b6cee6/multidict-6.6.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:497a2954adc25c08daff36f795077f63ad33e13f19bfff7736e72c785391534f", size = 254900, upload-time = "2025-08-11T12:07:30.764Z" }, + { url = "https://files.pythonhosted.org/packages/17/38/58b27fed927c07035abc02befacab42491e7388ca105e087e6e0215ead64/multidict-6.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:024ce601f92d780ca1617ad4be5ac15b501cc2414970ffa2bb2bbc2bd5a68fa5", size = 252355, upload-time = "2025-08-11T12:07:32.205Z" }, + { url = "https://files.pythonhosted.org/packages/d0/a1/dad75d23a90c29c02b5d6f3d7c10ab36c3197613be5d07ec49c7791e186c/multidict-6.6.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a693fc5ed9bdd1c9e898013e0da4dcc640de7963a371c0bd458e50e046bf6438", size = 250061, upload-time = "2025-08-11T12:07:33.623Z" }, + { url = "https://files.pythonhosted.org/packages/b8/1a/ac2216b61c7f116edab6dc3378cca6c70dc019c9a457ff0d754067c58b20/multidict-6.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:190766dac95aab54cae5b152a56520fd99298f32a1266d66d27fdd1b5ac00f4e", size = 249675, upload-time = "2025-08-11T12:07:34.958Z" }, + { url = "https://files.pythonhosted.org/packages/d4/79/1916af833b800d13883e452e8e0977c065c4ee3ab7a26941fbfdebc11895/multidict-6.6.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:34d8f2a5ffdceab9dcd97c7a016deb2308531d5f0fced2bb0c9e1df45b3363d7", size = 261247, upload-time = "2025-08-11T12:07:36.588Z" }, + { url = "https://files.pythonhosted.org/packages/c5/65/d1f84fe08ac44a5fc7391cbc20a7cedc433ea616b266284413fd86062f8c/multidict-6.6.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:59e8d40ab1f5a8597abcef00d04845155a5693b5da00d2c93dbe88f2050f2812", size = 257960, upload-time = "2025-08-11T12:07:39.735Z" }, + { url = "https://files.pythonhosted.org/packages/13/b5/29ec78057d377b195ac2c5248c773703a6b602e132a763e20ec0457e7440/multidict-6.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:467fe64138cfac771f0e949b938c2e1ada2b5af22f39692aa9258715e9ea613a", size = 250078, upload-time = "2025-08-11T12:07:41.525Z" }, + { url = "https://files.pythonhosted.org/packages/c4/0e/7e79d38f70a872cae32e29b0d77024bef7834b0afb406ddae6558d9e2414/multidict-6.6.4-cp313-cp313-win32.whl", hash = "sha256:14616a30fe6d0a48d0a48d1a633ab3b8bec4cf293aac65f32ed116f620adfd69", size = 41708, upload-time = "2025-08-11T12:07:43.405Z" }, + { url = "https://files.pythonhosted.org/packages/9d/34/746696dffff742e97cd6a23da953e55d0ea51fa601fa2ff387b3edcfaa2c/multidict-6.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:40cd05eaeb39e2bc8939451f033e57feaa2ac99e07dbca8afe2be450a4a3b6cf", size = 45912, upload-time = "2025-08-11T12:07:45.082Z" }, + { url = "https://files.pythonhosted.org/packages/c7/87/3bac136181e271e29170d8d71929cdeddeb77f3e8b6a0c08da3a8e9da114/multidict-6.6.4-cp313-cp313-win_arm64.whl", hash = "sha256:f6eb37d511bfae9e13e82cb4d1af36b91150466f24d9b2b8a9785816deb16605", size = 43076, upload-time = "2025-08-11T12:07:46.746Z" }, + { url = "https://files.pythonhosted.org/packages/64/94/0a8e63e36c049b571c9ae41ee301ada29c3fee9643d9c2548d7d558a1d99/multidict-6.6.4-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:6c84378acd4f37d1b507dfa0d459b449e2321b3ba5f2338f9b085cf7a7ba95eb", size = 82812, upload-time = "2025-08-11T12:07:48.402Z" }, + { url = "https://files.pythonhosted.org/packages/25/1a/be8e369dfcd260d2070a67e65dd3990dd635cbd735b98da31e00ea84cd4e/multidict-6.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0e0558693063c75f3d952abf645c78f3c5dfdd825a41d8c4d8156fc0b0da6e7e", size = 48313, upload-time = "2025-08-11T12:07:49.679Z" }, + { url = 
"https://files.pythonhosted.org/packages/26/5a/dd4ade298674b2f9a7b06a32c94ffbc0497354df8285f27317c66433ce3b/multidict-6.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3f8e2384cb83ebd23fd07e9eada8ba64afc4c759cd94817433ab8c81ee4b403f", size = 46777, upload-time = "2025-08-11T12:07:51.318Z" }, + { url = "https://files.pythonhosted.org/packages/89/db/98aa28bc7e071bfba611ac2ae803c24e96dd3a452b4118c587d3d872c64c/multidict-6.6.4-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:f996b87b420995a9174b2a7c1a8daf7db4750be6848b03eb5e639674f7963773", size = 229321, upload-time = "2025-08-11T12:07:52.965Z" }, + { url = "https://files.pythonhosted.org/packages/c7/bc/01ddda2a73dd9d167bd85d0e8ef4293836a8f82b786c63fb1a429bc3e678/multidict-6.6.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc356250cffd6e78416cf5b40dc6a74f1edf3be8e834cf8862d9ed5265cf9b0e", size = 249954, upload-time = "2025-08-11T12:07:54.423Z" }, + { url = "https://files.pythonhosted.org/packages/06/78/6b7c0f020f9aa0acf66d0ab4eb9f08375bac9a50ff5e3edb1c4ccd59eafc/multidict-6.6.4-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:dadf95aa862714ea468a49ad1e09fe00fcc9ec67d122f6596a8d40caf6cec7d0", size = 228612, upload-time = "2025-08-11T12:07:55.914Z" }, + { url = "https://files.pythonhosted.org/packages/00/44/3faa416f89b2d5d76e9d447296a81521e1c832ad6e40b92f990697b43192/multidict-6.6.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7dd57515bebffd8ebd714d101d4c434063322e4fe24042e90ced41f18b6d3395", size = 257528, upload-time = "2025-08-11T12:07:57.371Z" }, + { url = "https://files.pythonhosted.org/packages/05/5f/77c03b89af0fcb16f018f668207768191fb9dcfb5e3361a5e706a11db2c9/multidict-6.6.4-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:967af5f238ebc2eb1da4e77af5492219fbd9b4b812347da39a7b5f5c72c0fa45", size = 256329, upload-time = "2025-08-11T12:07:58.844Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e9/ed750a2a9afb4f8dc6f13dc5b67b514832101b95714f1211cd42e0aafc26/multidict-6.6.4-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2a4c6875c37aae9794308ec43e3530e4aa0d36579ce38d89979bbf89582002bb", size = 247928, upload-time = "2025-08-11T12:08:01.037Z" }, + { url = "https://files.pythonhosted.org/packages/1f/b5/e0571bc13cda277db7e6e8a532791d4403dacc9850006cb66d2556e649c0/multidict-6.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:7f683a551e92bdb7fac545b9c6f9fa2aebdeefa61d607510b3533286fcab67f5", size = 245228, upload-time = "2025-08-11T12:08:02.96Z" }, + { url = "https://files.pythonhosted.org/packages/f3/a3/69a84b0eccb9824491f06368f5b86e72e4af54c3067c37c39099b6687109/multidict-6.6.4-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:3ba5aaf600edaf2a868a391779f7a85d93bed147854925f34edd24cc70a3e141", size = 235869, upload-time = "2025-08-11T12:08:04.746Z" }, + { url = "https://files.pythonhosted.org/packages/a9/9d/28802e8f9121a6a0804fa009debf4e753d0a59969ea9f70be5f5fdfcb18f/multidict-6.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:580b643b7fd2c295d83cad90d78419081f53fd532d1f1eb67ceb7060f61cff0d", size = 243446, upload-time = "2025-08-11T12:08:06.332Z" }, + { url = "https://files.pythonhosted.org/packages/38/ea/6c98add069b4878c1d66428a5f5149ddb6d32b1f9836a826ac764b9940be/multidict-6.6.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:37b7187197da6af3ee0b044dbc9625afd0c885f2800815b228a0e70f9a7f473d", size = 252299, upload-time = "2025-08-11T12:08:07.931Z" }, + { url = "https://files.pythonhosted.org/packages/3a/09/8fe02d204473e14c0af3affd50af9078839dfca1742f025cca765435d6b4/multidict-6.6.4-cp313-cp313t-musllinux_1_2_s390x.whl", hash = 
"sha256:e1b93790ed0bc26feb72e2f08299691ceb6da5e9e14a0d13cc74f1869af327a0", size = 246926, upload-time = "2025-08-11T12:08:09.467Z" }, + { url = "https://files.pythonhosted.org/packages/37/3d/7b1e10d774a6df5175ecd3c92bff069e77bed9ec2a927fdd4ff5fe182f67/multidict-6.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a506a77ddee1efcca81ecbeae27ade3e09cdf21a8ae854d766c2bb4f14053f92", size = 243383, upload-time = "2025-08-11T12:08:10.981Z" }, + { url = "https://files.pythonhosted.org/packages/50/b0/a6fae46071b645ae98786ab738447de1ef53742eaad949f27e960864bb49/multidict-6.6.4-cp313-cp313t-win32.whl", hash = "sha256:f93b2b2279883d1d0a9e1bd01f312d6fc315c5e4c1f09e112e4736e2f650bc4e", size = 47775, upload-time = "2025-08-11T12:08:12.439Z" }, + { url = "https://files.pythonhosted.org/packages/b2/0a/2436550b1520091af0600dff547913cb2d66fbac27a8c33bc1b1bccd8d98/multidict-6.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:6d46a180acdf6e87cc41dc15d8f5c2986e1e8739dc25dbb7dac826731ef381a4", size = 53100, upload-time = "2025-08-11T12:08:13.823Z" }, + { url = "https://files.pythonhosted.org/packages/97/ea/43ac51faff934086db9c072a94d327d71b7d8b40cd5dcb47311330929ef0/multidict-6.6.4-cp313-cp313t-win_arm64.whl", hash = "sha256:756989334015e3335d087a27331659820d53ba432befdef6a718398b0a8493ad", size = 45501, upload-time = "2025-08-11T12:08:15.173Z" }, + { url = "https://files.pythonhosted.org/packages/fd/69/b547032297c7e63ba2af494edba695d781af8a0c6e89e4d06cf848b21d80/multidict-6.6.4-py3-none-any.whl", hash = "sha256:27d8f8e125c07cb954e54d75d04905a9bba8a439c1d84aca94949d4d03d8601c", size = 12313, upload-time = "2025-08-11T12:08:46.891Z" }, +] + +[[package]] +name = "mypy" +version = "1.18.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mypy-extensions" }, + { name = "pathspec" }, + { name = "typing-extensions" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/c0/77/8f0d0001ffad290cef2f7f216f96c814866248a0b92a722365ed54648e7e/mypy-1.18.2.tar.gz", hash = "sha256:06a398102a5f203d7477b2923dda3634c36727fa5c237d8f859ef90c42a9924b", size = 3448846, upload-time = "2025-09-19T00:11:10.519Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/87/cafd3ae563f88f94eec33f35ff722d043e09832ea8530ef149ec1efbaf08/mypy-1.18.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:807d9315ab9d464125aa9fcf6d84fde6e1dc67da0b6f80e7405506b8ac72bc7f", size = 12731198, upload-time = "2025-09-19T00:09:44.857Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e0/1e96c3d4266a06d4b0197ace5356d67d937d8358e2ee3ffac71faa843724/mypy-1.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:776bb00de1778caf4db739c6e83919c1d85a448f71979b6a0edd774ea8399341", size = 11817879, upload-time = "2025-09-19T00:09:47.131Z" }, + { url = "https://files.pythonhosted.org/packages/72/ef/0c9ba89eb03453e76bdac5a78b08260a848c7bfc5d6603634774d9cd9525/mypy-1.18.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1379451880512ffce14505493bd9fe469e0697543717298242574882cf8cdb8d", size = 12427292, upload-time = "2025-09-19T00:10:22.472Z" }, + { url = "https://files.pythonhosted.org/packages/1a/52/ec4a061dd599eb8179d5411d99775bec2a20542505988f40fc2fee781068/mypy-1.18.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1331eb7fd110d60c24999893320967594ff84c38ac6d19e0a76c5fd809a84c86", size = 13163750, upload-time = "2025-09-19T00:09:51.472Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5f/2cf2ceb3b36372d51568f2208c021870fe7834cf3186b653ac6446511839/mypy-1.18.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ca30b50a51e7ba93b00422e486cbb124f1c56a535e20eff7b2d6ab72b3b2e37", size = 13351827, upload-time = "2025-09-19T00:09:58.311Z" }, + { url = 
"https://files.pythonhosted.org/packages/c8/7d/2697b930179e7277529eaaec1513f8de622818696857f689e4a5432e5e27/mypy-1.18.2-cp311-cp311-win_amd64.whl", hash = "sha256:664dc726e67fa54e14536f6e1224bcfce1d9e5ac02426d2326e2bb4e081d1ce8", size = 9757983, upload-time = "2025-09-19T00:10:09.071Z" }, + { url = "https://files.pythonhosted.org/packages/07/06/dfdd2bc60c66611dd8335f463818514733bc763e4760dee289dcc33df709/mypy-1.18.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:33eca32dd124b29400c31d7cf784e795b050ace0e1f91b8dc035672725617e34", size = 12908273, upload-time = "2025-09-19T00:10:58.321Z" }, + { url = "https://files.pythonhosted.org/packages/81/14/6a9de6d13a122d5608e1a04130724caf9170333ac5a924e10f670687d3eb/mypy-1.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3c47adf30d65e89b2dcd2fa32f3aeb5e94ca970d2c15fcb25e297871c8e4764", size = 11920910, upload-time = "2025-09-19T00:10:20.043Z" }, + { url = "https://files.pythonhosted.org/packages/5f/a9/b29de53e42f18e8cc547e38daa9dfa132ffdc64f7250e353f5c8cdd44bee/mypy-1.18.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d6c838e831a062f5f29d11c9057c6009f60cb294fea33a98422688181fe2893", size = 12465585, upload-time = "2025-09-19T00:10:33.005Z" }, + { url = "https://files.pythonhosted.org/packages/77/ae/6c3d2c7c61ff21f2bee938c917616c92ebf852f015fb55917fd6e2811db2/mypy-1.18.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01199871b6110a2ce984bde85acd481232d17413868c9807e95c1b0739a58914", size = 13348562, upload-time = "2025-09-19T00:10:11.51Z" }, + { url = "https://files.pythonhosted.org/packages/4d/31/aec68ab3b4aebdf8f36d191b0685d99faa899ab990753ca0fee60fb99511/mypy-1.18.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a2afc0fa0b0e91b4599ddfe0f91e2c26c2b5a5ab263737e998d6817874c5f7c8", size = 13533296, upload-time = "2025-09-19T00:10:06.568Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/83/abcb3ad9478fca3ebeb6a5358bb0b22c95ea42b43b7789c7fb1297ca44f4/mypy-1.18.2-cp312-cp312-win_amd64.whl", hash = "sha256:d8068d0afe682c7c4897c0f7ce84ea77f6de953262b12d07038f4d296d547074", size = 9828828, upload-time = "2025-09-19T00:10:28.203Z" }, + { url = "https://files.pythonhosted.org/packages/5f/04/7f462e6fbba87a72bc8097b93f6842499c428a6ff0c81dd46948d175afe8/mypy-1.18.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:07b8b0f580ca6d289e69209ec9d3911b4a26e5abfde32228a288eb79df129fcc", size = 12898728, upload-time = "2025-09-19T00:10:01.33Z" }, + { url = "https://files.pythonhosted.org/packages/99/5b/61ed4efb64f1871b41fd0b82d29a64640f3516078f6c7905b68ab1ad8b13/mypy-1.18.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ed4482847168439651d3feee5833ccedbf6657e964572706a2adb1f7fa4dfe2e", size = 11910758, upload-time = "2025-09-19T00:10:42.607Z" }, + { url = "https://files.pythonhosted.org/packages/3c/46/d297d4b683cc89a6e4108c4250a6a6b717f5fa96e1a30a7944a6da44da35/mypy-1.18.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3ad2afadd1e9fea5cf99a45a822346971ede8685cc581ed9cd4d42eaf940986", size = 12475342, upload-time = "2025-09-19T00:11:00.371Z" }, + { url = "https://files.pythonhosted.org/packages/83/45/4798f4d00df13eae3bfdf726c9244bcb495ab5bd588c0eed93a2f2dd67f3/mypy-1.18.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a431a6f1ef14cf8c144c6b14793a23ec4eae3db28277c358136e79d7d062f62d", size = 13338709, upload-time = "2025-09-19T00:11:03.358Z" }, + { url = "https://files.pythonhosted.org/packages/d7/09/479f7358d9625172521a87a9271ddd2441e1dab16a09708f056e97007207/mypy-1.18.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7ab28cc197f1dd77a67e1c6f35cd1f8e8b73ed2217e4fc005f9e6a504e46e7ba", size = 13529806, upload-time = "2025-09-19T00:10:26.073Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/cf/ac0f2c7e9d0ea3c75cd99dff7aec1c9df4a1376537cb90e4c882267ee7e9/mypy-1.18.2-cp313-cp313-win_amd64.whl", hash = "sha256:0e2785a84b34a72ba55fb5daf079a1003a34c05b22238da94fcae2bbe46f3544", size = 9833262, upload-time = "2025-09-19T00:10:40.035Z" }, + { url = "https://files.pythonhosted.org/packages/5a/0c/7d5300883da16f0063ae53996358758b2a2df2a09c72a5061fa79a1f5006/mypy-1.18.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:62f0e1e988ad41c2a110edde6c398383a889d95b36b3e60bcf155f5164c4fdce", size = 12893775, upload-time = "2025-09-19T00:10:03.814Z" }, + { url = "https://files.pythonhosted.org/packages/50/df/2cffbf25737bdb236f60c973edf62e3e7b4ee1c25b6878629e88e2cde967/mypy-1.18.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8795a039bab805ff0c1dfdb8cd3344642c2b99b8e439d057aba30850b8d3423d", size = 11936852, upload-time = "2025-09-19T00:10:51.631Z" }, + { url = "https://files.pythonhosted.org/packages/be/50/34059de13dd269227fb4a03be1faee6e2a4b04a2051c82ac0a0b5a773c9a/mypy-1.18.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6ca1e64b24a700ab5ce10133f7ccd956a04715463d30498e64ea8715236f9c9c", size = 12480242, upload-time = "2025-09-19T00:11:07.955Z" }, + { url = "https://files.pythonhosted.org/packages/5b/11/040983fad5132d85914c874a2836252bbc57832065548885b5bb5b0d4359/mypy-1.18.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d924eef3795cc89fecf6bedc6ed32b33ac13e8321344f6ddbf8ee89f706c05cb", size = 13326683, upload-time = "2025-09-19T00:09:55.572Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ba/89b2901dd77414dd7a8c8729985832a5735053be15b744c18e4586e506ef/mypy-1.18.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20c02215a080e3a2be3aa50506c67242df1c151eaba0dcbc1e4e557922a26075", size = 13514749, upload-time = "2025-09-19T00:10:44.827Z" }, + { url = 
"https://files.pythonhosted.org/packages/25/bc/cc98767cffd6b2928ba680f3e5bc969c4152bf7c2d83f92f5a504b92b0eb/mypy-1.18.2-cp314-cp314-win_amd64.whl", hash = "sha256:749b5f83198f1ca64345603118a6f01a4e99ad4bf9d103ddc5a3200cc4614adf", size = 9982959, upload-time = "2025-09-19T00:10:37.344Z" }, + { url = "https://files.pythonhosted.org/packages/87/e3/be76d87158ebafa0309946c4a73831974d4d6ab4f4ef40c3b53a385a66fd/mypy-1.18.2-py3-none-any.whl", hash = "sha256:22a1748707dd62b58d2ae53562ffc4d7f8bcc727e8ac7cbc69c053ddc874d47e", size = 2352367, upload-time = "2025-09-19T00:10:15.489Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, 
+] + +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, +] + +[[package]] +name = "platformdirs" +version = "4.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/23/e8/21db9c9987b0e728855bd57bff6984f67952bea55d6f75e055c46b5383e8/platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf", size = 21634, upload-time = "2025-08-26T14:32:04.268Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/4b/2028861e724d3bd36227adfa20d3fd24c3fc6d52032f4a93c133be5d17ce/platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85", size = 18654, upload-time = "2025-08-26T14:32:02.735Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = 
"sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "propcache" +version = "0.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/16/43264e4a779dd8588c21a70f0709665ee8f611211bdd2c87d952cfa7c776/propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168", size = 44139, upload-time = "2025-06-09T22:56:06.081Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/80/8d/e8b436717ab9c2cfc23b116d2c297305aa4cd8339172a456d61ebf5669b8/propcache-0.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be", size = 74207, upload-time = "2025-06-09T22:54:05.399Z" }, + { url = "https://files.pythonhosted.org/packages/d6/29/1e34000e9766d112171764b9fa3226fa0153ab565d0c242c70e9945318a7/propcache-0.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f", size = 43648, upload-time = "2025-06-09T22:54:08.023Z" }, + { url = "https://files.pythonhosted.org/packages/46/92/1ad5af0df781e76988897da39b5f086c2bf0f028b7f9bd1f409bb05b6874/propcache-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9", size = 43496, upload-time = "2025-06-09T22:54:09.228Z" }, + { url = "https://files.pythonhosted.org/packages/b3/ce/e96392460f9fb68461fabab3e095cb00c8ddf901205be4eae5ce246e5b7e/propcache-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf", size = 217288, upload-time = "2025-06-09T22:54:10.466Z" }, + { url = 
"https://files.pythonhosted.org/packages/c5/2a/866726ea345299f7ceefc861a5e782b045545ae6940851930a6adaf1fca6/propcache-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9", size = 227456, upload-time = "2025-06-09T22:54:11.828Z" }, + { url = "https://files.pythonhosted.org/packages/de/03/07d992ccb6d930398689187e1b3c718339a1c06b8b145a8d9650e4726166/propcache-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66", size = 225429, upload-time = "2025-06-09T22:54:13.823Z" }, + { url = "https://files.pythonhosted.org/packages/5d/e6/116ba39448753b1330f48ab8ba927dcd6cf0baea8a0ccbc512dfb49ba670/propcache-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df", size = 213472, upload-time = "2025-06-09T22:54:15.232Z" }, + { url = "https://files.pythonhosted.org/packages/a6/85/f01f5d97e54e428885a5497ccf7f54404cbb4f906688a1690cd51bf597dc/propcache-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2", size = 204480, upload-time = "2025-06-09T22:54:17.104Z" }, + { url = "https://files.pythonhosted.org/packages/e3/79/7bf5ab9033b8b8194cc3f7cf1aaa0e9c3256320726f64a3e1f113a812dce/propcache-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7", size = 214530, upload-time = "2025-06-09T22:54:18.512Z" }, + { url = "https://files.pythonhosted.org/packages/31/0b/bd3e0c00509b609317df4a18e6b05a450ef2d9a963e1d8bc9c9415d86f30/propcache-0.3.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95", size = 205230, upload-time = "2025-06-09T22:54:19.947Z" }, + { 
url = "https://files.pythonhosted.org/packages/7a/23/fae0ff9b54b0de4e819bbe559508da132d5683c32d84d0dc2ccce3563ed4/propcache-0.3.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e", size = 206754, upload-time = "2025-06-09T22:54:21.716Z" }, + { url = "https://files.pythonhosted.org/packages/b7/7f/ad6a3c22630aaa5f618b4dc3c3598974a72abb4c18e45a50b3cdd091eb2f/propcache-0.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e", size = 218430, upload-time = "2025-06-09T22:54:23.17Z" }, + { url = "https://files.pythonhosted.org/packages/5b/2c/ba4f1c0e8a4b4c75910742f0d333759d441f65a1c7f34683b4a74c0ee015/propcache-0.3.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf", size = 223884, upload-time = "2025-06-09T22:54:25.539Z" }, + { url = "https://files.pythonhosted.org/packages/88/e4/ebe30fc399e98572019eee82ad0caf512401661985cbd3da5e3140ffa1b0/propcache-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e", size = 211480, upload-time = "2025-06-09T22:54:26.892Z" }, + { url = "https://files.pythonhosted.org/packages/96/0a/7d5260b914e01d1d0906f7f38af101f8d8ed0dc47426219eeaf05e8ea7c2/propcache-0.3.2-cp311-cp311-win32.whl", hash = "sha256:36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897", size = 37757, upload-time = "2025-06-09T22:54:28.241Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2d/89fe4489a884bc0da0c3278c552bd4ffe06a1ace559db5ef02ef24ab446b/propcache-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39", size = 41500, upload-time = "2025-06-09T22:54:29.4Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/42/9ca01b0a6f48e81615dca4765a8f1dd2c057e0540f6116a27dc5ee01dfb6/propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10", size = 73674, upload-time = "2025-06-09T22:54:30.551Z" }, + { url = "https://files.pythonhosted.org/packages/af/6e/21293133beb550f9c901bbece755d582bfaf2176bee4774000bd4dd41884/propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154", size = 43570, upload-time = "2025-06-09T22:54:32.296Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c8/0393a0a3a2b8760eb3bde3c147f62b20044f0ddac81e9d6ed7318ec0d852/propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615", size = 43094, upload-time = "2025-06-09T22:54:33.929Z" }, + { url = "https://files.pythonhosted.org/packages/37/2c/489afe311a690399d04a3e03b069225670c1d489eb7b044a566511c1c498/propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db", size = 226958, upload-time = "2025-06-09T22:54:35.186Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ca/63b520d2f3d418c968bf596839ae26cf7f87bead026b6192d4da6a08c467/propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1", size = 234894, upload-time = "2025-06-09T22:54:36.708Z" }, + { url = "https://files.pythonhosted.org/packages/11/60/1d0ed6fff455a028d678df30cc28dcee7af77fa2b0e6962ce1df95c9a2a9/propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c", size = 233672, upload-time = "2025-06-09T22:54:38.062Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/7c/54fd5301ef38505ab235d98827207176a5c9b2aa61939b10a460ca53e123/propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67", size = 224395, upload-time = "2025-06-09T22:54:39.634Z" }, + { url = "https://files.pythonhosted.org/packages/ee/1a/89a40e0846f5de05fdc6779883bf46ba980e6df4d2ff8fb02643de126592/propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b", size = 212510, upload-time = "2025-06-09T22:54:41.565Z" }, + { url = "https://files.pythonhosted.org/packages/5e/33/ca98368586c9566a6b8d5ef66e30484f8da84c0aac3f2d9aec6d31a11bd5/propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8", size = 222949, upload-time = "2025-06-09T22:54:43.038Z" }, + { url = "https://files.pythonhosted.org/packages/ba/11/ace870d0aafe443b33b2f0b7efdb872b7c3abd505bfb4890716ad7865e9d/propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251", size = 217258, upload-time = "2025-06-09T22:54:44.376Z" }, + { url = "https://files.pythonhosted.org/packages/5b/d2/86fd6f7adffcfc74b42c10a6b7db721d1d9ca1055c45d39a1a8f2a740a21/propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474", size = 213036, upload-time = "2025-06-09T22:54:46.243Z" }, + { url = "https://files.pythonhosted.org/packages/07/94/2d7d1e328f45ff34a0a284cf5a2847013701e24c2a53117e7c280a4316b3/propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535", size = 227684, upload-time = "2025-06-09T22:54:47.63Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/05/37ae63a0087677e90b1d14710e532ff104d44bc1efa3b3970fff99b891dc/propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06", size = 234562, upload-time = "2025-06-09T22:54:48.982Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7c/3f539fcae630408d0bd8bf3208b9a647ccad10976eda62402a80adf8fc34/propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1", size = 222142, upload-time = "2025-06-09T22:54:50.424Z" }, + { url = "https://files.pythonhosted.org/packages/7c/d2/34b9eac8c35f79f8a962546b3e97e9d4b990c420ee66ac8255d5d9611648/propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1", size = 37711, upload-time = "2025-06-09T22:54:52.072Z" }, + { url = "https://files.pythonhosted.org/packages/19/61/d582be5d226cf79071681d1b46b848d6cb03d7b70af7063e33a2787eaa03/propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c", size = 41479, upload-time = "2025-06-09T22:54:53.234Z" }, + { url = "https://files.pythonhosted.org/packages/dc/d1/8c747fafa558c603c4ca19d8e20b288aa0c7cda74e9402f50f31eb65267e/propcache-0.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ca592ed634a73ca002967458187109265e980422116c0a107cf93d81f95af945", size = 71286, upload-time = "2025-06-09T22:54:54.369Z" }, + { url = "https://files.pythonhosted.org/packages/61/99/d606cb7986b60d89c36de8a85d58764323b3a5ff07770a99d8e993b3fa73/propcache-0.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9ecb0aad4020e275652ba3975740f241bd12a61f1a784df044cf7477a02bc252", size = 42425, upload-time = "2025-06-09T22:54:55.642Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/96/ef98f91bbb42b79e9bb82bdd348b255eb9d65f14dbbe3b1594644c4073f7/propcache-0.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7f08f1cc28bd2eade7a8a3d2954ccc673bb02062e3e7da09bc75d843386b342f", size = 41846, upload-time = "2025-06-09T22:54:57.246Z" }, + { url = "https://files.pythonhosted.org/packages/5b/ad/3f0f9a705fb630d175146cd7b1d2bf5555c9beaed54e94132b21aac098a6/propcache-0.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1a342c834734edb4be5ecb1e9fb48cb64b1e2320fccbd8c54bf8da8f2a84c33", size = 208871, upload-time = "2025-06-09T22:54:58.975Z" }, + { url = "https://files.pythonhosted.org/packages/3a/38/2085cda93d2c8b6ec3e92af2c89489a36a5886b712a34ab25de9fbca7992/propcache-0.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a544caaae1ac73f1fecfae70ded3e93728831affebd017d53449e3ac052ac1e", size = 215720, upload-time = "2025-06-09T22:55:00.471Z" }, + { url = "https://files.pythonhosted.org/packages/61/c1/d72ea2dc83ac7f2c8e182786ab0fc2c7bd123a1ff9b7975bee671866fe5f/propcache-0.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310d11aa44635298397db47a3ebce7db99a4cc4b9bbdfcf6c98a60c8d5261cf1", size = 215203, upload-time = "2025-06-09T22:55:01.834Z" }, + { url = "https://files.pythonhosted.org/packages/af/81/b324c44ae60c56ef12007105f1460d5c304b0626ab0cc6b07c8f2a9aa0b8/propcache-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1396592321ac83157ac03a2023aa6cc4a3cc3cfdecb71090054c09e5a7cce3", size = 206365, upload-time = "2025-06-09T22:55:03.199Z" }, + { url = "https://files.pythonhosted.org/packages/09/73/88549128bb89e66d2aff242488f62869014ae092db63ccea53c1cc75a81d/propcache-0.3.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cabf5b5902272565e78197edb682017d21cf3b550ba0460ee473753f28d23c1", size = 196016, upload-time = 
"2025-06-09T22:55:04.518Z" }, + { url = "https://files.pythonhosted.org/packages/b9/3f/3bdd14e737d145114a5eb83cb172903afba7242f67c5877f9909a20d948d/propcache-0.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0a2f2235ac46a7aa25bdeb03a9e7060f6ecbd213b1f9101c43b3090ffb971ef6", size = 205596, upload-time = "2025-06-09T22:55:05.942Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ca/2f4aa819c357d3107c3763d7ef42c03980f9ed5c48c82e01e25945d437c1/propcache-0.3.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:92b69e12e34869a6970fd2f3da91669899994b47c98f5d430b781c26f1d9f387", size = 200977, upload-time = "2025-06-09T22:55:07.792Z" }, + { url = "https://files.pythonhosted.org/packages/cd/4a/e65276c7477533c59085251ae88505caf6831c0e85ff8b2e31ebcbb949b1/propcache-0.3.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:54e02207c79968ebbdffc169591009f4474dde3b4679e16634d34c9363ff56b4", size = 197220, upload-time = "2025-06-09T22:55:09.173Z" }, + { url = "https://files.pythonhosted.org/packages/7c/54/fc7152e517cf5578278b242396ce4d4b36795423988ef39bb8cd5bf274c8/propcache-0.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4adfb44cb588001f68c5466579d3f1157ca07f7504fc91ec87862e2b8e556b88", size = 210642, upload-time = "2025-06-09T22:55:10.62Z" }, + { url = "https://files.pythonhosted.org/packages/b9/80/abeb4a896d2767bf5f1ea7b92eb7be6a5330645bd7fb844049c0e4045d9d/propcache-0.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fd3e6019dc1261cd0291ee8919dd91fbab7b169bb76aeef6c716833a3f65d206", size = 212789, upload-time = "2025-06-09T22:55:12.029Z" }, + { url = "https://files.pythonhosted.org/packages/b3/db/ea12a49aa7b2b6d68a5da8293dcf50068d48d088100ac016ad92a6a780e6/propcache-0.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4c181cad81158d71c41a2bce88edce078458e2dd5ffee7eddd6b05da85079f43", size = 205880, upload-time = "2025-06-09T22:55:13.45Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/e5/9076a0bbbfb65d1198007059c65639dfd56266cf8e477a9707e4b1999ff4/propcache-0.3.2-cp313-cp313-win32.whl", hash = "sha256:8a08154613f2249519e549de2330cf8e2071c2887309a7b07fb56098f5170a02", size = 37220, upload-time = "2025-06-09T22:55:15.284Z" }, + { url = "https://files.pythonhosted.org/packages/d3/f5/b369e026b09a26cd77aa88d8fffd69141d2ae00a2abaaf5380d2603f4b7f/propcache-0.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e41671f1594fc4ab0a6dec1351864713cb3a279910ae8b58f884a88a0a632c05", size = 40678, upload-time = "2025-06-09T22:55:16.445Z" }, + { url = "https://files.pythonhosted.org/packages/a4/3a/6ece377b55544941a08d03581c7bc400a3c8cd3c2865900a68d5de79e21f/propcache-0.3.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:9a3cf035bbaf035f109987d9d55dc90e4b0e36e04bbbb95af3055ef17194057b", size = 76560, upload-time = "2025-06-09T22:55:17.598Z" }, + { url = "https://files.pythonhosted.org/packages/0c/da/64a2bb16418740fa634b0e9c3d29edff1db07f56d3546ca2d86ddf0305e1/propcache-0.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:156c03d07dc1323d8dacaa221fbe028c5c70d16709cdd63502778e6c3ccca1b0", size = 44676, upload-time = "2025-06-09T22:55:18.922Z" }, + { url = "https://files.pythonhosted.org/packages/36/7b/f025e06ea51cb72c52fb87e9b395cced02786610b60a3ed51da8af017170/propcache-0.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74413c0ba02ba86f55cf60d18daab219f7e531620c15f1e23d95563f505efe7e", size = 44701, upload-time = "2025-06-09T22:55:20.106Z" }, + { url = "https://files.pythonhosted.org/packages/a4/00/faa1b1b7c3b74fc277f8642f32a4c72ba1d7b2de36d7cdfb676db7f4303e/propcache-0.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f066b437bb3fa39c58ff97ab2ca351db465157d68ed0440abecb21715eb24b28", size = 276934, upload-time = "2025-06-09T22:55:21.5Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/ab/935beb6f1756e0476a4d5938ff44bf0d13a055fed880caf93859b4f1baf4/propcache-0.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1304b085c83067914721e7e9d9917d41ad87696bf70f0bc7dee450e9c71ad0a", size = 278316, upload-time = "2025-06-09T22:55:22.918Z" }, + { url = "https://files.pythonhosted.org/packages/f8/9d/994a5c1ce4389610838d1caec74bdf0e98b306c70314d46dbe4fcf21a3e2/propcache-0.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab50cef01b372763a13333b4e54021bdcb291fc9a8e2ccb9c2df98be51bcde6c", size = 282619, upload-time = "2025-06-09T22:55:24.651Z" }, + { url = "https://files.pythonhosted.org/packages/2b/00/a10afce3d1ed0287cef2e09506d3be9822513f2c1e96457ee369adb9a6cd/propcache-0.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad3b2a085ec259ad2c2842666b2a0a49dea8463579c606426128925af1ed725", size = 265896, upload-time = "2025-06-09T22:55:26.049Z" }, + { url = "https://files.pythonhosted.org/packages/2e/a8/2aa6716ffa566ca57c749edb909ad27884680887d68517e4be41b02299f3/propcache-0.3.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:261fa020c1c14deafd54c76b014956e2f86991af198c51139faf41c4d5e83892", size = 252111, upload-time = "2025-06-09T22:55:27.381Z" }, + { url = "https://files.pythonhosted.org/packages/36/4f/345ca9183b85ac29c8694b0941f7484bf419c7f0fea2d1e386b4f7893eed/propcache-0.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:46d7f8aa79c927e5f987ee3a80205c987717d3659f035c85cf0c3680526bdb44", size = 268334, upload-time = "2025-06-09T22:55:28.747Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ca/fcd54f78b59e3f97b3b9715501e3147f5340167733d27db423aa321e7148/propcache-0.3.2-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:6d8f3f0eebf73e3c0ff0e7853f68be638b4043c65a70517bb575eff54edd8dbe", size = 255026, upload-time = "2025-06-09T22:55:30.184Z" }, 
+ { url = "https://files.pythonhosted.org/packages/8b/95/8e6a6bbbd78ac89c30c225210a5c687790e532ba4088afb8c0445b77ef37/propcache-0.3.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:03c89c1b14a5452cf15403e291c0ccd7751d5b9736ecb2c5bab977ad6c5bcd81", size = 250724, upload-time = "2025-06-09T22:55:31.646Z" }, + { url = "https://files.pythonhosted.org/packages/ee/b0/0dd03616142baba28e8b2d14ce5df6631b4673850a3d4f9c0f9dd714a404/propcache-0.3.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:0cc17efde71e12bbaad086d679ce575268d70bc123a5a71ea7ad76f70ba30bba", size = 268868, upload-time = "2025-06-09T22:55:33.209Z" }, + { url = "https://files.pythonhosted.org/packages/c5/98/2c12407a7e4fbacd94ddd32f3b1e3d5231e77c30ef7162b12a60e2dd5ce3/propcache-0.3.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:acdf05d00696bc0447e278bb53cb04ca72354e562cf88ea6f9107df8e7fd9770", size = 271322, upload-time = "2025-06-09T22:55:35.065Z" }, + { url = "https://files.pythonhosted.org/packages/35/91/9cb56efbb428b006bb85db28591e40b7736847b8331d43fe335acf95f6c8/propcache-0.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4445542398bd0b5d32df908031cb1b30d43ac848e20470a878b770ec2dcc6330", size = 265778, upload-time = "2025-06-09T22:55:36.45Z" }, + { url = "https://files.pythonhosted.org/packages/9a/4c/b0fe775a2bdd01e176b14b574be679d84fc83958335790f7c9a686c1f468/propcache-0.3.2-cp313-cp313t-win32.whl", hash = "sha256:f86e5d7cd03afb3a1db8e9f9f6eff15794e79e791350ac48a8c924e6f439f394", size = 41175, upload-time = "2025-06-09T22:55:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ff/47f08595e3d9b5e149c150f88d9714574f1a7cbd89fe2817158a952674bf/propcache-0.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9704bedf6e7cbe3c65eca4379a9b53ee6a83749f047808cbb5044d40d7d72198", size = 44857, upload-time = "2025-06-09T22:55:39.687Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", size = 12663, upload-time = "2025-06-09T22:56:04.484Z" }, +] + +[[package]] +name = "pydantic" +version = "2.11.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.33.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, + { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, + { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, + { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, + { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = 
"2025-04-23T18:31:13.536Z" }, + { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, + { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, + { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, + { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, + { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, + { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, + { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, + { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, + { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, + { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, + { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, + { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, + { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, + { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, + { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, + { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, + { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, + { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, + { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, + { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, + { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, + { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, + { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = 
"2025-04-23T18:32:08.178Z" }, + { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, + { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, + { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, + { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, + { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, + { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, + { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, + { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, + { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, + { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, + { 
url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, + { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, + { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, + { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, + { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, 
upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pytest" +version = "8.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, +] + +[[package]] +name = "pytest-cov" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", extra = ["toml"] }, + { name = "pluggy" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, 
upload-time = "2025-09-09T10:57:00.695Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, + { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, + { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, + { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, + { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" }, + { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = 
"https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + +[[package]] +name = "rich" +version = "14.1.0" +source = { registry 
= "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fe/75/af448d8e52bf1d8fa6a9d089ca6c07ff4453d86c65c145d0a300bb073b9b/rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8", size = 224441, upload-time = "2025-07-25T07:32:58.125Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/30/3c4d035596d3cf444529e0b2953ad0466f6049528a879d27534700580395/rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f", size = 243368, upload-time = "2025-07-25T07:32:56.73Z" }, +] + +[[package]] +name = "ruamel-yaml" +version = "0.18.15" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ruamel-yaml-clib", marker = "python_full_version < '3.14' and platform_python_implementation == 'CPython'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3e/db/f3950f5e5031b618aae9f423a39bf81a55c148aecd15a34527898e752cf4/ruamel.yaml-0.18.15.tar.gz", hash = "sha256:dbfca74b018c4c3fba0b9cc9ee33e53c371194a9000e694995e620490fd40700", size = 146865, upload-time = "2025-08-19T11:15:10.694Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/e5/f2a0621f1781b76a38194acae72f01e37b1941470407345b6e8653ad7640/ruamel.yaml-0.18.15-py3-none-any.whl", hash = "sha256:148f6488d698b7a5eded5ea793a025308b25eca97208181b6a026037f391f701", size = 119702, upload-time = "2025-08-19T11:15:07.696Z" }, +] + +[[package]] +name = "ruamel-yaml-clib" +version = "0.2.14" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/e9/39ec4d4b3f91188fad1842748f67d4e749c77c37e353c4e545052ee8e893/ruamel.yaml.clib-0.2.14.tar.gz", hash = "sha256:803f5044b13602d58ea378576dd75aa759f52116a0232608e8fdada4da33752e", size = 225394, upload-time = "2025-09-22T19:51:23.753Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/b3/9f/3c51e9578b8c36fcc4bdd271a1a5bb65963a74a4b6ad1a989768a22f6c2a/ruamel.yaml.clib-0.2.14-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5bae1a073ca4244620425cd3d3aa9746bde590992b98ee8c7c8be8c597ca0d4e", size = 270207, upload-time = "2025-09-23T14:24:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/4a/16/cb02815bc2ae9c66760c0c061d23c7358f9ba51dae95ac85247662b7fbe2/ruamel.yaml.clib-0.2.14-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:0a54e5e40a7a691a426c2703b09b0d61a14294d25cfacc00631aa6f9c964df0d", size = 137780, upload-time = "2025-09-22T19:50:37.734Z" }, + { url = "https://files.pythonhosted.org/packages/31/c6/fc687cd1b93bff8e40861eea46d6dc1a6a778d9a085684e4045ff26a8e40/ruamel.yaml.clib-0.2.14-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:10d9595b6a19778f3269399eff6bab642608e5966183abc2adbe558a42d4efc9", size = 641590, upload-time = "2025-09-22T19:50:41.978Z" }, + { url = "https://files.pythonhosted.org/packages/45/5d/65a2bc08b709b08576b3f307bf63951ee68a8e047cbbda6f1c9864ecf9a7/ruamel.yaml.clib-0.2.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dba72975485f2b87b786075e18a6e5d07dc2b4d8973beb2732b9b2816f1bad70", size = 738090, upload-time = "2025-09-22T19:50:39.152Z" }, + { url = "https://files.pythonhosted.org/packages/fb/d0/a70a03614d9a6788a3661ab1538879ed2aae4e84d861f101243116308a37/ruamel.yaml.clib-0.2.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29757bdb7c142f9595cc1b62ec49a3d1c83fab9cef92db52b0ccebaad4eafb98", size = 700744, upload-time = "2025-09-22T19:50:40.811Z" }, + { url = "https://files.pythonhosted.org/packages/77/30/c93fa457611f79946d5cb6cc97493ca5425f3f21891d7b1f9b44eaa1b38e/ruamel.yaml.clib-0.2.14-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:557df28dbccf79b152fe2d1b935f6063d9cc431199ea2b0e84892f35c03bb0ee", size = 742321, upload-time = "2025-09-23T18:42:48.916Z" }, + 
{ url = "https://files.pythonhosted.org/packages/40/85/e2c54ad637117cd13244a4649946eaa00f32edcb882d1f92df90e079ab00/ruamel.yaml.clib-0.2.14-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:26a8de280ab0d22b6e3ec745b4a5a07151a0f74aad92dd76ab9c8d8d7087720d", size = 743805, upload-time = "2025-09-22T19:50:43.58Z" }, + { url = "https://files.pythonhosted.org/packages/81/50/f899072c38877d8ef5382e0b3d47f8c4346226c1f52d6945d6f64fec6a2f/ruamel.yaml.clib-0.2.14-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e501c096aa3889133d674605ebd018471bc404a59cbc17da3c5924421c54d97c", size = 769529, upload-time = "2025-09-22T19:50:45.707Z" }, + { url = "https://files.pythonhosted.org/packages/99/7c/96d4b5075e30c65ea2064e40c2d657c7c235d7b6ef18751cf89a935b9041/ruamel.yaml.clib-0.2.14-cp311-cp311-win32.whl", hash = "sha256:915748cfc25b8cfd81b14d00f4bfdb2ab227a30d6d43459034533f4d1c207a2a", size = 100256, upload-time = "2025-09-22T19:50:48.26Z" }, + { url = "https://files.pythonhosted.org/packages/7d/8c/73ee2babd04e8bfcf1fd5c20aa553d18bf0ebc24b592b4f831d12ae46cc0/ruamel.yaml.clib-0.2.14-cp311-cp311-win_amd64.whl", hash = "sha256:4ccba93c1e5a40af45b2f08e4591969fa4697eae951c708f3f83dcbf9f6c6bb1", size = 118234, upload-time = "2025-09-22T19:50:47.019Z" }, + { url = "https://files.pythonhosted.org/packages/b4/42/ccfb34a25289afbbc42017e4d3d4288e61d35b2e00cfc6b92974a6a1f94b/ruamel.yaml.clib-0.2.14-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:6aeadc170090ff1889f0d2c3057557f9cd71f975f17535c26a5d37af98f19c27", size = 271775, upload-time = "2025-09-23T14:24:12.771Z" }, + { url = "https://files.pythonhosted.org/packages/82/73/e628a92e80197ff6a79ab81ec3fa00d4cc082d58ab78d3337b7ba7043301/ruamel.yaml.clib-0.2.14-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5e56ac47260c0eed992789fa0b8efe43404a9adb608608631a948cee4fc2b052", size = 138842, upload-time = "2025-09-22T19:50:49.156Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/c5/346c7094344a60419764b4b1334d9e0285031c961176ff88ffb652405b0c/ruamel.yaml.clib-0.2.14-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:a911aa73588d9a8b08d662b9484bc0567949529824a55d3885b77e8dd62a127a", size = 647404, upload-time = "2025-09-22T19:50:52.921Z" }, + { url = "https://files.pythonhosted.org/packages/df/99/65080c863eb06d4498de3d6c86f3e90595e02e159fd8529f1565f56cfe2c/ruamel.yaml.clib-0.2.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a05ba88adf3d7189a974b2de7a9d56731548d35dc0a822ec3dc669caa7019b29", size = 753141, upload-time = "2025-09-22T19:50:50.294Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e3/0de85f3e3333f8e29e4b10244374a202a87665d1131798946ee22cf05c7c/ruamel.yaml.clib-0.2.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb04c5650de6668b853623eceadcdb1a9f2fee381f5d7b6bc842ee7c239eeec4", size = 703477, upload-time = "2025-09-22T19:50:51.508Z" }, + { url = "https://files.pythonhosted.org/packages/d9/25/0d2f09d8833c7fd77ab8efeff213093c16856479a9d293180a0d89f6bed9/ruamel.yaml.clib-0.2.14-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:df3ec9959241d07bc261f4983d25a1205ff37703faf42b474f15d54d88b4f8c9", size = 741157, upload-time = "2025-09-23T18:42:50.408Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8c/959f10c2e2153cbdab834c46e6954b6dd9e3b109c8f8c0a3cf1618310985/ruamel.yaml.clib-0.2.14-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fbc08c02e9b147a11dfcaa1ac8a83168b699863493e183f7c0c8b12850b7d259", size = 745859, upload-time = "2025-09-22T19:50:54.497Z" }, + { url = "https://files.pythonhosted.org/packages/ed/6b/e580a7c18b485e1a5f30a32cda96b20364b0ba649d9d2baaf72f8bd21f83/ruamel.yaml.clib-0.2.14-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c099cafc1834d3c5dac305865d04235f7c21c167c8dd31ebc3d6bbc357e2f023", size = 770200, upload-time = "2025-09-22T19:50:55.718Z" }, + 
{ url = "https://files.pythonhosted.org/packages/ef/44/3455eebc761dc8e8fdced90f2b0a3fa61e32ba38b50de4130e2d57db0f21/ruamel.yaml.clib-0.2.14-cp312-cp312-win32.whl", hash = "sha256:b5b0f7e294700b615a3bcf6d28b26e6da94e8eba63b079f4ec92e9ba6c0d6b54", size = 98829, upload-time = "2025-09-22T19:50:58.895Z" }, + { url = "https://files.pythonhosted.org/packages/76/ab/5121f7f3b651db93de546f8c982c241397aad0a4765d793aca1dac5eadee/ruamel.yaml.clib-0.2.14-cp312-cp312-win_amd64.whl", hash = "sha256:a37f40a859b503304dd740686359fcf541d6fb3ff7fc10f539af7f7150917c68", size = 115570, upload-time = "2025-09-22T19:50:57.981Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ae/e3811f05415594025e96000349d3400978adaed88d8f98d494352d9761ee/ruamel.yaml.clib-0.2.14-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7e4f9da7e7549946e02a6122dcad00b7c1168513acb1f8a726b1aaf504a99d32", size = 269205, upload-time = "2025-09-23T14:24:15.06Z" }, + { url = "https://files.pythonhosted.org/packages/72/06/7d51f4688d6d72bb72fa74254e1593c4f5ebd0036be5b41fe39315b275e9/ruamel.yaml.clib-0.2.14-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:dd7546c851e59c06197a7c651335755e74aa383a835878ca86d2c650c07a2f85", size = 137417, upload-time = "2025-09-22T19:50:59.82Z" }, + { url = "https://files.pythonhosted.org/packages/5a/08/b4499234a420ef42960eeb05585df5cc7eb25ccb8c980490b079e6367050/ruamel.yaml.clib-0.2.14-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:1c1acc3a0209ea9042cc3cfc0790edd2eddd431a2ec3f8283d081e4d5018571e", size = 642558, upload-time = "2025-09-22T19:51:03.388Z" }, + { url = "https://files.pythonhosted.org/packages/b6/ba/1975a27dedf1c4c33306ee67c948121be8710b19387aada29e2f139c43ee/ruamel.yaml.clib-0.2.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2070bf0ad1540d5c77a664de07ebcc45eebd1ddcab71a7a06f26936920692beb", size = 744087, upload-time = "2025-09-22T19:51:00.897Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/15/8a19a13d27f3bd09fa18813add8380a29115a47b553845f08802959acbce/ruamel.yaml.clib-0.2.14-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd8fe07f49c170e09d76773fb86ad9135e0beee44f36e1576a201b0676d3d1d", size = 699709, upload-time = "2025-09-22T19:51:02.075Z" }, + { url = "https://files.pythonhosted.org/packages/19/ee/8d6146a079ad21e534b5083c9ee4a4c8bec42f79cf87594b60978286b39a/ruamel.yaml.clib-0.2.14-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ff86876889ea478b1381089e55cf9e345707b312beda4986f823e1d95e8c0f59", size = 708926, upload-time = "2025-09-23T18:42:51.707Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f5/426b714abdc222392e68f3b8ad323930d05a214a27c7e7a0f06c69126401/ruamel.yaml.clib-0.2.14-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1f118b707eece8cf84ecbc3e3ec94d9db879d85ed608f95870d39b2d2efa5dca", size = 740202, upload-time = "2025-09-22T19:51:04.673Z" }, + { url = "https://files.pythonhosted.org/packages/3d/ac/3c5c2b27a183f4fda8a57c82211721c016bcb689a4a175865f7646db9f94/ruamel.yaml.clib-0.2.14-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b30110b29484adc597df6bd92a37b90e63a8c152ca8136aad100a02f8ba6d1b6", size = 765196, upload-time = "2025-09-22T19:51:05.916Z" }, + { url = "https://files.pythonhosted.org/packages/92/2e/06f56a71fd55021c993ed6e848c9b2e5e9cfce180a42179f0ddd28253f7c/ruamel.yaml.clib-0.2.14-cp313-cp313-win32.whl", hash = "sha256:f4e97a1cf0b7a30af9e1d9dad10a5671157b9acee790d9e26996391f49b965a2", size = 98635, upload-time = "2025-09-22T19:51:08.183Z" }, + { url = "https://files.pythonhosted.org/packages/51/79/76aba16a1689b50528224b182f71097ece338e7a4ab55e84c2e73443b78a/ruamel.yaml.clib-0.2.14-cp313-cp313-win_amd64.whl", hash = "sha256:090782b5fb9d98df96509eecdbcaffd037d47389a89492320280d52f91330d78", size = 115238, upload-time = "2025-09-22T19:51:07.081Z" }, + { url = 
"https://files.pythonhosted.org/packages/21/e2/a59ff65c26aaf21a24eb38df777cb9af5d87ba8fc8107c163c2da9d1e85e/ruamel.yaml.clib-0.2.14-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:7df6f6e9d0e33c7b1d435defb185095386c469109de723d514142632a7b9d07f", size = 271441, upload-time = "2025-09-23T14:24:16.498Z" }, + { url = "https://files.pythonhosted.org/packages/6b/fa/3234f913fe9a6525a7b97c6dad1f51e72b917e6872e051a5e2ffd8b16fbb/ruamel.yaml.clib-0.2.14-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:70eda7703b8126f5e52fcf276e6c0f40b0d314674f896fc58c47b0aef2b9ae83", size = 137970, upload-time = "2025-09-22T19:51:09.472Z" }, + { url = "https://files.pythonhosted.org/packages/ef/ec/4edbf17ac2c87fa0845dd366ef8d5852b96eb58fcd65fc1ecf5fe27b4641/ruamel.yaml.clib-0.2.14-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a0cb71ccc6ef9ce36eecb6272c81afdc2f565950cdcec33ae8e6cd8f7fc86f27", size = 739639, upload-time = "2025-09-22T19:51:10.566Z" }, + { url = "https://files.pythonhosted.org/packages/15/18/b0e1fafe59051de9e79cdd431863b03593ecfa8341c110affad7c8121efc/ruamel.yaml.clib-0.2.14-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e7cb9ad1d525d40f7d87b6df7c0ff916a66bc52cb61b66ac1b2a16d0c1b07640", size = 764456, upload-time = "2025-09-22T19:51:11.736Z" }, +] + +[[package]] +name = "ruff" +version = "0.13.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/df/8d7d8c515d33adfc540e2edf6c6021ea1c5a58a678d8cfce9fae59aabcab/ruff-0.13.2.tar.gz", hash = "sha256:cb12fffd32fb16d32cef4ed16d8c7cdc27ed7c944eaa98d99d01ab7ab0b710ff", size = 5416417, upload-time = "2025-09-25T14:54:09.936Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6e/84/5716a7fa4758e41bf70e603e13637c42cfb9dbf7ceb07180211b9bbf75ef/ruff-0.13.2-py3-none-linux_armv6l.whl", hash = "sha256:3796345842b55f033a78285e4f1641078f902020d8450cade03aad01bffd81c3", size = 12343254, upload-time = "2025-09-25T14:53:27.784Z" }, + { url = 
"https://files.pythonhosted.org/packages/9b/77/c7042582401bb9ac8eff25360e9335e901d7a1c0749a2b28ba4ecb239991/ruff-0.13.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ff7e4dda12e683e9709ac89e2dd436abf31a4d8a8fc3d89656231ed808e231d2", size = 13040891, upload-time = "2025-09-25T14:53:31.38Z" }, + { url = "https://files.pythonhosted.org/packages/c6/15/125a7f76eb295cb34d19c6778e3a82ace33730ad4e6f28d3427e134a02e0/ruff-0.13.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c75e9d2a2fafd1fdd895d0e7e24b44355984affdde1c412a6f6d3f6e16b22d46", size = 12243588, upload-time = "2025-09-25T14:53:33.543Z" }, + { url = "https://files.pythonhosted.org/packages/9e/eb/0093ae04a70f81f8be7fd7ed6456e926b65d238fc122311293d033fdf91e/ruff-0.13.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cceac74e7bbc53ed7d15d1042ffe7b6577bf294611ad90393bf9b2a0f0ec7cb6", size = 12491359, upload-time = "2025-09-25T14:53:35.892Z" }, + { url = "https://files.pythonhosted.org/packages/43/fe/72b525948a6956f07dad4a6f122336b6a05f2e3fd27471cea612349fedb9/ruff-0.13.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6ae3f469b5465ba6d9721383ae9d49310c19b452a161b57507764d7ef15f4b07", size = 12162486, upload-time = "2025-09-25T14:53:38.171Z" }, + { url = "https://files.pythonhosted.org/packages/6a/e3/0fac422bbbfb2ea838023e0d9fcf1f30183d83ab2482800e2cb892d02dfe/ruff-0.13.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f8f9e3cd6714358238cd6626b9d43026ed19c0c018376ac1ef3c3a04ffb42d8", size = 13871203, upload-time = "2025-09-25T14:53:41.943Z" }, + { url = "https://files.pythonhosted.org/packages/6b/82/b721c8e3ec5df6d83ba0e45dcf00892c4f98b325256c42c38ef136496cbf/ruff-0.13.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c6ed79584a8f6cbe2e5d7dbacf7cc1ee29cbdb5df1172e77fbdadc8bb85a1f89", size = 14929635, upload-time = "2025-09-25T14:53:43.953Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/a0/ad56faf6daa507b83079a1ad7a11694b87d61e6bf01c66bd82b466f21821/ruff-0.13.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aed130b2fde049cea2019f55deb939103123cdd191105f97a0599a3e753d61b0", size = 14338783, upload-time = "2025-09-25T14:53:46.205Z" }, + { url = "https://files.pythonhosted.org/packages/47/77/ad1d9156db8f99cd01ee7e29d74b34050e8075a8438e589121fcd25c4b08/ruff-0.13.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1887c230c2c9d65ed1b4e4cfe4d255577ea28b718ae226c348ae68df958191aa", size = 13355322, upload-time = "2025-09-25T14:53:48.164Z" }, + { url = "https://files.pythonhosted.org/packages/64/8b/e87cfca2be6f8b9f41f0bb12dc48c6455e2d66df46fe61bb441a226f1089/ruff-0.13.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5bcb10276b69b3cfea3a102ca119ffe5c6ba3901e20e60cf9efb53fa417633c3", size = 13354427, upload-time = "2025-09-25T14:53:50.486Z" }, + { url = "https://files.pythonhosted.org/packages/7f/df/bf382f3fbead082a575edb860897287f42b1b3c694bafa16bc9904c11ed3/ruff-0.13.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:afa721017aa55a555b2ff7944816587f1cb813c2c0a882d158f59b832da1660d", size = 13537637, upload-time = "2025-09-25T14:53:52.887Z" }, + { url = "https://files.pythonhosted.org/packages/51/70/1fb7a7c8a6fc8bd15636288a46e209e81913b87988f26e1913d0851e54f4/ruff-0.13.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1dbc875cf3720c64b3990fef8939334e74cb0ca65b8dbc61d1f439201a38101b", size = 12340025, upload-time = "2025-09-25T14:53:54.88Z" }, + { url = "https://files.pythonhosted.org/packages/4c/27/1e5b3f1c23ca5dd4106d9d580e5c13d9acb70288bff614b3d7b638378cc9/ruff-0.13.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5b939a1b2a960e9742e9a347e5bbc9b3c3d2c716f86c6ae273d9cbd64f193f22", size = 12133449, upload-time = "2025-09-25T14:53:57.089Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/09/b92a5ccee289f11ab128df57d5911224197d8d55ef3bd2043534ff72ca54/ruff-0.13.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:50e2d52acb8de3804fc5f6e2fa3ae9bdc6812410a9e46837e673ad1f90a18736", size = 13051369, upload-time = "2025-09-25T14:53:59.124Z" }, + { url = "https://files.pythonhosted.org/packages/89/99/26c9d1c7d8150f45e346dc045cc49f23e961efceb4a70c47dea0960dea9a/ruff-0.13.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3196bc13ab2110c176b9a4ae5ff7ab676faaa1964b330a1383ba20e1e19645f2", size = 13523644, upload-time = "2025-09-25T14:54:01.622Z" }, + { url = "https://files.pythonhosted.org/packages/f7/00/e7f1501e81e8ec290e79527827af1d88f541d8d26151751b46108978dade/ruff-0.13.2-py3-none-win32.whl", hash = "sha256:7c2a0b7c1e87795fec3404a485096bcd790216c7c146a922d121d8b9c8f1aaac", size = 12245990, upload-time = "2025-09-25T14:54:03.647Z" }, + { url = "https://files.pythonhosted.org/packages/ee/bd/d9f33a73de84fafd0146c6fba4f497c4565fe8fa8b46874b8e438869abc2/ruff-0.13.2-py3-none-win_amd64.whl", hash = "sha256:17d95fb32218357c89355f6f6f9a804133e404fc1f65694372e02a557edf8585", size = 13324004, upload-time = "2025-09-25T14:54:06.05Z" }, + { url = "https://files.pythonhosted.org/packages/c3/12/28fa2f597a605884deb0f65c1b1ae05111051b2a7030f5d8a4ff7f4599ba/ruff-0.13.2-py3-none-win_arm64.whl", hash = "sha256:da711b14c530412c827219312b7d7fbb4877fb31150083add7e8c5336549cea7", size = 12484437, upload-time = "2025-09-25T14:54:08.022Z" }, +] + +[[package]] +name = "shellingham" +version = "1.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + +[[package]] +name = "tenacity" +version = "9.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/d4/2b0cd0fe285e14b36db076e78c93766ff1d529d70408bd1d2a5a84f1d929/tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb", size = 48036, upload-time = "2025-04-02T08:25:09.966Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/30/643397144bfbfec6f6ef821f36f33e57d35946c44a2352d3c9f0ae847619/tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138", size = 28248, upload-time = "2025-04-02T08:25:07.678Z" }, +] + +[[package]] +name = "tomli" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = 
"sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, + { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, + { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, + { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, + { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, + { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, + { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, + { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, + { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, + { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, + { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, + { url = 
"https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, + { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, + { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, + { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, + { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, + { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, + { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" }, + { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" }, + { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" }, + { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" }, + { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, +] + +[[package]] +name = "typer" +version = "0.19.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "rich" }, + { name = "shellingham" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/21/ca/950278884e2ca20547ff3eb109478c6baf6b8cf219318e6bc4f666fad8e8/typer-0.19.2.tar.gz", hash = "sha256:9ad824308ded0ad06cc716434705f691d4ee0bfd0fb081839d2e426860e7fdca", size = 104755, upload-time = "2025-09-23T09:47:48.256Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/22/35617eee79080a5d071d0f14ad698d325ee6b3bf824fc0467c03b30e7fa8/typer-0.19.2-py3-none-any.whl", hash = "sha256:755e7e19670ffad8283db353267cb81ef252f595aa6834a0d1ca9312d9326cb9", size = 46748, upload-time = "2025-09-23T09:47:46.777Z" }, +] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.20250915" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/69/3c51b36d04da19b92f9e815be12753125bd8bc247ba0470a982e6979e71c/types_pyyaml-6.0.12.20250915.tar.gz", hash = "sha256:0f8b54a528c303f0e6f7165687dd33fafa81c807fcac23f632b63aa624ced1d3", size = 17522, upload-time = "2025-09-15T03:01:00.728Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bd/e0/1eed384f02555dde685fff1a1ac805c1c7dcb6dd019c916fe659b1c1f9ec/types_pyyaml-6.0.12.20250915-py3-none-any.whl", hash = "sha256:e7d4d9e064e89a3b3cae120b4990cd370874d2bf12fa5f46c97018dd5d3c9ab6", size = 20338, upload-time = "2025-09-15T03:00:59.218Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "urllib3" +version = "1.26.20" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "platform_python_implementation == 'PyPy'", +] +sdist = { url = "https://files.pythonhosted.org/packages/e4/e8/6ff5e6bc22095cfc59b6ea711b687e2b7ed4bdb373f7eeec370a97d7392f/urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32", size = 307380, upload-time = "2024-08-29T15:43:11.37Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/33/cf/8435d5a7159e2a9c83a95896ed596f68cf798005fe107cc655b5c5c14704/urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e", size = 144225, upload-time = "2024-08-29T15:43:08.921Z" }, +] + +[[package]] +name = "urllib3" +version = "2.5.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "platform_python_implementation != 'PyPy'", +] +sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, +] + +[[package]] +name = "vcrpy" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyyaml" }, + { name = "urllib3", version = "1.26.20", source = { registry = "https://pypi.org/simple" }, marker = "platform_python_implementation == 'PyPy'" }, + { name = "urllib3", version = "2.5.0", source = { registry = "https://pypi.org/simple" }, marker = "platform_python_implementation != 'PyPy'" }, + { name = "wrapt" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/25/d3/856e06184d4572aada1dd559ddec3bedc46df1f2edc5ab2c91121a2cccdb/vcrpy-7.0.0.tar.gz", hash = "sha256:176391ad0425edde1680c5b20738ea3dc7fb942520a48d2993448050986b3a50", size = 85502, upload-time = "2024-12-31T00:07:57.894Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/5d/1f15b252890c968d42b348d1e9b0aa12d5bf3e776704178ec37cceccdb63/vcrpy-7.0.0-py2.py3-none-any.whl", hash 
= "sha256:55791e26c18daa363435054d8b35bd41a4ac441b6676167635d1b37a71dbe124", size = 42321, upload-time = "2024-12-31T00:07:55.277Z" }, +] + +[[package]] +name = "wrapt" +version = "1.17.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/8f/aeb76c5b46e273670962298c23e7ddde79916cb74db802131d49a85e4b7d/wrapt-1.17.3.tar.gz", hash = "sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0", size = 55547, upload-time = "2025-08-12T05:53:21.714Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/db/00e2a219213856074a213503fdac0511203dceefff26e1daa15250cc01a0/wrapt-1.17.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:273a736c4645e63ac582c60a56b0acb529ef07f78e08dc6bfadf6a46b19c0da7", size = 53482, upload-time = "2025-08-12T05:51:45.79Z" }, + { url = "https://files.pythonhosted.org/packages/5e/30/ca3c4a5eba478408572096fe9ce36e6e915994dd26a4e9e98b4f729c06d9/wrapt-1.17.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5531d911795e3f935a9c23eb1c8c03c211661a5060aab167065896bbf62a5f85", size = 38674, upload-time = "2025-08-12T05:51:34.629Z" }, + { url = "https://files.pythonhosted.org/packages/31/25/3e8cc2c46b5329c5957cec959cb76a10718e1a513309c31399a4dad07eb3/wrapt-1.17.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0610b46293c59a3adbae3dee552b648b984176f8562ee0dba099a56cfbe4df1f", size = 38959, upload-time = "2025-08-12T05:51:56.074Z" }, + { url = "https://files.pythonhosted.org/packages/5d/8f/a32a99fc03e4b37e31b57cb9cefc65050ea08147a8ce12f288616b05ef54/wrapt-1.17.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b32888aad8b6e68f83a8fdccbf3165f5469702a7544472bdf41f582970ed3311", size = 82376, upload-time = "2025-08-12T05:52:32.134Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/57/4930cb8d9d70d59c27ee1332a318c20291749b4fba31f113c2f8ac49a72e/wrapt-1.17.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cccf4f81371f257440c88faed6b74f1053eef90807b77e31ca057b2db74edb1", size = 83604, upload-time = "2025-08-12T05:52:11.663Z" }, + { url = "https://files.pythonhosted.org/packages/a8/f3/1afd48de81d63dd66e01b263a6fbb86e1b5053b419b9b33d13e1f6d0f7d0/wrapt-1.17.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8a210b158a34164de8bb68b0e7780041a903d7b00c87e906fb69928bf7890d5", size = 82782, upload-time = "2025-08-12T05:52:12.626Z" }, + { url = "https://files.pythonhosted.org/packages/1e/d7/4ad5327612173b144998232f98a85bb24b60c352afb73bc48e3e0d2bdc4e/wrapt-1.17.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:79573c24a46ce11aab457b472efd8d125e5a51da2d1d24387666cd85f54c05b2", size = 82076, upload-time = "2025-08-12T05:52:33.168Z" }, + { url = "https://files.pythonhosted.org/packages/bb/59/e0adfc831674a65694f18ea6dc821f9fcb9ec82c2ce7e3d73a88ba2e8718/wrapt-1.17.3-cp311-cp311-win32.whl", hash = "sha256:c31eebe420a9a5d2887b13000b043ff6ca27c452a9a22fa71f35f118e8d4bf89", size = 36457, upload-time = "2025-08-12T05:53:03.936Z" }, + { url = "https://files.pythonhosted.org/packages/83/88/16b7231ba49861b6f75fc309b11012ede4d6b0a9c90969d9e0db8d991aeb/wrapt-1.17.3-cp311-cp311-win_amd64.whl", hash = "sha256:0b1831115c97f0663cb77aa27d381237e73ad4f721391a9bfb2fe8bc25fa6e77", size = 38745, upload-time = "2025-08-12T05:53:02.885Z" }, + { url = "https://files.pythonhosted.org/packages/9a/1e/c4d4f3398ec073012c51d1c8d87f715f56765444e1a4b11e5180577b7e6e/wrapt-1.17.3-cp311-cp311-win_arm64.whl", hash = "sha256:5a7b3c1ee8265eb4c8f1b7d29943f195c00673f5ab60c192eba2d4a7eae5f46a", size = 36806, upload-time = "2025-08-12T05:52:53.368Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/41/cad1aba93e752f1f9268c77270da3c469883d56e2798e7df6240dcb2287b/wrapt-1.17.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ab232e7fdb44cdfbf55fc3afa31bcdb0d8980b9b95c38b6405df2acb672af0e0", size = 53998, upload-time = "2025-08-12T05:51:47.138Z" }, + { url = "https://files.pythonhosted.org/packages/60/f8/096a7cc13097a1869fe44efe68dace40d2a16ecb853141394047f0780b96/wrapt-1.17.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9baa544e6acc91130e926e8c802a17f3b16fbea0fd441b5a60f5cf2cc5c3deba", size = 39020, upload-time = "2025-08-12T05:51:35.906Z" }, + { url = "https://files.pythonhosted.org/packages/33/df/bdf864b8997aab4febb96a9ae5c124f700a5abd9b5e13d2a3214ec4be705/wrapt-1.17.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6b538e31eca1a7ea4605e44f81a48aa24c4632a277431a6ed3f328835901f4fd", size = 39098, upload-time = "2025-08-12T05:51:57.474Z" }, + { url = "https://files.pythonhosted.org/packages/9f/81/5d931d78d0eb732b95dc3ddaeeb71c8bb572fb01356e9133916cd729ecdd/wrapt-1.17.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:042ec3bb8f319c147b1301f2393bc19dba6e176b7da446853406d041c36c7828", size = 88036, upload-time = "2025-08-12T05:52:34.784Z" }, + { url = "https://files.pythonhosted.org/packages/ca/38/2e1785df03b3d72d34fc6252d91d9d12dc27a5c89caef3335a1bbb8908ca/wrapt-1.17.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3af60380ba0b7b5aeb329bc4e402acd25bd877e98b3727b0135cb5c2efdaefe9", size = 88156, upload-time = "2025-08-12T05:52:13.599Z" }, + { url = "https://files.pythonhosted.org/packages/b3/8b/48cdb60fe0603e34e05cffda0b2a4adab81fd43718e11111a4b0100fd7c1/wrapt-1.17.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b02e424deef65c9f7326d8c19220a2c9040c51dc165cddb732f16198c168396", size = 87102, upload-time = "2025-08-12T05:52:14.56Z" }, + { url = 
"https://files.pythonhosted.org/packages/3c/51/d81abca783b58f40a154f1b2c56db1d2d9e0d04fa2d4224e357529f57a57/wrapt-1.17.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:74afa28374a3c3a11b3b5e5fca0ae03bef8450d6aa3ab3a1e2c30e3a75d023dc", size = 87732, upload-time = "2025-08-12T05:52:36.165Z" }, + { url = "https://files.pythonhosted.org/packages/9e/b1/43b286ca1392a006d5336412d41663eeef1ad57485f3e52c767376ba7e5a/wrapt-1.17.3-cp312-cp312-win32.whl", hash = "sha256:4da9f45279fff3543c371d5ababc57a0384f70be244de7759c85a7f989cb4ebe", size = 36705, upload-time = "2025-08-12T05:53:07.123Z" }, + { url = "https://files.pythonhosted.org/packages/28/de/49493f962bd3c586ab4b88066e967aa2e0703d6ef2c43aa28cb83bf7b507/wrapt-1.17.3-cp312-cp312-win_amd64.whl", hash = "sha256:e71d5c6ebac14875668a1e90baf2ea0ef5b7ac7918355850c0908ae82bcb297c", size = 38877, upload-time = "2025-08-12T05:53:05.436Z" }, + { url = "https://files.pythonhosted.org/packages/f1/48/0f7102fe9cb1e8a5a77f80d4f0956d62d97034bbe88d33e94699f99d181d/wrapt-1.17.3-cp312-cp312-win_arm64.whl", hash = "sha256:604d076c55e2fdd4c1c03d06dc1a31b95130010517b5019db15365ec4a405fc6", size = 36885, upload-time = "2025-08-12T05:52:54.367Z" }, + { url = "https://files.pythonhosted.org/packages/fc/f6/759ece88472157acb55fc195e5b116e06730f1b651b5b314c66291729193/wrapt-1.17.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a47681378a0439215912ef542c45a783484d4dd82bac412b71e59cf9c0e1cea0", size = 54003, upload-time = "2025-08-12T05:51:48.627Z" }, + { url = "https://files.pythonhosted.org/packages/4f/a9/49940b9dc6d47027dc850c116d79b4155f15c08547d04db0f07121499347/wrapt-1.17.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54a30837587c6ee3cd1a4d1c2ec5d24e77984d44e2f34547e2323ddb4e22eb77", size = 39025, upload-time = "2025-08-12T05:51:37.156Z" }, + { url = "https://files.pythonhosted.org/packages/45/35/6a08de0f2c96dcdd7fe464d7420ddb9a7655a6561150e5fc4da9356aeaab/wrapt-1.17.3-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:16ecf15d6af39246fe33e507105d67e4b81d8f8d2c6598ff7e3ca1b8a37213f7", size = 39108, upload-time = "2025-08-12T05:51:58.425Z" }, + { url = "https://files.pythonhosted.org/packages/0c/37/6faf15cfa41bf1f3dba80cd3f5ccc6622dfccb660ab26ed79f0178c7497f/wrapt-1.17.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6fd1ad24dc235e4ab88cda009e19bf347aabb975e44fd5c2fb22a3f6e4141277", size = 88072, upload-time = "2025-08-12T05:52:37.53Z" }, + { url = "https://files.pythonhosted.org/packages/78/f2/efe19ada4a38e4e15b6dff39c3e3f3f73f5decf901f66e6f72fe79623a06/wrapt-1.17.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ed61b7c2d49cee3c027372df5809a59d60cf1b6c2f81ee980a091f3afed6a2d", size = 88214, upload-time = "2025-08-12T05:52:15.886Z" }, + { url = "https://files.pythonhosted.org/packages/40/90/ca86701e9de1622b16e09689fc24b76f69b06bb0150990f6f4e8b0eeb576/wrapt-1.17.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:423ed5420ad5f5529db9ce89eac09c8a2f97da18eb1c870237e84c5a5c2d60aa", size = 87105, upload-time = "2025-08-12T05:52:17.914Z" }, + { url = "https://files.pythonhosted.org/packages/fd/e0/d10bd257c9a3e15cbf5523025252cc14d77468e8ed644aafb2d6f54cb95d/wrapt-1.17.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e01375f275f010fcbf7f643b4279896d04e571889b8a5b3f848423d91bf07050", size = 87766, upload-time = "2025-08-12T05:52:39.243Z" }, + { url = "https://files.pythonhosted.org/packages/e8/cf/7d848740203c7b4b27eb55dbfede11aca974a51c3d894f6cc4b865f42f58/wrapt-1.17.3-cp313-cp313-win32.whl", hash = "sha256:53e5e39ff71b3fc484df8a522c933ea2b7cdd0d5d15ae82e5b23fde87d44cbd8", size = 36711, upload-time = "2025-08-12T05:53:10.074Z" }, + { url = "https://files.pythonhosted.org/packages/57/54/35a84d0a4d23ea675994104e667ceff49227ce473ba6a59ba2c84f250b74/wrapt-1.17.3-cp313-cp313-win_amd64.whl", hash = "sha256:1f0b2f40cf341ee8cc1a97d51ff50dddb9fcc73241b9143ec74b30fc4f44f6cb", 
size = 38885, upload-time = "2025-08-12T05:53:08.695Z" }, + { url = "https://files.pythonhosted.org/packages/01/77/66e54407c59d7b02a3c4e0af3783168fff8e5d61def52cda8728439d86bc/wrapt-1.17.3-cp313-cp313-win_arm64.whl", hash = "sha256:7425ac3c54430f5fc5e7b6f41d41e704db073309acfc09305816bc6a0b26bb16", size = 36896, upload-time = "2025-08-12T05:52:55.34Z" }, + { url = "https://files.pythonhosted.org/packages/02/a2/cd864b2a14f20d14f4c496fab97802001560f9f41554eef6df201cd7f76c/wrapt-1.17.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cf30f6e3c077c8e6a9a7809c94551203c8843e74ba0c960f4a98cd80d4665d39", size = 54132, upload-time = "2025-08-12T05:51:49.864Z" }, + { url = "https://files.pythonhosted.org/packages/d5/46/d011725b0c89e853dc44cceb738a307cde5d240d023d6d40a82d1b4e1182/wrapt-1.17.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e228514a06843cae89621384cfe3a80418f3c04aadf8a3b14e46a7be704e4235", size = 39091, upload-time = "2025-08-12T05:51:38.935Z" }, + { url = "https://files.pythonhosted.org/packages/2e/9e/3ad852d77c35aae7ddebdbc3b6d35ec8013af7d7dddad0ad911f3d891dae/wrapt-1.17.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:5ea5eb3c0c071862997d6f3e02af1d055f381b1d25b286b9d6644b79db77657c", size = 39172, upload-time = "2025-08-12T05:51:59.365Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f7/c983d2762bcce2326c317c26a6a1e7016f7eb039c27cdf5c4e30f4160f31/wrapt-1.17.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:281262213373b6d5e4bb4353bc36d1ba4084e6d6b5d242863721ef2bf2c2930b", size = 87163, upload-time = "2025-08-12T05:52:40.965Z" }, + { url = "https://files.pythonhosted.org/packages/e4/0f/f673f75d489c7f22d17fe0193e84b41540d962f75fce579cf6873167c29b/wrapt-1.17.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc4a8d2b25efb6681ecacad42fca8859f88092d8732b170de6a5dddd80a1c8fa", size = 87963, upload-time = "2025-08-12T05:52:20.326Z" }, + { url 
= "https://files.pythonhosted.org/packages/df/61/515ad6caca68995da2fac7a6af97faab8f78ebe3bf4f761e1b77efbc47b5/wrapt-1.17.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:373342dd05b1d07d752cecbec0c41817231f29f3a89aa8b8843f7b95992ed0c7", size = 86945, upload-time = "2025-08-12T05:52:21.581Z" }, + { url = "https://files.pythonhosted.org/packages/d3/bd/4e70162ce398462a467bc09e768bee112f1412e563620adc353de9055d33/wrapt-1.17.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d40770d7c0fd5cbed9d84b2c3f2e156431a12c9a37dc6284060fb4bec0b7ffd4", size = 86857, upload-time = "2025-08-12T05:52:43.043Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b8/da8560695e9284810b8d3df8a19396a6e40e7518059584a1a394a2b35e0a/wrapt-1.17.3-cp314-cp314-win32.whl", hash = "sha256:fbd3c8319de8e1dc79d346929cd71d523622da527cca14e0c1d257e31c2b8b10", size = 37178, upload-time = "2025-08-12T05:53:12.605Z" }, + { url = "https://files.pythonhosted.org/packages/db/c8/b71eeb192c440d67a5a0449aaee2310a1a1e8eca41676046f99ed2487e9f/wrapt-1.17.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1a4120ae5705f673727d3253de3ed0e016f7cd78dc463db1b31e2463e1f3cf6", size = 39310, upload-time = "2025-08-12T05:53:11.106Z" }, + { url = "https://files.pythonhosted.org/packages/45/20/2cda20fd4865fa40f86f6c46ed37a2a8356a7a2fde0773269311f2af56c7/wrapt-1.17.3-cp314-cp314-win_arm64.whl", hash = "sha256:507553480670cab08a800b9463bdb881b2edeed77dc677b0a5915e6106e91a58", size = 37266, upload-time = "2025-08-12T05:52:56.531Z" }, + { url = "https://files.pythonhosted.org/packages/77/ed/dd5cf21aec36c80443c6f900449260b80e2a65cf963668eaef3b9accce36/wrapt-1.17.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ed7c635ae45cfbc1a7371f708727bf74690daedc49b4dba310590ca0bd28aa8a", size = 56544, upload-time = "2025-08-12T05:51:51.109Z" }, + { url = "https://files.pythonhosted.org/packages/8d/96/450c651cc753877ad100c7949ab4d2e2ecc4d97157e00fa8f45df682456a/wrapt-1.17.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = 
"sha256:249f88ed15503f6492a71f01442abddd73856a0032ae860de6d75ca62eed8067", size = 40283, upload-time = "2025-08-12T05:51:39.912Z" }, + { url = "https://files.pythonhosted.org/packages/d1/86/2fcad95994d9b572db57632acb6f900695a648c3e063f2cd344b3f5c5a37/wrapt-1.17.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5a03a38adec8066d5a37bea22f2ba6bbf39fcdefbe2d91419ab864c3fb515454", size = 40366, upload-time = "2025-08-12T05:52:00.693Z" }, + { url = "https://files.pythonhosted.org/packages/64/0e/f4472f2fdde2d4617975144311f8800ef73677a159be7fe61fa50997d6c0/wrapt-1.17.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5d4478d72eb61c36e5b446e375bbc49ed002430d17cdec3cecb36993398e1a9e", size = 108571, upload-time = "2025-08-12T05:52:44.521Z" }, + { url = "https://files.pythonhosted.org/packages/cc/01/9b85a99996b0a97c8a17484684f206cbb6ba73c1ce6890ac668bcf3838fb/wrapt-1.17.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:223db574bb38637e8230eb14b185565023ab624474df94d2af18f1cdb625216f", size = 113094, upload-time = "2025-08-12T05:52:22.618Z" }, + { url = "https://files.pythonhosted.org/packages/25/02/78926c1efddcc7b3aa0bc3d6b33a822f7d898059f7cd9ace8c8318e559ef/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e405adefb53a435f01efa7ccdec012c016b5a1d3f35459990afc39b6be4d5056", size = 110659, upload-time = "2025-08-12T05:52:24.057Z" }, + { url = "https://files.pythonhosted.org/packages/dc/ee/c414501ad518ac3e6fe184753632fe5e5ecacdcf0effc23f31c1e4f7bfcf/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:88547535b787a6c9ce4086917b6e1d291aa8ed914fdd3a838b3539dc95c12804", size = 106946, upload-time = "2025-08-12T05:52:45.976Z" }, + { url = "https://files.pythonhosted.org/packages/be/44/a1bd64b723d13bb151d6cc91b986146a1952385e0392a78567e12149c7b4/wrapt-1.17.3-cp314-cp314t-win32.whl", hash = 
"sha256:41b1d2bc74c2cac6f9074df52b2efbef2b30bdfe5f40cb78f8ca22963bc62977", size = 38717, upload-time = "2025-08-12T05:53:15.214Z" }, + { url = "https://files.pythonhosted.org/packages/79/d9/7cfd5a312760ac4dd8bf0184a6ee9e43c33e47f3dadc303032ce012b8fa3/wrapt-1.17.3-cp314-cp314t-win_amd64.whl", hash = "sha256:73d496de46cd2cdbdbcce4ae4bcdb4afb6a11234a1df9c085249d55166b95116", size = 41334, upload-time = "2025-08-12T05:53:14.178Z" }, + { url = "https://files.pythonhosted.org/packages/46/78/10ad9781128ed2f99dbc474f43283b13fea8ba58723e98844367531c18e9/wrapt-1.17.3-cp314-cp314t-win_arm64.whl", hash = "sha256:f38e60678850c42461d4202739f9bf1e3a737c7ad283638251e79cc49effb6b6", size = 38471, upload-time = "2025-08-12T05:52:57.784Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591, upload-time = "2025-08-12T05:53:20.674Z" }, +] + +[[package]] +name = "yarl" +version = "1.20.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3c/fb/efaa23fa4e45537b827620f04cf8f3cd658b76642205162e072703a5b963/yarl-1.20.1.tar.gz", hash = "sha256:d017a4997ee50c91fd5466cef416231bb82177b93b029906cefc542ce14c35ac", size = 186428, upload-time = "2025-06-10T00:46:09.923Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/18/893b50efc2350e47a874c5c2d67e55a0ea5df91186b2a6f5ac52eff887cd/yarl-1.20.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:47ee6188fea634bdfaeb2cc420f5b3b17332e6225ce88149a17c413c77ff269e", size = 133833, upload-time = "2025-06-10T00:43:07.393Z" }, + { url = "https://files.pythonhosted.org/packages/89/ed/b8773448030e6fc47fa797f099ab9eab151a43a25717f9ac043844ad5ea3/yarl-1.20.1-cp311-cp311-macosx_10_9_x86_64.whl", 
hash = "sha256:d0f6500f69e8402d513e5eedb77a4e1818691e8f45e6b687147963514d84b44b", size = 91070, upload-time = "2025-06-10T00:43:09.538Z" }, + { url = "https://files.pythonhosted.org/packages/e3/e3/409bd17b1e42619bf69f60e4f031ce1ccb29bd7380117a55529e76933464/yarl-1.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a8900a42fcdaad568de58887c7b2f602962356908eedb7628eaf6021a6e435b", size = 89818, upload-time = "2025-06-10T00:43:11.575Z" }, + { url = "https://files.pythonhosted.org/packages/f8/77/64d8431a4d77c856eb2d82aa3de2ad6741365245a29b3a9543cd598ed8c5/yarl-1.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bad6d131fda8ef508b36be3ece16d0902e80b88ea7200f030a0f6c11d9e508d4", size = 347003, upload-time = "2025-06-10T00:43:14.088Z" }, + { url = "https://files.pythonhosted.org/packages/8d/d2/0c7e4def093dcef0bd9fa22d4d24b023788b0a33b8d0088b51aa51e21e99/yarl-1.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:df018d92fe22aaebb679a7f89fe0c0f368ec497e3dda6cb81a567610f04501f1", size = 336537, upload-time = "2025-06-10T00:43:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/f0/f3/fc514f4b2cf02cb59d10cbfe228691d25929ce8f72a38db07d3febc3f706/yarl-1.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f969afbb0a9b63c18d0feecf0db09d164b7a44a053e78a7d05f5df163e43833", size = 362358, upload-time = "2025-06-10T00:43:18.704Z" }, + { url = "https://files.pythonhosted.org/packages/ea/6d/a313ac8d8391381ff9006ac05f1d4331cee3b1efaa833a53d12253733255/yarl-1.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:812303eb4aa98e302886ccda58d6b099e3576b1b9276161469c25803a8db277d", size = 357362, upload-time = "2025-06-10T00:43:20.888Z" }, + { url = "https://files.pythonhosted.org/packages/00/70/8f78a95d6935a70263d46caa3dd18e1f223cf2f2ff2037baa01a22bc5b22/yarl-1.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:98c4a7d166635147924aa0bf9bfe8d8abad6fffa6102de9c99ea04a1376f91e8", size = 348979, upload-time = "2025-06-10T00:43:23.169Z" }, + { url = "https://files.pythonhosted.org/packages/cb/05/42773027968968f4f15143553970ee36ead27038d627f457cc44bbbeecf3/yarl-1.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12e768f966538e81e6e7550f9086a6236b16e26cd964cf4df35349970f3551cf", size = 337274, upload-time = "2025-06-10T00:43:27.111Z" }, + { url = "https://files.pythonhosted.org/packages/05/be/665634aa196954156741ea591d2f946f1b78ceee8bb8f28488bf28c0dd62/yarl-1.20.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe41919b9d899661c5c28a8b4b0acf704510b88f27f0934ac7a7bebdd8938d5e", size = 363294, upload-time = "2025-06-10T00:43:28.96Z" }, + { url = "https://files.pythonhosted.org/packages/eb/90/73448401d36fa4e210ece5579895731f190d5119c4b66b43b52182e88cd5/yarl-1.20.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8601bc010d1d7780592f3fc1bdc6c72e2b6466ea34569778422943e1a1f3c389", size = 358169, upload-time = "2025-06-10T00:43:30.701Z" }, + { url = "https://files.pythonhosted.org/packages/c3/b0/fce922d46dc1eb43c811f1889f7daa6001b27a4005587e94878570300881/yarl-1.20.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:daadbdc1f2a9033a2399c42646fbd46da7992e868a5fe9513860122d7fe7a73f", size = 362776, upload-time = "2025-06-10T00:43:32.51Z" }, + { url = "https://files.pythonhosted.org/packages/f1/0d/b172628fce039dae8977fd22caeff3eeebffd52e86060413f5673767c427/yarl-1.20.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:03aa1e041727cb438ca762628109ef1333498b122e4c76dd858d186a37cec845", size = 381341, upload-time = "2025-06-10T00:43:34.543Z" }, + { url = "https://files.pythonhosted.org/packages/6b/9b/5b886d7671f4580209e855974fe1cecec409aa4a89ea58b8f0560dc529b1/yarl-1.20.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:642980ef5e0fa1de5fa96d905c7e00cb2c47cb468bfcac5a18c58e27dbf8d8d1", size = 379988, 
upload-time = "2025-06-10T00:43:36.489Z" }, + { url = "https://files.pythonhosted.org/packages/73/be/75ef5fd0fcd8f083a5d13f78fd3f009528132a1f2a1d7c925c39fa20aa79/yarl-1.20.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:86971e2795584fe8c002356d3b97ef6c61862720eeff03db2a7c86b678d85b3e", size = 371113, upload-time = "2025-06-10T00:43:38.592Z" }, + { url = "https://files.pythonhosted.org/packages/50/4f/62faab3b479dfdcb741fe9e3f0323e2a7d5cd1ab2edc73221d57ad4834b2/yarl-1.20.1-cp311-cp311-win32.whl", hash = "sha256:597f40615b8d25812f14562699e287f0dcc035d25eb74da72cae043bb884d773", size = 81485, upload-time = "2025-06-10T00:43:41.038Z" }, + { url = "https://files.pythonhosted.org/packages/f0/09/d9c7942f8f05c32ec72cd5c8e041c8b29b5807328b68b4801ff2511d4d5e/yarl-1.20.1-cp311-cp311-win_amd64.whl", hash = "sha256:26ef53a9e726e61e9cd1cda6b478f17e350fb5800b4bd1cd9fe81c4d91cfeb2e", size = 86686, upload-time = "2025-06-10T00:43:42.692Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9a/cb7fad7d73c69f296eda6815e4a2c7ed53fc70c2f136479a91c8e5fbdb6d/yarl-1.20.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdcc4cd244e58593a4379fe60fdee5ac0331f8eb70320a24d591a3be197b94a9", size = 133667, upload-time = "2025-06-10T00:43:44.369Z" }, + { url = "https://files.pythonhosted.org/packages/67/38/688577a1cb1e656e3971fb66a3492501c5a5df56d99722e57c98249e5b8a/yarl-1.20.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b29a2c385a5f5b9c7d9347e5812b6f7ab267193c62d282a540b4fc528c8a9d2a", size = 91025, upload-time = "2025-06-10T00:43:46.295Z" }, + { url = "https://files.pythonhosted.org/packages/50/ec/72991ae51febeb11a42813fc259f0d4c8e0507f2b74b5514618d8b640365/yarl-1.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1112ae8154186dfe2de4732197f59c05a83dc814849a5ced892b708033f40dc2", size = 89709, upload-time = "2025-06-10T00:43:48.22Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/da/4d798025490e89426e9f976702e5f9482005c548c579bdae792a4c37769e/yarl-1.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90bbd29c4fe234233f7fa2b9b121fb63c321830e5d05b45153a2ca68f7d310ee", size = 352287, upload-time = "2025-06-10T00:43:49.924Z" }, + { url = "https://files.pythonhosted.org/packages/1a/26/54a15c6a567aac1c61b18aa0f4b8aa2e285a52d547d1be8bf48abe2b3991/yarl-1.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:680e19c7ce3710ac4cd964e90dad99bf9b5029372ba0c7cbfcd55e54d90ea819", size = 345429, upload-time = "2025-06-10T00:43:51.7Z" }, + { url = "https://files.pythonhosted.org/packages/d6/95/9dcf2386cb875b234353b93ec43e40219e14900e046bf6ac118f94b1e353/yarl-1.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a979218c1fdb4246a05efc2cc23859d47c89af463a90b99b7c56094daf25a16", size = 365429, upload-time = "2025-06-10T00:43:53.494Z" }, + { url = "https://files.pythonhosted.org/packages/91/b2/33a8750f6a4bc224242a635f5f2cff6d6ad5ba651f6edcccf721992c21a0/yarl-1.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255b468adf57b4a7b65d8aad5b5138dce6a0752c139965711bdcb81bc370e1b6", size = 363862, upload-time = "2025-06-10T00:43:55.766Z" }, + { url = "https://files.pythonhosted.org/packages/98/28/3ab7acc5b51f4434b181b0cee8f1f4b77a65919700a355fb3617f9488874/yarl-1.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a97d67108e79cfe22e2b430d80d7571ae57d19f17cda8bb967057ca8a7bf5bfd", size = 355616, upload-time = "2025-06-10T00:43:58.056Z" }, + { url = "https://files.pythonhosted.org/packages/36/a3/f666894aa947a371724ec7cd2e5daa78ee8a777b21509b4252dd7bd15e29/yarl-1.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8570d998db4ddbfb9a590b185a0a33dbf8aafb831d07a5257b4ec9948df9cb0a", size = 339954, 
upload-time = "2025-06-10T00:43:59.773Z" }, + { url = "https://files.pythonhosted.org/packages/f1/81/5f466427e09773c04219d3450d7a1256138a010b6c9f0af2d48565e9ad13/yarl-1.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97c75596019baae7c71ccf1d8cc4738bc08134060d0adfcbe5642f778d1dca38", size = 365575, upload-time = "2025-06-10T00:44:02.051Z" }, + { url = "https://files.pythonhosted.org/packages/2e/e3/e4b0ad8403e97e6c9972dd587388940a032f030ebec196ab81a3b8e94d31/yarl-1.20.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1c48912653e63aef91ff988c5432832692ac5a1d8f0fb8a33091520b5bbe19ef", size = 365061, upload-time = "2025-06-10T00:44:04.196Z" }, + { url = "https://files.pythonhosted.org/packages/ac/99/b8a142e79eb86c926f9f06452eb13ecb1bb5713bd01dc0038faf5452e544/yarl-1.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4c3ae28f3ae1563c50f3d37f064ddb1511ecc1d5584e88c6b7c63cf7702a6d5f", size = 364142, upload-time = "2025-06-10T00:44:06.527Z" }, + { url = "https://files.pythonhosted.org/packages/34/f2/08ed34a4a506d82a1a3e5bab99ccd930a040f9b6449e9fd050320e45845c/yarl-1.20.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c5e9642f27036283550f5f57dc6156c51084b458570b9d0d96100c8bebb186a8", size = 381894, upload-time = "2025-06-10T00:44:08.379Z" }, + { url = "https://files.pythonhosted.org/packages/92/f8/9a3fbf0968eac704f681726eff595dce9b49c8a25cd92bf83df209668285/yarl-1.20.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2c26b0c49220d5799f7b22c6838409ee9bc58ee5c95361a4d7831f03cc225b5a", size = 383378, upload-time = "2025-06-10T00:44:10.51Z" }, + { url = "https://files.pythonhosted.org/packages/af/85/9363f77bdfa1e4d690957cd39d192c4cacd1c58965df0470a4905253b54f/yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004", size = 374069, upload-time = "2025-06-10T00:44:12.834Z" }, + { url = 
"https://files.pythonhosted.org/packages/35/99/9918c8739ba271dcd935400cff8b32e3cd319eaf02fcd023d5dcd487a7c8/yarl-1.20.1-cp312-cp312-win32.whl", hash = "sha256:daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5", size = 81249, upload-time = "2025-06-10T00:44:14.731Z" }, + { url = "https://files.pythonhosted.org/packages/eb/83/5d9092950565481b413b31a23e75dd3418ff0a277d6e0abf3729d4d1ce25/yarl-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698", size = 86710, upload-time = "2025-06-10T00:44:16.716Z" }, + { url = "https://files.pythonhosted.org/packages/8a/e1/2411b6d7f769a07687acee88a062af5833cf1966b7266f3d8dfb3d3dc7d3/yarl-1.20.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0b5ff0fbb7c9f1b1b5ab53330acbfc5247893069e7716840c8e7d5bb7355038a", size = 131811, upload-time = "2025-06-10T00:44:18.933Z" }, + { url = "https://files.pythonhosted.org/packages/b2/27/584394e1cb76fb771371770eccad35de400e7b434ce3142c2dd27392c968/yarl-1.20.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:14f326acd845c2b2e2eb38fb1346c94f7f3b01a4f5c788f8144f9b630bfff9a3", size = 90078, upload-time = "2025-06-10T00:44:20.635Z" }, + { url = "https://files.pythonhosted.org/packages/bf/9a/3246ae92d4049099f52d9b0fe3486e3b500e29b7ea872d0f152966fc209d/yarl-1.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f60e4ad5db23f0b96e49c018596707c3ae89f5d0bd97f0ad3684bcbad899f1e7", size = 88748, upload-time = "2025-06-10T00:44:22.34Z" }, + { url = "https://files.pythonhosted.org/packages/a3/25/35afe384e31115a1a801fbcf84012d7a066d89035befae7c5d4284df1e03/yarl-1.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49bdd1b8e00ce57e68ba51916e4bb04461746e794e7c4d4bbc42ba2f18297691", size = 349595, upload-time = "2025-06-10T00:44:24.314Z" }, + { url = 
"https://files.pythonhosted.org/packages/28/2d/8aca6cb2cabc8f12efcb82749b9cefecbccfc7b0384e56cd71058ccee433/yarl-1.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:66252d780b45189975abfed839616e8fd2dbacbdc262105ad7742c6ae58f3e31", size = 342616, upload-time = "2025-06-10T00:44:26.167Z" }, + { url = "https://files.pythonhosted.org/packages/0b/e9/1312633d16b31acf0098d30440ca855e3492d66623dafb8e25b03d00c3da/yarl-1.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59174e7332f5d153d8f7452a102b103e2e74035ad085f404df2e40e663a22b28", size = 361324, upload-time = "2025-06-10T00:44:27.915Z" }, + { url = "https://files.pythonhosted.org/packages/bc/a0/688cc99463f12f7669eec7c8acc71ef56a1521b99eab7cd3abb75af887b0/yarl-1.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3968ec7d92a0c0f9ac34d5ecfd03869ec0cab0697c91a45db3fbbd95fe1b653", size = 359676, upload-time = "2025-06-10T00:44:30.041Z" }, + { url = "https://files.pythonhosted.org/packages/af/44/46407d7f7a56e9a85a4c207724c9f2c545c060380718eea9088f222ba697/yarl-1.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1a4fbb50e14396ba3d375f68bfe02215d8e7bc3ec49da8341fe3157f59d2ff5", size = 352614, upload-time = "2025-06-10T00:44:32.171Z" }, + { url = "https://files.pythonhosted.org/packages/b1/91/31163295e82b8d5485d31d9cf7754d973d41915cadce070491778d9c9825/yarl-1.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11a62c839c3a8eac2410e951301309426f368388ff2f33799052787035793b02", size = 336766, upload-time = "2025-06-10T00:44:34.494Z" }, + { url = "https://files.pythonhosted.org/packages/b4/8e/c41a5bc482121f51c083c4c2bcd16b9e01e1cf8729e380273a952513a21f/yarl-1.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:041eaa14f73ff5a8986b4388ac6bb43a77f2ea09bf1913df7a35d4646db69e53", size = 364615, upload-time = 
"2025-06-10T00:44:36.856Z" }, + { url = "https://files.pythonhosted.org/packages/e3/5b/61a3b054238d33d70ea06ebba7e58597891b71c699e247df35cc984ab393/yarl-1.20.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:377fae2fef158e8fd9d60b4c8751387b8d1fb121d3d0b8e9b0be07d1b41e83dc", size = 360982, upload-time = "2025-06-10T00:44:39.141Z" }, + { url = "https://files.pythonhosted.org/packages/df/a3/6a72fb83f8d478cb201d14927bc8040af901811a88e0ff2da7842dd0ed19/yarl-1.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1c92f4390e407513f619d49319023664643d3339bd5e5a56a3bebe01bc67ec04", size = 369792, upload-time = "2025-06-10T00:44:40.934Z" }, + { url = "https://files.pythonhosted.org/packages/7c/af/4cc3c36dfc7c077f8dedb561eb21f69e1e9f2456b91b593882b0b18c19dc/yarl-1.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d25ddcf954df1754ab0f86bb696af765c5bfaba39b74095f27eececa049ef9a4", size = 382049, upload-time = "2025-06-10T00:44:42.854Z" }, + { url = "https://files.pythonhosted.org/packages/19/3a/e54e2c4752160115183a66dc9ee75a153f81f3ab2ba4bf79c3c53b33de34/yarl-1.20.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:909313577e9619dcff8c31a0ea2aa0a2a828341d92673015456b3ae492e7317b", size = 384774, upload-time = "2025-06-10T00:44:45.275Z" }, + { url = "https://files.pythonhosted.org/packages/9c/20/200ae86dabfca89060ec6447649f219b4cbd94531e425e50d57e5f5ac330/yarl-1.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:793fd0580cb9664548c6b83c63b43c477212c0260891ddf86809e1c06c8b08f1", size = 374252, upload-time = "2025-06-10T00:44:47.31Z" }, + { url = "https://files.pythonhosted.org/packages/83/75/11ee332f2f516b3d094e89448da73d557687f7d137d5a0f48c40ff211487/yarl-1.20.1-cp313-cp313-win32.whl", hash = "sha256:468f6e40285de5a5b3c44981ca3a319a4b208ccc07d526b20b12aeedcfa654b7", size = 81198, upload-time = "2025-06-10T00:44:49.164Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/ba/39b1ecbf51620b40ab402b0fc817f0ff750f6d92712b44689c2c215be89d/yarl-1.20.1-cp313-cp313-win_amd64.whl", hash = "sha256:495b4ef2fea40596bfc0affe3837411d6aa3371abcf31aac0ccc4bdd64d4ef5c", size = 86346, upload-time = "2025-06-10T00:44:51.182Z" }, + { url = "https://files.pythonhosted.org/packages/43/c7/669c52519dca4c95153c8ad96dd123c79f354a376346b198f438e56ffeb4/yarl-1.20.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f60233b98423aab21d249a30eb27c389c14929f47be8430efa7dbd91493a729d", size = 138826, upload-time = "2025-06-10T00:44:52.883Z" }, + { url = "https://files.pythonhosted.org/packages/6a/42/fc0053719b44f6ad04a75d7f05e0e9674d45ef62f2d9ad2c1163e5c05827/yarl-1.20.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6f3eff4cc3f03d650d8755c6eefc844edde99d641d0dcf4da3ab27141a5f8ddf", size = 93217, upload-time = "2025-06-10T00:44:54.658Z" }, + { url = "https://files.pythonhosted.org/packages/4f/7f/fa59c4c27e2a076bba0d959386e26eba77eb52ea4a0aac48e3515c186b4c/yarl-1.20.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:69ff8439d8ba832d6bed88af2c2b3445977eba9a4588b787b32945871c2444e3", size = 92700, upload-time = "2025-06-10T00:44:56.784Z" }, + { url = "https://files.pythonhosted.org/packages/2f/d4/062b2f48e7c93481e88eff97a6312dca15ea200e959f23e96d8ab898c5b8/yarl-1.20.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf34efa60eb81dd2645a2e13e00bb98b76c35ab5061a3989c7a70f78c85006d", size = 347644, upload-time = "2025-06-10T00:44:59.071Z" }, + { url = "https://files.pythonhosted.org/packages/89/47/78b7f40d13c8f62b499cc702fdf69e090455518ae544c00a3bf4afc9fc77/yarl-1.20.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8e0fe9364ad0fddab2688ce72cb7a8e61ea42eff3c7caeeb83874a5d479c896c", size = 323452, upload-time = "2025-06-10T00:45:01.605Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/2b/490d3b2dc66f52987d4ee0d3090a147ea67732ce6b4d61e362c1846d0d32/yarl-1.20.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f64fbf81878ba914562c672024089e3401974a39767747691c65080a67b18c1", size = 346378, upload-time = "2025-06-10T00:45:03.946Z" }, + { url = "https://files.pythonhosted.org/packages/66/ad/775da9c8a94ce925d1537f939a4f17d782efef1f973039d821cbe4bcc211/yarl-1.20.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6342d643bf9a1de97e512e45e4b9560a043347e779a173250824f8b254bd5ce", size = 353261, upload-time = "2025-06-10T00:45:05.992Z" }, + { url = "https://files.pythonhosted.org/packages/4b/23/0ed0922b47a4f5c6eb9065d5ff1e459747226ddce5c6a4c111e728c9f701/yarl-1.20.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56dac5f452ed25eef0f6e3c6a066c6ab68971d96a9fb441791cad0efba6140d3", size = 335987, upload-time = "2025-06-10T00:45:08.227Z" }, + { url = "https://files.pythonhosted.org/packages/3e/49/bc728a7fe7d0e9336e2b78f0958a2d6b288ba89f25a1762407a222bf53c3/yarl-1.20.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7d7f497126d65e2cad8dc5f97d34c27b19199b6414a40cb36b52f41b79014be", size = 329361, upload-time = "2025-06-10T00:45:10.11Z" }, + { url = "https://files.pythonhosted.org/packages/93/8f/b811b9d1f617c83c907e7082a76e2b92b655400e61730cd61a1f67178393/yarl-1.20.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:67e708dfb8e78d8a19169818eeb5c7a80717562de9051bf2413aca8e3696bf16", size = 346460, upload-time = "2025-06-10T00:45:12.055Z" }, + { url = "https://files.pythonhosted.org/packages/70/fd/af94f04f275f95da2c3b8b5e1d49e3e79f1ed8b6ceb0f1664cbd902773ff/yarl-1.20.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:595c07bc79af2494365cc96ddeb772f76272364ef7c80fb892ef9d0649586513", size = 334486, upload-time = "2025-06-10T00:45:13.995Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/65/04c62e82704e7dd0a9b3f61dbaa8447f8507655fd16c51da0637b39b2910/yarl-1.20.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7bdd2f80f4a7df852ab9ab49484a4dee8030023aa536df41f2d922fd57bf023f", size = 342219, upload-time = "2025-06-10T00:45:16.479Z" }, + { url = "https://files.pythonhosted.org/packages/91/95/459ca62eb958381b342d94ab9a4b6aec1ddec1f7057c487e926f03c06d30/yarl-1.20.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c03bfebc4ae8d862f853a9757199677ab74ec25424d0ebd68a0027e9c639a390", size = 350693, upload-time = "2025-06-10T00:45:18.399Z" }, + { url = "https://files.pythonhosted.org/packages/a6/00/d393e82dd955ad20617abc546a8f1aee40534d599ff555ea053d0ec9bf03/yarl-1.20.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:344d1103e9c1523f32a5ed704d576172d2cabed3122ea90b1d4e11fe17c66458", size = 355803, upload-time = "2025-06-10T00:45:20.677Z" }, + { url = "https://files.pythonhosted.org/packages/9e/ed/c5fb04869b99b717985e244fd93029c7a8e8febdfcffa06093e32d7d44e7/yarl-1.20.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:88cab98aa4e13e1ade8c141daeedd300a4603b7132819c484841bb7af3edce9e", size = 341709, upload-time = "2025-06-10T00:45:23.221Z" }, + { url = "https://files.pythonhosted.org/packages/24/fd/725b8e73ac2a50e78a4534ac43c6addf5c1c2d65380dd48a9169cc6739a9/yarl-1.20.1-cp313-cp313t-win32.whl", hash = "sha256:b121ff6a7cbd4abc28985b6028235491941b9fe8fe226e6fdc539c977ea1739d", size = 86591, upload-time = "2025-06-10T00:45:25.793Z" }, + { url = "https://files.pythonhosted.org/packages/94/c3/b2e9f38bc3e11191981d57ea08cab2166e74ea770024a646617c9cddd9f6/yarl-1.20.1-cp313-cp313t-win_amd64.whl", hash = "sha256:541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f", size = 93003, upload-time = "2025-06-10T00:45:27.752Z" }, + { url = "https://files.pythonhosted.org/packages/b4/2d/2345fce04cfd4bee161bf1e7d9cdc702e3e16109021035dbb24db654a622/yarl-1.20.1-py3-none-any.whl", hash = 
"sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77", size = 46542, upload-time = "2025-06-10T00:46:07.521Z" }, +]