diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..65d8f56
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,86 @@
+name: CI
+
+on:
+ push:
+ branches: [main, dev, feature/*]
+ pull_request:
+ branches: [main, dev]
+ workflow_dispatch:
+
+jobs:
+ lint-and-typecheck:
+ name: Lint & Type Check
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Install uv
+ uses: astral-sh/setup-uv@v5
+ with:
+ version: "latest"
+
+ - name: Set up Python
+ run: uv python install 3.14
+
+ - name: Install dependencies
+ run: uv sync
+
+ - name: Ruff check (fuzzforge-cli)
+ run: |
+ cd fuzzforge-cli
+ uv run --extra lints ruff check src/
+
+ - name: Ruff check (fuzzforge-mcp)
+ run: |
+ cd fuzzforge-mcp
+ uv run --extra lints ruff check src/
+
+ - name: Ruff check (fuzzforge-common)
+ run: |
+ cd fuzzforge-common
+ uv run --extra lints ruff check src/
+
+ - name: Mypy type check (fuzzforge-cli)
+ run: |
+ cd fuzzforge-cli
+ uv run --extra lints mypy src/
+
+ - name: Mypy type check (fuzzforge-mcp)
+ run: |
+ cd fuzzforge-mcp
+ uv run --extra lints mypy src/
+
+ # NOTE: Mypy check for fuzzforge-common temporarily disabled
+ # due to 37 pre-existing type errors in legacy code.
+ # TODO: Fix type errors and re-enable strict checking
+ #- name: Mypy type check (fuzzforge-common)
+ # run: |
+ # cd fuzzforge-common
+ # uv run --extra lints mypy src/
+
+ test:
+ name: Tests
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Install uv
+ uses: astral-sh/setup-uv@v5
+ with:
+ version: "latest"
+
+ - name: Set up Python
+ run: uv python install 3.14
+
+ - name: Install dependencies
+ run: uv sync --all-extras
+
+ - name: Run MCP tests
+ run: |
+ cd fuzzforge-mcp
+ uv run --extra tests pytest -v
+
+ - name: Run common tests
+ run: |
+ cd fuzzforge-common
+ uv run --extra tests pytest -v
diff --git a/.github/workflows/mcp-server.yml b/.github/workflows/mcp-server.yml
new file mode 100644
index 0000000..233fd42
--- /dev/null
+++ b/.github/workflows/mcp-server.yml
@@ -0,0 +1,49 @@
+name: MCP Server Smoke Test
+
+on:
+ push:
+ branches: [main, dev]
+ pull_request:
+ branches: [main, dev]
+ workflow_dispatch:
+
+jobs:
+ mcp-server:
+ name: MCP Server Test
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Install uv
+ uses: astral-sh/setup-uv@v5
+ with:
+ version: "latest"
+
+ - name: Set up Python
+ run: uv python install 3.14
+
+ - name: Install dependencies
+ run: uv sync --all-extras
+
+ - name: Start MCP server in background
+ run: |
+ cd fuzzforge-mcp
+ nohup uv run python -m fuzzforge_mcp.server > server.log 2>&1 &
+ echo $! > server.pid
+ sleep 3
+
+ - name: Run MCP tool tests
+ run: |
+ cd fuzzforge-mcp
+ uv run --extra tests pytest tests/test_resources.py -v
+
+ - name: Stop MCP server
+ if: always()
+ run: |
+ if [ -f fuzzforge-mcp/server.pid ]; then
+ kill $(cat fuzzforge-mcp/server.pid) || true
+ fi
+
+ - name: Show server logs
+ if: failure()
+ run: cat fuzzforge-mcp/server.log || true
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 638be6e..bd98c16 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,6 +1,6 @@
-# Contributing to FuzzForge OSS
+# Contributing to FuzzForge AI
-Thank you for your interest in contributing to FuzzForge OSS! We welcome contributions from the community and are excited to collaborate with you.
+Thank you for your interest in contributing to FuzzForge AI! We welcome contributions from the community and are excited to collaborate with you.
**Our Vision**: FuzzForge aims to be a **universal platform for security research** across all cybersecurity domains. Through our modular architecture, any security toolβfrom fuzzing engines to cloud scanners, from mobile app analyzers to IoT security toolsβcan be integrated as a containerized module and controlled via AI agents.
@@ -360,8 +360,8 @@ Beyond modules, you can contribute to FuzzForge's core components.
1. **Clone and Install**
```bash
- git clone https://github.com/FuzzingLabs/fuzzforge-oss.git
- cd fuzzforge-oss
+ git clone https://github.com/FuzzingLabs/fuzzforge_ai.git
+ cd fuzzforge_ai
uv sync --all-extras
```
@@ -538,7 +538,7 @@ Before submitting a new module:
## License
-By contributing to FuzzForge OSS, you agree that your contributions will be licensed under the same license as the project (see [LICENSE](LICENSE)).
+By contributing to FuzzForge AI, you agree that your contributions will be licensed under the same license as the project (see [LICENSE](LICENSE)).
For module contributions:
- Modules you create remain under the project license
diff --git a/Makefile b/Makefile
index a5ebc7e..f04b240 100644
--- a/Makefile
+++ b/Makefile
@@ -1,10 +1,10 @@
-.PHONY: help install sync format lint typecheck test build-modules clean
+.PHONY: help install sync format lint typecheck test build-hub-images clean
SHELL := /bin/bash
# Default target
help:
- @echo "FuzzForge OSS Development Commands"
+ @echo "FuzzForge AI Development Commands"
@echo ""
@echo " make install - Install all dependencies"
@echo " make sync - Sync shared packages from upstream"
@@ -12,8 +12,8 @@ help:
@echo " make lint - Lint code with ruff"
@echo " make typecheck - Type check with mypy"
@echo " make test - Run all tests"
- @echo " make build-modules - Build all module container images"
- @echo " make clean - Clean build artifacts"
+ @echo " make build-hub-images - Build all mcp-security-hub images"
+ @echo " make clean - Clean build artifacts"
@echo ""
# Install all dependencies
@@ -64,34 +64,9 @@ test:
fi \
done
-# Build all module container images
-# Uses Docker by default, or Podman if FUZZFORGE_ENGINE=podman
-build-modules:
- @echo "Building FuzzForge module images..."
- @if [ "$$FUZZFORGE_ENGINE" = "podman" ]; then \
- if [ -n "$$SNAP" ]; then \
- echo "Using Podman with isolated storage (Snap detected)"; \
- CONTAINER_CMD="podman --root ~/.fuzzforge/containers/storage --runroot ~/.fuzzforge/containers/run"; \
- else \
- echo "Using Podman"; \
- CONTAINER_CMD="podman"; \
- fi; \
- else \
- echo "Using Docker"; \
- CONTAINER_CMD="docker"; \
- fi; \
- for module in fuzzforge-modules/*/; do \
- if [ -f "$$module/Dockerfile" ] && \
- [ "$$module" != "fuzzforge-modules/fuzzforge-modules-sdk/" ] && \
- [ "$$module" != "fuzzforge-modules/fuzzforge-module-template/" ]; then \
- name=$$(basename $$module); \
- version=$$(grep 'version' "$$module/pyproject.toml" 2>/dev/null | head -1 | sed 's/.*"\(.*\\)".*/\\1/' || echo "0.1.0"); \
- echo "Building $$name:$$version..."; \
- $$CONTAINER_CMD build -t "fuzzforge-$$name:$$version" "$$module" || exit 1; \
- fi \
- done
- @echo ""
- @echo "β All modules built successfully!"
+# Build all mcp-security-hub images for the firmware analysis pipeline
+build-hub-images:
+ @bash scripts/build-hub-images.sh
# Clean build artifacts
clean:
diff --git a/README.md b/README.md
index 137cfb7..a180513 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-
FuzzForge OSS
+ FuzzForge AI
AI-Powered Security Research Orchestration via MCP
@@ -26,13 +26,13 @@
---
-> π§ **FuzzForge OSS is under active development.** Expect breaking changes and new features!
+> π§ **FuzzForge AI is under active development.** Expect breaking changes and new features!
---
## π Overview
-**FuzzForge OSS** is an open-source runtime that enables AI agents (GitHub Copilot, Claude, etc.) to orchestrate security research workflows through the **Model Context Protocol (MCP)**.
+**FuzzForge AI** is an open-source runtime that enables AI agents (GitHub Copilot, Claude, etc.) to orchestrate security research workflows through the **Model Context Protocol (MCP)**.
### The Core: Modules
@@ -43,7 +43,7 @@ At the heart of FuzzForge are **modules** - containerized security tools that AI
- **π Composable**: Chain modules together into automated workflows
- **π¦ Extensible**: Build custom modules with the Python SDK
-The OSS runtime handles module discovery, execution, and result collection. Security modules (developed separately) provide the actual security tooling - from static analyzers to fuzzers to crash triagers.
+FuzzForge AI handles module discovery, execution, and result collection. Security modules (developed separately) provide the actual security tooling - from static analyzers to fuzzers to crash triagers.
Instead of manually running security tools, describe what you want and let your AI assistant handle it.
@@ -171,11 +171,11 @@ FuzzForge modules are containerized security tools that AI agents can orchestrat
### Module Ecosystem
-| | FuzzForge OSS | FuzzForge Enterprise Modules |
+| | FuzzForge AI | FuzzForge Enterprise Modules |
|---|---|---|
| **What** | Runtime & MCP server | Security research modules |
| **License** | Apache 2.0 | BSL 1.1 (Business Source License) |
-| **Compatibility** | ✅ Runs any compatible module | ✅ Works with OSS runtime |
+| **Compatibility** | ✅ Runs any compatible module | ✅ Works with FuzzForge AI |
**Enterprise modules** are developed separately and provide production-ready security tooling:
@@ -187,7 +187,7 @@ FuzzForge modules are containerized security tools that AI agents can orchestrat
| π **Vulnerability Detection** | Pattern Matcher, Taint Analyzer | Security vulnerability scanning |
| π **Reporting** | Report Generator, SARIF Exporter | Automated security report generation |
-> π‘ **Build your own modules!** The FuzzForge SDK allows you to create custom modules that integrate seamlessly with the OSS runtime. See [Creating Custom Modules](#-creating-custom-modules).
+> π‘ **Build your own modules!** The FuzzForge SDK allows you to create custom modules that integrate seamlessly with FuzzForge AI. See [Creating Custom Modules](#-creating-custom-modules).
### Execution Modes
diff --git a/ROADMAP.md b/ROADMAP.md
index 8741bef..20f3632 100644
--- a/ROADMAP.md
+++ b/ROADMAP.md
@@ -1,6 +1,6 @@
-# FuzzForge OSS Roadmap
+# FuzzForge AI Roadmap
-This document outlines the planned features and development direction for FuzzForge OSS.
+This document outlines the planned features and development direction for FuzzForge AI.
---
diff --git a/USAGE.md b/USAGE.md
index 095a0bd..07ae248 100644
--- a/USAGE.md
+++ b/USAGE.md
@@ -1,8 +1,9 @@
-# FuzzForge OSS Usage Guide
+# FuzzForge AI Usage Guide
-This guide covers everything you need to know to get started with FuzzForge OSS - from installation to running your first security research workflow with AI.
+This guide covers everything you need to know to get started with FuzzForge AI — from installation to linking your first MCP hub and running security research workflows with AI.
> **FuzzForge is designed to be used with AI agents** (GitHub Copilot, Claude, etc.) via MCP.
+> A terminal UI (`fuzzforge ui`) is provided for managing agents and hubs.
> The CLI is available for advanced users but the primary experience is through natural language interaction with your AI assistant.
---
@@ -12,8 +13,17 @@ This guide covers everything you need to know to get started with FuzzForge OSS
- [Quick Start](#quick-start)
- [Prerequisites](#prerequisites)
- [Installation](#installation)
-- [Building Modules](#building-modules)
-- [MCP Server Configuration](#mcp-server-configuration)
+- [Terminal UI](#terminal-ui)
+ - [Launching the UI](#launching-the-ui)
+ - [Dashboard](#dashboard)
+ - [Agent Setup](#agent-setup)
+ - [Hub Manager](#hub-manager)
+- [MCP Hub System](#mcp-hub-system)
+ - [What is an MCP Hub?](#what-is-an-mcp-hub)
+ - [FuzzingLabs Security Hub](#fuzzinglabs-security-hub)
+ - [Linking a Custom Hub](#linking-a-custom-hub)
+ - [Building Hub Images](#building-hub-images)
+- [MCP Server Configuration (CLI)](#mcp-server-configuration-cli)
- [GitHub Copilot](#github-copilot)
- [Claude Code (CLI)](#claude-code-cli)
- [Claude Desktop](#claude-desktop)
@@ -27,28 +37,43 @@ This guide covers everything you need to know to get started with FuzzForge OSS
## Quick Start
> **Prerequisites:** You need [uv](https://docs.astral.sh/uv/) and [Docker](https://docs.docker.com/get-docker/) installed.
-> See the [Prerequisites](#prerequisites) section for installation instructions.
+> See the [Prerequisites](#prerequisites) section for details.
```bash
# 1. Clone and install
-git clone https://github.com/FuzzingLabs/fuzzforge-oss.git
-cd fuzzforge-oss
+git clone https://github.com/FuzzingLabs/fuzzforge_ai.git
+cd fuzzforge_ai
uv sync
-# 2. Build the module images (one-time setup)
-make build-modules
+# 2. Launch the terminal UI
+uv run fuzzforge ui
-# 3. Install MCP for your AI agent
-uv run fuzzforge mcp install copilot # For VS Code + GitHub Copilot
-# OR
-uv run fuzzforge mcp install claude-code # For Claude Code CLI
+# 3. Press 'h' → "FuzzingLabs Hub" to clone & link the default security hub
+# 4. Select an agent row and press Enter to install the MCP server for your agent
+# 5. Build the Docker images for the hub tools (required before tools can run)
+./scripts/build-hub-images.sh
-# 4. Restart your AI agent (VS Code, Claude, etc.)
-
-# 5. Start talking to your AI:
-# "List available FuzzForge modules"
+# 6. Restart your AI agent and start talking:
+# "What security tools are available?"
+# "Scan this binary with binwalk and yara"
# "Analyze this Rust crate for fuzzable functions"
-# "Start fuzzing the parse_input function"
+```
+
+Or do it entirely from the command line:
+
+```bash
+# Install MCP for your AI agent
+uv run fuzzforge mcp install copilot # For VS Code + GitHub Copilot
+# OR
+uv run fuzzforge mcp install claude-code # For Claude Code CLI
+
+# Clone and link the default security hub
+git clone git@github.com:FuzzingLabs/mcp-security-hub.git ~/.fuzzforge/hubs/mcp-security-hub
+
+# Build hub tool images (required — tools only run once their image is built)
+./scripts/build-hub-images.sh
+
+# Restart your AI agent — done!
```
> **Note:** FuzzForge uses Docker by default. Podman is also supported via `--engine podman`.
@@ -57,11 +82,12 @@ uv run fuzzforge mcp install claude-code # For Claude Code CLI
## Prerequisites
-Before installing FuzzForge OSS, ensure you have:
+Before installing FuzzForge AI, ensure you have:
-- **Python 3.12+** - [Download Python](https://www.python.org/downloads/)
-- **uv** package manager - [Install uv](https://docs.astral.sh/uv/)
-- **Docker** - Container runtime ([Install Docker](https://docs.docker.com/get-docker/))
+- **Python 3.12+** — [Download Python](https://www.python.org/downloads/)
+- **uv** package manager — [Install uv](https://docs.astral.sh/uv/)
+- **Docker** — Container runtime ([Install Docker](https://docs.docker.com/get-docker/))
+- **Git** — For cloning hub repositories
### Installing uv
@@ -95,8 +121,8 @@ sudo usermod -aG docker $USER
### 1. Clone the Repository
```bash
-git clone https://github.com/FuzzingLabs/fuzzforge-oss.git
-cd fuzzforge-oss
+git clone https://github.com/FuzzingLabs/fuzzforge_ai.git
+cd fuzzforge_ai
```
### 2. Install Dependencies
@@ -115,74 +141,164 @@ uv run fuzzforge --help
---
-## Building Modules
+## Terminal UI
-FuzzForge modules are containerized security tools. After cloning, you need to build them once:
+FuzzForge ships with a terminal user interface (TUI) built on [Textual](https://textual.textualize.io/) for managing AI agents and MCP hub servers from a single dashboard.
-### Build All Modules
+### Launching the UI
```bash
-# From the fuzzforge-oss directory
-make build-modules
+uv run fuzzforge ui
```
-This builds all available modules:
-- `fuzzforge-rust-analyzer` - Analyzes Rust code for fuzzable functions
-- `fuzzforge-cargo-fuzzer` - Runs cargo-fuzz on Rust crates
-- `fuzzforge-harness-validator` - Validates generated fuzzing harnesses
-- `fuzzforge-crash-analyzer` - Analyzes crash inputs
+### Dashboard
-### Build a Single Module
+The main screen is split into two panels:
-```bash
-# Build a specific module
-cd fuzzforge-modules/rust-analyzer
-make build
-```
+| Panel | Content |
+|-------|---------|
+| **AI Agents** (left) | Shows GitHub Copilot, Claude Desktop, and Claude Code with live link status and config file path |
+| **Hub Servers** (right) | Shows all configured MCP hub tools with Docker image name, source hub, and build status (✅ Ready / ❌ Not built) |
-### Verify Modules are Built
+### Keyboard Shortcuts
-```bash
-# List built module images
-docker images | grep fuzzforge
-```
+| Key | Action |
+|-----|--------|
+| `Enter` | **Select** — Act on the selected row (setup/unlink an agent) |
+| `h` | **Hub Manager** — Open the hub management screen |
+| `r` | **Refresh** — Re-check all agent and hub statuses |
+| `q` | **Quit** |
-You should see something like:
-```
-fuzzforge-rust-analyzer 0.1.0 abc123def456 2 minutes ago 850 MB
-fuzzforge-cargo-fuzzer 0.1.0 789ghi012jkl 2 minutes ago 1.2 GB
-...
-```
+### Agent Setup
+
+Select an agent row in the AI Agents table and press `Enter`:
+
+- **If the agent is not linked** → a setup dialog opens asking for your container engine (Docker or Podman), then installs the FuzzForge MCP configuration
+- **If the agent is already linked** → a confirmation dialog offers to unlink it (removes the `fuzzforge` entry without touching other MCP servers)
+
+The setup auto-detects:
+- FuzzForge installation root
+- Docker/Podman socket path
+- Hub configuration from `hub-config.json`
+
+### Hub Manager
+
+Press `h` to open the hub manager. This is where you manage your MCP hub repositories:
+
+| Button | Action |
+|--------|--------|
+| **FuzzingLabs Hub** | One-click clone of the official [mcp-security-hub](https://github.com/FuzzingLabs/mcp-security-hub) repository — clones to `~/.fuzzforge/hubs/mcp-security-hub`, scans for tools, and registers them in `hub-config.json` |
+| **Link Path** | Link any local directory as a hub — enter a name and path, FuzzForge scans it for `category/tool-name/Dockerfile` patterns |
+| **Clone URL** | Clone any git repository and link it as a hub |
+| **Remove** | Unlink the selected hub and remove its servers from the configuration |
+
+The hub table shows:
+- **Name** — Hub name (★ prefix for the default hub)
+- **Path** — Local directory path
+- **Servers** — Number of MCP tools discovered
+- **Source** — Git URL or "local"
+- **Source** β Git URL or "local"
---
-## MCP Server Configuration
+## MCP Hub System
-FuzzForge integrates with AI agents through the Model Context Protocol (MCP). Configure your preferred AI agent to use FuzzForge tools.
+### What is an MCP Hub?
+
+An MCP hub is a directory containing one or more containerized MCP tools, organized by category:
+
+```
+my-hub/
+βββ category-a/
+β βββ tool-1/
+β β βββ Dockerfile
+β βββ tool-2/
+β βββ Dockerfile
+βββ category-b/
+β βββ tool-3/
+β βββ Dockerfile
+βββ ...
+```
+
+FuzzForge scans for the pattern `category/tool-name/Dockerfile` and auto-generates server configuration entries for each discovered tool.
+
+### FuzzingLabs Security Hub
+
+The default MCP hub is [mcp-security-hub](https://github.com/FuzzingLabs/mcp-security-hub), maintained by FuzzingLabs. It includes **40+ security tools** across categories:
+
+| Category | Tools |
+|----------|-------|
+| **Reconnaissance** | nmap, masscan, shodan, zoomeye, whatweb, pd-tools, externalattacker, networksdb |
+| **Binary Analysis** | binwalk, yara, capa, radare2, ghidra, ida |
+| **Code Security** | semgrep, rust-analyzer, harness-tester, cargo-fuzzer, crash-analyzer |
+| **Web Security** | nuclei, nikto, sqlmap, ffuf, burp, waybackurls |
+| **Fuzzing** | boofuzz, dharma |
+| **Exploitation** | searchsploit |
+| **Secrets** | gitleaks |
+| **Cloud Security** | trivy, prowler, roadrecon |
+| **OSINT** | maigret, dnstwist |
+| **Threat Intel** | virustotal, otx |
+| **Password Cracking** | hashcat |
+| **Blockchain** | medusa, solazy, daml-viewer |
+
+**Clone it via the UI:**
+
+1. `uv run fuzzforge ui`
+2. Press `h` β click **FuzzingLabs Hub**
+3. Wait for the clone to finish — servers are auto-registered
+
+**Or clone manually:**
+
+```bash
+git clone git@github.com:FuzzingLabs/mcp-security-hub.git ~/.fuzzforge/hubs/mcp-security-hub
+```
+
+### Linking a Custom Hub
+
+You can link any directory that follows the `category/tool-name/Dockerfile` layout:
+
+**Via the UI:**
+
+1. Press `h` → **Link Path**
+2. Enter a name and the directory path
+
+**Via the CLI (planned):** Not yet available — use the UI.
+
+### Building Hub Images
+
+After linking a hub, you need to build the Docker images before the tools can be used:
+
+```bash
+# Build all images from the default security hub
+./scripts/build-hub-images.sh
+
+# Or build a single tool image
+docker build -t semgrep-mcp:latest mcp-security-hub/code-security/semgrep-mcp/
+```
+
+The dashboard hub table shows ✅ Ready for built images and ❌ Not built for missing ones.
+
+---
+
+## MCP Server Configuration (CLI)
+
+If you prefer the command line over the TUI, you can configure agents directly:
### GitHub Copilot
```bash
-# That's it! Just run this command:
uv run fuzzforge mcp install copilot
```
-The command auto-detects everything:
-- **FuzzForge root** - Where FuzzForge is installed
-- **Modules path** - Defaults to `fuzzforge-oss/fuzzforge-modules`
-- **Docker socket** - Auto-detects `/var/run/docker.sock`
+The command auto-detects:
+- **FuzzForge root** — Where FuzzForge is installed
+- **Docker socket** — Auto-detects `/var/run/docker.sock`
-**Optional overrides** (usually not needed):
+**Optional overrides:**
```bash
-uv run fuzzforge mcp install copilot \
- --modules /path/to/modules \
- --engine podman # if using Podman instead of Docker
+uv run fuzzforge mcp install copilot --engine podman
```
-**After installation:**
-1. Restart VS Code
-2. Open GitHub Copilot Chat
-3. FuzzForge tools are now available!
+**After installation:** Restart VS Code. FuzzForge tools appear in GitHub Copilot Chat.
### Claude Code (CLI)
@@ -190,143 +306,89 @@ uv run fuzzforge mcp install copilot \
uv run fuzzforge mcp install claude-code
```
-Installs to `~/.claude.json` so FuzzForge tools are available from any directory.
-
-**After installation:**
-1. Run `claude` from any directory
-2. FuzzForge tools are now available!
+Installs to `~/.claude.json`. FuzzForge tools are available from any directory after restarting Claude.
### Claude Desktop
```bash
-# Automatic installation
uv run fuzzforge mcp install claude-desktop
-
-# Verify
-uv run fuzzforge mcp status
```
-**After installation:**
-1. Restart Claude Desktop
-2. FuzzForge tools are now available!
+**After installation:** Restart Claude Desktop.
-### Check MCP Status
+### Check Status
```bash
uv run fuzzforge mcp status
```
-Shows configuration status for all supported AI agents:
-
-```
-ββββββββββββββββββββββββ³ββββββββββββββββββββββββββββββββββββββββββββ³βββββββββββββββ³βββββββββββββββββββββββββββ
-β Agent β Config Path β Status β FuzzForge Configured β
-β‘βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ©
-β GitHub Copilot β ~/.config/Code/User/mcp.json β β Exists β β Yes β
-β Claude Desktop β ~/.config/Claude/claude_desktop_config... β Not found β - β
-β Claude Code β ~/.claude.json β β Exists β β Yes β
-ββββββββββββββββββββββββ΄ββββββββββββββββββββββββββββββββββββββββββββ΄βββββββββββββββ΄ββββββββββββββββββββββββββ
-```
-
-### Generate Config Without Installing
-
-```bash
-# Preview the configuration that would be installed
-uv run fuzzforge mcp generate copilot
-uv run fuzzforge mcp generate claude-desktop
-uv run fuzzforge mcp generate claude-code
-```
-
-### Remove MCP Configuration
+### Remove Configuration
```bash
uv run fuzzforge mcp uninstall copilot
-uv run fuzzforge mcp uninstall claude-desktop
uv run fuzzforge mcp uninstall claude-code
+uv run fuzzforge mcp uninstall claude-desktop
```
---
## Using FuzzForge with AI
-Once MCP is configured, you interact with FuzzForge through natural language with your AI assistant.
+Once MCP is configured and hub images are built, interact with FuzzForge through natural language with your AI assistant.
### Example Conversations
**Discover available tools:**
```
-You: "What FuzzForge modules are available?"
-AI: Uses list_modules β "I found 4 modules: rust-analyzer, cargo-fuzzer,
- harness-validator, and crash-analyzer..."
+You: "What security tools are available in FuzzForge?"
+AI: Queries hub tools → "I found 15 tools across categories: nmap for
+ port scanning, binwalk for firmware analysis, semgrep for code
+ scanning, cargo-fuzzer for Rust fuzzing..."
```
-**Analyze code for fuzzing targets:**
+**Analyze a binary:**
+```
+You: "Extract and analyze this firmware image"
+AI: Uses binwalk to extract → yara for pattern matching → capa for
+ capability detection β "Found 3 embedded filesystems, 2 YARA
+ matches for known vulnerabilities..."
+```
+
+**Fuzz Rust code:**
```
You: "Analyze this Rust crate for functions I should fuzz"
-AI: Uses execute_module("rust-analyzer") β "I found 3 good fuzzing candidates:
- - parse_input() in src/parser.rs - handles untrusted input
- - decode_message() in src/codec.rs - complex parsing logic
- ..."
-```
+AI: Uses rust-analyzer → "Found 3 fuzzable entry points..."
-**Generate and validate harnesses:**
-```
-You: "Generate a fuzzing harness for the parse_input function"
-AI: Creates harness code, then uses execute_module("harness-validator")
- β "Here's a harness that compiles successfully..."
-```
-
-**Run continuous fuzzing:**
-```
You: "Start fuzzing parse_input for 10 minutes"
-AI: Uses start_continuous_module("cargo-fuzzer") β "Started fuzzing session abc123"
-
-You: "How's the fuzzing going?"
-AI: Uses get_continuous_status("abc123") β "Running for 5 minutes:
- - 150,000 executions
- - 2 crashes found
- - 45% edge coverage"
-
-You: "Stop and show me the crashes"
-AI: Uses stop_continuous_module("abc123") β "Found 2 unique crashes..."
+AI: Uses cargo-fuzzer → "Fuzzing session started. 2 crashes found..."
```
-### Available MCP Tools
-
-| Tool | Description |
-|------|-------------|
-| `list_modules` | List all available security modules |
-| `execute_module` | Run a module once and get results |
-| `start_continuous_module` | Start a long-running module (e.g., fuzzing) |
-| `get_continuous_status` | Check status of a continuous session |
-| `stop_continuous_module` | Stop a continuous session |
-| `list_continuous_sessions` | List all active sessions |
-| `get_execution_results` | Retrieve results from an execution |
-| `execute_workflow` | Run a multi-step workflow |
+**Scan for vulnerabilities:**
+```
+You: "Scan this codebase with semgrep for security issues"
+AI: Uses semgrep-mcp → "Found 5 findings: 2 high severity SQL injection
+ patterns, 3 medium severity hardcoded secrets..."
+```
---
## CLI Reference
-> **Note:** The CLI is for advanced users. Most users should interact with FuzzForge through their AI assistant.
+### UI Command
+
+```bash
+uv run fuzzforge ui # Launch the terminal dashboard
+```
### MCP Commands
```bash
-uv run fuzzforge mcp status # Check configuration status
-uv run fuzzforge mcp install # Install MCP config
+uv run fuzzforge mcp status # Check agent configuration status
+uv run fuzzforge mcp install # Install MCP config (copilot|claude-code|claude-desktop)
uv run fuzzforge mcp uninstall # Remove MCP config
uv run fuzzforge mcp generate # Preview config without installing
```
-### Module Commands
-
-```bash
-uv run fuzzforge modules list # List available modules
-uv run fuzzforge modules info # Show module details
-uv run fuzzforge modules run --assets . # Run a module
-```
-
### Project Commands
```bash
@@ -343,14 +405,20 @@ uv run fuzzforge project results # Get execution results
Configure FuzzForge using environment variables:
```bash
-# Project paths
-export FUZZFORGE_MODULES_PATH=/path/to/modules
-export FUZZFORGE_STORAGE_PATH=/path/to/storage
+# Override the FuzzForge installation root (auto-detected from cwd by default)
+export FUZZFORGE_ROOT=/path/to/fuzzforge_ai
+
+# Override the user-global data directory (default: ~/.fuzzforge)
+# Useful for isolated testing without touching your real installation
+export FUZZFORGE_USER_DIR=/tmp/my-fuzzforge-test
+
+# Storage path for projects and execution results (default: /.fuzzforge/storage)
+export FUZZFORGE_STORAGE__PATH=/path/to/storage
# Container engine (Docker is default)
export FUZZFORGE_ENGINE__TYPE=docker # or podman
-# Podman-specific settings (only needed if using Podman under Snap)
+# Podman-specific container storage paths
export FUZZFORGE_ENGINE__GRAPHROOT=~/.fuzzforge/containers/storage
export FUZZFORGE_ENGINE__RUNROOT=~/.fuzzforge/containers/run
```
@@ -384,66 +452,62 @@ Error: Permission denied connecting to Docker socket
**Solution:**
```bash
-# Add your user to the docker group
sudo usermod -aG docker $USER
-
-# Log out and back in for changes to take effect
-# Then verify:
+# Log out and back in, then verify:
docker run --rm hello-world
```
-### No Modules Found
+### Hub Images Not Built
-```
-No modules found.
-```
+The dashboard shows ❌ Not built for tools:
-**Solution:**
-1. Build the modules first: `make build-modules`
-2. Check the modules path: `uv run fuzzforge modules list`
-3. Verify images exist: `docker images | grep fuzzforge`
+```bash
+# Build all hub images
+./scripts/build-hub-images.sh
+
+# Or build a single tool
+docker build -t <tool-name>:latest mcp-security-hub/<category>/<tool-name>/
+```
### MCP Server Not Starting
-Check the MCP configuration:
```bash
+# Check agent configuration
uv run fuzzforge mcp status
-```
-Verify the configuration file path exists and contains valid JSON.
-
-### Module Container Fails to Build
-
-```bash
-# Build module container manually to see errors
-cd fuzzforge-modules/
-docker build -t .
+# Verify the config file path exists and contains valid JSON
+cat ~/.config/Code/User/mcp.json # Copilot
+cat ~/.claude.json # Claude Code
```
### Using Podman Instead of Docker
-If you prefer Podman:
```bash
-# Use --engine podman with CLI
+# Install with Podman engine
uv run fuzzforge mcp install copilot --engine podman
# Or set environment variable
export FUZZFORGE_ENGINE=podman
```
-### Check Logs
+### Hub Registry
+
+FuzzForge stores linked hub information in `~/.fuzzforge/hubs.json`. If something goes wrong:
-FuzzForge stores execution logs in the storage directory:
```bash
-ls -la ~/.fuzzforge/storage///
+# View registry
+cat ~/.fuzzforge/hubs.json
+
+# Reset registry
+rm ~/.fuzzforge/hubs.json
```
---
## Next Steps
-- π Read the [Module SDK Guide](fuzzforge-modules/fuzzforge-modules-sdk/README.md) to create custom modules
-- π¬ Check the demos in the [README](README.md)
+- π₯οΈ Launch `uv run fuzzforge ui` and explore the dashboard
+- π Clone the [mcp-security-hub](https://github.com/FuzzingLabs/mcp-security-hub) for 40+ security tools
- π¬ Join our [Discord](https://discord.gg/8XEX33UUwZ) for support
---
diff --git a/fuzzforge-cli/pyproject.toml b/fuzzforge-cli/pyproject.toml
index 02b066b..2bfc1fb 100644
--- a/fuzzforge-cli/pyproject.toml
+++ b/fuzzforge-cli/pyproject.toml
@@ -1,13 +1,14 @@
[project]
name = "fuzzforge-cli"
version = "0.0.1"
-description = "FuzzForge CLI - Command-line interface for FuzzForge OSS."
+description = "FuzzForge CLI - Command-line interface for FuzzForge AI."
authors = []
readme = "README.md"
requires-python = ">=3.14"
dependencies = [
- "fuzzforge-runner==0.0.1",
+ "fuzzforge-mcp==0.0.1",
"rich>=14.0.0",
+ "textual>=1.0.0",
"typer==0.20.1",
]
@@ -25,4 +26,4 @@ tests = [
fuzzforge = "fuzzforge_cli.__main__:main"
[tool.uv.sources]
-fuzzforge-runner = { workspace = true }
+fuzzforge-mcp = { workspace = true }
diff --git a/fuzzforge-cli/ruff.toml b/fuzzforge-cli/ruff.toml
index 678218a..6db025b 100644
--- a/fuzzforge-cli/ruff.toml
+++ b/fuzzforge-cli/ruff.toml
@@ -13,3 +13,49 @@ ignore = [
"PLR2004", # allowing comparisons using unamed numerical constants in tests
"S101", # allowing 'assert' statements in tests
]
+"src/fuzzforge_cli/tui/**" = [
+ "ARG002", # unused method argument: callback signature
+ "BLE001", # blind exception: broad error handling in UI
+ "C901", # complexity: UI logic
+ "D107", # missing docstring in __init__: simple dataclasses
+ "FBT001", # boolean positional arg
+ "FBT002", # boolean default arg
+ "PLC0415", # import outside top-level: lazy loading
+ "PLR0911", # too many return statements
+ "PLR0912", # too many branches
+ "PLR2004", # magic value comparison
+ "RUF012", # mutable class default: Textual pattern
+ "S603", # subprocess: validated inputs
+ "S607", # subprocess: PATH lookup
+ "SIM108", # ternary: readability preference
+ "TC001", # TYPE_CHECKING: runtime type needs
+ "TC002", # TYPE_CHECKING: runtime type needs
+ "TC003", # TYPE_CHECKING: runtime type needs
+ "TRY300", # try-else: existing pattern
+]
+"tui/*.py" = [
+ "D107", # missing docstring in __init__: simple dataclasses
+ "TC001", # TYPE_CHECKING: runtime type needs
+ "TC002", # TYPE_CHECKING: runtime type needs
+ "TC003", # TYPE_CHECKING: runtime type needs
+]
+"src/fuzzforge_cli/commands/mcp.py" = [
+ "ARG001", # unused argument: callback signature
+ "B904", # raise from: existing pattern
+ "F841", # unused variable: legacy code
+ "FBT002", # boolean default arg
+ "PLR0912", # too many branches
+ "PLR0915", # too many statements
+ "SIM108", # ternary: readability preference
+]
+"src/fuzzforge_cli/application.py" = [
+ "B008", # function call in default: Path.cwd()
+ "PLC0415", # import outside top-level: lazy loading
+]
+"src/fuzzforge_cli/commands/projects.py" = [
+ "TC003", # TYPE_CHECKING: runtime type needs
+]
+"src/fuzzforge_cli/context.py" = [
+ "TC002", # TYPE_CHECKING: runtime type needs
+ "TC003", # TYPE_CHECKING: runtime type needs
+]
diff --git a/fuzzforge-cli/src/fuzzforge_cli/application.py b/fuzzforge-cli/src/fuzzforge_cli/application.py
index 8e3b89a..2ff84cb 100644
--- a/fuzzforge-cli/src/fuzzforge_cli/application.py
+++ b/fuzzforge-cli/src/fuzzforge_cli/application.py
@@ -3,16 +3,16 @@
from pathlib import Path
from typing import Annotated
-from fuzzforge_runner import Runner, Settings
+from fuzzforge_mcp.storage import LocalStorage # type: ignore[import-untyped]
from typer import Context as TyperContext
from typer import Option, Typer
-from fuzzforge_cli.commands import mcp, modules, projects
+from fuzzforge_cli.commands import mcp, projects
from fuzzforge_cli.context import Context
application: Typer = Typer(
name="fuzzforge",
- help="FuzzForge OSS - Security research orchestration platform.",
+ help="FuzzForge AI - Security research orchestration platform.",
)
@@ -27,15 +27,6 @@ def main(
help="Path to the FuzzForge project directory.",
),
] = Path.cwd(),
- modules_path: Annotated[
- Path,
- Option(
- "--modules",
- "-m",
- envvar="FUZZFORGE_MODULES_PATH",
- help="Path to the modules directory.",
- ),
- ] = Path.home() / ".fuzzforge" / "modules",
storage_path: Annotated[
Path,
Option(
@@ -43,54 +34,37 @@ def main(
envvar="FUZZFORGE_STORAGE__PATH",
help="Path to the storage directory.",
),
- ] = Path.home() / ".fuzzforge" / "storage",
- engine_type: Annotated[
- str,
- Option(
- "--engine",
- envvar="FUZZFORGE_ENGINE__TYPE",
- help="Container engine type (docker or podman).",
- ),
- ] = "docker",
- engine_socket: Annotated[
- str,
- Option(
- "--socket",
- envvar="FUZZFORGE_ENGINE__SOCKET",
- help="Container engine socket path.",
- ),
- ] = "",
+ ] = Path.cwd() / ".fuzzforge" / "storage",
context: TyperContext = None, # type: ignore[assignment]
) -> None:
- """FuzzForge OSS - Security research orchestration platform.
+ """FuzzForge AI - Security research orchestration platform.
- Execute security research modules in isolated containers.
+ Discover and execute MCP hub tools for security research.
"""
- from fuzzforge_runner.settings import EngineSettings, ProjectSettings, StorageSettings
-
- settings = Settings(
- engine=EngineSettings(
- type=engine_type, # type: ignore[arg-type]
- socket=engine_socket,
- ),
- storage=StorageSettings(
- path=storage_path,
- ),
- project=ProjectSettings(
- default_path=project_path,
- modules_path=modules_path,
- ),
- )
-
- runner = Runner(settings)
+ storage = LocalStorage(base_path=storage_path)
context.obj = Context(
- runner=runner,
+ storage=storage,
project_path=project_path,
)
application.add_typer(mcp.application)
-application.add_typer(modules.application)
application.add_typer(projects.application)
+
+
+@application.command(
+ name="ui",
+ help="Launch the FuzzForge terminal interface.",
+)
+def launch_ui() -> None:
+ """Launch the interactive FuzzForge TUI dashboard.
+
+ Provides a visual dashboard showing AI agent connection status
+ and hub server availability, with wizards for setup and configuration.
+
+ """
+ from fuzzforge_cli.tui.app import FuzzForgeApp
+
+ FuzzForgeApp().run()
diff --git a/fuzzforge-cli/src/fuzzforge_cli/commands/mcp.py b/fuzzforge-cli/src/fuzzforge_cli/commands/mcp.py
index 249cb27..8eb9e78 100644
--- a/fuzzforge-cli/src/fuzzforge_cli/commands/mcp.py
+++ b/fuzzforge-cli/src/fuzzforge_cli/commands/mcp.py
@@ -12,7 +12,7 @@ import os
import sys
from enum import StrEnum
from pathlib import Path
-from typing import Annotated
+from typing import Annotated, Any
from rich.console import Console
from rich.panel import Panel
@@ -44,10 +44,10 @@ def _get_copilot_mcp_path() -> Path:
"""
if sys.platform == "darwin":
return Path.home() / "Library" / "Application Support" / "Code" / "User" / "mcp.json"
- elif sys.platform == "win32":
+ if sys.platform == "win32":
return Path(os.environ.get("APPDATA", "")) / "Code" / "User" / "mcp.json"
- else: # Linux
- return Path.home() / ".config" / "Code" / "User" / "mcp.json"
+ # Linux
+ return Path.home() / ".config" / "Code" / "User" / "mcp.json"
def _get_claude_desktop_mcp_path() -> Path:
@@ -58,10 +58,10 @@ def _get_claude_desktop_mcp_path() -> Path:
"""
if sys.platform == "darwin":
return Path.home() / "Library" / "Application Support" / "Claude" / "claude_desktop_config.json"
- elif sys.platform == "win32":
+ if sys.platform == "win32":
return Path(os.environ.get("APPDATA", "")) / "Claude" / "claude_desktop_config.json"
- else: # Linux
- return Path.home() / ".config" / "Claude" / "claude_desktop_config.json"
+ # Linux
+ return Path.home() / ".config" / "Claude" / "claude_desktop_config.json"
def _get_claude_code_mcp_path(project_path: Path | None = None) -> Path:
@@ -114,13 +114,13 @@ def _detect_docker_socket() -> str:
:returns: Path to the Docker socket.
"""
- socket_paths = [
- "/var/run/docker.sock",
+ socket_paths: list[Path] = [
+ Path("/var/run/docker.sock"),
Path.home() / ".docker" / "run" / "docker.sock",
]
for path in socket_paths:
- if Path(path).exists():
+ if path.exists():
return str(path)
return "/var/run/docker.sock"
@@ -132,28 +132,33 @@ def _find_fuzzforge_root() -> Path:
:returns: Path to fuzzforge-oss directory.
"""
- # Try to find from current file location
- current = Path(__file__).resolve()
+ # Check environment variable override first
+ env_root = os.environ.get("FUZZFORGE_ROOT")
+ if env_root:
+ return Path(env_root).resolve()
- # Walk up to find fuzzforge-oss root
- for parent in current.parents:
- if (parent / "fuzzforge-mcp").is_dir() and (parent / "fuzzforge-runner").is_dir():
+ # Walk up from cwd to find a fuzzforge root (hub-config.json is the marker)
+ for parent in [Path.cwd(), *Path.cwd().parents]:
+ if (parent / "hub-config.json").is_file():
+ return parent
+
+ # Fall back to __file__-based search (dev install inside fuzzforge-oss)
+ current = Path(__file__).resolve()
+ for parent in current.parents:
+ if (parent / "fuzzforge-mcp").is_dir():
return parent
- # Fall back to cwd
return Path.cwd()
def _generate_mcp_config(
fuzzforge_root: Path,
- modules_path: Path,
engine_type: str,
engine_socket: str,
-) -> dict:
+) -> dict[str, Any]:
"""Generate MCP server configuration.
:param fuzzforge_root: Path to fuzzforge-oss installation.
- :param modules_path: Path to the modules directory.
:param engine_type: Container engine type (podman or docker).
:param engine_socket: Container engine socket path.
:returns: MCP configuration dictionary.
@@ -169,9 +174,12 @@ def _generate_mcp_config(
command = "uv"
args = ["--directory", str(fuzzforge_root), "run", "fuzzforge-mcp"]
- # Self-contained storage paths for FuzzForge containers
- # This isolates FuzzForge from system Podman and avoids snap issues
- fuzzforge_home = Path.home() / ".fuzzforge"
+ # User-global storage paths for FuzzForge containers.
+    # Kept under ~/.fuzzforge so images are built once and shared across
+    # all workspaces — regardless of where `fuzzforge mcp install` is run.
+ # Override with FUZZFORGE_USER_DIR for isolated testing.
+ user_dir_env = os.environ.get("FUZZFORGE_USER_DIR")
+ fuzzforge_home = Path(user_dir_env).resolve() if user_dir_env else Path.home() / ".fuzzforge"
graphroot = fuzzforge_home / "containers" / "storage"
runroot = fuzzforge_home / "containers" / "run"
@@ -181,10 +189,11 @@ def _generate_mcp_config(
"args": args,
"cwd": str(fuzzforge_root),
"env": {
- "FUZZFORGE_MODULES_PATH": str(modules_path),
"FUZZFORGE_ENGINE__TYPE": engine_type,
"FUZZFORGE_ENGINE__GRAPHROOT": str(graphroot),
"FUZZFORGE_ENGINE__RUNROOT": str(runroot),
+ "FUZZFORGE_HUB__ENABLED": "true",
+ "FUZZFORGE_HUB__CONFIG_PATH": str(fuzzforge_root / "hub-config.json"),
},
}
@@ -264,14 +273,6 @@ def generate(
help="AI agent to generate config for (copilot, claude-desktop, or claude-code).",
),
],
- modules_path: Annotated[
- Path | None,
- Option(
- "--modules",
- "-m",
- help="Path to the modules directory.",
- ),
- ] = None,
engine: Annotated[
str,
Option(
@@ -285,16 +286,12 @@ def generate(
:param context: Typer context.
:param agent: Target AI agent.
- :param modules_path: Override modules path.
:param engine: Container engine type.
"""
console = Console()
fuzzforge_root = _find_fuzzforge_root()
- # Use defaults if not specified
- resolved_modules = modules_path or (fuzzforge_root / "fuzzforge-modules")
-
# Detect socket
if engine == "podman":
socket = _detect_podman_socket()
@@ -304,7 +301,6 @@ def generate(
# Generate config
server_config = _generate_mcp_config(
fuzzforge_root=fuzzforge_root,
- modules_path=resolved_modules,
engine_type=engine,
engine_socket=socket,
)
@@ -348,14 +344,6 @@ def install(
help="AI agent to install config for (copilot, claude-desktop, or claude-code).",
),
],
- modules_path: Annotated[
- Path | None,
- Option(
- "--modules",
- "-m",
- help="Path to the modules directory.",
- ),
- ] = None,
engine: Annotated[
str,
Option(
@@ -380,7 +368,6 @@ def install(
:param context: Typer context.
:param agent: Target AI agent.
- :param modules_path: Override modules path.
:param engine: Container engine type.
:param force: Overwrite existing configuration.
@@ -399,9 +386,6 @@ def install(
config_path = _get_claude_desktop_mcp_path()
servers_key = "mcpServers"
- # Use defaults if not specified
- resolved_modules = modules_path or (fuzzforge_root / "fuzzforge-modules")
-
# Detect socket
if engine == "podman":
socket = _detect_podman_socket()
@@ -411,7 +395,6 @@ def install(
# Generate server config
server_config = _generate_mcp_config(
fuzzforge_root=fuzzforge_root,
- modules_path=resolved_modules,
engine_type=engine,
engine_socket=socket,
)
@@ -451,9 +434,9 @@ def install(
console.print(f"[bold]Configuration file:[/bold] {config_path}")
console.print()
console.print("[bold]Settings:[/bold]")
- console.print(f" Modules Path: {resolved_modules}")
console.print(f" Engine: {engine}")
console.print(f" Socket: {socket}")
+ console.print(f" Hub Config: {fuzzforge_root / 'hub-config.json'}")
console.print()
console.print("[bold]Next steps:[/bold]")
diff --git a/fuzzforge-cli/src/fuzzforge_cli/commands/modules.py b/fuzzforge-cli/src/fuzzforge_cli/commands/modules.py
deleted file mode 100644
index 0e43d3b..0000000
--- a/fuzzforge-cli/src/fuzzforge_cli/commands/modules.py
+++ /dev/null
@@ -1,166 +0,0 @@
-"""Module management commands for FuzzForge CLI."""
-
-import asyncio
-from pathlib import Path
-from typing import Annotated, Any
-
-from rich.console import Console
-from rich.table import Table
-from typer import Argument, Context, Option, Typer
-
-from fuzzforge_cli.context import get_project_path, get_runner
-
-application: Typer = Typer(
- name="modules",
- help="Module management commands.",
-)
-
-
-@application.command(
- help="List available modules.",
- name="list",
-)
-def list_modules(
- context: Context,
-) -> None:
- """List all available modules.
-
- :param context: Typer context.
-
- """
- runner = get_runner(context)
- modules = runner.list_modules()
-
- console = Console()
-
- if not modules:
- console.print("[yellow]No modules found.[/yellow]")
- console.print(f" Modules directory: {runner.settings.modules_path}")
- return
-
- table = Table(title="Available Modules")
- table.add_column("Identifier", style="cyan")
- table.add_column("Available")
- table.add_column("Description")
-
- for module in modules:
- table.add_row(
- module.identifier,
- "β" if module.available else "β",
- module.description or "-",
- )
-
- console.print(table)
-
-
-@application.command(
- help="Execute a module.",
- name="run",
-)
-def run_module(
- context: Context,
- module_identifier: Annotated[
- str,
- Argument(
- help="Identifier of the module to execute.",
- ),
- ],
- assets_path: Annotated[
- Path | None,
- Option(
- "--assets",
- "-a",
- help="Path to input assets.",
- ),
- ] = None,
- config: Annotated[
- str | None,
- Option(
- "--config",
- "-c",
- help="Module configuration as JSON string.",
- ),
- ] = None,
-) -> None:
- """Execute a module.
-
- :param context: Typer context.
- :param module_identifier: Module to execute.
- :param assets_path: Optional path to input assets.
- :param config: Optional JSON configuration.
-
- """
- import json
-
- runner = get_runner(context)
- project_path = get_project_path(context)
-
- configuration: dict[str, Any] | None = None
- if config:
- try:
- configuration = json.loads(config)
- except json.JSONDecodeError as e:
- console = Console()
- console.print(f"[red]β[/red] Invalid JSON configuration: {e}")
- return
-
- console = Console()
- console.print(f"[blue]β[/blue] Executing module: {module_identifier}")
-
- async def execute() -> None:
- result = await runner.execute_module(
- module_identifier=module_identifier,
- project_path=project_path,
- configuration=configuration,
- assets_path=assets_path,
- )
-
- if result.success:
- console.print(f"[green]β[/green] Module execution completed")
- console.print(f" Execution ID: {result.execution_id}")
- console.print(f" Results: {result.results_path}")
- else:
- console.print(f"[red]β[/red] Module execution failed")
- console.print(f" Error: {result.error}")
-
- asyncio.run(execute())
-
-
-@application.command(
- help="Show module information.",
- name="info",
-)
-def module_info(
- context: Context,
- module_identifier: Annotated[
- str,
- Argument(
- help="Identifier of the module.",
- ),
- ],
-) -> None:
- """Show information about a specific module.
-
- :param context: Typer context.
- :param module_identifier: Module to get info for.
-
- """
- runner = get_runner(context)
- module = runner.get_module_info(module_identifier)
-
- console = Console()
-
- if module is None:
- console.print(f"[red]β[/red] Module not found: {module_identifier}")
- return
-
- table = Table(title=f"Module: {module.identifier}")
- table.add_column("Property", style="cyan")
- table.add_column("Value")
-
- table.add_row("Identifier", module.identifier)
- table.add_row("Available", "Yes" if module.available else "No")
- table.add_row("Description", module.description or "-")
- table.add_row("Version", module.version or "-")
-
- console.print(table)
diff --git a/fuzzforge-cli/src/fuzzforge_cli/commands/projects.py b/fuzzforge-cli/src/fuzzforge_cli/commands/projects.py
index 8be9f58..a21c666 100644
--- a/fuzzforge-cli/src/fuzzforge_cli/commands/projects.py
+++ b/fuzzforge-cli/src/fuzzforge_cli/commands/projects.py
@@ -7,7 +7,7 @@ from rich.console import Console
from rich.table import Table
from typer import Argument, Context, Option, Typer
-from fuzzforge_cli.context import get_project_path, get_runner
+from fuzzforge_cli.context import get_project_path, get_storage
application: Typer = Typer(
name="project",
@@ -36,10 +36,10 @@ def init_project(
:param path: Path to initialize (defaults to current directory).
"""
- runner = get_runner(context)
+ storage = get_storage(context)
project_path = path or get_project_path(context)
- storage_path = runner.init_project(project_path)
+ storage_path = storage.init_project(project_path)
console = Console()
console.print(f"[green]β[/green] Project initialized at {project_path}")
@@ -65,10 +65,10 @@ def set_assets(
:param assets_path: Path to assets.
"""
- runner = get_runner(context)
+ storage = get_storage(context)
project_path = get_project_path(context)
- stored_path = runner.set_project_assets(project_path, assets_path)
+ stored_path = storage.set_project_assets(project_path, assets_path)
console = Console()
console.print(f"[green]β[/green] Assets stored from {assets_path}")
@@ -87,11 +87,11 @@ def show_info(
:param context: Typer context.
"""
- runner = get_runner(context)
+ storage = get_storage(context)
project_path = get_project_path(context)
- executions = runner.list_executions(project_path)
- assets_path = runner.storage.get_project_assets_path(project_path)
+ executions = storage.list_executions(project_path)
+ assets_path = storage.get_project_assets_path(project_path)
console = Console()
table = Table(title=f"Project: {project_path.name}")
@@ -118,10 +118,10 @@ def list_executions(
:param context: Typer context.
"""
- runner = get_runner(context)
+ storage = get_storage(context)
project_path = get_project_path(context)
- executions = runner.list_executions(project_path)
+ executions = storage.list_executions(project_path)
console = Console()
@@ -134,7 +134,7 @@ def list_executions(
table.add_column("Has Results")
for exec_id in executions:
- has_results = runner.get_execution_results(project_path, exec_id) is not None
+ has_results = storage.get_execution_results(project_path, exec_id) is not None
table.add_row(exec_id, "β" if has_results else "-")
console.print(table)
@@ -168,10 +168,10 @@ def get_results(
:param extract_to: Optional directory to extract to.
"""
- runner = get_runner(context)
+ storage = get_storage(context)
project_path = get_project_path(context)
- results_path = runner.get_execution_results(project_path, execution_id)
+ results_path = storage.get_execution_results(project_path, execution_id)
console = Console()
@@ -182,5 +182,5 @@ def get_results(
console.print(f"[green]β[/green] Results: {results_path}")
if extract_to:
- extracted = runner.extract_results(results_path, extract_to)
+ extracted = storage.extract_results(results_path, extract_to)
console.print(f" Extracted to: {extracted}")
diff --git a/fuzzforge-cli/src/fuzzforge_cli/context.py b/fuzzforge-cli/src/fuzzforge_cli/context.py
index c53a061..7e12511 100644
--- a/fuzzforge-cli/src/fuzzforge_cli/context.py
+++ b/fuzzforge-cli/src/fuzzforge_cli/context.py
@@ -5,35 +5,35 @@ from __future__ import annotations
from pathlib import Path
from typing import TYPE_CHECKING, cast
-from fuzzforge_runner import Runner, Settings
+from fuzzforge_mcp.storage import LocalStorage # type: ignore[import-untyped]
if TYPE_CHECKING:
from typer import Context as TyperContext
class Context:
- """CLI context holding the runner instance and settings."""
+ """CLI context holding the storage instance and settings."""
- _runner: Runner
+ _storage: LocalStorage
_project_path: Path
- def __init__(self, runner: Runner, project_path: Path) -> None:
+ def __init__(self, storage: LocalStorage, project_path: Path) -> None:
"""Initialize an instance of the class.
- :param runner: FuzzForge runner instance.
+ :param storage: FuzzForge local storage instance.
:param project_path: Path to the current project.
"""
- self._runner = runner
+ self._storage = storage
self._project_path = project_path
- def get_runner(self) -> Runner:
- """Get the runner instance.
+ def get_storage(self) -> LocalStorage:
+ """Get the storage instance.
- :return: Runner instance.
+ :return: LocalStorage instance.
"""
- return self._runner
+ return self._storage
def get_project_path(self) -> Path:
"""Get the current project path.
@@ -44,14 +44,14 @@ class Context:
return self._project_path
-def get_runner(context: TyperContext) -> Runner:
- """Get runner from Typer context.
+def get_storage(context: TyperContext) -> LocalStorage:
+ """Get storage from Typer context.
:param context: Typer context.
- :return: Runner instance.
+ :return: LocalStorage instance.
"""
- return cast("Context", context.obj).get_runner()
+ return cast("Context", context.obj).get_storage()
def get_project_path(context: TyperContext) -> Path:
diff --git a/fuzzforge-cli/src/fuzzforge_cli/tui/__init__.py b/fuzzforge-cli/src/fuzzforge_cli/tui/__init__.py
new file mode 100644
index 0000000..ed987a5
--- /dev/null
+++ b/fuzzforge-cli/src/fuzzforge_cli/tui/__init__.py
@@ -0,0 +1 @@
+"""FuzzForge terminal user interface."""
diff --git a/fuzzforge-cli/src/fuzzforge_cli/tui/app.py b/fuzzforge-cli/src/fuzzforge_cli/tui/app.py
new file mode 100644
index 0000000..80c79fc
--- /dev/null
+++ b/fuzzforge-cli/src/fuzzforge_cli/tui/app.py
@@ -0,0 +1,562 @@
+"""FuzzForge TUI application.
+
+Main terminal user interface for FuzzForge, providing a dashboard
+with AI agent connection status, hub server availability, and
+hub management capabilities.
+
+"""
+
+from __future__ import annotations
+
+from collections import defaultdict
+from pathlib import Path
+from typing import TYPE_CHECKING, Any
+
+from rich.text import Text
+from textual import events, work
+from textual.app import App, ComposeResult
+from textual.binding import Binding
+from textual.containers import Horizontal, Vertical, VerticalScroll
+from textual.message import Message
+from textual.widgets import Button, DataTable, Footer, Header
+
+from fuzzforge_cli.tui.helpers import (
+ check_agent_status,
+ check_hub_image,
+ find_fuzzforge_root,
+ get_agent_configs,
+ load_hub_config,
+)
+
+if TYPE_CHECKING:
+ from fuzzforge_cli.commands.mcp import AIAgent
+
+# Agent config entries stored alongside their linked status for row mapping
+_AgentRow = tuple[str, "AIAgent", Path, str, bool]
+
+
+class SingleClickDataTable(DataTable[Any]):
+ """DataTable subclass that also fires ``RowClicked`` on a single mouse click.
+
+ Textual's built-in ``RowSelected`` only fires on Enter or on a second click
+ of an already-highlighted row. ``RowClicked`` fires on every first click,
+ enabling single-click-to-act UX without requiring Enter.
+ """
+
+ class RowClicked(Message):
+ """Fired on every single mouse click on a data row."""
+
+ def __init__(self, data_table: SingleClickDataTable, cursor_row: int) -> None:
+ self.data_table = data_table
+ self.cursor_row = cursor_row
+ super().__init__()
+
+ @property
+ def control(self) -> SingleClickDataTable:
+ """Return the data table that fired this event."""
+ return self.data_table
+
+ async def _on_click(self, event: events.Click) -> None:
+ """Forward to parent, then post RowClicked on every mouse click.
+
+ The hub table is handled exclusively via RowClicked. RowSelected is
+ intentionally NOT used for the hub table to avoid double-dispatch.
+ """
+ await super()._on_click(event)
+ meta = event.style.meta
+ if meta and "row" in meta and self.cursor_type == "row":
+ row_index: int = int(meta["row"])
+ if row_index >= 0:
+ self.post_message(SingleClickDataTable.RowClicked(self, row_index))
+
+
+class FuzzForgeApp(App[None]):
+ """FuzzForge AI terminal user interface."""
+
+ TITLE = "FuzzForge AI"
+ SUB_TITLE = "Security Research Orchestration"
+
+ CSS = """
+ Screen {
+ background: $surface;
+ }
+
+ #main {
+ height: 1fr;
+ margin: 1 2;
+ }
+
+ .panel {
+ width: 1fr;
+ border: round #4699fc;
+ padding: 1 2;
+ margin: 0 0 1 0;
+ }
+
+ #hub-panel {
+ height: 12;
+ }
+
+ #hub-table {
+ height: 1fr;
+ }
+
+ #agents-panel {
+ height: auto;
+ }
+
+ .panel-title {
+ text-style: bold;
+ color: #4699fc;
+ text-align: left;
+ margin-bottom: 1;
+ }
+
+ #hub-title-bar {
+ height: auto;
+ align: center middle;
+ margin: 0 0 1 0;
+ }
+
+ #btn-hub-manager {
+ min-width: 40;
+ margin-right: 2;
+ }
+
+ #btn-fuzzinglabs-hub {
+ min-width: 30;
+ }
+
+ #agents-table {
+ height: auto;
+ }
+
+ /* Modal screens */
+ AgentSetupScreen, AgentUnlinkScreen,
+ HubManagerScreen, LinkHubScreen, CloneHubScreen,
+ BuildImageScreen, BuildLogScreen {
+ align: center middle;
+ }
+
+ #setup-dialog, #unlink-dialog {
+ width: 56;
+ height: auto;
+ max-height: 80%;
+ border: thick #4699fc;
+ background: $surface;
+ padding: 2 3;
+ overflow-y: auto;
+ }
+
+ #hub-manager-dialog {
+ width: 100;
+ height: auto;
+ max-height: 85%;
+ border: thick #4699fc;
+ background: $surface;
+ padding: 2 3;
+ overflow-y: auto;
+ }
+
+ #link-dialog, #clone-dialog {
+ width: 72;
+ height: auto;
+ max-height: 80%;
+ border: thick #4699fc;
+ background: $surface;
+ padding: 2 3;
+ overflow-y: auto;
+ }
+
+ #build-dialog {
+ width: 72;
+ height: auto;
+ max-height: 80%;
+ border: thick #4699fc;
+ background: $surface;
+ padding: 2 3;
+ }
+
+ #confirm-text {
+ margin: 1 0 2 0;
+ }
+
+ #build-log {
+ height: 30;
+ border: round $panel;
+ margin: 1 0;
+ }
+
+ #build-subtitle {
+ color: $text-muted;
+ margin-bottom: 1;
+ }
+
+ #build-status {
+ height: 1;
+ margin-top: 1;
+ }
+
+ .dialog-title {
+ text-style: bold;
+ text-align: center;
+ color: #4699fc;
+ margin-bottom: 1;
+ }
+
+ .field-label {
+ margin-top: 1;
+ text-style: bold;
+ }
+
+ RadioSet {
+ height: auto;
+ margin: 0 0 1 2;
+ }
+
+ Input {
+ margin: 0 0 1 0;
+ }
+
+ .dialog-buttons {
+ layout: horizontal;
+ height: 3;
+ align: center middle;
+ margin-top: 1;
+ }
+
+ .dialog-buttons Button {
+ margin: 0 1;
+ min-width: 14;
+ }
+ """
+
+ BINDINGS = [
+ Binding("q", "quit", "Quit"),
+ Binding("h", "manage_hubs", "Hub Manager"),
+ Binding("r", "refresh", "Refresh"),
+ Binding("enter", "select_row", "Select", show=False),
+ ]
+
+ def compose(self) -> ComposeResult:
+ """Compose the dashboard layout."""
+ yield Header()
+ with VerticalScroll(id="main"):
+ with Vertical(id="hub-panel", classes="panel"):
+ yield SingleClickDataTable(id="hub-table")
+ with Horizontal(id="hub-title-bar"):
+ yield Button(
+ "Hub Manager (h)",
+ variant="primary",
+ id="btn-hub-manager",
+ )
+ yield Button(
+ "FuzzingLabs Hub",
+ variant="primary",
+ id="btn-fuzzinglabs-hub",
+ )
+ with Vertical(id="agents-panel", classes="panel"):
+ yield DataTable(id="agents-table")
+ yield Footer()
+
+ def on_mount(self) -> None:
+ """Populate tables on startup."""
+ self._agent_rows: list[_AgentRow] = []
+ self._hub_rows: list[tuple[str, str, str, bool] | None] = []
+ # Background build tracking
+ self._active_builds: dict[str, object] = {} # image -> Popen
+ self._build_logs: dict[str, list[str]] = {} # image -> log lines
+ self._build_results: dict[str, bool] = {} # image -> success
+ self.query_one("#hub-panel").border_title = "Hub Servers [dim](click β Not built to build)[/dim]"
+ self.query_one("#agents-panel").border_title = "AI Agents"
+ self._refresh_agents()
+ self._refresh_hub()
+
+ def _refresh_agents(self) -> None:
+ """Refresh the AI agents status table."""
+ table = self.query_one("#agents-table", DataTable)
+ table.clear(columns=True)
+ table.add_columns("Agent", "Status", "Config Path")
+ table.cursor_type = "row"
+
+ self._agent_rows = []
+ for display_name, agent, config_path, servers_key in get_agent_configs():
+ is_linked, status_text = check_agent_status(config_path, servers_key)
+ if is_linked:
+ status_cell = Text(f"β {status_text}", style="green")
+ else:
+ status_cell = Text(f"β {status_text}", style="red")
+ table.add_row(display_name, status_cell, str(config_path))
+ self._agent_rows.append(
+ (display_name, agent, config_path, servers_key, is_linked)
+ )
+
+ def _refresh_hub(self) -> None:
+ """Refresh the hub servers table, grouped by source hub."""
+ self._hub_rows = []
+ table = self.query_one("#hub-table", SingleClickDataTable)
+ table.clear(columns=True)
+ table.add_columns("Server", "Image", "Hub", "Status")
+ table.cursor_type = "row"
+
+ try:
+ fuzzforge_root = find_fuzzforge_root()
+ hub_config = load_hub_config(fuzzforge_root)
+ except Exception:
+ table.add_row(
+ Text("Error loading config", style="red"), "", "", ""
+ )
+ return
+
+ servers = hub_config.get("servers", [])
+ if not servers:
+ table.add_row(
+ Text("No servers β press h", style="dim"), "", "", ""
+ )
+ return
+
+ # Group servers by source hub
+ groups: dict[str, list[dict[str, Any]]] = defaultdict(list)
+ for server in servers:
+ source = server.get("source_hub", "manual")
+ groups[source].append(server)
+
+ for hub_name, hub_servers in groups.items():
+ ready_count = 0
+ total = len(hub_servers)
+
+ statuses: list[tuple[dict[str, Any], bool, str]] = []
+ for server in hub_servers:
+ enabled = server.get("enabled", True)
+ if not enabled:
+ statuses.append((server, False, "Disabled"))
+ else:
+ is_ready, status_text = check_hub_image(
+ server.get("image", "")
+ )
+ if is_ready:
+ ready_count += 1
+ statuses.append((server, is_ready, status_text))
+
+ # Group header row
+ if hub_name == "manual":
+ header = Text(
+ f"βΌ π¦ Local config ({ready_count}/{total} ready)",
+ style="bold",
+ )
+ else:
+ header = Text(
+ f"βΌ π {hub_name} ({ready_count}/{total} ready)",
+ style="bold",
+ )
+ table.add_row(header, "", "", "")
+                self._hub_rows.append(None)  # group header — not selectable
+
+ # Tool rows
+ for server, is_ready, status_text in statuses:
+ name = server.get("name", "unknown")
+ image = server.get("image", "unknown")
+ enabled = server.get("enabled", True)
+
+ if image in getattr(self, "_active_builds", {}):
+ status_cell = Text("β³ Buildingβ¦", style="yellow")
+ elif not enabled:
+ status_cell = Text("Disabled", style="dim")
+ elif is_ready:
+ status_cell = Text("β Ready", style="green")
+ else:
+ status_cell = Text(f"β {status_text}", style="red dim")
+
+ table.add_row(
+ f" {name}",
+ Text(image, style="dim"),
+ hub_name,
+ status_cell,
+ )
+ self._hub_rows.append((name, image, hub_name, is_ready))
+
+ def on_data_table_row_selected(self, event: DataTable.RowSelected) -> None:
+        """Handle Enter-key row selection (agents table only).
+
+        Hub table uses RowClicked exclusively — wiring it to RowSelected too
+        would cause a double push on every click since Textual 8 fires
+        RowSelected on ALL clicks, not just second-click-on-same-row.
+        """
+ if event.data_table.id == "agents-table":
+ self._handle_agent_row(event.cursor_row)
+
+ def on_single_click_data_table_row_clicked(
+ self, event: SingleClickDataTable.RowClicked
+ ) -> None:
+ """Handle single mouse-click on a hub table row."""
+ if event.data_table.id == "hub-table":
+ self._handle_hub_row(event.cursor_row)
+
+ def _handle_agent_row(self, idx: int) -> None:
+ """Open agent setup/unlink for the selected agent row."""
+ if idx < 0 or idx >= len(self._agent_rows):
+ return
+
+ display_name, agent, _config_path, _servers_key, is_linked = self._agent_rows[idx]
+
+ if is_linked:
+ from fuzzforge_cli.tui.screens.agent_setup import AgentUnlinkScreen
+
+ self.push_screen(
+ AgentUnlinkScreen(agent, display_name),
+ callback=self._on_agent_changed,
+ )
+ else:
+ from fuzzforge_cli.tui.screens.agent_setup import AgentSetupScreen
+
+ self.push_screen(
+ AgentSetupScreen(agent, display_name),
+ callback=self._on_agent_changed,
+ )
+
+ def _handle_hub_row(self, idx: int) -> None:
+ """Handle a click on a hub table row."""
+ # Guard: never push two build dialogs at once (double-click protection)
+ if getattr(self, "_build_dialog_open", False):
+ return
+
+ if idx < 0 or idx >= len(self._hub_rows):
+ return
+ row_data = self._hub_rows[idx]
+ if row_data is None:
+            return  # group header row — ignore
+
+ server_name, image, hub_name, is_ready = row_data
+
+ # If a build is already running, open the live log viewer
+ if image in self._active_builds:
+ from fuzzforge_cli.tui.screens.build_log import BuildLogScreen
+ self._build_dialog_open = True
+ self.push_screen(
+ BuildLogScreen(image),
+ callback=lambda _: setattr(self, "_build_dialog_open", False),
+ )
+ return
+
+ if is_ready:
+ self.notify(f"{image} is already built β", severity="information")
+ return
+
+ if hub_name == "manual":
+ self.notify("Manual servers must be built outside FuzzForge")
+ return
+
+ from fuzzforge_cli.tui.screens.build_image import BuildImageScreen
+
+ self._build_dialog_open = True
+
+ def _on_build_dialog_done(result: bool | None) -> None:
+ self._build_dialog_open = False
+ if result is not None:
+ self._on_build_confirmed(result, server_name, image, hub_name)
+
+ self.push_screen(
+ BuildImageScreen(server_name, image, hub_name),
+ callback=_on_build_dialog_done,
+ )
+
    def _on_build_confirmed(self, confirmed: bool, server_name: str, image: str, hub_name: str) -> None:
        """Start a background build if the user confirmed.

        :param confirmed: True when the user pressed Build in the dialog.
        :param server_name: Hub tool name the image belongs to.
        :param image: Image tag to build.
        :param hub_name: Registry name of the source hub.
        """
        if not confirmed:
            return
        # Reset per-image state before launching the worker thread.
        self._build_logs[image] = []
        self._build_results.pop(image, None)
        self._active_builds[image] = True  # mark as pending so β³ shows immediately
        self._refresh_hub()  # show β³ Building⦠immediately
        self._run_build(server_name, image, hub_name)
+
    @work(thread=True)
    def _run_build(self, server_name: str, image: str, hub_name: str) -> None:
        """Build a Docker/Podman image in a background thread.

        Appends output lines to ``self._build_logs[image]`` (polled by the
        build-log screen), tracks the live process in ``self._active_builds``
        and the outcome in ``self._build_results``. All UI updates are
        marshalled back to the main thread via ``call_from_thread``.
        """
        from fuzzforge_cli.tui.helpers import build_image, find_dockerfile_for_server

        # Shared buffer: the log screen reads this list while we append to it.
        logs = self._build_logs.setdefault(image, [])

        dockerfile = find_dockerfile_for_server(server_name, hub_name)
        if dockerfile is None:
            logs.append(f"ERROR: Dockerfile not found for '{server_name}' in hub '{hub_name}'")
            self._build_results[image] = False
            self._active_builds.pop(image, None)
            self.call_from_thread(self._on_build_done, image, success=False)
            return

        logs.append(f"Building {image} from {dockerfile.parent}")
        logs.append("")

        try:
            proc = build_image(image, dockerfile)
        except FileNotFoundError as exc:
            # Container engine binary missing from PATH.
            logs.append(f"ERROR: {exc}")
            self._build_results[image] = False
            self._active_builds.pop(image, None)
            self.call_from_thread(self._on_build_done, image, success=False)
            return

        self._active_builds[image] = proc  # replace pending marker with actual process
        self.call_from_thread(self._refresh_hub)  # show β³ in table

        # stdout is piped by build_image; this guard narrows the Optional type.
        if proc.stdout is None:
            return
        for line in proc.stdout:
            logs.append(line.rstrip())

        proc.wait()
        self._active_builds.pop(image, None)
        success = proc.returncode == 0
        self._build_results[image] = success
        self.call_from_thread(self._on_build_done, image, success=success)
+
+ def _on_build_done(self, image: str, *, success: bool) -> None:
+ """Handle completion of a background build on the main thread."""
+ self._refresh_hub()
+ if success:
+ self.notify(f"β {image} built successfully", severity="information")
+ else:
+ self.notify(f"β {image} build failed β click row for log", severity="error")
+
+ def on_button_pressed(self, event: Button.Pressed) -> None:
+ """Handle button presses."""
+ if event.button.id == "btn-hub-manager":
+ self.action_manage_hubs()
+ elif event.button.id == "btn-fuzzinglabs-hub":
+ self.action_add_fuzzinglabs_hub()
+
+ def action_add_fuzzinglabs_hub(self) -> None:
+ """Open the clone dialog pre-filled with the FuzzingLabs hub URL."""
+ from fuzzforge_cli.tui.screens.hub_manager import CloneHubScreen
+
+ self.push_screen(
+ CloneHubScreen(
+ default_url="https://github.com/FuzzingLabs/mcp-security-hub",
+ default_name="mcp-security-hub",
+ is_default=True,
+ ),
+ callback=self._on_hub_changed,
+ )
+
    def action_manage_hubs(self) -> None:
        """Open the hub manager modal; refresh the hub table when it closes."""
        # Imported lazily so screen modules load only when first used.
        from fuzzforge_cli.tui.screens.hub_manager import HubManagerScreen

        self.push_screen(HubManagerScreen(), callback=self._on_hub_changed)
+
    def _on_agent_changed(self, result: str | None) -> None:
        """Handle agent setup/unlink completion.

        :param result: Status message from the dismissed screen, or None
            when the dialog was cancelled (no notification is shown then).
        """
        if result:
            self.notify(result)
        # Refresh regardless of outcome so the table reflects current state.
        self._refresh_agents()
+
    def _on_hub_changed(self, result: str | None) -> None:
        """Handle hub manager completion β refresh the hub table.

        :param result: Dismissal value from the hub screen; intentionally
            ignored β the refresh happens unconditionally.
        """
        self._refresh_hub()
+
    def action_refresh(self) -> None:
        """Refresh all status panels (agents and hub) and notify the user."""
        self._refresh_agents()
        self._refresh_hub()
        self.notify("Status refreshed")
diff --git a/fuzzforge-cli/src/fuzzforge_cli/tui/helpers.py b/fuzzforge-cli/src/fuzzforge_cli/tui/helpers.py
new file mode 100644
index 0000000..3efce39
--- /dev/null
+++ b/fuzzforge-cli/src/fuzzforge_cli/tui/helpers.py
@@ -0,0 +1,687 @@
+"""Shared helpers for FuzzForge TUI and CLI.
+
+Provides utility functions for checking AI agent configuration status,
+hub server image availability, installing/removing MCP configurations,
+and managing linked MCP hub repositories.
+
+"""
+
+from __future__ import annotations
+
+import contextlib
+import json
+import os
+import subprocess
+from pathlib import Path
+from typing import Any
+
+from fuzzforge_cli.commands.mcp import (
+ AIAgent,
+ _detect_docker_socket,
+ _detect_podman_socket,
+ _find_fuzzforge_root,
+ _generate_mcp_config,
+ _get_claude_code_user_mcp_path,
+ _get_claude_desktop_mcp_path,
+ _get_copilot_mcp_path,
+)
+
# --- Hub Management Constants ---

# Default hub repository and its registry name.
# NOTE(review): this is the SSH clone form, while the TUI clone dialog
# pre-fills the HTTPS URL (https://github.com/FuzzingLabs/mcp-security-hub)
# β confirm which form is intended as canonical.
FUZZFORGE_DEFAULT_HUB_URL = "git@github.com:FuzzingLabs/mcp-security-hub.git"
FUZZFORGE_DEFAULT_HUB_NAME = "mcp-security-hub"
+
+
def get_fuzzforge_user_dir() -> Path:
    """Return the user-global ``~/.fuzzforge/`` directory.

    Stores data that is shared across all workspaces: cloned hub
    repositories, the hub registry, container storage (graphroot/runroot),
    and the hub workspace volume.

    Set the ``FUZZFORGE_USER_DIR`` environment variable to redirect all
    user-global data to a custom path (e.g. to test a fresh install
    without touching the real ``~/.fuzzforge/``). An empty value is
    treated the same as unset.

    :return: ``$FUZZFORGE_USER_DIR`` (resolved) or ``Path.home() / ".fuzzforge"``

    """
    override = os.environ.get("FUZZFORGE_USER_DIR")
    return Path(override).resolve() if override else Path.home() / ".fuzzforge"
+
+
def get_fuzzforge_dir() -> Path:
    """Return the project-local ``.fuzzforge/`` directory.

    Stores data that is specific to the current workspace: fuzzing
    results and project artifacts. Similar to how ``.git/`` scopes
    version-control data to a single project.

    Note: resolved from the *current working directory* at call time,
    so the result changes if the process changes directory.

    :return: ``Path.cwd() / ".fuzzforge"``

    """
    return Path.cwd() / ".fuzzforge"
+
# Categories that typically need NET_RAW capability for network access;
# consumed by scan_hub_for_servers when generating server capabilities.
_NET_RAW_CATEGORIES = {"reconnaissance", "web-security"}

# Directories to skip when scanning a hub for MCP tool Dockerfiles β
# repo housekeeping/tooling dirs whose Dockerfiles are not MCP tools.
_SCAN_SKIP_DIRS = {
    ".git",
    ".github",
    "scripts",
    "tests",
    "examples",
    "meta",
    "__pycache__",
    "node_modules",
    ".venv",
}
+
+
def get_agent_configs() -> list[tuple[str, AIAgent, Path, str]]:
    """Return agent display configs with resolved paths.

    Each tuple contains:
    - Display name
    - AIAgent enum value
    - Config file path (resolved at call time by the per-agent helper)
    - Servers JSON key ("servers" for Copilot, "mcpServers" for Claude)

    :return: List of agent configuration tuples.

    """
    return [
        ("GitHub Copilot", AIAgent.COPILOT, _get_copilot_mcp_path(), "servers"),
        ("Claude Desktop", AIAgent.CLAUDE_DESKTOP, _get_claude_desktop_mcp_path(), "mcpServers"),
        ("Claude Code", AIAgent.CLAUDE_CODE, _get_claude_code_user_mcp_path(), "mcpServers"),
    ]
+
+
def check_agent_status(config_path: Path, servers_key: str) -> tuple[bool, str]:
    """Check whether an AI agent has FuzzForge configured.

    :param config_path: Path to the agent's MCP config file.
    :param servers_key: JSON key for the servers dict (e.g. "servers" or "mcpServers").
    :return: Tuple of (is_linked, status_description).

    """
    if not config_path.exists():
        return False, "Not configured"
    try:
        config = json.loads(config_path.read_text())
    except json.JSONDecodeError:
        return False, "Invalid config file"
    except OSError:
        # File vanished or became unreadable between exists() and read_text().
        return False, "Config file unreadable"
    if not isinstance(config, dict):
        # Valid JSON but not an object (e.g. a bare list) β .get would crash.
        return False, "Invalid config file"
    servers = config.get(servers_key, {})
    if "fuzzforge" in servers:
        return True, "Linked"
    return False, "Config exists, not linked"
+
+
def check_hub_image(image: str) -> tuple[bool, str]:
    """Check whether a container image exists locally.

    Respects the ``FUZZFORGE_ENGINE__TYPE`` environment variable so that
    Podman users see the correct build status instead of always "Not built".

    :param image: Image name (e.g. "semgrep-mcp:latest").
    :return: Tuple of (is_ready, status_description).

    """
    engine = os.environ.get("FUZZFORGE_ENGINE__TYPE", "docker").lower()
    # Anything other than an explicit "podman" falls back to docker.
    cmd = "podman" if engine == "podman" else "docker"
    try:
        inspect = subprocess.run(
            [cmd, "image", "inspect", image],
            check=False,
            capture_output=True,
            text=True,
            timeout=5,
        )
    except subprocess.TimeoutExpired:
        return False, "Timeout"
    except FileNotFoundError:
        return False, f"{cmd} not found"
    if inspect.returncode == 0:
        return True, "Ready"
    return False, "Not built"
+
+
def load_hub_config(fuzzforge_root: Path) -> dict[str, Any]:
    """Load hub-config.json from the FuzzForge root.

    :param fuzzforge_root: Path to fuzzforge-oss directory.
    :return: Parsed hub configuration dict, empty dict on error.

    """
    config_path = fuzzforge_root / "hub-config.json"
    if not config_path.exists():
        return {}
    try:
        data: dict[str, Any] = json.loads(config_path.read_text())
        return data
    except (json.JSONDecodeError, OSError):
        # OSError covers the file disappearing or becoming unreadable
        # between exists() and read_text() β same soft-fail as bad JSON,
        # consistent with load_hubs_registry.
        return {}
+
+
def find_fuzzforge_root() -> Path:
    """Find the FuzzForge installation root directory.

    Thin public re-export of the private CLI helper so callers in this
    module don't reach for the underscore-prefixed name.

    :return: Path to the fuzzforge-oss directory.

    """
    return _find_fuzzforge_root()
+
+
def install_agent_config(agent: AIAgent, engine: str, force: bool = False) -> str:
    """Install FuzzForge MCP configuration for an AI agent.

    :param agent: Target AI agent.
    :param engine: Container engine type ("docker" or "podman").
    :param force: Overwrite existing configuration.
    :return: Result message string.

    """
    fuzzforge_root = _find_fuzzforge_root()

    # Resolve the agent's config file and the JSON key its servers live under.
    if agent == AIAgent.COPILOT:
        config_path, servers_key = _get_copilot_mcp_path(), "servers"
    elif agent == AIAgent.CLAUDE_CODE:
        config_path, servers_key = _get_claude_code_user_mcp_path(), "mcpServers"
    else:
        config_path, servers_key = _get_claude_desktop_mcp_path(), "mcpServers"

    detect_socket = _detect_docker_socket if engine == "docker" else _detect_podman_socket
    server_config = _generate_mcp_config(
        fuzzforge_root=fuzzforge_root,
        engine_type=engine,
        engine_socket=detect_socket(),
    )

    if config_path.exists():
        # Merge into the existing config, preserving other servers.
        try:
            full_config = json.loads(config_path.read_text())
        except json.JSONDecodeError:
            return f"Error: Invalid JSON in {config_path}"

        servers = full_config.setdefault(servers_key, {})
        if "fuzzforge" in servers and not force:
            return "Already configured (use force to overwrite)"
        servers["fuzzforge"] = server_config
    else:
        config_path.parent.mkdir(parents=True, exist_ok=True)
        full_config = {servers_key: {"fuzzforge": server_config}}

    config_path.write_text(json.dumps(full_config, indent=4))
    return f"Installed FuzzForge for {agent.value}"
+
+
def uninstall_agent_config(agent: AIAgent) -> str:
    """Remove FuzzForge MCP configuration from an AI agent.

    :param agent: Target AI agent.
    :return: Result message string.

    """
    # Resolve the agent's config file and the JSON key its servers live under.
    if agent == AIAgent.COPILOT:
        config_path, servers_key = _get_copilot_mcp_path(), "servers"
    elif agent == AIAgent.CLAUDE_CODE:
        config_path, servers_key = _get_claude_code_user_mcp_path(), "mcpServers"
    else:
        config_path, servers_key = _get_claude_desktop_mcp_path(), "mcpServers"

    if not config_path.exists():
        return "Configuration file not found"

    try:
        config = json.loads(config_path.read_text())
    except json.JSONDecodeError:
        return "Error: Invalid JSON in config file"

    servers = config.get(servers_key, {})
    if "fuzzforge" not in servers:
        return "FuzzForge is not configured for this agent"

    # `servers` aliases the dict inside `config`, so popping here mutates
    # the config that gets written back.
    servers.pop("fuzzforge")
    config_path.write_text(json.dumps(config, indent=4))
    return f"Removed FuzzForge from {agent.value}"
+
+
+# ---------------------------------------------------------------------------
+# Hub Management
+# ---------------------------------------------------------------------------
+
+
def get_hubs_registry_path() -> Path:
    """Return path to the hubs registry file (``~/.fuzzforge/hubs.json``).

    Stored in the user-global directory so the registry is shared across
    all workspaces; honors ``FUZZFORGE_USER_DIR`` via
    :func:`get_fuzzforge_user_dir`.

    :return: Path to the registry JSON file.

    """
    return get_fuzzforge_user_dir() / "hubs.json"
+
+
def get_default_hubs_dir() -> Path:
    """Return default directory for cloned hubs (``~/.fuzzforge/hubs/``).

    Stored in the user-global directory so hubs are cloned once and
    reused in every workspace; honors ``FUZZFORGE_USER_DIR`` via
    :func:`get_fuzzforge_user_dir`.

    :return: Path to the default hubs directory.

    """
    return get_fuzzforge_user_dir() / "hubs"
+
+
def _discover_hub_dirs() -> list[Path]:
    """Scan known hub directories for cloned repos.

    Checks both the current global location (``~/.fuzzforge/hubs/``) and the
    legacy workspace-local location (``<workspace>/.fuzzforge/hubs/``) so
    that hubs cloned before the global-dir migration are still found.

    :return: List of hub directory paths (each is a direct child with a
        ``.git`` sub-directory).

    """
    found: list[Path] = []
    for base in (get_fuzzforge_user_dir() / "hubs", get_fuzzforge_dir() / "hubs"):
        if not base.is_dir():
            continue
        for child in base.iterdir():
            # Only directories that are actual git clones count as hubs.
            if child.is_dir() and (child / ".git").is_dir():
                found.append(child)
    return found
+
+
def load_hubs_registry() -> dict[str, Any]:
    """Load the hubs registry from disk.

    If the registry file does not exist, auto-recovers it by scanning known
    hub directories and rebuilding entries for any discovered hubs. This
    handles the migration from the old workspace-local
    ``<workspace>/.fuzzforge/hubs.json`` path to the global
    ``~/.fuzzforge/hubs.json`` path, as well as any case where the registry
    was lost.

    :return: Registry dict with ``hubs`` key containing a list of hub entries.

    """
    path = get_hubs_registry_path()
    if path.exists():
        try:
            data: dict[str, Any] = json.loads(path.read_text())
            return data
        except (json.JSONDecodeError, OSError):
            pass  # fall through to rebuild

    # Registry missing or unreadable β attempt to rebuild from discovered
    # hub directories.
    discovered = _discover_hub_dirs()
    if not discovered:
        return {"hubs": []}

    hubs: list[dict[str, Any]] = []
    for hub_dir in discovered:
        # Best-effort read of the git remote URL; a hub without a readable
        # remote is still usable. (Uses the module-level `subprocess` import
        # β the previous local `import subprocess as _sp` was redundant.)
        git_url: str = ""
        try:
            result = subprocess.run(
                ["git", "-C", str(hub_dir), "remote", "get-url", "origin"],
                check=False, capture_output=True, text=True, timeout=5,
            )
            if result.returncode == 0:
                git_url = result.stdout.strip()
        except (OSError, subprocess.SubprocessError):
            # git missing or timed out β leave git_url empty.
            pass
        hubs.append({
            "name": hub_dir.name,
            "path": str(hub_dir),
            "git_url": git_url,
            "is_default": hub_dir.name == FUZZFORGE_DEFAULT_HUB_NAME,
        })

    registry: dict[str, Any] = {"hubs": hubs}
    # Persist so we don't re-scan on every load
    with contextlib.suppress(OSError):
        save_hubs_registry(registry)
    return registry
+
+
def save_hubs_registry(registry: dict[str, Any]) -> None:
    """Save the hubs registry to disk.

    Creates the parent directory on first use.

    :param registry: Registry dict to persist.

    """
    target = get_hubs_registry_path()
    target.parent.mkdir(parents=True, exist_ok=True)
    target.write_text(json.dumps(registry, indent=2))
+
+
def scan_hub_for_servers(hub_path: Path) -> list[dict[str, Any]]:
    """Scan a hub directory for MCP tool Dockerfiles.

    Looks for the ``category/tool-name/Dockerfile`` pattern and generates
    a server configuration entry for each discovered tool.

    :param hub_path: Root directory of the hub repository.
    :return: Sorted list of server configuration dicts.

    """
    if not hub_path.is_dir():
        return []

    # Loop-invariant: the shared workspace volume mounted into every tool.
    workspace_volume = f"{get_fuzzforge_user_dir()}/hub/workspace:/data"

    found: list[dict[str, Any]] = []
    for dockerfile in sorted(hub_path.rglob("Dockerfile")):
        parts = dockerfile.relative_to(hub_path).parts

        # Expected layout: category/tool-name/Dockerfile (exactly 3 parts)
        if len(parts) != 3:
            continue
        category, tool_name = parts[0], parts[1]
        if category in _SCAN_SKIP_DIRS:
            continue

        found.append(
            {
                "name": tool_name,
                "description": f"{tool_name} β {category}",
                "type": "docker",
                "image": f"{tool_name}:latest",
                "category": category,
                "capabilities": ["NET_RAW"] if category in _NET_RAW_CATEGORIES else [],
                "volumes": [workspace_volume],
                "enabled": True,
            }
        )

    return found
+
+
def link_hub(
    name: str,
    path: str | Path,
    git_url: str | None = None,
    is_default: bool = False,
) -> str:
    """Link a hub directory and add its servers to hub-config.json.

    :param name: Display name for the hub.
    :param path: Local directory path containing the hub.
    :param git_url: Optional git remote URL (for tracking).
    :param is_default: Whether this is the default FuzzingLabs hub.
    :return: Result message string.

    """
    hub_path = Path(path).resolve()
    if not hub_path.is_dir():
        return f"Error: directory not found: {hub_path}"

    # Update the registry: drop any same-named entry, then append ours.
    registry = load_hubs_registry()
    entries = [h for h in registry.get("hubs", []) if h.get("name") != name]
    entries.append(
        {
            "name": name,
            "path": str(hub_path),
            "git_url": git_url,
            "is_default": is_default,
        }
    )
    registry["hubs"] = entries
    save_hubs_registry(registry)

    # Scan and update hub-config.json
    scanned = scan_hub_for_servers(hub_path)
    if not scanned:
        return f"Linked '{name}' (0 servers found)"

    try:
        added = _merge_servers_into_hub_config(name, scanned)
    except Exception as exc:
        # Best effort: the hub is linked even if the config merge fails.
        return f"Linked '{name}' but config update failed: {exc}"

    return f"Linked '{name}' β {added} new servers added ({len(scanned)} scanned)"
+
+
def unlink_hub(name: str) -> str:
    """Unlink a hub and remove its servers from hub-config.json.

    :param name: Name of the hub to unlink.
    :return: Result message string.

    """
    registry = load_hubs_registry()
    current = registry.get("hubs", [])

    remaining = [h for h in current if h.get("name") != name]
    if len(remaining) == len(current):
        # Nothing was filtered out β no entry with that name exists.
        return f"Hub '{name}' is not linked"

    registry["hubs"] = remaining
    save_hubs_registry(registry)

    # Best effort: a broken hub-config.json should not block unlinking.
    try:
        removed = _remove_hub_servers_from_config(name)
    except Exception:
        removed = 0

    return f"Unlinked '{name}' β {removed} server(s) removed"
+
+
def clone_hub(
    git_url: str,
    dest: Path | None = None,
    name: str | None = None,
) -> tuple[bool, str, Path | None]:
    """Clone a git hub repository.

    If the destination already exists and is a git repo, pulls instead.

    :param git_url: Git remote URL to clone.
    :param dest: Destination directory (auto-derived from URL if *None*).
    :param name: Hub name (auto-derived from URL if *None*).
    :return: Tuple of ``(success, message, clone_path)``.

    """
    if name is None:
        # Derive "repo" from ".../repo.git" or ".../repo".
        name = git_url.rstrip("/").split("/")[-1].removesuffix(".git")
    if dest is None:
        dest = get_default_hubs_dir() / name

    if dest.exists():
        if not (dest / ".git").is_dir():
            return False, f"Directory already exists (not a git repo): {dest}", None
        # Existing clone: refresh it with a pull instead of cloning.
        try:
            pull = subprocess.run(
                ["git", "-C", str(dest), "pull"],
                check=False,
                capture_output=True,
                text=True,
                timeout=120,
            )
        except subprocess.TimeoutExpired:
            return False, "Git pull timed out", None
        except FileNotFoundError:
            return False, "Git not found", None
        if pull.returncode != 0:
            return False, f"Git pull failed: {pull.stderr.strip()}", None
        return True, f"Updated existing clone at {dest}", dest

    dest.parent.mkdir(parents=True, exist_ok=True)
    try:
        clone = subprocess.run(
            ["git", "clone", git_url, str(dest)],
            check=False,
            capture_output=True,
            text=True,
            timeout=300,
        )
    except subprocess.TimeoutExpired:
        return False, "Git clone timed out (5 min limit)", None
    except FileNotFoundError:
        return False, "Git not found on PATH", None
    if clone.returncode != 0:
        return False, f"Git clone failed: {clone.stderr.strip()}", None
    return True, f"Cloned to {dest}", dest
+
+
def _merge_servers_into_hub_config(
    hub_name: str,
    servers: list[dict[str, Any]],
) -> int:
    """Merge scanned servers into hub-config.json.

    Only adds servers whose name does not already exist in the config.
    New entries are tagged with ``source_hub`` for later removal. Note the
    incoming dicts are mutated in place when tagged.

    :param hub_name: Name of the source hub (used for tagging).
    :param servers: List of server dicts from :func:`scan_hub_for_servers`.
    :return: Number of newly added servers.

    """
    config_path = find_fuzzforge_root() / "hub-config.json"

    config = None
    if config_path.exists():
        try:
            config = json.loads(config_path.read_text())
        except json.JSONDecodeError:
            config = None
    if config is None:
        # Missing or corrupt config β start from the defaults.
        config = {"servers": [], "default_timeout": 300, "cache_tools": True}

    current = config.get("servers", [])
    known = {s.get("name") for s in current}

    added = 0
    for server in servers:
        if server["name"] in known:
            continue
        server["source_hub"] = hub_name  # tag for removal on unlink
        current.append(server)
        known.add(server["name"])
        added += 1

    config["servers"] = current
    config_path.write_text(json.dumps(config, indent=2))
    return added
+
+
def _remove_hub_servers_from_config(hub_name: str) -> int:
    """Remove servers belonging to a hub from hub-config.json.

    Only removes servers tagged with the given ``source_hub`` value.
    Manually-added servers (without a tag) are preserved.

    :param hub_name: Name of the hub whose servers should be removed.
    :return: Number of servers removed.

    """
    config_path = find_fuzzforge_root() / "hub-config.json"
    if not config_path.exists():
        return 0

    try:
        config = json.loads(config_path.read_text())
    except json.JSONDecodeError:
        return 0

    original = config.get("servers", [])
    kept = [s for s in original if s.get("source_hub") != hub_name]
    config["servers"] = kept

    config_path.write_text(json.dumps(config, indent=2))
    return len(original) - len(kept)
+
+
def find_dockerfile_for_server(server_name: str, hub_name: str) -> Path | None:
    """Find the Dockerfile for a hub server tool.

    Looks up the hub path from the registry, then scans for
    ``category/<server_name>/Dockerfile``.

    :param server_name: Tool name (e.g. ``"nmap-mcp"``).
    :param hub_name: Hub name as stored in the registry.
    :return: Absolute path to the Dockerfile, or ``None`` if not found.

    """
    hub_entry = None
    for candidate in load_hubs_registry().get("hubs", []):
        if candidate.get("name") == hub_name:
            hub_entry = candidate
            break
    if not hub_entry:
        return None

    hub_path = Path(hub_entry["path"])
    for dockerfile in hub_path.rglob("Dockerfile"):
        parts = dockerfile.relative_to(hub_path).parts
        # Match the category/tool-name/Dockerfile layout with our tool name.
        if len(parts) == 3 and parts[1] == server_name:
            return dockerfile

    return None
+
+
def build_image(
    image: str,
    dockerfile: Path,
    *,
    engine: str | None = None,
) -> subprocess.Popen[str]:
    """Start a non-blocking ``docker/podman build`` subprocess.

    Returns the running :class:`subprocess.Popen` object so the caller
    can stream ``stdout`` / ``stderr`` lines incrementally (stderr is
    merged into stdout).

    :param image: Image tag (e.g. ``"nmap-mcp:latest"``).
    :param dockerfile: Path to the ``Dockerfile``.
    :param engine: ``"docker"`` or ``"podman"`` (auto-detected if ``None``).
    :return: Running subprocess with merged stdout+stderr.

    """
    if engine is None:
        engine = os.environ.get("FUZZFORGE_ENGINE__TYPE", "docker").lower()
    # Anything other than an explicit "podman" falls back to docker.
    cmd = "podman" if engine == "podman" else "docker"

    # The Dockerfile's directory doubles as the build context.
    return subprocess.Popen(
        [cmd, "build", "-t", image, str(dockerfile.parent)],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        text=True,
    )
diff --git a/fuzzforge-cli/src/fuzzforge_cli/tui/screens/__init__.py b/fuzzforge-cli/src/fuzzforge_cli/tui/screens/__init__.py
new file mode 100644
index 0000000..1cd8a98
--- /dev/null
+++ b/fuzzforge-cli/src/fuzzforge_cli/tui/screens/__init__.py
@@ -0,0 +1 @@
+"""TUI screens for FuzzForge."""
diff --git a/fuzzforge-cli/src/fuzzforge_cli/tui/screens/agent_setup.py b/fuzzforge-cli/src/fuzzforge_cli/tui/screens/agent_setup.py
new file mode 100644
index 0000000..fee79e8
--- /dev/null
+++ b/fuzzforge-cli/src/fuzzforge_cli/tui/screens/agent_setup.py
@@ -0,0 +1,96 @@
+"""Agent setup and unlink modal screens for FuzzForge TUI.
+
+Provides context-aware modals that receive the target agent directly
+from the dashboard row selection β no redundant agent picker needed.
+
+"""
+
+from __future__ import annotations
+
+from textual.app import ComposeResult
+from textual.containers import Horizontal, Vertical
+from textual.screen import ModalScreen
+from textual.widgets import Button, Label, RadioButton, RadioSet
+
+from fuzzforge_cli.commands.mcp import AIAgent
+from fuzzforge_cli.tui.helpers import install_agent_config, uninstall_agent_config
+
+
class AgentSetupScreen(ModalScreen[str | None]):
    """Modal for linking a specific agent; only asks for the engine choice.

    The target agent is passed in from the dashboard row selection, so no
    redundant agent picker is shown.
    """

    BINDINGS = [("escape", "cancel", "Cancel")]

    def __init__(self, agent: AIAgent, display_name: str) -> None:
        super().__init__()
        self._agent = agent
        self._display_name = display_name

    def compose(self) -> ComposeResult:
        """Compose the setup dialog layout."""
        with Vertical(id="setup-dialog"):
            yield Label(f"Setup {self._display_name}", classes="dialog-title")

            yield Label("Container Engine:", classes="field-label")
            yield RadioSet(
                RadioButton("Docker", value=True),
                RadioButton("Podman"),
                id="engine-select",
            )

            with Horizontal(classes="dialog-buttons"):
                yield Button("Install", variant="primary", id="btn-install")
                yield Button("Cancel", variant="default", id="btn-cancel")

    def on_button_pressed(self, event: Button.Pressed) -> None:
        """Handle button clicks."""
        if event.button.id == "btn-install":
            self._do_install()
        elif event.button.id == "btn-cancel":
            self.dismiss(None)

    def action_cancel(self) -> None:
        """Dismiss the dialog without action."""
        self.dismiss(None)

    def _do_install(self) -> None:
        """Execute the installation and dismiss with the result message."""
        selector = self.query_one("#engine-select", RadioSet)
        # Index 0 is the Docker button; anything at or below 0 (including
        # -1 for "nothing pressed") keeps the Docker default.
        engine = "podman" if selector.pressed_index > 0 else "docker"
        self.dismiss(install_agent_config(self._agent, engine, force=True))
+
+
class AgentUnlinkScreen(ModalScreen[str | None]):
    """Confirmation modal for unlinking a specific agent."""

    BINDINGS = [("escape", "cancel", "Cancel")]

    def __init__(self, agent: AIAgent, display_name: str) -> None:
        super().__init__()
        self._agent = agent
        self._display_name = display_name

    def compose(self) -> ComposeResult:
        """Compose the unlink confirmation layout."""
        with Vertical(id="unlink-dialog"):
            yield Label(f"Unlink {self._display_name}?", classes="dialog-title")
            yield Label(
                f"This will remove the FuzzForge MCP configuration from {self._display_name}.",
            )

            with Horizontal(classes="dialog-buttons"):
                yield Button("Unlink", variant="warning", id="btn-unlink")
                yield Button("Cancel", variant="default", id="btn-cancel")

    def on_button_pressed(self, event: Button.Pressed) -> None:
        """Handle button clicks."""
        if event.button.id == "btn-unlink":
            # Dismiss with the uninstall result so the dashboard can notify.
            self.dismiss(uninstall_agent_config(self._agent))
        elif event.button.id == "btn-cancel":
            self.dismiss(None)

    def action_cancel(self) -> None:
        """Dismiss without action."""
        self.dismiss(None)
diff --git a/fuzzforge-cli/src/fuzzforge_cli/tui/screens/build_image.py b/fuzzforge-cli/src/fuzzforge_cli/tui/screens/build_image.py
new file mode 100644
index 0000000..f556f67
--- /dev/null
+++ b/fuzzforge-cli/src/fuzzforge_cli/tui/screens/build_image.py
@@ -0,0 +1,58 @@
+"""Build-image confirm dialog for FuzzForge TUI.
+
+Simple modal that asks the user to confirm before starting a background
+build. The actual build is managed by the app so the user is never
+locked on this screen.
+
+"""
+
+from __future__ import annotations
+
+from textual.app import ComposeResult
+from textual.containers import Horizontal, Vertical
+from textual.screen import ModalScreen
+from textual.widgets import Button, Label
+
+
class _NoFocusButton(Button):
    # Button that never takes keyboard focus, so focus stays on the
    # dialog/underlying widgets while still reacting to clicks.
    can_focus = False
+
+
class BuildImageScreen(ModalScreen[bool]):
    """Quick confirmation before starting a background Docker/Podman build.

    Dismisses with True (start the build) or False (cancelled); the app
    owns the actual build so the user is never locked on this screen.
    """

    BINDINGS = [("escape", "cancel", "Cancel")]

    def __init__(self, server_name: str, image: str, hub_name: str) -> None:
        super().__init__()
        self._server_name = server_name
        self._image = image
        self._hub_name = hub_name

    def compose(self) -> ComposeResult:
        """Build the confirmation dialog UI."""
        with Vertical(id="build-dialog"):
            yield Label(f"Build {self._image}", classes="dialog-title")
            yield Label(
                f"Hub: {self._hub_name} β’ Tool: {self._server_name}",
                id="build-subtitle",
            )
            yield Label(
                "The image will be built in the background.\n"
                "You'll receive a notification when it's done.",
                id="confirm-text",
            )
            with Horizontal(classes="dialog-buttons"):
                yield _NoFocusButton("Build", variant="primary", id="btn-build")
                yield _NoFocusButton("Cancel", variant="default", id="btn-cancel")

    def on_button_pressed(self, event: Button.Pressed) -> None:
        """Handle Build or Cancel button clicks."""
        if event.button.id in ("btn-build", "btn-cancel"):
            self.dismiss(result=event.button.id == "btn-build")

    def action_cancel(self) -> None:
        """Dismiss the dialog when Escape is pressed."""
        self.dismiss(result=False)
diff --git a/fuzzforge-cli/src/fuzzforge_cli/tui/screens/build_log.py b/fuzzforge-cli/src/fuzzforge_cli/tui/screens/build_log.py
new file mode 100644
index 0000000..e260af8
--- /dev/null
+++ b/fuzzforge-cli/src/fuzzforge_cli/tui/screens/build_log.py
@@ -0,0 +1,80 @@
+"""Build-log viewer screen for FuzzForge TUI.
+
+Shows live output of a background build started by the app. Polls the
+app's ``_build_logs`` buffer every 500 ms so the user can pop this screen
+open at any time while the build is running and see up-to-date output.
+
+"""
+
+from __future__ import annotations
+
+from typing import Any
+
+from textual.app import ComposeResult
+from textual.containers import Horizontal, Vertical
+from textual.screen import ModalScreen
+from textual.widgets import Button, Label, Log
+
+
class _NoFocusButton(Button):
    # Button that never takes keyboard focus, so focus stays on the
    # dialog/underlying widgets while still reacting to clicks.
    can_focus = False
+
+
class BuildLogScreen(ModalScreen[None]):
    """Live log viewer for a background build job managed by the app.

    Polls the app's ``_build_logs`` buffer every 500 ms, so the screen can
    be opened at any point during a running build and still show output.
    """

    BINDINGS = [("escape", "close", "Close")]

    def __init__(self, image: str) -> None:
        super().__init__()
        self._image = image
        # Index of the first buffered log line not yet written to the widget.
        self._last_line: int = 0

    def compose(self) -> ComposeResult:
        """Build the log viewer UI."""
        with Vertical(id="build-dialog"):
            yield Label(f"Build log β {self._image}", classes="dialog-title")
            yield Label("", id="build-status")
            yield Log(id="build-log", auto_scroll=True)
            with Horizontal(classes="dialog-buttons"):
                yield _NoFocusButton("Close", variant="default", id="btn-close")

    def on_mount(self) -> None:
        """Initialize log polling when the screen is mounted."""
        self._flush_log()  # show whatever output already exists
        self.set_interval(0.5, self._poll_log)

    def _flush_log(self) -> None:
        """Write any new lines since the last flush.

        Reads the app-level ``_build_logs`` buffer (appended to by the
        build worker) and writes only lines past ``_last_line``, then
        updates the status label from ``_active_builds`` / ``_build_results``.
        """
        logs: list[str] = getattr(self.app, "_build_logs", {}).get(self._image, [])
        log_widget = self.query_one("#build-log", Log)
        # The slice is a snapshot; lines appended concurrently are picked
        # up on the next poll because the counter only advances by what
        # was actually written here.
        new_lines = logs[self._last_line :]
        for line in new_lines:
            log_widget.write_line(line)
        self._last_line += len(new_lines)

        active: dict[str, Any] = getattr(self.app, "_active_builds", {})
        status = self.query_one("#build-status", Label)
        if self._image in active:
            status.update("[yellow]β³ Buildingβ¦[/yellow]")
        else:
            # Build is done β check if we have a result stored
            results: dict[str, Any] = getattr(self.app, "_build_results", {})
            if self._image in results:
                if results[self._image]:
                    status.update(f"[green]β {self._image} built successfully[/green]")
                else:
                    status.update(f"[red]β {self._image} build failed[/red]")

    def _poll_log(self) -> None:
        """Poll for new log lines periodically."""
        self._flush_log()

    def on_button_pressed(self, event: Button.Pressed) -> None:
        """Handle Close button click."""
        if event.button.id == "btn-close":
            self.dismiss(None)

    def action_close(self) -> None:
        """Dismiss the dialog when Escape is pressed."""
        self.dismiss(None)
diff --git a/fuzzforge-cli/src/fuzzforge_cli/tui/screens/hub_manager.py b/fuzzforge-cli/src/fuzzforge_cli/tui/screens/hub_manager.py
new file mode 100644
index 0000000..975caf9
--- /dev/null
+++ b/fuzzforge-cli/src/fuzzforge_cli/tui/screens/hub_manager.py
@@ -0,0 +1,301 @@
+"""Hub management screens for FuzzForge TUI.
+
+Provides modal dialogs for managing linked MCP hub repositories:
+- HubManagerScreen: list, add, remove linked hubs
+- LinkHubScreen: link a local directory as a hub
+- CloneHubScreen: clone a git repo and link it (defaults to FuzzingLabs hub)
+
+"""
+
+from __future__ import annotations
+
+from pathlib import Path
+
+from rich.text import Text
+from textual import work
+from textual.app import ComposeResult
+from textual.containers import Horizontal, Vertical
+from textual.screen import ModalScreen
+from textual.widgets import Button, DataTable, Input, Label, Static
+
+from fuzzforge_cli.tui.helpers import (
+ FUZZFORGE_DEFAULT_HUB_NAME,
+ FUZZFORGE_DEFAULT_HUB_URL,
+ clone_hub,
+ link_hub,
+ load_hubs_registry,
+ scan_hub_for_servers,
+ unlink_hub,
+)
+
+
+class HubManagerScreen(ModalScreen[str | None]):
+ """Modal screen for managing linked MCP hubs."""
+
+ BINDINGS = [("escape", "cancel", "Close")]
+
+ def compose(self) -> ComposeResult:
+ """Compose the hub manager layout."""
+ with Vertical(id="hub-manager-dialog"):
+ yield Label("Hub Manager", classes="dialog-title")
+ yield DataTable(id="hubs-table")
+ yield Label("", id="hub-status")
+ with Horizontal(classes="dialog-buttons"):
+ yield Button(
+ "FuzzingLabs Hub",
+ variant="primary",
+ id="btn-clone-default",
+ )
+ yield Button("Link Path", variant="default", id="btn-link")
+ yield Button("Clone URL", variant="default", id="btn-clone")
+ yield Button("Remove", variant="primary", id="btn-remove")
+ yield Button("Close", variant="default", id="btn-close")
+
+ def on_mount(self) -> None:
+ """Populate the hubs table on startup."""
+ self._refresh_hubs()
+
+ def _refresh_hubs(self) -> None:
+ """Refresh the linked hubs table."""
+ table = self.query_one("#hubs-table", DataTable)
+ table.clear(columns=True)
+ table.add_columns("Name", "Path", "Servers", "Source")
+ table.cursor_type = "row"
+
+ registry = load_hubs_registry()
+ hubs = registry.get("hubs", [])
+
+ if not hubs:
+ table.add_row(
+ Text("No hubs linked", style="dim"),
+ Text("Press 'FuzzingLabs Hub' to get started", style="dim"),
+ "",
+ "",
+ )
+ return
+
+ for hub in hubs:
+ name = hub.get("name", "unknown")
+ path = hub.get("path", "")
+ git_url = hub.get("git_url", "")
+ is_default = hub.get("is_default", False)
+
+ hub_path = Path(path)
+ count: str | Text
+ if hub_path.is_dir():
+ servers = scan_hub_for_servers(hub_path)
+ count = str(len(servers))
+ else:
+ count = Text("dir missing", style="yellow")
+
+ source = git_url or "local"
+ name_cell: str | Text
+ if is_default:
+ name_cell = Text(f"β
{name}", style="bold")
+ else:
+ name_cell = name
+
+ table.add_row(name_cell, path, count, source)
+
+ def on_button_pressed(self, event: Button.Pressed) -> None:
+ """Route button actions."""
+ if event.button.id == "btn-close":
+ self.dismiss("refreshed")
+ elif event.button.id == "btn-clone-default":
+ self.app.push_screen(
+ CloneHubScreen(
+ FUZZFORGE_DEFAULT_HUB_URL,
+ FUZZFORGE_DEFAULT_HUB_NAME,
+ is_default=True,
+ ),
+ callback=self._on_hub_action,
+ )
+ elif event.button.id == "btn-link":
+ self.app.push_screen(
+ LinkHubScreen(),
+ callback=self._on_hub_action,
+ )
+ elif event.button.id == "btn-clone":
+ self.app.push_screen(
+ CloneHubScreen(),
+ callback=self._on_hub_action,
+ )
+ elif event.button.id == "btn-remove":
+ self._remove_selected()
+
+ def _on_hub_action(self, result: str | None) -> None:
+ """Handle result from a sub-screen."""
+ if result:
+ self.query_one("#hub-status", Label).update(result)
+ self.app.notify(result)
+ self._refresh_hubs()
+
+ def _remove_selected(self) -> None:
+ """Remove the currently selected hub."""
+ table = self.query_one("#hubs-table", DataTable)
+ registry = load_hubs_registry()
+ hubs = registry.get("hubs", [])
+
+ if not hubs:
+ self.app.notify("No hubs to remove", severity="warning")
+ return
+
+ idx = table.cursor_row
+ if idx is None or idx < 0 or idx >= len(hubs):
+ self.app.notify("Select a hub to remove", severity="warning")
+ return
+
+ name = hubs[idx].get("name", "")
+ result = unlink_hub(name)
+ self.query_one("#hub-status", Label).update(result)
+ self._refresh_hubs()
+ self.app.notify(result)
+
+ def action_cancel(self) -> None:
+ """Close the hub manager."""
+ self.dismiss("refreshed")
+
+
+class LinkHubScreen(ModalScreen[str | None]):
+ """Modal for linking a local directory as an MCP hub."""
+
+ BINDINGS = [("escape", "cancel", "Cancel")]
+
+ def compose(self) -> ComposeResult:
+ """Compose the link dialog layout."""
+ with Vertical(id="link-dialog"):
+ yield Label("Link Local Hub", classes="dialog-title")
+
+ yield Label("Hub Name:", classes="field-label")
+ yield Input(placeholder="my-hub", id="name-input")
+
+ yield Label("Directory Path:", classes="field-label")
+ yield Input(placeholder="/path/to/hub-directory", id="path-input")
+
+ yield Label("", id="link-status")
+ with Horizontal(classes="dialog-buttons"):
+ yield Button("Link", variant="primary", id="btn-link")
+ yield Button("Cancel", variant="default", id="btn-cancel")
+
+ def on_button_pressed(self, event: Button.Pressed) -> None:
+ """Handle button clicks."""
+ if event.button.id == "btn-cancel":
+ self.dismiss(None)
+ elif event.button.id == "btn-link":
+ self._do_link()
+
+ def _do_link(self) -> None:
+ """Execute the link operation."""
+ name = self.query_one("#name-input", Input).value.strip()
+ path = self.query_one("#path-input", Input).value.strip()
+
+ if not name:
+ self.app.notify("Please enter a hub name", severity="warning")
+ return
+ if not path:
+ self.app.notify("Please enter a directory path", severity="warning")
+ return
+
+ result = link_hub(name, path)
+ self.dismiss(result)
+
+ def action_cancel(self) -> None:
+ """Dismiss without action."""
+ self.dismiss(None)
+
+
+class CloneHubScreen(ModalScreen[str | None]):
+ """Modal for cloning a git hub repository and linking it.
+
+ When instantiated with *is_default=True* and FuzzingLabs URL,
+ provides a one-click setup for the standard security hub.
+
+ """
+
+ BINDINGS = [("escape", "cancel", "Cancel")]
+
+ def __init__(
+ self,
+ default_url: str = "",
+ default_name: str = "",
+ is_default: bool = False,
+ ) -> None:
+ super().__init__()
+ self._default_url = default_url
+ self._default_name = default_name
+ self._is_default = is_default
+
+ def compose(self) -> ComposeResult:
+ """Compose the clone dialog layout."""
+ title = "Clone FuzzingLabs Hub" if self._is_default else "Clone Git Hub"
+ with Vertical(id="clone-dialog"):
+ yield Label(title, classes="dialog-title")
+
+ yield Label("Git URL:", classes="field-label")
+ yield Input(
+ value=self._default_url,
+ placeholder="git@github.com:org/repo.git",
+ id="url-input",
+ )
+
+ yield Label("Hub Name (optional):", classes="field-label")
+ yield Input(
+ value=self._default_name,
+ placeholder="auto-detect from URL",
+ id="name-input",
+ )
+
+ yield Static("", id="clone-status")
+ with Horizontal(classes="dialog-buttons"):
+ yield Button(
+ "Clone & Link",
+ variant="primary",
+ id="btn-clone",
+ )
+ yield Button("Cancel", variant="default", id="btn-cancel")
+
+ def on_button_pressed(self, event: Button.Pressed) -> None:
+ """Handle button clicks."""
+ if event.button.id == "btn-cancel":
+ self.dismiss(None)
+ elif event.button.id == "btn-clone":
+ self._start_clone()
+
+ def _start_clone(self) -> None:
+ """Validate input and start the async clone operation."""
+ url = self.query_one("#url-input", Input).value.strip()
+ if not url:
+ self.app.notify("Please enter a git URL", severity="warning")
+ return
+
+ self.query_one("#btn-clone", Button).disabled = True
+ self.query_one("#clone-status", Static).update("β³ Cloning repository...")
+ self._do_clone(url)
+
+ @work(thread=True)
+ def _do_clone(self, url: str) -> None:
+ """Clone the repo in a background thread."""
+ name_input = self.query_one("#name-input", Input).value.strip()
+ name = name_input or None
+
+ success, msg, path = clone_hub(url, name=name)
+ if success and path:
+ hub_name = name or path.name
+ link_result = link_hub(
+ hub_name,
+ path,
+ git_url=url,
+ is_default=self._is_default,
+ )
+ self.app.call_from_thread(self.dismiss, f"β {link_result}")
+ else:
+ self.app.call_from_thread(self._on_clone_failed, msg)
+
+ def _on_clone_failed(self, msg: str) -> None:
+ """Handle a failed clone β re-enable the button and show the error."""
+ self.query_one("#clone-status", Static).update(f"β {msg}")
+ self.query_one("#btn-clone", Button).disabled = False
+
+ def action_cancel(self) -> None:
+ """Dismiss without action."""
+ self.dismiss(None)
diff --git a/fuzzforge-common/ruff.toml b/fuzzforge-common/ruff.toml
index f8c919b..6cb163d 100644
--- a/fuzzforge-common/ruff.toml
+++ b/fuzzforge-common/ruff.toml
@@ -18,3 +18,32 @@ ignore = [
"PLR2004", # allowing comparisons using unamed numerical constants in tests
"S101", # allowing 'assert' statements in tests
]
+"src/**" = [
+ "ANN201", # missing return type: legacy code
+ "ARG002", # unused argument: callback pattern
+ "ASYNC109", # async with timeout param: intentional pattern
+ "BLE001", # blind exception: broad error handling needed
+ "C901", # complexity: legacy code
+ "EM102", # f-string in exception: existing pattern
+ "F401", # unused import: re-export pattern
+ "FBT001", # boolean positional arg
+ "FBT002", # boolean default arg
+ "FIX002", # TODO comments: documented tech debt
+ "N806", # variable naming: intentional constants
+ "PERF401", # list comprehension: readability over perf
+ "PLW0603", # global statement: intentional for shared state
+ "PTH111", # os.path usage: legacy code
+ "RUF005", # collection literal: legacy style
+ "S110", # try-except-pass: intentional suppression
+ "S603", # subprocess: validated inputs
+ "SIM108", # ternary: readability preference
+ "TC001", # TYPE_CHECKING: causes circular imports
+ "TC003", # TYPE_CHECKING: causes circular imports
+ "TRY003", # message in exception: existing pattern
+ "TRY300", # try-else: existing pattern
+ "TRY400", # logging.error vs exception: existing pattern
+ "UP017", # datetime.UTC: Python 3.11+ only
+ "UP041", # TimeoutError alias: compatibility
+ "UP043", # unnecessary type args: compatibility
+ "W293", # blank line whitespace: formatting
+]
diff --git a/fuzzforge-common/src/fuzzforge_common/hub/__init__.py b/fuzzforge-common/src/fuzzforge_common/hub/__init__.py
new file mode 100644
index 0000000..d719f3f
--- /dev/null
+++ b/fuzzforge-common/src/fuzzforge_common/hub/__init__.py
@@ -0,0 +1,43 @@
+"""FuzzForge Hub - Generic MCP server bridge.
+
+This module provides a generic bridge to connect FuzzForge with any MCP server.
+It allows AI agents to discover and execute tools from external MCP servers
+(like mcp-security-hub) through the same interface as native FuzzForge modules.
+
+The hub is server-agnostic: it doesn't hardcode any specific tools or servers.
+Instead, it dynamically discovers tools by connecting to configured MCP servers
+and calling their `list_tools()` method.
+
+Supported transport types:
+- docker: Run MCP server as a Docker container with stdio transport
+- command: Run MCP server as a local process with stdio transport
+- sse: Connect to a remote MCP server via Server-Sent Events
+
+"""
+
+from fuzzforge_common.hub.client import HubClient, HubClientError, PersistentSession
+from fuzzforge_common.hub.executor import HubExecutionResult, HubExecutor
+from fuzzforge_common.hub.models import (
+ HubConfig,
+ HubServer,
+ HubServerConfig,
+ HubServerType,
+ HubTool,
+ HubToolParameter,
+)
+from fuzzforge_common.hub.registry import HubRegistry
+
+__all__ = [
+ "HubClient",
+ "HubClientError",
+ "HubConfig",
+ "HubExecutionResult",
+ "HubExecutor",
+ "HubRegistry",
+ "HubServer",
+ "HubServerConfig",
+ "HubServerType",
+ "HubTool",
+ "HubToolParameter",
+ "PersistentSession",
+]
diff --git a/fuzzforge-common/src/fuzzforge_common/hub/client.py b/fuzzforge-common/src/fuzzforge_common/hub/client.py
new file mode 100644
index 0000000..5ffb43b
--- /dev/null
+++ b/fuzzforge-common/src/fuzzforge_common/hub/client.py
@@ -0,0 +1,753 @@
+"""MCP client for communicating with hub servers.
+
+This module provides a generic MCP client that can connect to any MCP server
+via stdio (docker/command) or SSE transport. It handles:
+- Starting containers/processes for stdio transport
+- Connecting to SSE endpoints
+- Discovering tools via list_tools()
+- Executing tools via call_tool()
+- Persistent container sessions for stateful interactions
+
+"""
+
+from __future__ import annotations
+
+import asyncio
+import json
+import os
+import subprocess
+from contextlib import asynccontextmanager
+from dataclasses import dataclass, field
+from datetime import datetime, timezone
+from typing import TYPE_CHECKING, Any, cast
+
+from fuzzforge_common.hub.models import (
+ HubServer,
+ HubServerConfig,
+ HubServerType,
+ HubTool,
+)
+
+if TYPE_CHECKING:
+ from asyncio.subprocess import Process
+ from collections.abc import AsyncGenerator
+
+ from structlog.stdlib import BoundLogger
+
+
+def get_logger() -> BoundLogger:
+ """Get structlog logger instance.
+
+ :returns: Configured structlog logger.
+
+ """
+ from structlog import get_logger # noqa: PLC0415
+
+ return cast("BoundLogger", get_logger())
+
+
+class HubClientError(Exception):
+    """Error in hub client operations.
+
+    Raised for connection failures, MCP protocol errors, timeouts, and
+    tool-level errors reported by hub servers.
+
+    """
+
+
+@dataclass
+class PersistentSession:
+    """A persistent container session with an active MCP connection.
+
+    Keeps a Docker container running between tool calls to allow
+    stateful interactions (e.g., radare2 analysis, long-running fuzzing).
+
+    """
+
+    #: Server name this session belongs to.
+    server_name: str
+
+    #: Docker container name.
+    container_name: str
+
+    #: Underlying process (docker run).
+    process: Process
+
+    #: Stream reader (process stdout).
+    reader: asyncio.StreamReader
+
+    #: Stream writer (process stdin).
+    writer: asyncio.StreamWriter
+
+    #: Whether the MCP initialize handshake has completed.
+    initialized: bool = False
+
+    #: Lock to serialise concurrent requests on the same session.
+    lock: asyncio.Lock = field(default_factory=asyncio.Lock)
+
+    #: When the session was started (UTC).
+    started_at: datetime = field(default_factory=lambda: datetime.now(tz=timezone.utc))
+
+    #: Monotonic counter for JSON-RPC request IDs.
+    #: NOTE(review): appears unused in this module -- HubClient keeps its
+    #: own ``_request_id`` counter; confirm before removing.
+    request_id: int = 0
+
+    @property
+    def alive(self) -> bool:
+        """Check if the underlying process is still running."""
+        return self.process.returncode is None
+
+
+class HubClient:
+    """Client for communicating with MCP hub servers.
+
+    Supports stdio (via docker/command) and SSE transports.
+    Uses the MCP protocol (newline-delimited JSON-RPC over stdio)
+    for tool discovery and execution.
+
+    """
+
+    #: Default timeout for operations, in seconds.
+    DEFAULT_TIMEOUT: int = 30
+
+    def __init__(self, timeout: int = DEFAULT_TIMEOUT) -> None:
+        """Initialize the hub client.
+
+        :param timeout: Default timeout for operations in seconds.
+
+        """
+        self._timeout = timeout
+        # Long-lived container sessions keyed by server name.
+        self._persistent_sessions: dict[str, PersistentSession] = {}
+        # Monotonic JSON-RPC request-id counter shared by all connections.
+        self._request_id: int = 0
+
+    async def discover_tools(self, server: HubServer) -> list[HubTool]:
+        """Discover tools from a hub server.
+
+        Connects to the server, calls list_tools(), and returns
+        parsed HubTool instances.
+
+        :param server: Hub server to discover tools from.
+        :returns: List of discovered tools.
+        :raises HubClientError: If discovery fails.
+
+        """
+        logger = get_logger()
+        config = server.config
+
+        logger.info("Discovering tools", server=config.name, type=config.type.value)
+
+        try:
+            async with self._connect(config) as (reader, writer):
+                # Initialise MCP session. Skipped when a persistent session
+                # exists for this server -- it was initialised at start-up.
+                if not self._persistent_sessions.get(config.name):
+                    await self._initialize_session(reader, writer, config.name)
+
+                # Ask the server for its tool catalogue (MCP "tools/list").
+                tools_data = await self._call_method(
+                    reader,
+                    writer,
+                    "tools/list",
+                    {},
+                )
+
+                # Convert raw MCP tool descriptors into HubTool instances.
+                tools = []
+                for tool_data in tools_data.get("tools", []):
+                    tool = HubTool.from_mcp_tool(
+                        server_name=config.name,
+                        name=tool_data["name"],
+                        description=tool_data.get("description"),
+                        input_schema=tool_data.get("inputSchema", {}),
+                    )
+                    tools.append(tool)
+
+                logger.info(
+                    "Discovered tools",
+                    server=config.name,
+                    count=len(tools),
+                )
+                return tools
+
+        except Exception as e:
+            logger.error(
+                "Tool discovery failed",
+                server=config.name,
+                error=str(e),
+            )
+            raise HubClientError(f"Discovery failed for {config.name}: {e}") from e
+
+    async def execute_tool(
+        self,
+        server: HubServer,
+        tool_name: str,
+        arguments: dict[str, Any],
+        *,
+        timeout: int | None = None,
+        extra_volumes: list[str] | None = None,
+    ) -> dict[str, Any]:
+        """Execute a tool on a hub server.
+
+        :param server: Hub server to execute on.
+        :param tool_name: Name of the tool to execute.
+        :param arguments: Tool arguments.
+        :param timeout: Execution timeout; falls back to the server's
+            configured timeout, then to the client default.
+        :param extra_volumes: Additional Docker volume mounts to inject.
+        :returns: Tool execution result.
+        :raises HubClientError: If execution fails or times out.
+
+        """
+        logger = get_logger()
+        config = server.config
+        # Precedence: explicit argument > server config > client default.
+        exec_timeout = timeout or config.timeout or self._timeout
+
+        logger.info(
+            "Executing hub tool",
+            server=config.name,
+            tool=tool_name,
+            timeout=exec_timeout,
+        )
+
+        try:
+            async with self._connect(config, extra_volumes=extra_volumes) as (reader, writer):
+                # Initialise MCP session. Skipped when a persistent session
+                # exists for this server -- it was initialised at start-up.
+                if not self._persistent_sessions.get(config.name):
+                    await self._initialize_session(reader, writer, config.name)
+
+                # Call the tool (MCP "tools/call").
+                # NOTE(review): _call_method applies its own readline timeout
+                # of self._timeout (default 30s), which can undercut a larger
+                # exec_timeout here -- confirm and align if long-running
+                # tools time out unexpectedly.
+                result = await asyncio.wait_for(
+                    self._call_method(
+                        reader,
+                        writer,
+                        "tools/call",
+                        {"name": tool_name, "arguments": arguments},
+                    ),
+                    timeout=exec_timeout,
+                )
+
+                logger.info(
+                    "Tool execution completed",
+                    server=config.name,
+                    tool=tool_name,
+                )
+                return result
+
+        except asyncio.TimeoutError as e:
+            logger.error(
+                "Tool execution timed out",
+                server=config.name,
+                tool=tool_name,
+                timeout=exec_timeout,
+            )
+            raise HubClientError(
+                f"Execution timed out for {config.name}:{tool_name}"
+            ) from e
+
+        except Exception as e:
+            logger.error(
+                "Tool execution failed",
+                server=config.name,
+                tool=tool_name,
+                error=str(e),
+            )
+            raise HubClientError(
+                f"Execution failed for {config.name}:{tool_name}: {e}"
+            ) from e
+
+    @asynccontextmanager
+    async def _connect(
+        self,
+        config: HubServerConfig,
+        extra_volumes: list[str] | None = None,
+    ) -> AsyncGenerator[tuple[asyncio.StreamReader, asyncio.StreamWriter], None]:
+        """Connect to an MCP server.
+
+        If a persistent session exists for this server, reuse it (with a lock
+        to serialise concurrent requests). Otherwise, fall through to the
+        ephemeral per-call connection logic.
+
+        :param config: Server configuration.
+        :param extra_volumes: Additional Docker volume mounts to inject
+            (only honoured for ephemeral Docker connections).
+        :yields: Tuple of (reader, writer) for communication.
+        :raises HubClientError: If the server type is unsupported.
+
+        """
+        # Reuse an active persistent session when available; the per-session
+        # lock serialises concurrent callers over the single stdio pipe.
+        session = self._persistent_sessions.get(config.name)
+        if session and session.initialized and session.alive:
+            async with session.lock:
+                yield session.reader, session.writer  # type: ignore[misc]
+            return
+
+        # Ephemeral connection (original behaviour)
+        if config.type == HubServerType.DOCKER:
+            async with self._connect_docker(config, extra_volumes=extra_volumes) as streams:
+                yield streams
+        elif config.type == HubServerType.COMMAND:
+            async with self._connect_command(config) as streams:
+                yield streams
+        elif config.type == HubServerType.SSE:
+            async with self._connect_sse(config) as streams:
+                yield streams
+        else:
+            msg = f"Unsupported server type: {config.type}"
+            raise HubClientError(msg)
+
+ @asynccontextmanager
+ async def _connect_docker(
+ self,
+ config: HubServerConfig,
+ extra_volumes: list[str] | None = None,
+ ) -> AsyncGenerator[tuple[asyncio.StreamReader, asyncio.StreamWriter], None]:
+ """Connect to a Docker-based MCP server.
+
+ :param config: Server configuration with image name.
+ :param extra_volumes: Additional volume mounts to inject (e.g. project assets).
+ :yields: Tuple of (reader, writer) for stdio communication.
+
+ """
+ if not config.image:
+ msg = f"Docker image not specified for server '{config.name}'"
+ raise HubClientError(msg)
+
+ # Build docker command
+ cmd = ["docker", "run", "-i", "--rm"]
+
+ # Add capabilities
+ for cap in config.capabilities:
+ cmd.extend(["--cap-add", cap])
+
+ # Add volumes from server config
+ for volume in config.volumes:
+ cmd.extend(["-v", os.path.expanduser(volume)])
+
+ # Add extra volumes (e.g. project assets injected at runtime)
+ for volume in (extra_volumes or []):
+ cmd.extend(["-v", os.path.expanduser(volume)])
+
+ # Add environment variables
+ for key, value in config.environment.items():
+ cmd.extend(["-e", f"{key}={value}"])
+
+ cmd.append(config.image)
+
+ # Use 4 MB buffer to handle large tool responses (YARA rulesets, trivy output, etc.)
+ _STREAM_LIMIT = 4 * 1024 * 1024
+
+ process: Process = await asyncio.create_subprocess_exec(
+ *cmd,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ limit=_STREAM_LIMIT,
+ )
+
+ try:
+ if process.stdin is None or process.stdout is None:
+ msg = "Failed to get process streams"
+ raise HubClientError(msg)
+
+ # Create asyncio streams from process pipes
+ reader = process.stdout
+ writer = process.stdin
+
+ yield reader, writer # type: ignore[misc]
+
+ finally:
+ process.terminate()
+ try:
+ await asyncio.wait_for(process.wait(), timeout=5)
+ except asyncio.TimeoutError:
+ process.kill()
+
+ @asynccontextmanager
+ async def _connect_command(
+ self,
+ config: HubServerConfig,
+ ) -> AsyncGenerator[tuple[asyncio.StreamReader, asyncio.StreamWriter], None]:
+ """Connect to a command-based MCP server.
+
+ :param config: Server configuration with command.
+ :yields: Tuple of (reader, writer) for stdio communication.
+
+ """
+ if not config.command:
+ msg = f"Command not specified for server '{config.name}'"
+ raise HubClientError(msg)
+
+ # Set up environment
+ env = dict(config.environment) if config.environment else None
+
+ # Use 4 MB buffer to handle large tool responses
+ _STREAM_LIMIT = 4 * 1024 * 1024
+
+ process: Process = await asyncio.create_subprocess_exec(
+ *config.command,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ env=env,
+ limit=_STREAM_LIMIT,
+ )
+
+ try:
+ if process.stdin is None or process.stdout is None:
+ msg = "Failed to get process streams"
+ raise HubClientError(msg)
+
+ reader = process.stdout
+ writer = process.stdin
+
+ yield reader, writer # type: ignore[misc]
+
+ finally:
+ process.terminate()
+ try:
+ await asyncio.wait_for(process.wait(), timeout=5)
+ except asyncio.TimeoutError:
+ process.kill()
+
+ @asynccontextmanager
+ async def _connect_sse(
+ self,
+ config: HubServerConfig,
+ ) -> AsyncGenerator[tuple[asyncio.StreamReader, asyncio.StreamWriter], None]:
+ """Connect to an SSE-based MCP server.
+
+ :param config: Server configuration with URL.
+ :yields: Tuple of (reader, writer) for SSE communication.
+
+ """
+ # SSE support requires additional dependencies
+ # For now, raise not implemented
+ msg = "SSE transport not yet implemented"
+ raise NotImplementedError(msg)
+
+    async def _initialize_session(
+        self,
+        reader: asyncio.StreamReader,
+        writer: asyncio.StreamWriter,
+        server_name: str,
+    ) -> dict[str, Any]:
+        """Initialize MCP session with the server.
+
+        Performs the MCP handshake: an ``initialize`` request followed by
+        the ``notifications/initialized`` notification.
+
+        :param reader: Stream reader.
+        :param writer: Stream writer.
+        :param server_name: Server name for logging.
+        :returns: Server capabilities (the ``initialize`` result).
+
+        """
+        # Send initialize request (protocol version pinned to 2024-11-05).
+        result = await self._call_method(
+            reader,
+            writer,
+            "initialize",
+            {
+                "protocolVersion": "2024-11-05",
+                "capabilities": {},
+                "clientInfo": {
+                    "name": "fuzzforge-hub",
+                    "version": "0.1.0",
+                },
+            },
+        )
+
+        # Send initialized notification to complete the handshake.
+        await self._send_notification(reader, writer, "notifications/initialized", {})
+
+        return result
+
+    async def _call_method(
+        self,
+        reader: asyncio.StreamReader,
+        writer: asyncio.StreamWriter,
+        method: str,
+        params: dict[str, Any],
+    ) -> dict[str, Any]:
+        """Call an MCP method via newline-delimited JSON-RPC.
+
+        :param reader: Stream reader.
+        :param writer: Stream writer.
+        :param method: Method name.
+        :param params: Method parameters.
+        :returns: Method result.
+        :raises HubClientError: On empty response, JSON-RPC error, or a
+            tool-level error flagged in the result content.
+
+        """
+        # Create JSON-RPC request with unique ID
+        self._request_id += 1
+        request = {
+            "jsonrpc": "2.0",
+            "id": self._request_id,
+            "method": method,
+            "params": params,
+        }
+
+        # Send request as a single line of JSON.
+        request_line = json.dumps(request) + "\n"
+        writer.write(request_line.encode())
+        await writer.drain()
+
+        # Read response.
+        # NOTE(review): assumes the next line on the stream is the reply to
+        # this request (single-line JSON, no interleaved server
+        # notifications; the response "id" is not verified) -- confirm this
+        # holds for the servers in use.
+        response_line = await asyncio.wait_for(
+            reader.readline(),
+            timeout=self._timeout,
+        )
+
+        if not response_line:
+            msg = "Empty response from server"
+            raise HubClientError(msg)
+
+        response = json.loads(response_line.decode())
+
+        if "error" in response:
+            error = response["error"]
+            msg = f"MCP error: {error.get('message', 'Unknown error')}"
+            raise HubClientError(msg)
+
+        result = response.get("result", {})
+
+        # Check for tool-level errors in content items.
+        # NOTE(review): some MCP revisions place "isError" on the result
+        # object itself rather than on individual content items -- confirm
+        # this check matches the servers in use.
+        for item in result.get("content", []):
+            if item.get("isError", False):
+                error_text = item.get("text", "unknown error")
+                msg = f"Tool returned error: {error_text}"
+                raise HubClientError(msg)
+
+        return result
+
+ async def _send_notification(
+ self,
+ reader: asyncio.StreamReader,
+ writer: asyncio.StreamWriter,
+ method: str,
+ params: dict[str, Any],
+ ) -> None:
+ """Send an MCP notification (no response expected).
+
+ :param reader: Stream reader (unused but kept for consistency).
+ :param writer: Stream writer.
+ :param method: Notification method name.
+ :param params: Notification parameters.
+
+ """
+ # Create JSON-RPC notification (no id)
+ notification = {
+ "jsonrpc": "2.0",
+ "method": method,
+ "params": params,
+ }
+
+ notification_line = json.dumps(notification) + "\n"
+ writer.write(notification_line.encode())
+ await writer.drain()
+
+ # ------------------------------------------------------------------
+ # Persistent session management
+ # ------------------------------------------------------------------
+
+ async def start_persistent_session(
+ self,
+ config: HubServerConfig,
+ extra_volumes: list[str] | None = None,
+ ) -> PersistentSession:
+ """Start a persistent Docker container and initialise MCP session.
+
+ The container stays running until :meth:`stop_persistent_session` is
+ called, allowing multiple tool calls on the same session.
+
+ :param config: Server configuration (must be Docker type).
+ :param extra_volumes: Additional host:container volume mounts to inject.
+ :returns: The created persistent session.
+ :raises HubClientError: If the container cannot be started.
+
+ """
+ logger = get_logger()
+
+ if config.name in self._persistent_sessions:
+ session = self._persistent_sessions[config.name]
+ if session.alive:
+ logger.info("Persistent session already running", server=config.name)
+ return session
+ # Dead session β clean up and restart
+ await self._cleanup_session(config.name)
+
+ if config.type != HubServerType.DOCKER:
+ msg = f"Persistent mode only supports Docker servers (got {config.type.value})"
+ raise HubClientError(msg)
+
+ if not config.image:
+ msg = f"Docker image not specified for server '{config.name}'"
+ raise HubClientError(msg)
+
+ container_name = f"fuzzforge-{config.name}"
+
+ # Remove stale container with same name if it exists
+ try:
+ rm_proc = await asyncio.create_subprocess_exec(
+ "docker", "rm", "-f", container_name,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ )
+ await rm_proc.wait()
+ except Exception:
+ pass
+
+ # Build docker run command (no --rm, with --name)
+ cmd = ["docker", "run", "-i", "--name", container_name]
+
+ for cap in config.capabilities:
+ cmd.extend(["--cap-add", cap])
+
+ for volume in config.volumes:
+ cmd.extend(["-v", os.path.expanduser(volume)])
+
+ for extra_vol in (extra_volumes or []):
+ cmd.extend(["-v", extra_vol])
+
+ for key, value in config.environment.items():
+ cmd.extend(["-e", f"{key}={value}"])
+
+ cmd.append(config.image)
+
+ _STREAM_LIMIT = 4 * 1024 * 1024
+
+ logger.info(
+ "Starting persistent container",
+ server=config.name,
+ container=container_name,
+ image=config.image,
+ )
+
+ process: Process = await asyncio.create_subprocess_exec(
+ *cmd,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ limit=_STREAM_LIMIT,
+ )
+
+ if process.stdin is None or process.stdout is None:
+ process.terminate()
+ msg = "Failed to get process streams"
+ raise HubClientError(msg)
+
+ session = PersistentSession(
+ server_name=config.name,
+ container_name=container_name,
+ process=process,
+ reader=process.stdout,
+ writer=process.stdin,
+ )
+
+ # Initialise MCP session
+ try:
+ await self._initialize_session(
+ session.reader, # type: ignore[arg-type]
+ session.writer, # type: ignore[arg-type]
+ config.name,
+ )
+ session.initialized = True
+ except Exception as e:
+ process.terminate()
+ try:
+ await asyncio.wait_for(process.wait(), timeout=5)
+ except asyncio.TimeoutError:
+ process.kill()
+ msg = f"Failed to initialise MCP session for {config.name}: {e}"
+ raise HubClientError(msg) from e
+
+ self._persistent_sessions[config.name] = session
+
+ logger.info(
+ "Persistent session started",
+ server=config.name,
+ container=container_name,
+ )
+ return session
+
+    async def stop_persistent_session(self, server_name: str) -> bool:
+        """Stop a persistent container session.
+
+        Delegates to :meth:`_cleanup_session`, which terminates the process
+        and force-removes the Docker container.
+
+        :param server_name: Name of the server whose session to stop.
+        :returns: True if a session was stopped, False if none found.
+
+        """
+        return await self._cleanup_session(server_name)
+
+ def get_persistent_session(self, server_name: str) -> PersistentSession | None:
+ """Get a persistent session by server name.
+
+ :param server_name: Server name.
+ :returns: The session if running, None otherwise.
+
+ """
+ session = self._persistent_sessions.get(server_name)
+ if session and not session.alive:
+ # Mark dead session β don't remove here to avoid async issues
+ return None
+ return session
+
+ def list_persistent_sessions(self) -> list[dict[str, Any]]:
+ """List all persistent sessions with their status.
+
+ :returns: List of session info dictionaries.
+
+ """
+ sessions = []
+ for name, session in self._persistent_sessions.items():
+ sessions.append({
+ "server_name": name,
+ "container_name": session.container_name,
+ "alive": session.alive,
+ "initialized": session.initialized,
+ "started_at": session.started_at.isoformat(),
+ "uptime_seconds": int(
+ (datetime.now(tz=timezone.utc) - session.started_at).total_seconds()
+ ),
+ })
+ return sessions
+
+ async def stop_all_persistent_sessions(self) -> int:
+ """Stop all persistent sessions.
+
+ :returns: Number of sessions stopped.
+
+ """
+ names = list(self._persistent_sessions.keys())
+ count = 0
+ for name in names:
+ if await self._cleanup_session(name):
+ count += 1
+ return count
+
+    async def _cleanup_session(self, server_name: str) -> bool:
+        """Clean up a persistent session (terminate process, remove container).
+
+        :param server_name: Server name.
+        :returns: True if cleaned up, False if not found.
+
+        """
+        logger = get_logger()
+        # pop() removes the session from the registry even if teardown below
+        # partially fails, so a broken session is never reused.
+        session = self._persistent_sessions.pop(server_name, None)
+        if session is None:
+            return False
+
+        logger.info("Stopping persistent session", server=server_name)
+
+        # Terminate process: graceful signal first, hard kill after 10s.
+        if session.alive:
+            session.process.terminate()
+            try:
+                await asyncio.wait_for(session.process.wait(), timeout=10)
+            except asyncio.TimeoutError:
+                session.process.kill()
+                await session.process.wait()
+
+        # Best-effort removal of the named Docker container.
+        try:
+            rm_proc = await asyncio.create_subprocess_exec(
+                "docker", "rm", "-f", session.container_name,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE,
+            )
+            await rm_proc.wait()
+        except Exception:
+            pass
+
+        logger.info(
+            "Persistent session stopped",
+            server=server_name,
+            container=session.container_name,
+        )
+        return True
diff --git a/fuzzforge-common/src/fuzzforge_common/hub/executor.py b/fuzzforge-common/src/fuzzforge_common/hub/executor.py
new file mode 100644
index 0000000..37205e1
--- /dev/null
+++ b/fuzzforge-common/src/fuzzforge_common/hub/executor.py
@@ -0,0 +1,627 @@
+"""Hub executor for managing MCP server lifecycle and tool execution.
+
+This module provides a high-level interface for:
+- Discovering tools from all registered hub servers
+- Executing tools with proper error handling
+- Managing the lifecycle of hub operations
+
+"""
+
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, cast
+
+from fuzzforge_common.hub.client import HubClient, HubClientError, PersistentSession
+from fuzzforge_common.hub.models import HubServer, HubServerConfig, HubTool
+from fuzzforge_common.hub.registry import HubRegistry
+
+if TYPE_CHECKING:
+ from structlog.stdlib import BoundLogger
+
+
def get_logger() -> BoundLogger:
    """Get structlog logger instance.

    :returns: Configured structlog logger.

    """
    # Imported lazily so structlog is only required once logging is used.
    import structlog  # noqa: PLC0415

    return cast("BoundLogger", structlog.get_logger())
+
+
+class HubExecutionResult:
+ """Result of a hub tool execution."""
+
+ def __init__(
+ self,
+ *,
+ success: bool,
+ server_name: str,
+ tool_name: str,
+ result: dict[str, Any] | None = None,
+ error: str | None = None,
+ ) -> None:
+ """Initialize execution result.
+
+ :param success: Whether execution succeeded.
+ :param server_name: Name of the hub server.
+ :param tool_name: Name of the executed tool.
+ :param result: Tool execution result data.
+ :param error: Error message if execution failed.
+
+ """
+ self.success = success
+ self.server_name = server_name
+ self.tool_name = tool_name
+ self.result = result or {}
+ self.error = error
+
+ @property
+ def identifier(self) -> str:
+ """Get full tool identifier."""
+ return f"hub:{self.server_name}:{self.tool_name}"
+
+ def to_dict(self) -> dict[str, Any]:
+ """Convert to dictionary.
+
+ :returns: Dictionary representation.
+
+ """
+ return {
+ "success": self.success,
+ "identifier": self.identifier,
+ "server": self.server_name,
+ "tool": self.tool_name,
+ "result": self.result,
+ "error": self.error,
+ }
+
+
class HubExecutor:
    """Executor for hub server operations.

    Provides high-level methods for discovering and executing
    tools from hub servers.

    """

    #: Hub registry instance.
    _registry: HubRegistry

    #: MCP client instance.
    _client: HubClient

    #: Tracked continuous tool sessions, keyed by session_id.
    _continuous_sessions: dict[str, dict[str, Any]]

    def __init__(
        self,
        config_path: Path | None = None,
        timeout: int = 300,
    ) -> None:
        """Initialize the hub executor.

        :param config_path: Path to hub-servers.json config file.
        :param timeout: Default timeout for tool execution.

        """
        self._registry = HubRegistry(config_path)
        self._client = HubClient(timeout=timeout)
        self._continuous_sessions = {}

    @property
    def registry(self) -> HubRegistry:
        """Get the hub registry.

        :returns: Hub registry instance.

        """
        return self._registry

    def add_server(self, config: HubServerConfig) -> HubServer:
        """Add a server to the registry.

        :param config: Server configuration.
        :returns: Created HubServer instance.

        """
        return self._registry.add_server(config)

    # ------------------------------------------------------------------
    # Tool discovery
    # ------------------------------------------------------------------

    async def discover_all_tools(self) -> dict[str, list[HubTool]]:
        """Discover tools from all enabled servers.

        Per-server discovery failures are recorded in the registry (empty
        tool list plus error) instead of aborting the whole sweep.

        :returns: Dict mapping server names to lists of discovered tools.

        """
        logger = get_logger()
        results: dict[str, list[HubTool]] = {}

        for server in self._registry.enabled_servers:
            try:
                tools = await self._client.discover_tools(server)
            except HubClientError as e:
                logger.warning(
                    "Failed to discover tools",
                    server=server.name,
                    error=str(e),
                )
                self._registry.update_server_tools(server.name, [], error=str(e))
                results[server.name] = []
            else:
                self._registry.update_server_tools(server.name, tools)
                results[server.name] = tools

        return results

    async def discover_server_tools(self, server_name: str) -> list[HubTool]:
        """Discover tools from a specific server.

        :param server_name: Name of the server.
        :returns: List of discovered tools.
        :raises ValueError: If server not found.
        :raises HubClientError: If discovery fails (the error is also
            recorded in the registry before re-raising).

        """
        server = self._registry.get_server(server_name)
        if not server:
            msg = f"Server '{server_name}' not found"
            raise ValueError(msg)

        try:
            tools = await self._client.discover_tools(server)
        except HubClientError as e:
            self._registry.update_server_tools(server_name, [], error=str(e))
            raise
        self._registry.update_server_tools(server_name, tools)
        return tools

    async def _refresh_tools(self, server: HubServer) -> None:
        """Discover tools on *server*, logging (not raising) on failure.

        :param server: Server whose tool list should be refreshed.

        """
        try:
            tools = await self._client.discover_tools(server)
        except HubClientError as e:
            get_logger().warning(
                "Tool discovery failed on persistent session",
                server=server.name,
                error=str(e),
            )
        else:
            self._registry.update_server_tools(server.name, tools)

    # ------------------------------------------------------------------
    # Tool execution
    # ------------------------------------------------------------------

    async def _run_tool(
        self,
        server: HubServer,
        tool_name: str,
        arguments: dict[str, Any],
        *,
        timeout: int | None = None,
        extra_volumes: list[str] | None = None,
    ) -> HubExecutionResult:
        """Execute *tool_name* on *server*, mapping client errors to a result.

        :param server: Target hub server.
        :param tool_name: Tool to invoke.
        :param arguments: Tool arguments.
        :param timeout: Execution timeout.
        :param extra_volumes: Additional Docker volume mounts to inject.
        :returns: Execution result (never raises HubClientError).

        """
        get_logger().info(
            "Executing hub tool",
            server=server.name,
            tool=tool_name,
            arguments=arguments,
        )
        try:
            result = await self._client.execute_tool(
                server,
                tool_name,
                arguments,
                timeout=timeout,
                extra_volumes=extra_volumes,
            )
        except HubClientError as e:
            return HubExecutionResult(
                success=False,
                server_name=server.name,
                tool_name=tool_name,
                error=str(e),
            )
        return HubExecutionResult(
            success=True,
            server_name=server.name,
            tool_name=tool_name,
            result=result,
        )

    async def execute_tool(
        self,
        identifier: str,
        arguments: dict[str, Any] | None = None,
        *,
        timeout: int | None = None,
        extra_volumes: list[str] | None = None,
    ) -> HubExecutionResult:
        """Execute a hub tool.

        :param identifier: Tool identifier (hub:server:tool or server:tool).
        :param arguments: Tool arguments.
        :param timeout: Execution timeout.
        :param extra_volumes: Additional Docker volume mounts to inject.
        :returns: Execution result; failures are reported via the result
            object rather than raised.

        """
        arguments = arguments or {}

        # Fast path: the tool is already known to the registry.
        server, tool = self._registry.find_tool(identifier)
        if server and tool:
            return await self._run_tool(
                server, tool.name, arguments,
                timeout=timeout, extra_volumes=extra_volumes,
            )

        # Fallback: parse "hub:server:tool" / "server:tool" manually.
        # removeprefix (not str.replace) so a literal "hub:" appearing
        # elsewhere in the identifier is not mangled.
        parts = identifier.removeprefix("hub:").split(":")
        if len(parts) != 2:  # noqa: PLR2004
            return HubExecutionResult(
                success=False,
                server_name="unknown",
                tool_name=identifier,
                error=f"Invalid tool identifier: {identifier}",
            )

        server_name, tool_name = parts
        server = self._registry.get_server(server_name)
        if not server:
            return HubExecutionResult(
                success=False,
                server_name=server_name,
                tool_name=tool_name,
                error=f"Server '{server_name}' not found",
            )

        if not server.discovered:
            # Try discovery first; a failure is not fatal because the server
            # might still expose the tool at execution time.
            try:
                await self.discover_server_tools(server_name)
            except HubClientError:
                pass
            tool = server.get_tool(tool_name)

        # Execute even if the tool wasn't discovered.
        return await self._run_tool(
            server, tool.name if tool else tool_name, arguments,
            timeout=timeout, extra_volumes=extra_volumes,
        )

    # ------------------------------------------------------------------
    # Introspection
    # ------------------------------------------------------------------

    def list_servers(self) -> list[dict[str, Any]]:
        """List all registered servers with their status.

        :returns: List of server info dicts.

        """
        servers = []
        for server in self._registry.servers:
            session = self._client.get_persistent_session(server.name)
            servers.append({
                "name": server.name,
                "identifier": server.identifier,
                "type": server.config.type.value,
                "enabled": server.config.enabled,
                "category": server.config.category,
                "description": server.config.description,
                "persistent": server.config.persistent,
                "persistent_session_active": session is not None and session.alive,
                "discovered": server.discovered,
                "tool_count": len(server.tools),
                "error": server.discovery_error,
            })
        return servers

    def list_tools(self) -> list[dict[str, Any]]:
        """List all discovered tools.

        :returns: List of tool info dicts.

        """
        return [
            {
                "identifier": tool.identifier,
                "name": tool.name,
                "server": tool.server_name,
                "description": tool.description,
                "parameters": [p.model_dump() for p in tool.parameters],
            }
            for tool in self._registry.get_all_tools()
        ]

    def get_tool_schema(self, identifier: str) -> dict[str, Any] | None:
        """Get the JSON Schema for a tool's input.

        :param identifier: Tool identifier.
        :returns: JSON Schema dict or None if not found.

        """
        _, tool = self._registry.find_tool(identifier)
        if tool:
            return tool.input_schema
        return None

    # ------------------------------------------------------------------
    # Persistent session management
    # ------------------------------------------------------------------

    async def start_persistent_server(
        self,
        server_name: str,
        extra_volumes: list[str] | None = None,
    ) -> dict[str, Any]:
        """Start a persistent container session for a server.

        The container stays running between tool calls, allowing stateful
        interactions (e.g., radare2 sessions, long-running fuzzing).

        :param server_name: Name of the hub server to start.
        :param extra_volumes: Additional host:container volume mounts to inject.
        :returns: Session status dictionary, including discovered tools.
        :raises ValueError: If server not found.

        """
        server = self._registry.get_server(server_name)
        if not server:
            msg = f"Server '{server_name}' not found"
            raise ValueError(msg)

        session = await self._client.start_persistent_session(
            server.config, extra_volumes=extra_volumes,
        )

        # Auto-discover tools on the new session so the caller learns what is
        # available without a separate round-trip.
        await self._refresh_tools(server)

        discovered_tools = [
            {
                "identifier": tool.identifier,
                "name": tool.name,
                "description": tool.description,
            }
            for tool in server.tools
        ]

        return {
            "server_name": session.server_name,
            "container_name": session.container_name,
            "alive": session.alive,
            "initialized": session.initialized,
            "started_at": session.started_at.isoformat(),
            "tools": discovered_tools,
            "tool_count": len(discovered_tools),
        }

    async def stop_persistent_server(self, server_name: str) -> bool:
        """Stop a persistent container session.

        :param server_name: Server name.
        :returns: True if a session was stopped.

        """
        return await self._client.stop_persistent_session(server_name)

    def get_persistent_status(self, server_name: str) -> dict[str, Any] | None:
        """Get status of a persistent session.

        :param server_name: Server name.
        :returns: Status dict or None if no session.

        """
        session = self._client.get_persistent_session(server_name)
        if not session:
            return None

        from datetime import datetime, timezone  # noqa: PLC0415

        return {
            "server_name": session.server_name,
            "container_name": session.container_name,
            "alive": session.alive,
            "initialized": session.initialized,
            "started_at": session.started_at.isoformat(),
            "uptime_seconds": int(
                (datetime.now(tz=timezone.utc) - session.started_at).total_seconds()
            ),
        }

    def list_persistent_sessions(self) -> list[dict[str, Any]]:
        """List all persistent sessions.

        :returns: List of session status dicts.

        """
        return self._client.list_persistent_sessions()

    async def stop_all_persistent_servers(self) -> int:
        """Stop all persistent sessions.

        :returns: Number of sessions stopped.

        """
        return await self._client.stop_all_persistent_sessions()

    # ------------------------------------------------------------------
    # Continuous session management
    # ------------------------------------------------------------------

    @staticmethod
    def _parse_result_payload(result: dict[str, Any]) -> dict[str, Any]:
        """Extract the JSON payload from an MCP tool result.

        Looks for the first ``{"type": "text", ...}`` item in the result's
        ``content`` list and parses its text as JSON. Falls back to the raw
        result when no text item exists or the text is not valid JSON.

        :param result: Raw MCP tool result.
        :returns: Parsed payload, or the raw result on fallback.

        """
        import json  # noqa: PLC0415

        content_text = ""
        for item in result.get("content", []):
            if item.get("type") == "text":
                content_text = item.get("text", "")
                break

        if not content_text:
            return result
        try:
            return json.loads(content_text)
        except json.JSONDecodeError:
            return result

    def _get_continuous_session(
        self, session_id: str,
    ) -> tuple[dict[str, Any], HubServer]:
        """Look up a tracked continuous session and its server.

        :param session_id: Session ID from :meth:`start_continuous_tool`.
        :returns: Tuple of (session info, server).
        :raises ValueError: If the session or its server is unknown.

        """
        session_info = self._continuous_sessions.get(session_id)
        if not session_info:
            msg = f"Unknown continuous session: {session_id}"
            raise ValueError(msg)

        server = self._registry.get_server(session_info["server_name"])
        if not server:
            msg = f"Server '{session_info['server_name']}' not found"
            raise ValueError(msg)

        return session_info, server

    async def start_continuous_tool(
        self,
        server_name: str,
        start_tool: str,
        arguments: dict[str, Any],
    ) -> dict[str, Any]:
        """Start a continuous hub tool session.

        Ensures a persistent container is running, then calls the start tool
        (e.g., ``cargo_fuzz_start``) which returns a session_id. Tracks the
        session for subsequent status/stop calls.

        :param server_name: Hub server name.
        :param start_tool: Name of the start tool on the server.
        :param arguments: Arguments for the start tool.
        :returns: Start result including session_id.
        :raises ValueError: If server not found.

        """
        server = self._registry.get_server(server_name)
        if not server:
            msg = f"Server '{server_name}' not found"
            raise ValueError(msg)

        # Continuous tools need a container that outlives individual calls.
        persistent = self._client.get_persistent_session(server_name)
        if not persistent or not persistent.alive:
            get_logger().info(
                "Auto-starting persistent session for continuous tool",
                server=server_name,
            )
            await self._client.start_persistent_session(server.config)
            await self._refresh_tools(server)

        result = await self._client.execute_tool(server, start_tool, arguments)
        start_result = self._parse_result_payload(result)

        session_id = start_result.get("session_id", "")
        if session_id:
            from datetime import datetime, timezone  # noqa: PLC0415

            # Companion tool names are derived by convention
            # (x_start -> x_status / x_stop).
            self._continuous_sessions[session_id] = {
                "session_id": session_id,
                "server_name": server_name,
                "start_tool": start_tool,
                "status_tool": start_tool.replace("_start", "_status"),
                "stop_tool": start_tool.replace("_start", "_stop"),
                "started_at": datetime.now(tz=timezone.utc).isoformat(),
                "status": "running",
            }

        return start_result

    async def get_continuous_tool_status(
        self,
        session_id: str,
    ) -> dict[str, Any]:
        """Get status of a continuous hub tool session.

        :param session_id: Session ID from start_continuous_tool.
        :returns: Status dict from the hub server's status tool.
        :raises ValueError: If session not found.

        """
        session_info, server = self._get_continuous_session(session_id)
        result = await self._client.execute_tool(
            server,
            session_info["status_tool"],
            {"session_id": session_id},
        )
        return self._parse_result_payload(result)

    async def stop_continuous_tool(
        self,
        session_id: str,
    ) -> dict[str, Any]:
        """Stop a continuous hub tool session.

        :param session_id: Session ID to stop.
        :returns: Final results from the hub server's stop tool.
        :raises ValueError: If session not found.

        """
        session_info, server = self._get_continuous_session(session_id)
        result = await self._client.execute_tool(
            server,
            session_info["stop_tool"],
            {"session_id": session_id},
        )
        stop_result = self._parse_result_payload(result)

        # Keep the session entry around (marked stopped) for auditability.
        session_info["status"] = "stopped"

        return stop_result

    def list_continuous_sessions(self) -> list[dict[str, Any]]:
        """List all tracked continuous sessions.

        :returns: List of continuous session info dicts.

        """
        return list(self._continuous_sessions.values())
diff --git a/fuzzforge-common/src/fuzzforge_common/hub/models.py b/fuzzforge-common/src/fuzzforge_common/hub/models.py
new file mode 100644
index 0000000..f0df8bd
--- /dev/null
+++ b/fuzzforge-common/src/fuzzforge_common/hub/models.py
@@ -0,0 +1,296 @@
+"""Data models for FuzzForge Hub.
+
+This module defines the Pydantic models used to represent MCP servers
+and their tools in the hub registry.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any
+
+from pydantic import BaseModel, Field
+
+
class HubServerType(str, Enum):
    """Type of MCP server connection.

    Inherits ``str`` so members serialize as their plain string values
    in JSON configuration files.

    """

    #: Run as Docker container with stdio transport.
    DOCKER = "docker"
    #: Run as local command/process with stdio transport.
    COMMAND = "command"
    #: Connect via Server-Sent Events (HTTP).
    SSE = "sse"
+
+
class HubServerConfig(BaseModel):
    """Configuration for an MCP server in the hub.

    This defines how to connect to an MCP server, not what tools it provides.
    Tools are discovered dynamically at runtime.

    Connection fields are type-specific: ``image`` (docker), ``command``
    (command), ``url`` (sse); fields for other types stay at their defaults.

    """

    #: Unique identifier for this server (e.g., "nmap", "nuclei").
    name: str = Field(description="Unique server identifier")

    #: Human-readable description of the server.
    description: str | None = Field(
        default=None,
        description="Human-readable description",
    )

    #: Type of connection to use (selects which fields below apply).
    type: HubServerType = Field(description="Connection type")

    #: Docker image name (for type=docker).
    image: str | None = Field(
        default=None,
        description="Docker image name (for docker type)",
    )

    #: Command to run (for type=command).
    command: list[str] | None = Field(
        default=None,
        description="Command and args (for command type)",
    )

    #: URL endpoint (for type=sse).
    url: str | None = Field(
        default=None,
        description="SSE endpoint URL (for sse type)",
    )

    #: Environment variables to pass to the server.
    environment: dict[str, str] = Field(
        default_factory=dict,
        description="Environment variables",
    )

    #: Docker capabilities to add (e.g., ["NET_RAW"] for nmap).
    capabilities: list[str] = Field(
        default_factory=list,
        description="Docker capabilities to add",
    )

    #: Volume mounts for Docker (e.g., ["/host/path:/container/path:ro"]).
    volumes: list[str] = Field(
        default_factory=list,
        description="Docker volume mounts",
    )

    #: Whether this server is enabled.
    enabled: bool = Field(
        default=True,
        description="Whether server is enabled",
    )

    #: Category for grouping (e.g., "reconnaissance", "web-security").
    category: str | None = Field(
        default=None,
        description="Category for grouping servers",
    )

    #: Per-server timeout override in seconds (None = use default_timeout).
    timeout: int | None = Field(
        default=None,
        description="Per-server execution timeout override in seconds",
    )

    #: Whether to use persistent container mode (keep container running between calls).
    persistent: bool = Field(
        default=False,
        description="Keep container running between tool calls for stateful interactions",
    )
+
+
class HubToolParameter(BaseModel):
    """A parameter for an MCP tool.

    Parsed from the tool's JSON Schema inputSchema.

    """

    #: Parameter name (the JSON Schema property name).
    name: str

    #: Parameter type (string, integer, boolean, array, object).
    type: str

    #: Human-readable description.
    description: str | None = None

    #: Whether this parameter appears in the schema's "required" list.
    required: bool = False

    #: Default value if any.
    default: Any = None

    #: Enum values if constrained.
    enum: list[Any] | None = None
+
+
class HubTool(BaseModel):
    """An MCP tool discovered from a hub server.

    This is populated by calling `list_tools()` on the MCP server.

    """

    #: Tool name as defined by the MCP server.
    name: str = Field(description="Tool name from MCP server")

    #: Human-readable description.
    description: str | None = Field(
        default=None,
        description="Tool description",
    )

    #: Name of the hub server this tool belongs to.
    server_name: str = Field(description="Parent server name")

    #: Parsed parameters from inputSchema.
    parameters: list[HubToolParameter] = Field(
        default_factory=list,
        description="Tool parameters",
    )

    #: Raw JSON Schema for the tool input.
    input_schema: dict[str, Any] = Field(
        default_factory=dict,
        description="Raw JSON Schema from MCP",
    )

    @property
    def identifier(self) -> str:
        """Get the full tool identifier (hub:server:tool)."""
        return f"hub:{self.server_name}:{self.name}"

    @classmethod
    def from_mcp_tool(
        cls,
        server_name: str,
        name: str,
        description: str | None,
        input_schema: dict[str, Any],
    ) -> HubTool:
        """Create a HubTool from MCP tool metadata.

        :param server_name: Name of the parent hub server.
        :param name: Tool name.
        :param description: Tool description.
        :param input_schema: JSON Schema for tool input.
        :returns: HubTool instance.

        """
        return cls(
            name=name,
            description=description,
            server_name=server_name,
            parameters=cls._parse_parameters(input_schema),
            input_schema=input_schema,
        )

    @staticmethod
    def _parse_parameters(schema: dict[str, Any]) -> list[HubToolParameter]:
        """Parse parameters from JSON Schema.

        :param schema: JSON Schema dict.
        :returns: List of parsed parameters.

        """
        required_names = set(schema.get("required", []))
        # One HubToolParameter per "properties" entry; a missing "type"
        # defaults to "string".
        return [
            HubToolParameter(
                name=param_name,
                type=spec.get("type", "string"),
                description=spec.get("description"),
                required=param_name in required_names,
                default=spec.get("default"),
                enum=spec.get("enum"),
            )
            for param_name, spec in schema.get("properties", {}).items()
        ]
+
+
class HubServer(BaseModel):
    """A hub server with its discovered tools.

    Combines configuration with dynamically discovered tools.

    """

    #: Server configuration.
    config: HubServerConfig

    #: Tools discovered from the server (populated at runtime).
    tools: list[HubTool] = Field(
        default_factory=list,
        description="Discovered tools",
    )

    #: Whether tools have been discovered.
    discovered: bool = Field(
        default=False,
        description="Whether tools have been discovered",
    )

    #: Error message if discovery failed.
    discovery_error: str | None = Field(
        default=None,
        description="Error message if discovery failed",
    )

    @property
    def name(self) -> str:
        """Get server name."""
        return self.config.name

    @property
    def identifier(self) -> str:
        """Get server identifier for module listing."""
        return f"hub:{self.config.name}"

    def get_tool(self, tool_name: str) -> HubTool | None:
        """Get a tool by name.

        :param tool_name: Name of the tool.
        :returns: HubTool if found, None otherwise.

        """
        # Linear scan; tool lists are small (one server's tools).
        return next(
            (candidate for candidate in self.tools if candidate.name == tool_name),
            None,
        )
+
+
class HubConfig(BaseModel):
    """Configuration for the entire hub.

    Loaded from hub-servers.json or similar config file.

    """

    #: List of configured servers.
    servers: list[HubServerConfig] = Field(
        default_factory=list,
        description="Configured MCP servers",
    )

    #: Default timeout for tool execution (seconds); individual servers may
    #: override this via HubServerConfig.timeout.
    default_timeout: int = Field(
        default=300,
        description="Default execution timeout",
    )

    #: Whether to cache discovered tools.
    cache_tools: bool = Field(
        default=True,
        description="Cache discovered tools",
    )
diff --git a/fuzzforge-common/src/fuzzforge_common/hub/registry.py b/fuzzforge-common/src/fuzzforge_common/hub/registry.py
new file mode 100644
index 0000000..ccbdd48
--- /dev/null
+++ b/fuzzforge-common/src/fuzzforge_common/hub/registry.py
@@ -0,0 +1,258 @@
+"""Hub registry for managing MCP server configurations.
+
+The registry loads server configurations from a JSON file and provides
+methods to access and manage them. It does not hardcode any specific
+servers or tools - everything is configured by the user.
+
+"""
+
+from __future__ import annotations
+
import json
from pathlib import Path
from typing import TYPE_CHECKING, cast

from fuzzforge_common.hub.models import (
    HubConfig,
    HubServer,
    HubServerConfig,
    HubTool,
)
+
+if TYPE_CHECKING:
+ from structlog.stdlib import BoundLogger
+
+
def get_logger() -> BoundLogger:
    """Get structlog logger instance.

    :returns: Configured structlog logger.

    """
    # Lazy import keeps structlog off the critical import path.
    import structlog  # noqa: PLC0415

    return cast("BoundLogger", structlog.get_logger())
+
+
class HubRegistry:
    """Registry for MCP hub servers.

    Manages the configuration and state of hub servers.
    Configurations are loaded from a JSON file.

    """

    #: Loaded hub configuration.
    _config: HubConfig

    #: Server instances with discovered tools, keyed by server name.
    _servers: dict[str, HubServer]

    #: Path to the configuration file (None when running config-less).
    _config_path: Path | None

    def __init__(self, config_path: Path | str | None = None) -> None:
        """Initialize the hub registry.

        :param config_path: Path to hub-servers.json config file.
            If None, starts with empty configuration.

        """
        self._config_path = Path(config_path) if config_path is not None else None
        self._servers = {}
        self._config = HubConfig()

        if self._config_path and self._config_path.exists():
            self._load_config(self._config_path)

    def _load_config(self, config_path: Path) -> None:
        """Load configuration from JSON file.

        :param config_path: Path to config file.
        :raises Exception: Re-raises any read/parse/validation error after
            logging it.

        """
        logger = get_logger()
        try:
            with config_path.open() as f:
                data = json.load(f)

            self._config = HubConfig.model_validate(data)

            # Only enabled servers get runtime instances; disabled entries
            # stay in _config so they survive a save_config round-trip.
            for server_config in self._config.servers:
                if server_config.enabled:
                    self._servers[server_config.name] = HubServer(
                        config=server_config,
                    )

            logger.info(
                "Loaded hub configuration",
                path=str(config_path),
                servers=len(self._servers),
            )

        except Exception as e:
            logger.error(
                "Failed to load hub configuration",
                path=str(config_path),
                error=str(e),
            )
            raise

    def reload(self) -> None:
        """Reload configuration from file, discarding in-memory server state."""
        if self._config_path and self._config_path.exists():
            self._servers.clear()
            self._load_config(self._config_path)

    @property
    def servers(self) -> list[HubServer]:
        """Get all registered servers.

        :returns: List of hub servers.

        """
        return list(self._servers.values())

    @property
    def enabled_servers(self) -> list[HubServer]:
        """Get all enabled servers.

        :returns: List of enabled hub servers.

        """
        return [s for s in self._servers.values() if s.config.enabled]

    def get_server(self, name: str) -> HubServer | None:
        """Get a server by name.

        :param name: Server name.
        :returns: HubServer if found, None otherwise.

        """
        return self._servers.get(name)

    def add_server(self, config: HubServerConfig) -> HubServer:
        """Add a server to the registry.

        :param config: Server configuration.
        :returns: Created HubServer instance.
        :raises ValueError: If server with same name exists.

        """
        if config.name in self._servers:
            msg = f"Server '{config.name}' already exists"
            raise ValueError(msg)

        server = HubServer(config=config)
        self._servers[config.name] = server
        self._config.servers.append(config)

        get_logger().info("Added hub server", name=config.name, type=config.type)
        return server

    def remove_server(self, name: str) -> bool:
        """Remove a server from the registry.

        :param name: Server name.
        :returns: True if removed, False if not found.

        """
        if name not in self._servers:
            return False

        del self._servers[name]
        self._config.servers = [s for s in self._config.servers if s.name != name]

        get_logger().info("Removed hub server", name=name)
        return True

    def save_config(self, path: Path | None = None) -> None:
        """Save current configuration to file.

        :param path: Path to save to. Uses original path if None.
        :raises ValueError: If neither *path* nor the original config path
            is available.

        """
        save_path = path or self._config_path
        if not save_path:
            msg = "No config path specified"
            raise ValueError(msg)

        with save_path.open("w") as f:
            json.dump(
                self._config.model_dump(mode="json"),
                f,
                indent=2,
            )

        get_logger().info("Saved hub configuration", path=str(save_path))

    def update_server_tools(
        self,
        server_name: str,
        tools: list[HubTool],
        *,
        error: str | None = None,
    ) -> None:
        """Update discovered tools for a server.

        Called by the hub client after tool discovery. Unknown server names
        are silently ignored.

        :param server_name: Server name.
        :param tools: List of HubTool instances.
        :param error: Error message if discovery failed.

        """
        server = self._servers.get(server_name)
        if not server:
            return

        if error:
            server.discovered = False
            server.discovery_error = error
            server.tools = []
        else:
            server.discovered = True
            server.discovery_error = None
            server.tools = tools

    def get_all_tools(self) -> list[HubTool]:
        """Get all discovered tools from all servers.

        :returns: Flat list of all HubTool instances.

        """
        return [
            tool
            for server in self._servers.values()
            if server.discovered
            for tool in server.tools
        ]

    def find_tool(
        self,
        identifier: str,
    ) -> tuple[HubServer | None, HubTool | None]:
        """Find a tool by its full identifier.

        :param identifier: Full identifier (hub:server:tool or server:tool).
        :returns: (server, tool) when both resolve; (server, None) when the
            server exists but the tool is unknown; (None, None) for a
            malformed identifier or unknown server.

        """
        parts = identifier.split(":")
        if len(parts) == 3 and parts[0] == "hub":  # noqa: PLR2004
            # hub:server:tool format
            _, server_name, tool_name = parts
        elif len(parts) == 2:  # noqa: PLR2004
            # server:tool format
            server_name, tool_name = parts
        else:
            return None, None

        server = self._servers.get(server_name)
        if not server:
            return None, None

        return server, server.get_tool(tool_name)
diff --git a/fuzzforge-common/src/fuzzforge_common/sandboxes/engines/base/configuration.py b/fuzzforge-common/src/fuzzforge_common/sandboxes/engines/base/configuration.py
index aaae960..c255c72 100644
--- a/fuzzforge-common/src/fuzzforge_common/sandboxes/engines/base/configuration.py
+++ b/fuzzforge-common/src/fuzzforge_common/sandboxes/engines/base/configuration.py
@@ -4,7 +4,7 @@ from typing import TYPE_CHECKING
from pydantic import BaseModel
from fuzzforge_common.sandboxes.engines.enumeration import (
- FuzzForgeSandboxEngines, # noqa: TC001 (required by 'pydantic' at runtime)
+ FuzzForgeSandboxEngines,
)
if TYPE_CHECKING:
diff --git a/fuzzforge-common/src/fuzzforge_common/sandboxes/engines/base/engine.py b/fuzzforge-common/src/fuzzforge_common/sandboxes/engines/base/engine.py
index 993b979..3bd034b 100644
--- a/fuzzforge-common/src/fuzzforge_common/sandboxes/engines/base/engine.py
+++ b/fuzzforge-common/src/fuzzforge_common/sandboxes/engines/base/engine.py
@@ -272,6 +272,23 @@ class AbstractFuzzForgeSandboxEngine(ABC):
message: str = f"method 'read_file_from_container' is not implemented for class '{self.__class__.__name__}'"
raise NotImplementedError(message)
+ @abstractmethod
+ def tail_file_from_container(self, identifier: str, path: str, start_line: int = 1) -> str:
+ """Read a file from a running container starting at a given line number.
+
+ Uses ``tail -n +{start_line}`` to avoid re-reading the entire file on
+ every poll. This is the preferred method for incremental reads of
+ append-only files such as ``stream.jsonl``.
+
+ :param identifier: Container identifier.
+ :param path: Path to file inside container.
+ :param start_line: 1-based line number to start reading from.
+ :returns: File contents from *start_line* onwards (may be empty).
+
+ """
+ message: str = f"method 'tail_file_from_container' is not implemented for class '{self.__class__.__name__}'"
+ raise NotImplementedError(message)
+
@abstractmethod
def list_containers(self, all_containers: bool = True) -> list[dict]:
"""List containers.
diff --git a/fuzzforge-common/src/fuzzforge_common/sandboxes/engines/docker/cli.py b/fuzzforge-common/src/fuzzforge_common/sandboxes/engines/docker/cli.py
index 95580fa..e80d984 100644
--- a/fuzzforge-common/src/fuzzforge_common/sandboxes/engines/docker/cli.py
+++ b/fuzzforge-common/src/fuzzforge_common/sandboxes/engines/docker/cli.py
@@ -389,6 +389,24 @@ class DockerCLI(AbstractFuzzForgeSandboxEngine):
return ""
return result.stdout
+ def tail_file_from_container(self, identifier: str, path: str, start_line: int = 1) -> str:
+ """Read a file from a container starting at a given line number.
+
+ :param identifier: Container identifier.
+ :param path: Path to file in container.
+ :param start_line: 1-based line number to start reading from.
+ :returns: File contents from *start_line* onwards.
+
+ """
+ result = self._run(
+ ["exec", identifier, "tail", "-n", f"+{start_line}", path],
+ check=False,
+ )
+ if result.returncode != 0:
+ get_logger().debug("failed to tail file from container", path=path, start_line=start_line)
+ return ""
+ return result.stdout
+
def list_containers(self, all_containers: bool = True) -> list[dict]:
"""List containers.
diff --git a/fuzzforge-common/src/fuzzforge_common/sandboxes/engines/docker/engine.py b/fuzzforge-common/src/fuzzforge_common/sandboxes/engines/docker/engine.py
index 217da43..31ac9d8 100644
--- a/fuzzforge-common/src/fuzzforge_common/sandboxes/engines/docker/engine.py
+++ b/fuzzforge-common/src/fuzzforge_common/sandboxes/engines/docker/engine.py
@@ -168,6 +168,11 @@ class Docker(AbstractFuzzForgeSandboxEngine):
message: str = "Docker engine read_file_from_container is not yet implemented"
raise NotImplementedError(message)
+ def tail_file_from_container(self, identifier: str, path: str, start_line: int = 1) -> str:
+ """Read a file from a container starting at a given line number."""
+ message: str = "Docker engine tail_file_from_container is not yet implemented"
+ raise NotImplementedError(message)
+
def list_containers(self, all_containers: bool = True) -> list[dict]:
"""List containers."""
message: str = "Docker engine list_containers is not yet implemented"
diff --git a/fuzzforge-common/src/fuzzforge_common/sandboxes/engines/podman/cli.py b/fuzzforge-common/src/fuzzforge_common/sandboxes/engines/podman/cli.py
index ca333d6..95aae1b 100644
--- a/fuzzforge-common/src/fuzzforge_common/sandboxes/engines/podman/cli.py
+++ b/fuzzforge-common/src/fuzzforge_common/sandboxes/engines/podman/cli.py
@@ -449,6 +449,24 @@ class PodmanCLI(AbstractFuzzForgeSandboxEngine):
return ""
return result.stdout
+ def tail_file_from_container(self, identifier: str, path: str, start_line: int = 1) -> str:
+ """Read a file from a container starting at a given line number.
+
+ :param identifier: Container identifier.
+ :param path: Path to file in container.
+ :param start_line: 1-based line number to start reading from.
+ :returns: File contents from *start_line* onwards.
+
+ """
+ result = self._run(
+ ["exec", identifier, "tail", "-n", f"+{start_line}", path],
+ check=False,
+ )
+ if result.returncode != 0:
+ get_logger().debug("failed to tail file from container", path=path, start_line=start_line)
+ return ""
+ return result.stdout
+
def list_containers(self, all_containers: bool = True) -> list[dict]:
"""List containers.
diff --git a/fuzzforge-common/src/fuzzforge_common/sandboxes/engines/podman/engine.py b/fuzzforge-common/src/fuzzforge_common/sandboxes/engines/podman/engine.py
index 4973ac9..fcd7969 100644
--- a/fuzzforge-common/src/fuzzforge_common/sandboxes/engines/podman/engine.py
+++ b/fuzzforge-common/src/fuzzforge_common/sandboxes/engines/podman/engine.py
@@ -475,6 +475,30 @@ class Podman(AbstractFuzzForgeSandboxEngine):
return ""
return stdout.decode("utf-8", errors="replace") if stdout else ""
+ def tail_file_from_container(self, identifier: str, path: str, start_line: int = 1) -> str:
+ """Read a file from a container starting at a given line number.
+
+ :param identifier: Container identifier.
+ :param path: Path to file inside container.
+ :param start_line: 1-based line number to start reading from.
+ :returns: File contents from *start_line* onwards.
+
+ """
+ client: PodmanClient = self.get_client()
+ with client:
+ container: Container = client.containers.get(key=identifier)
+ (status, (stdout, stderr)) = container.exec_run(
+ cmd=["tail", "-n", f"+{start_line}", path],
+ demux=True,
+ )
+ if status != 0:
+ error_msg = stderr.decode("utf-8", errors="replace") if stderr else "File not found"
+ get_logger().debug(
+ "failed to tail file from container", path=path, start_line=start_line, error=error_msg,
+ )
+ return ""
+ return stdout.decode("utf-8", errors="replace") if stdout else ""
+
def list_containers(self, all_containers: bool = True) -> list[dict]:
"""List containers.
diff --git a/fuzzforge-mcp/README.md b/fuzzforge-mcp/README.md
index 1c68125..7c98fd1 100644
--- a/fuzzforge-mcp/README.md
+++ b/fuzzforge-mcp/README.md
@@ -45,11 +45,11 @@ For custom setups, you can manually configure the MCP server.
{
"mcpServers": {
"fuzzforge": {
- "command": "/path/to/fuzzforge-oss/.venv/bin/python",
+ "command": "/path/to/fuzzforge_ai/.venv/bin/python",
"args": ["-m", "fuzzforge_mcp"],
- "cwd": "/path/to/fuzzforge-oss",
+ "cwd": "/path/to/fuzzforge_ai",
"env": {
- "FUZZFORGE_MODULES_PATH": "/path/to/fuzzforge-oss/fuzzforge-modules",
+ "FUZZFORGE_MODULES_PATH": "/path/to/fuzzforge_ai/fuzzforge-modules",
"FUZZFORGE_ENGINE__TYPE": "docker"
}
}
@@ -64,11 +64,11 @@ For custom setups, you can manually configure the MCP server.
"servers": {
"fuzzforge": {
"type": "stdio",
- "command": "/path/to/fuzzforge-oss/.venv/bin/python",
+ "command": "/path/to/fuzzforge_ai/.venv/bin/python",
"args": ["-m", "fuzzforge_mcp"],
- "cwd": "/path/to/fuzzforge-oss",
+ "cwd": "/path/to/fuzzforge_ai",
"env": {
- "FUZZFORGE_MODULES_PATH": "/path/to/fuzzforge-oss/fuzzforge-modules",
+ "FUZZFORGE_MODULES_PATH": "/path/to/fuzzforge_ai/fuzzforge-modules",
"FUZZFORGE_ENGINE__TYPE": "docker"
}
}
@@ -83,11 +83,11 @@ For custom setups, you can manually configure the MCP server.
"mcpServers": {
"fuzzforge": {
"type": "stdio",
- "command": "/path/to/fuzzforge-oss/.venv/bin/python",
+ "command": "/path/to/fuzzforge_ai/.venv/bin/python",
"args": ["-m", "fuzzforge_mcp"],
- "cwd": "/path/to/fuzzforge-oss",
+ "cwd": "/path/to/fuzzforge_ai",
"env": {
- "FUZZFORGE_MODULES_PATH": "/path/to/fuzzforge-oss/fuzzforge-modules",
+ "FUZZFORGE_MODULES_PATH": "/path/to/fuzzforge_ai/fuzzforge-modules",
"FUZZFORGE_ENGINE__TYPE": "docker"
}
}
diff --git a/fuzzforge-mcp/pyproject.toml b/fuzzforge-mcp/pyproject.toml
index 44f4255..a0636f5 100644
--- a/fuzzforge-mcp/pyproject.toml
+++ b/fuzzforge-mcp/pyproject.toml
@@ -1,14 +1,13 @@
[project]
name = "fuzzforge-mcp"
version = "0.0.1"
-description = "FuzzForge MCP Server - AI agent gateway for FuzzForge OSS."
+description = "FuzzForge MCP Server - AI agent gateway for FuzzForge AI."
authors = []
readme = "README.md"
requires-python = ">=3.14"
dependencies = [
"fastmcp==2.14.1",
"fuzzforge-common==0.0.1",
- "fuzzforge-runner==0.0.1",
"pydantic==2.12.4",
"pydantic-settings==2.12.0",
"structlog==25.5.0",
@@ -32,5 +31,4 @@ tests = [
[tool.uv.sources]
fuzzforge-common = { workspace = true }
-fuzzforge-runner = { workspace = true }
fuzzforge-tests = { workspace = true }
diff --git a/fuzzforge-mcp/ruff.toml b/fuzzforge-mcp/ruff.toml
index c3310b5..1df1bc9 100644
--- a/fuzzforge-mcp/ruff.toml
+++ b/fuzzforge-mcp/ruff.toml
@@ -14,3 +14,18 @@ ignore = [
"PLR2004", # allowing comparisons using unamed numerical constants in tests
"S101", # allowing 'assert' statements in tests
]
+"src/**" = [
+ "ASYNC109", # async with timeout param: intentional pattern
+ "EM102", # f-string in exception: existing pattern
+ "PERF401", # list comprehension: readability over perf
+ "PLR0913", # too many arguments: API compatibility
+ "PLW0602", # global variable: intentional for shared state
+ "PLW0603", # global statement: intentional for shared state
+ "RET504", # unnecessary assignment: readability
+ "RET505", # unnecessary elif after return: readability
+ "TC001", # TYPE_CHECKING: causes circular imports
+ "TC003", # TYPE_CHECKING: causes circular imports
+ "TRY300", # try-else: existing pattern
+ "TRY301", # abstract raise: existing pattern
+ "TRY003", # message in exception: existing pattern
+]
diff --git a/fuzzforge-mcp/src/fuzzforge_mcp/application.py b/fuzzforge-mcp/src/fuzzforge_mcp/application.py
index a46a916..57dcf0c 100644
--- a/fuzzforge-mcp/src/fuzzforge_mcp/application.py
+++ b/fuzzforge-mcp/src/fuzzforge_mcp/application.py
@@ -1,7 +1,8 @@
"""FuzzForge MCP Server Application.
This is the main entry point for the FuzzForge MCP server, providing
-AI agents with tools to execute security research modules.
+AI agents with tools to discover and execute MCP hub tools for
+security research.
"""
@@ -12,7 +13,7 @@ from fastmcp import FastMCP
from fastmcp.server.middleware.error_handling import ErrorHandlingMiddleware
from fuzzforge_mcp import resources, tools
-from fuzzforge_runner import Settings
+from fuzzforge_mcp.settings import Settings
if TYPE_CHECKING:
from collections.abc import AsyncGenerator
@@ -38,18 +39,23 @@ mcp: FastMCP = FastMCP(
instructions="""
FuzzForge is a security research orchestration platform. Use these tools to:
-1. **List modules**: Discover available security research modules
-2. **Execute modules**: Run modules in isolated containers
-3. **Execute workflows**: Chain multiple modules together
+1. **List hub servers**: Discover registered MCP tool servers
+2. **Discover tools**: Find available tools from hub servers
+3. **Execute hub tools**: Run security tools in isolated containers
4. **Manage projects**: Initialize and configure projects
5. **Get results**: Retrieve execution results
Typical workflow:
1. Initialize a project with `init_project`
2. Set project assets with `set_project_assets` (optional, only needed once for the source directory)
-3. List available modules with `list_modules`
-4. Execute a module with `execute_module` β use `assets_path` param to pass different inputs per module
-5. Read outputs from `results_path` returned by `execute_module` β check module's `output_artifacts` metadata for filenames
+3. List available hub servers with `list_hub_servers`
+4. Discover tools from servers with `discover_hub_tools`
+5. Execute hub tools with `execute_hub_tool`
+
+Hub workflow:
+1. List available hub servers with `list_hub_servers`
+2. Discover tools from servers with `discover_hub_tools`
+3. Execute hub tools with `execute_hub_tool`
""",
lifespan=lifespan,
)
diff --git a/fuzzforge-mcp/src/fuzzforge_mcp/dependencies.py b/fuzzforge-mcp/src/fuzzforge_mcp/dependencies.py
index 5781427..2942855 100644
--- a/fuzzforge-mcp/src/fuzzforge_mcp/dependencies.py
+++ b/fuzzforge-mcp/src/fuzzforge_mcp/dependencies.py
@@ -6,9 +6,10 @@ from pathlib import Path
from typing import TYPE_CHECKING, cast
from fastmcp.server.dependencies import get_context
-from fuzzforge_runner import Runner, Settings
from fuzzforge_mcp.exceptions import FuzzForgeMCPError
+from fuzzforge_mcp.settings import Settings
+from fuzzforge_mcp.storage import LocalStorage
if TYPE_CHECKING:
from fastmcp import Context
@@ -17,6 +18,9 @@ if TYPE_CHECKING:
# Track the current active project path (set by init_project)
_current_project_path: Path | None = None
+# Singleton storage instance
+_storage: LocalStorage | None = None
+
def set_current_project_path(project_path: Path) -> None:
"""Set the current project path.
@@ -60,11 +64,14 @@ def get_project_path() -> Path:
return Path.cwd()
-def get_runner() -> Runner:
- """Get a configured Runner instance.
+def get_storage() -> LocalStorage:
+ """Get the storage backend instance.
- :return: Runner instance configured from MCP settings.
+ :return: LocalStorage instance.
"""
- settings: Settings = get_settings()
- return Runner(settings)
+ global _storage
+ if _storage is None:
+ settings = get_settings()
+ _storage = LocalStorage(settings.storage.path)
+ return _storage
diff --git a/fuzzforge-mcp/src/fuzzforge_mcp/resources/__init__.py b/fuzzforge-mcp/src/fuzzforge_mcp/resources/__init__.py
index f6a1ce0..ac66e72 100644
--- a/fuzzforge-mcp/src/fuzzforge_mcp/resources/__init__.py
+++ b/fuzzforge-mcp/src/fuzzforge_mcp/resources/__init__.py
@@ -2,14 +2,12 @@
from fastmcp import FastMCP
-from fuzzforge_mcp.resources import executions, modules, project, workflows
+from fuzzforge_mcp.resources import executions, project
mcp: FastMCP = FastMCP()
mcp.mount(executions.mcp)
-mcp.mount(modules.mcp)
mcp.mount(project.mcp)
-mcp.mount(workflows.mcp)
__all__ = [
"mcp",
diff --git a/fuzzforge-mcp/src/fuzzforge_mcp/resources/executions.py b/fuzzforge-mcp/src/fuzzforge_mcp/resources/executions.py
index c3d5f31..a720761 100644
--- a/fuzzforge-mcp/src/fuzzforge_mcp/resources/executions.py
+++ b/fuzzforge-mcp/src/fuzzforge_mcp/resources/executions.py
@@ -3,16 +3,12 @@
from __future__ import annotations
from pathlib import Path
-from typing import TYPE_CHECKING, Any
+from typing import Any
from fastmcp import FastMCP
from fastmcp.exceptions import ResourceError
-from fuzzforge_mcp.dependencies import get_project_path, get_runner
-
-if TYPE_CHECKING:
- from fuzzforge_runner import Runner
-
+from fuzzforge_mcp.dependencies import get_project_path, get_storage
mcp: FastMCP = FastMCP()
@@ -26,16 +22,16 @@ async def list_executions() -> list[dict[str, Any]]:
:return: List of execution information dictionaries.
"""
- runner: Runner = get_runner()
+ storage = get_storage()
project_path: Path = get_project_path()
try:
- execution_ids = runner.list_executions(project_path)
+ execution_ids = storage.list_executions(project_path)
return [
{
"execution_id": exec_id,
- "has_results": runner.get_execution_results(project_path, exec_id) is not None,
+ "has_results": storage.get_execution_results(project_path, exec_id) is not None,
}
for exec_id in execution_ids
]
@@ -53,11 +49,11 @@ async def get_execution(execution_id: str) -> dict[str, Any]:
:return: Execution information dictionary.
"""
- runner: Runner = get_runner()
+ storage = get_storage()
project_path: Path = get_project_path()
try:
- results_path = runner.get_execution_results(project_path, execution_id)
+ results_path = storage.get_execution_results(project_path, execution_id)
if results_path is None:
raise ResourceError(f"Execution not found: {execution_id}")
diff --git a/fuzzforge-mcp/src/fuzzforge_mcp/resources/modules.py b/fuzzforge-mcp/src/fuzzforge_mcp/resources/modules.py
deleted file mode 100644
index a551ccd..0000000
--- a/fuzzforge-mcp/src/fuzzforge_mcp/resources/modules.py
+++ /dev/null
@@ -1,78 +0,0 @@
-"""Module resources for FuzzForge MCP."""
-
-from __future__ import annotations
-
-from typing import TYPE_CHECKING, Any
-
-from fastmcp import FastMCP
-from fastmcp.exceptions import ResourceError
-
-from fuzzforge_mcp.dependencies import get_runner
-
-if TYPE_CHECKING:
- from fuzzforge_runner import Runner
- from fuzzforge_runner.runner import ModuleInfo
-
-
-mcp: FastMCP = FastMCP()
-
-
-@mcp.resource("fuzzforge://modules/")
-async def list_modules() -> list[dict[str, Any]]:
- """List all available FuzzForge modules.
-
- Returns information about modules that can be executed,
- including their identifiers and availability status.
-
- :return: List of module information dictionaries.
-
- """
- runner: Runner = get_runner()
-
- try:
- modules: list[ModuleInfo] = runner.list_modules()
-
- return [
- {
- "identifier": module.identifier,
- "description": module.description,
- "version": module.version,
- "available": module.available,
- }
- for module in modules
- ]
-
- except Exception as exception:
- message: str = f"Failed to list modules: {exception}"
- raise ResourceError(message) from exception
-
-
-@mcp.resource("fuzzforge://modules/{module_identifier}")
-async def get_module(module_identifier: str) -> dict[str, Any]:
- """Get information about a specific module.
-
- :param module_identifier: The identifier of the module to retrieve.
- :return: Module information dictionary.
-
- """
- runner: Runner = get_runner()
-
- try:
- module: ModuleInfo | None = runner.get_module_info(module_identifier)
-
- if module is None:
- raise ResourceError(f"Module not found: {module_identifier}")
-
- return {
- "identifier": module.identifier,
- "description": module.description,
- "version": module.version,
- "available": module.available,
- }
-
- except ResourceError:
- raise
- except Exception as exception:
- message: str = f"Failed to get module: {exception}"
- raise ResourceError(message) from exception
-
diff --git a/fuzzforge-mcp/src/fuzzforge_mcp/resources/project.py b/fuzzforge-mcp/src/fuzzforge_mcp/resources/project.py
index 566ba0f..ee9717a 100644
--- a/fuzzforge-mcp/src/fuzzforge_mcp/resources/project.py
+++ b/fuzzforge-mcp/src/fuzzforge_mcp/resources/project.py
@@ -3,16 +3,12 @@
from __future__ import annotations
from pathlib import Path
-from typing import TYPE_CHECKING, Any
+from typing import Any
from fastmcp import FastMCP
from fastmcp.exceptions import ResourceError
-from fuzzforge_mcp.dependencies import get_project_path, get_runner
-
-if TYPE_CHECKING:
- from fuzzforge_runner import Runner
-
+from fuzzforge_mcp.dependencies import get_project_path, get_settings, get_storage
mcp: FastMCP = FastMCP()
@@ -27,12 +23,12 @@ async def get_project() -> dict[str, Any]:
:return: Project information dictionary.
"""
- runner: Runner = get_runner()
+ storage = get_storage()
project_path: Path = get_project_path()
try:
- executions = runner.list_executions(project_path)
- assets_path = runner.storage.get_project_assets_path(project_path)
+ executions = storage.list_executions(project_path)
+ assets_path = storage.get_project_assets_path(project_path)
return {
"path": str(project_path),
@@ -40,7 +36,7 @@ async def get_project() -> dict[str, Any]:
"has_assets": assets_path is not None,
"assets_path": str(assets_path) if assets_path else None,
"execution_count": len(executions),
- "recent_executions": executions[:10], # Last 10 executions
+ "recent_executions": executions[:10],
}
except Exception as exception:
@@ -53,13 +49,11 @@ async def get_project_settings() -> dict[str, Any]:
"""Get current FuzzForge settings.
Returns the active configuration for the MCP server including
- engine, storage, and project settings.
+ engine, storage, and hub settings.
:return: Settings dictionary.
"""
- from fuzzforge_mcp.dependencies import get_settings
-
try:
settings = get_settings()
@@ -71,9 +65,10 @@ async def get_project_settings() -> dict[str, Any]:
"storage": {
"path": str(settings.storage.path),
},
- "project": {
- "path": str(settings.project.path),
- "modules_path": str(settings.modules_path),
+ "hub": {
+ "enabled": settings.hub.enabled,
+ "config_path": str(settings.hub.config_path),
+ "timeout": settings.hub.timeout,
},
"debug": settings.debug,
}
diff --git a/fuzzforge-mcp/src/fuzzforge_mcp/resources/workflows.py b/fuzzforge-mcp/src/fuzzforge_mcp/resources/workflows.py
deleted file mode 100644
index 968dce9..0000000
--- a/fuzzforge-mcp/src/fuzzforge_mcp/resources/workflows.py
+++ /dev/null
@@ -1,53 +0,0 @@
-"""Workflow resources for FuzzForge MCP.
-
-Note: In FuzzForge OSS, workflows are defined at runtime rather than
-stored. This resource provides documentation about workflow capabilities.
-
-"""
-
-from __future__ import annotations
-
-from typing import Any
-
-from fastmcp import FastMCP
-
-
-mcp: FastMCP = FastMCP()
-
-
-@mcp.resource("fuzzforge://workflows/help")
-async def get_workflow_help() -> dict[str, Any]:
- """Get help information about creating workflows.
-
- Workflows in FuzzForge OSS are defined at execution time rather
- than stored. Use the execute_workflow tool with step definitions.
-
- :return: Workflow documentation.
-
- """
- return {
- "description": "Workflows chain multiple modules together",
- "usage": "Use the execute_workflow tool with step definitions",
- "example": {
- "workflow_name": "security-audit",
- "steps": [
- {
- "module": "compile-contracts",
- "configuration": {"solc_version": "0.8.0"},
- },
- {
- "module": "slither",
- "configuration": {},
- },
- {
- "module": "echidna",
- "configuration": {"test_limit": 10000},
- },
- ],
- },
- "step_format": {
- "module": "Module identifier (required)",
- "configuration": "Module-specific configuration (optional)",
- "name": "Step name for logging (optional)",
- },
- }
diff --git a/fuzzforge-mcp/src/fuzzforge_mcp/settings.py b/fuzzforge-mcp/src/fuzzforge_mcp/settings.py
new file mode 100644
index 0000000..a2d6bbd
--- /dev/null
+++ b/fuzzforge-mcp/src/fuzzforge_mcp/settings.py
@@ -0,0 +1,113 @@
+"""FuzzForge MCP Server settings.
+
+Standalone settings for the MCP server. Replaces the previous dependency
+on fuzzforge-runner Settings now that the module system has been removed
+and FuzzForge operates exclusively through MCP hub tools.
+
+All settings can be configured via environment variables with the prefix
+``FUZZFORGE_``. Nested settings use double-underscore as delimiter.
+
+Example:
+ ``FUZZFORGE_ENGINE__TYPE=docker``
+ ``FUZZFORGE_STORAGE__PATH=/data/fuzzforge``
+ ``FUZZFORGE_HUB__CONFIG_PATH=/path/to/hub-config.json``
+
+"""
+
+from __future__ import annotations
+
+from enum import StrEnum
+from pathlib import Path
+
+from pydantic import BaseModel, Field
+from pydantic_settings import BaseSettings, SettingsConfigDict
+
+
+class EngineType(StrEnum):
+ """Supported container engine types."""
+
+ DOCKER = "docker"
+ PODMAN = "podman"
+
+
+class EngineSettings(BaseModel):
+ """Container engine configuration."""
+
+ #: Type of container engine to use.
+ type: EngineType = EngineType.DOCKER
+
+ #: Path to the container engine socket.
+ socket: str = Field(default="")
+
+ #: Custom graph root for Podman storage.
+ graphroot: Path = Field(default=Path.home() / ".fuzzforge" / "containers" / "storage")
+
+ #: Custom run root for Podman runtime state.
+ runroot: Path = Field(default=Path.home() / ".fuzzforge" / "containers" / "run")
+
+
+class StorageSettings(BaseModel):
+ """Storage configuration for local filesystem storage."""
+
+ #: Base path for local storage.
+ path: Path = Field(default=Path.home() / ".fuzzforge" / "storage")
+
+
+class ProjectSettings(BaseModel):
+ """Project configuration."""
+
+ #: Default path for FuzzForge projects.
+ default_path: Path = Field(default=Path.home() / ".fuzzforge" / "projects")
+
+
+class HubSettings(BaseModel):
+ """MCP Hub configuration for external tool servers.
+
+ Controls the hub that bridges FuzzForge with external MCP servers
+ (e.g., mcp-security-hub). AI agents discover and execute tools
+ from registered MCP servers.
+
+ Configure via environment variables:
+ ``FUZZFORGE_HUB__ENABLED=true``
+ ``FUZZFORGE_HUB__CONFIG_PATH=/path/to/hub-config.json``
+ ``FUZZFORGE_HUB__TIMEOUT=300``
+ """
+
+ #: Whether the MCP hub is enabled.
+ enabled: bool = Field(default=True)
+
+ #: Path to the hub configuration JSON file.
+ config_path: Path = Field(default=Path.home() / ".fuzzforge" / "hub-config.json")
+
+ #: Default timeout in seconds for hub tool execution.
+ timeout: int = Field(default=300)
+
+
+class Settings(BaseSettings):
+ """FuzzForge MCP Server settings.
+
+ Settings can be configured via environment variables with the prefix
+ ``FUZZFORGE_``. Nested settings use double-underscore as delimiter.
+
+ """
+
+ model_config = SettingsConfigDict(
+ case_sensitive=False,
+ env_nested_delimiter="__",
+ env_prefix="FUZZFORGE_",
+ )
+
+ #: Container engine settings.
+ engine: EngineSettings = Field(default_factory=EngineSettings)
+
+ #: Storage settings.
+ storage: StorageSettings = Field(default_factory=StorageSettings)
+
+ #: Project settings.
+ project: ProjectSettings = Field(default_factory=ProjectSettings)
+
+ #: MCP Hub settings.
+ hub: HubSettings = Field(default_factory=HubSettings)
+
+ #: Enable debug logging.
+ debug: bool = False
diff --git a/fuzzforge-mcp/src/fuzzforge_mcp/storage.py b/fuzzforge-mcp/src/fuzzforge_mcp/storage.py
new file mode 100644
index 0000000..d4228d1
--- /dev/null
+++ b/fuzzforge-mcp/src/fuzzforge_mcp/storage.py
@@ -0,0 +1,203 @@
+"""FuzzForge MCP Server - Local project storage.
+
+Lightweight project storage for managing `.fuzzforge/` directories,
+execution results, and project configuration. Extracted from the
+former fuzzforge-runner storage module.
+
+Storage is placed directly in the project directory as `.fuzzforge/`
+for maximum visibility and ease of debugging.
+
+"""
+
+from __future__ import annotations
+
+import json
+import logging
+from pathlib import Path
+from tarfile import open as Archive # noqa: N812
+from typing import Any
+
+logger = logging.getLogger("fuzzforge-mcp")
+
+#: Name of the FuzzForge storage directory within projects.
+FUZZFORGE_DIR_NAME: str = ".fuzzforge"
+
+#: Standard results archive filename.
+RESULTS_ARCHIVE_FILENAME: str = "results.tar.gz"
+
+
+class StorageError(Exception):
+ """Raised when a storage operation fails."""
+
+
+class LocalStorage:
+ """Local filesystem storage backend for FuzzForge.
+
+ Provides lightweight storage for project configuration and
+ execution results tracking.
+
+ Directory structure (inside project directory)::
+
+ {project_path}/.fuzzforge/
+ config.json # Project config (source path reference)
+ runs/ # Execution results
+ {execution_id}/
+ results.tar.gz
+
+ """
+
+ _base_path: Path
+
+ def __init__(self, base_path: Path) -> None:
+ """Initialize storage backend.
+
+ :param base_path: Root directory for global storage (fallback).
+
+ """
+ self._base_path = base_path
+ self._base_path.mkdir(parents=True, exist_ok=True)
+
+ def _get_project_path(self, project_path: Path) -> Path:
+ """Get the .fuzzforge storage path for a project.
+
+ :param project_path: Path to the project directory.
+ :returns: Storage path (.fuzzforge inside project).
+
+ """
+ return project_path / FUZZFORGE_DIR_NAME
+
+ def init_project(self, project_path: Path) -> Path:
+ """Initialize storage for a new project.
+
+ Creates a .fuzzforge/ directory inside the project for storing
+ configuration and execution results.
+
+ :param project_path: Path to the project directory.
+ :returns: Path to the project storage directory.
+
+ """
+ storage_path = self._get_project_path(project_path)
+ storage_path.mkdir(parents=True, exist_ok=True)
+ (storage_path / "runs").mkdir(parents=True, exist_ok=True)
+
+ # Create .gitignore to avoid committing large files
+ gitignore_path = storage_path / ".gitignore"
+ if not gitignore_path.exists():
+ gitignore_path.write_text(
+ "# FuzzForge storage - ignore large/temporary files\n"
+ "runs/\n"
+ "!config.json\n"
+ )
+
+ logger.info("Initialized project storage: %s", storage_path)
+ return storage_path
+
+ def get_project_assets_path(self, project_path: Path) -> Path | None:
+ """Get the configured source path for a project.
+
+ :param project_path: Path to the project directory.
+ :returns: Path to source directory, or None if not configured.
+
+ """
+ storage_path = self._get_project_path(project_path)
+ config_path = storage_path / "config.json"
+
+ if config_path.exists():
+ config = json.loads(config_path.read_text())
+ source_path = config.get("source_path")
+ if source_path:
+ path = Path(source_path)
+ if path.exists():
+ return path
+
+ return None
+
+ def set_project_assets(self, project_path: Path, assets_path: Path) -> Path:
+ """Set the source path for a project (reference only, no copying).
+
+ :param project_path: Path to the project directory.
+ :param assets_path: Path to source directory.
+ :returns: The assets path (unchanged).
+ :raises StorageError: If path doesn't exist.
+
+ """
+ if not assets_path.exists():
+ msg = f"Assets path does not exist: {assets_path}"
+ raise StorageError(msg)
+
+ assets_path = assets_path.resolve()
+
+ storage_path = self._get_project_path(project_path)
+ storage_path.mkdir(parents=True, exist_ok=True)
+ config_path = storage_path / "config.json"
+
+ config: dict[str, Any] = {}
+ if config_path.exists():
+ config = json.loads(config_path.read_text())
+
+ config["source_path"] = str(assets_path)
+ config_path.write_text(json.dumps(config, indent=2))
+
+ logger.info("Set project assets: %s -> %s", project_path.name, assets_path)
+ return assets_path
+
+ def list_executions(self, project_path: Path) -> list[str]:
+ """List all execution IDs for a project.
+
+ :param project_path: Path to the project directory.
+ :returns: List of execution IDs.
+
+ """
+ runs_dir = self._get_project_path(project_path) / "runs"
+ if not runs_dir.exists():
+ return []
+ return [d.name for d in runs_dir.iterdir() if d.is_dir()]
+
+ def get_execution_results(
+ self,
+ project_path: Path,
+ execution_id: str,
+ ) -> Path | None:
+ """Retrieve execution results path.
+
+ :param project_path: Path to the project directory.
+ :param execution_id: Execution ID.
+ :returns: Path to results archive, or None if not found.
+
+ """
+ storage_path = self._get_project_path(project_path)
+
+ # Try direct path
+ results_path = storage_path / "runs" / execution_id / RESULTS_ARCHIVE_FILENAME
+ if results_path.exists():
+ return results_path
+
+ # Search in all run directories
+ runs_dir = storage_path / "runs"
+ if runs_dir.exists():
+ for run_dir in runs_dir.iterdir():
+ if run_dir.is_dir() and execution_id in run_dir.name:
+ candidate = run_dir / RESULTS_ARCHIVE_FILENAME
+ if candidate.exists():
+ return candidate
+
+ return None
+
+ def extract_results(self, results_path: Path, destination: Path) -> Path:
+ """Extract a results archive to a destination directory.
+
+ :param results_path: Path to the results archive.
+ :param destination: Directory to extract to.
+ :returns: Path to extracted directory.
+ :raises StorageError: If extraction fails.
+
+ """
+ try:
+ destination.mkdir(parents=True, exist_ok=True)
+ with Archive(results_path, "r:gz") as tar:
+                tar.extractall(path=destination, filter="data")  # "data" filter blocks path traversal (requires Python >= 3.12; project pins >= 3.14)
+ logger.info("Extracted results: %s -> %s", results_path, destination)
+ return destination
+ except Exception as exc:
+ msg = f"Failed to extract results: {exc}"
+ raise StorageError(msg) from exc
diff --git a/fuzzforge-mcp/src/fuzzforge_mcp/tools/__init__.py b/fuzzforge-mcp/src/fuzzforge_mcp/tools/__init__.py
index 3a9edc9..fc339f9 100644
--- a/fuzzforge-mcp/src/fuzzforge_mcp/tools/__init__.py
+++ b/fuzzforge-mcp/src/fuzzforge_mcp/tools/__init__.py
@@ -2,13 +2,12 @@
from fastmcp import FastMCP
-from fuzzforge_mcp.tools import modules, projects, workflows
+from fuzzforge_mcp.tools import hub, projects
mcp: FastMCP = FastMCP()
-mcp.mount(modules.mcp)
mcp.mount(projects.mcp)
-mcp.mount(workflows.mcp)
+mcp.mount(hub.mcp)
__all__ = [
"mcp",
diff --git a/fuzzforge-mcp/src/fuzzforge_mcp/tools/hub.py b/fuzzforge-mcp/src/fuzzforge_mcp/tools/hub.py
new file mode 100644
index 0000000..33d724a
--- /dev/null
+++ b/fuzzforge-mcp/src/fuzzforge_mcp/tools/hub.py
@@ -0,0 +1,601 @@
+"""MCP Hub tools for FuzzForge MCP server.
+
+This module provides tools for interacting with external MCP servers
+through the FuzzForge hub. AI agents can:
+- List available hub servers and their tools
+- Discover tools from hub servers
+- Execute hub tools
+
+"""
+
+from __future__ import annotations
+
+from typing import Any
+
+from fastmcp import FastMCP
+from fastmcp.exceptions import ToolError
+from fuzzforge_common.hub import HubExecutor, HubServerConfig, HubServerType
+
+from fuzzforge_mcp.dependencies import get_project_path, get_settings, get_storage
+
+mcp: FastMCP = FastMCP()
+
+# Global hub executor instance (lazy initialization)
+_hub_executor: HubExecutor | None = None
+
+
+def _get_hub_executor() -> HubExecutor:
+ """Get or create the hub executor instance.
+
+ :returns: Hub executor instance.
+ :raises ToolError: If hub is disabled.
+
+ """
+ global _hub_executor
+
+ settings = get_settings()
+
+ if not settings.hub.enabled:
+ msg = "MCP Hub is disabled. Enable it via FUZZFORGE_HUB__ENABLED=true"
+ raise ToolError(msg)
+
+ if _hub_executor is None:
+ config_path = settings.hub.config_path
+ _hub_executor = HubExecutor(
+ config_path=config_path,
+ timeout=settings.hub.timeout,
+ )
+
+ return _hub_executor
+
+
+@mcp.tool
+async def list_hub_servers() -> dict[str, Any]:
+ """List all registered MCP hub servers.
+
+ Returns information about configured hub servers, including
+ their connection type, status, and discovered tool count.
+
+ :return: Dictionary with list of hub servers.
+
+ """
+ try:
+ executor = _get_hub_executor()
+ servers = executor.list_servers()
+
+ return {
+ "servers": servers,
+ "count": len(servers),
+ "enabled_count": len([s for s in servers if s["enabled"]]),
+ }
+
+ except Exception as e:
+ if isinstance(e, ToolError):
+ raise
+ msg = f"Failed to list hub servers: {e}"
+ raise ToolError(msg) from e
+
+
+@mcp.tool
+async def discover_hub_tools(server_name: str | None = None) -> dict[str, Any]:
+ """Discover tools from hub servers.
+
+ Connects to hub servers and retrieves their available tools.
+ If server_name is provided, only discovers from that server.
+ Otherwise discovers from all enabled servers.
+
+ :param server_name: Optional specific server to discover from.
+ :return: Dictionary with discovered tools.
+
+ """
+ try:
+ executor = _get_hub_executor()
+
+ if server_name:
+ tools = await executor.discover_server_tools(server_name)
+ return {
+ "server": server_name,
+ "tools": [
+ {
+ "identifier": t.identifier,
+ "name": t.name,
+ "description": t.description,
+ "parameters": [p.model_dump() for p in t.parameters],
+ }
+ for t in tools
+ ],
+ "count": len(tools),
+ }
+ else:
+ results = await executor.discover_all_tools()
+ all_tools = []
+ for server, tools in results.items():
+ for tool in tools:
+ all_tools.append({
+ "identifier": tool.identifier,
+ "name": tool.name,
+ "server": server,
+ "description": tool.description,
+ "parameters": [p.model_dump() for p in tool.parameters],
+ })
+
+ return {
+ "servers_discovered": len(results),
+ "tools": all_tools,
+ "count": len(all_tools),
+ }
+
+ except Exception as e:
+ if isinstance(e, ToolError):
+ raise
+ msg = f"Failed to discover hub tools: {e}"
+ raise ToolError(msg) from e
+
+
+@mcp.tool
+async def list_hub_tools() -> dict[str, Any]:
+ """List all discovered hub tools.
+
+ Returns tools that have been previously discovered from hub servers.
+ Run discover_hub_tools first if no tools are listed.
+
+ :return: Dictionary with list of discovered tools.
+
+ """
+ try:
+ executor = _get_hub_executor()
+ tools = executor.list_tools()
+
+ return {
+ "tools": tools,
+ "count": len(tools),
+ }
+
+ except Exception as e:
+ if isinstance(e, ToolError):
+ raise
+ msg = f"Failed to list hub tools: {e}"
+ raise ToolError(msg) from e
+
+
+@mcp.tool
+async def execute_hub_tool(
+ identifier: str,
+ arguments: dict[str, Any] | None = None,
+ timeout: int | None = None,
+) -> dict[str, Any]:
+ """Execute a tool from a hub server.
+
+ :param identifier: Tool identifier (format: hub:server:tool or server:tool).
+ :param arguments: Tool arguments matching the tool's input schema.
+ :param timeout: Optional execution timeout in seconds.
+ :return: Tool execution result.
+
+ Example identifiers:
+ - "hub:binwalk-mcp:binwalk_scan"
+ - "hub:yara-mcp:yara_scan_with_rules"
+ - "hub:nmap:nmap_scan"
+
+ FILE ACCESS — if set_project_assets was called, the assets directory is
+ mounted read-only inside the container at two standard paths:
+ - /app/uploads/ (used by binwalk, and tools with UPLOAD_DIR)
+ - /app/samples/ (used by yara, capa, and tools with SAMPLES_DIR)
+ Always use /app/uploads/ or /app/samples/ when
+ passing file paths to hub tools — do NOT use the host path.
+
+ """
+ try:
+ executor = _get_hub_executor()
+
+ # Inject project assets as Docker volume mounts if configured.
+ # Mounts the assets directory at the standard paths used by hub tools:
+ # /app/uploads — binwalk, and other tools that use UPLOAD_DIR
+ # /app/samples — yara, capa, and other tools that use SAMPLES_DIR
+ extra_volumes: list[str] = []
+ try:
+ storage = get_storage()
+ project_path = get_project_path()
+ assets_path = storage.get_project_assets_path(project_path)
+ if assets_path:
+ assets_str = str(assets_path)
+ extra_volumes = [
+ f"{assets_str}:/app/uploads:ro",
+ f"{assets_str}:/app/samples:ro",
+ ]
+ except Exception: # noqa: BLE001 - never block tool execution due to asset injection failure
+ extra_volumes = []
+
+ result = await executor.execute_tool(
+ identifier=identifier,
+ arguments=arguments or {},
+ timeout=timeout,
+ extra_volumes=extra_volumes or None,
+ )
+
+ return result.to_dict()
+
+ except Exception as e:
+ if isinstance(e, ToolError):
+ raise
+ msg = f"Hub tool execution failed: {e}"
+ raise ToolError(msg) from e
+
+
+@mcp.tool
+async def get_hub_tool_schema(identifier: str) -> dict[str, Any]:
+ """Get the input schema for a hub tool.
+
+ Returns the JSON Schema that describes the tool's expected arguments.
+
+ :param identifier: Tool identifier (format: hub:server:tool or server:tool).
+ :return: JSON Schema for the tool's input.
+
+ """
+ try:
+ executor = _get_hub_executor()
+ schema = executor.get_tool_schema(identifier)
+
+ if schema is None:
+ msg = f"Tool '{identifier}' not found. Run discover_hub_tools first."
+ raise ToolError(msg)
+
+ return {
+ "identifier": identifier,
+ "schema": schema,
+ }
+
+ except Exception as e:
+ if isinstance(e, ToolError):
+ raise
+ msg = f"Failed to get tool schema: {e}"
+ raise ToolError(msg) from e
+
+
+@mcp.tool
+async def add_hub_server(
+ name: str,
+ server_type: str,
+ image: str | None = None,
+ command: list[str] | None = None,
+ url: str | None = None,
+ category: str | None = None,
+ description: str | None = None,
+ capabilities: list[str] | None = None,
+ environment: dict[str, str] | None = None,
+) -> dict[str, Any]:
+ """Add a new MCP server to the hub.
+
+ Register a new external MCP server that can be used for tool discovery
+ and execution. Servers can be Docker images, local commands, or SSE endpoints.
+
+ :param name: Unique name for the server (e.g., "nmap", "nuclei").
+ :param server_type: Connection type ("docker", "command", or "sse").
+ :param image: Docker image name (for docker type).
+ :param command: Command and args (for command type).
+ :param url: SSE endpoint URL (for sse type).
+ :param category: Category for grouping (e.g., "reconnaissance").
+ :param description: Human-readable description.
+ :param capabilities: Docker capabilities to add (e.g., ["NET_RAW"]).
+ :param environment: Environment variables to pass.
+ :return: Information about the added server.
+
+ Examples:
+ - Docker: add_hub_server("nmap", "docker", image="nmap-mcp:latest", capabilities=["NET_RAW"])
+ - Command: add_hub_server("custom", "command", command=["python", "server.py"])
+
+ """
+ try:
+ executor = _get_hub_executor()
+
+ # Parse server type
+ try:
+ stype = HubServerType(server_type)
+ except ValueError:
+ msg = f"Invalid server type: {server_type}. Use 'docker', 'command', or 'sse'."
+ raise ToolError(msg) from None
+
+ # Validate required fields based on type
+ if stype == HubServerType.DOCKER and not image:
+ msg = "Docker image required for docker type"
+ raise ToolError(msg)
+ if stype == HubServerType.COMMAND and not command:
+ msg = "Command required for command type"
+ raise ToolError(msg)
+ if stype == HubServerType.SSE and not url:
+ msg = "URL required for sse type"
+ raise ToolError(msg)
+
+ config = HubServerConfig(
+ name=name,
+ type=stype,
+ image=image,
+ command=command,
+ url=url,
+ category=category,
+ description=description,
+ capabilities=capabilities or [],
+ environment=environment or {},
+ )
+
+ server = executor.add_server(config)
+
+ return {
+ "success": True,
+ "server": {
+ "name": server.name,
+ "identifier": server.identifier,
+ "type": server.config.type.value,
+ "enabled": server.config.enabled,
+ },
+ "message": f"Server '{name}' added. Use discover_hub_tools('{name}') to discover its tools.",
+ }
+
+ except ValueError as e:
+ msg = f"Failed to add server: {e}"
+ raise ToolError(msg) from e
+ except Exception as e:
+ if isinstance(e, ToolError):
+ raise
+ msg = f"Failed to add hub server: {e}"
+ raise ToolError(msg) from e
+
+
+@mcp.tool
+async def start_hub_server(server_name: str) -> dict[str, Any]:
+ """Start a persistent container session for a hub server.
+
+ Starts a Docker container that stays running between tool calls,
+ allowing stateful interactions. Tools are auto-discovered on start.
+
+ Use this for servers like radare2 or ghidra where you want to
+ keep an analysis session open across multiple tool calls.
+
+ After starting, use execute_hub_tool as normal - calls will be
+ routed to the persistent container automatically.
+
+ :param server_name: Name of the hub server to start (e.g., "radare2-mcp").
+ :return: Session status with container name and start time.
+
+ """
+ try:
+ executor = _get_hub_executor()
+
+ # Inject project assets as Docker volume mounts (same logic as execute_hub_tool).
+ extra_volumes: list[str] = []
+ try:
+ storage = get_storage()
+ project_path = get_project_path()
+ assets_path = storage.get_project_assets_path(project_path)
+ if assets_path:
+ assets_str = str(assets_path)
+ extra_volumes = [
+ f"{assets_str}:/app/uploads:ro",
+ f"{assets_str}:/app/samples:ro",
+ ]
+ except Exception: # noqa: BLE001 - never block server start due to asset injection failure
+ extra_volumes = []
+
+ result = await executor.start_persistent_server(server_name, extra_volumes=extra_volumes or None)
+
+ return {
+ "success": True,
+ "session": result,
+ "tools": result.get("tools", []),
+ "tool_count": result.get("tool_count", 0),
+ "message": (
+ f"Persistent session started for '{server_name}'. "
+ f"Discovered {result.get('tool_count', 0)} tools. "
+ "Use execute_hub_tool to call them — they will reuse this container. "
+ f"Stop with stop_hub_server('{server_name}') when done."
+ ),
+ }
+
+ except ValueError as e:
+ msg = f"Server not found: {e}"
+ raise ToolError(msg) from e
+ except Exception as e:
+ if isinstance(e, ToolError):
+ raise
+ msg = f"Failed to start persistent server: {e}"
+ raise ToolError(msg) from e
+
+
+@mcp.tool
+async def stop_hub_server(server_name: str) -> dict[str, Any]:
+ """Stop a persistent container session for a hub server.
+
+ Terminates the running Docker container and cleans up resources.
+ After stopping, tool calls will fall back to ephemeral mode
+ (a new container per call).
+
+ :param server_name: Name of the hub server to stop.
+ :return: Result indicating if the session was stopped.
+
+ """
+ try:
+ executor = _get_hub_executor()
+
+ stopped = await executor.stop_persistent_server(server_name)
+
+ if stopped:
+ return {
+ "success": True,
+ "message": f"Persistent session for '{server_name}' stopped and container removed.",
+ }
+ else:
+ return {
+ "success": False,
+ "message": f"No active persistent session found for '{server_name}'.",
+ }
+
+ except Exception as e:
+ if isinstance(e, ToolError):
+ raise
+ msg = f"Failed to stop persistent server: {e}"
+ raise ToolError(msg) from e
+
+
+@mcp.tool
+async def hub_server_status(server_name: str | None = None) -> dict[str, Any]:
+ """Get status of persistent hub server sessions.
+
+ If server_name is provided, returns status for that specific server.
+ Otherwise returns status for all active persistent sessions.
+
+ :param server_name: Optional specific server to check.
+ :return: Session status information.
+
+ """
+ try:
+ executor = _get_hub_executor()
+
+ if server_name:
+ status = executor.get_persistent_status(server_name)
+ if status:
+ return {"active": True, "session": status}
+ else:
+ return {
+ "active": False,
+ "message": f"No active persistent session for '{server_name}'.",
+ }
+ else:
+ sessions = executor.list_persistent_sessions()
+ return {
+ "active_sessions": sessions,
+ "count": len(sessions),
+ }
+
+ except Exception as e:
+ if isinstance(e, ToolError):
+ raise
+ msg = f"Failed to get server status: {e}"
+ raise ToolError(msg) from e
+
+
+# ------------------------------------------------------------------
+# Continuous mode tools
+# ------------------------------------------------------------------
+
+
+@mcp.tool
+async def start_continuous_hub_tool(
+ server_name: str,
+ start_tool: str,
+ arguments: dict[str, Any] | None = None,
+) -> dict[str, Any]:
+ """Start a continuous/background tool on a hub server.
+
+ Automatically starts a persistent container if not already running,
+ then calls the server's start tool (e.g., cargo_fuzz_start) which
+ launches a background process and returns a session_id.
+
+ The tool runs indefinitely until stopped with stop_continuous_hub_tool.
+ Use get_continuous_hub_status to monitor progress.
+
+ Example workflow for continuous cargo fuzzing:
+ 1. start_continuous_hub_tool("cargo-fuzzer-mcp", "cargo_fuzz_start", {"project_path": "/data/myproject"})
+ 2. get_continuous_hub_status(session_id) -- poll every 10-30s
+ 3. stop_continuous_hub_tool(session_id) -- when done
+
+ :param server_name: Hub server name (e.g., "cargo-fuzzer-mcp").
+ :param start_tool: Name of the start tool on the server.
+ :param arguments: Arguments for the start tool.
+ :return: Start result including session_id for monitoring.
+
+ """
+ try:
+ executor = _get_hub_executor()
+
+ result = await executor.start_continuous_tool(
+ server_name=server_name,
+ start_tool=start_tool,
+ arguments=arguments or {},
+ )
+
+ # Return the server's response directly — it already contains
+ # session_id, status, targets, and a message.
+ return result
+
+ except ValueError as e:
+ msg = f"Server not found: {e}"
+ raise ToolError(msg) from e
+ except Exception as e:
+ if isinstance(e, ToolError):
+ raise
+ msg = f"Failed to start continuous tool: {e}"
+ raise ToolError(msg) from e
+
+
+@mcp.tool
+async def get_continuous_hub_status(session_id: str) -> dict[str, Any]:
+ """Get live status of a continuous hub tool session.
+
+ Returns current metrics, progress, and recent output from the
+ running tool. Call periodically (every 10-30 seconds) to monitor.
+
+ :param session_id: Session ID returned by start_continuous_hub_tool.
+ :return: Current status with metrics (executions, coverage, crashes, etc.).
+
+ """
+ try:
+ executor = _get_hub_executor()
+
+ return await executor.get_continuous_tool_status(session_id)
+
+ except ValueError as e:
+ msg = str(e)
+ raise ToolError(msg) from e
+ except Exception as e:
+ if isinstance(e, ToolError):
+ raise
+ msg = f"Failed to get continuous status: {e}"
+ raise ToolError(msg) from e
+
+
+@mcp.tool
+async def stop_continuous_hub_tool(session_id: str) -> dict[str, Any]:
+ """Stop a running continuous hub tool session.
+
+ Gracefully stops the background process and returns final results
+ including total metrics and any artifacts (crash files, etc.).
+
+ :param session_id: Session ID of the session to stop.
+ :return: Final metrics and results summary.
+
+ """
+ try:
+ executor = _get_hub_executor()
+
+ return await executor.stop_continuous_tool(session_id)
+
+ except ValueError as e:
+ msg = str(e)
+ raise ToolError(msg) from e
+ except Exception as e:
+ if isinstance(e, ToolError):
+ raise
+ msg = f"Failed to stop continuous tool: {e}"
+ raise ToolError(msg) from e
+
+
+@mcp.tool
+async def list_continuous_hub_sessions() -> dict[str, Any]:
+ """List all active and recent continuous hub tool sessions.
+
+ :return: List of sessions with their status and server info.
+
+ """
+ try:
+ executor = _get_hub_executor()
+
+ sessions = executor.list_continuous_sessions()
+ return {
+ "sessions": sessions,
+ "count": len(sessions),
+ }
+
+ except Exception as e:
+ if isinstance(e, ToolError):
+ raise
+ msg = f"Failed to list continuous sessions: {e}"
+ raise ToolError(msg) from e
diff --git a/fuzzforge-mcp/src/fuzzforge_mcp/tools/modules.py b/fuzzforge-mcp/src/fuzzforge_mcp/tools/modules.py
deleted file mode 100644
index 567f12e..0000000
--- a/fuzzforge-mcp/src/fuzzforge_mcp/tools/modules.py
+++ /dev/null
@@ -1,373 +0,0 @@
-"""Module tools for FuzzForge MCP."""
-
-from __future__ import annotations
-
-import json
-import uuid
-from datetime import datetime, timezone
-from pathlib import Path
-from typing import TYPE_CHECKING, Any
-
-from fastmcp import FastMCP
-from fastmcp.exceptions import ToolError
-
-from fuzzforge_mcp.dependencies import get_project_path, get_runner, get_settings
-
-if TYPE_CHECKING:
- from fuzzforge_runner import Runner
- from fuzzforge_runner.orchestrator import StepResult
-
-
-mcp: FastMCP = FastMCP()
-
-# Track running background executions
-_background_executions: dict[str, dict[str, Any]] = {}
-
-
-@mcp.tool
-async def list_modules() -> dict[str, Any]:
- """List all available FuzzForge modules.
-
- Returns information about modules that can be executed,
- including their identifiers, availability status, and metadata
- such as use cases, input requirements, and output artifacts.
-
- :return: Dictionary with list of available modules and their details.
-
- """
- try:
- runner: Runner = get_runner()
- settings = get_settings()
-
- # Use the engine abstraction to list images
- # Default filter matches locally-built fuzzforge-* modules
- modules = runner.list_module_images(filter_prefix="fuzzforge-")
-
- available_modules = [
- {
- "identifier": module.identifier,
- "image": f"{module.identifier}:{module.version or 'latest'}",
- "available": module.available,
- "description": module.description,
- # New metadata fields from pyproject.toml
- "category": module.category,
- "language": module.language,
- "pipeline_stage": module.pipeline_stage,
- "pipeline_order": module.pipeline_order,
- "dependencies": module.dependencies,
- "continuous_mode": module.continuous_mode,
- "typical_duration": module.typical_duration,
- # AI-discoverable metadata
- "use_cases": module.use_cases,
- "input_requirements": module.input_requirements,
- "output_artifacts": module.output_artifacts,
- }
- for module in modules
- ]
-
- # Sort by pipeline_order if available
- available_modules.sort(key=lambda m: (m.get("pipeline_order") or 999, m["identifier"]))
-
- return {
- "modules": available_modules,
- "count": len(available_modules),
- "container_engine": settings.engine.type,
- "registry_url": settings.registry.url,
- "registry_tag": settings.registry.default_tag,
- }
-
- except Exception as exception:
- message: str = f"Failed to list modules: {exception}"
- raise ToolError(message) from exception
-
-
-@mcp.tool
-async def execute_module(
- module_identifier: str,
- configuration: dict[str, Any] | None = None,
- assets_path: str | None = None,
-) -> dict[str, Any]:
- """Execute a FuzzForge module in an isolated container.
-
- This tool runs a module in a sandboxed environment.
- The module receives input assets and produces output results.
-
- The response includes `results_path` pointing to the stored results archive.
- Use this path directly to read outputs β no need to call `get_execution_results`.
-
- :param module_identifier: The identifier of the module to execute.
- :param configuration: Optional configuration dict to pass to the module.
- :param assets_path: Optional path to input assets. Use this to pass specific
- inputs to a module (e.g. crash files to crash-analyzer) without changing
- the project's default assets. If not provided, uses project assets.
- :return: Execution result including status and results path.
-
- """
- runner: Runner = get_runner()
- project_path: Path = get_project_path()
-
- try:
- result: StepResult = await runner.execute_module(
- module_identifier=module_identifier,
- project_path=project_path,
- configuration=configuration,
- assets_path=Path(assets_path) if assets_path else None,
- )
-
- return {
- "success": result.success,
- "execution_id": result.execution_id,
- "module": result.module_identifier,
- "results_path": str(result.results_path) if result.results_path else None,
- "started_at": result.started_at.isoformat(),
- "completed_at": result.completed_at.isoformat(),
- "error": result.error,
- }
-
- except Exception as exception:
- message: str = f"Module execution failed: {exception}"
- raise ToolError(message) from exception
-
-
-@mcp.tool
-async def start_continuous_module(
- module_identifier: str,
- configuration: dict[str, Any] | None = None,
- assets_path: str | None = None,
-) -> dict[str, Any]:
- """Start a module in continuous/background mode.
-
- The module will run indefinitely until stopped with stop_continuous_module().
- Use get_continuous_status() to check progress and metrics.
-
- This is useful for long-running modules that should run until
- the user decides to stop them.
-
- :param module_identifier: The module to run.
- :param configuration: Optional configuration. Set max_duration to 0 for infinite.
- :param assets_path: Optional path to input assets.
- :return: Execution info including session_id for monitoring.
-
- """
- runner: Runner = get_runner()
- project_path: Path = get_project_path()
- session_id = str(uuid.uuid4())[:8]
-
- # Set infinite duration if not specified
- if configuration is None:
- configuration = {}
- if "max_duration" not in configuration:
- configuration["max_duration"] = 0 # 0 = infinite
-
- try:
- # Determine assets path
- if assets_path:
- actual_assets_path = Path(assets_path)
- else:
- storage = runner.storage
- actual_assets_path = storage.get_project_assets_path(project_path)
-
- # Use the new non-blocking executor method
- executor = runner._executor
- result = executor.start_module_continuous(
- module_identifier=module_identifier,
- assets_path=actual_assets_path,
- configuration=configuration,
- project_path=project_path,
- execution_id=session_id,
- )
-
- # Store execution info for tracking
- _background_executions[session_id] = {
- "session_id": session_id,
- "module": module_identifier,
- "configuration": configuration,
- "started_at": datetime.now(timezone.utc).isoformat(),
- "status": "running",
- "container_id": result["container_id"],
- "input_dir": result["input_dir"],
- "project_path": str(project_path),
- }
-
- return {
- "success": True,
- "session_id": session_id,
- "module": module_identifier,
- "container_id": result["container_id"],
- "status": "running",
- "message": f"Continuous module started. Use get_continuous_status('{session_id}') to monitor progress.",
- }
-
- except Exception as exception:
- message: str = f"Failed to start continuous module: {exception}"
- raise ToolError(message) from exception
-
-
-def _get_continuous_status_impl(session_id: str) -> dict[str, Any]:
- """Internal helper to get continuous session status (non-tool version)."""
- if session_id not in _background_executions:
- raise ToolError(f"Unknown session: {session_id}. Use list_continuous_sessions() to see active sessions.")
-
- execution = _background_executions[session_id]
- container_id = execution.get("container_id")
-
- # Initialize metrics
- metrics: dict[str, Any] = {
- "total_executions": 0,
- "total_crashes": 0,
- "exec_per_sec": 0,
- "coverage": 0,
- "current_target": "",
- "latest_events": [],
- }
-
- # Read stream.jsonl from inside the running container
- if container_id:
- try:
- runner: Runner = get_runner()
- executor = runner._executor
-
- # Check container status first
- container_status = executor.get_module_status(container_id)
- if container_status != "running":
- execution["status"] = "stopped" if container_status == "exited" else container_status
-
- # Read stream.jsonl from container
- stream_content = executor.read_module_output(container_id, "/data/output/stream.jsonl")
-
- if stream_content:
- lines = stream_content.strip().split("\n")
- # Get last 20 events
- recent_lines = lines[-20:] if len(lines) > 20 else lines
- crash_count = 0
-
- for line in recent_lines:
- try:
- event = json.loads(line)
- metrics["latest_events"].append(event)
-
- # Extract metrics from events
- if event.get("event") == "metrics":
- metrics["total_executions"] = event.get("executions", 0)
- metrics["current_target"] = event.get("target", "")
- metrics["exec_per_sec"] = event.get("exec_per_sec", 0)
- metrics["coverage"] = event.get("coverage", 0)
-
- if event.get("event") == "crash_detected":
- crash_count += 1
-
- except json.JSONDecodeError:
- continue
-
- metrics["total_crashes"] = crash_count
-
- except Exception as e:
- metrics["error"] = str(e)
-
- # Calculate elapsed time
- started_at = execution.get("started_at", "")
- elapsed_seconds = 0
- if started_at:
- try:
- start_time = datetime.fromisoformat(started_at)
- elapsed_seconds = int((datetime.now(timezone.utc) - start_time).total_seconds())
- except Exception:
- pass
-
- return {
- "session_id": session_id,
- "module": execution.get("module"),
- "status": execution.get("status"),
- "container_id": container_id,
- "started_at": started_at,
- "elapsed_seconds": elapsed_seconds,
- "elapsed_human": f"{elapsed_seconds // 60}m {elapsed_seconds % 60}s",
- "metrics": metrics,
- }
-
-
-@mcp.tool
-async def get_continuous_status(session_id: str) -> dict[str, Any]:
- """Get the current status and metrics of a running continuous session.
-
- Call this periodically (e.g., every 30 seconds) to get live updates
- on progress and metrics.
-
- :param session_id: The session ID returned by start_continuous_module().
- :return: Current status, metrics, and any events found.
-
- """
- return _get_continuous_status_impl(session_id)
-
-
-@mcp.tool
-async def stop_continuous_module(session_id: str) -> dict[str, Any]:
- """Stop a running continuous session.
-
- This will gracefully stop the module and collect any results.
-
- :param session_id: The session ID of the session to stop.
- :return: Final status and summary of the session.
-
- """
- if session_id not in _background_executions:
- raise ToolError(f"Unknown session: {session_id}")
-
- execution = _background_executions[session_id]
- container_id = execution.get("container_id")
- input_dir = execution.get("input_dir")
-
- try:
- # Get final metrics before stopping (use helper, not the tool)
- final_metrics = _get_continuous_status_impl(session_id)
-
- # Stop the container and collect results
- results_path = None
- if container_id:
- runner: Runner = get_runner()
- executor = runner._executor
-
- try:
- results_path = executor.stop_module_continuous(container_id, input_dir)
- except Exception:
- # Container may have already stopped
- pass
-
- execution["status"] = "stopped"
- execution["stopped_at"] = datetime.now(timezone.utc).isoformat()
-
- return {
- "success": True,
- "session_id": session_id,
- "message": "Continuous session stopped",
- "results_path": str(results_path) if results_path else None,
- "final_metrics": final_metrics.get("metrics", {}),
- "elapsed": final_metrics.get("elapsed_human", ""),
- }
-
- except Exception as exception:
- message: str = f"Failed to stop continuous module: {exception}"
- raise ToolError(message) from exception
-
-
-@mcp.tool
-async def list_continuous_sessions() -> dict[str, Any]:
- """List all active and recent continuous sessions.
-
- :return: List of continuous sessions with their status.
-
- """
- sessions = []
- for session_id, execution in _background_executions.items():
- sessions.append({
- "session_id": session_id,
- "module": execution.get("module"),
- "status": execution.get("status"),
- "started_at": execution.get("started_at"),
- })
-
- return {
- "sessions": sessions,
- "count": len(sessions),
- }
-
diff --git a/fuzzforge-mcp/src/fuzzforge_mcp/tools/projects.py b/fuzzforge-mcp/src/fuzzforge_mcp/tools/projects.py
index 1868576..2530922 100644
--- a/fuzzforge-mcp/src/fuzzforge_mcp/tools/projects.py
+++ b/fuzzforge-mcp/src/fuzzforge_mcp/tools/projects.py
@@ -3,16 +3,12 @@
from __future__ import annotations
from pathlib import Path
-from typing import TYPE_CHECKING, Any
+from typing import Any
from fastmcp import FastMCP
from fastmcp.exceptions import ToolError
-from fuzzforge_mcp.dependencies import get_project_path, get_runner, set_current_project_path
-
-if TYPE_CHECKING:
- from fuzzforge_runner import Runner
-
+from fuzzforge_mcp.dependencies import get_project_path, get_storage, set_current_project_path
mcp: FastMCP = FastMCP()
@@ -22,25 +18,24 @@ async def init_project(project_path: str | None = None) -> dict[str, Any]:
"""Initialize a new FuzzForge project.
Creates a `.fuzzforge/` directory inside the project for storing:
- - assets/: Input files (source code, etc.)
- - inputs/: Prepared module inputs (for debugging)
- - runs/: Execution results from each module
+ - config.json: Project configuration
+ - runs/: Execution results
- This should be called before executing modules or workflows.
+ This should be called before executing hub tools.
:param project_path: Path to the project directory. If not provided, uses current directory.
:return: Project initialization result.
"""
- runner: Runner = get_runner()
+ storage = get_storage()
try:
path = Path(project_path) if project_path else get_project_path()
-
+
# Track this as the current active project
set_current_project_path(path)
-
- storage_path = runner.init_project(path)
+
+ storage_path = storage.init_project(path)
return {
"success": True,
@@ -58,23 +53,18 @@ async def init_project(project_path: str | None = None) -> dict[str, Any]:
async def set_project_assets(assets_path: str) -> dict[str, Any]:
"""Set the initial assets (source code) for a project.
- This sets the DEFAULT source directory mounted into modules.
- Usually this is the project root containing source code (e.g. Cargo.toml, src/).
+ This sets the DEFAULT source directory that will be mounted into
+ hub tool containers via volume mounts.
- IMPORTANT: This OVERWRITES the previous assets path. Only call this once
- during project setup. To pass different inputs to a specific module
- (e.g. crash files to crash-analyzer), use the `assets_path` parameter
- on `execute_module` instead.
-
- :param assets_path: Path to the project source directory or archive.
+ :param assets_path: Path to the project source directory.
:return: Result including stored assets path.
"""
- runner: Runner = get_runner()
+ storage = get_storage()
project_path: Path = get_project_path()
try:
- stored_path = runner.set_project_assets(
+ stored_path = storage.set_project_assets(
project_path=project_path,
assets_path=Path(assets_path),
)
@@ -100,11 +90,11 @@ async def list_executions() -> dict[str, Any]:
:return: List of execution IDs.
"""
- runner: Runner = get_runner()
+ storage = get_storage()
project_path: Path = get_project_path()
try:
- executions = runner.list_executions(project_path)
+ executions = storage.list_executions(project_path)
return {
"success": True,
@@ -127,11 +117,11 @@ async def get_execution_results(execution_id: str, extract_to: str | None = None
:return: Result including path to results archive.
"""
- runner: Runner = get_runner()
+ storage = get_storage()
project_path: Path = get_project_path()
try:
- results_path = runner.get_execution_results(project_path, execution_id)
+ results_path = storage.get_execution_results(project_path, execution_id)
if results_path is None:
return {
@@ -140,7 +130,7 @@ async def get_execution_results(execution_id: str, extract_to: str | None = None
"error": "Execution results not found",
}
- result = {
+ result: dict[str, Any] = {
"success": True,
"execution_id": execution_id,
"results_path": str(results_path),
@@ -148,7 +138,7 @@ async def get_execution_results(execution_id: str, extract_to: str | None = None
# Extract if requested
if extract_to:
- extracted_path = runner.extract_results(results_path, Path(extract_to))
+ extracted_path = storage.extract_results(results_path, Path(extract_to))
result["extracted_path"] = str(extracted_path)
return result
diff --git a/fuzzforge-mcp/src/fuzzforge_mcp/tools/workflows.py b/fuzzforge-mcp/src/fuzzforge_mcp/tools/workflows.py
deleted file mode 100644
index 222ca60..0000000
--- a/fuzzforge-mcp/src/fuzzforge_mcp/tools/workflows.py
+++ /dev/null
@@ -1,92 +0,0 @@
-"""Workflow tools for FuzzForge MCP."""
-
-from __future__ import annotations
-
-from pathlib import Path
-from typing import TYPE_CHECKING, Any
-
-from fastmcp import FastMCP
-from fastmcp.exceptions import ToolError
-from fuzzforge_runner.orchestrator import WorkflowDefinition, WorkflowStep
-
-from fuzzforge_mcp.dependencies import get_project_path, get_runner
-
-if TYPE_CHECKING:
- from fuzzforge_runner import Runner
- from fuzzforge_runner.orchestrator import WorkflowResult
-
-
-mcp: FastMCP = FastMCP()
-
-
-@mcp.tool
-async def execute_workflow(
- workflow_name: str,
- steps: list[dict[str, Any]],
- initial_assets_path: str | None = None,
-) -> dict[str, Any]:
- """Execute a workflow consisting of multiple module steps.
-
- A workflow chains multiple modules together, passing the output of each
- module as input to the next. This enables complex pipelines.
-
- :param workflow_name: Name for this workflow execution.
- :param steps: List of step definitions, each with "module" and optional "configuration".
- :param initial_assets_path: Optional path to initial assets for the first step.
- :return: Workflow execution result including status of each step.
-
- Example steps format:
- [
- {"module": "module-a", "configuration": {"key": "value"}},
- {"module": "module-b", "configuration": {}},
- {"module": "module-c"}
- ]
-
- """
- runner: Runner = get_runner()
- project_path: Path = get_project_path()
-
- try:
- # Convert step dicts to WorkflowStep objects
- workflow_steps = [
- WorkflowStep(
- module_identifier=step["module"],
- configuration=step.get("configuration"),
- name=step.get("name", f"step-{i}"),
- )
- for i, step in enumerate(steps)
- ]
-
- workflow = WorkflowDefinition(
- name=workflow_name,
- steps=workflow_steps,
- )
-
- result: WorkflowResult = await runner.execute_workflow(
- workflow=workflow,
- project_path=project_path,
- initial_assets_path=Path(initial_assets_path) if initial_assets_path else None,
- )
-
- return {
- "success": result.success,
- "execution_id": result.execution_id,
- "workflow_name": result.name,
- "final_results_path": str(result.final_results_path) if result.final_results_path else None,
- "steps": [
- {
- "step_index": step.step_index,
- "module": step.module_identifier,
- "success": step.success,
- "execution_id": step.execution_id,
- "results_path": str(step.results_path) if step.results_path else None,
- "error": step.error,
- }
- for step in result.steps
- ],
- }
-
- except Exception as exception:
- message: str = f"Workflow execution failed: {exception}"
- raise ToolError(message) from exception
-
diff --git a/fuzzforge-mcp/tests/test_resources.py b/fuzzforge-mcp/tests/test_resources.py
index 370ffff..6665f91 100644
--- a/fuzzforge-mcp/tests/test_resources.py
+++ b/fuzzforge-mcp/tests/test_resources.py
@@ -1,6 +1,6 @@
-"""MCP tool tests for FuzzForge OSS.
+"""MCP tool tests for FuzzForge AI.
-Tests the MCP tools that are available in the OSS version.
+Tests the MCP tools that are available in FuzzForge AI.
"""
import pytest
@@ -11,16 +11,6 @@ if TYPE_CHECKING:
from fastmcp.client import FastMCPTransport
-async def test_list_modules_tool_exists(
- mcp_client: "Client[FastMCPTransport]",
-) -> None:
- """Test that the list_modules tool is available."""
- tools = await mcp_client.list_tools()
- tool_names = [tool.name for tool in tools]
-
- assert "list_modules" in tool_names
-
-
async def test_init_project_tool_exists(
mcp_client: "Client[FastMCPTransport]",
) -> None:
@@ -31,31 +21,11 @@ async def test_init_project_tool_exists(
assert "init_project" in tool_names
-async def test_execute_module_tool_exists(
- mcp_client: "Client[FastMCPTransport]",
-) -> None:
- """Test that the execute_module tool is available."""
- tools = await mcp_client.list_tools()
- tool_names = [tool.name for tool in tools]
-
- assert "execute_module" in tool_names
-
-
-async def test_execute_workflow_tool_exists(
- mcp_client: "Client[FastMCPTransport]",
-) -> None:
- """Test that the execute_workflow tool is available."""
- tools = await mcp_client.list_tools()
- tool_names = [tool.name for tool in tools]
-
- assert "execute_workflow" in tool_names
-
-
async def test_mcp_has_expected_tool_count(
mcp_client: "Client[FastMCPTransport]",
) -> None:
"""Test that MCP has the expected number of tools."""
tools = await mcp_client.list_tools()
- # Should have at least 4 core tools
- assert len(tools) >= 4
+ # Should have project tools + hub tools
+ assert len(tools) >= 2
diff --git a/fuzzforge-modules/cargo-fuzzer/Dockerfile b/fuzzforge-modules/cargo-fuzzer/Dockerfile
deleted file mode 100644
index 33f658a..0000000
--- a/fuzzforge-modules/cargo-fuzzer/Dockerfile
+++ /dev/null
@@ -1,26 +0,0 @@
-FROM localhost/fuzzforge-modules-sdk:0.1.0
-
-# Module metadata is now read from pyproject.toml [tool.fuzzforge.module] section
-
-# Install system dependencies for Rust compilation
-RUN apt-get update && apt-get install -y \
- curl \
- build-essential \
- pkg-config \
- libssl-dev \
- && rm -rf /var/lib/apt/lists/*
-
-# Install Rust toolchain with nightly (required for cargo-fuzz)
-RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain nightly
-ENV PATH="/root/.cargo/bin:${PATH}"
-
-# Install cargo-fuzz
-RUN cargo install cargo-fuzz --locked || true
-
-COPY ./src /app/src
-COPY ./pyproject.toml /app/pyproject.toml
-
-# Remove workspace reference since we're using wheels
-RUN sed -i '/\[tool\.uv\.sources\]/,/^$/d' /app/pyproject.toml
-
-RUN uv sync --find-links /wheels
diff --git a/fuzzforge-modules/cargo-fuzzer/Makefile b/fuzzforge-modules/cargo-fuzzer/Makefile
deleted file mode 100644
index cada4d0..0000000
--- a/fuzzforge-modules/cargo-fuzzer/Makefile
+++ /dev/null
@@ -1,45 +0,0 @@
-PACKAGE=$(word 1, $(shell uv version))
-VERSION=$(word 2, $(shell uv version))
-
-PODMAN?=/usr/bin/podman
-
-SOURCES=./src
-TESTS=./tests
-
-.PHONY: bandit build clean format mypy pytest ruff version
-
-bandit:
- uv run bandit --recursive $(SOURCES)
-
-build:
- $(PODMAN) build --file ./Dockerfile --no-cache --tag $(PACKAGE):$(VERSION)
-
-save: build
- $(PODMAN) save --format oci-archive --output /tmp/$(PACKAGE)-$(VERSION).oci $(PACKAGE):$(VERSION)
-
-clean:
- @find . -type d \( \
- -name '*.egg-info' \
- -o -name '.mypy_cache' \
- -o -name '.pytest_cache' \
- -o -name '.ruff_cache' \
- -o -name '__pycache__' \
- \) -printf 'removing directory %p\n' -exec rm -rf {} +
-
-cloc:
- cloc $(SOURCES)
-
-format:
- uv run ruff format $(SOURCES) $(TESTS)
-
-mypy:
- uv run mypy $(SOURCES)
-
-pytest:
- uv run pytest $(TESTS)
-
-ruff:
- uv run ruff check --fix $(SOURCES) $(TESTS)
-
-version:
- @echo '$(PACKAGE)@$(VERSION)'
diff --git a/fuzzforge-modules/cargo-fuzzer/README.md b/fuzzforge-modules/cargo-fuzzer/README.md
deleted file mode 100644
index d0671a1..0000000
--- a/fuzzforge-modules/cargo-fuzzer/README.md
+++ /dev/null
@@ -1,46 +0,0 @@
-# FuzzForge Modules - FIXME
-
-## Installation
-
-### Python
-
-```shell
-# install the package (users)
-uv sync
-# install the package and all development dependencies (developers)
-uv sync --all-extras
-```
-
-### Container
-
-```shell
-# build the image
-make build
-# run the container
-mkdir -p "${PWD}/data" "${PWD}/data/input" "${PWD}/data/output"
-echo '{"settings":{},"resources":[]}' > "${PWD}/data/input/input.json"
-podman run --rm \
- --volume "${PWD}/data:/data" \
- ':' 'uv run module'
-```
-
-## Usage
-
-```shell
-uv run module
-```
-
-## Development tools
-
-```shell
-# run ruff (formatter)
-make format
-# run mypy (type checker)
-make mypy
-# run tests (pytest)
-make pytest
-# run ruff (linter)
-make ruff
-```
-
-See the file `Makefile` at the root of this directory for more tools.
diff --git a/fuzzforge-modules/cargo-fuzzer/mypy.ini b/fuzzforge-modules/cargo-fuzzer/mypy.ini
deleted file mode 100644
index 84e90d2..0000000
--- a/fuzzforge-modules/cargo-fuzzer/mypy.ini
+++ /dev/null
@@ -1,6 +0,0 @@
-[mypy]
-plugins = pydantic.mypy
-strict = True
-warn_unused_ignores = True
-warn_redundant_casts = True
-warn_return_any = True
diff --git a/fuzzforge-modules/cargo-fuzzer/pyproject.toml b/fuzzforge-modules/cargo-fuzzer/pyproject.toml
deleted file mode 100644
index 935549c..0000000
--- a/fuzzforge-modules/cargo-fuzzer/pyproject.toml
+++ /dev/null
@@ -1,58 +0,0 @@
-[project]
-name = "fuzzforge-cargo-fuzzer"
-version = "0.1.0"
-description = "Runs continuous coverage-guided fuzzing on Rust targets using cargo-fuzz"
-authors = []
-readme = "README.md"
-requires-python = ">=3.14"
-dependencies = [
- "fuzzforge-modules-sdk==0.0.1",
- "pydantic==2.12.4",
- "structlog==25.5.0",
-]
-
-[project.optional-dependencies]
-lints = [
- "bandit==1.8.6",
- "mypy==1.18.2",
- "ruff==0.14.4",
-]
-tests = [
- "pytest==9.0.2",
-]
-
-[project.scripts]
-module = "module.__main__:main"
-
-[tool.uv.sources]
-fuzzforge-modules-sdk = { workspace = true }
-
-[tool.uv]
-package = true
-
-# FuzzForge module metadata for AI agent discovery
-[tool.fuzzforge.module]
-identifier = "fuzzforge-cargo-fuzzer"
-suggested_predecessors = ["fuzzforge-harness-tester"]
-continuous_mode = true
-
-use_cases = [
- "Run continuous coverage-guided fuzzing on Rust targets with libFuzzer",
- "Execute cargo-fuzz on validated harnesses",
- "Produce crash artifacts for analysis",
- "Long-running fuzzing campaign"
-]
-
-common_inputs = [
- "validated-harnesses",
- "Cargo.toml",
- "rust-source-code"
-]
-
-output_artifacts = [
- "fuzzing_results.json",
- "crashes/",
- "results.json"
-]
-
-output_treatment = "Read fuzzing_results.json which contains: targets_fuzzed, total_crashes, total_executions, crashes_path, and results array with per-target crash info. Display summary of crashes found. The crashes/ directory contains crash inputs for downstream crash-analyzer."
diff --git a/fuzzforge-modules/cargo-fuzzer/ruff.toml b/fuzzforge-modules/cargo-fuzzer/ruff.toml
deleted file mode 100644
index 6374f62..0000000
--- a/fuzzforge-modules/cargo-fuzzer/ruff.toml
+++ /dev/null
@@ -1,19 +0,0 @@
-line-length = 120
-
-[lint]
-select = [ "ALL" ]
-ignore = [
- "COM812", # conflicts with the formatter
- "D100", # ignoring missing docstrings in public modules
- "D104", # ignoring missing docstrings in public packages
- "D203", # conflicts with 'D211'
- "D213", # conflicts with 'D212'
- "TD002", # ignoring missing author in 'TODO' statements
- "TD003", # ignoring missing issue link in 'TODO' statements
-]
-
-[lint.per-file-ignores]
-"tests/*" = [
- "PLR2004", # allowing comparisons using unamed numerical constants in tests
- "S101", # allowing 'assert' statements in tests
-]
diff --git a/fuzzforge-modules/cargo-fuzzer/src/module/__init__.py b/fuzzforge-modules/cargo-fuzzer/src/module/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/fuzzforge-modules/cargo-fuzzer/src/module/__main__.py b/fuzzforge-modules/cargo-fuzzer/src/module/__main__.py
deleted file mode 100644
index bc8914a..0000000
--- a/fuzzforge-modules/cargo-fuzzer/src/module/__main__.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from typing import TYPE_CHECKING
-
-from fuzzforge_modules_sdk.api import logs
-
-from module.mod import Module
-
-if TYPE_CHECKING:
- from fuzzforge_modules_sdk.api.modules.base import FuzzForgeModule
-
-
-def main() -> None:
- """TODO."""
- logs.configure()
- module: FuzzForgeModule = Module()
- module.main()
-
-
-if __name__ == "__main__":
- main()
diff --git a/fuzzforge-modules/cargo-fuzzer/src/module/mod.py b/fuzzforge-modules/cargo-fuzzer/src/module/mod.py
deleted file mode 100644
index 4000c3b..0000000
--- a/fuzzforge-modules/cargo-fuzzer/src/module/mod.py
+++ /dev/null
@@ -1,538 +0,0 @@
-"""Cargo Fuzzer module for FuzzForge.
-
-This module runs cargo-fuzz (libFuzzer) on validated Rust fuzz targets.
-It takes a fuzz project with compiled harnesses and runs fuzzing for a
-configurable duration, collecting crashes and statistics.
-"""
-
-from __future__ import annotations
-
-import json
-import os
-import re
-import shutil
-import subprocess
-import signal
-import time
-from pathlib import Path
-from typing import TYPE_CHECKING
-
-import structlog
-
-from fuzzforge_modules_sdk.api.constants import PATH_TO_INPUTS, PATH_TO_OUTPUTS
-from fuzzforge_modules_sdk.api.models import FuzzForgeModuleResults
-from fuzzforge_modules_sdk.api.modules.base import FuzzForgeModule
-
-from module.models import Input, Output, CrashInfo, FuzzingStats, TargetResult
-from module.settings import Settings
-
-if TYPE_CHECKING:
- from fuzzforge_modules_sdk.api.models import FuzzForgeModuleResource
-
-logger = structlog.get_logger()
-
-
-class Module(FuzzForgeModule):
- """Cargo Fuzzer module - runs cargo-fuzz with libFuzzer on Rust targets."""
-
- _settings: Settings | None
- _fuzz_project_path: Path | None
- _target_results: list[TargetResult]
- _crashes_path: Path | None
-
- def __init__(self) -> None:
- """Initialize an instance of the class."""
- name: str = "cargo-fuzzer"
- version: str = "0.1.0"
- FuzzForgeModule.__init__(self, name=name, version=version)
- self._settings = None
- self._fuzz_project_path = None
- self._target_results = []
- self._crashes_path = None
-
- @classmethod
- def _get_input_type(cls) -> type[Input]:
- """Return the input type."""
- return Input
-
- @classmethod
- def _get_output_type(cls) -> type[Output]:
- """Return the output type."""
- return Output
-
- def _prepare(self, settings: Settings) -> None: # type: ignore[override]
- """Prepare the module with settings.
-
- :param settings: Module settings.
-
- """
- self._settings = settings
- logger.info("cargo-fuzzer preparing", settings=settings.model_dump() if settings else {})
-
- def _run(self, resources: list[FuzzForgeModuleResource]) -> FuzzForgeModuleResults:
- """Run the fuzzer.
-
- :param resources: Input resources (fuzz project + source).
- :returns: Module execution result.
-
- """
- logger.info("cargo-fuzzer starting", resource_count=len(resources))
-
- # Emit initial progress
- self.emit_progress(0, status="initializing", message="Setting up fuzzing environment")
- self.emit_event("module_started", resource_count=len(resources))
-
- # Setup the fuzzing environment
- if not self._setup_environment(resources):
- self.emit_progress(100, status="failed", message="Failed to setup environment")
- return FuzzForgeModuleResults.FAILURE
-
- # Get list of fuzz targets
- targets = self._get_fuzz_targets()
- if not targets:
- logger.error("no fuzz targets found")
- self.emit_progress(100, status="failed", message="No fuzz targets found")
- return FuzzForgeModuleResults.FAILURE
-
- # Filter targets if specific ones were requested
- if self._settings and self._settings.targets:
- requested = set(self._settings.targets)
- targets = [t for t in targets if t in requested]
- if not targets:
- logger.error("none of the requested targets found", requested=list(requested))
- self.emit_progress(100, status="failed", message="Requested targets not found")
- return FuzzForgeModuleResults.FAILURE
-
- logger.info("found fuzz targets", targets=targets)
- self.emit_event("targets_found", targets=targets, count=len(targets))
-
- # Setup output directories
- self._crashes_path = PATH_TO_OUTPUTS / "crashes"
- self._crashes_path.mkdir(parents=True, exist_ok=True)
-
- # Run fuzzing on each target
- # max_duration=0 means infinite/continuous mode
- max_duration = self._settings.max_duration if self._settings else 60
- is_continuous = max_duration == 0
-
- if is_continuous:
- # Continuous mode: cycle through targets indefinitely
- # Each target runs for 60 seconds before moving to next
- duration_per_target = 60
- else:
- duration_per_target = max_duration // max(len(targets), 1)
- total_crashes = 0
-
- # In continuous mode, loop forever; otherwise loop once
- round_num = 0
- while True:
- round_num += 1
-
- for i, target in enumerate(targets):
- if is_continuous:
- progress_msg = f"Round {round_num}: Fuzzing {target}"
- else:
- progress_msg = f"Fuzzing target {i+1}/{len(targets)}"
-
- progress = int((i / len(targets)) * 100) if not is_continuous else 50
- self.emit_progress(
- progress,
- status="running",
- message=progress_msg,
- current_task=target,
- metrics={
- "targets_completed": i,
- "total_targets": len(targets),
- "crashes_found": total_crashes,
- "round": round_num if is_continuous else 1,
- }
- )
- self.emit_event("target_started", target=target, index=i, total=len(targets), round=round_num)
-
- result = self._fuzz_target(target, duration_per_target)
- self._target_results.append(result)
- total_crashes += len(result.crashes)
-
- # Emit target completion
- self.emit_event(
- "target_completed",
- target=target,
- crashes=len(result.crashes),
- executions=result.stats.total_executions if result.stats else 0,
- coverage=result.stats.coverage_edges if result.stats else 0,
- )
-
- logger.info("target completed",
- target=target,
- crashes=len(result.crashes),
- execs=result.stats.total_executions if result.stats else 0)
-
- # Exit loop if not continuous mode
- if not is_continuous:
- break
-
- # Write output
- self._write_output()
-
- # Emit final progress
- self.emit_progress(
- 100,
- status="completed",
- message=f"Fuzzing completed. Found {total_crashes} crashes.",
- metrics={
- "targets_fuzzed": len(self._target_results),
- "total_crashes": total_crashes,
- "total_executions": sum(r.stats.total_executions for r in self._target_results if r.stats),
- }
- )
- self.emit_event("module_completed", total_crashes=total_crashes, targets_fuzzed=len(targets))
-
- logger.info("cargo-fuzzer completed",
- targets=len(self._target_results),
- total_crashes=total_crashes)
-
- return FuzzForgeModuleResults.SUCCESS
-
- def _cleanup(self, settings: Settings) -> None: # type: ignore[override]
- """Clean up after execution.
-
- :param settings: Module settings.
-
- """
- pass
-
- def _setup_environment(self, resources: list[FuzzForgeModuleResource]) -> bool:
- """Setup the fuzzing environment.
-
- :param resources: Input resources.
- :returns: True if setup successful.
-
- """
- import shutil
-
- # Find fuzz project in resources
- source_fuzz_project = None
- source_project_root = None
-
- for resource in resources:
- path = Path(resource.path)
- if path.is_dir():
- # Check for fuzz subdirectory
- fuzz_dir = path / "fuzz"
- if fuzz_dir.is_dir() and (fuzz_dir / "Cargo.toml").exists():
- source_fuzz_project = fuzz_dir
- source_project_root = path
- break
- # Or direct fuzz project
- if (path / "Cargo.toml").exists() and (path / "fuzz_targets").is_dir():
- source_fuzz_project = path
- source_project_root = path.parent
- break
-
- if source_fuzz_project is None:
- logger.error("no fuzz project found in resources")
- return False
-
- # Copy project to writable location since /data/input is read-only
- # and cargo-fuzz needs to write corpus, artifacts, and build cache
- work_dir = Path("/tmp/fuzz-work")
- if work_dir.exists():
- shutil.rmtree(work_dir)
-
- # Copy the entire project root
- work_project = work_dir / source_project_root.name
- shutil.copytree(source_project_root, work_project, dirs_exist_ok=True)
-
- # Update fuzz_project_path to point to the copied location
- relative_fuzz = source_fuzz_project.relative_to(source_project_root)
- self._fuzz_project_path = work_project / relative_fuzz
-
- logger.info("using fuzz project", path=str(self._fuzz_project_path))
- return True
-
- def _get_fuzz_targets(self) -> list[str]:
- """Get list of fuzz target names.
-
- :returns: List of target names.
-
- """
- if self._fuzz_project_path is None:
- return []
-
- targets = []
- fuzz_targets_dir = self._fuzz_project_path / "fuzz_targets"
-
- if fuzz_targets_dir.is_dir():
- for rs_file in fuzz_targets_dir.glob("*.rs"):
- targets.append(rs_file.stem)
-
- return targets
-
- def _fuzz_target(self, target: str, duration: int) -> TargetResult:
- """Run fuzzing on a single target.
-
- :param target: Name of the fuzz target.
- :param duration: Maximum duration in seconds.
- :returns: Fuzzing result for this target.
-
- """
- logger.info("fuzzing target", target=target, duration=duration)
-
- crashes: list[CrashInfo] = []
- stats = FuzzingStats()
-
- if self._fuzz_project_path is None:
- return TargetResult(target=target, crashes=crashes, stats=stats)
-
- # Create corpus directory for this target
- corpus_dir = self._fuzz_project_path / "corpus" / target
- corpus_dir.mkdir(parents=True, exist_ok=True)
-
- # Build the command
- cmd = [
- "cargo", "+nightly", "fuzz", "run",
- target,
- "--",
- ]
-
- # Add time limit
- if duration > 0:
- cmd.append(f"-max_total_time={duration}")
-
- # Use fork mode to continue after crashes
- # This makes libFuzzer restart worker after crash instead of exiting
- cmd.append("-fork=1")
- cmd.append("-ignore_crashes=1")
- cmd.append("-print_final_stats=1")
-
- # Add jobs if specified
- if self._settings and self._settings.jobs > 1:
- cmd.extend([f"-jobs={self._settings.jobs}"])
-
- try:
- env = os.environ.copy()
- env["CARGO_INCREMENTAL"] = "0"
-
- process = subprocess.Popen(
- cmd,
- cwd=self._fuzz_project_path,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- text=True,
- env=env,
- )
-
- output_lines = []
- start_time = time.time()
- last_metrics_emit = 0.0
- current_execs = 0
- current_cov = 0
- current_exec_s = 0
- crash_count = 0
-
- # Read output with timeout (skip timeout check in infinite mode)
- while True:
- if process.poll() is not None:
- break
-
- elapsed = time.time() - start_time
- # Only enforce timeout if duration > 0 (not infinite mode)
- if duration > 0 and elapsed > duration + 30: # Grace period
- logger.warning("fuzzer timeout, terminating", target=target)
- process.terminate()
- try:
- process.wait(timeout=10)
- except subprocess.TimeoutExpired:
- process.kill()
- break
-
- try:
- if process.stdout:
- line = process.stdout.readline()
- if line:
- output_lines.append(line)
-
- # Parse real-time metrics from libFuzzer output
- # Example: "#12345 NEW cov: 100 ft: 50 corp: 25/1Kb exec/s: 1000"
- exec_match = re.search(r"#(\d+)", line)
- if exec_match:
- current_execs = int(exec_match.group(1))
-
- cov_match = re.search(r"cov:\s*(\d+)", line)
- if cov_match:
- current_cov = int(cov_match.group(1))
-
- exec_s_match = re.search(r"exec/s:\s*(\d+)", line)
- if exec_s_match:
- current_exec_s = int(exec_s_match.group(1))
-
- # Check for crash indicators
- if "SUMMARY:" in line or "ERROR:" in line or "crash-" in line.lower():
- crash_count += 1
- self.emit_event(
- "crash_detected",
- target=target,
- crash_number=crash_count,
- line=line.strip(),
- )
- logger.debug("fuzzer output", line=line.strip())
-
- # Emit metrics periodically (every 2 seconds)
- if elapsed - last_metrics_emit >= 2.0:
- last_metrics_emit = elapsed
- self.emit_event(
- "metrics",
- target=target,
- executions=current_execs,
- coverage=current_cov,
- exec_per_sec=current_exec_s,
- crashes=crash_count,
- elapsed_seconds=int(elapsed),
- remaining_seconds=max(0, duration - int(elapsed)),
- )
-
- except Exception:
- pass
-
- # Parse statistics from output
- stats = self._parse_fuzzer_stats(output_lines)
-
- # Collect crashes
- crashes = self._collect_crashes(target)
-
- # Emit final event for this target if crashes were found
- if crashes:
- self.emit_event(
- "crashes_collected",
- target=target,
- count=len(crashes),
- paths=[c.file_path for c in crashes],
- )
-
- except FileNotFoundError:
- logger.error("cargo-fuzz not found, please install with: cargo install cargo-fuzz")
- stats.error = "cargo-fuzz not installed"
- self.emit_event("error", target=target, message="cargo-fuzz not installed")
- except Exception as e:
- logger.exception("fuzzing error", target=target, error=str(e))
- stats.error = str(e)
- self.emit_event("error", target=target, message=str(e))
-
- return TargetResult(target=target, crashes=crashes, stats=stats)
-
- def _parse_fuzzer_stats(self, output_lines: list[str]) -> FuzzingStats:
- """Parse fuzzer output for statistics.
-
- :param output_lines: Lines of fuzzer output.
- :returns: Parsed statistics.
-
- """
- stats = FuzzingStats()
- full_output = "".join(output_lines)
-
- # Parse libFuzzer stats
- # Example: "#12345 DONE cov: 100 ft: 50 corp: 25/1Kb exec/s: 1000"
- exec_match = re.search(r"#(\d+)", full_output)
- if exec_match:
- stats.total_executions = int(exec_match.group(1))
-
- cov_match = re.search(r"cov:\s*(\d+)", full_output)
- if cov_match:
- stats.coverage_edges = int(cov_match.group(1))
-
- corp_match = re.search(r"corp:\s*(\d+)", full_output)
- if corp_match:
- stats.corpus_size = int(corp_match.group(1))
-
- exec_s_match = re.search(r"exec/s:\s*(\d+)", full_output)
- if exec_s_match:
- stats.executions_per_second = int(exec_s_match.group(1))
-
- return stats
-
- def _collect_crashes(self, target: str) -> list[CrashInfo]:
- """Collect crash files from fuzzer output.
-
- :param target: Name of the fuzz target.
- :returns: List of crash info.
-
- """
- crashes: list[CrashInfo] = []
- seen_hashes: set[str] = set()
-
- if self._fuzz_project_path is None or self._crashes_path is None:
- return crashes
-
- # Check multiple possible crash locations:
- # 1. Standard artifacts directory (target-specific)
- # 2. Generic artifacts directory
- # 3. Fuzz project root (fork mode sometimes writes here)
- # 4. Project root (parent of fuzz directory)
- search_paths = [
- self._fuzz_project_path / "artifacts" / target,
- self._fuzz_project_path / "artifacts",
- self._fuzz_project_path,
- self._fuzz_project_path.parent,
- ]
-
- for search_dir in search_paths:
- if not search_dir.is_dir():
- continue
-
- # Use rglob to recursively find crash files
- for crash_file in search_dir.rglob("crash-*"):
- if not crash_file.is_file():
- continue
-
- # Skip duplicates by hash
- if crash_file.name in seen_hashes:
- continue
- seen_hashes.add(crash_file.name)
-
- # Copy crash to output
- output_crash = self._crashes_path / target
- output_crash.mkdir(parents=True, exist_ok=True)
- dest = output_crash / crash_file.name
- shutil.copy2(crash_file, dest)
-
- # Read crash input
- crash_data = crash_file.read_bytes()
-
- crash_info = CrashInfo(
- file_path=str(dest),
- input_hash=crash_file.name,
- input_size=len(crash_data),
- )
- crashes.append(crash_info)
-
- logger.info("found crash", target=target, file=crash_file.name, source=str(search_dir))
-
- logger.info("crash collection complete", target=target, total_crashes=len(crashes))
- return crashes
-
- def _write_output(self) -> None:
- """Write the fuzzing results to output."""
- output_path = PATH_TO_OUTPUTS / "fuzzing_results.json"
- output_path.parent.mkdir(parents=True, exist_ok=True)
-
- total_crashes = sum(len(r.crashes) for r in self._target_results)
- total_execs = sum(r.stats.total_executions for r in self._target_results if r.stats)
-
- output_data = {
- "fuzz_project": str(self._fuzz_project_path),
- "targets_fuzzed": len(self._target_results),
- "total_crashes": total_crashes,
- "total_executions": total_execs,
- "crashes_path": str(self._crashes_path),
- "results": [
- {
- "target": r.target,
- "crashes": [c.model_dump() for c in r.crashes],
- "stats": r.stats.model_dump() if r.stats else None,
- }
- for r in self._target_results
- ],
- }
-
- output_path.write_text(json.dumps(output_data, indent=2))
- logger.info("wrote fuzzing results", path=str(output_path))
diff --git a/fuzzforge-modules/cargo-fuzzer/src/module/models.py b/fuzzforge-modules/cargo-fuzzer/src/module/models.py
deleted file mode 100644
index 9c4fb9e..0000000
--- a/fuzzforge-modules/cargo-fuzzer/src/module/models.py
+++ /dev/null
@@ -1,88 +0,0 @@
-"""Models for the cargo-fuzzer module."""
-
-from pydantic import BaseModel, Field
-from fuzzforge_modules_sdk.api.models import FuzzForgeModuleInputBase, FuzzForgeModuleOutputBase
-
-from module.settings import Settings
-
-
-class FuzzingStats(BaseModel):
- """Statistics from a fuzzing run."""
-
- #: Total number of test case executions
- total_executions: int = 0
-
- #: Executions per second
- executions_per_second: int = 0
-
- #: Number of coverage edges discovered
- coverage_edges: int = 0
-
- #: Size of the corpus
- corpus_size: int = 0
-
- #: Any error message
- error: str = ""
-
-
-class CrashInfo(BaseModel):
- """Information about a discovered crash."""
-
- #: Path to the crash input file
- file_path: str
-
- #: Hash/name of the crash input
- input_hash: str
-
- #: Size of the crash input in bytes
- input_size: int = 0
-
- #: Crash type (if identified)
- crash_type: str = ""
-
- #: Stack trace (if available)
- stack_trace: str = ""
-
-
-class TargetResult(BaseModel):
- """Result of fuzzing a single target."""
-
- #: Name of the fuzz target
- target: str
-
- #: List of crashes found
- crashes: list[CrashInfo] = Field(default_factory=list)
-
- #: Fuzzing statistics
- stats: FuzzingStats = Field(default_factory=FuzzingStats)
-
-
-class Input(FuzzForgeModuleInputBase[Settings]):
- """Input for the cargo-fuzzer module.
-
- Expects:
- - A fuzz project directory with validated harnesses
- - Optionally the source crate to link against
- """
-
-
-class Output(FuzzForgeModuleOutputBase):
- """Output from the cargo-fuzzer module."""
-
- #: Path to the fuzz project
- fuzz_project: str = ""
-
- #: Number of targets fuzzed
- targets_fuzzed: int = 0
-
- #: Total crashes found across all targets
- total_crashes: int = 0
-
- #: Total executions across all targets
- total_executions: int = 0
-
- #: Path to collected crash files
- crashes_path: str = ""
-
- #: Results per target
- results: list[TargetResult] = Field(default_factory=list)
diff --git a/fuzzforge-modules/cargo-fuzzer/src/module/settings.py b/fuzzforge-modules/cargo-fuzzer/src/module/settings.py
deleted file mode 100644
index ec49c21..0000000
--- a/fuzzforge-modules/cargo-fuzzer/src/module/settings.py
+++ /dev/null
@@ -1,35 +0,0 @@
-"""Settings for the cargo-fuzzer module."""
-
-from typing import Optional
-from pydantic import model_validator
-from fuzzforge_modules_sdk.api.models import FuzzForgeModulesSettingsBase
-
-
-class Settings(FuzzForgeModulesSettingsBase):
- """Settings for the cargo-fuzzer module."""
-
- #: Maximum fuzzing duration in seconds (total across all targets)
- #: Set to 0 for infinite/continuous mode
- max_duration: int = 60
-
- #: Number of parallel fuzzing jobs
- jobs: int = 1
-
- #: Maximum length of generated inputs
- max_len: int = 4096
-
- #: Whether to use AddressSanitizer
- use_asan: bool = True
-
- #: Specific targets to fuzz (empty = all targets)
- targets: list[str] = []
-
- #: Single target to fuzz (convenience alias for targets)
- target: Optional[str] = None
-
- @model_validator(mode="after")
- def handle_single_target(self) -> "Settings":
- """Convert single target to targets list if provided."""
- if self.target and self.target not in self.targets:
- self.targets.append(self.target)
- return self
diff --git a/fuzzforge-modules/cargo-fuzzer/tests/.gitkeep b/fuzzforge-modules/cargo-fuzzer/tests/.gitkeep
deleted file mode 100644
index e69de29..0000000
diff --git a/fuzzforge-modules/crash-analyzer/Dockerfile b/fuzzforge-modules/crash-analyzer/Dockerfile
deleted file mode 100644
index 3f32d99..0000000
--- a/fuzzforge-modules/crash-analyzer/Dockerfile
+++ /dev/null
@@ -1,11 +0,0 @@
-FROM localhost/fuzzforge-modules-sdk:0.1.0
-
-# Module metadata is now read from pyproject.toml [tool.fuzzforge.module] section
-
-COPY ./src /app/src
-COPY ./pyproject.toml /app/pyproject.toml
-
-# Remove workspace reference since we're using wheels
-RUN sed -i '/\[tool\.uv\.sources\]/,/^$/d' /app/pyproject.toml
-
-RUN uv sync --find-links /wheels
diff --git a/fuzzforge-modules/crash-analyzer/Makefile b/fuzzforge-modules/crash-analyzer/Makefile
deleted file mode 100644
index cada4d0..0000000
--- a/fuzzforge-modules/crash-analyzer/Makefile
+++ /dev/null
@@ -1,45 +0,0 @@
-PACKAGE=$(word 1, $(shell uv version))
-VERSION=$(word 2, $(shell uv version))
-
-PODMAN?=/usr/bin/podman
-
-SOURCES=./src
-TESTS=./tests
-
-.PHONY: bandit build clean format mypy pytest ruff version
-
-bandit:
- uv run bandit --recursive $(SOURCES)
-
-build:
- $(PODMAN) build --file ./Dockerfile --no-cache --tag $(PACKAGE):$(VERSION)
-
-save: build
- $(PODMAN) save --format oci-archive --output /tmp/$(PACKAGE)-$(VERSION).oci $(PACKAGE):$(VERSION)
-
-clean:
- @find . -type d \( \
- -name '*.egg-info' \
- -o -name '.mypy_cache' \
- -o -name '.pytest_cache' \
- -o -name '.ruff_cache' \
- -o -name '__pycache__' \
- \) -printf 'removing directory %p\n' -exec rm -rf {} +
-
-cloc:
- cloc $(SOURCES)
-
-format:
- uv run ruff format $(SOURCES) $(TESTS)
-
-mypy:
- uv run mypy $(SOURCES)
-
-pytest:
- uv run pytest $(TESTS)
-
-ruff:
- uv run ruff check --fix $(SOURCES) $(TESTS)
-
-version:
- @echo '$(PACKAGE)@$(VERSION)'
diff --git a/fuzzforge-modules/crash-analyzer/README.md b/fuzzforge-modules/crash-analyzer/README.md
deleted file mode 100644
index d0671a1..0000000
--- a/fuzzforge-modules/crash-analyzer/README.md
+++ /dev/null
@@ -1,46 +0,0 @@
-# FuzzForge Modules - FIXME
-
-## Installation
-
-### Python
-
-```shell
-# install the package (users)
-uv sync
-# install the package and all development dependencies (developers)
-uv sync --all-extras
-```
-
-### Container
-
-```shell
-# build the image
-make build
-# run the container
-mkdir -p "${PWD}/data" "${PWD}/data/input" "${PWD}/data/output"
-echo '{"settings":{},"resources":[]}' > "${PWD}/data/input/input.json"
-podman run --rm \
- --volume "${PWD}/data:/data" \
- ':' 'uv run module'
-```
-
-## Usage
-
-```shell
-uv run module
-```
-
-## Development tools
-
-```shell
-# run ruff (formatter)
-make format
-# run mypy (type checker)
-make mypy
-# run tests (pytest)
-make pytest
-# run ruff (linter)
-make ruff
-```
-
-See the file `Makefile` at the root of this directory for more tools.
diff --git a/fuzzforge-modules/crash-analyzer/mypy.ini b/fuzzforge-modules/crash-analyzer/mypy.ini
deleted file mode 100644
index 84e90d2..0000000
--- a/fuzzforge-modules/crash-analyzer/mypy.ini
+++ /dev/null
@@ -1,6 +0,0 @@
-[mypy]
-plugins = pydantic.mypy
-strict = True
-warn_unused_ignores = True
-warn_redundant_casts = True
-warn_return_any = True
diff --git a/fuzzforge-modules/crash-analyzer/pyproject.toml b/fuzzforge-modules/crash-analyzer/pyproject.toml
deleted file mode 100644
index c1a5268..0000000
--- a/fuzzforge-modules/crash-analyzer/pyproject.toml
+++ /dev/null
@@ -1,58 +0,0 @@
-[project]
-name = "fuzzforge-crash-analyzer"
-version = "0.1.0"
-description = "Analyzes fuzzing crashes, deduplicates them, and generates security reports"
-authors = []
-readme = "README.md"
-requires-python = ">=3.14"
-dependencies = [
- "fuzzforge-modules-sdk==0.0.1",
- "pydantic==2.12.4",
- "structlog==25.5.0",
- "jinja2==3.1.6",
-]
-
-[project.optional-dependencies]
-lints = [
- "bandit==1.8.6",
- "mypy==1.18.2",
- "ruff==0.14.4",
-]
-tests = [
- "pytest==9.0.2",
-]
-
-[project.scripts]
-module = "module.__main__:main"
-
-[tool.uv.sources]
-fuzzforge-modules-sdk = { workspace = true }
-
-[tool.uv]
-package = true
-
-# FuzzForge module metadata for AI agent discovery
-[tool.fuzzforge.module]
-identifier = "fuzzforge-crash-analyzer"
-suggested_predecessors = ["fuzzforge-cargo-fuzzer"]
-continuous_mode = false
-
-use_cases = [
- "Analyze Rust crash artifacts from fuzzing",
- "Deduplicate crashes by stack trace signature",
- "Triage crashes by severity (critical, high, medium, low)",
- "Generate security vulnerability reports"
-]
-
-common_inputs = [
- "crash-artifacts",
- "stack-traces",
- "rust-source-code"
-]
-
-output_artifacts = [
- "crash_analysis.json",
- "results.json"
-]
-
-output_treatment = "Read crash_analysis.json which contains: total_crashes, unique_crashes, duplicate_crashes, severity_summary (high/medium/low/unknown counts), and unique_analyses array with details per crash. Display a summary table of unique crashes by severity."
diff --git a/fuzzforge-modules/crash-analyzer/ruff.toml b/fuzzforge-modules/crash-analyzer/ruff.toml
deleted file mode 100644
index 6374f62..0000000
--- a/fuzzforge-modules/crash-analyzer/ruff.toml
+++ /dev/null
@@ -1,19 +0,0 @@
-line-length = 120
-
-[lint]
-select = [ "ALL" ]
-ignore = [
- "COM812", # conflicts with the formatter
- "D100", # ignoring missing docstrings in public modules
- "D104", # ignoring missing docstrings in public packages
- "D203", # conflicts with 'D211'
- "D213", # conflicts with 'D212'
- "TD002", # ignoring missing author in 'TODO' statements
- "TD003", # ignoring missing issue link in 'TODO' statements
-]
-
-[lint.per-file-ignores]
-"tests/*" = [
- "PLR2004", # allowing comparisons using unamed numerical constants in tests
- "S101", # allowing 'assert' statements in tests
-]
diff --git a/fuzzforge-modules/crash-analyzer/src/module/__init__.py b/fuzzforge-modules/crash-analyzer/src/module/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/fuzzforge-modules/crash-analyzer/src/module/__main__.py b/fuzzforge-modules/crash-analyzer/src/module/__main__.py
deleted file mode 100644
index bc8914a..0000000
--- a/fuzzforge-modules/crash-analyzer/src/module/__main__.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from typing import TYPE_CHECKING
-
-from fuzzforge_modules_sdk.api import logs
-
-from module.mod import Module
-
-if TYPE_CHECKING:
- from fuzzforge_modules_sdk.api.modules.base import FuzzForgeModule
-
-
-def main() -> None:
- """TODO."""
- logs.configure()
- module: FuzzForgeModule = Module()
- module.main()
-
-
-if __name__ == "__main__":
- main()
diff --git a/fuzzforge-modules/crash-analyzer/src/module/mod.py b/fuzzforge-modules/crash-analyzer/src/module/mod.py
deleted file mode 100644
index 40f71ff..0000000
--- a/fuzzforge-modules/crash-analyzer/src/module/mod.py
+++ /dev/null
@@ -1,340 +0,0 @@
-"""Crash Analyzer module for FuzzForge.
-
-This module analyzes crashes from cargo-fuzz, deduplicates them,
-extracts stack traces, and triages them by severity.
-"""
-
-from __future__ import annotations
-
-import hashlib
-import json
-import os
-import re
-import subprocess
-from pathlib import Path
-from typing import TYPE_CHECKING
-
-import structlog
-
-from fuzzforge_modules_sdk.api.constants import PATH_TO_INPUTS, PATH_TO_OUTPUTS
-from fuzzforge_modules_sdk.api.models import FuzzForgeModuleResults
-from fuzzforge_modules_sdk.api.modules.base import FuzzForgeModule
-
-from module.models import Input, Output, CrashAnalysis, Severity
-from module.settings import Settings
-
-if TYPE_CHECKING:
- from fuzzforge_modules_sdk.api.models import FuzzForgeModuleResource
-
-logger = structlog.get_logger()
-
-
-class Module(FuzzForgeModule):
- """Crash Analyzer module - analyzes and triages fuzzer crashes."""
-
- _settings: Settings | None
- _analyses: list[CrashAnalysis]
- _fuzz_project_path: Path | None
-
- def __init__(self) -> None:
- """Initialize an instance of the class."""
- name: str = "crash-analyzer"
- version: str = "0.1.0"
- FuzzForgeModule.__init__(self, name=name, version=version)
- self._settings = None
- self._analyses = []
- self._fuzz_project_path = None
-
- @classmethod
- def _get_input_type(cls) -> type[Input]:
- """Return the input type."""
- return Input
-
- @classmethod
- def _get_output_type(cls) -> type[Output]:
- """Return the output type."""
- return Output
-
- def _prepare(self, settings: Settings) -> None: # type: ignore[override]
- """Prepare the module.
-
- :param settings: Module settings.
-
- """
- self._settings = settings
- logger.info("crash-analyzer preparing", settings=settings.model_dump() if settings else {})
-
- def _run(self, resources: list[FuzzForgeModuleResource]) -> FuzzForgeModuleResults:
- """Run the crash analyzer.
-
- :param resources: Input resources (fuzzing results + crashes).
- :returns: Module execution result.
-
- """
- logger.info("crash-analyzer starting", resource_count=len(resources))
-
- # Find crashes directory and fuzz project
- crashes_path = None
- for resource in resources:
- path = Path(resource.path)
- if path.is_dir():
- if path.name == "crashes" or (path / "crashes").is_dir():
- crashes_path = path if path.name == "crashes" else path / "crashes"
- if (path / "fuzz_targets").is_dir():
- self._fuzz_project_path = path
- if (path / "fuzz" / "fuzz_targets").is_dir():
- self._fuzz_project_path = path / "fuzz"
-
- if crashes_path is None:
- # Try to find crashes in fuzzing_results.json
- for resource in resources:
- path = Path(resource.path)
- if path.name == "fuzzing_results.json" and path.exists():
- with open(path) as f:
- data = json.load(f)
- if "crashes_path" in data:
- crashes_path = Path(data["crashes_path"])
- break
-
- if crashes_path is None or not crashes_path.exists():
- logger.warning("no crashes found to analyze")
- self._write_output()
- return FuzzForgeModuleResults.SUCCESS
-
- logger.info("analyzing crashes", path=str(crashes_path))
-
- # Analyze crashes per target
- for target_dir in crashes_path.iterdir():
- if target_dir.is_dir():
- target = target_dir.name
- for crash_file in target_dir.glob("crash-*"):
- if crash_file.is_file():
- analysis = self._analyze_crash(target, crash_file)
- self._analyses.append(analysis)
-
- # Deduplicate crashes
- self._deduplicate_crashes()
-
- # Write output
- self._write_output()
-
- unique_count = sum(1 for a in self._analyses if not a.is_duplicate)
- logger.info("crash-analyzer completed",
- total=len(self._analyses),
- unique=unique_count)
-
- return FuzzForgeModuleResults.SUCCESS
-
- def _cleanup(self, settings: Settings) -> None: # type: ignore[override]
- """Clean up after execution.
-
- :param settings: Module settings.
-
- """
- pass
-
- def _analyze_crash(self, target: str, crash_file: Path) -> CrashAnalysis:
- """Analyze a single crash.
-
- :param target: Name of the fuzz target.
- :param crash_file: Path to the crash input file.
- :returns: Crash analysis result.
-
- """
- logger.debug("analyzing crash", target=target, file=crash_file.name)
-
- # Read crash input
- crash_data = crash_file.read_bytes()
- input_hash = hashlib.sha256(crash_data).hexdigest()[:16]
-
- # Try to reproduce and get stack trace
- stack_trace = ""
- crash_type = "unknown"
- severity = Severity.UNKNOWN
-
- if self._fuzz_project_path:
- stack_trace, crash_type = self._reproduce_crash(target, crash_file)
- severity = self._determine_severity(crash_type, stack_trace)
-
- return CrashAnalysis(
- target=target,
- input_file=str(crash_file),
- input_hash=input_hash,
- input_size=len(crash_data),
- crash_type=crash_type,
- severity=severity,
- stack_trace=stack_trace,
- is_duplicate=False,
- )
-
- def _reproduce_crash(self, target: str, crash_file: Path) -> tuple[str, str]:
- """Reproduce a crash to get stack trace.
-
- :param target: Name of the fuzz target.
- :param crash_file: Path to the crash input file.
- :returns: Tuple of (stack_trace, crash_type).
-
- """
- if self._fuzz_project_path is None:
- return "", "unknown"
-
- try:
- env = os.environ.copy()
- env["RUST_BACKTRACE"] = "1"
-
- result = subprocess.run(
- [
- "cargo", "+nightly", "fuzz", "run",
- target,
- str(crash_file),
- "--",
- "-runs=1",
- ],
- cwd=self._fuzz_project_path,
- capture_output=True,
- text=True,
- timeout=30,
- env=env,
- )
-
- output = result.stdout + result.stderr
-
- # Extract crash type
- crash_type = "unknown"
- if "heap-buffer-overflow" in output.lower():
- crash_type = "heap-buffer-overflow"
- elif "stack-buffer-overflow" in output.lower():
- crash_type = "stack-buffer-overflow"
- elif "heap-use-after-free" in output.lower():
- crash_type = "use-after-free"
- elif "null" in output.lower() and "deref" in output.lower():
- crash_type = "null-pointer-dereference"
- elif "panic" in output.lower():
- crash_type = "panic"
- elif "assertion" in output.lower():
- crash_type = "assertion-failure"
- elif "timeout" in output.lower():
- crash_type = "timeout"
- elif "out of memory" in output.lower() or "oom" in output.lower():
- crash_type = "out-of-memory"
-
- # Extract stack trace
- stack_lines = []
- in_stack = False
- for line in output.splitlines():
- if "SUMMARY:" in line or "ERROR:" in line:
- in_stack = True
- if in_stack:
- stack_lines.append(line)
- if len(stack_lines) > 50: # Limit stack trace length
- break
-
- return "\n".join(stack_lines), crash_type
-
- except subprocess.TimeoutExpired:
- return "", "timeout"
- except Exception as e:
- logger.warning("failed to reproduce crash", error=str(e))
- return "", "unknown"
-
- def _determine_severity(self, crash_type: str, stack_trace: str) -> Severity:
- """Determine crash severity based on type and stack trace.
-
- :param crash_type: Type of the crash.
- :param stack_trace: Stack trace string.
- :returns: Severity level.
-
- """
- high_severity = [
- "heap-buffer-overflow",
- "stack-buffer-overflow",
- "use-after-free",
- "double-free",
- ]
-
- medium_severity = [
- "null-pointer-dereference",
- "out-of-memory",
- "integer-overflow",
- ]
-
- low_severity = [
- "panic",
- "assertion-failure",
- "timeout",
- ]
-
- if crash_type in high_severity:
- return Severity.HIGH
- elif crash_type in medium_severity:
- return Severity.MEDIUM
- elif crash_type in low_severity:
- return Severity.LOW
- else:
- return Severity.UNKNOWN
-
- def _deduplicate_crashes(self) -> None:
- """Mark duplicate crashes based on stack trace similarity."""
- seen_signatures: set[str] = set()
-
- for analysis in self._analyses:
- # Create a signature from crash type and key stack frames
- signature = self._create_signature(analysis)
-
- if signature in seen_signatures:
- analysis.is_duplicate = True
- else:
- seen_signatures.add(signature)
-
- def _create_signature(self, analysis: CrashAnalysis) -> str:
- """Create a unique signature for a crash.
-
- :param analysis: Crash analysis.
- :returns: Signature string.
-
- """
- # Use crash type + first few significant stack frames
- parts = [analysis.target, analysis.crash_type]
-
- # Extract function names from stack trace
- func_pattern = re.compile(r"in (\S+)")
- funcs = func_pattern.findall(analysis.stack_trace)
-
- # Use first 3 unique functions
- seen = set()
- for func in funcs:
- if func not in seen and not func.startswith("std::"):
- parts.append(func)
- seen.add(func)
- if len(seen) >= 3:
- break
-
- return "|".join(parts)
-
- def _write_output(self) -> None:
- """Write the analysis results to output."""
- output_path = PATH_TO_OUTPUTS / "crash_analysis.json"
- output_path.parent.mkdir(parents=True, exist_ok=True)
-
- unique = [a for a in self._analyses if not a.is_duplicate]
- duplicates = [a for a in self._analyses if a.is_duplicate]
-
- # Group by severity
- by_severity = {
- "high": [a for a in unique if a.severity == Severity.HIGH],
- "medium": [a for a in unique if a.severity == Severity.MEDIUM],
- "low": [a for a in unique if a.severity == Severity.LOW],
- "unknown": [a for a in unique if a.severity == Severity.UNKNOWN],
- }
-
- output_data = {
- "total_crashes": len(self._analyses),
- "unique_crashes": len(unique),
- "duplicate_crashes": len(duplicates),
- "severity_summary": {k: len(v) for k, v in by_severity.items()},
- "unique_analyses": [a.model_dump() for a in unique],
- "duplicate_analyses": [a.model_dump() for a in duplicates],
- }
-
- output_path.write_text(json.dumps(output_data, indent=2, default=str))
- logger.info("wrote crash analysis", path=str(output_path))
diff --git a/fuzzforge-modules/crash-analyzer/src/module/models.py b/fuzzforge-modules/crash-analyzer/src/module/models.py
deleted file mode 100644
index bf8620c..0000000
--- a/fuzzforge-modules/crash-analyzer/src/module/models.py
+++ /dev/null
@@ -1,79 +0,0 @@
-"""Models for the crash-analyzer module."""
-
-from enum import Enum
-
-from pydantic import BaseModel, Field
-from fuzzforge_modules_sdk.api.models import FuzzForgeModuleInputBase, FuzzForgeModuleOutputBase
-
-from module.settings import Settings
-
-
-class Severity(str, Enum):
- """Severity level of a crash."""
-
- HIGH = "high"
- MEDIUM = "medium"
- LOW = "low"
- UNKNOWN = "unknown"
-
-
-class CrashAnalysis(BaseModel):
- """Analysis of a single crash."""
-
- #: Name of the fuzz target
- target: str
-
- #: Path to the input file that caused the crash
- input_file: str
-
- #: Hash of the input for identification
- input_hash: str
-
- #: Size of the input in bytes
- input_size: int = 0
-
- #: Type of crash (e.g., "heap-buffer-overflow", "panic")
- crash_type: str = "unknown"
-
- #: Severity level
- severity: Severity = Severity.UNKNOWN
-
- #: Stack trace from reproducing the crash
- stack_trace: str = ""
-
- #: Whether this crash is a duplicate of another
- is_duplicate: bool = False
-
- #: Signature for deduplication
- signature: str = ""
-
-
-class Input(FuzzForgeModuleInputBase[Settings]):
- """Input for the crash-analyzer module.
-
- Expects:
- - Crashes directory from cargo-fuzzer
- - Optionally the fuzz project for reproduction
- """
-
-
-class Output(FuzzForgeModuleOutputBase):
- """Output from the crash-analyzer module."""
-
- #: Total number of crashes analyzed
- total_crashes: int = 0
-
- #: Number of unique crashes (after deduplication)
- unique_crashes: int = 0
-
- #: Number of duplicate crashes
- duplicate_crashes: int = 0
-
- #: Summary by severity
- severity_summary: dict[str, int] = Field(default_factory=dict)
-
- #: Unique crash analyses
- unique_analyses: list[CrashAnalysis] = Field(default_factory=list)
-
- #: Duplicate crash analyses
- duplicate_analyses: list[CrashAnalysis] = Field(default_factory=list)
diff --git a/fuzzforge-modules/crash-analyzer/src/module/settings.py b/fuzzforge-modules/crash-analyzer/src/module/settings.py
deleted file mode 100644
index fdfaf62..0000000
--- a/fuzzforge-modules/crash-analyzer/src/module/settings.py
+++ /dev/null
@@ -1,16 +0,0 @@
-"""Settings for the crash-analyzer module."""
-
-from fuzzforge_modules_sdk.api.models import FuzzForgeModulesSettingsBase
-
-
-class Settings(FuzzForgeModulesSettingsBase):
- """Settings for the crash-analyzer module."""
-
- #: Whether to reproduce crashes for stack traces
- reproduce_crashes: bool = True
-
- #: Timeout for reproducing each crash (seconds)
- reproduce_timeout: int = 30
-
- #: Whether to deduplicate crashes
- deduplicate: bool = True
diff --git a/fuzzforge-modules/crash-analyzer/tests/.gitkeep b/fuzzforge-modules/crash-analyzer/tests/.gitkeep
deleted file mode 100644
index e69de29..0000000
diff --git a/fuzzforge-modules/fuzzforge-module-template/Dockerfile b/fuzzforge-modules/fuzzforge-module-template/Dockerfile
deleted file mode 100644
index d663a1f..0000000
--- a/fuzzforge-modules/fuzzforge-module-template/Dockerfile
+++ /dev/null
@@ -1,12 +0,0 @@
-FROM localhost/fuzzforge-modules-sdk:0.1.0
-
-# Module metadata is now read from pyproject.toml [tool.fuzzforge.module] section
-# See MODULE_METADATA.md for documentation on configuring metadata
-
-COPY ./src /app/src
-COPY ./pyproject.toml /app/pyproject.toml
-
-# Remove workspace reference since we're using wheels
-RUN sed -i '/\[tool\.uv\.sources\]/,/^$/d' /app/pyproject.toml
-
-RUN uv sync --find-links /wheels
diff --git a/fuzzforge-modules/fuzzforge-module-template/Makefile b/fuzzforge-modules/fuzzforge-module-template/Makefile
deleted file mode 100644
index cada4d0..0000000
--- a/fuzzforge-modules/fuzzforge-module-template/Makefile
+++ /dev/null
@@ -1,45 +0,0 @@
-PACKAGE=$(word 1, $(shell uv version))
-VERSION=$(word 2, $(shell uv version))
-
-PODMAN?=/usr/bin/podman
-
-SOURCES=./src
-TESTS=./tests
-
-.PHONY: bandit build clean format mypy pytest ruff version
-
-bandit:
- uv run bandit --recursive $(SOURCES)
-
-build:
- $(PODMAN) build --file ./Dockerfile --no-cache --tag $(PACKAGE):$(VERSION)
-
-save: build
- $(PODMAN) save --format oci-archive --output /tmp/$(PACKAGE)-$(VERSION).oci $(PACKAGE):$(VERSION)
-
-clean:
- @find . -type d \( \
- -name '*.egg-info' \
- -o -name '.mypy_cache' \
- -o -name '.pytest_cache' \
- -o -name '.ruff_cache' \
- -o -name '__pycache__' \
- \) -printf 'removing directory %p\n' -exec rm -rf {} +
-
-cloc:
- cloc $(SOURCES)
-
-format:
- uv run ruff format $(SOURCES) $(TESTS)
-
-mypy:
- uv run mypy $(SOURCES)
-
-pytest:
- uv run pytest $(TESTS)
-
-ruff:
- uv run ruff check --fix $(SOURCES) $(TESTS)
-
-version:
- @echo '$(PACKAGE)@$(VERSION)'
diff --git a/fuzzforge-modules/fuzzforge-module-template/README.md b/fuzzforge-modules/fuzzforge-module-template/README.md
deleted file mode 100644
index d0671a1..0000000
--- a/fuzzforge-modules/fuzzforge-module-template/README.md
+++ /dev/null
@@ -1,46 +0,0 @@
-# FuzzForge Modules - FIXME
-
-## Installation
-
-### Python
-
-```shell
-# install the package (users)
-uv sync
-# install the package and all development dependencies (developers)
-uv sync --all-extras
-```
-
-### Container
-
-```shell
-# build the image
-make build
-# run the container
-mkdir -p "${PWD}/data" "${PWD}/data/input" "${PWD}/data/output"
-echo '{"settings":{},"resources":[]}' > "${PWD}/data/input/input.json"
-podman run --rm \
- --volume "${PWD}/data:/data" \
- ':' 'uv run module'
-```
-
-## Usage
-
-```shell
-uv run module
-```
-
-## Development tools
-
-```shell
-# run ruff (formatter)
-make format
-# run mypy (type checker)
-make mypy
-# run tests (pytest)
-make pytest
-# run ruff (linter)
-make ruff
-```
-
-See the file `Makefile` at the root of this directory for more tools.
diff --git a/fuzzforge-modules/fuzzforge-module-template/mypy.ini b/fuzzforge-modules/fuzzforge-module-template/mypy.ini
deleted file mode 100644
index 84e90d2..0000000
--- a/fuzzforge-modules/fuzzforge-module-template/mypy.ini
+++ /dev/null
@@ -1,6 +0,0 @@
-[mypy]
-plugins = pydantic.mypy
-strict = True
-warn_unused_ignores = True
-warn_redundant_casts = True
-warn_return_any = True
diff --git a/fuzzforge-modules/fuzzforge-module-template/pyproject.toml b/fuzzforge-modules/fuzzforge-module-template/pyproject.toml
deleted file mode 100644
index 303600d..0000000
--- a/fuzzforge-modules/fuzzforge-module-template/pyproject.toml
+++ /dev/null
@@ -1,59 +0,0 @@
-[project]
-name = "fuzzforge-module-template"
-version = "0.1.0"
-description = "FIXME: Add module description"
-authors = []
-readme = "README.md"
-requires-python = ">=3.14"
-dependencies = [
- "fuzzforge-modules-sdk==0.0.1",
- "pydantic==2.12.4",
- "structlog==25.5.0",
-]
-
-[project.optional-dependencies]
-lints = [
- "bandit==1.8.6",
- "mypy==1.18.2",
- "ruff==0.14.4",
-]
-tests = [
- "pytest==9.0.2",
-]
-
-[project.scripts]
-module = "module.__main__:main"
-
-[tool.uv.sources]
-fuzzforge-modules-sdk = { workspace = true }
-
-[tool.uv]
-package = true
-
-# FuzzForge module metadata for AI agent discovery
-[tool.fuzzforge.module]
-# REQUIRED: Unique module identifier (should match Docker image name)
-identifier = "fuzzforge-module-template"
-
-# Optional: List of module identifiers that should run before this one
-suggested_predecessors = []
-
-# Optional: Whether this module supports continuous/background execution
-continuous_mode = false
-
-# REQUIRED: Use cases help AI agents understand when to use this module
-# Include language/target info here (e.g., "Analyze Rust crate...")
-use_cases = [
- "FIXME: Describe what this module does",
- "FIXME: Describe typical usage scenario"
-]
-
-# REQUIRED: What inputs the module expects
-common_inputs = [
- "FIXME: List required input files or artifacts"
-]
-
-# REQUIRED: What outputs the module produces
-output_artifacts = [
- "FIXME: List output files produced"
-]
diff --git a/fuzzforge-modules/fuzzforge-module-template/ruff.toml b/fuzzforge-modules/fuzzforge-module-template/ruff.toml
deleted file mode 100644
index 6374f62..0000000
--- a/fuzzforge-modules/fuzzforge-module-template/ruff.toml
+++ /dev/null
@@ -1,19 +0,0 @@
-line-length = 120
-
-[lint]
-select = [ "ALL" ]
-ignore = [
- "COM812", # conflicts with the formatter
- "D100", # ignoring missing docstrings in public modules
- "D104", # ignoring missing docstrings in public packages
- "D203", # conflicts with 'D211'
- "D213", # conflicts with 'D212'
- "TD002", # ignoring missing author in 'TODO' statements
- "TD003", # ignoring missing issue link in 'TODO' statements
-]
-
-[lint.per-file-ignores]
-"tests/*" = [
- "PLR2004", # allowing comparisons using unamed numerical constants in tests
- "S101", # allowing 'assert' statements in tests
-]
diff --git a/fuzzforge-modules/fuzzforge-module-template/src/module/__init__.py b/fuzzforge-modules/fuzzforge-module-template/src/module/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/fuzzforge-modules/fuzzforge-module-template/src/module/__main__.py b/fuzzforge-modules/fuzzforge-module-template/src/module/__main__.py
deleted file mode 100644
index bc8914a..0000000
--- a/fuzzforge-modules/fuzzforge-module-template/src/module/__main__.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from typing import TYPE_CHECKING
-
-from fuzzforge_modules_sdk.api import logs
-
-from module.mod import Module
-
-if TYPE_CHECKING:
- from fuzzforge_modules_sdk.api.modules.base import FuzzForgeModule
-
-
-def main() -> None:
- """TODO."""
- logs.configure()
- module: FuzzForgeModule = Module()
- module.main()
-
-
-if __name__ == "__main__":
- main()
diff --git a/fuzzforge-modules/fuzzforge-module-template/src/module/mod.py b/fuzzforge-modules/fuzzforge-module-template/src/module/mod.py
deleted file mode 100644
index f0f85e9..0000000
--- a/fuzzforge-modules/fuzzforge-module-template/src/module/mod.py
+++ /dev/null
@@ -1,54 +0,0 @@
-from __future__ import annotations
-
-from typing import TYPE_CHECKING
-
-from fuzzforge_modules_sdk.api.models import FuzzForgeModuleResults
-from fuzzforge_modules_sdk.api.modules.base import FuzzForgeModule
-
-from module.models import Input, Output
-
-if TYPE_CHECKING:
- from fuzzforge_modules_sdk.api.models import FuzzForgeModuleResource, FuzzForgeModulesSettingsType
-
-
-class Module(FuzzForgeModule):
- """TODO."""
-
- def __init__(self) -> None:
- """Initialize an instance of the class."""
- name: str = "FIXME"
- version: str = "FIXME"
- FuzzForgeModule.__init__(self, name=name, version=version)
-
- @classmethod
- def _get_input_type(cls) -> type[Input]:
- """TODO."""
- return Input
-
- @classmethod
- def _get_output_type(cls) -> type[Output]:
- """TODO."""
- return Output
-
- def _prepare(self, settings: FuzzForgeModulesSettingsType) -> None:
- """TODO.
-
- :param settings: TODO.
-
- """
-
- def _run(self, resources: list[FuzzForgeModuleResource]) -> FuzzForgeModuleResults: # noqa: ARG002
- """TODO.
-
- :param resources: TODO.
- :returns: TODO.
-
- """
- return FuzzForgeModuleResults.SUCCESS
-
- def _cleanup(self, settings: FuzzForgeModulesSettingsType) -> None:
- """TODO.
-
- :param settings: TODO.
-
- """
diff --git a/fuzzforge-modules/fuzzforge-module-template/src/module/models.py b/fuzzforge-modules/fuzzforge-module-template/src/module/models.py
deleted file mode 100644
index 2a3f021..0000000
--- a/fuzzforge-modules/fuzzforge-module-template/src/module/models.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from fuzzforge_modules_sdk.api.models import FuzzForgeModuleInputBase, FuzzForgeModuleOutputBase
-
-from module.settings import Settings
-
-
-class Input(FuzzForgeModuleInputBase[Settings]):
- """TODO."""
-
-
-class Output(FuzzForgeModuleOutputBase):
- """TODO."""
diff --git a/fuzzforge-modules/fuzzforge-module-template/src/module/settings.py b/fuzzforge-modules/fuzzforge-module-template/src/module/settings.py
deleted file mode 100644
index f916ad4..0000000
--- a/fuzzforge-modules/fuzzforge-module-template/src/module/settings.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from fuzzforge_modules_sdk.api.models import FuzzForgeModulesSettingsBase
-
-
-class Settings(FuzzForgeModulesSettingsBase):
- """TODO."""
-
- # Here goes your attributes
diff --git a/fuzzforge-modules/fuzzforge-module-template/tests/.gitkeep b/fuzzforge-modules/fuzzforge-module-template/tests/.gitkeep
deleted file mode 100644
index e69de29..0000000
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/Dockerfile b/fuzzforge-modules/fuzzforge-modules-sdk/Dockerfile
deleted file mode 100644
index c98782a..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/Dockerfile
+++ /dev/null
@@ -1,30 +0,0 @@
-# FuzzForge Modules SDK - Base image for all modules
-#
-# This image provides:
-# - Python 3.14 with uv package manager
-# - Pre-built wheels for common dependencies
-# - Standard module directory structure
-
-FROM ghcr.io/astral-sh/uv:python3.14-bookworm-slim
-
-# Install system dependencies commonly needed by modules
-RUN apt-get update && apt-get install -y --no-install-recommends \
- build-essential \
- && rm -rf /var/lib/apt/lists/*
-
-# Set up application directory structure
-WORKDIR /app
-
-# Create FuzzForge standard directories
-RUN mkdir -p /fuzzforge/input /fuzzforge/output
-
-# Copy wheels directory (built by parent Makefile)
-COPY .wheels /wheels
-
-# Set up uv for the container
-ENV UV_SYSTEM_PYTHON=1
-ENV UV_COMPILE_BYTECODE=1
-ENV UV_LINK_MODE=copy
-
-# Default entrypoint - modules override this
-ENTRYPOINT ["uv", "run", "module"]
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/Makefile b/fuzzforge-modules/fuzzforge-modules-sdk/Makefile
deleted file mode 100644
index e7ce0a9..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/Makefile
+++ /dev/null
@@ -1,39 +0,0 @@
-PACKAGE=$(word 1, $(shell uv version))
-VERSION=$(word 2, $(shell uv version))
-
-SOURCES=./src
-TESTS=./tests
-
-FUZZFORGE_MODULE_TEMPLATE=$(PWD)/src/fuzzforge_modules_sdk/templates/module
-
-.PHONY: bandit clean format mypy pytest ruff version
-
-bandit:
- uv run bandit --recursive $(SOURCES)
-
-clean:
- @find . -type d \( \
- -name '*.egg-info' \
- -o -name '.mypy_cache' \
- -o -name '.pytest_cache' \
- -o -name '.ruff_cache' \
- -o -name '__pycache__' \
- \) -printf 'removing directory %p\n' -exec rm -rf {} +
-
-cloc:
- cloc $(SOURCES)
-
-format:
- uv run ruff format $(SOURCES) $(TESTS)
-
-mypy:
- uv run mypy $(SOURCES)
-
-pytest:
- uv run pytest $(TESTS)
-
-ruff:
- uv run ruff check --fix $(SOURCES) $(TESTS)
-
-version:
- @echo '$(PACKAGE)@$(VERSION)'
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/README.md b/fuzzforge-modules/fuzzforge-modules-sdk/README.md
deleted file mode 100644
index 334325b..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/README.md
+++ /dev/null
@@ -1,67 +0,0 @@
-# FuzzForge Modules SDK
-
-...
-
-# Setup
-
-- start the podman user socket
-
-```shell
-systemctl --user start podman.socket
-```
-
-NB : you can also automaticllay start it at boot
-
-```shell
-systemctl --user enable --now podman.socket
-```
-
-## HACK : fix missing `fuzzforge-modules-sdk`
-
-- if you have this error when using some fuzzforge-modules-sdk deps :
-
-```shell
-β― make format
-uv run ruff format ./src ./tests
- Γ No solution found when resolving dependencies:
- β°ββΆ Because fuzzforge-modules-sdk was not found in the package registry and your project depends on fuzzforge-modules-sdk==0.0.1, we can
- conclude that your project's requirements are unsatisfiable.
- And because your project requires opengrep[lints], we can conclude that your project's requirements are unsatisfiable.
-make: *** [Makefile:30: format] Error 1
-```
-
-- build a wheel package of fuzzforge-modules-sdk
-
-```shell
-cd fuzzforge_ng/fuzzforge-modules/fuzzforge-modules-sdk
-uv build
-```
-
-- then inside your module project, install it
-
-```shell
-cd fuzzforge_ng_modules/mymodule
-uv sync --all-extras --find-links ../../fuzzforge_ng/dist/
-```
-
-# Usage
-
-## Prepare
-
-- enter venv (or use uv run)
-
-```shell
-source .venv/bin/activate
-```
-
-- create a new module
-
-```shell
-fuzzforge-modules-sdk new module --name my_new_module --directory ../fuzzforge_ng_modules/
-```
-
-- build the base image
-
-```shell
-fuzzforge-modules-sdk build image
-```
\ No newline at end of file
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/mypy.ini b/fuzzforge-modules/fuzzforge-modules-sdk/mypy.ini
deleted file mode 100644
index f74350d..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/mypy.ini
+++ /dev/null
@@ -1,7 +0,0 @@
-[mypy]
-exclude = ^src/fuzzforge_modules_sdk/templates/.*
-plugins = pydantic.mypy
-strict = True
-warn_unused_ignores = True
-warn_redundant_casts = True
-warn_return_any = True
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/pyproject.toml b/fuzzforge-modules/fuzzforge-modules-sdk/pyproject.toml
deleted file mode 100644
index 4734f59..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/pyproject.toml
+++ /dev/null
@@ -1,32 +0,0 @@
-[project]
-name = "fuzzforge-modules-sdk"
-version = "0.0.1"
-description = "Software development kit (SDK) for FuzzForge's modules."
-authors = []
-readme = "README.md"
-requires-python = ">=3.14"
-dependencies = [
- "podman==5.6.0",
- "pydantic==2.12.4",
- "structlog==25.5.0",
- "tomlkit==0.13.3",
-]
-
-[project.optional-dependencies]
-lints = [
- "bandit==1.8.6",
- "mypy==1.18.2",
- "ruff==0.14.4",
-]
-tests = [
- "pytest==9.0.2",
-]
-
-[project.scripts]
-fuzzforge-modules-sdk = "fuzzforge_modules_sdk._cli.main:main"
-
-[tool.setuptools.package-data]
-fuzzforge_modules_sdk = [
- "assets/**/*",
- "templates/**/*",
-]
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/ruff.toml b/fuzzforge-modules/fuzzforge-modules-sdk/ruff.toml
deleted file mode 100644
index 6374f62..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/ruff.toml
+++ /dev/null
@@ -1,19 +0,0 @@
-line-length = 120
-
-[lint]
-select = [ "ALL" ]
-ignore = [
- "COM812", # conflicts with the formatter
- "D100", # ignoring missing docstrings in public modules
- "D104", # ignoring missing docstrings in public packages
- "D203", # conflicts with 'D211'
- "D213", # conflicts with 'D212'
- "TD002", # ignoring missing author in 'TODO' statements
- "TD003", # ignoring missing issue link in 'TODO' statements
-]
-
-[lint.per-file-ignores]
-"tests/*" = [
- "PLR2004", # allowing comparisons using unamed numerical constants in tests
- "S101", # allowing 'assert' statements in tests
-]
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/__init__.py b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/_cli/__init__.py b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/_cli/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/_cli/build_base_image.py b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/_cli/build_base_image.py
deleted file mode 100644
index 3cc04cb..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/_cli/build_base_image.py
+++ /dev/null
@@ -1,66 +0,0 @@
-from importlib.resources import files
-from pathlib import Path
-from shutil import copyfile, copytree
-from tempfile import TemporaryDirectory
-from typing import TYPE_CHECKING, Literal
-
-import os
-
-from podman import PodmanClient
-from tomlkit import TOMLDocument, parse
-
-if TYPE_CHECKING:
- from importlib.resources.abc import Traversable
-
-
-def _get_default_podman_socket() -> str:
- """Get the default Podman socket path for the current user."""
- uid = os.getuid()
- return f"unix:///run/user/{uid}/podman/podman.sock"
-
-
-PATH_TO_SOURCES: Path = Path(__file__).parent.parent
-
-
-def _build_podman_image(directory: Path, tag: str, socket: str | None = None) -> None:
- if socket is None:
- socket = _get_default_podman_socket()
- with PodmanClient(base_url=socket) as client:
- client.images.build(
- dockerfile="Dockerfile",
- nocache=True,
- path=directory,
- tag=tag,
- )
-
-
-def build_base_image(engine: Literal["podman"], socket: str | None = None) -> None:
- with TemporaryDirectory() as directory:
- path_to_assets: Traversable = files("fuzzforge_modules_sdk").joinpath("assets")
- copyfile(
- src=str(path_to_assets.joinpath("Dockerfile")),
- dst=Path(directory).joinpath("Dockerfile"),
- )
- copyfile(
- src=str(path_to_assets.joinpath("pyproject.toml")),
- dst=Path(directory).joinpath("pyproject.toml"),
- )
- copytree(src=str(PATH_TO_SOURCES), dst=Path(directory).joinpath("src").joinpath(PATH_TO_SOURCES.name))
-
- # update the file 'pyproject.toml'
- path: Path = Path(directory).joinpath("pyproject.toml")
- data: TOMLDocument = parse(path.read_text())
- name: str = data["project"]["name"] # type: ignore[assignment, index]
- version: str = data["project"]["version"] # type: ignore[assignment, index]
- tag: str = f"{name}:{version}"
-
- match engine:
- case "podman":
- _build_podman_image(
- directory=Path(directory),
- socket=socket,
- tag=tag,
- )
- case _:
- message: str = f"unsupported engine '{engine}'"
- raise Exception(message) # noqa: TRY002
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/_cli/create_new_module.py b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/_cli/create_new_module.py
deleted file mode 100644
index 0c2001c..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/_cli/create_new_module.py
+++ /dev/null
@@ -1,30 +0,0 @@
-from __future__ import annotations
-
-from importlib.resources import files
-from shutil import copytree, ignore_patterns
-from typing import TYPE_CHECKING
-
-from tomlkit import dumps, parse
-
-if TYPE_CHECKING:
- from importlib.resources.abc import Traversable
- from pathlib import Path
-
- from tomlkit import TOMLDocument
-
-
-def create_new_module(name: str, directory: Path) -> None:
- source: Traversable = files("fuzzforge_modules_sdk").joinpath("templates").joinpath("fuzzforge-module-template")
- destination: Path = directory.joinpath(name) # TODO: sanitize path
- copytree(
- src=str(source),
- dst=destination,
- ignore=ignore_patterns("__pycache__", "*.egg-info", "*.pyc", ".mypy_cache", ".ruff_cache", ".venv"),
- )
-
- # update the file 'pyproject.toml'
- path: Path = destination.joinpath("pyproject.toml")
- data: TOMLDocument = parse(path.read_text())
- data["project"]["name"] = name # type: ignore[index]
- del data["tool"]["uv"]["sources"] # type: ignore[index, union-attr]
- path.write_text(dumps(data))
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/_cli/main.py b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/_cli/main.py
deleted file mode 100644
index e6ab418..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/_cli/main.py
+++ /dev/null
@@ -1,71 +0,0 @@
-from argparse import ArgumentParser
-from pathlib import Path
-
-from fuzzforge_modules_sdk._cli.build_base_image import build_base_image
-from fuzzforge_modules_sdk._cli.create_new_module import create_new_module
-
-
-def create_parser() -> ArgumentParser:
- parser: ArgumentParser = ArgumentParser(
- prog="fuzzforge-modules-sdk", description="Utilities for the Fuzzforge Modules SDK."
- )
-
- subparsers = parser.add_subparsers(required=True)
-
- # fuzzforge-modules-sdk build ...
- parser_build = subparsers.add_parser(name="build")
-
- subparsers_build = parser_build.add_subparsers(required=True)
-
- # fuzzforge-modules-sdk build image ...
- parser_build_image = subparsers_build.add_parser(
- name="image",
- help="Build the image.",
- )
- parser_build_image.add_argument(
- "--engine",
- default="podman",
- )
- parser_build_image.add_argument(
- "--socket",
- default=None,
- )
- parser_build_image.set_defaults(
- function_to_execute=build_base_image,
- )
-
- # fuzzforge-modules-sdk new ...
- parser_new = subparsers.add_parser(name="new")
-
- subparsers_new = parser_new.add_subparsers(required=True)
-
- # fuzzforge-modules-sdk new module ...
- parser_new_module = subparsers_new.add_parser(
- name="module",
- help="Generate the boilerplate required to create a new module.",
- )
- parser_new_module.add_argument(
- "--name",
- help="The name of the module to create.",
- required=True,
- )
- parser_new_module.add_argument(
- "--directory",
- default=".",
- type=Path,
- help="The directory the new module should be created into (defaults to current working directory).",
- )
- parser_new_module.set_defaults(
- function_to_execute=create_new_module,
- )
-
- return parser
-
-
-def main() -> None:
- """Entry point for the command-line interface."""
- parser: ArgumentParser = create_parser()
- arguments = parser.parse_args()
- function_to_execute = arguments.function_to_execute
- del arguments.function_to_execute
- function_to_execute(**vars(arguments))
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/api/__init__.py b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/api/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/api/constants.py b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/api/constants.py
deleted file mode 100644
index 47f6a35..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/api/constants.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from pathlib import Path
-
-PATH_TO_DATA: Path = Path("/data")
-PATH_TO_INPUTS: Path = PATH_TO_DATA.joinpath("input")
-PATH_TO_INPUT: Path = PATH_TO_INPUTS.joinpath("input.json")
-PATH_TO_OUTPUTS: Path = PATH_TO_DATA.joinpath("output")
-PATH_TO_ARTIFACTS: Path = PATH_TO_OUTPUTS.joinpath("artifacts")
-PATH_TO_RESULTS: Path = PATH_TO_OUTPUTS.joinpath("results.json")
-PATH_TO_LOGS: Path = PATH_TO_OUTPUTS.joinpath("logs.jsonl")
-
-# Streaming output paths for real-time progress
-PATH_TO_PROGRESS: Path = PATH_TO_OUTPUTS.joinpath("progress.json")
-PATH_TO_STREAM: Path = PATH_TO_OUTPUTS.joinpath("stream.jsonl")
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/api/exceptions.py b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/api/exceptions.py
deleted file mode 100644
index da1d040..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/api/exceptions.py
+++ /dev/null
@@ -1,2 +0,0 @@
-class FuzzForgeModuleError(Exception):
- """TODO."""
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/api/logs.py b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/api/logs.py
deleted file mode 100644
index d3a0fb4..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/api/logs.py
+++ /dev/null
@@ -1,43 +0,0 @@
-import logging
-import sys
-
-import structlog
-
-from fuzzforge_modules_sdk.api.constants import PATH_TO_LOGS
-
-
-class Formatter(logging.Formatter):
- """TODO."""
-
- def format(self, record: logging.LogRecord) -> str:
- """TODO."""
- record.exc_info = None
- return super().format(record)
-
-
-def configure() -> None:
- """TODO."""
- fmt: str = "%(message)s"
- level = logging.DEBUG
- PATH_TO_LOGS.parent.mkdir(exist_ok=True, parents=True)
- PATH_TO_LOGS.unlink(missing_ok=True)
- handler_file = logging.FileHandler(filename=PATH_TO_LOGS, mode="a")
- handler_file.setFormatter(fmt=Formatter(fmt=fmt))
- handler_file.setLevel(level=level)
- handler_stderr = logging.StreamHandler(stream=sys.stderr)
- handler_stderr.setFormatter(fmt=Formatter(fmt=fmt))
- handler_stderr.setLevel(level=level)
- logger: logging.Logger = logging.getLogger()
- logger.setLevel(level=level)
- logger.addHandler(handler_file)
- logger.addHandler(handler_stderr)
- structlog.configure(
- processors=[
- structlog.stdlib.add_log_level,
- structlog.processors.TimeStamper(fmt="iso"),
- structlog.processors.dict_tracebacks,
- structlog.processors.JSONRenderer(),
- ],
- logger_factory=structlog.stdlib.LoggerFactory(),
- wrapper_class=structlog.stdlib.BoundLogger,
- )
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/api/models.py b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/api/models.py
deleted file mode 100644
index 5bcabb8..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/api/models.py
+++ /dev/null
@@ -1,85 +0,0 @@
-from enum import StrEnum
-from pathlib import Path # noqa: TC003 (required by pydantic at runtime)
-from typing import TypeVar
-
-from pydantic import BaseModel, ConfigDict
-
-
-class Base(BaseModel):
- """TODO."""
-
- model_config = ConfigDict(extra="forbid")
-
-
-class FuzzForgeModulesSettingsBase(Base):
- """TODO."""
-
-
-FuzzForgeModulesSettingsType = TypeVar("FuzzForgeModulesSettingsType", bound=FuzzForgeModulesSettingsBase)
-
-
-class FuzzForgeModuleResources(StrEnum):
- """Enumeration of artifact types."""
-
- #: The type of the resource is unknown or irrelevant.
- UNKNOWN = "unknown"
-
-
-class FuzzForgeModuleResource(Base):
- """TODO."""
-
- #: The description of the resource.
- description: str
- #: The type of the resource.
- kind: FuzzForgeModuleResources
- #: The name of the resource.
- name: str
- #: The path of the resource on disk.
- path: Path
-
-
-class FuzzForgeModuleInputBase[FuzzForgeModulesSettingsType: FuzzForgeModulesSettingsBase](Base):
- """The (standardized) input of a FuzzForge module."""
-
- #: The collection of resources given to the module as inputs.
- resources: list[FuzzForgeModuleResource]
- #: The settings of the module.
- settings: FuzzForgeModulesSettingsType
-
-
-class FuzzForgeModuleArtifacts(StrEnum):
- """Enumeration of artifact types."""
-
- #: The artifact is an asset.
- ASSET = "asset"
-
-
-class FuzzForgeModuleArtifact(Base):
- """An artifact generated by the module during its run."""
-
- #: The description of the artifact.
- description: str
- #: The type of the artifact.
- kind: FuzzForgeModuleArtifacts
- #: The name of the artifact.
- name: str
- #: The path to the artifact on disk.
- path: Path
-
-
-class FuzzForgeModuleResults(StrEnum):
- """TODO."""
-
- SUCCESS = "success"
- FAILURE = "failure"
-
-
-class FuzzForgeModuleOutputBase(Base):
- """The (standardized) output of a FuzzForge module."""
-
- #: The collection of artifacts generated by the module during its run.
- artifacts: list[FuzzForgeModuleArtifacts]
- #: The path to the logs.
- logs: Path
- #: The result of the module's run.
- result: FuzzForgeModuleResults
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/api/modules/__init__.py b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/api/modules/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/api/modules/base.py b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/api/modules/base.py
deleted file mode 100644
index 3cf89f9..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/api/modules/base.py
+++ /dev/null
@@ -1,288 +0,0 @@
-from abc import ABC, abstractmethod
-import json
-import time
-from datetime import datetime, timezone
-from shutil import rmtree
-from typing import TYPE_CHECKING, Any, Final, final
-
-from structlog import get_logger
-
-from fuzzforge_modules_sdk.api.constants import (
- PATH_TO_ARTIFACTS,
- PATH_TO_INPUT,
- PATH_TO_LOGS,
- PATH_TO_PROGRESS,
- PATH_TO_RESULTS,
- PATH_TO_STREAM,
-)
-from fuzzforge_modules_sdk.api.exceptions import FuzzForgeModuleError
-from fuzzforge_modules_sdk.api.models import (
- FuzzForgeModuleArtifact,
- FuzzForgeModuleArtifacts,
- FuzzForgeModuleInputBase,
- FuzzForgeModuleOutputBase,
- FuzzForgeModuleResource,
- FuzzForgeModuleResults,
- FuzzForgeModulesSettingsType,
-)
-
-if TYPE_CHECKING:
- from pathlib import Path
-
- from structlog.stdlib import BoundLogger
-
-
-class FuzzForgeModule(ABC):
- """FuzzForge Modules' base."""
-
- __artifacts: dict[str, FuzzForgeModuleArtifact]
-
- #: The logger associated with the module.
- __logger: Final[BoundLogger]
-
- #: The name of the module.
- __name: Final[str]
-
- #: The version of the module.
- __version: Final[str]
-
- #: Start time for progress tracking.
- __start_time: float
-
- #: Custom output data set by the module.
- __output_data: dict[str, Any]
-
- def __init__(self, name: str, version: str) -> None:
- """Initialize an instance of the class.
-
- :param name: The name of the module.
- :param version: The version of the module.
-
- """
- self.__artifacts = {}
- self.__logger = get_logger("module")
- self.__name = name
- self.__version = version
- self.__start_time = time.time()
- self.__output_data = {}
-
- # Initialize streaming output files
- PATH_TO_PROGRESS.parent.mkdir(exist_ok=True, parents=True)
- PATH_TO_STREAM.parent.mkdir(exist_ok=True, parents=True)
-
- @final
- def get_logger(self) -> BoundLogger:
- """Return the logger associated with the module."""
- return self.__logger
-
- @final
- def get_name(self) -> str:
- """Return the name of the module."""
- return self.__name
-
- @final
- def get_version(self) -> str:
- """Return the version of the module."""
- return self.__version
-
- @final
- def set_output(self, **kwargs: Any) -> None:
- """Set custom output data to be included in results.json.
-
- Call this from _run() to add module-specific fields to the output.
-
- :param kwargs: Key-value pairs to include in the output.
-
- Example:
- self.set_output(
- total_targets=4,
- valid_targets=["target1", "target2"],
- results=[...]
- )
-
- """
- self.__output_data.update(kwargs)
-
- @final
- def emit_progress(
- self,
- progress: int,
- status: str = "running",
- message: str = "",
- metrics: dict[str, Any] | None = None,
- current_task: str = "",
- ) -> None:
- """Emit a progress update to the progress file.
-
- This method writes to /data/output/progress.json which can be polled
- by the orchestrator or UI to show real-time progress.
-
- :param progress: Progress percentage (0-100).
- :param status: Current status ("initializing", "running", "completed", "failed").
- :param message: Human-readable status message.
- :param metrics: Dictionary of metrics (e.g., {"executions": 1000, "coverage": 50}).
- :param current_task: Name of the current task being performed.
-
- """
- elapsed = time.time() - self.__start_time
-
- progress_data = {
- "module": self.__name,
- "version": self.__version,
- "status": status,
- "progress": max(0, min(100, progress)),
- "message": message,
- "current_task": current_task,
- "elapsed_seconds": round(elapsed, 2),
- "timestamp": datetime.now(timezone.utc).isoformat(),
- "metrics": metrics or {},
- }
-
- PATH_TO_PROGRESS.write_text(json.dumps(progress_data, indent=2))
-
- @final
- def emit_event(self, event: str, **data: Any) -> None:
- """Emit a streaming event to the stream file.
-
- This method appends to /data/output/stream.jsonl which can be tailed
- by the orchestrator or UI for real-time event streaming.
-
- :param event: Event type (e.g., "crash_found", "target_started", "metrics").
- :param data: Additional event data as keyword arguments.
-
- """
- elapsed = time.time() - self.__start_time
-
- event_data = {
- "timestamp": datetime.now(timezone.utc).isoformat(),
- "elapsed_seconds": round(elapsed, 2),
- "module": self.__name,
- "event": event,
- **data,
- }
-
- # Append to stream file (create if doesn't exist)
- with PATH_TO_STREAM.open("a") as f:
- f.write(json.dumps(event_data) + "\n")
-
- @final
- def get_elapsed_seconds(self) -> float:
- """Return the elapsed time since module start.
-
- :returns: Elapsed time in seconds.
-
- """
- return time.time() - self.__start_time
-
- @final
- def _register_artifact(self, name: str, kind: FuzzForgeModuleArtifacts, description: str, path: Path) -> None:
- """Register an artifact.
-
- :param name: The name of the artifact.
- :param kind: The type of the artifact.
- :param description: The description of the artifact.
- :param path: The path of the artifact on the file system.
-
- """
- source: Path = path.resolve(strict=True)
- destination: Path = PATH_TO_ARTIFACTS.joinpath(name).resolve()
- if destination.parent != PATH_TO_ARTIFACTS:
- message: str = f"path '{destination} is not a direct descendant of path '{PATH_TO_ARTIFACTS}'"
- raise FuzzForgeModuleError(message)
- if destination.exists(follow_symlinks=False):
- if destination.is_file() or destination.is_symlink():
- destination.unlink()
- elif destination.is_dir():
- rmtree(destination)
- else:
- message = f"unable to remove resource at path '{destination}': unsupported resource type"
- raise FuzzForgeModuleError(message)
- destination.parent.mkdir(exist_ok=True, parents=True)
- source.copy(destination)
- self.__artifacts[name] = FuzzForgeModuleArtifact(
- description=description,
- kind=kind,
- name=name,
- path=path,
- )
-
- @final
- def main(self) -> None:
- """TODO."""
- result = FuzzForgeModuleResults.SUCCESS
-
- try:
- buffer: bytes = PATH_TO_INPUT.read_bytes()
- data = self._get_input_type().model_validate_json(buffer)
- self._prepare(settings=data.settings)
- except: # noqa: E722
- self.get_logger().exception(event="exception during 'prepare' step")
- result = FuzzForgeModuleResults.FAILURE
-
- if result != FuzzForgeModuleResults.FAILURE:
- try:
- result = self._run(resources=data.resources)
- except: # noqa: E722
- self.get_logger().exception(event="exception during 'run' step")
- result = FuzzForgeModuleResults.FAILURE
-
- if result != FuzzForgeModuleResults.FAILURE:
- try:
- self._cleanup(settings=data.settings)
- except: # noqa: E722
- self.get_logger().exception(event="exception during 'cleanup' step")
-
- output = self._get_output_type()(
- artifacts=list(self.__artifacts.values()),
- logs=PATH_TO_LOGS,
- result=result,
- **self.__output_data,
- )
- buffer = output.model_dump_json().encode("utf-8")
- PATH_TO_RESULTS.parent.mkdir(exist_ok=True, parents=True)
- PATH_TO_RESULTS.write_bytes(buffer)
-
- @classmethod
- @abstractmethod
- def _get_input_type(cls) -> type[FuzzForgeModuleInputBase[Any]]:
- """TODO."""
- message: str = f"method '_get_input_type' is not implemented for class '{cls.__name__}'"
- raise NotImplementedError(message)
-
- @classmethod
- @abstractmethod
- def _get_output_type(cls) -> type[FuzzForgeModuleOutputBase]:
- """TODO."""
- message: str = f"method '_get_output_type' is not implemented for class '{cls.__name__}'"
- raise NotImplementedError(message)
-
- @abstractmethod
- def _prepare(self, settings: FuzzForgeModulesSettingsType) -> None:
- """TODO.
-
- :param settings: TODO.
-
- """
- message: str = f"method '_prepare' is not implemented for class '{self.__class__.__name__}'"
- raise NotImplementedError(message)
-
- @abstractmethod
- def _run(self, resources: list[FuzzForgeModuleResource]) -> FuzzForgeModuleResults:
- """TODO.
-
- :param resources: TODO.
- :returns: TODO.
-
- """
- message: str = f"method '_run' is not implemented for class '{self.__class__.__name__}'"
- raise NotImplementedError(message)
-
- @abstractmethod
- def _cleanup(self, settings: FuzzForgeModulesSettingsType) -> None:
- """TODO.
-
- :param settings: TODO.
-
- """
- message: str = f"method '_cleanup' is not implemented for class '{self.__class__.__name__}'"
- raise NotImplementedError(message)
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/assets/Dockerfile b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/assets/Dockerfile
deleted file mode 100644
index 416c8f2..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/assets/Dockerfile
+++ /dev/null
@@ -1,20 +0,0 @@
-FROM docker.io/debian:trixie as base
-
-COPY --from=ghcr.io/astral-sh/uv:0.9.10 /uv /uvx /bin/
-
-FROM base as builder
-
-WORKDIR /sdk
-
-COPY ./src /sdk/src
-COPY ./pyproject.toml /sdk/pyproject.toml
-
-RUN uv build --wheel -o /sdk/distributions
-
-FROM base as final
-
-COPY --from=builder /sdk/distributions /wheels
-
-WORKDIR /app
-
-CMD [ "/usr/bin/sleep", "infinity" ]
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/assets/pyproject.toml b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/assets/pyproject.toml
deleted file mode 120000
index 7aa7944..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/assets/pyproject.toml
+++ /dev/null
@@ -1 +0,0 @@
-../../../pyproject.toml
\ No newline at end of file
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/py.typed b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/py.typed
deleted file mode 100644
index e69de29..0000000
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/Dockerfile b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/Dockerfile
deleted file mode 100644
index f2c71af..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/Dockerfile
+++ /dev/null
@@ -1,12 +0,0 @@
-FROM localhost/fuzzforge-modules-sdk:0.1.0
-
-# Module metadata is read from pyproject.toml [tool.fuzzforge.module] section
-# See MODULE_METADATA.md for documentation on configuring metadata
-
-COPY ./src /app/src
-COPY ./pyproject.toml /app/pyproject.toml
-
-# Remove workspace reference since we're using wheels
-RUN sed -i '/\[tool\.uv\.sources\]/,/^$/d' /app/pyproject.toml
-
-RUN uv sync --find-links /wheels
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/Makefile b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/Makefile
deleted file mode 100644
index cada4d0..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/Makefile
+++ /dev/null
@@ -1,45 +0,0 @@
-PACKAGE=$(word 1, $(shell uv version))
-VERSION=$(word 2, $(shell uv version))
-
-PODMAN?=/usr/bin/podman
-
-SOURCES=./src
-TESTS=./tests
-
-.PHONY: bandit build clean format mypy pytest ruff version
-
-bandit:
- uv run bandit --recursive $(SOURCES)
-
-build:
- $(PODMAN) build --file ./Dockerfile --no-cache --tag $(PACKAGE):$(VERSION)
-
-save: build
- $(PODMAN) save --format oci-archive --output /tmp/$(PACKAGE)-$(VERSION).oci $(PACKAGE):$(VERSION)
-
-clean:
- @find . -type d \( \
- -name '*.egg-info' \
- -o -name '.mypy_cache' \
- -o -name '.pytest_cache' \
- -o -name '.ruff_cache' \
- -o -name '__pycache__' \
- \) -printf 'removing directory %p\n' -exec rm -rf {} +
-
-cloc:
- cloc $(SOURCES)
-
-format:
- uv run ruff format $(SOURCES) $(TESTS)
-
-mypy:
- uv run mypy $(SOURCES)
-
-pytest:
- uv run pytest $(TESTS)
-
-ruff:
- uv run ruff check --fix $(SOURCES) $(TESTS)
-
-version:
- @echo '$(PACKAGE)@$(VERSION)'
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/README.md b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/README.md
deleted file mode 100644
index d0671a1..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/README.md
+++ /dev/null
@@ -1,46 +0,0 @@
-# FuzzForge Modules - FIXME
-
-## Installation
-
-### Python
-
-```shell
-# install the package (users)
-uv sync
-# install the package and all development dependencies (developers)
-uv sync --all-extras
-```
-
-### Container
-
-```shell
-# build the image
-make build
-# run the container
-mkdir -p "${PWD}/data" "${PWD}/data/input" "${PWD}/data/output"
-echo '{"settings":{},"resources":[]}' > "${PWD}/data/input/input.json"
-podman run --rm \
- --volume "${PWD}/data:/data" \
- ':' 'uv run module'
-```
-
-## Usage
-
-```shell
-uv run module
-```
-
-## Development tools
-
-```shell
-# run ruff (formatter)
-make format
-# run mypy (type checker)
-make mypy
-# run tests (pytest)
-make pytest
-# run ruff (linter)
-make ruff
-```
-
-See the file `Makefile` at the root of this directory for more tools.
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/mypy.ini b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/mypy.ini
deleted file mode 100644
index 84e90d2..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/mypy.ini
+++ /dev/null
@@ -1,6 +0,0 @@
-[mypy]
-plugins = pydantic.mypy
-strict = True
-warn_unused_ignores = True
-warn_redundant_casts = True
-warn_return_any = True
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/pyproject.toml b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/pyproject.toml
deleted file mode 100644
index 086e7b2..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/pyproject.toml
+++ /dev/null
@@ -1,62 +0,0 @@
-[project]
-name = "fuzzforge-module-template"
-version = "0.1.0"
-description = "FIXME: Add module description"
-authors = []
-readme = "README.md"
-requires-python = ">=3.14"
-dependencies = [
- "fuzzforge-modules-sdk==0.0.1",
- "pydantic==2.12.4",
- "structlog==25.5.0",
-]
-
-[project.optional-dependencies]
-lints = [
- "bandit==1.8.6",
- "mypy==1.18.2",
- "ruff==0.14.4",
-]
-tests = [
- "pytest==9.0.2",
-]
-
-[project.scripts]
-module = "module.__main__:main"
-
-[tool.uv.sources]
-fuzzforge-modules-sdk = { workspace = true }
-
-[tool.uv]
-package = true
-
-# FuzzForge module metadata for AI agent discovery
-[tool.fuzzforge.module]
-# REQUIRED: Unique module identifier (should match Docker image name)
-identifier = "fuzzforge-module-template"
-
-# Optional: List of module identifiers that should run before this one
-suggested_predecessors = []
-
-# Optional: Whether this module supports continuous/background execution
-continuous_mode = false
-
-# REQUIRED: Use cases help AI agents understand when to use this module
-# Include language/target info here (e.g., "Analyze Rust crate...")
-use_cases = [
- "FIXME: Describe what this module does",
- "FIXME: Describe typical usage scenario"
-]
-
-# REQUIRED: What inputs the module expects
-common_inputs = [
- "FIXME: List required input files or artifacts"
-]
-
-# REQUIRED: What outputs the module produces
-output_artifacts = [
- "FIXME: List output files produced"
-]
-
-# REQUIRED: How AI should display output to user
-output_treatment = "FIXME: Describe how to present the output"
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/ruff.toml b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/ruff.toml
deleted file mode 100644
index 6374f62..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/ruff.toml
+++ /dev/null
@@ -1,19 +0,0 @@
-line-length = 120
-
-[lint]
-select = [ "ALL" ]
-ignore = [
- "COM812", # conflicts with the formatter
- "D100", # ignoring missing docstrings in public modules
- "D104", # ignoring missing docstrings in public packages
- "D203", # conflicts with 'D211'
- "D213", # conflicts with 'D212'
- "TD002", # ignoring missing author in 'TODO' statements
- "TD003", # ignoring missing issue link in 'TODO' statements
-]
-
-[lint.per-file-ignores]
-"tests/*" = [
- "PLR2004", # allowing comparisons using unamed numerical constants in tests
- "S101", # allowing 'assert' statements in tests
-]
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/src/module/__init__.py b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/src/module/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/src/module/__main__.py b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/src/module/__main__.py
deleted file mode 100644
index bc8914a..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/src/module/__main__.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from typing import TYPE_CHECKING
-
-from fuzzforge_modules_sdk.api import logs
-
-from module.mod import Module
-
-if TYPE_CHECKING:
- from fuzzforge_modules_sdk.api.modules.base import FuzzForgeModule
-
-
-def main() -> None:
- """TODO."""
- logs.configure()
- module: FuzzForgeModule = Module()
- module.main()
-
-
-if __name__ == "__main__":
- main()
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/src/module/mod.py b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/src/module/mod.py
deleted file mode 100644
index f0f85e9..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/src/module/mod.py
+++ /dev/null
@@ -1,54 +0,0 @@
-from __future__ import annotations
-
-from typing import TYPE_CHECKING
-
-from fuzzforge_modules_sdk.api.models import FuzzForgeModuleResults
-from fuzzforge_modules_sdk.api.modules.base import FuzzForgeModule
-
-from module.models import Input, Output
-
-if TYPE_CHECKING:
- from fuzzforge_modules_sdk.api.models import FuzzForgeModuleResource, FuzzForgeModulesSettingsType
-
-
-class Module(FuzzForgeModule):
- """TODO."""
-
- def __init__(self) -> None:
- """Initialize an instance of the class."""
- name: str = "FIXME"
- version: str = "FIXME"
- FuzzForgeModule.__init__(self, name=name, version=version)
-
- @classmethod
- def _get_input_type(cls) -> type[Input]:
- """TODO."""
- return Input
-
- @classmethod
- def _get_output_type(cls) -> type[Output]:
- """TODO."""
- return Output
-
- def _prepare(self, settings: FuzzForgeModulesSettingsType) -> None:
- """TODO.
-
- :param settings: TODO.
-
- """
-
- def _run(self, resources: list[FuzzForgeModuleResource]) -> FuzzForgeModuleResults: # noqa: ARG002
- """TODO.
-
- :param resources: TODO.
- :returns: TODO.
-
- """
- return FuzzForgeModuleResults.SUCCESS
-
- def _cleanup(self, settings: FuzzForgeModulesSettingsType) -> None:
- """TODO.
-
- :param settings: TODO.
-
- """
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/src/module/models.py b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/src/module/models.py
deleted file mode 100644
index 2a3f021..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/src/module/models.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from fuzzforge_modules_sdk.api.models import FuzzForgeModuleInputBase, FuzzForgeModuleOutputBase
-
-from module.settings import Settings
-
-
-class Input(FuzzForgeModuleInputBase[Settings]):
- """TODO."""
-
-
-class Output(FuzzForgeModuleOutputBase):
- """TODO."""
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/src/module/settings.py b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/src/module/settings.py
deleted file mode 100644
index f916ad4..0000000
--- a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/src/module/settings.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from fuzzforge_modules_sdk.api.models import FuzzForgeModulesSettingsBase
-
-
-class Settings(FuzzForgeModulesSettingsBase):
- """TODO."""
-
- # Here goes your attributes
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/tests/.gitkeep b/fuzzforge-modules/fuzzforge-modules-sdk/src/fuzzforge_modules_sdk/templates/fuzzforge-module-template/tests/.gitkeep
deleted file mode 100644
index e69de29..0000000
diff --git a/fuzzforge-modules/fuzzforge-modules-sdk/tests/.gitkeep b/fuzzforge-modules/fuzzforge-modules-sdk/tests/.gitkeep
deleted file mode 100644
index e69de29..0000000
diff --git a/fuzzforge-modules/harness-tester/Dockerfile b/fuzzforge-modules/harness-tester/Dockerfile
deleted file mode 100644
index b960d5b..0000000
--- a/fuzzforge-modules/harness-tester/Dockerfile
+++ /dev/null
@@ -1,26 +0,0 @@
-FROM localhost/fuzzforge-modules-sdk:0.1.0
-
-# Module metadata is now read from pyproject.toml [tool.fuzzforge.module] section
-
-# Install build tools and Rust nightly for compiling and testing fuzz harnesses
-RUN apt-get update && apt-get install -y \
- curl \
- build-essential \
- pkg-config \
- libssl-dev \
- && rm -rf /var/lib/apt/lists/*
-
-RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain nightly
-ENV PATH="/root/.cargo/bin:${PATH}"
-
-# Install cargo-fuzz for testing harnesses
-RUN cargo install cargo-fuzz --locked || true
-
-COPY ./src /app/src
-COPY ./pyproject.toml /app/pyproject.toml
-COPY ./README.md /app/README.md
-
-# Remove workspace reference since we're using wheels
-RUN sed -i '/\[tool\.uv\.sources\]/,/^$/d' /app/pyproject.toml
-
-RUN uv sync --find-links /wheels
diff --git a/fuzzforge-modules/harness-tester/FEEDBACK_TYPES.md b/fuzzforge-modules/harness-tester/FEEDBACK_TYPES.md
deleted file mode 100644
index f964dc7..0000000
--- a/fuzzforge-modules/harness-tester/FEEDBACK_TYPES.md
+++ /dev/null
@@ -1,289 +0,0 @@
-# Harness Tester Feedback Types
-
-Complete reference of all feedback the `harness-tester` module provides to help AI agents improve fuzz harnesses.
-
-## Overview
-
-The harness-tester evaluates harnesses across **6 dimensions** and provides specific, actionable suggestions for each issue detected.
-
----
-
-## 1. Compilation Feedback
-
-### β
Success Cases
-- **Compiles successfully** β Strength noted
-
-### β Error Cases
-
-| Issue Type | Severity | Detection | Suggestion |
-|------------|----------|-----------|------------|
-| `undefined_variable` | CRITICAL | "cannot find" in error | Check variable names match function signature. Use exact names from fuzzable_functions.json |
-| `type_mismatch` | CRITICAL | "mismatched types" in error | Check function expects types you're passing. Convert fuzzer input to correct type (e.g., &[u8] to &str with from_utf8) |
-| `trait_not_implemented` | CRITICAL | "trait" + "not implemented" | Ensure you're using correct types. Some functions require specific trait implementations |
-| `compilation_error` | CRITICAL | Any other error | Review error message and fix syntax/type issues. Check function signatures in source code |
-
-### β οΈ Warning Cases
-
-| Issue Type | Severity | Detection | Suggestion |
-|------------|----------|-----------|------------|
-| `unused_variable` | INFO | "unused" in warning | Remove unused variables or use underscore prefix (_variable) to suppress warning |
-
----
-
-## 2. Execution Feedback
-
-### β
Success Cases
-- **Executes without crashing** β Strength noted
-
-### β Error Cases
-
-| Issue Type | Severity | Detection | Suggestion |
-|------------|----------|-----------|------------|
-| `stack_overflow` | CRITICAL | "stack overflow" in crash | Check for infinite recursion or large stack allocations. Use heap allocation (Box, Vec) for large data structures |
-| `panic_on_start` | CRITICAL | "panic" in crash | Check initialization code. Ensure required resources are available and input validation doesn't panic on empty input |
-| `immediate_crash` | CRITICAL | Crashes on first run | Debug harness initialization. Add error handling and check for null/invalid pointers |
-| `infinite_loop` | CRITICAL | Execution timeout | Check for loops that depend on fuzzer input. Add iteration limits or timeout mechanisms |
-
----
-
-## 3. Coverage Feedback
-
-### β
Success Cases
-- **>50% coverage** β "Excellent coverage"
-- **Good growth** β "Harness exploring code paths"
-
-### β Error Cases
-
-| Issue Type | Severity | Detection | Suggestion |
-|------------|----------|-----------|------------|
-| `no_coverage` | CRITICAL | 0 new edges found | Ensure you're actually calling the target function with fuzzer-provided data. Check that 'data' parameter is passed to function |
-| `very_low_coverage` | WARNING | <5% coverage or "none" growth | Harness may not be reaching target code. Verify correct entry point function. Check if input validation rejects all fuzzer data |
-| `low_coverage` | WARNING | <20% coverage or "poor" growth | Try fuzzing multiple entry points or remove restrictive input validation. Consider using dictionary for structured inputs |
-| `early_stagnation` | INFO | Coverage stops growing <10s | Harness may be hitting input validation barriers. Consider fuzzing with seed corpus of valid inputs |
-
----
-
-## 4. Performance Feedback
-
-### β
Success Cases
-- **>1000 execs/s** β "Excellent performance"
-- **>500 execs/s** β "Good performance"
-
-### β Error Cases
-
-| Issue Type | Severity | Detection | Suggestion |
-|------------|----------|-----------|------------|
-| `extremely_slow` | CRITICAL | <10 execs/s | Remove file I/O, network operations, or expensive computations from harness loop. Move setup code outside fuzz target function |
-| `slow_execution` | WARNING | <100 execs/s | Optimize harness: avoid allocations in hot path, reuse buffers, remove logging. Profile to find bottlenecks |
-
----
-
-## 5. Stability Feedback
-
-### β
Success Cases
-- **Stable execution** β Strength noted
-- **Found unique crashes** β "Found N potential bugs!"
-
-### β οΈ Warning Cases
-
-| Issue Type | Severity | Detection | Suggestion |
-|------------|----------|-----------|------------|
-| `unstable_frequent_crashes` | WARNING | >10 crashes per 1000 execs | This might be expected if testing buggy code. If not, add error handling for edge cases or invalid inputs |
-| `hangs_detected` | WARNING | Hangs found during trial | Add timeouts to prevent infinite loops. Check for blocking operations or resource exhaustion |
-
----
-
-## 6. Code Quality Feedback
-
-### Informational
-
-| Issue Type | Severity | Detection | Suggestion |
-|------------|----------|-----------|------------|
-| `unused_variable` | INFO | Compiler warnings | Clean up code for better maintainability |
-
----
-
-## Quality Scoring Formula
-
-```
-Base Score: 20 points (for compiling + running)
-
-+ Coverage (0-40 points):
- - Excellent growth: +40
- - Good growth: +30
- - Poor growth: +10
- - No growth: +0
-
-+ Performance (0-25 points):
- - >1000 execs/s: +25
- - >500 execs/s: +20
- - >100 execs/s: +10
- - >10 execs/s: +5
- - <10 execs/s: +0
-
-+ Stability (0-15 points):
- - Stable: +15
- - Unstable: +10
- - Crashes frequently: +5
-
-Maximum: 100 points
-```
-
-### Verdicts
-
-- **70-100**: `production-ready` β Use for long-term fuzzing campaigns
-- **30-69**: `needs-improvement` β Fix issues before production use
-- **0-29**: `broken` β Critical issues block execution
-
----
-
-## Example Feedback Flow
-
-### Scenario 1: Broken Harness (Type Mismatch)
-
-```json
-{
- "quality": {
- "score": 0,
- "verdict": "broken",
- "issues": [
- {
- "category": "compilation",
- "severity": "critical",
- "type": "type_mismatch",
- "message": "Type mismatch: expected &[u8], found &str",
- "suggestion": "Check function expects types you're passing. Convert fuzzer input to correct type (e.g., &[u8] to &str with from_utf8)"
- }
- ],
- "recommended_actions": [
- "Fix 1 critical issue(s) preventing execution"
- ]
- }
-}
-```
-
-**AI Agent Action**: Regenerate harness with correct type conversion
-
----
-
-### Scenario 2: Low Coverage Harness
-
-```json
-{
- "quality": {
- "score": 35,
- "verdict": "needs-improvement",
- "issues": [
- {
- "category": "coverage",
- "severity": "warning",
- "type": "low_coverage",
- "message": "Low coverage: 12% - not exploring enough code paths",
- "suggestion": "Try fuzzing multiple entry points or remove restrictive input validation"
- },
- {
- "category": "performance",
- "severity": "warning",
- "type": "slow_execution",
- "message": "Slow execution: 45 execs/sec (expected 500+)",
- "suggestion": "Optimize harness: avoid allocations in hot path, reuse buffers"
- }
- ],
- "strengths": [
- "Compiles successfully",
- "Executes without crashing"
- ],
- "recommended_actions": [
- "Address 2 warning(s) to improve harness quality"
- ]
- }
-}
-```
-
-**AI Agent Action**: Remove input validation, optimize performance
-
----
-
-### Scenario 3: Production-Ready Harness
-
-```json
-{
- "quality": {
- "score": 85,
- "verdict": "production-ready",
- "issues": [],
- "strengths": [
- "Compiles successfully",
- "Executes without crashing",
- "Excellent coverage: 67% of target code reached",
- "Excellent performance: 1507 execs/sec",
- "Stable execution - no crashes or hangs"
- ],
- "recommended_actions": [
- "Harness is ready for production fuzzing"
- ]
- }
-}
-```
-
-**AI Agent Action**: Proceed to long-term fuzzing with cargo-fuzzer
-
----
-
-## Integration with AI Workflow
-
-```python
-def iterative_harness_generation(target_function):
- """AI agent iteratively improves harness based on feedback."""
-
- max_iterations = 3
-
- for iteration in range(max_iterations):
- # Generate or improve harness
- if iteration == 0:
- harness = ai_generate_harness(target_function)
- else:
- harness = ai_improve_harness(previous_harness, feedback)
-
- # Test harness
- result = execute_module("harness-tester", harness)
- evaluation = result["harnesses"][0]
-
- # Check verdict
- if evaluation["quality"]["verdict"] == "production-ready":
- return harness # Success!
-
- # Extract feedback for next iteration
- feedback = {
- "issues": evaluation["quality"]["issues"],
- "suggestions": [issue["suggestion"] for issue in evaluation["quality"]["issues"]],
- "score": evaluation["quality"]["score"],
- "coverage": evaluation["fuzzing_trial"]["coverage"] if "fuzzing_trial" in evaluation else None,
- "performance": evaluation["fuzzing_trial"]["performance"] if "fuzzing_trial" in evaluation else None
- }
-
- # Store for next iteration
- previous_harness = harness
-
- return harness # Return best attempt after max iterations
-```
-
----
-
-## Summary
-
-The harness-tester provides **comprehensive, actionable feedback** across 6 dimensions:
-
-1. β
**Compilation** - Syntax and type correctness
-2. β
**Execution** - Runtime stability
-3. β
**Coverage** - Code exploration effectiveness
-4. β
**Performance** - Execution speed
-5. β
**Stability** - Crash/hang frequency
-6. β
**Code Quality** - Best practices
-
-Each issue includes:
-- **Clear detection** of what went wrong
-- **Specific suggestion** on how to fix it
-- **Severity level** to prioritize fixes
-
-This enables AI agents to rapidly iterate and produce high-quality fuzz harnesses with minimal human intervention.
diff --git a/fuzzforge-modules/harness-tester/Makefile b/fuzzforge-modules/harness-tester/Makefile
deleted file mode 100644
index a28ba9c..0000000
--- a/fuzzforge-modules/harness-tester/Makefile
+++ /dev/null
@@ -1,28 +0,0 @@
-.PHONY: help build clean format lint test
-
-help:
- @echo "Available targets:"
- @echo " build - Build Docker image"
- @echo " clean - Remove build artifacts"
- @echo " format - Format code with ruff"
- @echo " lint - Lint code with ruff and mypy"
- @echo " test - Run tests"
-
-build:
- docker build -t fuzzforge-harness-tester:0.1.0 .
-
-clean:
- rm -rf .pytest_cache
- rm -rf .mypy_cache
- rm -rf .ruff_cache
- find . -type d -name __pycache__ -exec rm -rf {} +
-
-format:
- uv run ruff format ./src ./tests
-
-lint:
- uv run ruff check ./src ./tests
- uv run mypy ./src
-
-test:
- uv run pytest tests/ -v
diff --git a/fuzzforge-modules/harness-tester/README.md b/fuzzforge-modules/harness-tester/README.md
deleted file mode 100644
index 130bc84..0000000
--- a/fuzzforge-modules/harness-tester/README.md
+++ /dev/null
@@ -1,155 +0,0 @@
-# Harness Tester Module
-
-Tests and evaluates fuzz harnesses with comprehensive feedback for AI-driven iteration.
-
-## Overview
-
-The `harness-tester` module runs a battery of tests on fuzz harnesses to provide actionable feedback:
-
-1. **Compilation Testing** - Validates harness compiles correctly
-2. **Execution Testing** - Ensures harness runs without immediate crashes
-3. **Fuzzing Trial** - Runs short fuzzing session (default: 30s) to measure:
- - Coverage growth
- - Execution performance (execs/sec)
- - Stability (crashes, hangs)
-4. **Quality Assessment** - Generates scored evaluation with specific issues and suggestions
-
-## Feedback Categories
-
-### 1. Compilation Feedback
-- Undefined variables β "Check variable names match function signature"
-- Type mismatches β "Convert fuzzer input to correct type"
-- Missing traits β "Ensure you're using correct types"
-
-### 2. Execution Feedback
-- Stack overflow β "Check for infinite recursion, use heap allocation"
-- Immediate panic β "Check initialization code and input validation"
-- Timeout/infinite loop β "Add iteration limits"
-
-### 3. Coverage Feedback
-- No coverage β "Harness may not be using fuzzer input"
-- Very low coverage (<5%) β "May not be reaching target code, check entry point"
-- Low coverage (<20%) β "Try fuzzing multiple entry points"
-- Good/Excellent coverage β "Harness is exploring code paths well"
-
-### 4. Performance Feedback
-- Extremely slow (<10 execs/s) β "Remove file I/O or network operations"
-- Slow (<100 execs/s) β "Optimize harness, avoid allocations in hot path"
-- Good (>500 execs/s) β Ready for production
-- Excellent (>1000 execs/s) β Optimal performance
-
-### 5. Stability Feedback
-- Frequent crashes β "Add error handling for edge cases"
-- Hangs detected β "Add timeouts to prevent infinite loops"
-- Stable β Ready for production
-
-## Usage
-
-```python
-# Via MCP
-result = execute_module("harness-tester",
- assets_path="/path/to/rust/project",
- configuration={
- "trial_duration_sec": 30,
- "execution_timeout_sec": 10
- })
-```
-
-## Input Requirements
-
-- Rust project with `Cargo.toml`
-- Fuzz harnesses in `fuzz/fuzz_targets/`
-- Source code to analyze
-
-## Output Artifacts
-
-### `harness-evaluation.json`
-Complete structured evaluation with:
-```json
-{
- "harnesses": [
- {
- "name": "fuzz_png_decode",
- "compilation": { "success": true, "time_ms": 4523 },
- "execution": { "success": true },
- "fuzzing_trial": {
- "coverage": {
- "final_edges": 891,
- "growth_rate": "good",
- "percentage_estimate": 67.0
- },
- "performance": {
- "execs_per_sec": 1507.0,
- "performance_rating": "excellent"
- },
- "stability": { "status": "stable" }
- },
- "quality": {
- "score": 85,
- "verdict": "production-ready",
- "issues": [],
- "strengths": ["Excellent performance", "Good coverage"],
- "recommended_actions": ["Ready for production fuzzing"]
- }
- }
- ],
- "summary": {
- "total_harnesses": 1,
- "production_ready": 1,
- "average_score": 85.0
- }
-}
-```
-
-### `feedback-summary.md`
-Human-readable summary with all issues and suggestions.
-
-## Quality Scoring
-
-Harnesses are scored 0-100 based on:
-
-- **Compilation** (20 points): Must compile to proceed
-- **Execution** (20 points): Must run without crashing
-- **Coverage** (40 points):
- - Excellent growth: 40 pts
- - Good growth: 30 pts
- - Poor growth: 10 pts
-- **Performance** (25 points):
- - >1000 execs/s: 25 pts
- - >500 execs/s: 20 pts
- - >100 execs/s: 10 pts
-- **Stability** (15 points):
- - Stable: 15 pts
- - Unstable: 10 pts
- - Crashes frequently: 5 pts
-
-**Verdicts:**
-- 70-100: `production-ready`
-- 30-69: `needs-improvement`
-- 0-29: `broken`
-
-## AI Agent Iteration Pattern
-
-```
-1. AI generates harness
-2. harness-tester evaluates it
-3. Returns: score=35, verdict="needs-improvement"
- Issues: "Low coverage (8%), slow execution (7.8 execs/s)"
- Suggestions: "Check entry point function, remove I/O operations"
-4. AI fixes harness based on feedback
-5. harness-tester re-evaluates
-6. Returns: score=85, verdict="production-ready"
-7. Proceed to production fuzzing
-```
-
-## Configuration Options
-
-| Option | Default | Description |
-|--------|---------|-------------|
-| `trial_duration_sec` | 30 | How long to run fuzzing trial |
-| `execution_timeout_sec` | 10 | Timeout for execution test |
-
-## See Also
-
-- [Module SDK Documentation](../fuzzforge-modules-sdk/README.md)
-- [MODULE_METADATA.md](../MODULE_METADATA.md)
diff --git a/fuzzforge-modules/harness-tester/mypy.ini b/fuzzforge-modules/harness-tester/mypy.ini
deleted file mode 100644
index 84e90d2..0000000
--- a/fuzzforge-modules/harness-tester/mypy.ini
+++ /dev/null
@@ -1,6 +0,0 @@
-[mypy]
-plugins = pydantic.mypy
-strict = True
-warn_unused_ignores = True
-warn_redundant_casts = True
-warn_return_any = True
diff --git a/fuzzforge-modules/harness-tester/pyproject.toml b/fuzzforge-modules/harness-tester/pyproject.toml
deleted file mode 100644
index 078eff9..0000000
--- a/fuzzforge-modules/harness-tester/pyproject.toml
+++ /dev/null
@@ -1,60 +0,0 @@
-[project]
-name = "fuzzforge-harness-tester"
-version = "0.1.0"
-description = "Tests and evaluates fuzz harnesses with detailed feedback for AI-driven iteration"
-readme = "README.md"
-requires-python = ">=3.14"
-dependencies = [
- "fuzzforge-modules-sdk==0.0.1",
- "pydantic==2.12.4",
- "structlog==25.5.0",
-]
-
-[project.scripts]
-module = "module.__main__:main"
-
-[tool.uv.sources]
-fuzzforge-modules-sdk = { workspace = true }
-
-[build-system]
-requires = ["hatchling"]
-build-backend = "hatchling.build"
-
-[tool.hatch.build.targets.wheel]
-packages = ["src/module"]
-
-[dependency-groups]
-dev = [
- "mypy>=1.8.0",
- "pytest>=7.4.3",
- "pytest-asyncio>=0.21.1",
- "pytest-cov>=4.1.0",
- "ruff>=0.1.9",
-]
-
-# FuzzForge module metadata for AI agent discovery
-[tool.fuzzforge.module]
-identifier = "fuzzforge-harness-tester"
-suggested_predecessors = ["fuzzforge-rust-analyzer"]
-continuous_mode = false
-
-use_cases = [
- "Validate Rust fuzz harnesses compile correctly",
- "Run short fuzzing trials to assess harness quality",
- "Provide detailed feedback for AI to improve harnesses",
- "Gate before running expensive long fuzzing campaigns"
-]
-
-common_inputs = [
- "fuzz-harnesses",
- "Cargo.toml",
- "rust-source-code"
-]
-
-output_artifacts = [
- "artifacts/harness-evaluation.json",
- "artifacts/feedback-summary.md",
- "results.json"
-]
-
-output_treatment = "Display artifacts/feedback-summary.md as rendered markdown for quick review. Read artifacts/harness-evaluation.json for detailed per-harness results with verdict (production_ready/needs_improvement/broken), score, strengths, and issues with suggestions."
diff --git a/fuzzforge-modules/harness-tester/ruff.toml b/fuzzforge-modules/harness-tester/ruff.toml
deleted file mode 100644
index 6374f62..0000000
--- a/fuzzforge-modules/harness-tester/ruff.toml
+++ /dev/null
@@ -1,19 +0,0 @@
-line-length = 120
-
-[lint]
-select = [ "ALL" ]
-ignore = [
- "COM812", # conflicts with the formatter
- "D100", # ignoring missing docstrings in public modules
- "D104", # ignoring missing docstrings in public packages
- "D203", # conflicts with 'D211'
- "D213", # conflicts with 'D212'
- "TD002", # ignoring missing author in 'TODO' statements
- "TD003", # ignoring missing issue link in 'TODO' statements
-]
-
-[lint.per-file-ignores]
-"tests/*" = [
- "PLR2004", # allowing comparisons using unamed numerical constants in tests
- "S101", # allowing 'assert' statements in tests
-]
diff --git a/fuzzforge-modules/harness-tester/src/module/__init__.py b/fuzzforge-modules/harness-tester/src/module/__init__.py
deleted file mode 100644
index 77e4a87..0000000
--- a/fuzzforge-modules/harness-tester/src/module/__init__.py
+++ /dev/null
@@ -1,730 +0,0 @@
-"""Harness tester module - tests and evaluates fuzz harnesses."""
-
-from __future__ import annotations
-
-import json
-import subprocess
-import time
-from pathlib import Path
-from typing import TYPE_CHECKING, Any
-
-from fuzzforge_modules_sdk.api.models import (
- FuzzForgeModuleResource,
- FuzzForgeModuleResults,
-)
-from fuzzforge_modules_sdk.api.modules.base import FuzzForgeModule
-
-from module.analyzer import FeedbackGenerator
-from module.feedback import (
- CompilationResult,
- CoverageMetrics,
- EvaluationSummary,
- ExecutionResult,
- FuzzingTrial,
- HarnessEvaluation,
- HarnessTestReport,
- PerformanceMetrics,
- StabilityMetrics,
-)
-from module.models import Input, Output
-from module.settings import Settings
-
-
-class HarnessTesterModule(FuzzForgeModule):
- """Tests fuzz harnesses with compilation, execution, and short fuzzing trials."""
-
- _settings: Settings | None
-
- def __init__(self) -> None:
- """Initialize an instance of the class."""
- name: str = "harness-tester"
- version: str = "0.1.0"
- FuzzForgeModule.__init__(self, name=name, version=version)
- self._settings = None
- self.configuration: dict[str, Any] = {}
-
- @classmethod
- def _get_input_type(cls) -> type[Input]:
- """Return the input type."""
- return Input
-
- @classmethod
- def _get_output_type(cls) -> type[Output]:
- """Return the output type."""
- return Output
-
- def _prepare(self, settings: Settings) -> None: # type: ignore[override]
- """Prepare the module.
-
- :param settings: Module settings.
-
- """
- self._settings = settings
- self.configuration = {
- "trial_duration_sec": settings.trial_duration_sec,
- "execution_timeout_sec": settings.execution_timeout_sec,
- "enable_coverage": settings.enable_coverage,
- "min_quality_score": settings.min_quality_score,
- }
-
- def _cleanup(self, settings: Settings) -> None: # type: ignore[override]
- """Cleanup after module execution.
-
- :param settings: Module settings.
-
- """
- pass # No cleanup needed
-
- def _run(self, resources: list[FuzzForgeModuleResource]) -> FuzzForgeModuleResults:
- """Run harness testing on provided resources.
-
- :param resources: List of resources (Rust project with fuzz harnesses)
- :returns: Module execution result
- """
- import shutil
-
- self.emit_event("started", message="Beginning harness testing")
-
- # Configuration
- trial_duration = self.configuration.get("trial_duration_sec", 30)
- timeout_sec = self.configuration.get("execution_timeout_sec", 10)
-
- # Debug: Log resources
- self.get_logger().info(
- "Received resources",
- count=len(resources),
- resources=[str(r.path) for r in resources],
- )
-
- # Find Rust project
- project_path = self._find_rust_project(resources)
- if not project_path:
- self.emit_event("error", message="No Rust project found in resources")
- return FuzzForgeModuleResults.FAILURE
-
- # Copy project to writable workspace (input is read-only)
- workspace = Path("/tmp/harness-workspace")
- if workspace.exists():
- shutil.rmtree(workspace)
- shutil.copytree(project_path, workspace)
- project_path = workspace
-
- self.get_logger().info("Copied project to writable workspace", path=str(project_path))
-
- # Find fuzz harnesses
- harnesses = self._find_fuzz_harnesses(project_path)
-
- # Debug: Log fuzz directory status
- fuzz_dir = project_path / "fuzz" / "fuzz_targets"
- self.get_logger().info(
- "Checking fuzz directory",
- fuzz_dir=str(fuzz_dir),
- exists=fuzz_dir.exists(),
- )
-
- if not harnesses:
- self.emit_event("error", message="No fuzz harnesses found")
- return FuzzForgeModuleResults.FAILURE
-
- self.emit_event(
- "found_harnesses",
- count=len(harnesses),
- harnesses=[h.name for h in harnesses],
- )
-
- # Test each harness
- evaluations = []
- total_harnesses = len(harnesses)
-
- for idx, harness in enumerate(harnesses, 1):
- self.emit_progress(
- int((idx / total_harnesses) * 90),
- status="testing",
- message=f"Testing harness {idx}/{total_harnesses}: {harness.name}",
- )
-
- evaluation = self._test_harness(
- project_path, harness, trial_duration, timeout_sec
- )
- evaluations.append(evaluation)
-
- # Emit evaluation summary
- self.emit_event(
- "harness_tested",
- harness=harness.name,
- verdict=evaluation.quality.verdict,
- score=evaluation.quality.score,
- issues=len(evaluation.quality.issues),
- )
-
- # Generate summary
- summary = self._generate_summary(evaluations)
-
- # Create report
- report = HarnessTestReport(
- harnesses=evaluations,
- summary=summary,
- test_configuration={
- "trial_duration_sec": trial_duration,
- "execution_timeout_sec": timeout_sec,
- },
- )
-
- # Save report
- self._save_report(report)
-
- self.emit_progress(100, status="completed", message="Harness testing complete")
- self.emit_event(
- "completed",
- total_harnesses=total_harnesses,
- production_ready=summary.production_ready,
- needs_improvement=summary.needs_improvement,
- broken=summary.broken,
- )
-
- return FuzzForgeModuleResults.SUCCESS
-
- def _find_rust_project(self, resources: list[FuzzForgeModuleResource]) -> Path | None:
- """Find Rust project with Cargo.toml (the main project, not fuzz workspace).
-
- :param resources: List of resources
- :returns: Path to Rust project or None
- """
- # First, try to find a directory with both Cargo.toml and src/
- for resource in resources:
- path = Path(resource.path)
- cargo_toml = path / "Cargo.toml"
- src_dir = path / "src"
- if cargo_toml.exists() and src_dir.exists():
- return path
-
- # Fall back to finding parent of fuzz directory
- for resource in resources:
- path = Path(resource.path)
- if path.name == "fuzz" and (path / "Cargo.toml").exists():
- # This is the fuzz workspace, return parent
- parent = path.parent
- if (parent / "Cargo.toml").exists():
- return parent
-
- # Last resort: find any Cargo.toml
- for resource in resources:
- path = Path(resource.path)
- cargo_toml = path / "Cargo.toml"
- if cargo_toml.exists():
- return path
- return None
-
- def _find_fuzz_harnesses(self, project_path: Path) -> list[Path]:
- """Find fuzz harnesses in project.
-
- :param project_path: Path to Rust project
- :returns: List of harness file paths
- """
- fuzz_dir = project_path / "fuzz" / "fuzz_targets"
- if not fuzz_dir.exists():
- return []
-
- harnesses = list(fuzz_dir.glob("*.rs"))
- return harnesses
-
- def _test_harness(
- self,
- project_path: Path,
- harness_path: Path,
- trial_duration: int,
- timeout_sec: int,
- ) -> HarnessEvaluation:
- """Test a single harness comprehensively.
-
- :param project_path: Path to Rust project
- :param harness_path: Path to harness file
- :param trial_duration: Duration for fuzzing trial in seconds
- :param timeout_sec: Timeout for execution test
- :returns: Harness evaluation
- """
- harness_name = harness_path.stem
-
- # Step 1: Compilation
- self.emit_event("compiling", harness=harness_name)
- compilation = self._test_compilation(project_path, harness_name)
-
- # If compilation failed, generate feedback and return early
- if not compilation.success:
- quality = FeedbackGenerator.generate_quality_assessment(
- compilation_result=compilation.model_dump(),
- execution_result=None,
- coverage=None,
- performance=None,
- stability=None,
- )
- return HarnessEvaluation(
- name=harness_name,
- path=str(harness_path),
- compilation=compilation,
- execution=None,
- fuzzing_trial=None,
- quality=quality,
- )
-
- # Step 2: Execution test
- self.emit_event("testing_execution", harness=harness_name)
- execution = self._test_execution(project_path, harness_name, timeout_sec)
-
- if not execution.success:
- quality = FeedbackGenerator.generate_quality_assessment(
- compilation_result=compilation.model_dump(),
- execution_result=execution.model_dump(),
- coverage=None,
- performance=None,
- stability=None,
- )
- return HarnessEvaluation(
- name=harness_name,
- path=str(harness_path),
- compilation=compilation,
- execution=execution,
- fuzzing_trial=None,
- quality=quality,
- )
-
- # Step 3: Fuzzing trial
- self.emit_event("running_trial", harness=harness_name, duration=trial_duration)
- fuzzing_trial = self._run_fuzzing_trial(
- project_path, harness_name, trial_duration
- )
-
- # Generate quality assessment
- quality = FeedbackGenerator.generate_quality_assessment(
- compilation_result=compilation.model_dump(),
- execution_result=execution.model_dump(),
- coverage=fuzzing_trial.coverage if fuzzing_trial else None,
- performance=fuzzing_trial.performance if fuzzing_trial else None,
- stability=fuzzing_trial.stability if fuzzing_trial else None,
- )
-
- return HarnessEvaluation(
- name=harness_name,
- path=str(harness_path),
- compilation=compilation,
- execution=execution,
- fuzzing_trial=fuzzing_trial,
- quality=quality,
- )
-
- def _test_compilation(self, project_path: Path, harness_name: str) -> CompilationResult:
- """Test harness compilation.
-
- :param project_path: Path to Rust project
- :param harness_name: Name of harness to compile
- :returns: Compilation result
- """
- start_time = time.time()
-
- try:
- result = subprocess.run(
- ["cargo", "fuzz", "build", harness_name],
- cwd=project_path,
- capture_output=True,
- text=True,
- timeout=300, # 5 min timeout for compilation
- )
-
- compilation_time = int((time.time() - start_time) * 1000)
-
- if result.returncode == 0:
- # Parse warnings
- warnings = self._parse_compiler_warnings(result.stderr)
- return CompilationResult(
- success=True, time_ms=compilation_time, warnings=warnings
- )
- else:
- # Parse errors
- errors = self._parse_compiler_errors(result.stderr)
- return CompilationResult(
- success=False,
- time_ms=compilation_time,
- errors=errors,
- stderr=result.stderr,
- )
-
- except subprocess.TimeoutExpired:
- return CompilationResult(
- success=False,
- errors=["Compilation timed out after 5 minutes"],
- stderr="Timeout",
- )
- except Exception as e:
- return CompilationResult(
- success=False, errors=[f"Compilation failed: {e!s}"], stderr=str(e)
- )
-
- def _test_execution(
- self, project_path: Path, harness_name: str, timeout_sec: int
- ) -> ExecutionResult:
- """Test harness execution with minimal input.
-
- :param project_path: Path to Rust project
- :param harness_name: Name of harness
- :param timeout_sec: Timeout for execution
- :returns: Execution result
- """
- try:
- # Run with very short timeout and max runs
- result = subprocess.run(
- [
- "cargo",
- "fuzz",
- "run",
- harness_name,
- "--",
- "-runs=10",
- f"-max_total_time={timeout_sec}",
- ],
- cwd=project_path,
- capture_output=True,
- text=True,
- timeout=timeout_sec + 5,
- )
-
- # Check if it crashed immediately
- if "SUMMARY: libFuzzer: deadly signal" in result.stderr:
- return ExecutionResult(
- success=False,
- immediate_crash=True,
- crash_details=self._extract_crash_info(result.stderr),
- )
-
- # Success if completed runs
- return ExecutionResult(success=True, runs_completed=10)
-
- except subprocess.TimeoutExpired:
- return ExecutionResult(success=False, timeout=True)
- except Exception as e:
- return ExecutionResult(
- success=False, immediate_crash=True, crash_details=str(e)
- )
-
- def _run_fuzzing_trial(
- self, project_path: Path, harness_name: str, duration_sec: int
- ) -> FuzzingTrial | None:
- """Run short fuzzing trial to gather metrics.
-
- :param project_path: Path to Rust project
- :param harness_name: Name of harness
- :param duration_sec: Duration to run fuzzing
- :returns: Fuzzing trial results or None if failed
- """
- try:
- result = subprocess.run(
- [
- "cargo",
- "fuzz",
- "run",
- harness_name,
- "--",
- f"-max_total_time={duration_sec}",
- "-print_final_stats=1",
- ],
- cwd=project_path,
- capture_output=True,
- text=True,
- timeout=duration_sec + 30,
- )
-
- # Parse fuzzing statistics
- stats = self._parse_fuzzing_stats(result.stderr)
-
- # Create metrics
- coverage = CoverageMetrics(
- initial_edges=stats.get("initial_edges", 0),
- final_edges=stats.get("cov_edges", 0),
- new_edges_found=stats.get("cov_edges", 0) - stats.get("initial_edges", 0),
- growth_rate=self._assess_coverage_growth(stats),
- percentage_estimate=self._estimate_coverage_percentage(stats),
- stagnation_time_sec=stats.get("stagnation_time"),
- )
-
- performance = PerformanceMetrics(
- total_execs=stats.get("total_execs", 0),
- execs_per_sec=stats.get("exec_per_sec", 0.0),
- performance_rating=self._assess_performance(stats.get("exec_per_sec", 0.0)),
- )
-
- stability = StabilityMetrics(
- status=self._assess_stability(stats),
- crashes_found=stats.get("crashes", 0),
- unique_crashes=stats.get("unique_crashes", 0),
- crash_rate=self._calculate_crash_rate(stats),
- )
-
- return FuzzingTrial(
- duration_seconds=duration_sec,
- coverage=coverage,
- performance=performance,
- stability=stability,
- trial_successful=True,
- )
-
- except Exception:
- return None
-
- def _parse_compiler_errors(self, stderr: str) -> list[str]:
- """Parse compiler error messages.
-
- :param stderr: Compiler stderr output
- :returns: List of error messages
- """
- errors = []
- for line in stderr.split("\n"):
- if "error:" in line or "error[" in line:
- errors.append(line.strip())
- return errors[:10] # Limit to first 10 errors
-
- def _parse_compiler_warnings(self, stderr: str) -> list[str]:
- """Parse compiler warnings.
-
- :param stderr: Compiler stderr output
- :returns: List of warning messages
- """
- warnings = []
- for line in stderr.split("\n"):
- if "warning:" in line:
- warnings.append(line.strip())
- return warnings[:5] # Limit to first 5 warnings
-
- def _extract_crash_info(self, stderr: str) -> str:
- """Extract crash information from stderr.
-
- :param stderr: Fuzzer stderr output
- :returns: Crash details
- """
- lines = stderr.split("\n")
- for i, line in enumerate(lines):
- if "SUMMARY:" in line or "deadly signal" in line:
- return "\n".join(lines[max(0, i - 3) : i + 5])
- return stderr[:500] # First 500 chars if no specific crash info
-
- def _parse_fuzzing_stats(self, stderr: str) -> dict:
- """Parse fuzzing statistics from libFuzzer output.
-
- :param stderr: Fuzzer stderr output
- :returns: Dictionary of statistics
- """
- stats = {
- "total_execs": 0,
- "exec_per_sec": 0.0,
- "cov_edges": 0,
- "initial_edges": 0,
- "crashes": 0,
- "unique_crashes": 0,
- }
-
- lines = stderr.split("\n")
-
- # Find initial coverage
- for line in lines[:20]:
- if "cov:" in line:
- try:
- cov_part = line.split("cov:")[1].split()[0]
- stats["initial_edges"] = int(cov_part)
- break
- except (IndexError, ValueError):
- pass
-
- # Parse final stats
- for line in reversed(lines):
- if "#" in line and "cov:" in line and "exec/s:" in line:
- try:
- # Parse line like: "#12345 cov: 891 ft: 1234 corp: 56/789b exec/s: 1507"
- parts = line.split()
- for i, part in enumerate(parts):
- if part.startswith("#"):
- stats["total_execs"] = int(part[1:])
- elif part == "cov:":
- stats["cov_edges"] = int(parts[i + 1])
- elif part == "exec/s:":
- stats["exec_per_sec"] = float(parts[i + 1])
- except (IndexError, ValueError):
- pass
-
- # Count crashes
- if "crash-" in line or "leak-" in line or "timeout-" in line:
- stats["crashes"] += 1
-
- # Estimate unique crashes (simplified)
- stats["unique_crashes"] = min(stats["crashes"], 10)
-
- return stats
-
- def _assess_coverage_growth(self, stats: dict) -> str:
- """Assess coverage growth quality.
-
- :param stats: Fuzzing statistics
- :returns: Growth rate assessment
- """
- new_edges = stats.get("cov_edges", 0) - stats.get("initial_edges", 0)
-
- if new_edges == 0:
- return "none"
- elif new_edges < 50:
- return "poor"
- elif new_edges < 200:
- return "good"
- else:
- return "excellent"
-
- def _estimate_coverage_percentage(self, stats: dict) -> float | None:
- """Estimate coverage percentage (rough heuristic).
-
- :param stats: Fuzzing statistics
- :returns: Estimated percentage or None
- """
- edges = stats.get("cov_edges", 0)
- if edges == 0:
- return 0.0
-
- # Rough heuristic: assume medium-sized function has ~2000 edges
- # This is very approximate
- estimated = min((edges / 2000) * 100, 100)
- return round(estimated, 1)
-
- def _assess_performance(self, execs_per_sec: float) -> str:
- """Assess performance rating.
-
- :param execs_per_sec: Executions per second
- :returns: Performance rating
- """
- if execs_per_sec > 1000:
- return "excellent"
- elif execs_per_sec > 100:
- return "good"
- else:
- return "poor"
-
- def _assess_stability(self, stats: dict) -> str:
- """Assess stability status.
-
- :param stats: Fuzzing statistics
- :returns: Stability status
- """
- crashes = stats.get("crashes", 0)
- total_execs = stats.get("total_execs", 0)
-
- if total_execs == 0:
- return "unknown"
-
- crash_rate = (crashes / total_execs) * 1000
-
- if crash_rate > 10:
- return "crashes_frequently"
- elif crash_rate > 1:
- return "unstable"
- else:
- return "stable"
-
- def _calculate_crash_rate(self, stats: dict) -> float:
- """Calculate crash rate per 1000 executions.
-
- :param stats: Fuzzing statistics
- :returns: Crash rate
- """
- crashes = stats.get("crashes", 0)
- total = stats.get("total_execs", 0)
-
- if total == 0:
- return 0.0
-
- return (crashes / total) * 1000
-
- def _generate_summary(self, evaluations: list[HarnessEvaluation]) -> EvaluationSummary:
- """Generate evaluation summary.
-
- :param evaluations: List of harness evaluations
- :returns: Summary statistics
- """
- production_ready = sum(
- 1 for e in evaluations if e.quality.verdict == "production-ready"
- )
- needs_improvement = sum(
- 1 for e in evaluations if e.quality.verdict == "needs-improvement"
- )
- broken = sum(1 for e in evaluations if e.quality.verdict == "broken")
-
- avg_score = (
- sum(e.quality.score for e in evaluations) / len(evaluations)
- if evaluations
- else 0
- )
-
- # Generate recommendation
- if broken > 0:
- recommended_action = f"Fix {broken} broken harness(es) before proceeding."
- elif needs_improvement > 0:
- recommended_action = f"Improve {needs_improvement} harness(es) for better results."
- else:
- recommended_action = "All harnesses are production-ready!"
-
- return EvaluationSummary(
- total_harnesses=len(evaluations),
- production_ready=production_ready,
- needs_improvement=needs_improvement,
- broken=broken,
- average_score=round(avg_score, 1),
- recommended_action=recommended_action,
- )
-
- def _save_report(self, report: HarnessTestReport) -> None:
- """Save test report to results directory.
-
- :param report: Harness test report
- """
- from fuzzforge_modules_sdk.api.constants import PATH_TO_ARTIFACTS
-
- # Ensure artifacts directory exists
- PATH_TO_ARTIFACTS.mkdir(parents=True, exist_ok=True)
-
- # Save JSON report
- results_path = PATH_TO_ARTIFACTS / "harness-evaluation.json"
- with results_path.open("w") as f:
- json.dump(report.model_dump(), f, indent=2)
-
- # Save human-readable summary
- summary_path = PATH_TO_ARTIFACTS / "feedback-summary.md"
- with summary_path.open("w") as f:
- f.write("# Harness Testing Report\n\n")
- f.write(f"**Total Harnesses:** {report.summary.total_harnesses}\n")
- f.write(f"**Production Ready:** {report.summary.production_ready}\n")
- f.write(f"**Needs Improvement:** {report.summary.needs_improvement}\n")
- f.write(f"**Broken:** {report.summary.broken}\n")
- f.write(f"**Average Score:** {report.summary.average_score}/100\n\n")
- f.write(f"**Recommendation:** {report.summary.recommended_action}\n\n")
-
- f.write("## Individual Harness Results\n\n")
- for harness in report.harnesses:
- f.write(f"### {harness.name}\n\n")
- f.write(f"- **Verdict:** {harness.quality.verdict}\n")
- f.write(f"- **Score:** {harness.quality.score}/100\n\n")
-
- if harness.quality.strengths:
- f.write("**Strengths:**\n")
- for strength in harness.quality.strengths:
- f.write(f"- {strength}\n")
- f.write("\n")
-
- if harness.quality.issues:
- f.write("**Issues:**\n")
- for issue in harness.quality.issues:
- f.write(f"- [{issue.severity.upper()}] {issue.message}\n")
- f.write(f" - **Suggestion:** {issue.suggestion}\n")
- f.write("\n")
-
- if harness.quality.recommended_actions:
- f.write("**Actions:**\n")
- for action in harness.quality.recommended_actions:
- f.write(f"- {action}\n")
- f.write("\n")
-
-
-# Export the module class for use by __main__.py
-__all__ = ["HarnessTesterModule"]
diff --git a/fuzzforge-modules/harness-tester/src/module/__main__.py b/fuzzforge-modules/harness-tester/src/module/__main__.py
deleted file mode 100644
index dc334b1..0000000
--- a/fuzzforge-modules/harness-tester/src/module/__main__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-"""Harness tester module entrypoint."""
-
-from fuzzforge_modules_sdk.api import logs
-
-from module import HarnessTesterModule
-
-
-def main() -> None:
- """Run the harness tester module."""
- logs.configure()
- module = HarnessTesterModule()
- module.main()
-
-
-if __name__ == "__main__":
- main()
diff --git a/fuzzforge-modules/harness-tester/src/module/analyzer.py b/fuzzforge-modules/harness-tester/src/module/analyzer.py
deleted file mode 100644
index ec25fb5..0000000
--- a/fuzzforge-modules/harness-tester/src/module/analyzer.py
+++ /dev/null
@@ -1,486 +0,0 @@
-"""Feedback generator with actionable suggestions for AI agents."""
-
-from module.feedback import (
- CoverageMetrics,
- FeedbackCategory,
- FeedbackIssue,
- FeedbackSeverity,
- PerformanceMetrics,
- QualityAssessment,
- StabilityMetrics,
-)
-
-
-class FeedbackGenerator:
- """Generates actionable feedback based on harness test results."""
-
- @staticmethod
- def analyze_compilation(
- compilation_result: dict,
- ) -> tuple[list[FeedbackIssue], list[str]]:
- """Analyze compilation results and generate feedback.
-
- :param compilation_result: Compilation output and errors
- :returns: Tuple of (issues, strengths)
- """
- issues = []
- strengths = []
-
- if not compilation_result.get("success"):
- errors = compilation_result.get("errors", [])
-
- for error in errors:
- # Analyze specific error types
- if "cannot find" in error.lower():
- issues.append(
- FeedbackIssue(
- category=FeedbackCategory.COMPILATION,
- severity=FeedbackSeverity.CRITICAL,
- type="undefined_variable",
- message=f"Compilation error: {error}",
- suggestion="Check variable names match the function signature. Use the exact names from fuzzable_functions.json.",
- details={"error": error},
- )
- )
- elif "mismatched types" in error.lower():
- issues.append(
- FeedbackIssue(
- category=FeedbackCategory.COMPILATION,
- severity=FeedbackSeverity.CRITICAL,
- type="type_mismatch",
- message=f"Type mismatch: {error}",
- suggestion="Check the function expects the types you're passing. Convert fuzzer input to the correct type (e.g., &[u8] to &str with from_utf8).",
- details={"error": error},
- )
- )
- elif "trait" in error.lower() and "not implemented" in error.lower():
- issues.append(
- FeedbackIssue(
- category=FeedbackCategory.COMPILATION,
- severity=FeedbackSeverity.CRITICAL,
- type="trait_not_implemented",
- message=f"Trait not implemented: {error}",
- suggestion="Ensure you're using the correct types. Some functions require specific trait implementations.",
- details={"error": error},
- )
- )
- else:
- issues.append(
- FeedbackIssue(
- category=FeedbackCategory.COMPILATION,
- severity=FeedbackSeverity.CRITICAL,
- type="compilation_error",
- message=f"Compilation failed: {error}",
- suggestion="Review the error message and fix syntax/type issues. Check function signatures in the source code.",
- details={"error": error},
- )
- )
- else:
- strengths.append("Compiles successfully")
-
- # Check for warnings
- warnings = compilation_result.get("warnings", [])
- if warnings:
- for warning in warnings[:3]: # Limit to 3 most important
- if "unused" in warning.lower():
- issues.append(
- FeedbackIssue(
- category=FeedbackCategory.CODE_QUALITY,
- severity=FeedbackSeverity.INFO,
- type="unused_variable",
- message=f"Code quality: {warning}",
- suggestion="Remove unused variables or use underscore prefix (_variable) to suppress warning.",
- details={"warning": warning},
- )
- )
-
- return issues, strengths
-
- @staticmethod
- def analyze_execution(
- execution_result: dict,
- ) -> tuple[list[FeedbackIssue], list[str]]:
- """Analyze execution results.
-
- :param execution_result: Execution test results
- :returns: Tuple of (issues, strengths)
- """
- issues = []
- strengths = []
-
- if not execution_result.get("success"):
- if execution_result.get("immediate_crash"):
- crash_details = execution_result.get("crash_details", "")
-
- if "stack overflow" in crash_details.lower():
- issues.append(
- FeedbackIssue(
- category=FeedbackCategory.EXECUTION,
- severity=FeedbackSeverity.CRITICAL,
- type="stack_overflow",
- message="Harness crashes immediately with stack overflow",
- suggestion="Check for infinite recursion or large stack allocations. Use heap allocation (Box, Vec) for large data structures.",
- details={"crash": crash_details},
- )
- )
- elif "panic" in crash_details.lower():
- issues.append(
- FeedbackIssue(
- category=FeedbackCategory.EXECUTION,
- severity=FeedbackSeverity.CRITICAL,
- type="panic_on_start",
- message="Harness panics immediately",
- suggestion="Check initialization code. Ensure required resources are available and input validation doesn't panic on empty input.",
- details={"crash": crash_details},
- )
- )
- else:
- issues.append(
- FeedbackIssue(
- category=FeedbackCategory.EXECUTION,
- severity=FeedbackSeverity.CRITICAL,
- type="immediate_crash",
- message=f"Harness crashes immediately: {crash_details}",
- suggestion="Debug the harness initialization. Add error handling and check for null/invalid pointers.",
- details={"crash": crash_details},
- )
- )
-
- elif execution_result.get("timeout"):
- issues.append(
- FeedbackIssue(
- category=FeedbackCategory.EXECUTION,
- severity=FeedbackSeverity.CRITICAL,
- type="infinite_loop",
- message="Harness times out - likely infinite loop",
- suggestion="Check for loops that depend on fuzzer input. Add iteration limits or timeout mechanisms.",
- details={},
- )
- )
- else:
- strengths.append("Executes without crashing")
-
- return issues, strengths
-
- @staticmethod
- def analyze_coverage(
- coverage: CoverageMetrics,
- ) -> tuple[list[FeedbackIssue], list[str]]:
- """Analyze coverage metrics.
-
- :param coverage: Coverage metrics from fuzzing trial
- :returns: Tuple of (issues, strengths)
- """
- issues = []
- strengths = []
-
- # No coverage growth
- if coverage.new_edges_found == 0:
- issues.append(
- FeedbackIssue(
- category=FeedbackCategory.COVERAGE,
- severity=FeedbackSeverity.CRITICAL,
- type="no_coverage",
- message="No coverage detected - harness may not be using fuzzer input",
- suggestion="Ensure you're actually calling the target function with fuzzer-provided data. Check that 'data' parameter is passed to the function being fuzzed.",
- details={"initial_edges": coverage.initial_edges},
- )
- )
- # Very low coverage
- elif coverage.growth_rate == "none" or (
- coverage.percentage_estimate and coverage.percentage_estimate < 5
- ):
- issues.append(
- FeedbackIssue(
- category=FeedbackCategory.COVERAGE,
- severity=FeedbackSeverity.WARNING,
- type="very_low_coverage",
- message=f"Very low coverage: ~{coverage.percentage_estimate}%",
- suggestion="Harness may not be reaching the target code. Verify you're calling the correct entry point function. Check if there's input validation that rejects all fuzzer data.",
- details={
- "percentage": coverage.percentage_estimate,
- "edges": coverage.final_edges,
- },
- )
- )
- # Low coverage
- elif coverage.growth_rate == "poor" or (
- coverage.percentage_estimate and coverage.percentage_estimate < 20
- ):
- issues.append(
- FeedbackIssue(
- category=FeedbackCategory.COVERAGE,
- severity=FeedbackSeverity.WARNING,
- type="low_coverage",
- message=f"Low coverage: {coverage.percentage_estimate}% - not exploring enough code paths",
- suggestion="Try fuzzing multiple entry points or remove restrictive input validation. Consider using a dictionary for structured inputs.",
- details={
- "percentage": coverage.percentage_estimate,
- "new_edges": coverage.new_edges_found,
- },
- )
- )
- # Good coverage
- elif coverage.growth_rate in ["good", "excellent"]:
- if coverage.percentage_estimate and coverage.percentage_estimate > 50:
- strengths.append(
- f"Excellent coverage: {coverage.percentage_estimate}% of target code reached"
- )
- else:
- strengths.append("Good coverage growth - harness is exploring code paths")
-
- # Coverage stagnation
- if (
- coverage.stagnation_time_sec
- and coverage.stagnation_time_sec < 10
- and coverage.final_edges < 500
- ):
- issues.append(
- FeedbackIssue(
- category=FeedbackCategory.COVERAGE,
- severity=FeedbackSeverity.INFO,
- type="early_stagnation",
- message=f"Coverage stopped growing after {coverage.stagnation_time_sec}s",
- suggestion="Harness may be hitting input validation barriers. Consider fuzzing with a seed corpus of valid inputs.",
- details={"stagnation_time": coverage.stagnation_time_sec},
- )
- )
-
- return issues, strengths
-
- @staticmethod
- def analyze_performance(
- performance: PerformanceMetrics,
- ) -> tuple[list[FeedbackIssue], list[str]]:
- """Analyze performance metrics.
-
- :param performance: Performance metrics from fuzzing trial
- :returns: Tuple of (issues, strengths)
- """
- issues = []
- strengths = []
-
- execs_per_sec = performance.execs_per_sec
-
- # Very slow execution
- if execs_per_sec < 10:
- issues.append(
- FeedbackIssue(
- category=FeedbackCategory.PERFORMANCE,
- severity=FeedbackSeverity.CRITICAL,
- type="extremely_slow",
- message=f"Extremely slow: {execs_per_sec:.1f} execs/sec",
- suggestion="Remove file I/O, network operations, or expensive computations from the harness loop. Move setup code outside the fuzz target function.",
- details={"execs_per_sec": execs_per_sec},
- )
- )
- # Slow execution
- elif execs_per_sec < 100:
- issues.append(
- FeedbackIssue(
- category=FeedbackCategory.PERFORMANCE,
- severity=FeedbackSeverity.WARNING,
- type="slow_execution",
- message=f"Slow execution: {execs_per_sec:.1f} execs/sec (expected 500+)",
- suggestion="Optimize harness: avoid allocations in hot path, reuse buffers, remove logging. Profile to find bottlenecks.",
- details={"execs_per_sec": execs_per_sec},
- )
- )
- # Good performance
- elif execs_per_sec > 1000:
- strengths.append(f"Excellent performance: {execs_per_sec:.0f} execs/sec")
- elif execs_per_sec > 500:
- strengths.append(f"Good performance: {execs_per_sec:.0f} execs/sec")
-
- return issues, strengths
-
- @staticmethod
- def analyze_stability(
- stability: StabilityMetrics,
- ) -> tuple[list[FeedbackIssue], list[str]]:
- """Analyze stability metrics.
-
- :param stability: Stability metrics from fuzzing trial
- :returns: Tuple of (issues, strengths)
- """
- issues = []
- strengths = []
-
- if stability.status == "crashes_frequently":
- issues.append(
- FeedbackIssue(
- category=FeedbackCategory.STABILITY,
- severity=FeedbackSeverity.WARNING,
- type="unstable_frequent_crashes",
- message=f"Harness crashes frequently: {stability.crash_rate:.1f} crashes per 1000 execs",
- suggestion="This might be expected if testing buggy code. If not, add error handling for edge cases or invalid inputs.",
- details={
- "crashes": stability.crashes_found,
- "crash_rate": stability.crash_rate,
- },
- )
- )
- elif stability.status == "hangs":
- issues.append(
- FeedbackIssue(
- category=FeedbackCategory.STABILITY,
- severity=FeedbackSeverity.WARNING,
- type="hangs_detected",
- message=f"Harness hangs: {stability.hangs_found} detected",
- suggestion="Add timeouts to prevent infinite loops. Check for blocking operations or resource exhaustion.",
- details={"hangs": stability.hangs_found},
- )
- )
- elif stability.status == "stable":
- strengths.append("Stable execution - no crashes or hangs")
-
- # Finding crashes can be good!
- if stability.unique_crashes > 0 and stability.status != "crashes_frequently":
- strengths.append(
- f"Found {stability.unique_crashes} potential bugs during trial!"
- )
-
- return issues, strengths
-
- @staticmethod
- def calculate_quality_score(
- compilation_success: bool,
- execution_success: bool,
- coverage: CoverageMetrics | None,
- performance: PerformanceMetrics | None,
- stability: StabilityMetrics | None,
- ) -> int:
- """Calculate overall quality score (0-100).
-
- :param compilation_success: Whether compilation succeeded
- :param execution_success: Whether execution succeeded
- :param coverage: Coverage metrics
- :param performance: Performance metrics
- :param stability: Stability metrics
- :returns: Quality score 0-100
- """
- if not compilation_success:
- return 0
-
- if not execution_success:
- return 10
-
- score = 20 # Base score for compiling and running
-
- # Coverage contribution (0-40 points)
- if coverage:
- if coverage.growth_rate == "excellent":
- score += 40
- elif coverage.growth_rate == "good":
- score += 30
- elif coverage.growth_rate == "poor":
- score += 10
-
- # Performance contribution (0-25 points)
- if performance:
- if performance.execs_per_sec > 1000:
- score += 25
- elif performance.execs_per_sec > 500:
- score += 20
- elif performance.execs_per_sec > 100:
- score += 10
- elif performance.execs_per_sec > 10:
- score += 5
-
- # Stability contribution (0-15 points)
- if stability:
- if stability.status == "stable":
- score += 15
- elif stability.status == "unstable":
- score += 10
- elif stability.status == "crashes_frequently":
- score += 5
-
- return min(score, 100)
-
- @classmethod
- def generate_quality_assessment(
- cls,
- compilation_result: dict,
- execution_result: dict | None,
- coverage: CoverageMetrics | None,
- performance: PerformanceMetrics | None,
- stability: StabilityMetrics | None,
- ) -> QualityAssessment:
- """Generate complete quality assessment with all feedback.
-
- :param compilation_result: Compilation results
- :param execution_result: Execution results
- :param coverage: Coverage metrics
- :param performance: Performance metrics
- :param stability: Stability metrics
- :returns: Complete quality assessment
- """
- all_issues = []
- all_strengths = []
-
- # Analyze each aspect
- comp_issues, comp_strengths = cls.analyze_compilation(compilation_result)
- all_issues.extend(comp_issues)
- all_strengths.extend(comp_strengths)
-
- if execution_result:
- exec_issues, exec_strengths = cls.analyze_execution(execution_result)
- all_issues.extend(exec_issues)
- all_strengths.extend(exec_strengths)
-
- if coverage:
- cov_issues, cov_strengths = cls.analyze_coverage(coverage)
- all_issues.extend(cov_issues)
- all_strengths.extend(cov_strengths)
-
- if performance:
- perf_issues, perf_strengths = cls.analyze_performance(performance)
- all_issues.extend(perf_issues)
- all_strengths.extend(perf_strengths)
-
- if stability:
- stab_issues, stab_strengths = cls.analyze_stability(stability)
- all_issues.extend(stab_issues)
- all_strengths.extend(stab_strengths)
-
- # Calculate score
- score = cls.calculate_quality_score(
- compilation_result.get("success", False),
- execution_result.get("success", False) if execution_result else False,
- coverage,
- performance,
- stability,
- )
-
- # Determine verdict
- if score >= 70:
- verdict = "production-ready"
- elif score >= 30:
- verdict = "needs-improvement"
- else:
- verdict = "broken"
-
- # Generate recommended actions
- recommended_actions = []
- critical_issues = [i for i in all_issues if i.severity == FeedbackSeverity.CRITICAL]
- warning_issues = [i for i in all_issues if i.severity == FeedbackSeverity.WARNING]
-
- if critical_issues:
- recommended_actions.append(
- f"Fix {len(critical_issues)} critical issue(s) preventing execution"
- )
- if warning_issues:
- recommended_actions.append(
- f"Address {len(warning_issues)} warning(s) to improve harness quality"
- )
- if verdict == "production-ready":
- recommended_actions.append("Harness is ready for production fuzzing")
-
- return QualityAssessment(
- score=score,
- verdict=verdict,
- issues=all_issues,
- strengths=all_strengths,
- recommended_actions=recommended_actions,
- )
diff --git a/fuzzforge-modules/harness-tester/src/module/feedback.py b/fuzzforge-modules/harness-tester/src/module/feedback.py
deleted file mode 100644
index fab8848..0000000
--- a/fuzzforge-modules/harness-tester/src/module/feedback.py
+++ /dev/null
@@ -1,148 +0,0 @@
-"""Feedback types and schemas for harness testing."""
-
-from enum import Enum
-from typing import Any
-
-from pydantic import BaseModel, Field
-
-
-class FeedbackSeverity(str, Enum):
- """Severity levels for feedback issues."""
-
- CRITICAL = "critical" # Blocks execution (compilation errors, crashes)
- WARNING = "warning" # Should fix (low coverage, slow execution)
- INFO = "info" # Nice to have (optimization suggestions)
-
-
-class FeedbackCategory(str, Enum):
- """Categories of feedback."""
-
- COMPILATION = "compilation"
- EXECUTION = "execution"
- PERFORMANCE = "performance"
- COVERAGE = "coverage"
- STABILITY = "stability"
- CODE_QUALITY = "code_quality"
-
-
-class FeedbackIssue(BaseModel):
- """A single feedback issue with actionable suggestion."""
-
- category: FeedbackCategory
- severity: FeedbackSeverity
- type: str = Field(description="Specific issue type (e.g., 'low_coverage', 'compilation_error')")
- message: str = Field(description="Human-readable description of the issue")
- suggestion: str = Field(description="Actionable suggestion for AI agent to fix the issue")
- details: dict[str, Any] = Field(default_factory=dict, description="Additional technical details")
-
-
-class CompilationResult(BaseModel):
- """Results from compilation attempt."""
-
- success: bool
- time_ms: int | None = None
- errors: list[str] = Field(default_factory=list)
- warnings: list[str] = Field(default_factory=list)
- stderr: str | None = None
-
-
-class ExecutionResult(BaseModel):
- """Results from execution test."""
-
- success: bool
- runs_completed: int | None = None
- immediate_crash: bool = False
- timeout: bool = False
- crash_details: str | None = None
-
-
-class CoverageMetrics(BaseModel):
- """Coverage metrics from fuzzing trial."""
-
- initial_edges: int = 0
- final_edges: int = 0
- new_edges_found: int = 0
- growth_rate: str = Field(
- description="Qualitative assessment: 'excellent', 'good', 'poor', 'none'"
- )
- percentage_estimate: float | None = Field(
- None, description="Estimated percentage of target code covered"
- )
- stagnation_time_sec: float | None = Field(
- None, description="Time until coverage stopped growing"
- )
-
-
-class PerformanceMetrics(BaseModel):
- """Performance metrics from fuzzing trial."""
-
- total_execs: int
- execs_per_sec: float
- average_exec_time_us: float | None = None
- performance_rating: str = Field(
- description="'excellent' (>1000/s), 'good' (100-1000/s), 'poor' (<100/s)"
- )
-
-
-class StabilityMetrics(BaseModel):
- """Stability metrics from fuzzing trial."""
-
- status: str = Field(
- description="'stable', 'unstable', 'crashes_frequently', 'hangs'"
- )
- crashes_found: int = 0
- hangs_found: int = 0
- unique_crashes: int = 0
- crash_rate: float = Field(0.0, description="Crashes per 1000 executions")
-
-
-class FuzzingTrial(BaseModel):
- """Results from short fuzzing trial."""
-
- duration_seconds: int
- coverage: CoverageMetrics
- performance: PerformanceMetrics
- stability: StabilityMetrics
- trial_successful: bool
-
-
-class QualityAssessment(BaseModel):
- """Overall quality assessment of the harness."""
-
- score: int = Field(ge=0, le=100, description="Quality score 0-100")
- verdict: str = Field(
- description="'production-ready', 'needs-improvement', 'broken'"
- )
- issues: list[FeedbackIssue] = Field(default_factory=list)
- strengths: list[str] = Field(default_factory=list)
- recommended_actions: list[str] = Field(default_factory=list)
-
-
-class HarnessEvaluation(BaseModel):
- """Complete evaluation of a single harness."""
-
- name: str
- path: str | None = None
- compilation: CompilationResult
- execution: ExecutionResult | None = None
- fuzzing_trial: FuzzingTrial | None = None
- quality: QualityAssessment
-
-
-class EvaluationSummary(BaseModel):
- """Summary of all harness evaluations."""
-
- total_harnesses: int
- production_ready: int
- needs_improvement: int
- broken: int
- average_score: float
- recommended_action: str
-
-
-class HarnessTestReport(BaseModel):
- """Complete harness testing report."""
-
- harnesses: list[HarnessEvaluation]
- summary: EvaluationSummary
- test_configuration: dict[str, Any] = Field(default_factory=dict)
diff --git a/fuzzforge-modules/harness-tester/src/module/models.py b/fuzzforge-modules/harness-tester/src/module/models.py
deleted file mode 100644
index ed6412b..0000000
--- a/fuzzforge-modules/harness-tester/src/module/models.py
+++ /dev/null
@@ -1,27 +0,0 @@
-"""Models for harness-tester module."""
-
-from pathlib import Path
-from typing import Any
-
-from pydantic import BaseModel
-
-from fuzzforge_modules_sdk.api.models import (
- FuzzForgeModuleInputBase,
- FuzzForgeModuleOutputBase,
-)
-
-from module.settings import Settings
-
-
-class Input(FuzzForgeModuleInputBase[Settings]):
- """Input for the harness-tester module."""
-
-
-class Output(FuzzForgeModuleOutputBase):
- """Output for the harness-tester module."""
-
- #: The test report data.
- report: dict[str, Any] | None = None
-
- #: Path to the report JSON file.
- report_file: Path | None = None
diff --git a/fuzzforge-modules/harness-tester/src/module/settings.py b/fuzzforge-modules/harness-tester/src/module/settings.py
deleted file mode 100644
index 01aa011..0000000
--- a/fuzzforge-modules/harness-tester/src/module/settings.py
+++ /dev/null
@@ -1,19 +0,0 @@
-"""Settings for harness-tester module."""
-
-from pydantic import BaseModel, Field
-
-
-class Settings(BaseModel):
- """Settings for the harness-tester module."""
-
- #: Duration for each fuzzing trial in seconds.
- trial_duration_sec: int = Field(default=30, ge=1, le=300)
-
- #: Timeout for harness execution in seconds.
- execution_timeout_sec: int = Field(default=10, ge=1, le=60)
-
- #: Whether to generate coverage reports.
- enable_coverage: bool = Field(default=True)
-
- #: Minimum score threshold for harness to be considered "good".
- min_quality_score: int = Field(default=50, ge=0, le=100)
diff --git a/fuzzforge-modules/rust-analyzer/Dockerfile b/fuzzforge-modules/rust-analyzer/Dockerfile
deleted file mode 100644
index 70b18cb..0000000
--- a/fuzzforge-modules/rust-analyzer/Dockerfile
+++ /dev/null
@@ -1,27 +0,0 @@
-FROM localhost/fuzzforge-modules-sdk:0.1.0
-
-# Module metadata is now read from pyproject.toml [tool.fuzzforge.module] section
-
-# Install system dependencies
-RUN apt-get update && apt-get install -y \
- curl \
- build-essential \
- pkg-config \
- libssl-dev \
- && rm -rf /var/lib/apt/lists/*
-
-# Install Rust toolchain
-RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
-ENV PATH="/root/.cargo/bin:${PATH}"
-
-# Install Rust analysis tools (skipping cargo-geiger as it's heavy)
-# RUN cargo install cargo-geiger --locked || true
-RUN cargo install cargo-audit --locked || true
-
-COPY ./src /app/src
-COPY ./pyproject.toml /app/pyproject.toml
-
-# Remove workspace reference since we're using wheels
-RUN sed -i '/\[tool\.uv\.sources\]/,/^$/d' /app/pyproject.toml
-
-RUN uv sync --find-links /wheels
diff --git a/fuzzforge-modules/rust-analyzer/Makefile b/fuzzforge-modules/rust-analyzer/Makefile
deleted file mode 100644
index cada4d0..0000000
--- a/fuzzforge-modules/rust-analyzer/Makefile
+++ /dev/null
@@ -1,45 +0,0 @@
-PACKAGE=$(word 1, $(shell uv version))
-VERSION=$(word 2, $(shell uv version))
-
-PODMAN?=/usr/bin/podman
-
-SOURCES=./src
-TESTS=./tests
-
-.PHONY: bandit build clean format mypy pytest ruff version
-
-bandit:
- uv run bandit --recursive $(SOURCES)
-
-build:
- $(PODMAN) build --file ./Dockerfile --no-cache --tag $(PACKAGE):$(VERSION)
-
-save: build
- $(PODMAN) save --format oci-archive --output /tmp/$(PACKAGE)-$(VERSION).oci $(PACKAGE):$(VERSION)
-
-clean:
- @find . -type d \( \
- -name '*.egg-info' \
- -o -name '.mypy_cache' \
- -o -name '.pytest_cache' \
- -o -name '.ruff_cache' \
- -o -name '__pycache__' \
- \) -printf 'removing directory %p\n' -exec rm -rf {} +
-
-cloc:
- cloc $(SOURCES)
-
-format:
- uv run ruff format $(SOURCES) $(TESTS)
-
-mypy:
- uv run mypy $(SOURCES)
-
-pytest:
- uv run pytest $(TESTS)
-
-ruff:
- uv run ruff check --fix $(SOURCES) $(TESTS)
-
-version:
- @echo '$(PACKAGE)@$(VERSION)'
diff --git a/fuzzforge-modules/rust-analyzer/README.md b/fuzzforge-modules/rust-analyzer/README.md
deleted file mode 100644
index d0671a1..0000000
--- a/fuzzforge-modules/rust-analyzer/README.md
+++ /dev/null
@@ -1,46 +0,0 @@
-# FuzzForge Modules - FIXME
-
-## Installation
-
-### Python
-
-```shell
-# install the package (users)
-uv sync
-# install the package and all development dependencies (developers)
-uv sync --all-extras
-```
-
-### Container
-
-```shell
-# build the image
-make build
-# run the container
-mkdir -p "${PWD}/data" "${PWD}/data/input" "${PWD}/data/output"
-echo '{"settings":{},"resources":[]}' > "${PWD}/data/input/input.json"
-podman run --rm \
- --volume "${PWD}/data:/data" \
- ':' 'uv run module'
-```
-
-## Usage
-
-```shell
-uv run module
-```
-
-## Development tools
-
-```shell
-# run ruff (formatter)
-make format
-# run mypy (type checker)
-make mypy
-# run tests (pytest)
-make pytest
-# run ruff (linter)
-make ruff
-```
-
-See the file `Makefile` at the root of this directory for more tools.
diff --git a/fuzzforge-modules/rust-analyzer/mypy.ini b/fuzzforge-modules/rust-analyzer/mypy.ini
deleted file mode 100644
index 84e90d2..0000000
--- a/fuzzforge-modules/rust-analyzer/mypy.ini
+++ /dev/null
@@ -1,6 +0,0 @@
-[mypy]
-plugins = pydantic.mypy
-strict = True
-warn_unused_ignores = True
-warn_redundant_casts = True
-warn_return_any = True
diff --git a/fuzzforge-modules/rust-analyzer/pyproject.toml b/fuzzforge-modules/rust-analyzer/pyproject.toml
deleted file mode 100644
index 2f5a512..0000000
--- a/fuzzforge-modules/rust-analyzer/pyproject.toml
+++ /dev/null
@@ -1,52 +0,0 @@
-[project]
-name = "fuzzforge-rust-analyzer"
-version = "0.1.0"
-description = "Analyzes Rust projects to identify functions suitable for fuzzing"
-authors = []
-readme = "README.md"
-requires-python = ">=3.14"
-dependencies = [
- "fuzzforge-modules-sdk==0.0.1",
- "pydantic==2.12.4",
- "structlog==25.5.0",
-]
-
-[project.optional-dependencies]
-lints = [
- "bandit==1.8.6",
- "mypy==1.18.2",
- "ruff==0.14.4",
-]
-tests = [
- "pytest==9.0.2",
-]
-
-[project.scripts]
-module = "module.__main__:main"
-
-[tool.uv]
-package = true
-
-# FuzzForge module metadata for AI agent discovery
-[tool.fuzzforge.module]
-identifier = "fuzzforge-rust-analyzer"
-suggested_predecessors = []
-continuous_mode = false
-
-use_cases = [
- "Analyze Rust crate to find fuzzable functions",
- "First step in Rust fuzzing pipeline before harness generation",
- "Produces fuzzable_functions.json for AI harness generation"
-]
-
-common_inputs = [
- "rust-source-code",
- "Cargo.toml"
-]
-
-output_artifacts = [
- "analysis.json",
- "results.json"
-]
-
-output_treatment = "Read analysis.json which contains: project_info, fuzzable_functions (array with name, signature, file_path, fuzz_score), and vulnerabilities (array of known CVEs). Display fuzzable_functions as a table. Highlight any vulnerabilities found."
diff --git a/fuzzforge-modules/rust-analyzer/ruff.toml b/fuzzforge-modules/rust-analyzer/ruff.toml
deleted file mode 100644
index 6374f62..0000000
--- a/fuzzforge-modules/rust-analyzer/ruff.toml
+++ /dev/null
@@ -1,19 +0,0 @@
-line-length = 120
-
-[lint]
-select = [ "ALL" ]
-ignore = [
- "COM812", # conflicts with the formatter
- "D100", # ignoring missing docstrings in public modules
- "D104", # ignoring missing docstrings in public packages
- "D203", # conflicts with 'D211'
- "D213", # conflicts with 'D212'
- "TD002", # ignoring missing author in 'TODO' statements
- "TD003", # ignoring missing issue link in 'TODO' statements
-]
-
-[lint.per-file-ignores]
-"tests/*" = [
- "PLR2004", # allowing comparisons using unamed numerical constants in tests
- "S101", # allowing 'assert' statements in tests
-]
diff --git a/fuzzforge-modules/rust-analyzer/src/module/__init__.py b/fuzzforge-modules/rust-analyzer/src/module/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/fuzzforge-modules/rust-analyzer/src/module/__main__.py b/fuzzforge-modules/rust-analyzer/src/module/__main__.py
deleted file mode 100644
index bc8914a..0000000
--- a/fuzzforge-modules/rust-analyzer/src/module/__main__.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from typing import TYPE_CHECKING
-
-from fuzzforge_modules_sdk.api import logs
-
-from module.mod import Module
-
-if TYPE_CHECKING:
- from fuzzforge_modules_sdk.api.modules.base import FuzzForgeModule
-
-
-def main() -> None:
- """TODO."""
- logs.configure()
- module: FuzzForgeModule = Module()
- module.main()
-
-
-if __name__ == "__main__":
- main()
diff --git a/fuzzforge-modules/rust-analyzer/src/module/mod.py b/fuzzforge-modules/rust-analyzer/src/module/mod.py
deleted file mode 100644
index 751d3bd..0000000
--- a/fuzzforge-modules/rust-analyzer/src/module/mod.py
+++ /dev/null
@@ -1,314 +0,0 @@
-"""Rust Analyzer module for FuzzForge.
-
-This module analyzes Rust source code to identify fuzzable entry points,
-unsafe blocks, and known vulnerabilities.
-"""
-
-from __future__ import annotations
-
-import json
-import re
-import subprocess
-from pathlib import Path
-from typing import TYPE_CHECKING
-
-from fuzzforge_modules_sdk.api.constants import PATH_TO_OUTPUTS
-from fuzzforge_modules_sdk.api.models import FuzzForgeModuleResults
-from fuzzforge_modules_sdk.api.modules.base import FuzzForgeModule
-
-from module.models import AnalysisResult, EntryPoint, Input, Output, UnsafeBlock, Vulnerability
-from module.settings import Settings
-
-if TYPE_CHECKING:
- from fuzzforge_modules_sdk.api.models import FuzzForgeModuleResource
-
-
-class Module(FuzzForgeModule):
- """Rust Analyzer module - analyzes Rust code for fuzzable entry points."""
-
- def __init__(self) -> None:
- """Initialize an instance of the class."""
- name: str = "rust-analyzer"
- version: str = "0.1.0"
- FuzzForgeModule.__init__(self, name=name, version=version)
- self._project_path: Path | None = None
- self._settings: Settings | None = None
-
- @classmethod
- def _get_input_type(cls) -> type[Input]:
- """Return the input type."""
- return Input
-
- @classmethod
- def _get_output_type(cls) -> type[Output]:
- """Return the output type."""
- return Output
-
- def _prepare(self, settings: Settings) -> None: # type: ignore[override]
- """Prepare the module.
-
- :param settings: Module settings.
-
- """
- self._settings = settings
-
- def _find_cargo_toml(self, resources: list[FuzzForgeModuleResource]) -> Path | None:
- """Find the Cargo.toml file in the resources.
-
- :param resources: List of input resources.
- :returns: Path to Cargo.toml or None.
-
- """
- for resource in resources:
- if resource.path.name == "Cargo.toml":
- return resource.path
- # Check if resource is a directory containing Cargo.toml
- cargo_path = resource.path / "Cargo.toml"
- if cargo_path.exists():
- return cargo_path
- return None
-
- def _parse_cargo_toml(self, cargo_path: Path) -> tuple[str, str, str]:
- """Parse Cargo.toml to extract crate name, version, and lib name.
-
- :param cargo_path: Path to Cargo.toml.
- :returns: Tuple of (crate_name, version, lib_name).
-
- """
- import tomllib
-
- with cargo_path.open("rb") as f:
- data = tomllib.load(f)
-
- package = data.get("package", {})
- crate_name = package.get("name", "unknown")
- version = package.get("version", "0.0.0")
-
- # Get lib name - defaults to crate name with dashes converted to underscores
- lib_section = data.get("lib", {})
- lib_name = lib_section.get("name", crate_name.replace("-", "_"))
-
- return crate_name, version, lib_name
-
- def _find_entry_points(self, project_path: Path) -> list[EntryPoint]:
- """Find fuzzable entry points in the Rust source.
-
- :param project_path: Path to the Rust project.
- :returns: List of entry points.
-
- """
- entry_points: list[EntryPoint] = []
-
- # Patterns for fuzzable functions (take &[u8], &str, or impl Read)
- fuzzable_patterns = [
- r"pub\s+fn\s+(\w+)\s*\([^)]*&\[u8\][^)]*\)",
- r"pub\s+fn\s+(\w+)\s*\([^)]*&str[^)]*\)",
- r"pub\s+fn\s+(\w+)\s*\([^)]*impl\s+Read[^)]*\)",
- r"pub\s+fn\s+(\w+)\s*\([^)]*data:\s*&\[u8\][^)]*\)",
- r"pub\s+fn\s+(\w+)\s*\([^)]*input:\s*&\[u8\][^)]*\)",
- r"pub\s+fn\s+(\w+)\s*\([^)]*buf:\s*&\[u8\][^)]*\)",
- ]
-
- # Also find parse/decode functions
- parser_patterns = [
- r"pub\s+fn\s+(parse\w*)\s*\([^)]*\)",
- r"pub\s+fn\s+(decode\w*)\s*\([^)]*\)",
- r"pub\s+fn\s+(deserialize\w*)\s*\([^)]*\)",
- r"pub\s+fn\s+(from_bytes\w*)\s*\([^)]*\)",
- r"pub\s+fn\s+(read\w*)\s*\([^)]*\)",
- ]
-
- src_path = project_path / "src"
- if not src_path.exists():
- src_path = project_path
-
- for rust_file in src_path.rglob("*.rs"):
- try:
- content = rust_file.read_text()
- lines = content.split("\n")
-
- for line_num, line in enumerate(lines, 1):
- # Check fuzzable patterns
- for pattern in fuzzable_patterns:
- match = re.search(pattern, line)
- if match:
- entry_points.append(
- EntryPoint(
- function=match.group(1),
- file=str(rust_file.relative_to(project_path)),
- line=line_num,
- signature=line.strip(),
- fuzzable=True,
- )
- )
-
- # Check parser patterns (may need manual review)
- for pattern in parser_patterns:
- match = re.search(pattern, line)
- if match:
- # Avoid duplicates
- func_name = match.group(1)
- if not any(ep.function == func_name for ep in entry_points):
- entry_points.append(
- EntryPoint(
- function=func_name,
- file=str(rust_file.relative_to(project_path)),
- line=line_num,
- signature=line.strip(),
- fuzzable=True,
- )
- )
- except Exception:
- continue
-
- return entry_points
-
- def _find_unsafe_blocks(self, project_path: Path) -> list[UnsafeBlock]:
- """Find unsafe blocks in the Rust source.
-
- :param project_path: Path to the Rust project.
- :returns: List of unsafe blocks.
-
- """
- unsafe_blocks: list[UnsafeBlock] = []
-
- src_path = project_path / "src"
- if not src_path.exists():
- src_path = project_path
-
- for rust_file in src_path.rglob("*.rs"):
- try:
- content = rust_file.read_text()
- lines = content.split("\n")
-
- for line_num, line in enumerate(lines, 1):
- if "unsafe" in line and ("{" in line or "fn" in line):
- # Determine context
- context = "unsafe block"
- if "unsafe fn" in line:
- context = "unsafe function"
- elif "unsafe impl" in line:
- context = "unsafe impl"
- elif "*const" in line or "*mut" in line:
- context = "raw pointer operation"
-
- unsafe_blocks.append(
- UnsafeBlock(
- file=str(rust_file.relative_to(project_path)),
- line=line_num,
- context=context,
- )
- )
- except Exception:
- continue
-
- return unsafe_blocks
-
- def _run_cargo_audit(self, project_path: Path) -> list[Vulnerability]:
- """Run cargo-audit to find known vulnerabilities.
-
- :param project_path: Path to the Rust project.
- :returns: List of vulnerabilities.
-
- """
- vulnerabilities: list[Vulnerability] = []
-
- try:
- result = subprocess.run(
- ["cargo", "audit", "--json"],
- cwd=project_path,
- capture_output=True,
- text=True,
- timeout=120,
- )
-
- if result.stdout:
- audit_data = json.loads(result.stdout)
- for vuln in audit_data.get("vulnerabilities", {}).get("list", []):
- advisory = vuln.get("advisory", {})
- vulnerabilities.append(
- Vulnerability(
- advisory_id=advisory.get("id", "UNKNOWN"),
- crate_name=vuln.get("package", {}).get("name", "unknown"),
- version=vuln.get("package", {}).get("version", "0.0.0"),
- title=advisory.get("title", "Unknown vulnerability"),
- severity=advisory.get("severity", "unknown"),
- )
- )
- except (subprocess.TimeoutExpired, json.JSONDecodeError, FileNotFoundError):
- pass
-
- return vulnerabilities
-
- def _run(self, resources: list[FuzzForgeModuleResource]) -> FuzzForgeModuleResults:
- """Run the analysis.
-
- :param resources: Input resources.
- :returns: Module result status.
-
- """
- # Find the Rust project
- cargo_path = self._find_cargo_toml(resources)
- if cargo_path is None:
- self.get_logger().error("No Cargo.toml found in resources")
- return FuzzForgeModuleResults.FAILURE
-
- project_path = cargo_path.parent
- self._project_path = project_path
-
- self.get_logger().info("Analyzing Rust project", project=str(project_path))
-
- # Parse Cargo.toml
- crate_name, crate_version, lib_name = self._parse_cargo_toml(cargo_path)
- self.get_logger().info("Found crate", name=crate_name, version=crate_version, lib_name=lib_name)
-
- # Find entry points
- entry_points = self._find_entry_points(project_path)
- self.get_logger().info("Found entry points", count=len(entry_points))
-
- # Find unsafe blocks
- unsafe_blocks = self._find_unsafe_blocks(project_path)
- self.get_logger().info("Found unsafe blocks", count=len(unsafe_blocks))
-
- # Run cargo-audit if enabled
- vulnerabilities: list[Vulnerability] = []
- if self._settings and self._settings.run_audit:
- vulnerabilities = self._run_cargo_audit(project_path)
- self.get_logger().info("Found vulnerabilities", count=len(vulnerabilities))
-
- # Build result
- analysis = AnalysisResult(
- crate_name=crate_name,
- crate_version=crate_version,
- lib_name=lib_name,
- entry_points=entry_points,
- unsafe_blocks=unsafe_blocks,
- vulnerabilities=vulnerabilities,
- summary={
- "entry_points": len(entry_points),
- "unsafe_blocks": len(unsafe_blocks),
- "vulnerabilities": len(vulnerabilities),
- },
- )
-
- # Set output data for results.json
- self.set_output(
- analysis=analysis.model_dump(),
- )
-
- # Write analysis to output file (for backwards compatibility)
- output_path = PATH_TO_OUTPUTS / "analysis.json"
- output_path.parent.mkdir(parents=True, exist_ok=True)
- output_path.write_text(analysis.model_dump_json(indent=2))
-
- self.get_logger().info("Analysis complete", output=str(output_path))
-
- return FuzzForgeModuleResults.SUCCESS
-
- def _cleanup(self, settings: Settings) -> None: # type: ignore[override]
- """Clean up after execution.
-
- :param settings: Module settings.
-
- """
- pass
diff --git a/fuzzforge-modules/rust-analyzer/src/module/models.py b/fuzzforge-modules/rust-analyzer/src/module/models.py
deleted file mode 100644
index f87f280..0000000
--- a/fuzzforge-modules/rust-analyzer/src/module/models.py
+++ /dev/null
@@ -1,100 +0,0 @@
-"""Models for rust-analyzer module."""
-
-from pathlib import Path
-
-from pydantic import BaseModel
-
-from fuzzforge_modules_sdk.api.models import FuzzForgeModuleInputBase, FuzzForgeModuleOutputBase
-
-from module.settings import Settings
-
-
-class Input(FuzzForgeModuleInputBase[Settings]):
- """Input for the rust-analyzer module."""
-
-
-class EntryPoint(BaseModel):
- """A fuzzable entry point in the Rust codebase."""
-
- #: Function name.
- function: str
-
- #: Source file path.
- file: str
-
- #: Line number.
- line: int
-
- #: Function signature.
- signature: str
-
- #: Whether the function takes &[u8] or similar fuzzable input.
- fuzzable: bool = True
-
-
-class UnsafeBlock(BaseModel):
- """An unsafe block detected in the codebase."""
-
- #: Source file path.
- file: str
-
- #: Line number.
- line: int
-
- #: Context description.
- context: str
-
-
-class Vulnerability(BaseModel):
- """A known vulnerability from cargo-audit."""
-
- #: Advisory ID (e.g., RUSTSEC-2021-0001).
- advisory_id: str
-
- #: Affected crate name.
- crate_name: str
-
- #: Affected version.
- version: str
-
- #: Vulnerability title.
- title: str
-
- #: Severity level.
- severity: str
-
-
-class AnalysisResult(BaseModel):
- """The complete analysis result."""
-
- #: Crate name from Cargo.toml (use this in fuzz/Cargo.toml dependencies).
- crate_name: str
-
- #: Crate version.
- crate_version: str
-
- #: Library name for use in Rust code (use in `use` statements).
- #: In Rust, dashes become underscores: "fuzz-demo" -> "fuzz_demo".
- lib_name: str = ""
-
- #: List of fuzzable entry points.
- entry_points: list[EntryPoint]
-
- #: List of unsafe blocks.
- unsafe_blocks: list[UnsafeBlock]
-
- #: List of known vulnerabilities.
- vulnerabilities: list[Vulnerability]
-
- #: Summary statistics.
- summary: dict[str, int]
-
-
-class Output(FuzzForgeModuleOutputBase):
- """Output for the rust-analyzer module."""
-
- #: The analysis result (as dict for serialization).
- analysis: dict | None = None
-
- #: Path to the analysis JSON file.
- analysis_file: Path | None = None
diff --git a/fuzzforge-modules/rust-analyzer/src/module/settings.py b/fuzzforge-modules/rust-analyzer/src/module/settings.py
deleted file mode 100644
index 17767ff..0000000
--- a/fuzzforge-modules/rust-analyzer/src/module/settings.py
+++ /dev/null
@@ -1,16 +0,0 @@
-"""Settings for rust-analyzer module."""
-
-from fuzzforge_modules_sdk.api.models import FuzzForgeModulesSettingsBase
-
-
-class Settings(FuzzForgeModulesSettingsBase):
- """Settings for the rust-analyzer module."""
-
- #: Whether to run cargo-audit for CVE detection.
- run_audit: bool = True
-
- #: Whether to run cargo-geiger for unsafe detection.
- run_geiger: bool = True
-
- #: Maximum depth for dependency analysis.
- max_depth: int = 3
diff --git a/fuzzforge-modules/rust-analyzer/tests/.gitkeep b/fuzzforge-modules/rust-analyzer/tests/.gitkeep
deleted file mode 100644
index e69de29..0000000
diff --git a/fuzzforge-runner/Makefile b/fuzzforge-runner/Makefile
deleted file mode 100644
index eb78637..0000000
--- a/fuzzforge-runner/Makefile
+++ /dev/null
@@ -1,14 +0,0 @@
-.PHONY: lint test format check
-
-lint:
- uv run ruff check src tests
- uv run mypy src
-
-test:
- uv run pytest tests
-
-format:
- uv run ruff format src tests
- uv run ruff check --fix src tests
-
-check: lint test
diff --git a/fuzzforge-runner/README.md b/fuzzforge-runner/README.md
deleted file mode 100644
index 570abdd..0000000
--- a/fuzzforge-runner/README.md
+++ /dev/null
@@ -1,44 +0,0 @@
-# FuzzForge Runner
-
-Direct execution engine for FuzzForge OSS. Provides simplified module and workflow execution without requiring Temporal or external infrastructure.
-
-## Overview
-
-The Runner is designed for local-first operation, executing FuzzForge modules directly in containerized sandboxes (Docker/Podman) without workflow orchestration overhead.
-
-## Features
-
-- Direct module execution in isolated containers
-- Sequential workflow orchestration (no Temporal required)
-- Local filesystem storage (S3 optional)
-- SQLite-based state management (optional)
-
-## Usage
-
-```python
-from fuzzforge_runner import Runner
-from fuzzforge_runner.settings import Settings
-
-settings = Settings()
-runner = Runner(settings)
-
-# Execute a single module
-result = await runner.execute_module(
- module_identifier="my-module",
- project_path="/path/to/project",
-)
-
-# Execute a workflow (sequential steps)
-result = await runner.execute_workflow(
- workflow_definition=workflow,
- project_path="/path/to/project",
-)
-```
-
-## Configuration
-
-Environment variables:
-
-- `FUZZFORGE_STORAGE_PATH`: Local storage directory (default: `~/.fuzzforge/storage`)
-- `FUZZFORGE_ENGINE_TYPE`: Container engine (`docker` or `podman`)
-- `FUZZFORGE_ENGINE_SOCKET`: Container socket path
diff --git a/fuzzforge-runner/mypy.ini b/fuzzforge-runner/mypy.ini
deleted file mode 100644
index be0671c..0000000
--- a/fuzzforge-runner/mypy.ini
+++ /dev/null
@@ -1,2 +0,0 @@
-[mypy]
-strict = true
diff --git a/fuzzforge-runner/pyproject.toml b/fuzzforge-runner/pyproject.toml
deleted file mode 100644
index e6fab86..0000000
--- a/fuzzforge-runner/pyproject.toml
+++ /dev/null
@@ -1,26 +0,0 @@
-[project]
-name = "fuzzforge-runner"
-version = "0.0.1"
-description = "FuzzForge Runner - Direct execution engine for FuzzForge OSS."
-authors = []
-readme = "README.md"
-requires-python = ">=3.14"
-dependencies = [
- "fuzzforge-common",
- "structlog>=25.5.0",
- "pydantic>=2.12.4",
- "pydantic-settings>=2.8.1",
-]
-
-[project.scripts]
-fuzzforge-runner = "fuzzforge_runner.__main__:main"
-
-[build-system]
-requires = ["hatchling"]
-build-backend = "hatchling.build"
-
-[tool.hatch.build.targets.wheel]
-packages = ["src/fuzzforge_runner"]
-
-[tool.uv.sources]
-fuzzforge-common = { workspace = true }
diff --git a/fuzzforge-runner/pytest.ini b/fuzzforge-runner/pytest.ini
deleted file mode 100644
index c8c9c75..0000000
--- a/fuzzforge-runner/pytest.ini
+++ /dev/null
@@ -1,3 +0,0 @@
-[pytest]
-asyncio_mode = auto
-asyncio_default_fixture_loop_scope = function
diff --git a/fuzzforge-runner/ruff.toml b/fuzzforge-runner/ruff.toml
deleted file mode 100644
index b9f8af9..0000000
--- a/fuzzforge-runner/ruff.toml
+++ /dev/null
@@ -1 +0,0 @@
-extend = "../ruff.toml"
diff --git a/fuzzforge-runner/src/fuzzforge_runner/__init__.py b/fuzzforge-runner/src/fuzzforge_runner/__init__.py
deleted file mode 100644
index 16f6ea6..0000000
--- a/fuzzforge-runner/src/fuzzforge_runner/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-"""FuzzForge Runner - Direct execution engine for FuzzForge OSS."""
-
-from fuzzforge_runner.runner import Runner
-from fuzzforge_runner.settings import Settings
-
-__all__ = [
- "Runner",
- "Settings",
-]
diff --git a/fuzzforge-runner/src/fuzzforge_runner/__main__.py b/fuzzforge-runner/src/fuzzforge_runner/__main__.py
deleted file mode 100644
index 36e4131..0000000
--- a/fuzzforge-runner/src/fuzzforge_runner/__main__.py
+++ /dev/null
@@ -1,28 +0,0 @@
-"""FuzzForge Runner CLI entry point."""
-
-from fuzzforge_runner.runner import Runner
-from fuzzforge_runner.settings import Settings
-
-
-def main() -> None:
- """Entry point for the FuzzForge Runner CLI.
-
- This is a minimal entry point that can be used for testing
- and direct execution. The primary interface is via the MCP server.
-
- """
- import argparse
-
- parser = argparse.ArgumentParser(description="FuzzForge Runner")
- parser.add_argument("--version", action="store_true", help="Print version and exit")
- args = parser.parse_args()
-
- if args.version:
- print("fuzzforge-runner 0.0.1") # noqa: T201
- return
-
- print("FuzzForge Runner - Use via MCP server or programmatically") # noqa: T201
-
-
-if __name__ == "__main__":
- main()
diff --git a/fuzzforge-runner/src/fuzzforge_runner/constants.py b/fuzzforge-runner/src/fuzzforge_runner/constants.py
deleted file mode 100644
index da836fc..0000000
--- a/fuzzforge-runner/src/fuzzforge_runner/constants.py
+++ /dev/null
@@ -1,21 +0,0 @@
-"""FuzzForge Runner constants."""
-
-from pydantic import UUID7
-
-#: Type alias for execution identifiers.
-type FuzzForgeExecutionIdentifier = UUID7
-
-#: Default directory name for module input inside sandbox.
-SANDBOX_INPUT_DIRECTORY: str = "/fuzzforge/input"
-
-#: Default directory name for module output inside sandbox.
-SANDBOX_OUTPUT_DIRECTORY: str = "/fuzzforge/output"
-
-#: Default archive filename for results.
-RESULTS_ARCHIVE_FILENAME: str = "results.tar.gz"
-
-#: Default configuration filename.
-MODULE_CONFIG_FILENAME: str = "config.json"
-
-#: Module entrypoint script name.
-MODULE_ENTRYPOINT: str = "module"
diff --git a/fuzzforge-runner/src/fuzzforge_runner/exceptions.py b/fuzzforge-runner/src/fuzzforge_runner/exceptions.py
deleted file mode 100644
index ad98b2a..0000000
--- a/fuzzforge-runner/src/fuzzforge_runner/exceptions.py
+++ /dev/null
@@ -1,27 +0,0 @@
-"""FuzzForge Runner exceptions."""
-
-from __future__ import annotations
-
-
-class RunnerError(Exception):
- """Base exception for all Runner errors."""
-
-
-class ModuleNotFoundError(RunnerError):
- """Raised when a module cannot be found."""
-
-
-class ModuleExecutionError(RunnerError):
- """Raised when module execution fails."""
-
-
-class WorkflowExecutionError(RunnerError):
- """Raised when workflow execution fails."""
-
-
-class StorageError(RunnerError):
- """Raised when storage operations fail."""
-
-
-class SandboxError(RunnerError):
- """Raised when sandbox operations fail."""
diff --git a/fuzzforge-runner/src/fuzzforge_runner/executor.py b/fuzzforge-runner/src/fuzzforge_runner/executor.py
deleted file mode 100644
index 179ea63..0000000
--- a/fuzzforge-runner/src/fuzzforge_runner/executor.py
+++ /dev/null
@@ -1,785 +0,0 @@
-"""FuzzForge Runner - Direct module execution engine.
-
-This module provides direct execution of FuzzForge modules without
-requiring Temporal workflow orchestration. It's designed for local
-development and OSS deployment scenarios.
-
-"""
-
-from __future__ import annotations
-
-import json
-from io import BytesIO
-from pathlib import Path, PurePath
-from tarfile import TarFile, TarInfo
-from tarfile import open as Archive # noqa: N812
-from tempfile import NamedTemporaryFile, TemporaryDirectory
-from typing import TYPE_CHECKING, Any, cast
-
-from fuzzforge_common.sandboxes.engines.docker.configuration import DockerConfiguration
-from fuzzforge_common.sandboxes.engines.podman.configuration import PodmanConfiguration
-
-from fuzzforge_runner.constants import (
- MODULE_ENTRYPOINT,
- RESULTS_ARCHIVE_FILENAME,
- SANDBOX_INPUT_DIRECTORY,
- SANDBOX_OUTPUT_DIRECTORY,
- FuzzForgeExecutionIdentifier,
-)
-from fuzzforge_runner.exceptions import ModuleExecutionError, SandboxError
-
-if TYPE_CHECKING:
- from fuzzforge_common.sandboxes.engines.base.engine import AbstractFuzzForgeSandboxEngine
- from fuzzforge_runner.settings import EngineSettings, Settings
- from structlog.stdlib import BoundLogger
-
-
-def get_logger() -> BoundLogger:
- """Get structlog logger instance.
-
- :returns: Configured structlog logger.
-
- """
- from structlog import get_logger # noqa: PLC0415
-
- return cast("BoundLogger", get_logger())
-
-
-class ModuleExecutor:
- """Direct executor for FuzzForge modules.
-
- Handles the complete lifecycle of module execution:
- - Spawning isolated sandbox containers
- - Pushing input assets and configuration
- - Running the module
- - Pulling output results
- - Cleanup
-
- """
-
- #: Full settings including engine and registry.
- _settings: Settings
- #: Engine settings for container operations.
- _engine_settings: EngineSettings
-
- def __init__(self, settings: Settings) -> None:
- """Initialize an instance of the class.
-
- :param settings: FuzzForge runner settings.
-
- """
- self._settings = settings
- self._engine_settings = settings.engine
-
- def _get_engine_configuration(self) -> DockerConfiguration | PodmanConfiguration:
- """Get the appropriate engine configuration.
-
- :returns: Engine configuration based on settings.
-
- Note: This is only used when socket mode is explicitly needed.
- The default is now PodmanCLI with custom storage paths.
-
- """
- from fuzzforge_common.sandboxes.engines.enumeration import FuzzForgeSandboxEngines
-
- # Ensure socket has proper scheme
- socket = self._engine_settings.socket
- if not socket.startswith(("unix://", "tcp://", "http://", "ssh://")):
- socket = f"unix://{socket}"
-
- if self._engine_settings.type == "docker":
- return DockerConfiguration(
- kind=FuzzForgeSandboxEngines.DOCKER,
- socket=socket,
- )
- return PodmanConfiguration(
- kind=FuzzForgeSandboxEngines.PODMAN,
- socket=socket,
- )
-
- def _get_engine(self) -> AbstractFuzzForgeSandboxEngine:
- """Get the container engine instance.
-
- Uses DockerCLI by default for simplicity (works on Linux, macOS, Windows).
- PodmanCLI is used when engine type is set to 'podman'.
-
- :returns: Configured container engine.
-
- """
- from fuzzforge_common.sandboxes.engines.docker import DockerCLI
- from fuzzforge_common.sandboxes.engines.podman import PodmanCLI
-
- # Use PodmanCLI for Podman (with custom storage under Snap)
- if self._engine_settings.type == "podman":
- return PodmanCLI(
- graphroot=self._engine_settings.graphroot,
- runroot=self._engine_settings.runroot,
- )
-
- # Use DockerCLI for Docker (default)
- return DockerCLI()
-
- def _check_image_exists(self, module_identifier: str) -> bool:
- """Check if a module image exists locally.
-
- :param module_identifier: Name/identifier of the module image.
- :returns: True if image exists, False otherwise.
-
- """
- engine = self._get_engine()
-
- # Try common tags
- tags_to_check = ["latest", "0.1.0", "0.0.1"]
-
- # Try multiple naming conventions:
- # - fuzzforge-{name}:{tag} (OSS local builds)
- # - fuzzforge-module-{name}:{tag} (OSS local builds with module prefix)
- # - localhost/fuzzforge-module-{name}:{tag} (standard convention)
- # - localhost/{name}:{tag} (legacy/short form)
-
- # For OSS local builds (no localhost/ prefix)
- for tag in tags_to_check:
- # Check direct module name (fuzzforge-cargo-fuzzer:0.1.0)
- if engine.image_exists(f"{module_identifier}:{tag}"):
- return True
- # Check with fuzzforge- prefix if not already present
- if not module_identifier.startswith("fuzzforge-"):
- if engine.image_exists(f"fuzzforge-{module_identifier}:{tag}"):
- return True
-
- # For registry-style naming (localhost/ prefix)
- name_prefixes = [f"fuzzforge-module-{module_identifier}", module_identifier]
-
- for prefix in name_prefixes:
- for tag in tags_to_check:
- image_name = f"localhost/{prefix}:{tag}"
- if engine.image_exists(image_name):
- return True
-
- return False
-
- def _get_local_image_name(self, module_identifier: str) -> str:
- """Get the full local image name for a module.
-
- :param module_identifier: Name/identifier of the module.
- :returns: Full image name (may or may not have localhost prefix).
-
- """
- engine = self._get_engine()
-
- # Try common tags
- tags_to_check = ["latest", "0.1.0", "0.0.1"]
-
- # Check OSS local builds first (no localhost/ prefix)
- for tag in tags_to_check:
- # Direct module name (fuzzforge-cargo-fuzzer:0.1.0)
- direct_name = f"{module_identifier}:{tag}"
- if engine.image_exists(direct_name):
- return direct_name
-
- # With fuzzforge- prefix if not already present
- if not module_identifier.startswith("fuzzforge-"):
- prefixed_name = f"fuzzforge-{module_identifier}:{tag}"
- if engine.image_exists(prefixed_name):
- return prefixed_name
-
- # Check registry-style naming (localhost/ prefix)
- for tag in tags_to_check:
- # Standard convention: localhost/fuzzforge-module-{name}:{tag}
- prefixed_name = f"localhost/fuzzforge-module-{module_identifier}:{tag}"
- if engine.image_exists(prefixed_name):
- return prefixed_name
-
- # Legacy short form: localhost/{name}:{tag}
- short_name = f"localhost/{module_identifier}:{tag}"
- if engine.image_exists(short_name):
- return short_name
-
- # Default fallback
- return f"localhost/{module_identifier}:latest"
-
- def _pull_module_image(self, module_identifier: str, registry_url: str, tag: str = "latest") -> None:
- """Pull a module image from the container registry.
-
- :param module_identifier: Name/identifier of the module to pull.
- :param registry_url: Container registry URL.
- :param tag: Image tag to pull.
- :raises SandboxError: If pull fails.
-
- """
- logger = get_logger()
- engine = self._get_engine()
-
- # Construct full image name
- remote_image = f"{registry_url}/fuzzforge-module-{module_identifier}:{tag}"
- local_image = f"localhost/{module_identifier}:{tag}"
-
- logger.info("pulling module image from registry", module=module_identifier, remote_image=remote_image)
-
- try:
- # Pull the image using engine abstraction
- engine.pull_image(remote_image, timeout=300)
-
- logger.info("module image pulled successfully", module=module_identifier)
-
- # Tag the image locally for consistency
- engine.tag_image(remote_image, local_image)
-
- logger.debug("tagged image locally", local_image=local_image)
-
- except TimeoutError as exc:
- message = f"Module image pull timed out after 5 minutes: {module_identifier}"
- raise SandboxError(message) from exc
- except Exception as exc:
- message = (
- f"Failed to pull module image '{module_identifier}': {exc}\n"
- f"Registry: {registry_url}\n"
- f"Image: {remote_image}"
- )
- raise SandboxError(message) from exc
-
- def _ensure_module_image(self, module_identifier: str, registry_url: str = "", tag: str = "latest") -> None:
- """Ensure module image exists, pulling it if necessary.
-
- :param module_identifier: Name/identifier of the module image.
- :param registry_url: Container registry URL to pull from (empty = local-only mode).
- :param tag: Image tag to pull.
- :raises SandboxError: If image not found locally and no registry configured.
-
- """
- logger = get_logger()
-
- if self._check_image_exists(module_identifier):
- logger.debug("module image exists locally", module=module_identifier)
- return
-
- # If no registry configured, we're in local-only mode
- if not registry_url:
- raise SandboxError(
- f"Module image '{module_identifier}' not found locally.\n"
- "Build it with: make build-modules\n"
- "\n"
- "Or configure a registry URL via FUZZFORGE_REGISTRY__URL environment variable."
- )
-
- logger.info(
- "module image not found locally, pulling from registry",
- module=module_identifier,
- registry=registry_url,
- info="This may take a moment on first run",
- )
- self._pull_module_image(module_identifier, registry_url, tag)
-
- # Verify image now exists
- if not self._check_image_exists(module_identifier):
- message = (
- f"Module image '{module_identifier}' still not found after pull attempt.\n"
- f"Tried to pull from: {registry_url}/fuzzforge-module-{module_identifier}:{tag}"
- )
- raise SandboxError(message)
-
- def spawn_sandbox(self, module_identifier: str, input_volume: Path | None = None) -> str:
- """Create and prepare a sandbox container for module execution.
-
- Automatically pulls the module image from registry if it doesn't exist locally.
-
- :param module_identifier: Name/identifier of the module image.
- :param input_volume: Optional path to mount as /fuzzforge/input in the container.
- :returns: The sandbox container identifier.
- :raises SandboxError: If sandbox creation fails.
-
- """
- logger = get_logger()
- engine = self._get_engine()
-
- # Ensure module image exists (auto-pull if needed)
- # Use registry settings from configuration
- registry_url = self._settings.registry.url
- tag = self._settings.registry.default_tag
- self._ensure_module_image(module_identifier, registry_url, tag)
-
- logger.info("spawning sandbox", module=module_identifier)
- try:
- image = self._get_local_image_name(module_identifier)
-
- # Build volume mappings
- volumes: dict[str, str] | None = None
- if input_volume:
- volumes = {str(input_volume): SANDBOX_INPUT_DIRECTORY}
-
- sandbox_id = engine.create_container(image=image, volumes=volumes)
- logger.info("sandbox spawned", sandbox=sandbox_id, module=module_identifier)
- return sandbox_id
-
- except TimeoutError as exc:
- message = f"Container creation timed out for module {module_identifier}"
- raise SandboxError(message) from exc
- except Exception as exc:
- message = f"Failed to spawn sandbox for module {module_identifier}"
- raise SandboxError(message) from exc
-
- def prepare_input_directory(
- self,
- assets_path: Path,
- configuration: dict[str, Any] | None = None,
- project_path: Path | None = None,
- execution_id: str | None = None,
- ) -> Path:
- """Prepare input directory with assets and configuration.
-
- Creates a directory with input.json describing all resources.
- This directory can be volume-mounted into the container.
-
- If assets_path is a directory, it is used directly (zero-copy mount).
- If assets_path is a file (e.g., tar.gz), it is extracted first.
-
- :param assets_path: Path to the assets (file or directory).
- :param configuration: Optional module configuration dict.
- :param project_path: Project directory for storing inputs in .fuzzforge/.
- :param execution_id: Execution ID for organizing inputs.
- :returns: Path to prepared input directory.
- :raises SandboxError: If preparation fails.
-
- """
- logger = get_logger()
-
- logger.info("preparing input directory", assets=str(assets_path))
-
- try:
- # If assets_path is already a directory, use it directly (zero-copy mount)
- if assets_path.exists() and assets_path.is_dir():
- # Create input.json directly in the source directory
- input_json_path = assets_path / "input.json"
-
- # Scan files and build resource list
- resources = []
- for item in assets_path.iterdir():
- if item.name == "input.json":
- continue
- if item.is_file():
- resources.append(
- {
- "name": item.stem,
- "description": f"Input file: {item.name}",
- "kind": "unknown",
- "path": f"{SANDBOX_INPUT_DIRECTORY}/{item.name}",
- }
- )
- elif item.is_dir():
- resources.append(
- {
- "name": item.name,
- "description": f"Input directory: {item.name}",
- "kind": "unknown",
- "path": f"{SANDBOX_INPUT_DIRECTORY}/{item.name}",
- }
- )
-
- input_data = {
- "settings": configuration or {},
- "resources": resources,
- }
- input_json_path.write_text(json.dumps(input_data, indent=2))
-
- logger.debug("using source directory directly", path=str(assets_path))
- return assets_path
-
- # File input: extract to a directory first
- # Determine input directory location
- if project_path:
- # Store inputs in .fuzzforge/inputs/ for visibility
- from fuzzforge_runner.storage import FUZZFORGE_DIR_NAME
- exec_id = execution_id or "latest"
- input_dir = project_path / FUZZFORGE_DIR_NAME / "inputs" / exec_id
- input_dir.mkdir(parents=True, exist_ok=True)
- # Clean previous contents if exists
- import shutil
- for item in input_dir.iterdir():
- if item.is_file():
- item.unlink()
- elif item.is_dir():
- shutil.rmtree(item)
- else:
- # Fallback to temporary directory
- from tempfile import mkdtemp
- input_dir = Path(mkdtemp(prefix="fuzzforge-input-"))
-
- # Copy/extract assets to input directory
- if assets_path.exists():
- if assets_path.is_file():
- # Check if it's a tar.gz archive that needs extraction
- if assets_path.suffix == ".gz" or assets_path.name.endswith(".tar.gz"):
- # Extract archive contents
- import tarfile
-
- with tarfile.open(assets_path, "r:gz") as tar:
- tar.extractall(path=input_dir)
- logger.debug("extracted tar.gz archive", archive=str(assets_path))
- else:
- # Single file - copy it
- import shutil
-
- shutil.copy2(assets_path, input_dir / assets_path.name)
- else:
- # Directory - copy all files (including subdirectories)
- import shutil
-
- for item in assets_path.iterdir():
- if item.is_file():
- shutil.copy2(item, input_dir / item.name)
- elif item.is_dir():
- shutil.copytree(item, input_dir / item.name, dirs_exist_ok=True)
-
- # Scan files and directories and build resource list
- resources = []
- for item in input_dir.iterdir():
- if item.name == "input.json":
- continue
- if item.is_file():
- resources.append(
- {
- "name": item.stem,
- "description": f"Input file: {item.name}",
- "kind": "unknown",
- "path": f"{SANDBOX_INPUT_DIRECTORY}/{item.name}",
- }
- )
- elif item.is_dir():
- resources.append(
- {
- "name": item.name,
- "description": f"Input directory: {item.name}",
- "kind": "unknown",
- "path": f"{SANDBOX_INPUT_DIRECTORY}/{item.name}",
- }
- )
-
- # Create input.json with settings and resources
- input_data = {
- "settings": configuration or {},
- "resources": resources,
- }
- input_json_path = input_dir / "input.json"
- input_json_path.write_text(json.dumps(input_data, indent=2))
-
- logger.debug("prepared input directory", resources=len(resources), path=str(input_dir))
- return input_dir
-
- except Exception as exc:
- message = f"Failed to prepare input directory"
- raise SandboxError(message) from exc
-
- def _push_config_to_sandbox(self, sandbox: str, configuration: dict[str, Any]) -> None:
- """Write module configuration to sandbox as config.json.
-
- :param sandbox: The sandbox container identifier.
- :param configuration: Configuration dictionary to write.
-
- """
- logger = get_logger()
- engine = self._get_engine()
-
- logger.info("writing configuration to sandbox", sandbox=sandbox)
-
- with NamedTemporaryFile(mode="w", suffix=".json", delete=False) as config_file:
- config_path = Path(config_file.name)
- config_file.write(json.dumps(configuration, indent=2))
-
- try:
- engine.copy_to_container(sandbox, config_path, SANDBOX_INPUT_DIRECTORY)
- except Exception as exc:
- message = f"Failed to copy config.json: {exc}"
- raise SandboxError(message) from exc
- finally:
- config_path.unlink()
-
- def run_module(self, sandbox: str) -> None:
- """Start the sandbox and execute the module.
-
- :param sandbox: The sandbox container identifier.
- :raises ModuleExecutionError: If module execution fails.
-
- """
- logger = get_logger()
- engine = self._get_engine()
-
- logger.info("starting sandbox and running module", sandbox=sandbox)
- try:
- # The container runs its ENTRYPOINT (uv run module) when started
- exit_code, stdout, stderr = engine.start_container_attached(sandbox, timeout=600)
-
- if exit_code != 0:
- logger.error("module execution failed", sandbox=sandbox, stderr=stderr)
- message = f"Module execution failed: {stderr}"
- raise ModuleExecutionError(message)
- logger.info("module execution completed", sandbox=sandbox)
-
- except TimeoutError as exc:
- message = f"Module execution timed out after 10 minutes in sandbox {sandbox}"
- raise ModuleExecutionError(message) from exc
- except ModuleExecutionError:
- raise
- except Exception as exc:
- message = f"Module execution failed in sandbox {sandbox}"
- raise ModuleExecutionError(message) from exc
-
- def pull_results_from_sandbox(self, sandbox: str) -> Path:
- """Pull the results archive from the sandbox.
-
- :param sandbox: The sandbox container identifier.
- :returns: Path to the downloaded results archive (tar.gz file).
- :raises SandboxError: If pull operation fails.
-
- """
- logger = get_logger()
- engine = self._get_engine()
-
- logger.info("pulling results from sandbox", sandbox=sandbox)
- try:
- # Create temporary directory for results
- from tempfile import mkdtemp
-
- temp_dir = Path(mkdtemp(prefix="fuzzforge-results-"))
-
- # Copy entire output directory from container
- try:
- engine.copy_from_container(sandbox, SANDBOX_OUTPUT_DIRECTORY, temp_dir)
- except Exception:
- # If output directory doesn't exist, that's okay - module may not have produced results
- logger.warning("no results found in sandbox", sandbox=sandbox)
-
- # Create tar archive from results directory
- import tarfile
-
- archive_file = NamedTemporaryFile(delete=False, suffix=".tar.gz")
- archive_path = Path(archive_file.name)
- archive_file.close()
-
- with tarfile.open(archive_path, "w:gz") as tar:
- # The output is extracted into a subdirectory named after the source
- output_subdir = temp_dir / "output"
- if output_subdir.exists():
- for item in output_subdir.iterdir():
- tar.add(item, arcname=item.name)
- else:
- for item in temp_dir.iterdir():
- tar.add(item, arcname=item.name)
-
- # Clean up temp directory
- import shutil
-
- shutil.rmtree(temp_dir, ignore_errors=True)
-
- logger.info("results pulled successfully", sandbox=sandbox, archive=str(archive_path))
- return archive_path
-
- except TimeoutError as exc:
- message = f"Timeout pulling results from sandbox {sandbox}"
- raise SandboxError(message) from exc
- except Exception as exc:
- message = f"Failed to pull results from sandbox {sandbox}"
- raise SandboxError(message) from exc
-
- def terminate_sandbox(self, sandbox: str) -> None:
- """Terminate and cleanup the sandbox container.
-
- :param sandbox: The sandbox container identifier.
-
- """
- logger = get_logger()
- engine = self._get_engine()
-
- logger.info("terminating sandbox", sandbox=sandbox)
- try:
- engine.remove_container(sandbox, force=True)
- logger.info("sandbox terminated", sandbox=sandbox)
- except Exception as exc:
- # Log but don't raise - cleanup should be best-effort
- logger.warning("failed to terminate sandbox", sandbox=sandbox, error=str(exc))
-
- async def execute(
- self,
- module_identifier: str,
- assets_path: Path,
- configuration: dict[str, Any] | None = None,
- project_path: Path | None = None,
- execution_id: str | None = None,
- ) -> Path:
- """Execute a module end-to-end.
-
- This is the main entry point that handles the complete execution flow:
- 1. Spawn sandbox
- 2. Push assets and configuration
- 3. Run module
- 4. Pull results
- 5. Terminate sandbox
-
- All intermediate files are stored in {project_path}/.fuzzforge/ for
- easy debugging and visibility.
-
- Source directories are mounted directly without tar.gz compression
- for better performance.
-
- :param module_identifier: Name/identifier of the module to execute.
- :param assets_path: Path to the input assets (file or directory).
- :param configuration: Optional module configuration.
- :param project_path: Project directory for .fuzzforge/ storage.
- :param execution_id: Execution ID for organizing files.
- :returns: Path to the results archive.
- :raises ModuleExecutionError: If any step fails.
-
- """
- logger = get_logger()
- sandbox: str | None = None
- input_dir: Path | None = None
- # Don't cleanup if we're using the source directory directly
- cleanup_input = False
-
- try:
- # 1. Prepare input directory with assets
- input_dir = self.prepare_input_directory(
- assets_path,
- configuration,
- project_path=project_path,
- execution_id=execution_id,
- )
-
- # Only cleanup if we created a temp directory (file input case)
- cleanup_input = input_dir != assets_path and project_path is None
-
- # 2. Spawn sandbox with volume mount
- sandbox = self.spawn_sandbox(module_identifier, input_volume=input_dir)
-
- # 3. Run module
- self.run_module(sandbox)
-
- # 4. Pull results
- results_path = self.pull_results_from_sandbox(sandbox)
-
- logger.info(
- "module execution completed successfully",
- module=module_identifier,
- results=str(results_path),
- )
-
- return results_path
-
- finally:
- # 5. Always cleanup sandbox
- if sandbox:
- self.terminate_sandbox(sandbox)
- # Only cleanup input if it was a temp directory
- if cleanup_input and input_dir and input_dir.exists():
- import shutil
- shutil.rmtree(input_dir, ignore_errors=True)
-
- # -------------------------------------------------------------------------
- # Continuous/Background Execution Methods
- # -------------------------------------------------------------------------
-
- def start_module_continuous(
- self,
- module_identifier: str,
- assets_path: Path,
- configuration: dict[str, Any] | None = None,
- project_path: Path | None = None,
- execution_id: str | None = None,
- ) -> dict[str, Any]:
- """Start a module in continuous/background mode without waiting.
-
- Returns immediately with container info. Use read_module_output() to
- get current status and stop_module_continuous() to stop.
-
- Source directories are mounted directly without tar.gz compression
- for better performance.
-
- :param module_identifier: Name/identifier of the module to execute.
- :param assets_path: Path to the input assets (file or directory).
- :param configuration: Optional module configuration.
- :param project_path: Project directory for .fuzzforge/ storage.
- :param execution_id: Execution ID for organizing files.
- :returns: Dict with container_id, input_dir for later cleanup.
-
- """
- logger = get_logger()
-
- # 1. Prepare input directory with assets
- input_dir = self.prepare_input_directory(
- assets_path,
- configuration,
- project_path=project_path,
- execution_id=execution_id,
- )
-
- # 2. Spawn sandbox with volume mount
- sandbox = self.spawn_sandbox(module_identifier, input_volume=input_dir)
-
- # 3. Start container (non-blocking)
- engine = self._get_engine()
- engine.start_container(sandbox)
-
- logger.info(
- "module started in continuous mode",
- module=module_identifier,
- container_id=sandbox,
- )
-
- return {
- "container_id": sandbox,
- "input_dir": str(input_dir),
- "module": module_identifier,
- }
-
- def read_module_output(self, container_id: str, output_file: str = f"{SANDBOX_OUTPUT_DIRECTORY}/stream.jsonl") -> str:
- """Read output file from a running module container.
-
- :param container_id: The container identifier.
- :param output_file: Path to output file inside container.
- :returns: File contents as string.
-
- """
- engine = self._get_engine()
- return engine.read_file_from_container(container_id, output_file)
-
- def get_module_status(self, container_id: str) -> str:
- """Get the status of a running module container.
-
- :param container_id: The container identifier.
- :returns: Container status (e.g., "running", "exited").
-
- """
- engine = self._get_engine()
- return engine.get_container_status(container_id)
-
- def stop_module_continuous(self, container_id: str, input_dir: str | None = None) -> Path:
- """Stop a continuously running module and collect results.
-
- :param container_id: The container identifier.
- :param input_dir: Optional input directory to cleanup.
- :returns: Path to the results archive.
-
- """
- logger = get_logger()
- engine = self._get_engine()
-
- try:
- # 1. Stop the container gracefully
- status = engine.get_container_status(container_id)
- if status == "running":
- engine.stop_container(container_id, timeout=10)
- logger.info("stopped running container", container_id=container_id)
-
- # 2. Pull results
- results_path = self.pull_results_from_sandbox(container_id)
-
- logger.info("collected results from continuous session", results=str(results_path))
-
- return results_path
-
- finally:
- # 3. Cleanup
- self.terminate_sandbox(container_id)
- if input_dir:
- import shutil
-
- shutil.rmtree(input_dir, ignore_errors=True)
diff --git a/fuzzforge-runner/src/fuzzforge_runner/orchestrator.py b/fuzzforge-runner/src/fuzzforge_runner/orchestrator.py
deleted file mode 100644
index aaaa1f5..0000000
--- a/fuzzforge-runner/src/fuzzforge_runner/orchestrator.py
+++ /dev/null
@@ -1,361 +0,0 @@
-"""FuzzForge Runner - Workflow orchestration without Temporal.
-
-This module provides simplified workflow orchestration for sequential
-module execution without requiring Temporal infrastructure.
-
-"""
-
-from __future__ import annotations
-
-from dataclasses import dataclass, field
-from datetime import UTC, datetime
-from pathlib import Path
-from typing import TYPE_CHECKING, Any, cast
-from uuid import uuid4
-
-from fuzzforge_runner.constants import FuzzForgeExecutionIdentifier
-from fuzzforge_runner.exceptions import WorkflowExecutionError
-from fuzzforge_runner.executor import ModuleExecutor
-
-if TYPE_CHECKING:
- from fuzzforge_runner.settings import Settings
- from fuzzforge_runner.storage import LocalStorage
- from structlog.stdlib import BoundLogger
-
-
-def get_logger() -> BoundLogger:
- """Get structlog logger instance.
-
- :returns: Configured structlog logger.
-
- """
- from structlog import get_logger # noqa: PLC0415
-
- return cast("BoundLogger", get_logger())
-
-
-@dataclass
-class WorkflowStep:
- """Represents a single step in a workflow."""
-
- #: Module identifier to execute.
- module_identifier: str
-
- #: Optional configuration for the module.
- configuration: dict[str, Any] | None = None
-
- #: Step name/label for logging.
- name: str | None = None
-
-
-@dataclass
-class WorkflowDefinition:
- """Defines a workflow as a sequence of module executions."""
-
- #: Workflow name.
- name: str
-
- #: Ordered list of steps to execute.
- steps: list[WorkflowStep] = field(default_factory=list)
-
- #: Optional workflow description.
- description: str | None = None
-
-
-@dataclass
-class StepResult:
- """Result of a single workflow step execution."""
-
- #: Step index (0-based).
- step_index: int
-
- #: Module that was executed.
- module_identifier: str
-
- #: Path to the results archive.
- results_path: Path
-
- #: Execution identifier.
- execution_id: str
-
- #: Execution start time.
- started_at: datetime
-
- #: Execution end time.
- completed_at: datetime
-
- #: Whether execution was successful.
- success: bool = True
-
- #: Error message if failed.
- error: str | None = None
-
-
-@dataclass
-class WorkflowResult:
- """Result of a complete workflow execution."""
-
- #: Workflow execution identifier.
- execution_id: str
-
- #: Workflow name.
- name: str
-
- #: Results for each step.
- steps: list[StepResult] = field(default_factory=list)
-
- #: Overall success status.
- success: bool = True
-
- #: Final results path (from last step).
- final_results_path: Path | None = None
-
-
-class WorkflowOrchestrator:
- """Orchestrates sequential workflow execution.
-
- Executes workflow steps sequentially, passing output from each
- module as input to the next. No Temporal required.
-
- """
-
- #: Module executor instance.
- _executor: ModuleExecutor
-
- #: Storage backend.
- _storage: LocalStorage
-
- def __init__(self, executor: ModuleExecutor, storage: LocalStorage) -> None:
- """Initialize an instance of the class.
-
- :param executor: Module executor for running modules.
- :param storage: Storage backend for managing assets.
-
- """
- self._executor = executor
- self._storage = storage
-
- def _generate_execution_id(self) -> str:
- """Generate a unique execution identifier.
-
- :returns: UUID string for execution tracking.
-
- """
- return str(uuid4())
-
- async def execute_workflow(
- self,
- workflow: WorkflowDefinition,
- project_path: Path,
- initial_assets_path: Path | None = None,
- ) -> WorkflowResult:
- """Execute a workflow as a sequence of module executions.
-
- Each step receives the output of the previous step as input.
- The first step receives the initial assets.
-
- :param workflow: Workflow definition with steps to execute.
- :param project_path: Path to the project directory.
- :param initial_assets_path: Path to initial assets (optional).
- :returns: Workflow execution result.
- :raises WorkflowExecutionError: If workflow execution fails.
-
- """
- logger = get_logger()
- workflow_id = self._generate_execution_id()
-
- logger.info(
- "starting workflow execution",
- workflow=workflow.name,
- execution_id=workflow_id,
- steps=len(workflow.steps),
- )
-
- result = WorkflowResult(
- execution_id=workflow_id,
- name=workflow.name,
- )
-
- if not workflow.steps:
- logger.warning("workflow has no steps", workflow=workflow.name)
- return result
-
- # Track current assets path - starts with initial assets, then uses previous step output
- current_assets: Path | None = initial_assets_path
-
- # If no initial assets, try to get from project
- if current_assets is None:
- current_assets = self._storage.get_project_assets_path(project_path)
-
- try:
- for step_index, step in enumerate(workflow.steps):
- step_name = step.name or f"step-{step_index}"
- step_execution_id = self._generate_execution_id()
-
- logger.info(
- "executing workflow step",
- workflow=workflow.name,
- step=step_name,
- step_index=step_index,
- module=step.module_identifier,
- execution_id=step_execution_id,
- )
-
- started_at = datetime.now(UTC)
-
- try:
- # Ensure we have assets for this step
- if current_assets is None or not current_assets.exists():
- if step_index == 0:
- # First step with no assets - create empty archive
- current_assets = self._storage.create_empty_assets_archive(project_path)
- else:
- message = f"No assets available for step {step_index}"
- raise WorkflowExecutionError(message)
-
- # Execute the module (inputs stored in .fuzzforge/inputs/)
- results_path = await self._executor.execute(
- module_identifier=step.module_identifier,
- assets_path=current_assets,
- configuration=step.configuration,
- project_path=project_path,
- execution_id=step_execution_id,
- )
-
- completed_at = datetime.now(UTC)
-
- # Store results to persistent storage
- stored_path = self._storage.store_execution_results(
- project_path=project_path,
- workflow_id=workflow_id,
- step_index=step_index,
- execution_id=step_execution_id,
- results_path=results_path,
- )
-
- # Clean up temporary results archive after storing
- try:
- if results_path.exists() and results_path != stored_path:
- results_path.unlink()
- except Exception as cleanup_exc:
- logger.warning("failed to clean up temporary results", path=str(results_path), error=str(cleanup_exc))
-
- # Record step result with stored path
- step_result = StepResult(
- step_index=step_index,
- module_identifier=step.module_identifier,
- results_path=stored_path,
- execution_id=step_execution_id,
- started_at=started_at,
- completed_at=completed_at,
- success=True,
- )
- result.steps.append(step_result)
-
- # Next step uses this step's output
- current_assets = stored_path
-
- logger.info(
- "workflow step completed",
- step=step_name,
- step_index=step_index,
- duration_seconds=(completed_at - started_at).total_seconds(),
- )
-
- except Exception as exc:
- completed_at = datetime.now(UTC)
- error_msg = str(exc)
-
- step_result = StepResult(
- step_index=step_index,
- module_identifier=step.module_identifier,
- results_path=Path(),
- execution_id=step_execution_id,
- started_at=started_at,
- completed_at=completed_at,
- success=False,
- error=error_msg,
- )
- result.steps.append(step_result)
- result.success = False
-
- logger.error(
- "workflow step failed",
- step=step_name,
- step_index=step_index,
- error=error_msg,
- )
-
- # Stop workflow on failure
- break
-
- # Set final results path
- if result.steps and result.steps[-1].success:
- result.final_results_path = result.steps[-1].results_path
-
- logger.info(
- "workflow execution completed",
- workflow=workflow.name,
- execution_id=workflow_id,
- success=result.success,
- completed_steps=len([s for s in result.steps if s.success]),
- total_steps=len(workflow.steps),
- )
-
- return result
-
- except Exception as exc:
- message = f"Workflow execution failed: {exc}"
- logger.exception("workflow execution error", workflow=workflow.name)
- raise WorkflowExecutionError(message) from exc
-
- async def execute_single_module(
- self,
- module_identifier: str,
- project_path: Path,
- assets_path: Path | None = None,
- configuration: dict[str, Any] | None = None,
- ) -> StepResult:
- """Execute a single module (convenience method).
-
- This is a simplified interface for executing a single module
- outside of a workflow context.
-
- :param module_identifier: Module to execute.
- :param project_path: Project directory path.
- :param assets_path: Optional path to input assets.
- :param configuration: Optional module configuration.
- :returns: Execution result.
-
- """
- workflow = WorkflowDefinition(
- name=f"single-{module_identifier}",
- steps=[
- WorkflowStep(
- module_identifier=module_identifier,
- configuration=configuration,
- name="main",
- )
- ],
- )
-
- result = await self.execute_workflow(
- workflow=workflow,
- project_path=project_path,
- initial_assets_path=assets_path,
- )
-
- if result.steps:
- return result.steps[0]
-
- # Should not happen, but handle gracefully
- return StepResult(
- step_index=0,
- module_identifier=module_identifier,
- results_path=Path(),
- execution_id=result.execution_id,
- started_at=datetime.now(UTC),
- completed_at=datetime.now(UTC),
- success=False,
- error="No step results produced",
- )
diff --git a/fuzzforge-runner/src/fuzzforge_runner/runner.py b/fuzzforge-runner/src/fuzzforge_runner/runner.py
deleted file mode 100644
index 8ee9b27..0000000
--- a/fuzzforge-runner/src/fuzzforge_runner/runner.py
+++ /dev/null
@@ -1,452 +0,0 @@
-"""FuzzForge Runner - Main runner interface.
-
-This module provides the high-level interface for FuzzForge OSS,
-coordinating module execution, workflow orchestration, and storage.
-
-"""
-
-from __future__ import annotations
-
-from dataclasses import dataclass
-from pathlib import Path
-from typing import TYPE_CHECKING, Any, cast
-
-from fuzzforge_runner.executor import ModuleExecutor
-from fuzzforge_runner.orchestrator import (
- StepResult,
- WorkflowDefinition,
- WorkflowOrchestrator,
- WorkflowResult,
- WorkflowStep,
-)
-from fuzzforge_runner.settings import Settings
-from fuzzforge_runner.storage import LocalStorage
-
-if TYPE_CHECKING:
- from structlog.stdlib import BoundLogger
-
-
-def get_logger() -> BoundLogger:
- """Get structlog logger instance.
-
- :returns: Configured structlog logger.
-
- """
- from structlog import get_logger # noqa: PLC0415
-
- return cast("BoundLogger", get_logger())
-
-
-@dataclass
-class ModuleInfo:
- """Information about an available module."""
-
- #: Module identifier/name.
- identifier: str
-
- #: Module description.
- description: str | None = None
-
- #: Module version.
- version: str | None = None
-
- #: Whether module image exists locally.
- available: bool = True
-
- #: Module identifiers that should run before this one.
- suggested_predecessors: list[str] | None = None
-
- #: Whether module supports continuous/background execution.
- continuous_mode: bool = False
-
- #: Typical use cases and scenarios for this module.
- use_cases: list[str] | None = None
-
- #: Common inputs (e.g., ["rust-source-code", "Cargo.toml"]).
- common_inputs: list[str] | None = None
-
- #: Output artifacts produced (e.g., ["fuzzable_functions.json"]).
- output_artifacts: list[str] | None = None
-
- #: How AI should display/treat outputs.
- output_treatment: str | None = None
-
-
-class Runner:
- """Main FuzzForge Runner interface.
-
- Provides a unified interface for:
- - Module discovery and execution
- - Workflow orchestration
- - Project and asset management
-
- This is the primary entry point for OSS users and the MCP server.
-
- """
-
- #: Runner settings.
- _settings: Settings
-
- #: Module executor.
- _executor: ModuleExecutor
-
- #: Local storage backend.
- _storage: LocalStorage
-
- #: Workflow orchestrator.
- _orchestrator: WorkflowOrchestrator
-
- def __init__(self, settings: Settings | None = None) -> None:
- """Initialize an instance of the class.
-
- :param settings: Runner settings. If None, loads from environment.
-
- """
- self._settings = settings or Settings()
- self._executor = ModuleExecutor(self._settings)
- self._storage = LocalStorage(self._settings.storage.path)
- self._orchestrator = WorkflowOrchestrator(self._executor, self._storage)
-
- @property
- def settings(self) -> Settings:
- """Get runner settings.
-
- :returns: Current settings instance.
-
- """
- return self._settings
-
- @property
- def storage(self) -> LocalStorage:
- """Get storage backend.
-
- :returns: Storage instance.
-
- """
- return self._storage
-
- # -------------------------------------------------------------------------
- # Project Management
- # -------------------------------------------------------------------------
-
- def init_project(self, project_path: Path) -> Path:
- """Initialize a new project.
-
- Creates necessary storage directories for a project.
-
- :param project_path: Path to the project directory.
- :returns: Path to the project storage directory.
-
- """
- logger = get_logger()
- logger.info("initializing project", path=str(project_path))
- return self._storage.init_project(project_path)
-
- def set_project_assets(self, project_path: Path, assets_path: Path) -> Path:
- """Set source path for a project (no copying).
-
- Just stores a reference to the source directory.
- The source is mounted directly into containers at runtime.
-
- :param project_path: Path to the project directory.
- :param assets_path: Path to source directory.
- :returns: The assets path (unchanged).
-
- """
- logger = get_logger()
- logger.info("setting project assets", project=str(project_path), assets=str(assets_path))
- return self._storage.set_project_assets(project_path, assets_path)
-
- # -------------------------------------------------------------------------
- # Module Discovery
- # -------------------------------------------------------------------------
-
- def list_modules(self) -> list[ModuleInfo]:
- """List available modules.
-
- Discovers modules from the configured modules directory.
-
- :returns: List of available modules.
-
- """
- logger = get_logger()
- modules: list[ModuleInfo] = []
-
- modules_path = self._settings.modules_path
- if not modules_path.exists():
- logger.warning("modules directory not found", path=str(modules_path))
- return modules
-
- # Look for module directories (each should have a Dockerfile or be a built image)
- for item in modules_path.iterdir():
- if item.is_dir():
- # Check for module markers
- has_dockerfile = (item / "Dockerfile").exists()
- has_pyproject = (item / "pyproject.toml").exists()
-
- if has_dockerfile or has_pyproject:
- modules.append(
- ModuleInfo(
- identifier=item.name,
- available=has_dockerfile,
- )
- )
-
- logger.info("discovered modules", count=len(modules))
- return modules
-
- def list_module_images(
- self,
- filter_prefix: str = "fuzzforge-",
- include_all_tags: bool = True,
- ) -> list[ModuleInfo]:
- """List available module images from the container engine.
-
- Uses the container engine API to discover built module images.
- Reads metadata from pyproject.toml inside each image.
-
- :param filter_prefix: Prefix to filter images (default: "fuzzforge-").
- :param include_all_tags: If True, include all image tags, not just 'latest'.
- :returns: List of available module images.
-
- """
- import tomllib # noqa: PLC0415
-
- logger = get_logger()
- modules: list[ModuleInfo] = []
- seen: set[str] = set()
-
- # Infrastructure images to skip
- skip_images = {"fuzzforge-modules-sdk", "fuzzforge-runner", "fuzzforge-api"}
-
- engine = self._executor._get_engine()
- images = engine.list_images(filter_prefix=filter_prefix)
-
- for image in images:
- # Only include :latest images unless include_all_tags is set
- if not include_all_tags and image.tag != "latest":
- continue
-
- # Extract module name from repository
- full_name = image.repository.split("/")[-1]
-
- # Skip infrastructure images
- if full_name in skip_images:
- continue
-
- # Extract clean module name (remove fuzzforge-module- prefix if present)
- if full_name.startswith("fuzzforge-module-"):
- module_name = full_name.replace("fuzzforge-module-", "")
- else:
- module_name = full_name
-
- # Skip UUID-like names (temporary/broken containers)
- if module_name.count("-") >= 4 and len(module_name) > 30:
- continue
-
- # Add unique modules
- if module_name not in seen:
- seen.add(module_name)
-
- # Read metadata from pyproject.toml inside the image
- image_ref = f"{image.repository}:{image.tag}"
- module_meta = self._get_module_metadata_from_image(engine, image_ref)
-
- # Get basic info from pyproject.toml [project] section
- project_info = module_meta.get("_project", {})
- fuzzforge_meta = module_meta.get("module", {})
-
- modules.append(
- ModuleInfo(
- identifier=fuzzforge_meta.get("identifier", module_name),
- description=project_info.get("description"),
- version=project_info.get("version", image.tag),
- available=True,
- suggested_predecessors=fuzzforge_meta.get("suggested_predecessors", []),
- continuous_mode=fuzzforge_meta.get("continuous_mode", False),
- use_cases=fuzzforge_meta.get("use_cases", []),
- common_inputs=fuzzforge_meta.get("common_inputs", []),
- output_artifacts=fuzzforge_meta.get("output_artifacts", []),
- output_treatment=fuzzforge_meta.get("output_treatment"),
- )
- )
-
- logger.info("listed module images", count=len(modules))
- return modules
-
- def _get_module_metadata_from_image(self, engine: Any, image_ref: str) -> dict:
- """Read module metadata from pyproject.toml inside a container image.
-
- :param engine: Container engine instance.
- :param image_ref: Image reference (e.g., "fuzzforge-rust-analyzer:latest").
- :returns: Dict with module metadata from [tool.fuzzforge] section.
-
- """
- import tomllib # noqa: PLC0415
-
- logger = get_logger()
-
- try:
- # Read pyproject.toml from the image
- content = engine.read_file_from_image(image_ref, "/app/pyproject.toml")
- if not content:
- logger.debug("no pyproject.toml found in image", image=image_ref)
- return {}
-
- pyproject = tomllib.loads(content)
-
- # Return the [tool.fuzzforge] section plus [project] info
- result = pyproject.get("tool", {}).get("fuzzforge", {})
- result["_project"] = pyproject.get("project", {})
- return result
-
- except Exception as exc:
- logger.debug("failed to read metadata from image", image=image_ref, error=str(exc))
- return {}
-
- def get_module_info(self, module_identifier: str) -> ModuleInfo | None:
- """Get information about a specific module.
-
- :param module_identifier: Module identifier to look up.
- :returns: Module info, or None if not found.
-
- """
- modules = self.list_modules()
- for module in modules:
- if module.identifier == module_identifier:
- return module
- return None
-
- # -------------------------------------------------------------------------
- # Module Execution
- # -------------------------------------------------------------------------
-
- async def execute_module(
- self,
- module_identifier: str,
- project_path: Path,
- configuration: dict[str, Any] | None = None,
- assets_path: Path | None = None,
- ) -> StepResult:
- """Execute a single module.
-
- :param module_identifier: Module to execute.
- :param project_path: Path to the project directory.
- :param configuration: Optional module configuration.
- :param assets_path: Optional path to input assets.
- :returns: Execution result.
-
- """
- logger = get_logger()
- logger.info(
- "executing module",
- module=module_identifier,
- project=str(project_path),
- )
-
- return await self._orchestrator.execute_single_module(
- module_identifier=module_identifier,
- project_path=project_path,
- assets_path=assets_path,
- configuration=configuration,
- )
-
- # -------------------------------------------------------------------------
- # Workflow Execution
- # -------------------------------------------------------------------------
-
- async def execute_workflow(
- self,
- workflow: WorkflowDefinition,
- project_path: Path,
- initial_assets_path: Path | None = None,
- ) -> WorkflowResult:
- """Execute a workflow.
-
- :param workflow: Workflow definition with steps.
- :param project_path: Path to the project directory.
- :param initial_assets_path: Optional path to initial assets.
- :returns: Workflow execution result.
-
- """
- logger = get_logger()
- logger.info(
- "executing workflow",
- workflow=workflow.name,
- project=str(project_path),
- steps=len(workflow.steps),
- )
-
- return await self._orchestrator.execute_workflow(
- workflow=workflow,
- project_path=project_path,
- initial_assets_path=initial_assets_path,
- )
-
- def create_workflow(
- self,
- name: str,
- steps: list[tuple[str, dict[str, Any] | None]],
- description: str | None = None,
- ) -> WorkflowDefinition:
- """Create a workflow definition.
-
- Convenience method for creating workflows programmatically.
-
- :param name: Workflow name.
- :param steps: List of (module_identifier, configuration) tuples.
- :param description: Optional workflow description.
- :returns: Workflow definition.
-
- """
- workflow_steps = [
- WorkflowStep(
- module_identifier=module_id,
- configuration=config,
- name=f"step-{i}",
- )
- for i, (module_id, config) in enumerate(steps)
- ]
-
- return WorkflowDefinition(
- name=name,
- steps=workflow_steps,
- description=description,
- )
-
- # -------------------------------------------------------------------------
- # Results Management
- # -------------------------------------------------------------------------
-
- def get_execution_results(
- self,
- project_path: Path,
- execution_id: str,
- ) -> Path | None:
- """Get results for an execution.
-
- :param project_path: Path to the project directory.
- :param execution_id: Execution ID.
- :returns: Path to results archive, or None if not found.
-
- """
- return self._storage.get_execution_results(project_path, execution_id)
-
- def list_executions(self, project_path: Path) -> list[str]:
- """List all executions for a project.
-
- :param project_path: Path to the project directory.
- :returns: List of execution IDs.
-
- """
- return self._storage.list_executions(project_path)
-
- def extract_results(self, results_path: Path, destination: Path) -> Path:
- """Extract results archive to a directory.
-
- :param results_path: Path to results archive.
- :param destination: Destination directory.
- :returns: Path to extracted directory.
-
- """
- return self._storage.extract_results(results_path, destination)
diff --git a/fuzzforge-runner/src/fuzzforge_runner/settings.py b/fuzzforge-runner/src/fuzzforge_runner/settings.py
deleted file mode 100644
index fa88272..0000000
--- a/fuzzforge-runner/src/fuzzforge_runner/settings.py
+++ /dev/null
@@ -1,109 +0,0 @@
-"""FuzzForge Runner settings configuration."""
-
-from __future__ import annotations
-
-from enum import StrEnum
-from pathlib import Path
-from typing import Literal
-
-from pydantic import BaseModel, Field
-from pydantic_settings import BaseSettings, SettingsConfigDict
-
-
-class EngineType(StrEnum):
- """Supported container engine types."""
-
- DOCKER = "docker"
- PODMAN = "podman"
-
-
-class EngineSettings(BaseModel):
- """Container engine configuration."""
-
- #: Type of container engine to use. Docker is the default for simplicity.
- type: EngineType = EngineType.DOCKER
-
- #: Path to the container engine socket (only used as fallback for socket-based engines).
- socket: str = Field(default="")
-
- #: Custom graph root for Podman storage (only used with Podman under Snap).
- graphroot: Path = Field(default=Path.home() / ".fuzzforge" / "containers" / "storage")
-
- #: Custom run root for Podman runtime state (only used with Podman under Snap).
- runroot: Path = Field(default=Path.home() / ".fuzzforge" / "containers" / "run")
-
-
-class StorageSettings(BaseModel):
- """Storage configuration for local filesystem storage.
-
- OSS uses direct file mounting without archiving for simplicity.
- """
-
- #: Base path for local storage.
- path: Path = Field(default=Path.home() / ".fuzzforge" / "storage")
-
-
-class ProjectSettings(BaseModel):
- """Project configuration."""
-
- #: Default path for FuzzForge projects.
- default_path: Path = Field(default=Path.home() / ".fuzzforge" / "projects")
-
-
-class RegistrySettings(BaseModel):
- """Container registry configuration for module images.
-
- By default, registry URL is empty (local-only mode). When empty,
- modules must be built locally with `make build-modules`.
- Set via FUZZFORGE_REGISTRY__URL environment variable if needed.
- """
-
- #: Registry URL for pulling module images (empty = local-only mode).
- url: str = Field(default="")
-
- #: Default tag to use when pulling images.
- default_tag: str = Field(default="latest")
-
- #: Registry username for authentication (optional).
- username: str | None = None
-
- #: Registry password/token for authentication (optional).
- password: str | None = None
-
-
-class Settings(BaseSettings):
- """FuzzForge Runner settings.
-
- Settings can be configured via environment variables with the prefix
- ``FUZZFORGE_``. Nested settings use underscore as delimiter.
-
- Example:
- ``FUZZFORGE_ENGINE_TYPE=docker``
- ``FUZZFORGE_STORAGE_PATH=/data/fuzzforge``
- ``FUZZFORGE_MODULES_PATH=/path/to/modules``
-
- """
-
- model_config = SettingsConfigDict(
- case_sensitive=False,
- env_nested_delimiter="__",
- env_prefix="FUZZFORGE_",
- )
-
- #: Container engine settings.
- engine: EngineSettings = Field(default_factory=EngineSettings)
-
- #: Storage settings.
- storage: StorageSettings = Field(default_factory=StorageSettings)
-
- #: Project settings.
- project: ProjectSettings = Field(default_factory=ProjectSettings)
-
- #: Container registry settings.
- registry: RegistrySettings = Field(default_factory=RegistrySettings)
-
- #: Path to modules directory (for development/local builds).
- modules_path: Path = Field(default=Path.home() / ".fuzzforge" / "modules")
-
- #: Enable debug logging.
- debug: bool = False
diff --git a/fuzzforge-runner/src/fuzzforge_runner/storage.py b/fuzzforge-runner/src/fuzzforge_runner/storage.py
deleted file mode 100644
index 6b629da..0000000
--- a/fuzzforge-runner/src/fuzzforge_runner/storage.py
+++ /dev/null
@@ -1,363 +0,0 @@
-"""FuzzForge Runner - Local filesystem storage.
-
-This module provides local filesystem storage for OSS deployments.
-
-Storage is placed directly in the project directory as `.fuzzforge/`
-for maximum visibility and ease of debugging.
-
-In OSS mode, source files are referenced (not copied) and mounted
-directly into containers at runtime for zero-copy performance.
-
-"""
-
-from __future__ import annotations
-
-import shutil
-from pathlib import Path
-from tarfile import open as Archive # noqa: N812
-from typing import TYPE_CHECKING, cast
-
-from fuzzforge_runner.constants import RESULTS_ARCHIVE_FILENAME
-from fuzzforge_runner.exceptions import StorageError
-
-if TYPE_CHECKING:
- from structlog.stdlib import BoundLogger
-
-#: Name of the FuzzForge storage directory within projects.
-FUZZFORGE_DIR_NAME: str = ".fuzzforge"
-
-
-def get_logger() -> BoundLogger:
- """Get structlog logger instance.
-
- :returns: Configured structlog logger.
-
- """
- from structlog import get_logger # noqa: PLC0415
-
- return cast("BoundLogger", get_logger())
-
-
-class LocalStorage:
- """Local filesystem storage backend for FuzzForge OSS.
-
- Provides lightweight storage for execution results while using
- direct source mounting (no copying) for input assets.
-
- Storage is placed directly in the project directory as `.fuzzforge/`
- so users can easily inspect outputs and configuration.
-
- Directory structure (inside project directory):
- {project_path}/.fuzzforge/
- config.json # Project config (source path reference)
- runs/ # Execution results
- {execution_id}/
- results.tar.gz
- {workflow_id}/
- modules/
- step-0-{exec_id}/
- results.tar.gz
-
- Source files are NOT copied - they are referenced and mounted directly.
-
- """
-
- #: Base path for global storage (only used for fallback/config).
- _base_path: Path
-
- def __init__(self, base_path: Path) -> None:
- """Initialize an instance of the class.
-
- :param base_path: Root directory for global storage (fallback only).
-
- """
- self._base_path = base_path
- self._ensure_base_path()
-
- def _ensure_base_path(self) -> None:
- """Ensure the base storage directory exists."""
- self._base_path.mkdir(parents=True, exist_ok=True)
-
- def _get_project_path(self, project_path: Path) -> Path:
- """Get the storage path for a project.
-
- Storage is placed directly inside the project as `.fuzzforge/`.
-
- :param project_path: Path to the project directory.
- :returns: Storage path for the project (.fuzzforge inside project).
-
- """
- return project_path / FUZZFORGE_DIR_NAME
-
- def init_project(self, project_path: Path) -> Path:
- """Initialize storage for a new project.
-
- Creates a .fuzzforge/ directory inside the project for storing:
- - assets/: Input files (source code, etc.)
- - inputs/: Prepared module inputs (for debugging)
- - runs/: Execution results from each module
-
- :param project_path: Path to the project directory.
- :returns: Path to the project storage directory.
-
- """
- logger = get_logger()
- storage_path = self._get_project_path(project_path)
-
- # Create directory structure (minimal for OSS)
- storage_path.mkdir(parents=True, exist_ok=True)
- (storage_path / "runs").mkdir(parents=True, exist_ok=True)
-
- # Create .gitignore to avoid committing large files
- gitignore_path = storage_path / ".gitignore"
- if not gitignore_path.exists():
- gitignore_content = """# FuzzForge storage - ignore large/temporary files
-# Execution results (can be very large)
-runs/
-
-# Project configuration
-!config.json
-"""
- gitignore_path.write_text(gitignore_content)
-
- logger.info("initialized project storage", project=project_path.name, storage=str(storage_path))
-
- return storage_path
-
- def get_project_assets_path(self, project_path: Path) -> Path | None:
- """Get the path to project assets (source directory).
-
- Returns the configured source path for the project.
- In OSS mode, this is just a reference to the user's source - no copying.
-
- :param project_path: Path to the project directory.
- :returns: Path to source directory, or None if not configured.
-
- """
- storage_path = self._get_project_path(project_path)
- config_path = storage_path / "config.json"
-
- if config_path.exists():
- import json
- config = json.loads(config_path.read_text())
- source_path = config.get("source_path")
- if source_path:
- path = Path(source_path)
- if path.exists():
- return path
-
- # Fallback: check if project_path itself is the source
- # (common case: user runs from their project directory)
- if (project_path / "Cargo.toml").exists() or (project_path / "src").exists():
- return project_path
-
- return None
-
- def set_project_assets(self, project_path: Path, assets_path: Path) -> Path:
- """Set the source path for a project (no copying).
-
- Just stores a reference to the source directory.
- The source is mounted directly into containers at runtime.
-
- :param project_path: Path to the project directory.
- :param assets_path: Path to source directory.
- :returns: The assets path (unchanged).
- :raises StorageError: If path doesn't exist.
-
- """
- import json
-
- logger = get_logger()
-
- if not assets_path.exists():
- raise StorageError(f"Assets path does not exist: {assets_path}")
-
- # Resolve to absolute path
- assets_path = assets_path.resolve()
-
- # Store reference in config
- storage_path = self._get_project_path(project_path)
- storage_path.mkdir(parents=True, exist_ok=True)
- config_path = storage_path / "config.json"
-
- config: dict = {}
- if config_path.exists():
- config = json.loads(config_path.read_text())
-
- config["source_path"] = str(assets_path)
- config_path.write_text(json.dumps(config, indent=2))
-
- logger.info("set project assets", project=project_path.name, source=str(assets_path))
- return assets_path
-
- def store_execution_results(
- self,
- project_path: Path,
- workflow_id: str | None,
- step_index: int,
- execution_id: str,
- results_path: Path,
- ) -> Path:
- """Store execution results.
-
- :param project_path: Path to the project directory.
- :param workflow_id: Workflow execution ID (None for standalone).
- :param step_index: Step index in workflow.
- :param execution_id: Module execution ID.
- :param results_path: Path to results archive to store.
- :returns: Path to the stored results.
- :raises StorageError: If storage operation fails.
-
- """
- logger = get_logger()
- storage_path = self._get_project_path(project_path)
-
- try:
- if workflow_id:
- # Part of workflow
- dest_dir = storage_path / "runs" / workflow_id / "modules" / f"step-{step_index}-{execution_id}"
- else:
- # Standalone execution
- dest_dir = storage_path / "runs" / execution_id
-
- dest_dir.mkdir(parents=True, exist_ok=True)
- dest_path = dest_dir / RESULTS_ARCHIVE_FILENAME
-
- shutil.copy2(results_path, dest_path)
-
- logger.info(
- "stored execution results",
- execution_id=execution_id,
- path=str(dest_path),
- )
-
- return dest_path
-
- except Exception as exc:
- message = f"Failed to store results: {exc}"
- raise StorageError(message) from exc
-
- def get_execution_results(
- self,
- project_path: Path,
- execution_id: str,
- workflow_id: str | None = None,
- step_index: int | None = None,
- ) -> Path | None:
- """Retrieve execution results.
-
- :param project_path: Path to the project directory.
- :param execution_id: Module execution ID.
- :param workflow_id: Workflow execution ID (None for standalone).
- :param step_index: Step index in workflow.
- :returns: Path to results archive, or None if not found.
-
- """
- storage_path = self._get_project_path(project_path)
-
- if workflow_id and step_index is not None:
- # Direct workflow path lookup
- results_path = (
- storage_path / "runs" / workflow_id / "modules" / f"step-{step_index}-{execution_id}" / RESULTS_ARCHIVE_FILENAME
- )
- if results_path.exists():
- return results_path
-
- # Try standalone path
- results_path = storage_path / "runs" / execution_id / RESULTS_ARCHIVE_FILENAME
- if results_path.exists():
- return results_path
-
- # Search for execution_id in all workflow runs
- runs_dir = storage_path / "runs"
- if runs_dir.exists():
- for workflow_dir in runs_dir.iterdir():
- if not workflow_dir.is_dir():
- continue
-
- # Check if this is a workflow directory (has 'modules' subdirectory)
- modules_dir = workflow_dir / "modules"
- if modules_dir.exists() and modules_dir.is_dir():
- # Search for step directories containing this execution_id
- for step_dir in modules_dir.iterdir():
- if step_dir.is_dir() and execution_id in step_dir.name:
- results_path = step_dir / RESULTS_ARCHIVE_FILENAME
- if results_path.exists():
- return results_path
-
- return None
-
- def list_executions(self, project_path: Path) -> list[str]:
- """List all execution IDs for a project.
-
- :param project_path: Path to the project directory.
- :returns: List of execution IDs.
-
- """
- storage_path = self._get_project_path(project_path)
- runs_dir = storage_path / "runs"
-
- if not runs_dir.exists():
- return []
-
- return [d.name for d in runs_dir.iterdir() if d.is_dir()]
-
- def delete_execution(self, project_path: Path, execution_id: str) -> bool:
- """Delete an execution and its results.
-
- :param project_path: Path to the project directory.
- :param execution_id: Execution ID to delete.
- :returns: True if deleted, False if not found.
-
- """
- logger = get_logger()
- storage_path = self._get_project_path(project_path)
- exec_path = storage_path / "runs" / execution_id
-
- if exec_path.exists():
- shutil.rmtree(exec_path)
- logger.info("deleted execution", execution_id=execution_id)
- return True
-
- return False
-
- def delete_project(self, project_path: Path) -> bool:
- """Delete all storage for a project.
-
- :param project_path: Path to the project directory.
- :returns: True if deleted, False if not found.
-
- """
- logger = get_logger()
- storage_path = self._get_project_path(project_path)
-
- if storage_path.exists():
- shutil.rmtree(storage_path)
- logger.info("deleted project storage", project=project_path.name)
- return True
-
- return False
-
- def extract_results(self, results_path: Path, destination: Path) -> Path:
- """Extract a results archive to a destination directory.
-
- :param results_path: Path to the results archive.
- :param destination: Directory to extract to.
- :returns: Path to extracted directory.
- :raises StorageError: If extraction fails.
-
- """
- logger = get_logger()
-
- try:
- destination.mkdir(parents=True, exist_ok=True)
-
- with Archive(results_path, "r:gz") as tar:
- tar.extractall(path=destination)
-
- logger.info("extracted results", source=str(results_path), destination=str(destination))
- return destination
-
- except Exception as exc:
- message = f"Failed to extract results: {exc}"
- raise StorageError(message) from exc
diff --git a/fuzzforge-runner/tests/__init__.py b/fuzzforge-runner/tests/__init__.py
deleted file mode 100644
index 85ff61d..0000000
--- a/fuzzforge-runner/tests/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-"""Tests package for fuzzforge-runner."""
diff --git a/fuzzforge-runner/tests/conftest.py b/fuzzforge-runner/tests/conftest.py
deleted file mode 100644
index 29bc724..0000000
--- a/fuzzforge-runner/tests/conftest.py
+++ /dev/null
@@ -1,3 +0,0 @@
-"""Pytest configuration for fuzzforge-runner tests."""
-
-import pytest
diff --git a/fuzzforge-tests/src/fuzzforge_tests/fixtures.py b/fuzzforge-tests/src/fuzzforge_tests/fixtures.py
index bedaa5c..3cb1b8e 100644
--- a/fuzzforge-tests/src/fuzzforge_tests/fixtures.py
+++ b/fuzzforge-tests/src/fuzzforge_tests/fixtures.py
@@ -16,10 +16,8 @@ from fuzzforge_common.sandboxes.engines.podman.configuration import PodmanConfig
from podman import PodmanClient
from pydantic import UUID7
-# Type aliases for identifiers (inlined from fuzzforge-types)
+# Type aliases for identifiers
type FuzzForgeProjectIdentifier = UUID7
-type FuzzForgeModuleIdentifier = UUID7
-type FuzzForgeWorkflowIdentifier = UUID7
type FuzzForgeExecutionIdentifier = UUID7
# Constants for validation
@@ -27,14 +25,6 @@ FUZZFORGE_PROJECT_NAME_LENGTH_MIN: int = 3
FUZZFORGE_PROJECT_NAME_LENGTH_MAX: int = 64
FUZZFORGE_PROJECT_DESCRIPTION_LENGTH_MAX: int = 256
-FUZZFORGE_MODULE_NAME_LENGTH_MIN: int = 3
-FUZZFORGE_MODULE_NAME_LENGTH_MAX: int = 64
-FUZZFORGE_MODULE_DESCRIPTION_LENGTH_MAX: int = 256
-
-FUZZFORGE_WORKFLOW_NAME_LENGTH_MIN: int = 3
-FUZZFORGE_WORKFLOW_NAME_LENGTH_MAX: int = 64
-FUZZFORGE_WORKFLOW_DESCRIPTION_LENGTH_MAX: int = 256
-
if TYPE_CHECKING:
from collections.abc import Callable, Generator
from pathlib import Path
@@ -49,8 +39,6 @@ def generate_random_string(
# ===== Project Fixtures =====
-# Note: random_project_identifier is provided by fuzzforge-tests
-# Note: random_module_execution_identifier is provided by fuzzforge-tests
@pytest.fixture
@@ -79,78 +67,6 @@ def random_project_description() -> Callable[[], str]:
return inner
-@pytest.fixture
-def random_module_name() -> Callable[[], str]:
- """Generate random module names."""
-
- def inner() -> str:
- return generate_random_string(
- min_length=FUZZFORGE_MODULE_NAME_LENGTH_MIN,
- max_length=FUZZFORGE_MODULE_NAME_LENGTH_MAX,
- )
-
- return inner
-
-
-@pytest.fixture
-def random_module_description() -> Callable[[], str]:
- """Generate random module descriptions."""
-
- def inner() -> str:
- return generate_random_string(
- min_length=1,
- max_length=FUZZFORGE_MODULE_DESCRIPTION_LENGTH_MAX,
- )
-
- return inner
-
-
-@pytest.fixture
-def random_workflow_identifier() -> Callable[[], FuzzForgeWorkflowIdentifier]:
- """Generate random workflow identifiers."""
-
- def inner() -> FuzzForgeWorkflowIdentifier:
- return uuid7()
-
- return inner
-
-
-@pytest.fixture
-def random_workflow_name() -> Callable[[], str]:
- """Generate random workflow names."""
-
- def inner() -> str:
- return generate_random_string(
- min_length=FUZZFORGE_WORKFLOW_NAME_LENGTH_MIN,
- max_length=FUZZFORGE_WORKFLOW_NAME_LENGTH_MAX,
- )
-
- return inner
-
-
-@pytest.fixture
-def random_workflow_description() -> Callable[[], str]:
- """Generate random workflow descriptions."""
-
- def inner() -> str:
- return generate_random_string(
- min_length=1,
- max_length=FUZZFORGE_WORKFLOW_DESCRIPTION_LENGTH_MAX,
- )
-
- return inner
-
-
-@pytest.fixture
-def random_workflow_execution_identifier() -> Callable[[], FuzzForgeExecutionIdentifier]:
- """Generate random workflow execution identifiers."""
-
- def inner() -> FuzzForgeExecutionIdentifier:
- return uuid7()
-
- return inner
-
-
@pytest.fixture
def random_project_identifier() -> Callable[[], FuzzForgeProjectIdentifier]:
"""Generate random project identifiers.
@@ -169,18 +85,8 @@ def random_project_identifier() -> Callable[[], FuzzForgeProjectIdentifier]:
@pytest.fixture
-def random_module_identifier() -> Callable[[], FuzzForgeModuleIdentifier]:
- """Generate random module identifiers."""
-
- def inner() -> FuzzForgeModuleIdentifier:
- return uuid7()
-
- return inner
-
-
-@pytest.fixture
-def random_module_execution_identifier() -> Callable[[], FuzzForgeExecutionIdentifier]:
- """Generate random workflow execution identifiers.
+def random_execution_identifier() -> Callable[[], FuzzForgeExecutionIdentifier]:
+ """Generate random execution identifiers.
Returns a callable that generates fresh UUID7 identifiers for each call.
This pattern allows generating multiple unique identifiers within a single test.
diff --git a/hub-config.json b/hub-config.json
new file mode 100644
index 0000000..1bc90ec
--- /dev/null
+++ b/hub-config.json
@@ -0,0 +1 @@
+{"servers": []}
diff --git a/pyproject.toml b/pyproject.toml
index 105034f..8b483df 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,12 +1,17 @@
[project]
name = "fuzzforge-oss"
version = "1.0.0"
-description = "FuzzForge OSS - AI-driven security research platform for local execution"
+description = "FuzzForge AI - AI-driven security research platform for local execution"
readme = "README.md"
requires-python = ">=3.14"
authors = [
{ name = "FuzzingLabs", email = "contact@fuzzinglabs.com" }
]
+dependencies = [
+ "fuzzforge-cli",
+ "fuzzforge-mcp",
+ "fuzzforge-common",
+]
[project.optional-dependencies]
dev = [
@@ -21,8 +26,6 @@ dev = [
[tool.uv.workspace]
members = [
"fuzzforge-common",
- "fuzzforge-modules/fuzzforge-modules-sdk",
- "fuzzforge-runner",
"fuzzforge-mcp",
"fuzzforge-cli",
"fuzzforge-tests",
@@ -30,8 +33,6 @@ members = [
[tool.uv.sources]
fuzzforge-common = { workspace = true }
-fuzzforge-modules-sdk = { workspace = true }
-fuzzforge-runner = { workspace = true }
fuzzforge-mcp = { workspace = true }
fuzzforge-cli = { workspace = true }
fuzzforge-tests = { workspace = true }
diff --git a/uv.lock b/uv.lock
new file mode 100644
index 0000000..416147c
--- /dev/null
+++ b/uv.lock
@@ -0,0 +1,1647 @@
+version = 1
+revision = 3
+requires-python = ">=3.14"
+
+[manifest]
+members = [
+ "fuzzforge-cli",
+ "fuzzforge-common",
+ "fuzzforge-mcp",
+ "fuzzforge-oss",
+ "fuzzforge-tests",
+]
+
+[[package]]
+name = "annotated-types"
+version = "0.7.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
+]
+
+[[package]]
+name = "anyio"
+version = "4.12.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "idna" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" },
+]
+
+[[package]]
+name = "attrs"
+version = "25.4.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" },
+]
+
+[[package]]
+name = "authlib"
+version = "1.6.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cryptography" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/bb/9b/b1661026ff24bc641b76b78c5222d614776b0c085bcfdac9bd15a1cb4b35/authlib-1.6.6.tar.gz", hash = "sha256:45770e8e056d0f283451d9996fbb59b70d45722b45d854d58f32878d0a40c38e", size = 164894, upload-time = "2025-12-12T08:01:41.464Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/54/51/321e821856452f7386c4e9df866f196720b1ad0c5ea1623ea7399969ae3b/authlib-1.6.6-py2.py3-none-any.whl", hash = "sha256:7d9e9bc535c13974313a87f53e8430eb6ea3d1cf6ae4f6efcd793f2e949143fd", size = 244005, upload-time = "2025-12-12T08:01:40.209Z" },
+]
+
+[[package]]
+name = "bandit"
+version = "1.8.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+ { name = "pyyaml" },
+ { name = "rich" },
+ { name = "stevedore" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/fb/b5/7eb834e213d6f73aace21938e5e90425c92e5f42abafaf8a6d5d21beed51/bandit-1.8.6.tar.gz", hash = "sha256:dbfe9c25fc6961c2078593de55fd19f2559f9e45b99f1272341f5b95dea4e56b", size = 4240271, upload-time = "2025-07-06T03:10:50.9Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/48/ca/ba5f909b40ea12ec542d5d7bdd13ee31c4d65f3beed20211ef81c18fa1f3/bandit-1.8.6-py3-none-any.whl", hash = "sha256:3348e934d736fcdb68b6aa4030487097e23a501adf3e7827b63658df464dddd0", size = 133808, upload-time = "2025-07-06T03:10:49.134Z" },
+]
+
+[[package]]
+name = "beartype"
+version = "0.22.9"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/c7/94/1009e248bbfbab11397abca7193bea6626806be9a327d399810d523a07cb/beartype-0.22.9.tar.gz", hash = "sha256:8f82b54aa723a2848a56008d18875f91c1db02c32ef6a62319a002e3e25a975f", size = 1608866, upload-time = "2025-12-13T06:50:30.72Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/71/cc/18245721fa7747065ab478316c7fea7c74777d07f37ae60db2e84f8172e8/beartype-0.22.9-py3-none-any.whl", hash = "sha256:d16c9bbc61ea14637596c5f6fbff2ee99cbe3573e46a716401734ef50c3060c2", size = 1333658, upload-time = "2025-12-13T06:50:28.266Z" },
+]
+
+[[package]]
+name = "cachetools"
+version = "6.2.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/bc/1d/ede8680603f6016887c062a2cf4fc8fdba905866a3ab8831aa8aa651320c/cachetools-6.2.4.tar.gz", hash = "sha256:82c5c05585e70b6ba2d3ae09ea60b79548872185d2f24ae1f2709d37299fd607", size = 31731, upload-time = "2025-12-15T18:24:53.744Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2c/fc/1d7b80d0eb7b714984ce40efc78859c022cd930e402f599d8ca9e39c78a4/cachetools-6.2.4-py3-none-any.whl", hash = "sha256:69a7a52634fed8b8bf6e24a050fb60bff1c9bd8f6d24572b99c32d4e71e62a51", size = 11551, upload-time = "2025-12-15T18:24:52.332Z" },
+]
+
+[[package]]
+name = "certifi"
+version = "2026.1.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" },
+]
+
+[[package]]
+name = "cffi"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pycparser", marker = "implementation_name != 'PyPy'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" },
+ { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" },
+ { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" },
+ { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" },
+ { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" },
+ { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" },
+ { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" },
+ { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" },
+ { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" },
+ { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" },
+ { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" },
+ { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" },
+ { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" },
+ { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" },
+ { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" },
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.4.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" },
+ { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" },
+ { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" },
+ { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" },
+ { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" },
+ { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" },
+ { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" },
+ { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" },
+ { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" },
+ { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" },
+ { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" },
+ { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" },
+ { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" },
+ { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" },
+ { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" },
+]
+
+[[package]]
+name = "click"
+version = "8.3.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" },
+]
+
+[[package]]
+name = "cloudpickle"
+version = "3.1.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/27/fb/576f067976d320f5f0114a8d9fa1215425441bb35627b1993e5afd8111e5/cloudpickle-3.1.2.tar.gz", hash = "sha256:7fda9eb655c9c230dab534f1983763de5835249750e85fbcef43aaa30a9a2414", size = 22330, upload-time = "2025-11-03T09:25:26.604Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/88/39/799be3f2f0f38cc727ee3b4f1445fe6d5e4133064ec2e4115069418a5bb6/cloudpickle-3.1.2-py3-none-any.whl", hash = "sha256:9acb47f6afd73f60dc1df93bb801b472f05ff42fa6c84167d25cb206be1fbf4a", size = 22228, upload-time = "2025-11-03T09:25:25.534Z" },
+]
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
+]
+
+[[package]]
+name = "cryptography"
+version = "46.0.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" },
+ { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" },
+ { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" },
+ { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" },
+ { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" },
+ { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" },
+ { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" },
+ { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" },
+ { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" },
+ { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" },
+ { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" },
+ { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" },
+ { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" },
+ { url = "https://files.pythonhosted.org/packages/f5/e2/a510aa736755bffa9d2f75029c229111a1d02f8ecd5de03078f4c18d91a3/cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217", size = 7158012, upload-time = "2025-10-15T23:17:19.982Z" },
+ { url = "https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728, upload-time = "2025-10-15T23:17:21.527Z" },
+ { url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078, upload-time = "2025-10-15T23:17:23.042Z" },
+ { url = "https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460, upload-time = "2025-10-15T23:17:24.885Z" },
+ { url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237, upload-time = "2025-10-15T23:17:26.449Z" },
+ { url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344, upload-time = "2025-10-15T23:17:28.06Z" },
+ { url = "https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564, upload-time = "2025-10-15T23:17:29.665Z" },
+ { url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415, upload-time = "2025-10-15T23:17:31.686Z" },
+ { url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457, upload-time = "2025-10-15T23:17:33.478Z" },
+ { url = "https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074, upload-time = "2025-10-15T23:17:35.158Z" },
+ { url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569, upload-time = "2025-10-15T23:17:37.188Z" },
+ { url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941, upload-time = "2025-10-15T23:17:39.236Z" },
+ { url = "https://files.pythonhosted.org/packages/fd/30/27654c1dbaf7e4a3531fa1fc77986d04aefa4d6d78259a62c9dc13d7ad36/cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914", size = 3022339, upload-time = "2025-10-15T23:17:40.888Z" },
+ { url = "https://files.pythonhosted.org/packages/f6/30/640f34ccd4d2a1bc88367b54b926b781b5a018d65f404d409aba76a84b1c/cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db", size = 3494315, upload-time = "2025-10-15T23:17:42.769Z" },
+ { url = "https://files.pythonhosted.org/packages/ba/8b/88cc7e3bd0a8e7b861f26981f7b820e1f46aa9d26cc482d0feba0ecb4919/cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21", size = 2919331, upload-time = "2025-10-15T23:17:44.468Z" },
+ { url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" },
+ { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" },
+ { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" },
+ { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" },
+ { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" },
+ { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" },
+ { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" },
+ { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" },
+ { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" },
+ { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" },
+ { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" },
+ { url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" },
+ { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" },
+ { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" },
+]
+
+[[package]]
+name = "cyclopts"
+version = "4.5.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "attrs" },
+ { name = "docstring-parser" },
+ { name = "rich" },
+ { name = "rich-rst" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/13/7b/663f3285c1ac0e5d0854bd9db2c87caa6fa3d1a063185e3394a6cdca9151/cyclopts-4.5.0.tar.gz", hash = "sha256:717ac4235548b58d500baf7e688aa4d024caf0ee68f61a012ffd5e29db3099f9", size = 161980, upload-time = "2026-01-16T02:07:16.171Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/12/a3/2e00fececc34a99ae3a5d5702a5dd29c5371e4ed016647301a2b9bcc1976/cyclopts-4.5.0-py3-none-any.whl", hash = "sha256:305b9aa90a9cd0916f0a450b43e50ad5df9c252680731a0719edfb9b20381bf5", size = 199772, upload-time = "2026-01-16T02:07:14.707Z" },
+]
+
+[[package]]
+name = "diskcache"
+version = "5.6.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/3f/21/1c1ffc1a039ddcc459db43cc108658f32c57d271d7289a2794e401d0fdb6/diskcache-5.6.3.tar.gz", hash = "sha256:2c3a3fa2743d8535d832ec61c2054a1641f41775aa7c556758a109941e33e4fc", size = 67916, upload-time = "2023-08-31T06:12:00.316Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/3f/27/4570e78fc0bf5ea0ca45eb1de3818a23787af9b390c0b0a0033a1b8236f9/diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19", size = 45550, upload-time = "2023-08-31T06:11:58.822Z" },
+]
+
+[[package]]
+name = "dnspython"
+version = "2.8.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/8c/8b/57666417c0f90f08bcafa776861060426765fdb422eb10212086fb811d26/dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f", size = 368251, upload-time = "2025-09-07T18:58:00.022Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ba/5a/18ad964b0086c6e62e2e7500f7edc89e3faa45033c71c1893d34eed2b2de/dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af", size = 331094, upload-time = "2025-09-07T18:57:58.071Z" },
+]
+
+[[package]]
+name = "docstring-parser"
+version = "0.17.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b2/9d/c3b43da9515bd270df0f80548d9944e389870713cc1fe2b8fb35fe2bcefd/docstring_parser-0.17.0.tar.gz", hash = "sha256:583de4a309722b3315439bb31d64ba3eebada841f2e2cee23b99df001434c912", size = 27442, upload-time = "2025-07-21T07:35:01.868Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/55/e2/2537ebcff11c1ee1ff17d8d0b6f4db75873e3b0fb32c2d4a2ee31ecb310a/docstring_parser-0.17.0-py3-none-any.whl", hash = "sha256:cf2569abd23dce8099b300f9b4fa8191e9582dda731fd533daf54c4551658708", size = 36896, upload-time = "2025-07-21T07:35:00.684Z" },
+]
+
+[[package]]
+name = "docutils"
+version = "0.22.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ae/b6/03bb70946330e88ffec97aefd3ea75ba575cb2e762061e0e62a213befee8/docutils-0.22.4.tar.gz", hash = "sha256:4db53b1fde9abecbb74d91230d32ab626d94f6badfc575d6db9194a49df29968", size = 2291750, upload-time = "2025-12-18T19:00:26.443Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/02/10/5da547df7a391dcde17f59520a231527b8571e6f46fc8efb02ccb370ab12/docutils-0.22.4-py3-none-any.whl", hash = "sha256:d0013f540772d1420576855455d050a2180186c91c15779301ac2ccb3eeb68de", size = 633196, upload-time = "2025-12-18T19:00:18.077Z" },
+]
+
+[[package]]
+name = "email-validator"
+version = "2.3.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "dnspython" },
+ { name = "idna" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/f5/22/900cb125c76b7aaa450ce02fd727f452243f2e91a61af068b40adba60ea9/email_validator-2.3.0.tar.gz", hash = "sha256:9fc05c37f2f6cf439ff414f8fc46d917929974a82244c20eb10231ba60c54426", size = 51238, upload-time = "2025-08-26T13:09:06.831Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/de/15/545e2b6cf2e3be84bc1ed85613edd75b8aea69807a71c26f4ca6a9258e82/email_validator-2.3.0-py3-none-any.whl", hash = "sha256:80f13f623413e6b197ae73bb10bf4eb0908faf509ad8362c5edeb0be7fd450b4", size = 35604, upload-time = "2025-08-26T13:09:05.858Z" },
+]
+
+[[package]]
+name = "exceptiongroup"
+version = "1.3.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" },
+]
+
+[[package]]
+name = "fakeredis"
+version = "2.33.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "redis" },
+ { name = "sortedcontainers" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/5f/f9/57464119936414d60697fcbd32f38909bb5688b616ae13de6e98384433e0/fakeredis-2.33.0.tar.gz", hash = "sha256:d7bc9a69d21df108a6451bbffee23b3eba432c21a654afc7ff2d295428ec5770", size = 175187, upload-time = "2025-12-16T19:45:52.269Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6e/78/a850fed8aeef96d4a99043c90b818b2ed5419cd5b24a4049fd7cfb9f1471/fakeredis-2.33.0-py3-none-any.whl", hash = "sha256:de535f3f9ccde1c56672ab2fdd6a8efbc4f2619fc2f1acc87b8737177d71c965", size = 119605, upload-time = "2025-12-16T19:45:51.08Z" },
+]
+
+[package.optional-dependencies]
+lua = [
+ { name = "lupa" },
+]
+
+[[package]]
+name = "fastmcp"
+version = "2.14.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "authlib" },
+ { name = "cyclopts" },
+ { name = "exceptiongroup" },
+ { name = "httpx" },
+ { name = "jsonschema-path" },
+ { name = "mcp" },
+ { name = "openapi-pydantic" },
+ { name = "platformdirs" },
+ { name = "py-key-value-aio", extra = ["disk", "keyring", "memory"] },
+ { name = "pydantic", extra = ["email"] },
+ { name = "pydocket" },
+ { name = "pyperclip" },
+ { name = "python-dotenv" },
+ { name = "rich" },
+ { name = "uvicorn" },
+ { name = "websockets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/9e/50/d38e4371bdc34e709f4731b1e882cb7bc50e51c1a224859d4cd381b3a79b/fastmcp-2.14.1.tar.gz", hash = "sha256:132725cbf77b68fa3c3d165eff0cfa47e40c1479457419e6a2cfda65bd84c8d6", size = 8263331, upload-time = "2025-12-15T02:26:27.102Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1d/82/72401d09dc27c27fdf72ad6c2fe331e553e3c3646e01b5ff16473191033d/fastmcp-2.14.1-py3-none-any.whl", hash = "sha256:fb3e365cc1d52573ab89caeba9944dd4b056149097be169bce428e011f0a57e5", size = 412176, upload-time = "2025-12-15T02:26:25.356Z" },
+]
+
+[[package]]
+name = "fuzzforge-cli"
+version = "0.0.1"
+source = { editable = "fuzzforge-cli" }
+dependencies = [
+ { name = "fuzzforge-mcp" },
+ { name = "rich" },
+ { name = "textual" },
+ { name = "typer" },
+]
+
+[package.optional-dependencies]
+lints = [
+ { name = "bandit" },
+ { name = "mypy" },
+ { name = "ruff" },
+]
+tests = [
+ { name = "pytest" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "bandit", marker = "extra == 'lints'", specifier = "==1.8.6" },
+ { name = "fuzzforge-mcp", editable = "fuzzforge-mcp" },
+ { name = "mypy", marker = "extra == 'lints'", specifier = "==1.18.2" },
+ { name = "pytest", marker = "extra == 'tests'", specifier = "==9.0.2" },
+ { name = "rich", specifier = ">=14.0.0" },
+ { name = "ruff", marker = "extra == 'lints'", specifier = "==0.14.4" },
+ { name = "textual", specifier = ">=1.0.0" },
+ { name = "typer", specifier = "==0.20.1" },
+]
+provides-extras = ["lints", "tests"]
+
+[[package]]
+name = "fuzzforge-common"
+version = "0.0.1"
+source = { editable = "fuzzforge-common" }
+dependencies = [
+ { name = "podman" },
+ { name = "pydantic" },
+ { name = "structlog" },
+]
+
+[package.optional-dependencies]
+lints = [
+ { name = "bandit" },
+ { name = "mypy" },
+ { name = "ruff" },
+]
+tests = [
+ { name = "pytest" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "bandit", marker = "extra == 'lints'", specifier = "==1.8.6" },
+ { name = "mypy", marker = "extra == 'lints'", specifier = "==1.18.2" },
+ { name = "podman", specifier = "==5.6.0" },
+ { name = "pydantic", specifier = "==2.12.4" },
+ { name = "pytest", marker = "extra == 'tests'", specifier = "==9.0.2" },
+ { name = "ruff", marker = "extra == 'lints'", specifier = "==0.14.4" },
+ { name = "structlog", specifier = ">=24.0.0" },
+]
+provides-extras = ["lints", "tests"]
+
+[[package]]
+name = "fuzzforge-mcp"
+version = "0.0.1"
+source = { editable = "fuzzforge-mcp" }
+dependencies = [
+ { name = "fastmcp" },
+ { name = "fuzzforge-common" },
+ { name = "pydantic" },
+ { name = "pydantic-settings" },
+ { name = "structlog" },
+]
+
+[package.optional-dependencies]
+lints = [
+ { name = "bandit" },
+ { name = "mypy" },
+ { name = "ruff" },
+]
+tests = [
+ { name = "fuzzforge-tests" },
+ { name = "pytest" },
+ { name = "pytest-asyncio" },
+ { name = "pytest-httpx" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "bandit", marker = "extra == 'lints'", specifier = "==1.8.6" },
+ { name = "fastmcp", specifier = "==2.14.1" },
+ { name = "fuzzforge-common", editable = "fuzzforge-common" },
+ { name = "fuzzforge-tests", marker = "extra == 'tests'", editable = "fuzzforge-tests" },
+ { name = "mypy", marker = "extra == 'lints'", specifier = "==1.18.2" },
+ { name = "pydantic", specifier = "==2.12.4" },
+ { name = "pydantic-settings", specifier = "==2.12.0" },
+ { name = "pytest", marker = "extra == 'tests'", specifier = "==9.0.2" },
+ { name = "pytest-asyncio", marker = "extra == 'tests'", specifier = "==1.3.0" },
+ { name = "pytest-httpx", marker = "extra == 'tests'", specifier = "==0.36.0" },
+ { name = "ruff", marker = "extra == 'lints'", specifier = "==0.14.4" },
+ { name = "structlog", specifier = "==25.5.0" },
+]
+provides-extras = ["lints", "tests"]
+
+[[package]]
+name = "fuzzforge-oss"
+version = "1.0.0"
+source = { virtual = "." }
+dependencies = [
+ { name = "fuzzforge-cli" },
+ { name = "fuzzforge-common" },
+ { name = "fuzzforge-mcp" },
+]
+
+[package.optional-dependencies]
+dev = [
+ { name = "fuzzforge-common" },
+ { name = "fuzzforge-mcp" },
+ { name = "fuzzforge-tests" },
+ { name = "pytest" },
+ { name = "pytest-asyncio" },
+ { name = "pytest-httpx" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "fuzzforge-cli", editable = "fuzzforge-cli" },
+ { name = "fuzzforge-common", editable = "fuzzforge-common" },
+ { name = "fuzzforge-common", marker = "extra == 'dev'", editable = "fuzzforge-common" },
+ { name = "fuzzforge-mcp", editable = "fuzzforge-mcp" },
+ { name = "fuzzforge-mcp", marker = "extra == 'dev'", editable = "fuzzforge-mcp" },
+ { name = "fuzzforge-tests", marker = "extra == 'dev'", editable = "fuzzforge-tests" },
+ { name = "pytest", marker = "extra == 'dev'", specifier = "==9.0.2" },
+ { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = "==1.3.0" },
+ { name = "pytest-httpx", marker = "extra == 'dev'", specifier = "==0.36.0" },
+]
+provides-extras = ["dev"]
+
+[[package]]
+name = "fuzzforge-tests"
+version = "0.0.1"
+source = { editable = "fuzzforge-tests" }
+dependencies = [
+ { name = "fuzzforge-common" },
+ { name = "podman" },
+ { name = "pydantic" },
+ { name = "pytest" },
+]
+
+[package.optional-dependencies]
+lints = [
+ { name = "bandit" },
+ { name = "mypy" },
+ { name = "ruff" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "bandit", marker = "extra == 'lints'", specifier = "==1.8.6" },
+ { name = "fuzzforge-common", editable = "fuzzforge-common" },
+ { name = "mypy", marker = "extra == 'lints'", specifier = "==1.18.2" },
+ { name = "podman", specifier = "==5.6.0" },
+ { name = "pydantic", specifier = ">=2.12.4" },
+ { name = "pytest", specifier = "==9.0.2" },
+ { name = "ruff", marker = "extra == 'lints'", specifier = "==0.14.4" },
+]
+provides-extras = ["lints"]
+
+[[package]]
+name = "h11"
+version = "0.16.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" },
+]
+
+[[package]]
+name = "httpcore"
+version = "1.0.9"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "certifi" },
+ { name = "h11" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" },
+]
+
+[[package]]
+name = "httpx"
+version = "0.28.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio" },
+ { name = "certifi" },
+ { name = "httpcore" },
+ { name = "idna" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" },
+]
+
+[[package]]
+name = "httpx-sse"
+version = "0.4.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/0f/4c/751061ffa58615a32c31b2d82e8482be8dd4a89154f003147acee90f2be9/httpx_sse-0.4.3.tar.gz", hash = "sha256:9b1ed0127459a66014aec3c56bebd93da3c1bc8bb6618c8082039a44889a755d", size = 15943, upload-time = "2025-10-10T21:48:22.271Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d2/fd/6668e5aec43ab844de6fc74927e155a3b37bf40d7c3790e49fc0406b6578/httpx_sse-0.4.3-py3-none-any.whl", hash = "sha256:0ac1c9fe3c0afad2e0ebb25a934a59f4c7823b60792691f779fad2c5568830fc", size = 8960, upload-time = "2025-10-10T21:48:21.158Z" },
+]
+
+[[package]]
+name = "idna"
+version = "3.11"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
+]
+
+[[package]]
+name = "importlib-metadata"
+version = "8.7.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "zipp" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/f3/49/3b30cad09e7771a4982d9975a8cbf64f00d4a1ececb53297f1d9a7be1b10/importlib_metadata-8.7.1.tar.gz", hash = "sha256:49fef1ae6440c182052f407c8d34a68f72efc36db9ca90dc0113398f2fdde8bb", size = 57107, upload-time = "2025-12-21T10:00:19.278Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865, upload-time = "2025-12-21T10:00:18.329Z" },
+]
+
+[[package]]
+name = "iniconfig"
+version = "2.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" },
+]
+
+[[package]]
+name = "jaraco-classes"
+version = "3.4.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "more-itertools" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/06/c0/ed4a27bc5571b99e3cff68f8a9fa5b56ff7df1c2251cc715a652ddd26402/jaraco.classes-3.4.0.tar.gz", hash = "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd", size = 11780, upload-time = "2024-03-31T07:27:36.643Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7f/66/b15ce62552d84bbfcec9a4873ab79d993a1dd4edb922cbfccae192bd5b5f/jaraco.classes-3.4.0-py3-none-any.whl", hash = "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790", size = 6777, upload-time = "2024-03-31T07:27:34.792Z" },
+]
+
+[[package]]
+name = "jaraco-context"
+version = "6.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/cb/9c/a788f5bb29c61e456b8ee52ce76dbdd32fd72cd73dd67bc95f42c7a8d13c/jaraco_context-6.1.0.tar.gz", hash = "sha256:129a341b0a85a7db7879e22acd66902fda67882db771754574338898b2d5d86f", size = 15850, upload-time = "2026-01-13T02:53:53.847Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8d/48/aa685dbf1024c7bd82bede569e3a85f82c32fd3d79ba5fea578f0159571a/jaraco_context-6.1.0-py3-none-any.whl", hash = "sha256:a43b5ed85815223d0d3cfdb6d7ca0d2bc8946f28f30b6f3216bda070f68badda", size = 7065, upload-time = "2026-01-13T02:53:53.031Z" },
+]
+
+[[package]]
+name = "jaraco-functools"
+version = "4.4.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "more-itertools" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/0f/27/056e0638a86749374d6f57d0b0db39f29509cce9313cf91bdc0ac4d91084/jaraco_functools-4.4.0.tar.gz", hash = "sha256:da21933b0417b89515562656547a77b4931f98176eb173644c0d35032a33d6bb", size = 19943, upload-time = "2025-12-21T09:29:43.6Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fd/c4/813bb09f0985cb21e959f21f2464169eca882656849adf727ac7bb7e1767/jaraco_functools-4.4.0-py3-none-any.whl", hash = "sha256:9eec1e36f45c818d9bf307c8948eb03b2b56cd44087b3cdc989abca1f20b9176", size = 10481, upload-time = "2025-12-21T09:29:42.27Z" },
+]
+
+[[package]]
+name = "jeepney"
+version = "0.9.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/7b/6f/357efd7602486741aa73ffc0617fb310a29b588ed0fd69c2399acbb85b0c/jeepney-0.9.0.tar.gz", hash = "sha256:cf0e9e845622b81e4a28df94c40345400256ec608d0e55bb8a3feaa9163f5732", size = 106758, upload-time = "2025-02-27T18:51:01.684Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b2/a3/e137168c9c44d18eff0376253da9f1e9234d0239e0ee230d2fee6cea8e55/jeepney-0.9.0-py3-none-any.whl", hash = "sha256:97e5714520c16fc0a45695e5365a2e11b81ea79bba796e26f9f1d178cb182683", size = 49010, upload-time = "2025-02-27T18:51:00.104Z" },
+]
+
+[[package]]
+name = "jsonschema"
+version = "4.26.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "attrs" },
+ { name = "jsonschema-specifications" },
+ { name = "referencing" },
+ { name = "rpds-py" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b3/fc/e067678238fa451312d4c62bf6e6cf5ec56375422aee02f9cb5f909b3047/jsonschema-4.26.0.tar.gz", hash = "sha256:0c26707e2efad8aa1bfc5b7ce170f3fccc2e4918ff85989ba9ffa9facb2be326", size = 366583, upload-time = "2026-01-07T13:41:07.246Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl", hash = "sha256:d489f15263b8d200f8387e64b4c3a75f06629559fb73deb8fdfb525f2dab50ce", size = 90630, upload-time = "2026-01-07T13:41:05.306Z" },
+]
+
+[[package]]
+name = "jsonschema-path"
+version = "0.3.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pathable" },
+ { name = "pyyaml" },
+ { name = "referencing" },
+ { name = "requests" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/6e/45/41ebc679c2a4fced6a722f624c18d658dee42612b83ea24c1caf7c0eb3a8/jsonschema_path-0.3.4.tar.gz", hash = "sha256:8365356039f16cc65fddffafda5f58766e34bebab7d6d105616ab52bc4297001", size = 11159, upload-time = "2025-01-24T14:33:16.547Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/cb/58/3485da8cb93d2f393bce453adeef16896751f14ba3e2024bc21dc9597646/jsonschema_path-0.3.4-py3-none-any.whl", hash = "sha256:f502191fdc2b22050f9a81c9237be9d27145b9001c55842bece5e94e382e52f8", size = 14810, upload-time = "2025-01-24T14:33:14.652Z" },
+]
+
+[[package]]
+name = "jsonschema-specifications"
+version = "2025.9.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "referencing" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" },
+]
+
+[[package]]
+name = "keyring"
+version = "25.7.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "jaraco-classes" },
+ { name = "jaraco-context" },
+ { name = "jaraco-functools" },
+ { name = "jeepney", marker = "sys_platform == 'linux'" },
+ { name = "pywin32-ctypes", marker = "sys_platform == 'win32'" },
+ { name = "secretstorage", marker = "sys_platform == 'linux'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/43/4b/674af6ef2f97d56f0ab5153bf0bfa28ccb6c3ed4d1babf4305449668807b/keyring-25.7.0.tar.gz", hash = "sha256:fe01bd85eb3f8fb3dd0405defdeac9a5b4f6f0439edbb3149577f244a2e8245b", size = 63516, upload-time = "2025-11-16T16:26:09.482Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/81/db/e655086b7f3a705df045bf0933bdd9c2f79bb3c97bfef1384598bb79a217/keyring-25.7.0-py3-none-any.whl", hash = "sha256:be4a0b195f149690c166e850609a477c532ddbfbaed96a404d4e43f8d5e2689f", size = 39160, upload-time = "2025-11-16T16:26:08.402Z" },
+]
+
+[[package]]
+name = "linkify-it-py"
+version = "2.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "uc-micro-py" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/2e/c9/06ea13676ef354f0af6169587ae292d3e2406e212876a413bf9eece4eb23/linkify_it_py-2.1.0.tar.gz", hash = "sha256:43360231720999c10e9328dc3691160e27a718e280673d444c38d7d3aaa3b98b", size = 29158, upload-time = "2026-03-01T07:48:47.683Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b4/de/88b3be5c31b22333b3ca2f6ff1de4e863d8fe45aaea7485f591970ec1d3e/linkify_it_py-2.1.0-py3-none-any.whl", hash = "sha256:0d252c1594ecba2ecedc444053db5d3a9b7ec1b0dd929c8f1d74dce89f86c05e", size = 19878, upload-time = "2026-03-01T07:48:46.098Z" },
+]
+
+[[package]]
+name = "lupa"
+version = "2.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b8/1c/191c3e6ec6502e3dbe25a53e27f69a5daeac3e56de1f73c0138224171ead/lupa-2.6.tar.gz", hash = "sha256:9a770a6e89576be3447668d7ced312cd6fd41d3c13c2462c9dc2c2ab570e45d9", size = 7240282, upload-time = "2025-10-24T07:20:29.738Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/66/9d/d9427394e54d22a35d1139ef12e845fd700d4872a67a34db32516170b746/lupa-2.6-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:dcb6d0a3264873e1653bc188499f48c1fb4b41a779e315eba45256cfe7bc33c1", size = 953818, upload-time = "2025-10-24T07:18:53.378Z" },
+ { url = "https://files.pythonhosted.org/packages/10/41/27bbe81953fb2f9ecfced5d9c99f85b37964cfaf6aa8453bb11283983721/lupa-2.6-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:a37e01f2128f8c36106726cb9d360bac087d58c54b4522b033cc5691c584db18", size = 1915850, upload-time = "2025-10-24T07:18:55.259Z" },
+ { url = "https://files.pythonhosted.org/packages/a3/98/f9ff60db84a75ba8725506bbf448fb085bc77868a021998ed2a66d920568/lupa-2.6-cp314-cp314-macosx_11_0_x86_64.whl", hash = "sha256:458bd7e9ff3c150b245b0fcfbb9bd2593d1152ea7f0a7b91c1d185846da033fe", size = 982344, upload-time = "2025-10-24T07:18:57.05Z" },
+ { url = "https://files.pythonhosted.org/packages/41/f7/f39e0f1c055c3b887d86b404aaf0ca197b5edfd235a8b81b45b25bac7fc3/lupa-2.6-cp314-cp314-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:052ee82cac5206a02df77119c325339acbc09f5ce66967f66a2e12a0f3211cad", size = 1156543, upload-time = "2025-10-24T07:18:59.251Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/9c/59e6cffa0d672d662ae17bd7ac8ecd2c89c9449dee499e3eb13ca9cd10d9/lupa-2.6-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96594eca3c87dd07938009e95e591e43d554c1dbd0385be03c100367141db5a8", size = 1047974, upload-time = "2025-10-24T07:19:01.449Z" },
+ { url = "https://files.pythonhosted.org/packages/23/c6/a04e9cef7c052717fcb28fb63b3824802488f688391895b618e39be0f684/lupa-2.6-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8faddd9d198688c8884091173a088a8e920ecc96cda2ffed576a23574c4b3f6", size = 2073458, upload-time = "2025-10-24T07:19:03.369Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/10/824173d10f38b51fc77785228f01411b6ca28826ce27404c7c912e0e442c/lupa-2.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:daebb3a6b58095c917e76ba727ab37b27477fb926957c825205fbda431552134", size = 1067683, upload-time = "2025-10-24T07:19:06.2Z" },
+ { url = "https://files.pythonhosted.org/packages/b6/dc/9692fbcf3c924d9c4ece2d8d2f724451ac2e09af0bd2a782db1cef34e799/lupa-2.6-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:f3154e68972befe0f81564e37d8142b5d5d79931a18309226a04ec92487d4ea3", size = 1171892, upload-time = "2025-10-24T07:19:08.544Z" },
+ { url = "https://files.pythonhosted.org/packages/84/ff/e318b628d4643c278c96ab3ddea07fc36b075a57383c837f5b11e537ba9d/lupa-2.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e4dadf77b9fedc0bfa53417cc28dc2278a26d4cbd95c29f8927ad4d8fe0a7ef9", size = 2166641, upload-time = "2025-10-24T07:19:10.485Z" },
+ { url = "https://files.pythonhosted.org/packages/12/f7/a6f9ec2806cf2d50826980cdb4b3cffc7691dc6f95e13cc728846d5cb793/lupa-2.6-cp314-cp314-win32.whl", hash = "sha256:cb34169c6fa3bab3e8ac58ca21b8a7102f6a94b6a5d08d3636312f3f02fafd8f", size = 1456857, upload-time = "2025-10-24T07:19:37.989Z" },
+ { url = "https://files.pythonhosted.org/packages/c5/de/df71896f25bdc18360fdfa3b802cd7d57d7fede41a0e9724a4625b412c85/lupa-2.6-cp314-cp314-win_amd64.whl", hash = "sha256:b74f944fe46c421e25d0f8692aef1e842192f6f7f68034201382ac440ef9ea67", size = 1731191, upload-time = "2025-10-24T07:19:40.281Z" },
+ { url = "https://files.pythonhosted.org/packages/47/3c/a1f23b01c54669465f5f4c4083107d496fbe6fb45998771420e9aadcf145/lupa-2.6-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0e21b716408a21ab65723f8841cf7f2f37a844b7a965eeabb785e27fca4099cf", size = 999343, upload-time = "2025-10-24T07:19:12.519Z" },
+ { url = "https://files.pythonhosted.org/packages/c5/6d/501994291cb640bfa2ccf7f554be4e6914afa21c4026bd01bff9ca8aac57/lupa-2.6-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:589db872a141bfff828340079bbdf3e9a31f2689f4ca0d88f97d9e8c2eae6142", size = 2000730, upload-time = "2025-10-24T07:19:14.869Z" },
+ { url = "https://files.pythonhosted.org/packages/53/a5/457ffb4f3f20469956c2d4c4842a7675e884efc895b2f23d126d23e126cc/lupa-2.6-cp314-cp314t-macosx_11_0_x86_64.whl", hash = "sha256:cd852a91a4a9d4dcbb9a58100f820a75a425703ec3e3f049055f60b8533b7953", size = 1021553, upload-time = "2025-10-24T07:19:17.123Z" },
+ { url = "https://files.pythonhosted.org/packages/51/6b/36bb5a5d0960f2a5c7c700e0819abb76fd9bf9c1d8a66e5106416d6e9b14/lupa-2.6-cp314-cp314t-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:0334753be028358922415ca97a64a3048e4ed155413fc4eaf87dd0a7e2752983", size = 1133275, upload-time = "2025-10-24T07:19:20.51Z" },
+ { url = "https://files.pythonhosted.org/packages/19/86/202ff4429f663013f37d2229f6176ca9f83678a50257d70f61a0a97281bf/lupa-2.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:661d895cd38c87658a34780fac54a690ec036ead743e41b74c3fb81a9e65a6aa", size = 1038441, upload-time = "2025-10-24T07:19:22.509Z" },
+ { url = "https://files.pythonhosted.org/packages/a7/42/d8125f8e420714e5b52e9c08d88b5329dfb02dcca731b4f21faaee6cc5b5/lupa-2.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6aa58454ccc13878cc177c62529a2056be734da16369e451987ff92784994ca7", size = 2058324, upload-time = "2025-10-24T07:19:24.979Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/2c/47bf8b84059876e877a339717ddb595a4a7b0e8740bacae78ba527562e1c/lupa-2.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1425017264e470c98022bba8cff5bd46d054a827f5df6b80274f9cc71dafd24f", size = 1060250, upload-time = "2025-10-24T07:19:27.262Z" },
+ { url = "https://files.pythonhosted.org/packages/c2/06/d88add2b6406ca1bdec99d11a429222837ca6d03bea42ca75afa169a78cb/lupa-2.6-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:224af0532d216e3105f0a127410f12320f7c5f1aa0300bdf9646b8d9afb0048c", size = 1151126, upload-time = "2025-10-24T07:19:29.522Z" },
+ { url = "https://files.pythonhosted.org/packages/b4/a0/89e6a024c3b4485b89ef86881c9d55e097e7cb0bdb74efb746f2fa6a9a76/lupa-2.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9abb98d5a8fd27c8285302e82199f0e56e463066f88f619d6594a450bf269d80", size = 2153693, upload-time = "2025-10-24T07:19:31.379Z" },
+ { url = "https://files.pythonhosted.org/packages/b6/36/a0f007dc58fc1bbf51fb85dcc82fcb1f21b8c4261361de7dab0e3d8521ef/lupa-2.6-cp314-cp314t-win32.whl", hash = "sha256:1849efeba7a8f6fb8aa2c13790bee988fd242ae404bd459509640eeea3d1e291", size = 1590104, upload-time = "2025-10-24T07:19:33.514Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/5e/db903ce9cf82c48d6b91bf6d63ae4c8d0d17958939a4e04ba6b9f38b8643/lupa-2.6-cp314-cp314t-win_amd64.whl", hash = "sha256:fc1498d1a4fc028bc521c26d0fad4ca00ed63b952e32fb95949bda76a04bad52", size = 1913818, upload-time = "2025-10-24T07:19:36.039Z" },
+]
+
+[[package]]
+name = "markdown-it-py"
+version = "4.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "mdurl" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" },
+]
+
+[package.optional-dependencies]
+linkify = [
+ { name = "linkify-it-py" },
+]
+
+[[package]]
+name = "mcp"
+version = "1.25.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio" },
+ { name = "httpx" },
+ { name = "httpx-sse" },
+ { name = "jsonschema" },
+ { name = "pydantic" },
+ { name = "pydantic-settings" },
+ { name = "pyjwt", extra = ["crypto"] },
+ { name = "python-multipart" },
+ { name = "pywin32", marker = "sys_platform == 'win32'" },
+ { name = "sse-starlette" },
+ { name = "starlette" },
+ { name = "typing-extensions" },
+ { name = "typing-inspection" },
+ { name = "uvicorn", marker = "sys_platform != 'emscripten'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d5/2d/649d80a0ecf6a1f82632ca44bec21c0461a9d9fc8934d38cb5b319f2db5e/mcp-1.25.0.tar.gz", hash = "sha256:56310361ebf0364e2d438e5b45f7668cbb124e158bb358333cd06e49e83a6802", size = 605387, upload-time = "2025-12-19T10:19:56.985Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e2/fc/6dc7659c2ae5ddf280477011f4213a74f806862856b796ef08f028e664bf/mcp-1.25.0-py3-none-any.whl", hash = "sha256:b37c38144a666add0862614cc79ec276e97d72aa8ca26d622818d4e278b9721a", size = 233076, upload-time = "2025-12-19T10:19:55.416Z" },
+]
+
+[[package]]
+name = "mdit-py-plugins"
+version = "0.5.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markdown-it-py" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b2/fd/a756d36c0bfba5f6e39a1cdbdbfdd448dc02692467d83816dff4592a1ebc/mdit_py_plugins-0.5.0.tar.gz", hash = "sha256:f4918cb50119f50446560513a8e311d574ff6aaed72606ddae6d35716fe809c6", size = 44655, upload-time = "2025-08-11T07:25:49.083Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fb/86/dd6e5db36df29e76c7a7699123569a4a18c1623ce68d826ed96c62643cae/mdit_py_plugins-0.5.0-py3-none-any.whl", hash = "sha256:07a08422fc1936a5d26d146759e9155ea466e842f5ab2f7d2266dd084c8dab1f", size = 57205, upload-time = "2025-08-11T07:25:47.597Z" },
+]
+
+[[package]]
+name = "mdurl"
+version = "0.1.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
+]
+
+[[package]]
+name = "more-itertools"
+version = "10.8.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ea/5d/38b681d3fce7a266dd9ab73c66959406d565b3e85f21d5e66e1181d93721/more_itertools-10.8.0.tar.gz", hash = "sha256:f638ddf8a1a0d134181275fb5d58b086ead7c6a72429ad725c67503f13ba30bd", size = 137431, upload-time = "2025-09-02T15:23:11.018Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a4/8e/469e5a4a2f5855992e425f3cb33804cc07bf18d48f2db061aec61ce50270/more_itertools-10.8.0-py3-none-any.whl", hash = "sha256:52d4362373dcf7c52546bc4af9a86ee7c4579df9a8dc268be0a2f949d376cc9b", size = 69667, upload-time = "2025-09-02T15:23:09.635Z" },
+]
+
+[[package]]
+name = "mypy"
+version = "1.18.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "mypy-extensions" },
+ { name = "pathspec" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c0/77/8f0d0001ffad290cef2f7f216f96c814866248a0b92a722365ed54648e7e/mypy-1.18.2.tar.gz", hash = "sha256:06a398102a5f203d7477b2923dda3634c36727fa5c237d8f859ef90c42a9924b", size = 3448846, upload-time = "2025-09-19T00:11:10.519Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5a/0c/7d5300883da16f0063ae53996358758b2a2df2a09c72a5061fa79a1f5006/mypy-1.18.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:62f0e1e988ad41c2a110edde6c398383a889d95b36b3e60bcf155f5164c4fdce", size = 12893775, upload-time = "2025-09-19T00:10:03.814Z" },
+ { url = "https://files.pythonhosted.org/packages/50/df/2cffbf25737bdb236f60c973edf62e3e7b4ee1c25b6878629e88e2cde967/mypy-1.18.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8795a039bab805ff0c1dfdb8cd3344642c2b99b8e439d057aba30850b8d3423d", size = 11936852, upload-time = "2025-09-19T00:10:51.631Z" },
+ { url = "https://files.pythonhosted.org/packages/be/50/34059de13dd269227fb4a03be1faee6e2a4b04a2051c82ac0a0b5a773c9a/mypy-1.18.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6ca1e64b24a700ab5ce10133f7ccd956a04715463d30498e64ea8715236f9c9c", size = 12480242, upload-time = "2025-09-19T00:11:07.955Z" },
+ { url = "https://files.pythonhosted.org/packages/5b/11/040983fad5132d85914c874a2836252bbc57832065548885b5bb5b0d4359/mypy-1.18.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d924eef3795cc89fecf6bedc6ed32b33ac13e8321344f6ddbf8ee89f706c05cb", size = 13326683, upload-time = "2025-09-19T00:09:55.572Z" },
+ { url = "https://files.pythonhosted.org/packages/e9/ba/89b2901dd77414dd7a8c8729985832a5735053be15b744c18e4586e506ef/mypy-1.18.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20c02215a080e3a2be3aa50506c67242df1c151eaba0dcbc1e4e557922a26075", size = 13514749, upload-time = "2025-09-19T00:10:44.827Z" },
+ { url = "https://files.pythonhosted.org/packages/25/bc/cc98767cffd6b2928ba680f3e5bc969c4152bf7c2d83f92f5a504b92b0eb/mypy-1.18.2-cp314-cp314-win_amd64.whl", hash = "sha256:749b5f83198f1ca64345603118a6f01a4e99ad4bf9d103ddc5a3200cc4614adf", size = 9982959, upload-time = "2025-09-19T00:10:37.344Z" },
+ { url = "https://files.pythonhosted.org/packages/87/e3/be76d87158ebafa0309946c4a73831974d4d6ab4f4ef40c3b53a385a66fd/mypy-1.18.2-py3-none-any.whl", hash = "sha256:22a1748707dd62b58d2ae53562ffc4d7f8bcc727e8ac7cbc69c053ddc874d47e", size = 2352367, upload-time = "2025-09-19T00:10:15.489Z" },
+]
+
+[[package]]
+name = "mypy-extensions"
+version = "1.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" },
+]
+
+[[package]]
+name = "openapi-pydantic"
+version = "0.5.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pydantic" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/02/2e/58d83848dd1a79cb92ed8e63f6ba901ca282c5f09d04af9423ec26c56fd7/openapi_pydantic-0.5.1.tar.gz", hash = "sha256:ff6835af6bde7a459fb93eb93bb92b8749b754fc6e51b2f1590a19dc3005ee0d", size = 60892, upload-time = "2025-01-08T19:29:27.083Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/12/cf/03675d8bd8ecbf4445504d8071adab19f5f993676795708e36402ab38263/openapi_pydantic-0.5.1-py3-none-any.whl", hash = "sha256:a3a09ef4586f5bd760a8df7f43028b60cafb6d9f61de2acba9574766255ab146", size = 96381, upload-time = "2025-01-08T19:29:25.275Z" },
+]
+
+[[package]]
+name = "opentelemetry-api"
+version = "1.39.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "importlib-metadata" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/97/b9/3161be15bb8e3ad01be8be5a968a9237c3027c5be504362ff800fca3e442/opentelemetry_api-1.39.1.tar.gz", hash = "sha256:fbde8c80e1b937a2c61f20347e91c0c18a1940cecf012d62e65a7caf08967c9c", size = 65767, upload-time = "2025-12-11T13:32:39.182Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/cf/df/d3f1ddf4bb4cb50ed9b1139cc7b1c54c34a1e7ce8fd1b9a37c0d1551a6bd/opentelemetry_api-1.39.1-py3-none-any.whl", hash = "sha256:2edd8463432a7f8443edce90972169b195e7d6a05500cd29e6d13898187c9950", size = 66356, upload-time = "2025-12-11T13:32:17.304Z" },
+]
+
+[[package]]
+name = "opentelemetry-exporter-prometheus"
+version = "0.60b1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "opentelemetry-api" },
+ { name = "opentelemetry-sdk" },
+ { name = "prometheus-client" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/14/39/7dafa6fff210737267bed35a8855b6ac7399b9e582b8cf1f25f842517012/opentelemetry_exporter_prometheus-0.60b1.tar.gz", hash = "sha256:a4011b46906323f71724649d301b4dc188aaa068852e814f4df38cc76eac616b", size = 14976, upload-time = "2025-12-11T13:32:42.944Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9b/0d/4be6bf5477a3eb3d917d2f17d3c0b6720cd6cb97898444a61d43cc983f5c/opentelemetry_exporter_prometheus-0.60b1-py3-none-any.whl", hash = "sha256:49f59178de4f4590e3cef0b8b95cf6e071aae70e1f060566df5546fad773b8fd", size = 13019, upload-time = "2025-12-11T13:32:23.974Z" },
+]
+
+[[package]]
+name = "opentelemetry-instrumentation"
+version = "0.60b1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "opentelemetry-api" },
+ { name = "opentelemetry-semantic-conventions" },
+ { name = "packaging" },
+ { name = "wrapt" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/41/0f/7e6b713ac117c1f5e4e3300748af699b9902a2e5e34c9cf443dde25a01fa/opentelemetry_instrumentation-0.60b1.tar.gz", hash = "sha256:57ddc7974c6eb35865af0426d1a17132b88b2ed8586897fee187fd5b8944bd6a", size = 31706, upload-time = "2025-12-11T13:36:42.515Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/77/d2/6788e83c5c86a2690101681aeef27eeb2a6bf22df52d3f263a22cee20915/opentelemetry_instrumentation-0.60b1-py3-none-any.whl", hash = "sha256:04480db952b48fb1ed0073f822f0ee26012b7be7c3eac1a3793122737c78632d", size = 33096, upload-time = "2025-12-11T13:35:33.067Z" },
+]
+
+[[package]]
+name = "opentelemetry-sdk"
+version = "1.39.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "opentelemetry-api" },
+ { name = "opentelemetry-semantic-conventions" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/eb/fb/c76080c9ba07e1e8235d24cdcc4d125ef7aa3edf23eb4e497c2e50889adc/opentelemetry_sdk-1.39.1.tar.gz", hash = "sha256:cf4d4563caf7bff906c9f7967e2be22d0d6b349b908be0d90fb21c8e9c995cc6", size = 171460, upload-time = "2025-12-11T13:32:49.369Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7c/98/e91cf858f203d86f4eccdf763dcf01cf03f1dae80c3750f7e635bfa206b6/opentelemetry_sdk-1.39.1-py3-none-any.whl", hash = "sha256:4d5482c478513ecb0a5d938dcc61394e647066e0cc2676bee9f3af3f3f45f01c", size = 132565, upload-time = "2025-12-11T13:32:35.069Z" },
+]
+
+[[package]]
+name = "opentelemetry-semantic-conventions"
+version = "0.60b1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "opentelemetry-api" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/91/df/553f93ed38bf22f4b999d9be9c185adb558982214f33eae539d3b5cd0858/opentelemetry_semantic_conventions-0.60b1.tar.gz", hash = "sha256:87c228b5a0669b748c76d76df6c364c369c28f1c465e50f661e39737e84bc953", size = 137935, upload-time = "2025-12-11T13:32:50.487Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7a/5e/5958555e09635d09b75de3c4f8b9cae7335ca545d77392ffe7331534c402/opentelemetry_semantic_conventions-0.60b1-py3-none-any.whl", hash = "sha256:9fa8c8b0c110da289809292b0591220d3a7b53c1526a23021e977d68597893fb", size = 219982, upload-time = "2025-12-11T13:32:36.955Z" },
+]
+
+[[package]]
+name = "packaging"
+version = "26.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" },
+]
+
+[[package]]
+name = "pathable"
+version = "0.4.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/67/93/8f2c2075b180c12c1e9f6a09d1a985bc2036906b13dff1d8917e395f2048/pathable-0.4.4.tar.gz", hash = "sha256:6905a3cd17804edfac7875b5f6c9142a218c7caef78693c2dbbbfbac186d88b2", size = 8124, upload-time = "2025-01-10T18:43:13.247Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7d/eb/b6260b31b1a96386c0a880edebe26f89669098acea8e0318bff6adb378fd/pathable-0.4.4-py3-none-any.whl", hash = "sha256:5ae9e94793b6ef5a4cbe0a7ce9dbbefc1eec38df253763fd0aeeacf2762dbbc2", size = 9592, upload-time = "2025-01-10T18:43:11.88Z" },
+]
+
+[[package]]
+name = "pathspec"
+version = "1.0.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/4c/b2/bb8e495d5262bfec41ab5cb18f522f1012933347fb5d9e62452d446baca2/pathspec-1.0.3.tar.gz", hash = "sha256:bac5cf97ae2c2876e2d25ebb15078eb04d76e4b98921ee31c6f85ade8b59444d", size = 130841, upload-time = "2026-01-09T15:46:46.009Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/32/2b/121e912bd60eebd623f873fd090de0e84f322972ab25a7f9044c056804ed/pathspec-1.0.3-py3-none-any.whl", hash = "sha256:e80767021c1cc524aa3fb14bedda9c34406591343cc42797b386ce7b9354fb6c", size = 55021, upload-time = "2026-01-09T15:46:44.652Z" },
+]
+
+[[package]]
+name = "pathvalidate"
+version = "3.3.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fa/2a/52a8da6fe965dea6192eb716b357558e103aea0a1e9a8352ad575a8406ca/pathvalidate-3.3.1.tar.gz", hash = "sha256:b18c07212bfead624345bb8e1d6141cdcf15a39736994ea0b94035ad2b1ba177", size = 63262, upload-time = "2025-06-15T09:07:20.736Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9a/70/875f4a23bfc4731703a5835487d0d2fb999031bd415e7d17c0ae615c18b7/pathvalidate-3.3.1-py3-none-any.whl", hash = "sha256:5263baab691f8e1af96092fa5137ee17df5bdfbd6cff1fcac4d6ef4bc2e1735f", size = 24305, upload-time = "2025-06-15T09:07:19.117Z" },
+]
+
+[[package]]
+name = "platformdirs"
+version = "4.5.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/cf/86/0248f086a84f01b37aaec0fa567b397df1a119f73c16f6c7a9aac73ea309/platformdirs-4.5.1.tar.gz", hash = "sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda", size = 21715, upload-time = "2025-12-05T13:52:58.638Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = "2025-12-05T13:52:56.823Z" },
+]
+
+[[package]]
+name = "pluggy"
+version = "1.6.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
+]
+
+[[package]]
+name = "podman"
+version = "5.6.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "requests" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/3b/36/070e7bf682ac0868450584df79198c178323e80f73b8fb9b6fec8bde0a65/podman-5.6.0.tar.gz", hash = "sha256:cc5f7aa9562e30f992fc170a48da970a7132be60d8a2e2941e6c17bd0a0b35c9", size = 72832, upload-time = "2025-09-05T09:42:40.071Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0a/9e/8c62f05b104d9f00edbb4c298b152deceb393ea67f0288d89d1139d7a859/podman-5.6.0-py3-none-any.whl", hash = "sha256:967ff8ad8c6b851bc5da1a9410973882d80e235a9410b7d1e931ce0c3324fbe3", size = 88713, upload-time = "2025-09-05T09:42:38.405Z" },
+]
+
+[[package]]
+name = "prometheus-client"
+version = "0.24.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f0/58/a794d23feb6b00fc0c72787d7e87d872a6730dd9ed7c7b3e954637d8f280/prometheus_client-0.24.1.tar.gz", hash = "sha256:7e0ced7fbbd40f7b84962d5d2ab6f17ef88a72504dcf7c0b40737b43b2a461f9", size = 85616, upload-time = "2026-01-14T15:26:26.965Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/74/c3/24a2f845e3917201628ecaba4f18bab4d18a337834c1df2a159ee9d22a42/prometheus_client-0.24.1-py3-none-any.whl", hash = "sha256:150db128af71a5c2482b36e588fc8a6b95e498750da4b17065947c16070f4055", size = 64057, upload-time = "2026-01-14T15:26:24.42Z" },
+]
+
+[[package]]
+name = "py-key-value-aio"
+version = "0.3.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "beartype" },
+ { name = "py-key-value-shared" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/93/ce/3136b771dddf5ac905cc193b461eb67967cf3979688c6696e1f2cdcde7ea/py_key_value_aio-0.3.0.tar.gz", hash = "sha256:858e852fcf6d696d231266da66042d3355a7f9871650415feef9fca7a6cd4155", size = 50801, upload-time = "2025-11-17T16:50:04.711Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/99/10/72f6f213b8f0bce36eff21fda0a13271834e9eeff7f9609b01afdc253c79/py_key_value_aio-0.3.0-py3-none-any.whl", hash = "sha256:1c781915766078bfd608daa769fefb97e65d1d73746a3dfb640460e322071b64", size = 96342, upload-time = "2025-11-17T16:50:03.801Z" },
+]
+
+[package.optional-dependencies]
+disk = [
+ { name = "diskcache" },
+ { name = "pathvalidate" },
+]
+keyring = [
+ { name = "keyring" },
+]
+memory = [
+ { name = "cachetools" },
+]
+redis = [
+ { name = "redis" },
+]
+
+[[package]]
+name = "py-key-value-shared"
+version = "0.3.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "beartype" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/7b/e4/1971dfc4620a3a15b4579fe99e024f5edd6e0967a71154771a059daff4db/py_key_value_shared-0.3.0.tar.gz", hash = "sha256:8fdd786cf96c3e900102945f92aa1473138ebe960ef49da1c833790160c28a4b", size = 11666, upload-time = "2025-11-17T16:50:06.849Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/51/e4/b8b0a03ece72f47dce2307d36e1c34725b7223d209fc679315ffe6a4e2c3/py_key_value_shared-0.3.0-py3-none-any.whl", hash = "sha256:5b0efba7ebca08bb158b1e93afc2f07d30b8f40c2fc12ce24a4c0d84f42f9298", size = 19560, upload-time = "2025-11-17T16:50:05.954Z" },
+]
+
+[[package]]
+name = "pycparser"
+version = "3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" },
+]
+
+[[package]]
+name = "pydantic"
+version = "2.12.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "annotated-types" },
+ { name = "pydantic-core" },
+ { name = "typing-extensions" },
+ { name = "typing-inspection" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/96/ad/a17bc283d7d81837c061c49e3eaa27a45991759a1b7eae1031921c6bd924/pydantic-2.12.4.tar.gz", hash = "sha256:0f8cb9555000a4b5b617f66bfd2566264c4984b27589d3b845685983e8ea85ac", size = 821038, upload-time = "2025-11-05T10:50:08.59Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/82/2f/e68750da9b04856e2a7ec56fc6f034a5a79775e9b9a81882252789873798/pydantic-2.12.4-py3-none-any.whl", hash = "sha256:92d3d202a745d46f9be6df459ac5a064fdaa3c1c4cd8adcfa332ccf3c05f871e", size = 463400, upload-time = "2025-11-05T10:50:06.732Z" },
+]
+
+[package.optional-dependencies]
+email = [
+ { name = "email-validator" },
+]
+
+[[package]]
+name = "pydantic-core"
+version = "2.41.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" },
+ { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" },
+ { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" },
+ { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" },
+ { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" },
+ { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" },
+ { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" },
+ { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" },
+ { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" },
+ { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" },
+ { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" },
+ { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" },
+ { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" },
+ { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" },
+ { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" },
+ { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" },
+ { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" },
+ { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" },
+ { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" },
+ { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" },
+]
+
+[[package]]
+name = "pydantic-settings"
+version = "2.12.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pydantic" },
+ { name = "python-dotenv" },
+ { name = "typing-inspection" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/43/4b/ac7e0aae12027748076d72a8764ff1c9d82ca75a7a52622e67ed3f765c54/pydantic_settings-2.12.0.tar.gz", hash = "sha256:005538ef951e3c2a68e1c08b292b5f2e71490def8589d4221b95dab00dafcfd0", size = 194184, upload-time = "2025-11-10T14:25:47.013Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c1/60/5d4751ba3f4a40a6891f24eec885f51afd78d208498268c734e256fb13c4/pydantic_settings-2.12.0-py3-none-any.whl", hash = "sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809", size = 51880, upload-time = "2025-11-10T14:25:45.546Z" },
+]
+
+[[package]]
+name = "pydocket"
+version = "0.16.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cloudpickle" },
+ { name = "fakeredis", extra = ["lua"] },
+ { name = "opentelemetry-api" },
+ { name = "opentelemetry-exporter-prometheus" },
+ { name = "opentelemetry-instrumentation" },
+ { name = "prometheus-client" },
+ { name = "py-key-value-aio", extra = ["memory", "redis"] },
+ { name = "python-json-logger" },
+ { name = "redis" },
+ { name = "rich" },
+ { name = "typer" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/72/00/26befe5f58df7cd1aeda4a8d10bc7d1908ffd86b80fd995e57a2a7b3f7bd/pydocket-0.16.6.tar.gz", hash = "sha256:b96c96ad7692827214ed4ff25fcf941ec38371314db5dcc1ae792b3e9d3a0294", size = 299054, upload-time = "2026-01-09T22:09:15.405Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0a/3f/7483e5a6dc6326b6e0c640619b5c5bd1d6e3c20e54d58f5fb86267cef00e/pydocket-0.16.6-py3-none-any.whl", hash = "sha256:683d21e2e846aa5106274e7d59210331b242d7fb0dce5b08d3b82065663ed183", size = 67697, upload-time = "2026-01-09T22:09:13.436Z" },
+]
+
+[[package]]
+name = "pygments"
+version = "2.19.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
+]
+
+[[package]]
+name = "pyjwt"
+version = "2.10.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785, upload-time = "2024-11-28T03:43:29.933Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" },
+]
+
+[package.optional-dependencies]
+crypto = [
+ { name = "cryptography" },
+]
+
+[[package]]
+name = "pyperclip"
+version = "1.11.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e8/52/d87eba7cb129b81563019d1679026e7a112ef76855d6159d24754dbd2a51/pyperclip-1.11.0.tar.gz", hash = "sha256:244035963e4428530d9e3a6101a1ef97209c6825edab1567beac148ccc1db1b6", size = 12185, upload-time = "2025-09-26T14:40:37.245Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/df/80/fc9d01d5ed37ba4c42ca2b55b4339ae6e200b456be3a1aaddf4a9fa99b8c/pyperclip-1.11.0-py3-none-any.whl", hash = "sha256:299403e9ff44581cb9ba2ffeed69c7aa96a008622ad0c46cb575ca75b5b84273", size = 11063, upload-time = "2025-09-26T14:40:36.069Z" },
+]
+
+[[package]]
+name = "pytest"
+version = "9.0.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+ { name = "iniconfig" },
+ { name = "packaging" },
+ { name = "pluggy" },
+ { name = "pygments" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" },
+]
+
+[[package]]
+name = "pytest-asyncio"
+version = "1.3.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pytest" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" },
+]
+
+[[package]]
+name = "pytest-httpx"
+version = "0.36.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "httpx" },
+ { name = "pytest" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/5574834da9499066fa1a5ea9c336f94dba2eae02298d36dab192fcf95c86/pytest_httpx-0.36.0.tar.gz", hash = "sha256:9edb66a5fd4388ce3c343189bc67e7e1cb50b07c2e3fc83b97d511975e8a831b", size = 56793, upload-time = "2025-12-02T16:34:57.414Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e2/d2/1eb1ea9c84f0d2033eb0b49675afdc71aa4ea801b74615f00f3c33b725e3/pytest_httpx-0.36.0-py3-none-any.whl", hash = "sha256:bd4c120bb80e142df856e825ec9f17981effb84d159f9fa29ed97e2357c3a9c8", size = 20229, upload-time = "2025-12-02T16:34:56.45Z" },
+]
+
+[[package]]
+name = "python-dotenv"
+version = "1.2.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" },
+]
+
+[[package]]
+name = "python-json-logger"
+version = "4.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/29/bf/eca6a3d43db1dae7070f70e160ab20b807627ba953663ba07928cdd3dc58/python_json_logger-4.0.0.tar.gz", hash = "sha256:f58e68eb46e1faed27e0f574a55a0455eecd7b8a5b88b85a784519ba3cff047f", size = 17683, upload-time = "2025-10-06T04:15:18.984Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/51/e5/fecf13f06e5e5f67e8837d777d1bc43fac0ed2b77a676804df5c34744727/python_json_logger-4.0.0-py3-none-any.whl", hash = "sha256:af09c9daf6a813aa4cc7180395f50f2a9e5fa056034c9953aec92e381c5ba1e2", size = 15548, upload-time = "2025-10-06T04:15:17.553Z" },
+]
+
+[[package]]
+name = "python-multipart"
+version = "0.0.21"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/78/96/804520d0850c7db98e5ccb70282e29208723f0964e88ffd9d0da2f52ea09/python_multipart-0.0.21.tar.gz", hash = "sha256:7137ebd4d3bbf70ea1622998f902b97a29434a9e8dc40eb203bbcf7c2a2cba92", size = 37196, upload-time = "2025-12-17T09:24:22.446Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/aa/76/03af049af4dcee5d27442f71b6924f01f3efb5d2bd34f23fcd563f2cc5f5/python_multipart-0.0.21-py3-none-any.whl", hash = "sha256:cf7a6713e01c87aa35387f4774e812c4361150938d20d232800f75ffcf266090", size = 24541, upload-time = "2025-12-17T09:24:21.153Z" },
+]
+
+[[package]]
+name = "pywin32"
+version = "311"
+source = { registry = "https://pypi.org/simple" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" },
+ { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" },
+ { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" },
+]
+
+[[package]]
+name = "pywin32-ctypes"
+version = "0.2.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/85/9f/01a1a99704853cb63f253eea009390c88e7131c67e66a0a02099a8c917cb/pywin32-ctypes-0.2.3.tar.gz", hash = "sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755", size = 29471, upload-time = "2024-08-14T10:15:34.626Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/de/3d/8161f7711c017e01ac9f008dfddd9410dff3674334c233bde66e7ba65bbf/pywin32_ctypes-0.2.3-py3-none-any.whl", hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8", size = 30756, upload-time = "2024-08-14T10:15:33.187Z" },
+]
+
+[[package]]
+name = "pyyaml"
+version = "6.0.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" },
+ { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" },
+ { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" },
+ { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" },
+ { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" },
+ { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" },
+ { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" },
+ { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" },
+ { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" },
+ { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" },
+ { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" },
+ { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" },
+ { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" },
+ { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" },
+ { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" },
+ { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" },
+]
+
+[[package]]
+name = "redis"
+version = "7.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/43/c8/983d5c6579a411d8a99bc5823cc5712768859b5ce2c8afe1a65b37832c81/redis-7.1.0.tar.gz", hash = "sha256:b1cc3cfa5a2cb9c2ab3ba700864fb0ad75617b41f01352ce5779dabf6d5f9c3c", size = 4796669, upload-time = "2025-11-19T15:54:39.961Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/89/f0/8956f8a86b20d7bb9d6ac0187cf4cd54d8065bc9a1a09eb8011d4d326596/redis-7.1.0-py3-none-any.whl", hash = "sha256:23c52b208f92b56103e17c5d06bdc1a6c2c0b3106583985a76a18f83b265de2b", size = 354159, upload-time = "2025-11-19T15:54:38.064Z" },
+]
+
+[[package]]
+name = "referencing"
+version = "0.36.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "attrs" },
+ { name = "rpds-py" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" },
+]
+
+[[package]]
+name = "requests"
+version = "2.32.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "certifi" },
+ { name = "charset-normalizer" },
+ { name = "idna" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
+]
+
+[[package]]
+name = "rich"
+version = "14.2.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markdown-it-py" },
+ { name = "pygments" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" },
+]
+
+[[package]]
+name = "rich-rst"
+version = "1.3.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "docutils" },
+ { name = "rich" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/bc/6d/a506aaa4a9eaa945ed8ab2b7347859f53593864289853c5d6d62b77246e0/rich_rst-1.3.2.tar.gz", hash = "sha256:a1196fdddf1e364b02ec68a05e8ff8f6914fee10fbca2e6b6735f166bb0da8d4", size = 14936, upload-time = "2025-10-14T16:49:45.332Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/13/2f/b4530fbf948867702d0a3f27de4a6aab1d156f406d72852ab902c4d04de9/rich_rst-1.3.2-py3-none-any.whl", hash = "sha256:a99b4907cbe118cf9d18b0b44de272efa61f15117c61e39ebdc431baf5df722a", size = 12567, upload-time = "2025-10-14T16:49:42.953Z" },
+]
+
+[[package]]
+name = "rpds-py"
+version = "0.30.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/20/af/3f2f423103f1113b36230496629986e0ef7e199d2aa8392452b484b38ced/rpds_py-0.30.0.tar.gz", hash = "sha256:dd8ff7cf90014af0c0f787eea34794ebf6415242ee1d6fa91eaba725cc441e84", size = 69469, upload-time = "2025-11-30T20:24:38.837Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/86/81/dad16382ebbd3d0e0328776d8fd7ca94220e4fa0798d1dc5e7da48cb3201/rpds_py-0.30.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:68f19c879420aa08f61203801423f6cd5ac5f0ac4ac82a2368a9fcd6a9a075e0", size = 362099, upload-time = "2025-11-30T20:23:27.316Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/60/19f7884db5d5603edf3c6bce35408f45ad3e97e10007df0e17dd57af18f8/rpds_py-0.30.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ec7c4490c672c1a0389d319b3a9cfcd098dcdc4783991553c332a15acf7249be", size = 353192, upload-time = "2025-11-30T20:23:29.151Z" },
+ { url = "https://files.pythonhosted.org/packages/bf/c4/76eb0e1e72d1a9c4703c69607cec123c29028bff28ce41588792417098ac/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f251c812357a3fed308d684a5079ddfb9d933860fc6de89f2b7ab00da481e65f", size = 384080, upload-time = "2025-11-30T20:23:30.785Z" },
+ { url = "https://files.pythonhosted.org/packages/72/87/87ea665e92f3298d1b26d78814721dc39ed8d2c74b86e83348d6b48a6f31/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac98b175585ecf4c0348fd7b29c3864bda53b805c773cbf7bfdaffc8070c976f", size = 394841, upload-time = "2025-11-30T20:23:32.209Z" },
+ { url = "https://files.pythonhosted.org/packages/77/ad/7783a89ca0587c15dcbf139b4a8364a872a25f861bdb88ed99f9b0dec985/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3e62880792319dbeb7eb866547f2e35973289e7d5696c6e295476448f5b63c87", size = 516670, upload-time = "2025-11-30T20:23:33.742Z" },
+ { url = "https://files.pythonhosted.org/packages/5b/3c/2882bdac942bd2172f3da574eab16f309ae10a3925644e969536553cb4ee/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e7fc54e0900ab35d041b0601431b0a0eb495f0851a0639b6ef90f7741b39a18", size = 408005, upload-time = "2025-11-30T20:23:35.253Z" },
+ { url = "https://files.pythonhosted.org/packages/ce/81/9a91c0111ce1758c92516a3e44776920b579d9a7c09b2b06b642d4de3f0f/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47e77dc9822d3ad616c3d5759ea5631a75e5809d5a28707744ef79d7a1bcfcad", size = 382112, upload-time = "2025-11-30T20:23:36.842Z" },
+ { url = "https://files.pythonhosted.org/packages/cf/8e/1da49d4a107027e5fbc64daeab96a0706361a2918da10cb41769244b805d/rpds_py-0.30.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:b4dc1a6ff022ff85ecafef7979a2c6eb423430e05f1165d6688234e62ba99a07", size = 399049, upload-time = "2025-11-30T20:23:38.343Z" },
+ { url = "https://files.pythonhosted.org/packages/df/5a/7ee239b1aa48a127570ec03becbb29c9d5a9eb092febbd1699d567cae859/rpds_py-0.30.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4559c972db3a360808309e06a74628b95eaccbf961c335c8fe0d590cf587456f", size = 415661, upload-time = "2025-11-30T20:23:40.263Z" },
+ { url = "https://files.pythonhosted.org/packages/70/ea/caa143cf6b772f823bc7929a45da1fa83569ee49b11d18d0ada7f5ee6fd6/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:0ed177ed9bded28f8deb6ab40c183cd1192aa0de40c12f38be4d59cd33cb5c65", size = 565606, upload-time = "2025-11-30T20:23:42.186Z" },
+ { url = "https://files.pythonhosted.org/packages/64/91/ac20ba2d69303f961ad8cf55bf7dbdb4763f627291ba3d0d7d67333cced9/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ad1fa8db769b76ea911cb4e10f049d80bf518c104f15b3edb2371cc65375c46f", size = 591126, upload-time = "2025-11-30T20:23:44.086Z" },
+ { url = "https://files.pythonhosted.org/packages/21/20/7ff5f3c8b00c8a95f75985128c26ba44503fb35b8e0259d812766ea966c7/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:46e83c697b1f1c72b50e5ee5adb4353eef7406fb3f2043d64c33f20ad1c2fc53", size = 553371, upload-time = "2025-11-30T20:23:46.004Z" },
+ { url = "https://files.pythonhosted.org/packages/72/c7/81dadd7b27c8ee391c132a6b192111ca58d866577ce2d9b0ca157552cce0/rpds_py-0.30.0-cp314-cp314-win32.whl", hash = "sha256:ee454b2a007d57363c2dfd5b6ca4a5d7e2c518938f8ed3b706e37e5d470801ed", size = 215298, upload-time = "2025-11-30T20:23:47.696Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/d2/1aaac33287e8cfb07aab2e6b8ac1deca62f6f65411344f1433c55e6f3eb8/rpds_py-0.30.0-cp314-cp314-win_amd64.whl", hash = "sha256:95f0802447ac2d10bcc69f6dc28fe95fdf17940367b21d34e34c737870758950", size = 228604, upload-time = "2025-11-30T20:23:49.501Z" },
+ { url = "https://files.pythonhosted.org/packages/e8/95/ab005315818cc519ad074cb7784dae60d939163108bd2b394e60dc7b5461/rpds_py-0.30.0-cp314-cp314-win_arm64.whl", hash = "sha256:613aa4771c99f03346e54c3f038e4cc574ac09a3ddfb0e8878487335e96dead6", size = 222391, upload-time = "2025-11-30T20:23:50.96Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/68/154fe0194d83b973cdedcdcc88947a2752411165930182ae41d983dcefa6/rpds_py-0.30.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:7e6ecfcb62edfd632e56983964e6884851786443739dbfe3582947e87274f7cb", size = 364868, upload-time = "2025-11-30T20:23:52.494Z" },
+ { url = "https://files.pythonhosted.org/packages/83/69/8bbc8b07ec854d92a8b75668c24d2abcb1719ebf890f5604c61c9369a16f/rpds_py-0.30.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a1d0bc22a7cdc173fedebb73ef81e07faef93692b8c1ad3733b67e31e1b6e1b8", size = 353747, upload-time = "2025-11-30T20:23:54.036Z" },
+ { url = "https://files.pythonhosted.org/packages/ab/00/ba2e50183dbd9abcce9497fa5149c62b4ff3e22d338a30d690f9af970561/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d08f00679177226c4cb8c5265012eea897c8ca3b93f429e546600c971bcbae7", size = 383795, upload-time = "2025-11-30T20:23:55.556Z" },
+ { url = "https://files.pythonhosted.org/packages/05/6f/86f0272b84926bcb0e4c972262f54223e8ecc556b3224d281e6598fc9268/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5965af57d5848192c13534f90f9dd16464f3c37aaf166cc1da1cae1fd5a34898", size = 393330, upload-time = "2025-11-30T20:23:57.033Z" },
+ { url = "https://files.pythonhosted.org/packages/cb/e9/0e02bb2e6dc63d212641da45df2b0bf29699d01715913e0d0f017ee29438/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a4e86e34e9ab6b667c27f3211ca48f73dba7cd3d90f8d5b11be56e5dbc3fb4e", size = 518194, upload-time = "2025-11-30T20:23:58.637Z" },
+ { url = "https://files.pythonhosted.org/packages/ee/ca/be7bca14cf21513bdf9c0606aba17d1f389ea2b6987035eb4f62bd923f25/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d3e6b26f2c785d65cc25ef1e5267ccbe1b069c5c21b8cc724efee290554419", size = 408340, upload-time = "2025-11-30T20:24:00.2Z" },
+ { url = "https://files.pythonhosted.org/packages/c2/c7/736e00ebf39ed81d75544c0da6ef7b0998f8201b369acf842f9a90dc8fce/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:626a7433c34566535b6e56a1b39a7b17ba961e97ce3b80ec62e6f1312c025551", size = 383765, upload-time = "2025-11-30T20:24:01.759Z" },
+ { url = "https://files.pythonhosted.org/packages/4a/3f/da50dfde9956aaf365c4adc9533b100008ed31aea635f2b8d7b627e25b49/rpds_py-0.30.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:acd7eb3f4471577b9b5a41baf02a978e8bdeb08b4b355273994f8b87032000a8", size = 396834, upload-time = "2025-11-30T20:24:03.687Z" },
+ { url = "https://files.pythonhosted.org/packages/4e/00/34bcc2565b6020eab2623349efbdec810676ad571995911f1abdae62a3a0/rpds_py-0.30.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fe5fa731a1fa8a0a56b0977413f8cacac1768dad38d16b3a296712709476fbd5", size = 415470, upload-time = "2025-11-30T20:24:05.232Z" },
+ { url = "https://files.pythonhosted.org/packages/8c/28/882e72b5b3e6f718d5453bd4d0d9cf8df36fddeb4ddbbab17869d5868616/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:74a3243a411126362712ee1524dfc90c650a503502f135d54d1b352bd01f2404", size = 565630, upload-time = "2025-11-30T20:24:06.878Z" },
+ { url = "https://files.pythonhosted.org/packages/3b/97/04a65539c17692de5b85c6e293520fd01317fd878ea1995f0367d4532fb1/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:3e8eeb0544f2eb0d2581774be4c3410356eba189529a6b3e36bbbf9696175856", size = 591148, upload-time = "2025-11-30T20:24:08.445Z" },
+ { url = "https://files.pythonhosted.org/packages/85/70/92482ccffb96f5441aab93e26c4d66489eb599efdcf96fad90c14bbfb976/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:dbd936cde57abfee19ab3213cf9c26be06d60750e60a8e4dd85d1ab12c8b1f40", size = 556030, upload-time = "2025-11-30T20:24:10.956Z" },
+ { url = "https://files.pythonhosted.org/packages/20/53/7c7e784abfa500a2b6b583b147ee4bb5a2b3747a9166bab52fec4b5b5e7d/rpds_py-0.30.0-cp314-cp314t-win32.whl", hash = "sha256:dc824125c72246d924f7f796b4f63c1e9dc810c7d9e2355864b3c3a73d59ade0", size = 211570, upload-time = "2025-11-30T20:24:12.735Z" },
+ { url = "https://files.pythonhosted.org/packages/d0/02/fa464cdfbe6b26e0600b62c528b72d8608f5cc49f96b8d6e38c95d60c676/rpds_py-0.30.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27f4b0e92de5bfbc6f86e43959e6edd1425c33b5e69aab0984a72047f2bcf1e3", size = 226532, upload-time = "2025-11-30T20:24:14.634Z" },
+]
+
+[[package]]
+name = "ruff"
+version = "0.14.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/df/55/cccfca45157a2031dcbb5a462a67f7cf27f8b37d4b3b1cd7438f0f5c1df6/ruff-0.14.4.tar.gz", hash = "sha256:f459a49fe1085a749f15414ca76f61595f1a2cc8778ed7c279b6ca2e1fd19df3", size = 5587844, upload-time = "2025-11-06T22:07:45.033Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/17/b9/67240254166ae1eaa38dec32265e9153ac53645a6c6670ed36ad00722af8/ruff-0.14.4-py3-none-linux_armv6l.whl", hash = "sha256:e6604613ffbcf2297cd5dcba0e0ac9bd0c11dc026442dfbb614504e87c349518", size = 12606781, upload-time = "2025-11-06T22:07:01.841Z" },
+ { url = "https://files.pythonhosted.org/packages/46/c8/09b3ab245d8652eafe5256ab59718641429f68681ee713ff06c5c549f156/ruff-0.14.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d99c0b52b6f0598acede45ee78288e5e9b4409d1ce7f661f0fa36d4cbeadf9a4", size = 12946765, upload-time = "2025-11-06T22:07:05.858Z" },
+ { url = "https://files.pythonhosted.org/packages/14/bb/1564b000219144bf5eed2359edc94c3590dd49d510751dad26202c18a17d/ruff-0.14.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:9358d490ec030f1b51d048a7fd6ead418ed0826daf6149e95e30aa67c168af33", size = 11928120, upload-time = "2025-11-06T22:07:08.023Z" },
+ { url = "https://files.pythonhosted.org/packages/a3/92/d5f1770e9988cc0742fefaa351e840d9aef04ec24ae1be36f333f96d5704/ruff-0.14.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81b40d27924f1f02dfa827b9c0712a13c0e4b108421665322218fc38caf615c2", size = 12370877, upload-time = "2025-11-06T22:07:10.015Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/29/e9282efa55f1973d109faf839a63235575519c8ad278cc87a182a366810e/ruff-0.14.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f5e649052a294fe00818650712083cddc6cc02744afaf37202c65df9ea52efa5", size = 12408538, upload-time = "2025-11-06T22:07:13.085Z" },
+ { url = "https://files.pythonhosted.org/packages/8e/01/930ed6ecfce130144b32d77d8d69f5c610e6d23e6857927150adf5d7379a/ruff-0.14.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa082a8f878deeba955531f975881828fd6afd90dfa757c2b0808aadb437136e", size = 13141942, upload-time = "2025-11-06T22:07:15.386Z" },
+ { url = "https://files.pythonhosted.org/packages/6a/46/a9c89b42b231a9f487233f17a89cbef9d5acd538d9488687a02ad288fa6b/ruff-0.14.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1043c6811c2419e39011890f14d0a30470f19d47d197c4858b2787dfa698f6c8", size = 14544306, upload-time = "2025-11-06T22:07:17.631Z" },
+ { url = "https://files.pythonhosted.org/packages/78/96/9c6cf86491f2a6d52758b830b89b78c2ae61e8ca66b86bf5a20af73d20e6/ruff-0.14.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a9f3a936ac27fb7c2a93e4f4b943a662775879ac579a433291a6f69428722649", size = 14210427, upload-time = "2025-11-06T22:07:19.832Z" },
+ { url = "https://files.pythonhosted.org/packages/71/f4/0666fe7769a54f63e66404e8ff698de1dcde733e12e2fd1c9c6efb689cb5/ruff-0.14.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:95643ffd209ce78bc113266b88fba3d39e0461f0cbc8b55fb92505030fb4a850", size = 13658488, upload-time = "2025-11-06T22:07:22.32Z" },
+ { url = "https://files.pythonhosted.org/packages/ee/79/6ad4dda2cfd55e41ac9ed6d73ef9ab9475b1eef69f3a85957210c74ba12c/ruff-0.14.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:456daa2fa1021bc86ca857f43fe29d5d8b3f0e55e9f90c58c317c1dcc2afc7b5", size = 13354908, upload-time = "2025-11-06T22:07:24.347Z" },
+ { url = "https://files.pythonhosted.org/packages/b5/60/f0b6990f740bb15c1588601d19d21bcc1bd5de4330a07222041678a8e04f/ruff-0.14.4-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:f911bba769e4a9f51af6e70037bb72b70b45a16db5ce73e1f72aefe6f6d62132", size = 13587803, upload-time = "2025-11-06T22:07:26.327Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/da/eaaada586f80068728338e0ef7f29ab3e4a08a692f92eb901a4f06bbff24/ruff-0.14.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:76158a7369b3979fa878612c623a7e5430c18b2fd1c73b214945c2d06337db67", size = 12279654, upload-time = "2025-11-06T22:07:28.46Z" },
+ { url = "https://files.pythonhosted.org/packages/66/d4/b1d0e82cf9bf8aed10a6d45be47b3f402730aa2c438164424783ac88c0ed/ruff-0.14.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f3b8f3b442d2b14c246e7aeca2e75915159e06a3540e2f4bed9f50d062d24469", size = 12357520, upload-time = "2025-11-06T22:07:31.468Z" },
+ { url = "https://files.pythonhosted.org/packages/04/f4/53e2b42cc82804617e5c7950b7079d79996c27e99c4652131c6a1100657f/ruff-0.14.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c62da9a06779deecf4d17ed04939ae8b31b517643b26370c3be1d26f3ef7dbde", size = 12719431, upload-time = "2025-11-06T22:07:33.831Z" },
+ { url = "https://files.pythonhosted.org/packages/a2/94/80e3d74ed9a72d64e94a7b7706b1c1ebaa315ef2076fd33581f6a1cd2f95/ruff-0.14.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5a443a83a1506c684e98acb8cb55abaf3ef725078be40237463dae4463366349", size = 13464394, upload-time = "2025-11-06T22:07:35.905Z" },
+ { url = "https://files.pythonhosted.org/packages/54/1a/a49f071f04c42345c793d22f6cf5e0920095e286119ee53a64a3a3004825/ruff-0.14.4-py3-none-win32.whl", hash = "sha256:643b69cb63cd996f1fc7229da726d07ac307eae442dd8974dbc7cf22c1e18fff", size = 12493429, upload-time = "2025-11-06T22:07:38.43Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/22/e58c43e641145a2b670328fb98bc384e20679b5774258b1e540207580266/ruff-0.14.4-py3-none-win_amd64.whl", hash = "sha256:26673da283b96fe35fa0c939bf8411abec47111644aa9f7cfbd3c573fb125d2c", size = 13635380, upload-time = "2025-11-06T22:07:40.496Z" },
+ { url = "https://files.pythonhosted.org/packages/30/bd/4168a751ddbbf43e86544b4de8b5c3b7be8d7167a2a5cb977d274e04f0a1/ruff-0.14.4-py3-none-win_arm64.whl", hash = "sha256:dd09c292479596b0e6fec8cd95c65c3a6dc68e9ad17b8f2382130f87ff6a75bb", size = 12663065, upload-time = "2025-11-06T22:07:42.603Z" },
+]
+
+[[package]]
+name = "secretstorage"
+version = "3.5.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cryptography" },
+ { name = "jeepney" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/1c/03/e834bcd866f2f8a49a85eaff47340affa3bfa391ee9912a952a1faa68c7b/secretstorage-3.5.0.tar.gz", hash = "sha256:f04b8e4689cbce351744d5537bf6b1329c6fc68f91fa666f60a380edddcd11be", size = 19884, upload-time = "2025-11-23T19:02:53.191Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b7/46/f5af3402b579fd5e11573ce652019a67074317e18c1935cc0b4ba9b35552/secretstorage-3.5.0-py3-none-any.whl", hash = "sha256:0ce65888c0725fcb2c5bc0fdb8e5438eece02c523557ea40ce0703c266248137", size = 15554, upload-time = "2025-11-23T19:02:51.545Z" },
+]
+
+[[package]]
+name = "shellingham"
+version = "1.5.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" },
+]
+
+[[package]]
+name = "sortedcontainers"
+version = "2.4.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" },
+]
+
+[[package]]
+name = "sse-starlette"
+version = "3.2.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio" },
+ { name = "starlette" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/8b/8d/00d280c03ffd39aaee0e86ec81e2d3b9253036a0f93f51d10503adef0e65/sse_starlette-3.2.0.tar.gz", hash = "sha256:8127594edfb51abe44eac9c49e59b0b01f1039d0c7461c6fd91d4e03b70da422", size = 27253, upload-time = "2026-01-17T13:11:05.62Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/96/7f/832f015020844a8b8f7a9cbc103dd76ba8e3875004c41e08440ea3a2b41a/sse_starlette-3.2.0-py3-none-any.whl", hash = "sha256:5876954bd51920fc2cd51baee47a080eb88a37b5b784e615abb0b283f801cdbf", size = 12763, upload-time = "2026-01-17T13:11:03.775Z" },
+]
+
+[[package]]
+name = "starlette"
+version = "0.52.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c4/68/79977123bb7be889ad680d79a40f339082c1978b5cfcf62c2d8d196873ac/starlette-0.52.1.tar.gz", hash = "sha256:834edd1b0a23167694292e94f597773bc3f89f362be6effee198165a35d62933", size = 2653702, upload-time = "2026-01-18T13:34:11.062Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl", hash = "sha256:0029d43eb3d273bc4f83a08720b4912ea4b071087a3b48db01b7c839f7954d74", size = 74272, upload-time = "2026-01-18T13:34:09.188Z" },
+]
+
+[[package]]
+name = "stevedore"
+version = "5.6.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/96/5b/496f8abebd10c3301129abba7ddafd46c71d799a70c44ab080323987c4c9/stevedore-5.6.0.tar.gz", hash = "sha256:f22d15c6ead40c5bbfa9ca54aa7e7b4a07d59b36ae03ed12ced1a54cf0b51945", size = 516074, upload-time = "2025-11-20T10:06:07.264Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f4/40/8561ce06dc46fd17242c7724ab25b257a2ac1b35f4ebf551b40ce6105cfa/stevedore-5.6.0-py3-none-any.whl", hash = "sha256:4a36dccefd7aeea0c70135526cecb7766c4c84c473b1af68db23d541b6dc1820", size = 54428, upload-time = "2025-11-20T10:06:05.946Z" },
+]
+
+[[package]]
+name = "structlog"
+version = "25.5.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ef/52/9ba0f43b686e7f3ddfeaa78ac3af750292662284b3661e91ad5494f21dbc/structlog-25.5.0.tar.gz", hash = "sha256:098522a3bebed9153d4570c6d0288abf80a031dfdb2048d59a49e9dc2190fc98", size = 1460830, upload-time = "2025-10-27T08:28:23.028Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a8/45/a132b9074aa18e799b891b91ad72133c98d8042c70f6240e4c5f9dabee2f/structlog-25.5.0-py3-none-any.whl", hash = "sha256:a8453e9b9e636ec59bd9e79bbd4a72f025981b3ba0f5837aebf48f02f37a7f9f", size = 72510, upload-time = "2025-10-27T08:28:21.535Z" },
+]
+
+[[package]]
+name = "textual"
+version = "8.0.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markdown-it-py", extra = ["linkify"] },
+ { name = "mdit-py-plugins" },
+ { name = "platformdirs" },
+ { name = "pygments" },
+ { name = "rich" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/be/08/c6bcb1e3c4c9528ec9049f4ac685afdafc72866664270f0deb416ccbba2a/textual-8.0.2.tar.gz", hash = "sha256:7b342f3ee9a5f2f1bd42d7b598cae00ff1275da68536769510db4b7fe8cabf5d", size = 6099270, upload-time = "2026-03-03T20:23:46.858Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/77/bc/0cd17f96f00b6e8bfbca64c574088c85f3c614912b3030f313752e30a099/textual-8.0.2-py3-none-any.whl", hash = "sha256:4ceadbe0e8a30eb80f9995000f4d031f711420a31b02da38f3482957b7c50ce4", size = 719174, upload-time = "2026-03-03T20:23:50.46Z" },
+]
+
+[[package]]
+name = "typer"
+version = "0.20.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "click" },
+ { name = "rich" },
+ { name = "shellingham" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/6d/c1/933d30fd7a123ed981e2a1eedafceab63cb379db0402e438a13bc51bbb15/typer-0.20.1.tar.gz", hash = "sha256:68585eb1b01203689c4199bc440d6be616f0851e9f0eb41e4a778845c5a0fd5b", size = 105968, upload-time = "2025-12-19T16:48:56.302Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c8/52/1f2df7e7d1be3d65ddc2936d820d4a3d9777a54f4204f5ca46b8513eff77/typer-0.20.1-py3-none-any.whl", hash = "sha256:4b3bde918a67c8e03d861aa02deca90a95bbac572e71b1b9be56ff49affdb5a8", size = 47381, upload-time = "2025-12-19T16:48:53.679Z" },
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.15.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
+]
+
+[[package]]
+name = "typing-inspection"
+version = "0.4.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" },
+]
+
+[[package]]
+name = "uc-micro-py"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/78/67/9a363818028526e2d4579334460df777115bdec1bb77c08f9db88f6389f2/uc_micro_py-2.0.0.tar.gz", hash = "sha256:c53691e495c8db60e16ffc4861a35469b0ba0821fe409a8a7a0a71864d33a811", size = 6611, upload-time = "2026-03-01T06:31:27.526Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/61/73/d21edf5b204d1467e06500080a50f79d49ef2b997c79123a536d4a17d97c/uc_micro_py-2.0.0-py3-none-any.whl", hash = "sha256:3603a3859af53e5a39bc7677713c78ea6589ff188d70f4fee165db88e22b242c", size = 6383, upload-time = "2026-03-01T06:31:26.257Z" },
+]
+
+[[package]]
+name = "urllib3"
+version = "2.6.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" },
+]
+
+[[package]]
+name = "uvicorn"
+version = "0.40.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "click" },
+ { name = "h11" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c3/d1/8f3c683c9561a4e6689dd3b1d345c815f10f86acd044ee1fb9a4dcd0b8c5/uvicorn-0.40.0.tar.gz", hash = "sha256:839676675e87e73694518b5574fd0f24c9d97b46bea16df7b8c05ea1a51071ea", size = 81761, upload-time = "2025-12-21T14:16:22.45Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/3d/d8/2083a1daa7439a66f3a48589a57d576aa117726762618f6bb09fe3798796/uvicorn-0.40.0-py3-none-any.whl", hash = "sha256:c6c8f55bc8bf13eb6fa9ff87ad62308bbbc33d0b67f84293151efe87e0d5f2ee", size = 68502, upload-time = "2025-12-21T14:16:21.041Z" },
+]
+
+[[package]]
+name = "websockets"
+version = "16.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/04/24/4b2031d72e840ce4c1ccb255f693b15c334757fc50023e4db9537080b8c4/websockets-16.0.tar.gz", hash = "sha256:5f6261a5e56e8d5c42a4497b364ea24d94d9563e8fbd44e78ac40879c60179b5", size = 179346, upload-time = "2026-01-10T09:23:47.181Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f3/1d/e88022630271f5bd349ed82417136281931e558d628dd52c4d8621b4a0b2/websockets-16.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8cc451a50f2aee53042ac52d2d053d08bf89bcb31ae799cb4487587661c038a0", size = 177406, upload-time = "2026-01-10T09:23:12.178Z" },
+ { url = "https://files.pythonhosted.org/packages/f2/78/e63be1bf0724eeb4616efb1ae1c9044f7c3953b7957799abb5915bffd38e/websockets-16.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:daa3b6ff70a9241cf6c7fc9e949d41232d9d7d26fd3522b1ad2b4d62487e9904", size = 175085, upload-time = "2026-01-10T09:23:13.511Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/f4/d3c9220d818ee955ae390cf319a7c7a467beceb24f05ee7aaaa2414345ba/websockets-16.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:fd3cb4adb94a2a6e2b7c0d8d05cb94e6f1c81a0cf9dc2694fb65c7e8d94c42e4", size = 175328, upload-time = "2026-01-10T09:23:14.727Z" },
+ { url = "https://files.pythonhosted.org/packages/63/bc/d3e208028de777087e6fb2b122051a6ff7bbcca0d6df9d9c2bf1dd869ae9/websockets-16.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:781caf5e8eee67f663126490c2f96f40906594cb86b408a703630f95550a8c3e", size = 185044, upload-time = "2026-01-10T09:23:15.939Z" },
+ { url = "https://files.pythonhosted.org/packages/ad/6e/9a0927ac24bd33a0a9af834d89e0abc7cfd8e13bed17a86407a66773cc0e/websockets-16.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:caab51a72c51973ca21fa8a18bd8165e1a0183f1ac7066a182ff27107b71e1a4", size = 186279, upload-time = "2026-01-10T09:23:17.148Z" },
+ { url = "https://files.pythonhosted.org/packages/b9/ca/bf1c68440d7a868180e11be653c85959502efd3a709323230314fda6e0b3/websockets-16.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19c4dc84098e523fd63711e563077d39e90ec6702aff4b5d9e344a60cb3c0cb1", size = 185711, upload-time = "2026-01-10T09:23:18.372Z" },
+ { url = "https://files.pythonhosted.org/packages/c4/f8/fdc34643a989561f217bb477cbc47a3a07212cbda91c0e4389c43c296ebf/websockets-16.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a5e18a238a2b2249c9a9235466b90e96ae4795672598a58772dd806edc7ac6d3", size = 184982, upload-time = "2026-01-10T09:23:19.652Z" },
+ { url = "https://files.pythonhosted.org/packages/dd/d1/574fa27e233764dbac9c52730d63fcf2823b16f0856b3329fc6268d6ae4f/websockets-16.0-cp314-cp314-win32.whl", hash = "sha256:a069d734c4a043182729edd3e9f247c3b2a4035415a9172fd0f1b71658a320a8", size = 177915, upload-time = "2026-01-10T09:23:21.458Z" },
+ { url = "https://files.pythonhosted.org/packages/8a/f1/ae6b937bf3126b5134ce1f482365fde31a357c784ac51852978768b5eff4/websockets-16.0-cp314-cp314-win_amd64.whl", hash = "sha256:c0ee0e63f23914732c6d7e0cce24915c48f3f1512ec1d079ed01fc629dab269d", size = 178381, upload-time = "2026-01-10T09:23:22.715Z" },
+ { url = "https://files.pythonhosted.org/packages/06/9b/f791d1db48403e1f0a27577a6beb37afae94254a8c6f08be4a23e4930bc0/websockets-16.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:a35539cacc3febb22b8f4d4a99cc79b104226a756aa7400adc722e83b0d03244", size = 177737, upload-time = "2026-01-10T09:23:24.523Z" },
+ { url = "https://files.pythonhosted.org/packages/bd/40/53ad02341fa33b3ce489023f635367a4ac98b73570102ad2cdd770dacc9a/websockets-16.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b784ca5de850f4ce93ec85d3269d24d4c82f22b7212023c974c401d4980ebc5e", size = 175268, upload-time = "2026-01-10T09:23:25.781Z" },
+ { url = "https://files.pythonhosted.org/packages/74/9b/6158d4e459b984f949dcbbb0c5d270154c7618e11c01029b9bbd1bb4c4f9/websockets-16.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:569d01a4e7fba956c5ae4fc988f0d4e187900f5497ce46339c996dbf24f17641", size = 175486, upload-time = "2026-01-10T09:23:27.033Z" },
+ { url = "https://files.pythonhosted.org/packages/e5/2d/7583b30208b639c8090206f95073646c2c9ffd66f44df967981a64f849ad/websockets-16.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:50f23cdd8343b984957e4077839841146f67a3d31ab0d00e6b824e74c5b2f6e8", size = 185331, upload-time = "2026-01-10T09:23:28.259Z" },
+ { url = "https://files.pythonhosted.org/packages/45/b0/cce3784eb519b7b5ad680d14b9673a31ab8dcb7aad8b64d81709d2430aa8/websockets-16.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:152284a83a00c59b759697b7f9e9cddf4e3c7861dd0d964b472b70f78f89e80e", size = 186501, upload-time = "2026-01-10T09:23:29.449Z" },
+ { url = "https://files.pythonhosted.org/packages/19/60/b8ebe4c7e89fb5f6cdf080623c9d92789a53636950f7abacfc33fe2b3135/websockets-16.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bc59589ab64b0022385f429b94697348a6a234e8ce22544e3681b2e9331b5944", size = 186062, upload-time = "2026-01-10T09:23:31.368Z" },
+ { url = "https://files.pythonhosted.org/packages/88/a8/a080593f89b0138b6cba1b28f8df5673b5506f72879322288b031337c0b8/websockets-16.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:32da954ffa2814258030e5a57bc73a3635463238e797c7375dc8091327434206", size = 185356, upload-time = "2026-01-10T09:23:32.627Z" },
+ { url = "https://files.pythonhosted.org/packages/c2/b6/b9afed2afadddaf5ebb2afa801abf4b0868f42f8539bfe4b071b5266c9fe/websockets-16.0-cp314-cp314t-win32.whl", hash = "sha256:5a4b4cc550cb665dd8a47f868c8d04c8230f857363ad3c9caf7a0c3bf8c61ca6", size = 178085, upload-time = "2026-01-10T09:23:33.816Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/3e/28135a24e384493fa804216b79a6a6759a38cc4ff59118787b9fb693df93/websockets-16.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b14dc141ed6d2dde437cddb216004bcac6a1df0935d79656387bd41632ba0bbd", size = 178531, upload-time = "2026-01-10T09:23:35.016Z" },
+ { url = "https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598, upload-time = "2026-01-10T09:23:45.395Z" },
+]
+
+[[package]]
+name = "wrapt"
+version = "1.17.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/95/8f/aeb76c5b46e273670962298c23e7ddde79916cb74db802131d49a85e4b7d/wrapt-1.17.3.tar.gz", hash = "sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0", size = 55547, upload-time = "2025-08-12T05:53:21.714Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/02/a2/cd864b2a14f20d14f4c496fab97802001560f9f41554eef6df201cd7f76c/wrapt-1.17.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cf30f6e3c077c8e6a9a7809c94551203c8843e74ba0c960f4a98cd80d4665d39", size = 54132, upload-time = "2025-08-12T05:51:49.864Z" },
+ { url = "https://files.pythonhosted.org/packages/d5/46/d011725b0c89e853dc44cceb738a307cde5d240d023d6d40a82d1b4e1182/wrapt-1.17.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e228514a06843cae89621384cfe3a80418f3c04aadf8a3b14e46a7be704e4235", size = 39091, upload-time = "2025-08-12T05:51:38.935Z" },
+ { url = "https://files.pythonhosted.org/packages/2e/9e/3ad852d77c35aae7ddebdbc3b6d35ec8013af7d7dddad0ad911f3d891dae/wrapt-1.17.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:5ea5eb3c0c071862997d6f3e02af1d055f381b1d25b286b9d6644b79db77657c", size = 39172, upload-time = "2025-08-12T05:51:59.365Z" },
+ { url = "https://files.pythonhosted.org/packages/c3/f7/c983d2762bcce2326c317c26a6a1e7016f7eb039c27cdf5c4e30f4160f31/wrapt-1.17.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:281262213373b6d5e4bb4353bc36d1ba4084e6d6b5d242863721ef2bf2c2930b", size = 87163, upload-time = "2025-08-12T05:52:40.965Z" },
+ { url = "https://files.pythonhosted.org/packages/e4/0f/f673f75d489c7f22d17fe0193e84b41540d962f75fce579cf6873167c29b/wrapt-1.17.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc4a8d2b25efb6681ecacad42fca8859f88092d8732b170de6a5dddd80a1c8fa", size = 87963, upload-time = "2025-08-12T05:52:20.326Z" },
+ { url = "https://files.pythonhosted.org/packages/df/61/515ad6caca68995da2fac7a6af97faab8f78ebe3bf4f761e1b77efbc47b5/wrapt-1.17.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:373342dd05b1d07d752cecbec0c41817231f29f3a89aa8b8843f7b95992ed0c7", size = 86945, upload-time = "2025-08-12T05:52:21.581Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/bd/4e70162ce398462a467bc09e768bee112f1412e563620adc353de9055d33/wrapt-1.17.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d40770d7c0fd5cbed9d84b2c3f2e156431a12c9a37dc6284060fb4bec0b7ffd4", size = 86857, upload-time = "2025-08-12T05:52:43.043Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/b8/da8560695e9284810b8d3df8a19396a6e40e7518059584a1a394a2b35e0a/wrapt-1.17.3-cp314-cp314-win32.whl", hash = "sha256:fbd3c8319de8e1dc79d346929cd71d523622da527cca14e0c1d257e31c2b8b10", size = 37178, upload-time = "2025-08-12T05:53:12.605Z" },
+ { url = "https://files.pythonhosted.org/packages/db/c8/b71eeb192c440d67a5a0449aaee2310a1a1e8eca41676046f99ed2487e9f/wrapt-1.17.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1a4120ae5705f673727d3253de3ed0e016f7cd78dc463db1b31e2463e1f3cf6", size = 39310, upload-time = "2025-08-12T05:53:11.106Z" },
+ { url = "https://files.pythonhosted.org/packages/45/20/2cda20fd4865fa40f86f6c46ed37a2a8356a7a2fde0773269311f2af56c7/wrapt-1.17.3-cp314-cp314-win_arm64.whl", hash = "sha256:507553480670cab08a800b9463bdb881b2edeed77dc677b0a5915e6106e91a58", size = 37266, upload-time = "2025-08-12T05:52:56.531Z" },
+ { url = "https://files.pythonhosted.org/packages/77/ed/dd5cf21aec36c80443c6f900449260b80e2a65cf963668eaef3b9accce36/wrapt-1.17.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ed7c635ae45cfbc1a7371f708727bf74690daedc49b4dba310590ca0bd28aa8a", size = 56544, upload-time = "2025-08-12T05:51:51.109Z" },
+ { url = "https://files.pythonhosted.org/packages/8d/96/450c651cc753877ad100c7949ab4d2e2ecc4d97157e00fa8f45df682456a/wrapt-1.17.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:249f88ed15503f6492a71f01442abddd73856a0032ae860de6d75ca62eed8067", size = 40283, upload-time = "2025-08-12T05:51:39.912Z" },
+ { url = "https://files.pythonhosted.org/packages/d1/86/2fcad95994d9b572db57632acb6f900695a648c3e063f2cd344b3f5c5a37/wrapt-1.17.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5a03a38adec8066d5a37bea22f2ba6bbf39fcdefbe2d91419ab864c3fb515454", size = 40366, upload-time = "2025-08-12T05:52:00.693Z" },
+ { url = "https://files.pythonhosted.org/packages/64/0e/f4472f2fdde2d4617975144311f8800ef73677a159be7fe61fa50997d6c0/wrapt-1.17.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5d4478d72eb61c36e5b446e375bbc49ed002430d17cdec3cecb36993398e1a9e", size = 108571, upload-time = "2025-08-12T05:52:44.521Z" },
+ { url = "https://files.pythonhosted.org/packages/cc/01/9b85a99996b0a97c8a17484684f206cbb6ba73c1ce6890ac668bcf3838fb/wrapt-1.17.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:223db574bb38637e8230eb14b185565023ab624474df94d2af18f1cdb625216f", size = 113094, upload-time = "2025-08-12T05:52:22.618Z" },
+ { url = "https://files.pythonhosted.org/packages/25/02/78926c1efddcc7b3aa0bc3d6b33a822f7d898059f7cd9ace8c8318e559ef/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e405adefb53a435f01efa7ccdec012c016b5a1d3f35459990afc39b6be4d5056", size = 110659, upload-time = "2025-08-12T05:52:24.057Z" },
+ { url = "https://files.pythonhosted.org/packages/dc/ee/c414501ad518ac3e6fe184753632fe5e5ecacdcf0effc23f31c1e4f7bfcf/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:88547535b787a6c9ce4086917b6e1d291aa8ed914fdd3a838b3539dc95c12804", size = 106946, upload-time = "2025-08-12T05:52:45.976Z" },
+ { url = "https://files.pythonhosted.org/packages/be/44/a1bd64b723d13bb151d6cc91b986146a1952385e0392a78567e12149c7b4/wrapt-1.17.3-cp314-cp314t-win32.whl", hash = "sha256:41b1d2bc74c2cac6f9074df52b2efbef2b30bdfe5f40cb78f8ca22963bc62977", size = 38717, upload-time = "2025-08-12T05:53:15.214Z" },
+ { url = "https://files.pythonhosted.org/packages/79/d9/7cfd5a312760ac4dd8bf0184a6ee9e43c33e47f3dadc303032ce012b8fa3/wrapt-1.17.3-cp314-cp314t-win_amd64.whl", hash = "sha256:73d496de46cd2cdbdbcce4ae4bcdb4afb6a11234a1df9c085249d55166b95116", size = 41334, upload-time = "2025-08-12T05:53:14.178Z" },
+ { url = "https://files.pythonhosted.org/packages/46/78/10ad9781128ed2f99dbc474f43283b13fea8ba58723e98844367531c18e9/wrapt-1.17.3-cp314-cp314t-win_arm64.whl", hash = "sha256:f38e60678850c42461d4202739f9bf1e3a737c7ad283638251e79cc49effb6b6", size = 38471, upload-time = "2025-08-12T05:52:57.784Z" },
+ { url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591, upload-time = "2025-08-12T05:53:20.674Z" },
+]
+
+[[package]]
+name = "zipp"
+version = "3.23.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" },
+]