mirror of https://github.com/FuzzingLabs/fuzzforge_ai.git (synced 2026-02-12 15:12:46 +00:00)

feat: FuzzForge AI - complete rewrite for OSS release
.gitignore  (vendored, new file, 9 lines)
@@ -0,0 +1,9 @@
*.egg-info
*.whl
.env
.mypy_cache
.pytest_cache
.ruff_cache
.venv
.vscode
__pycache__
.python-version  (new file, 1 line)
@@ -0,0 +1 @@
3.14.2
Makefile  (new file, 93 lines)
@@ -0,0 +1,93 @@
.PHONY: help install sync format lint typecheck test build-modules clean

SHELL := /bin/bash

# Default target
help:
	@echo "FuzzForge OSS Development Commands"
	@echo ""
	@echo "  make install        - Install all dependencies"
	@echo "  make sync           - Sync shared packages from upstream"
	@echo "  make format         - Format code with ruff"
	@echo "  make lint           - Lint code with ruff"
	@echo "  make typecheck      - Type check with mypy"
	@echo "  make test           - Run all tests"
	@echo "  make build-modules  - Build all module container images"
	@echo "  make clean          - Clean build artifacts"
	@echo ""

# Install all dependencies
install:
	uv sync

# Sync shared packages from upstream fuzzforge-core
sync:
	@if [ -z "$(UPSTREAM)" ]; then \
		echo "Usage: make sync UPSTREAM=/path/to/fuzzforge-core"; \
		exit 1; \
	fi
	./scripts/sync-upstream.sh $(UPSTREAM)

# Format all packages
format:
	@for pkg in packages/fuzzforge-*/; do \
		if [ -f "$$pkg/pyproject.toml" ]; then \
			echo "Formatting $$pkg..."; \
			cd "$$pkg" && uv run ruff format . && cd -; \
		fi; \
	done

# Lint all packages
lint:
	@for pkg in packages/fuzzforge-*/; do \
		if [ -f "$$pkg/pyproject.toml" ]; then \
			echo "Linting $$pkg..."; \
			cd "$$pkg" && uv run ruff check . && cd -; \
		fi; \
	done

# Type check all packages
typecheck:
	@for pkg in packages/fuzzforge-*/; do \
		if [ -f "$$pkg/pyproject.toml" ] && [ -f "$$pkg/mypy.ini" ]; then \
			echo "Type checking $$pkg..."; \
			cd "$$pkg" && uv run mypy . && cd -; \
		fi; \
	done

# Run all tests
test:
	@for pkg in packages/fuzzforge-*/; do \
		if [ -f "$$pkg/pytest.ini" ]; then \
			echo "Testing $$pkg..."; \
			cd "$$pkg" && uv run pytest && cd -; \
		fi; \
	done

# Build all module container images
build-modules:
	@echo "Building FuzzForge module images..."
	@echo "This uses self-contained storage at ~/.fuzzforge/containers/"
	@for module in fuzzforge-modules/*/; do \
		if [ -f "$$module/Dockerfile" ] && \
		   [ "$$module" != "fuzzforge-modules/fuzzforge-modules-sdk/" ] && \
		   [ "$$module" != "fuzzforge-modules/fuzzforge-module-template/" ]; then \
			name=$$(basename $$module); \
			version=$$(grep 'version' "$$module/pyproject.toml" 2>/dev/null | head -1 | sed 's/.*"\(.*\)".*/\1/' || echo "0.1.0"); \
			echo "Building $$name:$$version..."; \
			podman --root ~/.fuzzforge/containers/storage --runroot ~/.fuzzforge/containers/run \
				build -t "fuzzforge-$$name:$$version" "$$module" || exit 1; \
		fi; \
	done
	@echo ""
	@echo "✓ All modules built successfully!"
	@echo "  Images stored in: ~/.fuzzforge/containers/storage"

# Clean build artifacts
clean:
	find . -type d -name "__pycache__" -exec rm -rf {} + 2>/dev/null || true
	find . -type d -name ".pytest_cache" -exec rm -rf {} + 2>/dev/null || true
	find . -type d -name ".mypy_cache" -exec rm -rf {} + 2>/dev/null || true
	find . -type d -name ".ruff_cache" -exec rm -rf {} + 2>/dev/null || true
	find . -type d -name "*.egg-info" -exec rm -rf {} + 2>/dev/null || true
	find . -type f -name "*.pyc" -delete 2>/dev/null || true
README.md  (new file, 283 lines)
@@ -0,0 +1,283 @@
<h1 align="center">FuzzForge OSS</h1>
<h3 align="center">AI-Powered Security Research Orchestration via MCP</h3>

<p align="center">
  <a href="https://discord.gg/8XEX33UUwZ"><img src="https://img.shields.io/discord/1420767905255133267?logo=discord&label=Discord" alt="Discord"></a>
  <a href="LICENSE"><img src="https://img.shields.io/badge/license-Apache%202.0-blue" alt="License: Apache 2.0"></a>
  <a href="https://www.python.org/downloads/"><img src="https://img.shields.io/badge/python-3.12%2B-blue" alt="Python 3.12+"/></a>
  <a href="https://modelcontextprotocol.io"><img src="https://img.shields.io/badge/MCP-compatible-green" alt="MCP Compatible"/></a>
  <a href="https://fuzzforge.ai"><img src="https://img.shields.io/badge/Website-fuzzforge.ai-purple" alt="Website"/></a>
</p>

<p align="center">
  <strong>Let AI agents orchestrate your security research workflows locally</strong>
</p>

<p align="center">
  <sub>
    <a href="#-overview"><b>Overview</b></a> •
    <a href="#-features"><b>Features</b></a> •
    <a href="#-installation"><b>Installation</b></a> •
    <a href="USAGE.md"><b>Usage Guide</b></a> •
    <a href="#-modules"><b>Modules</b></a> •
    <a href="#-contributing"><b>Contributing</b></a>
  </sub>
</p>

---

> 🚧 **FuzzForge OSS is under active development.** Expect breaking changes and new features!

---

## 🚀 Overview

**FuzzForge OSS** is an open-source runtime that enables AI agents (GitHub Copilot, Claude, etc.) to orchestrate security research workflows through the **Model Context Protocol (MCP)**.

### The Core: Modules

At the heart of FuzzForge are **modules** - containerized security tools that AI agents can discover, configure, and orchestrate. Each module encapsulates a specific security capability (static analysis, fuzzing, crash analysis, etc.) and runs in an isolated container.

- **🔌 Plug & Play**: Modules are self-contained - just pull and run
- **🤖 AI-Native**: Designed for AI agent orchestration via MCP
- **🔗 Composable**: Chain modules together into automated workflows
- **📦 Extensible**: Build custom modules with the Python SDK

The OSS runtime handles module discovery, execution, and result collection. Security modules (developed separately) provide the actual security tooling - from static analyzers to fuzzers to crash triagers.

Instead of manually running security tools, describe what you want and let your AI assistant handle it.

### 🎬 Use Case: Rust Fuzzing Pipeline

> **Scenario**: Fuzz a Rust crate to discover vulnerabilities using AI-assisted harness generation and parallel fuzzing.

<table align="center">
  <tr>
    <th>1️⃣ Analyze, Generate & Validate Harnesses</th>
    <th>2️⃣ Run Parallel Continuous Fuzzing</th>
  </tr>
  <tr>
    <td><img src="assets/demopart2.gif" alt="FuzzForge Demo - Analysis Pipeline" width="100%"></td>
    <td><img src="assets/demopart1.gif" alt="FuzzForge Demo - Parallel Fuzzing" width="100%"></td>
  </tr>
  <tr>
    <td align="center"><sub>AI agent analyzes code, generates harnesses, and validates they compile</sub></td>
    <td align="center"><sub>Multiple fuzzing sessions run in parallel with live metrics</sub></td>
  </tr>
</table>

---

## ⭐ Support the Project

If you find FuzzForge useful, please **star the repo** to support development! 🚀

<a href="https://github.com/FuzzingLabs/fuzzforge-oss/stargazers">
  <img src="https://img.shields.io/github/stars/FuzzingLabs/fuzzforge-oss?style=social" alt="GitHub Stars">
</a>

---

## ✨ Features

| Feature | Description |
|---------|-------------|
| 🤖 **AI-Native** | Built for MCP - works with GitHub Copilot, Claude, and any MCP-compatible agent |
| 📦 **Containerized** | Each module runs in isolation via Podman or Docker |
| 🔄 **Continuous Mode** | Long-running tasks (fuzzing) with real-time metrics streaming |
| 🔗 **Workflows** | Chain multiple modules together in automated pipelines |
| 🛠️ **Extensible** | Create custom modules with the Python SDK |
| 🏠 **Local First** | All execution happens on your machine - no cloud required |
| 🔒 **Secure** | Sandboxed containers with no network access by default |

---

## 🏗️ Architecture

```
┌─────────────────────────────────────────────────────────────────┐
│                    AI Agent (Copilot/Claude)                     │
└───────────────────────────┬─────────────────────────────────────┘
                            │ MCP Protocol (stdio)
                            ▼
┌─────────────────────────────────────────────────────────────────┐
│                      FuzzForge MCP Server                        │
│  ┌─────────────┐  ┌──────────────┐  ┌────────────────────────┐   │
│  │list_modules │  │execute_module│  │start_continuous_module │   │
│  └─────────────┘  └──────────────┘  └────────────────────────┘   │
└───────────────────────────┬─────────────────────────────────────┘
                            │
                            ▼
┌─────────────────────────────────────────────────────────────────┐
│                        FuzzForge Runner                          │
│                Container Engine (Podman/Docker)                  │
└───────────────────────────┬─────────────────────────────────────┘
                            │
        ┌───────────────────┼───────────────────┐
        ▼                   ▼                   ▼
┌───────────────┐   ┌───────────────┐   ┌───────────────┐
│   Module A    │   │   Module B    │   │   Module C    │
│  (Container)  │   │  (Container)  │   │  (Container)  │
└───────────────┘   └───────────────┘   └───────────────┘
```

---

## 📦 Installation

### Prerequisites

- **Python 3.12+**
- **[uv](https://docs.astral.sh/uv/)** package manager
- **Podman** (recommended) or Docker

### Quick Install

```bash
# Clone the repository
git clone https://github.com/FuzzingLabs/fuzzforge-oss.git
cd fuzzforge-oss

# Install dependencies
uv sync

# Start Podman socket (Linux)
systemctl --user start podman.socket
```

### Configure MCP for Your AI Agent

```bash
# For GitHub Copilot
uv run fuzzforge mcp install copilot

# For Claude Code (CLI)
uv run fuzzforge mcp install claude-code

# For Claude Desktop (standalone app)
uv run fuzzforge mcp install claude-desktop

# Verify installation
uv run fuzzforge mcp status
```

**Restart your editor** and your AI agent will have access to FuzzForge tools!

---

## 📦 Modules

FuzzForge modules are containerized security tools that AI agents can orchestrate. The module ecosystem is designed around a simple principle: **the OSS runtime orchestrates, enterprise modules execute**.

### Module Ecosystem

| | FuzzForge OSS | FuzzForge Enterprise Modules |
|---|---|---|
| **What** | Runtime & MCP server | Security research modules |
| **License** | Apache 2.0 | BSL 1.1 (Business Source License) |
| **Compatibility** | ✅ Runs any compatible module | ✅ Works with OSS runtime |

**Enterprise modules** are developed separately and provide production-ready security tooling:

| Category | Modules | Description |
|----------|---------|-------------|
| 🔍 **Static Analysis** | Rust Analyzer, Solidity Analyzer, Cairo Analyzer | Code analysis and fuzzable function detection |
| 🎯 **Fuzzing** | Cargo Fuzzer, Honggfuzz, AFL++ | Coverage-guided fuzz testing |
| 💥 **Crash Analysis** | Crash Triager, Root Cause Analyzer | Automated crash deduplication and analysis |
| 🔐 **Vulnerability Detection** | Pattern Matcher, Taint Analyzer | Security vulnerability scanning |
| 📝 **Reporting** | Report Generator, SARIF Exporter | Automated security report generation |

> 💡 **Build your own modules!** The FuzzForge SDK allows you to create custom modules that integrate seamlessly with the OSS runtime. See [Creating Custom Modules](#-creating-custom-modules).

### Execution Modes

Modules run in two execution modes:

#### One-shot Execution

Run a module once and get results:

```python
result = execute_module("my-analyzer", assets_path="/path/to/project")
```

#### Continuous Execution

For long-running tasks like fuzzing, with real-time metrics:

```python
# Start continuous execution
session = start_continuous_module("my-fuzzer",
                                  assets_path="/path/to/project",
                                  configuration={"target": "my_target"})

# Check status with live metrics
status = get_continuous_status(session["session_id"])

# Stop and collect results
stop_continuous_module(session["session_id"])
```

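The two modes compose: a one-shot analysis pass can pick fuzzing targets, and its findings can seed continuous sessions. The sketch below is illustrative only - the module identifiers match the examples in this repository, but the shape of the analysis result is an assumption, not a documented schema:

```python
# Illustrative pipeline sketch. The "targets" key on the analysis result is an
# assumption for this example; `session_id` is used as in the example above.
analysis = execute_module("rust-analyzer", assets_path="/path/to/project")

for target in analysis.get("targets", []):  # assumed result key
    session = start_continuous_module(
        "cargo-fuzzer",
        assets_path="/path/to/project",
        configuration={"target": target},
    )
    print("started fuzzing session:", session["session_id"])
```
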
---

## 🛠️ Creating Custom Modules

Build your own security modules with the FuzzForge SDK:

```python
from fuzzforge_modules_sdk import FuzzForgeModule, FuzzForgeModuleResults


class MySecurityModule(FuzzForgeModule):
    def _run(self, resources):
        self.emit_event("started", target=resources[0].path)

        # Your analysis logic here
        results = self.analyze(resources)

        self.emit_progress(100, status="completed",
                           message="Analysis complete")
        return FuzzForgeModuleResults.SUCCESS
```

📖 See the [Module SDK Guide](fuzzforge-modules/fuzzforge-modules-sdk/README.md) for details.

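Once a custom module's image is built (for example via `make build-modules`, which picks up any module directory with a Dockerfile), it should be discoverable and runnable like any built-in module. The identifier below is hypothetical:

```python
# "my-security-module" is a hypothetical identifier for the module sketched above.
print(list_modules())
result = execute_module("my-security-module", assets_path="/path/to/project")
```
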
---

## 📁 Project Structure

```
fuzzforge-oss/
├── fuzzforge-cli/                 # Command-line interface
├── fuzzforge-common/              # Shared abstractions (containers, storage)
├── fuzzforge-mcp/                 # MCP server for AI agents
├── fuzzforge-modules/             # Security modules
│   └── fuzzforge-modules-sdk/     # Module development SDK
├── fuzzforge-runner/              # Local execution engine
├── fuzzforge-types/               # Type definitions & schemas
└── demo/                          # Demo projects for testing
```

---

## 🤝 Contributing

We welcome contributions from the community!

- 🐛 Report bugs via [GitHub Issues](../../issues)
- 💡 Suggest features or improvements
- 🔧 Submit pull requests
- 📦 Share your custom modules

See [CONTRIBUTING.md](CONTRIBUTING.md) for guidelines.

---

## 📄 License

Apache 2.0 - See [LICENSE](LICENSE) for details.

---

<p align="center">
  <strong>Built with ❤️ by <a href="https://fuzzinglabs.com">FuzzingLabs</a></strong>
</p>
USAGE.md  (new file, 439 lines)
@@ -0,0 +1,439 @@
# FuzzForge OSS Usage Guide

This guide covers everything you need to know to get started with FuzzForge OSS - from installation to running your first security research workflow with AI.

> **FuzzForge is designed to be used with AI agents** (GitHub Copilot, Claude, etc.) via MCP.
> The CLI is available for advanced users, but the primary experience is through natural language interaction with your AI assistant.

---

## Table of Contents

- [Quick Start](#quick-start)
- [Prerequisites](#prerequisites)
- [Installation](#installation)
- [Building Modules](#building-modules)
- [MCP Server Configuration](#mcp-server-configuration)
  - [GitHub Copilot](#github-copilot)
  - [Claude Code (CLI)](#claude-code-cli)
  - [Claude Desktop](#claude-desktop)
- [Using FuzzForge with AI](#using-fuzzforge-with-ai)
- [CLI Reference](#cli-reference)
- [Environment Variables](#environment-variables)
- [Troubleshooting](#troubleshooting)

---

## Quick Start

> **Prerequisites:** You need [uv](https://docs.astral.sh/uv/) and [Podman](https://podman.io/) installed.
> See the [Prerequisites](#prerequisites) section for installation instructions.

```bash
# 1. Clone and install
git clone https://github.com/FuzzingLabs/fuzzforge-oss.git
cd fuzzforge-oss
uv sync

# 2. Build the module images (one-time setup)
make build-modules

# 3. Install MCP for your AI agent
uv run fuzzforge mcp install copilot       # For VS Code + GitHub Copilot
# OR
uv run fuzzforge mcp install claude-code   # For Claude Code CLI

# 4. Restart your AI agent (VS Code, Claude, etc.)

# 5. Start talking to your AI:
#    "List available FuzzForge modules"
#    "Analyze this Rust crate for fuzzable functions"
#    "Start fuzzing the parse_input function"
```

> **Note:** FuzzForge uses self-contained container storage (`~/.fuzzforge/containers/`)
> which works automatically - no need to configure Podman sockets manually.

---

## Prerequisites

Before installing FuzzForge OSS, ensure you have:

- **Python 3.12+** - [Download Python](https://www.python.org/downloads/)
- **uv** package manager - [Install uv](https://docs.astral.sh/uv/)
- **Podman** - Container runtime (Docker also works, but Podman is recommended)

### Installing uv

```bash
# Linux/macOS
curl -LsSf https://astral.sh/uv/install.sh | sh

# Or with pip
pip install uv
```

### Installing Podman (Linux)

```bash
# Ubuntu/Debian
sudo apt update && sudo apt install -y podman

# Fedora/RHEL
sudo dnf install -y podman

# Arch Linux
sudo pacman -S podman
```

---

## Installation

### 1. Clone the Repository

```bash
git clone https://github.com/FuzzingLabs/fuzzforge-oss.git
cd fuzzforge-oss
```

### 2. Install Dependencies

```bash
uv sync
```

This installs all FuzzForge components in a virtual environment.

### 3. Verify Installation

```bash
uv run fuzzforge --help
```

---

## Building Modules

FuzzForge modules are containerized security tools. After cloning, you need to build them once:

### Build All Modules

```bash
# From the fuzzforge-oss directory
make build-modules
```

This builds all available modules:
- `fuzzforge-rust-analyzer` - Analyzes Rust code for fuzzable functions
- `fuzzforge-cargo-fuzzer` - Runs cargo-fuzz on Rust crates
- `fuzzforge-harness-validator` - Validates generated fuzzing harnesses
- `fuzzforge-crash-analyzer` - Analyzes crash inputs

### Build a Single Module

```bash
# Build a specific module
cd fuzzforge-modules/rust-analyzer
make build
```

### Verify Modules are Built

```bash
# List built module images
podman images | grep fuzzforge
```

You should see something like:
```
fuzzforge-rust-analyzer   0.1.0   abc123def456   2 minutes ago   850 MB
fuzzforge-cargo-fuzzer    0.1.0   789ghi012jkl   2 minutes ago   1.2 GB
...
```

---

## MCP Server Configuration

FuzzForge integrates with AI agents through the Model Context Protocol (MCP). Configure your preferred AI agent to use FuzzForge tools.

### GitHub Copilot

```bash
# That's it! Just run this command:
uv run fuzzforge mcp install copilot
```

The command auto-detects everything:
- **FuzzForge root** - Where FuzzForge is installed
- **Modules path** - Defaults to `fuzzforge-oss/fuzzforge-modules`
- **Podman socket** - Auto-detects `/run/user/<uid>/podman/podman.sock`

**Optional overrides** (usually not needed):
```bash
uv run fuzzforge mcp install copilot \
    --modules /path/to/modules \
    --engine docker   # if using Docker instead of Podman
```

**After installation:**
1. Restart VS Code
2. Open GitHub Copilot Chat
3. FuzzForge tools are now available!

### Claude Code (CLI)

```bash
uv run fuzzforge mcp install claude-code
```

Installs to `~/.claude.json` so FuzzForge tools are available from any directory.

**After installation:**
1. Run `claude` from any directory
2. FuzzForge tools are now available!

### Claude Desktop

```bash
# Automatic installation
uv run fuzzforge mcp install claude-desktop

# Verify
uv run fuzzforge mcp status
```

**After installation:**
1. Restart Claude Desktop
2. FuzzForge tools are now available!

### Check MCP Status

```bash
uv run fuzzforge mcp status
```

Shows configuration status for all supported AI agents:

```
┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━┓
┃ Agent                ┃ Config Path                               ┃ Status       ┃ FuzzForge Configured     ┃
┡━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━┩
│ GitHub Copilot       │ ~/.config/Code/User/mcp.json              │ ✓ Exists     │ ✓ Yes                    │
│ Claude Desktop       │ ~/.config/Claude/claude_desktop_config... │ Not found    │ -                        │
│ Claude Code          │ ~/.claude.json                            │ ✓ Exists     │ ✓ Yes                    │
└──────────────────────┴───────────────────────────────────────────┴──────────────┴──────────────────────────┘
```

### Generate Config Without Installing

```bash
# Preview the configuration that would be installed
uv run fuzzforge mcp generate copilot
uv run fuzzforge mcp generate claude-desktop
uv run fuzzforge mcp generate claude-code
```

### Remove MCP Configuration

```bash
uv run fuzzforge mcp uninstall copilot
uv run fuzzforge mcp uninstall claude-desktop
uv run fuzzforge mcp uninstall claude-code
```

---

## Using FuzzForge with AI

Once MCP is configured, you interact with FuzzForge through natural language with your AI assistant.

### Example Conversations

**Discover available tools:**
```
You: "What FuzzForge modules are available?"
AI:  Uses list_modules → "I found 4 modules: rust-analyzer, cargo-fuzzer,
     harness-validator, and crash-analyzer..."
```

**Analyze code for fuzzing targets:**
```
You: "Analyze this Rust crate for functions I should fuzz"
AI:  Uses execute_module("rust-analyzer") → "I found 3 good fuzzing candidates:
     - parse_input() in src/parser.rs - handles untrusted input
     - decode_message() in src/codec.rs - complex parsing logic
     ..."
```

**Generate and validate harnesses:**
```
You: "Generate a fuzzing harness for the parse_input function"
AI:  Creates harness code, then uses execute_module("harness-validator")
     → "Here's a harness that compiles successfully..."
```

**Run continuous fuzzing:**
```
You: "Start fuzzing parse_input for 10 minutes"
AI:  Uses start_continuous_module("cargo-fuzzer") → "Started fuzzing session abc123"

You: "How's the fuzzing going?"
AI:  Uses get_continuous_status("abc123") → "Running for 5 minutes:
     - 150,000 executions
     - 2 crashes found
     - 45% edge coverage"

You: "Stop and show me the crashes"
AI:  Uses stop_continuous_module("abc123") → "Found 2 unique crashes..."
```

### Available MCP Tools

| Tool | Description |
|------|-------------|
| `list_modules` | List all available security modules |
| `execute_module` | Run a module once and get results |
| `start_continuous_module` | Start a long-running module (e.g., fuzzing) |
| `get_continuous_status` | Check status of a continuous session |
| `stop_continuous_module` | Stop a continuous session |
| `list_continuous_sessions` | List all active sessions |
| `get_execution_results` | Retrieve results from an execution |
| `execute_workflow` | Run a multi-step workflow |

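Your AI agent calls these tools for you, but it helps to see how a typical session maps onto them. A rough sketch of the sequence behind the fuzzing conversation above (the tool names are real; apart from `session_id`, which appears in the README examples, the accessed fields and the argument to `get_execution_results` are assumptions):

```python
# Rough tool-call sequence behind the conversation above.
modules = list_modules()
analysis = execute_module("rust-analyzer", assets_path=".")

session = start_continuous_module(
    "cargo-fuzzer",
    assets_path=".",
    configuration={"target": "parse_input"},
)
status = get_continuous_status(session["session_id"])   # live metrics
stop_continuous_module(session["session_id"])
results = get_execution_results(session["session_id"])  # assumed: results keyed by session/execution id
```
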
---

## CLI Reference

> **Note:** The CLI is for advanced users. Most users should interact with FuzzForge through their AI assistant.

### MCP Commands

```bash
uv run fuzzforge mcp status              # Check configuration status
uv run fuzzforge mcp install <agent>     # Install MCP config
uv run fuzzforge mcp uninstall <agent>   # Remove MCP config
uv run fuzzforge mcp generate <agent>    # Preview config without installing
```

### Module Commands

```bash
uv run fuzzforge modules list                      # List available modules
uv run fuzzforge modules info <module>             # Show module details
uv run fuzzforge modules run <module> --assets .   # Run a module
```

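These commands are thin wrappers around the Python runner API. Based on the CLI source in this commit (`fuzzforge-cli/src/fuzzforge_cli/commands/modules.py`), the equivalent programmatic call looks roughly like this, assuming `runner` is a configured `fuzzforge_runner.Runner` and `project_path` points at your project:

```python
# Sketch of what `fuzzforge modules run` does internally (see modules.py in this commit).
import asyncio

result = asyncio.run(
    runner.execute_module(
        module_identifier="rust-analyzer",
        project_path=project_path,
        configuration={"example": "option"},  # parsed from the --config JSON string
        assets_path=None,                     # or a Path, as passed via --assets
    )
)
if result.success:
    print(result.execution_id, result.results_path)
else:
    print(result.error)
```
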
### Project Commands

### Project Commands

```bash
uv run fuzzforge project init              # Initialize a project
uv run fuzzforge project info              # Show project info
uv run fuzzforge project executions        # List executions
uv run fuzzforge project results <id>      # Get execution results
```

---

## Environment Variables

Configure FuzzForge using environment variables:

```bash
# Project paths
export FUZZFORGE_MODULES_PATH=/path/to/modules
export FUZZFORGE_STORAGE__PATH=/path/to/storage

# Container engine (uses self-contained storage by default)
export FUZZFORGE_ENGINE__TYPE=podman   # or docker
export FUZZFORGE_ENGINE__GRAPHROOT=~/.fuzzforge/containers/storage
export FUZZFORGE_ENGINE__RUNROOT=~/.fuzzforge/containers/run
```

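These variables mirror the runner's settings objects. For reference, the CLI in this commit (`fuzzforge-cli/src/fuzzforge_cli/application.py`) builds the same configuration programmatically, roughly like this:

```python
# Programmatic equivalent of the environment variables above,
# mirroring fuzzforge-cli/src/fuzzforge_cli/application.py.
from pathlib import Path

from fuzzforge_runner import Runner, Settings
from fuzzforge_runner.settings import EngineSettings, ProjectSettings, StorageSettings

settings = Settings(
    engine=EngineSettings(type="podman", socket=""),                      # FUZZFORGE_ENGINE__TYPE / __SOCKET
    storage=StorageSettings(path=Path.home() / ".fuzzforge" / "storage"), # FUZZFORGE_STORAGE__PATH
    project=ProjectSettings(
        default_path=Path.cwd(),
        modules_path=Path.home() / ".fuzzforge" / "modules",              # FUZZFORGE_MODULES_PATH
    ),
)
runner = Runner(settings)
```
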
---

## Troubleshooting

### Podman Socket Not Found

```
Error: Could not connect to Podman socket
```

**Solution:**
```bash
# Start the Podman socket
systemctl --user start podman.socket

# Check the socket path
echo /run/user/$(id -u)/podman/podman.sock
```

### Permission Denied on Socket

```
Error: Permission denied connecting to Podman socket
```

**Solution:**
```bash
# Ensure Podman is installed and your user can run containers
podman run --rm hello-world

# If using the system socket, ensure correct permissions
ls -la /run/user/$(id -u)/podman/
```

> **Note:** FuzzForge OSS uses self-contained storage (`~/.fuzzforge/containers/`) by default,
> which avoids most permission issues with the Podman socket.

### No Modules Found

```
No modules found.
```

**Solution:**
1. Build the modules first: `make build-modules`
2. Check the modules path: `uv run fuzzforge modules list`
3. Verify images exist: `podman images | grep fuzzforge`

### MCP Server Not Starting

Check the MCP configuration:
```bash
uv run fuzzforge mcp status
```

Verify the configuration file path exists and contains valid JSON.

### Module Container Fails to Build

```bash
# Build the module container manually to see errors
cd fuzzforge-modules/<module-name>
podman build -t <module-name> .
```

### Check Logs

FuzzForge stores execution logs in the storage directory:
```bash
ls -la ~/.fuzzforge/storage/<project-id>/<execution-id>/
```

---

## Next Steps

- 📖 Read the [Module SDK Guide](fuzzforge-modules/fuzzforge-modules-sdk/README.md) to create custom modules
- 🎬 Check the demos in the [README](README.md)
- 💬 Join our [Discord](https://discord.gg/8XEX33UUwZ) for support

---

<p align="center">
  <strong>Built with ❤️ by <a href="https://fuzzinglabs.com">FuzzingLabs</a></strong>
</p>
BIN  assets/demopart1.gif  (new file, 360 KiB; binary file not shown)
BIN  assets/demopart2.gif  (new file, 2.1 MiB; binary file not shown)
fuzzforge-cli/Makefile  (new file, 37 lines)
@@ -0,0 +1,37 @@
PACKAGE=$(word 1, $(shell uv version))
VERSION=$(word 2, $(shell uv version))

SOURCES=./src
TESTS=./tests

.PHONY: bandit clean cloc format mypy pytest ruff version

bandit:
	uv run bandit --recursive $(SOURCES)

clean:
	@find . -type d \( \
		-name '*.egg-info' \
		-o -name '.mypy_cache' \
		-o -name '.pytest_cache' \
		-o -name '.ruff_cache' \
		-o -name '__pycache__' \
	\) -printf 'removing directory %p\n' -exec rm -rf {} +

cloc:
	cloc $(SOURCES) $(TESTS)

format:
	uv run ruff format $(SOURCES) $(TESTS)

mypy:
	uv run mypy $(SOURCES) $(TESTS)

pytest:
	uv run pytest -vv $(TESTS)

ruff:
	uv run ruff check --fix $(SOURCES) $(TESTS)

version:
	@echo '$(PACKAGE)@$(VERSION)'
fuzzforge-cli/README.md  (new file, 3 lines)
@@ -0,0 +1,3 @@
# FuzzForge CLI

...
fuzzforge-cli/mypy.ini  (new file, 6 lines)
@@ -0,0 +1,6 @@
[mypy]
plugins = pydantic.mypy
strict = True
warn_unused_ignores = True
warn_redundant_casts = True
warn_return_any = True
fuzzforge-cli/pyproject.toml  (new file, 30 lines)
@@ -0,0 +1,30 @@
[project]
name = "fuzzforge-cli"
version = "0.0.1"
description = "FuzzForge CLI - Command-line interface for FuzzForge OSS."
authors = []
readme = "README.md"
requires-python = ">=3.14"
dependencies = [
    "fuzzforge-runner==0.0.1",
    "fuzzforge-types==0.0.1",
    "rich>=14.0.0",
    "typer==0.20.1",
]

[project.optional-dependencies]
lints = [
    "bandit==1.8.6",
    "mypy==1.18.2",
    "ruff==0.14.4",
]
tests = [
    "pytest==9.0.2",
]

[project.scripts]
fuzzforge = "fuzzforge_cli.__main__:main"

[tool.uv.sources]
fuzzforge-runner = { workspace = true }
fuzzforge-types = { workspace = true }
fuzzforge-cli/pytest.ini  (new file, 2 lines)
@@ -0,0 +1,2 @@
[pytest]
asyncio_mode = auto
fuzzforge-cli/ruff.toml  (new file, 15 lines)
@@ -0,0 +1,15 @@
line-length = 120

[lint]
select = [ "ALL" ]
ignore = [
    "COM812",  # conflicts with the formatter
    "D203",    # conflicts with 'D211'
    "D213",    # conflicts with 'D212'
]

[lint.per-file-ignores]
"tests/*" = [
    "PLR2004",  # allowing comparisons using unnamed numerical constants in tests
    "S101",     # allowing 'assert' statements in tests
]
fuzzforge-cli/src/fuzzforge_cli/__init__.py  (new file, 1 line)
@@ -0,0 +1 @@
"""FuzzForge CLI package."""
fuzzforge-cli/src/fuzzforge_cli/__main__.py  (new file, 12 lines)
@@ -0,0 +1,12 @@
"""Entry point for the FuzzForge CLI."""

from fuzzforge_cli.application import application


def main() -> None:
    """Run the FuzzForge CLI application."""
    application()


if __name__ == "__main__":
    main()
fuzzforge-cli/src/fuzzforge_cli/application.py  (new file, 96 lines)
@@ -0,0 +1,96 @@
"""FuzzForge CLI application."""

from pathlib import Path
from typing import Annotated

from fuzzforge_runner import Runner, Settings
from typer import Context as TyperContext
from typer import Option, Typer

from fuzzforge_cli.commands import mcp, modules, projects
from fuzzforge_cli.context import Context

application: Typer = Typer(
    name="fuzzforge",
    help="FuzzForge OSS - Security research orchestration platform.",
)


@application.callback()
def main(
    project_path: Annotated[
        Path,
        Option(
            "--project",
            "-p",
            envvar="FUZZFORGE_PROJECT__DEFAULT_PATH",
            help="Path to the FuzzForge project directory.",
        ),
    ] = Path.cwd(),
    modules_path: Annotated[
        Path,
        Option(
            "--modules",
            "-m",
            envvar="FUZZFORGE_MODULES_PATH",
            help="Path to the modules directory.",
        ),
    ] = Path.home() / ".fuzzforge" / "modules",
    storage_path: Annotated[
        Path,
        Option(
            "--storage",
            envvar="FUZZFORGE_STORAGE__PATH",
            help="Path to the storage directory.",
        ),
    ] = Path.home() / ".fuzzforge" / "storage",
    engine_type: Annotated[
        str,
        Option(
            "--engine",
            envvar="FUZZFORGE_ENGINE__TYPE",
            help="Container engine type (docker or podman).",
        ),
    ] = "podman",
    engine_socket: Annotated[
        str,
        Option(
            "--socket",
            envvar="FUZZFORGE_ENGINE__SOCKET",
            help="Container engine socket path.",
        ),
    ] = "",
    context: TyperContext = None,  # type: ignore[assignment]
) -> None:
    """FuzzForge OSS - Security research orchestration platform.

    Execute security research modules in isolated containers.

    """
    from fuzzforge_runner.settings import EngineSettings, ProjectSettings, StorageSettings

    settings = Settings(
        engine=EngineSettings(
            type=engine_type,  # type: ignore[arg-type]
            socket=engine_socket,
        ),
        storage=StorageSettings(
            path=storage_path,
        ),
        project=ProjectSettings(
            default_path=project_path,
            modules_path=modules_path,
        ),
    )

    runner = Runner(settings)

    context.obj = Context(
        runner=runner,
        project_path=project_path,
    )


application.add_typer(mcp.application)
application.add_typer(modules.application)
application.add_typer(projects.application)
fuzzforge-cli/src/fuzzforge_cli/commands/__init__.py  (new file, 1 line)
@@ -0,0 +1 @@
"""FuzzForge CLI command groups."""
fuzzforge-cli/src/fuzzforge_cli/commands/mcp.py  (new file, 527 lines)
@@ -0,0 +1,527 @@
"""MCP server configuration commands for FuzzForge CLI.

This module provides commands for setting up MCP server connections
with various AI agents (VS Code Copilot, Claude Code, etc.).

"""

from __future__ import annotations

import json
import os
import sys
from enum import StrEnum
from pathlib import Path
from typing import Annotated

from rich.console import Console
from rich.panel import Panel
from rich.syntax import Syntax
from rich.table import Table
from typer import Argument, Context, Option, Typer

application: Typer = Typer(
    name="mcp",
    help="MCP server configuration commands.",
)


class AIAgent(StrEnum):
    """Supported AI agents."""

    COPILOT = "copilot"  # GitHub Copilot in VS Code
    CLAUDE_DESKTOP = "claude-desktop"  # Claude Desktop app
    CLAUDE_CODE = "claude-code"  # Claude Code CLI (terminal)


def _get_copilot_mcp_path() -> Path:
    """Get the GitHub Copilot MCP configuration file path.

    GitHub Copilot uses VS Code's mcp.json for MCP servers.

    :returns: Path to the mcp.json file.

    """
    if sys.platform == "darwin":
        return Path.home() / "Library" / "Application Support" / "Code" / "User" / "mcp.json"
    elif sys.platform == "win32":
        return Path(os.environ.get("APPDATA", "")) / "Code" / "User" / "mcp.json"
    else:  # Linux
        return Path.home() / ".config" / "Code" / "User" / "mcp.json"


def _get_claude_desktop_mcp_path() -> Path:
    """Get the Claude Desktop MCP configuration file path.

    :returns: Path to the claude_desktop_config.json file.

    """
    if sys.platform == "darwin":
        return Path.home() / "Library" / "Application Support" / "Claude" / "claude_desktop_config.json"
    elif sys.platform == "win32":
        return Path(os.environ.get("APPDATA", "")) / "Claude" / "claude_desktop_config.json"
    else:  # Linux
        return Path.home() / ".config" / "Claude" / "claude_desktop_config.json"


def _get_claude_code_mcp_path(project_path: Path | None = None) -> Path:
    """Get the Claude Code MCP configuration file path.

    Claude Code uses .mcp.json in the project root for project-scoped servers.

    :param project_path: Project directory path. If None, uses current directory.
    :returns: Path to the .mcp.json file.

    """
    if project_path:
        return project_path / ".mcp.json"
    return Path.cwd() / ".mcp.json"


def _get_claude_code_user_mcp_path() -> Path:
    """Get the Claude Code user-scoped MCP configuration file path.

    :returns: Path to ~/.claude.json file.

    """
    return Path.home() / ".claude.json"


def _detect_podman_socket() -> str:
    """Auto-detect the Podman socket path.

    :returns: Path to the Podman socket.

    """
    uid = os.getuid()
    socket_paths = [
        f"/run/user/{uid}/podman/podman.sock",
        "/run/podman/podman.sock",
        "/var/run/podman/podman.sock",
    ]

    for path in socket_paths:
        if Path(path).exists():
            return path

    # Default to user socket
    return f"/run/user/{uid}/podman/podman.sock"


def _detect_docker_socket() -> str:
    """Auto-detect the Docker socket path.

    :returns: Path to the Docker socket.

    """
    socket_paths = [
        "/var/run/docker.sock",
        Path.home() / ".docker" / "run" / "docker.sock",
    ]

    for path in socket_paths:
        if Path(path).exists():
            return str(path)

    return "/var/run/docker.sock"


def _find_fuzzforge_root() -> Path:
    """Find the FuzzForge installation root.

    :returns: Path to fuzzforge-oss directory.

    """
    # Try to find from current file location
    current = Path(__file__).resolve()

    # Walk up to find fuzzforge-oss root
    for parent in current.parents:
        if (parent / "fuzzforge-mcp").is_dir() and (parent / "fuzzforge-runner").is_dir():
            return parent

    # Fall back to cwd
    return Path.cwd()


def _generate_mcp_config(
    fuzzforge_root: Path,
    modules_path: Path,
    engine_type: str,
    engine_socket: str,
) -> dict:
    """Generate MCP server configuration.

    :param fuzzforge_root: Path to fuzzforge-oss installation.
    :param modules_path: Path to the modules directory.
    :param engine_type: Container engine type (podman or docker).
    :param engine_socket: Container engine socket path.
    :returns: MCP configuration dictionary.

    """
    venv_python = fuzzforge_root / ".venv" / "bin" / "python"

    # Use uv run if no venv, otherwise use venv python directly
    if venv_python.exists():
        command = str(venv_python)
        args = ["-m", "fuzzforge_mcp"]
    else:
        command = "uv"
        args = ["--directory", str(fuzzforge_root), "run", "fuzzforge-mcp"]

    # Self-contained storage paths for FuzzForge containers
    # This isolates FuzzForge from system Podman and avoids snap issues
    fuzzforge_home = Path.home() / ".fuzzforge"
    graphroot = fuzzforge_home / "containers" / "storage"
    runroot = fuzzforge_home / "containers" / "run"

    return {
        "type": "stdio",
        "command": command,
        "args": args,
        "cwd": str(fuzzforge_root),
        "env": {
            "FUZZFORGE_MODULES_PATH": str(modules_path),
            "FUZZFORGE_ENGINE__TYPE": engine_type,
            "FUZZFORGE_ENGINE__GRAPHROOT": str(graphroot),
            "FUZZFORGE_ENGINE__RUNROOT": str(runroot),
        },
    }


@application.command(
    help="Show current MCP configuration status.",
    name="status",
)
def status(context: Context) -> None:
    """Show MCP configuration status for all supported agents.

    :param context: Typer context.

    """
    console = Console()

    table = Table(title="MCP Configuration Status")
    table.add_column("Agent", style="cyan")
    table.add_column("Config Path")
    table.add_column("Status")
    table.add_column("FuzzForge Configured")

    fuzzforge_root = _find_fuzzforge_root()

    agents = [
        ("GitHub Copilot", _get_copilot_mcp_path(), "servers"),
        ("Claude Desktop", _get_claude_desktop_mcp_path(), "mcpServers"),
        ("Claude Code", _get_claude_code_user_mcp_path(), "mcpServers"),
    ]

    for name, config_path, servers_key in agents:
        if config_path.exists():
            try:
                config = json.loads(config_path.read_text())
                servers = config.get(servers_key, {})
                has_fuzzforge = "fuzzforge" in servers
                table.add_row(
                    name,
                    str(config_path),
                    "[green]✓ Exists[/green]",
                    "[green]✓ Yes[/green]" if has_fuzzforge else "[yellow]✗ No[/yellow]",
                )
            except json.JSONDecodeError:
                table.add_row(
                    name,
                    str(config_path),
                    "[red]✗ Invalid JSON[/red]",
                    "[dim]-[/dim]",
                )
        else:
            table.add_row(
                name,
                str(config_path),
                "[dim]Not found[/dim]",
                "[dim]-[/dim]",
            )

    console.print(table)

    # Show detected environment
    console.print()
    console.print("[bold]Detected Environment:[/bold]")
    console.print(f"  FuzzForge Root: {_find_fuzzforge_root()}")
    console.print(f"  Podman Socket: {_detect_podman_socket()}")
    console.print(f"  Docker Socket: {_detect_docker_socket()}")


@application.command(
    help="Generate MCP configuration for an AI agent.",
    name="generate",
)
def generate(
    context: Context,
    agent: Annotated[
        AIAgent,
        Argument(
            help="AI agent to generate config for (copilot, claude-desktop, or claude-code).",
        ),
    ],
    modules_path: Annotated[
        Path | None,
        Option(
            "--modules",
            "-m",
            help="Path to the modules directory.",
        ),
    ] = None,
    engine: Annotated[
        str,
        Option(
            "--engine",
            "-e",
            help="Container engine (podman or docker).",
        ),
    ] = "podman",
) -> None:
    """Generate MCP configuration and print to stdout.

    :param context: Typer context.
    :param agent: Target AI agent.
    :param modules_path: Override modules path.
    :param engine: Container engine type.

    """
    console = Console()
    fuzzforge_root = _find_fuzzforge_root()

    # Use defaults if not specified
    resolved_modules = modules_path or (fuzzforge_root / "fuzzforge-modules")

    # Detect socket
    if engine == "podman":
        socket = _detect_podman_socket()
    else:
        socket = _detect_docker_socket()

    # Generate config
    server_config = _generate_mcp_config(
        fuzzforge_root=fuzzforge_root,
        modules_path=resolved_modules,
        engine_type=engine,
        engine_socket=socket,
    )

    # Format based on agent
    if agent == AIAgent.COPILOT:
        full_config = {"servers": {"fuzzforge": server_config}}
    else:  # Claude Desktop or Claude Code
        full_config = {"mcpServers": {"fuzzforge": server_config}}

    config_json = json.dumps(full_config, indent=4)

    console.print(Panel(
        Syntax(config_json, "json", theme="monokai"),
        title=f"MCP Configuration for {agent.value}",
    ))

    # Show where to save it
    if agent == AIAgent.COPILOT:
        config_path = _get_copilot_mcp_path()
    elif agent == AIAgent.CLAUDE_CODE:
        config_path = _get_claude_code_mcp_path(fuzzforge_root)
    else:  # Claude Desktop
        config_path = _get_claude_desktop_mcp_path()

    console.print()
    console.print(f"[bold]Save to:[/bold] {config_path}")
    console.print()
    console.print("[dim]Or run 'fuzzforge mcp install' to install automatically.[/dim]")


@application.command(
    help="Install MCP configuration for an AI agent.",
    name="install",
)
def install(
    context: Context,
    agent: Annotated[
        AIAgent,
        Argument(
            help="AI agent to install config for (copilot, claude-desktop, or claude-code).",
        ),
    ],
    modules_path: Annotated[
        Path | None,
        Option(
            "--modules",
            "-m",
            help="Path to the modules directory.",
        ),
    ] = None,
    engine: Annotated[
        str,
        Option(
            "--engine",
            "-e",
            help="Container engine (podman or docker).",
        ),
    ] = "podman",
    force: Annotated[
        bool,
        Option(
            "--force",
            "-f",
            help="Overwrite existing fuzzforge configuration.",
        ),
    ] = False,
) -> None:
    """Install MCP configuration for the specified AI agent.

    This will create or update the MCP configuration file, adding the
    fuzzforge server configuration.

    :param context: Typer context.
    :param agent: Target AI agent.
    :param modules_path: Override modules path.
    :param engine: Container engine type.
    :param force: Overwrite existing configuration.

    """
    console = Console()
    fuzzforge_root = _find_fuzzforge_root()

    # Determine config path
    if agent == AIAgent.COPILOT:
        config_path = _get_copilot_mcp_path()
        servers_key = "servers"
    elif agent == AIAgent.CLAUDE_CODE:
        config_path = _get_claude_code_user_mcp_path()
        servers_key = "mcpServers"
    else:  # Claude Desktop
        config_path = _get_claude_desktop_mcp_path()
        servers_key = "mcpServers"

    # Use defaults if not specified
    resolved_modules = modules_path or (fuzzforge_root / "fuzzforge-modules")

    # Detect socket
    if engine == "podman":
        socket = _detect_podman_socket()
    else:
        socket = _detect_docker_socket()

    # Generate server config
    server_config = _generate_mcp_config(
        fuzzforge_root=fuzzforge_root,
        modules_path=resolved_modules,
        engine_type=engine,
        engine_socket=socket,
    )

    # Load existing config or create new
    if config_path.exists():
        try:
            existing_config = json.loads(config_path.read_text())
        except json.JSONDecodeError:
            console.print(f"[red]Error: Invalid JSON in {config_path}[/red]")
            console.print("[dim]Please fix the file manually or delete it.[/dim]")
            raise SystemExit(1)

        # Check if fuzzforge already exists
        servers = existing_config.get(servers_key, {})
        if "fuzzforge" in servers and not force:
            console.print("[yellow]FuzzForge is already configured.[/yellow]")
            console.print("[dim]Use --force to overwrite existing configuration.[/dim]")
            raise SystemExit(1)

        # Add/update fuzzforge
        if servers_key not in existing_config:
            existing_config[servers_key] = {}
        existing_config[servers_key]["fuzzforge"] = server_config

        full_config = existing_config
    else:
        # Create new config
        config_path.parent.mkdir(parents=True, exist_ok=True)
        full_config = {servers_key: {"fuzzforge": server_config}}

    # Write config
    config_path.write_text(json.dumps(full_config, indent=4))

    console.print(f"[green]✓ Installed FuzzForge MCP configuration for {agent.value}[/green]")
    console.print()
    console.print(f"[bold]Configuration file:[/bold] {config_path}")
    console.print()
    console.print("[bold]Settings:[/bold]")
    console.print(f"  Modules Path: {resolved_modules}")
    console.print(f"  Engine: {engine}")
    console.print(f"  Socket: {socket}")
    console.print()

    console.print("[bold]Next steps:[/bold]")
    if agent == AIAgent.COPILOT:
        console.print("  1. Restart VS Code")
        console.print("  2. Open Copilot Chat and look for FuzzForge tools")
    elif agent == AIAgent.CLAUDE_CODE:
        console.print("  1. Run 'claude' from any directory")
        console.print("  2. FuzzForge tools will be available")
    else:  # Claude Desktop
        console.print("  1. Restart Claude Desktop")
        console.print("  2. The fuzzforge MCP server will be available")


@application.command(
    help="Remove MCP configuration for an AI agent.",
    name="uninstall",
)
def uninstall(
    context: Context,
    agent: Annotated[
        AIAgent,
        Argument(
            help="AI agent to remove config from (copilot, claude-desktop, or claude-code).",
        ),
    ],
) -> None:
    """Remove FuzzForge MCP configuration from the specified AI agent.

    :param context: Typer context.
    :param agent: Target AI agent.

    """
    console = Console()
    fuzzforge_root = _find_fuzzforge_root()

    # Determine config path
    if agent == AIAgent.COPILOT:
        config_path = _get_copilot_mcp_path()
        servers_key = "servers"
    elif agent == AIAgent.CLAUDE_CODE:
        config_path = _get_claude_code_user_mcp_path()
        servers_key = "mcpServers"
    else:  # Claude Desktop
        config_path = _get_claude_desktop_mcp_path()
        servers_key = "mcpServers"

    if not config_path.exists():
        console.print(f"[yellow]Configuration file not found: {config_path}[/yellow]")
        return

    try:
        config = json.loads(config_path.read_text())
    except json.JSONDecodeError:
        console.print(f"[red]Error: Invalid JSON in {config_path}[/red]")
        raise SystemExit(1)

    servers = config.get(servers_key, {})
    if "fuzzforge" not in servers:
        console.print("[yellow]FuzzForge is not configured.[/yellow]")
        return

    # Remove fuzzforge
    del servers["fuzzforge"]

    # Write back
    config_path.write_text(json.dumps(config, indent=4))

    console.print(f"[green]✓ Removed FuzzForge MCP configuration from {agent.value}[/green]")
    console.print()
    console.print("[dim]Restart your AI agent for changes to take effect.[/dim]")
fuzzforge-cli/src/fuzzforge_cli/commands/modules.py  (new file, 166 lines)
@@ -0,0 +1,166 @@
"""Module management commands for FuzzForge CLI."""

import asyncio
from pathlib import Path
from typing import Annotated, Any

from rich.console import Console
from rich.table import Table
from typer import Argument, Context, Option, Typer

from fuzzforge_cli.context import get_project_path, get_runner

application: Typer = Typer(
    name="modules",
    help="Module management commands.",
)


@application.command(
    help="List available modules.",
    name="list",
)
def list_modules(
    context: Context,
) -> None:
    """List all available modules.

    :param context: Typer context.

    """
    runner = get_runner(context)
    modules = runner.list_modules()

    console = Console()

    if not modules:
        console.print("[yellow]No modules found.[/yellow]")
        console.print(f" Modules directory: {runner.settings.modules_path}")
        return

    table = Table(title="Available Modules")
    table.add_column("Identifier", style="cyan")
    table.add_column("Available")
    table.add_column("Description")

    for module in modules:
        table.add_row(
            module.identifier,
            "✓" if module.available else "✗",
            module.description or "-",
        )

    console.print(table)


@application.command(
    help="Execute a module.",
    name="run",
)
def run_module(
    context: Context,
    module_identifier: Annotated[
        str,
        Argument(
            help="Identifier of the module to execute.",
        ),
    ],
    assets_path: Annotated[
        Path | None,
        Option(
            "--assets",
            "-a",
            help="Path to input assets.",
        ),
    ] = None,
    config: Annotated[
        str | None,
        Option(
            "--config",
            "-c",
            help="Module configuration as JSON string.",
        ),
    ] = None,
) -> None:
    """Execute a module.

    :param context: Typer context.
    :param module_identifier: Module to execute.
    :param assets_path: Optional path to input assets.
    :param config: Optional JSON configuration.

    """
    import json

    runner = get_runner(context)
    project_path = get_project_path(context)

    configuration: dict[str, Any] | None = None
    if config:
        try:
            configuration = json.loads(config)
        except json.JSONDecodeError as e:
            console = Console()
            console.print(f"[red]✗[/red] Invalid JSON configuration: {e}")
            return

    console = Console()
    console.print(f"[blue]→[/blue] Executing module: {module_identifier}")

    async def execute() -> None:
        result = await runner.execute_module(
            module_identifier=module_identifier,
            project_path=project_path,
            configuration=configuration,
            assets_path=assets_path,
        )

        if result.success:
            console.print("[green]✓[/green] Module execution completed")
            console.print(f" Execution ID: {result.execution_id}")
            console.print(f" Results: {result.results_path}")
        else:
            console.print("[red]✗[/red] Module execution failed")
            console.print(f" Error: {result.error}")

    asyncio.run(execute())


@application.command(
    help="Show module information.",
    name="info",
)
def module_info(
    context: Context,
    module_identifier: Annotated[
        str,
        Argument(
            help="Identifier of the module.",
        ),
    ],
) -> None:
    """Show information about a specific module.

    :param context: Typer context.
    :param module_identifier: Module to get info for.

    """
    runner = get_runner(context)
    module = runner.get_module_info(module_identifier)

    console = Console()

    if module is None:
        console.print(f"[red]✗[/red] Module not found: {module_identifier}")
        return

    table = Table(title=f"Module: {module.identifier}")
    table.add_column("Property", style="cyan")
    table.add_column("Value")

    table.add_row("Identifier", module.identifier)
    table.add_row("Available", "Yes" if module.available else "No")
    table.add_row("Description", module.description or "-")
    table.add_row("Version", module.version or "-")

    console.print(table)
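The commands above resolve everything through `context.obj`, so they can be exercised in-process with Typer's test runner. The sketch below is illustrative only: the stub runner and its `modules_path` value are hypothetical, and the real `Runner` is wired up by the root CLI application rather than constructed like this.

# Illustrative sketch: exercising the `modules list` command with a stub runner.
from pathlib import Path

from typer.testing import CliRunner

from fuzzforge_cli.commands.modules import application
from fuzzforge_cli.context import Context


class StubSettings:
    modules_path = Path.home() / ".fuzzforge" / "modules"  # hypothetical location


class StubRunner:
    settings = StubSettings()

    def list_modules(self) -> list:
        return []  # pretend no modules are installed yet


cli_runner = CliRunner()
result = cli_runner.invoke(
    application,
    ["list"],
    obj=Context(runner=StubRunner(), project_path=Path.cwd()),
)
print(result.output)  # prints "No modules found." plus the modules directory hint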
186
fuzzforge-cli/src/fuzzforge_cli/commands/projects.py
Normal file
@@ -0,0 +1,186 @@
"""Project management commands for FuzzForge CLI."""

from pathlib import Path
from typing import Annotated

from rich.console import Console
from rich.table import Table
from typer import Argument, Context, Option, Typer

from fuzzforge_cli.context import get_project_path, get_runner

application: Typer = Typer(
    name="project",
    help="Project management commands.",
)


@application.command(
    help="Initialize a new FuzzForge project.",
    name="init",
)
def init_project(
    context: Context,
    path: Annotated[
        Path | None,
        Argument(
            help="Path to initialize the project in. Defaults to current directory.",
        ),
    ] = None,
) -> None:
    """Initialize a new FuzzForge project.

    Creates the necessary storage directories for the project.

    :param context: Typer context.
    :param path: Path to initialize (defaults to current directory).

    """
    runner = get_runner(context)
    project_path = path or get_project_path(context)

    storage_path = runner.init_project(project_path)

    console = Console()
    console.print(f"[green]✓[/green] Project initialized at {project_path}")
    console.print(f" Storage: {storage_path}")


@application.command(
    help="Set project assets.",
    name="assets",
)
def set_assets(
    context: Context,
    assets_path: Annotated[
        Path,
        Argument(
            help="Path to assets file or directory.",
        ),
    ],
) -> None:
    """Set the initial assets for the project.

    :param context: Typer context.
    :param assets_path: Path to assets.

    """
    runner = get_runner(context)
    project_path = get_project_path(context)

    stored_path = runner.set_project_assets(project_path, assets_path)

    console = Console()
    console.print(f"[green]✓[/green] Assets stored from {assets_path}")
    console.print(f" Location: {stored_path}")


@application.command(
    help="Show project information.",
    name="info",
)
def show_info(
    context: Context,
) -> None:
    """Show information about the current project.

    :param context: Typer context.

    """
    runner = get_runner(context)
    project_path = get_project_path(context)

    executions = runner.list_executions(project_path)
    assets_path = runner.storage.get_project_assets_path(project_path)

    console = Console()
    table = Table(title=f"Project: {project_path.name}")
    table.add_column("Property", style="cyan")
    table.add_column("Value")

    table.add_row("Path", str(project_path))
    table.add_row("Has Assets", "Yes" if assets_path else "No")
    table.add_row("Assets Path", str(assets_path) if assets_path else "-")
    table.add_row("Executions", str(len(executions)))

    console.print(table)


@application.command(
    help="List all executions.",
    name="executions",
)
def list_executions(
    context: Context,
) -> None:
    """List all executions for the project.

    :param context: Typer context.

    """
    runner = get_runner(context)
    project_path = get_project_path(context)

    executions = runner.list_executions(project_path)

    console = Console()

    if not executions:
        console.print("[yellow]No executions found.[/yellow]")
        return

    table = Table(title="Executions")
    table.add_column("ID", style="cyan")
    table.add_column("Has Results")

    for exec_id in executions:
        has_results = runner.get_execution_results(project_path, exec_id) is not None
        table.add_row(exec_id, "✓" if has_results else "-")

    console.print(table)


@application.command(
    help="Get execution results.",
    name="results",
)
def get_results(
    context: Context,
    execution_id: Annotated[
        str,
        Argument(
            help="Execution ID to get results for.",
        ),
    ],
    extract_to: Annotated[
        Path | None,
        Option(
            "--extract",
            "-x",
            help="Extract results to this directory.",
        ),
    ] = None,
) -> None:
    """Get results for a specific execution.

    :param context: Typer context.
    :param execution_id: Execution ID.
    :param extract_to: Optional directory to extract to.

    """
    runner = get_runner(context)
    project_path = get_project_path(context)

    results_path = runner.get_execution_results(project_path, execution_id)

    console = Console()

    if results_path is None:
        console.print(f"[red]✗[/red] No results found for execution {execution_id}")
        return

    console.print(f"[green]✓[/green] Results: {results_path}")

    if extract_to:
        extracted = runner.extract_results(results_path, extract_to)
        console.print(f" Extracted to: {extracted}")
64
fuzzforge-cli/src/fuzzforge_cli/context.py
Normal file
@@ -0,0 +1,64 @@
"""FuzzForge CLI context management."""

from __future__ import annotations

from pathlib import Path
from typing import TYPE_CHECKING, cast

from fuzzforge_runner import Runner, Settings

if TYPE_CHECKING:
    from typer import Context as TyperContext


class Context:
    """CLI context holding the runner instance and the current project path."""

    _runner: Runner
    _project_path: Path

    def __init__(self, runner: Runner, project_path: Path) -> None:
        """Initialize an instance of the class.

        :param runner: FuzzForge runner instance.
        :param project_path: Path to the current project.

        """
        self._runner = runner
        self._project_path = project_path

    def get_runner(self) -> Runner:
        """Get the runner instance.

        :return: Runner instance.

        """
        return self._runner

    def get_project_path(self) -> Path:
        """Get the current project path.

        :return: Project path.

        """
        return self._project_path


def get_runner(context: TyperContext) -> Runner:
    """Get runner from Typer context.

    :param context: Typer context.
    :return: Runner instance.

    """
    return cast("Context", context.obj).get_runner()


def get_project_path(context: TyperContext) -> Path:
    """Get project path from Typer context.

    :param context: Typer context.
    :return: Project path.

    """
    return cast("Context", context.obj).get_project_path()
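For these helpers to work, the root CLI has to put a `Context` on `ctx.obj` before any subcommand runs. A minimal wiring sketch follows; the `fuzzforge` app name and the `Settings()` / `Runner(settings)` construction are assumptions (the real entry point lives elsewhere in the package), only `Context` and the two sub-apps come from this commit.

# Hypothetical wiring sketch: how a root Typer app could populate ctx.obj so
# that get_runner()/get_project_path() resolve in every subcommand.
from pathlib import Path

from typer import Context as TyperContext, Typer

from fuzzforge_cli.commands import modules, projects
from fuzzforge_cli.context import Context
from fuzzforge_runner import Runner, Settings

app = Typer(name="fuzzforge")
app.add_typer(modules.application)
app.add_typer(projects.application)


@app.callback()
def main(ctx: TyperContext) -> None:
    """Attach the shared runner and project path to the Typer context."""
    settings = Settings()  # assumption: default-constructible settings
    ctx.obj = Context(runner=Runner(settings), project_path=Path.cwd())  # assumed constructor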
18
fuzzforge-cli/src/fuzzforge_cli/utilities.py
Normal file
@@ -0,0 +1,18 @@
"""CLI utility functions."""

from rich.console import Console
from rich.table import Table
from typer import Exit


def on_error(message: str) -> None:
    """Display an error message and exit.

    :param message: Error message to display.

    """
    table = Table()
    table.add_column("Error")
    table.add_row(message)
    Console().print(table)
    raise Exit(code=1)
1
fuzzforge-cli/tests/__init__.py
Normal file
@@ -0,0 +1 @@
"""TODO."""
1
fuzzforge-cli/tests/conftest.py
Normal file
@@ -0,0 +1 @@
"""TODO."""
42
fuzzforge-common/Makefile
Normal file
@@ -0,0 +1,42 @@
PACKAGE=$(word 1, $(shell uv version))
VERSION=$(word 2, $(shell uv version))

ARTIFACTS?=./dist

SOURCES=./src
TESTS=./tests

.PHONY: bandit clean cloc format mypy pytest ruff version wheel

bandit:
	uv run bandit --recursive $(SOURCES)

clean:
	@find . -type d \( \
		-name '*.egg-info' \
		-o -name '.mypy_cache' \
		-o -name '.pytest_cache' \
		-o -name '.ruff_cache' \
		-o -name '__pycache__' \
	\) -printf 'removing directory %p\n' -exec rm -rf {} +

cloc:
	cloc $(SOURCES)

format:
	uv run ruff format $(SOURCES) $(TESTS)

mypy:
	uv run mypy $(SOURCES)

pytest:
	uv run pytest $(TESTS)

ruff:
	uv run ruff check --fix $(SOURCES) $(TESTS)

version:
	@echo '$(PACKAGE)@$(VERSION)'

wheel:
	uv build --out-dir $(ARTIFACTS)
3
fuzzforge-common/README.md
Normal file
@@ -0,0 +1,3 @@
# FuzzForge Common

...
12
fuzzforge-common/mypy.ini
Normal file
@@ -0,0 +1,12 @@
[mypy]
plugins = pydantic.mypy
strict = True
warn_unused_ignores = True
warn_redundant_casts = True
warn_return_any = True

[mypy-botocore.*]
ignore_missing_imports = True

[mypy-boto3.*]
ignore_missing_imports = True
25
fuzzforge-common/pyproject.toml
Normal file
@@ -0,0 +1,25 @@
[project]
name = "fuzzforge-common"
version = "0.0.1"
description = "FuzzForge's common types and utilities."
authors = []
readme = "README.md"
requires-python = ">=3.14"
dependencies = [
    "fuzzforge-types==0.0.1",
    "podman==5.6.0",
    "pydantic==2.12.4",
]

[project.optional-dependencies]
lints = [
    "bandit==1.8.6",
    "mypy==1.18.2",
    "ruff==0.14.4",
]
tests = [
    "pytest==9.0.2",
]

[tool.uv.sources]
fuzzforge-types = { workspace = true }
3
fuzzforge-common/pytest.ini
Normal file
@@ -0,0 +1,3 @@
[pytest]
env =
    DOCKER_HOST=unix:///run/user/1000/podman/podman.sock
20
fuzzforge-common/ruff.toml
Normal file
@@ -0,0 +1,20 @@
line-length = 120

[lint]
select = [ "ALL" ]
ignore = [
    "COM812", # conflicts with the formatter
    "D100", # ignoring missing docstrings in public modules
    "D104", # ignoring missing docstrings in public packages
    "D203", # conflicts with 'D211'
    "D213", # conflicts with 'D212'
    "TD002", # ignoring missing author in 'TODO' statements
    "TD003", # ignoring missing issue link in 'TODO' statements
]

[lint.per-file-ignores]
"tests/*" = [
    "ANN401", # allowing 'typing.Any' to be used to type function parameters in tests
    "PLR2004", # allowing comparisons using unnamed numerical constants in tests
    "S101", # allowing 'assert' statements in tests
]
54
fuzzforge-common/src/fuzzforge_common/__init__.py
Normal file
@@ -0,0 +1,54 @@
"""FuzzForge Common - Shared abstractions and implementations for FuzzForge.

This package provides:
- Sandbox engine abstractions (Podman, Docker)
- Storage abstractions (S3) - requires 'storage' extra
- Common exceptions

Example usage:
    from fuzzforge_common import (
        AbstractFuzzForgeSandboxEngine,
        ImageInfo,
        Podman,
        PodmanConfiguration,
    )

    # For storage (requires boto3):
    from fuzzforge_common.storage import Storage
"""

from fuzzforge_common.exceptions import FuzzForgeError
from fuzzforge_common.sandboxes import (
    AbstractFuzzForgeEngineConfiguration,
    AbstractFuzzForgeSandboxEngine,
    Docker,
    DockerConfiguration,
    FuzzForgeSandboxEngines,
    ImageInfo,
    Podman,
    PodmanConfiguration,
)

# Storage exceptions are always available (no boto3 required)
from fuzzforge_common.storage.exceptions import (
    FuzzForgeStorageError,
    StorageConnectionError,
    StorageDownloadError,
    StorageUploadError,
)

__all__ = [
    "AbstractFuzzForgeEngineConfiguration",
    "AbstractFuzzForgeSandboxEngine",
    "Docker",
    "DockerConfiguration",
    "FuzzForgeError",
    "FuzzForgeSandboxEngines",
    "FuzzForgeStorageError",
    "ImageInfo",
    "Podman",
    "PodmanConfiguration",
    "StorageConnectionError",
    "StorageDownloadError",
    "StorageUploadError",
]
24
fuzzforge-common/src/fuzzforge_common/exceptions.py
Normal file
@@ -0,0 +1,24 @@
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Any


class FuzzForgeError(Exception):
    """Base exception for all FuzzForge custom exceptions.

    All domain exceptions should inherit from this base to enable
    consistent exception handling and hierarchy navigation.

    """

    def __init__(self, message: str, details: dict[str, Any] | None = None) -> None:
        """Initialize FuzzForge error.

        :param message: Error message.
        :param details: Optional error details dictionary.

        """
        Exception.__init__(self, message)
        self.message = message
        self.details = details or {}
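A short usage sketch of the exception contract above; the identifiers and details shown are illustrative only.

# Illustrative only: raising and handling FuzzForgeError with structured details.
from fuzzforge_common.exceptions import FuzzForgeError


def require_module(identifier: str, available: list[str]) -> None:
    if identifier not in available:
        raise FuzzForgeError(
            f"module '{identifier}' is not available",
            details={"identifier": identifier, "available": available},
        )


try:
    require_module("echidna", available=[])
except FuzzForgeError as error:
    print(error.message, error.details)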
0
fuzzforge-common/src/fuzzforge_common/py.typed
Normal file
23
fuzzforge-common/src/fuzzforge_common/sandboxes/__init__.py
Normal file
@@ -0,0 +1,23 @@
"""FuzzForge sandbox abstractions and implementations."""

from fuzzforge_common.sandboxes.engines import (
    AbstractFuzzForgeEngineConfiguration,
    AbstractFuzzForgeSandboxEngine,
    Docker,
    DockerConfiguration,
    FuzzForgeSandboxEngines,
    ImageInfo,
    Podman,
    PodmanConfiguration,
)

__all__ = [
    "AbstractFuzzForgeEngineConfiguration",
    "AbstractFuzzForgeSandboxEngine",
    "Docker",
    "DockerConfiguration",
    "FuzzForgeSandboxEngines",
    "ImageInfo",
    "Podman",
    "PodmanConfiguration",
]
@@ -0,0 +1,21 @@
"""Container engine implementations for FuzzForge sandboxes."""

from fuzzforge_common.sandboxes.engines.base import (
    AbstractFuzzForgeEngineConfiguration,
    AbstractFuzzForgeSandboxEngine,
    ImageInfo,
)
from fuzzforge_common.sandboxes.engines.docker import Docker, DockerConfiguration
from fuzzforge_common.sandboxes.engines.enumeration import FuzzForgeSandboxEngines
from fuzzforge_common.sandboxes.engines.podman import Podman, PodmanConfiguration

__all__ = [
    "AbstractFuzzForgeEngineConfiguration",
    "AbstractFuzzForgeSandboxEngine",
    "Docker",
    "DockerConfiguration",
    "FuzzForgeSandboxEngines",
    "ImageInfo",
    "Podman",
    "PodmanConfiguration",
]
@@ -0,0 +1,15 @@
"""Base engine abstractions."""

from fuzzforge_common.sandboxes.engines.base.configuration import (
    AbstractFuzzForgeEngineConfiguration,
)
from fuzzforge_common.sandboxes.engines.base.engine import (
    AbstractFuzzForgeSandboxEngine,
    ImageInfo,
)

__all__ = [
    "AbstractFuzzForgeEngineConfiguration",
    "AbstractFuzzForgeSandboxEngine",
    "ImageInfo",
]
@@ -0,0 +1,24 @@
from abc import ABC, abstractmethod
from typing import TYPE_CHECKING

from pydantic import BaseModel

from fuzzforge_common.sandboxes.engines.enumeration import (
    FuzzForgeSandboxEngines,  # noqa: TC001 (required by 'pydantic' at runtime)
)

if TYPE_CHECKING:
    from fuzzforge_common.sandboxes.engines.base.engine import AbstractFuzzForgeSandboxEngine


class AbstractFuzzForgeEngineConfiguration(ABC, BaseModel):
    """TODO."""

    #: TODO.
    kind: FuzzForgeSandboxEngines

    @abstractmethod
    def into_engine(self) -> AbstractFuzzForgeSandboxEngine:
        """TODO."""
        message: str = f"method 'into_engine' is not implemented for class '{self.__class__.__name__}'"
        raise NotImplementedError(message)
@@ -0,0 +1,281 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from pathlib import Path, PurePath


@dataclass
class ImageInfo:
    """Information about a container image."""

    #: Full image reference (e.g., "localhost/fuzzforge-module-echidna:latest").
    reference: str

    #: Repository name (e.g., "localhost/fuzzforge-module-echidna").
    repository: str

    #: Image tag (e.g., "latest").
    tag: str

    #: Image ID (short hash).
    image_id: str | None = None

    #: Image size in bytes.
    size: int | None = None


class AbstractFuzzForgeSandboxEngine(ABC):
    """Abstract class used as a base for all FuzzForge sandbox engine classes."""

    @abstractmethod
    def list_images(self, filter_prefix: str | None = None) -> list[ImageInfo]:
        """List available container images.

        :param filter_prefix: Optional prefix to filter images (e.g., "localhost/").
        :returns: List of ImageInfo objects for available images.

        """
        message: str = f"method 'list_images' is not implemented for class '{self.__class__.__name__}'"
        raise NotImplementedError(message)

    @abstractmethod
    def register_archive(self, archive: Path, repository: str) -> None:
        """TODO.

        :param archive: TODO.

        """
        message: str = f"method 'register_archive' is not implemented for class '{self.__class__.__name__}'"
        raise NotImplementedError(message)

    @abstractmethod
    def spawn_sandbox(self, image: str) -> str:
        """Spawn a sandbox based on the given image.

        :param image: The image the sandbox should be based on.
        :returns: The sandbox identifier.

        """
        message: str = f"method 'spawn_sandbox' is not implemented for class '{self.__class__.__name__}'"
        raise NotImplementedError(message)

    @abstractmethod
    def push_archive_to_sandbox(self, identifier: str, source: Path, destination: PurePath) -> None:
        """TODO.

        :param identifier: TODO.
        :param source: TODO.
        :param destination: TODO.

        """
        message: str = f"method 'push_archive_to_sandbox' is not implemented for class '{self.__class__.__name__}'"
        raise NotImplementedError(message)

    @abstractmethod
    def start_sandbox(self, identifier: str) -> None:
        """TODO.

        :param identifier: The identifier of the sandbox to start.

        """
        message: str = f"method 'start_sandbox' is not implemented for class '{self.__class__.__name__}'"
        raise NotImplementedError(message)

    @abstractmethod
    def execute_inside_sandbox(self, identifier: str, command: list[str]) -> None:
        """Execute a command inside the sandbox matching the given identifier and wait for completion.

        :param identifier: The identifier of the sandbox.
        :param command: The command to run.

        """
        message: str = f"method 'execute_inside_sandbox' is not implemented for class '{self.__class__.__name__}'"
        raise NotImplementedError(message)

    @abstractmethod
    def pull_archive_from_sandbox(self, identifier: str, source: PurePath) -> Path:
        """TODO.

        :param identifier: TODO.
        :param source: TODO.
        :returns: TODO.

        """
        message: str = f"method 'pull_archive_from_sandbox' is not implemented for class '{self.__class__.__name__}'"
        raise NotImplementedError(message)

    @abstractmethod
    def terminate_sandbox(self, identifier: str) -> None:
        """Terminate the sandbox matching the given identifier.

        :param identifier: The identifier of the sandbox to terminate.

        """
        message: str = f"method 'terminate_sandbox' is not implemented for class '{self.__class__.__name__}'"
        raise NotImplementedError(message)

    # -------------------------------------------------------------------------
    # Extended Container Operations
    # -------------------------------------------------------------------------

    @abstractmethod
    def image_exists(self, image: str) -> bool:
        """Check if a container image exists locally.

        :param image: Full image reference (e.g., "localhost/module:latest").
        :returns: True if image exists, False otherwise.

        """
        message: str = f"method 'image_exists' is not implemented for class '{self.__class__.__name__}'"
        raise NotImplementedError(message)

    @abstractmethod
    def pull_image(self, image: str, timeout: int = 300) -> None:
        """Pull an image from a container registry.

        :param image: Full image reference to pull.
        :param timeout: Timeout in seconds for the pull operation.
        :raises FuzzForgeError: If pull fails.

        """
        message: str = f"method 'pull_image' is not implemented for class '{self.__class__.__name__}'"
        raise NotImplementedError(message)

    @abstractmethod
    def tag_image(self, source: str, target: str) -> None:
        """Tag an image with a new name.

        :param source: Source image reference.
        :param target: Target image reference.

        """
        message: str = f"method 'tag_image' is not implemented for class '{self.__class__.__name__}'"
        raise NotImplementedError(message)

    @abstractmethod
    def create_container(
        self,
        image: str,
        volumes: dict[str, str] | None = None,
    ) -> str:
        """Create a container from an image.

        :param image: Image to create container from.
        :param volumes: Optional volume mappings {host_path: container_path}.
        :returns: Container identifier.

        """
        message: str = f"method 'create_container' is not implemented for class '{self.__class__.__name__}'"
        raise NotImplementedError(message)

    @abstractmethod
    def start_container_attached(
        self,
        identifier: str,
        timeout: int = 600,
    ) -> tuple[int, str, str]:
        """Start a container and wait for it to complete.

        :param identifier: Container identifier.
        :param timeout: Timeout in seconds for execution.
        :returns: Tuple of (exit_code, stdout, stderr).

        """
        message: str = f"method 'start_container_attached' is not implemented for class '{self.__class__.__name__}'"
        raise NotImplementedError(message)

    @abstractmethod
    def copy_to_container(self, identifier: str, source: Path, destination: str) -> None:
        """Copy a file or directory to a container.

        :param identifier: Container identifier.
        :param source: Source path on host.
        :param destination: Destination path in container.

        """
        message: str = f"method 'copy_to_container' is not implemented for class '{self.__class__.__name__}'"
        raise NotImplementedError(message)

    @abstractmethod
    def copy_from_container(self, identifier: str, source: str, destination: Path) -> None:
        """Copy a file or directory from a container.

        :param identifier: Container identifier.
        :param source: Source path in container.
        :param destination: Destination path on host.

        """
        message: str = f"method 'copy_from_container' is not implemented for class '{self.__class__.__name__}'"
        raise NotImplementedError(message)

    @abstractmethod
    def remove_container(self, identifier: str, *, force: bool = False) -> None:
        """Remove a container.

        :param identifier: Container identifier.
        :param force: Force removal even if running.

        """
        message: str = f"method 'remove_container' is not implemented for class '{self.__class__.__name__}'"
        raise NotImplementedError(message)

    # -------------------------------------------------------------------------
    # Continuous/Background Execution Operations
    # -------------------------------------------------------------------------

    @abstractmethod
    def start_container(self, identifier: str) -> None:
        """Start a container without waiting for it to complete (detached mode).

        :param identifier: Container identifier.

        """
        message: str = f"method 'start_container' is not implemented for class '{self.__class__.__name__}'"
        raise NotImplementedError(message)

    @abstractmethod
    def get_container_status(self, identifier: str) -> str:
        """Get the status of a container.

        :param identifier: Container identifier.
        :returns: Container status (e.g., "running", "exited", "created").

        """
        message: str = f"method 'get_container_status' is not implemented for class '{self.__class__.__name__}'"
        raise NotImplementedError(message)

    @abstractmethod
    def stop_container(self, identifier: str, timeout: int = 10) -> None:
        """Stop a running container gracefully.

        :param identifier: Container identifier.
        :param timeout: Seconds to wait before killing.

        """
        message: str = f"method 'stop_container' is not implemented for class '{self.__class__.__name__}'"
        raise NotImplementedError(message)

    @abstractmethod
    def read_file_from_container(self, identifier: str, path: str) -> str:
        """Read a file from inside a running container using exec.

        :param identifier: Container identifier.
        :param path: Path to file inside container.
        :returns: File contents as string.

        """
        message: str = f"method 'read_file_from_container' is not implemented for class '{self.__class__.__name__}'"
        raise NotImplementedError(message)

    @abstractmethod
    def list_containers(self, all_containers: bool = True) -> list[dict]:
        """List containers.

        :param all_containers: Include stopped containers.
        :returns: List of container info dicts.

        """
        message: str = f"method 'list_containers' is not implemented for class '{self.__class__.__name__}'"
        raise NotImplementedError(message)
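To make the contract above concrete, here is an illustrative end-to-end lifecycle written purely against the abstract interface; the image name, archive paths, and command are placeholders, and error handling is reduced to a `finally` cleanup.

# Illustrative sketch: a single run through the sandbox lifecycle, valid for any
# concrete engine (Podman, PodmanCLI, Docker once implemented).
from pathlib import Path, PurePath

from fuzzforge_common import AbstractFuzzForgeSandboxEngine


def run_once(engine: AbstractFuzzForgeSandboxEngine, image: str) -> Path:
    # Make sure the module image is available locally.
    if not engine.image_exists(image):
        engine.pull_image(image, timeout=300)

    sandbox = engine.spawn_sandbox(image)
    try:
        # Ship the inputs in, run the workload, and collect the results archive.
        engine.push_archive_to_sandbox(sandbox, Path("assets.tar"), PurePath("/workspace"))
        engine.start_sandbox(sandbox)
        engine.execute_inside_sandbox(sandbox, ["sh", "-c", "run-module"])  # placeholder command
        return engine.pull_archive_from_sandbox(sandbox, PurePath("/workspace/results"))
    finally:
        engine.terminate_sandbox(sandbox)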
@@ -0,0 +1,11 @@
"""Docker container engine implementation."""

from fuzzforge_common.sandboxes.engines.docker.configuration import (
    DockerConfiguration,
)
from fuzzforge_common.sandboxes.engines.docker.engine import Docker

__all__ = [
    "Docker",
    "DockerConfiguration",
]
@@ -0,0 +1,22 @@
from typing import TYPE_CHECKING, Literal

from fuzzforge_common.sandboxes.engines.base.configuration import AbstractFuzzForgeEngineConfiguration
from fuzzforge_common.sandboxes.engines.docker.engine import Docker
from fuzzforge_common.sandboxes.engines.enumeration import FuzzForgeSandboxEngines

if TYPE_CHECKING:
    from fuzzforge_common.sandboxes.engines.base.engine import AbstractFuzzForgeSandboxEngine


class DockerConfiguration(AbstractFuzzForgeEngineConfiguration):
    """TODO."""

    #: TODO.
    kind: Literal[FuzzForgeSandboxEngines.DOCKER] = FuzzForgeSandboxEngines.DOCKER

    #: TODO.
    socket: str

    def into_engine(self) -> AbstractFuzzForgeSandboxEngine:
        """TODO."""
        return Docker(socket=self.socket)
@@ -0,0 +1,174 @@
from __future__ import annotations

from typing import TYPE_CHECKING

from fuzzforge_common.sandboxes.engines.base.engine import AbstractFuzzForgeSandboxEngine, ImageInfo

if TYPE_CHECKING:
    from pathlib import Path, PurePath


class Docker(AbstractFuzzForgeSandboxEngine):
    """TODO."""

    #: TODO.
    __socket: str

    def __init__(self, socket: str) -> None:
        """Initialize an instance of the class.

        :param socket: TODO.

        """
        super().__init__()
        self.__socket = socket

    def list_images(self, filter_prefix: str | None = None) -> list[ImageInfo]:
        """List available container images.

        :param filter_prefix: Optional prefix to filter images (e.g., "localhost/").
        :returns: List of ImageInfo objects for available images.

        """
        # TODO: Implement Docker image listing
        message: str = "Docker engine list_images is not yet implemented"
        raise NotImplementedError(message)

    def register_archive(self, archive: Path, repository: str) -> None:
        """TODO.

        :param archive: TODO.

        """
        return super().register_archive(archive=archive, repository=repository)

    def spawn_sandbox(self, image: str) -> str:
        """Spawn a sandbox based on the given image.

        :param image: The image the sandbox should be based on.
        :returns: The sandbox identifier.

        """
        return super().spawn_sandbox(image)

    def push_archive_to_sandbox(self, identifier: str, source: Path, destination: PurePath) -> None:
        """TODO.

        :param identifier: TODO.
        :param source: TODO.
        :param destination: TODO.

        """
        super().push_archive_to_sandbox(identifier, source, destination)

    def start_sandbox(self, identifier: str) -> None:
        """TODO.

        :param identifier: The identifier of the sandbox to start.

        """
        super().start_sandbox(identifier)

    def execute_inside_sandbox(self, identifier: str, command: list[str]) -> None:
        """Execute a command inside the sandbox matching the given identifier and wait for completion.

        :param identifier: The identifier of the sandbox.
        :param command: The command to run.

        """
        super().execute_inside_sandbox(identifier, command)

    def pull_archive_from_sandbox(self, identifier: str, source: PurePath) -> Path:
        """TODO.

        :param identifier: TODO.
        :param source: TODO.
        :returns: TODO.

        """
        return super().pull_archive_from_sandbox(identifier, source)

    def terminate_sandbox(self, identifier: str) -> None:
        """Terminate the sandbox matching the given identifier.

        :param identifier: The identifier of the sandbox to terminate.

        """
        super().terminate_sandbox(identifier)

    # -------------------------------------------------------------------------
    # Extended Container Operations (stubs - not yet implemented)
    # -------------------------------------------------------------------------

    def image_exists(self, image: str) -> bool:
        """Check if a container image exists locally."""
        message: str = "Docker engine image_exists is not yet implemented"
        raise NotImplementedError(message)

    def pull_image(self, image: str, timeout: int = 300) -> None:
        """Pull an image from a container registry."""
        message: str = "Docker engine pull_image is not yet implemented"
        raise NotImplementedError(message)

    def tag_image(self, source: str, target: str) -> None:
        """Tag an image with a new name."""
        message: str = "Docker engine tag_image is not yet implemented"
        raise NotImplementedError(message)

    def create_container(
        self,
        image: str,
        volumes: dict[str, str] | None = None,
    ) -> str:
        """Create a container from an image."""
        message: str = "Docker engine create_container is not yet implemented"
        raise NotImplementedError(message)

    def start_container_attached(
        self,
        identifier: str,
        timeout: int = 600,
    ) -> tuple[int, str, str]:
        """Start a container and wait for it to complete."""
        message: str = "Docker engine start_container_attached is not yet implemented"
        raise NotImplementedError(message)

    def copy_to_container(self, identifier: str, source: Path, destination: str) -> None:
        """Copy a file or directory to a container."""
        message: str = "Docker engine copy_to_container is not yet implemented"
        raise NotImplementedError(message)

    def copy_from_container(self, identifier: str, source: str, destination: Path) -> None:
        """Copy a file or directory from a container."""
        message: str = "Docker engine copy_from_container is not yet implemented"
        raise NotImplementedError(message)

    def remove_container(self, identifier: str, *, force: bool = False) -> None:
        """Remove a container."""
        message: str = "Docker engine remove_container is not yet implemented"
        raise NotImplementedError(message)

    def start_container(self, identifier: str) -> None:
        """Start a container without waiting for it to complete."""
        message: str = "Docker engine start_container is not yet implemented"
        raise NotImplementedError(message)

    def get_container_status(self, identifier: str) -> str:
        """Get the status of a container."""
        message: str = "Docker engine get_container_status is not yet implemented"
        raise NotImplementedError(message)

    def stop_container(self, identifier: str, timeout: int = 10) -> None:
        """Stop a running container gracefully."""
        message: str = "Docker engine stop_container is not yet implemented"
        raise NotImplementedError(message)

    def read_file_from_container(self, identifier: str, path: str) -> str:
        """Read a file from inside a running container using exec."""
        message: str = "Docker engine read_file_from_container is not yet implemented"
        raise NotImplementedError(message)

    def list_containers(self, all_containers: bool = True) -> list[dict]:
        """List containers."""
        message: str = "Docker engine list_containers is not yet implemented"
        raise NotImplementedError(message)
@@ -0,0 +1,11 @@
from enum import StrEnum


class FuzzForgeSandboxEngines(StrEnum):
    """TODO."""

    #: TODO.
    DOCKER = "docker"

    #: TODO.
    PODMAN = "podman"
@@ -0,0 +1,13 @@
"""Podman container engine implementation."""

from fuzzforge_common.sandboxes.engines.podman.cli import PodmanCLI
from fuzzforge_common.sandboxes.engines.podman.configuration import (
    PodmanConfiguration,
)
from fuzzforge_common.sandboxes.engines.podman.engine import Podman

__all__ = [
    "Podman",
    "PodmanCLI",
    "PodmanConfiguration",
]
@@ -0,0 +1,444 @@
"""Podman CLI engine with custom storage support.

This engine uses subprocess calls to the Podman CLI instead of the socket API,
allowing for custom storage paths (--root, --runroot) that work regardless of
system Podman configuration or snap environment issues.
"""

from __future__ import annotations

import json
import subprocess
import tarfile
from io import BytesIO
from pathlib import Path, PurePath
from tempfile import NamedTemporaryFile
from typing import TYPE_CHECKING, cast

from fuzzforge_common.exceptions import FuzzForgeError
from fuzzforge_common.sandboxes.engines.base.engine import AbstractFuzzForgeSandboxEngine, ImageInfo

if TYPE_CHECKING:
    from structlog.stdlib import BoundLogger


def get_logger() -> BoundLogger:
    """Get structured logger."""
    from structlog import get_logger  # noqa: PLC0415 (required by temporal)

    return cast("BoundLogger", get_logger())


class PodmanCLI(AbstractFuzzForgeSandboxEngine):
    """Podman engine using CLI with custom storage paths.

    This implementation uses subprocess calls to the Podman CLI with --root
    and --runroot flags, providing isolation from system Podman storage.
    This is particularly useful when running from VS Code snap which sets
    XDG_DATA_HOME to a version-specific path.
    """

    __graphroot: Path
    __runroot: Path

    def __init__(self, graphroot: Path, runroot: Path) -> None:
        """Initialize the PodmanCLI engine.

        :param graphroot: Path to container image storage.
        :param runroot: Path to container runtime state.

        """
        AbstractFuzzForgeSandboxEngine.__init__(self)
        self.__graphroot = graphroot
        self.__runroot = runroot

        # Ensure directories exist
        self.__graphroot.mkdir(parents=True, exist_ok=True)
        self.__runroot.mkdir(parents=True, exist_ok=True)

    def _base_cmd(self) -> list[str]:
        """Get base Podman command with storage flags.

        :returns: Base command list with --root and --runroot.

        """
        return [
            "podman",
            "--root", str(self.__graphroot),
            "--runroot", str(self.__runroot),
        ]

    def _run(self, args: list[str], *, check: bool = True, capture: bool = True) -> subprocess.CompletedProcess:
        """Run a Podman command.

        :param args: Command arguments (without 'podman').
        :param check: Raise exception on non-zero exit.
        :param capture: Capture stdout/stderr.
        :returns: CompletedProcess result.

        """
        cmd = self._base_cmd() + args
        get_logger().debug("running podman command", cmd=" ".join(cmd))
        return subprocess.run(
            cmd,
            check=check,
            capture_output=capture,
            text=True,
        )

    # -------------------------------------------------------------------------
    # Image Operations
    # -------------------------------------------------------------------------

    def list_images(self, filter_prefix: str | None = None) -> list[ImageInfo]:
        """List available container images.

        :param filter_prefix: Optional prefix to filter images.
        :returns: List of ImageInfo objects.

        """
        result = self._run(["images", "--format", "json"])
        images: list[ImageInfo] = []

        try:
            data = json.loads(result.stdout) if result.stdout.strip() else []
        except json.JSONDecodeError:
            get_logger().warning("failed to parse podman images output")
            return images

        for image in data:
            # Get repository and tag from Names
            names = image.get("Names") or []
            for name in names:
                if filter_prefix and not name.startswith(filter_prefix):
                    continue

                # Parse repository and tag
                if ":" in name:
                    repo, tag = name.rsplit(":", 1)
                else:
                    repo = name
                    tag = "latest"

                images.append(
                    ImageInfo(
                        reference=name,
                        repository=repo,
                        tag=tag,
                        image_id=image.get("Id", "")[:12],
                        size=image.get("Size"),
                    )
                )

        get_logger().debug("listed images", count=len(images), filter_prefix=filter_prefix)
        return images

    def image_exists(self, image: str) -> bool:
        """Check if a container image exists locally.

        :param image: Full image reference.
        :returns: True if image exists.

        """
        result = self._run(["image", "exists", image], check=False)
        return result.returncode == 0

    def pull_image(self, image: str, timeout: int = 300) -> None:
        """Pull an image from a container registry.

        :param image: Full image reference.
        :param timeout: Timeout in seconds.

        """
        get_logger().info("pulling image", image=image)
        try:
            self._run(["pull", image])
            get_logger().info("image pulled successfully", image=image)
        except subprocess.CalledProcessError as exc:
            message = f"Failed to pull image '{image}': {exc.stderr}"
            raise FuzzForgeError(message) from exc

    def tag_image(self, source: str, target: str) -> None:
        """Tag an image with a new name.

        :param source: Source image reference.
        :param target: Target image reference.

        """
        self._run(["tag", source, target])
        get_logger().debug("tagged image", source=source, target=target)

    def build_image(self, context_path: Path, tag: str, dockerfile: str = "Dockerfile") -> None:
        """Build an image from a Dockerfile.

        :param context_path: Path to build context.
        :param tag: Image tag.
        :param dockerfile: Dockerfile name.

        """
        get_logger().info("building image", tag=tag, context=str(context_path))
        self._run(["build", "-t", tag, "-f", dockerfile, str(context_path)])
        get_logger().info("image built successfully", tag=tag)

    def register_archive(self, archive: Path, repository: str) -> None:
        """Load an image from a tar archive.

        :param archive: Path to tar archive.
        :param repository: Repository name for the loaded image.

        """
        result = self._run(["load", "-i", str(archive)])
        # Tag the loaded image
        # Parse loaded image ID from output
        for line in result.stdout.splitlines():
            if "Loaded image:" in line:
                loaded_image = line.split("Loaded image:")[-1].strip()
                self._run(["tag", loaded_image, f"{repository}:latest"])
                break
        get_logger().debug("registered archive", archive=str(archive), repository=repository)

    # -------------------------------------------------------------------------
    # Container Operations
    # -------------------------------------------------------------------------

    def spawn_sandbox(self, image: str) -> str:
        """Spawn a sandbox (container) from an image.

        :param image: Image to create container from.
        :returns: Container identifier.

        """
        result = self._run(["create", image])
        container_id = result.stdout.strip()
        get_logger().debug("created container", container_id=container_id)
        return container_id

    def create_container(
        self,
        image: str,
        volumes: dict[str, str] | None = None,
    ) -> str:
        """Create a container from an image.

        :param image: Image to create container from.
        :param volumes: Optional volume mappings {host_path: container_path}.
        :returns: Container identifier.

        """
        args = ["create"]
        if volumes:
            for host_path, container_path in volumes.items():
                args.extend(["-v", f"{host_path}:{container_path}:ro"])
        args.append(image)

        result = self._run(args)
        container_id = result.stdout.strip()
        get_logger().debug("created container", container_id=container_id, image=image)
        return container_id

    def start_sandbox(self, identifier: str) -> None:
        """Start a container.

        :param identifier: Container identifier.

        """
        self._run(["start", identifier])
        get_logger().debug("started container", container_id=identifier)

    def start_container(self, identifier: str) -> None:
        """Start a container without waiting.

        :param identifier: Container identifier.

        """
        self._run(["start", identifier])
        get_logger().debug("started container (detached)", container_id=identifier)

    def start_container_attached(
        self,
        identifier: str,
        timeout: int = 600,
    ) -> tuple[int, str, str]:
        """Start a container and wait for completion.

        :param identifier: Container identifier.
        :param timeout: Timeout in seconds.
        :returns: Tuple of (exit_code, stdout, stderr).

        """
        get_logger().debug("starting container attached", container_id=identifier)
        # Start the container
        self._run(["start", identifier])

        # Wait for completion
        wait_result = self._run(["wait", identifier])
        exit_code = int(wait_result.stdout.strip()) if wait_result.stdout.strip() else -1

        # Get logs
        stdout_result = self._run(["logs", identifier], check=False)
        stdout_str = stdout_result.stdout or ""
        stderr_str = stdout_result.stderr or ""

        get_logger().debug("container finished", container_id=identifier, exit_code=exit_code)
        return (exit_code, stdout_str, stderr_str)

    def execute_inside_sandbox(self, identifier: str, command: list[str]) -> None:
        """Execute a command inside a container.

        :param identifier: Container identifier.
        :param command: Command to run.

        """
        get_logger().debug("executing command in container", container_id=identifier)
        self._run(["exec", identifier] + command)

    def push_archive_to_sandbox(self, identifier: str, source: Path, destination: PurePath) -> None:
        """Copy an archive to a container.

        :param identifier: Container identifier.
        :param source: Source archive path.
        :param destination: Destination path in container.

        """
        get_logger().debug("copying to container", container_id=identifier, source=str(source))
        self._run(["cp", str(source), f"{identifier}:{destination}"])

    def pull_archive_from_sandbox(self, identifier: str, source: PurePath) -> Path:
        """Copy files from a container to a local archive.

        :param identifier: Container identifier.
        :param source: Source path in container.
        :returns: Path to local archive.

        """
        get_logger().debug("copying from container", container_id=identifier, source=str(source))
        with NamedTemporaryFile(delete=False, delete_on_close=False, suffix=".tar") as tmp:
            self._run(["cp", f"{identifier}:{source}", tmp.name])
            return Path(tmp.name)

    def copy_to_container(self, identifier: str, source: Path, destination: str) -> None:
        """Copy a file or directory to a container.

        :param identifier: Container identifier.
        :param source: Source path on host.
        :param destination: Destination path in container.

        """
        self._run(["cp", str(source), f"{identifier}:{destination}"])
        get_logger().debug("copied to container", source=str(source), destination=destination)

    def copy_from_container(self, identifier: str, source: str, destination: Path) -> None:
        """Copy a file or directory from a container.

        :param identifier: Container identifier.
        :param source: Source path in container.
        :param destination: Destination path on host.

        """
        destination.mkdir(parents=True, exist_ok=True)
        self._run(["cp", f"{identifier}:{source}", str(destination)])
        get_logger().debug("copied from container", source=source, destination=str(destination))

    def terminate_sandbox(self, identifier: str) -> None:
        """Terminate and remove a container.

        :param identifier: Container identifier.

        """
        # Stop if running
        self._run(["stop", identifier], check=False)
        # Remove
        self._run(["rm", "-f", identifier], check=False)
        get_logger().debug("terminated container", container_id=identifier)

    def remove_container(self, identifier: str, *, force: bool = False) -> None:
        """Remove a container.

        :param identifier: Container identifier.
        :param force: Force removal.

        """
        args = ["rm"]
        if force:
            args.append("-f")
        args.append(identifier)
        self._run(args, check=False)
        get_logger().debug("removed container", container_id=identifier)

    def stop_container(self, identifier: str, timeout: int = 10) -> None:
        """Stop a running container.

        :param identifier: Container identifier.
        :param timeout: Seconds to wait before killing.

        """
        self._run(["stop", "-t", str(timeout), identifier], check=False)
        get_logger().debug("stopped container", container_id=identifier)

    def get_container_status(self, identifier: str) -> str:
        """Get the status of a container.

        :param identifier: Container identifier.
        :returns: Container status.

        """
        result = self._run(["inspect", "--format", "{{.State.Status}}", identifier], check=False)
        return result.stdout.strip() if result.returncode == 0 else "unknown"

    def read_file_from_container(self, identifier: str, path: str) -> str:
        """Read a file from inside a container.

        :param identifier: Container identifier.
        :param path: Path to file in container.
        :returns: File contents.

        """
        result = self._run(["exec", identifier, "cat", path], check=False)
        if result.returncode != 0:
            get_logger().debug("failed to read file from container", path=path)
            return ""
        return result.stdout

    def list_containers(self, all_containers: bool = True) -> list[dict]:
        """List containers.

        :param all_containers: Include stopped containers.
        :returns: List of container info dicts.

        """
        args = ["ps", "--format", "json"]
        if all_containers:
            args.append("-a")

        result = self._run(args)
        try:
            data = json.loads(result.stdout) if result.stdout.strip() else []
            # Handle both list and single object responses
            if isinstance(data, dict):
                data = [data]
            return [
                {
                    "Id": c.get("Id", ""),
                    "Names": c.get("Names", []),
                    "Status": c.get("State", ""),
                    "Image": c.get("Image", ""),
                }
                for c in data
            ]
        except json.JSONDecodeError:
            return []

    # -------------------------------------------------------------------------
    # Utility Methods
    # -------------------------------------------------------------------------

    def get_storage_info(self) -> dict:
        """Get storage configuration info.

        :returns: Dict with graphroot and runroot paths.

        """
        return {
            "graphroot": str(self.__graphroot),
            "runroot": str(self.__runroot),
        }
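The sketch below ties PodmanCLI to the self-contained storage layout already used by the top-level Makefile (~/.fuzzforge/containers/storage and ~/.fuzzforge/containers/run). The module image name and host volume path are illustrative placeholders.

# Illustrative sketch: running a pre-built module image with isolated storage.
from pathlib import Path

from fuzzforge_common.sandboxes.engines.podman import PodmanCLI

storage = Path.home() / ".fuzzforge" / "containers"
engine = PodmanCLI(graphroot=storage / "storage", runroot=storage / "run")

image = "fuzzforge-secret-detection:0.1.0"  # placeholder module image name
if engine.image_exists(image):
    # Mount assets read-only, run to completion, then clean up.
    container = engine.create_container(image, volumes={"/tmp/assets": "/assets"})
    exit_code, stdout, stderr = engine.start_container_attached(container, timeout=600)
    engine.remove_container(container, force=True)
    print(engine.get_storage_info(), exit_code)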
@@ -0,0 +1,22 @@
from typing import TYPE_CHECKING, Literal

from fuzzforge_common.sandboxes.engines.base.configuration import AbstractFuzzForgeEngineConfiguration
from fuzzforge_common.sandboxes.engines.enumeration import FuzzForgeSandboxEngines
from fuzzforge_common.sandboxes.engines.podman.engine import Podman

if TYPE_CHECKING:
    from fuzzforge_common.sandboxes.engines.base.engine import AbstractFuzzForgeSandboxEngine


class PodmanConfiguration(AbstractFuzzForgeEngineConfiguration):
    """TODO."""

    #: TODO.
    kind: Literal[FuzzForgeSandboxEngines.PODMAN] = FuzzForgeSandboxEngines.PODMAN

    #: TODO.
    socket: str

    def into_engine(self) -> AbstractFuzzForgeSandboxEngine:
        """TODO."""
        return Podman(socket=self.socket)
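A sketch of how these configuration classes could be consumed: the `kind` field acts as a discriminator, so a plain mapping can be validated into the right configuration class and turned into an engine via into_engine(). The TypeAdapter/discriminated-union wiring is an assumption about intended use; only the configuration classes and into_engine() come from this commit, and the socket value mirrors the one in pytest.ini.

# Hedged sketch: selecting and instantiating an engine from plain data.
from typing import Annotated, Union

from pydantic import Field, TypeAdapter

from fuzzforge_common import (
    DockerConfiguration,
    FuzzForgeSandboxEngines,
    PodmanConfiguration,
)

EngineConfiguration = Annotated[
    Union[DockerConfiguration, PodmanConfiguration],
    Field(discriminator="kind"),
]

adapter = TypeAdapter(EngineConfiguration)
configuration = adapter.validate_python(
    {
        "kind": FuzzForgeSandboxEngines.PODMAN,
        "socket": "unix:///run/user/1000/podman/podman.sock",
    }
)
engine = configuration.into_engine()  # yields a Podman instance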
@@ -0,0 +1,496 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import tarfile
|
||||
from io import BytesIO
|
||||
from pathlib import Path, PurePath
|
||||
from tempfile import NamedTemporaryFile
|
||||
from typing import TYPE_CHECKING, cast
|
||||
|
||||
from podman.errors import ImageNotFound
|
||||
|
||||
from fuzzforge_common.exceptions import FuzzForgeError
|
||||
from fuzzforge_common.sandboxes.engines.base.engine import AbstractFuzzForgeSandboxEngine, ImageInfo
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from podman import PodmanClient
|
||||
from podman.domain.containers import Container
|
||||
from structlog.stdlib import BoundLogger
|
||||
|
||||
|
||||
def get_logger() -> BoundLogger:
|
||||
"""TODO."""
|
||||
from structlog import get_logger # noqa: PLC0415 (required by temporal)
|
||||
|
||||
return cast("BoundLogger", get_logger())
|
||||
|
||||
|
||||
class Podman(AbstractFuzzForgeSandboxEngine):
|
||||
"""TODO."""
|
||||
|
||||
#: TODO.
|
||||
__socket: str
|
||||
|
||||
def __init__(self, socket: str) -> None:
|
||||
"""Initialize an instance of the class.
|
||||
|
||||
:param socket: TODO.
|
||||
|
||||
"""
|
||||
AbstractFuzzForgeSandboxEngine.__init__(self)
|
||||
self.__socket = socket
|
||||
|
||||
def get_client(self) -> PodmanClient:
|
||||
"""TODO.
|
||||
|
||||
:returns TODO.
|
||||
|
||||
"""
|
||||
from podman import PodmanClient # noqa: PLC0415 (required by temporal)
|
||||
|
||||
return PodmanClient(base_url=self.__socket)
|
||||
|
||||
def list_images(self, filter_prefix: str | None = None) -> list[ImageInfo]:
|
||||
"""List available container images.
|
||||
|
||||
:param filter_prefix: Optional prefix to filter images (e.g., "localhost/").
|
||||
:returns: List of ImageInfo objects for available images.
|
||||
|
||||
"""
|
||||
client: PodmanClient = self.get_client()
|
||||
images: list[ImageInfo] = []
|
||||
|
||||
with client:
|
||||
for image in client.images.list():
|
||||
# Get all tags for this image
|
||||
tags = image.tags or []
|
||||
for tag in tags:
|
||||
# Apply filter if specified
|
||||
if filter_prefix and not tag.startswith(filter_prefix):
|
||||
continue
|
||||
|
||||
# Parse repository and tag
|
||||
if ":" in tag:
|
||||
repo, tag_name = tag.rsplit(":", 1)
|
||||
else:
|
||||
repo = tag
|
||||
tag_name = "latest"
|
||||
|
||||
images.append(
|
||||
ImageInfo(
|
||||
reference=tag,
|
||||
repository=repo,
|
||||
tag=tag_name,
|
||||
image_id=image.short_id if hasattr(image, "short_id") else image.id[:12],
|
||||
size=image.attrs.get("Size") if hasattr(image, "attrs") else None,
|
||||
)
|
||||
)
|
||||
|
||||
get_logger().debug("listed images", count=len(images), filter_prefix=filter_prefix)
|
||||
return images
|
||||
|
||||
def register_archive(self, archive: Path, repository: str) -> None:
|
||||
"""TODO.
|
||||
|
||||
:param archive: Path to the image archive to load.
:param repository: Repository under which the loaded image is tagged (with tag "latest").
|
||||
|
||||
"""
|
||||
client: PodmanClient = self.get_client()
|
||||
with client:
|
||||
images = list(client.images.load(file_path=archive))
|
||||
if len(images) != 1:
|
||||
message: str = "expected only one image"
|
||||
raise FuzzForgeError(message)
|
||||
image = images[0]
|
||||
image.tag(repository=repository, tag="latest")
|
||||
|
||||
def spawn_sandbox(self, image: str) -> str:
|
||||
"""Spawn a sandbox based on the given image.
|
||||
|
||||
:param image: The image the sandbox should be based on.
|
||||
:returns: The sandbox identifier.
|
||||
|
||||
"""
|
||||
client: PodmanClient = self.get_client()
|
||||
with client:
|
||||
container: Container = client.containers.create(image=image)
|
||||
container_identifier: str = container.id
|
||||
get_logger().debug("create podman container", container_identifier=container_identifier)
|
||||
return container_identifier
|
||||
|
||||
def push_archive_to_sandbox(self, identifier: str, source: Path, destination: PurePath) -> None:
|
||||
"""TODO.
|
||||
|
||||
:param identifier: The identifier of the target sandbox.
|
||||
:param source: Path on the host to the tar archive to upload.
|
||||
:param destination: Directory inside the sandbox where the archive is unpacked.
|
||||
|
||||
"""
|
||||
client: PodmanClient = self.get_client()
|
||||
with client:
|
||||
container: Container = client.containers.get(key=identifier)
|
||||
get_logger().debug(
|
||||
"push archive to podman container",
|
||||
container_identifier=identifier,
|
||||
container_status=container.status,
|
||||
)
|
||||
# Read the whole archive at once for now. This temporary approach does not scale to large
# files, because the Podman SDK does not expose chunked uploads. A proper fix would either
# talk to the Podman REST API directly or contribute chunked-upload support to the SDK's
# 'put_archive' method.
|
||||
data: bytes = source.read_bytes()
|
||||
container.put_archive(path=str(destination), data=data)
|
||||
|
||||
def start_sandbox(self, identifier: str) -> None:
|
||||
"""Start the sandbox matching the given identifier.
|
||||
|
||||
:param identifier: The identifier of the sandbox to start.
|
||||
|
||||
"""
|
||||
client: PodmanClient = self.get_client()
|
||||
with client:
|
||||
container: Container = client.containers.get(key=identifier)
|
||||
get_logger().debug(
|
||||
"start podman container",
|
||||
container_identifier=identifier,
|
||||
container_status=container.status,
|
||||
)
|
||||
container.start()
|
||||
|
||||
def execute_inside_sandbox(self, identifier: str, command: list[str]) -> None:
|
||||
"""Execute a command inside the sandbox matching the given identifier and wait for completion.
|
||||
|
||||
:param identifier: The identifier of the sandbox.
|
||||
:param command: The command to run.
|
||||
|
||||
"""
|
||||
client: PodmanClient = self.get_client()
|
||||
with client:
|
||||
container: Container = client.containers.get(key=identifier)
|
||||
get_logger().debug(
|
||||
"executing command inside podman container",
|
||||
container_identifier=identifier,
|
||||
container_status=container.status,
|
||||
)
|
||||
(status, (stdout, stderr)) = container.exec_run(cmd=command, demux=True)
|
||||
get_logger().debug(
|
||||
"command execution result",
|
||||
status=status,
|
||||
stdout_size=len(stdout) if stdout else 0,
|
||||
stderr_size=len(stderr) if stderr else 0,
|
||||
)
|
||||
|
||||
def pull_archive_from_sandbox(self, identifier: str, source: PurePath) -> Path:
|
||||
"""TODO.
|
||||
|
||||
:param identifier: The identifier of the source sandbox.
|
||||
:param source: Path inside the sandbox to archive.
|
||||
:returns: Path to the tar archive created on the host.
|
||||
|
||||
"""
|
||||
client: PodmanClient = self.get_client()
|
||||
with client:
|
||||
container: Container = client.containers.get(key=identifier)
|
||||
get_logger().debug(
|
||||
"pull archive from podman container",
|
||||
container_identifier=identifier,
|
||||
container_status=container.status,
|
||||
)
|
||||
with NamedTemporaryFile(delete=False, delete_on_close=False) as file:
|
||||
stream, _stat = container.get_archive(path=str(source))
|
||||
for chunk in stream:
|
||||
file.write(chunk)
|
||||
get_logger().debug(
|
||||
"created archive",
|
||||
archive=file.name,
|
||||
)
|
||||
return Path(file.name)
|
||||
|
||||
def terminate_sandbox(self, identifier: str) -> None:
|
||||
"""Terminate the sandbox matching the given identifier.
|
||||
|
||||
:param identifier: The identifier of the sandbox to terminate.
|
||||
|
||||
"""
|
||||
client: PodmanClient = self.get_client()
|
||||
with client:
|
||||
container: Container = client.containers.get(key=identifier)
|
||||
get_logger().debug(
|
||||
"kill podman container",
|
||||
container_identifier=identifier,
|
||||
container_status=container.status,
|
||||
)
|
||||
# Only kill running containers; for created/stopped, skip to remove
|
||||
if container.status in ("running", "paused"):
|
||||
container.kill()
|
||||
get_logger().debug(
|
||||
"remove podman container",
|
||||
container_identifier=identifier,
|
||||
container_status=container.status,
|
||||
)
|
||||
container.remove()
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# Extended Container Operations
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
def image_exists(self, image: str) -> bool:
|
||||
"""Check if a container image exists locally.
|
||||
|
||||
:param image: Full image reference (e.g., "localhost/module:latest").
|
||||
:returns: True if image exists, False otherwise.
|
||||
|
||||
"""
|
||||
client: PodmanClient = self.get_client()
|
||||
with client:
|
||||
try:
|
||||
client.images.get(name=image)
|
||||
except ImageNotFound:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
def pull_image(self, image: str, timeout: int = 300) -> None:
|
||||
"""Pull an image from a container registry.
|
||||
|
||||
:param image: Full image reference to pull.
|
||||
:param timeout: Timeout in seconds for the pull operation.
|
||||
:raises FuzzForgeError: If pull fails.
|
||||
|
||||
"""
|
||||
client: PodmanClient = self.get_client()
|
||||
with client:
|
||||
try:
|
||||
get_logger().info("pulling image", image=image)
|
||||
client.images.pull(repository=image)
|
||||
get_logger().info("image pulled successfully", image=image)
|
||||
except Exception as exc:
|
||||
message = f"Failed to pull image '{image}': {exc}"
|
||||
raise FuzzForgeError(message) from exc
|
||||
|
||||
def tag_image(self, source: str, target: str) -> None:
|
||||
"""Tag an image with a new name.
|
||||
|
||||
:param source: Source image reference.
|
||||
:param target: Target image reference.
|
||||
|
||||
"""
|
||||
client: PodmanClient = self.get_client()
|
||||
with client:
|
||||
image = client.images.get(name=source)
|
||||
# Parse target into repository and tag
|
||||
if ":" in target:
|
||||
repo, tag = target.rsplit(":", 1)
|
||||
else:
|
||||
repo = target
|
||||
tag = "latest"
|
||||
image.tag(repository=repo, tag=tag)
|
||||
get_logger().debug("tagged image", source=source, target=target)
|
||||
|
||||
def create_container(
|
||||
self,
|
||||
image: str,
|
||||
volumes: dict[str, str] | None = None,
|
||||
) -> str:
|
||||
"""Create a container from an image.
|
||||
|
||||
:param image: Image to create container from.
|
||||
:param volumes: Optional volume mappings {host_path: container_path}.
|
||||
:returns: Container identifier.
|
||||
|
||||
"""
|
||||
client: PodmanClient = self.get_client()
|
||||
with client:
|
||||
# Build volume mounts in podman format
|
||||
mounts = []
|
||||
if volumes:
|
||||
for host_path, container_path in volumes.items():
|
||||
mounts.append({"type": "bind", "source": host_path, "target": container_path, "read_only": True})
|
||||
|
||||
container: Container = client.containers.create(image=image, mounts=mounts if mounts else None)
|
||||
container_id: str = str(container.id)
|
||||
get_logger().debug("created container", container_id=container_id, image=image)
|
||||
return container_id
|
||||
|
||||
def start_container_attached(
|
||||
self,
|
||||
identifier: str,
|
||||
timeout: int = 600,
|
||||
) -> tuple[int, str, str]:
|
||||
"""Start a container and wait for it to complete.
|
||||
|
||||
:param identifier: Container identifier.
|
||||
:param timeout: Timeout in seconds for execution.
|
||||
:returns: Tuple of (exit_code, stdout, stderr).
|
||||
|
||||
"""
|
||||
client: PodmanClient = self.get_client()
|
||||
with client:
|
||||
container: Container = client.containers.get(key=identifier)
|
||||
get_logger().debug("starting container attached", container_id=identifier)
|
||||
|
||||
# Start the container
|
||||
container.start()
|
||||
|
||||
# Wait for completion with timeout
|
||||
result = container.wait(timeout=timeout)
|
||||
exit_code: int = result.get("StatusCode", -1) if isinstance(result, dict) else int(result)
|
||||
|
||||
# Get logs
|
||||
stdout_raw = container.logs(stdout=True, stderr=False)
|
||||
stderr_raw = container.logs(stdout=False, stderr=True)
|
||||
|
||||
# Decode if bytes
|
||||
stdout_str: str = ""
|
||||
stderr_str: str = ""
|
||||
if isinstance(stdout_raw, bytes):
|
||||
stdout_str = stdout_raw.decode("utf-8", errors="replace")
|
||||
elif isinstance(stdout_raw, str):
|
||||
stdout_str = stdout_raw
|
||||
if isinstance(stderr_raw, bytes):
|
||||
stderr_str = stderr_raw.decode("utf-8", errors="replace")
|
||||
elif isinstance(stderr_raw, str):
|
||||
stderr_str = stderr_raw
|
||||
|
||||
get_logger().debug("container finished", container_id=identifier, exit_code=exit_code)
|
||||
return (exit_code, stdout_str, stderr_str)
|
||||
|
||||
def copy_to_container(self, identifier: str, source: Path, destination: str) -> None:
|
||||
"""Copy a file or directory to a container.
|
||||
|
||||
:param identifier: Container identifier.
|
||||
:param source: Source path on host.
|
||||
:param destination: Destination path in container.
|
||||
|
||||
"""
|
||||
client: PodmanClient = self.get_client()
|
||||
with client:
|
||||
container: Container = client.containers.get(key=identifier)
|
||||
|
||||
# Create tar archive in memory
|
||||
tar_buffer = BytesIO()
|
||||
with tarfile.open(fileobj=tar_buffer, mode="w") as tar:
|
||||
tar.add(str(source), arcname=Path(source).name)
|
||||
tar_buffer.seek(0)
|
||||
|
||||
# Use put_archive to copy
|
||||
container.put_archive(path=destination, data=tar_buffer.read())
|
||||
get_logger().debug("copied to container", source=str(source), destination=destination)
|
||||
|
||||
def copy_from_container(self, identifier: str, source: str, destination: Path) -> None:
|
||||
"""Copy a file or directory from a container.
|
||||
|
||||
:param identifier: Container identifier.
|
||||
:param source: Source path in container.
|
||||
:param destination: Destination path on host.
|
||||
|
||||
"""
|
||||
client: PodmanClient = self.get_client()
|
||||
with client:
|
||||
container: Container = client.containers.get(key=identifier)
|
||||
|
||||
# Get archive from container
|
||||
stream, _stat = container.get_archive(path=source)
|
||||
|
||||
# Write to temp file and extract
|
||||
tar_buffer = BytesIO()
|
||||
for chunk in stream:
|
||||
tar_buffer.write(chunk)
|
||||
tar_buffer.seek(0)
|
||||
|
||||
# Extract to destination
|
||||
destination.mkdir(parents=True, exist_ok=True)
|
||||
with tarfile.open(fileobj=tar_buffer, mode="r") as tar:
|
||||
tar.extractall(path=destination) # noqa: S202 (trusted source)
|
||||
|
||||
get_logger().debug("copied from container", source=source, destination=str(destination))
|
||||
|
||||
def remove_container(self, identifier: str, *, force: bool = False) -> None:
|
||||
"""Remove a container.
|
||||
|
||||
:param identifier: Container identifier.
|
||||
:param force: Force removal even if running.
|
||||
|
||||
"""
|
||||
client: PodmanClient = self.get_client()
|
||||
with client:
|
||||
container: Container = client.containers.get(key=identifier)
|
||||
if force and container.status in ("running", "paused"):
|
||||
container.kill()
|
||||
container.remove()
|
||||
get_logger().debug("removed container", container_id=identifier)
|
||||
|
||||
def start_container(self, identifier: str) -> None:
|
||||
"""Start a container without waiting for it to complete.
|
||||
|
||||
:param identifier: Container identifier.
|
||||
|
||||
"""
|
||||
client: PodmanClient = self.get_client()
|
||||
with client:
|
||||
container: Container = client.containers.get(key=identifier)
|
||||
container.start()
|
||||
get_logger().debug("started container (detached)", container_id=identifier)
|
||||
|
||||
def get_container_status(self, identifier: str) -> str:
|
||||
"""Get the status of a container.
|
||||
|
||||
:param identifier: Container identifier.
|
||||
:returns: Container status (e.g., "running", "exited", "created").
|
||||
|
||||
"""
|
||||
client: PodmanClient = self.get_client()
|
||||
with client:
|
||||
container: Container = client.containers.get(key=identifier)
|
||||
return str(container.status)
|
||||
|
||||
def stop_container(self, identifier: str, timeout: int = 10) -> None:
|
||||
"""Stop a running container gracefully.
|
||||
|
||||
:param identifier: Container identifier.
|
||||
:param timeout: Seconds to wait before killing.
|
||||
|
||||
"""
|
||||
client: PodmanClient = self.get_client()
|
||||
with client:
|
||||
container: Container = client.containers.get(key=identifier)
|
||||
if container.status == "running":
|
||||
container.stop(timeout=timeout)
|
||||
get_logger().debug("stopped container", container_id=identifier)
|
||||
|
||||
def read_file_from_container(self, identifier: str, path: str) -> str:
|
||||
"""Read a file from inside a running container using exec.
|
||||
|
||||
:param identifier: Container identifier.
|
||||
:param path: Path to file inside container.
|
||||
:returns: File contents as string.
|
||||
|
||||
"""
|
||||
client: PodmanClient = self.get_client()
|
||||
with client:
|
||||
container: Container = client.containers.get(key=identifier)
|
||||
(status, (stdout, stderr)) = container.exec_run(cmd=["cat", path], demux=True)
|
||||
if status != 0:
|
||||
error_msg = stderr.decode("utf-8", errors="replace") if stderr else "File not found"
|
||||
get_logger().debug("failed to read file from container", path=path, error=error_msg)
|
||||
return ""
|
||||
return stdout.decode("utf-8", errors="replace") if stdout else ""
|
||||
|
||||
def list_containers(self, all_containers: bool = True) -> list[dict]:
|
||||
"""List containers.
|
||||
|
||||
:param all_containers: Include stopped containers.
|
||||
:returns: List of container info dicts.
|
||||
|
||||
"""
|
||||
client: PodmanClient = self.get_client()
|
||||
with client:
|
||||
containers = client.containers.list(all=all_containers)
|
||||
return [
|
||||
{
|
||||
"Id": str(c.id),
|
||||
"Names": [c.name] if hasattr(c, "name") else [],
|
||||
"Status": str(c.status),
|
||||
"Image": str(c.image) if hasattr(c, "image") else "",
|
||||
}
|
||||
for c in containers
|
||||
]
|
||||
19
fuzzforge-common/src/fuzzforge_common/storage/__init__.py
Normal file
@@ -0,0 +1,19 @@
|
||||
"""FuzzForge storage abstractions.
|
||||
|
||||
Storage class requires boto3. Import it explicitly:
|
||||
from fuzzforge_common.storage.s3 import Storage
|
||||
"""
|
||||
|
||||
from fuzzforge_common.storage.exceptions import (
|
||||
FuzzForgeStorageError,
|
||||
StorageConnectionError,
|
||||
StorageDownloadError,
|
||||
StorageUploadError,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"FuzzForgeStorageError",
|
||||
"StorageConnectionError",
|
||||
"StorageDownloadError",
|
||||
"StorageUploadError",
|
||||
]
|
||||
@@ -0,0 +1,20 @@
|
||||
from pydantic import BaseModel
|
||||
|
||||
from fuzzforge_common.storage.s3 import Storage
|
||||
|
||||
|
||||
class StorageConfiguration(BaseModel):
|
||||
"""TODO."""
|
||||
|
||||
#: S3 endpoint URL (e.g., "http://localhost:9000" for MinIO).
|
||||
endpoint: str
|
||||
|
||||
#: S3 access key ID for authentication.
|
||||
access_key: str
|
||||
|
||||
#: S3 secret access key for authentication.
|
||||
secret_key: str
|
||||
|
||||
def into_storage(self) -> Storage:
|
||||
"""TODO."""
|
||||
return Storage(endpoint=self.endpoint, access_key=self.access_key, secret_key=self.secret_key)
|
||||
108
fuzzforge-common/src/fuzzforge_common/storage/exceptions.py
Normal file
@@ -0,0 +1,108 @@
|
||||
from fuzzforge_common.exceptions import FuzzForgeError
|
||||
|
||||
|
||||
class FuzzForgeStorageError(FuzzForgeError):
|
||||
"""Base exception for all storage-related errors.
|
||||
|
||||
Raised when storage operations (upload, download, connection) fail
|
||||
during workflow execution.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class StorageConnectionError(FuzzForgeStorageError):
|
||||
"""Failed to connect to storage service.
|
||||
|
||||
:param endpoint: The storage endpoint that failed to connect.
|
||||
:param reason: The underlying exception message.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, endpoint: str, reason: str) -> None:
|
||||
"""Initialize storage connection error.
|
||||
|
||||
:param endpoint: The storage endpoint that failed to connect.
|
||||
:param reason: The underlying exception message.
|
||||
|
||||
"""
|
||||
FuzzForgeStorageError.__init__(
|
||||
self,
|
||||
f"Failed to connect to storage at {endpoint}: {reason}",
|
||||
)
|
||||
self.endpoint = endpoint
|
||||
self.reason = reason
|
||||
|
||||
|
||||
class StorageUploadError(FuzzForgeStorageError):
|
||||
"""Failed to upload object to storage.
|
||||
|
||||
:param bucket: The target bucket name.
|
||||
:param object_key: The target object key.
|
||||
:param reason: The underlying exception message.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, bucket: str, object_key: str, reason: str) -> None:
|
||||
"""Initialize storage upload error.
|
||||
|
||||
:param bucket: The target bucket name.
|
||||
:param object_key: The target object key.
|
||||
:param reason: The underlying exception message.
|
||||
|
||||
"""
|
||||
FuzzForgeStorageError.__init__(
|
||||
self,
|
||||
f"Failed to upload to {bucket}/{object_key}: {reason}",
|
||||
)
|
||||
self.bucket = bucket
|
||||
self.object_key = object_key
|
||||
self.reason = reason
|
||||
|
||||
|
||||
class StorageDownloadError(FuzzForgeStorageError):
|
||||
"""Failed to download object from storage.
|
||||
|
||||
:param bucket: The source bucket name.
|
||||
:param object_key: The source object key.
|
||||
:param reason: The underlying exception message.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, bucket: str, object_key: str, reason: str) -> None:
|
||||
"""Initialize storage download error.
|
||||
|
||||
:param bucket: The source bucket name.
|
||||
:param object_key: The source object key.
|
||||
:param reason: The underlying exception message.
|
||||
|
||||
"""
|
||||
FuzzForgeStorageError.__init__(
|
||||
self,
|
||||
f"Failed to download from {bucket}/{object_key}: {reason}",
|
||||
)
|
||||
self.bucket = bucket
|
||||
self.object_key = object_key
|
||||
self.reason = reason
|
||||
|
||||
|
||||
class StorageDeletionError(FuzzForgeStorageError):
|
||||
"""Failed to delete bucket from storage.
|
||||
|
||||
:param bucket: The bucket name that failed to delete.
|
||||
:param reason: The underlying exception message.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, bucket: str, reason: str) -> None:
|
||||
"""Initialize storage deletion error.
|
||||
|
||||
:param bucket: The bucket name that failed to delete.
|
||||
:param reason: The underlying exception message.
|
||||
|
||||
"""
|
||||
FuzzForgeStorageError.__init__(
|
||||
self,
|
||||
f"Failed to delete bucket {bucket}: {reason}",
|
||||
)
|
||||
self.bucket = bucket
|
||||
self.reason = reason
|
||||
351
fuzzforge-common/src/fuzzforge_common/storage/s3.py
Normal file
@@ -0,0 +1,351 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path, PurePath
|
||||
from tarfile import TarInfo
|
||||
from tarfile import open as Archive # noqa: N812
|
||||
from tempfile import NamedTemporaryFile
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
|
||||
from botocore.exceptions import ClientError
|
||||
|
||||
from fuzzforge_common.storage.exceptions import StorageDeletionError, StorageDownloadError, StorageUploadError
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from botocore.client import BaseClient
|
||||
from structlog.stdlib import BoundLogger
|
||||
|
||||
|
||||
def get_logger() -> BoundLogger:
|
||||
"""Get structlog logger instance.
|
||||
|
||||
Uses deferred import pattern required by Temporal for serialization.
|
||||
|
||||
:returns: Configured structlog logger.
|
||||
|
||||
"""
|
||||
from structlog import get_logger # noqa: PLC0415 (required by temporal)
|
||||
|
||||
return cast("BoundLogger", get_logger())
|
||||
|
||||
|
||||
class Storage:
|
||||
"""S3-compatible storage backend implementation using boto3.
|
||||
|
||||
Supports MinIO, AWS S3, and other S3-compatible storage services.
|
||||
Uses error-driven approach (EAFP) to handle bucket creation and
|
||||
avoid race conditions.
|
||||
|
||||
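Example (illustrative sketch only; the endpoint and credentials are placeholder
values for a local MinIO instance):

>>> storage = Storage(
...     endpoint="http://localhost:9000",
...     access_key="minioadmin",
...     secret_key="minioadmin",
... )
>>> storage.create_bucket("artifacts")
>>> storage.upload_file(bucket="artifacts", file=Path("results.tar.gz"), key="runs/1/results.tar.gz")
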
"""
|
||||
|
||||
#: S3 endpoint URL (e.g., "http://localhost:9000" for MinIO).
|
||||
__endpoint: str
|
||||
|
||||
#: S3 access key ID for authentication.
|
||||
__access_key: str
|
||||
|
||||
#: S3 secret access key for authentication.
|
||||
__secret_key: str
|
||||
|
||||
def __init__(self, endpoint: str, access_key: str, secret_key: str) -> None:
|
||||
"""Initialize an instance of the class.
|
||||
|
||||
:param endpoint: S3 endpoint URL (e.g., "http://localhost:9000" for MinIO).
|
||||
:param access_key: S3 access key ID used for authentication.
|
||||
:param secret_key: S3 secret access key used for authentication.
|
||||
|
||||
"""
|
||||
self.__endpoint = endpoint
|
||||
self.__access_key = access_key
|
||||
self.__secret_key = secret_key
|
||||
|
||||
def _get_client(self) -> BaseClient:
|
||||
"""Create boto3 S3 client with configured credentials.
|
||||
|
||||
Uses deferred import pattern required by Temporal for serialization.
|
||||
|
||||
:returns: Configured boto3 S3 client.
|
||||
|
||||
"""
|
||||
import boto3 # noqa: PLC0415 (required by temporal)
|
||||
|
||||
return boto3.client(
|
||||
"s3",
|
||||
endpoint_url=self.__endpoint,
|
||||
aws_access_key_id=self.__access_key,
|
||||
aws_secret_access_key=self.__secret_key,
|
||||
)
|
||||
|
||||
def create_bucket(self, bucket: str) -> None:
|
||||
"""Create the S3 bucket if it does not already exist.
|
||||
|
||||
Idempotent operation - succeeds if bucket already exists and is owned by you.
|
||||
Fails if bucket exists but is owned by another account.
|
||||
|
||||
:param bucket: The name of the bucket to create.
:raise ClientError: If bucket creation fails (permissions, name conflicts, etc.).
|
||||
|
||||
"""
|
||||
logger = get_logger()
|
||||
client = self._get_client()
|
||||
|
||||
logger.debug("creating_bucket", bucket=bucket)
|
||||
|
||||
try:
|
||||
client.create_bucket(Bucket=bucket)
|
||||
logger.info("bucket_created", bucket=bucket)
|
||||
|
||||
except ClientError as e:
|
||||
error_code = e.response.get("Error", {}).get("Code")
|
||||
|
||||
# Bucket already exists and we own it - this is fine
|
||||
if error_code in ("BucketAlreadyOwnedByYou", "BucketAlreadyExists"):
|
||||
logger.debug(
|
||||
"bucket_already_exists",
|
||||
bucket=bucket,
|
||||
error_code=error_code,
|
||||
)
|
||||
return
|
||||
|
||||
# Other errors are actual failures
|
||||
logger.exception(
|
||||
"bucket_creation_failed",
|
||||
bucket=bucket,
|
||||
error_code=error_code,
|
||||
)
|
||||
raise
|
||||
|
||||
def delete_bucket(self, bucket: str) -> None:
|
||||
"""Delete an S3 bucket and all its contents.
|
||||
|
||||
Idempotent operation - succeeds if bucket doesn't exist.
|
||||
Handles pagination for buckets with many objects.
|
||||
|
||||
:param bucket: The name of the bucket to delete.
|
||||
:raises StorageDeletionError: If bucket deletion fails.
|
||||
|
||||
"""
|
||||
logger = get_logger()
|
||||
client = self._get_client()
|
||||
|
||||
logger.debug("deleting_bucket", bucket=bucket)
|
||||
|
||||
try:
|
||||
# S3 requires bucket to be empty before deletion
|
||||
# Delete all objects first with pagination support
|
||||
continuation_token = None
|
||||
|
||||
while True:
|
||||
# List objects (up to 1000 per request)
|
||||
list_params = {"Bucket": bucket}
|
||||
if continuation_token:
|
||||
list_params["ContinuationToken"] = continuation_token
|
||||
|
||||
response = client.list_objects_v2(**list_params)
|
||||
|
||||
# Delete objects if any exist (max 1000 per delete_objects call)
|
||||
if "Contents" in response:
|
||||
objects = [{"Key": obj["Key"]} for obj in response["Contents"]]
|
||||
client.delete_objects(Bucket=bucket, Delete={"Objects": objects})
|
||||
logger.debug("deleted_objects", bucket=bucket, count=len(objects))
|
||||
|
||||
# Check if more objects exist
|
||||
if not response.get("IsTruncated", False):
|
||||
break
|
||||
|
||||
continuation_token = response.get("NextContinuationToken")
|
||||
|
||||
# Now delete the empty bucket
|
||||
client.delete_bucket(Bucket=bucket)
|
||||
logger.info("bucket_deleted", bucket=bucket)
|
||||
|
||||
except ClientError as error:
|
||||
error_code = error.response.get("Error", {}).get("Code")
|
||||
|
||||
# Idempotent - bucket already doesn't exist
|
||||
if error_code == "NoSuchBucket":
|
||||
logger.debug("bucket_does_not_exist", bucket=bucket)
|
||||
return
|
||||
|
||||
# Other errors are actual failures
|
||||
logger.exception(
|
||||
"bucket_deletion_failed",
|
||||
bucket=bucket,
|
||||
error_code=error_code,
|
||||
)
|
||||
raise StorageDeletionError(bucket=bucket, reason=str(error)) from error
|
||||
|
||||
def upload_file(
|
||||
self,
|
||||
bucket: str,
|
||||
file: Path,
|
||||
key: str,
|
||||
) -> None:
|
||||
"""Upload archive file to S3 storage at specified object key.
|
||||
|
||||
Creates the bucket and retries the upload once if the bucket does not exist yet.
|
||||
|
||||
:param bucket: Name of the target bucket.
|
||||
:param file: Local path to the archive file to upload.
|
||||
:param key: Object key (path) in S3 where file should be uploaded.
|
||||
:raise StorageUploadError: If upload operation fails.
|
||||
|
||||
"""
|
||||
from boto3.exceptions import S3UploadFailedError # noqa: PLC0415 (required by 'temporal' at runtime)
|
||||
|
||||
logger = get_logger()
|
||||
client = self._get_client()
|
||||
|
||||
logger.debug(
|
||||
"uploading_archive_to_storage",
|
||||
bucket=bucket,
|
||||
object_key=key,
|
||||
archive_path=str(file),
|
||||
)
|
||||
|
||||
try:
|
||||
client.upload_file(
|
||||
Filename=str(file),
|
||||
Bucket=bucket,
|
||||
Key=key,
|
||||
)
|
||||
logger.info(
|
||||
"archive_uploaded_successfully",
|
||||
bucket=bucket,
|
||||
object_key=key,
|
||||
)
|
||||
|
||||
except S3UploadFailedError as e:
|
||||
# Check if this is a NoSuchBucket error - create bucket and retry
|
||||
if "NoSuchBucket" in str(e):
|
||||
logger.info(
|
||||
"bucket_does_not_exist_creating",
|
||||
bucket=bucket,
|
||||
)
|
||||
self.create_bucket(bucket=bucket)
|
||||
# Retry upload after creating bucket
|
||||
try:
|
||||
client.upload_file(
|
||||
Filename=str(file),
|
||||
Bucket=bucket,
|
||||
Key=key,
|
||||
)
|
||||
logger.info(
|
||||
"archive_uploaded_successfully_after_bucket_creation",
|
||||
bucket=bucket,
|
||||
object_key=key,
|
||||
)
|
||||
except S3UploadFailedError as retry_error:
|
||||
logger.exception(
|
||||
"upload_failed_after_bucket_creation",
|
||||
bucket=bucket,
|
||||
object_key=key,
|
||||
)
|
||||
raise StorageUploadError(
|
||||
bucket=bucket,
|
||||
object_key=key,
|
||||
reason=str(retry_error),
|
||||
) from retry_error
|
||||
else:
|
||||
logger.exception(
|
||||
"upload_failed",
|
||||
bucket=bucket,
|
||||
object_key=key,
|
||||
)
|
||||
raise StorageUploadError(
|
||||
bucket=bucket,
|
||||
object_key=key,
|
||||
reason=str(e),
|
||||
) from e
|
||||
|
||||
def download_file(self, bucket: str, key: PurePath) -> Path:
|
||||
"""Download a single file from S3 storage.
|
||||
|
||||
Downloads the file to a temporary location and returns the path.
|
||||
|
||||
:param bucket: S3 bucket name.
|
||||
:param key: Object key (path) in S3 to download.
|
||||
:returns: Path to the downloaded file.
|
||||
:raise StorageDownloadError: If download operation fails.
|
||||
|
||||
"""
|
||||
logger = get_logger()
|
||||
client = self._get_client()
|
||||
|
||||
logger.debug(
|
||||
"downloading_file_from_storage",
|
||||
bucket=bucket,
|
||||
object_key=str(key),
|
||||
)
|
||||
|
||||
try:
|
||||
# Create temporary file for download
|
||||
with NamedTemporaryFile(delete=False, suffix=".tar.gz") as temp_file:
|
||||
temp_path = Path(temp_file.name)
|
||||
|
||||
# Download object to temp file
|
||||
client.download_file(
|
||||
Bucket=bucket,
|
||||
Key=str(key),
|
||||
Filename=str(temp_path),
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"file_downloaded_successfully",
|
||||
bucket=bucket,
|
||||
object_key=str(key),
|
||||
local_path=str(temp_path),
|
||||
)
|
||||
|
||||
return temp_path
|
||||
|
||||
except ClientError as error:
|
||||
error_code = error.response.get("Error", {}).get("Code")
|
||||
logger.exception(
|
||||
"download_failed",
|
||||
bucket=bucket,
|
||||
object_key=str(key),
|
||||
error_code=error_code,
|
||||
)
|
||||
raise StorageDownloadError(
|
||||
bucket=bucket,
|
||||
object_key=str(key),
|
||||
reason=f"{error_code}: {error!s}",
|
||||
) from error
|
||||
|
||||
def download_directory(self, bucket: str, directory: PurePath) -> Path:
|
||||
"""TODO.
|
||||
|
||||
:param bucket: Name of the bucket to read from.
|
||||
:param directory: Key prefix ("directory") whose objects should be downloaded.
|
||||
:returns: Path to the gzipped tar archive written on the host.
:raise StorageDownloadError: If listing or fetching an object fails.
|
||||
|
||||
"""
|
||||
with NamedTemporaryFile(delete=False) as file:
|
||||
path: Path = Path(file.name)
|
||||
# end-with
|
||||
client: Any = self._get_client()
|
||||
with Archive(name=str(path), mode="w:gz") as archive:
|
||||
paginator = client.get_paginator("list_objects_v2")
|
||||
try:
|
||||
pages = paginator.paginate(Bucket=bucket, Prefix=str(directory))
|
||||
except ClientError as exception:
|
||||
raise StorageDownloadError(
|
||||
bucket=bucket,
|
||||
object_key=str(directory),
|
||||
reason=exception.response["Error"]["Code"],
|
||||
) from exception
|
||||
for page in pages:
|
||||
for entry in page.get("Contents", []):
|
||||
key: str = entry["Key"]
|
||||
try:
|
||||
response: dict[str, Any] = client.get_object(Bucket=bucket, Key=key)
|
||||
except ClientError as exception:
|
||||
raise StorageDownloadError(
|
||||
bucket=bucket,
|
||||
object_key=key,
|
||||
reason=exception.response["Error"]["Code"],
|
||||
) from exception
|
||||
# TarInfo defaults to a size of 0, so the object size must be set for addfile to copy the body.
info = TarInfo(name=key)
info.size = response["ContentLength"]
archive.addfile(info, fileobj=response["Body"])
|
||||
# end-for
|
||||
# end-for
|
||||
# end-with
|
||||
return path
|
||||
8
fuzzforge-common/src/fuzzforge_common/temporal/queues.py
Normal file
@@ -0,0 +1,8 @@
|
||||
from enum import StrEnum
|
||||
|
||||
|
||||
class TemporalQueues(StrEnum):
|
||||
"""Enumeration of available `Temporal Task Queues`."""
|
||||
|
||||
#: The default task queue.
|
||||
DEFAULT = "default-task-queue"
|
||||
@@ -0,0 +1,46 @@
|
||||
from enum import StrEnum
|
||||
from typing import Literal
|
||||
|
||||
from fuzzforge_types import FuzzForgeWorkflowIdentifier # noqa: TC002 (required by 'pydantic' at runtime)
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class Base(BaseModel):
|
||||
"""TODO."""
|
||||
|
||||
|
||||
class FuzzForgeWorkflowSteps(StrEnum):
|
||||
"""Workflow step types."""
|
||||
|
||||
#: Execute a FuzzForge module
|
||||
RUN_FUZZFORGE_MODULE = "run-fuzzforge-module"
|
||||
|
||||
|
||||
class FuzzForgeWorkflowStep(Base):
|
||||
"""TODO."""
|
||||
|
||||
#: The type of the workflow's step.
|
||||
kind: FuzzForgeWorkflowSteps
|
||||
|
||||
|
||||
class RunFuzzForgeModule(FuzzForgeWorkflowStep):
|
||||
"""Execute a FuzzForge module."""
|
||||
|
||||
kind: Literal[FuzzForgeWorkflowSteps.RUN_FUZZFORGE_MODULE] = FuzzForgeWorkflowSteps.RUN_FUZZFORGE_MODULE
|
||||
#: The name of the module.
|
||||
module: str
|
||||
#: The container of the module.
|
||||
container: str
|
||||
|
||||
|
||||
class FuzzForgeWorkflowDefinition(Base):
|
||||
"""The definition of a FuzzForge workflow."""
|
||||
|
||||
#: The author of the workflow.
|
||||
author: str
|
||||
#: The identifier of the workflow.
|
||||
identifier: FuzzForgeWorkflowIdentifier
|
||||
#: The name of the workflow.
|
||||
name: str
|
||||
#: The collection of steps that compose the workflow.
|
||||
steps: list[RunFuzzForgeModule]
|
||||
@@ -0,0 +1,24 @@
|
||||
from pydantic import BaseModel
|
||||
|
||||
from fuzzforge_common.sandboxes.engines.docker.configuration import (
|
||||
DockerConfiguration, # noqa: TC001 (required by pydantic at runtime)
|
||||
)
|
||||
from fuzzforge_common.sandboxes.engines.podman.configuration import (
|
||||
PodmanConfiguration, # noqa: TC001 (required by pydantic at runtime)
|
||||
)
|
||||
from fuzzforge_common.storage.configuration import StorageConfiguration # noqa: TC001 (required by pydantic at runtime)
|
||||
|
||||
|
||||
class TemporalWorkflowParameters(BaseModel):
|
||||
"""Base parameters for Temporal workflows.
|
||||
|
||||
Provides common configuration shared across all workflow types,
|
||||
including sandbox engine and storage backend instances.
|
||||
|
||||
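Example (illustrative sketch; the socket path and credentials are placeholder values):

>>> TemporalWorkflowParameters(
...     engine_configuration=PodmanConfiguration(socket="unix:///run/podman/podman.sock"),
...     storage_configuration=StorageConfiguration(
...         endpoint="http://localhost:9000",
...         access_key="minioadmin",
...         secret_key="minioadmin",
...     ),
... )
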
"""
|
||||
|
||||
#: Sandbox engine for container operations (Docker or Podman).
|
||||
engine_configuration: PodmanConfiguration | DockerConfiguration
|
||||
|
||||
#: Storage backend for uploading/downloading execution artifacts.
|
||||
storage_configuration: StorageConfiguration
|
||||
108
fuzzforge-common/src/fuzzforge_common/workflows/bridge_utils.py
Normal file
@@ -0,0 +1,108 @@
|
||||
"""Helper utilities for working with bridge transformations."""
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
def load_transform_from_file(file_path: str | Path) -> str:
|
||||
"""Load bridge transformation code from a Python file.
|
||||
|
||||
This reads the transformation function from a .py file and extracts
|
||||
the code as a string suitable for the bridge module.
|
||||
|
||||
Args:
|
||||
file_path: Path to Python file containing transform() function
|
||||
|
||||
Returns:
|
||||
Python code as a string
|
||||
|
||||
Example:
|
||||
>>> code = load_transform_from_file("transformations/add_line_numbers.py")
|
||||
>>> # code contains the transform() function as a string
|
||||
|
||||
"""
|
||||
path = Path(file_path)
|
||||
|
||||
if not path.exists():
|
||||
raise FileNotFoundError(f"Transformation file not found: {file_path}")
|
||||
|
||||
if path.suffix != ".py":
|
||||
raise ValueError(f"Transformation file must be .py file, got: {path.suffix}")
|
||||
|
||||
# Read the entire file
|
||||
code = path.read_text()
|
||||
|
||||
return code
|
||||
|
||||
|
||||
def create_bridge_input(
|
||||
transform_file: str | Path,
|
||||
input_filename: str | None = None,
|
||||
output_filename: str | None = None,
|
||||
) -> dict[str, Any]:
|
||||
"""Create bridge module input configuration from a transformation file.
|
||||
|
||||
Args:
|
||||
transform_file: Path to Python file with transform() function
|
||||
input_filename: Optional specific input file to transform
|
||||
output_filename: Optional specific output filename
|
||||
|
||||
Returns:
|
||||
Dictionary suitable for bridge module's input.json
|
||||
|
||||
Example:
|
||||
>>> config = create_bridge_input("transformations/add_line_numbers.py")
|
||||
>>> import json
|
||||
>>> json.dump(config, open("input.json", "w"))
|
||||
|
||||
"""
|
||||
code = load_transform_from_file(transform_file)
|
||||
|
||||
return {
|
||||
"code": code,
|
||||
"input_filename": input_filename,
|
||||
"output_filename": output_filename,
|
||||
}
|
||||
|
||||
|
||||
def validate_transform_function(file_path: str | Path) -> bool:
|
||||
"""Validate that a Python file contains a valid transform() function.
|
||||
|
||||
Args:
|
||||
file_path: Path to Python file to validate
|
||||
|
||||
Returns:
|
||||
True if valid, raises exception otherwise
|
||||
|
||||
Raises:
|
||||
ValueError: If transform() function is not found or invalid
|
||||
|
||||
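Example (illustrative path, matching the examples in the helpers above):
>>> validate_transform_function("transformations/add_line_numbers.py")
True
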
"""
|
||||
code = load_transform_from_file(file_path)
|
||||
|
||||
# Check if transform function is defined
|
||||
if "def transform(" not in code:
|
||||
raise ValueError(
|
||||
f"File {file_path} must contain a 'def transform(data)' function"
|
||||
)
|
||||
|
||||
# Try to compile the code
|
||||
try:
|
||||
compile(code, str(file_path), "exec")
|
||||
except SyntaxError as e:
|
||||
raise ValueError(f"Syntax error in {file_path}: {e}") from e
|
||||
|
||||
# Try to execute and verify transform exists
|
||||
namespace: dict[str, Any] = {"__builtins__": __builtins__}
|
||||
try:
|
||||
exec(code, namespace)
|
||||
except Exception as e:
|
||||
raise ValueError(f"Failed to execute {file_path}: {e}") from e
|
||||
|
||||
if "transform" not in namespace:
|
||||
raise ValueError(f"No 'transform' function found in {file_path}")
|
||||
|
||||
if not callable(namespace["transform"]):
|
||||
raise ValueError(f"'transform' in {file_path} is not callable")
|
||||
|
||||
return True
|
||||
27
fuzzforge-common/src/fuzzforge_common/workflows/default.py
Normal file
@@ -0,0 +1,27 @@
|
||||
from fuzzforge_types import (
|
||||
FuzzForgeExecutionIdentifier, # noqa: TC002 (required by pydantic at runtime)
|
||||
FuzzForgeProjectIdentifier, # noqa: TC002 (required by pydantic at runtime)
|
||||
)
|
||||
|
||||
from fuzzforge_common.workflows.base.definitions import (
|
||||
FuzzForgeWorkflowDefinition, # noqa: TC001 (required by pydantic at runtime)
|
||||
)
|
||||
from fuzzforge_common.workflows.base.parameters import TemporalWorkflowParameters
|
||||
|
||||
|
||||
class ExecuteFuzzForgeWorkflowParameters(TemporalWorkflowParameters):
|
||||
"""Parameters for the default FuzzForge workflow orchestration.
|
||||
|
||||
Contains workflow definition and execution tracking identifiers
|
||||
for coordinating multi-module workflows.
|
||||
|
||||
"""
|
||||
|
||||
#: UUID7 identifier of this specific workflow execution.
|
||||
execution_identifier: FuzzForgeExecutionIdentifier
|
||||
|
||||
#: UUID7 identifier of the project this execution belongs to.
|
||||
project_identifier: FuzzForgeProjectIdentifier
|
||||
|
||||
#: The definition of the FuzzForge workflow to run.
|
||||
workflow_definition: FuzzForgeWorkflowDefinition
|
||||
80
fuzzforge-common/src/fuzzforge_common/workflows/modules.py
Normal file
@@ -0,0 +1,80 @@
|
||||
from typing import Any, Literal
|
||||
|
||||
from fuzzforge_types import (
|
||||
FuzzForgeExecutionIdentifier, # noqa: TC002 (required by pydantic at runtime)
|
||||
FuzzForgeProjectIdentifier, # noqa: TC002 (required by pydantic at runtime)
|
||||
)
|
||||
|
||||
from fuzzforge_common.workflows.base.parameters import TemporalWorkflowParameters
|
||||
|
||||
|
||||
class ExecuteFuzzForgeModuleParameters(TemporalWorkflowParameters):
|
||||
"""Parameters for executing a single FuzzForge module workflow.
|
||||
|
||||
Contains module execution configuration including container image,
|
||||
project context, and execution tracking identifiers.
|
||||
|
||||
Supports workflow chaining where modules can be executed in sequence,
|
||||
with each module's output becoming the next module's input.
|
||||
|
||||
"""
|
||||
|
||||
#: The identifier of this module execution.
|
||||
execution_identifier: FuzzForgeExecutionIdentifier
|
||||
|
||||
#: The identifier/name of the module to execute.
|
||||
#: FIXME: Currently accepts both UUID (for registry lookups) and container names (e.g., "text-generator:0.0.1").
|
||||
#: This should be split into module_identifier (UUID) and container_image (string) in the future.
|
||||
module_identifier: str
|
||||
|
||||
#: The identifier of the project this module execution belongs to.
|
||||
project_identifier: FuzzForgeProjectIdentifier
|
||||
|
||||
#: Optional configuration dictionary for the module.
|
||||
#: Will be written to /data/input/config.json in the sandbox.
|
||||
module_configuration: dict[str, Any] | None = None
|
||||
|
||||
# Workflow chaining fields
|
||||
|
||||
#: The identifier of the parent workflow execution (if part of a multi-module workflow).
|
||||
#: For standalone module executions, this equals execution_identifier.
|
||||
workflow_execution_identifier: FuzzForgeExecutionIdentifier | None = None
|
||||
|
||||
#: Position of this module in the workflow (0-based).
|
||||
#: 0 = first module (reads from project assets)
|
||||
#: N > 0 = subsequent module (reads from previous module's output)
|
||||
step_index: int = 0
|
||||
|
||||
#: Execution identifier of the previous module in the workflow chain.
|
||||
#: None for first module (step_index=0).
|
||||
#: Used to locate previous module's output in storage.
|
||||
previous_step_execution_identifier: FuzzForgeExecutionIdentifier | None = None
|
||||
|
||||
|
||||
class WorkflowStep(TemporalWorkflowParameters):
|
||||
"""A step in a workflow - a module execution.
|
||||
|
||||
Steps are executed sequentially in a workflow. Each step runs a containerized module.
|
||||
|
||||
Examples:
|
||||
# Module step
|
||||
WorkflowStep(
|
||||
step_index=0,
|
||||
step_type="module",
|
||||
module_identifier="text-generator:0.0.1"
|
||||
)
|
||||
|
||||
"""
|
||||
|
||||
#: Position of this step in the workflow (0-based)
|
||||
step_index: int
|
||||
|
||||
#: Type of step: "module" (bridges are also modules now)
|
||||
step_type: Literal["module"]
|
||||
|
||||
#: Module identifier (container image name like "text-generator:0.0.1")
|
||||
#: Required if step_type="module"
|
||||
module_identifier: str | None = None
|
||||
|
||||
#: Optional module configuration
|
||||
module_configuration: dict[str, Any] | None = None
|
||||
0
fuzzforge-common/tests/.gitkeep
Normal file
0
fuzzforge-common/tests/__init__.py
Normal file
1
fuzzforge-common/tests/conftest.py
Normal file
@@ -0,0 +1 @@
|
||||
pytest_plugins = ["fuzzforge_tests.fixtures"]
|
||||
0
fuzzforge-common/tests/unit/__init__.py
Normal file
0
fuzzforge-common/tests/unit/engines/__init__.py
Normal file
9
fuzzforge-common/tests/unit/engines/conftest.py
Normal file
@@ -0,0 +1,9 @@
|
||||
import pytest
|
||||
|
||||
from fuzzforge_common.sandboxes.engines.podman.engine import Podman
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def podman_engine(podman_socket: str) -> Podman:
|
||||
"""TODO."""
|
||||
return Podman(socket=podman_socket)
|
||||
21
fuzzforge-common/tests/unit/engines/test_podman.py
Normal file
@@ -0,0 +1,21 @@
|
||||
from typing import TYPE_CHECKING
|
||||
from uuid import uuid4
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pathlib import Path
|
||||
|
||||
from podman import PodmanClient
|
||||
|
||||
from fuzzforge_common.sandboxes.engines.podman.engine import Podman
|
||||
|
||||
|
||||
def test_can_register_oci(
|
||||
path_to_oci: Path,
|
||||
podman_engine: Podman,
|
||||
podman_client: PodmanClient,
|
||||
) -> None:
|
||||
"""TODO."""
|
||||
repository: str = str(uuid4())
|
||||
podman_engine.register_archive(archive=path_to_oci, repository=repository)
|
||||
assert podman_client.images.exists(key=repository)
|
||||
podman_client.images.get(name=repository).remove()
|
||||
0
fuzzforge-common/tests/unit/storage/__init__.py
Normal file
42
fuzzforge-common/tests/unit/storage/test_storage.py
Normal file
@@ -0,0 +1,42 @@
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from fuzzforge_common.storage.configuration import StorageConfiguration
|
||||
|
||||
|
||||
def test_download_directory(
|
||||
storage_configuration: StorageConfiguration,
|
||||
boto3_client: Any,
|
||||
random_bucket: str,
|
||||
tmp_path: Path,
|
||||
) -> None:
|
||||
"""TODO."""
|
||||
bucket = random_bucket
|
||||
storage = storage_configuration.into_storage()
|
||||
|
||||
d1 = tmp_path.joinpath("d1")
|
||||
f1 = d1.joinpath("f1")
|
||||
d2 = tmp_path.joinpath("d2")
|
||||
f2 = d2.joinpath("f2")
|
||||
d3 = d2.joinpath("d3")
|
||||
f3 = d3.joinpath("f3")
|
||||
|
||||
d1.mkdir()
|
||||
d2.mkdir()
|
||||
d3.mkdir()
|
||||
f1.touch()
|
||||
f2.touch()
|
||||
f3.touch()
|
||||
|
||||
for path in [f1, f2, f3]:
|
||||
key: Path = Path("assets", path.relative_to(other=tmp_path))
|
||||
boto3_client.upload_file(
|
||||
Bucket=bucket,
|
||||
Filename=str(path),
|
||||
Key=str(key),
|
||||
)
|
||||
|
||||
path = storage.download_directory(bucket=bucket, directory="assets")
|
||||
|
||||
assert path.is_file()
|
||||
11
fuzzforge-mcp/Dockerfile
Normal file
@@ -0,0 +1,11 @@
|
||||
FROM docker.io/debian:trixie
|
||||
|
||||
ARG PACKAGE
|
||||
|
||||
COPY --from=ghcr.io/astral-sh/uv:0.9.10 /uv /bin/
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN /bin/uv venv && /bin/uv pip install --find-links /wheels $PACKAGE
|
||||
|
||||
CMD [ "/bin/uv", "run", "uvicorn", "fuzzforge_mcp.application:app"]
|
||||
51
fuzzforge-mcp/Makefile
Normal file
@@ -0,0 +1,51 @@
|
||||
PACKAGE=$(word 1, $(shell uv version))
|
||||
VERSION=$(word 2, $(shell uv version))
|
||||
|
||||
PODMAN?=/usr/bin/podman
|
||||
|
||||
ARTIFACTS?=./dist
|
||||
SOURCES=./src
|
||||
TESTS=./tests
|
||||
|
||||
.PHONY: bandit clean cloc format image mypy pytest ruff version wheel
|
||||
|
||||
bandit:
|
||||
uv run bandit --recursive $(SOURCES)
|
||||
|
||||
clean:
|
||||
@find . -type d \( \
|
||||
-name '*.egg-info' \
|
||||
-o -name '.mypy_cache' \
|
||||
-o -name '.pytest_cache' \
|
||||
-o -name '.ruff_cache' \
|
||||
-o -name '__pycache__' \
|
||||
\) -printf 'removing directory %p\n' -exec rm -rf {} +
|
||||
|
||||
cloc:
|
||||
cloc $(SOURCES) $(TESTS)
|
||||
|
||||
format:
|
||||
uv run ruff format $(SOURCES) $(TESTS)
|
||||
|
||||
image:
|
||||
$(PODMAN) build \
|
||||
--build-arg PACKAGE=$(PACKAGE) \
|
||||
--file ./Dockerfile \
|
||||
--no-cache \
|
||||
--tag $(PACKAGE):$(VERSION) \
|
||||
--volume $(ARTIFACTS):/wheels
|
||||
|
||||
mypy:
|
||||
uv run mypy $(SOURCES) $(TESTS)
|
||||
|
||||
pytest:
|
||||
uv run pytest -v $(TESTS)
|
||||
|
||||
ruff:
|
||||
uv run ruff check --fix $(SOURCES) $(TESTS)
|
||||
|
||||
version:
|
||||
@echo '$(PACKAGE)@$(VERSION)'
|
||||
|
||||
wheel:
|
||||
uv build --out-dir $(ARTIFACTS)
|
||||
223
fuzzforge-mcp/README.md
Normal file
@@ -0,0 +1,223 @@
|
||||
# FuzzForge MCP
|
||||
|
||||
Model Context Protocol (MCP) server that enables AI agents to orchestrate FuzzForge security research modules.
|
||||
|
||||
## Overview
|
||||
|
||||
FuzzForge MCP provides a standardized interface for AI agents (Claude Code, GitHub Copilot, Claude Desktop) to:
|
||||
|
||||
- List and discover available security modules
|
||||
- Execute modules in isolated containers
|
||||
- Chain modules together in workflows
|
||||
- Manage project assets and results
|
||||
|
||||
The server communicates with AI agents using the [Model Context Protocol](https://modelcontextprotocol.io/) over stdio.
|
||||
|
||||
## Installation
|
||||
|
||||
### Automatic Installation (Recommended)
|
||||
|
||||
Use the FuzzForge CLI to automatically configure MCP for your AI agent:
|
||||
|
||||
```bash
|
||||
# For GitHub Copilot
|
||||
uv run fuzzforge mcp install copilot
|
||||
|
||||
# For Claude Code (VS Code extension)
|
||||
uv run fuzzforge mcp install claude-code
|
||||
|
||||
# For Claude Desktop (standalone app)
|
||||
uv run fuzzforge mcp install claude-desktop
|
||||
|
||||
# Verify installation
|
||||
uv run fuzzforge mcp status
|
||||
```
|
||||
|
||||
After installation, restart your AI agent to activate the connection.
|
||||
|
||||
### Manual Installation
|
||||
|
||||
For custom setups, you can manually configure the MCP server.
|
||||
|
||||
#### Claude Code (`.mcp.json` in project root)
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"fuzzforge": {
|
||||
"command": "/path/to/fuzzforge-oss/.venv/bin/python",
|
||||
"args": ["-m", "fuzzforge_mcp"],
|
||||
"cwd": "/path/to/fuzzforge-oss",
|
||||
"env": {
|
||||
"FUZZFORGE_MODULES_PATH": "/path/to/fuzzforge-oss/fuzzforge-modules",
|
||||
"FUZZFORGE_ENGINE__TYPE": "podman",
|
||||
"FUZZFORGE_ENGINE__GRAPHROOT": "~/.fuzzforge/containers/storage",
|
||||
"FUZZFORGE_ENGINE__RUNROOT": "~/.fuzzforge/containers/run"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### GitHub Copilot (`~/.config/Code/User/mcp.json`)
|
||||
|
||||
```json
|
||||
{
|
||||
"servers": {
|
||||
"fuzzforge": {
|
||||
"type": "stdio",
|
||||
"command": "/path/to/fuzzforge-oss/.venv/bin/python",
|
||||
"args": ["-m", "fuzzforge_mcp"],
|
||||
"cwd": "/path/to/fuzzforge-oss",
|
||||
"env": {
|
||||
"FUZZFORGE_MODULES_PATH": "/path/to/fuzzforge-oss/fuzzforge-modules",
|
||||
"FUZZFORGE_ENGINE__TYPE": "podman",
|
||||
"FUZZFORGE_ENGINE__GRAPHROOT": "~/.fuzzforge/containers/storage",
|
||||
"FUZZFORGE_ENGINE__RUNROOT": "~/.fuzzforge/containers/run"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### Claude Desktop (`~/.config/Claude/claude_desktop_config.json`)
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"fuzzforge": {
|
||||
"type": "stdio",
|
||||
"command": "/path/to/fuzzforge-oss/.venv/bin/python",
|
||||
"args": ["-m", "fuzzforge_mcp"],
|
||||
"cwd": "/path/to/fuzzforge-oss",
|
||||
"env": {
|
||||
"FUZZFORGE_MODULES_PATH": "/path/to/fuzzforge-oss/fuzzforge-modules",
|
||||
"FUZZFORGE_ENGINE__TYPE": "podman",
|
||||
"FUZZFORGE_ENGINE__GRAPHROOT": "~/.fuzzforge/containers/storage",
|
||||
"FUZZFORGE_ENGINE__RUNROOT": "~/.fuzzforge/containers/run"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Environment Variables
|
||||
|
||||
| Variable | Required | Default | Description |
|
||||
| -------- | -------- | ------- | ----------- |
|
||||
| `FUZZFORGE_MODULES_PATH` | Yes | - | Path to the modules directory |
|
||||
| `FUZZFORGE_ENGINE__TYPE` | No | `podman` | Container engine (`podman` or `docker`) |
|
||||
| `FUZZFORGE_ENGINE__GRAPHROOT` | No | `~/.fuzzforge/containers/storage` | Container image storage path |
|
||||
| `FUZZFORGE_ENGINE__RUNROOT` | No | `~/.fuzzforge/containers/run` | Container runtime state path |
|
||||
|
||||
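The nested `FUZZFORGE_ENGINE__*` variables follow the pydantic-settings double-underscore convention for nested models. The sketch below is illustrative only; the real `Settings` class lives in `fuzzforge_runner` and may be organized differently. It shows how variables of this shape map onto nested fields:

```python
# Illustrative only: not the actual fuzzforge_runner.Settings class.
from pydantic import BaseModel
from pydantic_settings import BaseSettings, SettingsConfigDict


class EngineSettings(BaseModel):
    type: str = "podman"
    graphroot: str = "~/.fuzzforge/containers/storage"
    runroot: str = "~/.fuzzforge/containers/run"


class ExampleSettings(BaseSettings):
    model_config = SettingsConfigDict(env_prefix="FUZZFORGE_", env_nested_delimiter="__")

    modules_path: str
    engine: EngineSettings = EngineSettings()


# FUZZFORGE_MODULES_PATH -> modules_path, FUZZFORGE_ENGINE__TYPE -> engine.type, etc.
settings = ExampleSettings()
```
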
## Available Tools
|
||||
|
||||
The MCP server exposes the following tools to AI agents:
|
||||
|
||||
### Project Management
|
||||
|
||||
- **`init_project`** - Initialize a new FuzzForge project
|
||||
- **`set_project_assets`** - Set initial assets (source code, contracts, etc.) for the project
|
||||
|
||||
### Module Management
|
||||
|
||||
- **`list_modules`** - List all available security research modules
|
||||
- **`execute_module`** - Execute a single module in an isolated container
|
||||
|
||||
### Workflow Management
|
||||
|
||||
- **`execute_workflow`** - Execute a workflow consisting of multiple chained modules
|
||||
|
||||
### Resources
|
||||
|
||||
The server also provides resources for accessing:
|
||||
|
||||
- Project information and configuration
|
||||
- Module metadata and schemas
|
||||
- Execution results and artifacts
|
||||
- Workflow definitions and status
|
||||
|
||||
## Usage Examples
|
||||
|
||||
### From AI Agent (e.g., Claude Code)
|
||||
|
||||
Once configured, AI agents can interact with FuzzForge naturally:
|
||||
|
||||
```text
|
||||
User: List the available security modules
|
||||
|
||||
AI Agent: [Calls list_modules tool]
|
||||
```
|
||||
|
||||
```text
|
||||
User: Run echidna fuzzer on my Solidity contracts
|
||||
|
||||
AI Agent: [Calls init_project, set_project_assets, then execute_module]
|
||||
```
|
||||
|
||||
```text
|
||||
User: Create a workflow that compiles contracts, runs slither, then echidna
|
||||
|
||||
AI Agent: [Calls execute_workflow with appropriate steps]
|
||||
```
|
||||
|
||||
### Direct Testing (Development)
|
||||
|
||||
For testing during development, you can run the MCP server directly:
|
||||
|
||||
```bash
|
||||
# Run MCP server in stdio mode (for AI agents)
|
||||
uv run python -m fuzzforge_mcp
|
||||
|
||||
# Run HTTP server for testing (not for production)
|
||||
uv run uvicorn fuzzforge_mcp.application:app --reload
|
||||
```
|
||||
|
||||
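For quick iteration you can also exercise the server in-process with the `fastmcp` client. This is a rough sketch, not part of the shipped tooling; the tool names come from the list above, and their exact parameter schemas are not shown here:

```python
# Rough development sketch: list the tools the server exposes, in-process.
import asyncio

from fastmcp import Client

from fuzzforge_mcp.application import mcp


async def main() -> None:
    async with Client(mcp) as client:
        tools = await client.list_tools()
        print([tool.name for tool in tools])


asyncio.run(main())
```
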
## Architecture
|
||||
|
||||
```text
|
||||
┌─────────────────────────────────────────┐
|
||||
│ AI Agent (Claude/Copilot) │
|
||||
│ via MCP Protocol │
|
||||
└─────────────────────────────────────────┘
|
||||
│
|
||||
│ stdio/JSON-RPC
|
||||
▼
|
||||
┌─────────────────────────────────────────┐
|
||||
│ FuzzForge MCP Server │
|
||||
│ Tools: init_project, list_modules, │
|
||||
│ execute_module, execute_workflow│
|
||||
└─────────────────────────────────────────┘
|
||||
│
|
||||
▼
|
||||
┌─────────────────────────────────────────┐
|
||||
│ FuzzForge Runner │
|
||||
│ Podman/Docker Orchestration │
|
||||
└─────────────────────────────────────────┘
|
||||
│
|
||||
┌─────────┼─────────┐
|
||||
▼ ▼ ▼
|
||||
[Module 1] [Module 2] [Module 3]
|
||||
Container Container Container
|
||||
```
|
||||
|
||||
## Development
|
||||
|
||||
### Building the Package
|
||||
|
||||
```bash
|
||||
# Install development dependencies
|
||||
uv sync
|
||||
|
||||
# Run type checking
|
||||
uv run mypy src/
|
||||
|
||||
# Run tests
|
||||
uv run pytest
|
||||
```
|
||||
|
||||
## See Also
|
||||
|
||||
- [FuzzForge Main README](../README.md) - Overall project documentation
|
||||
- [Module SDK](../fuzzforge-modules/fuzzforge-modules-sdk/README.md) - Creating custom modules
|
||||
- [Model Context Protocol](https://modelcontextprotocol.io/) - MCP specification
|
||||
6
fuzzforge-mcp/mypy.ini
Normal file
@@ -0,0 +1,6 @@
|
||||
[mypy]
|
||||
plugins = pydantic.mypy
|
||||
strict = True
|
||||
warn_unused_ignores = True
|
||||
warn_redundant_casts = True
|
||||
warn_return_any = True
|
||||
34
fuzzforge-mcp/pyproject.toml
Normal file
@@ -0,0 +1,34 @@
|
||||
[project]
|
||||
name = "fuzzforge-mcp"
|
||||
version = "0.0.1"
|
||||
description = "FuzzForge MCP Server - AI agent gateway for FuzzForge OSS."
|
||||
authors = []
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.14"
|
||||
dependencies = [
|
||||
"fastmcp==2.14.1",
|
||||
"fuzzforge-runner==0.0.1",
|
||||
"fuzzforge-types==0.0.1",
|
||||
"pydantic==2.12.4",
|
||||
"pydantic-settings==2.12.0",
|
||||
"structlog==25.5.0",
|
||||
]
|
||||
|
||||
[project.scripts]
|
||||
fuzzforge-mcp = "fuzzforge_mcp.__main__:main"
|
||||
|
||||
[project.optional-dependencies]
|
||||
lints = [
|
||||
"bandit==1.8.6",
|
||||
"mypy==1.18.2",
|
||||
"ruff==0.14.4",
|
||||
]
|
||||
tests = [
|
||||
"pytest==9.0.2",
|
||||
"pytest-asyncio==1.3.0",
|
||||
"pytest-httpx==0.36.0",
|
||||
]
|
||||
|
||||
[tool.uv.sources]
|
||||
fuzzforge-runner = { workspace = true }
|
||||
fuzzforge-types = { workspace = true }
|
||||
2
fuzzforge-mcp/pytest.ini
Normal file
@@ -0,0 +1,2 @@
|
||||
[pytest]
|
||||
asyncio_mode = auto
|
||||
16
fuzzforge-mcp/ruff.toml
Normal file
@@ -0,0 +1,16 @@
|
||||
line-length = 120
|
||||
|
||||
[lint]
|
||||
select = [ "ALL" ]
|
||||
ignore = [
|
||||
"COM812", # conflicts with the formatter
|
||||
"D203", # conflicts with 'D211'
|
||||
"D213", # conflicts with 'D212'
|
||||
]
|
||||
|
||||
[lint.per-file-ignores]
|
||||
"tests/*" = [
|
||||
"PLR0913", # allowing functions with many arguments in tests (required for fixtures)
|
||||
"PLR2004", # allowing comparisons using unamed numerical constants in tests
|
||||
"S101", # allowing 'assert' statements in tests
|
||||
]
|
||||
1
fuzzforge-mcp/src/fuzzforge_mcp/__init__.py
Normal file
@@ -0,0 +1 @@
"""FuzzForge MCP server package."""
17
fuzzforge-mcp/src/fuzzforge_mcp/__main__.py
Normal file
@@ -0,0 +1,17 @@
"""FuzzForge MCP Server entry point."""

from fuzzforge_mcp.application import mcp


def main() -> None:
    """Run the FuzzForge MCP server in stdio mode.

    This is the primary entry point for AI agent integration.
    The server communicates via stdin/stdout using the MCP protocol.

    """
    mcp.run(transport="stdio")


if __name__ == "__main__":
    main()
64
fuzzforge-mcp/src/fuzzforge_mcp/application.py
Normal file
@@ -0,0 +1,64 @@
"""FuzzForge MCP Server Application.

This is the main entry point for the FuzzForge MCP server, providing
AI agents with tools to execute security research modules.

"""

from contextlib import asynccontextmanager
from typing import TYPE_CHECKING

from fastmcp import FastMCP
from fastmcp.server.middleware.error_handling import ErrorHandlingMiddleware

from fuzzforge_mcp import resources, tools
from fuzzforge_runner import Settings

if TYPE_CHECKING:
    from collections.abc import AsyncGenerator


@asynccontextmanager
async def lifespan(_: FastMCP) -> AsyncGenerator[Settings]:
    """Initialize MCP server lifespan context.

    Loads settings from environment variables and makes them
    available to all tools and resources.

    :param _: FastMCP server instance (unused).
    :return: Settings instance for dependency injection.

    """
    settings: Settings = Settings()
    yield settings


mcp: FastMCP = FastMCP(
    name="FuzzForge MCP Server",
    instructions="""
FuzzForge is a security research orchestration platform. Use these tools to:

1. **List modules**: Discover available security research modules
2. **Execute modules**: Run modules in isolated containers
3. **Execute workflows**: Chain multiple modules together
4. **Manage projects**: Initialize and configure projects
5. **Get results**: Retrieve execution results

Typical workflow:
1. Initialize a project with `init_project`
2. Set project assets with `set_project_assets` (optional)
3. List available modules with `list_modules`
4. Execute a module with `execute_module`
5. Get results with `get_execution_results`
""",
    lifespan=lifespan,
)

mcp.add_middleware(middleware=ErrorHandlingMiddleware())

mcp.mount(resources.mcp)
mcp.mount(tools.mcp)

# HTTP app for testing (primary mode is stdio)
app = mcp.http_app()

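The `instructions` string above lays out the intended tool sequence. A hedged sketch of that sequence from a client, assuming the in-process FastMCP client used by the test suite and that the mounted tools keep their original names (paths and the module identifier are placeholders):

```python
import asyncio

from fastmcp import Client

from fuzzforge_mcp.application import mcp


async def typical_workflow() -> None:
    """Follow the workflow described in the server instructions."""
    async with Client(transport=mcp) as client:
        # 1. Initialize a project (the path argument is optional).
        await client.call_tool("init_project", {"project_path": "/tmp/demo-project"})

        # 2. Optionally register input assets for the project.
        await client.call_tool("set_project_assets", {"assets_path": "/tmp/demo-assets"})

        # 3. Discover which module images are available.
        print(await client.call_tool("list_modules", {}))

        # 4. Execute a module in an isolated container.
        print(
            await client.call_tool(
                "execute_module",
                {"module_identifier": "cargo-fuzzer", "configuration": {}},
            )
        )

        # 5. Retrieve stored results for a known execution id (placeholder).
        print(
            await client.call_tool(
                "get_execution_results",
                {"execution_id": "<execution-id-from-step-4>"},
            )
        )


asyncio.run(typical_workflow())
```
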
48
fuzzforge-mcp/src/fuzzforge_mcp/dependencies.py
Normal file
@@ -0,0 +1,48 @@
"""Dependency injection helpers for FuzzForge MCP."""

from __future__ import annotations

from pathlib import Path
from typing import TYPE_CHECKING, cast

from fastmcp.server.dependencies import get_context
from fuzzforge_runner import Runner, Settings

from fuzzforge_mcp.exceptions import FuzzForgeMCPError

if TYPE_CHECKING:
    from fastmcp import Context


def get_settings() -> Settings:
    """Get MCP server settings from context.

    :return: Settings instance.
    :raises FuzzForgeMCPError: If settings not available.

    """
    context: Context = get_context()
    if context.request_context is None:
        message: str = "Request context not available"
        raise FuzzForgeMCPError(message)
    return cast("Settings", context.request_context.lifespan_context)


def get_project_path() -> Path:
    """Get the current project path.

    :return: Path to the current project.

    """
    settings: Settings = get_settings()
    return Path(settings.project.default_path)


def get_runner() -> Runner:
    """Get a configured Runner instance.

    :return: Runner instance configured from MCP settings.

    """
    settings: Settings = get_settings()
    return Runner(settings)
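New tools and resources are expected to go through these helpers instead of constructing a `Runner` or reading settings themselves. A minimal sketch of a hypothetical tool (the `project_status` name is illustrative) following the same pattern as the modules under `tools/`:

```python
from typing import Any

from fastmcp import FastMCP

from fuzzforge_mcp.dependencies import get_project_path, get_runner

mcp: FastMCP = FastMCP()


@mcp.tool
async def project_status() -> dict[str, Any]:
    """Report how many executions the current project has accumulated."""
    runner = get_runner()              # Runner built from the lifespan Settings
    project_path = get_project_path()  # resolved from the project settings
    executions = runner.list_executions(project_path)
    return {"project": str(project_path), "execution_count": len(executions)}
```

A sub-server like this would then be mounted next to the existing ones in `tools/__init__.py`.
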
5
fuzzforge-mcp/src/fuzzforge_mcp/exceptions.py
Normal file
@@ -0,0 +1,5 @@
"""Exceptions raised by the FuzzForge MCP server."""


class FuzzForgeMCPError(Exception):
    """Base exception for FuzzForge MCP errors."""
16
fuzzforge-mcp/src/fuzzforge_mcp/resources/__init__.py
Normal file
@@ -0,0 +1,16 @@
"""FuzzForge MCP Resources."""

from fastmcp import FastMCP

from fuzzforge_mcp.resources import executions, modules, project, workflows

mcp: FastMCP = FastMCP()

mcp.mount(executions.mcp)
mcp.mount(modules.mcp)
mcp.mount(project.mcp)
mcp.mount(workflows.mcp)

__all__ = [
    "mcp",
]
75
fuzzforge-mcp/src/fuzzforge_mcp/resources/executions.py
Normal file
@@ -0,0 +1,75 @@
"""Execution resources for FuzzForge MCP."""

from __future__ import annotations

from pathlib import Path
from typing import TYPE_CHECKING, Any

from fastmcp import FastMCP
from fastmcp.exceptions import ResourceError

from fuzzforge_mcp.dependencies import get_project_path, get_runner

if TYPE_CHECKING:
    from fuzzforge_runner import Runner


mcp: FastMCP = FastMCP()


@mcp.resource("fuzzforge://executions/")
async def list_executions() -> list[dict[str, Any]]:
    """List all executions for the current project.

    Returns a list of execution IDs and basic metadata.

    :return: List of execution information dictionaries.

    """
    runner: Runner = get_runner()
    project_path: Path = get_project_path()

    try:
        execution_ids = runner.list_executions(project_path)

        return [
            {
                "execution_id": exec_id,
                "has_results": runner.get_execution_results(project_path, exec_id) is not None,
            }
            for exec_id in execution_ids
        ]

    except Exception as exception:
        message: str = f"Failed to list executions: {exception}"
        raise ResourceError(message) from exception


@mcp.resource("fuzzforge://executions/{execution_id}")
async def get_execution(execution_id: str) -> dict[str, Any]:
    """Get information about a specific execution.

    :param execution_id: The execution ID to retrieve.
    :return: Execution information dictionary.

    """
    runner: Runner = get_runner()
    project_path: Path = get_project_path()

    try:
        results_path = runner.get_execution_results(project_path, execution_id)

        if results_path is None:
            raise ResourceError(f"Execution not found: {execution_id}")

        return {
            "execution_id": execution_id,
            "results_path": str(results_path),
            "results_exist": results_path.exists(),
        }

    except ResourceError:
        raise
    except Exception as exception:
        message: str = f"Failed to get execution: {exception}"
        raise ResourceError(message) from exception
78
fuzzforge-mcp/src/fuzzforge_mcp/resources/modules.py
Normal file
@@ -0,0 +1,78 @@
"""Module resources for FuzzForge MCP."""

from __future__ import annotations

from typing import TYPE_CHECKING, Any

from fastmcp import FastMCP
from fastmcp.exceptions import ResourceError

from fuzzforge_mcp.dependencies import get_runner

if TYPE_CHECKING:
    from fuzzforge_runner import Runner
    from fuzzforge_runner.runner import ModuleInfo


mcp: FastMCP = FastMCP()


@mcp.resource("fuzzforge://modules/")
async def list_modules() -> list[dict[str, Any]]:
    """List all available FuzzForge modules.

    Returns information about modules that can be executed,
    including their identifiers and availability status.

    :return: List of module information dictionaries.

    """
    runner: Runner = get_runner()

    try:
        modules: list[ModuleInfo] = runner.list_modules()

        return [
            {
                "identifier": module.identifier,
                "description": module.description,
                "version": module.version,
                "available": module.available,
            }
            for module in modules
        ]

    except Exception as exception:
        message: str = f"Failed to list modules: {exception}"
        raise ResourceError(message) from exception


@mcp.resource("fuzzforge://modules/{module_identifier}")
async def get_module(module_identifier: str) -> dict[str, Any]:
    """Get information about a specific module.

    :param module_identifier: The identifier of the module to retrieve.
    :return: Module information dictionary.

    """
    runner: Runner = get_runner()

    try:
        module: ModuleInfo | None = runner.get_module_info(module_identifier)

        if module is None:
            raise ResourceError(f"Module not found: {module_identifier}")

        return {
            "identifier": module.identifier,
            "description": module.description,
            "version": module.version,
            "available": module.available,
        }

    except ResourceError:
        raise
    except Exception as exception:
        message: str = f"Failed to get module: {exception}"
        raise ResourceError(message) from exception

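These resource URIs are what an MCP client reads. A short sketch of fetching the module list and one module with the in-process client from the test suite; parsing follows the pattern in `tests/test_resources.py` (first content item, `.text`):

```python
import asyncio
import json

from fastmcp import Client

from fuzzforge_mcp.application import mcp


async def show_modules() -> None:
    async with Client(transport=mcp) as client:
        # The collection resource returns a JSON list of module summaries.
        listing = await client.read_resource("fuzzforge://modules/")
        modules = json.loads(listing[0].text)
        print(modules)

        # Individual modules are addressed through the URI template.
        if modules:
            detail = await client.read_resource(f"fuzzforge://modules/{modules[0]['identifier']}")
            print(json.loads(detail[0].text))


asyncio.run(show_modules())
```
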
84
fuzzforge-mcp/src/fuzzforge_mcp/resources/project.py
Normal file
@@ -0,0 +1,84 @@
"""Project resources for FuzzForge MCP."""

from __future__ import annotations

from pathlib import Path
from typing import TYPE_CHECKING, Any

from fastmcp import FastMCP
from fastmcp.exceptions import ResourceError

from fuzzforge_mcp.dependencies import get_project_path, get_runner

if TYPE_CHECKING:
    from fuzzforge_runner import Runner


mcp: FastMCP = FastMCP()


@mcp.resource("fuzzforge://project")
async def get_project() -> dict[str, Any]:
    """Get information about the current project.

    Returns the current project configuration including paths
    and available executions.

    :return: Project information dictionary.

    """
    runner: Runner = get_runner()
    project_path: Path = get_project_path()

    try:
        executions = runner.list_executions(project_path)
        assets_path = runner.storage.get_project_assets_path(project_path)

        return {
            "path": str(project_path),
            "name": project_path.name,
            "has_assets": assets_path is not None,
            "assets_path": str(assets_path) if assets_path else None,
            "execution_count": len(executions),
            "recent_executions": executions[:10],  # Last 10 executions
        }

    except Exception as exception:
        message: str = f"Failed to get project info: {exception}"
        raise ResourceError(message) from exception


@mcp.resource("fuzzforge://project/settings")
async def get_project_settings() -> dict[str, Any]:
    """Get current FuzzForge settings.

    Returns the active configuration for the MCP server including
    engine, storage, and project settings.

    :return: Settings dictionary.

    """
    from fuzzforge_mcp.dependencies import get_settings

    try:
        settings = get_settings()

        return {
            "engine": {
                "type": settings.engine.type,
                "socket": settings.engine.socket,
            },
            "storage": {
                "path": str(settings.storage.path),
            },
            "project": {
                "path": str(settings.project.path),
                "modules_path": str(settings.modules_path),
            },
            "debug": settings.debug,
        }

    except Exception as exception:
        message: str = f"Failed to get settings: {exception}"
        raise ResourceError(message) from exception

53
fuzzforge-mcp/src/fuzzforge_mcp/resources/workflows.py
Normal file
@@ -0,0 +1,53 @@
"""Workflow resources for FuzzForge MCP.

Note: In FuzzForge OSS, workflows are defined at runtime rather than
stored. This resource provides documentation about workflow capabilities.

"""

from __future__ import annotations

from typing import Any

from fastmcp import FastMCP


mcp: FastMCP = FastMCP()


@mcp.resource("fuzzforge://workflows/help")
async def get_workflow_help() -> dict[str, Any]:
    """Get help information about creating workflows.

    Workflows in FuzzForge OSS are defined at execution time rather
    than stored. Use the execute_workflow tool with step definitions.

    :return: Workflow documentation.

    """
    return {
        "description": "Workflows chain multiple modules together",
        "usage": "Use the execute_workflow tool with step definitions",
        "example": {
            "workflow_name": "security-audit",
            "steps": [
                {
                    "module": "compile-contracts",
                    "configuration": {"solc_version": "0.8.0"},
                },
                {
                    "module": "slither",
                    "configuration": {},
                },
                {
                    "module": "echidna",
                    "configuration": {"test_limit": 10000},
                },
            ],
        },
        "step_format": {
            "module": "Module identifier (required)",
            "configuration": "Module-specific configuration (optional)",
            "name": "Step name for logging (optional)",
        },
    }
16
fuzzforge-mcp/src/fuzzforge_mcp/tools/__init__.py
Normal file
@@ -0,0 +1,16 @@
"""FuzzForge MCP Tools."""

from fastmcp import FastMCP

from fuzzforge_mcp.tools import modules, projects, workflows

mcp: FastMCP = FastMCP()

mcp.mount(modules.mcp)
mcp.mount(projects.mcp)
mcp.mount(workflows.mcp)

__all__ = [
    "mcp",
]
347
fuzzforge-mcp/src/fuzzforge_mcp/tools/modules.py
Normal file
@@ -0,0 +1,347 @@
"""Module tools for FuzzForge MCP."""

from __future__ import annotations

import json
import uuid
from datetime import datetime, timezone
from pathlib import Path
from typing import TYPE_CHECKING, Any

from fastmcp import FastMCP
from fastmcp.exceptions import ToolError

from fuzzforge_mcp.dependencies import get_project_path, get_runner, get_settings

if TYPE_CHECKING:
    from fuzzforge_runner import Runner
    from fuzzforge_runner.orchestrator import StepResult


mcp: FastMCP = FastMCP()

# Track running background executions
_background_executions: dict[str, dict[str, Any]] = {}


@mcp.tool
async def list_modules() -> dict[str, Any]:
    """List all available FuzzForge modules.

    Returns information about modules that can be executed,
    including their identifiers and availability status.

    :return: Dictionary with list of available modules and their details.

    """
    try:
        runner: Runner = get_runner()
        settings = get_settings()

        # Use the engine abstraction to list images
        modules = runner.list_module_images(filter_prefix="localhost/")

        available_modules = [
            {
                "identifier": module.identifier,
                "image": f"localhost/{module.identifier}:{module.version or 'latest'}",
                "available": module.available,
            }
            for module in modules
        ]

        return {
            "modules": available_modules,
            "count": len(available_modules),
            "container_engine": settings.engine.type,
            "registry_url": settings.registry.url,
            "registry_tag": settings.registry.default_tag,
        }

    except Exception as exception:
        message: str = f"Failed to list modules: {exception}"
        raise ToolError(message) from exception


@mcp.tool
async def execute_module(
    module_identifier: str,
    configuration: dict[str, Any] | None = None,
    assets_path: str | None = None,
) -> dict[str, Any]:
    """Execute a FuzzForge module in an isolated container.

    This tool runs a module in a sandboxed environment.
    The module receives input assets and produces output results.

    :param module_identifier: The identifier of the module to execute.
    :param configuration: Optional configuration dict to pass to the module.
    :param assets_path: Optional path to input assets. If not provided, uses project assets.
    :return: Execution result including status and results path.

    """
    runner: Runner = get_runner()
    project_path: Path = get_project_path()

    try:
        result: StepResult = await runner.execute_module(
            module_identifier=module_identifier,
            project_path=project_path,
            configuration=configuration,
            assets_path=Path(assets_path) if assets_path else None,
        )

        return {
            "success": result.success,
            "execution_id": result.execution_id,
            "module": result.module_identifier,
            "results_path": str(result.results_path) if result.results_path else None,
            "started_at": result.started_at.isoformat(),
            "completed_at": result.completed_at.isoformat(),
            "error": result.error,
        }

    except Exception as exception:
        message: str = f"Module execution failed: {exception}"
        raise ToolError(message) from exception


@mcp.tool
async def start_continuous_module(
    module_identifier: str,
    configuration: dict[str, Any] | None = None,
    assets_path: str | None = None,
) -> dict[str, Any]:
    """Start a module in continuous/background mode.

    The module will run indefinitely until stopped with stop_continuous_module().
    Use get_continuous_status() to check progress and metrics.

    This is useful for long-running modules that should run until
    the user decides to stop them.

    :param module_identifier: The module to run.
    :param configuration: Optional configuration. Set max_duration to 0 for infinite.
    :param assets_path: Optional path to input assets.
    :return: Execution info including session_id for monitoring.

    """
    runner: Runner = get_runner()
    project_path: Path = get_project_path()
    session_id = str(uuid.uuid4())[:8]

    # Set infinite duration if not specified
    if configuration is None:
        configuration = {}
    if "max_duration" not in configuration:
        configuration["max_duration"] = 0  # 0 = infinite

    try:
        # Determine assets path
        if assets_path:
            actual_assets_path = Path(assets_path)
        else:
            storage = runner.storage
            actual_assets_path = storage.get_project_assets_path(project_path)

        # Use the new non-blocking executor method
        executor = runner._executor
        result = executor.start_module_continuous(
            module_identifier=module_identifier,
            assets_path=actual_assets_path,
            configuration=configuration,
        )

        # Store execution info for tracking
        _background_executions[session_id] = {
            "session_id": session_id,
            "module": module_identifier,
            "configuration": configuration,
            "started_at": datetime.now(timezone.utc).isoformat(),
            "status": "running",
            "container_id": result["container_id"],
            "input_dir": result["input_dir"],
        }

        return {
            "success": True,
            "session_id": session_id,
            "module": module_identifier,
            "container_id": result["container_id"],
            "status": "running",
            "message": f"Continuous module started. Use get_continuous_status('{session_id}') to monitor progress.",
        }

    except Exception as exception:
        message: str = f"Failed to start continuous module: {exception}"
        raise ToolError(message) from exception


def _get_continuous_status_impl(session_id: str) -> dict[str, Any]:
    """Internal helper to get continuous session status (non-tool version)."""
    if session_id not in _background_executions:
        raise ToolError(f"Unknown session: {session_id}. Use list_continuous_sessions() to see active sessions.")

    execution = _background_executions[session_id]
    container_id = execution.get("container_id")

    # Initialize metrics
    metrics: dict[str, Any] = {
        "total_executions": 0,
        "total_crashes": 0,
        "exec_per_sec": 0,
        "coverage": 0,
        "current_target": "",
        "latest_events": [],
    }

    # Read stream.jsonl from inside the running container
    if container_id:
        try:
            runner: Runner = get_runner()
            executor = runner._executor

            # Check container status first
            container_status = executor.get_module_status(container_id)
            if container_status != "running":
                execution["status"] = "stopped" if container_status == "exited" else container_status

            # Read stream.jsonl from container
            stream_content = executor.read_module_output(container_id, "/data/output/stream.jsonl")

            if stream_content:
                lines = stream_content.strip().split("\n")
                # Get last 20 events
                recent_lines = lines[-20:] if len(lines) > 20 else lines
                crash_count = 0

                for line in recent_lines:
                    try:
                        event = json.loads(line)
                        metrics["latest_events"].append(event)

                        # Extract metrics from events
                        if event.get("event") == "metrics":
                            metrics["total_executions"] = event.get("executions", 0)
                            metrics["current_target"] = event.get("target", "")
                            metrics["exec_per_sec"] = event.get("exec_per_sec", 0)
                            metrics["coverage"] = event.get("coverage", 0)

                        if event.get("event") == "crash_detected":
                            crash_count += 1

                    except json.JSONDecodeError:
                        continue

                metrics["total_crashes"] = crash_count

        except Exception as e:
            metrics["error"] = str(e)

    # Calculate elapsed time
    started_at = execution.get("started_at", "")
    elapsed_seconds = 0
    if started_at:
        try:
            start_time = datetime.fromisoformat(started_at)
            elapsed_seconds = int((datetime.now(timezone.utc) - start_time).total_seconds())
        except Exception:
            pass

    return {
        "session_id": session_id,
        "module": execution.get("module"),
        "status": execution.get("status"),
        "container_id": container_id,
        "started_at": started_at,
        "elapsed_seconds": elapsed_seconds,
        "elapsed_human": f"{elapsed_seconds // 60}m {elapsed_seconds % 60}s",
        "metrics": metrics,
    }


@mcp.tool
async def get_continuous_status(session_id: str) -> dict[str, Any]:
    """Get the current status and metrics of a running continuous session.

    Call this periodically (e.g., every 30 seconds) to get live updates
    on progress and metrics.

    :param session_id: The session ID returned by start_continuous_module().
    :return: Current status, metrics, and any events found.

    """
    return _get_continuous_status_impl(session_id)


@mcp.tool
async def stop_continuous_module(session_id: str) -> dict[str, Any]:
    """Stop a running continuous session.

    This will gracefully stop the module and collect any results.

    :param session_id: The session ID of the session to stop.
    :return: Final status and summary of the session.

    """
    if session_id not in _background_executions:
        raise ToolError(f"Unknown session: {session_id}")

    execution = _background_executions[session_id]
    container_id = execution.get("container_id")
    input_dir = execution.get("input_dir")

    try:
        # Get final metrics before stopping (use helper, not the tool)
        final_metrics = _get_continuous_status_impl(session_id)

        # Stop the container and collect results
        results_path = None
        if container_id:
            runner: Runner = get_runner()
            executor = runner._executor

            try:
                results_path = executor.stop_module_continuous(container_id, input_dir)
            except Exception:
                # Container may have already stopped
                pass

        execution["status"] = "stopped"
        execution["stopped_at"] = datetime.now(timezone.utc).isoformat()

        return {
            "success": True,
            "session_id": session_id,
            "message": "Continuous session stopped",
            "results_path": str(results_path) if results_path else None,
            "final_metrics": final_metrics.get("metrics", {}),
            "elapsed": final_metrics.get("elapsed_human", ""),
        }

    except Exception as exception:
        message: str = f"Failed to stop continuous module: {exception}"
        raise ToolError(message) from exception


@mcp.tool
async def list_continuous_sessions() -> dict[str, Any]:
    """List all active and recent continuous sessions.

    :return: List of continuous sessions with their status.

    """
    sessions = []
    for session_id, execution in _background_executions.items():
        sessions.append({
            "session_id": session_id,
            "module": execution.get("module"),
            "status": execution.get("status"),
            "started_at": execution.get("started_at"),
        })

    return {
        "sessions": sessions,
        "count": len(sessions),
    }

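The continuous-mode tools above form a start → poll → stop lifecycle. A hedged sketch of driving it from a client, assuming the in-process FastMCP client and that the structured tool result (`result.data` in recent FastMCP releases) carries the dictionaries returned above:

```python
import asyncio

from fastmcp import Client

from fuzzforge_mcp.application import mcp


async def run_continuous(module_identifier: str, polls: int = 10) -> None:
    """Start a continuous module, poll its metrics, then stop it."""
    async with Client(transport=mcp) as client:
        started = await client.call_tool(
            "start_continuous_module",
            {"module_identifier": module_identifier, "configuration": {"max_duration": 0}},
        )
        # Assumption: .data holds the tool's return dictionary (adjust to your client version).
        session_id = started.data["session_id"]

        # Poll roughly every 30 seconds, as suggested by get_continuous_status().
        for _ in range(polls):
            await asyncio.sleep(30)
            status = await client.call_tool("get_continuous_status", {"session_id": session_id})
            print(status.data["elapsed_human"], status.data["metrics"])

        stopped = await client.call_tool("stop_continuous_module", {"session_id": session_id})
        print(stopped.data)


asyncio.run(run_continuous("cargo-fuzzer"))
```
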
145
fuzzforge-mcp/src/fuzzforge_mcp/tools/projects.py
Normal file
@@ -0,0 +1,145 @@
"""Project management tools for FuzzForge MCP."""

from __future__ import annotations

from pathlib import Path
from typing import TYPE_CHECKING, Any

from fastmcp import FastMCP
from fastmcp.exceptions import ToolError

from fuzzforge_mcp.dependencies import get_project_path, get_runner

if TYPE_CHECKING:
    from fuzzforge_runner import Runner


mcp: FastMCP = FastMCP()


@mcp.tool
async def init_project(project_path: str | None = None) -> dict[str, Any]:
    """Initialize a new FuzzForge project.

    Creates the necessary storage directories for a project. This should
    be called before executing modules or workflows.

    :param project_path: Path to the project directory. If not provided, uses current directory.
    :return: Project initialization result.

    """
    runner: Runner = get_runner()

    try:
        path = Path(project_path) if project_path else get_project_path()
        storage_path = runner.init_project(path)

        return {
            "success": True,
            "project_path": str(path),
            "storage_path": str(storage_path),
            "message": f"Project initialized at {path}",
        }

    except Exception as exception:
        message: str = f"Failed to initialize project: {exception}"
        raise ToolError(message) from exception


@mcp.tool
async def set_project_assets(assets_path: str) -> dict[str, Any]:
    """Set the initial assets for a project.

    Assets are input files that will be provided to modules during execution.
    This could be source code, contracts, binaries, etc.

    :param assets_path: Path to assets file (archive) or directory.
    :return: Result including stored assets path.

    """
    runner: Runner = get_runner()
    project_path: Path = get_project_path()

    try:
        stored_path = runner.set_project_assets(
            project_path=project_path,
            assets_path=Path(assets_path),
        )

        return {
            "success": True,
            "project_path": str(project_path),
            "assets_path": str(stored_path),
            "message": f"Assets stored from {assets_path}",
        }

    except Exception as exception:
        message: str = f"Failed to set project assets: {exception}"
        raise ToolError(message) from exception


@mcp.tool
async def list_executions() -> dict[str, Any]:
    """List all executions for the current project.

    Returns a list of execution IDs that can be used to retrieve results.

    :return: List of execution IDs.

    """
    runner: Runner = get_runner()
    project_path: Path = get_project_path()

    try:
        executions = runner.list_executions(project_path)

        return {
            "success": True,
            "project_path": str(project_path),
            "executions": executions,
            "count": len(executions),
        }

    except Exception as exception:
        message: str = f"Failed to list executions: {exception}"
        raise ToolError(message) from exception


@mcp.tool
async def get_execution_results(execution_id: str, extract_to: str | None = None) -> dict[str, Any]:
    """Get results for a specific execution.

    :param execution_id: The execution ID to retrieve results for.
    :param extract_to: Optional directory to extract results to.
    :return: Result including path to results archive.

    """
    runner: Runner = get_runner()
    project_path: Path = get_project_path()

    try:
        results_path = runner.get_execution_results(project_path, execution_id)

        if results_path is None:
            return {
                "success": False,
                "execution_id": execution_id,
                "error": "Execution results not found",
            }

        result = {
            "success": True,
            "execution_id": execution_id,
            "results_path": str(results_path),
        }

        # Extract if requested
        if extract_to:
            extracted_path = runner.extract_results(results_path, Path(extract_to))
            result["extracted_path"] = str(extracted_path)

        return result

    except Exception as exception:
        message: str = f"Failed to get execution results: {exception}"
        raise ToolError(message) from exception
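A brief sketch of pulling and unpacking results for a finished execution with the same in-process client; the execution id and target directory are placeholders, and `extract_to` is optional:

```python
import asyncio

from fastmcp import Client

from fuzzforge_mcp.application import mcp


async def fetch_results(execution_id: str) -> None:
    async with Client(transport=mcp) as client:
        result = await client.call_tool(
            "get_execution_results",
            # When extract_to is given, the results archive is unpacked there as well.
            {"execution_id": execution_id, "extract_to": "/tmp/fuzzforge-results"},
        )
        print(result)


asyncio.run(fetch_results("<execution-id>"))
```
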
92
fuzzforge-mcp/src/fuzzforge_mcp/tools/workflows.py
Normal file
@@ -0,0 +1,92 @@
"""Workflow tools for FuzzForge MCP."""

from __future__ import annotations

from pathlib import Path
from typing import TYPE_CHECKING, Any

from fastmcp import FastMCP
from fastmcp.exceptions import ToolError
from fuzzforge_runner.orchestrator import WorkflowDefinition, WorkflowStep

from fuzzforge_mcp.dependencies import get_project_path, get_runner

if TYPE_CHECKING:
    from fuzzforge_runner import Runner
    from fuzzforge_runner.orchestrator import WorkflowResult


mcp: FastMCP = FastMCP()


@mcp.tool
async def execute_workflow(
    workflow_name: str,
    steps: list[dict[str, Any]],
    initial_assets_path: str | None = None,
) -> dict[str, Any]:
    """Execute a workflow consisting of multiple module steps.

    A workflow chains multiple modules together, passing the output of each
    module as input to the next. This enables complex pipelines.

    :param workflow_name: Name for this workflow execution.
    :param steps: List of step definitions, each with "module" and optional "configuration".
    :param initial_assets_path: Optional path to initial assets for the first step.
    :return: Workflow execution result including status of each step.

    Example steps format:
    [
        {"module": "module-a", "configuration": {"key": "value"}},
        {"module": "module-b", "configuration": {}},
        {"module": "module-c"}
    ]

    """
    runner: Runner = get_runner()
    project_path: Path = get_project_path()

    try:
        # Convert step dicts to WorkflowStep objects
        workflow_steps = [
            WorkflowStep(
                module_identifier=step["module"],
                configuration=step.get("configuration"),
                name=step.get("name", f"step-{i}"),
            )
            for i, step in enumerate(steps)
        ]

        workflow = WorkflowDefinition(
            name=workflow_name,
            steps=workflow_steps,
        )

        result: WorkflowResult = await runner.execute_workflow(
            workflow=workflow,
            project_path=project_path,
            initial_assets_path=Path(initial_assets_path) if initial_assets_path else None,
        )

        return {
            "success": result.success,
            "execution_id": result.execution_id,
            "workflow_name": result.name,
            "final_results_path": str(result.final_results_path) if result.final_results_path else None,
            "steps": [
                {
                    "step_index": step.step_index,
                    "module": step.module_identifier,
                    "success": step.success,
                    "execution_id": step.execution_id,
                    "results_path": str(step.results_path) if step.results_path else None,
                    "error": step.error,
                }
                for step in result.steps
            ],
        }

    except Exception as exception:
        message: str = f"Workflow execution failed: {exception}"
        raise ToolError(message) from exception

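The steps format documented in the docstring maps directly onto a tool call. A sketch using the example pipeline from the `fuzzforge://workflows/help` resource (module names taken from that example), assuming the in-process FastMCP client:

```python
import asyncio

from fastmcp import Client

from fuzzforge_mcp.application import mcp


async def run_security_audit() -> None:
    async with Client(transport=mcp) as client:
        result = await client.call_tool(
            "execute_workflow",
            {
                "workflow_name": "security-audit",
                "steps": [
                    {"module": "compile-contracts", "configuration": {"solc_version": "0.8.0"}},
                    {"module": "slither"},
                    {"module": "echidna", "configuration": {"test_limit": 10000}},
                ],
            },
        )
        print(result)


asyncio.run(run_security_audit())
```
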
1
fuzzforge-mcp/tests/__init__.py
Normal file
@@ -0,0 +1 @@
"""Tests for the FuzzForge MCP server."""
34
fuzzforge-mcp/tests/conftest.py
Normal file
@@ -0,0 +1,34 @@
"""Shared pytest fixtures for the FuzzForge MCP tests."""

from typing import TYPE_CHECKING

import pytest
from fastmcp import Client

from fuzzforge_mcp.application import mcp

if TYPE_CHECKING:
    from collections.abc import AsyncGenerator, Callable

    from fastmcp.client import FastMCPTransport
    from fuzzforge_types import FuzzForgeProjectIdentifier

pytest_plugins = ["fuzzforge_tests.fixtures"]


@pytest.fixture(autouse=True)
def environment(
    monkeypatch: pytest.MonkeyPatch,
    random_project_identifier: Callable[[], FuzzForgeProjectIdentifier],
) -> None:
    """Configure FuzzForge environment variables for each test."""
    monkeypatch.setenv("FUZZFORGE_PROJECT_IDENTIFIER", str(random_project_identifier()))
    monkeypatch.setenv("FUZZFORGE_API_HOST", "127.0.0.1")
    monkeypatch.setenv("FUZZFORGE_API_PORT", "8000")


@pytest.fixture
async def mcp_client() -> AsyncGenerator[Client[FastMCPTransport]]:
    """Provide an in-process FastMCP client connected to the server."""
    async with Client(transport=mcp) as client:
        yield client
189
fuzzforge-mcp/tests/test_resources.py
Normal file
@@ -0,0 +1,189 @@
"""Tests for the FuzzForge MCP resources."""

from datetime import UTC, datetime
from http import HTTPMethod
from json import loads
from typing import TYPE_CHECKING, cast

from fuzzforge_sdk.api.responses.executions import (
    GetFuzzForgeModuleExecutionsResponse,
    GetFuzzForgeWorkflowExecutionsResponse,
    ModuleExecutionSummary,
    WorkflowExecutionSummary,
)
from fuzzforge_sdk.api.responses.modules import (
    GetFuzzForgeModuleDefinitionResponse,
    GetFuzzForgeModuleDefinitionsResponse,
    ModuleDefinitionSummary,
)
from fuzzforge_types import FuzzForgeExecutionStatus, FuzzForgeModule

if TYPE_CHECKING:
    from collections.abc import Callable

    from fastmcp import Client
    from fastmcp.client import FastMCPTransport
    from fuzzforge_types import (
        FuzzForgeExecutionIdentifier,
        FuzzForgeModuleIdentifier,
        FuzzForgeProjectIdentifier,
        FuzzForgeWorkflowIdentifier,
    )
    from mcp.types import TextResourceContents
    from pytest_httpx import HTTPXMock


async def test_get_fuzzforge_module_when_server_returns_valid_data(
    httpx_mock: HTTPXMock,
    mcp_client: Client[FastMCPTransport],
    random_module_description: Callable[[], str],
    random_module_identifier: Callable[[], FuzzForgeModuleIdentifier],
    random_module_name: Callable[[], str],
) -> None:
    """Check that the module resource returns the data served by the API."""
    module = GetFuzzForgeModuleDefinitionResponse(
        module_description=random_module_description(),
        module_identifier=random_module_identifier(),
        module_name=random_module_name(),
        created_at=datetime.now(tz=UTC),
        updated_at=datetime.now(tz=UTC),
    )
    httpx_mock.add_response(
        json=module.model_dump(mode="json"),
        method=HTTPMethod.GET,
        url=f"http://127.0.0.1:8000/modules/{module.module_identifier}",
    )
    response = FuzzForgeModule.model_validate_json(
        json_data=cast(
            "TextResourceContents",
            (await mcp_client.read_resource(f"fuzzforge://modules/{module.module_identifier}"))[0],
        ).text,
    )
    assert response.module_description == module.module_description
    assert response.module_identifier == module.module_identifier
    assert response.module_name == module.module_name


async def test_get_fuzzforge_modules_when_server_returns_valid_data(
    httpx_mock: HTTPXMock,
    mcp_client: Client[FastMCPTransport],
    random_module_description: Callable[[], str],
    random_module_identifier: Callable[[], FuzzForgeModuleIdentifier],
    random_module_name: Callable[[], str],
) -> None:
    """Check that the module list resource returns the data served by the API."""
    modules = [
        ModuleDefinitionSummary(
            module_description=random_module_description(),
            module_identifier=random_module_identifier(),
            module_name=random_module_name(),
            created_at=datetime.now(tz=UTC),
            updated_at=datetime.now(tz=UTC),
        ),
        ModuleDefinitionSummary(
            module_description=random_module_description(),
            module_identifier=random_module_identifier(),
            module_name=random_module_name(),
            created_at=datetime.now(tz=UTC),
            updated_at=datetime.now(tz=UTC),
        ),
    ]
    httpx_mock.add_response(
        json=GetFuzzForgeModuleDefinitionsResponse(
            modules=modules,
            total=2,
            limit=100,
            offset=0,
        ).model_dump(mode="json"),
        method=HTTPMethod.GET,
        url="http://127.0.0.1:8000/modules",
    )
    response = [
        ModuleDefinitionSummary.model_validate(entry)
        for entry in loads(
            cast("TextResourceContents", (await mcp_client.read_resource("fuzzforge://modules/"))[0]).text
        )
    ]
    assert len(response) == len(modules)
    for expected, module in zip(modules, response, strict=True):
        assert module.module_description == expected.module_description
        assert module.module_identifier == expected.module_identifier
        assert module.module_name == expected.module_name


async def test_get_executions_when_server_returns_valid_data(
    httpx_mock: HTTPXMock,
    mcp_client: Client[FastMCPTransport],
    random_module_identifier: Callable[[], FuzzForgeModuleIdentifier],
    random_module_execution_identifier: Callable[[], FuzzForgeExecutionIdentifier],
    random_workflow_identifier: Callable[[], FuzzForgeWorkflowIdentifier],
    random_workflow_execution_identifier: Callable[[], FuzzForgeExecutionIdentifier],
) -> None:
    """Check that the executions resource merges module and workflow executions."""
    project_identifier: FuzzForgeProjectIdentifier = mcp_client.transport.server._lifespan_result.PROJECT_IDENTIFIER  # type: ignore[union-attr] # noqa: SLF001
    modules = [
        ModuleExecutionSummary(
            execution_identifier=random_module_execution_identifier(),
            module_identifier=random_module_identifier(),
            execution_status=FuzzForgeExecutionStatus.PENDING,
            error=None,
            created_at=datetime.now(tz=UTC),
            updated_at=datetime.now(tz=UTC),
        ),
        ModuleExecutionSummary(
            execution_identifier=random_module_execution_identifier(),
            module_identifier=random_module_identifier(),
            execution_status=FuzzForgeExecutionStatus.PENDING,
            error=None,
            created_at=datetime.now(tz=UTC),
            updated_at=datetime.now(tz=UTC),
        ),
    ]
    workflows = [
        WorkflowExecutionSummary(
            execution_identifier=random_workflow_execution_identifier(),
            workflow_identifier=random_workflow_identifier(),
            workflow_status=FuzzForgeExecutionStatus.PENDING,
            error=None,
            created_at=datetime.now(tz=UTC),
            updated_at=datetime.now(tz=UTC),
        ),
        WorkflowExecutionSummary(
            execution_identifier=random_workflow_execution_identifier(),
            workflow_identifier=random_workflow_identifier(),
            workflow_status=FuzzForgeExecutionStatus.PENDING,
            error=None,
            created_at=datetime.now(tz=UTC),
            updated_at=datetime.now(tz=UTC),
        ),
    ]
    httpx_mock.add_response(
        json=GetFuzzForgeModuleExecutionsResponse(
            executions=modules,
            project_identifier=project_identifier,
            total=2,
        ).model_dump(mode="json"),
        method=HTTPMethod.GET,
        url=f"http://127.0.0.1:8000/projects/{project_identifier}/modules",
    )
    httpx_mock.add_response(
        json=GetFuzzForgeWorkflowExecutionsResponse(
            workflows=workflows,
            project_identifier=project_identifier,
            total=2,
        ).model_dump(mode="json"),
        method=HTTPMethod.GET,
        url=f"http://127.0.0.1:8000/projects/{project_identifier}/workflows",
    )
    response = loads(cast("TextResourceContents", (await mcp_client.read_resource("fuzzforge://executions/"))[0]).text)
    assert len(response) == len(modules) + len(workflows)
    for expected_module, module in zip(
        modules, [ModuleExecutionSummary.model_validate(entry) for entry in response[: len(modules)]], strict=True
    ):
        assert module.execution_identifier == expected_module.execution_identifier
        assert module.module_identifier == expected_module.module_identifier
    for expected_workflow, workflow in zip(
        workflows, [WorkflowExecutionSummary.model_validate(entry) for entry in response[len(workflows) :]], strict=True
    ):
        assert workflow.execution_identifier == expected_workflow.execution_identifier
        assert workflow.workflow_identifier == expected_workflow.workflow_identifier
24
fuzzforge-modules/cargo-fuzzer/Dockerfile
Normal file
@@ -0,0 +1,24 @@
FROM localhost/fuzzforge-modules-sdk:0.1.0

# Install system dependencies for Rust compilation
RUN apt-get update && apt-get install -y \
    curl \
    build-essential \
    pkg-config \
    libssl-dev \
    && rm -rf /var/lib/apt/lists/*

# Install Rust toolchain with nightly (required for cargo-fuzz)
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain nightly
ENV PATH="/root/.cargo/bin:${PATH}"

# Install cargo-fuzz
RUN cargo install cargo-fuzz --locked || true

COPY ./src /app/src
COPY ./pyproject.toml /app/pyproject.toml

# Remove workspace reference since we're using wheels
RUN sed -i '/\[tool\.uv\.sources\]/,/^$/d' /app/pyproject.toml

RUN uv sync --find-links /wheels
45
fuzzforge-modules/cargo-fuzzer/Makefile
Normal file
@@ -0,0 +1,45 @@
PACKAGE=$(word 1, $(shell uv version))
VERSION=$(word 2, $(shell uv version))

PODMAN?=/usr/bin/podman

SOURCES=./src
TESTS=./tests

.PHONY: bandit build clean format mypy pytest ruff version

bandit:
	uv run bandit --recursive $(SOURCES)

build:
	$(PODMAN) build --file ./Dockerfile --no-cache --tag $(PACKAGE):$(VERSION)

save: build
	$(PODMAN) save --format oci-archive --output /tmp/$(PACKAGE)-$(VERSION).oci $(PACKAGE):$(VERSION)

clean:
	@find . -type d \( \
		-name '*.egg-info' \
		-o -name '.mypy_cache' \
		-o -name '.pytest_cache' \
		-o -name '.ruff_cache' \
		-o -name '__pycache__' \
	\) -printf 'removing directory %p\n' -exec rm -rf {} +

cloc:
	cloc $(SOURCES)

format:
	uv run ruff format $(SOURCES) $(TESTS)

mypy:
	uv run mypy $(SOURCES)

pytest:
	uv run pytest $(TESTS)

ruff:
	uv run ruff check --fix $(SOURCES) $(TESTS)

version:
	@echo '$(PACKAGE)@$(VERSION)'
46
fuzzforge-modules/cargo-fuzzer/README.md
Normal file
@@ -0,0 +1,46 @@
# FuzzForge Modules - FIXME

## Installation

### Python

```shell
# install the package (users)
uv sync
# install the package and all development dependencies (developers)
uv sync --all-extras
```

### Container

```shell
# build the image
make build
# run the container
mkdir -p "${PWD}/data" "${PWD}/data/input" "${PWD}/data/output"
echo '{"settings":{},"resources":[]}' > "${PWD}/data/input/input.json"
podman run --rm \
    --volume "${PWD}/data:/data" \
    '<name>:<version>' 'uv run module'
```

## Usage

```shell
uv run module
```

## Development tools

```shell
# run ruff (formatter)
make format
# run mypy (type checker)
make mypy
# run tests (pytest)
make pytest
# run ruff (linter)
make ruff
```

See the file `Makefile` at the root of this directory for more tools.
6
fuzzforge-modules/cargo-fuzzer/mypy.ini
Normal file
@@ -0,0 +1,6 @@
[mypy]
plugins = pydantic.mypy
strict = True
warn_unused_ignores = True
warn_redundant_casts = True
warn_return_any = True
31
fuzzforge-modules/cargo-fuzzer/pyproject.toml
Normal file
@@ -0,0 +1,31 @@
[project]
name = "cargo-fuzzer"
version = "0.1.0"
description = "FuzzForge module that runs cargo-fuzz with libFuzzer on Rust targets"
authors = []
readme = "README.md"
requires-python = ">=3.14"
dependencies = [
    "fuzzforge-modules-sdk==0.0.1",
    "pydantic==2.12.4",
    "structlog==25.5.0",
]

[project.optional-dependencies]
lints = [
    "bandit==1.8.6",
    "mypy==1.18.2",
    "ruff==0.14.4",
]
tests = [
    "pytest==9.0.2",
]

[project.scripts]
module = "module.__main__:main"

[tool.uv.sources]
fuzzforge-modules-sdk = { workspace = true }

[tool.uv]
package = true
19
fuzzforge-modules/cargo-fuzzer/ruff.toml
Normal file
@@ -0,0 +1,19 @@
line-length = 120

[lint]
select = [ "ALL" ]
ignore = [
    "COM812", # conflicts with the formatter
    "D100", # ignoring missing docstrings in public modules
    "D104", # ignoring missing docstrings in public packages
    "D203", # conflicts with 'D211'
    "D213", # conflicts with 'D212'
    "TD002", # ignoring missing author in 'TODO' statements
    "TD003", # ignoring missing issue link in 'TODO' statements
]

[lint.per-file-ignores]
"tests/*" = [
    "PLR2004", # allowing comparisons using unnamed numerical constants in tests
    "S101", # allowing 'assert' statements in tests
]
19
fuzzforge-modules/cargo-fuzzer/src/module/__main__.py
Normal file
@@ -0,0 +1,19 @@
from typing import TYPE_CHECKING

from fuzzforge_modules_sdk.api import logs

from module.mod import Module

if TYPE_CHECKING:
    from fuzzforge_modules_sdk.api.modules.base import FuzzForgeModule


def main() -> None:
    """Configure logging and run the cargo-fuzzer module."""
    logs.configure()
    module: FuzzForgeModule = Module()
    module.main()


if __name__ == "__main__":
    main()
Some files were not shown because too many files have changed in this diff.