mirror of
https://github.com/FuzzingLabs/fuzzforge_ai.git
synced 2026-03-01 05:13:19 +00:00
Compare commits
6 Commits
fuzzforge-
...
fix/fuzzfo
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9e7c56ec09 | ||
|
|
c6e9557541 | ||
|
|
829e8b994b | ||
|
|
be55bd3426 | ||
|
|
9ea4d66586 | ||
|
|
ec16b37410 |
@@ -1,6 +1,6 @@
|
||||
# Contributing to FuzzForge OSS
|
||||
# Contributing to FuzzForge AI
|
||||
|
||||
Thank you for your interest in contributing to FuzzForge OSS! We welcome contributions from the community and are excited to collaborate with you.
|
||||
Thank you for your interest in contributing to FuzzForge AI! We welcome contributions from the community and are excited to collaborate with you.
|
||||
|
||||
**Our Vision**: FuzzForge aims to be a **universal platform for security research** across all cybersecurity domains. Through our modular architecture, any security tool—from fuzzing engines to cloud scanners, from mobile app analyzers to IoT security tools—can be integrated as a containerized module and controlled via AI agents.
|
||||
|
||||
@@ -360,8 +360,8 @@ Beyond modules, you can contribute to FuzzForge's core components.
|
||||
|
||||
1. **Clone and Install**
|
||||
```bash
|
||||
git clone https://github.com/FuzzingLabs/fuzzforge-oss.git
|
||||
cd fuzzforge-oss
|
||||
git clone https://github.com/FuzzingLabs/fuzzforge-ai.git
|
||||
cd fuzzforge-ai
|
||||
uv sync --all-extras
|
||||
```
|
||||
|
||||
@@ -538,7 +538,7 @@ Before submitting a new module:
|
||||
|
||||
## License
|
||||
|
||||
By contributing to FuzzForge OSS, you agree that your contributions will be licensed under the same license as the project (see [LICENSE](LICENSE)).
|
||||
By contributing to FuzzForge AI, you agree that your contributions will be licensed under the same license as the project (see [LICENSE](LICENSE)).
|
||||
|
||||
For module contributions:
|
||||
- Modules you create remain under the project license
|
||||
|
||||
18
Makefile
18
Makefile
@@ -4,7 +4,7 @@ SHELL := /bin/bash
|
||||
|
||||
# Default target
|
||||
help:
|
||||
@echo "FuzzForge OSS Development Commands"
|
||||
@echo "FuzzForge AI Development Commands"
|
||||
@echo ""
|
||||
@echo " make install - Install all dependencies"
|
||||
@echo " make sync - Sync shared packages from upstream"
|
||||
@@ -30,7 +30,7 @@ sync:
|
||||
|
||||
# Format all packages
|
||||
format:
|
||||
@for pkg in packages/fuzzforge-*/; do \
|
||||
@for pkg in fuzzforge-*/; do \
|
||||
if [ -f "$$pkg/pyproject.toml" ]; then \
|
||||
echo "Formatting $$pkg..."; \
|
||||
cd "$$pkg" && uv run ruff format . && cd -; \
|
||||
@@ -39,7 +39,7 @@ format:
|
||||
|
||||
# Lint all packages
|
||||
lint:
|
||||
@for pkg in packages/fuzzforge-*/; do \
|
||||
@for pkg in fuzzforge-*/; do \
|
||||
if [ -f "$$pkg/pyproject.toml" ]; then \
|
||||
echo "Linting $$pkg..."; \
|
||||
cd "$$pkg" && uv run ruff check . && cd -; \
|
||||
@@ -48,7 +48,7 @@ lint:
|
||||
|
||||
# Type check all packages
|
||||
typecheck:
|
||||
@for pkg in packages/fuzzforge-*/; do \
|
||||
@for pkg in fuzzforge-*/; do \
|
||||
if [ -f "$$pkg/pyproject.toml" ] && [ -f "$$pkg/mypy.ini" ]; then \
|
||||
echo "Type checking $$pkg..."; \
|
||||
cd "$$pkg" && uv run mypy . && cd -; \
|
||||
@@ -57,7 +57,7 @@ typecheck:
|
||||
|
||||
# Run all tests
|
||||
test:
|
||||
@for pkg in packages/fuzzforge-*/; do \
|
||||
@for pkg in fuzzforge-*/; do \
|
||||
if [ -f "$$pkg/pytest.ini" ]; then \
|
||||
echo "Testing $$pkg..."; \
|
||||
cd "$$pkg" && uv run pytest && cd -; \
|
||||
@@ -80,12 +80,18 @@ build-modules:
|
||||
echo "Using Docker"; \
|
||||
CONTAINER_CMD="docker"; \
|
||||
fi; \
|
||||
sdk_version=$$(grep 'version' "fuzzforge-modules/fuzzforge-modules-sdk/pyproject.toml" 2>/dev/null | head -1 | sed 's/.*"\(.*\)".*/\1/' || echo "0.1.0"); \
|
||||
echo "Building fuzzforge-modules-sdk:$$sdk_version (base image)..."; \
|
||||
$$CONTAINER_CMD build \
|
||||
-t "fuzzforge-modules-sdk:$$sdk_version" \
|
||||
-t "localhost/fuzzforge-modules-sdk:$$sdk_version" \
|
||||
"fuzzforge-modules/fuzzforge-modules-sdk/" || exit 1; \
|
||||
for module in fuzzforge-modules/*/; do \
|
||||
if [ -f "$$module/Dockerfile" ] && \
|
||||
[ "$$module" != "fuzzforge-modules/fuzzforge-modules-sdk/" ] && \
|
||||
[ "$$module" != "fuzzforge-modules/fuzzforge-module-template/" ]; then \
|
||||
name=$$(basename $$module); \
|
||||
version=$$(grep 'version' "$$module/pyproject.toml" 2>/dev/null | head -1 | sed 's/.*"\(.*\\)".*/\\1/' || echo "0.1.0"); \
|
||||
version=$$(grep 'version' "$$module/pyproject.toml" 2>/dev/null | head -1 | sed 's/.*"\(.*\)".*/\1/' || echo "0.1.0"); \
|
||||
echo "Building $$name:$$version..."; \
|
||||
$$CONTAINER_CMD build -t "fuzzforge-$$name:$$version" "$$module" || exit 1; \
|
||||
fi \
|
||||
|
||||
16
README.md
16
README.md
@@ -1,4 +1,4 @@
|
||||
<h1 align="center"> FuzzForge OSS</h1>
|
||||
<h1 align="center"> FuzzForge AI</h1>
|
||||
<h3 align="center">AI-Powered Security Research Orchestration via MCP</h3>
|
||||
|
||||
<p align="center">
|
||||
@@ -26,13 +26,13 @@
|
||||
|
||||
---
|
||||
|
||||
> 🚧 **FuzzForge OSS is under active development.** Expect breaking changes and new features!
|
||||
> 🚧 **FuzzForge AI is under active development.** Expect breaking changes and new features!
|
||||
|
||||
---
|
||||
|
||||
## 🚀 Overview
|
||||
|
||||
**FuzzForge OSS** is an open-source runtime that enables AI agents (GitHub Copilot, Claude, etc.) to orchestrate security research workflows through the **Model Context Protocol (MCP)**.
|
||||
**FuzzForge AI** is an open-source runtime that enables AI agents (GitHub Copilot, Claude, etc.) to orchestrate security research workflows through the **Model Context Protocol (MCP)**.
|
||||
|
||||
### The Core: Modules
|
||||
|
||||
@@ -171,7 +171,7 @@ FuzzForge modules are containerized security tools that AI agents can orchestrat
|
||||
|
||||
### Module Ecosystem
|
||||
|
||||
| | FuzzForge OSS | FuzzForge Enterprise Modules |
|
||||
| | FuzzForge AI | FuzzForge Enterprise Modules |
|
||||
|---|---|---|
|
||||
| **What** | Runtime & MCP server | Security research modules |
|
||||
| **License** | Apache 2.0 | BSL 1.1 (Business Source License) |
|
||||
@@ -259,6 +259,14 @@ fuzzforge_ai/
|
||||
|
||||
---
|
||||
|
||||
## 🗺️ What's Next
|
||||
|
||||
**[MCP Security Hub](https://github.com/FuzzingLabs/mcp-security-hub) integration** — Bridge 175+ offensive security tools (Nmap, Nuclei, Ghidra, and more) into FuzzForge workflows, all orchestrated by AI agents.
|
||||
|
||||
See [ROADMAP.md](ROADMAP.md) for the full roadmap.
|
||||
|
||||
---
|
||||
|
||||
## 🤝 Contributing
|
||||
|
||||
We welcome contributions from the community!
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# FuzzForge OSS Roadmap
|
||||
# FuzzForge AI Roadmap
|
||||
|
||||
This document outlines the planned features and development direction for FuzzForge OSS.
|
||||
This document outlines the planned features and development direction for FuzzForge AI.
|
||||
|
||||
---
|
||||
|
||||
|
||||
28
USAGE.md
28
USAGE.md
@@ -1,6 +1,6 @@
|
||||
# FuzzForge OSS Usage Guide
|
||||
# FuzzForge AI Usage Guide
|
||||
|
||||
This guide covers everything you need to know to get started with FuzzForge OSS - from installation to running your first security research workflow with AI.
|
||||
This guide covers everything you need to know to get started with FuzzForge AI - from installation to running your first security research workflow with AI.
|
||||
|
||||
> **FuzzForge is designed to be used with AI agents** (GitHub Copilot, Claude, etc.) via MCP.
|
||||
> The CLI is available for advanced users but the primary experience is through natural language interaction with your AI assistant.
|
||||
@@ -31,8 +31,8 @@ This guide covers everything you need to know to get started with FuzzForge OSS
|
||||
|
||||
```bash
|
||||
# 1. Clone and install
|
||||
git clone https://github.com/FuzzingLabs/fuzzforge-oss.git
|
||||
cd fuzzforge-oss
|
||||
git clone https://github.com/FuzzingLabs/fuzzforge-ai.git
|
||||
cd fuzzforge-ai
|
||||
uv sync
|
||||
|
||||
# 2. Build the module images (one-time setup)
|
||||
@@ -57,9 +57,9 @@ uv run fuzzforge mcp install claude-code # For Claude Code CLI
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Before installing FuzzForge OSS, ensure you have:
|
||||
Before installing FuzzForge AI, ensure you have:
|
||||
|
||||
- **Python 3.12+** - [Download Python](https://www.python.org/downloads/)
|
||||
- **Python 3.14+** - [Download Python](https://www.python.org/downloads/)
|
||||
- **uv** package manager - [Install uv](https://docs.astral.sh/uv/)
|
||||
- **Docker** - Container runtime ([Install Docker](https://docs.docker.com/get-docker/))
|
||||
|
||||
@@ -95,8 +95,8 @@ sudo usermod -aG docker $USER
|
||||
### 1. Clone the Repository
|
||||
|
||||
```bash
|
||||
git clone https://github.com/FuzzingLabs/fuzzforge-oss.git
|
||||
cd fuzzforge-oss
|
||||
git clone https://github.com/FuzzingLabs/fuzzforge-ai.git
|
||||
cd fuzzforge-ai
|
||||
```
|
||||
|
||||
### 2. Install Dependencies
|
||||
@@ -122,14 +122,14 @@ FuzzForge modules are containerized security tools. After cloning, you need to b
|
||||
### Build All Modules
|
||||
|
||||
```bash
|
||||
# From the fuzzforge-oss directory
|
||||
# From the fuzzforge-ai directory
|
||||
make build-modules
|
||||
```
|
||||
|
||||
This builds all available modules:
|
||||
- `fuzzforge-rust-analyzer` - Analyzes Rust code for fuzzable functions
|
||||
- `fuzzforge-cargo-fuzzer` - Runs cargo-fuzz on Rust crates
|
||||
- `fuzzforge-harness-validator` - Validates generated fuzzing harnesses
|
||||
- `fuzzforge-harness-tester` - Tests and validates generated fuzzing harnesses
|
||||
- `fuzzforge-crash-analyzer` - Analyzes crash inputs
|
||||
|
||||
### Build a Single Module
|
||||
@@ -169,7 +169,7 @@ uv run fuzzforge mcp install copilot
|
||||
|
||||
The command auto-detects everything:
|
||||
- **FuzzForge root** - Where FuzzForge is installed
|
||||
- **Modules path** - Defaults to `fuzzforge-oss/fuzzforge-modules`
|
||||
- **Modules path** - Defaults to `fuzzforge-ai/fuzzforge-modules`
|
||||
- **Docker socket** - Auto-detects `/var/run/docker.sock`
|
||||
|
||||
**Optional overrides** (usually not needed):
|
||||
@@ -428,14 +428,14 @@ If you prefer Podman:
|
||||
uv run fuzzforge mcp install copilot --engine podman
|
||||
|
||||
# Or set environment variable
|
||||
export FUZZFORGE_ENGINE=podman
|
||||
export FUZZFORGE_ENGINE__TYPE=podman
|
||||
```
|
||||
|
||||
### Check Logs
|
||||
|
||||
FuzzForge stores execution logs in the storage directory:
|
||||
FuzzForge stores execution results inside your project directory:
|
||||
```bash
|
||||
ls -la ~/.fuzzforge/storage/<project-id>/<execution-id>/
|
||||
ls -la <your-project>/.fuzzforge/runs/<execution-id>/
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
[project]
|
||||
name = "fuzzforge-cli"
|
||||
version = "0.0.1"
|
||||
description = "FuzzForge CLI - Command-line interface for FuzzForge OSS."
|
||||
description = "FuzzForge CLI - Command-line interface for FuzzForge AI."
|
||||
authors = []
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.14"
|
||||
dependencies = [
|
||||
"fuzzforge-runner==0.0.1",
|
||||
"fuzzforge-types==0.0.1",
|
||||
"rich>=14.0.0",
|
||||
"typer==0.20.1",
|
||||
]
|
||||
@@ -27,4 +26,3 @@ fuzzforge = "fuzzforge_cli.__main__:main"
|
||||
|
||||
[tool.uv.sources]
|
||||
fuzzforge-runner = { workspace = true }
|
||||
fuzzforge-types = { workspace = true }
|
||||
|
||||
@@ -12,7 +12,7 @@ from fuzzforge_cli.context import Context
|
||||
|
||||
application: Typer = Typer(
|
||||
name="fuzzforge",
|
||||
help="FuzzForge OSS - Security research orchestration platform.",
|
||||
help="FuzzForge AI - Security research orchestration platform.",
|
||||
)
|
||||
|
||||
|
||||
@@ -62,7 +62,7 @@ def main(
|
||||
] = "",
|
||||
context: TyperContext = None, # type: ignore[assignment]
|
||||
) -> None:
|
||||
"""FuzzForge OSS - Security research orchestration platform.
|
||||
"""FuzzForge AI - Security research orchestration platform.
|
||||
|
||||
Execute security research modules in isolated containers.
|
||||
|
||||
|
||||
@@ -129,13 +129,13 @@ def _detect_docker_socket() -> str:
|
||||
def _find_fuzzforge_root() -> Path:
|
||||
"""Find the FuzzForge installation root.
|
||||
|
||||
:returns: Path to fuzzforge-oss directory.
|
||||
:returns: Path to fuzzforge-ai directory.
|
||||
|
||||
"""
|
||||
# Try to find from current file location
|
||||
current = Path(__file__).resolve()
|
||||
|
||||
# Walk up to find fuzzforge-oss root
|
||||
# Walk up to find fuzzforge-ai root
|
||||
for parent in current.parents:
|
||||
if (parent / "fuzzforge-mcp").is_dir() and (parent / "fuzzforge-runner").is_dir():
|
||||
return parent
|
||||
@@ -152,7 +152,7 @@ def _generate_mcp_config(
|
||||
) -> dict:
|
||||
"""Generate MCP server configuration.
|
||||
|
||||
:param fuzzforge_root: Path to fuzzforge-oss installation.
|
||||
:param fuzzforge_root: Path to fuzzforge-ai installation.
|
||||
:param modules_path: Path to the modules directory.
|
||||
:param engine_type: Container engine type (podman or docker).
|
||||
:param engine_socket: Container engine socket path.
|
||||
@@ -326,7 +326,7 @@ def generate(
|
||||
if agent == AIAgent.COPILOT:
|
||||
config_path = _get_copilot_mcp_path()
|
||||
elif agent == AIAgent.CLAUDE_CODE:
|
||||
config_path = _get_claude_code_mcp_path(fuzzforge_root)
|
||||
config_path = _get_claude_code_user_mcp_path()
|
||||
else: # Claude Desktop
|
||||
config_path = _get_claude_desktop_mcp_path()
|
||||
|
||||
|
||||
@@ -6,7 +6,6 @@ authors = []
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.14"
|
||||
dependencies = [
|
||||
"fuzzforge-types==0.0.1",
|
||||
"podman==5.6.0",
|
||||
"pydantic==2.12.4",
|
||||
"structlog>=24.0.0",
|
||||
@@ -22,5 +21,4 @@ tests = [
|
||||
"pytest==9.0.2",
|
||||
]
|
||||
|
||||
[tool.uv.sources]
|
||||
fuzzforge-types = { workspace = true }
|
||||
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
This package provides:
|
||||
- Sandbox engine abstractions (Podman, Docker)
|
||||
- Storage abstractions (S3) - requires 'storage' extra
|
||||
- Common exceptions
|
||||
|
||||
Example usage:
|
||||
@@ -12,9 +11,6 @@ Example usage:
|
||||
Podman,
|
||||
PodmanConfiguration,
|
||||
)
|
||||
|
||||
# For storage (requires boto3):
|
||||
from fuzzforge_common.storage import Storage
|
||||
"""
|
||||
|
||||
from fuzzforge_common.exceptions import FuzzForgeError
|
||||
@@ -29,14 +25,6 @@ from fuzzforge_common.sandboxes import (
|
||||
PodmanConfiguration,
|
||||
)
|
||||
|
||||
# Storage exceptions are always available (no boto3 required)
|
||||
from fuzzforge_common.storage.exceptions import (
|
||||
FuzzForgeStorageError,
|
||||
StorageConnectionError,
|
||||
StorageDownloadError,
|
||||
StorageUploadError,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"AbstractFuzzForgeEngineConfiguration",
|
||||
"AbstractFuzzForgeSandboxEngine",
|
||||
@@ -44,11 +32,7 @@ __all__ = [
|
||||
"DockerConfiguration",
|
||||
"FuzzForgeError",
|
||||
"FuzzForgeSandboxEngines",
|
||||
"FuzzForgeStorageError",
|
||||
"ImageInfo",
|
||||
"Podman",
|
||||
"PodmanConfiguration",
|
||||
"StorageConnectionError",
|
||||
"StorageDownloadError",
|
||||
"StorageUploadError",
|
||||
]
|
||||
|
||||
@@ -1,19 +0,0 @@
|
||||
"""FuzzForge storage abstractions.
|
||||
|
||||
Storage class requires boto3. Import it explicitly:
|
||||
from fuzzforge_common.storage.s3 import Storage
|
||||
"""
|
||||
|
||||
from fuzzforge_common.storage.exceptions import (
|
||||
FuzzForgeStorageError,
|
||||
StorageConnectionError,
|
||||
StorageDownloadError,
|
||||
StorageUploadError,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"FuzzForgeStorageError",
|
||||
"StorageConnectionError",
|
||||
"StorageDownloadError",
|
||||
"StorageUploadError",
|
||||
]
|
||||
@@ -1,20 +0,0 @@
|
||||
from pydantic import BaseModel
|
||||
|
||||
from fuzzforge_common.storage.s3 import Storage
|
||||
|
||||
|
||||
class StorageConfiguration(BaseModel):
|
||||
"""TODO."""
|
||||
|
||||
#: S3 endpoint URL (e.g., "http://localhost:9000" for MinIO).
|
||||
endpoint: str
|
||||
|
||||
#: S3 access key ID for authentication.
|
||||
access_key: str
|
||||
|
||||
#: S3 secret access key for authentication.
|
||||
secret_key: str
|
||||
|
||||
def into_storage(self) -> Storage:
|
||||
"""TODO."""
|
||||
return Storage(endpoint=self.endpoint, access_key=self.access_key, secret_key=self.secret_key)
|
||||
@@ -1,108 +0,0 @@
|
||||
from fuzzforge_common.exceptions import FuzzForgeError
|
||||
|
||||
|
||||
class FuzzForgeStorageError(FuzzForgeError):
|
||||
"""Base exception for all storage-related errors.
|
||||
|
||||
Raised when storage operations (upload, download, connection) fail
|
||||
during workflow execution.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class StorageConnectionError(FuzzForgeStorageError):
|
||||
"""Failed to connect to storage service.
|
||||
|
||||
:param endpoint: The storage endpoint that failed to connect.
|
||||
:param reason: The underlying exception message.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, endpoint: str, reason: str) -> None:
|
||||
"""Initialize storage connection error.
|
||||
|
||||
:param endpoint: The storage endpoint that failed to connect.
|
||||
:param reason: The underlying exception message.
|
||||
|
||||
"""
|
||||
FuzzForgeStorageError.__init__(
|
||||
self,
|
||||
f"Failed to connect to storage at {endpoint}: {reason}",
|
||||
)
|
||||
self.endpoint = endpoint
|
||||
self.reason = reason
|
||||
|
||||
|
||||
class StorageUploadError(FuzzForgeStorageError):
|
||||
"""Failed to upload object to storage.
|
||||
|
||||
:param bucket: The target bucket name.
|
||||
:param object_key: The target object key.
|
||||
:param reason: The underlying exception message.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, bucket: str, object_key: str, reason: str) -> None:
|
||||
"""Initialize storage upload error.
|
||||
|
||||
:param bucket: The target bucket name.
|
||||
:param object_key: The target object key.
|
||||
:param reason: The underlying exception message.
|
||||
|
||||
"""
|
||||
FuzzForgeStorageError.__init__(
|
||||
self,
|
||||
f"Failed to upload to {bucket}/{object_key}: {reason}",
|
||||
)
|
||||
self.bucket = bucket
|
||||
self.object_key = object_key
|
||||
self.reason = reason
|
||||
|
||||
|
||||
class StorageDownloadError(FuzzForgeStorageError):
|
||||
"""Failed to download object from storage.
|
||||
|
||||
:param bucket: The source bucket name.
|
||||
:param object_key: The source object key.
|
||||
:param reason: The underlying exception message.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, bucket: str, object_key: str, reason: str) -> None:
|
||||
"""Initialize storage download error.
|
||||
|
||||
:param bucket: The source bucket name.
|
||||
:param object_key: The source object key.
|
||||
:param reason: The underlying exception message.
|
||||
|
||||
"""
|
||||
FuzzForgeStorageError.__init__(
|
||||
self,
|
||||
f"Failed to download from {bucket}/{object_key}: {reason}",
|
||||
)
|
||||
self.bucket = bucket
|
||||
self.object_key = object_key
|
||||
self.reason = reason
|
||||
|
||||
|
||||
class StorageDeletionError(FuzzForgeStorageError):
|
||||
"""Failed to delete bucket from storage.
|
||||
|
||||
:param bucket: The bucket name that failed to delete.
|
||||
:param reason: The underlying exception message.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, bucket: str, reason: str) -> None:
|
||||
"""Initialize storage deletion error.
|
||||
|
||||
:param bucket: The bucket name that failed to delete.
|
||||
:param reason: The underlying exception message.
|
||||
|
||||
"""
|
||||
FuzzForgeStorageError.__init__(
|
||||
self,
|
||||
f"Failed to delete bucket {bucket}: {reason}",
|
||||
)
|
||||
self.bucket = bucket
|
||||
self.reason = reason
|
||||
@@ -1,351 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path, PurePath
|
||||
from tarfile import TarInfo
|
||||
from tarfile import open as Archive # noqa: N812
|
||||
from tempfile import NamedTemporaryFile
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
|
||||
from botocore.exceptions import ClientError
|
||||
|
||||
from fuzzforge_common.storage.exceptions import StorageDeletionError, StorageDownloadError, StorageUploadError
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from botocore.client import BaseClient
|
||||
from structlog.stdlib import BoundLogger
|
||||
|
||||
|
||||
def get_logger() -> BoundLogger:
|
||||
"""Get structlog logger instance.
|
||||
|
||||
Uses deferred import pattern required by Temporal for serialization.
|
||||
|
||||
:returns: Configured structlog logger.
|
||||
|
||||
"""
|
||||
from structlog import get_logger # noqa: PLC0415 (required by temporal)
|
||||
|
||||
return cast("BoundLogger", get_logger())
|
||||
|
||||
|
||||
class Storage:
|
||||
"""S3-compatible storage backend implementation using boto3.
|
||||
|
||||
Supports MinIO, AWS S3, and other S3-compatible storage services.
|
||||
Uses error-driven approach (EAFP) to handle bucket creation and
|
||||
avoid race conditions.
|
||||
|
||||
"""
|
||||
|
||||
#: S3 endpoint URL (e.g., "http://localhost:9000" for MinIO).
|
||||
__endpoint: str
|
||||
|
||||
#: S3 access key ID for authentication.
|
||||
__access_key: str
|
||||
|
||||
#: S3 secret access key for authentication.
|
||||
__secret_key: str
|
||||
|
||||
def __init__(self, endpoint: str, access_key: str, secret_key: str) -> None:
|
||||
"""Initialize an instance of the class.
|
||||
|
||||
:param endpoint: TODO.
|
||||
:param access_key: TODO.
|
||||
:param secret_key: TODO.
|
||||
|
||||
"""
|
||||
self.__endpoint = endpoint
|
||||
self.__access_key = access_key
|
||||
self.__secret_key = secret_key
|
||||
|
||||
def _get_client(self) -> BaseClient:
|
||||
"""Create boto3 S3 client with configured credentials.
|
||||
|
||||
Uses deferred import pattern required by Temporal for serialization.
|
||||
|
||||
:returns: Configured boto3 S3 client.
|
||||
|
||||
"""
|
||||
import boto3 # noqa: PLC0415 (required by temporal)
|
||||
|
||||
return boto3.client(
|
||||
"s3",
|
||||
endpoint_url=self.__endpoint,
|
||||
aws_access_key_id=self.__access_key,
|
||||
aws_secret_access_key=self.__secret_key,
|
||||
)
|
||||
|
||||
def create_bucket(self, bucket: str) -> None:
|
||||
"""Create the S3 bucket if it does not already exist.
|
||||
|
||||
Idempotent operation - succeeds if bucket already exists and is owned by you.
|
||||
Fails if bucket exists but is owned by another account.
|
||||
|
||||
:raise ClientError: If bucket creation fails (permissions, name conflicts, etc.).
|
||||
|
||||
"""
|
||||
logger = get_logger()
|
||||
client = self._get_client()
|
||||
|
||||
logger.debug("creating_bucket", bucket=bucket)
|
||||
|
||||
try:
|
||||
client.create_bucket(Bucket=bucket)
|
||||
logger.info("bucket_created", bucket=bucket)
|
||||
|
||||
except ClientError as e:
|
||||
error_code = e.response.get("Error", {}).get("Code")
|
||||
|
||||
# Bucket already exists and we own it - this is fine
|
||||
if error_code in ("BucketAlreadyOwnedByYou", "BucketAlreadyExists"):
|
||||
logger.debug(
|
||||
"bucket_already_exists",
|
||||
bucket=bucket,
|
||||
error_code=error_code,
|
||||
)
|
||||
return
|
||||
|
||||
# Other errors are actual failures
|
||||
logger.exception(
|
||||
"bucket_creation_failed",
|
||||
bucket=bucket,
|
||||
error_code=error_code,
|
||||
)
|
||||
raise
|
||||
|
||||
def delete_bucket(self, bucket: str) -> None:
|
||||
"""Delete an S3 bucket and all its contents.
|
||||
|
||||
Idempotent operation - succeeds if bucket doesn't exist.
|
||||
Handles pagination for buckets with many objects.
|
||||
|
||||
:param bucket: The name of the bucket to delete.
|
||||
:raises StorageDeletionError: If bucket deletion fails.
|
||||
|
||||
"""
|
||||
logger = get_logger()
|
||||
client = self._get_client()
|
||||
|
||||
logger.debug("deleting_bucket", bucket=bucket)
|
||||
|
||||
try:
|
||||
# S3 requires bucket to be empty before deletion
|
||||
# Delete all objects first with pagination support
|
||||
continuation_token = None
|
||||
|
||||
while True:
|
||||
# List objects (up to 1000 per request)
|
||||
list_params = {"Bucket": bucket}
|
||||
if continuation_token:
|
||||
list_params["ContinuationToken"] = continuation_token
|
||||
|
||||
response = client.list_objects_v2(**list_params)
|
||||
|
||||
# Delete objects if any exist (max 1000 per delete_objects call)
|
||||
if "Contents" in response:
|
||||
objects = [{"Key": obj["Key"]} for obj in response["Contents"]]
|
||||
client.delete_objects(Bucket=bucket, Delete={"Objects": objects})
|
||||
logger.debug("deleted_objects", bucket=bucket, count=len(objects))
|
||||
|
||||
# Check if more objects exist
|
||||
if not response.get("IsTruncated", False):
|
||||
break
|
||||
|
||||
continuation_token = response.get("NextContinuationToken")
|
||||
|
||||
# Now delete the empty bucket
|
||||
client.delete_bucket(Bucket=bucket)
|
||||
logger.info("bucket_deleted", bucket=bucket)
|
||||
|
||||
except ClientError as error:
|
||||
error_code = error.response.get("Error", {}).get("Code")
|
||||
|
||||
# Idempotent - bucket already doesn't exist
|
||||
if error_code == "NoSuchBucket":
|
||||
logger.debug("bucket_does_not_exist", bucket=bucket)
|
||||
return
|
||||
|
||||
# Other errors are actual failures
|
||||
logger.exception(
|
||||
"bucket_deletion_failed",
|
||||
bucket=bucket,
|
||||
error_code=error_code,
|
||||
)
|
||||
raise StorageDeletionError(bucket=bucket, reason=str(error)) from error
|
||||
|
||||
def upload_file(
|
||||
self,
|
||||
bucket: str,
|
||||
file: Path,
|
||||
key: str,
|
||||
) -> None:
|
||||
"""Upload archive file to S3 storage at specified object key.
|
||||
|
||||
Assumes bucket exists. Fails gracefully if bucket or other resources missing.
|
||||
|
||||
:param bucket: TODO.
|
||||
:param file: Local path to the archive file to upload.
|
||||
:param key: Object key (path) in S3 where file should be uploaded.
|
||||
:raise StorageUploadError: If upload operation fails.
|
||||
|
||||
"""
|
||||
from boto3.exceptions import S3UploadFailedError # noqa: PLC0415 (required by 'temporal' at runtime)
|
||||
|
||||
logger = get_logger()
|
||||
client = self._get_client()
|
||||
|
||||
logger.debug(
|
||||
"uploading_archive_to_storage",
|
||||
bucket=bucket,
|
||||
object_key=key,
|
||||
archive_path=str(file),
|
||||
)
|
||||
|
||||
try:
|
||||
client.upload_file(
|
||||
Filename=str(file),
|
||||
Bucket=bucket,
|
||||
Key=key,
|
||||
)
|
||||
logger.info(
|
||||
"archive_uploaded_successfully",
|
||||
bucket=bucket,
|
||||
object_key=key,
|
||||
)
|
||||
|
||||
except S3UploadFailedError as e:
|
||||
# Check if this is a NoSuchBucket error - create bucket and retry
|
||||
if "NoSuchBucket" in str(e):
|
||||
logger.info(
|
||||
"bucket_does_not_exist_creating",
|
||||
bucket=bucket,
|
||||
)
|
||||
self.create_bucket(bucket=bucket)
|
||||
# Retry upload after creating bucket
|
||||
try:
|
||||
client.upload_file(
|
||||
Filename=str(file),
|
||||
Bucket=bucket,
|
||||
Key=key,
|
||||
)
|
||||
logger.info(
|
||||
"archive_uploaded_successfully_after_bucket_creation",
|
||||
bucket=bucket,
|
||||
object_key=key,
|
||||
)
|
||||
except S3UploadFailedError as retry_error:
|
||||
logger.exception(
|
||||
"upload_failed_after_bucket_creation",
|
||||
bucket=bucket,
|
||||
object_key=key,
|
||||
)
|
||||
raise StorageUploadError(
|
||||
bucket=bucket,
|
||||
object_key=key,
|
||||
reason=str(retry_error),
|
||||
) from retry_error
|
||||
else:
|
||||
logger.exception(
|
||||
"upload_failed",
|
||||
bucket=bucket,
|
||||
object_key=key,
|
||||
)
|
||||
raise StorageUploadError(
|
||||
bucket=bucket,
|
||||
object_key=key,
|
||||
reason=str(e),
|
||||
) from e
|
||||
|
||||
def download_file(self, bucket: str, key: PurePath) -> Path:
|
||||
"""Download a single file from S3 storage.
|
||||
|
||||
Downloads the file to a temporary location and returns the path.
|
||||
|
||||
:param bucket: S3 bucket name.
|
||||
:param key: Object key (path) in S3 to download.
|
||||
:returns: Path to the downloaded file.
|
||||
:raise StorageDownloadError: If download operation fails.
|
||||
|
||||
"""
|
||||
logger = get_logger()
|
||||
client = self._get_client()
|
||||
|
||||
logger.debug(
|
||||
"downloading_file_from_storage",
|
||||
bucket=bucket,
|
||||
object_key=str(key),
|
||||
)
|
||||
|
||||
try:
|
||||
# Create temporary file for download
|
||||
with NamedTemporaryFile(delete=False, suffix=".tar.gz") as temp_file:
|
||||
temp_path = Path(temp_file.name)
|
||||
|
||||
# Download object to temp file
|
||||
client.download_file(
|
||||
Bucket=bucket,
|
||||
Key=str(key),
|
||||
Filename=str(temp_path),
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"file_downloaded_successfully",
|
||||
bucket=bucket,
|
||||
object_key=str(key),
|
||||
local_path=str(temp_path),
|
||||
)
|
||||
|
||||
return temp_path
|
||||
|
||||
except ClientError as error:
|
||||
error_code = error.response.get("Error", {}).get("Code")
|
||||
logger.exception(
|
||||
"download_failed",
|
||||
bucket=bucket,
|
||||
object_key=str(key),
|
||||
error_code=error_code,
|
||||
)
|
||||
raise StorageDownloadError(
|
||||
bucket=bucket,
|
||||
object_key=str(key),
|
||||
reason=f"{error_code}: {error!s}",
|
||||
) from error
|
||||
|
||||
def download_directory(self, bucket: str, directory: PurePath) -> Path:
|
||||
"""TODO.
|
||||
|
||||
:param bucket: TODO.
|
||||
:param directory: TODO.
|
||||
:returns: TODO.
|
||||
|
||||
"""
|
||||
with NamedTemporaryFile(delete=False) as file:
|
||||
path: Path = Path(file.name)
|
||||
# end-with
|
||||
client: Any = self._get_client()
|
||||
with Archive(name=str(path), mode="w:gz") as archive:
|
||||
paginator = client.get_paginator("list_objects_v2")
|
||||
try:
|
||||
pages = paginator.paginate(Bucket=bucket, Prefix=str(directory))
|
||||
except ClientError as exception:
|
||||
raise StorageDownloadError(
|
||||
bucket=bucket,
|
||||
object_key=str(directory),
|
||||
reason=exception.response["Error"]["Code"],
|
||||
) from exception
|
||||
for page in pages:
|
||||
for entry in page.get("Contents", []):
|
||||
key: str = entry["Key"]
|
||||
try:
|
||||
response: dict[str, Any] = client.get_object(Bucket=bucket, Key=key)
|
||||
except ClientError as exception:
|
||||
raise StorageDownloadError(
|
||||
bucket=bucket,
|
||||
object_key=key,
|
||||
reason=exception.response["Error"]["Code"],
|
||||
) from exception
|
||||
archive.addfile(TarInfo(name=key), fileobj=response["Body"])
|
||||
# end-for
|
||||
# end-for
|
||||
# end-with
|
||||
return path
|
||||
@@ -1,8 +0,0 @@
|
||||
from enum import StrEnum
|
||||
|
||||
|
||||
class TemporalQueues(StrEnum):
|
||||
"""Enumeration of available `Temporal Task Queues`."""
|
||||
|
||||
#: The default task queue.
|
||||
DEFAULT = "default-task-queue"
|
||||
@@ -1,46 +0,0 @@
|
||||
from enum import StrEnum
|
||||
from typing import Literal
|
||||
|
||||
from fuzzforge_types import FuzzForgeWorkflowIdentifier # noqa: TC002 (required by 'pydantic' at runtime)
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class Base(BaseModel):
|
||||
"""TODO."""
|
||||
|
||||
|
||||
class FuzzForgeWorkflowSteps(StrEnum):
|
||||
"""Workflow step types."""
|
||||
|
||||
#: Execute a FuzzForge module
|
||||
RUN_FUZZFORGE_MODULE = "run-fuzzforge-module"
|
||||
|
||||
|
||||
class FuzzForgeWorkflowStep(Base):
|
||||
"""TODO."""
|
||||
|
||||
#: The type of the workflow's step.
|
||||
kind: FuzzForgeWorkflowSteps
|
||||
|
||||
|
||||
class RunFuzzForgeModule(FuzzForgeWorkflowStep):
|
||||
"""Execute a FuzzForge module."""
|
||||
|
||||
kind: Literal[FuzzForgeWorkflowSteps.RUN_FUZZFORGE_MODULE] = FuzzForgeWorkflowSteps.RUN_FUZZFORGE_MODULE
|
||||
#: The name of the module.
|
||||
module: str
|
||||
#: The container of the module.
|
||||
container: str
|
||||
|
||||
|
||||
class FuzzForgeWorkflowDefinition(Base):
|
||||
"""The definition of a FuzzForge workflow."""
|
||||
|
||||
#: The author of the workflow.
|
||||
author: str
|
||||
#: The identifier of the workflow.
|
||||
identifier: FuzzForgeWorkflowIdentifier
|
||||
#: The name of the workflow.
|
||||
name: str
|
||||
#: The collection of steps that compose the workflow.
|
||||
steps: list[RunFuzzForgeModule]
|
||||
@@ -1,24 +0,0 @@
|
||||
from pydantic import BaseModel
|
||||
|
||||
from fuzzforge_common.sandboxes.engines.docker.configuration import (
|
||||
DockerConfiguration, # noqa: TC001 (required by pydantic at runtime)
|
||||
)
|
||||
from fuzzforge_common.sandboxes.engines.podman.configuration import (
|
||||
PodmanConfiguration, # noqa: TC001 (required by pydantic at runtime)
|
||||
)
|
||||
from fuzzforge_common.storage.configuration import StorageConfiguration # noqa: TC001 (required by pydantic at runtime)
|
||||
|
||||
|
||||
class TemporalWorkflowParameters(BaseModel):
|
||||
"""Base parameters for Temporal workflows.
|
||||
|
||||
Provides common configuration shared across all workflow types,
|
||||
including sandbox engine and storage backend instances.
|
||||
|
||||
"""
|
||||
|
||||
#: Sandbox engine for container operations (Docker or Podman).
|
||||
engine_configuration: PodmanConfiguration | DockerConfiguration
|
||||
|
||||
#: Storage backend for uploading/downloading execution artifacts.
|
||||
storage_configuration: StorageConfiguration
|
||||
@@ -1,108 +0,0 @@
|
||||
"""Helper utilities for working with bridge transformations."""
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
def load_transform_from_file(file_path: str | Path) -> str:
|
||||
"""Load bridge transformation code from a Python file.
|
||||
|
||||
This reads the transformation function from a .py file and extracts
|
||||
the code as a string suitable for the bridge module.
|
||||
|
||||
Args:
|
||||
file_path: Path to Python file containing transform() function
|
||||
|
||||
Returns:
|
||||
Python code as a string
|
||||
|
||||
Example:
|
||||
>>> code = load_transform_from_file("transformations/add_line_numbers.py")
|
||||
>>> # code contains the transform() function as a string
|
||||
|
||||
"""
|
||||
path = Path(file_path)
|
||||
|
||||
if not path.exists():
|
||||
raise FileNotFoundError(f"Transformation file not found: {file_path}")
|
||||
|
||||
if path.suffix != ".py":
|
||||
raise ValueError(f"Transformation file must be .py file, got: {path.suffix}")
|
||||
|
||||
# Read the entire file
|
||||
code = path.read_text()
|
||||
|
||||
return code
|
||||
|
||||
|
||||
def create_bridge_input(
|
||||
transform_file: str | Path,
|
||||
input_filename: str | None = None,
|
||||
output_filename: str | None = None,
|
||||
) -> dict[str, Any]:
|
||||
"""Create bridge module input configuration from a transformation file.
|
||||
|
||||
Args:
|
||||
transform_file: Path to Python file with transform() function
|
||||
input_filename: Optional specific input file to transform
|
||||
output_filename: Optional specific output filename
|
||||
|
||||
Returns:
|
||||
Dictionary suitable for bridge module's input.json
|
||||
|
||||
Example:
|
||||
>>> config = create_bridge_input("transformations/add_line_numbers.py")
|
||||
>>> import json
|
||||
>>> json.dump(config, open("input.json", "w"))
|
||||
|
||||
"""
|
||||
code = load_transform_from_file(transform_file)
|
||||
|
||||
return {
|
||||
"code": code,
|
||||
"input_filename": input_filename,
|
||||
"output_filename": output_filename,
|
||||
}
|
||||
|
||||
|
||||
def validate_transform_function(file_path: str | Path) -> bool:
|
||||
"""Validate that a Python file contains a valid transform() function.
|
||||
|
||||
Args:
|
||||
file_path: Path to Python file to validate
|
||||
|
||||
Returns:
|
||||
True if valid, raises exception otherwise
|
||||
|
||||
Raises:
|
||||
ValueError: If transform() function is not found or invalid
|
||||
|
||||
"""
|
||||
code = load_transform_from_file(file_path)
|
||||
|
||||
# Check if transform function is defined
|
||||
if "def transform(" not in code:
|
||||
raise ValueError(
|
||||
f"File {file_path} must contain a 'def transform(data)' function"
|
||||
)
|
||||
|
||||
# Try to compile the code
|
||||
try:
|
||||
compile(code, str(file_path), "exec")
|
||||
except SyntaxError as e:
|
||||
raise ValueError(f"Syntax error in {file_path}: {e}") from e
|
||||
|
||||
# Try to execute and verify transform exists
|
||||
namespace: dict[str, Any] = {"__builtins__": __builtins__}
|
||||
try:
|
||||
exec(code, namespace)
|
||||
except Exception as e:
|
||||
raise ValueError(f"Failed to execute {file_path}: {e}") from e
|
||||
|
||||
if "transform" not in namespace:
|
||||
raise ValueError(f"No 'transform' function found in {file_path}")
|
||||
|
||||
if not callable(namespace["transform"]):
|
||||
raise ValueError(f"'transform' in {file_path} is not callable")
|
||||
|
||||
return True
|
||||
@@ -1,27 +0,0 @@
|
||||
from fuzzforge_types import (
|
||||
FuzzForgeExecutionIdentifier, # noqa: TC002 (required by pydantic at runtime)
|
||||
FuzzForgeProjectIdentifier, # noqa: TC002 (required by pydantic at runtime)
|
||||
)
|
||||
|
||||
from fuzzforge_common.workflows.base.definitions import (
|
||||
FuzzForgeWorkflowDefinition, # noqa: TC001 (required by pydantic at runtime)
|
||||
)
|
||||
from fuzzforge_common.workflows.base.parameters import TemporalWorkflowParameters
|
||||
|
||||
|
||||
class ExecuteFuzzForgeWorkflowParameters(TemporalWorkflowParameters):
|
||||
"""Parameters for the default FuzzForge workflow orchestration.
|
||||
|
||||
Contains workflow definition and execution tracking identifiers
|
||||
for coordinating multi-module workflows.
|
||||
|
||||
"""
|
||||
|
||||
#: UUID7 identifier of this specific workflow execution.
|
||||
execution_identifier: FuzzForgeExecutionIdentifier
|
||||
|
||||
#: UUID7 identifier of the project this execution belongs to.
|
||||
project_identifier: FuzzForgeProjectIdentifier
|
||||
|
||||
#: The definition of the FuzzForge workflow to run.
|
||||
workflow_definition: FuzzForgeWorkflowDefinition
|
||||
@@ -1,80 +0,0 @@
|
||||
from typing import Any, Literal
|
||||
|
||||
from fuzzforge_types import (
|
||||
FuzzForgeExecutionIdentifier, # noqa: TC002 (required by pydantic at runtime)
|
||||
FuzzForgeProjectIdentifier, # noqa: TC002 (required by pydantic at runtime)
|
||||
)
|
||||
|
||||
from fuzzforge_common.workflows.base.parameters import TemporalWorkflowParameters
|
||||
|
||||
|
||||
class ExecuteFuzzForgeModuleParameters(TemporalWorkflowParameters):
|
||||
"""Parameters for executing a single FuzzForge module workflow.
|
||||
|
||||
Contains module execution configuration including container image,
|
||||
project context, and execution tracking identifiers.
|
||||
|
||||
Supports workflow chaining where modules can be executed in sequence,
|
||||
with each module's output becoming the next module's input.
|
||||
|
||||
"""
|
||||
|
||||
#: The identifier of this module execution.
|
||||
execution_identifier: FuzzForgeExecutionIdentifier
|
||||
|
||||
#: The identifier/name of the module to execute.
|
||||
#: FIXME: Currently accepts both UUID (for registry lookups) and container names (e.g., "text-generator:0.0.1").
|
||||
#: This should be split into module_identifier (UUID) and container_image (string) in the future.
|
||||
module_identifier: str
|
||||
|
||||
#: The identifier of the project this module execution belongs to.
|
||||
project_identifier: FuzzForgeProjectIdentifier
|
||||
|
||||
#: Optional configuration dictionary for the module.
|
||||
#: Will be written to /data/input/config.json in the sandbox.
|
||||
module_configuration: dict[str, Any] | None = None
|
||||
|
||||
# Workflow chaining fields
|
||||
|
||||
#: The identifier of the parent workflow execution (if part of a multi-module workflow).
|
||||
#: For standalone module executions, this equals execution_identifier.
|
||||
workflow_execution_identifier: FuzzForgeExecutionIdentifier | None = None
|
||||
|
||||
#: Position of this module in the workflow (0-based).
|
||||
#: 0 = first module (reads from project assets)
|
||||
#: N > 0 = subsequent module (reads from previous module's output)
|
||||
step_index: int = 0
|
||||
|
||||
#: Execution identifier of the previous module in the workflow chain.
|
||||
#: None for first module (step_index=0).
|
||||
#: Used to locate previous module's output in storage.
|
||||
previous_step_execution_identifier: FuzzForgeExecutionIdentifier | None = None
|
||||
|
||||
|
||||
class WorkflowStep(TemporalWorkflowParameters):
|
||||
"""A step in a workflow - a module execution.
|
||||
|
||||
Steps are executed sequentially in a workflow. Each step runs a containerized module.
|
||||
|
||||
Examples:
|
||||
# Module step
|
||||
WorkflowStep(
|
||||
step_index=0,
|
||||
step_type="module",
|
||||
module_identifier="text-generator:0.0.1"
|
||||
)
|
||||
|
||||
"""
|
||||
|
||||
#: Position of this step in the workflow (0-based)
|
||||
step_index: int
|
||||
|
||||
#: Type of step: "module" (bridges are also modules now)
|
||||
step_type: Literal["module"]
|
||||
|
||||
#: Module identifier (container image name like "text-generator:0.0.1")
|
||||
#: Required if step_type="module"
|
||||
module_identifier: str | None = None
|
||||
|
||||
#: Optional module configuration
|
||||
module_configuration: dict[str, Any] | None = None
|
||||
@@ -1,42 +0,0 @@
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from fuzzforge_common.storage.configuration import StorageConfiguration
|
||||
|
||||
|
||||
def test_download_directory(
|
||||
storage_configuration: StorageConfiguration,
|
||||
boto3_client: Any,
|
||||
random_bucket: str,
|
||||
tmp_path: Path,
|
||||
) -> None:
|
||||
"""TODO."""
|
||||
bucket = random_bucket
|
||||
storage = storage_configuration.into_storage()
|
||||
|
||||
d1 = tmp_path.joinpath("d1")
|
||||
f1 = d1.joinpath("f1")
|
||||
d2 = tmp_path.joinpath("d2")
|
||||
f2 = d2.joinpath("f2")
|
||||
d3 = d2.joinpath("d3")
|
||||
f3 = d3.joinpath("d3")
|
||||
|
||||
d1.mkdir()
|
||||
d2.mkdir()
|
||||
d3.mkdir()
|
||||
f1.touch()
|
||||
f2.touch()
|
||||
f3.touch()
|
||||
|
||||
for path in [f1, f2, f3]:
|
||||
key: Path = Path("assets", path.relative_to(other=tmp_path))
|
||||
boto3_client.upload_file(
|
||||
Bucket=bucket,
|
||||
Filename=str(path),
|
||||
Key=str(key),
|
||||
)
|
||||
|
||||
path = storage.download_directory(bucket=bucket, directory="assets")
|
||||
|
||||
assert path.is_file()
|
||||
@@ -45,11 +45,11 @@ For custom setups, you can manually configure the MCP server.
|
||||
{
|
||||
"mcpServers": {
|
||||
"fuzzforge": {
|
||||
"command": "/path/to/fuzzforge-oss/.venv/bin/python",
|
||||
"command": "/path/to/fuzzforge-ai/.venv/bin/python",
|
||||
"args": ["-m", "fuzzforge_mcp"],
|
||||
"cwd": "/path/to/fuzzforge-oss",
|
||||
"cwd": "/path/to/fuzzforge-ai",
|
||||
"env": {
|
||||
"FUZZFORGE_MODULES_PATH": "/path/to/fuzzforge-oss/fuzzforge-modules",
|
||||
"FUZZFORGE_MODULES_PATH": "/path/to/fuzzforge-ai/fuzzforge-modules",
|
||||
"FUZZFORGE_ENGINE__TYPE": "docker"
|
||||
}
|
||||
}
|
||||
@@ -64,11 +64,11 @@ For custom setups, you can manually configure the MCP server.
|
||||
"servers": {
|
||||
"fuzzforge": {
|
||||
"type": "stdio",
|
||||
"command": "/path/to/fuzzforge-oss/.venv/bin/python",
|
||||
"command": "/path/to/fuzzforge-ai/.venv/bin/python",
|
||||
"args": ["-m", "fuzzforge_mcp"],
|
||||
"cwd": "/path/to/fuzzforge-oss",
|
||||
"cwd": "/path/to/fuzzforge-ai",
|
||||
"env": {
|
||||
"FUZZFORGE_MODULES_PATH": "/path/to/fuzzforge-oss/fuzzforge-modules",
|
||||
"FUZZFORGE_MODULES_PATH": "/path/to/fuzzforge-ai/fuzzforge-modules",
|
||||
"FUZZFORGE_ENGINE__TYPE": "docker"
|
||||
}
|
||||
}
|
||||
@@ -83,11 +83,11 @@ For custom setups, you can manually configure the MCP server.
|
||||
"mcpServers": {
|
||||
"fuzzforge": {
|
||||
"type": "stdio",
|
||||
"command": "/path/to/fuzzforge-oss/.venv/bin/python",
|
||||
"command": "/path/to/fuzzforge-ai/.venv/bin/python",
|
||||
"args": ["-m", "fuzzforge_mcp"],
|
||||
"cwd": "/path/to/fuzzforge-oss",
|
||||
"cwd": "/path/to/fuzzforge-ai",
|
||||
"env": {
|
||||
"FUZZFORGE_MODULES_PATH": "/path/to/fuzzforge-oss/fuzzforge-modules",
|
||||
"FUZZFORGE_MODULES_PATH": "/path/to/fuzzforge-ai/fuzzforge-modules",
|
||||
"FUZZFORGE_ENGINE__TYPE": "docker"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
[project]
|
||||
name = "fuzzforge-mcp"
|
||||
version = "0.0.1"
|
||||
description = "FuzzForge MCP Server - AI agent gateway for FuzzForge OSS."
|
||||
description = "FuzzForge MCP Server - AI agent gateway for FuzzForge AI."
|
||||
authors = []
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.14"
|
||||
dependencies = [
|
||||
"fastmcp==2.14.1",
|
||||
"fuzzforge-common==0.0.1",
|
||||
"fuzzforge-runner==0.0.1",
|
||||
"fuzzforge-types==0.0.1",
|
||||
"pydantic==2.12.4",
|
||||
"pydantic-settings==2.12.0",
|
||||
"structlog==25.5.0",
|
||||
@@ -24,11 +24,13 @@ lints = [
|
||||
"ruff==0.14.4",
|
||||
]
|
||||
tests = [
|
||||
"fuzzforge-tests==0.0.1",
|
||||
"pytest==9.0.2",
|
||||
"pytest-asyncio==1.3.0",
|
||||
"pytest-httpx==0.36.0",
|
||||
]
|
||||
|
||||
[tool.uv.sources]
|
||||
fuzzforge-common = { workspace = true }
|
||||
fuzzforge-runner = { workspace = true }
|
||||
fuzzforge-types = { workspace = true }
|
||||
fuzzforge-tests = { workspace = true }
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"""Workflow resources for FuzzForge MCP.
|
||||
|
||||
Note: In FuzzForge OSS, workflows are defined at runtime rather than
|
||||
Note: In FuzzForge AI, workflows are defined at runtime rather than
|
||||
stored. This resource provides documentation about workflow capabilities.
|
||||
|
||||
"""
|
||||
@@ -19,7 +19,7 @@ mcp: FastMCP = FastMCP()
|
||||
async def get_workflow_help() -> dict[str, Any]:
|
||||
"""Get help information about creating workflows.
|
||||
|
||||
Workflows in FuzzForge OSS are defined at execution time rather
|
||||
Workflows in FuzzForge AI are defined at execution time rather
|
||||
than stored. Use the execute_workflow tool with step definitions.
|
||||
|
||||
:return: Workflow documentation.
|
||||
|
||||
@@ -49,24 +49,19 @@ async def list_modules() -> dict[str, Any]:
|
||||
"image": f"{module.identifier}:{module.version or 'latest'}",
|
||||
"available": module.available,
|
||||
"description": module.description,
|
||||
# New metadata fields from pyproject.toml
|
||||
"category": module.category,
|
||||
"language": module.language,
|
||||
"pipeline_stage": module.pipeline_stage,
|
||||
"pipeline_order": module.pipeline_order,
|
||||
"dependencies": module.dependencies,
|
||||
"continuous_mode": module.continuous_mode,
|
||||
"typical_duration": module.typical_duration,
|
||||
"suggested_predecessors": module.suggested_predecessors,
|
||||
# AI-discoverable metadata
|
||||
"use_cases": module.use_cases,
|
||||
"input_requirements": module.input_requirements,
|
||||
"common_inputs": module.common_inputs,
|
||||
"output_artifacts": module.output_artifacts,
|
||||
"output_treatment": module.output_treatment,
|
||||
}
|
||||
for module in modules
|
||||
]
|
||||
|
||||
# Sort by pipeline_order if available
|
||||
available_modules.sort(key=lambda m: (m.get("pipeline_order") or 999, m["identifier"]))
|
||||
# Sort alphabetically by identifier
|
||||
available_modules.sort(key=lambda m: m["identifier"])
|
||||
|
||||
return {
|
||||
"modules": available_modules,
|
||||
|
||||
@@ -11,7 +11,7 @@ if TYPE_CHECKING:
|
||||
from collections.abc import AsyncGenerator, Callable
|
||||
|
||||
from fastmcp.client import FastMCPTransport
|
||||
from fuzzforge_types import FuzzForgeProjectIdentifier
|
||||
from fuzzforge_tests.fixtures import FuzzForgeProjectIdentifier
|
||||
|
||||
pytest_plugins = ["fuzzforge_tests.fixtures"]
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
"""MCP tool tests for FuzzForge OSS.
|
||||
"""MCP tool tests for FuzzForge AI.
|
||||
|
||||
Tests the MCP tools that are available in the OSS version.
|
||||
"""
|
||||
|
||||
@@ -18,8 +18,8 @@ WORKDIR /app
|
||||
# Create FuzzForge standard directories
|
||||
RUN mkdir -p /fuzzforge/input /fuzzforge/output
|
||||
|
||||
# Copy wheels directory (built by parent Makefile)
|
||||
COPY .wheels /wheels
|
||||
# Create wheels directory (populated with pre-built wheels when available)
|
||||
RUN mkdir -p /wheels
|
||||
|
||||
# Set up uv for the container
|
||||
ENV UV_SYSTEM_PYTHON=1
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# FuzzForge Runner
|
||||
|
||||
Direct execution engine for FuzzForge OSS. Provides simplified module and workflow execution without requiring Temporal or external infrastructure.
|
||||
Direct execution engine for FuzzForge AI. Provides simplified module and workflow execution without requiring Temporal or external infrastructure.
|
||||
|
||||
## Overview
|
||||
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
[project]
|
||||
name = "fuzzforge-runner"
|
||||
version = "0.0.1"
|
||||
description = "FuzzForge Runner - Direct execution engine for FuzzForge OSS."
|
||||
description = "FuzzForge Runner - Direct execution engine for FuzzForge AI."
|
||||
authors = []
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.14"
|
||||
dependencies = [
|
||||
"fuzzforge-common",
|
||||
"fuzzforge-types",
|
||||
"structlog>=25.5.0",
|
||||
"pydantic>=2.12.4",
|
||||
"pydantic-settings>=2.8.1",
|
||||
@@ -25,4 +24,3 @@ packages = ["src/fuzzforge_runner"]
|
||||
|
||||
[tool.uv.sources]
|
||||
fuzzforge-common = { workspace = true }
|
||||
fuzzforge-types = { workspace = true }
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
"""FuzzForge Runner - Direct execution engine for FuzzForge OSS."""
|
||||
"""FuzzForge Runner - Direct execution engine for FuzzForge AI."""
|
||||
|
||||
from fuzzforge_runner.runner import Runner
|
||||
from fuzzforge_runner.settings import Settings
|
||||
|
||||
@@ -1,10 +1,15 @@
|
||||
"""FuzzForge Runner constants."""
|
||||
|
||||
from pydantic import UUID7
|
||||
|
||||
#: Type alias for execution identifiers.
|
||||
type FuzzForgeExecutionIdentifier = UUID7
|
||||
|
||||
#: Default directory name for module input inside sandbox.
|
||||
SANDBOX_INPUT_DIRECTORY: str = "/data/input"
|
||||
SANDBOX_INPUT_DIRECTORY: str = "/fuzzforge/input"
|
||||
|
||||
#: Default directory name for module output inside sandbox.
|
||||
SANDBOX_OUTPUT_DIRECTORY: str = "/data/output"
|
||||
SANDBOX_OUTPUT_DIRECTORY: str = "/fuzzforge/output"
|
||||
|
||||
#: Default archive filename for results.
|
||||
RESULTS_ARCHIVE_FILENAME: str = "results.tar.gz"
|
||||
|
||||
@@ -18,13 +18,13 @@ from typing import TYPE_CHECKING, Any, cast
|
||||
|
||||
from fuzzforge_common.sandboxes.engines.docker.configuration import DockerConfiguration
|
||||
from fuzzforge_common.sandboxes.engines.podman.configuration import PodmanConfiguration
|
||||
from fuzzforge_types.executions import FuzzForgeExecutionIdentifier
|
||||
|
||||
from fuzzforge_runner.constants import (
|
||||
MODULE_ENTRYPOINT,
|
||||
RESULTS_ARCHIVE_FILENAME,
|
||||
SANDBOX_INPUT_DIRECTORY,
|
||||
SANDBOX_OUTPUT_DIRECTORY,
|
||||
FuzzForgeExecutionIdentifier,
|
||||
)
|
||||
from fuzzforge_runner.exceptions import ModuleExecutionError, SandboxError
|
||||
|
||||
@@ -284,7 +284,7 @@ class ModuleExecutor:
|
||||
Automatically pulls the module image from registry if it doesn't exist locally.
|
||||
|
||||
:param module_identifier: Name/identifier of the module image.
|
||||
:param input_volume: Optional path to mount as /data/input in the container.
|
||||
:param input_volume: Optional path to mount as /fuzzforge/input in the container.
|
||||
:returns: The sandbox container identifier.
|
||||
:raises SandboxError: If sandbox creation fails.
|
||||
|
||||
@@ -362,7 +362,7 @@ class ModuleExecutor:
|
||||
"name": item.stem,
|
||||
"description": f"Input file: {item.name}",
|
||||
"kind": "unknown",
|
||||
"path": f"/data/input/{item.name}",
|
||||
"path": f"{SANDBOX_INPUT_DIRECTORY}/{item.name}",
|
||||
}
|
||||
)
|
||||
elif item.is_dir():
|
||||
@@ -371,7 +371,7 @@ class ModuleExecutor:
|
||||
"name": item.name,
|
||||
"description": f"Input directory: {item.name}",
|
||||
"kind": "unknown",
|
||||
"path": f"/data/input/{item.name}",
|
||||
"path": f"{SANDBOX_INPUT_DIRECTORY}/{item.name}",
|
||||
}
|
||||
)
|
||||
|
||||
@@ -441,7 +441,7 @@ class ModuleExecutor:
|
||||
"name": item.stem,
|
||||
"description": f"Input file: {item.name}",
|
||||
"kind": "unknown",
|
||||
"path": f"/data/input/{item.name}",
|
||||
"path": f"{SANDBOX_INPUT_DIRECTORY}/{item.name}",
|
||||
}
|
||||
)
|
||||
elif item.is_dir():
|
||||
@@ -450,7 +450,7 @@ class ModuleExecutor:
|
||||
"name": item.name,
|
||||
"description": f"Input directory: {item.name}",
|
||||
"kind": "unknown",
|
||||
"path": f"/data/input/{item.name}",
|
||||
"path": f"{SANDBOX_INPUT_DIRECTORY}/{item.name}",
|
||||
}
|
||||
)
|
||||
|
||||
@@ -730,7 +730,7 @@ class ModuleExecutor:
|
||||
"module": module_identifier,
|
||||
}
|
||||
|
||||
def read_module_output(self, container_id: str, output_file: str = "/data/output/stream.jsonl") -> str:
|
||||
def read_module_output(self, container_id: str, output_file: str = f"{SANDBOX_OUTPUT_DIRECTORY}/stream.jsonl") -> str:
|
||||
"""Read output file from a running module container.
|
||||
|
||||
:param container_id: The container identifier.
|
||||
|
||||
@@ -13,8 +13,7 @@ from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
from uuid import uuid4
|
||||
|
||||
from fuzzforge_types.executions import FuzzForgeExecutionIdentifier
|
||||
|
||||
from fuzzforge_runner.constants import FuzzForgeExecutionIdentifier
|
||||
from fuzzforge_runner.exceptions import WorkflowExecutionError
|
||||
from fuzzforge_runner.executor import ModuleExecutor
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"""FuzzForge Runner - Main runner interface.
|
||||
|
||||
This module provides the high-level interface for FuzzForge OSS,
|
||||
This module provides the high-level interface for FuzzForge AI,
|
||||
coordinating module execution, workflow orchestration, and storage.
|
||||
|
||||
"""
|
||||
|
||||
@@ -39,7 +39,7 @@ def get_logger() -> BoundLogger:
|
||||
|
||||
|
||||
class LocalStorage:
|
||||
"""Local filesystem storage backend for FuzzForge OSS.
|
||||
"""Local filesystem storage backend for FuzzForge AI.
|
||||
|
||||
Provides lightweight storage for execution results while using
|
||||
direct source mounting (no copying) for input assets.
|
||||
|
||||
@@ -6,12 +6,10 @@ authors = []
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.14"
|
||||
dependencies = [
|
||||
"boto3==1.42.8",
|
||||
"podman==5.6.0",
|
||||
"pydantic>=2.12.4",
|
||||
"pytest==9.0.2",
|
||||
"fuzzforge-common==0.0.1",
|
||||
"fuzzforge-types==0.0.1",
|
||||
"testcontainers[minio]==4.13.3",
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
@@ -23,4 +21,3 @@ lints = [
|
||||
|
||||
[tool.uv.sources]
|
||||
fuzzforge-common = { workspace = true }
|
||||
fuzzforge-types = { workspace = true }
|
||||
|
||||
@@ -8,17 +8,21 @@ common test utilities shared across multiple FuzzForge packages.
|
||||
import random
|
||||
import string
|
||||
from os import environ
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
from typing import TYPE_CHECKING
|
||||
from uuid import uuid4, uuid7
|
||||
|
||||
import boto3
|
||||
import pytest
|
||||
from fuzzforge_common.sandboxes.engines.podman.configuration import PodmanConfiguration
|
||||
from fuzzforge_common.storage.configuration import StorageConfiguration
|
||||
from podman import PodmanClient
|
||||
from testcontainers.minio import MinioContainer
|
||||
from pydantic import UUID7
|
||||
|
||||
# Constants for validation (moved from enterprise SDK)
|
||||
# Type aliases for identifiers (inlined from fuzzforge-types)
|
||||
type FuzzForgeProjectIdentifier = UUID7
|
||||
type FuzzForgeModuleIdentifier = UUID7
|
||||
type FuzzForgeWorkflowIdentifier = UUID7
|
||||
type FuzzForgeExecutionIdentifier = UUID7
|
||||
|
||||
# Constants for validation
|
||||
FUZZFORGE_PROJECT_NAME_LENGTH_MIN: int = 3
|
||||
FUZZFORGE_PROJECT_NAME_LENGTH_MAX: int = 64
|
||||
FUZZFORGE_PROJECT_DESCRIPTION_LENGTH_MAX: int = 256
|
||||
@@ -35,16 +39,6 @@ if TYPE_CHECKING:
|
||||
from collections.abc import Callable, Generator
|
||||
from pathlib import Path
|
||||
|
||||
from fuzzforge_types import (
|
||||
FuzzForgeExecutionIdentifier,
|
||||
FuzzForgeModuleIdentifier,
|
||||
FuzzForgeProjectIdentifier,
|
||||
FuzzForgeWorkflowIdentifier,
|
||||
)
|
||||
|
||||
|
||||
MINIO_DEFAULT_IMAGE: str = "minio/minio:RELEASE.2025-09-07T16-13-09Z"
|
||||
|
||||
|
||||
def generate_random_string(
|
||||
min_length: int,
|
||||
@@ -201,65 +195,6 @@ def random_module_execution_identifier() -> Callable[[], FuzzForgeExecutionIdent
|
||||
return inner
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def minio_container() -> Generator[MinioContainer]:
|
||||
"""Provide MinIO testcontainer for test session.
|
||||
|
||||
Creates a MinIO container that persists for the entire test session.
|
||||
All tests share the same container but use different buckets/keys.
|
||||
|
||||
:return: MinIO container instance.
|
||||
|
||||
"""
|
||||
with MinioContainer(image=MINIO_DEFAULT_IMAGE) as container:
|
||||
yield container
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def minio_container_configuration(minio_container: MinioContainer) -> dict[str, str]:
|
||||
"""TODO."""
|
||||
return cast("dict[str, str]", minio_container.get_config())
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def storage_configuration(minio_container_configuration: dict[str, str]) -> StorageConfiguration:
|
||||
"""Provide S3 storage backend connected to MinIO testcontainer.
|
||||
|
||||
Creates the bucket in MinIO before returning the backend instance.
|
||||
|
||||
:param minio_container: MinIO testcontainer fixture.
|
||||
:return: Configured S3StorageBackend instance with bucket already created.
|
||||
|
||||
"""
|
||||
return StorageConfiguration(
|
||||
endpoint=f"http://{minio_container_configuration['endpoint']}",
|
||||
access_key=minio_container_configuration["access_key"],
|
||||
secret_key=minio_container_configuration["secret_key"],
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def boto3_client(minio_container_configuration: dict[str, str]) -> Any:
|
||||
"""TODO."""
|
||||
return boto3.client(
|
||||
"s3",
|
||||
endpoint_url=f"http://{minio_container_configuration['endpoint']}",
|
||||
aws_access_key_id=minio_container_configuration["access_key"],
|
||||
aws_secret_access_key=minio_container_configuration["secret_key"],
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def random_bucket(
|
||||
boto3_client: Any,
|
||||
random_project_identifier: Callable[[], FuzzForgeProjectIdentifier],
|
||||
) -> str:
|
||||
"""TODO."""
|
||||
project_identifier: FuzzForgeProjectIdentifier = random_project_identifier()
|
||||
boto3_client.create_bucket(Bucket=str(project_identifier))
|
||||
return str(project_identifier)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def podman_socket() -> str:
|
||||
"""TODO."""
|
||||
|
||||
@@ -1,33 +0,0 @@
|
||||
PACKAGE=$(word 1, $(shell uv version))
|
||||
VERSION=$(word 2, $(shell uv version))
|
||||
|
||||
ARTIFACTS?=./dist
|
||||
SOURCES=./src
|
||||
|
||||
.PHONY: clean format mypy ruff version wheel
|
||||
|
||||
clean:
|
||||
@find . -type d \( \
|
||||
-name '*.egg-info' \
|
||||
-o -name '.mypy_cache' \
|
||||
-o -name '.ruff_cache' \
|
||||
-o -name '__pycache__' \
|
||||
\) -printf 'removing directory %p\n' -exec rm -rf {} +
|
||||
|
||||
cloc:
|
||||
cloc $(SOURCES)
|
||||
|
||||
format:
|
||||
uv run ruff format $(SOURCES)
|
||||
|
||||
mypy:
|
||||
uv run mypy $(SOURCES)
|
||||
|
||||
ruff:
|
||||
uv run ruff check --fix $(SOURCES)
|
||||
|
||||
version:
|
||||
@echo '$(PACKAGE)@$(VERSION)'
|
||||
|
||||
wheel:
|
||||
uv build --out-dir $(ARTIFACTS)
|
||||
@@ -1,3 +0,0 @@
|
||||
# FuzzForge types
|
||||
|
||||
...
|
||||
@@ -1,6 +0,0 @@
|
||||
[mypy]
|
||||
plugins = pydantic.mypy
|
||||
strict = True
|
||||
warn_unused_ignores = True
|
||||
warn_redundant_casts = True
|
||||
warn_return_any = True
|
||||
@@ -1,17 +0,0 @@
|
||||
[project]
|
||||
name = "fuzzforge-types"
|
||||
version = "0.0.1"
|
||||
description = "Collection of types for the FuzzForge API."
|
||||
authors = []
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.14"
|
||||
dependencies = [
|
||||
"pydantic==2.12.4",
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
lints = [
|
||||
"bandit==1.8.6",
|
||||
"mypy==1.18.2",
|
||||
"ruff==0.14.4",
|
||||
]
|
||||
@@ -1,19 +0,0 @@
|
||||
line-length = 120
|
||||
|
||||
[lint]
|
||||
select = [ "ALL" ]
|
||||
ignore = [
|
||||
"COM812", # conflicts with the formatter
|
||||
"D100", # ignoring missing docstrings in public modules
|
||||
"D104", # ignoring missing docstrings in public packages
|
||||
"D203", # conflicts with 'D211'
|
||||
"D213", # conflicts with 'D212'
|
||||
"TD002", # ignoring missing author in 'TODO' statements
|
||||
"TD003", # ignoring missing issue link in 'TODO' statements
|
||||
]
|
||||
|
||||
[lint.per-file-ignores]
|
||||
"tests/*" = [
|
||||
"PLR2004", # allowing comparisons using unamed numerical constants in tests
|
||||
"S101", # allowing 'assert' statements in tests
|
||||
]
|
||||
@@ -1,37 +0,0 @@
|
||||
"""FuzzForge types package.
|
||||
|
||||
This package exports all public types used across FuzzForge components.
|
||||
|
||||
"""
|
||||
|
||||
from fuzzforge_types.definitions import (
|
||||
FuzzForgeDefinitionIdentifier,
|
||||
FuzzForgeDefinitionTypes,
|
||||
)
|
||||
from fuzzforge_types.executions import (
|
||||
FuzzForgeExecution,
|
||||
FuzzForgeExecutionError,
|
||||
FuzzForgeExecutionIdentifier,
|
||||
FuzzForgeExecutionIncludeFilter,
|
||||
FuzzForgeExecutionStatus,
|
||||
)
|
||||
from fuzzforge_types.identifiers import FuzzForgeProjectIdentifier
|
||||
from fuzzforge_types.modules import FuzzForgeModule, FuzzForgeModuleIdentifier
|
||||
from fuzzforge_types.projects import FuzzForgeProject
|
||||
from fuzzforge_types.workflows import FuzzForgeWorkflow, FuzzForgeWorkflowIdentifier
|
||||
|
||||
__all__ = [
|
||||
"FuzzForgeDefinitionIdentifier",
|
||||
"FuzzForgeDefinitionTypes",
|
||||
"FuzzForgeExecution",
|
||||
"FuzzForgeExecutionError",
|
||||
"FuzzForgeExecutionIdentifier",
|
||||
"FuzzForgeExecutionIncludeFilter",
|
||||
"FuzzForgeExecutionStatus",
|
||||
"FuzzForgeModule",
|
||||
"FuzzForgeModuleIdentifier",
|
||||
"FuzzForgeProject",
|
||||
"FuzzForgeProjectIdentifier",
|
||||
"FuzzForgeWorkflow",
|
||||
"FuzzForgeWorkflowIdentifier",
|
||||
]
|
||||
@@ -1,11 +0,0 @@
|
||||
"""TODO."""
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class Base(BaseModel):
|
||||
"""TODO."""
|
||||
|
||||
model_config = {
|
||||
"from_attributes": True,
|
||||
}
|
||||
@@ -1,26 +0,0 @@
|
||||
"""Definition types for FuzzForge.
|
||||
|
||||
This module defines the base types and enums for FuzzForge definitions,
|
||||
including modules and workflows.
|
||||
|
||||
"""
|
||||
|
||||
from enum import StrEnum
|
||||
|
||||
from pydantic import UUID7
|
||||
|
||||
|
||||
class FuzzForgeDefinitionTypes(StrEnum):
|
||||
"""Kind of FuzzForge definition.
|
||||
|
||||
Discriminator enum used to distinguish between module and workflow definitions
|
||||
in the unified definitions table.
|
||||
|
||||
"""
|
||||
|
||||
MODULE_DEFINITION = "module"
|
||||
WORKFLOW_DEFINITION = "workflow"
|
||||
|
||||
|
||||
# Type aliases for definition identifiers
|
||||
type FuzzForgeDefinitionIdentifier = UUID7
|
||||
@@ -1,75 +0,0 @@
|
||||
"""TODO."""
|
||||
|
||||
from datetime import datetime # noqa: TC003
|
||||
from enum import StrEnum
|
||||
|
||||
from pydantic import UUID7, Field
|
||||
|
||||
from fuzzforge_types.bases import Base
|
||||
from fuzzforge_types.definitions import FuzzForgeDefinitionIdentifier, FuzzForgeDefinitionTypes # noqa: TC001
|
||||
from fuzzforge_types.identifiers import FuzzForgeProjectIdentifier # noqa: TC001
|
||||
|
||||
|
||||
class FuzzForgeExecutionStatus(StrEnum):
|
||||
"""TODO."""
|
||||
|
||||
PENDING = "PENDING"
|
||||
RUNNING = "RUNNING"
|
||||
FINISHED = "FINISHED"
|
||||
|
||||
|
||||
class FuzzForgeExecutionError(StrEnum):
|
||||
"""TODO."""
|
||||
|
||||
GENERIC_ERROR = "GENERIC_ERROR"
|
||||
|
||||
|
||||
class FuzzForgeExecutionIncludeFilter(StrEnum):
|
||||
"""Filter for including specific execution types when listing.
|
||||
|
||||
Used to filter executions by their definition kind (module or workflow).
|
||||
This filter is required when listing executions to ensure explicit intent.
|
||||
|
||||
"""
|
||||
|
||||
ALL = "all"
|
||||
MODULES = "modules"
|
||||
WORKFLOWS = "workflows"
|
||||
|
||||
|
||||
# Type alias for unified execution identifiers (UUIDv7); one identifier
# space covers both module and workflow executions.
type FuzzForgeExecutionIdentifier = UUID7
|
||||
|
||||
|
||||
class FuzzForgeExecution(Base):
    """DTO for unified execution data.

    Represents both module and workflow executions in a single model.
    The definition_kind field discriminates between the two types.
    """

    # Primary identifier of this execution (UUIDv7 alias).
    execution_identifier: FuzzForgeExecutionIdentifier = Field(
        description="The identifier of this execution.",
    )
    execution_status: FuzzForgeExecutionStatus = Field(
        description="The current status of the execution.",
    )
    # Nullable: ``None`` when no error is associated with the execution.
    execution_error: FuzzForgeExecutionError | None = Field(
        description="The error associated with the execution, if any.",
    )
    project_identifier: FuzzForgeProjectIdentifier = Field(
        description="The identifier of the project this execution belongs to.",
    )
    definition_identifier: FuzzForgeDefinitionIdentifier = Field(
        description="The identifier of the definition (module or workflow) being executed.",
    )
    # Discriminator between module and workflow executions
    # (FuzzForgeDefinitionTypes.MODULE_DEFINITION / WORKFLOW_DEFINITION).
    definition_kind: FuzzForgeDefinitionTypes = Field(
        description="The kind of definition being executed (module or workflow).",
    )
    created_at: datetime = Field(
        description="The creation date of the execution.",
    )
    updated_at: datetime = Field(
        description="The latest modification date of the execution.",
    )
|
||||
@@ -1,5 +0,0 @@
|
||||
"""TODO."""
|
||||
|
||||
from pydantic import UUID7
|
||||
|
||||
type FuzzForgeProjectIdentifier = UUID7
|
||||
@@ -1,30 +0,0 @@
|
||||
"""TODO."""
|
||||
|
||||
from datetime import datetime # noqa: TC003
|
||||
|
||||
from pydantic import Field
|
||||
|
||||
from fuzzforge_types.bases import Base
|
||||
from fuzzforge_types.definitions import FuzzForgeDefinitionIdentifier
|
||||
|
||||
type FuzzForgeModuleIdentifier = FuzzForgeDefinitionIdentifier
|
||||
|
||||
|
||||
class FuzzForgeModule(Base):
    """DTO describing a FuzzForge module definition.

    Carries the module's identity, human-readable metadata, and
    creation/modification timestamps.
    """

    module_description: str = Field(
        description="The description of the module.",
    )
    module_identifier: FuzzForgeModuleIdentifier = Field(
        description="The identifier of the module.",
    )
    module_name: str = Field(
        description="The name of the module.",
    )
    created_at: datetime = Field(
        description="The creation date of the module.",
    )
    updated_at: datetime = Field(
        description="The latest modification date of the module.",
    )
|
||||
@@ -1,34 +0,0 @@
|
||||
"""TODO."""
|
||||
|
||||
from datetime import datetime # noqa: TC003
|
||||
|
||||
from pydantic import Field
|
||||
|
||||
from fuzzforge_types.bases import Base
|
||||
from fuzzforge_types.executions import FuzzForgeExecution # noqa: TC001
|
||||
from fuzzforge_types.identifiers import FuzzForgeProjectIdentifier # noqa: TC001
|
||||
|
||||
|
||||
class FuzzForgeProject(Base):
    """DTO describing a FuzzForge project.

    Carries the project's identity, human-readable metadata, timestamps,
    and optionally the executions that belong to it.
    """

    project_description: str = Field(
        description="The description of the project.",
    )
    project_identifier: FuzzForgeProjectIdentifier = Field(
        description="The identifier of the project.",
    )
    project_name: str = Field(
        description="The name of the project.",
    )
    created_at: datetime = Field(
        description="The creation date of the project.",
    )
    updated_at: datetime = Field(
        description="The latest modification date of the project.",
    )

    # Optional: ``None`` when executions were not requested/loaded,
    # otherwise the module and workflow executions of this project.
    executions: list[FuzzForgeExecution] | None = Field(
        default=None,
        description="The module and workflow executions associated with the project.",
    )
|
||||
@@ -1,30 +0,0 @@
|
||||
"""TODO."""
|
||||
|
||||
from datetime import datetime # noqa: TC003
|
||||
|
||||
from pydantic import Field
|
||||
|
||||
from fuzzforge_types.bases import Base
|
||||
from fuzzforge_types.definitions import FuzzForgeDefinitionIdentifier
|
||||
|
||||
type FuzzForgeWorkflowIdentifier = FuzzForgeDefinitionIdentifier
|
||||
|
||||
|
||||
class FuzzForgeWorkflow(Base):
    """DTO describing a FuzzForge workflow definition.

    Carries the workflow's identity, human-readable metadata, and
    creation/modification timestamps.
    """

    workflow_description: str = Field(
        description="The description of the workflow.",
    )
    workflow_identifier: FuzzForgeWorkflowIdentifier = Field(
        description="The identifier of the workflow.",
    )
    workflow_name: str = Field(
        description="The name of the workflow.",
    )
    created_at: datetime = Field(
        description="The creation date of the workflow.",
    )
    updated_at: datetime = Field(
        description="The latest modification date of the workflow.",
    )
|
||||
@@ -1,7 +1,7 @@
|
||||
[project]
|
||||
name = "fuzzforge-oss"
|
||||
name = "fuzzforge-ai"
|
||||
version = "1.0.0"
|
||||
description = "FuzzForge OSS - AI-driven security research platform for local execution"
|
||||
description = "FuzzForge AI - AI-driven security research platform for local execution"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.14"
|
||||
authors = [
|
||||
@@ -15,14 +15,12 @@ dev = [
|
||||
"pytest-httpx==0.36.0",
|
||||
"fuzzforge-tests",
|
||||
"fuzzforge-common",
|
||||
"fuzzforge-types",
|
||||
"fuzzforge-mcp",
|
||||
]
|
||||
|
||||
[tool.uv.workspace]
|
||||
members = [
|
||||
"fuzzforge-common",
|
||||
"fuzzforge-types",
|
||||
"fuzzforge-modules/fuzzforge-modules-sdk",
|
||||
"fuzzforge-runner",
|
||||
"fuzzforge-mcp",
|
||||
@@ -32,7 +30,6 @@ members = [
|
||||
|
||||
[tool.uv.sources]
|
||||
fuzzforge-common = { workspace = true }
|
||||
fuzzforge-types = { workspace = true }
|
||||
fuzzforge-modules-sdk = { workspace = true }
|
||||
fuzzforge-runner = { workspace = true }
|
||||
fuzzforge-mcp = { workspace = true }
|
||||
|
||||
Reference in New Issue
Block a user