Compare commits

..

2 Commits

Author SHA1 Message Date
AFredefon
e7022c2c82 Merge pull request #48 from FuzzingLabs/dev 2026-03-17 08:15:42 +01:00
AFredefon
07c32de294 Merge pull request #46 from FuzzingLabs/dev
Refactor hub integration and enhance TUI with new features
2026-03-11 08:07:05 +01:00
7 changed files with 3 additions and 908 deletions

View File

@@ -294,17 +294,3 @@ class HubConfig(BaseModel):
default=True,
description="Cache discovered tools",
)
#: Workflow hints indexed by "after:<tool_name>" keys.
#: Loaded inline or merged from workflow_hints_file.
workflow_hints: dict[str, Any] = Field(
default_factory=dict,
description="Workflow hints indexed by 'after:<tool_name>'",
)
#: Optional path to an external workflow-hints.json file.
#: Relative paths are resolved relative to the hub-config.json location.
workflow_hints_file: str | None = Field(
default=None,
description="Path to an external workflow-hints.json to load and merge",
)

View File

@@ -87,28 +87,6 @@ class HubRegistry:
config=server_config,
)
# Load and merge external workflow hints file if specified.
if self._config.workflow_hints_file:
hints_path = Path(self._config.workflow_hints_file)
if not hints_path.is_absolute():
hints_path = config_path.parent / hints_path
if hints_path.exists():
try:
with hints_path.open() as hf:
hints_data = json.load(hf)
self._config.workflow_hints.update(hints_data.get("hints", {}))
logger.info(
"Loaded workflow hints",
path=str(hints_path),
hints=len(self._config.workflow_hints),
)
except Exception as hints_err:
logger.warning(
"Failed to load workflow hints file",
path=str(hints_path),
error=str(hints_err),
)
logger.info(
"Loaded hub configuration",
path=str(config_path),
@@ -240,15 +218,6 @@ class HubRegistry:
server.discovery_error = None
server.tools = tools
def get_workflow_hint(self, tool_name: str) -> dict | None:
"""Get the workflow hint for a tool by name.
:param tool_name: Tool name (e.g. ``binwalk_extract``).
:returns: Hint dict for the ``after:<tool_name>`` key, or None.
"""
return self._config.workflow_hints.get(f"after:{tool_name}") or None
def get_all_tools(self) -> list:
"""Get all discovered tools from all servers.

View File

@@ -13,7 +13,6 @@ from __future__ import annotations
import json
import logging
import mimetypes
from datetime import UTC, datetime
from pathlib import Path
from tarfile import open as Archive # noqa: N812
@@ -91,7 +90,6 @@ class LocalStorage:
"# FuzzForge storage - ignore large/temporary files\n"
"runs/\n"
"output/\n"
"artifacts.json\n"
"!config.json\n"
)
@@ -275,203 +273,3 @@ class LocalStorage:
except Exception as exc:
msg = f"Failed to extract results: {exc}"
raise StorageError(msg) from exc
# ------------------------------------------------------------------
# Artifact tracking
# ------------------------------------------------------------------
def _artifacts_path(self, project_path: Path) -> Path:
    """Return the on-disk location of the artifact registry file.

    :param project_path: Path to the project directory.
    :returns: Path to ``artifacts.json`` inside the project storage dir.
    """
    base = self._get_project_path(project_path)
    return base / "artifacts.json"
def _load_artifacts(self, project_path: Path) -> list[dict[str, Any]]:
    """Read the artifact registry from disk, tolerating absence or corruption.

    :param project_path: Path to the project directory.
    :returns: List of artifact dicts; ``[]`` when the file is missing
        or cannot be read/parsed.
    """
    registry = self._artifacts_path(project_path)
    if not registry.exists():
        return []
    try:
        raw = registry.read_text()
        return json.loads(raw)  # type: ignore[no-any-return]
    except (OSError, json.JSONDecodeError):
        # A broken registry is treated as empty rather than fatal.
        return []
def _save_artifacts(self, project_path: Path, artifacts: list[dict[str, Any]]) -> None:
    """Write the full artifact registry to disk, creating parent dirs.

    :param project_path: Path to the project directory.
    :param artifacts: Complete artifact list to persist.
    """
    target = self._artifacts_path(project_path)
    target.parent.mkdir(parents=True, exist_ok=True)
    # default=str covers non-JSON-native values (e.g. Path objects).
    payload = json.dumps(artifacts, indent=2, default=str)
    target.write_text(payload)
def _classify_file(self, file_path: Path) -> str:
    """Classify a file into a human-friendly type string.

    Classification order: ELF magic sniff for extension-less files,
    then MIME type, then a suffix lookup table, defaulting to "binary".

    :param file_path: Path to the file.
    :returns: Type string (e.g. "elf-binary", "text", "json").
    """
    mime, _ = mimetypes.guess_type(str(file_path))
    suffix = file_path.suffix.lower()
    # Extension-less files: sniff the 4-byte ELF magic. Read only the
    # header — the previous read_bytes()[:4] loaded the entire file into
    # memory, which is wasteful for large extracted binaries.
    if mime is None and suffix == "":
        try:
            with file_path.open("rb") as fh:
                if fh.read(4) == b"\x7fELF":
                    return "elf-binary"
        except OSError:
            pass
    if mime:
        if "json" in mime:
            return "json"
        if "text" in mime or "xml" in mime or "yaml" in mime:
            return "text"
        if "image" in mime:
            return "image"
        if "octet-stream" in mime:
            return "binary"
    type_map: dict[str, str] = {
        ".json": "json",
        ".sarif": "sarif",
        ".md": "markdown",
        ".txt": "text",
        ".log": "text",
        ".csv": "csv",
        ".yaml": "yaml",
        ".yml": "yaml",
        ".xml": "xml",
        ".html": "html",
        ".elf": "elf-binary",
        ".so": "elf-binary",
        ".bin": "binary",
        ".gz": "archive",
        ".tar": "archive",
        ".zip": "archive",
    }
    return type_map.get(suffix, "binary")
def scan_artifacts(
    self,
    project_path: Path,
    server_name: str,
    tool_name: str,
) -> list[dict[str, Any]]:
    """Scan the output directory for new or modified files and register them.

    Compares the current state of .fuzzforge/output/ against the existing
    artifact registry (artifacts.json) and registers any new or modified
    files. A file counts as modified when its size or mtime differs from
    the recorded entry.

    :param project_path: Path to the project directory.
    :param server_name: Hub server that produced the artifacts.
    :param tool_name: Tool that produced the artifacts.
    :returns: List of newly registered (or updated) artifact dicts.
    """
    output_path = self.get_project_output_path(project_path)
    # No output directory yet means nothing to register.
    if output_path is None or not output_path.exists():
        return []
    existing = self._load_artifacts(project_path)
    # Index current registry by container path for O(1) change detection.
    known: dict[str, dict[str, Any]] = {a["path"]: a for a in existing}
    now = datetime.now(tz=UTC).isoformat()
    new_artifacts: list[dict[str, Any]] = []
    for file_path in output_path.rglob("*"):
        if not file_path.is_file():
            continue
        # Use the container-style path (/app/output/...) so it's
        # directly usable in subsequent tool calls.
        relative = file_path.relative_to(output_path)
        container_path = f"/app/output/{relative}"
        stat = file_path.stat()
        size = stat.st_size
        mtime = datetime.fromtimestamp(stat.st_mtime, tz=UTC).isoformat()
        prev = known.get(container_path)
        if prev and prev.get("mtime") == mtime and prev.get("size") == size:
            continue  # Unchanged — skip
        artifact: dict[str, Any] = {
            "path": container_path,
            "host_path": str(file_path),
            "type": self._classify_file(file_path),
            "size": size,
            "mtime": mtime,
            "source_server": server_name,
            "source_tool": tool_name,
            "registered_at": now,
        }
        if prev:
            # Update existing entry in-place to keep registry order stable.
            idx = next(i for i, a in enumerate(existing) if a["path"] == container_path)
            existing[idx] = artifact
        else:
            existing.append(artifact)
        new_artifacts.append(artifact)
    # Persist only when something actually changed, avoiding needless writes.
    if new_artifacts:
        self._save_artifacts(project_path, existing)
        logger.info(
            "Registered %d new artifact(s) from %s:%s",
            len(new_artifacts),
            server_name,
            tool_name,
        )
    return new_artifacts
def list_artifacts(
self,
project_path: Path,
*,
source: str | None = None,
artifact_type: str | None = None,
) -> list[dict[str, Any]]:
"""List registered artifacts, with optional filters.
:param project_path: Path to the project directory.
:param source: Filter by source server name.
:param artifact_type: Filter by artifact type (e.g. "elf-binary", "json").
:returns: List of matching artifact dicts.
"""
artifacts = self._load_artifacts(project_path)
if source:
artifacts = [a for a in artifacts if a.get("source_server") == source]
if artifact_type:
artifacts = [a for a in artifacts if a.get("type") == artifact_type]
return artifacts
def get_artifact(self, project_path: Path, path: str) -> dict[str, Any] | None:
"""Get a single artifact by its container path.
:param project_path: Path to the project directory.
:param path: Container path of the artifact (e.g. /app/output/...).
:returns: Artifact dict, or None if not found.
"""
artifacts = self._load_artifacts(project_path)
for artifact in artifacts:
if artifact["path"] == path:
return artifact
return None

View File

@@ -291,33 +291,7 @@ async def execute_hub_tool(
except Exception: # noqa: BLE001, S110 - never fail the tool call due to recording issues
pass
# Scan for new artifacts produced by the tool in /app/output.
response = result.to_dict()
try:
storage = get_storage()
project_path = get_project_path()
new_artifacts = storage.scan_artifacts(
project_path=project_path,
server_name=result.server_name,
tool_name=result.tool_name,
)
if new_artifacts:
response["artifacts"] = [
{"path": a["path"], "type": a["type"], "size": a["size"]}
for a in new_artifacts
]
except Exception: # noqa: BLE001, S110 - never fail the tool call due to artifact scanning
pass
# Append workflow suggestions based on hints configured for this tool.
try:
hint = executor.registry.get_workflow_hint(result.tool_name)
if hint:
response["suggested_next_steps"] = hint
except Exception: # noqa: BLE001, S110 - never fail the tool call due to hint lookup
pass
return response
return result.to_dict()
except Exception as e:
if isinstance(e, ToolError):

View File

@@ -146,70 +146,3 @@ async def get_execution_results(execution_id: str, extract_to: str | None = None
except Exception as exception:
message: str = f"Failed to get execution results: {exception}"
raise ToolError(message) from exception
@mcp.tool
async def list_artifacts(
    source: str | None = None,
    artifact_type: str | None = None,
) -> dict[str, Any]:
    """List all artifacts produced by hub tools in the current project.

    Artifacts are files created by tool executions in /app/output/.
    They are automatically tracked after each execute_hub_tool call.

    :param source: Filter by source server name (e.g. "binwalk-mcp").
    :param artifact_type: Filter by type (e.g. "elf-binary", "json", "text", "archive").
    :return: List of artifacts with path, type, size, and source info.
    """
    storage = get_storage()
    project_path: Path = get_project_path()
    try:
        matching = storage.list_artifacts(
            project_path,
            source=source,
            artifact_type=artifact_type,
        )
    except Exception as exception:
        message: str = f"Failed to list artifacts: {exception}"
        raise ToolError(message) from exception
    return {
        "success": True,
        "artifacts": matching,
        "count": len(matching),
    }
@mcp.tool
async def get_artifact(path: str) -> dict[str, Any]:
    """Get metadata for a specific artifact by its container path.

    :param path: Container path of the artifact
        (e.g. /app/output/extract_abc123/squashfs-root/usr/sbin/httpd).
    :return: Artifact metadata including path, type, size, source tool, and timestamps.
    """
    storage = get_storage()
    project_path: Path = get_project_path()
    try:
        found = storage.get_artifact(project_path, path)
    except Exception as exception:
        message: str = f"Failed to get artifact: {exception}"
        raise ToolError(message) from exception
    if found is None:
        return {
            "success": False,
            "path": path,
            "error": "Artifact not found",
        }
    return {
        "success": True,
        "artifact": found,
    }

View File

@@ -1,566 +1 @@
{
"servers": [
{
"name": "bloodhound-mcp",
"description": "bloodhound-mcp \u2014 active-directory",
"type": "docker",
"image": "bloodhound-mcp:latest",
"category": "active-directory",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "binwalk-mcp",
"description": "binwalk-mcp \u2014 binary-analysis",
"type": "docker",
"image": "binwalk-mcp:latest",
"category": "binary-analysis",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "capa-mcp",
"description": "capa-mcp \u2014 binary-analysis",
"type": "docker",
"image": "capa-mcp:latest",
"category": "binary-analysis",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "ghidra-mcp",
"description": "ghidra-mcp \u2014 binary-analysis",
"type": "docker",
"image": "ghidra-mcp:latest",
"category": "binary-analysis",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "ida-mcp",
"description": "ida-mcp \u2014 binary-analysis",
"type": "docker",
"image": "ida-mcp:latest",
"category": "binary-analysis",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "radare2-mcp",
"description": "radare2-mcp \u2014 binary-analysis",
"type": "docker",
"image": "radare2-mcp:latest",
"category": "binary-analysis",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "yara-mcp",
"description": "yara-mcp \u2014 binary-analysis",
"type": "docker",
"image": "yara-mcp:latest",
"category": "binary-analysis",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "daml-viewer-mcp",
"description": "daml-viewer-mcp \u2014 blockchain",
"type": "docker",
"image": "daml-viewer-mcp:latest",
"category": "blockchain",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "medusa-mcp",
"description": "medusa-mcp \u2014 blockchain",
"type": "docker",
"image": "medusa-mcp:latest",
"category": "blockchain",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "solazy-mcp",
"description": "solazy-mcp \u2014 blockchain",
"type": "docker",
"image": "solazy-mcp:latest",
"category": "blockchain",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "prowler-mcp",
"description": "prowler-mcp \u2014 cloud-security",
"type": "docker",
"image": "prowler-mcp:latest",
"category": "cloud-security",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "roadrecon-mcp",
"description": "roadrecon-mcp \u2014 cloud-security",
"type": "docker",
"image": "roadrecon-mcp:latest",
"category": "cloud-security",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "trivy-mcp",
"description": "trivy-mcp \u2014 cloud-security",
"type": "docker",
"image": "trivy-mcp:latest",
"category": "cloud-security",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "semgrep-mcp",
"description": "semgrep-mcp \u2014 code-security",
"type": "docker",
"image": "semgrep-mcp:latest",
"category": "code-security",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "searchsploit-mcp",
"description": "searchsploit-mcp \u2014 exploitation",
"type": "docker",
"image": "searchsploit-mcp:latest",
"category": "exploitation",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "boofuzz-mcp",
"description": "boofuzz-mcp \u2014 fuzzing",
"type": "docker",
"image": "boofuzz-mcp:latest",
"category": "fuzzing",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "dharma-mcp",
"description": "dharma-mcp \u2014 fuzzing",
"type": "docker",
"image": "dharma-mcp:latest",
"category": "fuzzing",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "dnstwist-mcp",
"description": "dnstwist-mcp \u2014 osint",
"type": "docker",
"image": "dnstwist-mcp:latest",
"category": "osint",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "maigret-mcp",
"description": "maigret-mcp \u2014 osint",
"type": "docker",
"image": "maigret-mcp:latest",
"category": "osint",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "hashcat-mcp",
"description": "hashcat-mcp \u2014 password-cracking",
"type": "docker",
"image": "hashcat-mcp:latest",
"category": "password-cracking",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "externalattacker-mcp",
"description": "externalattacker-mcp \u2014 reconnaissance",
"type": "docker",
"image": "externalattacker-mcp:latest",
"category": "reconnaissance",
"capabilities": [
"NET_RAW"
],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "masscan-mcp",
"description": "masscan-mcp \u2014 reconnaissance",
"type": "docker",
"image": "masscan-mcp:latest",
"category": "reconnaissance",
"capabilities": [
"NET_RAW"
],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "networksdb-mcp",
"description": "networksdb-mcp \u2014 reconnaissance",
"type": "docker",
"image": "networksdb-mcp:latest",
"category": "reconnaissance",
"capabilities": [
"NET_RAW"
],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "nmap-mcp",
"description": "nmap-mcp \u2014 reconnaissance",
"type": "docker",
"image": "nmap-mcp:latest",
"category": "reconnaissance",
"capabilities": [
"NET_RAW"
],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "pd-tools-mcp",
"description": "pd-tools-mcp \u2014 reconnaissance",
"type": "docker",
"image": "pd-tools-mcp:latest",
"category": "reconnaissance",
"capabilities": [
"NET_RAW"
],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "shodan-mcp",
"description": "shodan-mcp \u2014 reconnaissance",
"type": "docker",
"image": "shodan-mcp:latest",
"category": "reconnaissance",
"capabilities": [
"NET_RAW"
],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "whatweb-mcp",
"description": "whatweb-mcp \u2014 reconnaissance",
"type": "docker",
"image": "whatweb-mcp:latest",
"category": "reconnaissance",
"capabilities": [
"NET_RAW"
],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "zoomeye-mcp",
"description": "zoomeye-mcp \u2014 reconnaissance",
"type": "docker",
"image": "zoomeye-mcp:latest",
"category": "reconnaissance",
"capabilities": [
"NET_RAW"
],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "gitleaks-mcp",
"description": "gitleaks-mcp \u2014 secrets",
"type": "docker",
"image": "gitleaks-mcp:latest",
"category": "secrets",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "otx-mcp",
"description": "otx-mcp \u2014 threat-intel",
"type": "docker",
"image": "otx-mcp:latest",
"category": "threat-intel",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "virustotal-mcp",
"description": "virustotal-mcp \u2014 threat-intel",
"type": "docker",
"image": "virustotal-mcp:latest",
"category": "threat-intel",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "burp-mcp",
"description": "burp-mcp \u2014 web-security",
"type": "docker",
"image": "burp-mcp:latest",
"category": "web-security",
"capabilities": [
"NET_RAW"
],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "ffuf-mcp",
"description": "ffuf-mcp \u2014 web-security",
"type": "docker",
"image": "ffuf-mcp:latest",
"category": "web-security",
"capabilities": [
"NET_RAW"
],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "nikto-mcp",
"description": "nikto-mcp \u2014 web-security",
"type": "docker",
"image": "nikto-mcp:latest",
"category": "web-security",
"capabilities": [
"NET_RAW"
],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "nuclei-mcp",
"description": "nuclei-mcp \u2014 web-security",
"type": "docker",
"image": "nuclei-mcp:latest",
"category": "web-security",
"capabilities": [
"NET_RAW"
],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "sqlmap-mcp",
"description": "sqlmap-mcp \u2014 web-security",
"type": "docker",
"image": "sqlmap-mcp:latest",
"category": "web-security",
"capabilities": [
"NET_RAW"
],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "waybackurls-mcp",
"description": "waybackurls-mcp \u2014 web-security",
"type": "docker",
"image": "waybackurls-mcp:latest",
"category": "web-security",
"capabilities": [
"NET_RAW"
],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "go-analyzer-mcp",
"description": "Go static analysis: fuzzable entry points, existing Fuzz* targets, unsafe/cgo usage, CVE scanning via govulncheck",
"type": "docker",
"image": "go-analyzer-mcp:latest",
"category": "code-security",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "go-harness-tester-mcp",
"description": "Test Go fuzz harness quality: compilation, seed execution, fuzzing trial, quality scoring 0-100",
"type": "docker",
"image": "go-harness-tester-mcp:latest",
"category": "code-security",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "go-fuzzer-mcp",
"description": "Run Go native fuzzing (go test -fuzz) with blocking and continuous modes, crash collection, session management",
"type": "docker",
"image": "go-fuzzer-mcp:latest",
"category": "code-security",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
},
{
"name": "go-crash-analyzer-mcp",
"description": "Analyze Go fuzzing crashes: reproduce, classify (nil-deref, OOR, panic, race, etc.), deduplicate by stack signature",
"type": "docker",
"image": "go-crash-analyzer-mcp:latest",
"category": "code-security",
"capabilities": [],
"volumes": [
"/home/afredefon/.fuzzforge/hub/workspace:/data"
],
"enabled": true,
"source_hub": "mcp-security-hub"
}
],
"workflow_hints_file": "mcp-security-hub/workflow-hints.json"
}
{"servers": []}

View File

@@ -1,6 +1,6 @@
[project]
name = "fuzzforge-oss"
version = "0.8.0"
version = "1.0.0"
description = "FuzzForge AI - AI-driven security research platform for local execution"
readme = "README.md"
requires-python = ">=3.14"