Files
Vyntral 3a4c230aa7 feat: v2.0 full rewrite — event-driven pipeline, AI + Nuclei + proxy
Complete architectural overhaul. Replaces the v0.1 monolithic scanner
with an event-driven pipeline of auto-registered modules.

Foundation (internal/):
- eventbus: typed pub/sub, 20 event types, race-safe, drop counter
- module: registry with phase-based selection
- store: thread-safe host store with per-host locks + deep-copy reads
- pipeline: coordinator with phase barriers + panic recovery
- config: 5 scan profiles + 3 AI tiers + YAML loader + auto-discovery

Modules (26 auto-registered across 6 phases):
- Discovery: passive (26 sources), bruteforce, recursive, AXFR, GitHub
  dorks, CT streaming, permutation, reverse DNS, vhost, ASN, supply
  chain (npm + PyPI)
- Enrichment: HTTP probe + tech fingerprint + TLS appliance ID, ports
- Analysis: security checks, takeover (110+ sigs), cloud, JavaScript,
  GraphQL, JWT, headers (OWASP), HTTP smuggling, AI cascade, Nuclei
- Reporting: TXT/JSON/CSV writer + AI scan brief

AI layer (internal/ai/ + internal/modules/ai/):
- Three tiers: lean (16 GB), balanced (32 GB MoE), heavy (64 GB)
- Six event-driven handlers: CVE, JS file, HTTP response, secret
  filter, multi-agent vuln enrichment, anomaly + executive report
- Content-hash cache dedups Ollama calls across hosts
- Auto-pull of missing models via /api/pull with streaming progress
- End-of-scan AI SCAN BRIEF in terminal with top chains + next actions

Nuclei compat layer (internal/nucleitpl/):
- Executes ~13k community templates (HTTP subset)
- Auto-download of nuclei-templates ZIP to ~/.god-eye/nuclei-templates
- Scope filter rejects off-host templates (eliminates OSINT FPs)

Operations:
- Interactive wizard (internal/wizard/) — zero-flag launch
- LivePrinter (internal/tui/) — colorized event stream
- Diff engine + scheduler (internal/diff, internal/scheduler) for
  continuous ASM monitoring with webhook alerts
- Proxy support (internal/proxyconf/): http / https / socks5 / socks5h
  + basic auth

Fixes #1 — native SOCKS5 / Tor compatibility via --proxy flag.

185 unit tests across 15 packages, all race-detector clean.
2026-04-18 16:48:41 +02:00

333 lines
9.4 KiB
Go

package wizard
import (
"context"
"fmt"
"io"
"strings"
"god-eye/internal/ai"
"god-eye/internal/config"
"god-eye/internal/output"
"god-eye/internal/validator"
)
// Choice is everything the wizard decided. Caller applies it to the
// scan Config, then runs the pipeline.
type Choice struct {
Target string // sanitized target domain (output of validator.SanitizeDomain)
AIProfile string // "lean", "balanced", "heavy", or "" (no AI)
AIAutoPull bool // download missing Ollama models before scanning (default true)
AIVerbose bool // log every AI query to stderr
ScanProfile string // "quick" / "bugbounty" / "pentest" / "asm-continuous" / "stealth-max"
MonitorInterval string // e.g. "24h" when asm-continuous chosen, empty otherwise
Live bool // live colorized event view enabled
LiveVerbosity int // 0 = findings only, 1 = normal (default), 2 = noisy
Output string // report file path; empty means no report file
Format string // report format: "txt" (default) / "json" / "csv"
Pipeline bool // always true — wizard runs the v2 pipeline
}
// Options tunes wizard behavior, mainly for tests.
// Zero-value fields get defaults: OllamaURL falls back to
// http://localhost:11434 inside Run; In/Out are handed to the prompter.
type Options struct {
In io.Reader // stdin
Out io.Writer // stdout
OllamaURL string // defaults to http://localhost:11434
}
// Run executes the interactive flow and returns the user's choices.
// Returns ErrCancelled if the user aborts at any stage.
//
// Steps: AI tier → Ollama/model check → target domain → scan profile →
// live view → AI verbosity → output file → summary + confirmation.
// Any prompter error (including EOF on stdin) aborts the whole wizard.
func Run(ctx context.Context, opts Options) (*Choice, error) {
	if opts.OllamaURL == "" {
		opts.OllamaURL = "http://localhost:11434"
	}
	p := newPrompter(opts.In, opts.Out)
	// Defaults: v2 pipeline, normal live verbosity, txt report, auto-pull on.
	choice := &Choice{Pipeline: true, LiveVerbosity: 1, Format: "txt", AIAutoPull: true}
	printBanner(opts.Out)
	// 1) AI tier selection.
	aiProfile, err := selectAIProfile(p)
	if err != nil {
		return nil, err
	}
	choice.AIProfile = aiProfile
	// 2) If AI chosen, check Ollama + models, ask to pull missing.
	// NOTE: handleAIModels may clear choice.AIProfile when Ollama is
	// unreachable and the user opts to continue without AI.
	if aiProfile != "" {
		if err := handleAIModels(ctx, p, opts.OllamaURL, aiProfile, choice); err != nil {
			return nil, err
		}
	}
	// 3) Target domain.
	domain, err := p.askDomain()
	if err != nil {
		return nil, err
	}
	choice.Target = validator.SanitizeDomain(domain)
	// 4) Scan profile (plus re-scan interval for asm-continuous).
	scanProfile, interval, err := selectScanProfile(p)
	if err != nil {
		return nil, err
	}
	choice.ScanProfile = scanProfile
	choice.MonitorInterval = interval
	// 5) Live terminal view.
	live, err := p.yesNo("Enable live colorized event view?", true)
	if err != nil {
		return nil, err
	}
	choice.Live = live
	if live {
		v, err := selectLiveVerbosity(p)
		if err != nil {
			return nil, err
		}
		choice.LiveVerbosity = v
	}
	// 6) AI verbose — only when AI is still enabled. Check choice.AIProfile,
	// not the local aiProfile: step 2 may have disabled AI after the tier
	// was selected, and asking about AI logging then would be misleading
	// (the stale local variable was a bug).
	if choice.AIProfile != "" {
		aiVerb, err := p.yesNo("Log every AI query to stderr (verbose)?", false)
		if err != nil {
			return nil, err
		}
		choice.AIVerbose = aiVerb
	}
	// 7) Output file (optional). Empty answer means no report file.
	outFile, err := p.askText("Save report to file", "", false, func(path string) error {
		return validator.ValidateOutputPath(path)
	})
	if err != nil {
		return nil, err
	}
	choice.Output = outFile
	if outFile != "" {
		f, err := selectOutputFormat(p)
		if err != nil {
			return nil, err
		}
		choice.Format = f
	}
	// 8) Final summary + confirm.
	printSummary(opts.Out, choice)
	confirm, err := p.yesNo("Start scan?", true)
	if err != nil {
		return nil, err
	}
	if !confirm {
		return nil, ErrCancelled
	}
	return choice, nil
}
// --- step implementations ------------------------------------------------
// selectAIProfile prompts for one of the three AI tiers or "No AI".
// It returns the tier name ("lean", "balanced", "heavy"), or the empty
// string when the user opts out of AI entirely.
func selectAIProfile(p *prompter) (string, error) {
	menu := []string{
		output.BoldWhite("Lean") + output.Dim(" — 16GB RAM · qwen3:1.7b + qwen2.5-coder:14b (default)"),
		output.BoldWhite("Balanced") + output.Dim(" — 32GB RAM · qwen3:4b + qwen3-coder:30b (MoE, 256K ctx)"),
		output.BoldWhite("Heavy") + output.Dim(" — 64GB RAM · qwen3:8b + qwen3-coder:30b (max quality)"),
		output.BoldWhite("No AI") + output.Dim(" — Pure recon without LLM analysis"),
	}
	picked, err := p.choose("Select AI tier", menu, 1)
	if err != nil {
		return "", err
	}
	// Entries 1-3 map to tier names; entry 4 ("No AI") and anything
	// unexpected mean no AI.
	tiers := [...]string{1: "lean", 2: "balanced", 3: "heavy"}
	if picked >= 1 && picked < len(tiers) {
		return tiers[picked], nil
	}
	return "", nil
}
// handleAIModels verifies the local Ollama endpoint and ensures the two
// models required by the chosen AI tier are present.
//
// Behavior:
//   - If Ollama is unreachable, the user may continue without AI (which
//     clears choice.AIProfile) or cancel the wizard (ErrCancelled).
//   - Otherwise the needed models are diffed against the installed set;
//     if any are missing the user is asked whether to pull them now,
//     and the answer is recorded in choice.AIAutoPull.
//   - On an affirmative answer the missing models are pulled right away
//     with streaming progress written to the prompter's writer.
func handleAIModels(ctx context.Context, p *prompter, ollamaURL, aiProfile string, choice *Choice) error {
// aiProfile comes from selectAIProfile, so the lookup is expected to
// succeed; the "found" result is deliberately discarded.
profile, _ := config.AIProfileByName(aiProfile)
needed := []string{profile.FastModel, profile.DeepModel}
p.printf("\n%s Checking Ollama at %s…\n", output.BoldCyan("⚙"), output.Dim(ollamaURL))
ensurer := ai.NewModelEnsurer(ollamaURL)
if err := ensurer.Reachable(ctx); err != nil {
// Ollama is down or unreachable: explain how to start it, then let
// the user choose between an AI-less run and aborting.
p.printf(" %s %v\n", output.Yellow("⚠"), err)
p.printf(" %s Start %s in another terminal, then retry.\n",
output.Dim("→"), output.BoldWhite("ollama serve"))
skip, yesErr := p.yesNo("Continue without AI for this run?", true)
if yesErr != nil {
return yesErr
}
if skip {
// Disable AI for this run; Run's later steps key off this field.
choice.AIProfile = ""
return nil
}
return ErrCancelled
}
installed, err := ensurer.Installed(ctx)
if err != nil {
return fmt.Errorf("query ollama: %w", err)
}
// Diff the tier's required models against what Ollama reports,
// tolerating ":latest"/base-name aliases via modelInstalled.
var missing []string
for _, m := range needed {
if !modelInstalled(installed, m) {
missing = append(missing, m)
}
}
if len(missing) == 0 {
p.printf(" %s All required models already present: %s\n",
output.Green("✓"), output.Dim(strings.Join(needed, ", ")))
return nil
}
p.printf(" %s Missing models: %s\n", output.Yellow("↓"),
output.BoldYellow(strings.Join(missing, ", ")))
pull, err := p.yesNo("Download missing models now?", true)
if err != nil {
return err
}
choice.AIAutoPull = pull
if !pull {
p.printf(" %s Skipping auto-pull — AI modules will no-op if models are still missing at scan time.\n",
output.Dim("·"))
return nil
}
// Pull immediately with progress streamed to the wizard's writer.
ensurer.Verbose = true
ensurer.Writer = p.w
if err := ensurer.EnsureAll(ctx, missing); err != nil {
return fmt.Errorf("pull: %w", err)
}
return nil
}
// selectScanProfile prompts for one of the five scan profiles and
// returns (profile, interval). The interval is only non-empty for the
// "asm-continuous" profile, where the user is asked for a re-scan
// cadence (a Go duration string, default "24h").
func selectScanProfile(p *prompter) (string, string, error) {
	menu := []string{
		output.BoldWhite("Quick") + output.Dim(" — passive enum + HTTP probe, no brute"),
		output.BoldWhite("Bug bounty") + output.Dim(" — full recon, AI + all features, stealth off (default)"),
		output.BoldWhite("Pentest") + output.Dim(" — full recon + light stealth, AI on"),
		output.BoldWhite("ASM continuous") + output.Dim(" — recurring scans with diff + alerts"),
		output.BoldWhite("Stealth max") + output.Dim(" — paranoid evasion, slow, passive-first"),
	}
	picked, err := p.choose("Select scan profile", menu, 2)
	if err != nil {
		return "", "", err
	}
	switch picked {
	case 1:
		return "quick", "", nil
	case 3:
		return "pentest", "", nil
	case 5:
		return "stealth-max", "", nil
	case 4:
		// Continuous ASM needs a re-scan cadence.
		interval, err := p.askText("Re-scan interval (Go duration: 30m, 6h, 24h)", "24h", true, func(s string) error {
			if s == "" {
				return fmt.Errorf("interval required")
			}
			return nil
		})
		if err != nil {
			return "", "", err
		}
		return "asm-continuous", interval, nil
	}
	// Choice 2 plus any defensive fallthrough: the default profile.
	return "bugbounty", "", nil
}
// selectLiveVerbosity prompts for the live-view noise level and maps
// the 1-based menu choice to the 0-based verbosity used elsewhere
// (0 = findings only, 1 = normal, 2 = noisy). On prompter error it
// returns the normal level (1) alongside the error.
func selectLiveVerbosity(p *prompter) (int, error) {
	levels := []string{
		output.BoldWhite("Findings only") + output.Dim(" — vulns, secrets, takeovers (quiet)"),
		output.BoldWhite("Normal") + output.Dim(" — findings + discovery + HTTP (default)"),
		output.BoldWhite("Noisy") + output.Dim(" — everything including phase markers + module errors"),
	}
	picked, err := p.choose("Live view verbosity", levels, 2)
	if err != nil {
		return 1, err
	}
	return picked - 1, nil
}
// selectOutputFormat prompts for the report file format (default is
// menu entry 2, "json"). On prompter error it falls back to "txt".
func selectOutputFormat(p *prompter) (string, error) {
	formats := []string{"txt", "json", "csv"}
	picked, err := p.choose("Report format", formats, 2)
	if err != nil {
		return "txt", err
	}
	// choose returns a 1-based index into formats.
	return formats[picked-1], nil
}
// modelInstalled is a local copy of ai.alreadyInstalled to avoid
// exporting the helper from the ai package.
//
// A model counts as installed when the exact name or the name with an
// explicit ":latest" suffix is present; a tagged model ("name:tag")
// additionally matches an installed bare "name" or "name:latest".
func modelInstalled(installed map[string]bool, model string) bool {
	candidates := []string{model, model + ":latest"}
	if base, _, tagged := strings.Cut(model, ":"); tagged {
		candidates = append(candidates, base, base+":latest")
	}
	for _, c := range candidates {
		if installed[c] {
			return true
		}
	}
	return false
}
// --- presentation --------------------------------------------------------
// printBanner writes the wizard's opening banner: a ruled box with the
// product name and a reminder that Ctrl-C aborts at any time.
func printBanner(w io.Writer) {
	rule := output.BoldCyan("═══════════════════════════════════════════════════════════")
	fmt.Fprintln(w)
	fmt.Fprintln(w, rule)
	fmt.Fprintln(w, " "+output.BoldGreen("God's Eye v2")+output.Dim(" — interactive setup"))
	fmt.Fprintln(w, " "+output.Dim("Ctrl-C to abort at any time."))
	fmt.Fprintln(w, rule)
}
// printSummary renders the final "Scan summary" table before the
// confirm prompt. AI, monitor, and output rows appear only when the
// corresponding feature was enabled.
func printSummary(w io.Writer, c *Choice) {
	fmt.Fprintln(w)
	fmt.Fprintln(w, output.BoldCyan("─── Scan summary ───"))
	// Pad the key BEFORE applying ANSI dim codes: escape sequences are
	// invisible on screen but still count toward the %-Ns width, so
	// padding afterwards would produce ragged columns.
	row := func(key, val string) {
		fmt.Fprintf(w, " %s %s\n", output.Dim(fmt.Sprintf("%-16s", key)), output.BoldWhite(val))
	}
	row("Target", c.Target)
	row("Scan profile", c.ScanProfile)
	if c.AIProfile == "" {
		row("AI tier", "(disabled)")
	} else {
		row("AI tier", c.AIProfile)
		row("AI auto-pull", boolStr(c.AIAutoPull))
		row("AI verbose", boolStr(c.AIVerbose))
	}
	row("Live view", fmt.Sprintf("%s (v=%d)", boolStr(c.Live), c.LiveVerbosity))
	if c.MonitorInterval != "" {
		row("Monitor every", c.MonitorInterval)
	}
	if c.Output != "" {
		row("Output", fmt.Sprintf("%s (format=%s)", c.Output, c.Format))
	}
}
// boolStr renders a bool as "yes" or "no" for the summary table.
func boolStr(b bool) string {
	s := "no"
	if b {
		s = "yes"
	}
	return s
}