mirror of
https://github.com/Vyntral/god-eye.git
synced 2026-05-16 21:43:34 +02:00
3a4c230aa7
Complete architectural overhaul. Replaces the v0.1 monolithic scanner with an event-driven pipeline of auto-registered modules. Foundation (internal/): - eventbus: typed pub/sub, 20 event types, race-safe, drop counter - module: registry with phase-based selection - store: thread-safe host store with per-host locks + deep-copy reads - pipeline: coordinator with phase barriers + panic recovery - config: 5 scan profiles + 3 AI tiers + YAML loader + auto-discovery Modules (26 auto-registered across 6 phases): - Discovery: passive (26 sources), bruteforce, recursive, AXFR, GitHub dorks, CT streaming, permutation, reverse DNS, vhost, ASN, supply chain (npm + PyPI) - Enrichment: HTTP probe + tech fingerprint + TLS appliance ID, ports - Analysis: security checks, takeover (110+ sigs), cloud, JavaScript, GraphQL, JWT, headers (OWASP), HTTP smuggling, AI cascade, Nuclei - Reporting: TXT/JSON/CSV writer + AI scan brief AI layer (internal/ai/ + internal/modules/ai/): - Three profiles: lean (16 GB), balanced (32 GB MoE), heavy (64 GB) - Six event-driven handlers: CVE, JS file, HTTP response, secret filter, multi-agent vuln enrichment, anomaly + executive report - Content-hash cache dedups Ollama calls across hosts - Auto-pull of missing models via /api/pull with streaming progress - End-of-scan AI SCAN BRIEF in terminal with top chains + next actions Nuclei compat layer (internal/nucleitpl/): - Executes ~13k community templates (HTTP subset) - Auto-download of nuclei-templates ZIP to ~/.god-eye/nuclei-templates - Scope filter rejects off-host templates (eliminates OSINT FPs) Operations: - Interactive wizard (internal/wizard/) — zero-flag launch - LivePrinter (internal/tui/) — colorized event stream - Diff engine + scheduler (internal/diff, internal/scheduler) for continuous ASM monitoring with webhook alerts - Proxy support (internal/proxyconf/): http / https / socks5 / socks5h + basic auth Fixes #1 — native SOCKS5 / Tor compatibility via --proxy flag. 
185 unit tests across 15 packages, all race-detector clean.
196 lines
5.0 KiB
Go
196 lines
5.0 KiB
Go
// Package httpprobe probes every resolved host with HTTPS/HTTP and extracts
|
|
// status code, title, server, technology stack, and TLS information.
|
|
//
|
|
// Runs in PhaseEnrichment. Reads hosts from the store (not events) to avoid
|
|
// the phase-barrier race where late subscribers miss earlier events.
|
|
package httpprobe
|
|
|
|
import (
|
|
"context"
|
|
"crypto/tls"
|
|
"sync"
|
|
"time"
|
|
|
|
"god-eye/internal/eventbus"
|
|
gohttp "god-eye/internal/http"
|
|
"god-eye/internal/module"
|
|
"god-eye/internal/store"
|
|
)
|
|
|
|
// ModuleName is the identifier under which this module is registered in the
// module registry and the Source tag stamped on every event it publishes.
const ModuleName = "http.probe"
// probeModule implements the module interface. It carries no state of its
// own; all per-run state lives inside Run.
type probeModule struct{}

// Register adds the probe module to the global module registry.
func Register() { module.Register(&probeModule{}) }

// Name returns the stable registry identifier for this module.
func (*probeModule) Name() string { return ModuleName }

// Phase places the module in the enrichment phase, after discovery.
func (*probeModule) Phase() module.Phase { return module.PhaseEnrichment }

// Consumes declares interest in DNS-resolution events. Note that the bulk of
// hosts is drained from the store; the subscription only catches late events
// published while this phase is already running (see Run).
func (*probeModule) Consumes() []eventbus.EventType { return []eventbus.EventType{eventbus.EventDNSResolved} }

// Produces lists every event type this module may publish.
func (*probeModule) Produces() []eventbus.EventType {
	return []eventbus.EventType{eventbus.EventHTTPProbed, eventbus.EventTLSAnalyzed, eventbus.EventTechDetected}
}

// DefaultEnabled reports that probing is on unless explicitly disabled.
func (*probeModule) DefaultEnabled() bool { return true }
|
|
|
func (p *probeModule) Run(mctx module.Context) error {
|
|
if mctx.Config.Bool("no_probe", false) {
|
|
return nil
|
|
}
|
|
|
|
conc := mctx.Config.Int("concurrency", 500)
|
|
if conc <= 0 {
|
|
conc = 500
|
|
}
|
|
timeout := mctx.Config.Int("timeout", 5)
|
|
|
|
// Dedup across drain + late events.
|
|
processed := make(map[string]struct{})
|
|
var processedMu sync.Mutex
|
|
shouldProcess := func(host string) bool {
|
|
processedMu.Lock()
|
|
defer processedMu.Unlock()
|
|
if _, dup := processed[host]; dup {
|
|
return false
|
|
}
|
|
processed[host] = struct{}{}
|
|
return true
|
|
}
|
|
|
|
work := make(chan string, conc*2)
|
|
var wg sync.WaitGroup
|
|
for i := 0; i < conc; i++ {
|
|
wg.Add(1)
|
|
go func() {
|
|
defer wg.Done()
|
|
for host := range work {
|
|
p.probeOne(mctx, host, timeout)
|
|
}
|
|
}()
|
|
}
|
|
|
|
// Drain: every host in the store with at least one IP is worth probing.
|
|
for _, h := range mctx.Store.All(mctx.Ctx) {
|
|
if h == nil || h.Subdomain == "" || len(h.IPs) == 0 {
|
|
continue
|
|
}
|
|
if !shouldProcess(h.Subdomain) {
|
|
continue
|
|
}
|
|
select {
|
|
case work <- h.Subdomain:
|
|
case <-mctx.Ctx.Done():
|
|
close(work)
|
|
wg.Wait()
|
|
return nil
|
|
}
|
|
}
|
|
|
|
// Also listen for late DNSResolved events (recursive/permutation running
|
|
// concurrently in other modules may produce new resolves during our
|
|
// phase — pick them up).
|
|
sub := mctx.Bus.Subscribe(eventbus.EventDNSResolved, func(_ context.Context, e eventbus.Event) {
|
|
ev, ok := e.(eventbus.DNSResolved)
|
|
if !ok || len(ev.IPs) == 0 {
|
|
return
|
|
}
|
|
if !shouldProcess(ev.Subdomain) {
|
|
return
|
|
}
|
|
select {
|
|
case work <- ev.Subdomain:
|
|
case <-mctx.Ctx.Done():
|
|
}
|
|
})
|
|
defer sub.Unsubscribe()
|
|
|
|
// Brief window for late arrivals.
|
|
select {
|
|
case <-time.After(500 * time.Millisecond):
|
|
case <-mctx.Ctx.Done():
|
|
}
|
|
|
|
close(work)
|
|
wg.Wait()
|
|
return nil
|
|
}
|
|
|
|
func (p *probeModule) probeOne(mctx module.Context, host string, timeout int) {
|
|
if mctx.Ctx.Err() != nil {
|
|
return
|
|
}
|
|
r := gohttp.ProbeHTTP(host, timeout)
|
|
if r == nil || r.StatusCode == 0 {
|
|
return
|
|
}
|
|
|
|
_ = mctx.Store.Upsert(mctx.Ctx, host, func(h *store.Host) {
|
|
h.StatusCode = r.StatusCode
|
|
h.ContentLength = r.ContentLength
|
|
h.Title = r.Title
|
|
h.Server = r.Server
|
|
if len(r.Tech) > 0 {
|
|
store.AddTechnologies(h, r.Tech)
|
|
}
|
|
h.ResponseMs = r.ResponseMs
|
|
h.TLSVersion = r.TLSVersion
|
|
h.TLSIssuer = r.TLSIssuer
|
|
h.TLSSelfSigned = r.TLSSelfSigned
|
|
if r.TLSExpiry != "" {
|
|
if tm, err := time.Parse("2006-01-02", r.TLSExpiry); err == nil {
|
|
h.TLSExpiry = tm
|
|
}
|
|
}
|
|
if r.TLSFingerprint != nil {
|
|
fp := *r.TLSFingerprint
|
|
h.TLSFingerprint = &store.TLSFingerprint{
|
|
Vendor: fp.Vendor,
|
|
Product: fp.Product,
|
|
Version: fp.Version,
|
|
ApplianceKind: fp.ApplianceType,
|
|
InternalHosts: append([]string(nil), fp.InternalHosts...),
|
|
}
|
|
}
|
|
})
|
|
|
|
mctx.Bus.Publish(mctx.Ctx, eventbus.HTTPProbed{
|
|
EventMeta: eventbus.EventMeta{At: time.Now(), Source: ModuleName, Target: host},
|
|
URL: "https://" + host,
|
|
StatusCode: r.StatusCode,
|
|
ContentLength: r.ContentLength,
|
|
Title: r.Title,
|
|
Server: r.Server,
|
|
Technologies: append([]string(nil), r.Tech...),
|
|
ResponseMs: r.ResponseMs,
|
|
TLSVersion: r.TLSVersion,
|
|
TLSSelfSigned: r.TLSSelfSigned,
|
|
})
|
|
|
|
for _, t := range r.Tech {
|
|
if t == "" {
|
|
continue
|
|
}
|
|
mctx.Bus.Publish(mctx.Ctx, eventbus.TechDetected{
|
|
EventMeta: eventbus.EventMeta{At: time.Now(), Source: ModuleName, Target: host},
|
|
Host: host,
|
|
Technology: t,
|
|
Confidence: 0.8,
|
|
})
|
|
}
|
|
|
|
if r.TLSFingerprint != nil {
|
|
mctx.Bus.Publish(mctx.Ctx, eventbus.TLSAnalyzed{
|
|
EventMeta: eventbus.EventMeta{At: time.Now(), Source: ModuleName, Target: host},
|
|
Host: host,
|
|
Version: r.TLSVersion,
|
|
Issuer: r.TLSIssuer,
|
|
SelfSigned: r.TLSSelfSigned,
|
|
Vendor: r.TLSFingerprint.Vendor,
|
|
Product: r.TLSFingerprint.Product,
|
|
ApplianceKind: r.TLSFingerprint.ApplianceType,
|
|
InternalHosts: append([]string(nil), r.TLSFingerprint.InternalHosts...),
|
|
})
|
|
}
|
|
}
|
|
|
|
// Blank reference pins the crypto/tls import: nothing else in this file uses
// the package directly, and removing the import would break compilation the
// next time a TLS constant is needed here.
var _ = tls.VersionTLS13