From d35d41732b0d41eec1c94600bedbd4b8a5836a9f Mon Sep 17 00:00:00 2001
From: Ronni Skansing
Date: Thu, 27 Nov 2025 20:19:59 +0100
Subject: [PATCH] Added new capture engines for json, formdata and urlencoded.
 Added support for multiple values in capture find.

Signed-off-by: Ronni Skansing
---
 api-test-server/main.go                       | 332 ++++++++++++++
 backend/proxy/proxy.go                        | 412 +++++++++++++++++-
 backend/service/proxy.go                      | 165 +++++--
 docker-compose.yml                            |   2 +
 .../components/modal/CommandPalette.svelte    |   7 +
 frontend/src/lib/utils/proxyYamlCompletion.js | 134 +++++-
 6 files changed, 991 insertions(+), 61 deletions(-)

diff --git a/api-test-server/main.go b/api-test-server/main.go
index 45c03a9..79e7e0f 100644
--- a/api-test-server/main.go
+++ b/api-test-server/main.go
@@ -42,6 +42,11 @@ func main() {
 	mux := http.NewServeMux()
 	mux.HandleFunc("POST /api-sender/{clientID}", handleAPISender)
 	mux.HandleFunc("POST /webhook", handleTestWebhook) // TODO: rename method and usage to use the test prefix
+	mux.HandleFunc("GET /test-login", handleLoginPage)
+	mux.HandleFunc("POST /test-login", handleLogin)
+	mux.HandleFunc("GET /test-dashboard", handleDashboard)
+	mux.HandleFunc("POST /test-logout", handleLogout)
+	mux.HandleFunc("GET /test-json-api", handleJSONAPI)
 	err := http.ListenAndServe(":80", mux)
 	if err != nil {
 		panic(err)
 	}
@@ -162,3 +167,330 @@ func cloneBody(req *http.Request) (io.ReadCloser, io.ReadCloser, error) {
 	body2 := io.NopCloser(strings.NewReader(string(bodyBytes)))
 	return body1, body2, nil
 }
+
+// login test page handlers
+func handleLoginPage(w http.ResponseWriter, req *http.Request) {
+	log.Println("serving login test page")
+	html := `<!DOCTYPE html>
+<html>
+<head>
+	<title>Login Test Page</title>
+</head>
+<body>

+	<h1>Login Test Page</h1>
+	<p>
+		Credentials: admin / admin<br>
+		Purpose: Test proxy capture engines
+	</p>
+
+	<h2>URL Encoded Form (application/x-www-form-urlencoded)</h2>
+	<form method="POST" action="/test-login">
+		<input type="text" name="username" placeholder="username">
+		<input type="password" name="password" placeholder="password">
+		<button type="submit">Login</button>
+	</form>
+
+	<h2>JSON (application/json)</h2>
+	<form id="json-login-form">
+		<input type="text" name="username" placeholder="username">
+		<input type="password" name="password" placeholder="password">
+		<button type="submit">Login</button>
+	</form>
+
+	<h2>Form Data (multipart/form-data)</h2>
+	<form method="POST" action="/test-login" enctype="multipart/form-data">
+		<input type="text" name="username" placeholder="username">
+		<input type="password" name="password" placeholder="password">
+		<button type="submit">Login</button>
+	</form>
+
+	<script>
+		// submit the JSON form as application/json via fetch
+		document.getElementById("json-login-form").addEventListener("submit", function (e) {
+			e.preventDefault();
+			fetch("/test-login", {
+				method: "POST",
+				headers: { "Content-Type": "application/json" },
+				body: JSON.stringify({
+					username: this.username.value,
+					password: this.password.value
+				})
+			}).then(function () {
+				window.location = "/test-dashboard";
+			});
+		});
+	</script>
+</body>
+</html>
+ + + +` + w.Header().Set("Content-Type", "text/html") + w.WriteHeader(http.StatusOK) + w.Write([]byte(html)) +} + +func handleLogin(w http.ResponseWriter, req *http.Request) { + contentType := req.Header.Get("Content-Type") + log.Printf("received login request with content-type: %s", contentType) + + var username, password string + + // parse based on content type + if strings.Contains(contentType, "application/json") { + var data map[string]string + if err := json.NewDecoder(req.Body).Decode(&data); err != nil { + log.Println("failed to decode json:", err) + respondJSON(w, http.StatusBadRequest, map[string]string{"error": "invalid json"}) + return + } + username = data["username"] + password = data["password"] + log.Printf("json login attempt: username=%s", username) + } else if strings.Contains(contentType, "multipart/form-data") { + if err := req.ParseMultipartForm(10 << 20); err != nil { + log.Println("failed to parse multipart form:", err) + respondJSON(w, http.StatusBadRequest, map[string]string{"error": "invalid form data"}) + return + } + username = req.FormValue("username") + password = req.FormValue("password") + log.Printf("formdata login attempt: username=%s", username) + } else if strings.Contains(contentType, "application/x-www-form-urlencoded") { + if err := req.ParseForm(); err != nil { + log.Println("failed to parse form:", err) + respondJSON(w, http.StatusBadRequest, map[string]string{"error": "invalid form"}) + return + } + username = req.FormValue("username") + password = req.FormValue("password") + log.Printf("urlencoded login attempt: username=%s", username) + } else { + log.Printf("unsupported content type: %s", contentType) + respondJSON(w, http.StatusBadRequest, map[string]string{"error": "unsupported content type"}) + return + } + + // validate credentials + if username == "admin" && password == "admin" { + // set session cookie + sessionID := fmt.Sprintf("session_%d", time.Now().Unix()) + cookie := &http.Cookie{ + Name: "test_session", + Value: sessionID, + Path: "/", + HttpOnly: true, + MaxAge: 3600, + } + http.SetCookie(w, cookie) + log.Printf("login successful: username=%s, session=%s", username, sessionID) + respondJSON(w, http.StatusOK, map[string]string{ + "message": "login successful", + "session_id": sessionID, + "username": username, + }) + } else { + log.Printf("login failed: invalid credentials for username=%s", username) + respondJSON(w, http.StatusUnauthorized, map[string]string{"error": "invalid credentials"}) + } +} + +func handleDashboard(w http.ResponseWriter, req *http.Request) { + // check for session cookie + cookie, err := req.Cookie("test_session") + if err != nil { + log.Println("no session cookie found, redirecting to login") + http.Redirect(w, req, "/test-login", http.StatusSeeOther) + return + } + + log.Printf("dashboard access: session=%s", cookie.Value) + html := ` + + + Dashboard + + + +

+	<h1>Dashboard</h1>
+
+	<div>
+		✓ Login Successful!<br>
+		You are now logged in.
+	</div>
+	<div>
+		Session ID: ` + cookie.Value + `
+	</div>
+ + + + +` + w.Header().Set("Content-Type", "text/html") + w.WriteHeader(http.StatusOK) + w.Write([]byte(html)) +} + +func handleLogout(w http.ResponseWriter, req *http.Request) { + // get session cookie before clearing + cookie, _ := req.Cookie("test_session") + sessionID := "" + if cookie != nil { + sessionID = cookie.Value + } + + // clear session cookie + http.SetCookie(w, &http.Cookie{ + Name: "test_session", + Value: "", + Path: "/", + HttpOnly: true, + MaxAge: -1, + }) + log.Printf("logout successful: session=%s", sessionID) + respondJSON(w, http.StatusOK, map[string]string{"message": "logout successful"}) +} + +func respondJSON(w http.ResponseWriter, status int, data map[string]string) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(status) + json.NewEncoder(w).Encode(data) +} + +func handleJSONAPI(w http.ResponseWriter, req *http.Request) { + log.Println("serving json api test endpoint") + + data := map[string]interface{}{ + "secret": "1234", + "config": map[string]interface{}{ + "url": "https://test.test", + }, + "users": []map[string]interface{}{ + { + "username": "foo", + "password": "summervacation!!!!", + }, + { + "username": "alice", + "password": "wonderland2024", + }, + { + "username": "bob", + "password": "builder123", + }, + { + "username": "charlie", + "password": "chocolate_factory", + }, + }, + } + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + json.NewEncoder(w).Encode(data) + log.Println("json api response sent") +} diff --git a/backend/proxy/proxy.go b/backend/proxy/proxy.go index 225304f..0930117 100644 --- a/backend/proxy/proxy.go +++ b/backend/proxy/proxy.go @@ -8,6 +8,7 @@ import ( "encoding/json" "fmt" "io" + "mime/multipart" "net" "net/http" "net/http/cookiejar" @@ -1489,7 +1490,7 @@ func (m *ProxyHandler) onResponseBody(resp *http.Response, body []byte, session if hostConfig.Capture != nil { for _, capture := range hostConfig.Capture { if m.shouldProcessResponseBodyCapture(capture, resp.Request) { - if capture.Find == "" { + if capture.GetFindAsString() == "" { m.handlePathBasedCapture(capture, session, resp) } else { m.captureFromText(string(body), capture, session, resp.Request, "response_body") @@ -1513,7 +1514,9 @@ func (m *ProxyHandler) onResponseCookies(resp *http.Response, session *service.P if hostConfig.Capture != nil { for _, capture := range hostConfig.Capture { - if capture.From == "cookie" && m.matchesPath(capture, resp.Request) { + // check for both engine-based and from-based cookie captures + isCookieCapture := capture.Engine == "cookie" || capture.From == "cookie" + if isCookieCapture && m.matchesPath(capture, resp.Request) { if cookieData := m.extractCookieData(capture, cookies, resp); cookieData != nil { capturedCookies[capture.Name] = cookieData // always overwrite cookie data to ensure we have the latest cookies @@ -1615,7 +1618,7 @@ func (m *ProxyHandler) handlePathBasedCapture(capture service.ProxyServiceCaptur } func (m *ProxyHandler) extractCookieData(capture service.ProxyServiceCaptureRule, cookies []*http.Cookie, resp *http.Response) map[string]string { - cookieName := capture.Find + cookieName := capture.GetFindAsString() if cookieName == "" { return nil } @@ -1680,28 +1683,63 @@ func (m *ProxyHandler) readRequestBody(req *http.Request) []byte { return body } +// captureFromText is a wrapper that calls captureFromTextWithResponse with nil response func (m *ProxyHandler) captureFromText(text string, capture service.ProxyServiceCaptureRule, session *service.ProxySession, 
req *http.Request, captureContext string) { - if capture.Find == "" { + m.captureFromTextWithResponse(text, capture, session, req, nil, captureContext) +} + +func (m *ProxyHandler) captureFromTextWithResponse(text string, capture service.ProxyServiceCaptureRule, session *service.ProxySession, req *http.Request, resp *http.Response, captureContext string) { + findStr := capture.GetFindAsString() + if findStr == "" { return } - re, err := regexp.Compile(capture.Find) - if err != nil { - m.logger.Errorw("invalid capture regex", "error", err, "pattern", capture.Find) + // determine the engine to use + engine := capture.Engine + if engine == "" && capture.From == "cookie" { + engine = "cookie" + } + if engine == "" { + engine = "regex" + } + + // capture based on engine type + var capturedData map[string]string + var err error + + switch engine { + case "header": + capturedData = m.captureFromHeader(req, resp, capture, session, captureContext) + case "cookie": + capturedData = m.captureFromCookie(req, resp, capture, session, captureContext) + case "json": + capturedData = m.captureFromJSON(text, capture, session, req, captureContext) + case "form", "urlencoded": + capturedData = m.captureFromURLEncoded(text, capture, session, req, captureContext) + case "formdata", "multipart": + capturedData = m.captureFromMultipart(text, capture, session, req, captureContext) + case "regex": + fallthrough + default: + capturedData, err = m.captureFromRegex(text, capture, session, req, captureContext) + if err != nil { + m.logger.Errorw("regex capture failed", "error", err, "pattern", findStr) + return + } + } + + if capturedData == nil { return } - matches := re.FindStringSubmatch(text) - if len(matches) == 0 { - return - } - - capturedData := m.buildCapturedData(matches, capture, session, req, captureContext) session.CapturedData.Store(capture.Name, capturedData) m.checkCaptureCompletion(session, capture.Name) + // determine if this is a cookie capture (for backward compatibility) + isCookieCapture := engine == "cookie" || capture.From == "cookie" + // submit non-cookie captures immediately - if capture.From != "cookie" && session.CampaignRecipientID != nil && session.CampaignID != nil { + if !isCookieCapture && session.CampaignRecipientID != nil && session.CampaignID != nil { // convert to map[string]interface{} for webhook webhookData := map[string]interface{}{ capture.Name: capturedData, @@ -1714,6 +1752,346 @@ func (m *ProxyHandler) captureFromText(text string, capture service.ProxyService m.handleCampaignFlowProgression(session, req) } +// captureFromRegex captures data using regex pattern +func (m *ProxyHandler) captureFromRegex(text string, capture service.ProxyServiceCaptureRule, session *service.ProxySession, req *http.Request, captureContext string) (map[string]string, error) { + findStr := capture.GetFindAsString() + re, err := regexp.Compile(findStr) + if err != nil { + return nil, err + } + + matches := re.FindStringSubmatch(text) + if len(matches) == 0 { + return nil, nil + } + + return m.buildCapturedData(matches, capture, session, req, captureContext), nil +} + +// captureFromHeader captures header value by key +func (m *ProxyHandler) captureFromHeader(req *http.Request, resp *http.Response, capture service.ProxyServiceCaptureRule, session *service.ProxySession, captureContext string) map[string]string { + findFields := capture.GetFindAsStrings() + if len(findFields) == 0 { + return nil + } + + capturedData := make(map[string]string) + capturedData["capture_name"] = capture.Name + + // 
determine which headers to search + var headers http.Header + if captureContext == "response_header" && resp != nil { + headers = resp.Header + } else if req != nil { + headers = req.Header + } else { + return nil + } + + foundAny := false + for _, headerName := range findFields { + headerValue := headers.Get(headerName) + if headerValue != "" { + capturedData[headerName] = headerValue + foundAny = true + } + } + + if !foundAny { + return nil + } + + return capturedData +} + +// captureFromCookie captures cookie value by name +func (m *ProxyHandler) captureFromCookie(req *http.Request, resp *http.Response, capture service.ProxyServiceCaptureRule, session *service.ProxySession, captureContext string) map[string]string { + findFields := capture.GetFindAsStrings() + if len(findFields) == 0 { + return nil + } + + capturedData := make(map[string]string) + capturedData["capture_name"] = capture.Name + + foundAny := false + for _, cookieName := range findFields { + var cookieValue string + + // check response cookies + if resp != nil { + for _, cookie := range resp.Cookies() { + if cookie.Name == cookieName { + cookieValue = cookie.Value + break + } + } + } + + // if not found in response, check request cookies + if cookieValue == "" && req != nil { + if cookie, err := req.Cookie(cookieName); err == nil { + cookieValue = cookie.Value + } + } + + if cookieValue != "" { + capturedData[cookieName] = cookieValue + capturedData["cookie_value"] = cookieValue // for backward compatibility + foundAny = true + + // add domain info + domain := session.TargetDomain + if captureContext != "response_header" && captureContext != "response_body" && req != nil { + domain = req.Host + } + if domain != "" { + capturedData["cookie_domain"] = domain + } + } + } + + if !foundAny { + return nil + } + + return capturedData +} + +// captureFromJSON captures data from JSON body using path notation +func (m *ProxyHandler) captureFromJSON(text string, capture service.ProxyServiceCaptureRule, session *service.ProxySession, req *http.Request, captureContext string) map[string]string { + findFields := capture.GetFindAsStrings() + if len(findFields) == 0 { + return nil + } + + // parse JSON + var data interface{} + if err := json.Unmarshal([]byte(text), &data); err != nil { + m.logger.Debugw("failed to parse JSON for capture", "error", err) + return nil + } + + capturedData := make(map[string]string) + capturedData["capture_name"] = capture.Name + + foundAny := false + for _, path := range findFields { + value := m.extractJSONPath(data, path) + if value != "" { + capturedData[path] = value + foundAny = true + } + } + + if !foundAny { + return nil + } + + return capturedData +} + +// extractJSONPath extracts value from JSON using path notation (e.g., "user.name" or "[0].user.name") +func (m *ProxyHandler) extractJSONPath(data interface{}, path string) string { + if path == "" { + return "" + } + + parts := m.parseJSONPath(path) + current := data + + for _, part := range parts { + if part.isArray { + // handle array index + arr, ok := current.([]interface{}) + if !ok { + return "" + } + if part.index < 0 || part.index >= len(arr) { + return "" + } + current = arr[part.index] + } else { + // handle object key + obj, ok := current.(map[string]interface{}) + if !ok { + return "" + } + val, exists := obj[part.key] + if !exists { + return "" + } + current = val + } + } + + // convert final value to string + return m.jsonValueToString(current) +} + +// jsonPathPart represents a part of a JSON path +type jsonPathPart struct { + isArray 
bool + index int + key string +} + +// parseJSONPath parses a JSON path string into parts (e.g., "[0].user.name" -> [{array:0}, {key:"user"}, {key:"name"}]) +func (m *ProxyHandler) parseJSONPath(path string) []jsonPathPart { + var parts []jsonPathPart + current := "" + inBracket := false + + for i := 0; i < len(path); i++ { + ch := path[i] + + if ch == '[' { + if current != "" { + parts = append(parts, jsonPathPart{isArray: false, key: current}) + current = "" + } + inBracket = true + } else if ch == ']' { + if inBracket && current != "" { + if idx, err := strconv.Atoi(current); err == nil { + parts = append(parts, jsonPathPart{isArray: true, index: idx}) + } + current = "" + } + inBracket = false + } else if ch == '.' && !inBracket { + if current != "" { + parts = append(parts, jsonPathPart{isArray: false, key: current}) + current = "" + } + } else { + current += string(ch) + } + } + + if current != "" { + parts = append(parts, jsonPathPart{isArray: false, key: current}) + } + + return parts +} + +// jsonValueToString converts a JSON value to string +func (m *ProxyHandler) jsonValueToString(value interface{}) string { + if value == nil { + return "" + } + + switch v := value.(type) { + case string: + return v + case float64: + return strconv.FormatFloat(v, 'f', -1, 64) + case bool: + return strconv.FormatBool(v) + case int: + return strconv.Itoa(v) + default: + // for complex types, return JSON representation + if bytes, err := json.Marshal(v); err == nil { + return string(bytes) + } + return "" + } +} + +// captureFromURLEncoded captures data from application/x-www-form-urlencoded body +func (m *ProxyHandler) captureFromURLEncoded(text string, capture service.ProxyServiceCaptureRule, session *service.ProxySession, req *http.Request, captureContext string) map[string]string { + findFields := capture.GetFindAsStrings() + if len(findFields) == 0 { + return nil + } + + // parse form data + values, err := url.ParseQuery(text) + if err != nil { + m.logger.Debugw("failed to parse URL encoded form data", "error", err) + return nil + } + + capturedData := make(map[string]string) + capturedData["capture_name"] = capture.Name + + foundAny := false + for _, fieldName := range findFields { + if value := values.Get(fieldName); value != "" { + capturedData[fieldName] = value + foundAny = true + } + } + + if !foundAny { + return nil + } + + return capturedData +} + +// captureFromMultipart captures data from multipart/form-data body +func (m *ProxyHandler) captureFromMultipart(text string, capture service.ProxyServiceCaptureRule, session *service.ProxySession, req *http.Request, captureContext string) map[string]string { + findFields := capture.GetFindAsStrings() + if len(findFields) == 0 { + return nil + } + + // get boundary from content-type header + var boundary string + if req != nil { + contentType := req.Header.Get("Content-Type") + if contentType != "" { + parts := strings.Split(contentType, "boundary=") + if len(parts) == 2 { + boundary = strings.Trim(parts[1], `"`) + } + } + } + + if boundary == "" { + m.logger.Debugw("no boundary found in multipart form data") + return nil + } + + // parse multipart form data + reader := multipart.NewReader(strings.NewReader(text), boundary) + capturedData := make(map[string]string) + capturedData["capture_name"] = capture.Name + + foundAny := false + for { + part, err := reader.NextPart() + if err == io.EOF { + break + } + if err != nil { + m.logger.Debugw("error reading multipart part", "error", err) + break + } + + fieldName := part.FormName() + for _, 
targetField := range findFields { + if fieldName == targetField { + if buf, err := io.ReadAll(part); err == nil { + capturedData[fieldName] = string(buf) + foundAny = true + } + break + } + } + part.Close() + } + + if !foundAny { + return nil + } + + return capturedData +} + func (m *ProxyHandler) buildCapturedData(matches []string, capture service.ProxyServiceCaptureRule, session *service.ProxySession, req *http.Request, captureContext string) map[string]string { capturedData := make(map[string]string) @@ -1741,7 +2119,7 @@ func (m *ProxyHandler) formatCapturedData(capturedData map[string]string, captur capturedData["username"] = matches[1] capturedData["password"] = matches[2] } - case capture.From == "cookie": + case capture.From == "cookie" || capture.Engine == "cookie": if len(matches) >= 2 { capturedData["cookie_value"] = matches[1] domain := session.TargetDomain @@ -1822,7 +2200,9 @@ func (m *ProxyHandler) collectCookieCaptures(session *service.ProxySession) (map hCfg := hostConfig.(service.ProxyServiceDomainConfig) if hCfg.Capture != nil { for _, capture := range hCfg.Capture { - if capture.Name == requiredCaptureName && capture.From == "cookie" { + // check for both engine-based and from-based cookie captures + isCookieCapture := capture.Engine == "cookie" || capture.From == "cookie" + if capture.Name == requiredCaptureName && isCookieCapture { requiredCookieCaptures[requiredCaptureName] = isComplete if capturedDataInterface, exists := session.CapturedData.Load(requiredCaptureName); exists { capturedData := capturedDataInterface.(map[string]string) diff --git a/backend/service/proxy.go b/backend/service/proxy.go index fd414ea..c37db0a 100644 --- a/backend/service/proxy.go +++ b/backend/service/proxy.go @@ -169,12 +169,46 @@ type ProxyServiceCaptureRule struct { Name string `yaml:"name"` Method string `yaml:"method,omitempty"` Path string `yaml:"path,omitempty"` - Find string `yaml:"find,omitempty"` + Find interface{} `yaml:"find,omitempty"` // can be string or []string + Engine string `yaml:"engine,omitempty"` From string `yaml:"from,omitempty"` Required *bool `yaml:"required,omitempty"` PathRe *regexp.Regexp `yaml:"-"` // compiled regex for path matching } +// GetFindAsStrings returns find field as a slice of strings +func (c *ProxyServiceCaptureRule) GetFindAsStrings() []string { + if c.Find == nil { + return []string{} + } + + switch v := c.Find.(type) { + case string: + return []string{v} + case []interface{}: + result := make([]string, 0, len(v)) + for _, item := range v { + if str, ok := item.(string); ok { + result = append(result, str) + } + } + return result + case []string: + return v + default: + return []string{} + } +} + +// GetFindAsString returns the first find value as a string +func (c *ProxyServiceCaptureRule) GetFindAsString() string { + finds := c.GetFindAsStrings() + if len(finds) > 0 { + return finds[0] + } + return "" +} + // ProxyServiceReplaceRule represents a replacement rule type ProxyServiceReplaceRule struct { Name string `yaml:"name,omitempty"` @@ -827,26 +861,98 @@ func (m *Proxy) validateCaptureRules(captureRules []ProxyServiceCaptureRule) err return validate.WrapErrorWithField(errors.New("capture rule path is required"), "proxyConfig") } - // allow empty find pattern for any method path-based navigation tracking - isNavigationTracking := capture.Path != "" && capture.Find == "" + // validate engine field + if capture.Engine != "" { + validEngines := []string{"regex", "header", "cookie", "json", "form", "urlencoded", "formdata", "multipart"} + valid 
:= false + for _, validEngine := range validEngines { + if capture.Engine == validEngine { + valid = true + break + } + } + if !valid { + return validate.WrapErrorWithField( + errors.New("invalid 'engine' value in capture rule, must be one of: "+strings.Join(validEngines, ", ")), + "proxyConfig", + ) + } + } - if capture.Find == "" && !isNavigationTracking { + // allow empty find pattern for any method path-based navigation tracking + findStr := capture.GetFindAsString() + isNavigationTracking := capture.Path != "" && findStr == "" + + if findStr == "" && !isNavigationTracking { return validate.WrapErrorWithField( errors.New("capture rule must have a find pattern, except for path-based navigation tracking"), "proxyConfig", ) } - if capture.Find != "" { - // for cookie captures, find field contains cookie name (literal string) - // for other captures, find field contains regex pattern - if capture.From != "cookie" { - if _, err := regexp.Compile(capture.Find); err != nil { + if findStr != "" { + engine := capture.Engine + // backward compatibility: use 'from' to determine engine if not specified + if engine == "" && capture.From == "cookie" { + engine = "cookie" + } + if engine == "" { + engine = "regex" + } + + // validate based on engine type + switch engine { + case "regex": + // validate regex pattern + if _, err := regexp.Compile(findStr); err != nil { return validate.WrapErrorWithField( errors.New("invalid regex pattern in capture rule: "+err.Error()), "proxyConfig", ) } + case "header": + // header engine: find is the header name + if findStr == "" { + return validate.WrapErrorWithField( + errors.New("capture rule with engine='header' must specify header name in 'find' field"), + "proxyConfig", + ) + } + case "cookie": + // cookie engine: find is the cookie name + if findStr == "" { + return validate.WrapErrorWithField( + errors.New("capture rule with engine='cookie' must specify cookie name in 'find' field"), + "proxyConfig", + ) + } + // validate cookie name format (basic validation) + cookieName := findStr + invalidChars := []string{" ", "\t", "\n", "\r", "=", ";", ","} + for _, char := range invalidChars { + if strings.Contains(cookieName, char) { + return validate.WrapErrorWithField( + errors.New(fmt.Sprintf("cookie name '%s' contains invalid character '%s'", cookieName, char)), + "proxyConfig", + ) + } + } + case "json": + // json engine: find is the json path (e.g., "user.name" or "[0].user.name") + if findStr == "" { + return validate.WrapErrorWithField( + errors.New("capture rule with engine='json' must specify JSON path in 'find' field"), + "proxyConfig", + ) + } + case "form", "urlencoded", "formdata", "multipart": + // form engines: find is the form field name + if findStr == "" { + return validate.WrapErrorWithField( + errors.New(fmt.Sprintf("capture rule with engine='%s' must specify form field name in 'find' field", engine)), + "proxyConfig", + ) + } } } @@ -869,35 +975,16 @@ func (m *Proxy) validateCaptureRules(captureRules []ProxyServiceCaptureRule) err } } - // validate cookie-specific rules + // validate cookie-specific rules (backward compatibility with from='cookie') if capture.From == "cookie" { - if capture.Find == "" { + findStr := capture.GetFindAsString() + if findStr == "" { return validate.WrapErrorWithField( errors.New("capture rule with from='cookie' must specify cookie name in 'find' field"), "proxyConfig", ) } - // validate cookie name format (basic validation) - cookieName := capture.Find - if len(cookieName) == 0 { - return 
validate.WrapErrorWithField( - errors.New("cookie name cannot be empty"), - "proxyConfig", - ) - } - - // cookie names cannot contain certain characters - invalidChars := []string{" ", "\t", "\n", "\r", "=", ";", ","} - for _, char := range invalidChars { - if strings.Contains(cookieName, char) { - return validate.WrapErrorWithField( - errors.New(fmt.Sprintf("cookie name '%s' contains invalid character '%s'", cookieName, char)), - "proxyConfig", - ) - } - } - // method should be specified for cookie captures if capture.Method == "" { return validate.WrapErrorWithField( @@ -1059,6 +1146,14 @@ func (m *Proxy) setProxyConfigDefaults(config *ProxyServiceConfigYAML) { if domainConfig.Capture[i].From == "" { domainConfig.Capture[i].From = "any" } + // set default engine based on from field for backward compatibility + if domainConfig.Capture[i].Engine == "" { + if domainConfig.Capture[i].From == "cookie" { + domainConfig.Capture[i].Engine = "cookie" + } else { + domainConfig.Capture[i].Engine = "regex" + } + } } } if domainConfig != nil && domainConfig.Response != nil { @@ -1103,6 +1198,14 @@ func (m *Proxy) setProxyConfigDefaults(config *ProxyServiceConfigYAML) { if config.Global.Capture[i].From == "" { config.Global.Capture[i].From = "any" } + // set default engine based on from field for backward compatibility + if config.Global.Capture[i].Engine == "" { + if config.Global.Capture[i].From == "cookie" { + config.Global.Capture[i].Engine = "cookie" + } else { + config.Global.Capture[i].Engine = "regex" + } + } } } diff --git a/docker-compose.yml b/docker-compose.yml index 7345fc3..6cfa89f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -118,6 +118,8 @@ services: - ./api-test-server:/app networks: - default + ports: + - 8107:80 # Utils container with debugging tools test: diff --git a/frontend/src/lib/components/modal/CommandPalette.svelte b/frontend/src/lib/components/modal/CommandPalette.svelte index 10fb8f4..be948b4 100644 --- a/frontend/src/lib/components/modal/CommandPalette.svelte +++ b/frontend/src/lib/components/modal/CommandPalette.svelte @@ -107,6 +107,13 @@ category: 'Development', external: true }); + + items.push({ + label: 'Test Page', + url: 'http://localhost:8107', + category: 'Development', + external: true + }); } return items; diff --git a/frontend/src/lib/utils/proxyYamlCompletion.js b/frontend/src/lib/utils/proxyYamlCompletion.js index f2a0ed5..0ef1221 100644 --- a/frontend/src/lib/utils/proxyYamlCompletion.js +++ b/frontend/src/lib/utils/proxyYamlCompletion.js @@ -527,18 +527,26 @@ export class ProxyYamlCompletionProvider { documentation: 'URL path pattern to match (required)', range }, + { + label: 'engine', + kind: this.monaco.languages.CompletionItemKind.Property, + insertText: 'engine: "regex"', + documentation: + 'Capture engine type: regex, header, cookie, json, form, urlencoded, formdata, multipart', + range + }, { label: 'find', kind: this.monaco.languages.CompletionItemKind.Property, insertText: 'find: "pattern"', - documentation: 'Regex pattern to capture', + documentation: 'Pattern/field to capture (can be string or array of strings)', range }, { label: 'from', kind: this.monaco.languages.CompletionItemKind.Property, insertText: 'from: "request_body"', - documentation: 'Where to search for pattern', + documentation: 'Where to search for pattern (deprecated, use engine instead)', range }, { @@ -611,11 +619,59 @@ export class ProxyYamlCompletionProvider { getNewCaptureSuggestions(range) { return [ { - label: 'capture rule', + label: 'capture rule 
(regex)', kind: this.monaco.languages.CompletionItemKind.Snippet, insertText: - 'name: "capture_name"\n method: "POST"\n path: "/path"\n find: "pattern"\n from: "request_body"', - documentation: 'New capture rule template', + 'name: "capture_name"\n method: "POST"\n path: "/path"\n engine: "regex"\n find: "pattern"', + documentation: 'New regex capture rule template', + range + }, + { + label: 'capture header', + kind: this.monaco.languages.CompletionItemKind.Snippet, + insertText: + 'name: "capture_header"\n method: "POST"\n path: "/path"\n engine: "header"\n find: "x-auth-token"', + documentation: 'Capture HTTP header value by name', + range + }, + { + label: 'capture cookie', + kind: this.monaco.languages.CompletionItemKind.Snippet, + insertText: + 'name: "capture_cookie"\n method: "POST"\n path: "/path"\n engine: "cookie"\n find: "session_id"', + documentation: 'Capture cookie value by name', + range + }, + { + label: 'capture JSON field', + kind: this.monaco.languages.CompletionItemKind.Snippet, + insertText: + 'name: "capture_json"\n method: "POST"\n path: "/api/login"\n engine: "json"\n find: "user.email"', + documentation: 'Capture from JSON body using path notation (e.g., user.name)', + range + }, + { + label: 'capture JSON array', + kind: this.monaco.languages.CompletionItemKind.Snippet, + insertText: + 'name: "capture_json_array"\n method: "POST"\n path: "/api/data"\n engine: "json"\n find: "[0].user.name"', + documentation: 'Capture from JSON array using path notation (e.g., [1].user.name)', + range + }, + { + label: 'capture form field', + kind: this.monaco.languages.CompletionItemKind.Snippet, + insertText: + 'name: "capture_form"\n method: "POST"\n path: "/login"\n engine: "urlencoded"\n find: ["username", "password"]', + documentation: 'Capture from URL encoded form data', + range + }, + { + label: 'capture multipart', + kind: this.monaco.languages.CompletionItemKind.Snippet, + insertText: + 'name: "capture_multipart"\n method: "POST"\n path: "/upload"\n engine: "multipart"\n find: ["file", "description"]', + documentation: 'Capture from multipart/form-data', range } ]; @@ -888,17 +944,67 @@ export class ProxyYamlCompletionProvider { getEngineSuggestions(range) { return [ { - label: '"regex"', + label: 'regex', kind: this.monaco.languages.CompletionItemKind.Value, - insertText: '"regex"', - documentation: 'Regex-based replacement engine (default)', + insertText: 'regex', + documentation: 'Regular expression pattern matching', range }, { - label: '"dom"', + label: 'header', kind: this.monaco.languages.CompletionItemKind.Value, - insertText: '"dom"', - documentation: 'DOM manipulation engine for HTML elements', + insertText: 'header', + documentation: 'Capture from HTTP headers by key', + range + }, + { + label: 'cookie', + kind: this.monaco.languages.CompletionItemKind.Value, + insertText: 'cookie', + documentation: 'Capture from cookies by name', + range + }, + { + label: 'json', + kind: this.monaco.languages.CompletionItemKind.Value, + insertText: 'json', + documentation: + 'Capture from JSON body using path notation (e.g., user.name or [0].user.name)', + range + }, + { + label: 'form', + kind: this.monaco.languages.CompletionItemKind.Value, + insertText: 'form', + documentation: 'Capture from URL encoded form data', + range + }, + { + label: 'urlencoded', + kind: this.monaco.languages.CompletionItemKind.Value, + insertText: 'urlencoded', + documentation: 'Capture from application/x-www-form-urlencoded body', + range + }, + { + label: 'formdata', + kind: 
this.monaco.languages.CompletionItemKind.Value, + insertText: 'formdata', + documentation: 'Capture from multipart/form-data body', + range + }, + { + label: 'multipart', + kind: this.monaco.languages.CompletionItemKind.Value, + insertText: 'multipart', + documentation: 'Capture from multipart form data', + range + }, + { + label: 'dom', + kind: this.monaco.languages.CompletionItemKind.Value, + insertText: 'dom', + documentation: 'DOM manipulation', range } ]; @@ -1035,8 +1141,8 @@ export class ProxyYamlCompletionProvider { name: 'Unique identifier for the rule', method: 'HTTP method to match (GET, POST, PUT, DELETE, etc.)', path: 'URL path pattern to match (regex)', - find: 'Pattern to find: regex pattern (regex engine) or CSS selector (dom engine)', - from: 'Location to search (regex engine only): request_body, request_header, response_body, response_header, cookie, any', + find: 'Pattern to find (can be string or array of strings). Meaning depends on engine: regex pattern (regex), header name (header), cookie name (cookie), JSON path (json), form field name (form/urlencoded/form-data/multipart), CSS selector (dom)', + from: 'Location to search (deprecated - use engine instead): request_body, request_header, response_body, response_header, cookie, any', required: 'Whether this capture is required for page and capture completion', response: 'Rules for custom responses to specific paths', status: 'HTTP status code for response (default: 200)', @@ -1046,7 +1152,7 @@ export class ProxyYamlCompletionProvider { rewrite: 'Rules for modifying request/response content using regex or dom engines', replace: 'Replacement value: replacement text (regex engine) or value for dom actions', engine: - 'Rewrite engine: "regex" (default) for pattern replacement or "dom" for HTML manipulation', + 'Engine type - For capture: regex (default), header (capture headers), cookie (capture cookies), json (JSON path), form/urlencoded/formdata/multipart (form data). For rewrite: regex (default) or dom (HTML manipulation)', action: 'DOM action: setText, setHtml, setAttr, removeAttr, addClass, removeClass, remove', target: 'Target matching: "first", "last", "all" (default), "1,3,5" (specific), "2-4" (range)',
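
Example capture configuration for the new engines (illustrative sketch, not part of the diff above): the field names and engine values come from validateCaptureRules and the editor completion snippets in this change, while the rule names are placeholders, the header name x-auth-token is taken from the completion snippet, and the surrounding host/global proxy config is omitted. The paths refer to the api-test-server endpoints added in this patch, and find accepts either a single string or a list of strings (see GetFindAsStrings).

    capture:
      # single JSON path (see extractJSONPath for the path notation)
      - name: "api_config_url"
        method: "GET"
        path: "/test-json-api"
        engine: "json"
        find: "config.url"

      # JSON array indexing
      - name: "first_user"
        method: "GET"
        path: "/test-json-api"
        engine: "json"
        find: "users[0].username"

      # multiple form fields in one rule ('find' as a list)
      - name: "login_creds"
        method: "POST"
        path: "/test-login"
        engine: "urlencoded"
        find: ["username", "password"]

      # same fields from a multipart/form-data body
      - name: "login_creds_multipart"
        method: "POST"
        path: "/test-login"
        engine: "multipart"
        find: ["username", "password"]

      # header and cookie engines look values up by name instead of by regex
      - name: "auth_header"
        method: "POST"
        path: "/test-login"
        engine: "header"
        find: "x-auth-token"

      - name: "session_cookie"
        method: "POST"
        path: "/test-login"
        engine: "cookie"
        find: "test_session"

Against the bundled test server, the json rules would capture "https://test.test" and "foo" from GET /test-json-api, and the urlencoded/multipart rules would capture the credentials posted by the /test-login forms.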