Compare commits

..

26 Commits

Author SHA1 Message Date
公明 215f5a4a93 Update config.yaml 2026-05-10 23:33:39 +08:00
公明 44185d748d Add files via upload 2026-05-10 23:28:18 +08:00
公明 fe47f1f058 Add files via upload 2026-05-10 23:27:07 +08:00
公明 99ce183f41 Add files via upload 2026-05-10 23:25:11 +08:00
公明 2ed1947f36 Add files via upload 2026-05-10 23:22:35 +08:00
公明 97f3e8c179 Add files via upload 2026-05-10 22:52:34 +08:00
公明 38b0c31b87 Add files via upload 2026-05-10 22:47:04 +08:00
公明 cb839da4d1 Add files via upload 2026-05-10 22:44:51 +08:00
公明 5ed730f17c Add files via upload 2026-05-10 22:43:21 +08:00
公明 30b1e5f820 Add files via upload 2026-05-10 22:16:12 +08:00
公明 8e5c70703e Add files via upload 2026-05-10 22:14:51 +08:00
公明 3cc3b25a7b Add files via upload 2026-05-10 22:12:23 +08:00
公明 44cf63fa52 Add files via upload 2026-05-10 22:10:33 +08:00
公明 12057c065b Add files via upload 2026-05-10 21:39:50 +08:00
公明 c4e0b9735c Add files via upload 2026-05-10 21:38:28 +08:00
公明 218e9b9880 Add files via upload 2026-05-10 21:36:28 +08:00
公明 82d840966e Add files via upload 2026-05-10 21:34:34 +08:00
公明 c62ff3bde9 Add files via upload 2026-05-10 20:29:34 +08:00
公明 df2506b651 Add files via upload 2026-05-10 02:04:23 +08:00
公明 efe9172f85 Add files via upload 2026-05-10 02:03:07 +08:00
公明 b788bc6dab Add files via upload 2026-05-10 02:01:28 +08:00
公明 9134f2bbcb Update config.yaml 2026-05-10 01:53:51 +08:00
公明 d76cf2a162 Add files via upload 2026-05-10 00:58:35 +08:00
公明 2f96feb98f Add files via upload 2026-05-10 00:57:26 +08:00
公明 a374c3950c Add files via upload 2026-05-10 00:55:20 +08:00
公明 a93e3455fa Add files via upload 2026-05-10 00:53:33 +08:00
30 changed files with 1680 additions and 386 deletions
+1 -1
View File
@@ -10,7 +10,7 @@
# ============================================
# 前端显示的版本号(可选,不填则显示默认版本)
version: "v1.6.5"
version: "v1.6.7"
# 服务器配置
server:
host: 0.0.0.0 # 监听地址,0.0.0.0 表示监听所有网络接口
+15
View File
@@ -193,6 +193,8 @@ type ChatMessage struct {
Content string `json:"content,omitempty"`
ToolCalls []ToolCall `json:"tool_calls,omitempty"`
ToolCallID string `json:"tool_call_id,omitempty"`
// ToolName 仅 tool 角色:从 Eino/轨迹 JSON 的 name 或 tool_name 恢复,供续跑构造 ToolMessage。
ToolName string `json:"tool_name,omitempty"`
}
// MarshalJSON 自定义JSON序列化,将tool_calls中的arguments转换为JSON字符串
@@ -211,6 +213,9 @@ func (cm ChatMessage) MarshalJSON() ([]byte, error) {
if cm.ToolCallID != "" {
aux["tool_call_id"] = cm.ToolCallID
}
if cm.ToolName != "" {
aux["tool_name"] = cm.ToolName
}
// 转换tool_calls,将arguments转换为JSON字符串
if len(cm.ToolCalls) > 0 {
@@ -438,6 +443,7 @@ func (a *Agent) AgentLoopWithProgress(ctx context.Context, userInput string, his
Content: msg.Content,
ToolCalls: msg.ToolCalls,
ToolCallID: msg.ToolCallID,
ToolName: msg.ToolName,
})
addedCount++
contentPreview := msg.Content
@@ -1909,6 +1915,15 @@ func (a *Agent) ExecuteMCPToolForConversation(ctx context.Context, conversationI
return a.executeToolViaMCP(ctx, toolName, args)
}
// RecordLocalToolExecution writes a tool call that completed outside the CallTool
// path into the MCP monitoring store (same record shape CallTool produces) and
// returns the executionId. Used for scenarios such as Eino filesystem execute, so
// the assistant bubble's "penetration test details" links into monitoring exactly
// like a regular MCP call.
func (a *Agent) RecordLocalToolExecution(toolName string, args map[string]interface{}, resultText string, invokeErr error) string {
	// Nil-safe no-op: callers may hold a nil agent or an agent without an MCP server.
	if a == nil || a.mcpServer == nil {
		return ""
	}
	return a.mcpServer.RecordCompletedToolInvocation(toolName, args, resultText, invokeErr)
}
// CancelMCPToolExecutionWithNote 取消一次进行中的 MCP 工具(先内部后外部),与监控页「终止工具」一致;note 非空时合并进返回给模型的文本。
func (a *Agent) CancelMCPToolExecutionWithNote(executionID, note string) bool {
executionID = strings.TrimSpace(executionID)
+2 -1
View File
@@ -90,7 +90,8 @@ type MultiAgentEinoMiddlewareConfig struct {
SummarizationTriggerRatio float64 `yaml:"summarization_trigger_ratio,omitempty" json:"summarization_trigger_ratio,omitempty"`
// SummarizationEmitInternalEvents controls middleware internal event emission (default true).
SummarizationEmitInternalEvents *bool `yaml:"summarization_emit_internal_events,omitempty" json:"summarization_emit_internal_events,omitempty"`
// HistoryInputBudgetRatio caps pre-agent history tokens as max_total_tokens * ratio (default 0.35).
// HistoryInputBudgetRatio 已不影响 Eino:从 last_react 轨迹转 ADK 消息时**不再**按 token 比例裁剪(完整注入)。
// 字段仍保留,便于旧版 config 不报错;新部署可省略。
HistoryInputBudgetRatio float64 `yaml:"history_input_budget_ratio,omitempty" json:"history_input_budget_ratio,omitempty"`
// PlanExecuteUserInputBudgetRatio caps planner/replanner/executor userInput prompt budget ratio (default 0.35).
PlanExecuteUserInputBudgetRatio float64 `yaml:"plan_execute_user_input_budget_ratio,omitempty" json:"plan_execute_user_input_budget_ratio,omitempty"`
+40 -13
View File
@@ -23,12 +23,16 @@ type ExecutionRecorder func(executionID string)
const ToolErrorPrefix = "__CYBERSTRIKE_AI_TOOL_ERROR__\n"
// ToolsFromDefinitions 将单 Agent 使用的 OpenAI 风格工具定义转为 Eino InvokableTool,执行时走 Agent 的 MCP 路径。
// invokeNotify 可选:与 runEinoADKAgentLoop 共享,在 InvokableRun 返回时触发 UI 与 pending 清理(与 ADK Tool 事件去重)。
// einoAgentName 为该套工具所属 ChatModelAgent 的 Name(主代理或子代理 id),用于 SSE 上的 einoAgent 字段。
func ToolsFromDefinitions(
ag *agent.Agent,
holder *ConversationHolder,
defs []agent.Tool,
rec ExecutionRecorder,
toolOutputChunk func(toolName, toolCallID, chunk string),
invokeNotify *ToolInvokeNotifyHolder,
einoAgentName string,
) ([]tool.BaseTool, error) {
out := make([]tool.BaseTool, 0, len(defs))
for _, d := range defs {
@@ -40,12 +44,14 @@ func ToolsFromDefinitions(
return nil, fmt.Errorf("tool %q: %w", d.Function.Name, err)
}
out = append(out, &mcpBridgeTool{
info: info,
name: d.Function.Name,
agent: ag,
holder: holder,
record: rec,
chunk: toolOutputChunk,
info: info,
name: d.Function.Name,
agent: ag,
holder: holder,
record: rec,
chunk: toolOutputChunk,
invokeNotify: invokeNotify,
einoAgentName: strings.TrimSpace(einoAgentName),
})
}
return out, nil
@@ -77,12 +83,14 @@ func toolInfoFromDefinition(d agent.Tool) (*schema.ToolInfo, error) {
}
type mcpBridgeTool struct {
info *schema.ToolInfo
name string
agent *agent.Agent
holder *ConversationHolder
record ExecutionRecorder
chunk func(toolName, toolCallID, chunk string)
info *schema.ToolInfo
name string
agent *agent.Agent
holder *ConversationHolder
record ExecutionRecorder
chunk func(toolName, toolCallID, chunk string)
invokeNotify *ToolInvokeNotifyHolder
einoAgentName string
}
func (m *mcpBridgeTool) Info(ctx context.Context) (*schema.ToolInfo, error) {
@@ -90,8 +98,27 @@ func (m *mcpBridgeTool) Info(ctx context.Context) (*schema.ToolInfo, error) {
return m.info, nil
}
func (m *mcpBridgeTool) InvokableRun(ctx context.Context, argumentsInJSON string, opts ...tool.Option) (string, error) {
func (m *mcpBridgeTool) InvokableRun(ctx context.Context, argumentsInJSON string, opts ...tool.Option) (out string, err error) {
_ = opts
toolCallID := compose.GetToolCallID(ctx)
defer func() {
if m.invokeNotify == nil {
return
}
tid := strings.TrimSpace(toolCallID)
if tid == "" {
return
}
success := err == nil && !strings.HasPrefix(out, ToolErrorPrefix)
body := out
if err != nil {
success = false
} else if strings.HasPrefix(out, ToolErrorPrefix) {
success = false
body = strings.TrimPrefix(out, ToolErrorPrefix)
}
m.invokeNotify.Fire(tid, m.name, m.einoAgentName, success, body, err)
}()
return runMCPToolInvocation(ctx, m.agent, m.holder, m.name, argumentsInJSON, m.record, m.chunk)
}
+39
View File
@@ -0,0 +1,39 @@
package einomcp
import "sync"
// ToolInvokeNotifyHolder is shared between the Eino run loop and the MCP bridge:
// the run loop installs a callback via Set before consuming the event iterator,
// and the bridge Fires it each time an InvokableRun returns. This pushes
// tool_result events and clears pending state even when the ADK does not surface
// schema.Tool events, so the UI is never stuck on "running" or force-closed at
// iteration end.
type ToolInvokeNotifyHolder struct {
	mu sync.RWMutex
	fn func(toolCallID, toolName, einoAgent string, success bool, content string, invokeErr error)
}

// NewToolInvokeNotifyHolder creates a holder that can be shared between
// ToolsFromDefinitions and the run loop.
func NewToolInvokeNotifyHolder() *ToolInvokeNotifyHolder {
	return &ToolInvokeNotifyHolder{}
}

// Set installs the callback. Called by runEinoADKAgentLoop before it starts
// consuming the iterator; may be called multiple times (the last call wins).
// Safe on a nil receiver.
func (h *ToolInvokeNotifyHolder) Set(fn func(toolCallID, toolName, einoAgent string, success bool, content string, invokeErr error)) {
	if h == nil {
		return
	}
	h.mu.Lock()
	defer h.mu.Unlock()
	h.fn = fn
}

// Fire is called by mcpBridgeTool when a tool invocation returns. It is a no-op
// when the holder is nil, no callback has been Set yet, or toolCallID is empty
// (the empty-ID check enforces the contract previously only stated in docs).
func (h *ToolInvokeNotifyHolder) Fire(toolCallID, toolName, einoAgent string, success bool, content string, invokeErr error) {
	if h == nil || toolCallID == "" {
		return
	}
	h.mu.RLock()
	fn := h.fn
	h.mu.RUnlock()
	if fn == nil {
		return
	}
	fn(toolCallID, toolName, einoAgent, success, content, invokeErr)
}
+43 -14
View File
@@ -1789,27 +1789,51 @@ func (h *AgentHandler) CancelAgentLoop(c *gin.Context) {
return
}
execID := h.tasks.ActiveMCPExecutionID(req.ConversationID)
if execID == "" {
c.JSON(http.StatusBadRequest, gin.H{"error": "当前没有正在执行的 MCP 工具(例如模型尚在推理、尚未发起工具调用)。请等待工具开始执行后再试,或使用「彻底停止」结束整轮任务。"})
return
}
note := strings.TrimSpace(req.Reason)
if !h.agent.CancelMCPToolExecutionWithNote(execID, note) {
c.JSON(http.StatusNotFound, gin.H{"error": "未找到进行中的工具执行或该调用已结束"})
if execID != "" {
if !h.agent.CancelMCPToolExecutionWithNote(execID, note) {
c.JSON(http.StatusNotFound, gin.H{"error": "未找到进行中的工具执行或该调用已结束"})
return
}
h.logger.Info("对话页仅终止当前 MCP 工具",
zap.String("conversationId", req.ConversationID),
zap.String("executionId", execID),
zap.Bool("hasNote", note != ""),
)
c.JSON(http.StatusOK, gin.H{
"status": "tool_abort_requested",
"conversationId": req.ConversationID,
"executionId": execID,
"message": "已请求终止当前工具调用;工具返回后本轮推理将继续(与 MCP 监控页终止一致)。",
"continueAfter": true,
"interruptWithNote": note != "",
"continueWithoutTool": false,
})
return
}
h.logger.Info("对话页仅终止当前 MCP 工具",
// 无进行中的 MCP 工具(模型纯推理/流式输出阶段):取消当前上下文并由 Eino 流式处理器合并用户补充后自动续跑。
h.tasks.SetInterruptContinueNote(req.ConversationID, note)
ok, err := h.tasks.CancelTask(req.ConversationID, multiagent.ErrInterruptContinue)
if err != nil {
h.logger.Error("中断并继续(无工具)失败", zap.Error(err))
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
}
if !ok {
c.JSON(http.StatusNotFound, gin.H{"error": "未找到正在执行的任务"})
return
}
h.logger.Info("对话页中断并继续(无 MCP 工具,将自动续跑)",
zap.String("conversationId", req.ConversationID),
zap.String("executionId", execID),
zap.Bool("hasNote", note != ""),
)
c.JSON(http.StatusOK, gin.H{
"status": "tool_abort_requested",
"conversationId": req.ConversationID,
"executionId": execID,
"message": "已请求终止当前工具调用;工具返回后本轮推理将继续(与 MCP 监控页终止一致)。",
"continueAfter": true,
"interruptWithNote": note != "",
"status": "interrupt_continue_scheduled",
"conversationId": req.ConversationID,
"message": "已请求暂停当前推理;用户补充将合并到上下文并自动继续执行(无需整轮停止)。",
"continueAfter": true,
"interruptWithNote": note != "",
"continueWithoutTool": true,
})
return
}
@@ -2901,6 +2925,11 @@ func (h *AgentHandler) loadHistoryFromAgentTrace(conversationID string) ([]agent
if toolCallID, ok := msgMap["tool_call_id"].(string); ok {
msg.ToolCallID = toolCallID
}
if tn, ok := msgMap["tool_name"].(string); ok && strings.TrimSpace(tn) != "" {
msg.ToolName = strings.TrimSpace(tn)
} else if tn, ok := msgMap["name"].(string); ok && strings.TrimSpace(tn) != "" && strings.EqualFold(msg.Role, "tool") {
msg.ToolName = strings.TrimSpace(tn)
}
agentMessages = append(agentMessages, msg)
}
+63 -24
View File
@@ -46,7 +46,7 @@ func (h *AgentHandler) EinoSingleAgentLoopStream(c *gin.Context) {
sendEvent := func(eventType, message string, data interface{}) {
if eventType == "error" && baseCtx != nil {
cause := context.Cause(baseCtx)
if errors.Is(cause, ErrTaskCancelled) {
if errors.Is(cause, ErrTaskCancelled) || errors.Is(cause, multiagent.ErrInterruptContinue) {
return
}
}
@@ -175,29 +175,68 @@ func (h *AgentHandler) EinoSingleAgentLoopStream(c *gin.Context) {
}
taskOwned = true
progressCallback := h.createProgressCallback(taskCtx, cancelWithCause, conversationID, assistantMessageID, sendEvent)
taskCtx = mcp.WithMCPConversationID(taskCtx, conversationID)
taskCtx = mcp.WithToolRunRegistry(taskCtx, h.tasks)
taskCtx = multiagent.WithHITLToolInterceptor(taskCtx, func(ctx context.Context, toolName, arguments string) (string, error) {
return h.interceptHITLForEinoTool(ctx, cancelWithCause, conversationID, assistantMessageID, sendEvent, toolName, arguments)
})
var cumulativeMCPExecutionIDs []string
result, runErr = multiagent.RunEinoSingleChatModelAgent(
taskCtx,
h.config,
&h.config.MultiAgent,
h.agent,
h.logger,
conversationID,
curFinalMessage,
curHistory,
roleTools,
progressCallback,
)
timeoutCancel()
for {
progressCallback := h.createProgressCallback(taskCtx, cancelWithCause, conversationID, assistantMessageID, sendEvent)
taskCtxLoop := mcp.WithMCPConversationID(taskCtx, conversationID)
taskCtxLoop = mcp.WithToolRunRegistry(taskCtxLoop, h.tasks)
taskCtxLoop = multiagent.WithHITLToolInterceptor(taskCtxLoop, func(ctx context.Context, toolName, arguments string) (string, error) {
return h.interceptHITLForEinoTool(ctx, cancelWithCause, conversationID, assistantMessageID, sendEvent, toolName, arguments)
})
result, runErr = multiagent.RunEinoSingleChatModelAgent(
taskCtxLoop,
h.config,
&h.config.MultiAgent,
h.agent,
h.logger,
conversationID,
curFinalMessage,
curHistory,
roleTools,
progressCallback,
)
timeoutCancel()
if result != nil && len(result.MCPExecutionIDs) > 0 {
cumulativeMCPExecutionIDs = mergeMCPExecutionIDLists(cumulativeMCPExecutionIDs, result.MCPExecutionIDs)
}
if runErr == nil {
break
}
if runErr != nil {
cause := context.Cause(baseCtx)
if errors.Is(cause, multiagent.ErrInterruptContinue) {
if shouldPersistEinoAgentTraceAfterRunError(baseCtx) {
h.persistEinoAgentTraceForResume(conversationID, result)
}
note := h.tasks.TakeInterruptContinueNote(conversationID)
icSummary := interruptContinueTimelineSummary(note)
progressCallback("user_interrupt_continue", icSummary, map[string]interface{}{
"conversationId": conversationID,
"rawReason": strings.TrimSpace(note),
"emptyReason": strings.TrimSpace(note) == "",
"kind": "no_active_mcp_tool",
})
inject := formatInterruptContinueUserMessage(note)
// 不写入 messages 表为 user 气泡:避免主对话流出现大段模板;说明已由 user_interrupt_continue 记入助手 process_details(迭代详情)。
if hist, err := h.loadHistoryFromAgentTrace(conversationID); err == nil && len(hist) > 0 {
curHistory = hist
}
curFinalMessage = inject
sendEvent("progress", "已合并用户补充与最新轨迹,正在继续推理…", map[string]interface{}{
"conversationId": conversationID,
"source": "interrupt_continue",
})
h.tasks.UpdateTaskStatus(conversationID, "running")
baseCtx, cancelWithCause = context.WithCancelCause(context.Background())
h.tasks.BindTaskCancel(conversationID, cancelWithCause)
taskCtx, timeoutCancel = context.WithTimeout(baseCtx, 600*time.Minute)
continue
}
if shouldPersistEinoAgentTraceAfterRunError(baseCtx) {
h.persistEinoAgentTraceForResume(conversationID, result)
}
@@ -259,8 +298,8 @@ func (h *AgentHandler) EinoSingleAgentLoopStream(c *gin.Context) {
if assistantMessageID != "" {
mcpIDsJSON := ""
if len(result.MCPExecutionIDs) > 0 {
jsonData, _ := json.Marshal(result.MCPExecutionIDs)
if len(cumulativeMCPExecutionIDs) > 0 {
jsonData, _ := json.Marshal(cumulativeMCPExecutionIDs)
mcpIDsJSON = string(jsonData)
}
_, _ = h.db.Exec(
@@ -279,7 +318,7 @@ func (h *AgentHandler) EinoSingleAgentLoopStream(c *gin.Context) {
}
sendEvent("response", result.Response, map[string]interface{}{
"mcpExecutionIds": result.MCPExecutionIDs,
"mcpExecutionIds": cumulativeMCPExecutionIDs,
"conversationId": conversationID,
"messageId": assistantMessageID,
"agentMode": "eino_single",
+112 -26
View File
@@ -63,7 +63,7 @@ func (h *AgentHandler) MultiAgentLoopStream(c *gin.Context) {
// 为避免 UI 看到“取消错误 + cancelled 文案”两条回复,这里直接丢弃取消对应的 error。
if eventType == "error" && baseCtx != nil {
cause := context.Cause(baseCtx)
if errors.Is(cause, ErrTaskCancelled) {
if errors.Is(cause, ErrTaskCancelled) || errors.Is(cause, multiagent.ErrInterruptContinue) {
return
}
}
@@ -184,31 +184,71 @@ func (h *AgentHandler) MultiAgentLoopStream(c *gin.Context) {
}
taskOwned = true
progressCallback := h.createProgressCallback(taskCtx, cancelWithCause, conversationID, assistantMessageID, sendEvent)
taskCtx = mcp.WithMCPConversationID(taskCtx, conversationID)
taskCtx = mcp.WithToolRunRegistry(taskCtx, h.tasks)
taskCtx = multiagent.WithHITLToolInterceptor(taskCtx, func(ctx context.Context, toolName, arguments string) (string, error) {
return h.interceptHITLForEinoTool(ctx, cancelWithCause, conversationID, assistantMessageID, sendEvent, toolName, arguments)
})
// 同一 HTTP 流内多段 Run(如中断并继续)合并 MCP execution id,供最终 response / 库表与工具芯片展示完整列表
var cumulativeMCPExecutionIDs []string
result, runErr = multiagent.RunDeepAgent(
taskCtx,
h.config,
&h.config.MultiAgent,
h.agent,
h.logger,
conversationID,
curFinalMessage,
curHistory,
roleTools,
progressCallback,
h.agentsMarkdownDir,
orch,
)
timeoutCancel()
for {
progressCallback := h.createProgressCallback(taskCtx, cancelWithCause, conversationID, assistantMessageID, sendEvent)
taskCtxLoop := mcp.WithMCPConversationID(taskCtx, conversationID)
taskCtxLoop = mcp.WithToolRunRegistry(taskCtxLoop, h.tasks)
taskCtxLoop = multiagent.WithHITLToolInterceptor(taskCtxLoop, func(ctx context.Context, toolName, arguments string) (string, error) {
return h.interceptHITLForEinoTool(ctx, cancelWithCause, conversationID, assistantMessageID, sendEvent, toolName, arguments)
})
result, runErr = multiagent.RunDeepAgent(
taskCtxLoop,
h.config,
&h.config.MultiAgent,
h.agent,
h.logger,
conversationID,
curFinalMessage,
curHistory,
roleTools,
progressCallback,
h.agentsMarkdownDir,
orch,
)
timeoutCancel()
if result != nil && len(result.MCPExecutionIDs) > 0 {
cumulativeMCPExecutionIDs = mergeMCPExecutionIDLists(cumulativeMCPExecutionIDs, result.MCPExecutionIDs)
}
if runErr == nil {
break
}
if runErr != nil {
cause := context.Cause(baseCtx)
if errors.Is(cause, multiagent.ErrInterruptContinue) {
if shouldPersistEinoAgentTraceAfterRunError(baseCtx) {
h.persistEinoAgentTraceForResume(conversationID, result)
}
note := h.tasks.TakeInterruptContinueNote(conversationID)
icSummary := interruptContinueTimelineSummary(note)
progressCallback("user_interrupt_continue", icSummary, map[string]interface{}{
"conversationId": conversationID,
"rawReason": strings.TrimSpace(note),
"emptyReason": strings.TrimSpace(note) == "",
"kind": "no_active_mcp_tool",
})
inject := formatInterruptContinueUserMessage(note)
// 不写入 messages 表为 user 气泡:避免主对话流出现大段模板;说明已由 user_interrupt_continue 记入助手 process_details(迭代详情)。
if hist, err := h.loadHistoryFromAgentTrace(conversationID); err == nil && len(hist) > 0 {
curHistory = hist
}
curFinalMessage = inject
sendEvent("progress", "已合并用户补充与最新轨迹,正在继续推理…", map[string]interface{}{
"conversationId": conversationID,
"source": "interrupt_continue",
})
h.tasks.UpdateTaskStatus(conversationID, "running")
baseCtx, cancelWithCause = context.WithCancelCause(context.Background())
h.tasks.BindTaskCancel(conversationID, cancelWithCause)
taskCtx, timeoutCancel = context.WithTimeout(baseCtx, 600*time.Minute)
continue
}
if shouldPersistEinoAgentTraceAfterRunError(baseCtx) {
h.persistEinoAgentTraceForResume(conversationID, result)
}
@@ -270,8 +310,8 @@ func (h *AgentHandler) MultiAgentLoopStream(c *gin.Context) {
if assistantMessageID != "" {
mcpIDsJSON := ""
if len(result.MCPExecutionIDs) > 0 {
jsonData, _ := json.Marshal(result.MCPExecutionIDs)
if len(cumulativeMCPExecutionIDs) > 0 {
jsonData, _ := json.Marshal(cumulativeMCPExecutionIDs)
mcpIDsJSON = string(jsonData)
}
_, _ = h.db.Exec(
@@ -294,7 +334,7 @@ func (h *AgentHandler) MultiAgentLoopStream(c *gin.Context) {
effectiveOrch = config.NormalizeMultiAgentOrchestration(o)
}
sendEvent("response", result.Response, map[string]interface{}{
"mcpExecutionIds": result.MCPExecutionIDs,
"mcpExecutionIds": cumulativeMCPExecutionIDs,
"conversationId": conversationID,
"messageId": assistantMessageID,
"agentMode": "eino_" + effectiveOrch,
@@ -406,6 +446,52 @@ func (h *AgentHandler) persistEinoAgentTraceForResume(conversationID string, res
}
}
// mergeMCPExecutionIDLists merges the MCP execution ids of multiple runs into a
// single list, dropping blanks and duplicates while preserving order (all dst
// entries first, then the entries from more).
func mergeMCPExecutionIDLists(dst []string, more []string) []string {
	seen := make(map[string]struct{}, len(dst)+len(more))
	merged := make([]string, 0, len(dst)+len(more))
	for _, list := range [][]string{dst, more} {
		for _, raw := range list {
			id := strings.TrimSpace(raw)
			if id == "" {
				continue
			}
			if _, dup := seen[id]; dup {
				continue
			}
			seen[id] = struct{}{}
			merged = append(merged, id)
		}
	}
	return merged
}
// interruptContinueTimelineSummary renders the short body shown in the timeline /
// process_details (the full template is written into a separate user message).
func interruptContinueTimelineSummary(note string) string {
	if trimmed := strings.TrimSpace(note); trimmed != "" {
		return "用户中断说明(原文):\n\n" + trimmed
	}
	return "用户选择「中断并继续」,未填写说明;已按默认渗透补充模板合并上下文并续跑。"
}
// formatInterruptContinueUserMessage formats the note from the "interrupt and
// continue" dialog as the next user-turn message (for pentest runs it stresses
// incorporating new paths/params and re-scanning ports on updated targets).
func formatInterruptContinueUserMessage(note string) string {
	msg := "【用户补充 / 中断后继续】\n"
	if trimmed := strings.TrimSpace(note); trimmed != "" {
		msg += trimmed + "\n\n"
	}
	msg += "【请在本轮落实】\n" +
		"- 将用户提供的接口路径、参数、业务变化纳入后续测试与推理。\n" +
		"- 若资产或目标信息有更新,请对目标重新执行端口/服务探测,再基于新结果规划下一步。\n" +
		"- 在已有轨迹基础上推进,避免无意义重复已完成的步骤。\n"
	return strings.TrimSpace(msg)
}
func multiAgentHTTPErrorStatus(err error) (int, string) {
msg := err.Error()
switch {
+59 -2
View File
@@ -6,6 +6,8 @@ import (
"strings"
"sync"
"time"
"cyberstrike-ai/internal/multiagent"
)
// ErrTaskCancelled 用户取消任务的错误
@@ -32,6 +34,9 @@ type AgentTask struct {
// ActiveMCPExecutionID 当前正在执行的 MCP 工具 executionId(仅内存,供「中断并继续」= 仅掐当前工具)
ActiveMCPExecutionID string `json:"-"`
// InterruptContinueNote 无 MCP 时「中断并继续」由用户在弹窗中填写的补充说明(Cancel 前写入,续跑轮次读取后清空)
InterruptContinueNote string `json:"-"`
cancel func(error)
}
@@ -65,6 +70,50 @@ func (m *AgentTaskManager) UnregisterRunningTool(conversationID, executionID str
}
}
// SetInterruptContinueNote stores the user's supplemental note (in memory only)
// before the ErrInterruptContinue cancellation is issued.
func (m *AgentTaskManager) SetInterruptContinueNote(conversationID, note string) {
	id := strings.TrimSpace(conversationID)
	if id == "" {
		return
	}
	m.mu.Lock()
	defer m.mu.Unlock()
	task, ok := m.tasks[id]
	if !ok || task == nil {
		return
	}
	task.InterruptContinueNote = note
}
// TakeInterruptContinueNote reads and clears the supplemental note (called once
// when the continuation round starts).
func (m *AgentTaskManager) TakeInterruptContinueNote(conversationID string) string {
	id := strings.TrimSpace(conversationID)
	if id == "" {
		return ""
	}
	m.mu.Lock()
	defer m.mu.Unlock()
	task, ok := m.tasks[id]
	if !ok || task == nil {
		return ""
	}
	note := task.InterruptContinueNote
	task.InterruptContinueNote = ""
	return note
}
// BindTaskCancel replaces the context-bound cancel function within the same
// running task (used by "interrupt and continue" after swapping in a fresh
// baseCtx). No-op for blank conversation ids, nil cancel funcs, or unknown tasks.
func (m *AgentTaskManager) BindTaskCancel(conversationID string, cancel context.CancelCauseFunc) {
	conversationID = strings.TrimSpace(conversationID)
	if conversationID == "" || cancel == nil {
		return
	}
	m.mu.Lock()
	defer m.mu.Unlock()
	if t, ok := m.tasks[conversationID]; ok && t != nil {
		// context.CancelCauseFunc has underlying type func(error), so assign it
		// directly instead of allocating an identical wrapper closure.
		t.cancel = cancel
	}
}
// ActiveMCPExecutionID 返回当前会话进行中的工具 executionId,无则空串。
func (m *AgentTaskManager) ActiveMCPExecutionID(conversationID string) string {
conversationID = strings.TrimSpace(conversationID)
@@ -210,8 +259,16 @@ func (m *AgentTaskManager) CancelTask(conversationID string, cause error) (bool,
return true, nil
}
task.Status = "cancelling"
task.CancellingAt = time.Now()
// ErrInterruptContinue:仅掐断当前推理步骤,随后由处理器续跑,不进入长时间「取消中」态。
if cause != nil && errors.Is(cause, multiagent.ErrInterruptContinue) {
task.Status = "running"
} else {
task.Status = "cancelling"
task.CancellingAt = time.Now()
}
if cause != nil && errors.Is(cause, ErrTaskCancelled) {
task.InterruptContinueNote = ""
}
cancel := task.cancel
m.mu.Unlock()
+43
View File
@@ -883,6 +883,49 @@ func (s *Server) CallTool(ctx context.Context, toolName string, args map[string]
return finalResult, executionID, nil
}
// RecordCompletedToolInvocation persists a tool invocation that already finished
// on another code path into the monitoring store, using the same record shape a
// CallTool run produces. Intended for Eino ADK filesystem/execute scenarios that
// bypass CallTool; returns the executionId so the assistant message can reference
// it via mcpExecutionIds.
func (s *Server) RecordCompletedToolInvocation(toolName string, args map[string]interface{}, resultText string, invokeErr error) string {
	// Nil-receiver safe: callers may hold a nil *Server.
	if s == nil {
		return ""
	}
	if args == nil {
		args = map[string]interface{}{}
	}
	executionID := uuid.New().String()
	now := time.Now()
	failed := invokeErr != nil
	// The call already completed elsewhere, so start == end and duration is 0.
	exec := &ToolExecution{
		ID:        executionID,
		ToolName:  toolName,
		Arguments: args,
		StartTime: now,
		EndTime:   &now,
		Duration:  0,
	}
	if failed {
		exec.Status = "failed"
		exec.Error = invokeErr.Error()
		// Keep any partial output alongside the error when present.
		if strings.TrimSpace(resultText) != "" {
			exec.Result = &ToolResult{Content: []Content{{Type: "text", Text: resultText}}}
		}
	} else {
		exec.Status = "completed"
		text := resultText
		// Substitute a placeholder ("no output") so successful runs always
		// carry a non-empty result body.
		if strings.TrimSpace(text) == "" {
			text = "(无输出)"
		}
		exec.Result = &ToolResult{Content: []Content{{Type: "text", Text: text}}}
	}
	if s.storage != nil {
		// Best effort: a storage failure is logged but does not fail the call.
		if err := s.storage.SaveToolExecution(exec); err != nil {
			s.logger.Warn("RecordCompletedToolInvocation 保存失败", zap.Error(err))
		}
	}
	s.updateStats(toolName, failed)
	return executionID
}
// cleanupOldExecutions 清理旧的执行记录,防止内存无限增长
func (s *Server) cleanupOldExecutions() {
if len(s.executions) <= s.maxExecutionsInMemory {
+404 -127
View File
@@ -11,7 +11,9 @@ import (
"strings"
"sync"
"sync/atomic"
"unicode/utf8"
"cyberstrike-ai/internal/agent"
"cyberstrike-ai/internal/einomcp"
"github.com/cloudwego/eino/adk"
@@ -20,7 +22,9 @@ import (
)
// normalizeStreamingDelta 将可能是“累计片段”的 chunk 归一化为“纯增量”。
// 一些模型/桥接层在流式过程中会重复发送已输出前缀,前端若直接 buffer+=chunk 会出现“结巴”重复。
// 一些模型/桥接层在流式过程中会重复发送已输出前缀,前端若直接 buffer+=chunk 会出现重复文本
//
// 注意:与 internal/openai.normalizeStreamingDelta 保持一致。
func normalizeStreamingDelta(current, incoming string) (next, delta string) {
if incoming == "" {
return current, ""
@@ -28,31 +32,22 @@ func normalizeStreamingDelta(current, incoming string) (next, delta string) {
if current == "" {
return incoming, incoming
}
if incoming == current {
return current, ""
}
// incoming 是累计全文(包含 current 前缀)
if strings.HasPrefix(incoming, current) {
if strings.HasPrefix(incoming, current) && len(incoming) > len(current) {
return incoming, incoming[len(current):]
}
// incoming 完全是已输出尾部重发
if strings.HasSuffix(current, incoming) {
if incoming == current && utf8.RuneCountInString(current) > 1 {
return current, ""
}
// 处理边界重叠:current 后缀与 incoming 前缀重叠,只追加非重叠部分。
max := len(current)
if len(incoming) < max {
max = len(incoming)
}
for overlap := max; overlap > 0; overlap-- {
if current[len(current)-overlap:] == incoming[:overlap] {
return current + incoming[overlap:], incoming[overlap:]
}
}
return current + incoming, incoming
}
// isInterruptContinue reports whether ctx was cancelled with
// ErrInterruptContinue as its cause (the "interrupt and continue" flow).
func isInterruptContinue(ctx context.Context) bool {
	return ctx != nil && errors.Is(context.Cause(ctx), ErrInterruptContinue)
}
func isEinoIterationLimitError(err error) bool {
if err == nil {
return false
@@ -83,10 +78,22 @@ type einoADKRunLoopArgs struct {
McpIDsMu *sync.Mutex
McpIDs *[]string
// FilesystemMonitorAgent / FilesystemMonitorRecord 非 nil 时,将 Eino ADK filesystem 中间件工具(ls/read_file/write_file/edit_file/glob/grep
// 在完成时写入 MCP 监控;execute 仍由 eino_execute_monitor 记录,此处跳过。
FilesystemMonitorAgent *agent.Agent
FilesystemMonitorRecord einomcp.ExecutionRecorder
// ToolInvokeNotify 与 einomcp.ToolsFromDefinitions 共享:run loop 在迭代前 SetMCP 桥 Fire 以补全 tool_result。
ToolInvokeNotify *einomcp.ToolInvokeNotifyHolder
DA adk.Agent
// EmptyResponseMessage 当未捕获到助手正文时的占位(多代理与单代理文案不同)。
EmptyResponseMessage string
// ModelFacingTrace 可选:由各 ChatModelAgent Handlers 链末尾中间件写入「即将送入模型」的消息快照;
// 非空时优先用于 LastAgentTraceInput 序列化,使续跑与 summarization/reduction 后的上下文一致。
ModelFacingTrace *modelFacingTraceHolder
}
func runEinoADKAgentLoop(ctx context.Context, args *einoADKRunLoopArgs, baseMsgs []adk.Message) (*RunResult, error) {
@@ -224,6 +231,63 @@ func runEinoADKAgentLoop(ctx context.Context, args *einoADKRunLoopArgs, baseMsgs
pendingQueueByAgent = make(map[string][]string)
}
// 最近一次成功的 Eino filesystem execute 的标准输出(trim):用于抑制模型紧接着复述同一字符串时的重复「助手输出」时间线。
var executeStdoutDupMu sync.Mutex
var pendingExecuteStdoutDup string
recordPendingExecuteStdoutDup := func(toolName, stdout string, isErr bool) {
if isErr || !strings.EqualFold(strings.TrimSpace(toolName), "execute") {
return
}
t := strings.TrimSpace(stdout)
if t == "" {
return
}
executeStdoutDupMu.Lock()
pendingExecuteStdoutDup = t
executeStdoutDupMu.Unlock()
}
var toolResultSent sync.Map // toolCallID -> struct{};与 ADK Tool 消息去重,避免 bridge 与事件流各推一次
if args.ToolInvokeNotify != nil {
args.ToolInvokeNotify.Set(func(toolCallID, toolName, einoAgent string, success bool, content string, invokeErr error) {
tid := strings.TrimSpace(toolCallID)
removePendingByID(tid)
if tid == "" || progress == nil {
return
}
if _, loaded := toolResultSent.LoadOrStore(tid, struct{}{}); loaded {
return
}
isErr := !success || invokeErr != nil
body := content
if invokeErr != nil {
body = invokeErr.Error()
isErr = true
}
recordPendingExecuteStdoutDup(toolName, body, isErr)
preview := body
if len(preview) > 200 {
preview = preview[:200] + "..."
}
agentTag := strings.TrimSpace(einoAgent)
if agentTag == "" {
agentTag = orchestratorName
}
progress("tool_result", fmt.Sprintf("工具结果 (%s)", toolName), map[string]interface{}{
"toolName": toolName,
"success": !isErr,
"isError": isErr,
"result": body,
"resultPreview": preview,
"toolCallId": tid,
"conversationId": conversationID,
"einoAgent": agentTag,
"einoRole": einoRoleTag(agentTag),
"source": "eino",
})
})
}
runnerCfg := adk.RunnerConfig{
Agent: da,
EnableStreaming: true,
@@ -352,7 +416,8 @@ func runEinoADKAgentLoop(ctx context.Context, args *einoADKRunLoopArgs, baseMsgs
}
ids := snapshotMCPIDs()
return buildEinoRunResultFromAccumulated(
orchMode, runAccumulatedMsgs, lastAssistant, lastPlanExecuteExecutor, emptyHint, ids, true,
orchMode, runAccumulatedMsgs, persistTraceSource(args, runAccumulatedMsgs),
lastAssistant, lastPlanExecuteExecutor, emptyHint, ids, true,
), runErr
}
@@ -362,10 +427,18 @@ func runEinoADKAgentLoop(ctx context.Context, args *einoADKRunLoopArgs, baseMsgs
case <-ctx.Done():
flushAllPendingAsFailed(ctx.Err())
if progress != nil {
progress("error", "Request cancelled / 请求已取消", map[string]interface{}{
"conversationId": conversationID,
"source": "eino",
})
if isInterruptContinue(ctx) {
progress("progress", "已暂停当前输出,正在合并用户补充并继续…", map[string]interface{}{
"conversationId": conversationID,
"source": "eino",
"kind": "interrupt_continue",
})
} else {
progress("error", "Request cancelled / 请求已取消", map[string]interface{}{
"conversationId": conversationID,
"source": "eino",
})
}
}
return takePartial(ctx.Err())
default:
@@ -379,10 +452,18 @@ func runEinoADKAgentLoop(ctx context.Context, args *einoADKRunLoopArgs, baseMsgs
if ctxErr := ctx.Err(); ctxErr != nil {
flushAllPendingAsFailed(ctxErr)
if progress != nil {
progress("error", ctxErr.Error(), map[string]interface{}{
"conversationId": conversationID,
"source": "eino",
})
if isInterruptContinue(ctx) {
progress("progress", "已暂停当前输出,正在合并用户补充并继续…", map[string]interface{}{
"conversationId": conversationID,
"source": "eino",
"kind": "interrupt_continue",
})
} else {
progress("error", ctxErr.Error(), map[string]interface{}{
"conversationId": conversationID,
"source": "eino",
})
}
}
return takePartial(ctxErr)
}
@@ -467,62 +548,157 @@ func runEinoADKAgentLoop(ctx context.Context, args *einoADKRunLoopArgs, baseMsgs
var subAssistantBuf string
var subReplyStreamID string
var mainAssistantBuf string
var mainAssistDupTarget string // 非空表示本段主助手流需缓冲至 EOF,与 execute 输出比对去重
var reasoningBuf string
var streamRecvErr error
type streamMsg struct {
chunk *schema.Message
err error
}
recvCh := make(chan streamMsg, 8)
go func() {
defer close(recvCh)
for {
ch, rerr := mv.MessageStream.Recv()
recvCh <- streamMsg{chunk: ch, err: rerr}
if rerr != nil {
return
}
}
}()
streamRecvLoop:
for {
chunk, rerr := mv.MessageStream.Recv()
if rerr != nil {
if errors.Is(rerr, io.EOF) {
break
select {
case <-ctx.Done():
streamRecvErr = ctx.Err()
break streamRecvLoop
case sm, ok := <-recvCh:
if !ok {
break streamRecvLoop
}
if logger != nil {
logger.Warn("eino stream recv error, flushing incomplete stream",
zap.Error(rerr),
zap.String("agent", ev.AgentName),
zap.Int("toolFragments", len(toolStreamFragments)))
chunk, rerr := sm.chunk, sm.err
if rerr != nil {
if errors.Is(rerr, io.EOF) {
break streamRecvLoop
}
if logger != nil {
logger.Warn("eino stream recv error, flushing incomplete stream",
zap.Error(rerr),
zap.String("agent", ev.AgentName),
zap.Int("toolFragments", len(toolStreamFragments)))
}
streamRecvErr = rerr
break streamRecvLoop
}
streamRecvErr = rerr
break
}
if chunk == nil {
continue
}
if progress != nil && strings.TrimSpace(chunk.ReasoningContent) != "" {
var reasoningDelta string
reasoningBuf, reasoningDelta = normalizeStreamingDelta(reasoningBuf, chunk.ReasoningContent)
if reasoningDelta != "" {
if reasoningStreamID == "" {
reasoningStreamID = fmt.Sprintf("eino-reasoning-%s-%d", conversationID, atomic.AddInt64(&reasoningStreamSeq, 1))
progress("thinking_stream_start", " ", map[string]interface{}{
"streamId": reasoningStreamID,
"source": "eino",
"einoAgent": ev.AgentName,
"einoRole": einoRoleTag(ev.AgentName),
"orchestration": orchMode,
if chunk == nil {
continue
}
if progress != nil && strings.TrimSpace(chunk.ReasoningContent) != "" {
var reasoningDelta string
reasoningBuf, reasoningDelta = normalizeStreamingDelta(reasoningBuf, chunk.ReasoningContent)
if reasoningDelta != "" {
if reasoningStreamID == "" {
reasoningStreamID = fmt.Sprintf("eino-reasoning-%s-%d", conversationID, atomic.AddInt64(&reasoningStreamSeq, 1))
progress("thinking_stream_start", " ", map[string]interface{}{
"streamId": reasoningStreamID,
"source": "eino",
"einoAgent": ev.AgentName,
"einoRole": einoRoleTag(ev.AgentName),
"orchestration": orchMode,
})
}
progress("thinking_stream_delta", reasoningDelta, map[string]interface{}{
"streamId": reasoningStreamID,
})
}
progress("thinking_stream_delta", reasoningDelta, map[string]interface{}{
"streamId": reasoningStreamID,
})
}
if chunk.Content != "" {
if progress != nil && streamsMainAssistant(ev.AgentName) {
var contentDelta string
mainAssistantBuf, contentDelta = normalizeStreamingDelta(mainAssistantBuf, chunk.Content)
if contentDelta != "" {
if mainAssistDupTarget == "" {
executeStdoutDupMu.Lock()
if pendingExecuteStdoutDup != "" {
mainAssistDupTarget = pendingExecuteStdoutDup
}
executeStdoutDupMu.Unlock()
}
if mainAssistDupTarget != "" {
// 已展示过 tool_result,缓冲全文;EOF 后与 execute 输出相同则不再发助手流
} else {
if !streamHeaderSent {
progress("response_start", "", map[string]interface{}{
"conversationId": conversationID,
"mcpExecutionIds": snapshotMCPIDs(),
"messageGeneratedBy": "eino:" + ev.AgentName,
"einoRole": "orchestrator",
"einoAgent": ev.AgentName,
"orchestration": orchMode,
})
streamHeaderSent = true
}
progress("response_delta", contentDelta, map[string]interface{}{
"conversationId": conversationID,
"mcpExecutionIds": snapshotMCPIDs(),
"einoRole": "orchestrator",
"einoAgent": ev.AgentName,
"orchestration": orchMode,
})
}
}
} else if !streamsMainAssistant(ev.AgentName) {
var subDelta string
subAssistantBuf, subDelta = normalizeStreamingDelta(subAssistantBuf, chunk.Content)
if subDelta != "" {
if progress != nil {
if subReplyStreamID == "" {
subReplyStreamID = fmt.Sprintf("eino-sub-reply-%s-%d", conversationID, atomic.AddInt64(&einoSubReplyStreamSeq, 1))
progress("eino_agent_reply_stream_start", "", map[string]interface{}{
"streamId": subReplyStreamID,
"einoAgent": ev.AgentName,
"einoRole": "sub",
"conversationId": conversationID,
"source": "eino",
})
}
progress("eino_agent_reply_stream_delta", subDelta, map[string]interface{}{
"streamId": subReplyStreamID,
"conversationId": conversationID,
})
}
}
}
}
if len(chunk.ToolCalls) > 0 {
toolStreamFragments = append(toolStreamFragments, chunk.ToolCalls...)
}
}
if chunk.Content != "" {
if progress != nil && streamsMainAssistant(ev.AgentName) {
var contentDelta string
mainAssistantBuf, contentDelta = normalizeStreamingDelta(mainAssistantBuf, chunk.Content)
if contentDelta != "" {
if !streamHeaderSent {
progress("response_start", "", map[string]interface{}{
"conversationId": conversationID,
"mcpExecutionIds": snapshotMCPIDs(),
"messageGeneratedBy": "eino:" + ev.AgentName,
"einoRole": "orchestrator",
"einoAgent": ev.AgentName,
"orchestration": orchMode,
})
streamHeaderSent = true
}
progress("response_delta", contentDelta, map[string]interface{}{
}
if streamsMainAssistant(ev.AgentName) {
s := strings.TrimSpace(mainAssistantBuf)
if mainAssistDupTarget != "" {
executeStdoutDupMu.Lock()
pendingExecuteStdoutDup = ""
executeStdoutDupMu.Unlock()
if s != "" && s == mainAssistDupTarget {
// 与刚展示的 execute 结果完全一致:不再发助手流式事件,仍写入轨迹与最终回复字段
lastAssistant = s
runAccumulatedMsgs = append(runAccumulatedMsgs, schema.AssistantMessage(s, nil))
if orchMode == "plan_execute" && strings.EqualFold(strings.TrimSpace(ev.AgentName), "executor") {
lastPlanExecuteExecutor = UnwrapPlanExecuteUserText(s)
}
} else if s != "" {
if progress != nil {
progress("response_start", "", map[string]interface{}{
"conversationId": conversationID,
"mcpExecutionIds": snapshotMCPIDs(),
"messageGeneratedBy": "eino:" + ev.AgentName,
"einoRole": "orchestrator",
"einoAgent": ev.AgentName,
"orchestration": orchMode,
})
progress("response_delta", s, map[string]interface{}{
"conversationId": conversationID,
"mcpExecutionIds": snapshotMCPIDs(),
"einoRole": "orchestrator",
@@ -530,35 +706,13 @@ func runEinoADKAgentLoop(ctx context.Context, args *einoADKRunLoopArgs, baseMsgs
"orchestration": orchMode,
})
}
} else if !streamsMainAssistant(ev.AgentName) {
var subDelta string
subAssistantBuf, subDelta = normalizeStreamingDelta(subAssistantBuf, chunk.Content)
if subDelta != "" {
if progress != nil {
if subReplyStreamID == "" {
subReplyStreamID = fmt.Sprintf("eino-sub-reply-%s-%d", conversationID, atomic.AddInt64(&einoSubReplyStreamSeq, 1))
progress("eino_agent_reply_stream_start", "", map[string]interface{}{
"streamId": subReplyStreamID,
"einoAgent": ev.AgentName,
"einoRole": "sub",
"conversationId": conversationID,
"source": "eino",
})
}
progress("eino_agent_reply_stream_delta", subDelta, map[string]interface{}{
"streamId": subReplyStreamID,
"conversationId": conversationID,
})
}
lastAssistant = s
runAccumulatedMsgs = append(runAccumulatedMsgs, schema.AssistantMessage(s, nil))
if orchMode == "plan_execute" && strings.EqualFold(strings.TrimSpace(ev.AgentName), "executor") {
lastPlanExecuteExecutor = UnwrapPlanExecuteUserText(s)
}
}
}
if len(chunk.ToolCalls) > 0 {
toolStreamFragments = append(toolStreamFragments, chunk.ToolCalls...)
}
}
if streamsMainAssistant(ev.AgentName) {
if s := strings.TrimSpace(mainAssistantBuf); s != "" {
} else if s != "" {
lastAssistant = s
runAccumulatedMsgs = append(runAccumulatedMsgs, schema.AssistantMessage(s, nil))
if orchMode == "plan_execute" && strings.EqualFold(strings.TrimSpace(ev.AgentName), "executor") {
@@ -588,10 +742,17 @@ func runEinoADKAgentLoop(ctx context.Context, args *einoADKRunLoopArgs, baseMsgs
}
var lastToolChunk *schema.Message
if merged := mergeStreamingToolCallFragments(toolStreamFragments); len(merged) > 0 {
lastToolChunk = &schema.Message{ToolCalls: merged}
lastToolChunk = mergeMessageToolCalls(&schema.Message{ToolCalls: merged})
}
tryEmitToolCallsOnce(lastToolChunk, ev.AgentName, orchestratorName, conversationID, progress, toolEmitSeen, subAgentToolStep, markPending)
// 流式路径此前只把 tool_calls 推给进度 UI,未写入 runAccumulatedMsgs;落库后 loadHistory→RepairOrphan 会删掉全部 tool 结果,表现为「续跑/下轮失忆」。
if lastToolChunk != nil && len(lastToolChunk.ToolCalls) > 0 {
runAccumulatedMsgs = append(runAccumulatedMsgs, schema.AssistantMessage("", lastToolChunk.ToolCalls))
}
if streamRecvErr != nil {
if isInterruptContinue(ctx) {
return takePartial(streamRecvErr)
}
if progress != nil {
progress("eino_stream_error", streamRecvErr.Error(), map[string]interface{}{
"conversationId": conversationID,
@@ -627,26 +788,42 @@ func runEinoADKAgentLoop(ctx context.Context, args *einoADKRunLoopArgs, baseMsgs
body := strings.TrimSpace(msg.Content)
if body != "" {
if streamsMainAssistant(ev.AgentName) {
if progress != nil {
progress("response_start", "", map[string]interface{}{
"conversationId": conversationID,
"mcpExecutionIds": snapshotMCPIDs(),
"messageGeneratedBy": "eino:" + ev.AgentName,
"einoRole": "orchestrator",
"einoAgent": ev.AgentName,
"orchestration": orchMode,
})
progress("response_delta", body, map[string]interface{}{
"conversationId": conversationID,
"mcpExecutionIds": snapshotMCPIDs(),
"einoRole": "orchestrator",
"einoAgent": ev.AgentName,
"orchestration": orchMode,
})
}
lastAssistant = body
if orchMode == "plan_execute" && strings.EqualFold(strings.TrimSpace(ev.AgentName), "executor") {
lastPlanExecuteExecutor = UnwrapPlanExecuteUserText(body)
executeStdoutDupMu.Lock()
dup := pendingExecuteStdoutDup
if dup != "" && body == dup {
pendingExecuteStdoutDup = ""
executeStdoutDupMu.Unlock()
lastAssistant = body
if orchMode == "plan_execute" && strings.EqualFold(strings.TrimSpace(ev.AgentName), "executor") {
lastPlanExecuteExecutor = UnwrapPlanExecuteUserText(body)
}
// 非流式:与 execute 输出相同则跳过助手通道展示(msg 已在上方写入 runAccumulatedMsgs
} else {
if dup != "" {
pendingExecuteStdoutDup = ""
}
executeStdoutDupMu.Unlock()
if progress != nil {
progress("response_start", "", map[string]interface{}{
"conversationId": conversationID,
"mcpExecutionIds": snapshotMCPIDs(),
"messageGeneratedBy": "eino:" + ev.AgentName,
"einoRole": "orchestrator",
"einoAgent": ev.AgentName,
"orchestration": orchMode,
})
progress("response_delta", body, map[string]interface{}{
"conversationId": conversationID,
"mcpExecutionIds": snapshotMCPIDs(),
"einoRole": "orchestrator",
"einoAgent": ev.AgentName,
"orchestration": orchMode,
})
}
lastAssistant = body
if orchMode == "plan_execute" && strings.EqualFold(strings.TrimSpace(ev.AgentName), "executor") {
lastPlanExecuteExecutor = UnwrapPlanExecuteUserText(body)
}
}
} else if progress != nil {
progress("eino_agent_reply", body, map[string]interface{}{
@@ -702,12 +879,19 @@ func runEinoADKAgentLoop(ctx context.Context, args *einoADKRunLoopArgs, baseMsgs
break
}
}
} else {
removePendingByID(toolCallID)
}
if toolCallID != "" {
removePendingByID(toolCallID)
if _, loaded := toolResultSent.LoadOrStore(toolCallID, struct{}{}); loaded {
// ToolInvokeNotify 可能已推过 tool_result(如 execute 流式包装里 Fire 仅携带截断后的 stdout),
// 此处仍应用 ADK Tool 消息中的完整内容刷新去重基准,避免模型复述全文时与截断串比对失败而重复展示「助手输出」。
recordPendingExecuteStdoutDup(toolName, content, isErr)
continue
}
data["toolCallId"] = toolCallID
}
recordPendingExecuteStdoutDup(toolName, content, isErr)
recordEinoADKFilesystemToolMonitor(args.FilesystemMonitorAgent, args.FilesystemMonitorRecord, toolName, toolCallID, runAccumulatedMsgs, content, isErr)
progress("tool_result", fmt.Sprintf("工具结果 (%s)", toolName), data)
}
}
@@ -717,11 +901,21 @@ func runEinoADKAgentLoop(ctx context.Context, args *einoADKRunLoopArgs, baseMsgs
mcpIDsMu.Unlock()
out := buildEinoRunResultFromAccumulated(
orchMode, runAccumulatedMsgs, lastAssistant, lastPlanExecuteExecutor, emptyHint, ids, false,
orchMode, runAccumulatedMsgs, persistTraceSource(args, runAccumulatedMsgs),
lastAssistant, lastPlanExecuteExecutor, emptyHint, ids, false,
)
return out, nil
}
// persistTraceSource chooses the message trace to persist: prefer the
// model-facing snapshot (already summarized/reduced/pruned), falling back to
// the event-stream accumulated messages when no snapshot exists.
func persistTraceSource(args *einoADKRunLoopArgs, fallback []adk.Message) []adk.Message {
	if args == nil || args.ModelFacingTrace == nil {
		return fallback
	}
	if snap := args.ModelFacingTrace.Snapshot(); len(snap) > 0 {
		return snap
	}
	return fallback
}
func einoPartialRunLastOutputHint() string {
return "[执行未正常结束(用户停止、超时或异常)。续跑时请基于上文已产生的工具与结果继续,勿重复已完成步骤。]\n" +
"[Run ended abnormally; continue from the trace above without repeating completed steps.]"
@@ -730,13 +924,18 @@ func einoPartialRunLastOutputHint() string {
func buildEinoRunResultFromAccumulated(
orchMode string,
runAccumulatedMsgs []adk.Message,
persistMsgs []adk.Message,
lastAssistant string,
lastPlanExecuteExecutor string,
emptyHint string,
mcpIDs []string,
partial bool,
) *RunResult {
histJSON, _ := json.Marshal(runAccumulatedMsgs)
traceForJSON := persistMsgs
if len(traceForJSON) == 0 {
traceForJSON = runAccumulatedMsgs
}
histJSON, _ := json.Marshal(traceForJSON)
cleaned := strings.TrimSpace(lastAssistant)
if orchMode == "plan_execute" {
if e := strings.TrimSpace(lastPlanExecuteExecutor); e != "" {
@@ -745,6 +944,11 @@ func buildEinoRunResultFromAccumulated(
cleaned = UnwrapPlanExecuteUserText(cleaned)
}
}
if cleaned == "" {
if fb := strings.TrimSpace(einoExtractFallbackAssistantFromMsgs(runAccumulatedMsgs)); fb != "" {
cleaned = fb
}
}
cleaned = dedupeRepeatedParagraphs(cleaned, 80)
cleaned = dedupeParagraphsByLineFingerprint(cleaned, 100)
// 防止超长响应导致 JSON 序列化慢或 OOM(多代理拼接大量工具输出时可能触发)。
@@ -771,6 +975,79 @@ func buildEinoRunResultFromAccumulated(
return out
}
// einoExtractFallbackAssistantFromMsgs backfills a user-visible reply from
// the Eino ADK trace when the main channel produced no assistant text.
// Typical cases: the supervisor only called exit (final_result lives in a
// Tool message), or tool results reached the history while lastAssistant was
// never updated.
//
// Priority: newest non-error exit tool output, then the final_result argument
// of the newest assistant tool_calls entry that invoked exit.
func einoExtractFallbackAssistantFromMsgs(msgs []adk.Message) string {
	exitName := adk.ToolInfoExit.Name
	for idx := len(msgs) - 1; idx >= 0; idx-- {
		msg := msgs[idx]
		if msg == nil || msg.Role != schema.Tool {
			continue
		}
		if !strings.EqualFold(strings.TrimSpace(msg.ToolName), exitName) {
			continue
		}
		body := strings.TrimSpace(msg.Content)
		if body != "" && !strings.HasPrefix(body, einomcp.ToolErrorPrefix) {
			return body
		}
	}
	for idx := len(msgs) - 1; idx >= 0; idx-- {
		msg := msgs[idx]
		if msg == nil || msg.Role != schema.Assistant {
			continue
		}
		if out := einoExtractExitFinalFromAssistantToolCalls(msg); out != "" {
			return out
		}
	}
	return ""
}
// einoExtractExitFinalFromAssistantToolCalls scans an assistant message's
// tool calls from newest to oldest and returns the first parsable
// final_result argument of an exit call, or "" when none matches.
func einoExtractExitFinalFromAssistantToolCalls(msg *schema.Message) string {
	if msg == nil {
		return ""
	}
	for i := len(msg.ToolCalls) - 1; i >= 0; i-- {
		call := msg.ToolCalls[i]
		if !strings.EqualFold(strings.TrimSpace(call.Function.Name), adk.ToolInfoExit.Name) {
			continue
		}
		if out := einoParseExitFinalResultArguments(call.Function.Arguments); out != "" {
			return out
		}
	}
	return ""
}
// einoParseExitFinalResultArguments extracts the final_result field from an
// exit tool call's JSON arguments. A JSON string value is returned trimmed;
// any other JSON value is re-marshaled to compact JSON text. It returns ""
// when the arguments are empty, malformed, or carry no final_result.
func einoParseExitFinalResultArguments(arguments string) string {
	trimmed := strings.TrimSpace(arguments)
	if trimmed == "" {
		return ""
	}
	var wrap struct {
		FinalResult json.RawMessage `json:"final_result"`
	}
	if json.Unmarshal([]byte(trimmed), &wrap) != nil || len(wrap.FinalResult) == 0 {
		return ""
	}
	// Fast path: a plain JSON string (also absorbs JSON null as "").
	var asString string
	if json.Unmarshal(wrap.FinalResult, &asString) == nil {
		return strings.TrimSpace(asString)
	}
	// Non-string payloads are round-tripped to normalized JSON text.
	var generic interface{}
	if json.Unmarshal(wrap.FinalResult, &generic) != nil {
		return ""
	}
	encoded, merr := json.Marshal(generic)
	if merr != nil {
		return ""
	}
	return strings.TrimSpace(string(encoded))
}
func buildEinoCheckpointID(orchMode string) string {
mode := sanitizeEinoPathSegment(strings.TrimSpace(orchMode))
if mode == "" {
@@ -0,0 +1,31 @@
package multiagent
import (
"fmt"
"cyberstrike-ai/internal/agent"
"cyberstrike-ai/internal/einomcp"
)
// newEinoExecuteMonitorCallback builds the callback fired when an Eino
// filesystem execute stream finishes: it writes the run into the MCP monitor
// store and reports the executionId via recorder, mirroring the CallTool path
// so assistant messages can render the execution-detail chip.
func newEinoExecuteMonitorCallback(ag *agent.Agent, recorder einomcp.ExecutionRecorder) func(command, stdout string, success bool, invokeErr error) {
	return func(command, stdout string, success bool, invokeErr error) {
		if ag == nil || recorder == nil {
			return
		}
		var runErr error
		if !success {
			runErr = invokeErr
			if runErr == nil {
				runErr = fmt.Errorf("execute failed")
			}
		}
		execArgs := map[string]interface{}{"command": command}
		if id := ag.RecordLocalToolExecution("execute", execArgs, stdout, runErr); id != "" {
			recorder(id)
		}
	}
}
@@ -2,11 +2,16 @@ package multiagent
import (
"context"
"errors"
"fmt"
"io"
"strings"
"cyberstrike-ai/internal/einomcp"
"cyberstrike-ai/internal/security"
"github.com/cloudwego/eino/adk/filesystem"
"github.com/cloudwego/eino/compose"
"github.com/cloudwego/eino/schema"
)
@@ -14,8 +19,15 @@ import (
// 官方 execute 工具默认走 ExecuteStreaming 且不设 RunInBackendGround;末尾带 & 时子进程仍与管道相连,
// streamStdout 按行读取会在无换行输出时长时间阻塞(与 MCP 工具 exec 的独立实现不同)。
// 对「完全后台」命令自动开启 RunInBackendGround,与 local.runCmdInBackground 行为对齐。
//
// 使用 Pipe 将内层流转发给调用方:在 inner EOF 后、关闭 Pipe 前同步调用 ToolInvokeNotify.Fire
// 保证 run loop 在模型开始下一轮输出前已记录 execute 结果(用于 UI 与「重复助手复述」去重)。
type einoStreamingShellWrap struct {
inner filesystem.StreamingShell
inner filesystem.StreamingShell
invokeNotify *einomcp.ToolInvokeNotifyHolder
einoAgentName string
// recordMonitor 在 execute 流结束后写入 tool_executions 并 recorder(executionId),使「渗透测试详情」与常规 MCP 一致。
recordMonitor func(command, stdout string, success bool, invokeErr error)
}
func (w *einoStreamingShellWrap) ExecuteStreaming(ctx context.Context, input *filesystem.ExecuteRequest) (*schema.StreamReader[*filesystem.ExecuteResponse], error) {
@@ -26,8 +38,73 @@ func (w *einoStreamingShellWrap) ExecuteStreaming(ctx context.Context, input *fi
return w.inner.ExecuteStreaming(ctx, nil)
}
req := *input
cmd := strings.TrimSpace(req.Command)
if security.IsBackgroundShellCommand(req.Command) && !req.RunInBackendGround {
req.RunInBackendGround = true
}
return w.inner.ExecuteStreaming(ctx, &req)
sr, err := w.inner.ExecuteStreaming(ctx, &req)
if err != nil {
return nil, err
}
tid := strings.TrimSpace(compose.GetToolCallID(ctx))
if sr == nil || w.invokeNotify == nil || tid == "" {
return sr, nil
}
outR, outW := schema.Pipe[*filesystem.ExecuteResponse](32)
agentTag := strings.TrimSpace(w.einoAgentName)
go func(inner *schema.StreamReader[*filesystem.ExecuteResponse], command string) {
defer inner.Close()
var sb strings.Builder
const maxCapture = 16 * 1024
success := true
var invokeErr error
exitCode := 0
hasExitCode := false
for {
resp, rerr := inner.Recv()
if errors.Is(rerr, io.EOF) {
break
}
if rerr != nil {
success = false
invokeErr = rerr
_ = outW.Send(nil, rerr)
break
}
if resp != nil {
if resp.ExitCode != nil {
hasExitCode = true
exitCode = *resp.ExitCode
}
if remain := maxCapture - sb.Len(); remain > 0 {
out := resp.Output
if len(out) > remain {
out = out[:remain]
}
sb.WriteString(out)
}
if outW.Send(resp, nil) {
success = false
invokeErr = fmt.Errorf("execute stream closed by consumer")
break
}
}
}
if success && hasExitCode && exitCode != 0 {
success = false
invokeErr = fmt.Errorf("execute exited with code %d", exitCode)
}
if w.recordMonitor != nil {
w.recordMonitor(command, sb.String(), success, invokeErr)
}
w.invokeNotify.Fire(tid, "execute", agentTag, success, sb.String(), invokeErr)
outW.Close()
}(sr, cmd)
return outR, nil
}
@@ -0,0 +1,62 @@
package multiagent
import (
"testing"
"github.com/cloudwego/eino/schema"
)
// TestEinoExtractFallbackAssistantFromMsgs_exitToolMessage verifies the
// fallback returns the content of an exit Tool message in the trace.
// Consistency fix: build the exit message via the shared toolExitMsg helper,
// as the sibling tests in this file do, instead of inlining the construction.
func TestEinoExtractFallbackAssistantFromMsgs_exitToolMessage(t *testing.T) {
	msgs := []*schema.Message{
		schema.UserMessage("hi"),
		toolExitMsg("answer for user", "call-exit-1"),
	}
	if got := einoExtractFallbackAssistantFromMsgs(msgs); got != "answer for user" {
		t.Fatalf("got %q", got)
	}
}
// TestEinoExtractFallbackAssistantFromMsgs_lastExitWins checks that the
// newest exit Tool message takes precedence over earlier ones.
func TestEinoExtractFallbackAssistantFromMsgs_lastExitWins(t *testing.T) {
	trace := []*schema.Message{
		schema.UserMessage("hi"),
		toolExitMsg("first", "c1"),
		toolExitMsg("second", "c2"),
	}
	got := einoExtractFallbackAssistantFromMsgs(trace)
	if got != "second" {
		t.Fatalf("got %q", got)
	}
}
// TestEinoExtractFallbackAssistantFromMsgs_fromAssistantToolCalls checks that
// final_result is parsed out of an assistant exit tool call's arguments.
func TestEinoExtractFallbackAssistantFromMsgs_fromAssistantToolCalls(t *testing.T) {
	call := schema.ToolCall{
		ID:   "x",
		Type: "function",
		Function: schema.FunctionCall{
			Name:      "exit",
			Arguments: `{"final_result":"from args"}`,
		},
	}
	asst := schema.AssistantMessage("", []schema.ToolCall{call})
	if got := einoExtractFallbackAssistantFromMsgs([]*schema.Message{asst}); got != "from args" {
		t.Fatalf("got %q", got)
	}
}
// TestEinoExtractFallbackAssistantFromMsgs_prefersToolOverEarlierAssistant
// checks that an exit Tool message outranks final_result in tool-call args.
func TestEinoExtractFallbackAssistantFromMsgs_prefersToolOverEarlierAssistant(t *testing.T) {
	asst := schema.AssistantMessage("", []schema.ToolCall{{
		ID:   "x",
		Type: "function",
		Function: schema.FunctionCall{
			Name:      "exit",
			Arguments: `{"final_result":"from args"}`,
		},
	}})
	trace := []*schema.Message{asst, toolExitMsg("from tool", "c1")}
	if got := einoExtractFallbackAssistantFromMsgs(trace); got != "from tool" {
		t.Fatalf("got %q", got)
	}
}
// toolExitMsg builds a Tool-role message attributed to the exit tool.
func toolExitMsg(content, callID string) *schema.Message {
	msg := schema.ToolMessage(content, callID)
	msg.ToolName = "exit"
	return msg
}
@@ -0,0 +1,101 @@
package multiagent
import (
"encoding/json"
"errors"
"strings"
"cyberstrike-ai/internal/agent"
"cyberstrike-ai/internal/einomcp"
"github.com/cloudwego/eino/adk"
"github.com/cloudwego/eino/schema"
)
// einoADKFilesystemToolNames mirrors the default ToolName* constants of
// cloudwego/eino/adk/middlewares/filesystem. execute is recorded by
// eino_execute_monitor and is therefore intentionally absent here.
var einoADKFilesystemToolNames = map[string]struct{}{
	"ls":         {},
	"read_file":  {},
	"write_file": {},
	"edit_file":  {},
	"glob":       {},
	"grep":       {},
}
// isBuiltinEinoADKFilesystemToolName reports whether name — compared
// case-insensitively with surrounding whitespace ignored — is one of the
// builtin filesystem middleware tools above.
func isBuiltinEinoADKFilesystemToolName(name string) bool {
	key := strings.ToLower(strings.TrimSpace(name))
	_, ok := einoADKFilesystemToolNames[key]
	return ok
}
// toolCallArgsFromAccumulated resolves the JSON arguments of the tool call
// matching toolCallID (and, when provided, expectToolName) by scanning the
// accumulated trace from newest to oldest. Malformed JSON is preserved under
// the "arguments_raw" key; no match or empty arguments yields an empty map.
func toolCallArgsFromAccumulated(msgs []adk.Message, toolCallID, expectToolName string) map[string]interface{} {
	wantID := strings.TrimSpace(toolCallID)
	wantName := strings.TrimSpace(expectToolName)
	for i := len(msgs) - 1; i >= 0; i-- {
		msg := msgs[i]
		if msg == nil || msg.Role != schema.Assistant || len(msg.ToolCalls) == 0 {
			continue
		}
		for j := len(msg.ToolCalls) - 1; j >= 0; j-- {
			call := msg.ToolCalls[j]
			if wantID != "" && strings.TrimSpace(call.ID) != wantID {
				continue
			}
			if wantName != "" && !strings.EqualFold(strings.TrimSpace(call.Function.Name), wantName) {
				continue
			}
			raw := strings.TrimSpace(call.Function.Arguments)
			if raw == "" {
				return map[string]interface{}{}
			}
			var parsed map[string]interface{}
			if json.Unmarshal([]byte(raw), &parsed) != nil {
				return map[string]interface{}{"arguments_raw": raw}
			}
			if parsed == nil {
				return map[string]interface{}{}
			}
			return parsed
		}
	}
	return map[string]interface{}{}
}
// recordEinoADKFilesystemToolMonitor persists a builtin Eino ADK filesystem
// middleware tool result into the MCP monitor store (same detail chip as
// execute / the MCP bridge). execute itself is recorded elsewhere and skipped.
func recordEinoADKFilesystemToolMonitor(
	ag *agent.Agent,
	rec einomcp.ExecutionRecorder,
	toolName string,
	toolCallID string,
	msgs []adk.Message,
	resultText string,
	isErr bool,
) {
	if ag == nil || rec == nil {
		return
	}
	name := strings.TrimSpace(toolName)
	switch {
	case name == "":
		return
	case strings.EqualFold(name, "execute"):
		// execute is already persisted by the eino_execute_monitor callback.
		return
	case !isBuiltinEinoADKFilesystemToolName(name):
		return
	}
	var invErr error
	if isErr {
		if msg := strings.TrimSpace(resultText); msg != "" {
			invErr = errors.New(msg)
		} else {
			invErr = errors.New("tool error")
		}
	}
	args := toolCallArgsFromAccumulated(msgs, toolCallID, name)
	storedName := "eino_fs::" + strings.ToLower(name)
	if id := ag.RecordLocalToolExecution(storedName, args, resultText, invErr); id != "" {
		rec(id)
	}
}
@@ -0,0 +1,84 @@
package multiagent
import (
"context"
"encoding/json"
"sync"
"github.com/cloudwego/eino/adk"
)
// modelFacingTraceHolder keeps a snapshot of the messages that are about to be
// sent to the ChatModel (i.e. after summarization / reduction / orphan
// pruning). It is persisted as last_react_input so resumed runs share the
// model's post-compression view, instead of relying only on the
// event-stream-appended runAccumulatedMsgs.
type modelFacingTraceHolder struct {
	mu sync.Mutex
	// msgs is a deep-copied slice so later in-place mutation by the framework
	// cannot corrupt the snapshot.
	msgs []adk.Message
}
// newModelFacingTraceHolder returns an empty holder ready for use.
func newModelFacingTraceHolder() *modelFacingTraceHolder {
	return &modelFacingTraceHolder{}
}
// Snapshot returns a further deep copy of the current snapshot (for
// serialization/persistence), so callers never share memory with the holder
// nor hold its mutex for long. A nil receiver yields nil.
func (h *modelFacingTraceHolder) Snapshot() []adk.Message {
	if h == nil {
		return nil
	}
	h.mu.Lock()
	defer h.mu.Unlock()
	return cloneADKMessagesForTrace(h.msgs)
}
// storeFromState deep-copies the state's message list into the holder. Empty
// states and failed clones are ignored so any previous snapshot is retained.
func (h *modelFacingTraceHolder) storeFromState(state *adk.ChatModelAgentState) {
	if h == nil || state == nil || len(state.Messages) == 0 {
		return
	}
	snapshot := cloneADKMessagesForTrace(state.Messages)
	if len(snapshot) == 0 {
		return
	}
	h.mu.Lock()
	defer h.mu.Unlock()
	h.msgs = snapshot
}
// cloneADKMessagesForTrace deep-copies messages via a JSON round trip so that
// later in-place mutation by the framework cannot affect stored snapshots.
// Any marshal or unmarshal failure yields nil.
func cloneADKMessagesForTrace(msgs []adk.Message) []adk.Message {
	if len(msgs) == 0 {
		return nil
	}
	encoded, err := json.Marshal(msgs)
	if err != nil {
		return nil
	}
	var cloned []adk.Message
	if json.Unmarshal(encoded, &cloned) != nil {
		return nil
	}
	return cloned
}
// modelFacingTraceMiddleware must sit at the **end of the BeforeModel segment
// of the Handlers chain** (after telemetry): at that point state.Messages is
// exactly the final input of this LLM call.
type modelFacingTraceMiddleware struct {
	adk.BaseChatModelAgentMiddleware
	holder *modelFacingTraceHolder
}
// newModelFacingTraceMiddleware wraps holder into a middleware; a nil holder
// yields a nil middleware so callers can skip registration.
func newModelFacingTraceMiddleware(holder *modelFacingTraceHolder) adk.ChatModelAgentMiddleware {
	if holder == nil {
		return nil
	}
	return &modelFacingTraceMiddleware{holder: holder}
}
// BeforeModelRewriteState records the final model-facing message list into
// the holder; ctx and state are passed through unmodified.
func (m *modelFacingTraceMiddleware) BeforeModelRewriteState(
	ctx context.Context,
	state *adk.ChatModelAgentState,
	mc *adk.ModelContext,
) (context.Context, *adk.ChatModelAgentState, error) {
	if m.holder != nil && state != nil {
		m.holder.storeFromState(state)
	}
	return ctx, state, nil
}
@@ -41,6 +41,8 @@ type PlanExecuteRootArgs struct {
FilesystemMiddleware adk.ChatModelAgentMiddleware
// PlannerReplannerRewriteHandlers applies BeforeModelRewriteState pipeline for planner/replanner input.
PlannerReplannerRewriteHandlers []adk.ChatModelAgentMiddleware
// ModelFacingTrace 可选:由 Executor Handlers 链末尾写入,供 last_react 与 summarization 后上下文对齐。
ModelFacingTrace *modelFacingTraceHolder
}
// NewPlanExecuteRoot 返回 plan → execute → replan 预置编排根节点(与 Deep / Supervisor 并列)。
@@ -101,6 +103,11 @@ func NewPlanExecuteRoot(ctx context.Context, a *PlanExecuteRootArgs) (adk.Resuma
if teleMw := newEinoModelInputTelemetryMiddleware(a.Logger, a.ModelName, a.ConversationID, "plan_execute_executor"); teleMw != nil {
execHandlers = append(execHandlers, teleMw)
}
if a.ModelFacingTrace != nil {
if capMw := newModelFacingTraceMiddleware(a.ModelFacingTrace); capMw != nil {
execHandlers = append(execHandlers, capMw)
}
}
executor, err := newPlanExecuteExecutor(ctx, &planexecute.ExecutorConfig{
Model: a.ExecModel,
ToolsConfig: a.ToolsCfg,
+27 -16
View File
@@ -86,8 +86,10 @@ func RunEinoSingleChatModelAgent(
})
}
toolInvokeNotify := einomcp.NewToolInvokeNotifyHolder()
einoExecMonitor := newEinoExecuteMonitorCallback(ag, recorder)
mainDefs := ag.ToolsForRole(roleTools)
mainTools, err := einomcp.ToolsFromDefinitions(ag, holder, mainDefs, recorder, toolOutputChunk)
mainTools, err := einomcp.ToolsFromDefinitions(ag, holder, mainDefs, recorder, toolOutputChunk, toolInvokeNotify, einoSingleAgentName)
if err != nil {
return nil, err
}
@@ -130,13 +132,15 @@ func RunEinoSingleChatModelAgent(
return nil, fmt.Errorf("eino single summarization: %w", err)
}
handlers := make([]adk.ChatModelAgentMiddleware, 0, 4)
modelFacingTrace := newModelFacingTraceHolder()
handlers := make([]adk.ChatModelAgentMiddleware, 0, 8)
if len(mainOrchestratorPre) > 0 {
handlers = append(handlers, mainOrchestratorPre...)
}
if einoSkillMW != nil {
if einoFSTools && einoLoc != nil {
fsMw, fsErr := subAgentFilesystemMiddleware(ctx, einoLoc)
fsMw, fsErr := subAgentFilesystemMiddleware(ctx, einoLoc, toolInvokeNotify, einoSingleAgentName, einoExecMonitor)
if fsErr != nil {
return nil, fmt.Errorf("eino single filesystem 中间件: %w", fsErr)
}
@@ -148,6 +152,9 @@ func RunEinoSingleChatModelAgent(
if teleMw := newEinoModelInputTelemetryMiddleware(logger, appCfg.OpenAI.Model, conversationID, "eino_single"); teleMw != nil {
handlers = append(handlers, teleMw)
}
if capMw := newModelFacingTraceMiddleware(modelFacingTrace); capMw != nil {
handlers = append(handlers, capMw)
}
maxIter := ma.MaxIteration
if maxIter <= 0 {
@@ -162,7 +169,7 @@ func RunEinoSingleChatModelAgent(
Tools: mainToolsForCfg,
UnknownToolsHandler: einomcp.UnknownToolReminderHandler(),
ToolCallMiddlewares: []compose.ToolMiddleware{
{Invokable: hitlToolCallMiddleware()},
hitlToolCallMiddleware(),
{Invokable: softRecoveryToolCallMiddleware()},
},
},
@@ -221,18 +228,22 @@ func RunEinoSingleChatModelAgent(
}
return runEinoADKAgentLoop(ctx, &einoADKRunLoopArgs{
OrchMode: "eino_single",
OrchestratorName: einoSingleAgentName,
ConversationID: conversationID,
Progress: progress,
Logger: logger,
SnapshotMCPIDs: snapshotMCPIDs,
StreamsMainAssistant: streamsMainAssistant,
EinoRoleTag: einoRoleTag,
CheckpointDir: ma.EinoMiddleware.CheckpointDir,
McpIDsMu: &mcpIDsMu,
McpIDs: &mcpIDs,
DA: chatAgent,
OrchMode: "eino_single",
OrchestratorName: einoSingleAgentName,
ConversationID: conversationID,
Progress: progress,
Logger: logger,
SnapshotMCPIDs: snapshotMCPIDs,
StreamsMainAssistant: streamsMainAssistant,
EinoRoleTag: einoRoleTag,
CheckpointDir: ma.EinoMiddleware.CheckpointDir,
McpIDsMu: &mcpIDsMu,
McpIDs: &mcpIDs,
FilesystemMonitorAgent: ag,
FilesystemMonitorRecord: recorder,
ToolInvokeNotify: toolInvokeNotify,
DA: chatAgent,
ModelFacingTrace: modelFacingTrace,
EmptyResponseMessage: "(Eino ADK single-agent session completed but no assistant text was captured. Check process details or logs.) " +
"Eino ADK 单代理会话已完成,但未捕获到助手文本输出。请查看过程详情或日志。)",
}, baseMsgs)
+15 -3
View File
@@ -8,6 +8,7 @@ import (
"strings"
"cyberstrike-ai/internal/config"
"cyberstrike-ai/internal/einomcp"
localbk "github.com/cloudwego/eino-ext/adk/backend/local"
"github.com/cloudwego/eino/adk"
@@ -75,12 +76,23 @@ func prepareEinoSkills(
// subAgentFilesystemMiddleware returns filesystem middleware for a sub-agent when Deep itself
// does not set Backend (fsTools false on orchestrator) but we still want tools on subs — not used;
// when orchestrator has Backend, builtin FS is only on outer agent; subs need explicit FS for parity.
func subAgentFilesystemMiddleware(ctx context.Context, loc *localbk.Local) (adk.ChatModelAgentMiddleware, error) {
func subAgentFilesystemMiddleware(
ctx context.Context,
loc *localbk.Local,
invokeNotify *einomcp.ToolInvokeNotifyHolder,
einoAgentName string,
recordMonitor func(command, stdout string, success bool, invokeErr error),
) (adk.ChatModelAgentMiddleware, error) {
if loc == nil {
return nil, nil
}
return filesystem.New(ctx, &filesystem.MiddlewareConfig{
Backend: loc,
StreamingShell: &einoStreamingShellWrap{inner: loc},
Backend: loc,
StreamingShell: &einoStreamingShellWrap{
inner: loc,
invokeNotify: invokeNotify,
einoAgentName: strings.TrimSpace(einoAgentName),
recordMonitor: recordMonitor,
},
})
}
+54 -12
View File
@@ -8,6 +8,7 @@ import (
"github.com/cloudwego/eino/adk"
"github.com/cloudwego/eino/compose"
"github.com/cloudwego/eino/schema"
)
type hitlInterceptorKey struct{}
@@ -41,7 +42,31 @@ func WithHITLToolInterceptor(ctx context.Context, fn HITLToolInterceptor) contex
return context.WithValue(ctx, hitlInterceptorKey{}, fn)
}
func hitlToolCallMiddleware() compose.InvokableToolMiddleware {
// hitlToolCallMiddleware registers both the Invokable and the Streamable
// interceptors. Eino filesystem's execute is a streaming tool
// (StreamableTool); with only the Invokable hook installed, human-in-the-loop
// review would never intercept it and the tool would run directly.
func hitlToolCallMiddleware() compose.ToolMiddleware {
	return compose.ToolMiddleware{
		Invokable: hitlInvokableToolCallMiddleware(),
		Streamable: hitlStreamableToolCallMiddleware(),
	}
}
// hitlClearReturnDirectlyIfTransfer resets the return-directly markers after
// a rejected transfer_to_agent call. transfer_to_agent is flagged
// returnDirectly in Eino: the ReAct subgraph would otherwise END immediately
// even though the real tool (and its handoff action) never ran, leaving the
// supervisor stopped without a transfer. Other tool names are a no-op.
func hitlClearReturnDirectlyIfTransfer(ctx context.Context, toolName string) {
	if !strings.EqualFold(strings.TrimSpace(toolName), adk.TransferToAgentToolName) {
		return
	}
	_ = compose.ProcessState[*adk.State](ctx, func(_ context.Context, st *adk.State) error {
		if st != nil {
			st.ReturnDirectlyToolCallID = ""
			st.HasReturnDirectly = false
			st.ReturnDirectlyEvent = nil
		}
		return nil
	})
}
func hitlInvokableToolCallMiddleware() compose.InvokableToolMiddleware {
return func(next compose.InvokableToolEndpoint) compose.InvokableToolEndpoint {
return func(ctx context.Context, input *compose.ToolInput) (*compose.ToolOutput, error) {
if input != nil {
@@ -55,17 +80,7 @@ func hitlToolCallMiddleware() compose.InvokableToolMiddleware {
// transfer_to_agent 在 Eino 中标记为 returnDirectly:工具成功后 ReAct 子图会直接 END,
// 并依赖真实工具内的 SendToolGenAction 触发移交。HITL 拒绝时不会执行真实工具,
// 若仍走 returnDirectly 分支,监督者会在无 Transfer 动作的情况下结束,模型不再迭代。
if strings.EqualFold(strings.TrimSpace(input.Name), adk.TransferToAgentToolName) {
_ = compose.ProcessState[*adk.State](ctx, func(_ context.Context, st *adk.State) error {
if st == nil {
return nil
}
st.ReturnDirectlyToolCallID = ""
st.HasReturnDirectly = false
st.ReturnDirectlyEvent = nil
return nil
})
}
hitlClearReturnDirectlyIfTransfer(ctx, input.Name)
return &compose.ToolOutput{Result: msg}, nil
}
return nil, err
@@ -79,3 +94,30 @@ func hitlToolCallMiddleware() compose.InvokableToolMiddleware {
}
}
}
// hitlStreamableToolCallMiddleware mirrors the invokable HITL middleware for
// streaming tools: the interceptor from ctx may edit the call's arguments or
// reject it. A human rejection is converted into a single-chunk result stream
// so the model keeps iterating instead of surfacing a hard error; any other
// interceptor error is propagated.
func hitlStreamableToolCallMiddleware() compose.StreamableToolMiddleware {
	return func(next compose.StreamableToolEndpoint) compose.StreamableToolEndpoint {
		return func(ctx context.Context, input *compose.ToolInput) (*compose.StreamToolOutput, error) {
			fn, ok := ctx.Value(hitlInterceptorKey{}).(HITLToolInterceptor)
			if input == nil || !ok || fn == nil {
				return next(ctx, input)
			}
			edited, err := fn(ctx, input.Name, input.Arguments)
			if err != nil {
				if !IsHumanRejectError(err) {
					return nil, err
				}
				msg := fmt.Sprintf("[HITL Reject] Tool '%s' was rejected by human reviewer. Reason: %s\nPlease adjust parameters/plan and continue without this call.",
					input.Name, strings.TrimSpace(err.Error()))
				hitlClearReturnDirectlyIfTransfer(ctx, input.Name)
				return &compose.StreamToolOutput{
					Result: schema.StreamReaderFromArray([]string{msg}),
				}, nil
			}
			if edited != "" {
				input.Arguments = edited
			}
			return next(ctx, input)
		}
	}
}
+7
View File
@@ -0,0 +1,7 @@
package multiagent
import "errors"
// ErrInterruptContinue is used as a context.CancelCause: when the user picks
// "interrupt and continue" while no MCP tool call is in flight, the current
// inference/stream is cancelled and, within the same session task, the next
// round automatically resumes carrying the user's supplemental notes
// (similar to a Hermes-style human turn).
var ErrInterruptContinue = errors.New("agent interrupt: continue with user-supplied context")
+99 -84
View File
@@ -110,6 +110,7 @@ func RunDeepAgent(
mcpIDs = append(mcpIDs, id)
mcpIDsMu.Unlock()
}
einoExecMonitor := newEinoExecuteMonitorCallback(ag, recorder)
// 与单代理流式一致:在 response_start / response_delta 的 data 中带当前 mcpExecutionIds,供主聊天绑定复制与展示。
snapshotMCPIDs := func() []string {
@@ -120,6 +121,7 @@ func RunDeepAgent(
return out
}
toolInvokeNotify := einomcp.NewToolInvokeNotifyHolder()
mainDefs := ag.ToolsForRole(roleTools)
toolOutputChunk := func(toolName, toolCallID, chunk string) {
// When toolCallId is missing, frontend ignores tool_result_delta.
@@ -137,16 +139,6 @@ func RunDeepAgent(
})
}
mainTools, err := einomcp.ToolsFromDefinitions(ag, holder, mainDefs, recorder, toolOutputChunk)
if err != nil {
return nil, err
}
mainToolsForCfg, mainOrchestratorPre, err := prependEinoMiddlewares(ctx, &ma.EinoMiddleware, einoMWMain, mainTools, einoLoc, skillsRoot, conversationID, logger)
if err != nil {
return nil, err
}
httpClient := &http.Client{
Timeout: 30 * time.Minute,
Transport: &http.Transport{
@@ -222,7 +214,7 @@ func RunDeepAgent(
}
subDefs := ag.ToolsForRole(roleTools)
subTools, err := einomcp.ToolsFromDefinitions(ag, holder, subDefs, recorder, toolOutputChunk)
subTools, err := einomcp.ToolsFromDefinitions(ag, holder, subDefs, recorder, toolOutputChunk, toolInvokeNotify, id)
if err != nil {
return nil, fmt.Errorf("子代理 %q 工具: %w", id, err)
}
@@ -248,7 +240,7 @@ func RunDeepAgent(
}
if einoSkillMW != nil {
if einoFSTools && einoLoc != nil {
subFs, fsErr := subAgentFilesystemMiddleware(ctx, einoLoc)
subFs, fsErr := subAgentFilesystemMiddleware(ctx, einoLoc, toolInvokeNotify, id, einoExecMonitor)
if fsErr != nil {
return nil, fmt.Errorf("子代理 %q filesystem 中间件: %w", id, fsErr)
}
@@ -293,7 +285,7 @@ func RunDeepAgent(
Tools: subToolsForCfg,
UnknownToolsHandler: einomcp.UnknownToolReminderHandler(),
ToolCallMiddlewares: []compose.ToolMiddleware{
{Invokable: hitlToolCallMiddleware()},
hitlToolCallMiddleware(),
{Invokable: softRecoveryToolCallMiddleware()},
},
},
@@ -319,6 +311,8 @@ func RunDeepAgent(
return nil, fmt.Errorf("多代理主 summarization 中间件: %w", err)
}
modelFacingTrace := newModelFacingTraceHolder()
// 与 deep.Config.Name / supervisor 主代理 Name 一致。
orchestratorName := "cyberstrike-deep"
orchDescription := "Coordinates specialist agents and MCP tools for authorized security testing."
@@ -338,6 +332,16 @@ func RunDeepAgent(
orchDescription = d
}
}
mainTools, err := einomcp.ToolsFromDefinitions(ag, holder, mainDefs, recorder, toolOutputChunk, toolInvokeNotify, orchestratorName)
if err != nil {
return nil, err
}
mainToolsForCfg, mainOrchestratorPre, err := prependEinoMiddlewares(ctx, &ma.EinoMiddleware, einoMWMain, mainTools, einoLoc, skillsRoot, conversationID, logger)
if err != nil {
return nil, err
}
orchInstruction = injectToolNamesOnlyInstruction(ctx, orchInstruction, mainTools)
if logger != nil {
mainNames := collectToolNames(ctx, mainTools)
@@ -381,7 +385,12 @@ func RunDeepAgent(
var deepShell filesystem.StreamingShell
if einoLoc != nil && einoFSTools {
deepBackend = einoLoc
deepShell = einoLoc
deepShell = &einoStreamingShellWrap{
inner: einoLoc,
invokeNotify: toolInvokeNotify,
einoAgentName: orchestratorName,
recordMonitor: einoExecMonitor,
}
}
// noNestedTaskMiddleware 必须在最外层(最先拦截),防止 skill 或其他中间件内部触发 task 调用绕过检测。
@@ -400,6 +409,9 @@ func RunDeepAgent(
if teleMw := newEinoModelInputTelemetryMiddleware(logger, appCfg.OpenAI.Model, conversationID, "deep_orchestrator"); teleMw != nil {
deepHandlers = append(deepHandlers, teleMw)
}
if capMw := newModelFacingTraceMiddleware(modelFacingTrace); capMw != nil {
deepHandlers = append(deepHandlers, capMw)
}
supHandlers := []adk.ChatModelAgentMiddleware{}
if len(mainOrchestratorPre) > 0 {
@@ -413,13 +425,16 @@ func RunDeepAgent(
if teleMw := newEinoModelInputTelemetryMiddleware(logger, appCfg.OpenAI.Model, conversationID, "supervisor_orchestrator"); teleMw != nil {
supHandlers = append(supHandlers, teleMw)
}
if capMw := newModelFacingTraceMiddleware(modelFacingTrace); capMw != nil {
supHandlers = append(supHandlers, capMw)
}
mainToolsCfg := adk.ToolsConfig{
ToolsNodeConfig: compose.ToolsNodeConfig{
Tools: mainToolsForCfg,
UnknownToolsHandler: einomcp.UnknownToolReminderHandler(),
ToolCallMiddlewares: []compose.ToolMiddleware{
{Invokable: hitlToolCallMiddleware()},
hitlToolCallMiddleware(),
{Invokable: softRecoveryToolCallMiddleware()},
},
},
@@ -438,7 +453,7 @@ func RunDeepAgent(
// 构建 filesystem 中间件(与 Deep sub-agent 一致)
var peFsMw adk.ChatModelAgentMiddleware
if einoSkillMW != nil && einoFSTools && einoLoc != nil {
peFsMw, err = subAgentFilesystemMiddleware(ctx, einoLoc)
peFsMw, err = subAgentFilesystemMiddleware(ctx, einoLoc, toolInvokeNotify, "executor", einoExecMonitor)
if err != nil {
return nil, fmt.Errorf("plan_execute filesystem 中间件: %w", err)
}
@@ -458,6 +473,7 @@ func RunDeepAgent(
ExecPreMiddlewares: mainOrchestratorPre,
SkillMiddleware: einoSkillMW,
FilesystemMiddleware: peFsMw,
ModelFacingTrace: modelFacingTrace,
PlannerReplannerRewriteHandlers: []adk.ChatModelAgentMiddleware{
mainSumMw,
// 孤儿 tool 消息兜底:必须挂在 summarization 之后、telemetry 之前。
@@ -549,95 +565,94 @@ func RunDeepAgent(
}
return runEinoADKAgentLoop(ctx, &einoADKRunLoopArgs{
OrchMode: orchMode,
OrchestratorName: orchestratorName,
ConversationID: conversationID,
Progress: progress,
Logger: logger,
SnapshotMCPIDs: snapshotMCPIDs,
StreamsMainAssistant: streamsMainAssistant,
EinoRoleTag: einoRoleTag,
CheckpointDir: ma.EinoMiddleware.CheckpointDir,
McpIDsMu: &mcpIDsMu,
McpIDs: &mcpIDs,
DA: da,
OrchMode: orchMode,
OrchestratorName: orchestratorName,
ConversationID: conversationID,
Progress: progress,
Logger: logger,
SnapshotMCPIDs: snapshotMCPIDs,
StreamsMainAssistant: streamsMainAssistant,
EinoRoleTag: einoRoleTag,
CheckpointDir: ma.EinoMiddleware.CheckpointDir,
McpIDsMu: &mcpIDsMu,
McpIDs: &mcpIDs,
FilesystemMonitorAgent: ag,
FilesystemMonitorRecord: recorder,
ToolInvokeNotify: toolInvokeNotify,
DA: da,
ModelFacingTrace: modelFacingTrace,
EmptyResponseMessage: "(Eino multi-agent orchestration completed but no assistant text was captured. Check process details or logs.) " +
"(Eino 多代理编排已完成,但未捕获到助手文本输出。请查看过程详情或日志。)",
}, baseMsgs)
}
// chatToolCallsToSchema converts persisted agent.ToolCall records into Eino
// schema.ToolCall values. Entries without an ID are dropped, arguments are
// re-marshalled to a JSON string (empty on marshal failure), and a missing
// call type defaults to "function".
func chatToolCallsToSchema(tcs []agent.ToolCall) []schema.ToolCall {
	if len(tcs) == 0 {
		return nil
	}
	converted := make([]schema.ToolCall, 0, len(tcs))
	for _, call := range tcs {
		// A tool call without an ID cannot be matched to a tool message; skip it.
		if strings.TrimSpace(call.ID) == "" {
			continue
		}
		var args string
		if call.Function.Arguments != nil {
			if b, err := json.Marshal(call.Function.Arguments); err == nil {
				args = string(b)
			}
		}
		callType := call.Type
		if callType == "" {
			callType = "function"
		}
		converted = append(converted, schema.ToolCall{
			ID:   call.ID,
			Type: callType,
			Function: schema.FunctionCall{
				Name:      call.Function.Name,
				Arguments: args,
			},
		})
	}
	return converted
}
// historyToMessages 将轨迹恢复的 ChatMessage 转为 Eino ADK 消息:**不裁剪条数、不按 token 预算截断**,
// 并保留 user / assistant(含仅 tool_calls/ tool,与库中 last_react 轨迹一致。
func historyToMessages(history []agent.ChatMessage, appCfg *config.Config, mwCfg *config.MultiAgentEinoMiddlewareConfig) []adk.Message {
_ = appCfg
_ = mwCfg
if len(history) == 0 {
return nil
}
// Keep a bounded tail first; then enforce a token budget.
const maxHistoryMessages = 200
start := 0
if len(history) > maxHistoryMessages {
start = len(history) - maxHistoryMessages
}
raw := make([]adk.Message, 0, len(history[start:]))
for _, h := range history[start:] {
switch h.Role {
raw := make([]adk.Message, 0, len(history))
for _, h := range history {
role := strings.ToLower(strings.TrimSpace(h.Role))
switch role {
case "user":
if strings.TrimSpace(h.Content) != "" {
raw = append(raw, schema.UserMessage(h.Content))
}
case "assistant":
if strings.TrimSpace(h.Content) == "" && len(h.ToolCalls) > 0 {
toolSchema := chatToolCallsToSchema(h.ToolCalls)
if len(toolSchema) > 0 || strings.TrimSpace(h.Content) != "" {
raw = append(raw, schema.AssistantMessage(h.Content, toolSchema))
}
case "tool":
if strings.TrimSpace(h.ToolCallID) == "" && strings.TrimSpace(h.Content) == "" {
continue
}
if strings.TrimSpace(h.Content) != "" {
raw = append(raw, schema.AssistantMessage(h.Content, nil))
var opts []schema.ToolMessageOption
if tn := strings.TrimSpace(h.ToolName); tn != "" {
opts = append(opts, schema.WithToolName(tn))
}
raw = append(raw, schema.ToolMessage(h.Content, h.ToolCallID, opts...))
default:
continue
}
}
if len(raw) == 0 {
return raw
}
maxTotal := 120000
modelName := "gpt-4o"
if appCfg != nil {
if appCfg.OpenAI.MaxTotalTokens > 0 {
maxTotal = appCfg.OpenAI.MaxTotalTokens
}
if m := strings.TrimSpace(appCfg.OpenAI.Model); m != "" {
modelName = m
}
}
ratio := 0.35
if mwCfg != nil {
ratio = mwCfg.HistoryInputBudgetRatioEffective()
}
budget := int(float64(maxTotal) * ratio)
if budget < 4096 {
budget = 4096
}
tc := agent.NewTikTokenCounter()
outRev := make([]adk.Message, 0, len(raw))
used := 0
for i := len(raw) - 1; i >= 0; i-- {
msg := raw[i]
n, err := tc.Count(modelName, string(msg.Role)+"\n"+msg.Content)
if err != nil {
n = (len(msg.Content) + 3) / 4
}
if n <= 0 {
n = 1
}
if used+n > budget {
break
}
used += n
outRev = append(outRev, msg)
}
out := make([]adk.Message, 0, len(outRev))
for i := len(outRev) - 1; i >= 0; i-- {
out = append(out, outRev[i])
}
return out
return raw
}
// mergeStreamingToolCallFragments 将流式多帧的 ToolCall 按 index 合并 arguments(与 schema.concatToolCalls 行为一致)。
@@ -0,0 +1,56 @@
package openai
import "testing"
// TestNormalizeStreamingDelta_RepeatedCharBoundary: a stream split at a
// repeated-digit boundary must be concatenated, never overlap-merged — the
// leading "4" of "43" must not be fused with the trailing "4" of "194".
func TestNormalizeStreamingDelta_RepeatedCharBoundary(t *testing.T) {
	next, delta := normalizeStreamingDelta("https://x:194", "43")
	if want := "https://x:19443"; next != want {
		t.Fatalf("next: want %q got %q", want, next)
	}
	if delta != "43" {
		t.Fatalf("delta: want %q got %q", "43", delta)
	}
}
// TestNormalizeStreamingDelta_CumulativePrefix: a cumulative full-text frame
// (strict superstring of the buffer) is reduced to the pure increment.
func TestNormalizeStreamingDelta_CumulativePrefix(t *testing.T) {
	next, delta := normalizeStreamingDelta("今天", "今天天气")
	if next != "今天天气" || delta != "天气" {
		t.Fatalf("got cur=%q d=%q", next, delta)
	}
}
// TestNormalizeStreamingDelta_FullRetransmit: a multi-rune frame identical to
// the buffer is treated as a retransmission and yields no delta.
func TestNormalizeStreamingDelta_FullRetransmit(t *testing.T) {
	next, delta := normalizeStreamingDelta("今天", "今天")
	if delta != "" || next != "今天" {
		t.Fatalf("got cur=%q d=%q", next, delta)
	}
}
// TestNormalizeStreamingDelta_SingleRuneRepeated: an incoming chunk identical
// to a single-rune buffer is a legitimate split repetition (doubled char,
// "44", …) and must be appended, never treated as a full retransmit.
func TestNormalizeStreamingDelta_SingleRuneRepeated(t *testing.T) {
	cases := []struct{ buf, in, wantNext, wantDelta string }{
		{"呀", "呀", "呀呀", "呀"},
		{"4", "4", "44", "4"},
	}
	for _, c := range cases {
		next, delta := normalizeStreamingDelta(c.buf, c.in)
		if next != c.wantNext {
			t.Fatalf("next: want %q got %q", c.wantNext, next)
		}
		if delta != c.wantDelta {
			t.Fatalf("delta: want %q got %q", c.wantDelta, delta)
		}
	}
}
// TestNormalizeStreamingDelta_CumulativeExtendsNumber: after buffering "194",
// the cumulative frame "19443" must yield the increment "43" (note "1943" is
// not a prefix of "19443", so mid-state HasPrefix shortcuts cannot be relied on).
func TestNormalizeStreamingDelta_CumulativeExtendsNumber(t *testing.T) {
	next, delta := normalizeStreamingDelta("194", "19443")
	if want := "19443"; next != want {
		t.Fatalf("next: want %q got %q", want, next)
	}
	if delta != "43" {
		t.Fatalf("delta: want %q got %q", "43", delta)
	}
}
+12 -17
View File
@@ -10,6 +10,7 @@ import (
"net/http"
"strings"
"time"
"unicode/utf8"
"cyberstrike-ai/internal/config"
@@ -34,7 +35,15 @@ func (e *APIError) Error() string {
}
// normalizeStreamingDelta normalizes a chunk that may be a cumulative or
// retransmitted fragment into a pure increment. Some compatibility gateways
// return cumulative content; naively appending it would duplicate text.
//
// Notes (this span was a diff-render garble interleaving the old and new
// implementation; reconstructed to the post-change version, which the
// TestNormalizeStreamingDelta_* cases and the JS mirror pin down):
//   - No "arbitrary suffix/prefix overlap" merging: a stream may legitimately
//     split at a repeated-character boundary ("194"+"43" → "19443").
//   - HasPrefix counts as a cumulative full text only when incoming is
//     strictly longer than current; otherwise a split producing a second
//     identical single rune (doubled chars, "44", "22", …) would be mistaken
//     for a whole-segment retransmit and swallowed.
//   - incoming == current is a full retransmit only when current is longer
//     than one rune; a repeated single rune must be appended.
//   - No "drop incoming when current ends with it": that would swallow the
//     increment in "1943"+"43" (19443 would render as 1943). A gateway that
//     re-sends a tail fragment should re-send the full cumulative string and
//     be deduplicated by the HasPrefix branch.
func normalizeStreamingDelta(current, incoming string) (next, delta string) {
	if incoming == "" {
		return current, ""
	}
	if current == "" {
		return incoming, incoming
	}
	if strings.HasPrefix(incoming, current) && len(incoming) > len(current) {
		return incoming, incoming[len(current):]
	}
	if incoming == current && utf8.RuneCountInString(current) > 1 {
		return current, ""
	}
	return current + incoming, incoming
}
+15
View File
@@ -3593,6 +3593,11 @@ header {
background: rgba(255, 112, 67, 0.12);
}
.timeline-item-user_interrupt_continue {
border-left-color: #d97706;
background: rgba(217, 119, 6, 0.08);
}
.timeline-item-header {
display: flex;
align-items: center;
@@ -3623,6 +3628,12 @@ header {
line-height: 1.6;
}
/* 流式增量阶段纯文本展示(避免半段 Markdown 反复解析) */
.timeline-item-content.timeline-stream-plain {
white-space: pre-wrap;
word-break: break-word;
}
.tool-details {
display: flex;
flex-direction: column;
@@ -18300,6 +18311,10 @@ button.chat-files-dropdown-item:hover:not(:disabled) {
transform: translateX(-50%) translateY(0);
}
.chat-files-toast.chat-toast--error {
background: #b91c1c;
}
/* 对话附件读取 / 文件管理上传 进度条 */
/* [hidden] 默认会被本类的 display:flex 覆盖,须显式隐藏否则空闲时仍露出灰条 */
.chat-upload-progress-row[hidden] {
+2 -1
View File
@@ -288,6 +288,7 @@
"error": "Error",
"streamNetworkErrorHint": "Connection lost ({{detail}}). A long task may still be running on the server; check running tasks at the top or refresh this conversation later.",
"taskCancelled": "Task cancelled",
"userInterruptContinueTitle": "⏸️ User interrupt & continue",
"unknownTool": "Unknown tool",
"einoAgentReplyTitle": "Sub-agent reply",
"einoStreamErrorTitle": "⚠️ Eino stream interrupted ({{agent}})",
@@ -396,7 +397,7 @@
"stopTask": "Stop task",
"interruptModalTitle": "Interrupt current step",
"interruptReasonLabel": "Interrupt note",
"interruptModalHint": "Same as MCP monitor \"Stop tool\": ends only the in-flight tool call; the conversation and this run continue. Optional note is merged into the tool result (bilingual USER INTERRUPT NOTE, not raw CLI). Leave empty for a plain stop. If no tool is running yet (model still thinking), wait for a tool call or use \"Stop completely\".",
"interruptModalHint": "When a tool is running: same as MCP monitor \"Stop tool\" only that call is stopped and the run continues; your note can be merged into the tool result (USER INTERRUPT NOTE). When no tool is running (model thinking/streaming only): \"Interrupt & continue\" still works — current output pauses, your note is merged into context and the run resumes automatically; the progress timeline shows a \"User interrupt & continue\" entry. Use this instead of a full stop when you only want to steer; use \"Stop completely\" to end the whole task.",
"interruptReasonPlaceholder": "e.g. Tool is too slow—skip and summarize…",
"interruptReasonRequired": "Please enter a short note so the model can continue accordingly.",
"interruptSubmitting": "Submitting...",
+2 -1
View File
@@ -277,6 +277,7 @@
"error": "错误",
"streamNetworkErrorHint": "连接已中断({{detail}})。长时间任务可能仍在后端执行,请查看顶部「运行中」任务或稍后刷新本对话。",
"taskCancelled": "任务已取消",
"userInterruptContinueTitle": "⏸️ 用户中断并继续",
"unknownTool": "未知工具",
"einoAgentReplyTitle": "子代理回复",
"einoStreamErrorTitle": "⚠️ Eino 流式中断({{agent}}",
@@ -385,7 +386,7 @@
"stopTask": "停止任务",
"interruptModalTitle": "中断当前步骤",
"interruptReasonLabel": "中断说明",
"interruptModalHint": "与 MCP 监控页「终止工具」一致仅结束当前这一次工具调用,整条对话与本轮推理会继续;工具返回中可附带说明(中英 USER INTERRUPT NOTE 块,与命令行原文区分)。留空则等同仅终止工具。若当前没有工具在执行(模型尚在思考),请等待工具开始或改用「彻底停止」。",
"interruptModalHint": "有工具在执行时:与 MCP 监控页「终止工具」一致仅结束当前这一次工具调用,本轮推理会继续;说明可写入工具返回(USER INTERRUPT NOTE)。无工具在执行(模型纯思考/流式输出):仍可「中断并继续」——会暂停当前输出,把你的说明合并进上下文并自动续跑;进度详情时间线会出现「用户中断并继续」条目。不需要整轮停止时请优先用本按钮;要结束整条任务请用「彻底停止」。",
"interruptReasonPlaceholder": "例如:工具耗时过长,请先跳过并总结当前结果…",
"interruptReasonRequired": "请填写中断说明,以便模型根据你的意图继续。",
"interruptSubmitting": "提交中...",
+40 -3
View File
@@ -26,6 +26,11 @@ const DRAFT_SAVE_DELAY = 500; // 500ms防抖延迟
// 对话文件上传相关(后端会拼接路径与内容发给大模型,前端不再重复发文件列表)
const MAX_CHAT_FILES = 10;
const CHAT_FILE_DEFAULT_PROMPT = '请根据上传的文件内容进行分析。';
/** Matches the first segment of handler.formatInterruptContinueUserMessage; such messages are hidden in the main chat and shown only in iteration details (user_interrupt_continue). */
const CHAT_INTERRUPT_CONTINUE_USER_PREFIX = '【用户补充 / 中断后继续】';
function isInterruptContinueInjectChatMessage(content) {
    if (typeof content !== 'string') return false;
    return content.trimStart().startsWith(CHAT_INTERRUPT_CONTINUE_USER_PREFIX);
}
/**
* 对话附件选文件后异步 POST /api/chat-uploads发送时只传 serverPath绝对路径请求体不再内联大文件内容
* @type {{ id: number, fileName: string, mimeType: string, serverPath: string|null, uploading: boolean, uploadPercent: number, uploadPromise: Promise<void>|null, uploadError: string|null }[]}
@@ -51,6 +56,28 @@ const HITL_MODE_REVIEW_EDIT = 'review_edit';
const HITL_MODE_OPTIONS = [HITL_MODE_OFF, HITL_MODE_APPROVAL, HITL_MODE_REVIEW_EDIT];
let hitlApplyFeedbackTimer = null;
/** Non-blocking toast notification (shares styling with chat-files-toast). Errors linger longer than info toasts. */
function showChatToast(message, type) {
    const text = message == null ? '' : String(message);
    if (!text) return;
    const isError = type === 'error';
    const toast = document.createElement('div');
    toast.className = 'chat-files-toast' + (isError ? ' chat-toast--error' : '');
    toast.setAttribute('role', 'status');
    toast.textContent = text;
    document.body.appendChild(toast);
    // Add the visible class one frame later so the CSS transition fires.
    requestAnimationFrame(function () {
        toast.classList.add('chat-files-toast-visible');
    });
    // Hide, then remove after the 300ms fade-out completes.
    setTimeout(function () {
        toast.classList.remove('chat-files-toast-visible');
        setTimeout(function () { toast.remove(); }, 300);
    }, isError ? 4500 : 2600);
}
if (typeof window !== 'undefined') {
    window.showChatToast = showChatToast;
}
function normalizeOrchestrationClient(s) {
const v = String(s || '').trim().toLowerCase().replace(/-/g, '_');
if (v === 'plan_execute' || v === 'planexecute' || v === 'pe') return 'plan_execute';
@@ -293,7 +320,7 @@ function showHitlApplyFeedback(text, isError, partial) {
}
if (!el) {
if (text && isError) {
alert(text);
showChatToast(text, 'error');
}
return;
}
@@ -2237,6 +2264,10 @@ function renderProcessDetails(messageId, processDetails) {
itemTitle = agPx + '🧑‍⚖️ HITL · ' + hitlMsg;
} else if (eventType === 'progress') {
itemTitle = typeof window.translateProgressMessage === 'function' ? window.translateProgressMessage(detail.message || '') : (detail.message || '');
} else if (eventType === 'user_interrupt_continue') {
itemTitle = typeof window.t === 'function'
? window.t('chat.userInterruptContinueTitle')
: '⏸️ 用户中断并继续';
}
addTimelineItem(timeline, eventType, {
@@ -2853,7 +2884,7 @@ async function loadConversation(conversationId) {
const conversation = await response.json();
if (!response.ok) {
alert('加载对话失败: ' + (conversation.error || '未知错误'));
showChatToast('加载对话失败: ' + (conversation.error || '未知错误'), 'error');
return;
}
if (seq !== loadConversationRequestSeq) {
@@ -2953,6 +2984,9 @@ async function loadConversation(conversationId) {
// 渲染单条消息的辅助函数
const renderOneMessage = (msg) => {
if (msg.role === 'user' && isInterruptContinueInjectChatMessage(msg.content)) {
return;
}
let displayContent = msg.content;
if (msg.role === 'assistant' && msg.content === '处理中...' && msg.processDetails && msg.processDetails.length > 0) {
for (let i = msg.processDetails.length - 1; i >= 0; i--) {
@@ -3061,7 +3095,7 @@ async function loadConversation(conversationId) {
}
} catch (error) {
console.error('加载对话失败:', error);
alert('加载对话失败: ' + error.message);
showChatToast('加载对话失败: ' + (error && error.message ? error.message : String(error)), 'error');
}
}
@@ -6617,6 +6651,9 @@ function formatConversationAsMarkdown(conversation, options = {}) {
}
messages.forEach((msg, index) => {
if (msg && msg.role === 'user' && isInterruptContinueInjectChatMessage(msg.content)) {
return;
}
const role = getConversationRoleLabel(msg && msg.role);
const timestamp = formatConversationDateForMarkdown(msg && msg.createdAt);
const content = msg && typeof msg.content === 'string' ? msg.content : '';
+143 -36
View File
@@ -273,6 +273,47 @@ function escapeHtmlLocal(text) {
return div.innerHTML;
}
/**
 * Mirrors internal/openai.normalizeStreamingDelta: normalizes a chunk that may
 * be a cumulative full text or an exact retransmit into a pure increment, so
 * the frontend buffer += chunk does not stack with backend-normalized deltas
 * and duplicate segments ("响应中显示了响应中显示了").
 * @returns {[string, string]} [nextBuffer, effectiveDelta]
 */
function normalizeStreamingDeltaJs(current, incoming) {
    const buf = current == null ? '' : String(current);
    const chunk = incoming == null ? '' : String(incoming);
    if (chunk === '') {
        return [buf, ''];
    }
    if (buf === '') {
        return [chunk, chunk];
    }
    // Cumulative full text: strictly longer and starting with the buffer.
    if (chunk.length > buf.length && chunk.startsWith(buf)) {
        return [chunk, chunk.slice(buf.length)];
    }
    // Exact retransmit — but a repeated single code point must be appended.
    if (chunk === buf && Array.from(buf).length > 1) {
        return [buf, ''];
    }
    return [buf + chunk, chunk];
}
if (typeof window !== 'undefined') {
    window.normalizeStreamingDeltaJs = normalizeStreamingDeltaJs;
}
/** Streaming-delta phase: render as plain text to avoid re-parsing half-written Markdown on every chunk. */
function setTimelineItemContentStreamPlain(contentEl, text) {
    if (contentEl) {
        contentEl.classList.add('timeline-stream-plain');
        contentEl.textContent = text == null ? '' : String(text);
    }
}
/** Stream end or non-streaming: render rich text (an already-sanitized HTML string). */
function setTimelineItemContentStreamRich(contentEl, html) {
    if (contentEl) {
        contentEl.classList.remove('timeline-stream-plain');
        contentEl.innerHTML = html;
    }
}
function formatAssistantMarkdownContent(text) {
const raw = text == null ? '' : String(text);
if (typeof marked !== 'undefined') {
@@ -743,19 +784,33 @@ function integrateProgressToMCPSection(progressId, assistantMessageId, mcpExecut
mcpSection.appendChild(buttonsContainer);
}
const hasExecBtns = buttonsContainer.querySelector('.mcp-detail-btn:not(.process-detail-btn)');
if (mcpIds.length > 0 && !hasExecBtns) {
mcpIds.forEach((execId, index) => {
let maxExecIndex = 0;
const existingExecBtns = buttonsContainer.querySelectorAll('.mcp-detail-btn:not(.process-detail-btn)');
existingExecBtns.forEach(function (btn) {
const n = parseInt(btn.dataset.execIndex, 10);
if (!isNaN(n) && n > maxExecIndex) maxExecIndex = n;
});
const seenExec = new Set();
existingExecBtns.forEach(function (btn) {
if (btn.dataset.execId) seenExec.add(String(btn.dataset.execId).trim());
});
let appendedAny = false;
if (mcpIds.length > 0) {
mcpIds.forEach(function (execId) {
const id = execId != null ? String(execId).trim() : '';
if (!id || seenExec.has(id)) return;
seenExec.add(id);
maxExecIndex += 1;
appendedAny = true;
const detailBtn = document.createElement('button');
detailBtn.className = 'mcp-detail-btn';
detailBtn.dataset.execId = execId;
detailBtn.dataset.execIndex = String(index + 1);
detailBtn.innerHTML = '<span>' + (typeof window.t === 'function' ? window.t('chat.callNumber', { n: index + 1 }) : '调用 #' + (index + 1)) + '</span>';
detailBtn.onclick = () => showMCPDetail(execId);
detailBtn.dataset.execId = id;
detailBtn.dataset.execIndex = String(maxExecIndex);
detailBtn.innerHTML = '<span>' + (typeof window.t === 'function' ? window.t('chat.callNumber', { n: maxExecIndex }) : '调用 #' + maxExecIndex) + '</span>';
detailBtn.onclick = function () { showMCPDetail(id); };
buttonsContainer.appendChild(detailBtn);
});
// 使用批量 API 一次性获取所有工具名称(消除 N 次单独请求)
if (typeof batchUpdateButtonToolNames === 'function') {
if (appendedAny && typeof batchUpdateButtonToolNames === 'function') {
batchUpdateButtonToolNames(buttonsContainer, mcpIds);
}
}
@@ -1038,6 +1093,24 @@ function resolveStreamTimeline(progressId) {
return timeline;
}
/** Deduplicating merge of MCP execution ids (order: prev first, then next), for one task spanning multiple Runs / SSE reconnects. Blank/null entries are dropped; ids are trimmed strings. */
function mergeMcpExecutionIDLists(prev, next) {
    const merged = [];
    const known = new Set();
    [prev, next].forEach(function (list) {
        if (!Array.isArray(list)) return;
        list.forEach(function (raw) {
            const id = raw != null ? String(raw).trim() : '';
            if (id === '' || known.has(id)) return;
            known.add(id);
            merged.push(id);
        });
    });
    return merged;
}
// 处理流式事件
function handleStreamEvent(event, progressElement, progressId,
getAssistantId, setAssistantId, getMcpIds, setMcpIds) {
@@ -1160,7 +1233,19 @@ function handleStreamEvent(event, progressElement, progressId,
state = new Map();
thinkingStreamStateByProgressId.set(progressId, state);
}
// 若已存在,重置 buffer
// 同一 streamId 重复 start:复用已有条目,避免孤儿卡片 + 新条目重复收 delta
if (state.has(streamId)) {
const ex = state.get(streamId);
ex.buffer = '';
const existingItem = document.getElementById(ex.itemId);
if (existingItem) {
const contentEl = existingItem.querySelector('.timeline-item-content');
if (contentEl) {
setTimelineItemContentStreamPlain(contentEl, '');
}
}
break;
}
const thinkBase = typeof window.t === 'function' ? window.t('chat.aiThinking') : 'AI思考';
const title = timelineAgentBracketPrefix(d) + '🤔 ' + thinkBase;
const itemId = addTimelineItem(timeline, 'thinking', {
@@ -1182,17 +1267,14 @@ function handleStreamEvent(event, progressElement, progressId,
const s = state.get(streamId);
const delta = event.message || '';
s.buffer += delta;
const merged = normalizeStreamingDeltaJs(s.buffer, delta);
s.buffer = merged[0];
const item = document.getElementById(s.itemId);
if (item) {
const contentEl = item.querySelector('.timeline-item-content');
if (contentEl) {
if (typeof formatMarkdown === 'function') {
contentEl.innerHTML = formatMarkdown(s.buffer);
} else {
contentEl.textContent = s.buffer;
}
setTimelineItemContentStreamPlain(contentEl, s.buffer);
}
}
break;
@@ -1210,11 +1292,10 @@ function handleStreamEvent(event, progressElement, progressId,
if (item) {
const contentEl = item.querySelector('.timeline-item-content');
if (contentEl) {
// contentEl.innerHTML 用于兼容 Markdown 展示
if (typeof formatMarkdown === 'function') {
contentEl.innerHTML = formatMarkdown(s.buffer);
setTimelineItemContentStreamRich(contentEl, formatMarkdown(s.buffer));
} else {
contentEl.textContent = s.buffer;
setTimelineItemContentStreamPlain(contentEl, s.buffer);
}
}
}
@@ -1271,6 +1352,19 @@ function handleStreamEvent(event, progressElement, progressId,
});
break;
case 'user_interrupt_continue': {
const d = event.data || {};
const titleBase = typeof window.t === 'function'
? window.t('chat.userInterruptContinueTitle')
: '⏸️ 用户中断并继续';
addTimelineItem(timeline, 'user_interrupt_continue', {
title: titleBase,
message: event.message || '',
data: d
});
break;
}
case 'eino_stream_error': {
const d = event.data || {};
const agent = d.einoAgent ? String(d.einoAgent) : '';
@@ -1456,6 +1550,18 @@ function handleStreamEvent(event, progressElement, progressId,
stateMap = new Map();
einoAgentReplyStreamStateByProgressId.set(progressId, stateMap);
}
if (stateMap.has(streamId)) {
const ex = stateMap.get(streamId);
ex.buffer = '';
const existingItem = document.getElementById(ex.itemId);
if (existingItem) {
let contentEl = existingItem.querySelector('.timeline-item-content');
if (contentEl) {
setTimelineItemContentStreamPlain(contentEl, '');
}
}
break;
}
const streamingLabel = typeof window.t === 'function' ? window.t('timeline.running') : '执行中...';
const replyTitleBase = typeof window.t === 'function' ? window.t('chat.einoAgentReplyTitle') : '子代理回复';
const itemId = addTimelineItem(timeline, 'eino_agent_reply', {
@@ -1477,7 +1583,8 @@ function handleStreamEvent(event, progressElement, progressId,
const stateMap = einoAgentReplyStreamStateByProgressId.get(progressId);
if (!stateMap || !stateMap.has(streamId)) break;
const s = stateMap.get(streamId);
s.buffer += delta;
const merged = normalizeStreamingDeltaJs(s.buffer, delta);
s.buffer = merged[0];
const item = document.getElementById(s.itemId);
if (item) {
let contentEl = item.querySelector('.timeline-item-content');
@@ -1490,11 +1597,7 @@ function handleStreamEvent(event, progressElement, progressId,
}
}
if (contentEl) {
if (typeof formatMarkdown === 'function') {
contentEl.innerHTML = formatMarkdown(s.buffer);
} else {
contentEl.textContent = s.buffer;
}
setTimelineItemContentStreamPlain(contentEl, s.buffer);
}
}
break;
@@ -1522,9 +1625,9 @@ function handleStreamEvent(event, progressElement, progressId,
item.appendChild(contentEl);
}
if (typeof formatMarkdown === 'function') {
contentEl.innerHTML = formatMarkdown(full);
setTimelineItemContentStreamRich(contentEl, formatMarkdown(full));
} else {
contentEl.textContent = full;
setTimelineItemContentStreamPlain(contentEl, full);
}
if (d.einoAgent != null && String(d.einoAgent).trim() !== '') {
item.dataset.einoAgent = String(d.einoAgent).trim();
@@ -1614,7 +1717,7 @@ function handleStreamEvent(event, progressElement, progressId,
const responseData = event.data || {};
const mcpIds = responseData.mcpExecutionIds || [];
setMcpIds(mcpIds);
setMcpIds(mergeMcpExecutionIDLists(typeof getMcpIds === 'function' ? (getMcpIds() || []) : [], mcpIds));
if (responseData.conversationId) {
// 如果用户已经开始了新对话(currentConversationId 为 null),且这个事件来自旧对话,则忽略
@@ -1665,7 +1768,8 @@ function handleStreamEvent(event, progressElement, progressId,
}
const deltaContent = event.message || '';
state.buffer += deltaContent;
const mergedResp = normalizeStreamingDeltaJs(state.buffer, deltaContent);
state.buffer = mergedResp[0];
// 更新时间线条目内容
if (state.itemId) {
@@ -1675,11 +1779,7 @@ function handleStreamEvent(event, progressElement, progressId,
if (contentEl) {
const meta = state.streamMeta || responseData;
const body = formatTimelineStreamBody(state.buffer, meta);
if (typeof formatMarkdown === 'function') {
contentEl.innerHTML = formatMarkdown(body);
} else {
contentEl.textContent = body;
}
setTimelineItemContentStreamPlain(contentEl, body);
}
}
}
@@ -1693,7 +1793,7 @@ function handleStreamEvent(event, progressElement, progressId,
// 先更新 mcp ids
const responseData = event.data || {};
const mcpIds = responseData.mcpExecutionIds || [];
const mcpIds = mergeMcpExecutionIDLists(typeof getMcpIds === 'function' ? (getMcpIds() || []) : [], responseData.mcpExecutionIds || []);
setMcpIds(mcpIds);
// 更新对话ID
@@ -2217,7 +2317,7 @@ async function attachRunningTaskEventStream(conversationId) {
if (line.indexOf('data: ') === 0) {
try {
const eventData = JSON.parse(line.slice(6));
handleStreamEvent(eventData, null, progressId, getAssistantIdFn, setAssistantIdFn, function () { return mcpIds; }, function (ids) { mcpIds = ids; });
handleStreamEvent(eventData, null, progressId, getAssistantIdFn, setAssistantIdFn, function () { return mcpIds; }, function (ids) { mcpIds = mergeMcpExecutionIDLists(mcpIds, ids || []); });
} catch (e) {
console.error('task-events parse', e);
}
@@ -2430,6 +2530,11 @@ function addTimelineItem(timeline, type, options) {
${escapeHtml(options.message || taskCancelledLabel)}
</div>
`;
} else if (type === 'user_interrupt_continue' && options.message) {
const streamBody = typeof formatTimelineStreamBody === 'function'
? formatTimelineStreamBody(options.message, options.data)
: options.message;
content += `<div class="timeline-item-content">${formatMarkdown(streamBody)}</div>`;
}
item.innerHTML = content;
@@ -3331,6 +3436,8 @@ function refreshProgressAndTimelineI18n() {
titleSpan.textContent = ap + '\uD83D\uDCAC ' + _t('chat.einoAgentReplyTitle');
} else if (type === 'cancelled') {
titleSpan.textContent = '\u26D4 ' + _t('chat.taskCancelled');
} else if (type === 'user_interrupt_continue') {
titleSpan.textContent = _t('chat.userInterruptContinueTitle');
} else if (type === 'progress' && item.dataset.progressMessage !== undefined) {
titleSpan.textContent = typeof window.translateProgressMessage === 'function' ? window.translateProgressMessage(item.dataset.progressMessage) : item.dataset.progressMessage;
}
+23 -3
View File
@@ -2898,7 +2898,10 @@ function runWebshellAiSend(conn, inputEl, sendBtn, messagesContainer) {
} else if (_et === 'response_delta') {
var deltaText = (_em != null && _em !== '') ? String(_em) : '';
if (deltaText) {
streamingTarget += deltaText;
var normR = (typeof window.normalizeStreamingDeltaJs === 'function')
? window.normalizeStreamingDeltaJs(streamingTarget, deltaText)
: [streamingTarget + deltaText, deltaText];
streamingTarget = normR[0];
webshellStreamingTypingId += 1;
streamingTypingId = webshellStreamingTypingId;
runWebshellAiStreamingTyping(assistantDiv, streamingTarget, streamingTypingId, messagesContainer);
@@ -2952,6 +2955,11 @@ function runWebshellAiSend(conn, inputEl, sendBtn, messagesContainer) {
// ─── Thinking (non-stream + stream) ───
} else if (_et === 'thinking_stream_start' && _ed.streamId) {
if (wsThinkingStreams.has(_ed.streamId)) {
var tsExist = wsThinkingStreams.get(_ed.streamId);
tsExist.buf = '';
if (tsExist.body) tsExist.body.textContent = '';
} else {
var thinkSLabel = wsTOr('chat.aiThinking', 'AI 思考');
var thinkSItem = document.createElement('div');
thinkSItem.className = 'webshell-ai-timeline-item webshell-ai-timeline-thinking';
@@ -2962,11 +2970,14 @@ function runWebshellAiSend(conn, inputEl, sendBtn, messagesContainer) {
timelineContainer.appendChild(thinkSItem);
timelineContainer.classList.add('has-items');
wsThinkingStreams.set(_ed.streamId, { el: thinkSItem, body: thinkSPre, buf: '' });
}
if (!streamingTarget) assistantDiv.textContent = '…';
} else if (_et === 'thinking_stream_delta' && _ed.streamId) {
var tsD = wsThinkingStreams.get(_ed.streamId);
if (tsD) {
tsD.buf += (_em || '');
var normT = (typeof window.normalizeStreamingDeltaJs === 'function')
? window.normalizeStreamingDeltaJs(tsD.buf, _em || '') : [tsD.buf + (_em || ''), _em || ''];
tsD.buf = normT[0];
if (typeof formatMarkdown === 'function') {
tsD.body.innerHTML = formatMarkdown(tsD.buf);
} else {
@@ -3076,6 +3087,12 @@ function runWebshellAiSend(conn, inputEl, sendBtn, messagesContainer) {
// ─── Eino sub-agent reply streaming ───
} else if (_et === 'eino_agent_reply_stream_start' && _ed.streamId) {
if (einoSubReplyStreams.has(_ed.streamId)) {
var stExist = einoSubReplyStreams.get(_ed.streamId);
stExist.buf = '';
var preExist = stExist.el && stExist.el.querySelector('.webshell-eino-reply-stream-body');
if (preExist) preExist.textContent = '';
} else {
var repTS = wsTOr('chat.einoAgentReplyTitle', '子代理回复');
var runTS = wsTOr('timeline.running', '执行中...');
var itemS = document.createElement('div');
@@ -3084,11 +3101,14 @@ function runWebshellAiSend(conn, inputEl, sendBtn, messagesContainer) {
timelineContainer.appendChild(itemS);
timelineContainer.classList.add('has-items');
einoSubReplyStreams.set(_ed.streamId, { el: itemS, buf: '' });
}
if (!streamingTarget) assistantDiv.textContent = '…';
} else if (_et === 'eino_agent_reply_stream_delta' && _ed.streamId) {
var stD = einoSubReplyStreams.get(_ed.streamId);
if (stD) {
stD.buf += (_em || '');
var normS = (typeof window.normalizeStreamingDeltaJs === 'function')
? window.normalizeStreamingDeltaJs(stD.buf, _em || '') : [stD.buf + (_em || ''), _em || ''];
stD.buf = normS[0];
var preD = stD.el.querySelector('.webshell-eino-reply-stream-body');
if (!preD) {
preD = document.createElement('pre');