mirror of
https://github.com/Ed1s0nZ/CyberStrikeAI.git
synced 2026-05-17 13:43:31 +02:00
Compare commits
5 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| f196992b91 | |||
| f64b7653ac | |||
| 2a9b18ba7b | |||
| 6f70d7b851 | |||
| 157f1c9754 |
+1
-1
@@ -10,7 +10,7 @@
|
||||
# ============================================
|
||||
|
||||
# 前端显示的版本号(可选,不填则显示默认版本)
|
||||
version: "v1.4.6"
|
||||
version: "v1.4.7"
|
||||
# 服务器配置
|
||||
server:
|
||||
host: 0.0.0.0 # 监听地址,0.0.0.0 表示监听所有网络接口
|
||||
|
||||
@@ -92,6 +92,19 @@ func (m *mcpBridgeTool) Info(ctx context.Context) (*schema.ToolInfo, error) {
|
||||
|
||||
func (m *mcpBridgeTool) InvokableRun(ctx context.Context, argumentsInJSON string, opts ...tool.Option) (string, error) {
|
||||
_ = opts
|
||||
return runMCPToolInvocation(ctx, m.agent, m.holder, m.name, argumentsInJSON, m.record, m.chunk)
|
||||
}
|
||||
|
||||
// runMCPToolInvocation 与 mcpBridgeTool.InvokableRun 共用。
|
||||
func runMCPToolInvocation(
|
||||
ctx context.Context,
|
||||
ag *agent.Agent,
|
||||
holder *ConversationHolder,
|
||||
toolName string,
|
||||
argumentsInJSON string,
|
||||
record ExecutionRecorder,
|
||||
chunk func(toolName, toolCallID, chunk string),
|
||||
) (string, error) {
|
||||
var args map[string]interface{}
|
||||
if argumentsInJSON != "" && argumentsInJSON != "null" {
|
||||
if err := json.Unmarshal([]byte(argumentsInJSON), &args); err != nil {
|
||||
@@ -102,44 +115,62 @@ func (m *mcpBridgeTool) InvokableRun(ctx context.Context, argumentsInJSON string
|
||||
args = map[string]interface{}{}
|
||||
}
|
||||
|
||||
// Stream tool output (stdout/stderr) to upper layer via security.Executor's callback.
|
||||
// This enables multi-agent mode to show execution progress on the frontend.
|
||||
if m.chunk != nil {
|
||||
if chunk != nil {
|
||||
toolCallID := compose.GetToolCallID(ctx)
|
||||
if toolCallID != "" {
|
||||
if existing, ok := ctx.Value(security.ToolOutputCallbackCtxKey).(security.ToolOutputCallback); ok && existing != nil {
|
||||
// Chain existing callback (if any) + our progress forwarder.
|
||||
ctx = context.WithValue(ctx, security.ToolOutputCallbackCtxKey, security.ToolOutputCallback(func(c string) {
|
||||
existing(c)
|
||||
if strings.TrimSpace(c) == "" {
|
||||
return
|
||||
}
|
||||
m.chunk(m.name, toolCallID, c)
|
||||
chunk(toolName, toolCallID, c)
|
||||
}))
|
||||
} else {
|
||||
ctx = context.WithValue(ctx, security.ToolOutputCallbackCtxKey, security.ToolOutputCallback(func(c string) {
|
||||
if strings.TrimSpace(c) == "" {
|
||||
return
|
||||
}
|
||||
m.chunk(m.name, toolCallID, c)
|
||||
chunk(toolName, toolCallID, c)
|
||||
}))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
conv := m.holder.Get()
|
||||
res, err := m.agent.ExecuteMCPToolForConversation(ctx, conv, m.name, args)
|
||||
res, err := ag.ExecuteMCPToolForConversation(ctx, holder.Get(), toolName, args)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if res == nil {
|
||||
return "", nil
|
||||
}
|
||||
if res.ExecutionID != "" && m.record != nil {
|
||||
m.record(res.ExecutionID)
|
||||
if res.ExecutionID != "" && record != nil {
|
||||
record(res.ExecutionID)
|
||||
}
|
||||
if res.IsError {
|
||||
return ToolErrorPrefix + res.Result, nil
|
||||
}
|
||||
return res.Result, nil
|
||||
}
|
||||
|
||||
// UnknownToolReminderHandler 供 compose.ToolsNodeConfig.UnknownToolsHandler 使用:
|
||||
// 模型请求了未注册的工具名时,仅返回说明性文本,error 恒为 nil,以便 ReAct 继续迭代而不中断图执行。
|
||||
// 不进行名称猜测或映射,避免误执行。
|
||||
func UnknownToolReminderHandler() func(ctx context.Context, name, input string) (string, error) {
|
||||
return func(ctx context.Context, name, input string) (string, error) {
|
||||
_ = ctx
|
||||
_ = input
|
||||
return unknownToolReminderText(strings.TrimSpace(name)), nil
|
||||
}
|
||||
}
|
||||
|
||||
func unknownToolReminderText(requested string) string {
|
||||
if requested == "" {
|
||||
requested = "(empty)"
|
||||
}
|
||||
return fmt.Sprintf(`The tool name %q is not registered for this agent.
|
||||
|
||||
Please retry using only names that appear in the tool definitions for this turn (exact match, case-sensitive). Do not invent or rename tools; adjust your plan and continue.
|
||||
|
||||
(工具 %q 未注册:请仅使用本回合上下文中给出的工具名称,须完全一致;请勿自行改写或猜测名称,并继续后续步骤。)`, requested, requested)
|
||||
}
|
||||
|
||||
@@ -0,0 +1,16 @@
|
||||
package einomcp
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestUnknownToolReminderText(t *testing.T) {
|
||||
s := unknownToolReminderText("bad_tool")
|
||||
if !strings.Contains(s, "bad_tool") {
|
||||
t.Fatalf("expected requested name in message: %s", s)
|
||||
}
|
||||
if strings.Contains(s, "Tools currently available") {
|
||||
t.Fatal("unified message must not list tool names")
|
||||
}
|
||||
}
|
||||
+147
-8
@@ -79,8 +79,8 @@ type AgentHandler struct {
|
||||
knowledgeManager interface { // 知识库管理器接口
|
||||
LogRetrieval(conversationID, messageID, query, riskType string, retrievedItems []string) error
|
||||
}
|
||||
skillsManager *skills.Manager // Skills管理器
|
||||
agentsMarkdownDir string // 多代理:Markdown 子 Agent 目录(绝对路径,空则不从磁盘合并)
|
||||
skillsManager *skills.Manager // Skills管理器
|
||||
agentsMarkdownDir string // 多代理:Markdown 子 Agent 目录(绝对路径,空则不从磁盘合并)
|
||||
}
|
||||
|
||||
// NewAgentHandler 创建新的Agent处理器
|
||||
@@ -122,8 +122,8 @@ func (h *AgentHandler) SetAgentsMarkdownDir(absDir string) {
|
||||
|
||||
// ChatAttachment 聊天附件(用户上传的文件)
|
||||
type ChatAttachment struct {
|
||||
FileName string `json:"fileName"` // 展示用文件名
|
||||
Content string `json:"content,omitempty"` // 文本或 base64;若已预先上传到服务器可留空
|
||||
FileName string `json:"fileName"` // 展示用文件名
|
||||
Content string `json:"content,omitempty"` // 文本或 base64;若已预先上传到服务器可留空
|
||||
MimeType string `json:"mimeType,omitempty"`
|
||||
ServerPath string `json:"serverPath,omitempty"` // 已保存在 chat_uploads 下的绝对路径(由 POST /api/chat-uploads 返回)
|
||||
}
|
||||
@@ -714,6 +714,73 @@ func (h *AgentHandler) createProgressCallback(conversationID, assistantMessageID
|
||||
// 用于保存tool_call事件中的参数,以便在tool_result时使用
|
||||
toolCallCache := make(map[string]map[string]interface{}) // toolCallId -> arguments
|
||||
|
||||
// thinking_stream_*:不逐条落库,按 streamId 聚合,在后续关键事件前补一条可持久化的 thinking
|
||||
type thinkingBuf struct {
|
||||
b strings.Builder
|
||||
meta map[string]interface{}
|
||||
}
|
||||
thinkingStreams := make(map[string]*thinkingBuf) // streamId -> buf
|
||||
flushedThinking := make(map[string]bool) // streamId -> flushed
|
||||
|
||||
// response_start + response_delta:前端时间线显示为「📝 规划中」(monitor.js),不落逐条 delta;
|
||||
// 聚合为一条 planning 写入 process_details,刷新后与线上一致。
|
||||
var respPlan struct {
|
||||
meta map[string]interface{}
|
||||
b strings.Builder
|
||||
}
|
||||
flushResponsePlan := func() {
|
||||
if assistantMessageID == "" {
|
||||
return
|
||||
}
|
||||
content := strings.TrimSpace(respPlan.b.String())
|
||||
if content == "" {
|
||||
respPlan.meta = nil
|
||||
respPlan.b.Reset()
|
||||
return
|
||||
}
|
||||
data := map[string]interface{}{
|
||||
"source": "response_stream",
|
||||
}
|
||||
for k, v := range respPlan.meta {
|
||||
data[k] = v
|
||||
}
|
||||
if err := h.db.AddProcessDetail(assistantMessageID, conversationID, "planning", content, data); err != nil {
|
||||
h.logger.Warn("保存过程详情失败", zap.Error(err), zap.String("eventType", "planning"))
|
||||
}
|
||||
respPlan.meta = nil
|
||||
respPlan.b.Reset()
|
||||
}
|
||||
|
||||
flushThinkingStreams := func() {
|
||||
if assistantMessageID == "" {
|
||||
return
|
||||
}
|
||||
for sid, tb := range thinkingStreams {
|
||||
if sid == "" || flushedThinking[sid] || tb == nil {
|
||||
continue
|
||||
}
|
||||
content := strings.TrimSpace(tb.b.String())
|
||||
if content == "" {
|
||||
flushedThinking[sid] = true
|
||||
continue
|
||||
}
|
||||
data := map[string]interface{}{
|
||||
"streamId": sid,
|
||||
}
|
||||
for k, v := range tb.meta {
|
||||
// 避免覆盖 streamId
|
||||
if k == "streamId" {
|
||||
continue
|
||||
}
|
||||
data[k] = v
|
||||
}
|
||||
if err := h.db.AddProcessDetail(assistantMessageID, conversationID, "thinking", content, data); err != nil {
|
||||
h.logger.Warn("保存过程详情失败", zap.Error(err), zap.String("eventType", "thinking"))
|
||||
}
|
||||
flushedThinking[sid] = true
|
||||
}
|
||||
}
|
||||
|
||||
return func(eventType, message string, data interface{}) {
|
||||
// 如果提供了sendEventFunc,发送流式事件
|
||||
if sendEventFunc != nil {
|
||||
@@ -846,25 +913,97 @@ func (h *AgentHandler) createProgressCallback(conversationID, assistantMessageID
|
||||
|
||||
// 子代理回复流式增量不落库;结束时合并为一条 eino_agent_reply
|
||||
if assistantMessageID != "" && eventType == "eino_agent_reply_stream_end" {
|
||||
flushResponsePlan()
|
||||
// 确保思考流在子代理回复前能持久化(刷新后可读)
|
||||
flushThinkingStreams()
|
||||
if err := h.db.AddProcessDetail(assistantMessageID, conversationID, "eino_agent_reply", message, data); err != nil {
|
||||
h.logger.Warn("保存过程详情失败", zap.Error(err), zap.String("eventType", eventType))
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// 保存过程详情到数据库(排除response/done事件,它们会在后面单独处理)
|
||||
// 另外:response_start/response_delta 是模型流式增量,保存会导致过程详情膨胀,因此不落库。
|
||||
// 多代理主代理「规划中」:response_start / response_delta 仅用于 SSE,聚合落一条 planning
|
||||
if eventType == "response_start" {
|
||||
flushResponsePlan()
|
||||
respPlan.meta = nil
|
||||
if dataMap, ok := data.(map[string]interface{}); ok {
|
||||
respPlan.meta = make(map[string]interface{}, len(dataMap))
|
||||
for k, v := range dataMap {
|
||||
respPlan.meta[k] = v
|
||||
}
|
||||
}
|
||||
respPlan.b.Reset()
|
||||
return
|
||||
}
|
||||
if eventType == "response_delta" {
|
||||
respPlan.b.WriteString(message)
|
||||
if dataMap, ok := data.(map[string]interface{}); ok && respPlan.meta == nil {
|
||||
respPlan.meta = make(map[string]interface{}, len(dataMap))
|
||||
for k, v := range dataMap {
|
||||
respPlan.meta[k] = v
|
||||
}
|
||||
} else if dataMap, ok := data.(map[string]interface{}); ok {
|
||||
for k, v := range dataMap {
|
||||
respPlan.meta[k] = v
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
if eventType == "response" {
|
||||
flushResponsePlan()
|
||||
return
|
||||
}
|
||||
|
||||
// 聚合 thinking_stream_*(ReasoningContent),不逐条落库
|
||||
if eventType == "thinking_stream_start" {
|
||||
if dataMap, ok := data.(map[string]interface{}); ok {
|
||||
if sid, ok2 := dataMap["streamId"].(string); ok2 && sid != "" {
|
||||
tb := thinkingStreams[sid]
|
||||
if tb == nil {
|
||||
tb = &thinkingBuf{meta: map[string]interface{}{}}
|
||||
thinkingStreams[sid] = tb
|
||||
}
|
||||
// 记录元信息(source/einoAgent/einoRole/iteration 等)
|
||||
for k, v := range dataMap {
|
||||
tb.meta[k] = v
|
||||
}
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
if eventType == "thinking_stream_delta" {
|
||||
if dataMap, ok := data.(map[string]interface{}); ok {
|
||||
if sid, ok2 := dataMap["streamId"].(string); ok2 && sid != "" {
|
||||
tb := thinkingStreams[sid]
|
||||
if tb == nil {
|
||||
tb = &thinkingBuf{meta: map[string]interface{}{}}
|
||||
thinkingStreams[sid] = tb
|
||||
}
|
||||
// delta 片段直接拼接;message 本身就是 reasoning content
|
||||
tb.b.WriteString(message)
|
||||
// 有时 delta 先到 start 未到,补充元信息
|
||||
for k, v := range dataMap {
|
||||
tb.meta[k] = v
|
||||
}
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// 保存过程详情到数据库(排除 response/done;response 正文已在 messages 表)
|
||||
// response_start/response_delta 已聚合为 planning,不落逐条。
|
||||
if assistantMessageID != "" &&
|
||||
eventType != "response" &&
|
||||
eventType != "done" &&
|
||||
eventType != "response_start" &&
|
||||
eventType != "response_delta" &&
|
||||
eventType != "tool_result_delta" &&
|
||||
eventType != "thinking_stream_start" &&
|
||||
eventType != "thinking_stream_delta" &&
|
||||
eventType != "eino_agent_reply_stream_start" &&
|
||||
eventType != "eino_agent_reply_stream_delta" &&
|
||||
eventType != "eino_agent_reply_stream_end" {
|
||||
// 在关键过程事件落库前,先把「规划中」与 thinking_stream 落库
|
||||
flushResponsePlan()
|
||||
flushThinkingStreams()
|
||||
if err := h.db.AddProcessDetail(assistantMessageID, conversationID, eventType, message, data); err != nil {
|
||||
h.logger.Warn("保存过程详情失败", zap.Error(err), zap.String("eventType", eventType))
|
||||
}
|
||||
|
||||
+271
-229
@@ -101,8 +101,8 @@ func RunDeepAgent(
|
||||
return
|
||||
}
|
||||
progress("tool_result_delta", chunk, map[string]interface{}{
|
||||
"toolName": toolName,
|
||||
"toolCallId": toolCallID,
|
||||
"toolName": toolName,
|
||||
"toolCallId": toolCallID,
|
||||
// index/total/iteration are optional for UI; we don't know them in this bridge.
|
||||
"index": 0,
|
||||
"total": 0,
|
||||
@@ -221,7 +221,8 @@ func RunDeepAgent(
|
||||
Model: subModel,
|
||||
ToolsConfig: adk.ToolsConfig{
|
||||
ToolsNodeConfig: compose.ToolsNodeConfig{
|
||||
Tools: subTools,
|
||||
Tools: subTools,
|
||||
UnknownToolsHandler: einomcp.UnknownToolReminderHandler(),
|
||||
},
|
||||
EmitInternalEvents: true,
|
||||
},
|
||||
@@ -275,7 +276,8 @@ func RunDeepAgent(
|
||||
},
|
||||
ToolsConfig: adk.ToolsConfig{
|
||||
ToolsNodeConfig: compose.ToolsNodeConfig{
|
||||
Tools: mainTools,
|
||||
Tools: mainTools,
|
||||
UnknownToolsHandler: einomcp.UnknownToolReminderHandler(),
|
||||
},
|
||||
EmitInternalEvents: true,
|
||||
},
|
||||
@@ -284,14 +286,8 @@ func RunDeepAgent(
|
||||
return nil, fmt.Errorf("deep.New: %w", err)
|
||||
}
|
||||
|
||||
msgs := historyToMessages(history)
|
||||
msgs = append(msgs, schema.UserMessage(userMessage))
|
||||
|
||||
runner := adk.NewRunner(ctx, adk.RunnerConfig{
|
||||
Agent: da,
|
||||
EnableStreaming: true,
|
||||
})
|
||||
iter := runner.Run(ctx, msgs)
|
||||
baseMsgs := historyToMessages(history)
|
||||
baseMsgs = append(baseMsgs, schema.UserMessage(userMessage))
|
||||
|
||||
streamsMainAssistant := func(agent string) bool {
|
||||
return agent == "" || agent == orchestratorName
|
||||
@@ -303,255 +299,301 @@ func RunDeepAgent(
|
||||
return "sub"
|
||||
}
|
||||
|
||||
// 仅保留主代理最后一次 assistant 输出,避免把多轮中间回复拼接到最终答案。
|
||||
var lastRunMsgs []adk.Message
|
||||
var lastAssistant string
|
||||
var reasoningStreamSeq int64
|
||||
var einoSubReplyStreamSeq int64
|
||||
toolEmitSeen := make(map[string]struct{})
|
||||
// 主代理「外层轮次」:首次进入编排器为第 1 轮,每从子代理回到编排器 +1。
|
||||
// 子代理「步数」:该子代理每次发起一批工具调用前 +1(近似 ReAct 步)。
|
||||
var einoMainRound int
|
||||
var einoLastAgent string
|
||||
subAgentToolStep := make(map[string]int)
|
||||
for {
|
||||
ev, ok := iter.Next()
|
||||
if !ok {
|
||||
break
|
||||
|
||||
attemptLoop:
|
||||
for attempt := 0; attempt < maxToolCallArgumentsJSONAttempts; attempt++ {
|
||||
msgs := make([]adk.Message, 0, len(baseMsgs)+attempt)
|
||||
msgs = append(msgs, baseMsgs...)
|
||||
for i := 0; i < attempt; i++ {
|
||||
msgs = append(msgs, toolCallArgumentsJSONRetryHint())
|
||||
}
|
||||
if ev == nil {
|
||||
continue
|
||||
}
|
||||
if ev.Err != nil {
|
||||
|
||||
if attempt > 0 {
|
||||
mcpIDsMu.Lock()
|
||||
mcpIDs = mcpIDs[:0]
|
||||
mcpIDsMu.Unlock()
|
||||
if logger != nil {
|
||||
logger.Warn("eino DeepAgent: 工具参数 JSON 被接口拒绝,追加提示后重试",
|
||||
zap.Int("attempt", attempt),
|
||||
zap.Int("maxAttempts", maxToolCallArgumentsJSONAttempts))
|
||||
}
|
||||
if progress != nil {
|
||||
progress("error", ev.Err.Error(), map[string]interface{}{
|
||||
// 使用专用事件类型 eino_recovery,便于前端时间线展示(progress 仅改标题,不进时间线)
|
||||
progress("eino_recovery", toolCallArgumentsJSONRecoveryTimelineMessage(attempt), map[string]interface{}{
|
||||
"conversationId": conversationID,
|
||||
"source": "eino",
|
||||
"source": "eino",
|
||||
"einoRetry": attempt,
|
||||
"runIndex": attempt + 1, // 第几轮完整运行(1 为首次,重试后递增)
|
||||
"maxRuns": maxToolCallArgumentsJSONAttempts,
|
||||
"reason": "invalid_tool_arguments_json",
|
||||
})
|
||||
}
|
||||
return nil, ev.Err
|
||||
}
|
||||
if ev.AgentName != "" && progress != nil {
|
||||
if streamsMainAssistant(ev.AgentName) {
|
||||
if einoMainRound == 0 {
|
||||
einoMainRound = 1
|
||||
progress("iteration", "", map[string]interface{}{
|
||||
"iteration": 1,
|
||||
"einoScope": "main",
|
||||
"einoRole": "orchestrator",
|
||||
"einoAgent": orchestratorName,
|
||||
"conversationId": conversationID,
|
||||
"source": "eino",
|
||||
})
|
||||
} else if einoLastAgent != "" && !streamsMainAssistant(einoLastAgent) {
|
||||
einoMainRound++
|
||||
progress("iteration", "", map[string]interface{}{
|
||||
"iteration": einoMainRound,
|
||||
"einoScope": "main",
|
||||
"einoRole": "orchestrator",
|
||||
"einoAgent": orchestratorName,
|
||||
"conversationId": conversationID,
|
||||
"source": "eino",
|
||||
})
|
||||
}
|
||||
}
|
||||
einoLastAgent = ev.AgentName
|
||||
progress("progress", fmt.Sprintf("[Eino] %s", ev.AgentName), map[string]interface{}{
|
||||
"conversationId": conversationID,
|
||||
"einoAgent": ev.AgentName,
|
||||
"einoRole": einoRoleTag(ev.AgentName),
|
||||
})
|
||||
}
|
||||
if ev.Output == nil || ev.Output.MessageOutput == nil {
|
||||
continue
|
||||
}
|
||||
mv := ev.Output.MessageOutput
|
||||
|
||||
if mv.IsStreaming && mv.MessageStream != nil {
|
||||
streamHeaderSent := false
|
||||
var reasoningStreamID string
|
||||
var toolStreamFragments []schema.ToolCall
|
||||
var subAssistantBuf strings.Builder
|
||||
var subReplyStreamID string
|
||||
var mainAssistantBuf strings.Builder
|
||||
for {
|
||||
chunk, rerr := mv.MessageStream.Recv()
|
||||
if rerr != nil {
|
||||
if errors.Is(rerr, io.EOF) {
|
||||
break
|
||||
}
|
||||
// 仅保留主代理最后一次 assistant 输出;每轮重试重置,避免拼接失败轮次的片段。
|
||||
lastAssistant = ""
|
||||
var reasoningStreamSeq int64
|
||||
var einoSubReplyStreamSeq int64
|
||||
toolEmitSeen := make(map[string]struct{})
|
||||
var einoMainRound int
|
||||
var einoLastAgent string
|
||||
subAgentToolStep := make(map[string]int)
|
||||
|
||||
runner := adk.NewRunner(ctx, adk.RunnerConfig{
|
||||
Agent: da,
|
||||
EnableStreaming: true,
|
||||
})
|
||||
iter := runner.Run(ctx, msgs)
|
||||
|
||||
for {
|
||||
ev, ok := iter.Next()
|
||||
if !ok {
|
||||
lastRunMsgs = msgs
|
||||
break attemptLoop
|
||||
}
|
||||
if ev == nil {
|
||||
continue
|
||||
}
|
||||
if ev.Err != nil {
|
||||
if isRecoverableToolCallArgumentsJSONError(ev.Err) && attempt+1 < maxToolCallArgumentsJSONAttempts {
|
||||
if logger != nil {
|
||||
logger.Warn("eino stream recv", zap.Error(rerr))
|
||||
logger.Warn("eino: recoverable tool-call JSON error from model/API", zap.Error(ev.Err), zap.Int("attempt", attempt))
|
||||
}
|
||||
break
|
||||
continue attemptLoop
|
||||
}
|
||||
if chunk == nil {
|
||||
continue
|
||||
}
|
||||
if progress != nil && strings.TrimSpace(chunk.ReasoningContent) != "" {
|
||||
if reasoningStreamID == "" {
|
||||
reasoningStreamID = fmt.Sprintf("eino-reasoning-%s-%d", conversationID, atomic.AddInt64(&reasoningStreamSeq, 1))
|
||||
progress("thinking_stream_start", " ", map[string]interface{}{
|
||||
"streamId": reasoningStreamID,
|
||||
"source": "eino",
|
||||
"einoAgent": ev.AgentName,
|
||||
"einoRole": einoRoleTag(ev.AgentName),
|
||||
})
|
||||
}
|
||||
progress("thinking_stream_delta", chunk.ReasoningContent, map[string]interface{}{
|
||||
"streamId": reasoningStreamID,
|
||||
if progress != nil {
|
||||
progress("error", ev.Err.Error(), map[string]interface{}{
|
||||
"conversationId": conversationID,
|
||||
"source": "eino",
|
||||
})
|
||||
}
|
||||
if chunk.Content != "" {
|
||||
if progress != nil && streamsMainAssistant(ev.AgentName) {
|
||||
if !streamHeaderSent {
|
||||
progress("response_start", "", map[string]interface{}{
|
||||
"conversationId": conversationID,
|
||||
"mcpExecutionIds": snapshotMCPIDs(),
|
||||
"messageGeneratedBy": "eino:" + ev.AgentName,
|
||||
"einoRole": "orchestrator",
|
||||
})
|
||||
streamHeaderSent = true
|
||||
}
|
||||
progress("response_delta", chunk.Content, map[string]interface{}{
|
||||
"conversationId": conversationID,
|
||||
"mcpExecutionIds": snapshotMCPIDs(),
|
||||
"einoRole": "orchestrator",
|
||||
})
|
||||
mainAssistantBuf.WriteString(chunk.Content)
|
||||
} else if !streamsMainAssistant(ev.AgentName) {
|
||||
if progress != nil {
|
||||
if subReplyStreamID == "" {
|
||||
subReplyStreamID = fmt.Sprintf("eino-sub-reply-%s-%d", conversationID, atomic.AddInt64(&einoSubReplyStreamSeq, 1))
|
||||
progress("eino_agent_reply_stream_start", "", map[string]interface{}{
|
||||
"streamId": subReplyStreamID,
|
||||
"einoAgent": ev.AgentName,
|
||||
"einoRole": "sub",
|
||||
"conversationId": conversationID,
|
||||
"source": "eino",
|
||||
})
|
||||
}
|
||||
progress("eino_agent_reply_stream_delta", chunk.Content, map[string]interface{}{
|
||||
"streamId": subReplyStreamID,
|
||||
"conversationId": conversationID,
|
||||
})
|
||||
}
|
||||
subAssistantBuf.WriteString(chunk.Content)
|
||||
}
|
||||
}
|
||||
// 收集流式 tool_calls 全部分片;arguments 在最后一帧常为 "",需按 index/id 合并后才能展示 subagent_type/description。
|
||||
if len(chunk.ToolCalls) > 0 {
|
||||
toolStreamFragments = append(toolStreamFragments, chunk.ToolCalls...)
|
||||
}
|
||||
return nil, ev.Err
|
||||
}
|
||||
if streamsMainAssistant(ev.AgentName) {
|
||||
if s := strings.TrimSpace(mainAssistantBuf.String()); s != "" {
|
||||
lastAssistant = s
|
||||
}
|
||||
}
|
||||
if subAssistantBuf.Len() > 0 && progress != nil {
|
||||
if s := strings.TrimSpace(subAssistantBuf.String()); s != "" {
|
||||
if subReplyStreamID != "" {
|
||||
progress("eino_agent_reply_stream_end", s, map[string]interface{}{
|
||||
"streamId": subReplyStreamID,
|
||||
"einoAgent": ev.AgentName,
|
||||
"einoRole": "sub",
|
||||
"conversationId": conversationID,
|
||||
"source": "eino",
|
||||
})
|
||||
} else {
|
||||
progress("eino_agent_reply", s, map[string]interface{}{
|
||||
if ev.AgentName != "" && progress != nil {
|
||||
if streamsMainAssistant(ev.AgentName) {
|
||||
if einoMainRound == 0 {
|
||||
einoMainRound = 1
|
||||
progress("iteration", "", map[string]interface{}{
|
||||
"iteration": 1,
|
||||
"einoScope": "main",
|
||||
"einoRole": "orchestrator",
|
||||
"einoAgent": orchestratorName,
|
||||
"conversationId": conversationID,
|
||||
"einoAgent": ev.AgentName,
|
||||
"einoRole": "sub",
|
||||
"source": "eino",
|
||||
"source": "eino",
|
||||
})
|
||||
} else if einoLastAgent != "" && !streamsMainAssistant(einoLastAgent) {
|
||||
einoMainRound++
|
||||
progress("iteration", "", map[string]interface{}{
|
||||
"iteration": einoMainRound,
|
||||
"einoScope": "main",
|
||||
"einoRole": "orchestrator",
|
||||
"einoAgent": orchestratorName,
|
||||
"conversationId": conversationID,
|
||||
"source": "eino",
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
var lastToolChunk *schema.Message
|
||||
if merged := mergeStreamingToolCallFragments(toolStreamFragments); len(merged) > 0 {
|
||||
lastToolChunk = &schema.Message{ToolCalls: merged}
|
||||
}
|
||||
tryEmitToolCallsOnce(lastToolChunk, ev.AgentName, orchestratorName, conversationID, progress, toolEmitSeen, subAgentToolStep)
|
||||
continue
|
||||
}
|
||||
|
||||
msg, gerr := mv.GetMessage()
|
||||
if gerr != nil || msg == nil {
|
||||
continue
|
||||
}
|
||||
tryEmitToolCallsOnce(mergeMessageToolCalls(msg), ev.AgentName, orchestratorName, conversationID, progress, toolEmitSeen, subAgentToolStep)
|
||||
|
||||
if mv.Role == schema.Assistant {
|
||||
if progress != nil && strings.TrimSpace(msg.ReasoningContent) != "" {
|
||||
progress("thinking", strings.TrimSpace(msg.ReasoningContent), map[string]interface{}{
|
||||
einoLastAgent = ev.AgentName
|
||||
progress("progress", fmt.Sprintf("[Eino] %s", ev.AgentName), map[string]interface{}{
|
||||
"conversationId": conversationID,
|
||||
"source": "eino",
|
||||
"einoAgent": ev.AgentName,
|
||||
"einoRole": einoRoleTag(ev.AgentName),
|
||||
})
|
||||
}
|
||||
body := strings.TrimSpace(msg.Content)
|
||||
if body != "" {
|
||||
if streamsMainAssistant(ev.AgentName) {
|
||||
if progress != nil {
|
||||
progress("response_start", "", map[string]interface{}{
|
||||
"conversationId": conversationID,
|
||||
"mcpExecutionIds": snapshotMCPIDs(),
|
||||
"messageGeneratedBy": "eino:" + ev.AgentName,
|
||||
"einoRole": "orchestrator",
|
||||
})
|
||||
progress("response_delta", body, map[string]interface{}{
|
||||
"conversationId": conversationID,
|
||||
"mcpExecutionIds": snapshotMCPIDs(),
|
||||
"einoRole": "orchestrator",
|
||||
if ev.Output == nil || ev.Output.MessageOutput == nil {
|
||||
continue
|
||||
}
|
||||
mv := ev.Output.MessageOutput
|
||||
|
||||
if mv.IsStreaming && mv.MessageStream != nil {
|
||||
streamHeaderSent := false
|
||||
var reasoningStreamID string
|
||||
var toolStreamFragments []schema.ToolCall
|
||||
var subAssistantBuf strings.Builder
|
||||
var subReplyStreamID string
|
||||
var mainAssistantBuf strings.Builder
|
||||
for {
|
||||
chunk, rerr := mv.MessageStream.Recv()
|
||||
if rerr != nil {
|
||||
if errors.Is(rerr, io.EOF) {
|
||||
break
|
||||
}
|
||||
if logger != nil {
|
||||
logger.Warn("eino stream recv", zap.Error(rerr))
|
||||
}
|
||||
break
|
||||
}
|
||||
if chunk == nil {
|
||||
continue
|
||||
}
|
||||
if progress != nil && strings.TrimSpace(chunk.ReasoningContent) != "" {
|
||||
if reasoningStreamID == "" {
|
||||
reasoningStreamID = fmt.Sprintf("eino-reasoning-%s-%d", conversationID, atomic.AddInt64(&reasoningStreamSeq, 1))
|
||||
progress("thinking_stream_start", " ", map[string]interface{}{
|
||||
"streamId": reasoningStreamID,
|
||||
"source": "eino",
|
||||
"einoAgent": ev.AgentName,
|
||||
"einoRole": einoRoleTag(ev.AgentName),
|
||||
})
|
||||
}
|
||||
progress("thinking_stream_delta", chunk.ReasoningContent, map[string]interface{}{
|
||||
"streamId": reasoningStreamID,
|
||||
})
|
||||
}
|
||||
lastAssistant = body
|
||||
} else if progress != nil {
|
||||
progress("eino_agent_reply", body, map[string]interface{}{
|
||||
if chunk.Content != "" {
|
||||
if progress != nil && streamsMainAssistant(ev.AgentName) {
|
||||
if !streamHeaderSent {
|
||||
progress("response_start", "", map[string]interface{}{
|
||||
"conversationId": conversationID,
|
||||
"mcpExecutionIds": snapshotMCPIDs(),
|
||||
"messageGeneratedBy": "eino:" + ev.AgentName,
|
||||
"einoRole": "orchestrator",
|
||||
})
|
||||
streamHeaderSent = true
|
||||
}
|
||||
progress("response_delta", chunk.Content, map[string]interface{}{
|
||||
"conversationId": conversationID,
|
||||
"mcpExecutionIds": snapshotMCPIDs(),
|
||||
"einoRole": "orchestrator",
|
||||
})
|
||||
mainAssistantBuf.WriteString(chunk.Content)
|
||||
} else if !streamsMainAssistant(ev.AgentName) {
|
||||
if progress != nil {
|
||||
if subReplyStreamID == "" {
|
||||
subReplyStreamID = fmt.Sprintf("eino-sub-reply-%s-%d", conversationID, atomic.AddInt64(&einoSubReplyStreamSeq, 1))
|
||||
progress("eino_agent_reply_stream_start", "", map[string]interface{}{
|
||||
"streamId": subReplyStreamID,
|
||||
"einoAgent": ev.AgentName,
|
||||
"einoRole": "sub",
|
||||
"conversationId": conversationID,
|
||||
"source": "eino",
|
||||
})
|
||||
}
|
||||
progress("eino_agent_reply_stream_delta", chunk.Content, map[string]interface{}{
|
||||
"streamId": subReplyStreamID,
|
||||
"conversationId": conversationID,
|
||||
})
|
||||
}
|
||||
subAssistantBuf.WriteString(chunk.Content)
|
||||
}
|
||||
}
|
||||
// 收集流式 tool_calls 全部分片;arguments 在最后一帧常为 "",需按 index/id 合并后才能展示 subagent_type/description。
|
||||
if len(chunk.ToolCalls) > 0 {
|
||||
toolStreamFragments = append(toolStreamFragments, chunk.ToolCalls...)
|
||||
}
|
||||
}
|
||||
if streamsMainAssistant(ev.AgentName) {
|
||||
if s := strings.TrimSpace(mainAssistantBuf.String()); s != "" {
|
||||
lastAssistant = s
|
||||
}
|
||||
}
|
||||
if subAssistantBuf.Len() > 0 && progress != nil {
|
||||
if s := strings.TrimSpace(subAssistantBuf.String()); s != "" {
|
||||
if subReplyStreamID != "" {
|
||||
progress("eino_agent_reply_stream_end", s, map[string]interface{}{
|
||||
"streamId": subReplyStreamID,
|
||||
"einoAgent": ev.AgentName,
|
||||
"einoRole": "sub",
|
||||
"conversationId": conversationID,
|
||||
"source": "eino",
|
||||
})
|
||||
} else {
|
||||
progress("eino_agent_reply", s, map[string]interface{}{
|
||||
"conversationId": conversationID,
|
||||
"einoAgent": ev.AgentName,
|
||||
"einoRole": "sub",
|
||||
"source": "eino",
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
var lastToolChunk *schema.Message
|
||||
if merged := mergeStreamingToolCallFragments(toolStreamFragments); len(merged) > 0 {
|
||||
lastToolChunk = &schema.Message{ToolCalls: merged}
|
||||
}
|
||||
tryEmitToolCallsOnce(lastToolChunk, ev.AgentName, orchestratorName, conversationID, progress, toolEmitSeen, subAgentToolStep)
|
||||
continue
|
||||
}
|
||||
|
||||
msg, gerr := mv.GetMessage()
|
||||
if gerr != nil || msg == nil {
|
||||
continue
|
||||
}
|
||||
tryEmitToolCallsOnce(mergeMessageToolCalls(msg), ev.AgentName, orchestratorName, conversationID, progress, toolEmitSeen, subAgentToolStep)
|
||||
|
||||
if mv.Role == schema.Assistant {
|
||||
if progress != nil && strings.TrimSpace(msg.ReasoningContent) != "" {
|
||||
progress("thinking", strings.TrimSpace(msg.ReasoningContent), map[string]interface{}{
|
||||
"conversationId": conversationID,
|
||||
"einoAgent": ev.AgentName,
|
||||
"einoRole": "sub",
|
||||
"source": "eino",
|
||||
"einoAgent": ev.AgentName,
|
||||
"einoRole": einoRoleTag(ev.AgentName),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if mv.Role == schema.Tool && progress != nil {
|
||||
toolName := msg.ToolName
|
||||
if toolName == "" {
|
||||
toolName = mv.ToolName
|
||||
body := strings.TrimSpace(msg.Content)
|
||||
if body != "" {
|
||||
if streamsMainAssistant(ev.AgentName) {
|
||||
if progress != nil {
|
||||
progress("response_start", "", map[string]interface{}{
|
||||
"conversationId": conversationID,
|
||||
"mcpExecutionIds": snapshotMCPIDs(),
|
||||
"messageGeneratedBy": "eino:" + ev.AgentName,
|
||||
"einoRole": "orchestrator",
|
||||
})
|
||||
progress("response_delta", body, map[string]interface{}{
|
||||
"conversationId": conversationID,
|
||||
"mcpExecutionIds": snapshotMCPIDs(),
|
||||
"einoRole": "orchestrator",
|
||||
})
|
||||
}
|
||||
lastAssistant = body
|
||||
} else if progress != nil {
|
||||
progress("eino_agent_reply", body, map[string]interface{}{
|
||||
"conversationId": conversationID,
|
||||
"einoAgent": ev.AgentName,
|
||||
"einoRole": "sub",
|
||||
"source": "eino",
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// bridge 工具在 res.IsError=true 时会返回带前缀的内容;这里解析为 success/isError,避免前端误判为成功。
|
||||
content := msg.Content
|
||||
isErr := false
|
||||
if strings.HasPrefix(content, einomcp.ToolErrorPrefix) {
|
||||
isErr = true
|
||||
content = strings.TrimPrefix(content, einomcp.ToolErrorPrefix)
|
||||
}
|
||||
if mv.Role == schema.Tool && progress != nil {
|
||||
toolName := msg.ToolName
|
||||
if toolName == "" {
|
||||
toolName = mv.ToolName
|
||||
}
|
||||
|
||||
preview := content
|
||||
if len(preview) > 200 {
|
||||
preview = preview[:200] + "..."
|
||||
// bridge 工具在 res.IsError=true 时会返回带前缀的内容;这里解析为 success/isError,避免前端误判为成功。
|
||||
content := msg.Content
|
||||
isErr := false
|
||||
if strings.HasPrefix(content, einomcp.ToolErrorPrefix) {
|
||||
isErr = true
|
||||
content = strings.TrimPrefix(content, einomcp.ToolErrorPrefix)
|
||||
}
|
||||
|
||||
preview := content
|
||||
if len(preview) > 200 {
|
||||
preview = preview[:200] + "..."
|
||||
}
|
||||
data := map[string]interface{}{
|
||||
"toolName": toolName,
|
||||
"success": !isErr,
|
||||
"isError": isErr,
|
||||
"result": content,
|
||||
"resultPreview": preview,
|
||||
"conversationId": conversationID,
|
||||
"einoAgent": ev.AgentName,
|
||||
"einoRole": einoRoleTag(ev.AgentName),
|
||||
"source": "eino",
|
||||
}
|
||||
if msg.ToolCallID != "" {
|
||||
data["toolCallId"] = msg.ToolCallID
|
||||
}
|
||||
progress("tool_result", fmt.Sprintf("工具结果 (%s)", toolName), data)
|
||||
}
|
||||
data := map[string]interface{}{
|
||||
"toolName": toolName,
|
||||
"success": !isErr,
|
||||
"isError": isErr,
|
||||
"result": content,
|
||||
"resultPreview": preview,
|
||||
"conversationId": conversationID,
|
||||
"einoAgent": ev.AgentName,
|
||||
"einoRole": einoRoleTag(ev.AgentName),
|
||||
"source": "eino",
|
||||
}
|
||||
if msg.ToolCallID != "" {
|
||||
data["toolCallId"] = msg.ToolCallID
|
||||
}
|
||||
progress("tool_result", fmt.Sprintf("工具结果 (%s)", toolName), data)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -559,7 +601,7 @@ func RunDeepAgent(
|
||||
ids := append([]string(nil), mcpIDs...)
|
||||
mcpIDsMu.Unlock()
|
||||
|
||||
histJSON, _ := json.Marshal(msgs)
|
||||
histJSON, _ := json.Marshal(lastRunMsgs)
|
||||
cleaned := strings.TrimSpace(lastAssistant)
|
||||
cleaned = dedupeRepeatedParagraphs(cleaned, 80)
|
||||
cleaned = dedupeParagraphsByLineFingerprint(cleaned, 100)
|
||||
|
||||
@@ -0,0 +1,50 @@
|
||||
package multiagent
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/cloudwego/eino/schema"
|
||||
)
|
||||
|
||||
// maxToolCallArgumentsJSONAttempts counts the first run as well: first run + automatic retries.
// For example, 3 means at most 3 full DeepAgent runs in total (a corrective hint is appended after each of the first 2 failures).
const maxToolCallArgumentsJSONAttempts = 3
|
||||
|
||||
// toolCallArgumentsJSONRetryHint 追加在用户消息后,提示模型输出合法 JSON 工具参数(部分云厂商会在流式阶段校验 arguments)。
|
||||
func toolCallArgumentsJSONRetryHint() *schema.Message {
|
||||
return schema.UserMessage(`[系统提示] 上一次输出中,工具调用的 function.arguments 不是合法 JSON,接口已拒绝。请重新生成:每个 tool call 的 arguments 必须是完整、可解析的 JSON 对象字符串(键名用双引号,无多余逗号,括号配对)。不要输出截断或不完整的 JSON。
|
||||
|
||||
[System] Your previous tool call used invalid JSON in function.arguments and was rejected by the API. Regenerate with strictly valid JSON objects only (double-quoted keys, matched braces, no trailing commas).`)
|
||||
}
|
||||
|
||||
// toolCallArgumentsJSONRecoveryTimelineMessage 供 eino_recovery 事件落库与前端时间线展示。
|
||||
func toolCallArgumentsJSONRecoveryTimelineMessage(attempt int) string {
|
||||
return fmt.Sprintf(
|
||||
"接口拒绝了无效的工具参数 JSON。已向对话追加系统提示并要求模型重新生成合法的 function.arguments。"+
|
||||
"当前为第 %d/%d 轮完整运行。\n\n"+
|
||||
"The API rejected invalid JSON in tool arguments. A system hint was appended. This is full run %d of %d.",
|
||||
attempt+1, maxToolCallArgumentsJSONAttempts, attempt+1, maxToolCallArgumentsJSONAttempts,
|
||||
)
|
||||
}
|
||||
|
||||
// isRecoverableToolCallArgumentsJSONError 判断是否为「工具参数非合法 JSON」类流式错误,可通过追加提示后重跑一轮。
|
||||
func isRecoverableToolCallArgumentsJSONError(err error) bool {
|
||||
if err == nil {
|
||||
return false
|
||||
}
|
||||
s := strings.ToLower(err.Error())
|
||||
if !strings.Contains(s, "json") {
|
||||
return false
|
||||
}
|
||||
if strings.Contains(s, "function.arguments") || strings.Contains(s, "function arguments") {
|
||||
return true
|
||||
}
|
||||
if strings.Contains(s, "invalidparameter") && strings.Contains(s, "json") {
|
||||
return true
|
||||
}
|
||||
if strings.Contains(s, "must be in json format") {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
@@ -0,0 +1,17 @@
|
||||
package multiagent
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestIsRecoverableToolCallArgumentsJSONError(t *testing.T) {
|
||||
yes := errors.New(`failed to receive stream chunk: error, <400> InternalError.Algo.InvalidParameter: The "function.arguments" parameter of the code model must be in JSON format.`)
|
||||
if !isRecoverableToolCallArgumentsJSONError(yes) {
|
||||
t.Fatal("expected recoverable for function.arguments + JSON")
|
||||
}
|
||||
no := errors.New("unrelated network failure")
|
||||
if isRecoverableToolCallArgumentsJSONError(no) {
|
||||
t.Fatal("expected not recoverable")
|
||||
}
|
||||
}
|
||||
@@ -169,6 +169,7 @@
|
||||
"taskCancelled": "Task cancelled",
|
||||
"unknownTool": "Unknown tool",
|
||||
"einoAgentReplyTitle": "Sub-agent reply",
|
||||
"einoRecoveryTitle": "🔄 Invalid tool JSON · run {{n}}/{{max}} (hint appended)",
|
||||
"noDescription": "No description",
|
||||
"noResponseData": "No response data",
|
||||
"loading": "Loading...",
|
||||
|
||||
@@ -169,6 +169,7 @@
|
||||
"taskCancelled": "任务已取消",
|
||||
"unknownTool": "未知工具",
|
||||
"einoAgentReplyTitle": "子代理回复",
|
||||
"einoRecoveryTitle": "🔄 工具参数无效 · 第 {{n}}/{{max}} 轮(已追加提示)",
|
||||
"noDescription": "暂无描述",
|
||||
"noResponseData": "暂无响应数据",
|
||||
"loading": "加载中...",
|
||||
|
||||
@@ -1741,6 +1741,9 @@ function renderProcessDetails(messageId, processDetails) {
|
||||
itemTitle = agPx + (typeof window.t === 'function' ? window.t('chat.iterationRound', { n: data.iteration || 1 }) : '第 ' + (data.iteration || 1) + ' 轮迭代');
|
||||
} else if (eventType === 'thinking') {
|
||||
itemTitle = agPx + '🤔 ' + (typeof window.t === 'function' ? window.t('chat.aiThinking') : 'AI思考');
|
||||
} else if (eventType === 'planning') {
|
||||
// 与流式 monitor.js 中 response_start/response_delta 展示的「规划中」一致(落库聚合)
|
||||
itemTitle = agPx + '📝 ' + (typeof window.t === 'function' ? window.t('chat.planning') : '规划中');
|
||||
} else if (eventType === 'tool_calls_detected') {
|
||||
itemTitle = agPx + '🔧 ' + (typeof window.t === 'function' ? window.t('chat.toolCallsDetected', { count: data.count || 0 }) : '检测到 ' + (data.count || 0) + ' 个工具调用');
|
||||
} else if (eventType === 'tool_call') {
|
||||
@@ -1760,6 +1763,10 @@ function renderProcessDetails(messageId, processDetails) {
|
||||
itemTitle = agPx + execLine;
|
||||
} else if (eventType === 'eino_agent_reply') {
|
||||
itemTitle = agPx + '💬 ' + (typeof window.t === 'function' ? window.t('chat.einoAgentReplyTitle') : '子代理回复');
|
||||
} else if (eventType === 'eino_recovery') {
|
||||
const ri = data.runIndex != null ? data.runIndex : (data.einoRetry != null ? data.einoRetry + 1 : 1);
|
||||
const mx = data.maxRuns != null ? data.maxRuns : 3;
|
||||
itemTitle = (typeof window.t === 'function' ? window.t('chat.einoRecoveryTitle', { n: ri, max: mx }) : ('🔄 第 ' + ri + '/' + mx + ' 轮(已追加提示)'));
|
||||
} else if (eventType === 'knowledge_retrieval') {
|
||||
itemTitle = '📚 ' + (typeof window.t === 'function' ? window.t('chat.knowledgeRetrieval') : '知识检索');
|
||||
} else if (eventType === 'error') {
|
||||
|
||||
@@ -889,7 +889,22 @@ function handleStreamEvent(event, progressElement, progressId,
|
||||
data: event.data
|
||||
});
|
||||
break;
|
||||
|
||||
|
||||
case 'eino_recovery': {
|
||||
const d = event.data || {};
|
||||
const runIdx = d.runIndex != null ? d.runIndex : (d.einoRetry != null ? d.einoRetry + 1 : 1);
|
||||
const maxRuns = d.maxRuns != null ? d.maxRuns : 3;
|
||||
const title = typeof window.t === 'function'
|
||||
? window.t('chat.einoRecoveryTitle', { n: runIdx, max: maxRuns })
|
||||
: ('🔄 工具参数无效 · 第 ' + runIdx + '/' + maxRuns + ' 轮(已追加提示)');
|
||||
addTimelineItem(timeline, 'eino_recovery', {
|
||||
title: title,
|
||||
message: event.message || '',
|
||||
data: event.data
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
case 'tool_call':
|
||||
const toolInfo = event.data || {};
|
||||
const toolName = toolInfo.toolName || (typeof window.t === 'function' ? window.t('chat.unknownTool') : '未知工具');
|
||||
@@ -1463,6 +1478,15 @@ function addTimelineItem(timeline, type, options) {
|
||||
if (type === 'progress' && options.message) {
|
||||
item.dataset.progressMessage = options.message;
|
||||
}
|
||||
if (type === 'eino_recovery' && options.data) {
|
||||
const d = options.data;
|
||||
if (d.runIndex != null) {
|
||||
item.dataset.recoveryRunIndex = String(d.runIndex);
|
||||
}
|
||||
if (d.maxRuns != null) {
|
||||
item.dataset.recoveryMaxRuns = String(d.maxRuns);
|
||||
}
|
||||
}
|
||||
if (type === 'tool_calls_detected' && options.data && options.data.count != null) {
|
||||
item.dataset.toolCallsCount = String(options.data.count);
|
||||
}
|
||||
@@ -1516,7 +1540,7 @@ function addTimelineItem(timeline, type, options) {
|
||||
`;
|
||||
|
||||
// 根据类型添加详细内容
|
||||
if (type === 'thinking' && options.message) {
|
||||
if ((type === 'thinking' || type === 'planning') && options.message) {
|
||||
content += `<div class="timeline-item-content">${formatMarkdown(options.message)}</div>`;
|
||||
} else if (type === 'tool_call' && options.data) {
|
||||
const data = options.data;
|
||||
@@ -1561,6 +1585,12 @@ function addTimelineItem(timeline, type, options) {
|
||||
</div>
|
||||
</div>
|
||||
`;
|
||||
} else if (type === 'eino_recovery' && options.message) {
|
||||
content += `
|
||||
<div class="timeline-item-content timeline-eino-recovery">
|
||||
${escapeHtml(options.message).replace(/\n/g, '<br>')}
|
||||
</div>
|
||||
`;
|
||||
} else if (type === 'cancelled') {
|
||||
const taskCancelledLabel = typeof window.t === 'function' ? window.t('chat.taskCancelled') : '任务已取消';
|
||||
content += `
|
||||
@@ -1569,7 +1599,7 @@ function addTimelineItem(timeline, type, options) {
|
||||
</div>
|
||||
`;
|
||||
}
|
||||
|
||||
|
||||
item.innerHTML = content;
|
||||
if (options.data) {
|
||||
applyEinoTimelineRole(item, options.data);
|
||||
@@ -2390,6 +2420,8 @@ function refreshProgressAndTimelineI18n() {
|
||||
}
|
||||
} else if (type === 'thinking') {
|
||||
titleSpan.textContent = ap + '\uD83E\uDD14 ' + _t('chat.aiThinking');
|
||||
} else if (type === 'planning') {
|
||||
titleSpan.textContent = ap + '\uD83D\uDCDD ' + _t('chat.planning');
|
||||
} else if (type === 'tool_calls_detected' && item.dataset.toolCallsCount != null) {
|
||||
const count = parseInt(item.dataset.toolCallsCount, 10) || 0;
|
||||
titleSpan.textContent = ap + '\uD83D\uDD27 ' + _t('chat.toolCallsDetected', { count: count });
|
||||
@@ -2405,6 +2437,10 @@ function refreshProgressAndTimelineI18n() {
|
||||
titleSpan.textContent = ap + icon + (success ? _t('chat.toolExecComplete', { name: name }) : _t('chat.toolExecFailed', { name: name }));
|
||||
} else if (type === 'eino_agent_reply') {
|
||||
titleSpan.textContent = ap + '\uD83D\uDCAC ' + _t('chat.einoAgentReplyTitle');
|
||||
} else if (type === 'eino_recovery' && item.dataset.recoveryRunIndex) {
|
||||
const n = parseInt(item.dataset.recoveryRunIndex, 10) || 1;
|
||||
const mx = parseInt(item.dataset.recoveryMaxRuns, 10) || 3;
|
||||
titleSpan.textContent = _t('chat.einoRecoveryTitle', { n: n, max: mx });
|
||||
} else if (type === 'cancelled') {
|
||||
titleSpan.textContent = '\u26D4 ' + _t('chat.taskCancelled');
|
||||
} else if (type === 'progress' && item.dataset.progressMessage !== undefined) {
|
||||
|
||||
Reference in New Issue
Block a user