Merge pull request #101 from donnel666/feat/claude-api-bridge

feat: add Claude API bridge - transparent OpenAI-to-Anthropic protoco…
This commit is contained in:
公明
2026-04-17 10:08:10 +08:00
committed by GitHub
8 changed files with 1139 additions and 55 deletions
+2
View File
@@ -34,7 +34,9 @@ log:
# - DeepSeek: https://api.deepseek.com/v1
# - 其他兼容 OpenAI 协议的 API
# 常用模型: gpt-4, gpt-3.5-turbo, deepseek-chat, claude-3-opus 等
# provider: 可选值 openai(默认) | claude(自动桥接到 Anthropic Claude Messages API)
openai:
provider: openai # API 提供商: openai(默认,兼容OpenAI协议) | claude(自动桥接到Anthropic Claude Messages API)
base_url: https://dashscope.aliyuncs.com/compatible-mode/v1 # API 基础 URL(必填)
api_key: sk-xxxxxx # API 密钥(必填)
model: qwen3-max # 模型名称(必填)
+1
View File
@@ -129,6 +129,7 @@ type MCPConfig struct {
}
type OpenAIConfig struct {
	Provider string `yaml:"provider,omitempty" json:"provider,omitempty"` // API 提供商: "openai"(默认) 或 "claude";为 "claude" 时自动桥接为 Anthropic Messages API
APIKey string `yaml:"api_key" json:"api_key"`
BaseURL string `yaml:"base_url" json:"base_url"`
Model string `yaml:"model" json:"model"`
+35 -55
View File
@@ -3,9 +3,7 @@ package handler
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"os"
"path/filepath"
@@ -18,6 +16,7 @@ import (
"cyberstrike-ai/internal/config"
"cyberstrike-ai/internal/knowledge"
"cyberstrike-ai/internal/mcp"
"cyberstrike-ai/internal/openai"
"cyberstrike-ai/internal/security"
"github.com/gin-gonic/gin"
@@ -795,9 +794,10 @@ func (h *ConfigHandler) UpdateConfig(c *gin.Context) {
// TestOpenAIRequest 测试OpenAI连接请求
type TestOpenAIRequest struct {
BaseURL string `json:"base_url"`
APIKey string `json:"api_key"`
Model string `json:"model"`
Provider string `json:"provider"`
BaseURL string `json:"base_url"`
APIKey string `json:"api_key"`
Model string `json:"model"`
}
// TestOpenAI 测试OpenAI API连接是否可用
@@ -819,7 +819,11 @@ func (h *ConfigHandler) TestOpenAI(c *gin.Context) {
baseURL := strings.TrimSuffix(strings.TrimSpace(req.BaseURL), "/")
if baseURL == "" {
baseURL = "https://api.openai.com/v1"
if strings.EqualFold(strings.TrimSpace(req.Provider), "claude") {
baseURL = "https://api.anthropic.com"
} else {
baseURL = "https://api.openai.com/v1"
}
}
// 构造一个最小的 chat completion 请求
@@ -831,57 +835,19 @@ func (h *ConfigHandler) TestOpenAI(c *gin.Context) {
"max_tokens": 5,
}
body, err := json.Marshal(payload)
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": "构造请求失败"})
return
// 使用内部 openai Client 进行测试,若 provider 为 claude 会自动走桥接层
tmpCfg := &config.OpenAIConfig{
Provider: req.Provider,
BaseURL: baseURL,
APIKey: strings.TrimSpace(req.APIKey),
Model: req.Model,
}
client := openai.NewClient(tmpCfg, nil, h.logger)
ctx, cancel := context.WithTimeout(c.Request.Context(), 30*time.Second)
defer cancel()
httpReq, err := http.NewRequestWithContext(ctx, http.MethodPost, baseURL+"/chat/completions", bytes.NewReader(body))
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": "构造HTTP请求失败: " + err.Error()})
return
}
httpReq.Header.Set("Content-Type", "application/json")
httpReq.Header.Set("Authorization", "Bearer "+strings.TrimSpace(req.APIKey))
start := time.Now()
resp, err := http.DefaultClient.Do(httpReq)
latency := time.Since(start)
if err != nil {
c.JSON(http.StatusOK, gin.H{
"success": false,
"error": "连接失败: " + err.Error(),
})
return
}
defer resp.Body.Close()
respBody, _ := io.ReadAll(resp.Body)
if resp.StatusCode != http.StatusOK {
// 尝试提取错误信息
var errResp struct {
Error struct {
Message string `json:"message"`
} `json:"error"`
}
errMsg := string(respBody)
if json.Unmarshal(respBody, &errResp) == nil && errResp.Error.Message != "" {
errMsg = errResp.Error.Message
}
c.JSON(http.StatusOK, gin.H{
"success": false,
"error": fmt.Sprintf("API 返回错误 (HTTP %d): %s", resp.StatusCode, errMsg),
"status_code": resp.StatusCode,
})
return
}
// 解析响应并严格验证是否为有效的 chat completion 响应
var chatResp struct {
ID string `json:"id"`
Object string `json:"object"`
@@ -893,10 +859,21 @@ func (h *ConfigHandler) TestOpenAI(c *gin.Context) {
} `json:"message"`
} `json:"choices"`
}
if err := json.Unmarshal(respBody, &chatResp); err != nil {
err := client.ChatCompletion(ctx, payload, &chatResp)
latency := time.Since(start)
if err != nil {
if apiErr, ok := err.(*openai.APIError); ok {
c.JSON(http.StatusOK, gin.H{
"success": false,
"error": fmt.Sprintf("API 返回错误 (HTTP %d): %s", apiErr.StatusCode, apiErr.Body),
"status_code": apiErr.StatusCode,
})
return
}
c.JSON(http.StatusOK, gin.H{
"success": false,
"error": "API 响应不是有效的 JSON,请检查 Base URL 是否正确",
"error": "连接失败: " + err.Error(),
})
return
}
@@ -905,14 +882,14 @@ func (h *ConfigHandler) TestOpenAI(c *gin.Context) {
if len(chatResp.Choices) == 0 {
c.JSON(http.StatusOK, gin.H{
"success": false,
"error": "API 响应缺少 choices 字段,请检查 Base URL 路径是否正确(通常以 /v1 结尾)",
"error": "API 响应缺少 choices 字段,请检查 Base URL 路径是否正确",
})
return
}
if chatResp.ID == "" && chatResp.Model == "" {
c.JSON(http.StatusOK, gin.H{
"success": false,
"error": "API 响应格式不符合 OpenAI 规范,请检查 Base URL 是否正确",
"error": "API 响应格式不符合预期,请检查 Base URL 是否正确",
})
return
}
@@ -1272,6 +1249,9 @@ func updateMCPConfig(doc *yaml.Node, cfg config.MCPConfig) {
func updateOpenAIConfig(doc *yaml.Node, cfg config.OpenAIConfig) {
root := doc.Content[0]
openaiNode := ensureMap(root, "openai")
if cfg.Provider != "" {
setStringInMap(openaiNode, "provider", cfg.Provider)
}
setStringInMap(openaiNode, "api_key", cfg.APIKey)
setStringInMap(openaiNode, "base_url", cfg.BaseURL)
setStringInMap(openaiNode, "model", cfg.Model)
+4
View File
@@ -19,6 +19,7 @@ import (
"cyberstrike-ai/internal/agents"
"cyberstrike-ai/internal/config"
"cyberstrike-ai/internal/einomcp"
"cyberstrike-ai/internal/openai"
einoopenai "github.com/cloudwego/eino-ext/components/model/openai"
"github.com/cloudwego/eino/adk"
@@ -141,6 +142,9 @@ func RunDeepAgent(
},
}
// 若配置为 Claude provider,注入自动桥接 transport,对 Eino 透明走 Anthropic Messages API
httpClient = openai.NewEinoHTTPClient(&appCfg.OpenAI, httpClient)
baseModelCfg := &einoopenai.ChatModelConfig{
APIKey: appCfg.OpenAI.APIKey,
BaseURL: strings.TrimSuffix(appCfg.OpenAI.BaseURL, "/"),
File diff suppressed because it is too large Load Diff
+9
View File
@@ -64,6 +64,9 @@ func (c *Client) ChatCompletion(ctx context.Context, payload interface{}, out in
if strings.TrimSpace(c.config.APIKey) == "" {
return fmt.Errorf("openai api key is empty")
}
if c.isClaude() {
return c.claudeChatCompletion(ctx, payload, out)
}
baseURL := strings.TrimSuffix(c.config.BaseURL, "/")
if baseURL == "" {
@@ -156,6 +159,9 @@ func (c *Client) ChatCompletionStream(ctx context.Context, payload interface{},
if strings.TrimSpace(c.config.APIKey) == "" {
return "", fmt.Errorf("openai api key is empty")
}
if c.isClaude() {
return c.claudeChatCompletionStream(ctx, payload, onDelta)
}
baseURL := strings.TrimSuffix(c.config.BaseURL, "/")
if baseURL == "" {
@@ -294,6 +300,9 @@ func (c *Client) ChatCompletionStreamWithToolCalls(
if strings.TrimSpace(c.config.APIKey) == "" {
return "", nil, "", fmt.Errorf("openai api key is empty")
}
if c.isClaude() {
return c.claudeChatCompletionStreamWithToolCalls(ctx, payload, onContentDelta)
}
baseURL := strings.TrimSuffix(c.config.BaseURL, "/")
if baseURL == "" {
+8
View File
@@ -102,6 +102,10 @@ async function loadConfig(loadTools = true) {
currentConfig = await response.json();
// 填充OpenAI配置
const providerEl = document.getElementById('openai-provider');
if (providerEl) {
providerEl.value = currentConfig.openai.provider || 'openai';
}
document.getElementById('openai-api-key').value = currentConfig.openai.api_key || '';
document.getElementById('openai-base-url').value = currentConfig.openai.base_url || '';
document.getElementById('openai-model').value = currentConfig.openai.model || '';
@@ -753,6 +757,7 @@ async function applySettings() {
});
// 验证必填字段
const provider = document.getElementById('openai-provider')?.value || 'openai';
const apiKey = document.getElementById('openai-api-key').value.trim();
const baseUrl = document.getElementById('openai-base-url').value.trim();
const model = document.getElementById('openai-model').value.trim();
@@ -821,6 +826,7 @@ async function applySettings() {
const wecomAgentIdVal = document.getElementById('robot-wecom-agent-id')?.value.trim();
const config = {
openai: {
provider: provider,
api_key: apiKey,
base_url: baseUrl,
model: model
@@ -981,6 +987,7 @@ async function testOpenAIConnection() {
const btn = document.getElementById('test-openai-btn');
const resultEl = document.getElementById('test-openai-result');
const provider = document.getElementById('openai-provider')?.value || 'openai';
const baseUrl = document.getElementById('openai-base-url').value.trim();
const apiKey = document.getElementById('openai-api-key').value.trim();
const model = document.getElementById('openai-model').value.trim();
@@ -1001,6 +1008,7 @@ async function testOpenAIConnection() {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
provider: provider,
base_url: baseUrl,
api_key: apiKey,
model: model
+7
View File
@@ -1365,6 +1365,13 @@
<div class="settings-subsection">
<h4 data-i18n="settingsBasic.openaiConfig">OpenAI 配置</h4>
<div class="settings-form">
<div class="form-group">
<label for="openai-provider">API 提供商</label>
<select id="openai-provider" style="width: 100%; padding: 0.5rem 0.75rem; border: 1px solid var(--border-color, #e2e8f0); border-radius: 6px; background: var(--card-bg, #fff); color: var(--text-color, #2d3748); font-size: 0.875rem;">
<option value="openai">OpenAI / 兼容 OpenAI 协议</option>
<option value="claude">Claude (Anthropic Messages API)</option>
</select>
</div>
<div class="form-group">
<label for="openai-base-url">Base URL <span style="color: red;">*</span></label>
<input type="text" id="openai-base-url" data-i18n="settingsBasic.openaiBaseUrlPlaceholder" data-i18n-attr="placeholder" placeholder="https://api.openai.com/v1" required />