Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
89 changes: 72 additions & 17 deletions internal/transformer/convert/claude_openai.go
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ func ClaudeReqToOpenAI(claudeReq []byte, model string) ([]byte, error) {
var textParts []string
var toolCalls []transformer.OpenAIToolCall
var toolResults []transformer.OpenAIMessage
hasThinking := false

for _, block := range content {
m, ok := block.(map[string]interface{})
Expand All @@ -51,22 +52,35 @@ func ClaudeReqToOpenAI(claudeReq []byte, model string) ([]byte, error) {
case "thinking":
// Skip thinking blocks - they are Claude's internal reasoning
// and should not be forwarded to other APIs
hasThinking = true
continue
case "tool_use":
args, _ := json.Marshal(m["input"])
id, ok := m["id"].(string)
if !ok || id == "" {
continue
}
name, ok := m["name"].(string)
if !ok || name == "" {
continue
}
toolCalls = append(toolCalls, transformer.OpenAIToolCall{
ID: m["id"].(string),
ID: id,
Type: "function",
Function: struct {
Name string `json:"name"`
Arguments string `json:"arguments"`
}{Name: m["name"].(string), Arguments: string(args)},
}{Name: name, Arguments: string(args)},
})
case "tool_result":
callID, ok := m["tool_use_id"].(string)
if !ok || callID == "" {
continue
}
toolResults = append(toolResults, transformer.OpenAIMessage{
Role: "tool",
Content: extractToolResultContent(m["content"]),
ToolCallID: m["tool_use_id"].(string),
ToolCallID: callID,
})
}
}
Expand All @@ -81,6 +95,11 @@ func ClaudeReqToOpenAI(claudeReq []byte, model string) ([]byte, error) {
openaiMsg.ToolCalls = toolCalls
}
messages = append(messages, openaiMsg)
} else if hasThinking && msg.Role == "assistant" {
messages = append(messages, transformer.OpenAIMessage{
Role: "assistant",
Content: "(thinking...)",
})
}

// Add tool result messages
Expand Down Expand Up @@ -317,7 +336,7 @@ func OpenAIRespToClaude(openaiResp []byte) ([]byte, error) {
if len(resp.Choices) > 0 {
choice := resp.Choices[0]
if choice.Message.Content != "" {
content = append(content, map[string]interface{}{"type": "text", "text": choice.Message.Content})
content = append(content, splitThinkTaggedText(choice.Message.Content)...)
}
for _, tc := range choice.Message.ToolCalls {
var args map[string]interface{}
Expand Down Expand Up @@ -428,6 +447,8 @@ func OpenAIStreamToClaude(event []byte, ctx *transformer.StreamContext) ([]byte,
if jsonData == "" || jsonData == "[DONE]" {
if jsonData == "[DONE]" {
var result []byte
emitText, emitThinking := makeThinkEmitters(ctx, &result)
flushThinkTaggedStream(ctx, emitText, emitThinking)
// Close any open content blocks before message_stop
if ctx.ThinkingBlockStarted {
result = append(result, buildClaudeEvent("content_block_stop", map[string]interface{}{"index": ctx.ThinkingIndex})...)
Expand Down Expand Up @@ -476,11 +497,34 @@ func OpenAIStreamToClaude(event []byte, ctx *transformer.StreamContext) ([]byte,
}

if len(chunk.Choices) == 0 {
if chunk.Usage != nil {
usageObj := map[string]interface{}{
"input_tokens": chunk.Usage.PromptTokens,
"output_tokens": chunk.Usage.CompletionTokens,
}
msgDelta := map[string]interface{}{
"delta": map[string]interface{}{},
"usage": usageObj,
}
result = append(result, buildClaudeEvent("message_delta", msgDelta)...)
}
return result, nil
}

choice := chunk.Choices[0]
delta := choice.Delta
if chunk.Usage != nil && delta.Role == "" && delta.Content == "" && delta.ReasoningContent == "" && len(delta.ToolCalls) == 0 && choice.FinishReason == nil {
usageObj := map[string]interface{}{
"input_tokens": chunk.Usage.PromptTokens,
"output_tokens": chunk.Usage.CompletionTokens,
}
msgDelta := map[string]interface{}{
"delta": map[string]interface{}{},
"usage": usageObj,
}
result = append(result, buildClaudeEvent("message_delta", msgDelta)...)
return result, nil
}

// Reasoning/Thinking content (before text content)
if delta.ReasoningContent != "" {
Expand All @@ -499,20 +543,32 @@ func OpenAIStreamToClaude(event []byte, ctx *transformer.StreamContext) ([]byte,

// Text content
if delta.Content != "" {
// Close thinking block if transitioning to text
if ctx.ThinkingBlockStarted && !ctx.ContentBlockStarted {
result = append(result, buildClaudeEvent("content_block_stop", map[string]interface{}{"index": ctx.ThinkingIndex})...)
ctx.ThinkingBlockStarted = false
content := ctx.ThinkingBuffer + delta.Content
ctx.ThinkingBuffer = ""

emitText, emitThinking := makeThinkEmitters(ctx, &result)
emitTextWithClose := func(text string) {
if text == "" {
return
}
if ctx.ThinkingBlockStarted && !ctx.ContentBlockStarted && !ctx.InThinkingTag {
result = append(result, buildClaudeEvent("content_block_stop", map[string]interface{}{"index": ctx.ThinkingIndex})...)
ctx.ThinkingBlockStarted = false
}
emitText(text)
}
if !ctx.ContentBlockStarted {
ctx.ContentBlockStarted = true
result = append(result, buildClaudeEvent("content_block_start", map[string]interface{}{
"index": ctx.ContentIndex, "content_block": map[string]interface{}{"type": "text", "text": ""},
})...)
emitThinkingWithClose := func(text string) {
if text == "" {
return
}
emitThinking(text)
if ctx.ThinkingBlockStarted {
result = append(result, buildClaudeEvent("content_block_stop", map[string]interface{}{"index": ctx.ThinkingIndex})...)
ctx.ThinkingBlockStarted = false
}
}
result = append(result, buildClaudeEvent("content_block_delta", map[string]interface{}{
"index": ctx.ContentIndex, "delta": map[string]interface{}{"type": "text_delta", "text": delta.Content},
})...)

consumeThinkTaggedStream(content, ctx, emitTextWithClose, emitThinkingWithClose)
}

// Tool calls
Expand Down Expand Up @@ -667,4 +723,3 @@ func extractToolResultContent(content interface{}) string {
}
return ""
}

68 changes: 56 additions & 12 deletions internal/transformer/convert/claude_openai2.go
Original file line number Diff line number Diff line change
Expand Up @@ -204,7 +204,7 @@ func OpenAI2RespToClaude(openai2Resp []byte) ([]byte, error) {
case "message":
for _, part := range item.Content {
if part.Type == "output_text" {
content = append(content, map[string]interface{}{"type": "text", "text": part.Text})
content = append(content, splitThinkTaggedText(part.Text)...)
}
}
case "function_call":
Expand Down Expand Up @@ -335,7 +335,7 @@ func ClaudeStreamToOpenAI2(event []byte, ctx *transformer.StreamContext) ([]byte
partial := delta["partial_json"].(string)
ctx.ToolArguments += partial
writeEvent(map[string]interface{}{
"type": "response.function_call_arguments.delta",
"type": "response.function_call_arguments.delta",
"output_index": ctx.ToolIndex, "delta": partial,
})
}
Expand All @@ -347,7 +347,7 @@ func ClaudeStreamToOpenAI2(event []byte, ctx *transformer.StreamContext) ([]byte
if ctx.ToolBlockStarted && blockIdx == ctx.ToolIndex {
// function_call_arguments.done
writeEvent(map[string]interface{}{
"type": "response.function_call_arguments.done",
"type": "response.function_call_arguments.done",
"output_index": blockIdx, "arguments": ctx.ToolArguments,
})
// output_item.done for function_call
Expand Down Expand Up @@ -414,7 +414,23 @@ func OpenAI2StreamToClaude(event []byte, ctx *transformer.StreamContext) ([]byte
_, jsonData := parseSSE(event)
if jsonData == "" || jsonData == "[DONE]" {
if jsonData == "[DONE]" {
return buildClaudeEvent("message_stop", map[string]interface{}{}), nil
var result []byte
emitText, emitThinking := makeThinkEmitters(ctx, &result)
flushThinkTaggedStream(ctx, emitText, emitThinking)
if ctx.ThinkingBlockStarted {
result = append(result, buildClaudeEvent("content_block_stop", map[string]interface{}{"index": ctx.ThinkingIndex})...)
ctx.ThinkingBlockStarted = false
}
if ctx.ContentBlockStarted {
result = append(result, buildClaudeEvent("content_block_stop", map[string]interface{}{"index": ctx.ContentIndex})...)
ctx.ContentBlockStarted = false
}
if ctx.ToolBlockStarted {
result = append(result, buildClaudeEvent("content_block_stop", map[string]interface{}{"index": ctx.ToolIndex})...)
ctx.ToolBlockStarted = false
}
result = append(result, buildClaudeEvent("message_stop", map[string]interface{}{})...)
return result, nil
}
return nil, nil
}
Expand All @@ -440,18 +456,39 @@ func OpenAI2StreamToClaude(event []byte, ctx *transformer.StreamContext) ([]byte
})...)

case "response.output_text.delta":
if !ctx.ContentBlockStarted {
ctx.ContentBlockStarted = true
result = append(result, buildClaudeEvent("content_block_start", map[string]interface{}{
"index": ctx.ContentIndex, "content_block": map[string]interface{}{"type": "text", "text": ""},
})...)
content := ctx.ThinkingBuffer + evt.Delta
ctx.ThinkingBuffer = ""

emitText, emitThinking := makeThinkEmitters(ctx, &result)
emitTextWithClose := func(text string) {
if text == "" {
return
}
if ctx.ThinkingBlockStarted && !ctx.ContentBlockStarted && !ctx.InThinkingTag {
result = append(result, buildClaudeEvent("content_block_stop", map[string]interface{}{"index": ctx.ThinkingIndex})...)
ctx.ThinkingBlockStarted = false
}
emitText(text)
}
result = append(result, buildClaudeEvent("content_block_delta", map[string]interface{}{
"index": ctx.ContentIndex, "delta": map[string]interface{}{"type": "text_delta", "text": evt.Delta},
})...)
emitThinkingWithClose := func(text string) {
if text == "" {
return
}
emitThinking(text)
if ctx.ThinkingBlockStarted {
result = append(result, buildClaudeEvent("content_block_stop", map[string]interface{}{"index": ctx.ThinkingIndex})...)
ctx.ThinkingBlockStarted = false
}
}

consumeThinkTaggedStream(content, ctx, emitTextWithClose, emitThinkingWithClose)

case "response.output_item.added":
if evt.Item != nil && evt.Item.Type == "function_call" {
if ctx.ThinkingBlockStarted {
result = append(result, buildClaudeEvent("content_block_stop", map[string]interface{}{"index": ctx.ThinkingIndex})...)
ctx.ThinkingBlockStarted = false
}
// Close text block if open
if ctx.ContentBlockStarted {
result = append(result, buildClaudeEvent("content_block_stop", map[string]interface{}{"index": ctx.ContentIndex})...)
Expand Down Expand Up @@ -486,8 +523,15 @@ func OpenAI2StreamToClaude(event []byte, ctx *transformer.StreamContext) ([]byte
}

case "response.completed":
emitText, emitThinking := makeThinkEmitters(ctx, &result)
flushThinkTaggedStream(ctx, emitText, emitThinking)
if ctx.ThinkingBlockStarted {
result = append(result, buildClaudeEvent("content_block_stop", map[string]interface{}{"index": ctx.ThinkingIndex})...)
ctx.ThinkingBlockStarted = false
}
if ctx.ContentBlockStarted {
result = append(result, buildClaudeEvent("content_block_stop", map[string]interface{}{"index": ctx.ContentIndex})...)
ctx.ContentBlockStarted = false
}
stopReason := "end_turn"
if ctx.ToolIndex > 0 || ctx.CurrentToolID != "" {
Expand Down
91 changes: 91 additions & 0 deletions internal/transformer/convert/claude_openai2_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,91 @@
package convert

import (
"encoding/json"
"strings"
"testing"

"github.com/lich0821/ccNexus/internal/transformer"
)

// TestOpenAI2RespToClaudeWithThinking verifies that a <think>-tagged
// output_text in an OpenAI Responses-API payload is split into a Claude
// "thinking" block followed by a "text" block, and that the tag contents
// are carried through verbatim (not just that blocks of the right type
// exist).
func TestOpenAI2RespToClaudeWithThinking(t *testing.T) {
	openai2Resp := `{
		"id": "resp_1",
		"object": "response",
		"status": "completed",
		"output": [{
			"type": "message",
			"role": "assistant",
			"content": [{
				"type": "output_text",
				"text": "<think>Reason</think>Answer"
			}]
		}],
		"usage": {
			"input_tokens": 3,
			"output_tokens": 5,
			"total_tokens": 8
		}
	}`

	claudeRespBytes, err := OpenAI2RespToClaude([]byte(openai2Resp))
	if err != nil {
		t.Fatalf("OpenAI2RespToClaude failed: %v", err)
	}

	var claudeResp map[string]interface{}
	if err := json.Unmarshal(claudeRespBytes, &claudeResp); err != nil {
		t.Fatalf("Failed to unmarshal Claude response: %v", err)
	}

	content, ok := claudeResp["content"].([]interface{})
	if !ok {
		t.Fatalf("Expected content to be an array, got %T", claudeResp["content"])
	}
	if len(content) != 2 {
		t.Fatalf("Expected 2 content blocks, got %d", len(content))
	}

	thinkingBlock, ok := content[0].(map[string]interface{})
	if !ok || thinkingBlock["type"] != "thinking" {
		t.Fatalf("Expected first block thinking, got %v", content[0])
	}
	// Assert the payload too: the tag body must survive the split intact.
	if got := thinkingBlock["thinking"]; got != "Reason" {
		t.Fatalf("Expected thinking content %q, got %v", "Reason", got)
	}

	textBlock, ok := content[1].(map[string]interface{})
	if !ok || textBlock["type"] != "text" {
		t.Fatalf("Expected second block text, got %v", content[1])
	}
	if got := textBlock["text"]; got != "Answer" {
		t.Fatalf("Expected text content %q, got %v", "Answer", got)
	}
}

// TestOpenAI2StreamToClaudeWithThinking feeds a minimal Responses-API SSE
// stream containing a <think>-tagged text delta through
// OpenAI2StreamToClaude and checks that:
//   - the think-tag content is re-emitted as a Claude thinking block,
//   - the trailing text after </think> becomes a normal text delta,
//   - no raw <think>/</think> markers leak into the output, and
//   - the [DONE] sentinel terminates the stream with a message_stop event.
func TestOpenAI2StreamToClaudeWithThinking(t *testing.T) {
	ctx := transformer.NewStreamContext()
	ctx.ModelName = "claude-3-sonnet-20240229"

	chunks := []string{
		`data: {"type":"response.created","response":{"id":"resp_1","object":"response","status":"in_progress"}}`,
		`data: {"type":"response.output_text.delta","delta":"<think>Reason</think>Hello"}`,
		`data: {"type":"response.completed","response":{"id":"resp_1","object":"response","status":"completed"}}`,
		`data: [DONE]`,
	}

	var allEvents []string
	for _, chunk := range chunks {
		events, err := OpenAI2StreamToClaude([]byte(chunk), ctx)
		if err != nil {
			t.Fatalf("OpenAI2StreamToClaude failed: %v", err)
		}
		if events != nil {
			allEvents = append(allEvents, string(events))
		}
	}

	fullEvents := strings.Join(allEvents, "")
	if !strings.Contains(fullEvents, "\"type\":\"thinking\"") {
		t.Fatalf("Expected thinking block start, but not found")
	}
	if !strings.Contains(fullEvents, "\"thinking\":\"Reason\"") {
		t.Fatalf("Expected thinking delta 'Reason', but not found")
	}
	if !strings.Contains(fullEvents, "\"text\":\"Hello\"") {
		t.Fatalf("Expected text delta 'Hello', but not found")
	}
	if strings.Contains(fullEvents, "<think>") || strings.Contains(fullEvents, "</think>") {
		t.Fatalf("Unexpected think tags leaked into output")
	}
	// [DONE] must close any open blocks and finish with message_stop;
	// without this check a stream that never terminates cleanly would pass.
	if !strings.Contains(fullEvents, "message_stop") {
		t.Fatalf("Expected message_stop event after [DONE], but not found")
	}
}
Loading
Loading