feat: support thinking blocks in the Gemini API and enable thinking by default for supported models

CJACK
2026-05-03 01:00:06 +08:00
parent a901250de7
commit 7c0bc9ec0f
10 changed files with 177 additions and 33 deletions
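
A note on the helpers this diff leans on: util.ResolveThinkingOverride and util.ResolveThinkingEnabled are called at several sites below but are not themselves touched by the commit. The sketch that follows is an inferred reconstruction of their contract from those call sites, not the project's actual code: an explicit {"thinking":{"type":"enabled"|"disabled"}} field in the request body wins, and the caller-supplied default applies otherwise.

// Inferred sketch only -- the real util package is not part of this diff.
package util

// ResolveThinkingOverride reports the explicit thinking override carried
// in a request body, if one is present.
func ResolveThinkingOverride(req map[string]any) (enabled, ok bool) {
	thinking, _ := req["thinking"].(map[string]any)
	switch typ, _ := thinking["type"].(string); typ {
	case "enabled":
		return true, true
	case "disabled":
		return false, true
	}
	return false, false
}

// ResolveThinkingEnabled returns the explicit override when present and
// falls back to the supplied default otherwise.
func ResolveThinkingEnabled(req map[string]any, def bool) bool {
	if enabled, ok := ResolveThinkingOverride(req); ok {
		return enabled
	}
	return def
}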

View File

@@ -27,11 +27,32 @@ func TestNormalizeClaudeRequestUsesGlobalAliasMapping(t *testing.T) {
 	if out.Standard.ResolvedModel != "deepseek-v4-pro-search" {
 		t.Fatalf("resolved model mismatch: got=%q", out.Standard.ResolvedModel)
 	}
-	if out.Standard.Thinking || !out.Standard.Search {
+	if !out.Standard.Thinking || !out.Standard.Search {
 		t.Fatalf("unexpected flags: thinking=%v search=%v", out.Standard.Thinking, out.Standard.Search)
 	}
 }
+
+func TestNormalizeClaudeRequestDisablesThinkingWhenRequested(t *testing.T) {
+	req := map[string]any{
+		"model": "claude-opus-4-6",
+		"messages": []any{
+			map[string]any{"role": "user", "content": "hello"},
+		},
+		"thinking": map[string]any{"type": "disabled"},
+	}
+	out, err := normalizeClaudeRequest(mockClaudeConfig{
+		aliases: map[string]string{
+			"claude-opus-4-6": "deepseek-v4-pro",
+		},
+	}, req)
+	if err != nil {
+		t.Fatalf("normalizeClaudeRequest error: %v", err)
+	}
+	if out.Standard.Thinking {
+		t.Fatalf("expected explicit Claude thinking disable to win")
+	}
+}
+
+func TestNormalizeClaudeRequestEnablesThinkingWhenRequested(t *testing.T) {
+	req := map[string]any{
+		"model": "claude-opus-4-6",

View File

@@ -67,17 +67,12 @@ func (h *Handler) handleClaudeDirect(w http.ResponseWriter, r *http.Request) bool {
 		writeClaudeError(w, http.StatusBadRequest, "invalid json")
 		return true
 	}
-	exposeThinking := false
-	if enabled, ok := util.ResolveThinkingOverride(req); ok && enabled {
-		exposeThinking = true
-	} else if _, ok := util.ResolveThinkingOverride(req); !ok && !util.ToBool(req["stream"]) {
-		req["thinking"] = map[string]any{"type": "enabled"}
-	}
 	norm, err := normalizeClaudeRequest(h.Store, req)
 	if err != nil {
 		writeClaudeError(w, http.StatusBadRequest, err.Error())
 		return true
 	}
+	exposeThinking := norm.Standard.Thinking
 	a, err := h.Auth.Determine(r)
 	if err != nil {
 		writeClaudeError(w, http.StatusUnauthorized, err.Error())
@@ -140,7 +135,7 @@ func (h *Handler) proxyViaOpenAI(w http.ResponseWriter, r *http.Request, store C
 		}
 	}
 	translatedReq := translatorcliproxy.ToOpenAI(sdktranslator.FormatClaude, translateModel, raw, stream)
-	translatedReq, exposeThinking := applyClaudeThinkingPolicyToOpenAIRequest(translatedReq, req, stream)
+	translatedReq, exposeThinking := applyClaudeThinkingPolicyToOpenAIRequest(translatedReq, req)
 	isVercelPrepare := strings.TrimSpace(r.URL.Query().Get("__stream_prepare")) == "1"
 	isVercelRelease := strings.TrimSpace(r.URL.Query().Get("__stream_release")) == "1"
@@ -215,7 +210,7 @@ func (h *Handler) proxyViaOpenAI(w http.ResponseWriter, r *http.Request, store C
 	return true
 }

-func applyClaudeThinkingPolicyToOpenAIRequest(translated []byte, original map[string]any, stream bool) ([]byte, bool) {
+func applyClaudeThinkingPolicyToOpenAIRequest(translated []byte, original map[string]any) ([]byte, bool) {
 	req := map[string]any{}
 	if err := json.Unmarshal(translated, &req); err != nil {
 		return translated, false
@@ -225,7 +220,7 @@ func applyClaudeThinkingPolicyToOpenAIRequest(translated []byte, original map[string]any, stream bool) ([]byte, bool) {
 		if _, translatedHasOverride := util.ResolveThinkingOverride(req); translatedHasOverride {
 			return translated, false
 		}
-		enabled = !stream
+		enabled = true
 	}
 	typ := "disabled"
 	if enabled {
@@ -234,9 +229,9 @@ func applyClaudeThinkingPolicyToOpenAIRequest(translated []byte, original map[string]any, stream bool) ([]byte, bool) {
 	req["thinking"] = map[string]any{"type": typ}
 	out, err := json.Marshal(req)
 	if err != nil {
-		return translated, ok && enabled
+		return translated, enabled
 	}
-	return out, ok && enabled
+	return out, enabled
 }

 func stripClaudeThinkingBlocks(raw []byte) []byte {
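
Net effect of this file's changes: the stream flag no longer influences the default, so a request with no explicit override now gets downstream thinking enabled for both streaming and non-streaming calls, while an explicit override still wins. A hypothetical table-driven test sketching that contract, assuming (as the surrounding hunks suggest) that the unshown top of the function resolves the override from the original request; the scaffolding below is illustrative, not part of the commit:

// Illustrative sketch of the post-change policy; not part of the commit.
func TestApplyClaudeThinkingPolicyDefaultsSketch(t *testing.T) {
	cases := []struct {
		original map[string]any
		want     bool // expected exposeThinking result
	}{
		{map[string]any{}, true}, // no override anywhere: new default is enabled
		{map[string]any{"thinking": map[string]any{"type": "enabled"}}, true},
		{map[string]any{"thinking": map[string]any{"type": "disabled"}}, false},
	}
	for _, c := range cases {
		translated, _ := json.Marshal(map[string]any{"model": "m"})
		_, got := applyClaudeThinkingPolicyToOpenAIRequest(translated, c.original)
		if got != c.want {
			t.Fatalf("original=%v: got %v, want %v", c.original, got, c.want)
		}
	}
}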

View File

@@ -166,7 +166,7 @@ func TestClaudeProxyViaOpenAIEnablesThinkingWhenRequested(t *testing.T) {
 	}
 }

-func TestClaudeProxyViaOpenAIKeepsStreamDefaultThinkingDisabled(t *testing.T) {
+func TestClaudeProxyViaOpenAIEnablesStreamThinkingByDefault(t *testing.T) {
 	openAI := &openAIProxyCaptureStub{}
 	h := &Handler{
 		Store: claudeProxyStoreStub{aliases: map[string]string{"claude-sonnet-4-6": "deepseek-v4-flash"}},
@@ -178,12 +178,12 @@ func TestClaudeProxyViaOpenAIKeepsStreamDefaultThinkingDisabled(t *testing.T) {
 	h.Messages(rec, req)
 	thinking, _ := openAI.seenReq["thinking"].(map[string]any)
-	if thinking["type"] != "disabled" {
-		t.Fatalf("expected Claude stream default to keep downstream thinking disabled, got %#v", openAI.seenReq)
+	if thinking["type"] != "enabled" {
+		t.Fatalf("expected Claude stream default to enable downstream thinking, got %#v", openAI.seenReq)
 	}
 }

-func TestClaudeProxyViaOpenAIStripsThinkingBlocksFromNonStreamResponse(t *testing.T) {
+func TestClaudeProxyViaOpenAIExposesThinkingBlocksByDefault(t *testing.T) {
 	body := `{"id":"chatcmpl_1","object":"chat.completion","created":1,"model":"claude-sonnet-4-5","choices":[{"index":0,"message":{"role":"assistant","content":null,"reasoning_content":"internal reasoning","tool_calls":[{"id":"call_1","type":"function","function":{"name":"search","arguments":"{\"q\":\"x\"}"}}]},"finish_reason":"tool_calls"}],"usage":{"prompt_tokens":1,"completion_tokens":1,"total_tokens":2}}`
 	h := &Handler{OpenAI: openAIProxyStub{status: 200, body: body}}
 	req := httptest.NewRequest(http.MethodPost, "/anthropic/v1/messages", strings.NewReader(`{"model":"claude-sonnet-4-5","messages":[{"role":"user","content":"hi"}],"stream":false}`))
@@ -195,14 +195,31 @@ func TestClaudeProxyViaOpenAIStripsThinkingBlocksFromNonStreamResponse(t *testing.T) {
 		t.Fatalf("unexpected status: %d body=%s", rec.Code, rec.Body.String())
 	}
 	got := rec.Body.String()
-	if strings.Contains(got, `"type":"thinking"`) {
-		t.Fatalf("expected converted Claude response to strip thinking block, got %s", got)
+	if !strings.Contains(got, `"type":"thinking"`) {
+		t.Fatalf("expected converted Claude response to expose thinking block, got %s", got)
 	}
 	if !strings.Contains(got, `"tool_use"`) {
 		t.Fatalf("expected converted Claude response to preserve tool_use, got %s", got)
 	}
 }

+func TestClaudeProxyViaOpenAIStripsThinkingBlocksWhenDisabled(t *testing.T) {
+	body := `{"id":"chatcmpl_1","object":"chat.completion","created":1,"model":"claude-sonnet-4-5","choices":[{"index":0,"message":{"role":"assistant","content":"ok","reasoning_content":"internal reasoning"},"finish_reason":"stop"}],"usage":{"prompt_tokens":1,"completion_tokens":1,"total_tokens":2}}`
+	h := &Handler{OpenAI: openAIProxyStub{status: 200, body: body}}
+	req := httptest.NewRequest(http.MethodPost, "/anthropic/v1/messages", strings.NewReader(`{"model":"claude-sonnet-4-5","messages":[{"role":"user","content":"hi"}],"thinking":{"type":"disabled"},"stream":false}`))
+	rec := httptest.NewRecorder()
+	h.Messages(rec, req)
+	if rec.Code != http.StatusOK {
+		t.Fatalf("unexpected status: %d body=%s", rec.Code, rec.Body.String())
+	}
+	got := rec.Body.String()
+	if strings.Contains(got, `"type":"thinking"`) {
+		t.Fatalf("expected disabled thinking to strip thinking block, got %s", got)
+	}
+}
+
 func TestClaudeProxyTranslatesInlineImageToOpenAIDataURL(t *testing.T) {
 	openAI := &openAIProxyCaptureStub{}
 	h := &Handler{OpenAI: openAI}
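
These tests pivot the default from stripping to exposing thinking blocks, keeping stripping only for an explicit disable. stripClaudeThinkingBlocks itself is not shown in this diff; what follows is a minimal reimplementation sketch of the behavior the tests imply, with illustrative names and types:

// Illustrative only: drops "thinking" blocks from a Claude content array
// when the client disabled thinking; all other block types pass through.
func stripThinkingBlocksSketch(content []map[string]any) []map[string]any {
	out := make([]map[string]any, 0, len(content))
	for _, block := range content {
		if t, _ := block["type"].(string); t == "thinking" {
			continue // discard model reasoning
		}
		out = append(out, block)
	}
	return out
}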

View File

@@ -32,11 +32,11 @@ func normalizeClaudeRequest(store ConfigReader, req map[string]any) (claudeNorma
 	dsPayload := convertClaudeToDeepSeek(payload, store)
 	dsModel, _ := dsPayload["model"].(string)
-	_, searchEnabled, ok := config.GetModelConfig(dsModel)
+	defaultThinkingEnabled, searchEnabled, ok := config.GetModelConfig(dsModel)
 	if !ok {
 		searchEnabled = false
 	}
-	thinkingEnabled := util.ResolveThinkingEnabled(req, false)
+	thinkingEnabled := util.ResolveThinkingEnabled(req, defaultThinkingEnabled)
 	if config.IsNoThinkingModel(dsModel) {
 		thinkingEnabled = false
 	}
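
GetModelConfig previously surfaced only the search capability at this call site; the first return value now seeds the per-model thinking default that ResolveThinkingEnabled falls back to. The config package is not part of the diff, so the following is only an assumed shape consistent with the call; the model names echo the tests above and the table contents are illustrative:

// Assumed shape of the lookup; the real config package is not shown here.
var modelConfigs = map[string]struct{ Thinking, Search bool }{
	"deepseek-v4-pro":        {Thinking: true},
	"deepseek-v4-pro-search": {Thinking: true, Search: true},
}

// GetModelConfig reports a model's default-thinking and search flags,
// and whether the model is known at all.
func GetModelConfig(model string) (thinkingDefault, search, ok bool) {
	c, found := modelConfigs[model]
	return c.Thinking, c.Search, found
}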

View File

@@ -343,8 +343,17 @@ func buildGeminiGenerateContentResponseFromTurn(turn assistantturn.Turn) map[string]any {
 }

 func buildGeminiPartsFromTurn(turn assistantturn.Turn) []map[string]any {
+	thinkingPart := func() []map[string]any {
+		if turn.Thinking == "" {
+			return nil
+		}
+		return []map[string]any{{"text": turn.Thinking, "thought": true}}
+	}
 	if len(turn.ToolCalls) > 0 {
-		parts := make([]map[string]any, 0, len(turn.ToolCalls))
+		parts := thinkingPart()
+		if parts == nil {
+			parts = make([]map[string]any, 0, len(turn.ToolCalls))
+		}
 		for _, tc := range turn.ToolCalls {
 			parts = append(parts, map[string]any{
 				"functionCall": map[string]any{
@@ -355,11 +364,14 @@ func buildGeminiPartsFromTurn(turn assistantturn.Turn) []map[string]any {
 		}
 		return parts
 	}
-	text := turn.Text
-	if text == "" {
-		text = turn.Thinking
+	parts := thinkingPart()
+	if turn.Text != "" {
+		parts = append(parts, map[string]any{"text": turn.Text})
 	}
-	return []map[string]any{{"text": text}}
+	if len(parts) == 0 {
+		parts = append(parts, map[string]any{"text": ""})
+	}
+	return parts
 }

 //nolint:unused // retained for native Gemini non-stream handling path.
@@ -380,8 +392,17 @@ func buildGeminiPartsFromFinal(finalText, finalThinking string, toolNames []string) []map[string]any {
 	if len(detected) == 0 && finalThinking != "" {
 		detected = toolcall.ParseToolCalls(finalThinking, toolNames)
 	}
+	thinkingPart := func() []map[string]any {
+		if finalThinking == "" {
+			return nil
+		}
+		return []map[string]any{{"text": finalThinking, "thought": true}}
+	}
 	if len(detected) > 0 {
-		parts := make([]map[string]any, 0, len(detected))
+		parts := thinkingPart()
+		if parts == nil {
+			parts = make([]map[string]any, 0, len(detected))
+		}
 		for _, tc := range detected {
 			parts = append(parts, map[string]any{
 				"functionCall": map[string]any{
@@ -393,9 +414,12 @@ func buildGeminiPartsFromFinal(finalText, finalThinking string, toolNames []string) []map[string]any {
 		}
 		return parts
 	}
-	text := finalText
-	if text == "" {
-		text = finalThinking
+	parts := thinkingPart()
+	if finalText != "" {
+		parts = append(parts, map[string]any{"text": finalText})
 	}
-	return []map[string]any{{"text": text}}
+	if len(parts) == 0 {
+		parts = append(parts, map[string]any{"text": ""})
+	}
+	return parts
 }
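
With both builders updated, a turn carrying reasoning plus visible text now yields a two-part content array, thought first, instead of collapsing the thinking into the text slot. A self-contained illustration of the resulting Gemini wire shape; the values are made up, and the candidate envelope mirrors the response structure seen elsewhere in this diff:

// Standalone illustration of the "thought part" wire shape; not project code.
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	parts := []map[string]any{
		{"text": "think", "thought": true}, // model reasoning, flagged as a thought part
		{"text": "answer"},                 // user-visible text
	}
	out, _ := json.Marshal(map[string]any{
		"candidates": []map[string]any{
			{"index": 0, "content": map[string]any{"role": "model", "parts": parts}},
		},
	})
	fmt.Println(string(out))
}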

View File

@@ -134,6 +134,21 @@ func (s *geminiStreamRuntime) onParsed(parsed sse.LineResult) streamengine.Parse
 	accumulated := s.accumulator.Apply(parsed)
 	for _, p := range accumulated.Parts {
 		if p.Type == "thinking" {
+			if p.VisibleText == "" || s.bufferContent {
+				continue
+			}
+			s.sendChunk(map[string]any{
+				"candidates": []map[string]any{
+					{
+						"index": 0,
+						"content": map[string]any{
+							"role":  "model",
+							"parts": []map[string]any{{"text": p.VisibleText, "thought": true}},
+						},
+					},
+				},
+				"modelVersion": s.model,
+			})
 			continue
 		}
 		if p.RawText == "" || p.CitationOnly || p.VisibleText == "" {

View File

@@ -257,6 +257,56 @@ func TestStreamGenerateContentEmitsSSE(t *testing.T) {
 	}
 }

+func TestNativeStreamGenerateContentEmitsThoughtParts(t *testing.T) {
+	h := &Handler{}
+	resp := makeGeminiUpstreamResponse(
+		`data: {"p":"response/thinking_content","v":"think"}`,
+		`data: {"p":"response/content","v":"answer"}`,
+		`data: [DONE]`,
+	)
+	rec := httptest.NewRecorder()
+	req := httptest.NewRequest(http.MethodPost, "/v1beta/models/gemini-2.5-pro:streamGenerateContent", nil)
+	h.handleStreamGenerateContent(rec, req, resp, "gemini-2.5-pro", "prompt", true, false, nil, nil)
+	frames := extractGeminiSSEFrames(t, rec.Body.String())
+	if len(frames) < 2 {
+		t.Fatalf("expected thought and text stream frames, body=%s", rec.Body.String())
+	}
+	var gotThought, gotText string
+	for _, frame := range frames {
+		for _, part := range geminiPartsFromFrame(frame) {
+			if part["thought"] == true {
+				gotThought += asString(part["text"])
+			} else {
+				gotText += asString(part["text"])
+			}
+		}
+	}
+	if gotThought != "think" {
+		t.Fatalf("expected thought part, got %q body=%s", gotThought, rec.Body.String())
+	}
+	if !strings.Contains(gotText, "answer") {
+		t.Fatalf("expected text part answer, got %q body=%s", gotText, rec.Body.String())
+	}
+}
+
+func TestBuildGeminiPartsFromFinalIncludesThoughtPart(t *testing.T) {
+	parts := buildGeminiPartsFromFinal("answer", "think", nil)
+	if len(parts) != 2 {
+		t.Fatalf("expected thought + answer parts, got %#v", parts)
+	}
+	if parts[0]["thought"] != true || parts[0]["text"] != "think" {
+		t.Fatalf("expected first part to be thought, got %#v", parts[0])
+	}
+	if _, ok := parts[1]["thought"]; ok {
+		t.Fatalf("expected second part to be visible text, got %#v", parts[1])
+	}
+	if parts[1]["text"] != "answer" {
+		t.Fatalf("expected answer text, got %#v", parts[1])
+	}
+}
+
 func TestGeminiProxyTranslatesInlineImageToOpenAIDataURL(t *testing.T) {
 	openAI := &geminiOpenAISuccessStub{}
 	h := &Handler{Store: testGeminiConfig{}, OpenAI: openAI}
@@ -396,3 +446,21 @@ func extractGeminiSSEFrames(t *testing.T, body string) []map[string]any {
 	}
 	return out
 }
+
+func geminiPartsFromFrame(frame map[string]any) []map[string]any {
+	candidates, _ := frame["candidates"].([]any)
+	if len(candidates) == 0 {
+		return nil
+	}
+	c0, _ := candidates[0].(map[string]any)
+	content, _ := c0["content"].(map[string]any)
+	rawParts, _ := content["parts"].([]any)
+	parts := make([]map[string]any, 0, len(rawParts))
+	for _, raw := range rawParts {
+		part, _ := raw.(map[string]any)
+		if part != nil {
+			parts = append(parts, part)
+		}
+	}
+	return parts
+}
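
A quick usage sketch for the helper above, fed a frame shaped like the chunks the stream runtime now emits for thought deltas; the frame literal and the test wrapper are illustrative, not part of the commit:

// Illustrative usage of geminiPartsFromFrame; not part of the commit.
func TestGeminiPartsFromFrameSketch(t *testing.T) {
	frame := map[string]any{
		"candidates": []any{
			map[string]any{
				"content": map[string]any{
					"role":  "model",
					"parts": []any{map[string]any{"text": "think", "thought": true}},
				},
			},
		},
	}
	parts := geminiPartsFromFrame(frame)
	if len(parts) != 1 || parts[0]["thought"] != true || parts[0]["text"] != "think" {
		t.Fatalf("unexpected parts: %#v", parts)
	}
}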