From 8ab028c52ac7162c052bd141b2acd64e9660fe6f Mon Sep 17 00:00:00 2001 From: shern-point Date: Thu, 30 Apr 2026 00:44:59 +0800 Subject: [PATCH] feat: seed PromptTokenText during request normalization Capture the fully built prompt at normalization time for OpenAI-compatible requests so usage paths can reuse the original context text. --- internal/promptcompat/request_normalize.go | 58 +++++++++++----------- 1 file changed, 30 insertions(+), 28 deletions(-) diff --git a/internal/promptcompat/request_normalize.go b/internal/promptcompat/request_normalize.go index 8efa772..fbb9d4c 100644 --- a/internal/promptcompat/request_normalize.go +++ b/internal/promptcompat/request_normalize.go @@ -39,20 +39,21 @@ func NormalizeOpenAIChatRequest(store ConfigReader, req map[string]any, traceID refFileIDs := CollectOpenAIRefFileIDs(req) return StandardRequest{ - Surface: "openai_chat", - RequestedModel: strings.TrimSpace(model), - ResolvedModel: resolvedModel, - ResponseModel: responseModel, - Messages: messagesRaw, - ToolsRaw: req["tools"], - FinalPrompt: finalPrompt, - ToolNames: toolNames, - ToolChoice: toolPolicy, - Stream: util.ToBool(req["stream"]), - Thinking: thinkingEnabled, - Search: searchEnabled, - RefFileIDs: refFileIDs, - PassThrough: passThrough, + Surface: "openai_chat", + RequestedModel: strings.TrimSpace(model), + ResolvedModel: resolvedModel, + ResponseModel: responseModel, + Messages: messagesRaw, + PromptTokenText: finalPrompt, + ToolsRaw: req["tools"], + FinalPrompt: finalPrompt, + ToolNames: toolNames, + ToolChoice: toolPolicy, + Stream: util.ToBool(req["stream"]), + Thinking: thinkingEnabled, + Search: searchEnabled, + RefFileIDs: refFileIDs, + PassThrough: passThrough, }, nil } @@ -99,20 +100,21 @@ func NormalizeOpenAIResponsesRequest(store ConfigReader, req map[string]any, tra refFileIDs := CollectOpenAIRefFileIDs(req) return StandardRequest{ - Surface: "openai_responses", - RequestedModel: model, - ResolvedModel: resolvedModel, - ResponseModel: model, - Messages: messagesRaw, - ToolsRaw: req["tools"], - FinalPrompt: finalPrompt, - ToolNames: toolNames, - ToolChoice: toolPolicy, - Stream: util.ToBool(req["stream"]), - Thinking: thinkingEnabled, - Search: searchEnabled, - RefFileIDs: refFileIDs, - PassThrough: passThrough, + Surface: "openai_responses", + RequestedModel: model, + ResolvedModel: resolvedModel, + ResponseModel: model, + Messages: messagesRaw, + PromptTokenText: finalPrompt, + ToolsRaw: req["tools"], + FinalPrompt: finalPrompt, + ToolNames: toolNames, + ToolChoice: toolPolicy, + Stream: util.ToBool(req["stream"]), + Thinking: thinkingEnabled, + Search: searchEnabled, + RefFileIDs: refFileIDs, + PassThrough: passThrough, }, nil }