mirror of https://github.com/CJackHwang/ds2api.git · synced 2026-05-04 08:25:26 +08:00
Preserve Gemini prompt token text during normalization and remove the hardcoded DeepSeek model from native Gemini usage helpers.
200 lines · 5.3 KiB · Go
package gemini

import (
    "encoding/json"
    "io"
    "net/http"
    "strings"
    "time"

    dsprotocol "ds2api/internal/deepseek/protocol"
    "ds2api/internal/sse"
    streamengine "ds2api/internal/stream"
)

// handleStreamGenerateContent streams a native Gemini generateContent response:
// it forwards the upstream SSE body through the shared stream engine and emits
// Gemini-style chunks via geminiStreamRuntime.
//nolint:unused // retained for native Gemini stream handling path.
func (h *Handler) handleStreamGenerateContent(w http.ResponseWriter, r *http.Request, resp *http.Response, model, finalPrompt string, thinkingEnabled, searchEnabled bool, toolNames []string) {
    defer func() { _ = resp.Body.Close() }()
    if resp.StatusCode != http.StatusOK {
        body, _ := io.ReadAll(resp.Body)
        writeGeminiError(w, resp.StatusCode, strings.TrimSpace(string(body)))
        return
    }

    w.Header().Set("Content-Type", "text/event-stream")
    w.Header().Set("Cache-Control", "no-cache, no-transform")
    w.Header().Set("Connection", "keep-alive")
    w.Header().Set("X-Accel-Buffering", "no")

    rc := http.NewResponseController(w)
    _, canFlush := w.(http.Flusher)
    runtime := newGeminiStreamRuntime(w, rc, canFlush, model, finalPrompt, thinkingEnabled, searchEnabled, h.compatStripReferenceMarkers(), toolNames)

    initialType := "text"
    if thinkingEnabled {
        initialType = "thinking"
    }
    streamengine.ConsumeSSE(streamengine.ConsumeConfig{
        Context:             r.Context(),
        Body:                resp.Body,
        ThinkingEnabled:     thinkingEnabled,
        InitialType:         initialType,
        KeepAliveInterval:   time.Duration(dsprotocol.KeepAliveTimeout) * time.Second,
        IdleTimeout:         time.Duration(dsprotocol.StreamIdleTimeout) * time.Second,
        MaxKeepAliveNoInput: dsprotocol.MaxKeepaliveCount,
    }, streamengine.ConsumeHooks{
        OnParsed: runtime.onParsed,
        OnFinalize: func(_ streamengine.StopReason, _ error) {
            runtime.finalize()
        },
    })
}

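The call above pins down the surface of the shared stream engine this handler depends on. The real definitions live in ds2api/internal/stream and are not shown here; the following shapes are inferred from this file alone, so anything beyond what the call proves (for example, Body being an io.Reader) is an assumption:

// Sketch of the stream-engine types as used above; the actual types in
// ds2api/internal/stream may carry more fields. Assumes the context, io,
// time, and sse imports from this file.
type ConsumeConfig struct {
    Context             context.Context
    Body                io.Reader     // upstream SSE body (assumption: io.Reader)
    ThinkingEnabled     bool
    InitialType         string        // "text" or "thinking"
    KeepAliveInterval   time.Duration // cadence of keep-alive pings
    IdleTimeout         time.Duration // abort after this much upstream silence
    MaxKeepAliveNoInput int           // stop after this many pings with no data
}

type ConsumeHooks struct {
    OnParsed   func(sse.LineResult) ParsedDecision // per-event callback
    OnFinalize func(StopReason, error)             // called once when the stream ends
}
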
// geminiStreamRuntime accumulates thinking and visible text from the upstream
// stream and writes Gemini-style SSE chunks to the client.
//nolint:unused // retained for native Gemini stream handling path.
type geminiStreamRuntime struct {
    w        http.ResponseWriter
    rc       *http.ResponseController
    canFlush bool

    model       string
    finalPrompt string

    thinkingEnabled       bool
    searchEnabled         bool
    bufferContent         bool
    stripReferenceMarkers bool
    toolNames             []string

    thinking strings.Builder
    text     strings.Builder
}

//nolint:unused // retained for native Gemini stream handling path.
func newGeminiStreamRuntime(
    w http.ResponseWriter,
    rc *http.ResponseController,
    canFlush bool,
    model string,
    finalPrompt string,
    thinkingEnabled bool,
    searchEnabled bool,
    stripReferenceMarkers bool,
    toolNames []string,
) *geminiStreamRuntime {
    return &geminiStreamRuntime{
        w:                     w,
        rc:                    rc,
        canFlush:              canFlush,
        model:                 model,
        finalPrompt:           finalPrompt,
        thinkingEnabled:       thinkingEnabled,
        searchEnabled:         searchEnabled,
        bufferContent:         len(toolNames) > 0, // buffer the whole reply when tools are declared
        stripReferenceMarkers: stripReferenceMarkers,
        toolNames:             toolNames,
    }
}

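A note on bufferContent: it is set exactly when the request declares tools. In that mode onParsed (below) accumulates text without emitting chunks, and finalize sends everything in one piece through buildGeminiPartsFromFinal, presumably so tool-call syntax that spans chunk boundaries can be recognized in the complete text. That rationale is an inference from this file, not confirmed by the helper's source.
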
// sendChunk serializes one payload as a single SSE "data:" event and flushes
// immediately when the ResponseWriter supports it.
//nolint:unused // retained for native Gemini stream handling path.
func (s *geminiStreamRuntime) sendChunk(payload map[string]any) {
    b, _ := json.Marshal(payload)
    _, _ = s.w.Write([]byte("data: "))
    _, _ = s.w.Write(b)
    _, _ = s.w.Write([]byte("\n\n"))
    if s.canFlush {
        _ = s.rc.Flush()
    }
}

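On the wire, each call therefore produces one event terminated by a blank line. A representative chunk as built by onParsed below (the text and model values are illustrative):

data: {"candidates":[{"index":0,"content":{"role":"model","parts":[{"text":"Hello"}]}}],"modelVersion":"example-model"}
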
// onParsed routes each parsed upstream part: thinking parts are accumulated,
// visible text is deduplicated against what was already seen and then either
// buffered (tool mode) or streamed immediately as a Gemini chunk.
//nolint:unused // retained for native Gemini stream handling path.
func (s *geminiStreamRuntime) onParsed(parsed sse.LineResult) streamengine.ParsedDecision {
    if !parsed.Parsed {
        return streamengine.ParsedDecision{}
    }
    if parsed.ContentFilter || parsed.ErrorMessage != "" || parsed.Stop {
        return streamengine.ParsedDecision{Stop: true}
    }

    contentSeen := false
    for _, p := range parsed.Parts {
        cleanedText := cleanVisibleOutput(p.Text, s.stripReferenceMarkers)
        if cleanedText == "" {
            continue
        }
        if p.Type != "thinking" && s.searchEnabled && sse.IsCitation(cleanedText) {
            continue
        }
        contentSeen = true
        if p.Type == "thinking" {
            if s.thinkingEnabled {
                trimmed := sse.TrimContinuationOverlap(s.thinking.String(), cleanedText)
                if trimmed == "" {
                    continue
                }
                s.thinking.WriteString(trimmed)
            }
            continue
        }
        trimmed := sse.TrimContinuationOverlap(s.text.String(), cleanedText)
        if trimmed == "" {
            continue
        }
        s.text.WriteString(trimmed)
        if s.bufferContent {
            continue
        }
        s.sendChunk(map[string]any{
            "candidates": []map[string]any{
                {
                    "index": 0,
                    "content": map[string]any{
                        "role":  "model",
                        "parts": []map[string]any{{"text": trimmed}},
                    },
                },
            },
            "modelVersion": s.model,
        })
    }
    return streamengine.ParsedDecision{ContentSeen: contentSeen}
}

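sse.TrimContinuationOverlap is implemented elsewhere in the repo. Judging from its call sites here, it returns the portion of a new chunk that is not already present at the tail of the accumulated text, and returns "" when the chunk adds nothing, guarding against upstream continuations that resend overlapping text. A minimal sketch of that assumed behavior (not the repo's actual implementation; uses the strings import above):

// trimContinuationOverlap is a hypothetical stand-in for
// sse.TrimContinuationOverlap, based only on its usage in onParsed.
// It drops the longest prefix of next that matches a suffix of accumulated.
func trimContinuationOverlap(accumulated, next string) string {
    limit := len(next)
    if len(accumulated) < limit {
        limit = len(accumulated)
    }
    for n := limit; n > 0; n-- {
        if strings.HasSuffix(accumulated, next[:n]) {
            return next[n:]
        }
    }
    return next
}
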
// finalize flushes buffered content (tool mode), then emits the closing chunk
// with finishReason STOP and usage metadata.
//nolint:unused // retained for native Gemini stream handling path.
func (s *geminiStreamRuntime) finalize() {
    finalThinking := s.thinking.String()
    finalText := cleanVisibleOutput(s.text.String(), s.stripReferenceMarkers)

    if s.bufferContent {
        parts := buildGeminiPartsFromFinal(finalText, finalThinking, s.toolNames)
        s.sendChunk(map[string]any{
            "candidates": []map[string]any{
                {
                    "index": 0,
                    "content": map[string]any{
                        "role":  "model",
                        "parts": parts,
                    },
                },
            },
            "modelVersion": s.model,
        })
    }

    s.sendChunk(map[string]any{
        "candidates": []map[string]any{
            {
                "index": 0,
                "content": map[string]any{
                    "role": "model",
                    "parts": []map[string]any{
                        {"text": ""},
                    },
                },
                "finishReason": "STOP",
            },
        },
        "modelVersion":  s.model,
        "usageMetadata": buildGeminiUsage(s.model, s.finalPrompt, finalThinking, finalText),
    })
}
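Per the commit message, the hardcoded DeepSeek model was removed from the native Gemini usage helpers, which is why finalize now passes s.model into buildGeminiUsage. The helper itself is not in this file; below is a minimal sketch of the shape the call implies. The field names follow Gemini's usageMetadata, but the whitespace token count is a placeholder assumption (the real helper presumably estimates tokens properly, keyed by model):

// buildGeminiUsage sketch: the signature matches the call in finalize; the
// body is a hypothetical approximation, not the repo's implementation.
func buildGeminiUsage(model, prompt, thinking, text string) map[string]any {
    promptTokens := len(strings.Fields(prompt))
    thoughtTokens := len(strings.Fields(thinking))
    candidateTokens := len(strings.Fields(text))
    usage := map[string]any{
        "promptTokenCount":     promptTokens,
        "candidatesTokenCount": candidateTokens,
        "totalTokenCount":      promptTokens + thoughtTokens + candidateTokens,
    }
    if thoughtTokens > 0 {
        usage["thoughtsTokenCount"] = thoughtTokens
    }
    _ = model // in the real helper, model would presumably select the estimator
    return usage
}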