package sse

import (
	"net/http"
	"strings"

	dsprotocol "ds2api/internal/deepseek/protocol"
)

// CollectResult is the aggregate of a fully-consumed DeepSeek SSE stream:
// the assembled text and thinking channels, whether the upstream flagged a
// content filter, and any citation links gathered along the way.
type CollectResult struct {
	Text          string
	Thinking      string
	ContentFilter bool
	CitationLinks map[int]string
}

// CollectStream drains a DeepSeek SSE response to completion and splits the
// payload into thinking and text content (the non-streaming use case). It
// centralizes the collection logic previously duplicated in
// openai.handleNonStream, claude.collectDeepSeek, and admin.testAccount.
//
// resp.Body is closed here only when closeBody is true; otherwise the
// caller retains responsibility for closing it.
func CollectStream(resp *http.Response, thinkingEnabled bool, closeBody bool) CollectResult {
	if closeBody {
		defer func() { _ = resp.Body.Close() }()
	}

	var (
		textBuf     strings.Builder
		thinkingBuf strings.Builder
		filtered    bool
		finished    bool
	)
	links := newCitationLinkCollector()

	// The stream starts in thinking mode when reasoning is enabled,
	// otherwise plain text; ParseDeepSeekContentLine advances the mode.
	mode := "text"
	if thinkingEnabled {
		mode = "thinking"
	}

	// Best-effort drain: a scan error simply yields whatever was collected.
	_ = dsprotocol.ScanSSELines(resp, func(line []byte) bool {
		// Feed every parsed chunk to the citation collector; [DONE]
		// terminates the scan outright.
		switch ev, isDone, ok := ParseDeepSeekSSELine(line); {
		case isDone:
			return false
		case ok:
			links.ingestChunk(ev)
		}

		if finished {
			// FINISHED already seen: keep scanning solely for the
			// late citation metadata handled above, until [DONE].
			return true
		}

		res := ParseDeepSeekContentLine(line, thinkingEnabled, mode)
		mode = res.NextType
		if !res.Parsed {
			return true
		}
		if res.Stop {
			filtered = filtered || res.ContentFilter
			finished = true
			return true
		}

		// Route each part to its builder, trimming any overlap with
		// content already accumulated (continuation chunks can repeat
		// the tail of the previous one).
		for _, part := range res.Parts {
			dst := &textBuf
			if part.Type == "thinking" {
				dst = &thinkingBuf
			}
			dst.WriteString(TrimContinuationOverlap(dst.String(), part.Text))
		}
		return true
	})

	return CollectResult{
		Text:          textBuf.String(),
		Thinking:      thinkingBuf.String(),
		ContentFilter: filtered,
		CitationLinks: links.build(),
	}
}