feat: implement error handling for empty upstream responses in chat streams and update UI to display stream-level errors

This commit is contained in:
CJACK
2026-04-13 03:22:38 +08:00
parent acb110865f
commit d78789a66e
4 changed files with 124 additions and 1 deletions

View File

@@ -98,6 +98,19 @@ func (s *chatStreamRuntime) sendDone() {
}
}
// sendFailedChunk emits one terminal SSE frame describing a stream-level
// failure — an HTTP-style status code plus an OpenAI-shaped error object —
// and then signals end-of-stream via sendDone.
func (s *chatStreamRuntime) sendFailedChunk(status int, message, code string) {
	// Build the OpenAI-compatible error payload separately for readability;
	// "param" is always nil for stream-level failures.
	errPayload := map[string]any{
		"message": message,
		"type":    openAIErrorType(status),
		"code":    code,
		"param":   nil,
	}
	s.sendChunk(map[string]any{
		"status_code": status,
		"error":       errPayload,
	})
	s.sendDone()
}
func (s *chatStreamRuntime) finalize(finishReason string) {
finalThinking := s.thinking.String()
finalText := cleanVisibleOutput(s.text.String(), s.stripReferenceMarkers)
@@ -168,6 +181,21 @@ func (s *chatStreamRuntime) finalize(finishReason string) {
if len(detected.Calls) > 0 || s.toolCallsEmitted {
finishReason = "tool_calls"
}
if len(detected.Calls) == 0 && !s.toolCallsEmitted && strings.TrimSpace(finalText) == "" {
status := http.StatusTooManyRequests
message := "Upstream model returned empty output."
code := "upstream_empty_output"
if strings.TrimSpace(finalThinking) != "" {
message = "Upstream model returned reasoning without visible output."
}
if finishReason == "content_filter" {
status = http.StatusBadRequest
message = "Upstream content filtered the response and returned no output."
code = "content_filter"
}
s.sendFailedChunk(status, message, code)
return
}
usage := openaifmt.BuildChatUsage(s.finalPrompt, finalThinking, finalText)
s.sendChunk(openaifmt.BuildChatStreamChunk(
s.completionID,
@@ -184,6 +212,9 @@ func (s *chatStreamRuntime) onParsed(parsed sse.LineResult) streamengine.ParsedD
return streamengine.ParsedDecision{}
}
if parsed.ContentFilter {
if strings.TrimSpace(s.text.String()) == "" {
return streamengine.ParsedDecision{Stop: true, StopReason: streamengine.StopReason("content_filter")}
}
return streamengine.ParsedDecision{Stop: true, StopReason: streamengine.StopReasonHandlerRequested}
}
if parsed.ErrorMessage != "" {

View File

@@ -243,6 +243,49 @@ func TestChatCompletionsStreamContentFilterStopsNormallyWithoutLeak(t *testing.T
}
}
// TestChatCompletionsStreamEmitsFailureFrameWhenUpstreamOutputEmpty verifies
// that when the upstream stream ends with no visible output, the HTTP layer
// still reports 200 but the SSE body carries exactly one failure frame
// (status_code 429, error.code "upstream_empty_output") followed by [DONE].
func TestChatCompletionsStreamEmitsFailureFrameWhenUpstreamOutputEmpty(t *testing.T) {
	captured := make([]int, 0, 1)
	handler := &Handler{
		Store: mockOpenAIConfig{wideInput: true},
		Auth:  streamStatusAuthStub{},
		// Upstream immediately terminates the stream with no content frames.
		DS: streamStatusDSStub{resp: makeOpenAISSEHTTPResponse("data: [DONE]")},
	}
	router := chi.NewRouter()
	router.Use(captureStatusMiddleware(&captured))
	RegisterRoutes(router, handler)

	body := `{"model":"deepseek-chat","messages":[{"role":"user","content":"hi"}],"stream":true}`
	req := httptest.NewRequest(http.MethodPost, "/v1/chat/completions", strings.NewReader(body))
	req.Header.Set("Authorization", "Bearer direct-token")
	req.Header.Set("Content-Type", "application/json")
	rec := httptest.NewRecorder()
	router.ServeHTTP(rec, req)

	// The failure is delivered in-band over SSE; the HTTP status stays 200.
	if rec.Code != http.StatusOK {
		t.Fatalf("expected 200, got %d body=%s", rec.Code, rec.Body.String())
	}
	if len(captured) != 1 || captured[0] != http.StatusOK {
		t.Fatalf("expected captured status 200, got %#v", captured)
	}

	frames, sawDone := parseSSEDataFrames(t, rec.Body.String())
	if !sawDone {
		t.Fatalf("expected [DONE], body=%s", rec.Body.String())
	}
	if len(frames) != 1 {
		t.Fatalf("expected one failure frame, got %#v body=%s", frames, rec.Body.String())
	}

	frame := frames[0]
	// JSON numbers decode as float64, so assert through that type.
	statusCode, ok := frame["status_code"].(float64)
	if !ok || int(statusCode) != http.StatusTooManyRequests {
		t.Fatalf("expected status_code=429, got %#v body=%s", frame["status_code"], rec.Body.String())
	}
	errObj, _ := frame["error"].(map[string]any)
	if asString(errObj["code"]) != "upstream_empty_output" {
		t.Fatalf("expected code=upstream_empty_output, got %#v", frame)
	}
}
func TestResponsesStreamUsageIgnoresBatchAccumulatedTokenUsage(t *testing.T) {
statuses := make([]int, 0, 1)
h := &Handler{

View File

@@ -133,7 +133,9 @@ export default function ChatPanel({
)}
<div className="text-sm leading-7 text-foreground whitespace-pre-wrap">
{streamingContent || response?.choices?.[0]?.message?.content || (response?.error && <span className="text-destructive font-medium">{response.error}</span>) || (loading && <span className="text-muted-foreground italic">{t('apiTester.generating')}</span>)}
{response?.success === false
? <span className="text-destructive font-medium">{response.error || t('apiTester.requestFailed')}</span>
: (streamingContent || response?.choices?.[0]?.message?.content || (loading && <span className="text-muted-foreground italic">{t('apiTester.generating')}</span>))}
{isStreaming && <span className="inline-block w-1.5 h-4 bg-primary ml-1 align-middle animate-pulse" />}
</div>
</div>

View File

@@ -63,6 +63,28 @@ export function useChatStreamClient({
}
}, [attachedFiles, t])
// Normalizes one parsed SSE frame into a stream-level error descriptor, or
// null when the frame carries no error object. Falls back to the translated
// generic message when the payload has no usable message text, and to 429
// (or 400 for content_filter) when it omits a positive status code.
const extractStreamError = useCallback((json) => {
	const err = json?.error
	if (!err || typeof err !== 'object') return null

	let message = t('apiTester.requestFailed')
	if (typeof err.message === 'string' && err.message.trim()) {
		message = err.message.trim()
	}

	// Accept the status from any of the shapes backends emit; Number(...)
	// of undefined yields NaN, which the isFinite guard rejects.
	const parsed = Number(json?.status_code ?? err.status_code ?? err.http_status)
	let statusCode
	if (Number.isFinite(parsed) && parsed > 0) {
		statusCode = parsed
	} else {
		statusCode = err.code === 'content_filter' ? 400 : 429
	}

	return {
		message,
		statusCode,
		code: typeof err.code === 'string' ? err.code : '',
		type: typeof err.type === 'string' ? err.type : '',
	}
}, [t])
const runTest = useCallback(async () => {
if (!effectiveKey) {
onMessage('error', t('apiTester.missingApiKey'))
@@ -141,7 +163,9 @@ export function useChatStreamClient({
let buffer = ''
let accumulatedThinking = ''
let accumulatedContent = ''
let streamError = null
streamLoop:
while (true) {
const { done, value } = await reader.read()
if (done) break
@@ -159,6 +183,11 @@ export function useChatStreamClient({
try {
const json = JSON.parse(dataStr)
const errorPayload = extractStreamError(json)
if (errorPayload) {
streamError = errorPayload
break streamLoop
}
const choice = json.choices?.[0]
if (choice?.delta) {
const delta = choice.delta
@@ -177,6 +206,23 @@ export function useChatStreamClient({
}
}
if (streamError) {
await reader.cancel().catch(() => {})
setStreamingContent('')
setStreamingThinking('')
setResponse({
success: false,
status_code: streamError.statusCode,
error: streamError.message,
code: streamError.code,
type: streamError.type,
})
onMessage('error', streamError.message)
setLoading(false)
setIsStreaming(false)
return
}
setResponse({
success: true,
status_code: res.status,
@@ -214,6 +260,7 @@ export function useChatStreamClient({
attachedFiles,
effectiveKey,
extractErrorMessage,
extractStreamError,
message,
model,
onMessage,