From 0bdbb3a4ef5ddf0433a2081c5370f234b2a943eb Mon Sep 17 00:00:00 2001
From: "CJACK."
Date: Tue, 17 Feb 2026 12:08:45 +0800
Subject: [PATCH] Stream Go proxy responses to Vercel clients

---
 api/chat-stream.js                       | 32 +++++++++++++++++++++---
 internal/adapter/openai/vercel_stream.go |  8 +++---
 2 files changed, 32 insertions(+), 8 deletions(-)

diff --git a/api/chat-stream.js b/api/chat-stream.js
index 6281b28..852f58b 100644
--- a/api/chat-stream.js
+++ b/api/chat-stream.js
@@ -62,7 +62,7 @@ module.exports = async function handler(req, res) {
   }
 
   // Keep all non-stream behavior on Go side to avoid compatibility regressions.
-  if (!toBool(payload.stream) || (Array.isArray(payload.tools) && payload.tools.length > 0)) {
+  if (!toBool(payload.stream)) {
     await proxyToGo(req, res, rawBody);
     return;
   }
@@ -626,8 +626,34 @@ async function proxyToGo(req, res, rawBody) {
     }
     res.setHeader(key, value);
   });
-  const bytes = Buffer.from(await upstream.arrayBuffer());
-  res.end(bytes);
+
+  if (!upstream.body || typeof upstream.body.getReader !== 'function') {
+    const bytes = Buffer.from(await upstream.arrayBuffer());
+    res.end(bytes);
+    return;
+  }
+
+  const reader = upstream.body.getReader();
+  try {
+    // eslint-disable-next-line no-constant-condition
+    while (true) {
+      const { value, done } = await reader.read();
+      if (done) {
+        break;
+      }
+      if (value && value.length > 0) {
+        res.write(Buffer.from(value));
+        if (typeof res.flush === 'function') {
+          res.flush();
+        }
+      }
+    }
+    res.end();
+  } catch (_err) {
+    if (!res.writableEnded) {
+      res.end();
+    }
+  }
 }
 
 function writeOpenAIError(res, status, message) {
diff --git a/internal/adapter/openai/vercel_stream.go b/internal/adapter/openai/vercel_stream.go
index 7fceb93..653f3cf 100644
--- a/internal/adapter/openai/vercel_stream.go
+++ b/internal/adapter/openai/vercel_stream.go
@@ -56,11 +56,6 @@ func (h *Handler) handleVercelStreamPrepare(w http.ResponseWriter, r *http.Reque
 		writeOpenAIError(w, http.StatusBadRequest, "stream must be true")
 		return
 	}
-	if tools, ok := req["tools"].([]any); ok && len(tools) > 0 {
-		writeOpenAIError(w, http.StatusBadRequest, "tools are not supported by vercel stream prepare")
-		return
-	}
-
 	model, _ := req["model"].(string)
 	messagesRaw, _ := req["messages"].([]any)
 	if model == "" || len(messagesRaw) == 0 {
@@ -74,6 +69,9 @@ func (h *Handler) handleVercelStreamPrepare(w http.ResponseWriter, r *http.Reque
 	}
 
 	messages := normalizeMessages(messagesRaw)
+	if tools, ok := req["tools"].([]any); ok && len(tools) > 0 {
+		messages, _ = injectToolPrompt(messages, tools)
+	}
 	finalPrompt := util.MessagesPrepare(messages)
 
 	sessionID, err := h.DS.CreateSession(r.Context(), a, 3)
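
Note (not part of the patch): the new JS read loop only sees incremental chunks if the Go side flushes after each SSE event instead of buffering the whole body. A minimal generic sketch of that counterpart pattern with net/http follows; the endpoint path, payload shape, and timing are illustrative placeholders, not this repo's handler.

package main

import (
	"fmt"
	"net/http"
	"time"
)

func streamHandler(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Content-Type", "text/event-stream")
	w.Header().Set("Cache-Control", "no-cache")

	// Without http.Flusher the response is buffered and the Vercel
	// function's reader.read() loop would receive one big chunk at the end.
	flusher, ok := w.(http.Flusher)
	if !ok {
		http.Error(w, "streaming unsupported", http.StatusInternalServerError)
		return
	}

	for i := 0; i < 3; i++ {
		// Each Fprintf+Flush pair arrives as a separate chunk in the
		// JS reader.read() loop on the Vercel side.
		fmt.Fprintf(w, "data: {\"chunk\": %d}\n\n", i)
		flusher.Flush()
		time.Sleep(100 * time.Millisecond)
	}
	fmt.Fprint(w, "data: [DONE]\n\n")
	flusher.Flush()
}

func main() {
	http.HandleFunc("/v1/chat/completions", streamHandler)
	http.ListenAndServe("127.0.0.1:8080", nil)
}

This is also why the JS side calls res.flush() when available: both hops have to forward each chunk eagerly for the client to see tokens as they are generated.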
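
Note (not part of the patch): injectToolPrompt is called in the Go hunk but not defined in this diff. Purely as a hypothetical illustration of the call shape (messages and tools in, messages plus a discarded second value out), a stand-in might prepend a system message describing the tools; the message type and prompt wording below are assumptions.

package openai

import (
	"encoding/json"
	"fmt"
)

// injectToolPromptSketch is an illustrative stand-in for the repo's
// injectToolPrompt; the real implementation is not part of this diff.
func injectToolPromptSketch(messages []map[string]any, tools []any) ([]map[string]any, error) {
	spec, err := json.MarshalIndent(tools, "", "  ")
	if err != nil {
		return messages, err
	}
	system := map[string]any{
		"role": "system",
		"content": fmt.Sprintf(
			"You can call these tools; answer with a JSON tool call when one applies:\n%s",
			spec,
		),
	}
	// Prepend so the tool description precedes the conversation history.
	return append([]map[string]any{system}, messages...), nil
}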