feat(openai): add root route aliases

This commit is contained in:
NgoQuocViet2001
2026-04-30 01:24:53 +07:00
parent 85e256ad4d
commit 7dc3af40b2
8 changed files with 26 additions and 3 deletions

View File

@@ -165,6 +165,8 @@ Gemini-compatible clients can also send `x-goog-api-key`, `?key=`, or `?api_key=
| PUT | `/admin/chat-history/settings` | Admin | Update conversation history retention limit |
| GET | `/admin/version` | Admin | Check current version and latest Release |
OpenAI `/v1/*` paths are canonical. For clients configured with the bare DS2API service URL, the same OpenAI handlers are also exposed through root shortcuts: `/models`, `/models/{id}`, `/chat/completions`, `/responses`, `/responses/{response_id}`, `/embeddings`, and `/files`.
---
## Health Endpoints

2
API.md
View File

@@ -165,6 +165,8 @@ Gemini 兼容客户端还可以使用 `x-goog-api-key`、`?key=` 或 `?api_key=`
| PUT | `/admin/chat-history/settings` | Admin | 更新对话记录保留条数 |
| GET | `/admin/version` | Admin | 查询当前版本与最新 Release |
OpenAI `/v1/*` 仍是规范路径。对于只配置 DS2API 根地址的客户端,同一套 OpenAI handler 也通过根路径快捷路由暴露:`/models`、`/models/{id}`、`/chat/completions`、`/responses`、`/responses/{response_id}`、`/embeddings`、`/files`。
---
## 健康检查

View File

@@ -131,6 +131,8 @@ flowchart LR
| WebUI 管理台 | `/admin` 单页应用(中英文双语、深色模式,支持查看服务器端对话记录) |
| 运维探针 | `GET /healthz`(存活)、`GET /readyz`(就绪) |
OpenAI `/v1/*` 仍是推荐的规范路径;同时支持 `/models`、`/chat/completions`、`/responses`、`/embeddings`、`/files` 等根路径快捷路由,方便只配置 DS2API 根地址的第三方客户端。
## 平台兼容矩阵
| 级别 | 平台 | 当前状态 |

View File

@@ -128,6 +128,8 @@ For the full module-by-module architecture and directory responsibilities, see [
| WebUI Admin Panel | SPA at `/admin` (bilingual Chinese/English, dark mode, with server-side conversation history) |
| Health Probes | `GET /healthz` (liveness), `GET /readyz` (readiness) |
OpenAI `/v1/*` routes remain canonical, and DS2API also accepts root shortcuts such as `/models`, `/chat/completions`, `/responses`, `/embeddings`, and `/files` for clients configured with the bare service URL.
## Platform Compatibility Matrix
| Tier | Platform | Status |

View File

@@ -62,7 +62,7 @@ func (s Service) ApplyCurrentInputFile(ctx context.Context, a *auth.RequestAuth,
stdReq.RefFileIDs = prependUniqueRefFileID(stdReq.RefFileIDs, fileID)
stdReq.FinalPrompt, stdReq.ToolNames = promptcompat.BuildOpenAIPrompt(messages, stdReq.ToolsRaw, "", stdReq.ToolChoice, stdReq.Thinking)
// Token accounting must reflect the actual downstream context:
// the uploaded IGNORE.txt file content + the neutral live prompt.
// the uploaded history.txt file content + the neutral live prompt.
stdReq.PromptTokenText = fileText + "\n" + stdReq.FinalPrompt
return stdReq, nil
}

View File

@@ -336,8 +336,8 @@ func TestApplyCurrentInputFilePreservesFullContextPromptForTokenCounting(t *test
if !strings.Contains(out.PromptTokenText, "first user turn") || !strings.Contains(out.PromptTokenText, "latest user turn") {
t.Fatalf("expected prompt token text to contain file context with full conversation, got %q", out.PromptTokenText)
}
if !strings.Contains(out.PromptTokenText, "[file content end]") || !strings.Contains(out.PromptTokenText, "[file name]: IGNORE") {
t.Fatalf("expected prompt token text to include IGNORE.txt file wrapper, got %q", out.PromptTokenText)
if strings.Contains(out.PromptTokenText, "[file content end]") || strings.Contains(out.PromptTokenText, "[file content begin]") || strings.Contains(out.PromptTokenText, "[file name]:") {
t.Fatalf("expected prompt token text to use the raw history.txt transcript without wrapper tags, got %q", out.PromptTokenText)
}
if !strings.Contains(out.PromptTokenText, "Answer the latest user request directly.") {
t.Fatalf("expected prompt token text to also include neutral live prompt, got %q", out.PromptTokenText)

View File

@@ -98,6 +98,14 @@ func NewApp() (*App, error) {
r.Get("/v1/responses/{response_id}", responsesHandler.GetResponseByID)
r.Post("/v1/files", filesHandler.UploadFile)
r.Post("/v1/embeddings", embeddingsHandler.Embeddings)
// Root OpenAI aliases support clients configured with the bare DS2API service URL.
r.Get("/models", modelsHandler.ListModels)
r.Get("/models/{model_id}", modelsHandler.GetModel)
r.Post("/chat/completions", chatHandler.ChatCompletions)
r.Post("/responses", responsesHandler.Responses)
r.Get("/responses/{response_id}", responsesHandler.GetResponseByID)
r.Post("/files", filesHandler.UploadFile)
r.Post("/embeddings", embeddingsHandler.Embeddings)
claude.RegisterRoutes(r, claudeHandler)
gemini.RegisterRoutes(r, geminiHandler)
r.Route("/admin", func(ar chi.Router) {

View File

@@ -37,6 +37,13 @@ func TestAPIRoutesRemainRegistered(t *testing.T) {
"GET /v1/responses/{response_id}",
"POST /v1/files",
"POST /v1/embeddings",
"GET /models",
"GET /models/{model_id}",
"POST /chat/completions",
"POST /responses",
"GET /responses/{response_id}",
"POST /files",
"POST /embeddings",
"GET /anthropic/v1/models",
"POST /anthropic/v1/messages",
"POST /anthropic/v1/messages/count_tokens",