feat: enforce request body size limits and restrict inline file count to prevent resource exhaustion

This commit is contained in:
CJACK
2026-04-13 03:55:14 +08:00
parent 2aee80d0d3
commit 25234af301
6 changed files with 36 additions and 0 deletions

View File

@@ -26,8 +26,13 @@ func (h *Handler) Embeddings(w http.ResponseWriter, r *http.Request) {
}
defer h.Auth.Release(a)
r.Body = http.MaxBytesReader(w, r.Body, openAIGeneralMaxSize)
var req map[string]any
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
if strings.Contains(strings.ToLower(err.Error()), "too large") {
writeOpenAIError(w, http.StatusRequestEntityTooLarge, "request body too large")
return
}
writeOpenAIError(w, http.StatusBadRequest, "invalid json")
return
}

View File

@@ -15,6 +15,8 @@ import (
"ds2api/internal/deepseek"
)
// maxInlineFilesPerRequest caps how many inline (base64-embedded) files a
// single request may upload, preventing a single request from exhausting
// upstream storage or connection resources.
const maxInlineFilesPerRequest = 50
type inlineFileUploadError struct {
status int
message string
@@ -39,6 +41,7 @@ type inlineUploadState struct {
handler *Handler
auth *auth.RequestAuth
uploadedByID map[string]string
uploadCount int
}
type inlineDecodedFile struct {
@@ -129,10 +132,14 @@ func (s *inlineUploadState) tryUploadBlock(block map[string]any) (map[string]any
if !ok {
return nil, false, nil
}
if s.uploadCount >= maxInlineFilesPerRequest {
return nil, true, fmt.Errorf("exceeded maximum of %d inline files per request", maxInlineFilesPerRequest)
}
fileID, err := s.uploadInlineFile(decoded)
if err != nil {
return nil, true, &inlineFileUploadError{status: http.StatusInternalServerError, message: "Failed to upload inline file.", err: err}
}
s.uploadCount++
replacement := map[string]any{
"type": decoded.ReplacementType,
"file_id": fileID,

View File

@@ -5,6 +5,7 @@ import (
"encoding/json"
"io"
"net/http"
"strings"
"time"
"ds2api/internal/auth"
@@ -43,8 +44,13 @@ func (h *Handler) ChatCompletions(w http.ResponseWriter, r *http.Request) {
r = r.WithContext(auth.WithAuth(r.Context(), a))
r.Body = http.MaxBytesReader(w, r.Body, openAIGeneralMaxSize)
var req map[string]any
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
if strings.Contains(strings.ToLower(err.Error()), "too large") {
writeOpenAIError(w, http.StatusRequestEntityTooLarge, "request body too large")
return
}
writeOpenAIError(w, http.StatusBadRequest, "invalid json")
return
}

View File

@@ -28,7 +28,13 @@ func (h *Handler) UploadFile(w http.ResponseWriter, r *http.Request) {
writeOpenAIError(w, http.StatusBadRequest, "content-type must be multipart/form-data")
return
}
// Enforce a hard cap on the total request body size to prevent OOM
r.Body = http.MaxBytesReader(w, r.Body, openAIUploadMaxSize)
if err := r.ParseMultipartForm(openAIUploadMaxMemory); err != nil {
if strings.Contains(strings.ToLower(err.Error()), "too large") {
writeOpenAIError(w, http.StatusRequestEntityTooLarge, "file size exceeds limit")
return
}
writeOpenAIError(w, http.StatusBadRequest, "invalid multipart form")
return
}

View File

@@ -13,6 +13,13 @@ import (
"ds2api/internal/util"
)
const (
	// openAIUploadMaxSize limits total multipart request body size (100 MiB).
	// Enforced via http.MaxBytesReader before ParseMultipartForm runs.
	openAIUploadMaxSize = 100 << 20
	// openAIGeneralMaxSize limits total JSON request body size (100 MiB).
	// Applied to chat-completions, embeddings, and responses endpoints.
	openAIGeneralMaxSize = 100 << 20
)
// writeJSON is a package-internal alias kept to avoid mass-renaming across
// every call-site in this package. It delegates directly to util.WriteJSON.
var writeJSON = util.WriteJSON

View File

@@ -65,8 +65,13 @@ func (h *Handler) Responses(w http.ResponseWriter, r *http.Request) {
return
}
r.Body = http.MaxBytesReader(w, r.Body, openAIGeneralMaxSize)
var req map[string]any
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
if strings.Contains(strings.ToLower(err.Error()), "too large") {
writeOpenAIError(w, http.StatusRequestEntityTooLarge, "request body too large")
return
}
writeOpenAIError(w, http.StatusBadRequest, "invalid json")
return
}