feat: Implement admin settings UI, enhance admin authentication with password hashing, and add new streaming runtime logic for Claude and OpenAI adapters with extensive compatibility tests.

This commit is contained in:
CJACK
2026-02-19 02:45:38 +08:00
parent d21aedac83
commit 7307a5cc9a
64 changed files with 4078 additions and 967 deletions

46
internal/admin/deps.go Normal file
View File

@@ -0,0 +1,46 @@
package admin

import (
	"context"
	"net/http"

	"ds2api/internal/account"
	"ds2api/internal/auth"
	"ds2api/internal/config"
	"ds2api/internal/deepseek"
)

// ConfigStore is the subset of the configuration store that the admin
// handlers depend on. Declaring it here (at the consumer) lets tests inject
// a fake store instead of a concrete *config.Store.
type ConfigStore interface {
	Snapshot() config.Config
	Keys() []string
	Accounts() []config.Account
	FindAccount(identifier string) (config.Account, bool)
	UpdateAccountToken(identifier, token string) error
	Update(mutator func(*config.Config) error) error
	ExportJSONAndBase64() (string, string, error)
	IsEnvBacked() bool
	SetVercelSync(hash string, ts int64) error
	AdminPasswordHash() string
	AdminJWTExpireHours() int
	AdminJWTValidAfterUnix() int64
	RuntimeAccountMaxInflight() int
	RuntimeAccountMaxQueue(defaultSize int) int
	RuntimeGlobalMaxInflight(defaultSize int) int
}

// PoolController exposes the account-pool operations the admin API needs:
// full reset, status reporting, and hot-reloading runtime limits.
type PoolController interface {
	Reset()
	Status() map[string]any
	ApplyRuntimeLimits(maxInflightPerAccount, maxQueueSize, globalMaxInflight int)
}

// DeepSeekCaller is the upstream DeepSeek client surface used by the admin
// handlers (login validation and completion plumbing).
type DeepSeekCaller interface {
	Login(ctx context.Context, acc config.Account) (string, error)
	CreateSession(ctx context.Context, a *auth.RequestAuth, maxAttempts int) (string, error)
	GetPow(ctx context.Context, a *auth.RequestAuth, maxAttempts int) (string, error)
	CallCompletion(ctx context.Context, a *auth.RequestAuth, payload map[string]any, powResp string, maxAttempts int) (*http.Response, error)
}

// Compile-time checks that the concrete implementations satisfy the
// interfaces above.
var _ ConfigStore = (*config.Store)(nil)
var _ PoolController = (*account.Pool)(nil)
var _ DeepSeekCaller = (*deepseek.Client)(nil)

View File

@@ -2,16 +2,12 @@ package admin
import (
"github.com/go-chi/chi/v5"
"ds2api/internal/account"
"ds2api/internal/config"
"ds2api/internal/deepseek"
)
type Handler struct {
Store *config.Store
Pool *account.Pool
DS *deepseek.Client
Store ConfigStore
Pool PoolController
DS DeepSeekCaller
}
func RegisterRoutes(r chi.Router, h *Handler) {
@@ -22,6 +18,11 @@ func RegisterRoutes(r chi.Router, h *Handler) {
pr.Get("/vercel/config", h.getVercelConfig)
pr.Get("/config", h.getConfig)
pr.Post("/config", h.updateConfig)
pr.Get("/settings", h.getSettings)
pr.Put("/settings", h.updateSettings)
pr.Post("/settings/password", h.updateSettingsPassword)
pr.Post("/config/import", h.configImport)
pr.Get("/config/export", h.configExport)
pr.Post("/keys", h.addKey)
pr.Delete("/keys/{key}", h.deleteKey)
pr.Get("/accounts", h.listAccounts)

View File

@@ -12,7 +12,7 @@ import (
func (h *Handler) requireAdmin(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if err := authn.VerifyAdminRequest(r); err != nil {
if err := authn.VerifyAdminRequestWithStore(r, h.Store); err != nil {
writeJSON(w, http.StatusUnauthorized, map[string]any{"detail": err.Error()})
return
}
@@ -25,18 +25,18 @@ func (h *Handler) login(w http.ResponseWriter, r *http.Request) {
_ = json.NewDecoder(r.Body).Decode(&req)
adminKey, _ := req["admin_key"].(string)
expireHours := intFrom(req["expire_hours"])
if expireHours <= 0 {
expireHours = 24
}
if adminKey != authn.AdminKey() {
if !authn.VerifyAdminCredential(adminKey, h.Store) {
writeJSON(w, http.StatusUnauthorized, map[string]any{"detail": "Invalid admin key"})
return
}
token, err := authn.CreateJWT(expireHours)
token, err := authn.CreateJWTWithStore(expireHours, h.Store)
if err != nil {
writeJSON(w, http.StatusInternalServerError, map[string]any{"detail": err.Error()})
return
}
if expireHours <= 0 {
expireHours = h.Store.AdminJWTExpireHours()
}
writeJSON(w, http.StatusOK, map[string]any{"success": true, "token": token, "expires_in": expireHours * 3600})
}
@@ -47,7 +47,7 @@ func (h *Handler) verify(w http.ResponseWriter, r *http.Request) {
return
}
token := strings.TrimSpace(header[7:])
payload, err := authn.VerifyJWT(token)
payload, err := authn.VerifyJWTWithStore(token, h.Store)
if err != nil {
writeJSON(w, http.StatusUnauthorized, map[string]any{"detail": err.Error()})
return

View File

@@ -5,7 +5,6 @@ import (
"encoding/json"
"fmt"
"net/http"
"sort"
"strings"
"github.com/go-chi/chi/v5"
@@ -204,38 +203,191 @@ func (h *Handler) batchImport(w http.ResponseWriter, r *http.Request) {
}
func (h *Handler) exportConfig(w http.ResponseWriter, _ *http.Request) {
h.configExport(w, nil)
}
func (h *Handler) configExport(w http.ResponseWriter, _ *http.Request) {
snap := h.Store.Snapshot()
jsonStr, b64, err := h.Store.ExportJSONAndBase64()
if err != nil {
writeJSON(w, http.StatusInternalServerError, map[string]any{"detail": err.Error()})
return
}
writeJSON(w, http.StatusOK, map[string]any{"json": jsonStr, "base64": b64})
writeJSON(w, http.StatusOK, map[string]any{
"success": true,
"config": snap,
"json": jsonStr,
"base64": b64,
})
}
func (h *Handler) configImport(w http.ResponseWriter, r *http.Request) {
var req map[string]any
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
writeJSON(w, http.StatusBadRequest, map[string]any{"detail": "invalid json"})
return
}
mode := strings.TrimSpace(strings.ToLower(r.URL.Query().Get("mode")))
if mode == "" {
mode = strings.TrimSpace(strings.ToLower(fieldString(req, "mode")))
}
if mode == "" {
mode = "merge"
}
if mode != "merge" && mode != "replace" {
writeJSON(w, http.StatusBadRequest, map[string]any{"detail": "mode must be merge or replace"})
return
}
payload := req
if raw, ok := req["config"].(map[string]any); ok && len(raw) > 0 {
payload = raw
}
rawJSON, err := json.Marshal(payload)
if err != nil {
writeJSON(w, http.StatusBadRequest, map[string]any{"detail": "invalid config payload"})
return
}
var incoming config.Config
if err := json.Unmarshal(rawJSON, &incoming); err != nil {
writeJSON(w, http.StatusBadRequest, map[string]any{"detail": err.Error()})
return
}
importedKeys, importedAccounts := 0, 0
err = h.Store.Update(func(c *config.Config) error {
next := c.Clone()
if mode == "replace" {
next = incoming.Clone()
next.VercelSyncHash = c.VercelSyncHash
next.VercelSyncTime = c.VercelSyncTime
importedKeys = len(next.Keys)
importedAccounts = len(next.Accounts)
} else {
existingKeys := map[string]struct{}{}
for _, k := range next.Keys {
existingKeys[k] = struct{}{}
}
for _, k := range incoming.Keys {
key := strings.TrimSpace(k)
if key == "" {
continue
}
if _, ok := existingKeys[key]; ok {
continue
}
existingKeys[key] = struct{}{}
next.Keys = append(next.Keys, key)
importedKeys++
}
existingAccounts := map[string]struct{}{}
for _, acc := range next.Accounts {
existingAccounts[acc.Identifier()] = struct{}{}
}
for _, acc := range incoming.Accounts {
id := acc.Identifier()
if id == "" {
continue
}
if _, ok := existingAccounts[id]; ok {
continue
}
existingAccounts[id] = struct{}{}
next.Accounts = append(next.Accounts, acc)
importedAccounts++
}
if len(incoming.ClaudeMapping) > 0 {
if next.ClaudeMapping == nil {
next.ClaudeMapping = map[string]string{}
}
for k, v := range incoming.ClaudeMapping {
next.ClaudeMapping[k] = v
}
}
if len(incoming.ClaudeModelMap) > 0 {
if next.ClaudeModelMap == nil {
next.ClaudeModelMap = map[string]string{}
}
for k, v := range incoming.ClaudeModelMap {
next.ClaudeModelMap[k] = v
}
}
if len(incoming.ModelAliases) > 0 {
if next.ModelAliases == nil {
next.ModelAliases = map[string]string{}
}
for k, v := range incoming.ModelAliases {
next.ModelAliases[k] = v
}
}
if strings.TrimSpace(incoming.Toolcall.Mode) != "" {
next.Toolcall.Mode = incoming.Toolcall.Mode
}
if strings.TrimSpace(incoming.Toolcall.EarlyEmitConfidence) != "" {
next.Toolcall.EarlyEmitConfidence = incoming.Toolcall.EarlyEmitConfidence
}
if incoming.Responses.StoreTTLSeconds > 0 {
next.Responses.StoreTTLSeconds = incoming.Responses.StoreTTLSeconds
}
if strings.TrimSpace(incoming.Embeddings.Provider) != "" {
next.Embeddings.Provider = incoming.Embeddings.Provider
}
if strings.TrimSpace(incoming.Admin.PasswordHash) != "" {
next.Admin.PasswordHash = incoming.Admin.PasswordHash
}
if incoming.Admin.JWTExpireHours > 0 {
next.Admin.JWTExpireHours = incoming.Admin.JWTExpireHours
}
if incoming.Admin.JWTValidAfterUnix > 0 {
next.Admin.JWTValidAfterUnix = incoming.Admin.JWTValidAfterUnix
}
if incoming.Runtime.AccountMaxInflight > 0 {
next.Runtime.AccountMaxInflight = incoming.Runtime.AccountMaxInflight
}
if incoming.Runtime.AccountMaxQueue > 0 {
next.Runtime.AccountMaxQueue = incoming.Runtime.AccountMaxQueue
}
if incoming.Runtime.GlobalMaxInflight > 0 {
next.Runtime.GlobalMaxInflight = incoming.Runtime.GlobalMaxInflight
}
}
normalizeSettingsConfig(&next)
if err := validateSettingsConfig(next); err != nil {
return newRequestError(err.Error())
}
*c = next
return nil
})
if err != nil {
if detail, ok := requestErrorDetail(err); ok {
writeJSON(w, http.StatusBadRequest, map[string]any{"detail": detail})
return
}
writeJSON(w, http.StatusInternalServerError, map[string]any{"detail": err.Error()})
return
}
h.Pool.Reset()
writeJSON(w, http.StatusOK, map[string]any{
"success": true,
"mode": mode,
"imported_keys": importedKeys,
"imported_accounts": importedAccounts,
"message": "config imported",
})
}
func (h *Handler) computeSyncHash() string {
snap := h.Store.Snapshot()
syncable := map[string]any{"keys": snap.Keys, "accounts": []map[string]any{}}
accounts := make([]map[string]any, 0, len(snap.Accounts))
for _, a := range snap.Accounts {
m := map[string]any{}
if a.Email != "" {
m["email"] = a.Email
}
if a.Mobile != "" {
m["mobile"] = a.Mobile
}
if a.Password != "" {
m["password"] = a.Password
}
accounts = append(accounts, m)
}
sort.Slice(accounts, func(i, j int) bool {
ai := fmt.Sprintf("%v%v", accounts[i]["email"], accounts[i]["mobile"])
aj := fmt.Sprintf("%v%v", accounts[j]["email"], accounts[j]["mobile"])
return ai < aj
})
syncable["accounts"] = accounts
b, _ := json.Marshal(syncable)
snap := h.Store.Snapshot().Clone()
snap.VercelSyncHash = ""
snap.VercelSyncTime = 0
b, _ := json.Marshal(snap)
sum := md5.Sum(b)
return fmt.Sprintf("%x", sum)
}

View File

@@ -0,0 +1,321 @@
package admin
import (
"encoding/json"
"fmt"
"net/http"
"strings"
"time"
authn "ds2api/internal/auth"
"ds2api/internal/config"
)
// getSettings returns the current admin, runtime, toolcall, responses and
// embeddings settings, plus derived flags: whether a password hash is set,
// whether the default admin key is still in use, and whether a Vercel
// re-sync appears necessary.
func (h *Handler) getSettings(w http.ResponseWriter, _ *http.Request) {
	snap := h.Store.Snapshot()
	// Recommended queue/global defaults scale with account count and the
	// per-account inflight limit (see defaultRuntimeRecommended).
	recommended := defaultRuntimeRecommended(len(snap.Accounts), h.Store.RuntimeAccountMaxInflight())
	// Only meaningful on Vercel: a stored sync hash that no longer matches
	// the live config means the deployed env var is stale.
	needsSync := config.IsVercel() && snap.VercelSyncHash != "" && snap.VercelSyncHash != h.computeSyncHash()
	writeJSON(w, http.StatusOK, map[string]any{
		"success": true,
		"admin": map[string]any{
			// Expose only whether a hash exists, never the hash itself.
			"has_password_hash":        strings.TrimSpace(snap.Admin.PasswordHash) != "",
			"jwt_expire_hours":         h.Store.AdminJWTExpireHours(),
			"jwt_valid_after_unix":     snap.Admin.JWTValidAfterUnix,
			"default_password_warning": authn.UsingDefaultAdminKey(h.Store),
		},
		"runtime": map[string]any{
			"account_max_inflight": h.Store.RuntimeAccountMaxInflight(),
			"account_max_queue":    h.Store.RuntimeAccountMaxQueue(recommended),
			"global_max_inflight":  h.Store.RuntimeGlobalMaxInflight(recommended),
		},
		"toolcall":          snap.Toolcall,
		"responses":         snap.Responses,
		"embeddings":        snap.Embeddings,
		"claude_mapping":    settingsClaudeMapping(snap),
		"model_aliases":     snap.ModelAliases,
		"env_backed":        h.Store.IsEnvBacked(),
		"needs_vercel_sync": needsSync,
	})
}
// updateSettings applies a partial settings update. Only sections present in
// the request body (admin / runtime / toolcall / responses / embeddings /
// claude_mapping / model_aliases) are touched, and within a section only
// positive or non-empty values overwrite the stored config. Runtime limits
// are hot-reloaded into the pool after a successful store update.
func (h *Handler) updateSettings(w http.ResponseWriter, r *http.Request) {
	var req map[string]any
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		writeJSON(w, http.StatusBadRequest, map[string]any{"detail": "invalid json"})
		return
	}
	adminCfg, runtimeCfg, toolcallCfg, responsesCfg, embeddingsCfg, claudeMap, aliasMap, err := parseSettingsUpdateRequest(req)
	if err != nil {
		writeJSON(w, http.StatusBadRequest, map[string]any{"detail": err.Error()})
		return
	}
	// Validate runtime limits against the merged (current + incoming) view so
	// a partial update cannot leave an inconsistent combination behind.
	if runtimeCfg != nil {
		if err := validateMergedRuntimeSettings(h.Store.Snapshot().Runtime, runtimeCfg); err != nil {
			writeJSON(w, http.StatusBadRequest, map[string]any{"detail": err.Error()})
			return
		}
	}
	if err := h.Store.Update(func(c *config.Config) error {
		if adminCfg != nil {
			if adminCfg.JWTExpireHours > 0 {
				c.Admin.JWTExpireHours = adminCfg.JWTExpireHours
			}
		}
		if runtimeCfg != nil {
			if runtimeCfg.AccountMaxInflight > 0 {
				c.Runtime.AccountMaxInflight = runtimeCfg.AccountMaxInflight
			}
			if runtimeCfg.AccountMaxQueue > 0 {
				c.Runtime.AccountMaxQueue = runtimeCfg.AccountMaxQueue
			}
			if runtimeCfg.GlobalMaxInflight > 0 {
				c.Runtime.GlobalMaxInflight = runtimeCfg.GlobalMaxInflight
			}
		}
		if toolcallCfg != nil {
			if strings.TrimSpace(toolcallCfg.Mode) != "" {
				c.Toolcall.Mode = strings.TrimSpace(toolcallCfg.Mode)
			}
			if strings.TrimSpace(toolcallCfg.EarlyEmitConfidence) != "" {
				c.Toolcall.EarlyEmitConfidence = strings.TrimSpace(toolcallCfg.EarlyEmitConfidence)
			}
		}
		if responsesCfg != nil && responsesCfg.StoreTTLSeconds > 0 {
			c.Responses.StoreTTLSeconds = responsesCfg.StoreTTLSeconds
		}
		if embeddingsCfg != nil && strings.TrimSpace(embeddingsCfg.Provider) != "" {
			c.Embeddings.Provider = strings.TrimSpace(embeddingsCfg.Provider)
		}
		if claudeMap != nil {
			// A provided mapping replaces ClaudeMapping entirely and clears
			// ClaudeModelMap so the two fields cannot disagree.
			c.ClaudeMapping = claudeMap
			c.ClaudeModelMap = nil
		}
		if aliasMap != nil {
			c.ModelAliases = aliasMap
		}
		return nil
	}); err != nil {
		writeJSON(w, http.StatusInternalServerError, map[string]any{"detail": err.Error()})
		return
	}
	// Push the (possibly changed) runtime limits into the live pool.
	h.applyRuntimeSettings()
	needsSync := config.IsVercel() || h.Store.IsEnvBacked()
	writeJSON(w, http.StatusOK, map[string]any{
		"success":             true,
		"message":             "settings updated and hot reloaded",
		"env_backed":          h.Store.IsEnvBacked(),
		"needs_vercel_sync":   needsSync,
		"manual_sync_message": "配置已保存。Vercel 部署请在 Vercel Sync 页面手动同步。",
	})
}
// validateMergedRuntimeSettings overlays every positive field of incoming
// onto a copy of current and validates the combined runtime configuration,
// so a partial update is checked against the values it would actually
// produce rather than in isolation.
func validateMergedRuntimeSettings(current config.RuntimeConfig, incoming *config.RuntimeConfig) error {
	merged := current
	if incoming == nil {
		return validateRuntimeSettings(merged)
	}
	if v := incoming.AccountMaxInflight; v > 0 {
		merged.AccountMaxInflight = v
	}
	if v := incoming.AccountMaxQueue; v > 0 {
		merged.AccountMaxQueue = v
	}
	if v := incoming.GlobalMaxInflight; v > 0 {
		merged.GlobalMaxInflight = v
	}
	return validateRuntimeSettings(merged)
}
// updateSettingsPassword sets a new admin password. It accepts the password
// in either "new_password" or "password", stores only its hash, and moves
// the JWT valid-after timestamp to now so every previously issued admin
// token is invalidated (forcing re-login).
func (h *Handler) updateSettingsPassword(w http.ResponseWriter, r *http.Request) {
	var req map[string]any
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		writeJSON(w, http.StatusBadRequest, map[string]any{"detail": "invalid json"})
		return
	}
	newPassword := strings.TrimSpace(fieldString(req, "new_password"))
	if newPassword == "" {
		// Fall back to the legacy field name.
		newPassword = strings.TrimSpace(fieldString(req, "password"))
	}
	if len(newPassword) < 4 {
		writeJSON(w, http.StatusBadRequest, map[string]any{"detail": "new password must be at least 4 characters"})
		return
	}
	// Tokens issued before this instant become invalid.
	now := time.Now().Unix()
	hash := authn.HashAdminPassword(newPassword)
	if err := h.Store.Update(func(c *config.Config) error {
		c.Admin.PasswordHash = hash
		c.Admin.JWTValidAfterUnix = now
		return nil
	}); err != nil {
		writeJSON(w, http.StatusInternalServerError, map[string]any{"detail": err.Error()})
		return
	}
	writeJSON(w, http.StatusOK, map[string]any{
		"success":              true,
		"message":              "password updated",
		"force_relogin":        true,
		"jwt_valid_after_unix": now,
	})
}
// applyRuntimeSettings pushes the currently configured runtime limits into
// the account pool. It is a no-op when the handler or its dependencies are
// not wired up (e.g. in partially constructed test handlers).
func (h *Handler) applyRuntimeSettings() {
	if h == nil || h.Store == nil || h.Pool == nil {
		return
	}
	perAccount := h.Store.RuntimeAccountMaxInflight()
	// Derived fallback used when queue/global limits are not explicitly set.
	fallback := defaultRuntimeRecommended(len(h.Store.Accounts()), perAccount)
	h.Pool.ApplyRuntimeLimits(
		perAccount,
		h.Store.RuntimeAccountMaxQueue(fallback),
		h.Store.RuntimeGlobalMaxInflight(fallback),
	)
}
// defaultRuntimeRecommended computes the recommended aggregate inflight
// capacity: accounts × per-account limit. A non-positive per-account limit
// is treated as 1, and with no accounts the per-account limit itself is
// returned.
func defaultRuntimeRecommended(accountCount, maxPer int) int {
	per := maxPer
	if per <= 0 {
		per = 1
	}
	if accountCount <= 0 {
		return per
	}
	return accountCount * per
}
// settingsClaudeMapping resolves the effective Claude model mapping:
// ClaudeMapping takes precedence, then ClaudeModelMap, and finally a
// built-in fast/slow default when neither is populated.
func settingsClaudeMapping(c config.Config) map[string]string {
	switch {
	case len(c.ClaudeMapping) > 0:
		return c.ClaudeMapping
	case len(c.ClaudeModelMap) > 0:
		return c.ClaudeModelMap
	default:
		return map[string]string{"fast": "deepseek-chat", "slow": "deepseek-reasoner"}
	}
}
// parseSettingsUpdateRequest validates the sections of a settings-update
// request body and converts them into typed config fragments. A nil pointer
// (or nil map) is returned for every section absent from the request, so
// callers can distinguish "not provided" from "provided but empty". The
// first validation failure aborts parsing and is returned as the error.
func parseSettingsUpdateRequest(req map[string]any) (*config.AdminConfig, *config.RuntimeConfig, *config.ToolcallConfig, *config.ResponsesConfig, *config.EmbeddingsConfig, map[string]string, map[string]string, error) {
	adminCfg, err := parseAdminSettingsSection(req)
	if err != nil {
		return nil, nil, nil, nil, nil, nil, nil, err
	}
	runtimeCfg, err := parseRuntimeSettingsSection(req)
	if err != nil {
		return nil, nil, nil, nil, nil, nil, nil, err
	}
	toolcallCfg, err := parseToolcallSettingsSection(req)
	if err != nil {
		return nil, nil, nil, nil, nil, nil, nil, err
	}
	respCfg, err := parseResponsesSettingsSection(req)
	if err != nil {
		return nil, nil, nil, nil, nil, nil, nil, err
	}
	embCfg, err := parseEmbeddingsSettingsSection(req)
	if err != nil {
		return nil, nil, nil, nil, nil, nil, nil, err
	}
	return adminCfg, runtimeCfg, toolcallCfg, respCfg, embCfg,
		parseStringMapSection(req, "claude_mapping"),
		parseStringMapSection(req, "model_aliases"),
		nil
}

// parseAdminSettingsSection parses req["admin"]. jwt_expire_hours, when
// present, must lie in [1, 720].
func parseAdminSettingsSection(req map[string]any) (*config.AdminConfig, error) {
	raw, ok := req["admin"].(map[string]any)
	if !ok {
		return nil, nil
	}
	cfg := &config.AdminConfig{}
	if v, exists := raw["jwt_expire_hours"]; exists {
		n := intFrom(v)
		if n < 1 || n > 720 {
			return nil, fmt.Errorf("admin.jwt_expire_hours must be between 1 and 720")
		}
		cfg.JWTExpireHours = n
	}
	return cfg, nil
}

// parseRuntimeSettingsSection parses req["runtime"], checking each field's
// bounds plus the cross-field constraint that a global inflight limit cannot
// be smaller than the per-account limit supplied in the same request.
func parseRuntimeSettingsSection(req map[string]any) (*config.RuntimeConfig, error) {
	raw, ok := req["runtime"].(map[string]any)
	if !ok {
		return nil, nil
	}
	cfg := &config.RuntimeConfig{}
	if v, exists := raw["account_max_inflight"]; exists {
		n := intFrom(v)
		if n < 1 || n > 256 {
			return nil, fmt.Errorf("runtime.account_max_inflight must be between 1 and 256")
		}
		cfg.AccountMaxInflight = n
	}
	if v, exists := raw["account_max_queue"]; exists {
		n := intFrom(v)
		if n < 1 || n > 200000 {
			return nil, fmt.Errorf("runtime.account_max_queue must be between 1 and 200000")
		}
		cfg.AccountMaxQueue = n
	}
	if v, exists := raw["global_max_inflight"]; exists {
		n := intFrom(v)
		if n < 1 || n > 200000 {
			return nil, fmt.Errorf("runtime.global_max_inflight must be between 1 and 200000")
		}
		cfg.GlobalMaxInflight = n
	}
	if cfg.AccountMaxInflight > 0 && cfg.GlobalMaxInflight > 0 && cfg.GlobalMaxInflight < cfg.AccountMaxInflight {
		return nil, fmt.Errorf("runtime.global_max_inflight must be >= runtime.account_max_inflight")
	}
	return cfg, nil
}

// parseToolcallSettingsSection parses req["toolcall"]; mode and
// early_emit_confidence are restricted to their known enum values
// (case-insensitively).
func parseToolcallSettingsSection(req map[string]any) (*config.ToolcallConfig, error) {
	raw, ok := req["toolcall"].(map[string]any)
	if !ok {
		return nil, nil
	}
	cfg := &config.ToolcallConfig{}
	if v, exists := raw["mode"]; exists {
		mode := strings.ToLower(strings.TrimSpace(fmt.Sprintf("%v", v)))
		switch mode {
		case "feature_match", "off":
			cfg.Mode = mode
		default:
			return nil, fmt.Errorf("toolcall.mode must be feature_match or off")
		}
	}
	if v, exists := raw["early_emit_confidence"]; exists {
		level := strings.ToLower(strings.TrimSpace(fmt.Sprintf("%v", v)))
		switch level {
		case "high", "low", "off":
			cfg.EarlyEmitConfidence = level
		default:
			return nil, fmt.Errorf("toolcall.early_emit_confidence must be high, low or off")
		}
	}
	return cfg, nil
}

// parseResponsesSettingsSection parses req["responses"]; store_ttl_seconds,
// when present, must lie in [30, 86400].
func parseResponsesSettingsSection(req map[string]any) (*config.ResponsesConfig, error) {
	raw, ok := req["responses"].(map[string]any)
	if !ok {
		return nil, nil
	}
	cfg := &config.ResponsesConfig{}
	if v, exists := raw["store_ttl_seconds"]; exists {
		n := intFrom(v)
		if n < 30 || n > 86400 {
			return nil, fmt.Errorf("responses.store_ttl_seconds must be between 30 and 86400")
		}
		cfg.StoreTTLSeconds = n
	}
	return cfg, nil
}

// parseEmbeddingsSettingsSection parses req["embeddings"]; a provider, when
// present, may not be blank after trimming.
func parseEmbeddingsSettingsSection(req map[string]any) (*config.EmbeddingsConfig, error) {
	raw, ok := req["embeddings"].(map[string]any)
	if !ok {
		return nil, nil
	}
	cfg := &config.EmbeddingsConfig{}
	if v, exists := raw["provider"]; exists {
		p := strings.TrimSpace(fmt.Sprintf("%v", v))
		if p == "" {
			return nil, fmt.Errorf("embeddings.provider cannot be empty")
		}
		cfg.Provider = p
	}
	return cfg, nil
}

// parseStringMapSection converts req[key] into a trimmed string->string map,
// skipping entries whose key or value is blank. It returns nil when the
// section is absent and a non-nil (possibly empty) map when it is present,
// preserving the "clear the mapping" semantics of an empty object.
func parseStringMapSection(req map[string]any, key string) map[string]string {
	raw, ok := req[key].(map[string]any)
	if !ok {
		return nil
	}
	out := map[string]string{}
	for k, v := range raw {
		name := strings.TrimSpace(k)
		val := strings.TrimSpace(fmt.Sprintf("%v", v))
		if name == "" || val == "" {
			continue
		}
		out[name] = val
	}
	return out
}

View File

@@ -0,0 +1,267 @@
package admin
import (
"bytes"
"encoding/json"
"net/http"
"net/http/httptest"
"testing"
authn "ds2api/internal/auth"
)
// TestGetSettingsDefaultPasswordWarning verifies that getSettings reports
// default_password_warning=true when neither the environment nor the config
// provides an admin credential.
func TestGetSettingsDefaultPasswordWarning(t *testing.T) {
	t.Setenv("DS2API_ADMIN_KEY", "") // ensure no admin key leaks in from the environment
	h := newAdminTestHandler(t, `{"keys":["k1"]}`)
	req := httptest.NewRequest(http.MethodGet, "/admin/settings", nil)
	rec := httptest.NewRecorder()
	h.getSettings(rec, req)
	if rec.Code != http.StatusOK {
		t.Fatalf("status=%d body=%s", rec.Code, rec.Body.String())
	}
	var body map[string]any
	_ = json.Unmarshal(rec.Body.Bytes(), &body)
	admin, _ := body["admin"].(map[string]any)
	warn, _ := admin["default_password_warning"].(bool)
	if !warn {
		t.Fatalf("expected default password warning true, body=%v", body)
	}
}
// TestUpdateSettingsValidation verifies that an out-of-range runtime value
// (account_max_inflight = 0, below the minimum of 1) is rejected with 400.
func TestUpdateSettingsValidation(t *testing.T) {
	h := newAdminTestHandler(t, `{"keys":["k1"]}`)
	payload := map[string]any{
		"runtime": map[string]any{
			"account_max_inflight": 0,
		},
	}
	b, _ := json.Marshal(payload)
	req := httptest.NewRequest(http.MethodPut, "/admin/settings", bytes.NewReader(b))
	rec := httptest.NewRecorder()
	h.updateSettings(rec, req)
	if rec.Code != http.StatusBadRequest {
		t.Fatalf("expected 400, got %d body=%s", rec.Code, rec.Body.String())
	}
}
// TestUpdateSettingsValidationWithMergedRuntimeSnapshot verifies that a
// partial runtime update is validated against the MERGED config: raising
// account_max_inflight to 16 while the stored global_max_inflight is 8 must
// fail, even though 16 alone is within bounds.
func TestUpdateSettingsValidationWithMergedRuntimeSnapshot(t *testing.T) {
	h := newAdminTestHandler(t, `{
		"keys":["k1"],
		"runtime":{
			"account_max_inflight":8,
			"global_max_inflight":8
		}
	}`)
	payload := map[string]any{
		"runtime": map[string]any{
			"account_max_inflight": 16,
		},
	}
	b, _ := json.Marshal(payload)
	req := httptest.NewRequest(http.MethodPut, "/admin/settings", bytes.NewReader(b))
	rec := httptest.NewRecorder()
	h.updateSettings(rec, req)
	if rec.Code != http.StatusBadRequest {
		t.Fatalf("expected 400, got %d body=%s", rec.Code, rec.Body.String())
	}
	// The error detail must name the conflicting field.
	if !bytes.Contains(rec.Body.Bytes(), []byte("runtime.global_max_inflight")) {
		t.Fatalf("expected merged runtime validation detail, got %s", rec.Body.String())
	}
}
// TestUpdateSettingsWithoutRuntimeSkipsMergedRuntimeValidation verifies that
// an update touching only non-runtime sections succeeds even when the STORED
// runtime config is itself inconsistent (global < per-account) — merged
// runtime validation only runs when runtime fields are submitted.
func TestUpdateSettingsWithoutRuntimeSkipsMergedRuntimeValidation(t *testing.T) {
	h := newAdminTestHandler(t, `{
		"keys":["k1"],
		"runtime":{
			"account_max_inflight":8,
			"global_max_inflight":4
		}
	}`)
	payload := map[string]any{
		"responses": map[string]any{
			"store_ttl_seconds": 600,
		},
	}
	b, _ := json.Marshal(payload)
	req := httptest.NewRequest(http.MethodPut, "/admin/settings", bytes.NewReader(b))
	rec := httptest.NewRecorder()
	h.updateSettings(rec, req)
	if rec.Code != http.StatusOK {
		t.Fatalf("expected 200, got %d body=%s", rec.Code, rec.Body.String())
	}
	if got := h.Store.Snapshot().Responses.StoreTTLSeconds; got != 600 {
		t.Fatalf("store_ttl_seconds=%d want=600", got)
	}
}
// TestUpdateSettingsHotReloadRuntime verifies that a successful runtime
// update is immediately reflected in the live pool's status (hot reload via
// applyRuntimeSettings), not just in the stored config.
func TestUpdateSettingsHotReloadRuntime(t *testing.T) {
	h := newAdminTestHandler(t, `{
		"keys":["k1"],
		"accounts":[{"email":"a@test.com","token":"t1"},{"email":"b@test.com","token":"t2"}]
	}`)
	payload := map[string]any{
		"runtime": map[string]any{
			"account_max_inflight": 3,
			"account_max_queue":    20,
			"global_max_inflight":  5,
		},
	}
	b, _ := json.Marshal(payload)
	req := httptest.NewRequest(http.MethodPut, "/admin/settings", bytes.NewReader(b))
	rec := httptest.NewRecorder()
	h.updateSettings(rec, req)
	if rec.Code != http.StatusOK {
		t.Fatalf("status=%d body=%s", rec.Code, rec.Body.String())
	}
	// All three limits must have reached the pool.
	status := h.Pool.Status()
	if got := intFrom(status["max_inflight_per_account"]); got != 3 {
		t.Fatalf("max_inflight_per_account=%d want=3", got)
	}
	if got := intFrom(status["max_queue_size"]); got != 20 {
		t.Fatalf("max_queue_size=%d want=20", got)
	}
	if got := intFrom(status["global_max_inflight"]); got != 5 {
		t.Fatalf("global_max_inflight=%d want=5", got)
	}
}
// TestUpdateSettingsPasswordInvalidatesOldJWT verifies that changing the
// admin password invalidates previously issued JWTs (via the valid-after
// timestamp) and that the new password authenticates.
func TestUpdateSettingsPasswordInvalidatesOldJWT(t *testing.T) {
	hash := authn.HashAdminPassword("old-password")
	h := newAdminTestHandler(t, `{"admin":{"password_hash":"`+hash+`"}}`)
	// Issue a token under the old password and confirm it verifies.
	token, err := authn.CreateJWTWithStore(1, h.Store)
	if err != nil {
		t.Fatalf("create jwt failed: %v", err)
	}
	if _, err := authn.VerifyJWTWithStore(token, h.Store); err != nil {
		t.Fatalf("verify before update failed: %v", err)
	}
	body := map[string]any{"new_password": "new-password"}
	b, _ := json.Marshal(body)
	req := httptest.NewRequest(http.MethodPost, "/admin/settings/password", bytes.NewReader(b))
	rec := httptest.NewRecorder()
	h.updateSettingsPassword(rec, req)
	if rec.Code != http.StatusOK {
		t.Fatalf("status=%d body=%s", rec.Code, rec.Body.String())
	}
	// The pre-change token must now be rejected.
	if _, err := authn.VerifyJWTWithStore(token, h.Store); err == nil {
		t.Fatal("expected old token to be invalid after password update")
	}
	if !authn.VerifyAdminCredential("new-password", h.Store) {
		t.Fatal("expected new password credential to be accepted")
	}
}
// TestConfigImportMergeAndReplace exercises both import modes: merge must
// deduplicate and append keys/accounts; replace must discard the existing
// config in favor of the imported one.
func TestConfigImportMergeAndReplace(t *testing.T) {
	h := newAdminTestHandler(t, `{
		"keys":["k1"],
		"accounts":[{"email":"a@test.com","password":"p1"}]
	}`)
	// Merge: k1 and a@test.com already exist, so only k2 and b@test.com are added.
	merge := map[string]any{
		"mode": "merge",
		"config": map[string]any{
			"keys": []any{"k1", "k2"},
			"accounts": []any{
				map[string]any{"email": "a@test.com", "password": "p1"},
				map[string]any{"email": "b@test.com", "password": "p2"},
			},
		},
	}
	mergeBytes, _ := json.Marshal(merge)
	mergeReq := httptest.NewRequest(http.MethodPost, "/admin/config/import?mode=merge", bytes.NewReader(mergeBytes))
	mergeRec := httptest.NewRecorder()
	h.configImport(mergeRec, mergeReq)
	if mergeRec.Code != http.StatusOK {
		t.Fatalf("merge status=%d body=%s", mergeRec.Code, mergeRec.Body.String())
	}
	if got := len(h.Store.Keys()); got != 2 {
		t.Fatalf("keys after merge=%d want=2", got)
	}
	if got := len(h.Store.Accounts()); got != 2 {
		t.Fatalf("accounts after merge=%d want=2", got)
	}
	// Replace: the imported config (one key, no accounts) supersedes everything.
	replace := map[string]any{
		"mode": "replace",
		"config": map[string]any{
			"keys": []any{"k9"},
		},
	}
	replaceBytes, _ := json.Marshal(replace)
	replaceReq := httptest.NewRequest(http.MethodPost, "/admin/config/import?mode=replace", bytes.NewReader(replaceBytes))
	replaceRec := httptest.NewRecorder()
	h.configImport(replaceRec, replaceReq)
	if replaceRec.Code != http.StatusOK {
		t.Fatalf("replace status=%d body=%s", replaceRec.Code, replaceRec.Body.String())
	}
	keys := h.Store.Keys()
	if len(keys) != 1 || keys[0] != "k9" {
		t.Fatalf("unexpected keys after replace: %#v", keys)
	}
	if got := len(h.Store.Accounts()); got != 0 {
		t.Fatalf("accounts after replace=%d want=0", got)
	}
}
// TestConfigImportRejectsInvalidRuntimeBounds verifies that an import whose
// runtime values are out of range (account_max_inflight=300 > 256) is
// rejected with 400 and leaves the stored config untouched.
func TestConfigImportRejectsInvalidRuntimeBounds(t *testing.T) {
	h := newAdminTestHandler(t, `{"keys":["k1"]}`)
	payload := map[string]any{
		"mode": "replace",
		"config": map[string]any{
			"keys": []any{"k2"},
			"runtime": map[string]any{
				"account_max_inflight": 300,
			},
		},
	}
	b, _ := json.Marshal(payload)
	req := httptest.NewRequest(http.MethodPost, "/admin/config/import?mode=replace", bytes.NewReader(b))
	rec := httptest.NewRecorder()
	h.configImport(rec, req)
	if rec.Code != http.StatusBadRequest {
		t.Fatalf("expected 400, got %d body=%s", rec.Code, rec.Body.String())
	}
	if !bytes.Contains(rec.Body.Bytes(), []byte("runtime.account_max_inflight")) {
		t.Fatalf("expected runtime bound detail, got %s", rec.Body.String())
	}
	// A failed import must not mutate the store.
	keys := h.Store.Keys()
	if len(keys) != 1 || keys[0] != "k1" {
		t.Fatalf("store should remain unchanged, keys=%v", keys)
	}
}
// TestConfigImportRejectsMergedRuntimeConflict verifies that a merge import
// is validated against the MERGED runtime config: importing
// account_max_inflight=16 on top of a stored global_max_inflight=8 must be
// rejected, leaving the stored runtime limits unchanged.
func TestConfigImportRejectsMergedRuntimeConflict(t *testing.T) {
	h := newAdminTestHandler(t, `{
		"keys":["k1"],
		"runtime":{
			"account_max_inflight":8,
			"global_max_inflight":8
		}
	}`)
	payload := map[string]any{
		"mode": "merge",
		"config": map[string]any{
			"runtime": map[string]any{
				"account_max_inflight": 16,
			},
		},
	}
	b, _ := json.Marshal(payload)
	req := httptest.NewRequest(http.MethodPost, "/admin/config/import?mode=merge", bytes.NewReader(b))
	rec := httptest.NewRecorder()
	h.configImport(rec, req)
	if rec.Code != http.StatusBadRequest {
		t.Fatalf("expected 400, got %d body=%s", rec.Code, rec.Body.String())
	}
	if !bytes.Contains(rec.Body.Bytes(), []byte("runtime.global_max_inflight")) {
		t.Fatalf("expected merged runtime validation detail, got %s", rec.Body.String())
	}
	// A failed import must not mutate the stored runtime limits.
	snap := h.Store.Snapshot()
	if snap.Runtime.AccountMaxInflight != 8 || snap.Runtime.GlobalMaxInflight != 8 {
		t.Fatalf("runtime should remain unchanged, runtime=%+v", snap.Runtime)
	}
}

View File

@@ -3,8 +3,8 @@ package admin
import (
"bytes"
"context"
"encoding/base64"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
@@ -19,6 +19,62 @@ func (h *Handler) syncVercel(w http.ResponseWriter, r *http.Request) {
writeJSON(w, http.StatusBadRequest, map[string]any{"detail": "invalid json"})
return
}
opts, err := parseVercelSyncOptions(req)
if err != nil {
writeJSON(w, http.StatusBadRequest, map[string]any{"detail": err.Error()})
return
}
validated, failed := h.validateAccountsForVercelSync(r.Context(), opts.AutoValidate)
_, cfgB64, err := h.Store.ExportJSONAndBase64()
if err != nil {
writeJSON(w, http.StatusInternalServerError, map[string]any{"detail": err.Error()})
return
}
client := &http.Client{Timeout: 30 * time.Second}
params := buildVercelParams(opts.TeamID)
headers := map[string]string{"Authorization": "Bearer " + opts.VercelToken}
envResp, status, err := vercelRequest(r.Context(), client, http.MethodGet, "https://api.vercel.com/v9/projects/"+opts.ProjectID+"/env", params, headers, nil)
if err != nil || status != http.StatusOK {
writeJSON(w, statusOr(status, http.StatusInternalServerError), map[string]any{"detail": "获取环境变量失败"})
return
}
envs, _ := envResp["envs"].([]any)
status, err = upsertVercelEnv(r.Context(), client, opts.ProjectID, params, headers, envs, "DS2API_CONFIG_JSON", cfgB64)
if err != nil || (status != http.StatusOK && status != http.StatusCreated) {
writeJSON(w, statusOr(status, http.StatusInternalServerError), map[string]any{"detail": "更新环境变量失败"})
return
}
savedCreds := h.saveVercelProjectCredentials(r.Context(), client, opts, params, headers, envs)
manual, deployURL := triggerVercelDeployment(r.Context(), client, opts.ProjectID, params, headers)
_ = h.Store.SetVercelSync(h.computeSyncHash(), time.Now().Unix())
result := map[string]any{"success": true, "validated_accounts": validated}
if manual {
result["message"] = "配置已同步到 Vercel请手动触发重新部署"
result["manual_deploy_required"] = true
} else {
result["message"] = "配置已同步,正在重新部署..."
result["deployment_url"] = deployURL
}
if len(failed) > 0 {
result["failed_accounts"] = failed
}
if len(savedCreds) > 0 {
result["saved_credentials"] = savedCreds
}
writeJSON(w, http.StatusOK, result)
}
type vercelSyncOptions struct {
VercelToken string
ProjectID string
TeamID string
AutoValidate bool
SaveCreds bool
UsePreconfig bool
}
func parseVercelSyncOptions(req map[string]any) (vercelSyncOptions, error) {
vercelToken, _ := req["vercel_token"].(string)
projectID, _ := req["project_id"].(string)
teamID, _ := req["team_id"].(string)
@@ -40,108 +96,117 @@ func (h *Handler) syncVercel(w http.ResponseWriter, r *http.Request) {
if strings.TrimSpace(teamID) == "" {
teamID = strings.TrimSpace(os.Getenv("VERCEL_TEAM_ID"))
}
vercelToken = strings.TrimSpace(vercelToken)
projectID = strings.TrimSpace(projectID)
teamID = strings.TrimSpace(teamID)
if vercelToken == "" || projectID == "" {
writeJSON(w, http.StatusBadRequest, map[string]any{"detail": "需要 Vercel Token 和 Project ID"})
return
return vercelSyncOptions{}, fmt.Errorf("需要 Vercel Token 和 Project ID")
}
return vercelSyncOptions{
VercelToken: vercelToken,
ProjectID: projectID,
TeamID: teamID,
AutoValidate: autoValidate,
SaveCreds: saveCreds,
UsePreconfig: usePreconfig,
}, nil
}
func buildVercelParams(teamID string) url.Values {
params := url.Values{}
if strings.TrimSpace(teamID) != "" {
params.Set("teamId", strings.TrimSpace(teamID))
}
return params
}
func (h *Handler) validateAccountsForVercelSync(ctx context.Context, enabled bool) (int, []string) {
if !enabled {
return 0, nil
}
validated, failed := 0, []string{}
if autoValidate {
for _, acc := range h.Store.Snapshot().Accounts {
if strings.TrimSpace(acc.Token) != "" {
continue
}
token, err := h.DS.Login(r.Context(), acc)
if err != nil {
failed = append(failed, acc.Identifier())
} else {
validated++
_ = h.Store.UpdateAccountToken(acc.Identifier(), token)
}
time.Sleep(500 * time.Millisecond)
for _, acc := range h.Store.Snapshot().Accounts {
if strings.TrimSpace(acc.Token) != "" {
continue
}
token, err := h.DS.Login(ctx, acc)
if err != nil {
failed = append(failed, acc.Identifier())
} else {
validated++
_ = h.Store.UpdateAccountToken(acc.Identifier(), token)
}
time.Sleep(500 * time.Millisecond)
}
return validated, failed
}
cfgJSON, _, err := h.Store.ExportJSONAndBase64()
if err != nil {
writeJSON(w, http.StatusInternalServerError, map[string]any{"detail": err.Error()})
return
func upsertVercelEnv(ctx context.Context, client *http.Client, projectID string, params url.Values, headers map[string]string, envs []any, key, value string) (int, error) {
existingID := findEnvID(envs, key)
if existingID != "" {
_, status, err := vercelRequest(ctx, client, http.MethodPatch, "https://api.vercel.com/v9/projects/"+projectID+"/env/"+existingID, params, headers, map[string]any{"value": value})
return status, err
}
cfgB64 := base64.StdEncoding.EncodeToString([]byte(cfgJSON))
client := &http.Client{Timeout: 30 * time.Second}
params := url.Values{}
if teamID != "" {
params.Set("teamId", teamID)
_, status, err := vercelRequest(ctx, client, http.MethodPost, "https://api.vercel.com/v10/projects/"+projectID+"/env", params, headers, map[string]any{
"key": key,
"value": value,
"type": "encrypted",
"target": []string{"production", "preview"},
})
return status, err
}
func (h *Handler) saveVercelProjectCredentials(ctx context.Context, client *http.Client, opts vercelSyncOptions, params url.Values, headers map[string]string, envs []any) []string {
if !opts.SaveCreds || opts.UsePreconfig {
return nil
}
headers := map[string]string{"Authorization": "Bearer " + vercelToken}
envResp, status, err := vercelRequest(r.Context(), client, http.MethodGet, "https://api.vercel.com/v9/projects/"+projectID+"/env", params, headers, nil)
if err != nil || status != http.StatusOK {
writeJSON(w, statusOr(status, http.StatusInternalServerError), map[string]any{"detail": "获取环境变量失败"})
return
saved := []string{}
creds := [][2]string{{"VERCEL_TOKEN", opts.VercelToken}, {"VERCEL_PROJECT_ID", opts.ProjectID}}
if opts.TeamID != "" {
creds = append(creds, [2]string{"VERCEL_TEAM_ID", opts.TeamID})
}
envs, _ := envResp["envs"].([]any)
existingEnvID := findEnvID(envs, "DS2API_CONFIG_JSON")
if existingEnvID != "" {
_, status, err = vercelRequest(r.Context(), client, http.MethodPatch, "https://api.vercel.com/v9/projects/"+projectID+"/env/"+existingEnvID, params, headers, map[string]any{"value": cfgB64})
} else {
_, status, err = vercelRequest(r.Context(), client, http.MethodPost, "https://api.vercel.com/v10/projects/"+projectID+"/env", params, headers, map[string]any{"key": "DS2API_CONFIG_JSON", "value": cfgB64, "type": "encrypted", "target": []string{"production", "preview"}})
}
if err != nil || (status != http.StatusOK && status != http.StatusCreated) {
writeJSON(w, statusOr(status, http.StatusInternalServerError), map[string]any{"detail": "更新环境变量失败"})
return
}
savedCreds := []string{}
if saveCreds && !usePreconfig {
creds := [][2]string{{"VERCEL_TOKEN", vercelToken}, {"VERCEL_PROJECT_ID", projectID}}
if teamID != "" {
creds = append(creds, [2]string{"VERCEL_TEAM_ID", teamID})
}
for _, kv := range creds {
id := findEnvID(envs, kv[0])
if id != "" {
_, status, _ = vercelRequest(r.Context(), client, http.MethodPatch, "https://api.vercel.com/v9/projects/"+projectID+"/env/"+id, params, headers, map[string]any{"value": kv[1]})
} else {
_, status, _ = vercelRequest(r.Context(), client, http.MethodPost, "https://api.vercel.com/v10/projects/"+projectID+"/env", params, headers, map[string]any{"key": kv[0], "value": kv[1], "type": "encrypted", "target": []string{"production", "preview"}})
}
if status == http.StatusOK || status == http.StatusCreated {
savedCreds = append(savedCreds, kv[0])
}
for _, kv := range creds {
status, _ := upsertVercelEnv(ctx, client, opts.ProjectID, params, headers, envs, kv[0], kv[1])
if status == http.StatusOK || status == http.StatusCreated {
saved = append(saved, kv[0])
}
}
projectResp, status, _ := vercelRequest(r.Context(), client, http.MethodGet, "https://api.vercel.com/v9/projects/"+projectID, params, headers, nil)
manual := true
deployURL := ""
if status == http.StatusOK {
if link, ok := projectResp["link"].(map[string]any); ok {
if linkType, _ := link["type"].(string); linkType == "github" {
repoID := intFrom(link["repoId"])
ref, _ := link["productionBranch"].(string)
if ref == "" {
ref = "main"
}
depResp, depStatus, _ := vercelRequest(r.Context(), client, http.MethodPost, "https://api.vercel.com/v13/deployments", params, headers, map[string]any{"name": projectID, "project": projectID, "target": "production", "gitSource": map[string]any{"type": "github", "repoId": repoID, "ref": ref}})
if depStatus == http.StatusOK || depStatus == http.StatusCreated {
deployURL, _ = depResp["url"].(string)
manual = false
}
}
}
return saved
}
func triggerVercelDeployment(ctx context.Context, client *http.Client, projectID string, params url.Values, headers map[string]string) (bool, string) {
projectResp, status, _ := vercelRequest(ctx, client, http.MethodGet, "https://api.vercel.com/v9/projects/"+projectID, params, headers, nil)
if status != http.StatusOK {
return true, ""
}
_ = h.Store.SetVercelSync(h.computeSyncHash(), time.Now().Unix())
result := map[string]any{"success": true, "validated_accounts": validated}
if manual {
result["message"] = "配置已同步到 Vercel请手动触发重新部署"
result["manual_deploy_required"] = true
} else {
result["message"] = "配置已同步,正在重新部署..."
result["deployment_url"] = deployURL
link, ok := projectResp["link"].(map[string]any)
if !ok {
return true, ""
}
if len(failed) > 0 {
result["failed_accounts"] = failed
linkType, _ := link["type"].(string)
if linkType != "github" {
return true, ""
}
if len(savedCreds) > 0 {
result["saved_credentials"] = savedCreds
repoID := intFrom(link["repoId"])
ref, _ := link["productionBranch"].(string)
if ref == "" {
ref = "main"
}
writeJSON(w, http.StatusOK, result)
depResp, depStatus, _ := vercelRequest(ctx, client, http.MethodPost, "https://api.vercel.com/v13/deployments", params, headers, map[string]any{
"name": projectID,
"project": projectID,
"target": "production",
"gitSource": map[string]any{
"type": "github",
"repoId": repoID,
"ref": ref,
},
})
if depStatus != http.StatusOK && depStatus != http.StatusCreated {
return true, ""
}
deployURL, _ := depResp["url"].(string)
return false, deployURL
}
func (h *Handler) vercelStatus(w http.ResponseWriter, _ *http.Request) {

View File

@@ -96,7 +96,7 @@ func accountMatchesIdentifier(acc config.Account, identifier string) bool {
return acc.Identifier() == id
}
func findAccountByIdentifier(store *config.Store, identifier string) (config.Account, bool) {
func findAccountByIdentifier(store ConfigStore, identifier string) (config.Account, bool) {
id := strings.TrimSpace(identifier)
if id == "" {
return config.Account{}, false

View File

@@ -0,0 +1,23 @@
package admin
import "errors"
// requestError carries a human-readable detail message for request
// validation failures that should be surfaced to the API client.
type requestError struct {
	detail string // client-facing failure description
}
// Error implements the error interface, returning the detail message.
func (e *requestError) Error() string {
	return e.detail
}
// newRequestError wraps detail in a *requestError so handlers can signal a
// failure message intended for the API client.
func newRequestError(detail string) error {
	e := requestError{detail: detail}
	return &e
}
// requestErrorDetail reports whether err is (or wraps) a *requestError,
// returning its detail message when it does.
func requestErrorDetail(err error) (string, bool) {
	var target *requestError
	if !errors.As(err, &target) {
		return "", false
	}
	return target.detail, true
}

View File

@@ -0,0 +1,64 @@
package admin
import (
"fmt"
"strings"
"ds2api/internal/config"
)
// normalizeSettingsConfig canonicalizes the free-form string fields of an
// admin-submitted configuration in place: whitespace is trimmed from the
// admin password hash and embeddings provider, and the toolcall mode and
// early-emit confidence are trimmed and lowercased. A nil config is left
// untouched.
func normalizeSettingsConfig(c *config.Config) {
	if c == nil {
		return
	}
	trimLower := func(s string) string {
		return strings.ToLower(strings.TrimSpace(s))
	}
	c.Admin.PasswordHash = strings.TrimSpace(c.Admin.PasswordHash)
	c.Toolcall.Mode = trimLower(c.Toolcall.Mode)
	c.Toolcall.EarlyEmitConfidence = trimLower(c.Toolcall.EarlyEmitConfidence)
	c.Embeddings.Provider = strings.TrimSpace(c.Embeddings.Provider)
}
// validateSettingsConfig checks an admin-submitted configuration for
// out-of-range or invalid values. Zero values are treated as "unset" and
// pass validation; anything else must fall within the documented bounds.
// Returns a descriptive error for the first violation found, or nil.
func validateSettingsConfig(c config.Config) error {
	if hours := c.Admin.JWTExpireHours; hours != 0 && (hours < 1 || hours > 720) {
		return fmt.Errorf("admin.jwt_expire_hours must be between 1 and 720")
	}
	if err := validateRuntimeSettings(c.Runtime); err != nil {
		return err
	}
	if ttl := c.Responses.StoreTTLSeconds; ttl != 0 && (ttl < 30 || ttl > 86400) {
		return fmt.Errorf("responses.store_ttl_seconds must be between 30 and 86400")
	}
	// Toolcall fields are normally lowercased by normalizeSettingsConfig;
	// trim again here so validation is safe to call on raw input.
	if mode := strings.TrimSpace(c.Toolcall.Mode); mode != "" && mode != "feature_match" && mode != "off" {
		return fmt.Errorf("toolcall.mode must be feature_match or off")
	}
	if level := strings.TrimSpace(c.Toolcall.EarlyEmitConfidence); level != "" && level != "high" && level != "low" && level != "off" {
		return fmt.Errorf("toolcall.early_emit_confidence must be high, low or off")
	}
	// Rejects a whitespace-only provider while allowing the empty (unset) one.
	if c.Embeddings.Provider != "" && strings.TrimSpace(c.Embeddings.Provider) == "" {
		return fmt.Errorf("embeddings.provider cannot be empty")
	}
	return nil
}
// validateRuntimeSettings checks the runtime concurrency limits. Each
// limit may be zero ("unset") or must lie within [1, bound]; additionally,
// when both are set, the global in-flight cap must be at least the
// per-account cap. Returns a descriptive error for the first violation.
func validateRuntimeSettings(runtime config.RuntimeConfig) error {
	// checkRange enforces the shared "zero or within [1, upper]" rule and
	// produces the same message text as the original per-field checks.
	checkRange := func(name string, value, upper int) error {
		if value != 0 && (value < 1 || value > upper) {
			return fmt.Errorf("%s must be between 1 and %d", name, upper)
		}
		return nil
	}
	if err := checkRange("runtime.account_max_inflight", runtime.AccountMaxInflight, 256); err != nil {
		return err
	}
	if err := checkRange("runtime.account_max_queue", runtime.AccountMaxQueue, 200000); err != nil {
		return err
	}
	if err := checkRange("runtime.global_max_inflight", runtime.GlobalMaxInflight, 200000); err != nil {
		return err
	}
	if runtime.AccountMaxInflight > 0 && runtime.GlobalMaxInflight > 0 && runtime.GlobalMaxInflight < runtime.AccountMaxInflight {
		return fmt.Errorf("runtime.global_max_inflight must be >= runtime.account_max_inflight")
	}
	return nil
}