增加不同上下文模式

This commit is contained in:
CJACK
2026-04-26 14:21:15 +08:00
parent c09a4b51a5
commit 22e951b4c4
23 changed files with 129 additions and 14 deletions

View File

@@ -75,6 +75,12 @@ func TestGetSettingsIncludesHistorySplitDefaults(t *testing.T) {
if got := boolFrom(thinkingInjection["enabled"]); !got {
t.Fatalf("expected thinking_injection.enabled=true, body=%v", body)
}
if got, _ := thinkingInjection["prompt"].(string); got != "" {
t.Fatalf("expected empty custom thinking prompt, got %q body=%v", got, body)
}
if got, _ := thinkingInjection["default_prompt"].(string); got == "" {
t.Fatalf("expected default thinking prompt, body=%v", body)
}
}
func TestUpdateSettingsValidation(t *testing.T) {
@@ -264,6 +270,7 @@ func TestUpdateSettingsThinkingInjection(t *testing.T) {
payload := map[string]any{
"thinking_injection": map[string]any{
"enabled": false,
"prompt": " custom thinking prompt ",
},
}
b, _ := json.Marshal(payload)
@@ -280,6 +287,9 @@ func TestUpdateSettingsThinkingInjection(t *testing.T) {
if h.Store.ThinkingInjectionEnabled() {
t.Fatal("expected thinking injection accessor to reflect disabled config")
}
if got := h.Store.ThinkingInjectionPrompt(); got != "custom thinking prompt" {
t.Fatalf("expected custom thinking prompt, got %q", got)
}
}
func TestUpdateSettingsAutoDeleteMode(t *testing.T) {

View File

@@ -199,6 +199,9 @@ func parseSettingsUpdateRequest(req map[string]any) (*config.AdminConfig, *confi
b := boolFrom(v)
cfg.Enabled = &b
}
if v, exists := raw["prompt"]; exists {
cfg.Prompt = strings.TrimSpace(fmt.Sprintf("%v", v))
}
thinkingInjCfg = cfg
}

View File

@@ -6,6 +6,7 @@ import (
authn "ds2api/internal/auth"
"ds2api/internal/config"
"ds2api/internal/promptcompat"
)
func (h *Handler) getSettings(w http.ResponseWriter, _ *http.Request) {
@@ -39,7 +40,9 @@ func (h *Handler) getSettings(w http.ResponseWriter, _ *http.Request) {
"min_chars": h.Store.CurrentInputFileMinChars(),
},
"thinking_injection": map[string]any{
"enabled": h.Store.ThinkingInjectionEnabled(),
"enabled": h.Store.ThinkingInjectionEnabled(),
"prompt": h.Store.ThinkingInjectionPrompt(),
"default_prompt": promptcompat.DefaultThinkingInjectionPrompt,
},
"model_aliases": snap.ModelAliases,
"env_backed": h.Store.IsEnvBacked(),

View File

@@ -89,6 +89,7 @@ func (h *Handler) updateSettings(w http.ResponseWriter, r *http.Request) {
}
if thinkingInjCfg != nil {
c.ThinkingInjection.Enabled = thinkingInjCfg.Enabled
c.ThinkingInjection.Prompt = thinkingInjCfg.Prompt
}
if aliasMap != nil {
c.ModelAliases = aliasMap

View File

@@ -38,6 +38,7 @@ type ConfigStore interface {
CurrentInputFileEnabled() bool
CurrentInputFileMinChars() int
ThinkingInjectionEnabled() bool
ThinkingInjectionPrompt() string
CompatStripReferenceMarkers() bool
AutoDeleteSessions() bool
}

View File

@@ -23,6 +23,7 @@ type mockOpenAIConfig struct {
currentInputEnabled bool
currentInputMin int
thinkingInjection *bool
thinkingPrompt string
}
// ModelAliases returns the alias map configured on the mock store.
func (m mockOpenAIConfig) ModelAliases() map[string]string {
	return m.aliases
}
@@ -58,6 +59,7 @@ func (m mockOpenAIConfig) ThinkingInjectionEnabled() bool {
}
return *m.thinkingInjection
}
// ThinkingInjectionPrompt returns the custom thinking prompt configured on the mock store.
func (m mockOpenAIConfig) ThinkingInjectionPrompt() string {
	return m.thinkingPrompt
}
type streamStatusAuthStub struct{}

View File

@@ -19,6 +19,7 @@ type mockOpenAIConfig struct {
currentInputEnabled bool
currentInputMin int
thinkingInjection *bool
thinkingPrompt string
}
// ModelAliases returns the alias map configured on the mock store.
func (m mockOpenAIConfig) ModelAliases() map[string]string {
	return m.aliases
}
@@ -54,6 +55,7 @@ func (m mockOpenAIConfig) ThinkingInjectionEnabled() bool {
}
return *m.thinkingInjection
}
// ThinkingInjectionPrompt returns the custom thinking prompt configured on the mock store.
func (m mockOpenAIConfig) ThinkingInjectionPrompt() string {
	return m.thinkingPrompt
}
func TestNormalizeOpenAIChatRequestWithConfigInterface(t *testing.T) {
cfg := mockOpenAIConfig{

View File

@@ -183,6 +183,36 @@ func TestApplyThinkingInjectionAppendsLatestUserPrompt(t *testing.T) {
}
}
// TestApplyThinkingInjectionUsesCustomPrompt verifies that when a custom
// thinking prompt is configured, applyHistorySplit appends it (instead of
// the built-in default) immediately after the latest user message.
func TestApplyThinkingInjectionUsesCustomPrompt(t *testing.T) {
	stub := &inlineUploadDSStub{}
	surface := &openAITestSurface{
		Store: mockOpenAIConfig{
			wideInput:         true,
			thinkingInjection: boolPtr(true),
			thinkingPrompt:    "custom thinking format",
		},
		DS: stub,
	}
	rawReq := map[string]any{
		"model": "deepseek-v4-flash",
		"messages": []any{
			map[string]any{"role": "user", "content": "hello"},
		},
	}
	normalized, err := promptcompat.NormalizeOpenAIChatRequest(surface.Store, rawReq, "")
	if err != nil {
		t.Fatalf("normalize failed: %v", err)
	}
	result, err := surface.applyHistorySplit(context.Background(), &auth.RequestAuth{DeepSeekToken: "token"}, normalized)
	if err != nil {
		t.Fatalf("apply thinking injection failed: %v", err)
	}
	// The custom prompt must follow the latest user message, separated by a blank line.
	if !strings.Contains(result.FinalPrompt, "hello\n\ncustom thinking format") {
		t.Fatalf("expected custom thinking injection after latest user message, got %s", result.FinalPrompt)
	}
}
func TestApplyHistorySplitDirectPassThroughWhenBothSplitsDisabled(t *testing.T) {
ds := &inlineUploadDSStub{}
h := &openAITestSurface{

View File

@@ -48,6 +48,7 @@ type ConfigReader interface {
CurrentInputFileEnabled() bool
CurrentInputFileMinChars() int
ThinkingInjectionEnabled() bool
ThinkingInjectionPrompt() string
}
type Deps struct {

View File

@@ -6,7 +6,7 @@ func ApplyThinkingInjection(store ConfigReader, stdReq promptcompat.StandardRequ
if store == nil || !store.ThinkingInjectionEnabled() || !stdReq.Thinking {
return stdReq
}
messages, changed := promptcompat.AppendThinkingInjectionToLatestUser(stdReq.Messages)
messages, changed := promptcompat.AppendThinkingInjectionPromptToLatestUser(stdReq.Messages, store.ThinkingInjectionPrompt())
if !changed {
return stdReq
}