增加不同上下文模式

This commit is contained in:
CJACK
2026-04-26 14:21:15 +08:00
parent c09a4b51a5
commit 22e951b4c4
23 changed files with 129 additions and 14 deletions

View File

@@ -51,7 +51,7 @@ func (c Config) MarshalJSON() ([]byte, error) {
if c.CurrentInputFile.Enabled != nil || c.CurrentInputFile.MinChars != 0 {
m["current_input_file"] = c.CurrentInputFile
}
if c.ThinkingInjection.Enabled != nil {
if c.ThinkingInjection.Enabled != nil || strings.TrimSpace(c.ThinkingInjection.Prompt) != "" {
m["thinking_injection"] = c.ThinkingInjection
}
if c.VercelSyncHash != "" {
@@ -177,6 +177,7 @@ func (c Config) Clone() Config {
},
ThinkingInjection: ThinkingInjectionConfig{
Enabled: cloneBoolPtr(c.ThinkingInjection.Enabled),
Prompt: c.ThinkingInjection.Prompt,
},
VercelSyncHash: c.VercelSyncHash,
VercelSyncTime: c.VercelSyncTime,

View File

@@ -184,5 +184,6 @@ type CurrentInputFileConfig struct {
}
type ThinkingInjectionConfig struct {
Enabled *bool `json:"enabled,omitempty"`
Enabled *bool `json:"enabled,omitempty"`
Prompt string `json:"prompt,omitempty"`
}

View File

@@ -208,3 +208,9 @@ func (s *Store) ThinkingInjectionEnabled() bool {
}
return *s.cfg.ThinkingInjection.Enabled
}
// ThinkingInjectionPrompt returns the configured custom thinking-injection
// prompt with surrounding whitespace stripped. The config field is copied
// under the read lock so trimming happens on a consistent snapshot.
func (s *Store) ThinkingInjectionPrompt() string {
	s.mu.RLock()
	prompt := s.cfg.ThinkingInjection.Prompt
	s.mu.RUnlock()
	return strings.TrimSpace(prompt)
}

View File

@@ -83,4 +83,9 @@ func TestStoreThinkingInjectionAccessors(t *testing.T) {
if store.ThinkingInjectionEnabled() {
t.Fatal("expected thinking injection disabled by explicit config")
}
store.cfg.ThinkingInjection.Prompt = " custom thinking prompt "
if got := store.ThinkingInjectionPrompt(); got != "custom thinking prompt" {
t.Fatalf("thinking injection prompt=%q want custom thinking prompt", got)
}
}

View File

@@ -75,6 +75,12 @@ func TestGetSettingsIncludesHistorySplitDefaults(t *testing.T) {
if got := boolFrom(thinkingInjection["enabled"]); !got {
t.Fatalf("expected thinking_injection.enabled=true, body=%v", body)
}
if got, _ := thinkingInjection["prompt"].(string); got != "" {
t.Fatalf("expected empty custom thinking prompt, got %q body=%v", got, body)
}
if got, _ := thinkingInjection["default_prompt"].(string); got == "" {
t.Fatalf("expected default thinking prompt, body=%v", body)
}
}
func TestUpdateSettingsValidation(t *testing.T) {
@@ -264,6 +270,7 @@ func TestUpdateSettingsThinkingInjection(t *testing.T) {
payload := map[string]any{
"thinking_injection": map[string]any{
"enabled": false,
"prompt": " custom thinking prompt ",
},
}
b, _ := json.Marshal(payload)
@@ -280,6 +287,9 @@ func TestUpdateSettingsThinkingInjection(t *testing.T) {
if h.Store.ThinkingInjectionEnabled() {
t.Fatal("expected thinking injection accessor to reflect disabled config")
}
if got := h.Store.ThinkingInjectionPrompt(); got != "custom thinking prompt" {
t.Fatalf("expected custom thinking prompt, got %q", got)
}
}
func TestUpdateSettingsAutoDeleteMode(t *testing.T) {

View File

@@ -199,6 +199,9 @@ func parseSettingsUpdateRequest(req map[string]any) (*config.AdminConfig, *confi
b := boolFrom(v)
cfg.Enabled = &b
}
if v, exists := raw["prompt"]; exists {
cfg.Prompt = strings.TrimSpace(fmt.Sprintf("%v", v))
}
thinkingInjCfg = cfg
}

View File

@@ -6,6 +6,7 @@ import (
authn "ds2api/internal/auth"
"ds2api/internal/config"
"ds2api/internal/promptcompat"
)
func (h *Handler) getSettings(w http.ResponseWriter, _ *http.Request) {
@@ -39,7 +40,9 @@ func (h *Handler) getSettings(w http.ResponseWriter, _ *http.Request) {
"min_chars": h.Store.CurrentInputFileMinChars(),
},
"thinking_injection": map[string]any{
"enabled": h.Store.ThinkingInjectionEnabled(),
"enabled": h.Store.ThinkingInjectionEnabled(),
"prompt": h.Store.ThinkingInjectionPrompt(),
"default_prompt": promptcompat.DefaultThinkingInjectionPrompt,
},
"model_aliases": snap.ModelAliases,
"env_backed": h.Store.IsEnvBacked(),

View File

@@ -89,6 +89,7 @@ func (h *Handler) updateSettings(w http.ResponseWriter, r *http.Request) {
}
if thinkingInjCfg != nil {
c.ThinkingInjection.Enabled = thinkingInjCfg.Enabled
c.ThinkingInjection.Prompt = thinkingInjCfg.Prompt
}
if aliasMap != nil {
c.ModelAliases = aliasMap

View File

@@ -38,6 +38,7 @@ type ConfigStore interface {
CurrentInputFileEnabled() bool
CurrentInputFileMinChars() int
ThinkingInjectionEnabled() bool
ThinkingInjectionPrompt() string
CompatStripReferenceMarkers() bool
AutoDeleteSessions() bool
}

View File

@@ -23,6 +23,7 @@ type mockOpenAIConfig struct {
currentInputEnabled bool
currentInputMin int
thinkingInjection *bool
thinkingPrompt string
}
func (m mockOpenAIConfig) ModelAliases() map[string]string { return m.aliases }
@@ -58,6 +59,7 @@ func (m mockOpenAIConfig) ThinkingInjectionEnabled() bool {
}
return *m.thinkingInjection
}
// ThinkingInjectionPrompt returns the canned prompt configured on the mock.
func (m mockOpenAIConfig) ThinkingInjectionPrompt() string { return m.thinkingPrompt }
type streamStatusAuthStub struct{}

View File

@@ -19,6 +19,7 @@ type mockOpenAIConfig struct {
currentInputEnabled bool
currentInputMin int
thinkingInjection *bool
thinkingPrompt string
}
func (m mockOpenAIConfig) ModelAliases() map[string]string { return m.aliases }
@@ -54,6 +55,7 @@ func (m mockOpenAIConfig) ThinkingInjectionEnabled() bool {
}
return *m.thinkingInjection
}
// ThinkingInjectionPrompt returns the canned prompt configured on the mock.
func (m mockOpenAIConfig) ThinkingInjectionPrompt() string { return m.thinkingPrompt }
func TestNormalizeOpenAIChatRequestWithConfigInterface(t *testing.T) {
cfg := mockOpenAIConfig{

View File

@@ -183,6 +183,36 @@ func TestApplyThinkingInjectionAppendsLatestUserPrompt(t *testing.T) {
}
}
// TestApplyThinkingInjectionUsesCustomPrompt verifies that a configured
// custom prompt is the text appended after the latest user message when
// thinking injection is enabled.
func TestApplyThinkingInjectionUsesCustomPrompt(t *testing.T) {
	stub := &inlineUploadDSStub{}
	surface := &openAITestSurface{
		Store: mockOpenAIConfig{
			wideInput:         true,
			thinkingInjection: boolPtr(true),
			thinkingPrompt:    "custom thinking format",
		},
		DS: stub,
	}
	rawReq := map[string]any{
		"model": "deepseek-v4-flash",
		"messages": []any{
			map[string]any{"role": "user", "content": "hello"},
		},
	}
	stdReq, err := promptcompat.NormalizeOpenAIChatRequest(surface.Store, rawReq, "")
	if err != nil {
		t.Fatalf("normalize failed: %v", err)
	}
	out, err := surface.applyHistorySplit(context.Background(), &auth.RequestAuth{DeepSeekToken: "token"}, stdReq)
	if err != nil {
		t.Fatalf("apply thinking injection failed: %v", err)
	}
	if want := "hello\n\ncustom thinking format"; !strings.Contains(out.FinalPrompt, want) {
		t.Fatalf("expected custom thinking injection after latest user message, got %s", out.FinalPrompt)
	}
}
func TestApplyHistorySplitDirectPassThroughWhenBothSplitsDisabled(t *testing.T) {
ds := &inlineUploadDSStub{}
h := &openAITestSurface{

View File

@@ -48,6 +48,7 @@ type ConfigReader interface {
CurrentInputFileEnabled() bool
CurrentInputFileMinChars() int
ThinkingInjectionEnabled() bool
ThinkingInjectionPrompt() string
}
type Deps struct {

View File

@@ -6,7 +6,7 @@ func ApplyThinkingInjection(store ConfigReader, stdReq promptcompat.StandardRequ
if store == nil || !store.ThinkingInjectionEnabled() || !stdReq.Thinking {
return stdReq
}
messages, changed := promptcompat.AppendThinkingInjectionToLatestUser(stdReq.Messages)
messages, changed := promptcompat.AppendThinkingInjectionPromptToLatestUser(stdReq.Messages, store.ThinkingInjectionPrompt())
if !changed {
return stdReq
}

View File

@@ -12,9 +12,17 @@ const (
)
// AppendThinkingInjectionToLatestUser appends the thinking-injection prompt
// to the most recent user message. It delegates to
// AppendThinkingInjectionPromptToLatestUser with an empty custom prompt,
// which makes that function fall back to DefaultThinkingInjectionPrompt.
func AppendThinkingInjectionToLatestUser(messages []any) ([]any, bool) {
	return AppendThinkingInjectionPromptToLatestUser(messages, "")
}
func AppendThinkingInjectionPromptToLatestUser(messages []any, injectionPrompt string) ([]any, bool) {
if len(messages) == 0 {
return messages, false
}
injectionPrompt = strings.TrimSpace(injectionPrompt)
if injectionPrompt == "" {
injectionPrompt = DefaultThinkingInjectionPrompt
}
for i := len(messages) - 1; i >= 0; i-- {
msg, ok := messages[i].(map[string]any)
if !ok {
@@ -24,10 +32,11 @@ func AppendThinkingInjectionToLatestUser(messages []any) ([]any, bool) {
continue
}
content := msg["content"]
if strings.Contains(NormalizeOpenAIContentForPrompt(content), ThinkingInjectionMarker) {
normalizedContent := NormalizeOpenAIContentForPrompt(content)
if strings.Contains(normalizedContent, ThinkingInjectionMarker) || strings.Contains(normalizedContent, injectionPrompt) {
return messages, false
}
updatedContent := appendThinkingInjectionToContent(content)
updatedContent := appendThinkingInjectionToContent(content, injectionPrompt)
out := append([]any(nil), messages...)
cloned := make(map[string]any, len(msg))
for k, v := range msg {
@@ -40,20 +49,20 @@ func AppendThinkingInjectionToLatestUser(messages []any) ([]any, bool) {
return messages, false
}
func appendThinkingInjectionToContent(content any) any {
func appendThinkingInjectionToContent(content any, injectionPrompt string) any {
switch x := content.(type) {
case string:
return appendTextBlock(x, DefaultThinkingInjectionPrompt)
return appendTextBlock(x, injectionPrompt)
case []any:
out := append([]any(nil), x...)
out = append(out, map[string]any{
"type": "text",
"text": DefaultThinkingInjectionPrompt,
"text": injectionPrompt,
})
return out
default:
text := NormalizeOpenAIContentForPrompt(content)
return appendTextBlock(text, DefaultThinkingInjectionPrompt)
return appendTextBlock(text, injectionPrompt)
}
}

View File

@@ -51,6 +51,21 @@ func TestAppendThinkingInjectionToLatestUserArrayContent(t *testing.T) {
}
}
// TestAppendThinkingInjectionToLatestUserCustomPrompt checks that an explicit
// custom prompt is appended verbatim after the latest user message text.
func TestAppendThinkingInjectionToLatestUserCustomPrompt(t *testing.T) {
	msgs := []any{
		map[string]any{"role": "user", "content": "latest"},
	}
	result, changed := AppendThinkingInjectionPromptToLatestUser(msgs, "custom thinking format")
	if !changed {
		t.Fatal("expected custom thinking injection to be appended")
	}
	first, _ := result[0].(map[string]any)
	text, _ := first["content"].(string)
	if !strings.Contains(text, "latest\n\ncustom thinking format") {
		t.Fatalf("expected custom injection after latest user text, got %q", text)
	}
}
func TestAppendThinkingInjectionToLatestUserSkipsDuplicate(t *testing.T) {
messages := []any{
map[string]any{"role": "user", "content": "latest\n\n" + DefaultThinkingInjectionPrompt},