feat: enhance content filtering, token usage tracking, and stream error handling in chat-stream modules

This commit is contained in:
CJACK
2026-04-05 13:41:38 +08:00
parent 47dc121690
commit eff979d9ef
13 changed files with 439 additions and 33 deletions

View File

@@ -17,6 +17,8 @@ const {
normalizePreparedToolNames,
boolDefaultTrue,
filterIncrementalToolCallDeltasByAllowed,
buildUsage,
estimateTokens,
shouldSkipPath,
isNodeStreamSupportedPath,
extractPathname,
@@ -245,6 +247,84 @@ test('parseChunkForContent strips reference markers from fragment content', () =
assert.deepEqual(parsed.parts, [{ text: '广州天气 多云', type: 'text' }]);
});
// A response-level CONTENT_FILTER status must terminate the stream, flag the
// content filter, carry the accumulated token count, and emit no text parts.
test('parseChunkForContent detects content_filter status and carries output tokens', () => {
  const filteredChunk = {
    p: 'response',
    v: [
      { p: 'status', v: 'CONTENT_FILTER' },
      { p: 'accumulated_token_usage', v: 77 },
    ],
  };
  const { parsed, finished, contentFilter, outputTokens, parts } =
    parseChunkForContent(filteredChunk, false, 'text');
  assert.equal(parsed, true);
  assert.equal(finished, true);
  assert.equal(contentFilter, true);
  assert.equal(outputTokens, 77);
  assert.deepEqual(parts, []);
});
// An explicit error object — even with a 'content_filter' code — must be
// reported through the error path (non-empty message, contentFilter false),
// and its token usage field must not leak into outputTokens.
test('parseChunkForContent keeps error branches distinct from content_filter status', () => {
  const errorChunk = {
    error: { message: 'boom' },
    code: 'content_filter',
    accumulated_token_usage: 88,
  };
  const { parsed, finished, contentFilter, errorMessage, outputTokens, parts } =
    parseChunkForContent(errorChunk, false, 'text');
  assert.equal(parsed, true);
  assert.equal(finished, true);
  assert.equal(contentFilter, false);
  assert.equal(errorMessage.length > 0, true);
  assert.equal(outputTokens, 0);
  assert.deepEqual(parts, []);
});
// A normal FINISHED status line ends the stream without tripping the content
// filter, while still propagating the accumulated token usage.
test('parseChunkForContent preserves output tokens on FINISHED lines', () => {
  const finishedChunk = { p: 'response/status', v: 'FINISHED', accumulated_token_usage: 190 };
  const { parsed, finished, contentFilter, outputTokens, parts } =
    parseChunkForContent(finishedChunk, false, 'text');
  assert.equal(parsed, true);
  assert.equal(finished, true);
  assert.equal(contentFilter, false);
  assert.equal(outputTokens, 190);
  assert.deepEqual(parts, []);
});
// When the upstream leaks a CONTENT_FILTER marker inline with real content,
// everything from the marker onward is dropped while any text (including bare
// line breaks) before it is preserved. Table-driven over the three shapes:
// marker mid-string, marker after a newline, marker after a lone newline.
test('parseChunkForContent strips leaked CONTENT_FILTER suffix and preserves line breaks', () => {
  const cases = [
    { raw: '正常输出CONTENT_FILTER你好这个问题我暂时无法回答', kept: '正常输出' },
    { raw: 'line1\nCONTENT_FILTERblocked', kept: 'line1\n' },
    { raw: '\nCONTENT_FILTERblocked', kept: '\n' },
  ];
  for (const { raw, kept } of cases) {
    const result = parseChunkForContent({ p: 'response/content', v: raw }, false, 'text');
    assert.deepEqual(result.parts, [{ text: kept, type: 'text' }]);
  }
});
// Whitespace-only strings still count as one token, and buildUsage's fourth
// argument overrides the completion token count (total = prompt + override).
test('estimateTokens preserves whitespace-only strings and buildUsage accepts output token overrides', () => {
  for (const whitespaceOnly of [' ', '\n']) {
    assert.equal(estimateTokens(whitespaceOnly), 1);
  }
  const { prompt_tokens, completion_tokens, total_tokens, completion_tokens_details } =
    buildUsage('abcd', 'ef', 'gh', 99);
  assert.equal(prompt_tokens, 1);
  assert.equal(completion_tokens, 99);
  assert.equal(total_tokens, 100);
  assert.equal(completion_tokens_details.reasoning_tokens, 1);
});
test('shouldSkipPath skips dynamic response/fragments/*/status paths only', () => {
assert.equal(shouldSkipPath('response/fragments/-16/status'), true);
assert.equal(shouldSkipPath('response/fragments/8/status'), true);

View File

@@ -30,6 +30,9 @@ test('js compat: sse fixtures', () => {
assert.deepEqual(got.parts, expected.parts, `${name}: parts mismatch`);
assert.equal(got.finished, expected.finished, `${name}: finished mismatch`);
assert.equal(got.newType, expected.new_type, `${name}: newType mismatch`);
assert.equal(Boolean(got.contentFilter), Boolean(expected.content_filter), `${name}: contentFilter mismatch`);
assert.equal(Number(got.outputTokens || 0), Number(expected.output_tokens || 0), `${name}: outputTokens mismatch`);
assert.equal(got.errorMessage || '', expected.error_message || '', `${name}: errorMessage mismatch`);
}
});