fix(story-summary): stabilize L0 extraction on upstream main

This commit is contained in:
2026-04-02 17:00:00 +08:00
parent 69864d97b5
commit 6601471aac
3 changed files with 31 additions and 25 deletions

View File

@@ -17,8 +17,9 @@ const MODULE_ID = 'atom-extraction';
const CONCURRENCY = 10;
const RETRY_COUNT = 2;
const RETRY_DELAY = 500;
const DEFAULT_TIMEOUT = 20000;
const DEFAULT_TIMEOUT = 40000;
const STAGGER_DELAY = 80;
const DEBUG_RAW_PREVIEW_LEN = 800;
let batchCancelled = false;
@@ -81,14 +82,18 @@ const SYSTEM_PROMPT = `你是场景摘要器。从一轮对话中提取1-2个场
输出:
{"anchors":[{"scene":"火山口上艾拉举起圣剑刺穿古龙的心脏,龙血溅满铠甲,古龙轰然倒地,艾拉跪倒在滚烫的岩石上痛哭,完成了她不得不做的弑杀","edges":[{"s":"艾拉","t":"古龙","r":"以圣剑刺穿心脏"}],"where":"火山口"}]}`;
const JSON_PREFILL = '{"anchors":[';
// ============================================================================
// 睡眠工具
// ============================================================================
const sleep = (ms) => new Promise(r => setTimeout(r, ms));
function previewText(text, maxLen = DEBUG_RAW_PREVIEW_LEN) {
const raw = String(text ?? '').replace(/\s+/g, ' ').trim();
if (!raw) return '(empty)';
return raw.length > maxLen ? `${raw.slice(0, maxLen)} ...(truncated)` : raw;
}
const ACTION_STRIP_WORDS = [
'突然', '非常', '有些', '有点', '轻轻', '悄悄', '缓缓', '立刻',
'马上', '然后', '并且', '而且', '开始', '继续', '再次', '正在',
@@ -206,7 +211,7 @@ async function extractAtomsForRoundWithRetry(userMessage, aiMessage, aiFloor, op
const aiText = filterText(aiMessage.mes);
parts.push(`<assistant>\n${aiText}\n</assistant>`);
const input = `<round>\n${parts.join('\n')}\n</round>`;
const input = `<round>\n${parts.join('\n')}\n</round>\n请读取上述 <round> 内容,提取 1-2 个场景锚点,并严格按 JSON 输出。\n不要解释,不要续写,不要角色扮演,不要输出 JSON 以外的任何内容。`;
for (let attempt = 0; attempt <= RETRY_COUNT; attempt++) {
if (batchCancelled) return [];
@@ -215,7 +220,6 @@ async function extractAtomsForRoundWithRetry(userMessage, aiMessage, aiFloor, op
const response = await callLLM([
{ role: 'system', content: SYSTEM_PROMPT },
{ role: 'user', content: input },
{ role: 'assistant', content: JSON_PREFILL },
], {
temperature: 0.3,
max_tokens: 600,
@@ -223,6 +227,7 @@ async function extractAtomsForRoundWithRetry(userMessage, aiMessage, aiFloor, op
});
const rawText = String(response || '');
xbLog.info(MODULE_ID, `floor ${aiFloor} attempt ${attempt} rawText(len=${rawText.length}): ${previewText(rawText)}`);
if (!rawText.trim()) {
if (attempt < RETRY_COUNT) {
await sleep(RETRY_DELAY);
@@ -231,11 +236,11 @@ async function extractAtomsForRoundWithRetry(userMessage, aiMessage, aiFloor, op
return null;
}
const fullJson = JSON_PREFILL + rawText;
xbLog.info(MODULE_ID, `floor ${aiFloor} attempt ${attempt} parseSource(len=${rawText.length}): ${previewText(rawText)}`);
let parsed;
try {
parsed = parseJson(fullJson);
parsed = parseJson(rawText);
} catch (e) {
xbLog.warn(MODULE_ID, `floor ${aiFloor} JSON解析失败 (attempt ${attempt})`);
if (attempt < RETRY_COUNT) {
@@ -248,6 +253,7 @@ async function extractAtomsForRoundWithRetry(userMessage, aiMessage, aiFloor, op
// 兼容:优先 anchors,回退 atoms
const rawAnchors = parsed?.anchors;
if (!rawAnchors || !Array.isArray(rawAnchors)) {
xbLog.warn(MODULE_ID, `floor ${aiFloor} attempt ${attempt} 缺少有效 anchors,parsed=${previewText(JSON.stringify(parsed))}`);
if (attempt < RETRY_COUNT) {
await sleep(RETRY_DELAY);
continue;
@@ -261,6 +267,12 @@ async function extractAtomsForRoundWithRetry(userMessage, aiMessage, aiFloor, op
.map((a, idx) => anchorToAtom(a, aiFloor, idx))
.filter(Boolean);
xbLog.info(MODULE_ID, `floor ${aiFloor} attempt ${attempt} anchors=${rawAnchors.length} atoms=${atoms.length}`);
if (rawAnchors.length === 0) {
return [];
}
return atoms;
} catch (e) {
@@ -373,4 +385,3 @@ export async function batchExtractAtoms(chat, onProgress) {
return allAtoms;
}

View File

@@ -29,12 +29,13 @@ function b64UrlEncode(str) {
/**
* 统一LLM调用 - 走酒馆后端(非流式)
* assistant prefill 用 bottomassistant 参数传递
* 临时改为标准 messages 调用,避免 bottomassistant prefill 兼容性问题。
*/
export async function callLLM(messages, options = {}) {
const {
temperature = 0.2,
max_tokens = 500,
timeout = 40000,
} = options;
const mod = getStreamingModule();
@@ -45,14 +46,7 @@ export async function callLLM(messages, options = {}) {
throw new Error('L0 requires siliconflow API key');
}
// 分离 assistant prefill
let topMessages = [...messages];
let assistantPrefill = '';
if (topMessages.length > 0 && topMessages[topMessages.length - 1]?.role === 'assistant') {
const lastMsg = topMessages.pop();
assistantPrefill = lastMsg.content || '';
}
const topMessages = [...messages].filter(msg => msg?.role !== 'assistant');
const top64 = b64UrlEncode(JSON.stringify(topMessages));
const uniqueId = generateUniqueId('l0');
@@ -74,13 +68,14 @@ export async function callLLM(messages, options = {}) {
args.enable_thinking = 'false';
}
// ★ 用 bottomassistant 参数传递 prefill
if (assistantPrefill) {
args.bottomassistant = assistantPrefill;
}
try {
const result = await mod.xbgenrawCommand(args, '');
const timeoutPromise = new Promise((_, reject) => {
setTimeout(() => reject(new Error(`L0 request timeout after ${timeout}ms`)), timeout);
});
const result = await Promise.race([
mod.xbgenrawCommand(args, ''),
timeoutPromise,
]);
return String(result ?? '');
} catch (e) {
xbLog.error(MODULE_ID, 'LLM调用失败', e);