fix(ena-planner): restore stable baseline and add stream preview in input

This commit is contained in:
2026-02-25 21:05:30 +08:00
parent 886ae7168d
commit fafdb18eaf
3 changed files with 131 additions and 233 deletions

View File

@@ -40,14 +40,16 @@ export const DEFAULT_PROMPT_BLOCKS = [
3. 推进而非重复:每次规划应让故事向前推进,避免原地踏步
4. 留有空间:给出方向但不要过度规定细节,让主 AI 有创作余地
5. 遵守世界观:世界书中的规则和设定是硬约束,不可违反
`,
如有思考过程,请放在 <thinking> 中(会被自动剔除)。`,
},
{
id: 'ena-default-assistant-001',
role: 'assistant',
name: 'Assistant Seed',
content: `<think>
让我分析当前情境,梳理玩家意图、已有伏笔和世界观约束,然后规划下一步走向,输出放在<plot>...</plot>和<note>...</note>两个块中...
让我分析当前情境,梳理玩家意图、已有伏笔和世界观约束,然后规划下一步走向...
规划结果输出在<plot>...</plot>和<note>...</note>两个块中
</think>`,
},
];

View File

@@ -99,25 +99,38 @@
<div class="form-group">
<label class="form-label">渠道类型</label>
<select id="ep_api_channel" class="input">
<option value="st_main">ST Main API (no setup)</option>
<option value="openai">OpenAI compatible</option>
<option value="gemini">Gemini compatible</option>
<option value="claude">Claude compatible</option>
<option value="openai">OpenAI 兼容</option>
<option value="gemini">Gemini 兼容</option>
<option value="claude">Claude 兼容</option>
</select>
</div>
<div class="form-group hidden" id="ep_model_row">
<label class="form-label">模型</label>
<input id="ep_model" type="text" class="input" placeholder="gpt-4o, claude-3-5-sonnet...">
<div class="form-group">
<label class="form-label">路径前缀</label>
<select id="ep_prefix_mode" class="input">
<option value="auto">自动 (如 /v1)</option>
<option value="custom">自定义</option>
</select>
</div>
</div>
<div class="form-row hidden" id="ep_api_url_key_row">
<div class="form-group">
<label class="form-label">API 地址</label>
<input id="ep_api_base" type="text" class="input" placeholder="https://api.openai.com">
</div>
<div class="form-group hidden" id="ep_custom_prefix_group">
<label class="form-label">自定义前缀</label>
<input id="ep_prefix_custom" type="text" class="input" placeholder="/v1">
</div>
<div class="form-row">
<div class="form-group">
<label class="form-label">API URL</label>
<input id="ep_api_base" type="text" class="input" placeholder="https://api.openai.com 或代理地址">
<label class="form-label">API Key</label>
<div class="input-row">
<input id="ep_api_key" type="password" class="input" placeholder="sk-...">
<button id="ep_toggle_key" class="btn">显示</button>
</div>
</div>
<div class="form-group">
<label class="form-label">API KEY</label>
<input id="ep_api_key" type="password" class="input" placeholder="仅本地保存,不上传">
<label class="form-label">模型</label>
<input id="ep_model" type="text" class="input" placeholder="gpt-4o, claude-3-5-sonnet...">
</div>
</div>
<div id="ep_model_selector" class="hidden" style="margin-top:12px;">
@@ -126,7 +139,7 @@
<option value="">-- 从列表选择 --</option>
</select>
</div>
<div class="btn-group" style="margin-top:16px;" id="ep_model_actions">
<div class="btn-group" style="margin-top:16px;">
<button id="ep_fetch_models" class="btn">拉取模型列表</button>
<button id="ep_test_conn" class="btn">测试连接</button>
</div>
@@ -425,18 +438,8 @@
if (viewId === 'debug') post('xb-ena:logs-request');
}
function updateApiChannelUI() {
const isMain = $('ep_api_channel').value === 'st_main';
$('ep_api_url_key_row').classList.toggle('hidden', isMain);
$('ep_model_row').classList.toggle('hidden', isMain);
$('ep_model_actions').classList.toggle('hidden', isMain);
if (isMain) $('ep_model_selector').classList.add('hidden');
const model = $('ep_model');
if (model && isMain) {
model.placeholder = 'Empty = follow ST main model';
} else if (model) {
model.placeholder = 'gpt-4o, claude-3-5-sonnet...';
}
// Shows the custom-prefix input only while "custom" is the selected prefix mode.
function updatePrefixModeUI() {
const customSelected = $('ep_prefix_mode').value === 'custom';
$('ep_custom_prefix_group').classList.toggle('hidden', !customSelected);
}
/* ── Type conversion ── */
@@ -551,19 +554,17 @@
});
}
function renderTemplateSelect(selected) {
function renderTemplateSelect(selected = '') {
const sel = $('ep_tpl_select');
const prev = sel?.value || '';
const target = typeof selected === 'string' ? selected : prev;
sel.innerHTML = '<option value="">-- 选择模板 --</option>';
const names = Object.keys(cfg?.promptTemplates || {});
names.forEach(name => {
const opt = document.createElement('option');
opt.value = name;
opt.textContent = name;
opt.selected = name === selected;
sel.appendChild(opt);
});
sel.value = names.includes(target) ? target : '';
}
/* ── Undo ── */
@@ -649,9 +650,11 @@
$('ep_skip_plot').value = String(toBool(cfg.skipIfPlotPresent, true));
const api = cfg.api || {};
$('ep_api_channel').value = api.channel || 'st_main';
$('ep_api_base').value = api.url || '';
$('ep_api_key').value = api.key || '';
$('ep_api_channel').value = api.channel || 'openai';
$('ep_prefix_mode').value = api.prefixMode || 'auto';
$('ep_api_base').value = api.baseUrl || '';
$('ep_prefix_custom').value = api.customPrefix || '';
$('ep_api_key').value = api.apiKey || '';
$('ep_model').value = api.model || '';
$('ep_stream').value = String(toBool(api.stream, false));
$('ep_temp').value = String(toNum(api.temperature, 1));
@@ -671,7 +674,7 @@
$('ep_logs_max').value = String(toNum(cfg.logsMax, 20));
setBadge(toBool(cfg.enabled, true));
updateApiChannelUI();
updatePrefixModeUI();
renderTemplateSelect();
renderPromptList();
renderLogs();
@@ -685,8 +688,10 @@
p.api = {
channel: $('ep_api_channel').value,
url: $('ep_api_base').value.trim(),
key: $('ep_api_key').value.trim(),
prefixMode: $('ep_prefix_mode').value,
baseUrl: $('ep_api_base').value.trim(),
customPrefix: $('ep_prefix_custom').value.trim(),
apiKey: $('ep_api_key').value,
model: $('ep_model').value.trim(),
stream: toBool($('ep_stream').value, false),
temperature: toNum($('ep_temp').value, 1),
@@ -729,7 +734,17 @@
setLocalStatus('ep_test_status', '测试中…', 'loading');
});
$('ep_api_channel').addEventListener('change', () => { updateApiChannelUI(); scheduleSave(); });
$('ep_toggle_key').addEventListener('click', () => {
const input = $('ep_api_key');
const btn = $('ep_toggle_key');
if (input.type === 'password') {
input.type = 'text'; btn.textContent = '隐藏';
} else {
input.type = 'password'; btn.textContent = '显示';
}
});
$('ep_prefix_mode').addEventListener('change', updatePrefixModeUI);
$('ep_fetch_models').addEventListener('click', () => {
post('xb-ena:fetch-models');

View File

@@ -1,6 +1,5 @@
import { extension_settings } from '../../../../../extensions.js';
import { getRequestHeaders, saveSettingsDebounced, substituteParamsExtended } from '../../../../../../script.js';
import { chat_completion_sources, getChatCompletionModel, oai_settings } from '../../../../../openai.js';
import { getStorySummaryForEna } from '../story-summary/story-summary.js';
import { extensionFolderPath } from '../../core/constants.js';
import { EnaPlannerStorage } from '../../core/server-storage.js';
@@ -41,9 +40,11 @@ function getDefaultSettings() {
// Planner API
api: {
channel: 'st_main',
url: '',
key: '',
channel: 'openai',
baseUrl: '',
prefixMode: 'auto',
customPrefix: '',
apiKey: '',
model: '',
stream: false,
temperature: 1,
@@ -137,6 +138,10 @@ async function saveConfigNow() {
}
}
// Info notification: prefer the host's toastr popup, fall back to the console.
function toastInfo(msg) {
const toastr = window.toastr;
// Call through the object so toastr keeps its own `this` binding.
if (toastr?.info) return toastr.info(msg);
console.log('[EnaPlanner]', msg);
}
// Error notification: prefer the host's toastr popup, fall back to the console.
function toastErr(msg) {
const toastr = window.toastr;
// Call through the object so toastr keeps its own `this` binding.
if (toastr?.error) return toastr.error(msg);
console.error('[EnaPlanner]', msg);
}
// Current timestamp in ISO-8601 UTC form (e.g. "2026-02-25T13:05:30.000Z").
function nowISO() {
const now = new Date();
return now.toISOString();
}
function normalizeProxyBaseUrl(url) {
let base = String(url || '').trim().replace(/\/+$/, '');
if (/\/v1$/i.test(base)) base = base.replace(/\/v1$/i, '');
if (/\/v1beta$/i.test(base)) base = base.replace(/\/v1beta$/i, '');
return base;
// Strips all trailing slashes from a base URL; nullish/empty input yields ''.
function normalizeUrlBase(u) {
return u ? u.replace(/\/+$/g, '') : '';
}
// Default API path prefix per channel: Gemini uses /v1beta, all others /v1.
function getDefaultPrefixByChannel(channel) {
return channel === 'gemini' ? '/v1beta' : '/v1';
}
// Resolves the effective API path prefix: a non-blank custom prefix wins when
// prefixMode is 'custom'; otherwise fall back to the channel's default.
function buildApiPrefix() {
const { api } = ensureSettings();
const custom = api.customPrefix?.trim();
if (api.prefixMode === 'custom' && custom) return custom;
return getDefaultPrefixByChannel(api.channel);
}
// Joins base URL + prefix + path into a full endpoint URL, normalizing
// slashes so each segment contributes exactly one separator.
function buildUrl(path) {
const ensureLeadingSlash = (seg) => (seg.startsWith('/') ? seg : `/${seg}`);
const settings = ensureSettings();
const root = normalizeUrlBase(settings.api.baseUrl);
// Prefix gets a leading slash added and trailing slashes stripped.
const prefixPart = ensureLeadingSlash(buildApiPrefix()).replace(/\/+$/g, '');
const pathPart = ensureLeadingSlash(path);
return root + prefixPart + pathPart;
}
function setSendUIBusy(busy) {
@@ -177,49 +201,6 @@ function setSendUIBusy(busy) {
if (textarea) textarea.disabled = !!busy;
}
// Lazily creates (or returns the already-inserted) inline status element
// placed directly after the chat send textarea. Returns null when the
// textarea cannot be found (UI not mounted yet).
function ensurePlanningStatusEl() {
const ta = getSendTextarea();
if (!ta) return null;
// Reuse the element if an earlier call already created it.
let el = document.getElementById('xb-ena-planning-status');
if (el) return el;
el = document.createElement('div');
el.id = 'xb-ena-planning-status';
// Inline styles keep the widget independent of the host theme's stylesheets;
// starts hidden until setPlanningStatus() gives it text.
el.style.cssText = [
'margin-top:6px',
'font-size:12px',
'line-height:1.4',
'color:var(--SmartThemeBodyColor,#c9d1d9)',
'opacity:.82',
'display:none',
].join(';');
ta.insertAdjacentElement('afterend', el);
return el;
}
// Updates the inline planning-status line below the send box.
// Empty text hides the element; `type` selects the text color
// ('error' red, 'success' green, anything else the theme default).
function setPlanningStatus(text, type = 'info') {
const el = ensurePlanningStatusEl();
if (!el) return;
el.textContent = text || '';
el.style.display = text ? 'block' : 'none';
if (!text) return;
const colorByType = {
error: '#f87171',
success: '#3ecf8e',
};
el.style.color = colorByType[type] ?? 'var(--SmartThemeBodyColor,#c9d1d9)';
}
// Clears the planning-status line, either immediately or after `delay` ms.
function clearPlanningStatus(delay = 0) {
const wipe = () => setPlanningStatus('');
if (delay > 0) {
setTimeout(wipe, delay);
return;
}
wipe();
}
function safeStringify(val) {
if (val == null) return '';
if (typeof val === 'string') return val;
@@ -915,6 +896,10 @@ function filterPlannerForInput(rawFull) {
return noThink;
}
// Preview filter for streaming output: only strips <think> blocks, leaving
// the plot/note markup visible while tokens arrive.
function filterPlannerPreview(rawPartial) {
const visiblePortion = stripThinkBlocks(rawPartial);
return visiblePortion;
}
/**
* -------------------------
* Planner API calls
@@ -922,34 +907,16 @@ function filterPlannerForInput(rawFull) {
*/
async function callPlanner(messages, options = {}) {
const s = ensureSettings();
const channel = String(s.api?.channel || 'st_main').toLowerCase();
const source = {
st_main: String(oai_settings?.chat_completion_source || chat_completion_sources.OPENAI),
openai: chat_completion_sources.OPENAI,
claude: chat_completion_sources.CLAUDE,
gemini: chat_completion_sources.MAKERSUITE,
google: chat_completion_sources.MAKERSUITE,
}[channel];
if (!source) throw new Error(`Unsupported channel: ${channel}`);
if (!s.api.baseUrl) throw new Error('未配置 API URL');
if (!s.api.apiKey) throw new Error('未配置 API KEY');
if (!s.api.model) throw new Error('未选择模型');
const model = channel === 'st_main'
? String(getChatCompletionModel?.() || '').trim()
: String(s.api?.model || '').trim();
if (!model) throw new Error('No model selected in ST main panel or Ena settings');
const providerUrl = normalizeProxyBaseUrl(s.api?.url);
const providerKey = String(s.api?.key || '').trim();
if (channel !== 'st_main') {
if (!providerUrl) throw new Error('Please provide API URL');
if (!providerKey) throw new Error('Please provide API KEY');
}
const url = buildUrl('/chat/completions');
const body = {
type: 'quiet',
model,
model: s.api.model,
messages,
stream: !!s.api.stream,
chat_completion_source: source,
custom_prompt_post_processing: oai_settings?.custom_prompt_post_processing,
stream: !!s.api.stream
};
const t = Number(s.api.temperature);
@@ -965,48 +932,11 @@ async function callPlanner(messages, options = {}) {
const mt = s.api.max_tokens === '' ? null : Number(s.api.max_tokens);
if (mt != null && !Number.isNaN(mt) && mt > 0) body.max_tokens = mt;
if (source === chat_completion_sources.MAKERSUITE && body.max_tokens != null) {
body.max_output_tokens = body.max_tokens;
body.use_makersuite_sysprompt = false;
}
const reverseProxy = channel === 'st_main'
? String(oai_settings?.reverse_proxy || '').trim()
: providerUrl;
const proxyPassword = channel === 'st_main'
? String(oai_settings?.proxy_password || '').trim()
: providerKey;
if (reverseProxy && [
chat_completion_sources.CLAUDE,
chat_completion_sources.OPENAI,
chat_completion_sources.MISTRALAI,
chat_completion_sources.MAKERSUITE,
chat_completion_sources.VERTEXAI,
chat_completion_sources.DEEPSEEK,
chat_completion_sources.XAI,
chat_completion_sources.COHERE,
].includes(source)) {
body.reverse_proxy = reverseProxy;
if (proxyPassword) body.proxy_password = proxyPassword;
}
if (source === chat_completion_sources.CUSTOM) {
body.custom_url = channel === 'st_main' ? oai_settings?.custom_url : providerUrl;
body.custom_include_headers = oai_settings?.custom_include_headers;
if (proxyPassword) body.proxy_password = proxyPassword;
}
if (source === chat_completion_sources.AZURE_OPENAI) {
body.azure_base_url = oai_settings?.azure_base_url;
body.azure_deployment_name = oai_settings?.azure_deployment_name;
body.azure_api_version = oai_settings?.azure_api_version;
}
const res = await fetch('/api/backends/chat-completions/generate', {
const res = await fetch(url, {
method: 'POST',
headers: {
...getRequestHeaders(),
Authorization: `Bearer ${s.api.apiKey}`,
'Content-Type': 'application/json'
},
body: JSON.stringify(body)
@@ -1014,17 +944,17 @@ async function callPlanner(messages, options = {}) {
if (!res.ok) {
const text = await res.text().catch(() => '');
throw new Error(`Planner request failed: ${res.status} ${text}`.slice(0, 500));
throw new Error(`规划请求失败: ${res.status} ${text}`.slice(0, 500));
}
if (!s.api.stream) {
const data = await res.json();
if (data?.error) throw new Error(data.error?.message || 'Planner request failed');
const text = String(data?.choices?.[0]?.message?.content ?? data?.choices?.[0]?.text ?? '');
if (text) options?.onDelta?.(text, text);
return text;
}
// SSE stream
const reader = res.body.getReader();
const decoder = new TextDecoder('utf-8');
let buf = '';
@@ -1045,82 +975,38 @@ async function callPlanner(messages, options = {}) {
if (payload === '[DONE]') continue;
try {
const j = JSON.parse(payload);
if (j?.error) throw new Error(j.error?.message || 'Planner request failed');
const delta = j?.choices?.[0]?.delta;
const piece = delta?.content ?? delta?.text ?? '';
if (piece) {
full += piece;
options?.onDelta?.(piece, full);
}
} catch {
// ignore non-json chunks
}
} catch { }
}
}
}
return full;
}
async function fetchModelsForUi() {
const s = ensureSettings();
const channel = String(s.api?.channel || 'st_main').toLowerCase();
if (channel === 'st_main') {
const source = String(oai_settings?.chat_completion_source || chat_completion_sources.OPENAI);
const payload = {
chat_completion_source: source,
reverse_proxy: oai_settings?.reverse_proxy,
proxy_password: oai_settings?.proxy_password,
};
const res = await fetch('/api/backends/chat-completions/status', {
method: 'POST',
headers: {
...getRequestHeaders(),
'Content-Type': 'application/json'
},
body: JSON.stringify(payload),
cache: 'no-cache',
});
if (!res.ok) {
const text = await res.text().catch(() => '');
throw new Error(`Model list request failed: ${res.status} ${text}`.slice(0, 300));
if (!s.api.baseUrl) throw new Error('请先填写 API URL');
if (!s.api.apiKey) throw new Error('请先填写 API KEY');
const url = buildUrl('/models');
const res = await fetch(url, {
method: 'GET',
headers: {
...getRequestHeaders(),
Authorization: `Bearer ${s.api.apiKey}`
}
const data = await res.json();
const list = Array.isArray(data?.data) ? data.data : [];
return list.map(x => x?.id).filter(Boolean);
});
if (!res.ok) {
const text = await res.text().catch(() => '');
throw new Error(`拉取模型失败: ${res.status} ${text}`.slice(0, 300));
}
// Keep consistent with story-summary: direct URL probing for non-ST channels.
let baseUrl = String(s.api?.url || '').trim().replace(/\/+$/, '');
const apiKey = String(s.api?.key || '').trim();
if (!baseUrl) throw new Error('请先填写 API URL');
if (!apiKey) throw new Error('请先填写 API KEY');
const tryFetch = async (url) => {
try {
const res = await fetch(url, { headers: { Authorization: `Bearer ${apiKey}`, Accept: 'application/json' } });
if (!res.ok) return null;
const data = await res.json();
if (!data) return null;
if (Array.isArray(data?.data)) return data.data.map(x => x?.id).filter(Boolean);
if (Array.isArray(data?.models)) return data.models.map(x => x?.id || x?.name).filter(Boolean);
} catch { }
return null;
};
if (baseUrl.endsWith('/v1')) baseUrl = baseUrl.slice(0, -3);
if (baseUrl.endsWith('/v1beta')) baseUrl = baseUrl.slice(0, -7);
const candidates = channel === 'gemini' || channel === 'google'
? [`${baseUrl}/v1beta/models`, `${baseUrl}/v1/models`, `${baseUrl}/models`]
: [`${baseUrl}/v1/models`, `${baseUrl}/models`];
for (const url of candidates) {
const models = await tryFetch(url);
if (models?.length) return [...new Set(models)];
}
throw new Error('未获取到模型');
const data = await res.json();
const list = Array.isArray(data?.data) ? data.data : [];
return list.map(x => x?.id).filter(Boolean);
}
async function debugWorldbookForUi() {
@@ -1280,7 +1166,7 @@ async function buildPlannerMessages(rawUserInput) {
* Planning runner + logging
* --------------------------
*/
async function runPlanningOnce(rawUserInput, silent = false) {
async function runPlanningOnce(rawUserInput, silent = false, options = {}) {
const s = ensureSettings();
const log = {
@@ -1292,10 +1178,7 @@ async function runPlanningOnce(rawUserInput, silent = false) {
const { messages } = await buildPlannerMessages(rawUserInput);
log.requestMessages = messages;
const rawReply = await Promise.race([
callPlanner(messages),
new Promise((_, reject) => setTimeout(() => reject(new Error('规划超时,请重试')), 120000)),
]);
const rawReply = await callPlanner(messages, options);
log.rawReply = rawReply;
const filtered = filterPlannerForInput(rawReply);
@@ -1342,24 +1225,26 @@ async function doInterceptAndPlanThenSend() {
state.isPlanning = true;
setSendUIBusy(true);
setPlanningStatus('Planning...');
try {
const { filtered } = await runPlanningOnce(raw, false);
toastInfo('Ena Planner正在规划…');
const { filtered } = await runPlanningOnce(raw, false, {
onDelta(_piece, full) {
if (!state.isPlanning) return;
if (!ensureSettings().api.stream) return;
const preview = filterPlannerPreview(full);
ta.value = `${raw}\n\n${preview}`.trim();
}
});
const merged = `${raw}\n\n${filtered}`.trim();
ta.value = merged;
state.lastInjectedText = merged;
setPlanningStatus('Planning done', 'success');
state.bypassNextSend = true;
btn.click();
} catch (err) {
setPlanningStatus(String(err?.message || 'Planning failed'), 'error');
throw err;
} finally {
state.isPlanning = false;
setSendUIBusy(false);
clearPlanningStatus(2000);
setTimeout(() => { state.bypassNextSend = false; }, 800);
}
}
@@ -1370,11 +1255,6 @@ function installSendInterceptors() {
const btn = getSendButton();
if (!btn) return;
if (e.target !== btn && !btn.contains(e.target)) return;
if (state.isPlanning) {
e.preventDefault();
e.stopImmediatePropagation();
return;
}
if (!shouldInterceptNow()) return;
e.preventDefault();
e.stopImmediatePropagation();
@@ -1581,3 +1461,4 @@ export function cleanupEnaPlanner() {
}
delete window.xiaobaixEnaPlanner;
}