refactor(ena-planner): align API settings with story-summary and backend proxy flow

This commit is contained in:
2026-02-25 14:50:01 +08:00
parent 5272799b15
commit 841d5f0e1f
3 changed files with 177 additions and 109 deletions

View File

@@ -1,5 +1,6 @@
import { extension_settings } from '../../../../../extensions.js';
import { getRequestHeaders, saveSettingsDebounced, substituteParamsExtended } from '../../../../../../script.js';
import { chat_completion_sources, getChatCompletionModel, oai_settings } from '../../../../../openai.js';
import { getStorySummaryForEna } from '../story-summary/story-summary.js';
import { extensionFolderPath } from '../../core/constants.js';
import { EnaPlannerStorage } from '../../core/server-storage.js';
@@ -40,11 +41,9 @@ function getDefaultSettings() {
// Planner API
api: {
channel: 'openai',
baseUrl: '',
prefixMode: 'auto',
customPrefix: '',
apiKey: '',
channel: 'st_main',
url: '',
key: '',
model: '',
stream: false,
temperature: 1,
@@ -168,30 +167,11 @@ function nowISO() {
return new Date().toISOString();
}
/**
 * Strip any trailing slashes from an API base URL.
 * @param {string} u - Raw base URL (may be empty or null).
 * @returns {string} URL without trailing slashes, or '' when input is falsy.
 */
function normalizeUrlBase(u) {
    return u ? u.replace(/\/+$/g, '') : '';
}
/**
 * Default API path prefix for a provider channel.
 * @param {string} channel - Provider channel id (e.g. 'openai', 'gemini').
 * @returns {string} '/v1beta' for Gemini, '/v1' for everything else.
 */
function getDefaultPrefixByChannel(channel) {
    return channel === 'gemini' ? '/v1beta' : '/v1';
}
/**
 * Resolve the API path prefix for the current settings: a non-empty
 * custom prefix wins when prefixMode is 'custom', otherwise the
 * channel's default prefix is used.
 * @returns {string} API path prefix (e.g. '/v1').
 */
function buildApiPrefix() {
    const settings = ensureSettings();
    const custom = settings.api.customPrefix?.trim();
    if (settings.api.prefixMode === 'custom' && custom) {
        return custom;
    }
    return getDefaultPrefixByChannel(settings.api.channel);
}
function buildUrl(path) {
const s = ensureSettings();
const base = normalizeUrlBase(s.api.baseUrl);
const prefix = buildApiPrefix();
const p = prefix.startsWith('/') ? prefix : `/${prefix}`;
const finalPrefix = p.replace(/\/+$/g, '');
const finalPath = path.startsWith('/') ? path : `/${path}`;
return `${base}${finalPrefix}${finalPath}`;
/**
 * Normalize a reverse-proxy base URL: coerce to string, trim whitespace,
 * drop trailing slashes, then remove a trailing '/v1' or '/v1beta' path
 * segment (case-insensitive) so the backend can append its own prefix.
 * Suffixes are stripped sequentially, so a URL ending '.../v1beta/v1'
 * loses both segments.
 * @param {string} url - Raw URL from settings (may be empty or null).
 * @returns {string} Normalized base URL ('' when input is falsy).
 */
function normalizeProxyBaseUrl(url) {
    let base = String(url || '').trim().replace(/\/+$/, '');
    for (const suffix of [/\/v1$/i, /\/v1beta$/i]) {
        base = base.replace(suffix, '');
    }
    return base;
}
function setSendUIBusy(busy) {
@@ -901,18 +881,39 @@ function filterPlannerForInput(rawFull) {
* Planner API calls
* --------------------------
*/
async function callPlanner(messages) {
async function callPlanner(messages, options = {}) {
const s = ensureSettings();
if (!s.api.baseUrl) throw new Error('未配置 API URL');
if (!s.api.apiKey) throw new Error('未配置 API KEY');
if (!s.api.model) throw new Error('未选择模型');
const channel = String(s.api?.channel || 'st_main').toLowerCase();
const source = {
st_main: String(oai_settings?.chat_completion_source || chat_completion_sources.OPENAI),
openai: chat_completion_sources.OPENAI,
claude: chat_completion_sources.CLAUDE,
gemini: chat_completion_sources.MAKERSUITE,
google: chat_completion_sources.MAKERSUITE,
cohere: chat_completion_sources.COHERE,
deepseek: chat_completion_sources.DEEPSEEK,
custom: chat_completion_sources.CUSTOM,
}[channel];
if (!source) throw new Error(`Unsupported channel: ${channel}`);
const url = buildUrl('/chat/completions');
const model = channel === 'st_main'
? String(getChatCompletionModel?.() || '').trim()
: String(s.api?.model || '').trim();
if (!model) throw new Error('No model selected in ST main panel or Ena settings');
const providerUrl = normalizeProxyBaseUrl(s.api?.url);
const providerKey = String(s.api?.key || '').trim();
if (channel !== 'st_main') {
if (!providerUrl) throw new Error('Please provide API URL');
if (!providerKey) throw new Error('Please provide API KEY');
}
const body = {
model: s.api.model,
type: 'quiet',
model,
messages,
stream: !!s.api.stream
stream: !!s.api.stream,
chat_completion_source: source,
custom_prompt_post_processing: oai_settings?.custom_prompt_post_processing,
};
const t = Number(s.api.temperature);
@@ -928,11 +929,48 @@ async function callPlanner(messages) {
const mt = s.api.max_tokens === '' ? null : Number(s.api.max_tokens);
if (mt != null && !Number.isNaN(mt) && mt > 0) body.max_tokens = mt;
const res = await fetch(url, {
if (source === chat_completion_sources.MAKERSUITE && body.max_tokens != null) {
body.max_output_tokens = body.max_tokens;
body.use_makersuite_sysprompt = false;
}
const reverseProxy = channel === 'st_main'
? String(oai_settings?.reverse_proxy || '').trim()
: providerUrl;
const proxyPassword = channel === 'st_main'
? String(oai_settings?.proxy_password || '').trim()
: providerKey;
if (reverseProxy && [
chat_completion_sources.CLAUDE,
chat_completion_sources.OPENAI,
chat_completion_sources.MISTRALAI,
chat_completion_sources.MAKERSUITE,
chat_completion_sources.VERTEXAI,
chat_completion_sources.DEEPSEEK,
chat_completion_sources.XAI,
chat_completion_sources.COHERE,
].includes(source)) {
body.reverse_proxy = reverseProxy;
if (proxyPassword) body.proxy_password = proxyPassword;
}
if (source === chat_completion_sources.CUSTOM) {
body.custom_url = channel === 'st_main' ? oai_settings?.custom_url : providerUrl;
body.custom_include_headers = oai_settings?.custom_include_headers;
if (proxyPassword) body.proxy_password = proxyPassword;
}
if (source === chat_completion_sources.AZURE_OPENAI) {
body.azure_base_url = oai_settings?.azure_base_url;
body.azure_deployment_name = oai_settings?.azure_deployment_name;
body.azure_api_version = oai_settings?.azure_api_version;
}
const res = await fetch('/api/backends/chat-completions/generate', {
method: 'POST',
headers: {
...getRequestHeaders(),
Authorization: `Bearer ${s.api.apiKey}`,
'Content-Type': 'application/json'
},
body: JSON.stringify(body)
@@ -940,15 +978,17 @@ async function callPlanner(messages) {
if (!res.ok) {
const text = await res.text().catch(() => '');
throw new Error(`规划请求失败: ${res.status} ${text}`.slice(0, 500));
throw new Error(`Planner request failed: ${res.status} ${text}`.slice(0, 500));
}
if (!s.api.stream) {
const data = await res.json();
return String(data?.choices?.[0]?.message?.content ?? data?.choices?.[0]?.text ?? '');
if (data?.error) throw new Error(data.error?.message || 'Planner request failed');
const text = String(data?.choices?.[0]?.message?.content ?? data?.choices?.[0]?.text ?? '');
if (text) options?.onDelta?.(text, text);
return text;
}
// SSE stream
const reader = res.body.getReader();
const decoder = new TextDecoder('utf-8');
let buf = '';
@@ -969,31 +1009,74 @@ async function callPlanner(messages) {
if (payload === '[DONE]') continue;
try {
const j = JSON.parse(payload);
if (j?.error) throw new Error(j.error?.message || 'Planner request failed');
const delta = j?.choices?.[0]?.delta;
const piece = delta?.content ?? delta?.text ?? '';
if (piece) full += piece;
} catch { }
if (piece) {
full += piece;
options?.onDelta?.(piece, full);
}
} catch {
// ignore non-json chunks
}
}
}
}
return full;
}
async function fetchModelsForUi() {
const s = ensureSettings();
if (!s.api.baseUrl) throw new Error('请先填写 API URL');
if (!s.api.apiKey) throw new Error('请先填写 API KEY');
const url = buildUrl('/models');
const res = await fetch(url, {
method: 'GET',
const channel = String(s.api?.channel || 'st_main').toLowerCase();
const source = channel === 'st_main'
? String(oai_settings?.chat_completion_source || chat_completion_sources.OPENAI)
: ({
openai: chat_completion_sources.OPENAI,
claude: chat_completion_sources.CLAUDE,
gemini: chat_completion_sources.MAKERSUITE,
google: chat_completion_sources.MAKERSUITE,
cohere: chat_completion_sources.COHERE,
deepseek: chat_completion_sources.DEEPSEEK,
custom: chat_completion_sources.CUSTOM,
}[channel]);
if (!source) throw new Error(`Unsupported channel: ${channel}`);
const providerUrl = normalizeProxyBaseUrl(s.api?.url);
const providerKey = String(s.api?.key || '').trim();
if (channel !== 'st_main') {
if (!providerUrl) throw new Error('Please provide API URL');
if (!providerKey) throw new Error('Please provide API KEY');
}
const payload = {
chat_completion_source: source,
reverse_proxy: channel === 'st_main' ? oai_settings?.reverse_proxy : providerUrl,
proxy_password: channel === 'st_main' ? oai_settings?.proxy_password : providerKey,
};
if (source === chat_completion_sources.CUSTOM) {
payload.custom_url = oai_settings?.custom_url;
payload.custom_include_headers = oai_settings?.custom_include_headers;
}
if (source === chat_completion_sources.AZURE_OPENAI) {
payload.azure_base_url = oai_settings?.azure_base_url;
payload.azure_deployment_name = oai_settings?.azure_deployment_name;
payload.azure_api_version = oai_settings?.azure_api_version;
}
const res = await fetch('/api/backends/chat-completions/status', {
method: 'POST',
headers: {
...getRequestHeaders(),
Authorization: `Bearer ${s.api.apiKey}`
}
'Content-Type': 'application/json'
},
body: JSON.stringify(payload),
cache: 'no-cache',
});
if (!res.ok) {
const text = await res.text().catch(() => '');
throw new Error(`拉取模型失败: ${res.status} ${text}`.slice(0, 300));
throw new Error(`Model list request failed: ${res.status} ${text}`.slice(0, 300));
}
const data = await res.json();
const list = Array.isArray(data?.data) ? data.data : [];
@@ -1446,4 +1529,3 @@ export function cleanupEnaPlanner() {
delete window.xiaobaixEnaPlanner;
}