diff --git a/modules/story-summary/data/config.js b/modules/story-summary/data/config.js
index a7944c4..a5b5f52 100644
--- a/modules/story-summary/data/config.js
+++ b/modules/story-summary/data/config.js
@@ -254,6 +254,12 @@ export const DEFAULT_SUMMARY_USER_CONFIRM_PROMPT = `怎么截断了!重新完
`;
export const DEFAULT_SUMMARY_ASSISTANT_PREFILL_PROMPT = '下面重新生成完整JSON。';
+const DEFAULT_VECTOR_PROVIDER = "siliconflow";
+const DEFAULT_L0_URL = "https://api.siliconflow.cn/v1";
+const DEFAULT_OPENROUTER_URL = "https://openrouter.ai/api/v1";
+const DEFAULT_L0_MODEL = "Qwen/Qwen3-8B";
+const DEFAULT_EMBEDDING_MODEL = "BAAI/bge-m3";
+const DEFAULT_RERANK_MODEL = "BAAI/bge-reranker-v2-m3";
export function getSettings() {
const ext = (extension_settings[EXT_ID] ||= {});
@@ -261,6 +267,51 @@ export function getSettings() {
return ext;
}
+function normalizeOpenAiCompatApiConfig(src, defaults = {}) {
+ const provider = String(src?.provider || defaults.provider || DEFAULT_VECTOR_PROVIDER).toLowerCase();
+ const defaultUrl = provider === "openrouter"
+ ? DEFAULT_OPENROUTER_URL
+ : String(defaults.url || DEFAULT_L0_URL);
+ return {
+ provider,
+ url: String(src?.url || defaultUrl || "").trim(),
+ key: String(src?.key || defaults.key || "").trim(),
+ model: String(src?.model || defaults.model || "").trim(),
+ modelCache: Array.isArray(src?.modelCache) ? src.modelCache.filter(Boolean) : [],
+ };
+}
+
+function normalizeVectorConfig(rawVector = null) {
+ const legacyOnline = rawVector?.online || {};
+ const sharedProvider = String(legacyOnline.provider || DEFAULT_VECTOR_PROVIDER).toLowerCase();
+ const sharedUrl = String(legacyOnline.url || (sharedProvider === "openrouter" ? DEFAULT_OPENROUTER_URL : DEFAULT_L0_URL)).trim();
+ const sharedKey = String(legacyOnline.key || "").trim();
+
+ return {
+ enabled: !!rawVector?.enabled,
+ engine: "online",
+ l0Concurrency: Math.max(1, Math.min(50, Number(rawVector?.l0Concurrency) || 10)),
+ l0Api: normalizeOpenAiCompatApiConfig(rawVector?.l0Api, {
+ provider: sharedProvider,
+ url: sharedUrl,
+ key: sharedKey,
+ model: DEFAULT_L0_MODEL,
+ }),
+ embeddingApi: normalizeOpenAiCompatApiConfig(rawVector?.embeddingApi, {
+ provider: DEFAULT_VECTOR_PROVIDER,
+ url: DEFAULT_L0_URL,
+ key: sharedKey,
+ model: DEFAULT_EMBEDDING_MODEL,
+ }),
+ rerankApi: normalizeOpenAiCompatApiConfig(rawVector?.rerankApi, {
+ provider: DEFAULT_VECTOR_PROVIDER,
+ url: DEFAULT_L0_URL,
+ key: sharedKey,
+ model: DEFAULT_RERANK_MODEL,
+ }),
+ };
+}
+
export function getSummaryPanelConfig() {
const clampKeepVisibleCount = (value) => {
const n = Number.parseInt(value, 10);
@@ -299,7 +350,7 @@ export function getSummaryPanelConfig() {
summaryAssistantPrefillPrompt: DEFAULT_SUMMARY_ASSISTANT_PREFILL_PROMPT,
memoryTemplate: DEFAULT_MEMORY_PROMPT_TEMPLATE,
},
- vector: null,
+ vector: normalizeVectorConfig(),
};
try {
@@ -320,7 +371,7 @@ export function getSummaryPanelConfig() {
ui: { ...defaults.ui, ...(parsed.ui || {}) },
textFilterRules,
prompts: { ...defaults.prompts, ...(parsed.prompts || {}) },
- vector: parsed.vector || null,
+ vector: normalizeVectorConfig(parsed.vector || null),
};
if (result.trigger.timing === "manual") result.trigger.enabled = false;
@@ -349,16 +400,7 @@ export function getVectorConfig() {
if (!raw) return null;
const parsed = JSON.parse(raw);
- const cfg = parsed.vector || null;
- if (!cfg) return null;
-
- // Keep vector side normalized to online + siliconflow.
- cfg.engine = "online";
- cfg.online = cfg.online || {};
- cfg.online.provider = "siliconflow";
- cfg.online.model = "BAAI/bge-m3";
-
- return cfg;
+ return parsed.vector ? normalizeVectorConfig(parsed.vector) : normalizeVectorConfig();
} catch {
return null;
}
@@ -376,15 +418,7 @@ export function saveVectorConfig(vectorCfg) {
const raw = localStorage.getItem("summary_panel_config") || "{}";
const parsed = JSON.parse(raw);
- parsed.vector = {
- enabled: !!vectorCfg?.enabled,
- engine: "online",
- online: {
- provider: "siliconflow",
- key: vectorCfg?.online?.key || "",
- model: "BAAI/bge-m3",
- },
- };
+ parsed.vector = normalizeVectorConfig(vectorCfg || null);
localStorage.setItem("summary_panel_config", JSON.stringify(parsed));
CommonSettingStorage.set(SUMMARY_CONFIG_KEY, parsed);
diff --git a/modules/story-summary/story-summary-ui.js b/modules/story-summary/story-summary-ui.js
index b5490fd..97c49fc 100644
--- a/modules/story-summary/story-summary-ui.js
+++ b/modules/story-summary/story-summary-ui.js
@@ -297,6 +297,11 @@ All checks passed. Beginning incremental extraction...
claude: { url: 'https://api.anthropic.com', needKey: true, canFetch: false },
custom: { url: '', needKey: true, canFetch: true }
};
+ const VECTOR_PROVIDER_DEFAULTS = {
+ siliconflow: { url: 'https://api.siliconflow.cn/v1', needKey: true, canFetch: true },
+ openrouter: { url: 'https://openrouter.ai/api/v1', needKey: true, canFetch: true },
+ custom: { url: '', needKey: true, canFetch: true }
+ };
const SECTION_META = {
keywords: { title: '编辑关键词', hint: '每行一个关键词,格式:关键词|权重(核心/重要/一般)' },
@@ -344,7 +349,14 @@ All checks passed. Beginning incremental extraction...
memoryTemplate: '',
},
textFilterRules: [...DEFAULT_FILTER_RULES],
- vector: { enabled: false, engine: 'online', local: { modelId: 'bge-small-zh' }, online: { provider: 'siliconflow', url: '', key: '', model: '' } }
+ vector: {
+ enabled: false,
+ engine: 'online',
+ l0Concurrency: 10,
+ l0Api: { provider: 'siliconflow', url: 'https://api.siliconflow.cn/v1', key: '', model: 'Qwen/Qwen3-8B', modelCache: [] },
+ embeddingApi: { provider: 'siliconflow', url: 'https://api.siliconflow.cn/v1', key: '', model: 'BAAI/bge-m3', modelCache: [] },
+ rerankApi: { provider: 'siliconflow', url: 'https://api.siliconflow.cn/v1', key: '', model: 'BAAI/bge-reranker-v2-m3', modelCache: [] }
+ }
};
let summaryData = { keywords: [], events: [], characters: { main: [], relationships: [] }, arcs: [], facts: [] };
@@ -369,6 +381,42 @@ All checks passed. Beginning incremental extraction...
window.parent.postMessage({ source: 'LittleWhiteBox-StoryFrame', type, ...data }, PARENT_ORIGIN);
}
+ function normalizeVectorConfigUI(raw = null) {
+ const base = JSON.parse(JSON.stringify(config.vector));
+ const legacyOnline = raw?.online || {};
+ const sharedKey = String(legacyOnline.key || '').trim();
+ const sharedUrl = String(legacyOnline.url || '').trim();
+
+ if (raw) {
+ base.enabled = !!raw.enabled;
+ base.engine = 'online';
+ base.l0Concurrency = Math.max(1, Math.min(50, Number(raw.l0Concurrency) || 10));
+ Object.assign(base.l0Api, {
+ provider: raw.l0Api?.provider || legacyOnline.provider || base.l0Api.provider,
+ url: raw.l0Api?.url || sharedUrl || base.l0Api.url,
+ key: raw.l0Api?.key || sharedKey || base.l0Api.key,
+ model: raw.l0Api?.model || base.l0Api.model,
+ modelCache: Array.isArray(raw.l0Api?.modelCache) ? raw.l0Api.modelCache : [],
+ });
+ Object.assign(base.embeddingApi, {
+ provider: raw.embeddingApi?.provider || base.embeddingApi.provider,
+ url: raw.embeddingApi?.url || sharedUrl || base.embeddingApi.url,
+ key: raw.embeddingApi?.key || sharedKey || base.embeddingApi.key,
+ model: raw.embeddingApi?.model || legacyOnline.model || base.embeddingApi.model,
+ modelCache: Array.isArray(raw.embeddingApi?.modelCache) ? raw.embeddingApi.modelCache : [],
+ });
+ Object.assign(base.rerankApi, {
+ provider: raw.rerankApi?.provider || base.rerankApi.provider,
+ url: raw.rerankApi?.url || sharedUrl || base.rerankApi.url,
+ key: raw.rerankApi?.key || sharedKey || base.rerankApi.key,
+ model: raw.rerankApi?.model || base.rerankApi.model,
+ modelCache: Array.isArray(raw.rerankApi?.modelCache) ? raw.rerankApi.modelCache : [],
+ });
+ }
+
+ return base;
+ }
+
// ═══════════════════════════════════════════════════════════════════════════
// Config Management
// ═══════════════════════════════════════════════════════════════════════════
@@ -387,7 +435,7 @@ All checks passed. Beginning incremental extraction...
config.textFilterRules = Array.isArray(p.textFilterRules)
? p.textFilterRules
: (Array.isArray(p.vector?.textFilterRules) ? p.vector.textFilterRules : [...DEFAULT_FILTER_RULES]);
- if (p.vector) config.vector = p.vector;
+ if (p.vector) config.vector = normalizeVectorConfigUI(p.vector);
if (config.trigger.timing === 'manual' && config.trigger.enabled) {
config.trigger.enabled = false;
saveConfig();
@@ -409,7 +457,7 @@ All checks passed. Beginning incremental extraction...
: (Array.isArray(cfg.vector?.textFilterRules)
? cfg.vector.textFilterRules
: (Array.isArray(config.textFilterRules) ? config.textFilterRules : [...DEFAULT_FILTER_RULES]));
- if (cfg.vector) config.vector = cfg.vector;
+ if (cfg.vector) config.vector = normalizeVectorConfigUI(cfg.vector);
if (config.trigger.timing === 'manual') config.trigger.enabled = false;
localStorage.setItem('summary_panel_config', JSON.stringify(config));
}
@@ -422,7 +470,14 @@ All checks passed. Beginning incremental extraction...
config.textFilterRules = collectFilterRules();
}
if (!config.vector) {
- config.vector = { enabled: false, engine: 'online', online: { provider: 'siliconflow', key: '', model: 'BAAI/bge-m3' } };
+ config.vector = {
+ enabled: false,
+ engine: 'online',
+ l0Concurrency: 10,
+ l0Api: { provider: 'siliconflow', url: 'https://api.siliconflow.cn/v1', key: '', model: 'Qwen/Qwen3-8B', modelCache: [] },
+ embeddingApi: { provider: 'siliconflow', url: 'https://api.siliconflow.cn/v1', key: '', model: 'BAAI/bge-m3', modelCache: [] },
+ rerankApi: { provider: 'siliconflow', url: 'https://api.siliconflow.cn/v1', key: '', model: 'BAAI/bge-reranker-v2-m3', modelCache: [] }
+ };
}
localStorage.setItem('summary_panel_config', JSON.stringify(config));
postMsg('SAVE_PANEL_CONFIG', { config });
@@ -435,15 +490,107 @@ All checks passed. Beginning incremental extraction...
// Vector Config UI
// ═══════════════════════════════════════════════════════════════════════════
+ function getVectorApiConfig(prefix) {
+ return {
+ provider: $(`${prefix}-api-provider`)?.value || 'siliconflow',
+ url: $(`${prefix}-api-url`)?.value?.trim() || '',
+ key: $(`${prefix}-api-key`)?.value?.trim() || '',
+ model: $(`${prefix}-api-model-text`)?.value?.trim() || '',
+ modelCache: Array.isArray(config.vector?.[`${prefix}Api`]?.modelCache)
+ ? [...config.vector[`${prefix}Api`].modelCache]
+ : [],
+ };
+ }
+
+ function loadVectorApiConfig(prefix, cfg) {
+ const next = cfg || {};
+ $(`${prefix}-api-provider`).value = next.provider || 'siliconflow';
+ $(`${prefix}-api-url`).value = next.url || '';
+ $(`${prefix}-api-key`).value = next.key || '';
+ $(`${prefix}-api-model-text`).value = next.model || '';
+
+ const cache = Array.isArray(next.modelCache) ? next.modelCache : [];
+ setSelectOptions($(`${prefix}-api-model-select`), cache, '请选择');
+ $(`${prefix}-api-model-select`).value = cache.includes(next.model) ? next.model : '';
+ updateVectorProviderUI(prefix, next.provider || 'siliconflow');
+ }
+
+ function updateVectorProviderUI(prefix, provider) {
+ const pv = VECTOR_PROVIDER_DEFAULTS[provider] || VECTOR_PROVIDER_DEFAULTS.custom;
+ const cache = Array.isArray(config.vector?.[`${prefix}Api`]?.modelCache)
+ ? config.vector[`${prefix}Api`].modelCache
+ : [];
+ const hasModelCache = cache.length > 0;
+
+ $(`${prefix}-api-url-row`).classList.toggle('hidden', false);
+ $(`${prefix}-api-key-row`).classList.toggle('hidden', !pv.needKey);
+ $(`${prefix}-api-model-manual-row`).classList.toggle('hidden', false);
+ $(`${prefix}-api-model-select-row`).classList.toggle('hidden', !hasModelCache);
+ $(`${prefix}-api-connect-row`).classList.toggle('hidden', !pv.canFetch);
+ $(`${prefix}-api-connect-status`).classList.toggle('hidden', !pv.canFetch);
+
+ const urlInput = $(`${prefix}-api-url`);
+ if (urlInput && !urlInput.value && pv.url) urlInput.value = pv.url;
+ }
+
+ async function fetchVectorModels(prefix) {
+ const provider = $(`${prefix}-api-provider`).value;
+ const pv = VECTOR_PROVIDER_DEFAULTS[provider] || VECTOR_PROVIDER_DEFAULTS.custom;
+ const statusEl = $(`${prefix}-api-connect-status`);
+ const btn = $(`${prefix}-btn-connect`);
+ if (!pv.canFetch) {
+ statusEl.textContent = '当前渠道不支持自动拉取模型';
+ return;
+ }
+
+ let baseUrl = $(`${prefix}-api-url`).value.trim().replace(/\/+$/, '');
+ const apiKey = $(`${prefix}-api-key`).value.trim();
+ if (!apiKey) {
+ statusEl.textContent = '请先填写 API KEY';
+ return;
+ }
+
+ btn.disabled = true;
+ btn.textContent = '连接中...';
+ statusEl.textContent = '连接中...';
+
+ try {
+ const tryFetch = async url => {
+ const res = await fetch(url, {
+ headers: { Authorization: `Bearer ${apiKey}`, Accept: 'application/json' }
+ });
+ return res.ok ? (await res.json())?.data?.map(m => m?.id).filter(Boolean) || null : null;
+ };
+
+ if (baseUrl.endsWith('/v1')) baseUrl = baseUrl.slice(0, -3);
+ let models = await tryFetch(`${baseUrl}/v1/models`);
+ if (!models) models = await tryFetch(`${baseUrl}/models`);
+ if (!models?.length) throw new Error('未获取到模型列表');
+
+ config.vector[`${prefix}Api`].modelCache = [...new Set(models)];
+ setSelectOptions($(`${prefix}-api-model-select`), config.vector[`${prefix}Api`].modelCache, '请选择');
+ $(`${prefix}-api-model-select-row`).classList.remove('hidden');
+ if (!$(`${prefix}-api-model-text`).value.trim()) {
+ $(`${prefix}-api-model-text`).value = models[0];
+ $(`${prefix}-api-model-select`).value = models[0];
+ }
+ statusEl.textContent = `拉取成功:${models.length} 个模型`;
+ } catch (e) {
+ statusEl.textContent = '拉取失败:' + (e.message || '请检查 URL 和 KEY');
+ } finally {
+ btn.disabled = false;
+ btn.textContent = '连接 / 拉取模型列表';
+ }
+ }
+
function getVectorConfig() {
return {
enabled: $('vector-enabled')?.checked || false,
engine: 'online',
- online: {
- provider: 'siliconflow',
- key: $('vector-api-key')?.value?.trim() || '',
- model: 'BAAI/bge-m3',
- },
+ l0Concurrency: Math.max(1, Math.min(50, Number($('vector-l0-concurrency')?.value) || 10)),
+ l0Api: getVectorApiConfig('l0'),
+ embeddingApi: getVectorApiConfig('embedding'),
+ rerankApi: getVectorApiConfig('rerank'),
};
}
@@ -451,11 +598,10 @@ All checks passed. Beginning incremental extraction...
if (!cfg) return;
$('vector-enabled').checked = !!cfg.enabled;
$('vector-config-area').classList.toggle('hidden', !cfg.enabled);
-
- if (cfg.online?.key) {
- $('vector-api-key').value = cfg.online.key;
- }
-
+ $('vector-l0-concurrency').value = String(Math.max(1, Math.min(50, Number(cfg.l0Concurrency) || 10)));
+ loadVectorApiConfig('l0', cfg.l0Api || {});
+ loadVectorApiConfig('embedding', cfg.embeddingApi || {});
+ loadVectorApiConfig('rerank', cfg.rerankApi || {});
}
// ═══════════════════════════════════════════════════════════════════════════
@@ -536,13 +682,6 @@ All checks passed. Beginning incremental extraction...
el.textContent = count;
}
- function updateOnlineStatus(status, message) {
- const dot = $('online-api-status').querySelector('.status-dot');
- const text = $('online-api-status').querySelector('.status-text');
- dot.className = 'status-dot ' + status;
- text.textContent = message;
- }
-
function updateVectorStats(stats) {
$('vector-atom-count').textContent = stats.stateVectors || 0;
$('vector-chunk-count').textContent = stats.chunkCount || 0;
@@ -649,14 +788,28 @@ All checks passed. Beginning incremental extraction...
$('btn-test-vector-api').onclick = () => {
saveConfig(); // 先保存新 Key 到 localStorage
postMsg('VECTOR_TEST_ONLINE', {
- provider: 'siliconflow',
- config: {
- key: $('vector-api-key').value.trim(),
- model: 'BAAI/bge-m3',
- }
+ provider: getVectorConfig().embeddingApi.provider,
+ config: getVectorConfig().embeddingApi
});
};
+ ['l0', 'embedding', 'rerank'].forEach(prefix => {
+ $(`${prefix}-api-provider`).onchange = e => {
+ const pv = VECTOR_PROVIDER_DEFAULTS[e.target.value] || VECTOR_PROVIDER_DEFAULTS.custom;
+ const target = config.vector[`${prefix}Api`] ||= { modelCache: [] };
+ target.provider = e.target.value;
+ if (!target.url && pv.url) target.url = pv.url;
+ if (!pv.canFetch) target.modelCache = [];
+ updateVectorProviderUI(prefix, e.target.value);
+ };
+
+ $(`${prefix}-api-model-select`).onchange = e => {
+ if (e.target.value) $(`${prefix}-api-model-text`).value = e.target.value;
+ };
+
+ $(`${prefix}-btn-connect`).onclick = () => fetchVectorModels(prefix);
+ });
+
$('btn-add-filter-rule').onclick = addFilterRule;
$('btn-gen-vectors').onclick = () => {
diff --git a/modules/story-summary/story-summary.html b/modules/story-summary/story-summary.html
index 94ec320..b9f4a41 100644
--- a/modules/story-summary/story-summary.html
+++ b/modules/story-summary/story-summary.html
@@ -414,31 +414,139 @@
-
+
Step.0
- 填写 API Key
+ 配置模型 API
-
-
-
- 💡
硅基流动
- 内置使用的模型完全免费。建议实名认证以获得更高并发。
+
+ 推荐给 L0 使用便宜或免费的大批量模型,不建议直接消耗酒馆主 API。推荐:硅基流动 / OpenRouter / 自定义 OpenAI 兼容接口。
-
-
-
-
-
-
-
-
未测试
+
+
+
默认 10。免费账号可调低到 1-3,线路稳定时可自行调高。
+
+
L0 锚点提取模型
+
+
+
+
-
+
+
+
+
+
+
+
+
+
+
Embedding 模型
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Rerank 模型
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/modules/story-summary/story-summary.js b/modules/story-summary/story-summary.js
index be36c37..245cdd6 100644
--- a/modules/story-summary/story-summary.js
+++ b/modules/story-summary/story-summary.js
@@ -356,8 +356,8 @@ async function handleAnchorGenerate() {
return;
}
- if (!vectorCfg.online?.key) {
- postToFrame({ type: "VECTOR_ONLINE_STATUS", status: "error", message: "请配置 API Key" });
+ if (!vectorCfg.l0Api?.key) {
+ postToFrame({ type: "VECTOR_ONLINE_STATUS", status: "error", message: "请配置 L0 API Key" });
return;
}
@@ -448,8 +448,8 @@ async function handleGenerateVectors(vectorCfg) {
const { chatId, chat } = getContext();
if (!chatId || !chat?.length) return;
- if (!vectorCfg.online?.key) {
- postToFrame({ type: "VECTOR_ONLINE_STATUS", status: "error", message: "请配置 API Key" });
+ if (!vectorCfg.embeddingApi?.key) {
+ postToFrame({ type: "VECTOR_ONLINE_STATUS", status: "error", message: "请配置 Embedding API Key" });
return;
}
diff --git a/modules/story-summary/vector/llm/llm-service.js b/modules/story-summary/vector/llm/llm-service.js
index 4d10576..3608fbd 100644
--- a/modules/story-summary/vector/llm/llm-service.js
+++ b/modules/story-summary/vector/llm/llm-service.js
@@ -2,14 +2,15 @@
// vector/llm/llm-service.js - 修复 prefill 传递方式
// ═══════════════════════════════════════════════════════════════════════════
import { xbLog } from '../../../../core/debug-core.js';
-import { getApiKey } from './siliconflow.js';
+import { getVectorConfig } from '../../data/config.js';
const MODULE_ID = 'vector-llm-service';
-const SILICONFLOW_API_URL = 'https://api.siliconflow.cn/v1';
const DEFAULT_L0_MODEL = 'Qwen/Qwen3-8B';
+const DEFAULT_L0_API_URL = 'https://api.siliconflow.cn/v1';
let callCounter = 0;
const activeL0SessionIds = new Set();
+let l0KeyIndex = 0;
function getStreamingModule() {
const mod = window.xiaobaixStreamingGeneration;
@@ -28,6 +29,28 @@ function b64UrlEncode(str) {
return btoa(bin).replace(/\+/g, '-').replace(/\//g, '_').replace(/=+$/, '');
}
+function getL0ApiConfig() {
+ const cfg = getVectorConfig() || {};
+ return cfg.l0Api || {
+ provider: 'siliconflow',
+ url: DEFAULT_L0_API_URL,
+ key: '',
+ model: DEFAULT_L0_MODEL,
+ };
+}
+
+function getNextKey(rawKey) {
+ const keys = String(rawKey || '')
+ .split(/[,;|\n]+/)
+ .map(k => k.trim())
+ .filter(Boolean);
+ if (!keys.length) return '';
+ if (keys.length === 1) return keys[0];
+ const idx = l0KeyIndex % keys.length;
+ l0KeyIndex = (l0KeyIndex + 1) % keys.length;
+ return keys[idx];
+}
+
/**
* 统一LLM调用 - 走酒馆后端(非流式)
* 临时改为标准 messages 调用,避免 bottomassistant prefill 兼容性问题。
@@ -42,7 +65,8 @@ export async function callLLM(messages, options = {}) {
const mod = getStreamingModule();
if (!mod) throw new Error('Streaming module not ready');
- const apiKey = getApiKey() || '';
+ const apiCfg = getL0ApiConfig();
+ const apiKey = getNextKey(apiCfg.key);
if (!apiKey) {
throw new Error('L0 requires siliconflow API key');
}
@@ -60,11 +84,11 @@ export async function callLLM(messages, options = {}) {
temperature: String(temperature),
max_tokens: String(max_tokens),
api: 'openai',
- apiurl: SILICONFLOW_API_URL,
+ apiurl: String(apiCfg.url || DEFAULT_L0_API_URL).trim(),
apipassword: apiKey,
- model: DEFAULT_L0_MODEL,
+ model: String(apiCfg.model || DEFAULT_L0_MODEL).trim(),
};
- const isQwen3 = String(DEFAULT_L0_MODEL || '').includes('Qwen3');
+ const isQwen3 = String(args.model || '').includes('Qwen3');
if (isQwen3) {
args.enable_thinking = 'false';
}
diff --git a/modules/story-summary/vector/llm/reranker.js b/modules/story-summary/vector/llm/reranker.js
index 2702076..4fbd37f 100644
--- a/modules/story-summary/vector/llm/reranker.js
+++ b/modules/story-summary/vector/llm/reranker.js
@@ -4,15 +4,38 @@
// ═══════════════════════════════════════════════════════════════════════════
import { xbLog } from '../../../../core/debug-core.js';
-import { getApiKey } from './siliconflow.js';
+import { getVectorConfig } from '../../data/config.js';
const MODULE_ID = 'reranker';
-const RERANK_URL = 'https://api.siliconflow.cn/v1/rerank';
+const DEFAULT_RERANK_URL = 'https://api.siliconflow.cn/v1';
const RERANK_MODEL = 'BAAI/bge-reranker-v2-m3';
const DEFAULT_TIMEOUT = 15000;
const MAX_DOCUMENTS = 100; // API 限制
const RERANK_BATCH_SIZE = 20;
const RERANK_MAX_CONCURRENCY = 5;
+let rerankKeyIndex = 0;
+
+function getRerankApiConfig() {
+ const cfg = getVectorConfig() || {};
+ return cfg.rerankApi || {
+ provider: 'siliconflow',
+ url: DEFAULT_RERANK_URL,
+ key: '',
+ model: RERANK_MODEL,
+ };
+}
+
+function getNextRerankKey(rawKey) {
+ const keys = String(rawKey || '')
+ .split(/[,;|\n]+/)
+ .map(k => k.trim())
+ .filter(Boolean);
+ if (!keys.length) return '';
+ if (keys.length === 1) return keys[0];
+ const idx = rerankKeyIndex % keys.length;
+ rerankKeyIndex = (rerankKeyIndex + 1) % keys.length;
+ return keys[idx];
+}
/**
* 对文档列表进行 Rerank 精排
@@ -37,7 +60,8 @@ export async function rerank(query, documents, options = {}) {
return { results: [], failed: false };
}
- const key = getApiKey();
+ const apiCfg = getRerankApiConfig();
+ const key = getNextRerankKey(apiCfg.key);
if (!key) {
xbLog.warn(MODULE_ID, '未配置 API Key,跳过 rerank');
return { results: documents.map((_, i) => ({ index: i, relevance_score: 0 })), failed: true };
@@ -72,14 +96,15 @@ export async function rerank(query, documents, options = {}) {
try {
const T0 = performance.now();
- const response = await fetch(RERANK_URL, {
+ const baseUrl = String(apiCfg.url || DEFAULT_RERANK_URL).replace(/\/+$/, '');
+ const response = await fetch(`${baseUrl}/rerank`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${key}`,
'Content-Type': 'application/json',
},
body: JSON.stringify({
- model: RERANK_MODEL,
+ model: String(apiCfg.model || RERANK_MODEL),
// Zero-darkbox: do not silently truncate query.
query,
documents: validDocs,
diff --git a/modules/story-summary/vector/llm/siliconflow.js b/modules/story-summary/vector/llm/siliconflow.js
index 1a7bb7d..ea36603 100644
--- a/modules/story-summary/vector/llm/siliconflow.js
+++ b/modules/story-summary/vector/llm/siliconflow.js
@@ -1,31 +1,39 @@
// ═══════════════════════════════════════════════════════════════════════════
-// siliconflow.js - Embedding + 多 Key 轮询
+// siliconflow.js - OpenAI-compatible Embedding + 多 Key 轮询
//
// 在 API Key 输入框中用逗号、分号、竖线或换行分隔多个 Key,例如:
// sk-aaa,sk-bbb,sk-ccc
// 每次调用自动轮询到下一个 Key,并发请求会均匀分布到所有 Key 上。
// ═══════════════════════════════════════════════════════════════════════════
+import { getVectorConfig } from '../../data/config.js';
+
const BASE_URL = 'https://api.siliconflow.cn';
const EMBEDDING_MODEL = 'BAAI/bge-m3';
// ★ 多 Key 轮询状态
let _keyIndex = 0;
+function getEmbeddingApiConfig() {
+ const cfg = getVectorConfig() || {};
+ return cfg.embeddingApi || {
+ provider: 'siliconflow',
+ url: `${BASE_URL}/v1`,
+ key: '',
+ model: EMBEDDING_MODEL,
+ };
+}
+
-/**
- * 从 localStorage 解析所有 Key(支持逗号、分号、竖线、换行分隔)
- */
+/**
+ * 从传入的 Key 字符串解析所有 Key(支持逗号、分号、竖线、换行分隔)
+ */
-function parseKeys() {
+function parseKeys(rawKey) {
try {
- const raw = localStorage.getItem('summary_panel_config');
- if (raw) {
- const parsed = JSON.parse(raw);
- const keyStr = parsed.vector?.online?.key || '';
- return keyStr
- .split(/[,;|\n]+/)
- .map(k => k.trim())
- .filter(k => k.length > 0);
- }
+ const keyStr = String(rawKey || '');
+ return keyStr
+ .split(/[,;|\n]+/)
+ .map(k => k.trim())
+ .filter(k => k.length > 0);
} catch { }
return [];
}
@@ -34,8 +42,8 @@ function parseKeys() {
* 获取下一个可用的 API Key(轮询)
* 每次调用返回不同的 Key,自动循环
*/
-export function getApiKey() {
- const keys = parseKeys();
+export function getApiKey(rawKey = null) {
+ const keys = parseKeys(rawKey ?? getEmbeddingApiConfig().key);
if (!keys.length) return null;
if (keys.length === 1) return keys[0];
@@ -51,7 +59,7 @@ export function getApiKey() {
* 获取当前配置的 Key 数量(供外部模块动态调整并发用)
*/
export function getKeyCount() {
- return Math.max(1, parseKeys().length);
+ return Math.max(1, parseKeys(getEmbeddingApiConfig().key).length);
}
// ═══════════════════════════════════════════════════════════════════════════
@@ -61,22 +69,24 @@ export function getKeyCount() {
export async function embed(texts, options = {}) {
if (!texts?.length) return [];
- const key = getApiKey();
- if (!key) throw new Error('未配置硅基 API Key');
+ const apiCfg = options.apiConfig || getEmbeddingApiConfig();
+ const key = getApiKey(apiCfg.key);
+ if (!key) throw new Error('未配置 Embedding API Key');
const { timeout = 30000, signal } = options;
const controller = new AbortController();
const timeoutId = setTimeout(() => controller.abort(), timeout);
try {
- const response = await fetch(`${BASE_URL}/v1/embeddings`, {
+ const baseUrl = String(apiCfg.url || `${BASE_URL}/v1`).replace(/\/+$/, '');
+ const response = await fetch(`${baseUrl}/embeddings`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${key}`,
'Content-Type': 'application/json',
},
body: JSON.stringify({
- model: EMBEDDING_MODEL,
+ model: String(apiCfg.model || EMBEDDING_MODEL),
input: texts,
}),
signal: signal || controller.signal,
diff --git a/modules/story-summary/vector/pipeline/state-integration.js b/modules/story-summary/vector/pipeline/state-integration.js
index 996bbb8..a99c0f1 100644
--- a/modules/story-summary/vector/pipeline/state-integration.js
+++ b/modules/story-summary/vector/pipeline/state-integration.js
@@ -29,7 +29,7 @@ import { filterText } from '../utils/text-filter.js';
const MODULE_ID = 'state-integration';
// ★ 并发配置
-const CONCURRENCY = 10;
+const DEFAULT_CONCURRENCY = 10;
const STAGGER_DELAY = 15;
const DEBUG_CONCURRENCY = true;
const R_AGG_MAX_CHARS = 256;
@@ -168,7 +168,9 @@ export async function incrementalExtractAtoms(chatId, chat, onProgress, options
return { built: 0 };
}
- xbLog.info(MODULE_ID, `增量 L0 提取:pending=${pendingPairs.length}, concurrency=${CONCURRENCY}`);
+ const concurrency = Math.max(1, Math.min(50, Number(vectorCfg?.l0Concurrency) || DEFAULT_CONCURRENCY));
+
+ xbLog.info(MODULE_ID, `增量 L0 提取:pending=${pendingPairs.length}, concurrency=${concurrency}`);
let completed = 0;
let failed = 0;
@@ -181,14 +183,6 @@ export async function incrementalExtractAtoms(chatId, chat, onProgress, options
// ★ Phase 1: 收集所有新提取的 atoms(不向量化)
const allNewAtoms = [];
- // ★ 限流检测:连续失败 N 次后暂停并降速
- let consecutiveFailures = 0;
- let rateLimited = false;
- const RATE_LIMIT_THRESHOLD = 6; // 连续失败多少次触发限流保护
- const RATE_LIMIT_WAIT_MS = 60000; // 限流后等待时间(60 秒)
- const RETRY_INTERVAL_MS = 1000; // 降速模式下每次请求间隔(1 秒)
- const RETRY_CONCURRENCY = 1; // ★ 降速模式下的并发数(默认1,建议不要超过5)
-
// ★ 通用处理单个 pair 的逻辑(复用于正常模式和降速模式)
const processPair = async (pair, idx, workerId) => {
const floor = pair.aiFloor;
@@ -209,9 +203,6 @@ export async function incrementalExtractAtoms(chatId, chat, onProgress, options
throw new Error('llm_failed');
}
- // ★ 成功:重置连续失败计数
- consecutiveFailures = 0;
-
if (!atoms.length) {
setL0FloorStatus(floor, { status: 'empty', reason: 'llm_empty', atoms: 0 });
} else {
@@ -231,13 +222,6 @@ export async function incrementalExtractAtoms(chatId, chat, onProgress, options
reason: String(e?.message || e).replace(/\s+/g, ' ').slice(0, 120),
});
failed++;
-
- // ★ 限流检测:连续失败累加
- consecutiveFailures++;
- if (consecutiveFailures >= RATE_LIMIT_THRESHOLD && !rateLimited) {
- rateLimited = true;
- xbLog.warn(MODULE_ID, `连续失败 ${consecutiveFailures} 次,疑似触发 API 限流,将暂停所有并发`);
- }
} finally {
active--;
if (!extractionCancelled) {
@@ -252,12 +236,12 @@ export async function incrementalExtractAtoms(chatId, chat, onProgress, options
};
// ★ 并发池处理(保持固定并发度)
- const poolSize = Math.min(CONCURRENCY, pendingPairs.length);
+ const poolSize = Math.min(concurrency, pendingPairs.length);
let nextIndex = 0;
let started = 0;
const runWorker = async (workerId) => {
while (true) {
- if (extractionCancelled || rateLimited) return;
+ if (extractionCancelled) return;
const idx = nextIndex++;
if (idx >= pendingPairs.length) return;
@@ -267,7 +251,7 @@ export async function incrementalExtractAtoms(chatId, chat, onProgress, options
await new Promise(r => setTimeout(r, stagger * STAGGER_DELAY));
}
- if (extractionCancelled || rateLimited) return;
+ if (extractionCancelled) return;
await processPair(pair, idx, workerId);
}
@@ -279,61 +263,6 @@ export async function incrementalExtractAtoms(chatId, chat, onProgress, options
xbLog.info(MODULE_ID, `L0 pool done completed=${completed}/${total} failed=${failed} peakActive=${peakActive} elapsedMs=${elapsed}`);
}
- // ═════════════════════════════════════════════════════════════════════
- // ★ 限流恢复:重置进度,从头开始以限速模式慢慢跑
- // ═════════════════════════════════════════════════════════════════════
- if (rateLimited && !extractionCancelled) {
- const waitSec = RATE_LIMIT_WAIT_MS / 1000;
- xbLog.info(MODULE_ID, `限流保护:将重置进度并从头开始降速重来(并发=${RETRY_CONCURRENCY}, 间隔=${RETRY_INTERVAL_MS}ms)`);
- onProgress?.(`疑似限流,${waitSec}s 后降速重头开始...`, completed, total);
-
- await new Promise(r => setTimeout(r, RATE_LIMIT_WAIT_MS));
-
- if (!extractionCancelled) {
- // ★ 核心逻辑:重置计数器,让 UI 从 0 开始跑,给用户“重头开始”的反馈
- rateLimited = false;
- consecutiveFailures = 0;
- completed = 0;
- failed = 0;
-
- let retryNextIdx = 0;
-
- xbLog.info(MODULE_ID, `限流恢复:开始降速模式扫描 ${pendingPairs.length} 个楼层`);
-
- const retryWorkers = Math.min(RETRY_CONCURRENCY, pendingPairs.length);
- const runRetryWorker = async (wid) => {
- while (true) {
- if (extractionCancelled) return;
- const idx = retryNextIdx++;
- if (idx >= pendingPairs.length) return;
-
- const pair = pendingPairs[idx];
- const floor = pair.aiFloor;
-
- // ★ 检查该楼层状态
- const st = getL0FloorStatus(floor);
- if (st?.status === 'ok' || st?.status === 'empty') {
- // 刚才已经成功了,直接跳过(仅增加进度计数)
- completed++;
- onProgress?.(`提取: ${completed}/${total} (跳过已完成)`, completed, total);
- continue;
- }
-
- // ★ 没做过的,用 slow 模式处理
- await processPair(pair, idx, `retry-${wid}`);
-
- // 每个请求后休息,避免再次触发限流
- if (idx < pendingPairs.length - 1 && RETRY_INTERVAL_MS > 0) {
- await new Promise(r => setTimeout(r, RETRY_INTERVAL_MS));
- }
- }
- };
-
- await Promise.all(Array.from({ length: retryWorkers }, (_, i) => runRetryWorker(i)));
- xbLog.info(MODULE_ID, `降速重头开始阶段结束`);
- }
- }
-
try {
saveMetadataDebounced?.();
} catch { }
diff --git a/modules/story-summary/vector/utils/embedder.js b/modules/story-summary/vector/utils/embedder.js
index 1bc0dcd..48f52f7 100644
--- a/modules/story-summary/vector/utils/embedder.js
+++ b/modules/story-summary/vector/utils/embedder.js
@@ -1,15 +1,13 @@
// ═══════════════════════════════════════════════════════════════════════════
-// Story Summary - Embedder (v2 - 统一硅基)
-// 所有 embedding 请求转发到 siliconflow.js
+// Story Summary - Embedder
// ═══════════════════════════════════════════════════════════════════════════
-import { embed as sfEmbed, getApiKey } from '../llm/siliconflow.js';
+import { embed as sfEmbed } from '../llm/siliconflow.js';
// ═══════════════════════════════════════════════════════════════════════════
// 统一 embed 接口
// ═══════════════════════════════════════════════════════════════════════════
export async function embed(texts, config, options = {}) {
- // 忽略旧的 config 参数,统一走硅基
return await sfEmbed(texts, options);
}
@@ -18,8 +16,10 @@ export async function embed(texts, config, options = {}) {
// ═══════════════════════════════════════════════════════════════════════════
export function getEngineFingerprint(config) {
- // 统一使用硅基 bge-m3
- return 'siliconflow:bge-m3:1024';
+ const api = config?.embeddingApi || {};
+ const provider = String(api.provider || 'siliconflow').toLowerCase();
+ const model = String(api.model || 'BAAI/bge-m3').trim() || 'BAAI/bge-m3';
+ return `${provider}:${model}:1024`;
}
// ═══════════════════════════════════════════════════════════════════════════
@@ -47,14 +47,13 @@ export async function deleteLocalModelCache() { }
// 在线服务测试
// ═══════════════════════════════════════════════════════════════════════════
-export async function testOnlineService() {
- const key = getApiKey();
- if (!key) {
- throw new Error('请配置硅基 API Key');
+export async function testOnlineService(_provider, config = {}) {
+ if (!config?.key) {
+ throw new Error('请配置 Embedding API Key');
}
try {
- const [vec] = await sfEmbed(['测试连接']);
+ const [vec] = await sfEmbed(['测试连接'], { apiConfig: config });
return { success: true, dims: vec?.length || 0 };
} catch (e) {
throw new Error(`连接失败: ${e.message}`);
@@ -62,7 +61,6 @@ export async function testOnlineService() {
}
export async function fetchOnlineModels() {
- // 硅基模型固定
return ['BAAI/bge-m3'];
}
@@ -78,6 +76,6 @@ export const ONLINE_PROVIDERS = {
siliconflow: {
id: 'siliconflow',
name: '硅基流动',
- baseUrl: 'https://api.siliconflow.cn',
+ baseUrl: 'https://api.siliconflow.cn/v1',
},
};