fix(story-summary): improve vector api provider state

This commit is contained in:
2026-04-03 17:01:05 +08:00
parent 7b921b80e0
commit 485016abdd
3 changed files with 193 additions and 44 deletions

View File

@@ -261,6 +261,44 @@ const DEFAULT_L0_MODEL = "Qwen/Qwen3-8B";
const DEFAULT_EMBEDDING_MODEL = "BAAI/bge-m3"; const DEFAULT_EMBEDDING_MODEL = "BAAI/bge-m3";
const DEFAULT_RERANK_MODEL = "BAAI/bge-reranker-v2-m3"; const DEFAULT_RERANK_MODEL = "BAAI/bge-reranker-v2-m3";
function getVectorProviderDefaultUrl(provider) {
  // Resolve the default base URL for a vector API provider.
  // OpenRouter has its own endpoint; every other provider (including
  // "siliconflow") falls back to the L0 default endpoint.
  if (provider === "openrouter") {
    return DEFAULT_OPENROUTER_URL;
  }
  return DEFAULT_L0_URL;
}
function createDefaultProviderProfile(provider, model = "") {
  // Produce an empty credential/model profile for one provider, preseeded
  // with that provider's default endpoint. "custom" providers start with a
  // blank URL so the user must type their own OpenAI-compatible base URL.
  const url = provider === "custom" ? "" : getVectorProviderDefaultUrl(provider);
  return { url, key: "", model: model || "", modelCache: [] };
}
function normalizeProviderProfiles(supportedProviders, srcProfiles, currentProvider, currentValues, defaultModel) {
const out = {};
supportedProviders.forEach((provider) => {
const raw = srcProfiles?.[provider] || {};
const defaults = createDefaultProviderProfile(provider, defaultModel);
out[provider] = {
url: String(raw.url || defaults.url || "").trim(),
key: String(raw.key || "").trim(),
model: String(raw.model || defaults.model || "").trim(),
modelCache: Array.isArray(raw.modelCache) ? raw.modelCache.filter(Boolean) : [],
};
});
if (currentProvider && out[currentProvider]) {
if (currentValues?.url && !out[currentProvider].url) out[currentProvider].url = String(currentValues.url).trim();
if (currentValues?.key && !out[currentProvider].key) out[currentProvider].key = String(currentValues.key).trim();
if (currentValues?.model && !out[currentProvider].model) out[currentProvider].model = String(currentValues.model).trim();
if (Array.isArray(currentValues?.modelCache) && !out[currentProvider].modelCache.length) {
out[currentProvider].modelCache = currentValues.modelCache.filter(Boolean);
}
}
return out;
}
export function getSettings() { export function getSettings() {
const ext = (extension_settings[EXT_ID] ||= {}); const ext = (extension_settings[EXT_ID] ||= {});
ext.storySummary ||= { enabled: true }; ext.storySummary ||= { enabled: true };
@@ -269,15 +307,24 @@ export function getSettings() {
function normalizeOpenAiCompatApiConfig(src, defaults = {}) { function normalizeOpenAiCompatApiConfig(src, defaults = {}) {
const provider = String(src?.provider || defaults.provider || DEFAULT_VECTOR_PROVIDER).toLowerCase(); const provider = String(src?.provider || defaults.provider || DEFAULT_VECTOR_PROVIDER).toLowerCase();
const defaultUrl = provider === "openrouter" const supportedProviders = Array.isArray(defaults.supportedProviders) && defaults.supportedProviders.length
? DEFAULT_OPENROUTER_URL ? defaults.supportedProviders
: String(defaults.url || DEFAULT_L0_URL); : [provider, "custom"];
const providers = normalizeProviderProfiles(
supportedProviders,
src?.providers,
provider,
src,
defaults.model || ""
);
const current = providers[provider] || createDefaultProviderProfile(provider, defaults.model || "");
return { return {
provider, provider,
url: String(src?.url || defaultUrl || "").trim(), url: String(current.url || "").trim(),
key: String(src?.key || defaults.key || "").trim(), key: String(current.key || defaults.key || "").trim(),
model: String(src?.model || defaults.model || "").trim(), model: String(current.model || defaults.model || "").trim(),
modelCache: Array.isArray(src?.modelCache) ? src.modelCache.filter(Boolean) : [], modelCache: Array.isArray(current.modelCache) ? current.modelCache.filter(Boolean) : [],
providers,
}; };
} }
@@ -296,18 +343,21 @@ function normalizeVectorConfig(rawVector = null) {
url: sharedUrl, url: sharedUrl,
key: sharedKey, key: sharedKey,
model: DEFAULT_L0_MODEL, model: DEFAULT_L0_MODEL,
supportedProviders: ["siliconflow", "openrouter", "custom"],
}), }),
embeddingApi: normalizeOpenAiCompatApiConfig(rawVector?.embeddingApi, { embeddingApi: normalizeOpenAiCompatApiConfig(rawVector?.embeddingApi, {
provider: DEFAULT_VECTOR_PROVIDER, provider: DEFAULT_VECTOR_PROVIDER,
url: DEFAULT_L0_URL, url: DEFAULT_L0_URL,
key: sharedKey, key: sharedKey,
model: DEFAULT_EMBEDDING_MODEL, model: DEFAULT_EMBEDDING_MODEL,
supportedProviders: ["siliconflow", "custom"],
}), }),
rerankApi: normalizeOpenAiCompatApiConfig(rawVector?.rerankApi, { rerankApi: normalizeOpenAiCompatApiConfig(rawVector?.rerankApi, {
provider: DEFAULT_VECTOR_PROVIDER, provider: DEFAULT_VECTOR_PROVIDER,
url: DEFAULT_L0_URL, url: DEFAULT_L0_URL,
key: sharedKey, key: sharedKey,
model: DEFAULT_RERANK_MODEL, model: DEFAULT_RERANK_MODEL,
supportedProviders: ["siliconflow", "custom"],
}), }),
}; };
} }

View File

@@ -303,6 +303,18 @@ All checks passed. Beginning incremental extraction...
custom: { url: '', needKey: true, canFetch: true } custom: { url: '', needKey: true, canFetch: true }
}; };
// Which provider choices each vector API section (l0 / embedding / rerank)
// exposes in the settings UI. Only the L0 extraction model offers OpenRouter.
const VECTOR_API_SUPPORTED_PROVIDERS = {
l0: ['siliconflow', 'openrouter', 'custom'],
embedding: ['siliconflow', 'custom'],
rerank: ['siliconflow', 'custom'],
};
// Default model IDs used to seed an empty profile for each section.
const VECTOR_API_DEFAULT_MODELS = {
l0: 'Qwen/Qwen3-8B',
embedding: 'BAAI/bge-m3',
rerank: 'BAAI/bge-reranker-v2-m3',
};
function setStatusText(el, message, kind = '') { function setStatusText(el, message, kind = '') {
if (!el) return; if (!el) return;
el.textContent = message || ''; el.textContent = message || '';
@@ -315,6 +327,44 @@ All checks passed. Beginning incremental extraction...
: ''; : '';
} }
function createDefaultProviderProfile(provider, model = '') {
  // Fresh per-provider profile seeded with the provider's default endpoint.
  // Unknown providers fall back to the "custom" entry, whose URL is blank.
  const providerDefaults = VECTOR_PROVIDER_DEFAULTS[provider] || VECTOR_PROVIDER_DEFAULTS.custom;
  const profile = {
    url: providerDefaults.url || '',
    key: '',
    model: model || '',
    modelCache: [],
  };
  return profile;
}
function normalizeProviderProfiles(prefix, apiCfg = {}) {
  // Normalize the stored per-provider profiles for one vector API section
  // (l0 / embedding / rerank), then backfill EMPTY fields of the currently
  // selected provider from the legacy flat fields on apiCfg.
  const providerIds = VECTOR_API_SUPPORTED_PROVIDERS[prefix] || ['custom'];
  const fallbackModel = apiCfg.model || VECTOR_API_DEFAULT_MODELS[prefix] || '';
  const profiles = {};
  for (const id of providerIds) {
    const stored = apiCfg.providers?.[id] || {};
    const seed = createDefaultProviderProfile(id, fallbackModel);
    profiles[id] = {
      url: String(stored.url || seed.url || '').trim(),
      key: String(stored.key || '').trim(),
      model: String(stored.model || seed.model || '').trim(),
      modelCache: Array.isArray(stored.modelCache) ? stored.modelCache.filter(Boolean) : [],
    };
  }
  const activeId = String(apiCfg.provider || providerIds[0] || 'custom').toLowerCase();
  const active = profiles[activeId];
  if (active) {
    if (apiCfg.url && !active.url) active.url = String(apiCfg.url).trim();
    if (apiCfg.key && !active.key) active.key = String(apiCfg.key).trim();
    if (apiCfg.model && !active.model) active.model = String(apiCfg.model).trim();
    if (Array.isArray(apiCfg.modelCache) && active.modelCache.length === 0) {
      active.modelCache = apiCfg.modelCache.filter(Boolean);
    }
  }
  return profiles;
}
const SECTION_META = { const SECTION_META = {
keywords: { title: '编辑关键词', hint: '每行一个关键词,格式:关键词|权重(核心/重要/一般)' }, keywords: { title: '编辑关键词', hint: '每行一个关键词,格式:关键词|权重(核心/重要/一般)' },
events: { title: '编辑事件时间线', hint: '编辑时,每个事件要素都应完整' }, events: { title: '编辑事件时间线', hint: '编辑时,每个事件要素都应完整' },
@@ -365,9 +415,28 @@ All checks passed. Beginning incremental extraction...
enabled: false, enabled: false,
engine: 'online', engine: 'online',
l0Concurrency: 10, l0Concurrency: 10,
l0Api: { provider: 'siliconflow', url: 'https://api.siliconflow.cn/v1', key: '', model: 'Qwen/Qwen3-8B', modelCache: [] }, l0Api: {
embeddingApi: { provider: 'siliconflow', url: 'https://api.siliconflow.cn/v1', key: '', model: 'BAAI/bge-m3', modelCache: [] }, provider: 'siliconflow', url: 'https://api.siliconflow.cn/v1', key: '', model: 'Qwen/Qwen3-8B', modelCache: [],
rerankApi: { provider: 'siliconflow', url: 'https://api.siliconflow.cn/v1', key: '', model: 'BAAI/bge-reranker-v2-m3', modelCache: [] } providers: {
siliconflow: createDefaultProviderProfile('siliconflow', 'Qwen/Qwen3-8B'),
openrouter: createDefaultProviderProfile('openrouter', 'Qwen/Qwen3-8B'),
custom: createDefaultProviderProfile('custom', 'Qwen/Qwen3-8B'),
}
},
embeddingApi: {
provider: 'siliconflow', url: 'https://api.siliconflow.cn/v1', key: '', model: 'BAAI/bge-m3', modelCache: [],
providers: {
siliconflow: createDefaultProviderProfile('siliconflow', 'BAAI/bge-m3'),
custom: createDefaultProviderProfile('custom', 'BAAI/bge-m3'),
}
},
rerankApi: {
provider: 'siliconflow', url: 'https://api.siliconflow.cn/v1', key: '', model: 'BAAI/bge-reranker-v2-m3', modelCache: [],
providers: {
siliconflow: createDefaultProviderProfile('siliconflow', 'BAAI/bge-reranker-v2-m3'),
custom: createDefaultProviderProfile('custom', 'BAAI/bge-reranker-v2-m3'),
}
}
} }
}; };
@@ -409,6 +478,7 @@ All checks passed. Beginning incremental extraction...
key: raw.l0Api?.key || sharedKey || base.l0Api.key, key: raw.l0Api?.key || sharedKey || base.l0Api.key,
model: raw.l0Api?.model || base.l0Api.model, model: raw.l0Api?.model || base.l0Api.model,
modelCache: Array.isArray(raw.l0Api?.modelCache) ? raw.l0Api.modelCache : [], modelCache: Array.isArray(raw.l0Api?.modelCache) ? raw.l0Api.modelCache : [],
providers: normalizeProviderProfiles('l0', raw.l0Api || {}),
}); });
Object.assign(base.embeddingApi, { Object.assign(base.embeddingApi, {
provider: raw.embeddingApi?.provider || base.embeddingApi.provider, provider: raw.embeddingApi?.provider || base.embeddingApi.provider,
@@ -416,6 +486,7 @@ All checks passed. Beginning incremental extraction...
key: raw.embeddingApi?.key || sharedKey || base.embeddingApi.key, key: raw.embeddingApi?.key || sharedKey || base.embeddingApi.key,
model: raw.embeddingApi?.model || legacyOnline.model || base.embeddingApi.model, model: raw.embeddingApi?.model || legacyOnline.model || base.embeddingApi.model,
modelCache: Array.isArray(raw.embeddingApi?.modelCache) ? raw.embeddingApi.modelCache : [], modelCache: Array.isArray(raw.embeddingApi?.modelCache) ? raw.embeddingApi.modelCache : [],
providers: normalizeProviderProfiles('embedding', raw.embeddingApi || {}),
}); });
Object.assign(base.rerankApi, { Object.assign(base.rerankApi, {
provider: raw.rerankApi?.provider || base.rerankApi.provider, provider: raw.rerankApi?.provider || base.rerankApi.provider,
@@ -423,6 +494,7 @@ All checks passed. Beginning incremental extraction...
key: raw.rerankApi?.key || sharedKey || base.rerankApi.key, key: raw.rerankApi?.key || sharedKey || base.rerankApi.key,
model: raw.rerankApi?.model || base.rerankApi.model, model: raw.rerankApi?.model || base.rerankApi.model,
modelCache: Array.isArray(raw.rerankApi?.modelCache) ? raw.rerankApi.modelCache : [], modelCache: Array.isArray(raw.rerankApi?.modelCache) ? raw.rerankApi.modelCache : [],
providers: normalizeProviderProfiles('rerank', raw.rerankApi || {}),
}); });
} }
@@ -503,35 +575,65 @@ All checks passed. Beginning incremental extraction...
// ═══════════════════════════════════════════════════════════════════════════ // ═══════════════════════════════════════════════════════════════════════════
function getVectorApiConfig(prefix) { function getVectorApiConfig(prefix) {
return { const provider = $(`${prefix}-api-provider`)?.value || 'siliconflow';
provider: $(`${prefix}-api-provider`)?.value || 'siliconflow', const providers = normalizeProviderProfiles(prefix, config.vector?.[`${prefix}Api`] || {});
providers[provider] = {
url: $(`${prefix}-api-url`)?.value?.trim() || '', url: $(`${prefix}-api-url`)?.value?.trim() || '',
key: $(`${prefix}-api-key`)?.value?.trim() || '', key: $(`${prefix}-api-key`)?.value?.trim() || '',
model: $(`${prefix}-api-model-text`)?.value?.trim() || '', model: $(`${prefix}-api-model-text`)?.value?.trim() || '',
modelCache: Array.isArray(config.vector?.[`${prefix}Api`]?.modelCache) modelCache: Array.isArray(config.vector?.[`${prefix}Api`]?.providers?.[provider]?.modelCache)
? [...config.vector[`${prefix}Api`].modelCache] ? [...config.vector[`${prefix}Api`].providers[provider].modelCache]
: [], : [],
}; };
return {
provider,
url: providers[provider]?.url || '',
key: providers[provider]?.key || '',
model: providers[provider]?.model || '',
modelCache: Array.isArray(providers[provider]?.modelCache) ? [...providers[provider].modelCache] : [],
providers,
};
} }
function loadVectorApiConfig(prefix, cfg) { function loadVectorApiConfig(prefix, cfg) {
const next = cfg || {}; const next = cfg || {};
$(`${prefix}-api-provider`).value = next.provider || 'siliconflow'; const provider = next.provider || 'siliconflow';
$(`${prefix}-api-url`).value = next.url || ''; const profiles = normalizeProviderProfiles(prefix, next);
$(`${prefix}-api-key`).value = next.key || ''; const profile = profiles[provider] || createDefaultProviderProfile(provider, VECTOR_API_DEFAULT_MODELS[prefix]);
$(`${prefix}-api-model-text`).value = next.model || ''; $(`${prefix}-api-provider`).value = provider;
$(`${prefix}-api-url`).value = profile.url || '';
$(`${prefix}-api-key`).value = profile.key || '';
$(`${prefix}-api-model-text`).value = profile.model || '';
const cache = Array.isArray(next.modelCache) ? next.modelCache : []; const cache = Array.isArray(profile.modelCache) ? profile.modelCache : [];
setSelectOptions($(`${prefix}-api-model-select`), cache, '请选择'); setSelectOptions($(`${prefix}-api-model-select`), cache, '请选择');
$(`${prefix}-api-model-select`).value = cache.includes(next.model) ? next.model : ''; $(`${prefix}-api-model-select`).value = cache.includes(profile.model) ? profile.model : '';
updateVectorProviderUI(prefix, next.provider || 'siliconflow'); updateVectorProviderUI(prefix, provider);
}
function saveCurrentVectorApiProfile(prefix) {
  // Persist the current DOM input values into the selected provider's
  // profile slot, then mirror that profile onto the flat top-level fields
  // (provider/url/key/model/modelCache) that legacy readers still use.
  const apiCfg = config.vector[`${prefix}Api`] ||= {};
  const provider = $(`${prefix}-api-provider`)?.value || apiCfg.provider || 'siliconflow';
  apiCfg.providers = normalizeProviderProfiles(prefix, apiCfg);
  // Read the existing cache BEFORE overwriting the profile so fetched model
  // lists survive a save.
  const previousCache = apiCfg.providers?.[provider]?.modelCache;
  const profile = {
    url: $(`${prefix}-api-url`)?.value?.trim() || '',
    key: $(`${prefix}-api-key`)?.value?.trim() || '',
    model: $(`${prefix}-api-model-text`)?.value?.trim() || '',
    modelCache: Array.isArray(previousCache) ? [...previousCache] : [],
  };
  apiCfg.providers[provider] = profile;
  apiCfg.provider = provider;
  apiCfg.url = profile.url;
  apiCfg.key = profile.key;
  apiCfg.model = profile.model;
  apiCfg.modelCache = [...profile.modelCache];
}
function updateVectorProviderUI(prefix, provider) { function updateVectorProviderUI(prefix, provider) {
const pv = VECTOR_PROVIDER_DEFAULTS[provider] || VECTOR_PROVIDER_DEFAULTS.custom; const pv = VECTOR_PROVIDER_DEFAULTS[provider] || VECTOR_PROVIDER_DEFAULTS.custom;
const cache = Array.isArray(config.vector?.[`${prefix}Api`]?.modelCache) const apiCfg = config.vector?.[`${prefix}Api`] || {};
? config.vector[`${prefix}Api`].modelCache apiCfg.providers = normalizeProviderProfiles(prefix, apiCfg);
: []; const profile = apiCfg.providers[provider] || createDefaultProviderProfile(provider, VECTOR_API_DEFAULT_MODELS[prefix]);
const cache = Array.isArray(profile.modelCache) ? profile.modelCache : [];
const hasModelCache = cache.length > 0; const hasModelCache = cache.length > 0;
$(`${prefix}-api-url-row`).classList.toggle('hidden', false); $(`${prefix}-api-url-row`).classList.toggle('hidden', false);
@@ -546,15 +648,17 @@ All checks passed. Beginning incremental extraction...
if (provider === 'custom') { if (provider === 'custom') {
urlInput.readOnly = false; urlInput.readOnly = false;
urlInput.placeholder = 'https://your-openai-compatible-api/v1'; urlInput.placeholder = 'https://your-openai-compatible-api/v1';
urlInput.value = profile.url || '';
} else { } else {
urlInput.value = pv.url || ''; urlInput.value = pv.url || '';
urlInput.readOnly = true; urlInput.readOnly = true;
urlInput.placeholder = pv.url || ''; urlInput.placeholder = pv.url || '';
if (config.vector?.[`${prefix}Api`]) {
config.vector[`${prefix}Api`].url = pv.url || '';
}
} }
} }
$(`${prefix}-api-key`).value = profile.key || '';
$(`${prefix}-api-model-text`).value = profile.model || '';
setSelectOptions($(`${prefix}-api-model-select`), cache, '请选择');
$(`${prefix}-api-model-select`).value = cache.includes(profile.model) ? profile.model : '';
} }
async function fetchVectorModels(prefix) { async function fetchVectorModels(prefix) {
@@ -591,8 +695,11 @@ All checks passed. Beginning incremental extraction...
if (!models) models = await tryFetch(`${baseUrl}/models`); if (!models) models = await tryFetch(`${baseUrl}/models`);
if (!models?.length) throw new Error('未获取到模型列表'); if (!models?.length) throw new Error('未获取到模型列表');
config.vector[`${prefix}Api`].modelCache = [...new Set(models)]; const apiCfg = config.vector[`${prefix}Api`] ||= {};
setSelectOptions($(`${prefix}-api-model-select`), config.vector[`${prefix}Api`].modelCache, '请选择'); apiCfg.providers = normalizeProviderProfiles(prefix, apiCfg);
apiCfg.providers[provider] ||= createDefaultProviderProfile(provider, VECTOR_API_DEFAULT_MODELS[prefix]);
apiCfg.providers[provider].modelCache = [...new Set(models)];
setSelectOptions($(`${prefix}-api-model-select`), apiCfg.providers[provider].modelCache, '请选择');
$(`${prefix}-api-model-select-row`).classList.remove('hidden'); $(`${prefix}-api-model-select-row`).classList.remove('hidden');
if (!$(`${prefix}-api-model-text`).value.trim()) { if (!$(`${prefix}-api-model-text`).value.trim()) {
$(`${prefix}-api-model-text`).value = models[0]; $(`${prefix}-api-model-text`).value = models[0];
@@ -821,18 +928,11 @@ All checks passed. Beginning incremental extraction...
['l0', 'embedding', 'rerank'].forEach(prefix => { ['l0', 'embedding', 'rerank'].forEach(prefix => {
$(`${prefix}-api-provider`).onchange = e => { $(`${prefix}-api-provider`).onchange = e => {
const oldProvider = config.vector[`${prefix}Api`]?.provider || 'siliconflow'; saveCurrentVectorApiProfile(prefix);
const pv = VECTOR_PROVIDER_DEFAULTS[e.target.value] || VECTOR_PROVIDER_DEFAULTS.custom; const target = config.vector[`${prefix}Api`] ||= {};
const target = config.vector[`${prefix}Api`] ||= { modelCache: [] }; target.providers = normalizeProviderProfiles(prefix, target);
if (e.target.value === 'custom') {
if (target.url === (VECTOR_PROVIDER_DEFAULTS[oldProvider]?.url || '')) {
target.url = '';
}
} else {
target.url = pv.url || '';
}
target.provider = e.target.value; target.provider = e.target.value;
if (!pv.canFetch) target.modelCache = []; target.providers[e.target.value] ||= createDefaultProviderProfile(e.target.value, VECTOR_API_DEFAULT_MODELS[prefix]);
updateVectorProviderUI(prefix, e.target.value); updateVectorProviderUI(prefix, e.target.value);
}; };

View File

@@ -425,10 +425,6 @@
<div class="settings-hint" style="margin-bottom:12px;"> <div class="settings-hint" style="margin-bottom:12px;">
推荐给 L0 使用便宜或免费的大批量模型,不建议直接消耗酒馆主 API。推荐硅基流动 / OpenRouter / 自定义 OpenAI 兼容接口。 推荐给 L0 使用便宜或免费的大批量模型,不建议直接消耗酒馆主 API。推荐硅基流动 / OpenRouter / 自定义 OpenAI 兼容接口。
</div> </div>
<label>L0 并发数</label>
<input type="number" id="vector-l0-concurrency" min="1" max="50" step="1" value="10">
<div class="settings-hint" style="margin-bottom:16px;">默认 10。免费账号可调低到 1-3线路稳定时可自行调高。</div>
<details class="settings-collapse" id="l0-api-collapse" style="margin-bottom:12px;"> <details class="settings-collapse" id="l0-api-collapse" style="margin-bottom:12px;">
<summary class="settings-collapse-header"> <summary class="settings-collapse-header">
<span>L0 锚点提取模型</span> <span>L0 锚点提取模型</span>
@@ -437,6 +433,9 @@
</svg> </svg>
</summary> </summary>
<div class="settings-collapse-content"> <div class="settings-collapse-content">
<label>L0 并发数</label>
<input type="number" id="vector-l0-concurrency" min="1" max="50" step="1" value="10">
<div class="settings-hint" style="margin-bottom:16px;">默认 10。免费账号可调低到 1-3线路稳定时可自行调高。</div>
<div class="settings-row"> <div class="settings-row">
<div class="settings-field"> <div class="settings-field">
<label>渠道</label> <label>渠道</label>