Update recall logic and remove unused state-recall
This commit is contained in:
@@ -1,189 +0,0 @@
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// Story Summary - State Recall (L0)
|
||||
// L0 语义锚点召回 + floor bonus + 虚拟 chunk 转换
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
import { getContext } from '../../../../../../../extensions.js';
|
||||
import { getAllStateVectors, getStateAtoms } from '../storage/state-store.js';
|
||||
import { getMeta } from '../storage/chunk-store.js';
|
||||
import { getEngineFingerprint } from '../utils/embedder.js';
|
||||
import { xbLog } from '../../../../core/debug-core.js';
|
||||
|
||||
const MODULE_ID = 'state-recall';
|
||||
|
||||
const CONFIG = {
|
||||
MAX_RESULTS: 20,
|
||||
MIN_SIMILARITY: 0.55,
|
||||
};
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// 工具函数
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
/**
 * Compute the cosine similarity between two equal-length numeric vectors.
 * @param {number[]} a - first vector
 * @param {number[]} b - second vector
 * @returns {number} similarity; 0 for null/empty/length-mismatched/zero vectors
 */
function cosineSimilarity(a, b) {
    if (!a?.length || !b?.length || a.length !== b.length) return 0;
    let dotProduct = 0;
    let normA = 0;
    let normB = 0;
    for (let idx = 0; idx < a.length; idx++) {
        const x = a[idx];
        const y = b[idx];
        dotProduct += x * y;
        normA += x * x;
        normB += y * y;
    }
    // Guard against zero-magnitude vectors before dividing.
    if (!normA || !normB) return 0;
    return dotProduct / (Math.sqrt(normA) * Math.sqrt(normB));
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// L0 向量检索
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
/**
 * Retrieve StateAtoms whose vectors are similar to the query vector.
 * Recall is skipped entirely when the stored embedding fingerprint does
 * not match the current engine configuration.
 * @param {number[]} queryVector - embedding of the query text
 * @param {object} vectorConfig - embedding engine configuration
 * @returns {Promise<Array<{atomId, floor, similarity, atom}>>} top matches,
 *          sorted by similarity descending, capped at CONFIG.MAX_RESULTS
 */
export async function searchStateAtoms(queryVector, vectorConfig) {
    const { chatId } = getContext();
    if (!chatId || !queryVector?.length) return [];

    // Abort when the stored vectors came from a different embedding engine.
    const meta = await getMeta(chatId);
    const fingerprint = getEngineFingerprint(vectorConfig);
    if (meta.fingerprint && meta.fingerprint !== fingerprint) {
        xbLog.warn(MODULE_ID, 'fingerprint 不匹配,跳过 L0 召回');
        return [];
    }

    const stateVectors = await getAllStateVectors(chatId);
    if (!stateVectors.length) return [];

    // Index atoms by id so each vector can be joined with its semantic payload.
    const atomById = new Map(getStateAtoms().map(a => [a.atomId, a]));

    // Score every vector that still has a backing atom.
    const candidates = [];
    for (const sv of stateVectors) {
        const atom = atomById.get(sv.atomId);
        if (!atom) continue;
        candidates.push({
            atomId: sv.atomId,
            floor: sv.floor,
            similarity: cosineSimilarity(queryVector, sv.vector),
            atom,
        });
    }

    // Threshold, rank, and cap the result set.
    return candidates
        .filter(c => c.similarity >= CONFIG.MIN_SIMILARITY)
        .sort((a, b) => b.similarity - a.similarity)
        .slice(0, CONFIG.MAX_RESULTS);
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// Floor Bonus 构建
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
/**
 * Build a floor → bonus-weight map from L0 recall results.
 * Each floor receives the flat bonus at most once, no matter how many
 * atoms hit it; similarity magnitude is deliberately ignored.
 * @param {Array<{floor: number}>} l0Results - L0 recall hits
 * @param {number} [bonusFactor=0.10] - flat bonus applied per floor
 * @returns {Map<number, number>} floor number → bonus weight
 */
export function buildL0FloorBonus(l0Results, bonusFactor = 0.10) {
    const bonusByFloor = new Map();
    for (const hit of l0Results || []) {
        // First hit on a floor wins; later hits on the same floor are no-ops.
        if (!bonusByFloor.has(hit.floor)) {
            bonusByFloor.set(hit.floor, bonusFactor);
        }
    }
    return bonusByFloor;
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// 虚拟 Chunk 转换
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
/**
 * Convert L0 recall results into virtual chunk objects so they can be
 * processed uniformly alongside real L1 chunks.
 * @param {Array<{atomId, floor, similarity, atom}>} l0Results - L0 hits
 * @returns {Array<object>} virtual chunks flagged with isL0
 */
export function stateToVirtualChunks(l0Results) {
    const hits = l0Results || [];
    return hits.map(({ atomId, floor, similarity, atom }) => ({
        chunkId: `state-${atomId}`,
        floor,
        chunkIdx: -1, // negative so virtual chunks sort ahead of L1 chunks
        speaker: '📌', // fixed marker for state-derived chunks
        isUser: false,
        text: atom.semantic,
        textHash: null,
        similarity,
        isL0: true, // marks this as an L0 virtual chunk
        _atom: atom, // keep the original atom for downstream consumers
    }));
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// 每楼层稀疏去重
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
/**
 * Merge L0 virtual chunks with L1 real chunks, rank them by similarity,
 * and keep at most `limit` chunks per floor.
 * @param {Array} l0Chunks - L0 virtual chunks (carry their own similarity)
 * @param {Array} l1Chunks - L1 real chunks (similarity inherited per floor)
 * @param {number} [limit=2] - maximum chunks kept per floor
 * @returns {Array} merged chunks, sorted by similarity descending
 */
export function mergeAndSparsify(l0Chunks, l1Chunks, limit = 2) {
    const anchors = l0Chunks || [];
    const evidence = l1Chunks || [];

    // Highest L0 similarity observed per floor.
    const bestSimByFloor = new Map();
    for (const chunk of anchors) {
        const sim = chunk.similarity || 0;
        if (sim > (bestSimByFloor.get(chunk.floor) || 0)) {
            bestSimByFloor.set(chunk.floor, sim);
        }
    }

    // L1 chunks inherit their floor's best L0 similarity (0.5 fallback).
    const scoredEvidence = evidence.map(chunk => ({
        ...chunk,
        similarity: bestSimByFloor.get(chunk.floor) || 0.5,
    }));

    const ranked = [...anchors, ...scoredEvidence].sort((a, b) => {
        // Primary: similarity descending, with a small dead band.
        const delta = (b.similarity || 0) - (a.similarity || 0);
        if (Math.abs(delta) > 0.01) return delta;

        // Same floor tiebreak: L0 virtual chunks outrank L1 real chunks.
        if (a.floor === b.floor) {
            if (a.isL0 && !b.isL0) return -1;
            if (!a.isL0 && b.isL0) return 1;
        }

        // Otherwise: lower floors first.
        return a.floor - b.floor;
    });

    // Sparsify: keep at most `limit` chunks per floor, in ranked order.
    const keptByFloor = new Map();
    for (const chunk of ranked) {
        const bucket = keptByFloor.get(chunk.floor);
        if (!bucket) {
            keptByFloor.set(chunk.floor, [chunk]);
        } else if (bucket.length < limit) {
            bucket.push(chunk);
        }
    }

    // Flatten and re-rank globally by similarity (stable sort preserves
    // the per-floor tiebreak order established above).
    return [...keptByFloor.values()]
        .flat()
        .sort((a, b) => (b.similarity || 0) - (a.similarity || 0));
}
|
||||
|
||||
@@ -1,59 +1,60 @@
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// Story Summary - Metrics Collector
|
||||
// 召回质量指标收集与格式化
|
||||
// Story Summary - Metrics Collector (v2 - 统一命名)
|
||||
//
|
||||
// 命名规范:
|
||||
// - 存储层用 L0/L1/L2/L3(StateAtom/Chunk/Event/Fact)
|
||||
// - 指标层用语义名称:anchor/evidence/event/constraint/arc
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
/**
|
||||
* 创建空的指标对象
|
||||
* @returns {object} 指标对象
|
||||
*/
|
||||
export function createMetrics() {
|
||||
return {
|
||||
// L0 Query Understanding
|
||||
l0: {
|
||||
// Anchor (L0 StateAtoms) - 语义锚点
|
||||
anchor: {
|
||||
needRecall: false,
|
||||
intent: '',
|
||||
focusEntities: [],
|
||||
queries: [],
|
||||
implicitTopics: [],
|
||||
queryExpansionTime: 0,
|
||||
atomsMatched: 0,
|
||||
matched: 0,
|
||||
floorsHit: 0,
|
||||
topAtoms: [],
|
||||
topHits: [],
|
||||
},
|
||||
|
||||
// L1 Constraints (Facts)
|
||||
l1: {
|
||||
factsTotal: 0,
|
||||
factsInjected: 0,
|
||||
factsFiltered: 0,
|
||||
// Constraint (L3 Facts) - 世界约束
|
||||
constraint: {
|
||||
total: 0,
|
||||
filtered: 0,
|
||||
injected: 0,
|
||||
tokens: 0,
|
||||
samples: [],
|
||||
},
|
||||
|
||||
// L2 Narrative Retrieval
|
||||
l2: {
|
||||
eventsInStore: 0,
|
||||
eventsConsidered: 0,
|
||||
eventsSelected: 0,
|
||||
byRecallType: { direct: 0, causal: 0, context: 0 },
|
||||
// Event (L2 Events) - 事件摘要
|
||||
event: {
|
||||
inStore: 0,
|
||||
considered: 0,
|
||||
selected: 0,
|
||||
byRecallType: { direct: 0, related: 0, causal: 0 },
|
||||
similarityDistribution: { min: 0, max: 0, mean: 0, median: 0 },
|
||||
entityFilterStats: null,
|
||||
entityFilter: null,
|
||||
causalChainDepth: 0,
|
||||
causalEventsCount: 0,
|
||||
entitiesLoaded: 0,
|
||||
causalCount: 0,
|
||||
entitiesUsed: 0,
|
||||
entityNames: [],
|
||||
retrievalTime: 0,
|
||||
},
|
||||
|
||||
// L3 Evidence Assembly
|
||||
l3: {
|
||||
floorsFromL0: 0,
|
||||
l1Total: 0,
|
||||
l1AfterCoarse: 0,
|
||||
chunksInRange: 0,
|
||||
chunksInRangeByType: { l0Virtual: 0, l1Real: 0 },
|
||||
chunksSelected: 0,
|
||||
chunksSelectedByType: { l0Virtual: 0, l1Real: 0 },
|
||||
// Evidence (L1 Chunks) - 原文证据
|
||||
evidence: {
|
||||
floorsFromAnchors: 0,
|
||||
chunkTotal: 0,
|
||||
chunkAfterCoarse: 0,
|
||||
merged: 0,
|
||||
mergedByType: { anchorVirtual: 0, chunkReal: 0 },
|
||||
selected: 0,
|
||||
selectedByType: { anchorVirtual: 0, chunkReal: 0 },
|
||||
contextPairsAdded: 0,
|
||||
tokens: 0,
|
||||
assemblyTime: 0,
|
||||
@@ -61,16 +62,22 @@ export function createMetrics() {
|
||||
beforeRerank: 0,
|
||||
afterRerank: 0,
|
||||
rerankTime: 0,
|
||||
rerankScoreDistribution: null,
|
||||
rerankScores: null,
|
||||
},
|
||||
|
||||
// L4 Formatting
|
||||
l4: {
|
||||
// Arc - 人物弧光
|
||||
arc: {
|
||||
injected: 0,
|
||||
tokens: 0,
|
||||
},
|
||||
|
||||
// Formatting - 格式化
|
||||
formatting: {
|
||||
sectionsIncluded: [],
|
||||
formattingTime: 0,
|
||||
time: 0,
|
||||
},
|
||||
|
||||
// Budget Summary
|
||||
// Budget Summary - 预算
|
||||
budget: {
|
||||
total: 0,
|
||||
limit: 0,
|
||||
@@ -78,26 +85,26 @@ export function createMetrics() {
|
||||
breakdown: {
|
||||
constraints: 0,
|
||||
events: 0,
|
||||
chunks: 0,
|
||||
recentOrphans: 0,
|
||||
distantEvidence: 0,
|
||||
recentEvidence: 0,
|
||||
arcs: 0,
|
||||
},
|
||||
},
|
||||
|
||||
// Total Timing
|
||||
// Timing - 计时
|
||||
timing: {
|
||||
queryExpansion: 0,
|
||||
l0Search: 0,
|
||||
l1Constraints: 0,
|
||||
l2Retrieval: 0,
|
||||
l3Retrieval: 0,
|
||||
l3Rerank: 0,
|
||||
l3Assembly: 0,
|
||||
l4Formatting: 0,
|
||||
anchorSearch: 0,
|
||||
constraintFilter: 0,
|
||||
eventRetrieval: 0,
|
||||
evidenceRetrieval: 0,
|
||||
evidenceRerank: 0,
|
||||
evidenceAssembly: 0,
|
||||
formatting: 0,
|
||||
total: 0,
|
||||
},
|
||||
|
||||
// Quality Indicators
|
||||
// Quality Indicators - 质量指标
|
||||
quality: {
|
||||
constraintCoverage: 100,
|
||||
eventPrecisionProxy: 0,
|
||||
@@ -109,6 +116,8 @@ export function createMetrics() {
|
||||
|
||||
/**
|
||||
* 计算相似度分布统计
|
||||
* @param {number[]} similarities - 相似度数组
|
||||
* @returns {{min: number, max: number, mean: number, median: number}}
|
||||
*/
|
||||
export function calcSimilarityStats(similarities) {
|
||||
if (!similarities?.length) {
|
||||
@@ -128,6 +137,8 @@ export function calcSimilarityStats(similarities) {
|
||||
|
||||
/**
|
||||
* 格式化指标为可读日志
|
||||
* @param {object} metrics - 指标对象
|
||||
* @returns {string} 格式化后的日志
|
||||
*/
|
||||
export function formatMetricsLog(metrics) {
|
||||
const m = metrics;
|
||||
@@ -139,51 +150,50 @@ export function formatMetricsLog(metrics) {
|
||||
lines.push('════════════════════════════════════════');
|
||||
lines.push('');
|
||||
|
||||
// L0 Query Understanding
|
||||
lines.push('[L0] Query Understanding');
|
||||
lines.push(`├─ need_recall: ${m.l0.needRecall}`);
|
||||
if (m.l0.needRecall) {
|
||||
lines.push(`├─ intent: ${m.l0.intent || 'mixed'}`);
|
||||
lines.push(`├─ focus_entities: [${(m.l0.focusEntities || []).join(', ')}]`);
|
||||
lines.push(`├─ queries: [${(m.l0.queries || []).slice(0, 3).join(', ')}]`);
|
||||
lines.push(`├─ query_expansion_time: ${m.l0.queryExpansionTime}ms`);
|
||||
lines.push(`├─ atoms_matched: ${m.l0.atomsMatched || 0}`);
|
||||
lines.push(`└─ floors_hit: ${m.l0.floorsHit || 0}`);
|
||||
// Anchor (L0 StateAtoms)
|
||||
lines.push('[Anchor] L0 StateAtoms - 语义锚点');
|
||||
lines.push(`├─ need_recall: ${m.anchor.needRecall}`);
|
||||
if (m.anchor.needRecall) {
|
||||
lines.push(`├─ focus_entities: [${(m.anchor.focusEntities || []).join(', ')}]`);
|
||||
lines.push(`├─ queries: [${(m.anchor.queries || []).slice(0, 3).join(', ')}]`);
|
||||
lines.push(`├─ query_expansion_time: ${m.anchor.queryExpansionTime}ms`);
|
||||
lines.push(`├─ matched: ${m.anchor.matched || 0}`);
|
||||
lines.push(`└─ floors_hit: ${m.anchor.floorsHit || 0}`);
|
||||
}
|
||||
lines.push('');
|
||||
|
||||
// L1 Constraints
|
||||
lines.push('[L1] Constraints (Facts)');
|
||||
lines.push(`├─ facts_total: ${m.l1.factsTotal}`);
|
||||
lines.push(`├─ facts_filtered: ${m.l1.factsFiltered || 0}`);
|
||||
lines.push(`├─ facts_injected: ${m.l1.factsInjected}`);
|
||||
lines.push(`├─ tokens: ${m.l1.tokens}`);
|
||||
if (m.l1.samples && m.l1.samples.length > 0) {
|
||||
lines.push(`└─ samples: "${m.l1.samples.slice(0, 2).join('", "')}"`);
|
||||
// Constraint (L3 Facts)
|
||||
lines.push('[Constraint] L3 Facts - 世界约束');
|
||||
lines.push(`├─ total: ${m.constraint.total}`);
|
||||
lines.push(`├─ filtered: ${m.constraint.filtered || 0}`);
|
||||
lines.push(`├─ injected: ${m.constraint.injected}`);
|
||||
lines.push(`├─ tokens: ${m.constraint.tokens}`);
|
||||
if (m.constraint.samples && m.constraint.samples.length > 0) {
|
||||
lines.push(`└─ samples: "${m.constraint.samples.slice(0, 2).join('", "')}"`);
|
||||
}
|
||||
lines.push('');
|
||||
|
||||
// L2 Narrative Retrieval
|
||||
lines.push('[L2] Narrative Retrieval');
|
||||
lines.push(`├─ events_in_store: ${m.l2.eventsInStore}`);
|
||||
lines.push(`├─ events_considered: ${m.l2.eventsConsidered}`);
|
||||
// Event (L2 Events)
|
||||
lines.push('[Event] L2 Events - 事件摘要');
|
||||
lines.push(`├─ in_store: ${m.event.inStore}`);
|
||||
lines.push(`├─ considered: ${m.event.considered}`);
|
||||
|
||||
if (m.l2.entityFilterStats) {
|
||||
const ef = m.l2.entityFilterStats;
|
||||
if (m.event.entityFilter) {
|
||||
const ef = m.event.entityFilter;
|
||||
lines.push(`├─ entity_filter:`);
|
||||
lines.push(`│ ├─ focus_entities: [${(ef.focusEntities || []).join(', ')}]`);
|
||||
lines.push(`│ ├─ before_filter: ${ef.before}`);
|
||||
lines.push(`│ ├─ after_filter: ${ef.after}`);
|
||||
lines.push(`│ └─ filtered_out: ${ef.filtered}`);
|
||||
lines.push(`│ ├─ before: ${ef.before}`);
|
||||
lines.push(`│ ├─ after: ${ef.after}`);
|
||||
lines.push(`│ └─ filtered: ${ef.filtered}`);
|
||||
}
|
||||
|
||||
lines.push(`├─ events_selected: ${m.l2.eventsSelected}`);
|
||||
lines.push(`├─ selected: ${m.event.selected}`);
|
||||
lines.push(`├─ by_recall_type:`);
|
||||
lines.push(`│ ├─ direct: ${m.l2.byRecallType.direct}`);
|
||||
lines.push(`│ ├─ causal: ${m.l2.byRecallType.causal}`);
|
||||
lines.push(`│ └─ context: ${m.l2.byRecallType.context}`);
|
||||
lines.push(`│ ├─ direct: ${m.event.byRecallType.direct}`);
|
||||
lines.push(`│ ├─ related: ${m.event.byRecallType.related}`);
|
||||
lines.push(`│ └─ causal: ${m.event.byRecallType.causal}`);
|
||||
|
||||
const sim = m.l2.similarityDistribution;
|
||||
const sim = m.event.similarityDistribution;
|
||||
if (sim && sim.max > 0) {
|
||||
lines.push(`├─ similarity_distribution:`);
|
||||
lines.push(`│ ├─ min: ${sim.min}`);
|
||||
@@ -192,93 +202,100 @@ export function formatMetricsLog(metrics) {
|
||||
lines.push(`│ └─ median: ${sim.median}`);
|
||||
}
|
||||
|
||||
lines.push(`├─ causal_chain: depth=${m.l2.causalChainDepth}, events=${m.l2.causalEventsCount}`);
|
||||
lines.push(`├─ entities_loaded: ${m.l2.entitiesLoaded} [${(m.l2.entityNames || []).join(', ')}]`);
|
||||
lines.push(`└─ retrieval_time: ${m.l2.retrievalTime}ms`);
|
||||
lines.push(`├─ causal_chain: depth=${m.event.causalChainDepth}, count=${m.event.causalCount}`);
|
||||
lines.push(`└─ entities_used: ${m.event.entitiesUsed} [${(m.event.entityNames || []).join(', ')}]`);
|
||||
lines.push('');
|
||||
|
||||
// L3 Evidence Assembly
|
||||
lines.push('[L3] Evidence Assembly');
|
||||
lines.push(`├─ floors_from_l0: ${m.l3.floorsFromL0}`);
|
||||
// Evidence (L1 Chunks)
|
||||
lines.push('[Evidence] L1 Chunks - 原文证据');
|
||||
lines.push(`├─ floors_from_anchors: ${m.evidence.floorsFromAnchors}`);
|
||||
|
||||
// L1 粗筛信息
|
||||
if (m.l3.l1Total > 0) {
|
||||
lines.push(`├─ l1_coarse_filter:`);
|
||||
lines.push(`│ ├─ total: ${m.l3.l1Total}`);
|
||||
lines.push(`│ ├─ after: ${m.l3.l1AfterCoarse}`);
|
||||
lines.push(`│ └─ filtered: ${m.l3.l1Total - m.l3.l1AfterCoarse}`);
|
||||
// 粗筛信息
|
||||
if (m.evidence.chunkTotal > 0) {
|
||||
lines.push(`├─ coarse_filter:`);
|
||||
lines.push(`│ ├─ total: ${m.evidence.chunkTotal}`);
|
||||
lines.push(`│ ├─ after: ${m.evidence.chunkAfterCoarse}`);
|
||||
lines.push(`│ └─ filtered: ${m.evidence.chunkTotal - m.evidence.chunkAfterCoarse}`);
|
||||
}
|
||||
|
||||
lines.push(`├─ chunks_merged: ${m.l3.chunksInRange}`);
|
||||
if (m.l3.chunksInRangeByType) {
|
||||
const cir = m.l3.chunksInRangeByType;
|
||||
lines.push(`│ ├─ l0_virtual: ${cir.l0Virtual || 0}`);
|
||||
lines.push(`│ └─ l1_real: ${cir.l1Real || 0}`);
|
||||
lines.push(`├─ merged: ${m.evidence.merged}`);
|
||||
if (m.evidence.mergedByType) {
|
||||
const mt = m.evidence.mergedByType;
|
||||
lines.push(`│ ├─ anchor_virtual: ${mt.anchorVirtual || 0}`);
|
||||
lines.push(`│ └─ chunk_real: ${mt.chunkReal || 0}`);
|
||||
}
|
||||
|
||||
// Rerank 信息
|
||||
if (m.l3.rerankApplied) {
|
||||
if (m.evidence.rerankApplied) {
|
||||
lines.push(`├─ rerank_applied: true`);
|
||||
lines.push(`│ ├─ before: ${m.l3.beforeRerank}`);
|
||||
lines.push(`│ ├─ after: ${m.l3.afterRerank}`);
|
||||
lines.push(`│ └─ time: ${m.l3.rerankTime}ms`);
|
||||
if (m.l3.rerankScoreDistribution) {
|
||||
const rd = m.l3.rerankScoreDistribution;
|
||||
lines.push(`├─ rerank_scores: min=${rd.min}, max=${rd.max}, mean=${rd.mean}`);
|
||||
lines.push(`│ ├─ before: ${m.evidence.beforeRerank}`);
|
||||
lines.push(`│ ├─ after: ${m.evidence.afterRerank}`);
|
||||
lines.push(`│ └─ time: ${m.evidence.rerankTime}ms`);
|
||||
if (m.evidence.rerankScores) {
|
||||
const rs = m.evidence.rerankScores;
|
||||
lines.push(`├─ rerank_scores: min=${rs.min}, max=${rs.max}, mean=${rs.mean}`);
|
||||
}
|
||||
} else {
|
||||
lines.push(`├─ rerank_applied: false`);
|
||||
}
|
||||
|
||||
lines.push(`├─ chunks_selected: ${m.l3.chunksSelected}`);
|
||||
if (m.l3.chunksSelectedByType) {
|
||||
const cs = m.l3.chunksSelectedByType;
|
||||
lines.push(`│ ├─ l0_virtual: ${cs.l0Virtual || 0}`);
|
||||
lines.push(`│ └─ l1_real: ${cs.l1Real || 0}`);
|
||||
lines.push(`├─ selected: ${m.evidence.selected}`);
|
||||
if (m.evidence.selectedByType) {
|
||||
const st = m.evidence.selectedByType;
|
||||
lines.push(`│ ├─ anchor_virtual: ${st.anchorVirtual || 0}`);
|
||||
lines.push(`│ └─ chunk_real: ${st.chunkReal || 0}`);
|
||||
}
|
||||
|
||||
lines.push(`├─ context_pairs_added: ${m.l3.contextPairsAdded}`);
|
||||
lines.push(`├─ tokens: ${m.l3.tokens}`);
|
||||
lines.push(`└─ assembly_time: ${m.l3.assemblyTime}ms`);
|
||||
lines.push(`├─ context_pairs_added: ${m.evidence.contextPairsAdded}`);
|
||||
lines.push(`├─ tokens: ${m.evidence.tokens}`);
|
||||
lines.push(`└─ assembly_time: ${m.evidence.assemblyTime}ms`);
|
||||
lines.push('');
|
||||
|
||||
// L4 Formatting
|
||||
lines.push('[L4] Prompt Formatting');
|
||||
lines.push(`├─ sections: [${(m.l4.sectionsIncluded || []).join(', ')}]`);
|
||||
lines.push(`└─ formatting_time: ${m.l4.formattingTime}ms`);
|
||||
// Arc
|
||||
if (m.arc.injected > 0) {
|
||||
lines.push('[Arc] 人物弧光');
|
||||
lines.push(`├─ injected: ${m.arc.injected}`);
|
||||
lines.push(`└─ tokens: ${m.arc.tokens}`);
|
||||
lines.push('');
|
||||
}
|
||||
|
||||
// Formatting
|
||||
lines.push('[Formatting] 格式化');
|
||||
lines.push(`├─ sections: [${(m.formatting.sectionsIncluded || []).join(', ')}]`);
|
||||
lines.push(`└─ time: ${m.formatting.time}ms`);
|
||||
lines.push('');
|
||||
|
||||
// Budget Summary
|
||||
lines.push('[Budget Summary]');
|
||||
lines.push('[Budget] 预算');
|
||||
lines.push(`├─ total_tokens: ${m.budget.total}`);
|
||||
lines.push(`├─ budget_limit: ${m.budget.limit}`);
|
||||
lines.push(`├─ limit: ${m.budget.limit}`);
|
||||
lines.push(`├─ utilization: ${m.budget.utilization}%`);
|
||||
lines.push(`└─ breakdown:`);
|
||||
const bd = m.budget.breakdown || {};
|
||||
lines.push(` ├─ constraints (L1): ${bd.constraints || 0}`);
|
||||
lines.push(` ├─ events (L2): ${bd.events || 0}`);
|
||||
lines.push(` ├─ chunks (L3): ${bd.chunks || 0}`);
|
||||
lines.push(` ├─ recent_orphans: ${bd.recentOrphans || 0}`);
|
||||
lines.push(` ├─ constraints: ${bd.constraints || 0}`);
|
||||
lines.push(` ├─ events: ${bd.events || 0}`);
|
||||
lines.push(` ├─ distant_evidence: ${bd.distantEvidence || 0}`);
|
||||
lines.push(` ├─ recent_evidence: ${bd.recentEvidence || 0}`);
|
||||
lines.push(` └─ arcs: ${bd.arcs || 0}`);
|
||||
lines.push('');
|
||||
|
||||
// Timing
|
||||
lines.push('[Timing]');
|
||||
lines.push('[Timing] 计时');
|
||||
lines.push(`├─ query_expansion: ${m.timing.queryExpansion}ms`);
|
||||
lines.push(`├─ l0_search: ${m.timing.l0Search}ms`);
|
||||
lines.push(`├─ l1_constraints: ${m.timing.l1Constraints}ms`);
|
||||
lines.push(`├─ l2_retrieval: ${m.timing.l2Retrieval}ms`);
|
||||
lines.push(`├─ l3_retrieval: ${m.timing.l3Retrieval}ms`);
|
||||
if (m.timing.l3Rerank > 0) {
|
||||
lines.push(`├─ l3_rerank: ${m.timing.l3Rerank}ms`);
|
||||
lines.push(`├─ anchor_search: ${m.timing.anchorSearch}ms`);
|
||||
lines.push(`├─ constraint_filter: ${m.timing.constraintFilter}ms`);
|
||||
lines.push(`├─ event_retrieval: ${m.timing.eventRetrieval}ms`);
|
||||
lines.push(`├─ evidence_retrieval: ${m.timing.evidenceRetrieval}ms`);
|
||||
if (m.timing.evidenceRerank > 0) {
|
||||
lines.push(`├─ evidence_rerank: ${m.timing.evidenceRerank}ms`);
|
||||
}
|
||||
lines.push(`├─ l3_assembly: ${m.timing.l3Assembly}ms`);
|
||||
lines.push(`├─ l4_formatting: ${m.timing.l4Formatting}ms`);
|
||||
lines.push(`├─ evidence_assembly: ${m.timing.evidenceAssembly}ms`);
|
||||
lines.push(`├─ formatting: ${m.timing.formatting}ms`);
|
||||
lines.push(`└─ total: ${m.timing.total}ms`);
|
||||
lines.push('');
|
||||
|
||||
// Quality Indicators
|
||||
lines.push('[Quality Indicators]');
|
||||
lines.push('[Quality] 质量指标');
|
||||
lines.push(`├─ constraint_coverage: ${m.quality.constraintCoverage}%`);
|
||||
lines.push(`├─ event_precision_proxy: ${m.quality.eventPrecisionProxy}`);
|
||||
lines.push(`├─ evidence_density: ${m.quality.evidenceDensity}%`);
|
||||
@@ -302,25 +319,27 @@ export function formatMetricsLog(metrics) {
|
||||
|
||||
/**
|
||||
* 检测潜在问题
|
||||
* @param {object} metrics - 指标对象
|
||||
* @returns {string[]} 问题列表
|
||||
*/
|
||||
export function detectIssues(metrics) {
|
||||
const issues = [];
|
||||
const m = metrics;
|
||||
|
||||
// 召回比例问题
|
||||
if (m.l2.eventsConsidered > 0) {
|
||||
const selectRatio = m.l2.eventsSelected / m.l2.eventsConsidered;
|
||||
// 事件召回比例问题
|
||||
if (m.event.considered > 0) {
|
||||
const selectRatio = m.event.selected / m.event.considered;
|
||||
if (selectRatio < 0.1) {
|
||||
issues.push(`Event selection ratio too low (${(selectRatio * 100).toFixed(1)}%) - threshold may be too high`);
|
||||
}
|
||||
if (selectRatio > 0.6 && m.l2.eventsConsidered > 10) {
|
||||
if (selectRatio > 0.6 && m.event.considered > 10) {
|
||||
issues.push(`Event selection ratio high (${(selectRatio * 100).toFixed(1)}%) - may include noise`);
|
||||
}
|
||||
}
|
||||
|
||||
// 实体过滤问题
|
||||
if (m.l2.entityFilterStats) {
|
||||
const ef = m.l2.entityFilterStats;
|
||||
if (m.event.entityFilter) {
|
||||
const ef = m.event.entityFilter;
|
||||
if (ef.filtered === 0 && ef.before > 10) {
|
||||
issues.push(`No events filtered by entity - focus entities may be too broad or missing`);
|
||||
}
|
||||
@@ -330,58 +349,58 @@ export function detectIssues(metrics) {
|
||||
}
|
||||
|
||||
// 相似度问题
|
||||
if (m.l2.similarityDistribution && m.l2.similarityDistribution.min > 0 && m.l2.similarityDistribution.min < 0.5) {
|
||||
issues.push(`Low similarity events included (min=${m.l2.similarityDistribution.min})`);
|
||||
if (m.event.similarityDistribution && m.event.similarityDistribution.min > 0 && m.event.similarityDistribution.min < 0.5) {
|
||||
issues.push(`Low similarity events included (min=${m.event.similarityDistribution.min})`);
|
||||
}
|
||||
|
||||
// 因果链问题
|
||||
if (m.l2.eventsSelected > 0 && m.l2.causalEventsCount === 0 && m.l2.byRecallType.direct === 0) {
|
||||
if (m.event.selected > 0 && m.event.causalCount === 0 && m.event.byRecallType.direct === 0) {
|
||||
issues.push('No direct or causal events - query expansion may be inaccurate');
|
||||
}
|
||||
|
||||
// L0 atoms 问题
|
||||
if ((m.l0.atomsMatched || 0) === 0) {
|
||||
issues.push('L0 atoms not matched - may need to generate anchors');
|
||||
// 锚点匹配问题
|
||||
if ((m.anchor.matched || 0) === 0) {
|
||||
issues.push('No anchors matched - may need to generate anchors');
|
||||
}
|
||||
|
||||
// L1 粗筛问题
|
||||
if (m.l3.l1Total > 0 && m.l3.l1AfterCoarse > 0) {
|
||||
const coarseFilterRatio = 1 - (m.l3.l1AfterCoarse / m.l3.l1Total);
|
||||
// 证据粗筛问题
|
||||
if (m.evidence.chunkTotal > 0 && m.evidence.chunkAfterCoarse > 0) {
|
||||
const coarseFilterRatio = 1 - (m.evidence.chunkAfterCoarse / m.evidence.chunkTotal);
|
||||
if (coarseFilterRatio > 0.9) {
|
||||
issues.push(`Very high L1 coarse filter ratio (${(coarseFilterRatio * 100).toFixed(0)}%) - query may be too specific`);
|
||||
issues.push(`Very high evidence coarse filter ratio (${(coarseFilterRatio * 100).toFixed(0)}%) - query may be too specific`);
|
||||
}
|
||||
}
|
||||
|
||||
// Rerank 相关问题
|
||||
if (m.l3.rerankApplied) {
|
||||
if (m.l3.beforeRerank > 0 && m.l3.afterRerank > 0) {
|
||||
const filterRatio = 1 - (m.l3.afterRerank / m.l3.beforeRerank);
|
||||
if (m.evidence.rerankApplied) {
|
||||
if (m.evidence.beforeRerank > 0 && m.evidence.afterRerank > 0) {
|
||||
const filterRatio = 1 - (m.evidence.afterRerank / m.evidence.beforeRerank);
|
||||
if (filterRatio > 0.7) {
|
||||
issues.push(`High rerank filter ratio (${(filterRatio * 100).toFixed(0)}%) - many irrelevant chunks removed`);
|
||||
}
|
||||
}
|
||||
|
||||
if (m.l3.rerankScoreDistribution) {
|
||||
const rd = m.l3.rerankScoreDistribution;
|
||||
if (rd.max < 0.5) {
|
||||
issues.push(`Low rerank scores (max=${rd.max}) - query may be poorly matched`);
|
||||
if (m.evidence.rerankScores) {
|
||||
const rs = m.evidence.rerankScores;
|
||||
if (rs.max < 0.5) {
|
||||
issues.push(`Low rerank scores (max=${rs.max}) - query may be poorly matched`);
|
||||
}
|
||||
if (rd.mean < 0.3) {
|
||||
issues.push(`Very low average rerank score (mean=${rd.mean}) - context may be weak`);
|
||||
if (rs.mean < 0.3) {
|
||||
issues.push(`Very low average rerank score (mean=${rs.mean}) - context may be weak`);
|
||||
}
|
||||
}
|
||||
|
||||
if (m.l3.rerankTime > 2000) {
|
||||
issues.push(`Slow rerank (${m.l3.rerankTime}ms) - may affect response time`);
|
||||
if (m.evidence.rerankTime > 2000) {
|
||||
issues.push(`Slow rerank (${m.evidence.rerankTime}ms) - may affect response time`);
|
||||
}
|
||||
}
|
||||
|
||||
// 证据密度问题
|
||||
if (m.l3.chunksSelected > 0 && m.l3.chunksSelectedByType) {
|
||||
const l1Real = m.l3.chunksSelectedByType.l1Real || 0;
|
||||
const density = l1Real / m.l3.chunksSelected;
|
||||
if (density < 0.3 && m.l3.chunksSelected > 10) {
|
||||
issues.push(`Low L1 chunk ratio in selected (${(density * 100).toFixed(0)}%) - may lack concrete evidence`);
|
||||
if (m.evidence.selected > 0 && m.evidence.selectedByType) {
|
||||
const chunkReal = m.evidence.selectedByType.chunkReal || 0;
|
||||
const density = chunkReal / m.evidence.selected;
|
||||
if (density < 0.3 && m.evidence.selected > 10) {
|
||||
issues.push(`Low real chunk ratio in selected (${(density * 100).toFixed(0)}%) - may lack concrete evidence`);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// Story Summary - Recall Engine (v4 - L0 无上限 + L1 粗筛)
|
||||
// Story Summary - Recall Engine (v5 - 统一命名)
|
||||
//
|
||||
// 命名规范:
|
||||
// - 存储层用 L0/L1/L2/L3(StateAtom/Chunk/Event/Fact)
|
||||
// - 召回层用语义名称:anchor/evidence/event/constraint
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
import { getAllEventVectors, getChunksByFloors, getMeta, getChunkVectorsByIds } from '../storage/chunk-store.js';
|
||||
@@ -22,17 +26,17 @@ const CONFIG = {
|
||||
// Query Expansion
|
||||
QUERY_EXPANSION_TIMEOUT: 6000,
|
||||
|
||||
// L0 配置 - 去掉硬上限,提高阈值
|
||||
L0_MIN_SIMILARITY: 0.58,
|
||||
// Anchor (L0 StateAtoms) 配置
|
||||
ANCHOR_MIN_SIMILARITY: 0.58,
|
||||
|
||||
// L1 粗筛配置
|
||||
L1_MAX_CANDIDATES: 100,
|
||||
// Evidence (L1 Chunks) 粗筛配置
|
||||
EVIDENCE_COARSE_MAX: 100,
|
||||
|
||||
// L2 配置
|
||||
L2_CANDIDATE_MAX: 100,
|
||||
L2_SELECT_MAX: 50,
|
||||
L2_MIN_SIMILARITY: 0.55,
|
||||
L2_MMR_LAMBDA: 0.72,
|
||||
// Event (L2 Events) 配置
|
||||
EVENT_CANDIDATE_MAX: 100,
|
||||
EVENT_SELECT_MAX: 50,
|
||||
EVENT_MIN_SIMILARITY: 0.55,
|
||||
EVENT_MMR_LAMBDA: 0.72,
|
||||
|
||||
// Rerank 配置
|
||||
RERANK_THRESHOLD: 80,
|
||||
@@ -48,6 +52,12 @@ const CONFIG = {
|
||||
// 工具函数
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
/**
|
||||
* 计算余弦相似度
|
||||
* @param {number[]} a - 向量A
|
||||
* @param {number[]} b - 向量B
|
||||
* @returns {number} 相似度 [0, 1]
|
||||
*/
|
||||
function cosineSimilarity(a, b) {
|
||||
if (!a?.length || !b?.length || a.length !== b.length) return 0;
|
||||
let dot = 0, nA = 0, nB = 0;
|
||||
@@ -59,6 +69,11 @@ function cosineSimilarity(a, b) {
|
||||
return nA && nB ? dot / (Math.sqrt(nA) * Math.sqrt(nB)) : 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* 标准化字符串(用于实体匹配)
|
||||
* @param {string} s - 输入字符串
|
||||
* @returns {string} 标准化后的字符串
|
||||
*/
|
||||
function normalize(s) {
|
||||
return String(s || '')
|
||||
.normalize('NFKC')
|
||||
@@ -67,10 +82,21 @@ function normalize(s) {
|
||||
.toLowerCase();
|
||||
}
|
||||
|
||||
/**
|
||||
* 清理文本用于召回
|
||||
* @param {string} text - 原始文本
|
||||
* @returns {string} 清理后的文本
|
||||
*/
|
||||
function cleanForRecall(text) {
|
||||
return filterText(text).replace(/\[tts:[^\]]*\]/gi, '').trim();
|
||||
}
|
||||
|
||||
/**
|
||||
* 从 focus entities 中移除用户名
|
||||
* @param {string[]} focusEntities - 焦点实体列表
|
||||
* @param {string} userName - 用户名
|
||||
* @returns {string[]} 过滤后的实体列表
|
||||
*/
|
||||
function removeUserNameFromFocus(focusEntities, userName) {
|
||||
const u = normalize(userName);
|
||||
if (!u) return Array.isArray(focusEntities) ? focusEntities : [];
|
||||
@@ -81,6 +107,13 @@ function removeUserNameFromFocus(focusEntities, userName) {
|
||||
.filter(e => normalize(e) !== u);
|
||||
}
|
||||
|
||||
/**
|
||||
* 构建 rerank 查询文本
|
||||
* @param {object} expansion - query expansion 结果
|
||||
* @param {object[]} lastMessages - 最近消息
|
||||
* @param {string} pendingUserMessage - 待发送的用户消息
|
||||
* @returns {string} 查询文本
|
||||
*/
|
||||
function buildRerankQuery(expansion, lastMessages, pendingUserMessage) {
|
||||
const parts = [];
|
||||
|
||||
@@ -109,9 +142,18 @@ function buildRerankQuery(expansion, lastMessages, pendingUserMessage) {
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// MMR 选择
|
||||
// MMR 选择算法
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
/**
|
||||
* Maximal Marginal Relevance 选择
|
||||
* @param {object[]} candidates - 候选项
|
||||
* @param {number} k - 选择数量
|
||||
* @param {number} lambda - 相关性/多样性权衡参数
|
||||
* @param {Function} getVector - 获取向量的函数
|
||||
* @param {Function} getScore - 获取分数的函数
|
||||
* @returns {object[]} 选中的候选项
|
||||
*/
|
||||
function mmrSelect(candidates, k, lambda, getVector, getScore) {
|
||||
const selected = [];
|
||||
const ids = new Set();
|
||||
@@ -152,31 +194,38 @@ function mmrSelect(candidates, k, lambda, getVector, getScore) {
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// L0 检索:无上限,阈值过滤
|
||||
// [Anchors] L0 StateAtoms 检索
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
async function searchL0(queryVector, vectorConfig, metrics) {
|
||||
/**
|
||||
* 检索语义锚点(L0 StateAtoms)
|
||||
* @param {number[]} queryVector - 查询向量
|
||||
* @param {object} vectorConfig - 向量配置
|
||||
* @param {object} metrics - 指标对象
|
||||
* @returns {Promise<{hits: object[], floors: Set<number>}>}
|
||||
*/
|
||||
async function recallAnchors(queryVector, vectorConfig, metrics) {
|
||||
const { chatId } = getContext();
|
||||
if (!chatId || !queryVector?.length) {
|
||||
return { atoms: [], floors: new Set() };
|
||||
return { hits: [], floors: new Set() };
|
||||
}
|
||||
|
||||
const meta = await getMeta(chatId);
|
||||
const fp = getEngineFingerprint(vectorConfig);
|
||||
if (meta.fingerprint && meta.fingerprint !== fp) {
|
||||
xbLog.warn(MODULE_ID, 'L0 fingerprint 不匹配');
|
||||
return { atoms: [], floors: new Set() };
|
||||
xbLog.warn(MODULE_ID, 'Anchor fingerprint 不匹配');
|
||||
return { hits: [], floors: new Set() };
|
||||
}
|
||||
|
||||
const stateVectors = await getAllStateVectors(chatId);
|
||||
if (!stateVectors.length) {
|
||||
return { atoms: [], floors: new Set() };
|
||||
return { hits: [], floors: new Set() };
|
||||
}
|
||||
|
||||
const atomsList = getStateAtoms();
|
||||
const atomMap = new Map(atomsList.map(a => [a.atomId, a]));
|
||||
|
||||
// ★ 只按阈值过滤,不设硬上限
|
||||
// 按阈值过滤,不设硬上限
|
||||
const scored = stateVectors
|
||||
.map(sv => {
|
||||
const atom = atomMap.get(sv.atomId);
|
||||
@@ -190,69 +239,79 @@ async function searchL0(queryVector, vectorConfig, metrics) {
|
||||
};
|
||||
})
|
||||
.filter(Boolean)
|
||||
.filter(s => s.similarity >= CONFIG.L0_MIN_SIMILARITY)
|
||||
.filter(s => s.similarity >= CONFIG.ANCHOR_MIN_SIMILARITY)
|
||||
.sort((a, b) => b.similarity - a.similarity);
|
||||
|
||||
const floors = new Set(scored.map(s => s.floor));
|
||||
|
||||
if (metrics) {
|
||||
metrics.l0.atomsMatched = scored.length;
|
||||
metrics.l0.floorsHit = floors.size;
|
||||
metrics.l0.topAtoms = scored.slice(0, 5).map(s => ({
|
||||
metrics.anchor.matched = scored.length;
|
||||
metrics.anchor.floorsHit = floors.size;
|
||||
metrics.anchor.topHits = scored.slice(0, 5).map(s => ({
|
||||
floor: s.floor,
|
||||
semantic: s.atom?.semantic?.slice(0, 50),
|
||||
similarity: Math.round(s.similarity * 1000) / 1000,
|
||||
}));
|
||||
}
|
||||
|
||||
return { atoms: scored, floors };
|
||||
return { hits: scored, floors };
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// 统计 chunks 类型构成
|
||||
// [Evidence] L1 Chunks 拉取 + 粗筛 + Rerank
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
function countChunksByType(chunks) {
|
||||
let l0Virtual = 0;
|
||||
let l1Real = 0;
|
||||
/**
|
||||
* 统计 evidence 类型构成
|
||||
* @param {object[]} chunks - chunk 列表
|
||||
* @returns {{anchorVirtual: number, chunkReal: number}}
|
||||
*/
|
||||
function countEvidenceByType(chunks) {
|
||||
let anchorVirtual = 0;
|
||||
let chunkReal = 0;
|
||||
|
||||
for (const c of chunks || []) {
|
||||
if (c.isL0) {
|
||||
l0Virtual++;
|
||||
if (c.isAnchorVirtual) {
|
||||
anchorVirtual++;
|
||||
} else {
|
||||
l1Real++;
|
||||
chunkReal++;
|
||||
}
|
||||
}
|
||||
|
||||
return { l0Virtual, l1Real };
|
||||
return { anchorVirtual, chunkReal };
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// L3 拉取 + L1 粗筛 + Rerank
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
async function getChunksFromL0Floors(l0Floors, l0Atoms, queryVector, queryText, metrics) {
|
||||
/**
|
||||
* 根据锚点命中楼层拉取证据(L1 Chunks)
|
||||
* @param {Set<number>} anchorFloors - 锚点命中的楼层
|
||||
* @param {object[]} anchorHits - 锚点命中结果
|
||||
* @param {number[]} queryVector - 查询向量
|
||||
* @param {string} queryText - rerank 查询文本
|
||||
* @param {object} metrics - 指标对象
|
||||
* @returns {Promise<object[]>} 证据 chunks
|
||||
*/
|
||||
async function pullEvidenceByFloors(anchorFloors, anchorHits, queryVector, queryText, metrics) {
|
||||
const { chatId } = getContext();
|
||||
if (!chatId || !l0Floors.size) {
|
||||
if (!chatId || !anchorFloors.size) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const floorArray = Array.from(l0Floors);
|
||||
const floorArray = Array.from(anchorFloors);
|
||||
|
||||
// 1. 构建 L0 虚拟 chunks
|
||||
const l0VirtualChunks = (l0Atoms || []).map(a => ({
|
||||
chunkId: `state-${a.atomId}`,
|
||||
// 1. 构建锚点虚拟 chunks(来自 L0 StateAtoms)
|
||||
const anchorVirtualChunks = (anchorHits || []).map(a => ({
|
||||
chunkId: `anchor-${a.atomId}`,
|
||||
floor: a.floor,
|
||||
chunkIdx: -1,
|
||||
speaker: '📌',
|
||||
isUser: false,
|
||||
text: a.atom?.semantic || '',
|
||||
similarity: a.similarity,
|
||||
isL0: true,
|
||||
isAnchorVirtual: true,
|
||||
_atom: a.atom,
|
||||
}));
|
||||
|
||||
// 2. 拉取 L1 chunks
|
||||
// 2. 拉取真实 chunks(来自 L1)
|
||||
let dbChunks = [];
|
||||
try {
|
||||
dbChunks = await getChunksByFloors(chatId, floorArray);
|
||||
@@ -260,8 +319,8 @@ async function getChunksFromL0Floors(l0Floors, l0Atoms, queryVector, queryText,
|
||||
xbLog.warn(MODULE_ID, '从 DB 拉取 chunks 失败', e);
|
||||
}
|
||||
|
||||
// 3. ★ L1 向量粗筛
|
||||
let l1Filtered = [];
|
||||
// 3. L1 向量粗筛
|
||||
let coarseFiltered = [];
|
||||
if (dbChunks.length > 0 && queryVector?.length) {
|
||||
const chunkIds = dbChunks.map(c => c.chunkId);
|
||||
let chunkVectors = [];
|
||||
@@ -270,54 +329,51 @@ async function getChunksFromL0Floors(l0Floors, l0Atoms, queryVector, queryText,
|
||||
} catch (e) {
|
||||
xbLog.warn(MODULE_ID, 'L1 向量获取失败', e);
|
||||
}
|
||||
|
||||
|
||||
const vectorMap = new Map(chunkVectors.map(v => [v.chunkId, v.vector]));
|
||||
|
||||
l1Filtered = dbChunks
|
||||
coarseFiltered = dbChunks
|
||||
.map(c => {
|
||||
const vec = vectorMap.get(c.chunkId);
|
||||
if (!vec?.length) return null;
|
||||
|
||||
return {
|
||||
...c,
|
||||
isL0: false,
|
||||
isAnchorVirtual: false,
|
||||
similarity: cosineSimilarity(queryVector, vec),
|
||||
};
|
||||
})
|
||||
.filter(Boolean)
|
||||
.sort((a, b) => b.similarity - a.similarity)
|
||||
.slice(0, CONFIG.L1_MAX_CANDIDATES);
|
||||
.slice(0, CONFIG.EVIDENCE_COARSE_MAX);
|
||||
}
|
||||
|
||||
// 4. 合并
|
||||
const allChunks = [...l0VirtualChunks, ...l1Filtered];
|
||||
const allEvidence = [...anchorVirtualChunks, ...coarseFiltered];
|
||||
|
||||
// ★ 更新 metrics
|
||||
// 更新 metrics
|
||||
if (metrics) {
|
||||
metrics.l3.floorsFromL0 = floorArray.length;
|
||||
metrics.l3.l1Total = dbChunks.length;
|
||||
metrics.l3.l1AfterCoarse = l1Filtered.length;
|
||||
metrics.l3.chunksInRange = l0VirtualChunks.length + l1Filtered.length;
|
||||
metrics.l3.chunksInRangeByType = {
|
||||
l0Virtual: l0VirtualChunks.length,
|
||||
l1Real: l1Filtered.length,
|
||||
};
|
||||
metrics.evidence.floorsFromAnchors = floorArray.length;
|
||||
metrics.evidence.chunkTotal = dbChunks.length;
|
||||
metrics.evidence.chunkAfterCoarse = coarseFiltered.length;
|
||||
metrics.evidence.merged = allEvidence.length;
|
||||
metrics.evidence.mergedByType = countEvidenceByType(allEvidence);
|
||||
}
|
||||
|
||||
// 5. 是否需要 Rerank
|
||||
if (allChunks.length <= CONFIG.RERANK_THRESHOLD) {
|
||||
if (allEvidence.length <= CONFIG.RERANK_THRESHOLD) {
|
||||
if (metrics) {
|
||||
metrics.l3.rerankApplied = false;
|
||||
metrics.l3.chunksSelected = allChunks.length;
|
||||
metrics.l3.chunksSelectedByType = countChunksByType(allChunks);
|
||||
metrics.evidence.rerankApplied = false;
|
||||
metrics.evidence.selected = allEvidence.length;
|
||||
metrics.evidence.selectedByType = countEvidenceByType(allEvidence);
|
||||
}
|
||||
return allChunks;
|
||||
return allEvidence;
|
||||
}
|
||||
|
||||
// 6. Rerank 精排
|
||||
const T_Rerank_Start = performance.now();
|
||||
|
||||
const reranked = await rerankChunks(queryText, allChunks, {
|
||||
const reranked = await rerankChunks(queryText, allEvidence, {
|
||||
topN: CONFIG.RERANK_TOP_N,
|
||||
minScore: CONFIG.RERANK_MIN_SCORE,
|
||||
});
|
||||
@@ -325,18 +381,18 @@ async function getChunksFromL0Floors(l0Floors, l0Atoms, queryVector, queryText,
|
||||
const rerankTime = Math.round(performance.now() - T_Rerank_Start);
|
||||
|
||||
if (metrics) {
|
||||
metrics.l3.rerankApplied = true;
|
||||
metrics.l3.beforeRerank = allChunks.length;
|
||||
metrics.l3.afterRerank = reranked.length;
|
||||
metrics.l3.chunksSelected = reranked.length;
|
||||
metrics.l3.chunksSelectedByType = countChunksByType(reranked);
|
||||
metrics.l3.rerankTime = rerankTime;
|
||||
metrics.timing.l3Rerank = rerankTime;
|
||||
metrics.evidence.rerankApplied = true;
|
||||
metrics.evidence.beforeRerank = allEvidence.length;
|
||||
metrics.evidence.afterRerank = reranked.length;
|
||||
metrics.evidence.selected = reranked.length;
|
||||
metrics.evidence.selectedByType = countEvidenceByType(reranked);
|
||||
metrics.evidence.rerankTime = rerankTime;
|
||||
metrics.timing.evidenceRerank = rerankTime;
|
||||
|
||||
const scores = reranked.map(c => c._rerankScore || 0).filter(s => s > 0);
|
||||
if (scores.length > 0) {
|
||||
scores.sort((a, b) => a - b);
|
||||
metrics.l3.rerankScoreDistribution = {
|
||||
metrics.evidence.rerankScores = {
|
||||
min: Number(scores[0].toFixed(3)),
|
||||
max: Number(scores[scores.length - 1].toFixed(3)),
|
||||
mean: Number((scores.reduce((a, b) => a + b, 0) / scores.length).toFixed(3)),
|
||||
@@ -344,16 +400,25 @@ async function getChunksFromL0Floors(l0Floors, l0Atoms, queryVector, queryText,
|
||||
}
|
||||
}
|
||||
|
||||
xbLog.info(MODULE_ID, `L3: ${dbChunks.length} L1 → ${l1Filtered.length} 粗筛 → ${reranked.length} Rerank (${rerankTime}ms)`);
|
||||
xbLog.info(MODULE_ID, `Evidence: ${dbChunks.length} L1 → ${coarseFiltered.length} coarse → ${reranked.length} rerank (${rerankTime}ms)`);
|
||||
|
||||
return reranked;
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// L2 检索(保持不变)
|
||||
// [Events] L2 Events 检索
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
async function searchL2Events(queryVector, allEvents, vectorConfig, focusEntities, metrics) {
|
||||
/**
|
||||
* 检索事件(L2 Events)
|
||||
* @param {number[]} queryVector - 查询向量
|
||||
* @param {object[]} allEvents - 所有事件
|
||||
* @param {object} vectorConfig - 向量配置
|
||||
* @param {string[]} focusEntities - 焦点实体
|
||||
* @param {object} metrics - 指标对象
|
||||
* @returns {Promise<object[]>} 事件命中结果
|
||||
*/
|
||||
async function recallEvents(queryVector, allEvents, vectorConfig, focusEntities, metrics) {
|
||||
const { chatId } = getContext();
|
||||
if (!chatId || !queryVector?.length || !allEvents?.length) {
|
||||
return [];
|
||||
@@ -362,7 +427,7 @@ async function searchL2Events(queryVector, allEvents, vectorConfig, focusEntitie
|
||||
const meta = await getMeta(chatId);
|
||||
const fp = getEngineFingerprint(vectorConfig);
|
||||
if (meta.fingerprint && meta.fingerprint !== fp) {
|
||||
xbLog.warn(MODULE_ID, 'L2 fingerprint 不匹配');
|
||||
xbLog.warn(MODULE_ID, 'Event fingerprint 不匹配');
|
||||
return [];
|
||||
}
|
||||
|
||||
@@ -395,18 +460,19 @@ async function searchL2Events(queryVector, allEvents, vectorConfig, focusEntitie
|
||||
});
|
||||
|
||||
if (metrics) {
|
||||
metrics.l2.eventsInStore = allEvents.length;
|
||||
metrics.event.inStore = allEvents.length;
|
||||
}
|
||||
|
||||
let candidates = scored
|
||||
.filter(s => s.similarity >= CONFIG.L2_MIN_SIMILARITY)
|
||||
.filter(s => s.similarity >= CONFIG.EVENT_MIN_SIMILARITY)
|
||||
.sort((a, b) => b.similarity - a.similarity)
|
||||
.slice(0, CONFIG.L2_CANDIDATE_MAX);
|
||||
.slice(0, CONFIG.EVENT_CANDIDATE_MAX);
|
||||
|
||||
if (metrics) {
|
||||
metrics.l2.eventsConsidered = candidates.length;
|
||||
metrics.event.considered = candidates.length;
|
||||
}
|
||||
|
||||
// 实体过滤
|
||||
if (focusSet.size > 0) {
|
||||
const beforeFilter = candidates.length;
|
||||
|
||||
@@ -416,7 +482,7 @@ async function searchL2Events(queryVector, allEvents, vectorConfig, focusEntitie
|
||||
});
|
||||
|
||||
if (metrics) {
|
||||
metrics.l2.entityFilterStats = {
|
||||
metrics.event.entityFilter = {
|
||||
focusEntities: focusEntities || [],
|
||||
before: beforeFilter,
|
||||
after: candidates.length,
|
||||
@@ -425,21 +491,22 @@ async function searchL2Events(queryVector, allEvents, vectorConfig, focusEntitie
|
||||
}
|
||||
}
|
||||
|
||||
// MMR 选择
|
||||
const selected = mmrSelect(
|
||||
candidates,
|
||||
CONFIG.L2_SELECT_MAX,
|
||||
CONFIG.L2_MMR_LAMBDA,
|
||||
CONFIG.EVENT_SELECT_MAX,
|
||||
CONFIG.EVENT_MMR_LAMBDA,
|
||||
c => c.vector,
|
||||
c => c.similarity
|
||||
);
|
||||
|
||||
let directCount = 0;
|
||||
let contextCount = 0;
|
||||
let relatedCount = 0;
|
||||
|
||||
const results = selected.map(s => {
|
||||
const recallType = s._hasEntityMatch ? 'DIRECT' : 'SIMILAR';
|
||||
const recallType = s._hasEntityMatch ? 'DIRECT' : 'RELATED';
|
||||
if (recallType === 'DIRECT') directCount++;
|
||||
else contextCount++;
|
||||
else relatedCount++;
|
||||
|
||||
return {
|
||||
event: s.event,
|
||||
@@ -450,18 +517,23 @@ async function searchL2Events(queryVector, allEvents, vectorConfig, focusEntitie
|
||||
});
|
||||
|
||||
if (metrics) {
|
||||
metrics.l2.eventsSelected = results.length;
|
||||
metrics.l2.byRecallType = { direct: directCount, context: contextCount, causal: 0 };
|
||||
metrics.l2.similarityDistribution = calcSimilarityStats(results.map(r => r.similarity));
|
||||
metrics.event.selected = results.length;
|
||||
metrics.event.byRecallType = { direct: directCount, related: relatedCount, causal: 0 };
|
||||
metrics.event.similarityDistribution = calcSimilarityStats(results.map(r => r.similarity));
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// 因果链追溯(保持不变)
|
||||
// [Causation] 因果链追溯
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
/**
|
||||
* 构建事件索引
|
||||
* @param {object[]} allEvents - 所有事件
|
||||
* @returns {Map<string, object>} 事件索引
|
||||
*/
|
||||
function buildEventIndex(allEvents) {
|
||||
const map = new Map();
|
||||
for (const e of allEvents || []) {
|
||||
@@ -470,7 +542,14 @@ function buildEventIndex(allEvents) {
|
||||
return map;
|
||||
}
|
||||
|
||||
function traceCausalAncestors(recalledEvents, eventIndex, maxDepth = CONFIG.CAUSAL_CHAIN_MAX_DEPTH) {
|
||||
/**
|
||||
* 追溯因果链
|
||||
* @param {object[]} eventHits - 事件命中结果
|
||||
* @param {Map<string, object>} eventIndex - 事件索引
|
||||
* @param {number} maxDepth - 最大深度
|
||||
* @returns {{results: object[], maxDepth: number}}
|
||||
*/
|
||||
function traceCausation(eventHits, eventIndex, maxDepth = CONFIG.CAUSAL_CHAIN_MAX_DEPTH) {
|
||||
const out = new Map();
|
||||
const idRe = /^evt-\d+$/;
|
||||
let maxActualDepth = 0;
|
||||
@@ -497,7 +576,7 @@ function traceCausalAncestors(recalledEvents, eventIndex, maxDepth = CONFIG.CAUS
|
||||
}
|
||||
}
|
||||
|
||||
for (const r of recalledEvents || []) {
|
||||
for (const r of eventHits || []) {
|
||||
const rid = r?.event?.id;
|
||||
if (!rid) continue;
|
||||
for (const cid of (r.event?.causedBy || [])) {
|
||||
@@ -520,6 +599,13 @@ function traceCausalAncestors(recalledEvents, eventIndex, maxDepth = CONFIG.CAUS
|
||||
// 辅助函数
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
/**
|
||||
* 获取最近消息
|
||||
* @param {object[]} chat - 聊天记录
|
||||
* @param {number} count - 消息数量
|
||||
* @param {boolean} excludeLastAi - 是否排除最后的 AI 消息
|
||||
* @returns {object[]} 最近消息
|
||||
*/
|
||||
function getLastMessages(chat, count = 4, excludeLastAi = false) {
|
||||
if (!chat?.length) return [];
|
||||
|
||||
@@ -532,6 +618,13 @@ function getLastMessages(chat, count = 4, excludeLastAi = false) {
|
||||
return messages.slice(-count);
|
||||
}
|
||||
|
||||
/**
|
||||
* 构建查询文本
|
||||
* @param {object[]} chat - 聊天记录
|
||||
* @param {number} count - 消息数量
|
||||
* @param {boolean} excludeLastAi - 是否排除最后的 AI 消息
|
||||
* @returns {string} 查询文本
|
||||
*/
|
||||
export function buildQueryText(chat, count = 2, excludeLastAi = false) {
|
||||
if (!chat?.length) return '';
|
||||
|
||||
@@ -551,6 +644,14 @@ export function buildQueryText(chat, count = 2, excludeLastAi = false) {
|
||||
// 主函数
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
/**
|
||||
* 执行记忆召回
|
||||
* @param {string} queryText - 查询文本
|
||||
* @param {object[]} allEvents - 所有事件(L2)
|
||||
* @param {object} vectorConfig - 向量配置
|
||||
* @param {object} options - 选项
|
||||
* @returns {Promise<object>} 召回结果
|
||||
*/
|
||||
export async function recallMemory(queryText, allEvents, vectorConfig, options = {}) {
|
||||
const T0 = performance.now();
|
||||
const { chat, name1 } = getContext();
|
||||
@@ -559,8 +660,16 @@ export async function recallMemory(queryText, allEvents, vectorConfig, options =
|
||||
const metrics = createMetrics();
|
||||
|
||||
if (!allEvents?.length) {
|
||||
metrics.l0.needRecall = false;
|
||||
return { events: [], chunks: [], causalEvents: [], focusEntities: [], elapsed: 0, logText: 'No events.', metrics };
|
||||
metrics.anchor.needRecall = false;
|
||||
return {
|
||||
events: [],
|
||||
evidenceChunks: [],
|
||||
causalChain: [],
|
||||
focusEntities: [],
|
||||
elapsed: 0,
|
||||
logText: 'No events.',
|
||||
metrics,
|
||||
};
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════
|
||||
@@ -587,11 +696,11 @@ export async function recallMemory(queryText, allEvents, vectorConfig, options =
|
||||
|
||||
const focusEntities = removeUserNameFromFocus(expansion.focus, name1);
|
||||
|
||||
metrics.l0.needRecall = true;
|
||||
metrics.l0.focusEntities = focusEntities;
|
||||
metrics.l0.queries = expansion.queries || [];
|
||||
metrics.l0.queryExpansionTime = Math.round(performance.now() - T_QE_Start);
|
||||
metrics.timing.queryExpansion = metrics.l0.queryExpansionTime;
|
||||
metrics.anchor.needRecall = true;
|
||||
metrics.anchor.focusEntities = focusEntities;
|
||||
metrics.anchor.queries = expansion.queries || [];
|
||||
metrics.anchor.queryExpansionTime = Math.round(performance.now() - T_QE_Start);
|
||||
metrics.timing.queryExpansion = metrics.anchor.queryExpansionTime;
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════
|
||||
// Step 2: 向量化查询
|
||||
@@ -604,54 +713,70 @@ export async function recallMemory(queryText, allEvents, vectorConfig, options =
|
||||
} catch (e) {
|
||||
xbLog.error(MODULE_ID, '向量化失败', e);
|
||||
metrics.timing.total = Math.round(performance.now() - T0);
|
||||
return { events: [], chunks: [], causalEvents: [], focusEntities, elapsed: metrics.timing.total, logText: 'Embedding failed.', metrics };
|
||||
return {
|
||||
events: [],
|
||||
evidenceChunks: [],
|
||||
causalChain: [],
|
||||
focusEntities,
|
||||
elapsed: metrics.timing.total,
|
||||
logText: 'Embedding failed.',
|
||||
metrics,
|
||||
};
|
||||
}
|
||||
|
||||
if (!queryVector?.length) {
|
||||
metrics.timing.total = Math.round(performance.now() - T0);
|
||||
return { events: [], chunks: [], causalEvents: [], focusEntities, elapsed: metrics.timing.total, logText: 'Empty query vector.', metrics };
|
||||
return {
|
||||
events: [],
|
||||
evidenceChunks: [],
|
||||
causalChain: [],
|
||||
focusEntities,
|
||||
elapsed: metrics.timing.total,
|
||||
logText: 'Empty query vector.',
|
||||
metrics,
|
||||
};
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════
|
||||
// Step 3: L0 检索
|
||||
// Step 3: Anchor (L0) 检索
|
||||
// ═══════════════════════════════════════════════════════════════════════
|
||||
|
||||
const T_L0_Start = performance.now();
|
||||
const T_Anchor_Start = performance.now();
|
||||
|
||||
const { atoms: l0Atoms, floors: l0Floors } = await searchL0(queryVector, vectorConfig, metrics);
|
||||
const { hits: anchorHits, floors: anchorFloors } = await recallAnchors(queryVector, vectorConfig, metrics);
|
||||
|
||||
metrics.timing.l0Search = Math.round(performance.now() - T_L0_Start);
|
||||
metrics.timing.anchorSearch = Math.round(performance.now() - T_Anchor_Start);
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════
|
||||
// Step 4: L3 拉取 + L1 粗筛 + Rerank
|
||||
// Step 4: Evidence (L1) 拉取 + 粗筛 + Rerank
|
||||
// ═══════════════════════════════════════════════════════════════════════
|
||||
|
||||
const T_L3_Start = performance.now();
|
||||
const T_Evidence_Start = performance.now();
|
||||
|
||||
const rerankQuery = buildRerankQuery(expansion, lastMessages, pendingUserMessage);
|
||||
const chunks = await getChunksFromL0Floors(l0Floors, l0Atoms, queryVector, rerankQuery, metrics);
|
||||
const evidenceChunks = await pullEvidenceByFloors(anchorFloors, anchorHits, queryVector, rerankQuery, metrics);
|
||||
|
||||
metrics.timing.l3Retrieval = Math.round(performance.now() - T_L3_Start);
|
||||
metrics.timing.evidenceRetrieval = Math.round(performance.now() - T_Evidence_Start);
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════
|
||||
// Step 5: L2 独立检索
|
||||
// Step 5: Event (L2) 独立检索
|
||||
// ═══════════════════════════════════════════════════════════════════════
|
||||
|
||||
const T_L2_Start = performance.now();
|
||||
const T_Event_Start = performance.now();
|
||||
|
||||
const eventResults = await searchL2Events(queryVector, allEvents, vectorConfig, focusEntities, metrics);
|
||||
const eventHits = await recallEvents(queryVector, allEvents, vectorConfig, focusEntities, metrics);
|
||||
|
||||
metrics.timing.l2Retrieval = Math.round(performance.now() - T_L2_Start);
|
||||
metrics.timing.eventRetrieval = Math.round(performance.now() - T_Event_Start);
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════
|
||||
// Step 6: 因果链追溯
|
||||
// ═══════════════════════════════════════════════════════════════════════
|
||||
|
||||
const eventIndex = buildEventIndex(allEvents);
|
||||
const { results: causalMap, maxDepth: causalMaxDepth } = traceCausalAncestors(eventResults, eventIndex);
|
||||
const { results: causalMap, maxDepth: causalMaxDepth } = traceCausation(eventHits, eventIndex);
|
||||
|
||||
const recalledIdSet = new Set(eventResults.map(x => x?.event?.id).filter(Boolean));
|
||||
const causalEvents = causalMap
|
||||
const recalledIdSet = new Set(eventHits.map(x => x?.event?.id).filter(Boolean));
|
||||
const causalChain = causalMap
|
||||
.filter(x => x?.event?.id && !recalledIdSet.has(x.event.id))
|
||||
.map(x => ({
|
||||
event: x.event,
|
||||
@@ -661,11 +786,11 @@ export async function recallMemory(queryText, allEvents, vectorConfig, options =
|
||||
chainFrom: x.chainFrom,
|
||||
}));
|
||||
|
||||
if (metrics.l2.byRecallType) {
|
||||
metrics.l2.byRecallType.causal = causalEvents.length;
|
||||
if (metrics.event.byRecallType) {
|
||||
metrics.event.byRecallType.causal = causalChain.length;
|
||||
}
|
||||
metrics.l2.causalChainDepth = causalMaxDepth;
|
||||
metrics.l2.causalEventsCount = causalEvents.length;
|
||||
metrics.event.causalChainDepth = causalMaxDepth;
|
||||
metrics.event.causalCount = causalChain.length;
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════
|
||||
// 完成
|
||||
@@ -673,24 +798,24 @@ export async function recallMemory(queryText, allEvents, vectorConfig, options =
|
||||
|
||||
metrics.timing.total = Math.round(performance.now() - T0);
|
||||
|
||||
metrics.l2.entityNames = focusEntities;
|
||||
metrics.l2.entitiesLoaded = focusEntities.length;
|
||||
metrics.event.entityNames = focusEntities;
|
||||
metrics.event.entitiesUsed = focusEntities.length;
|
||||
|
||||
console.group('%c[Recall v4]', 'color: #7c3aed; font-weight: bold');
|
||||
console.group('%c[Recall v5]', 'color: #7c3aed; font-weight: bold');
|
||||
console.log(`Elapsed: ${metrics.timing.total}ms`);
|
||||
console.log(`Query Expansion: focus=[${expansion.focus.join(', ')}]`);
|
||||
console.log(`L0: ${l0Atoms.length} atoms → ${l0Floors.size} floors`);
|
||||
console.log(`L3: ${metrics.l3.l1Total || 0} L1 → ${metrics.l3.l1AfterCoarse || 0} 粗筛 → ${chunks.length} final`);
|
||||
if (metrics.l3.rerankApplied) {
|
||||
console.log(`L3 Rerank: ${metrics.l3.beforeRerank} → ${metrics.l3.afterRerank} (${metrics.l3.rerankTime}ms)`);
|
||||
console.log(`Anchors: ${anchorHits.length} hits → ${anchorFloors.size} floors`);
|
||||
console.log(`Evidence: ${metrics.evidence.chunkTotal || 0} L1 → ${metrics.evidence.chunkAfterCoarse || 0} coarse → ${evidenceChunks.length} final`);
|
||||
if (metrics.evidence.rerankApplied) {
|
||||
console.log(`Evidence Rerank: ${metrics.evidence.beforeRerank} → ${metrics.evidence.afterRerank} (${metrics.evidence.rerankTime}ms)`);
|
||||
}
|
||||
console.log(`L2: ${eventResults.length} events, ${causalEvents.length} causal`);
|
||||
console.log(`Events: ${eventHits.length} hits, ${causalChain.length} causal`);
|
||||
console.groupEnd();
|
||||
|
||||
return {
|
||||
events: eventResults,
|
||||
causalEvents,
|
||||
chunks,
|
||||
events: eventHits,
|
||||
causalChain,
|
||||
evidenceChunks,
|
||||
expansion,
|
||||
focusEntities,
|
||||
elapsed: metrics.timing.total,
|
||||
|
||||
Reference in New Issue
Block a user