Refine diffusion graph channels and drop legacy who compatibility

This commit is contained in:
2026-02-13 15:56:22 +08:00
parent 9ba120364c
commit 6aa1547d6f
6 changed files with 110 additions and 180 deletions

View File

@@ -20,7 +20,9 @@
// 阶段 5: Lexical Retrieval + Dense-Gated Event Merge
// 阶段 6: Floor W-RRF Fusion + Rerank + L1 配对
// 阶段 7: L1 配对组装(L0 → top-1 AI L1 + top-1 USER L1)
// 阶段 8: Causation Trace
// 阶段 7.5: PPR Diffusion
// 阶段 8: L0 → L2 反向查找(后置,基于最终 l0Selected)
// 阶段 9: Causation Trace
// ═══════════════════════════════════════════════════════════════════════════
import { getAllEventVectors, getChunksByFloors, getMeta, getChunkVectorsByIds } from '../storage/chunk-store.js';
@@ -1114,6 +1116,7 @@ export async function recallMemory(allEvents, vectorConfig, options = {}) {
const eventIndex = buildEventIndex(allEvents);
let lexicalEventCount = 0;
let lexicalEventFilteredByDense = 0;
let l0LinkedCount = 0;
const focusSetForLexical = new Set((bundle.focusEntities || []).map(normalize));
for (const eid of lexicalResult.eventIds) {
@@ -1149,46 +1152,6 @@ export async function recallMemory(allEvents, vectorConfig, options = {}) {
lexicalEventCount++;
}
// ═══════════════════════════════════════════════════════════════════
// 阶段 5.5: L0 → L2 反向查找
// 已召回的 L0 楼层落在某 L2 事件范围内,但该 L2 自身未被召回
// ═══════════════════════════════════════════════════════════════════
const recalledL0Floors = new Set(anchorHits.map(h => h.floor));
let l0LinkedCount = 0;
for (const event of allEvents) {
if (existingEventIds.has(event.id)) continue;
const range = parseFloorRange(event.summary);
if (!range) continue;
let hasOverlap = false;
for (const floor of recalledL0Floors) {
if (floor >= range.start && floor <= range.end) {
hasOverlap = true;
break;
}
}
if (!hasOverlap) continue;
// 实体分类:与所有路径统一标准
const participants = (event.participants || []).map(p => normalize(p));
const hasEntityMatch = focusSetForLexical.size > 0
&& participants.some(p => focusSetForLexical.has(p));
const evVec = eventVectorMap.get(event.id);
const sim = evVec?.length ? cosineSimilarity(queryVector_v1, evVec) : 0;
eventHits.push({
event,
similarity: sim,
_recallType: hasEntityMatch ? 'DIRECT' : 'RELATED',
});
existingEventIds.add(event.id);
l0LinkedCount++;
}
if (metrics) {
metrics.lexical.eventFilteredByDense = lexicalEventFilteredByDense;
@@ -1196,14 +1159,10 @@ export async function recallMemory(allEvents, vectorConfig, options = {}) {
metrics.event.byRecallType.lexical = lexicalEventCount;
metrics.event.selected += lexicalEventCount;
}
if (l0LinkedCount > 0) {
metrics.event.byRecallType.l0Linked = l0LinkedCount;
metrics.event.selected += l0LinkedCount;
}
}
xbLog.info(MODULE_ID,
`Lexical: chunks=${lexicalResult.chunkIds.length} events=${lexicalResult.eventIds.length} mergedEvents=+${lexicalEventCount} filteredByDense=${lexicalEventFilteredByDense} l0Linked=+${l0LinkedCount} (${lexTime}ms)`
`Lexical: chunks=${lexicalResult.chunkIds.length} events=${lexicalResult.eventIds.length} mergedEvents=+${lexicalEventCount} filteredByDense=${lexicalEventFilteredByDense} floorFiltered=${metrics.lexical.floorFilteredByDense || 0} (${lexTime}ms)`
);
// ═══════════════════════════════════════════════════════════════════
@@ -1248,7 +1207,56 @@ export async function recallMemory(allEvents, vectorConfig, options = {}) {
metrics.timing.diffusion = metrics.diffusion?.time || 0;
// ═══════════════════════════════════════════════════════════════════
// 阶段 7: Causation Trace
// Stage 8: L0 → L2 反向查找(后置,基于最终 l0Selected)
// ═══════════════════════════════════════════════════════════════════
const recalledL0Floors = new Set(l0Selected.map(x => x.floor));
for (const event of allEvents) {
if (existingEventIds.has(event.id)) continue;
const range = parseFloorRange(event.summary);
if (!range) continue;
let hasOverlap = false;
for (const floor of recalledL0Floors) {
if (floor >= range.start && floor <= range.end) {
hasOverlap = true;
break;
}
}
if (!hasOverlap) continue;
// Dense similarity 门槛(与 Lexical Event 对齐)
const evVec = eventVectorMap.get(event.id);
const sim = evVec?.length ? cosineSimilarity(queryVector_v1, evVec) : 0;
if (sim < CONFIG.LEXICAL_EVENT_DENSE_MIN) continue;
// 实体分类:与所有路径统一标准
const participants = (event.participants || []).map(p => normalize(p));
const hasEntityMatch = focusSetForLexical.size > 0
&& participants.some(p => focusSetForLexical.has(p));
eventHits.push({
event,
similarity: sim,
_recallType: hasEntityMatch ? 'DIRECT' : 'RELATED',
});
existingEventIds.add(event.id);
l0LinkedCount++;
}
if (metrics && l0LinkedCount > 0) {
metrics.event.byRecallType.l0Linked = l0LinkedCount;
metrics.event.selected += l0LinkedCount;
}
xbLog.info(MODULE_ID,
`L0-linked events: ${recalledL0Floors.size} floors → ${l0LinkedCount} events linked (sim≥${CONFIG.LEXICAL_EVENT_DENSE_MIN})`
);
// ═══════════════════════════════════════════════════════════════════
// 阶段 9: Causation Trace
// ═══════════════════════════════════════════════════════════════════
const { results: causalMap, maxDepth: causalMaxDepth } = traceCausation(eventHits, eventIndex);
@@ -1288,7 +1296,7 @@ export async function recallMemory(allEvents, vectorConfig, options = {}) {
console.log(`Fusion (floor, weighted): dense=${metrics.fusion.denseFloors} lex=${metrics.fusion.lexFloors} → cap=${metrics.fusion.afterCap} (${metrics.fusion.time}ms)`);
console.log(`Floor Rerank: ${metrics.evidence.beforeRerank || 0}${metrics.evidence.floorsSelected || 0} floors → L0=${metrics.evidence.l0Collected || 0} (${metrics.evidence.rerankTime || 0}ms)`);
console.log(`L1: ${metrics.evidence.l1Pulled || 0} pulled → ${metrics.evidence.l1Attached || 0} attached (${metrics.evidence.l1CosineTime || 0}ms)`);
console.log(`Events: ${eventHits.length} hits, ${causalChain.length} causal`);
console.log(`Events: ${eventHits.length} hits (l0Linked=+${l0LinkedCount}), ${causalChain.length} causal`);
console.log(`Diffusion: ${metrics.diffusion?.seedCount || 0} seeds → ${metrics.diffusion?.pprActivated || 0} activated → ${metrics.diffusion?.finalCount || 0} final (${metrics.diffusion?.time || 0}ms)`);
console.groupEnd();