from __future__ import annotations
from dataclasses import dataclass, field
from datetime import datetime, timezone
from typing import TYPE_CHECKING, Dict, Any, Optional
import math
from .binary_hdv import BinaryHDV
from .config import get_config
if TYPE_CHECKING:
from .provenance import ProvenanceRecord
@dataclass
class MemoryNode:
    """
    Holographic memory neuron (Phase 3.0+).

    Uses BinaryHDV for efficient storage and computation.
    Phase 4.3: Temporal Recall - supports episodic chaining and time-based indexing.
    Phase 5.0: Trust & Provenance (Agent 1) and Adaptive Temporal Decay (Agent 2).
    """

    id: str
    hdv: BinaryHDV
    content: str  # Original text/data
    metadata: Dict[str, Any] = field(default_factory=dict)
    created_at: datetime = field(default_factory=lambda: datetime.now(timezone.utc))
    last_accessed: datetime = field(default_factory=lambda: datetime.now(timezone.utc))

    # Phase 3.0: Tiering & LTP
    tier: str = "hot"  # "hot", "warm", "cold"
    access_count: int = 1
    ltp_strength: float = 0.5  # Current retrieval strength

    # Legacy Free Energy signals (mapped to importance)
    epistemic_value: float = 0.0  # Reduces uncertainty?
    pragmatic_value: float = 0.0  # Helps achieve goals?

    # Phase 4.3: Episodic Chaining - links to temporally adjacent memories
    previous_id: Optional[str] = None  # UUID of the memory created immediately before this one

    # Phase 5.0 — Agent 1: Trust & Provenance
    provenance: Optional["ProvenanceRecord"] = field(default=None, repr=False)

    # Phase 5.0 — Agent 2: Adaptive Temporal Decay
    # Per-memory stability: S_i = S_base * (1 + k * access_count)
    # Starts at 1.0; increases logarithmically on access.
    stability: float = 1.0
    review_candidate: bool = False  # Set by ForgettingCurveManager when near decay threshold

    def access(self, update_weights: bool = True) -> None:
        """Retrieve memory (reconsolidation).

        Updates ``last_accessed``; when ``update_weights`` is True, also bumps
        ``access_count``, recalculates LTP from the new count, grows the
        per-memory stability, and applies the legacy epistemic reinforcement.
        """
        self.last_accessed = datetime.now(timezone.utc)
        if update_weights:
            self.access_count += 1
            # LTP strength is recalculated from the updated access count
            # rather than decayed incrementally.
            self.calculate_ltp()
            # Phase 5.0: stability grows logarithmically with access count so
            # frequently-accessed memories resist decay; never below baseline 1.0.
            # (Uses the module-level `math` import.)
            self.stability = max(1.0, 1.0 + math.log1p(self.access_count) * 0.5)
            # Legacy reinforcement: +1% per access, clamped to 1.0.
            self.epistemic_value = min(self.epistemic_value * 1.01, 1.0)

    def calculate_ltp(self) -> float:
        """
        Calculate Long-Term Potentiation (LTP) strength.

        Formula: S = I * log(1 + A) * e^(-lambda * T)
        where I = importance, A = access count, T = age in days.

        Updates and returns ``self.ltp_strength`` (unclamped; it may grow
        without bound).

        NOTE(review): values above ``config.ltp.permanence_threshold`` are not
        treated specially yet — a future change may pin such memories against
        decay.
        """
        config = get_config()
        # I = Importance: legacy free-energy signals, floored at the configured default.
        importance = max(
            config.ltp.initial_importance,
            (self.epistemic_value + self.pragmatic_value) / 2,
        )
        # A = Access count (log-compressed so early accesses matter most).
        access_factor = math.log1p(self.access_count)
        # T = Time since creation (days); exponential decay.
        decay = math.exp(-config.ltp.decay_lambda * self.age_days())
        self.ltp_strength = importance * access_factor * decay
        return self.ltp_strength

    def get_free_energy_score(self) -> float:
        """
        Legacy score, now aliased to LTP strength for compatibility.

        Always recalculates so callers get a fresh value.
        """
        return self.calculate_ltp()

    def age_days(self) -> float:
        """Age of memory in days (for decay calculations); timezone-aware."""
        delta = datetime.now(timezone.utc) - self.created_at
        return delta.total_seconds() / 86400.0

    @property
    def unix_timestamp(self) -> int:
        """Unix timestamp (seconds since epoch) for Qdrant indexing."""
        return int(self.created_at.timestamp())

    @property
    def iso_date(self) -> str:
        """ISO 8601 date string for human-readable time metadata."""
        return self.created_at.isoformat()

    def age_seconds(self) -> float:
        """Age of memory in seconds (for fine-grained chrono-weighting)."""
        delta = datetime.now(timezone.utc) - self.created_at
        return delta.total_seconds()

    def __lt__(self, other: object) -> bool:
        """Sort by id for stable, deterministic ordering.

        Use ``key=`` with an attribute getter when LTP-based ordering is
        needed (e.g. in priority queues). Returns ``NotImplemented`` for
        non-MemoryNode operands so Python can try the reflected comparison.
        """
        if not isinstance(other, MemoryNode):
            return NotImplemented
        return self.id < other.id
|