| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
|
|
| use std::collections::HashMap; |
| use std::sync::{Arc, LazyLock, Mutex, RwLock}; |
| use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH}; |
|
|
| use crate::agent_state::{AgentStateDb, MemoryType}; |
| use crate::gate_training::{GateTrainingCollector, TrainingSignal}; |
| use crate::paths::spf_root; |
| use crate::transformer_tools::TransformerState; |
| use crate::http::ServerState; |
| use serde_json::Value; |
|
|
| |
| |
| |
| |
| |
| |
| |
|
|
/// Per-file counter of `Read` tool calls, updated by `flint_process_result`.
/// Repeated reads of the same path are a signal the file is "hot".
static READ_TRACKER: LazyLock<Mutex<HashMap<String, u32>>> =
    LazyLock::new(|| Mutex::new(HashMap::new()));
|
|
| |
| |
| |
|
|
| |
/// How often the router loop wakes to drain queued gate-training signals.
const SIGNAL_DRAIN_INTERVAL: Duration = Duration::from_secs(30);

/// How often the knowledge drop folder is scanned for new files to index.
const DROP_CHECK_INTERVAL: Duration = Duration::from_secs(60);

/// How often the tiered memory promotion pass (and metrics snapshot) runs.
const TIER_LOOP_INTERVAL: Duration = Duration::from_secs(1800);

/// Signals scoring below this relevance are dropped by `route_signals`,
/// unless they are FP-flagged or carry a non-trivial evil score.
const MIN_RELEVANCE: f64 = 0.1;

/// Fraction of a memory tier promoted per pass (highest-scored first).
const PROMOTE_TOP_PERCENT: f64 = 0.20;

/// If more than this fraction of a tier is "active" (score > 0.5),
/// the entire tier is promoted instead of just the top slice.
const PROMOTE_ALL_THRESHOLD: f64 = 0.50;
|
|
| |
| |
| |
|
|
| |
| |
| |
| |
| |
| |
| |
| pub fn start_memory_router( |
| collector: Arc<GateTrainingCollector>, |
| transformer: Option<Arc<RwLock<TransformerState>>>, |
| state: Arc<ServerState>, |
| ) { |
| std::thread::Builder::new() |
| .name("flint-memory-router".into()) |
| .spawn(move || { |
| run_router(collector, transformer, state); |
| }) |
| .expect("[FLINT-MR] failed to spawn memory router thread"); |
| } |
|
|
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
|
|
| |
| |
/// Context captured by `flint_intercept` before a tool call is dispatched
/// and consumed by `flint_process_result` after it completes.
pub struct FlintContext {
    /// Combined brain-search + build-anchor hits to inject into the result.
    pub brain_hits: String,
    /// RFC 3339 UTC timestamp taken at intercept time.
    pub timestamp: String,
    /// True for bypassed (status/introspection) tools — skip all processing.
    pub skip_processing: bool,
    /// Value of the `file_path` argument, when the call supplied one.
    pub target_file: Option<String>,
    /// Compact `k=v, k=v` rendering of the call args (values capped at 200 chars).
    pub args_summary: String,
}
|
|
| |
| |
/// Pure status/introspection tools that carry no memorable content.
/// Calls to these skip FLINT interception and result processing entirely.
const BYPASS_TOOLS: &[&str] = &[
    "spf_status", "spf_calculate", "spf_session",
    "spf_config_paths", "spf_config_stats",
    "spf_tmp_list", "spf_tmp_stats", "spf_tmp_active", "spf_tmp_get",
    "spf_agent_stats", "spf_agent_session_info", "spf_agent_context",
    "spf_brain_status", "spf_brain_list", "spf_brain_list_docs",
    "spf_pool_status", "spf_mesh_status", "spf_mesh_peers",
    "spf_transformer_status", "spf_transformer_metrics",
    "spf_rag_status", "spf_rag_bandwidth_status", "spf_rag_list_feeds",
    "spf_rag_list_gathered", "spf_rag_pending_searches",
    "spf_chat_rooms", "spf_chat_history",
];
|
|
| |
| |
| |
| |
| |
| pub fn flint_intercept(tool: &str, args: &Value) -> FlintContext { |
| let timestamp = chrono::Utc::now().to_rfc3339(); |
|
|
| |
| if BYPASS_TOOLS.contains(&tool) { |
| return FlintContext { |
| brain_hits: String::new(), |
| timestamp, |
| skip_processing: true, |
| target_file: None, |
| args_summary: String::new(), |
| }; |
| } |
|
|
| |
| let target_file = args.get("file_path") |
| .and_then(|v| v.as_str()) |
| .map(String::from); |
|
|
| |
| let query = build_intercept_query(tool, args); |
|
|
| |
| let brain_hits = if query.len() > 5 { |
| crate::brain_local::brain_search(&query, "default", 3) |
| } else { |
| String::new() |
| }; |
|
|
| |
| |
| |
| let anchor_hits = if matches!(tool, "Write" | "Edit" | "spf_write" | "spf_edit") { |
| if let Some(ref fp) = target_file { |
| build_anchor_context(fp) |
| } else { |
| String::new() |
| } |
| } else { |
| String::new() |
| }; |
|
|
| |
| let combined_hits = match (brain_hits.is_empty(), anchor_hits.is_empty()) { |
| (true, true) => String::new(), |
| (true, false) => anchor_hits, |
| (false, true) => brain_hits, |
| (false, false) => format!("{}\n\n{}", brain_hits, anchor_hits), |
| }; |
|
|
| |
| let args_summary: String = args.as_object() |
| .map(|obj| { |
| obj.iter() |
| .map(|(k, v)| { |
| let val: String = match v { |
| Value::String(s) => s.chars().take(200).collect(), |
| other => { |
| let s = other.to_string(); |
| s.chars().take(200).collect() |
| } |
| }; |
| format!("{}={}", k, val) |
| }) |
| .collect::<Vec<_>>() |
| .join(", ") |
| }) |
| .unwrap_or_default(); |
|
|
| FlintContext { |
| brain_hits: combined_hits, |
| timestamp, |
| skip_processing: false, |
| target_file, |
| args_summary, |
| } |
| } |
|
|
| |
| |
| |
| |
| |
| |
| |
/// Post-process a tool result: harvest memories, log the exchange, archive
/// large outputs to the brain, and (optionally) compress the returned text.
///
/// Returns a clone of `result`, possibly with:
///   - `"text"` replaced by a compressed digest (see `compress_result`), and
///   - `"_flint_context"` injected with brain hits gathered at intercept time.
///
/// `ctx` is the `FlintContext` produced by `flint_intercept` for this call;
/// `agent_db` is the optional LMDB-backed agent memory store. All storage
/// here is best-effort: failures are logged (or ignored) but never block
/// the result from being returned.
pub fn flint_process_result(
    tool: &str,
    result: &Value,
    ctx: &FlintContext,
    agent_db: &Option<AgentStateDb>,
) -> Value {
    // Bypassed tools (status/introspection) skip all processing.
    if ctx.skip_processing {
        return result.clone();
    }

    // Gate-blocked results pass through untouched.
    if result.get("_blocked").and_then(|v| v.as_bool()).unwrap_or(false) {
        return result.clone();
    }

    // The textual payload; everything below keys off this.
    let text = result.get("text").and_then(|v| v.as_str()).unwrap_or("");

    // 0.0..=1.0 relevance derived from output size and per-tool weight.
    let relevance = score_result(tool, text);

    // High-relevance results become Working-tier memories (first 500 chars).
    if relevance > 0.5 {
        if let Some(ref db) = agent_db {
            let summary: String = text.chars().take(500).collect();
            let tags = vec![
                format!("tool:{}", tool),
                "source:flint_dispatch".to_string(),
                format!("relevance:{:.2}", relevance),
            ];
            if let Err(e) = db.create_memory(&summary, MemoryType::Working, tags, "flint_dispatch") {
                eprintln!("[FLINT] result memory store error: {}", e);
            }
        }
    }

    // Record the (tool, args, result) exchange under a timestamped `lang:`
    // state key for later analysis. Best-effort; write errors ignored.
    if !ctx.args_summary.is_empty() && !text.is_empty() {
        if let Some(ref db) = agent_db {
            let result_summary: String = text.chars().take(500).collect();
            let pair = format!("{}|{}|{}", tool, ctx.args_summary, result_summary);
            let lang_key = format!("lang:{}", ctx.timestamp);
            let _ = db.set_state(&lang_key, &pair);
        }
    }

    // Archive large outputs in full to the brain so a later digest (from
    // `compress_result`) can point the agent at the complete content.
    if text.len() > 2000 {
        // First 10 bytes of the RFC 3339 timestamp = the date (ASCII-safe);
        // `min` guards against a shorter-than-expected timestamp.
        let ts_date = &ctx.timestamp[..ctx.timestamp.len().min(10)];
        let title = format!("result:{}:{}", tool, ts_date);
        crate::brain_local::brain_store(text, &title, "flint_results");
    }

    // Track repeated reads of the same file (hot-file signal).
    if tool == "Read" {
        if let Some(ref fp) = ctx.target_file {
            if let Ok(mut tracker) = READ_TRACKER.lock() {
                let count = tracker.entry(fp.clone()).or_insert(0);
                *count += 1;
            }
        }
    }

    // Possibly shrink the returned text (head/tail or digest).
    let compressed = compress_result(tool, text);

    let mut output = result.clone();
    if let Some(obj) = output.as_object_mut() {
        // Only swap in the compressed form if it actually saved space.
        if compressed.len() < text.len() {
            obj.insert("text".to_string(), Value::String(compressed));
        }
        // Inject intercept-time brain context, when any was found.
        if let Some(ctx_val) = build_context_injection(&ctx.brain_hits) {
            obj.insert("_flint_context".to_string(), ctx_val);
        }
    }

    output
}
|
|
| |
| fn build_intercept_query(tool: &str, args: &Value) -> String { |
| |
| let key_arg = if let Some(fp) = args.get("file_path").and_then(|v| v.as_str()) { |
| fp.to_string() |
| } else if let Some(q) = args.get("query").and_then(|v| v.as_str()) { |
| q.to_string() |
| } else if let Some(p) = args.get("pattern").and_then(|v| v.as_str()) { |
| p.to_string() |
| } else if let Some(cmd) = args.get("command").and_then(|v| v.as_str()) { |
| cmd.chars().take(100).collect() |
| } else if let Some(text) = args.get("text").and_then(|v| v.as_str()) { |
| text.chars().take(100).collect() |
| } else if let Some(url) = args.get("url").and_then(|v| v.as_str()) { |
| url.to_string() |
| } else { |
| String::new() |
| }; |
|
|
| if key_arg.is_empty() { |
| tool.to_string() |
| } else { |
| format!("{} {}", tool, key_arg) |
| } |
| } |
|
|
| |
| |
| |
/// Score a tool result's relevance in [0.0, 1.0].
///
/// Combines a log-scaled size component (byte length) with a per-tool
/// weight. Outputs under 20 bytes score zero outright.
fn score_result(tool: &str, text: &str) -> f64 {
    let byte_len = text.len();

    // Trivially small outputs carry no signal.
    if byte_len < 20 {
        return 0.0;
    }

    // ln(len)/10 caps at 1.0 around ~22 KB of output.
    let size_component = ((byte_len as f64).ln() / 10.0).min(1.0);

    // Per-tool weight: retrieval tools score highest, writes lowest.
    // (Chat/mesh tools share the 0.5 default weight.)
    let weight = if matches!(
        tool,
        "Read" | "spf_brain_search" | "spf_brain_recall" | "spf_brain_context"
    ) {
        0.8
    } else if matches!(tool, "Grep" | "Glob") {
        0.7
    } else if matches!(
        tool,
        "Bash" | "spf_web_fetch" | "spf_web_search" | "spf_rag_smart_search"
    ) {
        0.6
    } else if matches!(tool, "Write" | "Edit" | "spf_brain_store" | "spf_brain_index") {
        0.4
    } else {
        0.5
    };

    (size_component * weight).min(1.0)
}
|
|
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
|
|
| |
| |
/// Compress a tool result before it is handed back to the model.
///
/// Pass-through cases: `Read` results, brain/voice/chat/mesh tools, anything
/// produced during the first 24h of process uptime (warm-up), and small
/// outputs (< 500 bytes). Medium outputs (<= 5000 bytes, > 15 lines) are
/// reduced to their first 8 and last 3 lines; larger ones become a short
/// digest pointing at the full copy archived in the brain's
/// `flint_results` collection (see `flint_process_result`).
fn compress_result(tool: &str, text: &str) -> String {
    // File reads must come back verbatim — the agent edits against them.
    if tool == "Read" {
        return text.to_string();
    }

    // Brain / voice / chat / mesh traffic is semantic content — never digest.
    let passthrough = matches!(tool,
        "spf_brain_search" | "spf_brain_recall" | "spf_brain_context" |
        "spf_brain_store" | "spf_brain_index" | "spf_brain_list" |
        "spf_brain_get_doc" | "spf_brain_list_docs" |
        "spf_voice_mode" | "spf_voice_call" | "spf_voice_team" |
        "spf_chat_send" | "spf_chat_history" | "spf_chat_rooms" |
        "spf_mesh_call" | "spf_mesh_status" | "spf_mesh_peers"
    );
    if passthrough {
        return text.to_string();
    }

    // Warm-up: for the first 24h after first call, return results whole.
    static GATE_START: std::sync::LazyLock<std::time::Instant> =
        std::sync::LazyLock::new(std::time::Instant::now);
    if GATE_START.elapsed() < std::time::Duration::from_secs(86400) {
        return text.to_string();
    }

    let byte_len = text.len();

    // Small results pass through untouched.
    if byte_len < 500 {
        return text.to_string();
    }

    // Medium results: keep head/tail lines if there are enough to trim.
    if byte_len <= 5000 {
        let rows: Vec<&str> = text.lines().collect();
        let total_rows = rows.len();
        if total_rows <= 15 {
            return text.to_string();
        }
        let head = rows[..8].join("\n");
        let tail = rows[total_rows - 3..].join("\n");
        return format!(
            "{}\n\n[FLINT: {} lines, {} bytes — showing head/tail]\n\n{}",
            head, total_rows, byte_len, tail
        );
    }

    // Large results: tiny digest; the full text was archived to the brain
    // by `flint_process_result` (which triggers at > 2000 bytes).
    let head: String = text.chars().take(200).collect();
    let char_total = text.chars().count();
    let tail: String = text.chars().skip(char_total.saturating_sub(100)).collect();
    let row_total = text.lines().count();

    format!(
        "{}\n\n[FLINT DIGEST: {} bytes, {} lines — original stored in brain collection=\"flint_results\".\n Use spf_brain_recall(collection=\"flint_results\") to retrieve full content.]\n\n{}",
        head, byte_len, row_total, tail
    )
}
|
|
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
|
|
| |
| |
| fn build_context_injection(brain_hits: &str) -> Option<Value> { |
| |
| if brain_hits.is_empty() |
| || brain_hits.starts_with("No results") |
| || brain_hits.starts_with("Brain not initialized") |
| || brain_hits.starts_with("Brain search error") |
| { |
| return None; |
| } |
|
|
| |
| let truncated: String = brain_hits.chars().take(2000).collect(); |
|
|
| Some(serde_json::json!({ |
| "hits": truncated, |
| "source": "flint_intercept" |
| })) |
| } |
|
|
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
|
|
| |
| |
| |
| |
| |
| pub fn build_anchor_context(target_file: &str) -> String { |
| |
| let filename = target_file |
| .rsplit('/') |
| .next() |
| .unwrap_or(target_file); |
|
|
| |
| let file_ctx = crate::brain_local::brain_search( |
| &format!("file:{} module functions structs", filename), |
| "spf_source", |
| 3, |
| ); |
|
|
| |
| if file_ctx.starts_with("No results") |
| || file_ctx.starts_with("Brain not initialized") |
| || file_ctx.starts_with("Brain search error") |
| { |
| return String::new(); |
| } |
|
|
| |
| let dep_ctx = crate::brain_local::brain_search( |
| &format!("imports dependencies types used by {}", filename), |
| "spf_source", |
| 3, |
| ); |
|
|
| let mut out = format!("BUILD ANCHOR (brain — spf_source):\n{}", file_ctx); |
|
|
| if !dep_ctx.starts_with("No results") |
| && !dep_ctx.starts_with("Brain not initialized") |
| && !dep_ctx.starts_with("Brain search error") |
| { |
| out.push_str(&format!("\n\nCONNECTED TYPES:\n{}", dep_ctx)); |
| } |
|
|
| |
| if out.len() > 3000 { |
| out.truncate(3000); |
| out.push_str("\n[FLINT: anchor context truncated at 3000 chars]"); |
| } |
|
|
| out |
| } |
|
|
| |
| |
| |
|
|
/// Main loop of the memory-router background thread.
///
/// On startup, restores the gate collector's persisted confusion matrix and
/// FP-locked signals from LMDB5. Then loops forever, waking every
/// `SIGNAL_DRAIN_INTERVAL` to:
///   - route drained gate signals to durable storage (`route_signals`),
///   - periodically scan the knowledge drop folder (`DROP_CHECK_INTERVAL`),
///   - periodically run tiered promotion + write metrics (`TIER_LOOP_INTERVAL`),
///   - auto-train the transformer (if attached) hourly, or sooner once 16+
///     pending `tlog:` training entries have accumulated.
fn run_router(
    collector: Arc<GateTrainingCollector>,
    transformer: Option<Arc<RwLock<TransformerState>>>,
    state: Arc<ServerState>,
) {
    // Restore persisted training state so restarts don't lose progress.
    let cm_db_path = spf_root().join("LIVE/LMDB5/LMDB5.DB");
    if let Ok(db) = AgentStateDb::open(&cm_db_path) {
        // Confusion matrix accumulated across prior runs.
        if let Ok(Some(saved)) = db.get_state_typed::<crate::gate_training::ConfusionMatrix>("training:confusion_matrix") {
            let total = saved.total();
            collector.restore_matrix(saved);
            eprintln!("[FLINT-MR] Restored confusion matrix: {} decisions", total);
        }
        // False-positive-locked signals kept for retraining.
        if let Ok(Some(fp_signals)) = db.get_state_typed::<Vec<crate::gate_training::TrainingSignal>>("training:fp_locked") {
            let count = fp_signals.len();
            collector.restore_fp_locked(fp_signals);
            eprintln!("[FLINT-MR] Restored {} FP-locked signals", count);
        }
    }

    // Independent timers for the three slower cadences.
    let mut last_drop_check = Instant::now();
    let mut last_tier_run = Instant::now();
    let mut last_train = Instant::now();

    loop {
        std::thread::sleep(SIGNAL_DRAIN_INTERVAL);

        // Drain signals every wakeup — they accumulate the fastest.
        route_signals(&collector);

        // Index any new drop-folder knowledge files.
        if last_drop_check.elapsed() >= DROP_CHECK_INTERVAL {
            check_drop_folder();
            last_drop_check = Instant::now();
        }

        // Tier promotion + metrics snapshot on the slow cadence.
        if last_tier_run.elapsed() >= TIER_LOOP_INTERVAL {
            run_tiered_promotion();
            write_flint_metrics(&state);
            last_tier_run = Instant::now();
        }

        // Auto-train: hourly, or as soon as 16+ `tlog:` entries are pending.
        if transformer.is_some() {
            // Count pending training-log entries by scanning state keys.
            let tlog_count = {
                let db_path = spf_root().join("LIVE/LMDB5/LMDB5.DB");
                let mut count = 0usize;
                if let Ok(db) = AgentStateDb::open(&db_path) {
                    if let Ok(keys) = db.list_state_keys() {
                        count = keys.iter().filter(|k| k.starts_with("tlog:")).count();
                    }
                }
                count
            };
            if last_train.elapsed() >= Duration::from_secs(3600) || tlog_count >= 16 {
                // Config reloaded each time so edits take effect without restart.
                let config = crate::config::TransformerConfig::load(
                    &spf_root().join("LIVE/CONFIG/transformer.json"),
                )
                .unwrap_or_default();
                let args = serde_json::json!({"batch_size": config.batch_size});
                let result = crate::transformer_tools::handle_train(&transformer, &args, &config);
                eprintln!("[FLINT-MR] Auto-train (tlog: {}): {}", tlog_count, result);
                last_train = Instant::now();
            }
        }
    }
}
|
|
| |
| |
| |
| |
| |
| |
| |
| |
|
|
| fn write_flint_metrics(state: &Arc<ServerState>) { |
| let agent_db = match state.agent_db.as_ref() { |
| Some(db) => db, |
| None => return, |
| }; |
|
|
| let working = agent_db.get_by_type(MemoryType::Working).map(|v| v.len()).unwrap_or(0); |
| let fact = agent_db.get_by_type(MemoryType::Fact).map(|v| v.len()).unwrap_or(0); |
| let pinned = agent_db.get_by_type(MemoryType::Pinned).map(|v| v.len()).unwrap_or(0); |
| let (total_mem, sessions, state_keys, tags) = agent_db.db_stats().unwrap_or((0, 0, 0, 0)); |
|
|
| let timestamp = chrono::Utc::now().to_rfc3339(); |
| let content = format!( |
| "# FLINT METRICS — Auto-generated by FLINT router\n\ |
| # Updated: {}\n\n\ |
| Working memories: {}\n\ |
| Fact memories: {}\n\ |
| Pinned memories: {}\n\ |
| Total memories: {}\n\ |
| Sessions: {}\n\ |
| State keys: {}\n\ |
| Tags: {}\n", |
| timestamp, working, fact, pinned, total_mem, sessions, state_keys, tags |
| ); |
|
|
| let path = spf_root() |
| .join("LIVE/PROJECTS/PROJECTS/FLINT_METRICS.txt") |
| .to_string_lossy() |
| .to_string(); |
|
|
| let args = serde_json::json!({ |
| "file_path": path, |
| "content": content, |
| }); |
|
|
| let resp = crate::dispatch::call( |
| state, |
| crate::dispatch::Source::Transformer { |
| role: "flint-router".into(), |
| model_id: "memory".into(), |
| }, |
| "Write", |
| &args, |
| ); |
|
|
| if resp.status != "ok" { |
| eprintln!("[FLINT-MR] metrics write: {}", resp.result); |
| } |
| } |
|
|
| |
| |
| |
| |
| |
| |
| |
|
|
| fn route_signals(collector: &Arc<GateTrainingCollector>) -> usize { |
| let signals = collector.drain_signals(); |
| if signals.is_empty() { |
| return 0; |
| } |
| let count = signals.len(); |
|
|
| |
| let db_path = spf_root().join("LIVE/LMDB5/LMDB5.DB"); |
| let agent_db = AgentStateDb::open(&db_path).ok(); |
|
|
| for signal in &signals { |
| |
| let relevance = score_signal(signal); |
|
|
| |
| if relevance < MIN_RELEVANCE && !signal.false_positive && signal.evil_score < 0.1 { |
| continue; |
| } |
|
|
| |
| let collection = route_collection(signal); |
|
|
| |
| let text = format_signal_text(signal, relevance); |
| let title = format!("gate:{}:{}", signal.tool, &signal.timestamp[..10]); |
|
|
| |
| write_brain_doc(&text, &title); |
|
|
| |
| |
| let store_result = crate::brain_local::brain_store(&text, &title, collection); |
|
|
| |
| { |
| static STORE_FAILURES: std::sync::atomic::AtomicU32 = std::sync::atomic::AtomicU32::new(0); |
| if store_result.contains("error") || store_result.contains("not initialized") { |
| let failures = STORE_FAILURES.fetch_add(1, std::sync::atomic::Ordering::Relaxed) + 1; |
| if failures >= 3 { |
| eprintln!("[FLINT-MR] brain_store failed {} consecutive times — expiring old memories", failures); |
| if let Some(ref db) = agent_db { |
| match db.expire_memories() { |
| Ok(n) => eprintln!("[FLINT-MR] expired {} old memories", n), |
| Err(e) => eprintln!("[FLINT-MR] expire failed: {}", e), |
| } |
| } |
| STORE_FAILURES.store(0, std::sync::atomic::Ordering::Relaxed); |
| } |
| } else { |
| STORE_FAILURES.store(0, std::sync::atomic::Ordering::Relaxed); |
| } |
| } |
|
|
| |
| |
| |
| |
| |
| |
| if let Some(ref db) = agent_db { |
| if let Ok(json) = serde_json::to_string(signal) { |
| let tlog_key = format!("tlog:{}", signal.timestamp); |
| if let Err(e) = db.set_state(&tlog_key, &json) { |
| eprintln!("[FLINT-MR] tlog persist error: {}", e); |
| } |
| } |
| } |
| } |
|
|
| |
| |
| let cm_db_path = spf_root().join("LIVE/LMDB5/LMDB5.DB"); |
| if let Ok(cm_db) = AgentStateDb::open(&cm_db_path) { |
| |
| let matrix = collector.confusion_matrix(); |
| if matrix.total() > 0 { |
| if let Err(e) = cm_db.set_state_typed("training:confusion_matrix", &matrix) { |
| eprintln!("[FLINT-MR] confusion matrix persist error: {}", e); |
| } |
| } |
| |
| let fp_locked = collector.get_fp_locked(); |
| if !fp_locked.is_empty() { |
| if let Err(e) = cm_db.set_state_typed("training:fp_locked", &fp_locked) { |
| eprintln!("[FLINT-MR] FP-locked persist error: {}", e); |
| } |
| } |
| } |
|
|
| count |
| } |
|
|
| |
| |
| |
| |
| |
| fn score_signal(signal: &TrainingSignal) -> f64 { |
| let weight = signal.weight() as f64; |
| let label_abs = signal.label().abs() as f64; |
| let evil = signal.evil_score as f64; |
| let repeat = (signal.recent_call_count as f64 * 0.1).min(1.0); |
|
|
| |
| if signal.evil_score > 0.4 || signal.false_positive { |
| return 1.0; |
| } |
|
|
| |
| if !signal.allowed { |
| return (weight * label_abs * 0.8 + evil + repeat).min(1.0); |
| } |
|
|
| |
| if signal.user_override { |
| return (weight * label_abs * 0.6 + repeat).min(1.0); |
| } |
|
|
| |
| (weight * label_abs * 0.3 + evil + repeat * 0.5).min(1.0) |
| } |
|
|
| |
| |
| |
| |
| fn route_collection(signal: &TrainingSignal) -> &'static str { |
| |
| if signal.evil_score > 0.4 || signal.false_positive || !signal.allowed { |
| return "flint_training"; |
| } |
| |
| if signal.user_override { |
| return "flint_episodic"; |
| } |
| |
| "flint_training" |
| } |
|
|
| |
| fn format_signal_text(signal: &TrainingSignal, relevance: f64) -> String { |
| format!( |
| "GATE SIGNAL | tool={} source={} allowed={} label={:.1} weight={:.1} evil={:.2} relevance={:.2} fp={} override={} | context=[{}] | ts={}", |
| signal.tool, |
| signal.source, |
| signal.allowed, |
| signal.label(), |
| signal.weight(), |
| signal.evil_score, |
| relevance, |
| signal.false_positive, |
| signal.user_override, |
| signal.preceding_tools.join(","), |
| signal.timestamp, |
| ) |
| } |
|
|
| |
| |
| fn write_brain_doc(text: &str, title: &str) { |
| let docs_dir = spf_root().join("LIVE/BRAIN/DOCS"); |
| if !docs_dir.exists() { |
| if let Err(e) = std::fs::create_dir_all(&docs_dir) { |
| eprintln!("[FLINT-MR] failed to create BRAIN/DOCS: {}", e); |
| return; |
| } |
| } |
|
|
| let ts = SystemTime::now() |
| .duration_since(UNIX_EPOCH) |
| .map(|d| d.as_millis()) |
| .unwrap_or(0); |
|
|
| |
| let hash: u64 = title |
| .bytes() |
| .fold(0u64, |acc, b| acc.wrapping_mul(31).wrapping_add(b as u64)); |
|
|
| let filename = format!("{:08x}{:08x}.txt", (ts & 0xFFFFFFFF) as u32, hash as u32); |
| let path = docs_dir.join(&filename); |
|
|
| if let Err(e) = std::fs::write(&path, text) { |
| eprintln!("[FLINT-MR] failed to write brain doc {}: {}", filename, e); |
| } |
| } |
|
|
| |
| |
| |
| |
| |
| |
| |
| |
|
|
| fn check_drop_folder() { |
| let drop_dir = spf_root().join("LIVE/TMP/stoneshell-brain/knowledge"); |
| if !drop_dir.exists() { |
| return; |
| } |
|
|
| |
| let indexed_marker = drop_dir.join(".indexed"); |
| let already_indexed: std::collections::HashSet<String> = |
| std::fs::read_to_string(&indexed_marker) |
| .unwrap_or_default() |
| .lines() |
| .filter(|l| !l.is_empty()) |
| .map(str::to_owned) |
| .collect(); |
|
|
| let entries = match std::fs::read_dir(&drop_dir) { |
| Ok(e) => e, |
| Err(_) => return, |
| }; |
|
|
| let mut newly_indexed: Vec<String> = Vec::new(); |
|
|
| for entry in entries.flatten() { |
| let path = entry.path(); |
| if !path.is_file() { |
| continue; |
| } |
|
|
| let name = path |
| .file_name() |
| .and_then(|n| n.to_str()) |
| .unwrap_or("") |
| .to_string(); |
|
|
| |
| if name.starts_with('.') { |
| continue; |
| } |
|
|
| |
| let ext = path.extension().and_then(|e| e.to_str()).unwrap_or(""); |
| if !matches!(ext, "md" | "txt" | "rs" | "json") { |
| continue; |
| } |
|
|
| if already_indexed.contains(&name) { |
| continue; |
| } |
|
|
| |
| let path_str = path.to_string_lossy().to_string(); |
| let result = crate::brain_local::brain_index_path(&path_str, "flint_knowledge"); |
| eprintln!("[FLINT-MR] indexed drop file '{}': {}", name, result); |
| newly_indexed.push(name); |
| } |
|
|
| if !newly_indexed.is_empty() { |
| |
| let mut all: Vec<String> = already_indexed.into_iter().collect(); |
| all.extend(newly_indexed); |
| let _ = std::fs::write(&indexed_marker, all.join("\n") + "\n"); |
| } |
| } |
|
|
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
|
|
| fn run_tiered_promotion() { |
| let db_path = spf_root().join("LIVE/LMDB5/LMDB5.DB"); |
| let db = match AgentStateDb::open(&db_path) { |
| Ok(d) => d, |
| Err(e) => { |
| eprintln!("[FLINT-MR] tiered promotion: cannot open LMDB5: {}", e); |
| return; |
| } |
| }; |
|
|
| |
| match db.expire_memories() { |
| Ok(n) => { |
| if n > 0 { |
| eprintln!("[FLINT-MR] expired {} stale memories", n); |
| } |
| } |
| Err(e) => eprintln!("[FLINT-MR] expire_memories error: {}", e), |
| } |
|
|
| |
| promote_tier(&db, MemoryType::Working, MemoryType::Fact, "1h→12h"); |
|
|
| |
| promote_tier(&db, MemoryType::Fact, MemoryType::Pinned, "12h→48h"); |
| } |
|
|
| |
| |
| |
| |
| fn promote_tier(db: &AgentStateDb, from: MemoryType, to: MemoryType, label: &str) { |
| let memories = match db.get_by_type(from) { |
| Ok(m) => m, |
| Err(e) => { |
| eprintln!("[FLINT-MR] get_by_type({:?}) error: {}", from, e); |
| return; |
| } |
| }; |
|
|
| if memories.is_empty() { |
| return; |
| } |
|
|
| |
| let mut scored: Vec<(f64, &crate::agent_state::MemoryEntry)> = memories |
| .iter() |
| .map(|m| { |
| let score = m.relevance * (1.0 + m.access_count as f64 * 0.1); |
| (score, m) |
| }) |
| .collect(); |
|
|
| |
| scored.sort_by(|a, b| b.0.partial_cmp(&a.0).unwrap_or(std::cmp::Ordering::Equal)); |
|
|
| |
| let active_count = scored.iter().filter(|(s, _)| *s > 0.5).count(); |
| let promote_all = active_count as f64 / scored.len() as f64 > PROMOTE_ALL_THRESHOLD; |
|
|
| let promote_n = if promote_all { |
| scored.len() |
| } else { |
| ((scored.len() as f64 * PROMOTE_TOP_PERCENT).ceil() as usize).max(1) |
| }; |
|
|
| let mut promoted = 0usize; |
|
|
| for (_, mem) in scored.iter().take(promote_n) { |
| |
| let mut new_tags = mem.tags.clone(); |
| new_tags.push(format!("promoted:{}", label)); |
|
|
| match db.create_memory(&mem.content, to, new_tags, &mem.source) { |
| Ok(_) => { |
| |
| let _ = db.forget(&mem.id); |
| promoted += 1; |
| } |
| Err(e) => eprintln!("[FLINT-MR] promote error ({}): {}", label, e), |
| } |
| } |
|
|
| if promoted > 0 { |
| eprintln!( |
| "[FLINT-MR] promoted {}/{} memories ({}{})", |
| promoted, |
| memories.len(), |
| label, |
| if promote_all { " — all active" } else { "" } |
| ); |
| } |
| } |
|
|