| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
|
|
| use crate::tensor::Tensor; |
| use crate::train::TrainingExample; |
| #[cfg(test)] |
| use crate::train::TrainingTarget; |
| use crate::gate_training::TrainingSignal; |
|
|
| |
| |
|
|
| |
| |
| |
|
|
| |
| |
| |
| |
| |
| |
/// Online Elastic Weight Consolidation (EWC) state: a diagonal Fisher
/// information estimate plus a reference weight snapshot, used to penalize
/// drift away from previously-learned weights during continual training.
pub struct OnlineEWC {
    /// Diagonal Fisher information estimate, one entry per parameter.
    pub fisher: Vec<f32>,
    /// Weight snapshot the penalty anchors to (set by `snapshot_weights`).
    pub reference_weights: Vec<f32>,
    /// Penalty strength multiplier.
    pub lambda: f32,
    /// EMA decay used by `update_fisher` (0.9 by default).
    pub fisher_decay: f32,
    /// False until a snapshot is taken; `penalty` returns zeros while false.
    pub active: bool,
    /// Number of `update_fisher` calls folded into `fisher`.
    pub update_count: u64,
}
|
|
| impl OnlineEWC { |
| pub fn new(total_params: usize, lambda: f32) -> Self { |
| Self { |
| fisher: vec![0.0; total_params], |
| reference_weights: vec![0.0; total_params], |
| lambda, |
| fisher_decay: 0.9, |
| active: false, |
| update_count: 0, |
| } |
| } |
|
|
| |
| pub fn penalty(&self, current_weights: &[f32]) -> (f32, Vec<f32>) { |
| if !self.active { |
| return (0.0, vec![0.0; current_weights.len()]); |
| } |
|
|
| let mut loss = 0.0f32; |
| let mut grads = vec![0.0f32; current_weights.len()]; |
|
|
| for i in 0..current_weights.len().min(self.fisher.len()) { |
| let diff = current_weights[i] - self.reference_weights[i]; |
| loss += self.fisher[i] * diff * diff; |
| grads[i] = 2.0 * self.lambda * self.fisher[i] * diff; |
| } |
|
|
| (0.5 * self.lambda * loss, grads) |
| } |
|
|
| |
| pub fn update_fisher(&mut self, gradients: &[f32]) { |
| let decay = self.fisher_decay; |
| for i in 0..self.fisher.len().min(gradients.len()) { |
| let new_fisher = gradients[i] * gradients[i]; |
| self.fisher[i] = decay * self.fisher[i] + (1.0 - decay) * new_fisher; |
| } |
| self.update_count += 1; |
| } |
|
|
| |
| pub fn snapshot_weights(&mut self, weights: &[f32]) { |
| self.reference_weights = weights.to_vec(); |
| self.active = true; |
| } |
|
|
| pub fn memory_bytes(&self) -> usize { |
| (self.fisher.len() + self.reference_weights.len()) * 4 |
| } |
|
|
| pub fn save_state(&self) -> (Vec<f32>, Vec<f32>, f32, u64) { |
| (self.fisher.clone(), self.reference_weights.clone(), self.lambda, self.update_count) |
| } |
|
|
| pub fn load_state(&mut self, fisher: Vec<f32>, ref_weights: Vec<f32>, lambda: f32, count: u64) { |
| self.fisher = fisher; |
| self.reference_weights = ref_weights; |
| self.lambda = lambda; |
| self.update_count = count; |
| self.active = !self.reference_weights.is_empty() |
| && self.reference_weights.iter().any(|&w| w != 0.0); |
| } |
|
|
| |
| pub fn fisher_as_tensor(&self) -> Tensor { |
| Tensor::from_data(self.fisher.clone(), vec![self.fisher.len()]) |
| .unwrap_or_else(|_| Tensor::zeros(&[self.fisher.len()])) |
| } |
|
|
| |
| |
| pub fn save_to_file(&self, path: &std::path::Path) -> std::io::Result<()> { |
| use std::io::Write; |
| let mut f = std::fs::File::create(path)?; |
| let count = self.fisher.len() as u32; |
| f.write_all(&count.to_le_bytes())?; |
| f.write_all(&self.lambda.to_le_bytes())?; |
| f.write_all(&self.update_count.to_le_bytes())?; |
| for &v in &self.fisher { |
| f.write_all(&v.to_le_bytes())?; |
| } |
| for &v in &self.reference_weights { |
| f.write_all(&v.to_le_bytes())?; |
| } |
| f.flush() |
| } |
|
|
| |
| pub fn load_from_file(path: &std::path::Path) -> std::io::Result<Self> { |
| use std::io::Read; |
| let mut f = std::fs::File::open(path)?; |
| let mut buf4 = [0u8; 4]; |
| let mut buf8 = [0u8; 8]; |
|
|
| f.read_exact(&mut buf4)?; |
| let count = u32::from_le_bytes(buf4) as usize; |
| f.read_exact(&mut buf4)?; |
| let lambda = f32::from_le_bytes(buf4); |
| f.read_exact(&mut buf8)?; |
| let update_count = u64::from_le_bytes(buf8); |
|
|
| let mut fisher = vec![0.0f32; count]; |
| for v in &mut fisher { |
| f.read_exact(&mut buf4)?; |
| *v = f32::from_le_bytes(buf4); |
| } |
| let mut reference_weights = vec![0.0f32; count]; |
| for v in &mut reference_weights { |
| f.read_exact(&mut buf4)?; |
| *v = f32::from_le_bytes(buf4); |
| } |
|
|
| let active = reference_weights.iter().any(|&w| w != 0.0); |
| Ok(Self { |
| fisher, |
| reference_weights, |
| lambda, |
| fisher_decay: 0.9, |
| active, |
| update_count, |
| }) |
| } |
| } |
|
|
| |
| |
| |
|
|
| |
| |
| |
| |
| |
/// Replay buffer with a bounded ring of regular examples plus an unbounded
/// "FP-locked" list of high-weight examples that are never evicted and are
/// included in every sample (see `add` / `sample`).
pub struct ExperienceReplay {
    /// Ring buffer of regular examples, bounded by `capacity`.
    buffer: Vec<TrainingExample>,
    /// High-weight (>= 4.0) examples; never evicted, always sampled.
    fp_locked: Vec<TrainingExample>,
    /// Maximum size of the regular ring buffer.
    capacity: usize,
    /// Next overwrite index once `buffer` is full.
    write_pos: usize,
    /// Lifetime count of `add` calls across both pools.
    total_added: u64,
}
|
|
| impl ExperienceReplay { |
| pub fn new(capacity: usize) -> Self { |
| Self { |
| buffer: Vec::with_capacity(capacity), |
| fp_locked: Vec::new(), |
| capacity, |
| write_pos: 0, |
| total_added: 0, |
| } |
| } |
|
|
| |
| pub fn add(&mut self, example: TrainingExample) { |
| if example.weight >= 4.0 { |
| |
| self.fp_locked.push(example); |
| } else { |
| if self.buffer.len() < self.capacity { |
| self.buffer.push(example); |
| } else { |
| self.buffer[self.write_pos] = example; |
| } |
| self.write_pos = (self.write_pos + 1) % self.capacity; |
| } |
| self.total_added += 1; |
| } |
|
|
| |
| pub fn sample(&self, n: usize, seed: u64) -> Vec<TrainingExample> { |
| let mut samples = Vec::new(); |
|
|
| |
| samples.extend(self.fp_locked.iter().cloned()); |
|
|
| |
| if !self.buffer.is_empty() { |
| let count = n.min(self.buffer.len()); |
| let mut state = seed; |
| for _ in 0..count { |
| state = xorshift64(state); |
| let idx = (state as usize) % self.buffer.len(); |
| samples.push(self.buffer[idx].clone()); |
| } |
| } |
|
|
| samples |
| } |
|
|
| pub fn len(&self) -> usize { |
| self.buffer.len() + self.fp_locked.len() |
| } |
|
|
| pub fn regular_len(&self) -> usize { |
| self.buffer.len() |
| } |
|
|
| pub fn fp_locked_len(&self) -> usize { |
| self.fp_locked.len() |
| } |
|
|
| pub fn is_empty(&self) -> bool { |
| self.buffer.is_empty() && self.fp_locked.is_empty() |
| } |
|
|
| pub fn fill_ratio(&self) -> f32 { |
| self.buffer.len() as f32 / self.capacity.max(1) as f32 |
| } |
|
|
| pub fn total_added(&self) -> u64 { |
| self.total_added |
| } |
| } |
|
|
/// One step of the classic xorshift64 PRNG. A zero state would be a fixed
/// point, so it is remapped to a non-zero seed first.
fn xorshift64(state: u64) -> u64 {
    let mut x = if state == 0 { 0xdeadbeef } else { state };
    x ^= x << 13;
    x ^= x >> 7;
    x ^= x << 17;
    x
}
|
|
| |
| |
| |
|
|
| |
| |
/// Thresholds for turning a gate confidence score into an action.
#[derive(Debug, Clone)]
pub struct ConfidenceConfig {
    /// Scores at or above this are allowed automatically.
    pub allow_threshold: f32,
    /// Scores at or below this are blocked automatically.
    pub block_threshold: f32,
}

impl Default for ConfidenceConfig {
    /// Defaults: allow at >= 0.8, block at <= 0.2, ask in between.
    fn default() -> Self {
        ConfidenceConfig {
            allow_threshold: 0.8,
            block_threshold: 0.2,
        }
    }
}

/// Outcome of applying the thresholds; each variant carries the raw score.
#[derive(Debug, Clone, PartialEq)]
pub enum ConfidenceDecision {
    /// Confidence >= allow threshold.
    AutoAllow(f32),
    /// Confidence <= block threshold.
    AutoBlock(f32),
    /// Confidence in the gray zone between the thresholds.
    AskUser(f32),
}

impl ConfidenceConfig {
    /// Map a confidence score to a decision. Both thresholds are inclusive.
    pub fn decide(&self, confidence: f32) -> ConfidenceDecision {
        match confidence {
            c if c >= self.allow_threshold => ConfidenceDecision::AutoAllow(c),
            c if c <= self.block_threshold => ConfidenceDecision::AutoBlock(c),
            c => ConfidenceDecision::AskUser(c),
        }
    }
}
|
|
| |
| |
| |
| |
|
|
| |
| |
| |
| pub fn signal_to_tokens(signal: &TrainingSignal) -> Vec<usize> { |
| use crate::tokenizer::{Tokenizer, TOOL_ID, GATE_ID, SPF_ID}; |
|
|
| let tokenizer = Tokenizer::new(); |
| let mut tokens: Vec<usize> = Vec::new(); |
|
|
| |
| tokens.push(TOOL_ID as usize); |
| tokens.extend(tokenizer.encode(&signal.tool).iter().map(|&id| id as usize)); |
|
|
| |
| tokens.push(GATE_ID as usize); |
| tokens.extend(tokenizer.encode(&signal.source).iter().map(|&id| id as usize)); |
|
|
| |
| if !signal.preceding_tools.is_empty() { |
| tokens.push(SPF_ID as usize); |
| for prev_tool in &signal.preceding_tools { |
| tokens.extend(tokenizer.encode(prev_tool).iter().map(|&id| id as usize)); |
| tokens.push(SPF_ID as usize); |
| } |
| } |
|
|
| |
| |
| let freq_token = 6_usize; |
| for _ in 0..signal.recent_call_count.min(10) { |
| tokens.push(freq_token); |
| } |
|
|
| tokens |
| } |
|
|
| |
| |
|
|
| |
| |
| |
|
|
| |
| |
| |
| |
| |
| |
| |
| pub fn handle_brain_sync( |
| frame: &crate::framing::Frame, |
| peer_key: &str, |
| _transformer: &Option<std::sync::Arc<std::sync::RwLock<crate::transformer_tools::TransformerState>>>, |
| ) -> Option<crate::framing::Frame> { |
| let payload = match frame.payload_str() { |
| Ok(s) => s, |
| Err(e) => { |
| eprintln!("[SPF-BRAIN-SYNC] Invalid UTF-8 from {}: {}", &peer_key[..8.min(peer_key.len())], e); |
| let err = serde_json::json!({ |
| "type": "brain_sync_error", |
| "error": "Invalid UTF-8 payload", |
| "from": peer_key, |
| }); |
| return Some(crate::framing::Frame::new( |
| crate::framing::StreamType::BrainSync, |
| err.to_string().into_bytes(), |
| )); |
| } |
| }; |
|
|
| let data: serde_json::Value = match serde_json::from_str(payload) { |
| Ok(v) => v, |
| Err(e) => { |
| eprintln!("[SPF-BRAIN-SYNC] Invalid JSON from {}: {}", &peer_key[..8.min(peer_key.len())], e); |
| let err = serde_json::json!({ |
| "type": "brain_sync_error", |
| "error": format!("JSON parse: {}", e), |
| "from": peer_key, |
| }); |
| return Some(crate::framing::Frame::new( |
| crate::framing::StreamType::BrainSync, |
| err.to_string().into_bytes(), |
| )); |
| } |
| }; |
|
|
| let sync_type = data.get("type").and_then(|v| v.as_str()).unwrap_or("unknown"); |
| let signal_count = data.get("signals") |
| .and_then(|v| v.as_array()) |
| .map(|a| a.len()) |
| .unwrap_or(0); |
|
|
| eprintln!("[SPF-BRAIN-SYNC] Received {} from {}: {} signals", |
| sync_type, &peer_key[..8.min(peer_key.len())], signal_count); |
|
|
| |
| |
| let mut signals_processed: usize = 0; |
| if let Some(signals_array) = data.get("signals").and_then(|v| v.as_array()) { |
| let db_path = crate::paths::spf_root().join("LIVE/LMDB5/LMDB5.DB"); |
| if let Ok(db) = crate::agent_state::AgentStateDb::open(&db_path) { |
| for signal_json in signals_array { |
| if let Ok(signal) = serde_json::from_value::< |
| crate::gate_training::TrainingSignal |
| >(signal_json.clone()) { |
| if let Ok(json_str) = serde_json::to_string(&signal) { |
| let tlog_key = format!("tlog:{}", signal.timestamp); |
| let _ = db.set_state(&tlog_key, &json_str); |
| signals_processed += 1; |
| } |
| } |
| } |
| } |
| } |
|
|
| let ack = serde_json::json!({ |
| "type": "brain_sync_ack", |
| "sync_type": sync_type, |
| "signals_received": signal_count, |
| "signals_processed": signals_processed, |
| "training_ready": signals_processed > 0, |
| "from": peer_key, |
| "status": "accepted" |
| }); |
| Some(crate::framing::Frame::new( |
| crate::framing::StreamType::BrainSync, |
| ack.to_string().into_bytes(), |
| )) |
| } |
|
|
| |
| |
| |
|
|
#[cfg(test)]
mod tests {
    use super::*;

    // ───────────────── OnlineEWC ─────────────────

    // Before any snapshot the penalty must be a no-op (zero loss, zero grads).
    #[test]
    fn test_ewc_penalty_inactive() {
        let ewc = OnlineEWC::new(100, 1000.0);
        let (loss, grads) = ewc.penalty(&vec![1.0; 100]);
        assert_eq!(loss, 0.0);
        assert!(grads.iter().all(|&g| g == 0.0));
    }

    #[test]
    fn test_ewc_penalty_active() {
        let mut ewc = OnlineEWC::new(4, 1.0);
        ewc.reference_weights = vec![1.0, 2.0, 3.0, 4.0];
        ewc.fisher = vec![1.0, 1.0, 1.0, 1.0];
        ewc.active = true;

        // At the reference point the penalty vanishes exactly.
        let (loss, _) = ewc.penalty(&[1.0, 2.0, 3.0, 4.0]);
        assert_eq!(loss, 0.0);

        // Positive drift produces positive loss and a positive gradient.
        let (loss, grads) = ewc.penalty(&[2.0, 3.0, 4.0, 5.0]);
        assert!(loss > 0.0);
        assert!(grads[0] > 0.0);
    }

    // One EMA step from zero: fisher[i] = (1 - 0.9) * g_i² = 0.1 * 0.01.
    #[test]
    fn test_ewc_fisher_update() {
        let mut ewc = OnlineEWC::new(4, 1000.0);
        ewc.update_fisher(&[0.1, 0.2, 0.3, 0.4]);
        assert_eq!(ewc.update_count, 1);
        assert!((ewc.fisher[0] - 0.1 * 0.01).abs() < 1e-6);
    }

    // 5M params -> two vectors * 5M entries * 4 bytes each = 40 MB.
    #[test]
    fn test_ewc_memory() {
        let ewc = OnlineEWC::new(5_000_000, 1000.0);
        assert_eq!(ewc.memory_bytes(), 40_000_000);
    }

    // ───────────────── ExperienceReplay ─────────────────

    #[test]
    fn test_replay_basic() {
        let mut replay = ExperienceReplay::new(5);
        for i in 0..3 {
            replay.add(TrainingExample {
                input_tokens: vec![i],
                target: TrainingTarget::GateDecision(1.0),
                weight: 1.0,
            });
        }
        assert_eq!(replay.len(), 3);
        assert_eq!(replay.regular_len(), 3);
        assert_eq!(replay.fp_locked_len(), 0);
    }

    // The ring buffer caps regular examples at capacity but keeps counting.
    #[test]
    fn test_replay_overflow() {
        let mut replay = ExperienceReplay::new(3);
        for i in 0..5 {
            replay.add(TrainingExample {
                input_tokens: vec![i],
                target: TrainingTarget::GateDecision(1.0),
                weight: 1.0,
            });
        }
        assert_eq!(replay.regular_len(), 3);
        assert_eq!(replay.total_added(), 5);
    }

    #[test]
    fn test_replay_fp_locked() {
        let mut replay = ExperienceReplay::new(3);

        // weight >= 4.0 routes the example into the pinned FP pool.
        replay.add(TrainingExample {
            input_tokens: vec![99],
            target: TrainingTarget::GateDecision(-1.0),
            weight: 4.0,
        });

        // Flood the ring buffer well past its capacity of 3.
        for i in 0..10 {
            replay.add(TrainingExample {
                input_tokens: vec![i],
                target: TrainingTarget::GateDecision(1.0),
                weight: 1.0,
            });
        }

        assert_eq!(replay.fp_locked_len(), 1);
        assert_eq!(replay.regular_len(), 3);

        // Sampling must always include the FP-locked example.
        let samples = replay.sample(2, 42);
        let fp_count = samples.iter().filter(|s| s.weight >= 4.0).count();
        assert!(fp_count >= 1, "FP-locked example must be in every sample");
    }

    // The FP pool is unbounded: three high-weight adds all survive even
    // though the regular capacity is only 2.
    #[test]
    fn test_replay_fp_never_evicted() {
        let mut replay = ExperienceReplay::new(2);

        for _ in 0..3 {
            replay.add(TrainingExample {
                input_tokens: vec![0],
                target: TrainingTarget::GateDecision(-1.0),
                weight: 6.0,
            });
        }

        assert_eq!(replay.fp_locked_len(), 3);
        assert_eq!(replay.regular_len(), 0);
    }

    // ───────────────── ConfidenceConfig ─────────────────

    // The allow threshold (0.8) is inclusive.
    #[test]
    fn test_confidence_auto_allow() {
        let conf = ConfidenceConfig::default();
        assert_eq!(conf.decide(0.95), ConfidenceDecision::AutoAllow(0.95));
        assert_eq!(conf.decide(0.8), ConfidenceDecision::AutoAllow(0.8));
    }

    // The block threshold (0.2) is inclusive.
    #[test]
    fn test_confidence_auto_block() {
        let conf = ConfidenceConfig::default();
        assert_eq!(conf.decide(0.1), ConfidenceDecision::AutoBlock(0.1));
        assert_eq!(conf.decide(0.2), ConfidenceDecision::AutoBlock(0.2));
    }

    // Everything strictly between the thresholds defers to the user.
    #[test]
    fn test_confidence_ask_user() {
        let conf = ConfidenceConfig::default();
        assert_eq!(conf.decide(0.5), ConfidenceDecision::AskUser(0.5));
        assert_eq!(conf.decide(0.3), ConfidenceDecision::AskUser(0.3));
        assert_eq!(conf.decide(0.79), ConfidenceDecision::AskUser(0.79));
    }

    // ───────────────── signal_to_tokens ─────────────────

    #[test]
    fn test_signal_to_tokens() {
        let signal = TrainingSignal {
            tool: "spf_read".into(), source: "stdio".into(), allowed: true,
            status: "ok".into(), duration_ms: 0, timestamp: "t".into(),
            user_override: false, false_positive: false,
            recent_call_count: 3, preceding_tools: vec!["spf_write".into()],
            evil_score: 0.0,
        };
        let tokens = signal_to_tokens(&signal);
        // Ids 4/5/7 appear to be TOOL_ID/GATE_ID/SPF_ID from crate::tokenizer
        // — confirm against that module if these constants ever change.
        assert_eq!(tokens[0], 4);
        assert!(tokens.contains(&5));
        assert!(tokens.contains(&7));
        // Frequency marker (token 6) appears once per recent call (3 here).
        let freq_count = tokens.iter().filter(|&&t| t == 6).count();
        assert_eq!(freq_count, 3);
    }

    #[test]
    fn test_signal_to_tokens_no_context() {
        let signal = TrainingSignal {
            tool: "test".into(), source: "http".into(), allowed: false,
            status: "error".into(), duration_ms: 0, timestamp: "t".into(),
            user_override: false, false_positive: false,
            recent_call_count: 0, preceding_tools: vec![],
            evil_score: 0.0,
        };
        let tokens = signal_to_tokens(&signal);
        assert_eq!(tokens[0], 4);
        assert!(tokens.contains(&5));
        // No preceding tools -> no SPF marker; zero calls -> no freq tokens.
        assert!(!tokens.contains(&7));
        assert!(!tokens.contains(&6));
    }
}
|
|