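//! FLINT transformer tools for the SPF MCP server: status, one-shot
//! inference, opt-in chat, manual gate training, metrics, and the FL-8
//! good/evil labeling tools that feed the gate training signal queue.
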
use serde_json::{json, Value};
use std::sync::{Arc, RwLock};

use crate::config::TransformerConfig;
use crate::agent_state::AgentStateDb;
use crate::paths::spf_root;

pub const FLINT_NAME: &str = "FLINT";
pub const FLINT_VERSION: &str = "0.1.0";

/// Mutable runtime state for a loaded FLINT transformer instance.
pub struct TransformerState {
    /// The transformer model itself.
    pub model: crate::transformer::SPFTransformer,
    /// Configuration the model was instantiated from.
    pub config: TransformerConfig,
    /// Monotonic counter of completed training steps.
    pub training_step: u64,
    /// Path of the most recent checkpoint; empty if none has been written.
    pub last_checkpoint: String,
    /// True while a training batch is in progress.
    pub is_training: bool,
    /// Running sum of per-batch average losses (divide by `batches_completed` for the mean).
    pub total_loss: f64,
    /// Number of training batches completed.
    pub batches_completed: u64,
    /// Fraction of gate decisions predicted correctly in the last batch.
    pub gate_alignment: f64,
    /// Model role, e.g. "writer" or "researcher".
    pub role: String,
    /// Optional in-process collector of gate training signals.
    pub collector: Option<std::sync::Arc<crate::gate_training::GateTrainingCollector>>,
    /// AdamW optimizer state; present only when online learning is enabled.
    pub optimizer: Option<crate::train::AdamW>,
    /// Whether the chat tool responds to messages.
    pub chat_enabled: bool,
}

impl TransformerState {
    /// Build a fresh, untrained state from `config` with the given role.
    pub fn from_config(config: &TransformerConfig, role: &str) -> Self {
        let model_config = crate::transformer::TransformerModelConfig {
            vocab_size: config.vocab_size,
            d_model: config.d_model,
            n_heads: config.n_heads,
            n_layers: config.n_layers,
            d_ff: config.d_ff,
            max_seq_len: config.max_seq_len,
            ln_eps: 1e-5,
        };

        let model = crate::transformer::SPFTransformer::new(model_config, 42);

        // Online learning needs per-parameter AdamW state sized to the model.
        let optimizer = if config.online_learning {
            let param_sizes: Vec<usize> = model.weights().iter().map(|w| w.numel()).collect();
            Some(crate::train::AdamW::new(
                crate::train::AdamWConfig { lr: config.learning_rate as f32, ..Default::default() },
                &param_sizes,
            ))
        } else {
            None
        };

        Self {
            model,
            config: config.clone(),
            training_step: 0,
            last_checkpoint: String::new(),
            is_training: false,
            total_loss: 0.0,
            batches_completed: 0,
            gate_alignment: 0.0,
            role: role.to_string(),
            collector: None,
            optimizer,
            chat_enabled: false,
        }
    }
}
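
// Construction sketch (mirrors the tests below): every handler in this module
// takes the state behind `Option<Arc<RwLock<TransformerState>>>`.
//
//     let state = TransformerState::from_config(&config, "writer");
//     let transformer = Some(Arc::new(RwLock::new(state)));
//     let status = handle_status(&transformer, &config);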

/// MCP tool definitions: five transformer tools plus the two FL-8
/// good/evil labeling tools.
pub fn tool_definitions() -> Vec<Value> {
    vec![
        json!({
            "name": "spf_transformer_status",
            "description": "Get FLINT transformer status (loaded, params, checkpoint, role)",
            "inputSchema": {
                "type": "object",
                "properties": {},
                "required": []
            }
        }),
        json!({
            "name": "spf_transformer_infer",
            "description": "Run FLINT inference: prompt → response. Returns generated tokens.",
            "inputSchema": {
                "type": "object",
                "properties": {
                    "prompt": {"type": "string", "description": "Input text prompt"},
                    "max_tokens": {"type": "integer", "description": "Max tokens to generate (default: 64)"},
                    "temperature": {"type": "number", "description": "Sampling temperature (default: from config)"}
                },
                "required": ["prompt"]
            }
        }),
        json!({
            "name": "spf_transformer_chat",
            "description": "Chat with FLINT (conversation tracking, multi-turn)",
            "inputSchema": {
                "type": "object",
                "properties": {
                    "message": {"type": "string", "description": "User message"},
                    "conversation_id": {"type": "string", "description": "Conversation ID (optional, auto-generated if omitted)"}
                },
                "required": ["message"]
            }
        }),
        json!({
            "name": "spf_transformer_train",
            "description": "Trigger FLINT manual training batch from accumulated gate signals",
            "inputSchema": {
                "type": "object",
                "properties": {
                    "batch_size": {"type": "integer", "description": "Override batch size (optional)"}
                },
                "required": []
            }
        }),
        json!({
            "name": "spf_transformer_metrics",
            "description": "Get FLINT learning metrics (loss, accuracy, gate alignment, training step)",
            "inputSchema": {
                "type": "object",
                "properties": {},
                "required": []
            }
        }),
        // FL-8: explicit good/evil labels supplied by the user.
        json!({
            "name": "spf_flint_train_evil",
            "description": "Mark a tool call as evil/harmful. Creates negative training signal for FLINT.",
            "inputSchema": {
                "type": "object",
                "properties": {
                    "tool": {"type": "string", "description": "Tool name that was evil"},
                    "reason": {"type": "string", "description": "Why this call was evil (optional)"}
                },
                "required": ["tool"]
            }
        }),
        json!({
            "name": "spf_flint_train_good",
            "description": "Mark a tool call as good/safe. Creates positive training signal for FLINT.",
            "inputSchema": {
                "type": "object",
                "properties": {
                    "tool": {"type": "string", "description": "Tool name that was good"},
                    "reason": {"type": "string", "description": "Why this call was good (optional)"}
                },
                "required": ["tool"]
            }
        }),
    ]
}
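
// Dispatch sketch (illustrative only; the real server wiring lives outside
// this module). `tool_name`, `args`, `transformer`, `config`, and
// `tokenizer_path` are assumed to come from the MCP request loop.
//
//     match tool_name {
//         "spf_transformer_status" => handle_status(&transformer, &config),
//         "spf_transformer_infer" => handle_infer(&transformer, &args, &config, tokenizer_path),
//         "spf_transformer_chat" => handle_chat(&transformer, &args, &config, tokenizer_path),
//         "spf_transformer_train" => handle_train(&transformer, &args, &config),
//         "spf_transformer_metrics" => handle_metrics(&transformer, &config),
//         "spf_flint_train_evil" => handle_train_evil(&args),
//         "spf_flint_train_good" => handle_train_good(&args),
//         _ => json!({"type": "text", "text": "ERROR: unknown tool"}),
//     }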

/// Report load state, config, and training counters as a text blob.
pub fn handle_status(
    transformer: &Option<Arc<RwLock<TransformerState>>>,
    config: &TransformerConfig,
) -> Value {
    match transformer {
        None => {
            json!({"type": "text", "text": format!(
                "FLINT: NOT LOADED\nVersion: {}\nEnabled: {}\nConfig: d_model={}, n_heads={}, n_layers={}, vocab={}\nEstimated params: {}\nEstimated memory: {}MB",
                FLINT_VERSION,
                config.enabled,
                config.d_model, config.n_heads, config.n_layers, config.vocab_size,
                config.estimated_params(),
                config.estimated_memory_bytes() / 1_000_000
            )})
        }
        Some(state_lock) => {
            let state = state_lock.read().unwrap();
            let avg_loss = if state.batches_completed > 0 {
                state.total_loss / state.batches_completed as f64
            } else {
                0.0
            };
            json!({"type": "text", "text": format!(
                "FLINT: LOADED v{} ({})\n\
                Role: {}\n\
                Training step: {}\n\
                Batches completed: {}\n\
                Avg loss: {:.6}\n\
                Gate alignment: {:.2}%\n\
                Currently training: {}\n\
                Last checkpoint: {}\n\
                Config: d_model={}, n_heads={}, n_layers={}, vocab={}\n\
                Online learning: {}\n\
                EWC lambda: {}",
                FLINT_VERSION,
                if state.is_training { "training" } else { "idle" },
                state.role,
                state.training_step,
                state.batches_completed,
                avg_loss,
                state.gate_alignment * 100.0,
                state.is_training,
                if state.last_checkpoint.is_empty() { "none" } else { &state.last_checkpoint },
                config.d_model, config.n_heads, config.n_layers, config.vocab_size,
                config.online_learning,
                config.ewc_lambda,
            )})
        }
    }
}

/// Run one-shot inference: tokenize the prompt, generate, decode.
pub fn handle_infer(
    transformer: &Option<Arc<RwLock<TransformerState>>>,
    args: &Value,
    config: &TransformerConfig,
    tokenizer_path: &str,
) -> Value {
    let state_lock = match transformer {
        Some(s) => s,
        None => return json!({"type": "text", "text": "ERROR: FLINT not loaded. Enable in transformer.json."}),
    };

    let prompt = match args.get("prompt").and_then(|v| v.as_str()) {
        Some(p) => p,
        None => return json!({"type": "text", "text": "ERROR: 'prompt' parameter required"}),
    };

    let max_tokens = args.get("max_tokens")
        .and_then(|v| v.as_u64())
        .unwrap_or(64) as usize;

    let temperature = args.get("temperature")
        .and_then(|v| v.as_f64())
        .unwrap_or(config.temperature);

    // Load the tokenizer from disk.
    let tokenizer = match crate::tokenizer::Tokenizer::load(tokenizer_path) {
        Ok(t) => t,
        Err(e) => return json!({"type": "text", "text": format!("ERROR: Failed to load tokenizer: {}", e)}),
    };

    // Encode the prompt.
    let input_ids = tokenizer.encode(prompt);
    if input_ids.is_empty() {
        return json!({"type": "text", "text": "ERROR: Empty input after tokenization"});
    }

    // Generation only needs a read lock.
    let state = state_lock.read().unwrap();

    let output_ids = match state.model.generate(&input_ids, max_tokens, temperature as f32, 42) {
        Ok(ids) => ids,
        Err(e) => return json!({"type": "text", "text": format!("ERROR: Generation failed: {}", e)}),
    };

    let output_text = tokenizer.decode(&output_ids);

    json!({"type": "text", "text": format!(
        "[FLINT] Input: {} ({} tokens)\nOutput: {} ({} tokens)\nTemperature: {:.2}",
        prompt, input_ids.len(),
        output_text, output_ids.len(),
        temperature
    )})
}
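
// Example invocation (hypothetical arguments, matching the inputSchema in
// tool_definitions above):
//
//     let args = json!({"prompt": "hello", "max_tokens": 32, "temperature": 0.8});
//     let reply = handle_infer(&transformer, &args, &config, tokenizer_path);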

/// Multi-turn chat: opt-in toggle, brain context retrieval, generation,
/// and episodic storage of the exchange.
pub fn handle_chat(
    transformer: &Option<Arc<RwLock<TransformerState>>>,
    args: &Value,
    config: &TransformerConfig,
    tokenizer_path: &str,
) -> Value {
    let state_lock = match transformer {
        Some(s) => s,
        None => return json!({"type": "text", "text": "ERROR: FLINT not loaded. Enable in transformer.json."}),
    };

    // Toggle request: {"enabled": true/false} flips chat mode and returns.
    if let Some(enabled) = args.get("enabled").and_then(|v| v.as_bool()) {
        let mut state = state_lock.write().unwrap();
        state.chat_enabled = enabled;
        return json!({"type": "text", "text": format!(
            "FLINT chat {}", if enabled { "ON — will respond to messages" } else { "OFF — silent mode" }
        )});
    }

    // Refuse to converse while chat is disabled.
    {
        let state = state_lock.read().unwrap();
        if !state.chat_enabled {
            return json!({"type": "text", "text":
                "FLINT chat is OFF. Use spf_transformer_chat with {\"enabled\": true} to activate."
            });
        }
    }

    let message = match args.get("message").and_then(|v| v.as_str()) {
        Some(m) => m,
        None => return json!({"type": "text", "text": "ERROR: 'message' parameter required"}),
    };

    let conversation_id = args.get("conversation_id")
        .and_then(|v| v.as_str())
        .unwrap_or("default");

    let tokenizer = match crate::tokenizer::Tokenizer::load(tokenizer_path) {
        Ok(t) => t,
        Err(e) => return json!({"type": "text", "text": format!("ERROR: Failed to load tokenizer: {}", e)}),
    };

    // Pull relevant episodic memory and knowledge for the prompt.
    let episodic = crate::brain_local::brain_context(message, "flint_episodic", 1000);
    let knowledge = crate::brain_local::brain_context(message, "flint_knowledge", 500);
    let has_context = !episodic.trim().is_empty() || !knowledge.trim().is_empty();

    // Build the chat prompt with whatever context was retrieved.
    let chat_prompt = if has_context {
        format!("{}\n{}\n<user> {} <assistant>", knowledge, episodic, message)
    } else {
        format!("<user> {} <assistant>", message)
    };
    let input_ids = tokenizer.encode(&chat_prompt);

    let state = state_lock.read().unwrap();

    // Cap the response so prompt + response fits in the context window.
    let max_response = 128.min(config.max_seq_len.saturating_sub(input_ids.len()));
    let output_ids = match state.model.generate(&input_ids, max_response, config.temperature as f32, 42) {
        Ok(ids) => ids,
        Err(e) => return json!({"type": "text", "text": format!("ERROR: Chat generation failed: {}", e)}),
    };

    let response_text = tokenizer.decode(&output_ids);

    // Store the Q/A pair in episodic memory for future context retrieval.
    let qa_entry = format!("Q: {}\nA: {}", message, response_text);
    let _ = crate::brain_local::brain_store(&qa_entry, conversation_id, "flint_episodic");

    json!({"type": "text", "text": format!(
        "conversation: {}\nuser: {}\nFLINT: {}",
        conversation_id, message, response_text
    )})
}
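
// Chat is opt-in; the same tool toggles and converses. A hypothetical session:
//
//     handle_chat(&transformer, &json!({"enabled": true}), &config, tok);  // enable
//     handle_chat(&transformer, &json!({"message": "hi"}), &config, tok);  // converse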

/// Run one manual training batch from accumulated gate signals.
pub fn handle_train(
    transformer: &Option<Arc<RwLock<TransformerState>>>,
    args: &Value,
    config: &TransformerConfig,
) -> Value {
    let state_lock = match transformer {
        Some(s) => s,
        None => return json!({"type": "text", "text": "ERROR: FLINT not loaded. Enable in transformer.json."}),
    };

    let batch_size = args.get("batch_size")
        .and_then(|v| v.as_u64())
        .unwrap_or(config.batch_size as u64) as usize;

    // Training mutates state, so hold the write lock for the whole batch.
    let mut state = state_lock.write().unwrap();

    if state.is_training {
        return json!({"type": "text", "text": "BUSY: FLINT training already in progress. Wait for completion."});
    }

    state.is_training = true;

    // Drain pending signals. Primary source: "tlog:" entries that the gate
    // (and the FL-8 labeling tools below) persist in the agent-state DB.
    let db_path = spf_root().join("LIVE/LMDB5/LMDB5.DB");
    let mut signals: Vec<crate::gate_training::TrainingSignal> = Vec::new();
    let mut consumed_keys: Vec<String> = Vec::new();

    if let Ok(db) = AgentStateDb::open(&db_path) {
        if let Ok(keys) = db.list_state_keys() {
            let mut tlog_keys: Vec<String> = keys.into_iter()
                .filter(|k| k.starts_with("tlog:"))
                .collect();
            tlog_keys.sort();

            for key in &tlog_keys {
                if let Ok(Some(json)) = db.get_state(key) {
                    if let Ok(signal) = serde_json::from_str::<crate::gate_training::TrainingSignal>(&json) {
                        signals.push(signal);
                        consumed_keys.push(key.clone());
                    }
                }
            }

            // Delete consumed entries so they are not trained on twice.
            for key in &consumed_keys {
                let _ = db.delete_state(key);
            }
        }
    }

    // Fallback: drain the in-process collector if the DB had nothing.
    if signals.is_empty() {
        signals = match &state.collector {
            Some(collector) => collector.drain_signals(),
            None => Vec::new(),
        };
    }

    if signals.is_empty() {
        state.is_training = false;
        return json!({"type": "text", "text": "No pending training signals. Gate decisions accumulate signals automatically."});
    }

    let signal_count = signals.len();

    // Convert signals into weighted gate-decision training examples.
    let mut examples: Vec<crate::train::TrainingExample> = signals.iter().map(|signal| {
        crate::train::TrainingExample {
            input_tokens: crate::learning::signal_to_tokens(signal),
            target: crate::train::TrainingTarget::GateDecision(signal.label()),
            weight: signal.weight(),
        }
    }).collect();

    // Respect the requested batch size.
    examples.truncate(batch_size);

    // Previous running average, used in the adoption log line below.
    let previous_avg_loss = if state.batches_completed > 0 {
        (state.total_loss / state.batches_completed as f64) as f32
    } else {
        f32::MAX
    };

    // Step the optimizer on a clone of the weights; the live model is only
    // updated after the step completes (see below).
    let cloned_weights: Vec<crate::tensor::Tensor> = state.model.weights().iter()
        .map(|w| crate::tensor::Tensor { data: w.data.clone(), shape: w.shape.clone() })
        .collect();

    // Accumulate loss, accuracy, and summed gradients across the batch.
    let mut total_loss = 0.0f32;
    let mut correct = 0u64;
    let mut processed = 0u64;
    let mut all_grads: Option<Vec<crate::tensor::Tensor>> = None;

    for example in &examples {
        let tokens: Vec<u32> = example.input_tokens.iter()
            .map(|&t| t as u32)
            .collect();
        let seq_len = tokens.len().min(config.max_seq_len);
        if seq_len == 0 { continue; }

        // Forward pass, keeping the activation cache for backprop.
        match state.model.forward_causal_with_cache(&tokens[..seq_len], 1, seq_len) {
            Ok((logits, cache)) => {
                // Gate logit: vocab slot 0 at the final position, squashed
                // to a probability with a sigmoid.
                let last_offset = (seq_len - 1) * config.vocab_size;
                let gate_logit = logits.data.get(last_offset).copied().unwrap_or(0.0);
                let prediction = 1.0 / (1.0 + (-gate_logit).exp());

                let label = match &example.target {
                    crate::train::TrainingTarget::GateDecision(l) => l.clamp(0.0, 1.0),
                    crate::train::TrainingTarget::NextToken(_) => continue,
                };
                let pred_t = crate::tensor::Tensor::from_data(
                    vec![prediction], vec![1]
                ).unwrap();

                if let Ok((loss, d_pred)) = crate::train::binary_ce_loss(
                    &pred_t, &[label], &[example.weight]
                ) {
                    total_loss += loss;
                    let predicted_allow = prediction > 0.5;
                    let actual_allow = label > 0.5;
                    if predicted_allow == actual_allow { correct += 1; }
                    processed += 1;

                    // Scatter the scalar loss gradient back to the gate
                    // logit position, then backprop through the model.
                    let mut d_logits_data = vec![0.0f32; seq_len * config.vocab_size];
                    d_logits_data[last_offset] = d_pred.data[0];
                    if let Ok(d_logits) = crate::tensor::Tensor::from_data(
                        d_logits_data, vec![1, seq_len, config.vocab_size]
                    ) {
                        if let Ok(grads) = crate::train::model_backward_causal(
                            &d_logits, &cache, &state.model,
                        ) {
                            // Sum gradients across examples.
                            match &mut all_grads {
                                None => all_grads = Some(grads),
                                Some(acc) => {
                                    for (a, g) in acc.iter_mut().zip(grads.iter()) {
                                        for (av, gv) in a.data.iter_mut().zip(g.data.iter()) {
                                            *av += *gv;
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
            Err(e) => {
                eprintln!("[FLINT-TRAIN] Forward pass error: {}", e);
                continue;
            }
        }
    }

    // Batch statistics.
    let avg_loss = if processed > 0 { total_loss / processed as f32 } else { 0.0 };
    let alignment = if processed > 0 { correct as f64 / processed as f64 } else { 0.0 };
    let mut weights_adopted = false;

    // Load (or initialize) the online EWC state that protects weights
    // learned in earlier batches.
    let ewc_path = spf_root().join("LIVE/MODELS/ewc_state.bin");
    let total_params: usize = cloned_weights.iter().map(|t| t.data.len()).sum();
    let mut ewc = if ewc_path.exists() {
        crate::learning::OnlineEWC::load_from_file(&ewc_path)
            .unwrap_or_else(|_| crate::learning::OnlineEWC::new(total_params, config.ewc_lambda as f32))
    } else {
        crate::learning::OnlineEWC::new(total_params, config.ewc_lambda as f32)
    };

    if let Some(grads) = all_grads {
        // Average the accumulated gradients over the processed examples.
        let scale = 1.0 / processed.max(1) as f32;
        let mut grad_refs: Vec<crate::tensor::Tensor> = grads.iter().map(|g| {
            let scaled: Vec<f32> = g.data.iter().map(|&v| v * scale).collect();
            crate::tensor::Tensor { data: scaled, shape: g.shape.clone() }
        }).collect();

        // Add the EWC penalty gradient so important old weights move less.
        if ewc.active {
            let flat_weights: Vec<f32> = cloned_weights.iter()
                .flat_map(|t| t.data.iter().copied())
                .collect();
            let (_ewc_loss, ewc_grads) = ewc.penalty(&flat_weights);
            let mut offset = 0;
            for grad_tensor in grad_refs.iter_mut() {
                let n = grad_tensor.data.len();
                for i in 0..n {
                    if offset + i < ewc_grads.len() {
                        grad_tensor.data[i] += ewc_grads[offset + i];
                    }
                }
                offset += n;
            }
        }

        // AdamW step on the cloned weights.
        let mut cloned = cloned_weights;
        if let Some(ref mut optimizer) = state.optimizer {
            let mut param_refs: Vec<&mut crate::tensor::Tensor> = cloned.iter_mut().collect();
            let grad_ref_slice: Vec<&crate::tensor::Tensor> = grad_refs.iter().collect();
            let lr = config.learning_rate as f32;
            optimizer.step(&mut param_refs, &grad_ref_slice, lr);

            // Adopt the stepped weights into the live model.
            let mut model_weights = state.model.weights_mut();
            for (mw, cw) in model_weights.iter_mut().zip(cloned.iter()) {
                mw.data.copy_from_slice(&cw.data);
            }
            weights_adopted = true;
            eprintln!("[FLINT-TRAIN] Batch loss: {:.6} (prev: {:.6}). Weights adopted.", avg_loss, previous_avg_loss);

            // Update the Fisher information estimate from this batch's gradients.
            let flat_grads: Vec<f32> = grad_refs.iter()
                .flat_map(|t| t.data.iter().copied())
                .collect();
            ewc.update_fisher(&flat_grads);

            // Snapshot the adopted weights as the new EWC anchor point.
            let flat_adopted: Vec<f32> = state.model.weights().iter()
                .flat_map(|t| t.data.iter().copied())
                .collect();
            ewc.snapshot_weights(&flat_adopted);
        }
    }

    // Persist EWC state; mirror a small metadata record into the agent-state DB.
    if let Err(e) = ewc.save_to_file(&ewc_path) {
        eprintln!("[FLINT-TRAIN] EWC save error: {}", e);
    } else if ewc.update_count > 0 {
        if let Ok(db) = AgentStateDb::open(&db_path) {
            let meta = format!(
                "{{\"update_count\":{},\"lambda\":{},\"params\":{},\"active\":{}}}",
                ewc.update_count, ewc.lambda, total_params, ewc.active
            );
            let _ = db.set_state("ewc:meta", &meta);
        }
    }

    // Update bookkeeping.
    state.training_step += 1;
    state.batches_completed += 1;
    state.total_loss += avg_loss as f64;
    state.gate_alignment = alignment;
    state.is_training = false;

    // Checkpoint only when the live weights actually changed.
    if weights_adopted {
        let ckpt_dir = spf_root().join("LIVE/MODELS");
        let _ = std::fs::create_dir_all(&ckpt_dir);
        let ckpt_path = ckpt_dir.join(&config.writer_checkpoint);
        let weight_refs = state.model.weights();
        match crate::checkpoint::serialize_weights(&weight_refs, "flint_writer", state.training_step) {
            Ok(bytes) => match std::fs::write(&ckpt_path, &bytes) {
                Ok(()) => {
                    state.last_checkpoint = ckpt_path.to_string_lossy().to_string();
                    eprintln!("[FLINT-TRAIN] Checkpoint saved: step={}", state.training_step);
                }
                Err(e) => eprintln!("[FLINT-TRAIN] Checkpoint write failed: {}", e),
            },
            Err(e) => eprintln!("[FLINT-TRAIN] Checkpoint serialize failed: {}", e),
        }
    }

    json!({"type": "text", "text": format!(
        "FLINT training batch completed\n\
        Signals drained: {}\n\
        Examples processed: {}/{}\n\
        Average loss: {:.6}\n\
        Gate alignment: {:.1}%\n\
        Weights adopted: {}\n\
        Training step: {}\n\
        Total batches: {}",
        signal_count,
        processed,
        examples.len(),
        avg_loss,
        alignment * 100.0,
        weights_adopted,
        state.training_step,
        state.batches_completed,
    )})
}
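
// One manual batch, with an optional batch-size override (hypothetical call):
//
//     let summary = handle_train(&transformer, &json!({"batch_size": 8}), &config);
//
// The summary reports signals drained, examples processed, average loss,
// gate alignment, and whether weights were adopted.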

/// Report learning metrics (loss, alignment, config) as a text blob.
pub fn handle_metrics(
    transformer: &Option<Arc<RwLock<TransformerState>>>,
    config: &TransformerConfig,
) -> Value {
    match transformer {
        None => {
            json!({"type": "text", "text": "FLINT: NOT LOADED\nNo metrics available."})
        }
        Some(state_lock) => {
            let state = state_lock.read().unwrap();
            let avg_loss = if state.batches_completed > 0 {
                state.total_loss / state.batches_completed as f64
            } else {
                f64::NAN
            };

            json!({"type": "text", "text": format!(
                "=== FLINT Metrics ===\n\
                Version: {}\n\
                Role: {}\n\
                Training step: {}\n\
                Batches completed: {}\n\
                Average loss: {:.6}\n\
                Gate alignment: {:.2}%\n\
                Learning rate: {:.2e}\n\
                EWC lambda: {}\n\
                Online learning: {}\n\
                Replay buffer: {} slots\n\
                Last checkpoint: {}",
                FLINT_VERSION,
                state.role,
                state.training_step,
                state.batches_completed,
                avg_loss,
                state.gate_alignment * 100.0,
                config.learning_rate,
                config.ewc_lambda,
                config.online_learning,
                config.replay_buffer_size,
                if state.last_checkpoint.is_empty() { "none" } else { &state.last_checkpoint },
            )})
        }
    }
}

/// FL-8: record a negative (evil) training signal for a tool call. The
/// signal is persisted under a "tlog:" key and picked up by the next
/// training batch.
pub fn handle_train_evil(args: &Value) -> Value {
    let tool = args.get("tool").and_then(|v| v.as_str()).unwrap_or("");
    let reason = args.get("reason").and_then(|v| v.as_str()).unwrap_or("user labeled evil");

    if tool.is_empty() {
        return json!({"type": "text", "text": "ERROR: 'tool' parameter required. Specify which tool call was evil."});
    }

    let timestamp = chrono::Utc::now().to_rfc3339();
    let signal = crate::gate_training::TrainingSignal {
        tool: tool.to_string(),
        source: "evil_label".to_string(),
        allowed: true,
        status: "evil".to_string(),
        duration_ms: 0,
        timestamp: timestamp.clone(),
        user_override: false,
        false_positive: true,
        recent_call_count: 0,
        preceding_tools: Vec::new(),
        evil_score: 1.0,
    };

    // Persist the signal where handle_train will find it.
    let db_path = spf_root().join("LIVE/LMDB5/LMDB5.DB");
    if let Ok(db) = AgentStateDb::open(&db_path) {
        if let Ok(json) = serde_json::to_string(&signal) {
            let tlog_key = format!("tlog:{}", timestamp);
            let _ = db.set_state(&tlog_key, &json);
        }
    }

    json!({"type": "text", "text": format!(
        "FLINT evil label recorded for '{}': {}. Will be included in next training batch.",
        tool, reason
    )})
}

/// FL-8: record a positive (good) training signal for a tool call.
pub fn handle_train_good(args: &Value) -> Value {
    let tool = args.get("tool").and_then(|v| v.as_str()).unwrap_or("");
    let reason = args.get("reason").and_then(|v| v.as_str()).unwrap_or("user labeled good");

    if tool.is_empty() {
        return json!({"type": "text", "text": "ERROR: 'tool' parameter required. Specify which tool call was good."});
    }

    let timestamp = chrono::Utc::now().to_rfc3339();
    let signal = crate::gate_training::TrainingSignal {
        tool: tool.to_string(),
        source: "good_label".to_string(),
        allowed: true,
        status: "ok".to_string(),
        duration_ms: 0,
        timestamp: timestamp.clone(),
        user_override: false,
        false_positive: false,
        recent_call_count: 0,
        preceding_tools: Vec::new(),
        evil_score: 0.0,
    };

    let db_path = spf_root().join("LIVE/LMDB5/LMDB5.DB");
    if let Ok(db) = AgentStateDb::open(&db_path) {
        if let Ok(json) = serde_json::to_string(&signal) {
            let tlog_key = format!("tlog:{}", timestamp);
            let _ = db.set_state(&tlog_key, &json);
        }
    }

    json!({"type": "text", "text": format!(
        "FLINT good label recorded for '{}': {}. Will be included in next training batch.",
        tool, reason
    )})
}
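
// Label-to-training flow: both labeling tools persist a TrainingSignal under
// a "tlog:<timestamp>" key in the agent-state DB; the next
// spf_transformer_train call drains and deletes those keys (see handle_train).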

#[cfg(test)]
mod tests {
    use super::*;

    fn make_test_config() -> TransformerConfig {
        TransformerConfig {
            enabled: true,
            d_model: 64,
            n_heads: 4,
            n_layers: 2,
            vocab_size: 256,
            max_seq_len: 64,
            d_ff: 256,
            learning_rate: 1e-4,
            batch_size: 4,
            online_learning: true,
            ewc_lambda: 0.4,
            replay_buffer_size: 100,
            temperature: 0.7,
            writer_checkpoint: "test_writer.spfc".to_string(),
            researcher_checkpoint: "test_researcher.spfc".to_string(),
        }
    }

    #[test]
    fn test_flint_identity() {
        assert_eq!(FLINT_NAME, "FLINT");
        assert_eq!(FLINT_VERSION, "0.1.0");
    }

    #[test]
    fn test_tool_definitions_count() {
        let defs = tool_definitions();
        assert_eq!(defs.len(), 7, "Should have 5 transformer + 2 FL-8 tools");
    }

    #[test]
    fn test_tool_definitions_names() {
        let defs = tool_definitions();
        let names: Vec<&str> = defs.iter()
            .map(|d| d["name"].as_str().unwrap())
            .collect();
        assert!(names.contains(&"spf_transformer_status"));
        assert!(names.contains(&"spf_transformer_infer"));
        assert!(names.contains(&"spf_transformer_chat"));
        assert!(names.contains(&"spf_transformer_train"));
        assert!(names.contains(&"spf_transformer_metrics"));
    }

    #[test]
    fn test_tool_definitions_mention_flint() {
        let defs = tool_definitions();
        for def in &defs {
            let desc = def["description"].as_str().unwrap();
            assert!(desc.contains("FLINT"),
                "Tool {} description should mention FLINT", def["name"]);
        }
    }

    #[test]
    fn test_tool_definitions_have_schemas() {
        let defs = tool_definitions();
        for def in &defs {
            assert!(def.get("inputSchema").is_some(),
                "Tool {} missing inputSchema", def["name"]);
            assert_eq!(def["inputSchema"]["type"], "object",
                "Tool {} schema should be object", def["name"]);
        }
    }

    #[test]
    fn test_status_not_loaded() {
        let config = make_test_config();
        let result = handle_status(&None, &config);
        let text = result["text"].as_str().unwrap();
        assert!(text.contains("FLINT: NOT LOADED"));
        assert!(text.contains("d_model=64"));
    }

    #[test]
    fn test_status_loaded() {
        let config = make_test_config();
        let state = TransformerState::from_config(&config, "writer");
        let locked = Arc::new(RwLock::new(state));
        let result = handle_status(&Some(locked), &config);
        let text = result["text"].as_str().unwrap();
        assert!(text.contains("FLINT: LOADED"));
        assert!(text.contains("writer"));
        assert!(text.contains("Training step: 0"));
    }

    #[test]
    fn test_infer_not_loaded() {
        let config = make_test_config();
        let args = json!({"prompt": "hello"});
        let result = handle_infer(&None, &args, &config, "/nonexistent");
        let text = result["text"].as_str().unwrap();
        assert!(text.contains("FLINT not loaded"));
    }

    #[test]
    fn test_infer_missing_prompt() {
        let config = make_test_config();
        let state = TransformerState::from_config(&config, "writer");
        let locked = Arc::new(RwLock::new(state));
        let args = json!({});
        let result = handle_infer(&Some(locked), &args, &config, "/nonexistent");
        let text = result["text"].as_str().unwrap();
        assert!(text.contains("ERROR: 'prompt' parameter required"));
    }

    #[test]
    fn test_train_not_loaded() {
        let config = make_test_config();
        let args = json!({});
        let result = handle_train(&None, &args, &config);
        let text = result["text"].as_str().unwrap();
        assert!(text.contains("FLINT not loaded"));
    }

    #[test]
    fn test_train_no_collector() {
        let config = make_test_config();
        let state = TransformerState::from_config(&config, "writer");
        let locked = Arc::new(RwLock::new(state));
        let args = json!({"batch_size": 8});

        let result = handle_train(&Some(locked.clone()), &args, &config);
        let text = result["text"].as_str().unwrap();
        // With no collector and no persisted tlog signals, training is a no-op.
        assert!(text.contains("No pending training signals"));
    }

    #[test]
    fn test_metrics_not_loaded() {
        let config = make_test_config();
        let result = handle_metrics(&None, &config);
        let text = result["text"].as_str().unwrap();
        assert!(text.contains("FLINT: NOT LOADED"));
    }

    #[test]
    fn test_metrics_loaded() {
        let config = make_test_config();
        let mut state = TransformerState::from_config(&config, "researcher");
        state.batches_completed = 10;
        state.total_loss = 5.0;
        state.gate_alignment = 0.85;
        let locked = Arc::new(RwLock::new(state));

        let result = handle_metrics(&Some(locked), &config);
        let text = result["text"].as_str().unwrap();
        assert!(text.contains("FLINT Metrics"));
        assert!(text.contains("researcher"));
        assert!(text.contains("85.00%"));
        assert!(text.contains("Batches completed: 10"));
    }

    #[test]
    fn test_transformer_state_from_config() {
        let config = make_test_config();
        let state = TransformerState::from_config(&config, "writer");
        assert_eq!(state.role, "writer");
        assert_eq!(state.training_step, 0);
        assert!(!state.is_training);
        assert!(state.last_checkpoint.is_empty());
    }

    #[test]
    fn test_chat_not_loaded() {
        let config = make_test_config();
        let args = json!({"message": "hello"});
        let result = handle_chat(&None, &args, &config, "/nonexistent");
        let text = result["text"].as_str().unwrap();
        assert!(text.contains("FLINT not loaded"));
    }
}