| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
|
|
| |
| |
| |
|
|
| #[cfg(test)] |
| mod integration { |
| |
| |
| |
| |
|
|
#[test]
fn test_tensor_attention_ffn_pipeline() {
    use crate::tensor::Tensor;
    use crate::attention::MultiHeadAttention;
    use crate::ffn::FeedForwardNetwork;

    // Small-but-realistic dimensions; d_model must divide evenly by n_heads.
    // (Removed an unused `batch_size` local that only produced a dead-code
    // warning — the tensors here are [seq_len, d_model] with no batch axis.)
    let d_model = 64;
    let n_heads = 4;
    let seq_len = 8;

    let input = Tensor::randn(&[seq_len, d_model]);

    // Self-attention: query, key and value all come from the same input.
    let attn = MultiHeadAttention::new(d_model, n_heads);
    let attn_out = attn.forward(&input, &input, &input, None)
        .expect("Attention forward failed");
    assert_eq!(attn_out.shape(), &[seq_len, d_model]);

    // FFN expands to 4*d_model internally but must preserve the outer shape.
    let ffn = FeedForwardNetwork::new(d_model, d_model * 4);
    let ffn_out = ffn.forward(&attn_out)
        .expect("FFN forward failed");
    assert_eq!(ffn_out.shape(), &[seq_len, d_model]);

    // Numerical sanity: every final activation must be a real number.
    let data = ffn_out.data();
    assert!(data.iter().all(|v| v.is_finite()), "Output contains NaN/Inf");
}
|
|
| |
| |
| |
| |
|
|
#[test]
fn test_tokenizer_vocab_transformer_compat() {
    use crate::tokenizer::Tokenizer;
    use crate::transformer::TransformerModelConfig;

    // Byte-level vocab (256): every encoded id must fit the model's table.
    let cfg = TransformerModelConfig {
        vocab_size: 256,
        d_model: 32,
        n_heads: 2,
        n_layers: 1,
        d_ff: 128,
        max_seq_len: 64,
    };

    let tok = Tokenizer::new();
    let text = "Hello SPF";
    let token_ids = tok.encode(text);

    // Each token id must be addressable by the model's embedding table.
    for &id in &token_ids {
        assert!(id < cfg.vocab_size,
            "Token ID {} exceeds vocab_size {}", id, cfg.vocab_size);
    }

    // Round trip: decode(encode(text)) must reproduce the input text.
    let round_trip = tok.decode(&token_ids);
    assert_eq!(round_trip.trim(), text.trim(),
        "Tokenizer round-trip failed: '{}' → {:?} → '{}'", text, token_ids, round_trip);
}
|
|
| |
| |
| |
| |
|
|
#[test]
fn test_full_transformer_forward() {
    use crate::transformer::{SPFTransformer, TransformerModelConfig};

    // Tiny model: 1 layer, 2 heads — enough to exercise the full stack.
    let cfg = TransformerModelConfig {
        vocab_size: 128,
        d_model: 32,
        n_heads: 2,
        n_layers: 1,
        d_ff: 128,
        max_seq_len: 16,
    };

    let model = SPFTransformer::new(cfg.clone(), 42);
    let input_ids = vec![1, 5, 10, 15, 20];

    // Causal forward pass: one row of logits per input position.
    let logits = model.forward_causal(&input_ids);
    let shape = logits.shape();
    assert_eq!(shape[0], input_ids.len());
    assert_eq!(shape[1], cfg.vocab_size);

    // Every logit must be a real number.
    let all_finite = logits.data().iter().all(|v| v.is_finite());
    assert!(all_finite, "Logits contain NaN/Inf");
}
|
|
| |
| |
| |
|
|
#[test]
fn test_checkpoint_roundtrip_preserves_output() {
    use crate::transformer::{SPFTransformer, TransformerModelConfig};
    use crate::checkpoint;

    let cfg = TransformerModelConfig {
        vocab_size: 64,
        d_model: 16,
        n_heads: 2,
        n_layers: 1,
        d_ff: 64,
        max_seq_len: 8,
    };

    let source_model = SPFTransformer::new(cfg.clone(), 42);
    let prompt = vec![1, 2, 3, 4];

    // Reference logits computed before any serialization happens.
    let reference = source_model.forward_causal(&prompt);

    // Serialize the source model's weights into an in-memory checkpoint.
    let blob = checkpoint::serialize_weights(&source_model.weights(), "test", 0)
        .expect("Serialize should succeed");
    assert!(!blob.is_empty(), "Checkpoint data should not be empty");

    // Load the checkpoint into a second, freshly constructed model.
    let mut restored = SPFTransformer::new(cfg.clone(), 42);
    let (ckpt_weights, _meta) = checkpoint::deserialize_weights(&blob)
        .expect("Deserialize should succeed");
    let mut restored_weights = restored.weights_mut();
    checkpoint::apply_weights(&mut restored_weights, &ckpt_weights)
        .expect("Apply weights should succeed");

    // Same prompt through the restored model...
    let restored_logits = restored.forward_causal(&prompt);

    // ...must yield numerically identical logits (tight float tolerance).
    let want = reference.data();
    let got = restored_logits.data();
    assert_eq!(want.len(), got.len());
    for (a, b) in want.iter().zip(got.iter()) {
        assert!((a - b).abs() < 1e-6,
            "Checkpoint load changed output: {} vs {}", a, b);
    }
}
|
|
| |
| |
| |
|
|
#[test]
fn test_autoregressive_generation() {
    use crate::transformer::{SPFTransformer, TransformerModelConfig};

    let cfg = TransformerModelConfig {
        vocab_size: 64,
        d_model: 16,
        n_heads: 2,
        n_layers: 1,
        d_ff: 64,
        max_seq_len: 32,
    };

    let model = SPFTransformer::new(cfg.clone(), 42);
    let prompt = vec![1, 2, 3];
    let max_new = 10;

    // Sample with temperature 0.8 and a fixed seed for determinism.
    let output = model.generate(&prompt, max_new, 0.8, 42)
        .expect("Generate should succeed");

    // Length bounds: strictly longer than the prompt, capped by max_new.
    assert!(output.len() > prompt.len(),
        "Generate should produce more tokens than prompt");
    assert!(output.len() <= prompt.len() + max_new,
        "Generate should not exceed max_new");

    // Every sampled id must be a valid vocabulary index.
    for &id in &output {
        assert!(id < cfg.vocab_size,
            "Generated ID {} exceeds vocab {}", id, cfg.vocab_size);
    }
}
|
|
| |
| |
| |
| |
|
|
#[test]
fn test_framing_100_messages_roundtrip() {
    use crate::framing::{Frame, StreamType};

    // All eight stream types, cycled across 100 messages.
    let types = [
        StreamType::ToolRpc,
        StreamType::ChatText,
        StreamType::VoiceAudio,
        StreamType::PipelineTask,
        StreamType::PipelineResult,
        StreamType::BrainSync,
        StreamType::WeightSync,
        StreamType::Control,
    ];

    for (i, &stream_type) in types.iter().cycle().take(100).enumerate() {
        let payload = format!("message_{:03}", i);
        let frame = Frame::new(stream_type, payload.as_bytes().to_vec());

        // Serialize; every frame carries at least the 5-byte header.
        let bytes = frame.to_bytes();
        assert!(bytes.len() >= 5, "Frame must have at least 5 byte header");

        // Parse back off the wire: the round trip must be lossless and
        // consume exactly the bytes that were produced.
        let (parsed, consumed) = crate::framing::parse_frame(&bytes)
            .expect("Should parse successfully")
            .expect("Should have complete frame");

        assert_eq!(consumed, bytes.len());
        assert_eq!(parsed.stream_type, stream_type);
        assert_eq!(parsed.payload_str().unwrap(), payload);
    }
}
|
|
| |
| |
| |
|
|
#[test]
fn test_protocol_detection_bytes() {
    use crate::framing::StreamType;

    // Legacy JSON messages start with '{' (0x7B), which sits safely
    // outside the framed stream-type byte range 0x01..=0x08, so the two
    // protocols can be told apart from the first byte alone.
    let legacy_first_byte = b'{';
    assert_eq!(legacy_first_byte, 0x7B);
    assert!(legacy_first_byte > 0x08,
        "Legacy byte must not overlap with stream type range 0x01-0x08");

    // Each stream type must encode to its fixed wire byte.
    let framed_types = [
        (StreamType::ToolRpc, 0x01u8),
        (StreamType::ChatText, 0x02),
        (StreamType::VoiceAudio, 0x03),
        (StreamType::PipelineTask, 0x04),
        (StreamType::PipelineResult, 0x05),
        (StreamType::BrainSync, 0x06),
        (StreamType::WeightSync, 0x07),
        (StreamType::Control, 0x08),
    ];

    for (st, expected_byte) in &framed_types {
        let wire = crate::framing::Frame::new(*st, vec![0x00]).to_bytes();
        assert_eq!(wire[0], *expected_byte,
            "StreamType {:?} should encode as 0x{:02X}", st, expected_byte);
    }
}
|
|
| |
| |
| |
| |
|
|
#[test]
fn test_source_variants_complete() {
    use crate::dispatch::Source;

    // One instance of every Source variant.
    let variants = vec![
        Source::Stdio,
        Source::Http,
        Source::Mesh { peer_key: "abc123".into() },
        Source::Transformer { role: "writer".into(), model_id: "v1".into() },
        Source::Pipeline { stream_id: "s1".into(), peer_key: "pk1".into() },
    ];

    // Each variant must survive a serde JSON round trip.
    for src in &variants {
        let json = serde_json::to_string(src)
            .expect("Source should serialize");
        let _back: Source = serde_json::from_str(&json)
            .expect("Source should deserialize");
    }

    // Guard: bump this count when a new Source variant is added.
    assert_eq!(variants.len(), 5, "Should have 5 Source variants");
}
|
|
| |
| |
| |
| |
|
|
#[test]
fn test_config_to_state_integration() {
    use crate::config::TransformerConfig;
    use crate::transformer_tools::TransformerState;

    let cfg = TransformerConfig {
        enabled: true,
        d_model: 64,
        n_heads: 4,
        n_layers: 2,
        vocab_size: 256,
        max_seq_len: 32,
        d_ff: 256,
        ..TransformerConfig::default()
    };

    // Building a state from config yields a fresh, untrained model.
    let state = TransformerState::from_config(&cfg, "writer");
    assert_eq!(state.role, "writer");
    assert_eq!(state.training_step, 0);
    assert!(!state.is_training);

    // The embedded model is immediately usable: logits span the vocab.
    let input_ids = vec![1, 2, 3];
    let logits = state.model.forward_causal(&input_ids);
    assert_eq!(logits.shape()[1], 256);
}
|
|
| |
| |
| |
|
|
#[test]
fn test_transformer_tools_status() {
    use crate::config::TransformerConfig;
    use crate::transformer_tools;
    use std::sync::{Arc, RwLock};

    let config = TransformerConfig::default();

    // With no state present the status must report NOT LOADED.
    let unloaded = transformer_tools::handle_status(&None, &config);
    assert!(unloaded["text"].as_str().unwrap().contains("NOT LOADED"));

    // With a live state the status reports LOADED plus the role name.
    let state = transformer_tools::TransformerState::from_config(&config, "researcher");
    let shared = Arc::new(RwLock::new(state));
    let loaded = transformer_tools::handle_status(&Some(shared), &config);
    let text = loaded["text"].as_str().unwrap();
    assert!(text.contains("LOADED"));
    assert!(text.contains("researcher"));
}
|
|
| |
| |
| |
| |
|
|
#[test]
fn test_gate_training_signal_format() {
    use crate::gate_training::{TrainingSignal, SignalBuffer};

    // A representative "allowed" signal for a read-tool invocation.
    let signal = TrainingSignal {
        tool: "spf_read".to_string(),
        allowed: true,
        label: 1.0,
        weight: 1.0,
        complexity: 15,
        duration_ms: 42,
        timestamp: "2026-02-28T12:00:00Z".to_string(),
        source_type: "stdio".to_string(),
    };

    // JSON round trip must preserve the fields training depends on.
    let encoded = serde_json::to_string(&signal).unwrap();
    let decoded: TrainingSignal = serde_json::from_str(&encoded).unwrap();
    assert_eq!(decoded.tool, "spf_read");
    assert!(decoded.allowed);
    assert_eq!(decoded.complexity, 15);

    // The signal buffer accepts the original and reports one entry.
    let mut buffer = SignalBuffer::new(100);
    buffer.add(signal);
    assert_eq!(buffer.len(), 1);
}
|
|
| |
| |
| |
| |
|
|
#[test]
fn test_training_batch_loss() {
    use crate::tensor::Tensor;
    use crate::train;

    // Four examples, two classes, random logits.
    let batch_size = 4;
    let logits = Tensor::randn(&[batch_size, 2]);
    let targets = vec![1, 0, 1, 1];

    // One-hot encode the targets into a [batch_size, 2] tensor.
    let mut one_hot = vec![0.0f32; batch_size * 2];
    for (row, &class) in targets.iter().enumerate() {
        one_hot[row * 2 + class] = 1.0;
    }
    let target_tensor = Tensor::from_data(&[batch_size, 2], one_hot);

    // Cross-entropy against random logits: finite and non-negative.
    let loss = train::cross_entropy_loss(&logits, &target_tensor);
    assert!(loss.is_finite(), "Loss should be finite, got {}", loss);
    assert!(loss >= 0.0, "Cross-entropy loss should be non-negative");
}
|
|
| |
|
|
| |
| |
| |
| |
|
|
#[test]
fn test_pipeline_batch_lifecycle() {
    use crate::pipeline::*;

    let mut state = PipelineState::new();
    let stream = "integration_test_stream";

    // Builder closure: the three tasks differed only in id/tool/args, so
    // the shared fields (stream, priority, timeout, no chaining) live here
    // once instead of being repeated per task literal.
    let make_task = |task_id: &str, tool: &str, args: serde_json::Value| PipelineTask {
        task_id: task_id.into(),
        stream_id: stream.into(),
        tool: tool.into(),
        args,
        chain_next: None,
        priority: 0,
        timeout_ms: 5000,
        pipe_field: None,
    };

    // Submit a parallel batch of three independent tasks.
    let batch = PipelineBatch {
        stream_id: stream.to_string(),
        tasks: vec![
            make_task("it_1", "spf_read", serde_json::json!({"file_path": "/test"})),
            make_task("it_2", "spf_glob", serde_json::json!({"pattern": "*.rs"})),
            make_task("it_3", "spf_grep", serde_json::json!({"pattern": "test"})),
        ],
        mode: BatchMode::Parallel,
        submitted_by: "integration_test".into(),
        submitted_at: "2026-02-28T12:00:00Z".into(),
    };

    let sid = state.submit_batch(batch).expect("Submit should succeed");
    assert_eq!(sid, "integration_test_stream");
    assert!(!state.is_stream_complete(&sid));

    // Drain the scheduler: all three tasks must come out.
    let mut dispatched = Vec::new();
    while let Some(task) = state.next_task(&sid) {
        dispatched.push(task);
    }
    assert_eq!(dispatched.len(), 3);

    // Record a successful result for every dispatched task.
    for task in &dispatched {
        state.record_result(PipelineResult {
            task_id: task.task_id.clone(),
            stream_id: sid.clone(),
            status: PipelineStatus::Ok,
            result: serde_json::json!({"output": "done"}),
            error: None,
            duration_ms: 10,
            executed_by: "test_worker".into(),
        });
    }

    // Stream completes with three successes and no failures.
    assert!(state.is_stream_complete(&sid));
    assert_eq!(state.total_completed, 3);
    assert_eq!(state.total_failed, 0);
}
|
|
| |
| |
| |
|
|
#[test]
fn test_worker_init_from_config() {
    use crate::config::TransformerConfig;
    use crate::worker::{WorkerConfig, WorkerState, init_transformer};

    // Tiny-but-enabled transformer config.
    let tc = TransformerConfig {
        enabled: true,
        d_model: 32,
        n_heads: 2,
        n_layers: 1,
        vocab_size: 64,
        max_seq_len: 16,
        d_ff: 128,
        ..TransformerConfig::default()
    };

    let wc = WorkerConfig { role: "writer".into(), ..WorkerConfig::default() };

    // Transformer init must pick up the worker's configured role.
    let state = init_transformer(&tc, &wc).expect("Init should succeed");
    assert_eq!(state.role, "writer");

    // WorkerState exposes mode and role through its status JSON.
    let ws = WorkerState::new(wc, tc);
    let status = ws.status_json();
    assert_eq!(status["mode"], "worker");
    assert_eq!(status["role"], "writer");
}
|
|
| |
| |
| |
|
|
#[test]
fn test_chat_multi_turn_context() {
    use crate::chat::{ChatEngine, ChatMessage, MessageType};

    let mut engine = ChatEngine::new("agent_001".into());

    // Five alternating turns in a single conversation.
    let messages = vec![
        ("user", "What tools does SPF have?"),
        ("agent_001", "SPF has 55+ tools across 6 categories."),
        ("user", "Tell me about the gate."),
        ("agent_001", "The gate enforces security with default-deny."),
        ("user", "How does training work?"),
    ];

    // Use the turn index for the message id. The previous scheme keyed the
    // id off text length, which collided: "Tell me about the gate." and
    // "How does training work?" are both 23 chars → duplicate "msg_23".
    for (i, (from, text)) in messages.iter().enumerate() {
        let msg = ChatMessage {
            id: format!("msg_{}", i),
            from: from.to_string(),
            to: if *from == "user" { "agent_001" } else { "user" }.to_string(),
            text: text.to_string(),
            timestamp: "2026-02-28T12:00:00Z".into(),
            conversation_id: "conv_integration".into(),
            msg_type: if *from == "user" { MessageType::UserText } else { MessageType::AgentResponse },
        };
        engine.receive_message(msg);
    }

    // All five messages land in a single conversation.
    assert_eq!(engine.conversation_count(), 1);
    assert_eq!(engine.total_messages(), 5);

    // Context includes the newest question and earlier agent content.
    let ctx = engine.get_context("conv_integration").unwrap();
    assert!(ctx.contains("How does training work?"));
    assert!(ctx.contains("default-deny"));

    // History truncation returns the most recent N messages in order.
    let last_2 = engine.get_history("conv_integration", 2);
    assert_eq!(last_2.len(), 2);
    assert_eq!(last_2[1].text, "How does training work?");
}
|
|
| |
| |
| |
|
|
#[test]
fn test_voice_stubs_all_unavailable() {
    use crate::voice::*;

    // Audio input stub: reports unavailable and fails every read.
    let mut input = StubAudioInput::new();
    assert!(!input.is_available());
    assert!(input.read_frame().is_err());

    // Audio output stub: reports unavailable and fails every write.
    let mut output = StubAudioOutput::new();
    assert!(!output.is_available());
    assert!(output.write_frame(&[0; 640]).is_err());

    // STT stub: unavailable, transcription errors, no languages listed.
    let mut stt = StubSTT;
    assert!(!stt.is_available());
    assert!(stt.transcribe(&[]).is_err());
    assert!(stt.supported_languages().is_empty());

    // TTS stub: unavailable, synthesis errors, no voices listed.
    let mut tts = StubTTS;
    assert!(!tts.is_available());
    assert!(tts.synthesize("hello").is_err());
    assert!(tts.available_voices().is_empty());

    // Aggregate status mirrors the stubs; codec string is fixed to "opus".
    let status = VoiceStatus::from_stubs();
    assert!(!status.audio_input_available);
    assert!(!status.stt_available);
    assert!(!status.tts_available);
    assert_eq!(status.codec, "opus");
}
|
|
| |
| |
| |
| |
|
|
#[test]
fn test_pipeline_mesh_frame_integration() {
    use crate::pipeline::{self, PipelineTask, PipelineResult, PipelineStatus};
    use crate::framing::{self, StreamType};

    // A task destined for a remote worker over the mesh.
    let task = PipelineTask {
        task_id: "mesh_t1".into(),
        stream_id: "mesh_stream".into(),
        tool: "spf_brain_search".into(),
        args: serde_json::json!({"query": "transformer architecture"}),
        chain_next: None,
        priority: 1,
        timeout_ms: 10000,
        pipe_field: None,
    };

    // Task → frame → wire bytes → frame → task must be lossless.
    let frame = pipeline::task_to_frame(&task);
    assert_eq!(frame.stream_type, StreamType::PipelineTask);

    let (received, _) = framing::parse_frame(&frame.to_bytes())
        .unwrap().unwrap();
    assert_eq!(received.stream_type, StreamType::PipelineTask);

    let decoded_task = pipeline::task_from_frame(&received).unwrap();
    assert_eq!(decoded_task.task_id, "mesh_t1");
    assert_eq!(decoded_task.tool, "spf_brain_search");

    // The matching result travels back over the wire the same way.
    let result = PipelineResult {
        task_id: "mesh_t1".into(),
        stream_id: "mesh_stream".into(),
        status: PipelineStatus::Ok,
        result: serde_json::json!({"matches": 5}),
        error: None,
        duration_ms: 42,
        executed_by: "worker_1".into(),
    };

    let result_frame = pipeline::result_to_frame(&result);
    assert_eq!(result_frame.stream_type, StreamType::PipelineResult);

    let (recv_result, _) = framing::parse_frame(&result_frame.to_bytes())
        .unwrap().unwrap();
    let decoded_result = pipeline::result_from_frame(&recv_result).unwrap();
    assert_eq!(decoded_result.status, PipelineStatus::Ok);
    assert_eq!(decoded_result.executed_by, "worker_1");
}
|
|
| |
| |
| |
| |
|
|
#[test]
fn test_chat_mesh_frame_integration() {
    use crate::chat::{self, ChatMessage, MessageType};
    use crate::framing::{Frame, StreamType};

    let original = ChatMessage {
        id: "chat_mesh_1".into(),
        from: "peer_a".into(),
        to: "peer_b".into(),
        text: "Hello through the mesh!".into(),
        timestamp: "2026-02-28T12:00:00Z".into(),
        conversation_id: "mesh_conv_1".into(),
        msg_type: MessageType::UserText,
    };

    // Encode the message and wrap it in a ChatText frame.
    let payload = chat::message_to_bytes(&original).unwrap();
    let frame = Frame::new(StreamType::ChatText, payload);
    assert_eq!(frame.stream_type, StreamType::ChatText);

    // Push it through the wire format and parse it back.
    let (received, _) = crate::framing::parse_frame(&frame.to_bytes())
        .unwrap().unwrap();

    // The decoded message must match what was sent.
    let decoded = chat::message_from_bytes(&received.payload).unwrap();
    assert_eq!(decoded.from, "peer_a");
    assert_eq!(decoded.text, "Hello through the mesh!");
    assert_eq!(decoded.conversation_id, "mesh_conv_1");
}
|
|
| |
| |
| |
| |
|
|
#[test]
fn test_config_param_estimate_reasonable() {
    use crate::config::TransformerConfig;

    let default_cfg = TransformerConfig::default();
    let default_params = default_cfg.estimated_params();

    // The default model should land in a 1M–20M parameter band.
    assert!(default_params > 1_000_000, "Should be >1M params, got {}", default_params);
    assert!(default_params < 20_000_000, "Should be <20M params, got {}", default_params);

    // Estimated memory footprint stays under half a gigabyte.
    let mem_mb = default_cfg.estimated_memory_bytes() / 1_000_000;
    assert!(mem_mb < 500, "Memory should be <500MB, got {}MB", mem_mb);

    // Shrinking every dimension must shrink the parameter estimate.
    let small_cfg = TransformerConfig {
        d_model: 32,
        n_heads: 2,
        n_layers: 1,
        vocab_size: 64,
        d_ff: 128,
        ..TransformerConfig::default()
    };
    let small_params = small_cfg.estimated_params();
    assert!(small_params < default_params,
        "Small config ({}) should have fewer params than default ({})",
        small_params, default_params);
}
| } |
|
|