// SPFsmartGATE/src/chat.rs
// (Repository upload metadata — "JosephStoneCellAI's picture / Upload 45 files / 1269259 verified" —
// was non-code residue from the file host and has been normalized into this comment.)
// SPF Smart Gateway - Chat Engine (Block P)
// Copyright 2026 Joseph Stone - All Rights Reserved
//
// Text chat system for user ↔ user, user ↔ agent, agent ↔ agent communication.
// Messages flow over mesh StreamType::ChatText (0x02) and local LMDB storage.
// Transformer (Researcher config) generates responses in decoder-only/causal mode.
//
// Features:
// - ChatMessage format with conversation tracking
// - Conversation context window (last N messages as transformer input)
// - Chat history persisted in agent_state LMDB
// - Room-based routing (1:1 and group)
// - MCP tool interfaces: spf_chat_send, spf_chat_history, spf_chat_rooms
//
// Depends on: transformer.rs (Block E), framing.rs (Block F, StreamType::ChatText)
// Storage: agent_state LMDB with "chat:" key prefix
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};
/// Block QQ: Rate limiter for FLINT auto-responses — max 1 per 5 seconds per peer.
/// Keyed by full peer key; entries are never evicted (growth is bounded by the
/// number of distinct peers seen — NOTE(review): confirm peer count stays small).
/// Uses Option<HashMap> because HashMap::new() is not const-compatible, while
/// Mutex::new and None are — the map is created lazily on first use via
/// get_or_insert_with in handle_mesh_chat.
static FLINT_RATE_LIMIT: std::sync::Mutex<Option<HashMap<String, std::time::Instant>>>
    = std::sync::Mutex::new(None);
// ============================================================================
// CHAT MESSAGE FORMAT
// ============================================================================
/// A single chat message.
/// Serialized as JSON for mesh transport (framing StreamType::ChatText = 0x02)
/// and LMDB storage — both paths use the same serde encoding
/// (see serialize_message / message_to_bytes).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatMessage {
    /// Unique message ID (timestamp_ms + sender hash)
    pub id: String,
    /// Sender identifier (peer pub_key short hash or "user" or "system")
    pub from: String,
    /// Recipient ("all" for room broadcast, or specific peer short hash)
    pub to: String,
    /// Message text content. When msg_type is System this may carry a JSON
    /// CallSignal payload (see CallSignal::to_chat_message).
    pub text: String,
    /// Timestamp (RFC3339)
    pub timestamp: String,
    /// Conversation/room ID
    pub conversation_id: String,
    /// Message type
    pub msg_type: MessageType,
}
/// Message type classification.
/// System messages double as the envelope for CallSignal payloads
/// (see CallSignal::from_chat_message).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum MessageType {
    /// Regular user text message
    UserText,
    /// Transformer-generated response
    AgentResponse,
    /// System notification (join, leave, error) — also carries call signals
    System,
    /// Tool result shared in chat
    ToolResult,
}
// ============================================================================
// CONVERSATION TRACKING
// ============================================================================
/// A conversation (chat room) with message history and context window.
/// Covers both 1:1 and group rooms; participants are peer short hashes.
#[derive(Debug, Clone)]
pub struct Conversation {
    /// Unique conversation ID
    pub id: String,
    /// Display name for the conversation
    pub name: String,
    /// Participants (peer short hashes)
    pub participants: Vec<String>,
    /// Message history (chronological order)
    pub messages: Vec<ChatMessage>,
    /// Maximum messages to retain in memory (older ones in LMDB only;
    /// enforced by add_message, which evicts from the front)
    pub max_memory_messages: usize,
    /// Context window size for transformer input (last N messages)
    pub context_window: usize,
    /// Creation timestamp (RFC3339)
    pub created: String,
    /// Last activity timestamp (RFC3339; refreshed on every add_message)
    pub last_activity: String,
}
impl Conversation {
    /// Construct a conversation with default retention (500 in-memory
    /// messages) and a 20-message transformer context window.
    pub fn new(id: String, name: String, participants: Vec<String>, timestamp: String) -> Self {
        Self {
            id,
            name,
            participants,
            messages: Vec::new(),
            max_memory_messages: 500,
            context_window: 20,
            created: timestamp.clone(),
            last_activity: timestamp,
        }
    }

    /// Append a message, refresh the activity timestamp, and enforce the
    /// in-memory cap by evicting the oldest entries first.
    pub fn add_message(&mut self, msg: ChatMessage) {
        self.last_activity = msg.timestamp.clone();
        self.messages.push(msg);
        // Evict from the front so the newest messages survive.
        match self.messages.len().checked_sub(self.max_memory_messages) {
            Some(excess) if excess > 0 => {
                self.messages.drain(..excess);
            }
            _ => {}
        }
    }

    /// The most recent `context_window` messages (fewer if the history is
    /// shorter) — the slice used as transformer input.
    pub fn context_messages(&self) -> &[ChatMessage] {
        let total = self.messages.len();
        &self.messages[total.saturating_sub(self.context_window)..]
    }

    /// Render the context window as "from: text" lines joined by newlines.
    pub fn context_as_text(&self) -> String {
        let mut rendered: Vec<String> = Vec::new();
        for message in self.context_messages() {
            rendered.push(format!("{}: {}", message.from, message.text));
        }
        rendered.join("\n")
    }

    /// Number of messages currently held in memory.
    pub fn message_count(&self) -> usize {
        self.messages.len()
    }

    /// Whether `peer` appears in the participant list.
    pub fn has_participant(&self, peer: &str) -> bool {
        self.participants.iter().any(|p| p.as_str() == peer)
    }
}
// ============================================================================
// CHAT ENGINE
// ============================================================================
/// Central chat engine managing all conversations.
///
/// Handles:
/// - Routing messages to correct conversations
/// - Creating conversations on first contact
/// - Preparing transformer context for response generation
/// - LMDB persistence of chat history (serialization only — the caller
///   performs the actual LMDB reads/writes, see persist_conversation)
pub struct ChatEngine {
    /// Active conversations indexed by ID (linear scan; insertion order preserved)
    conversations: Vec<Conversation>,
    /// Local identity (short pub_key hash)
    pub local_identity: String,
    /// Default context window size for new conversations
    pub default_context_window: usize,
}
impl ChatEngine {
    /// Create a new chat engine for the given local identity (short pub_key hash).
    pub fn new(local_identity: String) -> Self {
        Self {
            conversations: Vec::new(),
            local_identity,
            default_context_window: 20,
        }
    }

    /// Locate a conversation by ID, creating one from `msg` metadata if absent.
    /// Returns the index into `self.conversations`.
    fn find_or_create_index(&mut self, conv_id: &str, msg: &ChatMessage) -> usize {
        if let Some(i) = self.conversations.iter().position(|c| c.id == conv_id) {
            return i;
        }
        let mut conv = Conversation::new(
            conv_id.to_string(),
            format!("Chat with {}", msg.from),
            vec![msg.from.clone(), msg.to.clone()],
            msg.timestamp.clone(),
        );
        // Fix: honor the engine-level default. Previously this field was
        // declared but never used — Conversation::new's hard-coded 20 always won.
        conv.context_window = self.default_context_window;
        self.conversations.push(conv);
        self.conversations.len() - 1
    }

    /// Process an incoming message. Creates conversation if needed.
    /// Returns the conversation ID the message was routed to.
    pub fn receive_message(&mut self, msg: ChatMessage) -> String {
        let conv_id = msg.conversation_id.clone();
        // Single pass over the list (previously an `any` scan followed by
        // a second `find` scan).
        let idx = self.find_or_create_index(&conv_id, &msg);
        self.conversations[idx].add_message(msg);
        conv_id
    }

    /// Create a new outgoing message (msg_type = AgentResponse).
    ///
    /// The ID is the timestamp with separators stripped plus the first 8
    /// chars of the local identity. `chars()` is used so a non-ASCII
    /// identity cannot panic the byte slice used previously.
    pub fn create_message(
        &self,
        to: &str,
        text: &str,
        conversation_id: &str,
        timestamp: &str,
    ) -> ChatMessage {
        let short_id: String = self.local_identity.chars().take(8).collect();
        let id = format!("{}_{}", timestamp.replace([':', '-', 'T', 'Z'], ""), short_id);
        ChatMessage {
            id,
            from: self.local_identity.clone(),
            to: to.to_string(),
            text: text.to_string(),
            timestamp: timestamp.to_string(),
            conversation_id: conversation_id.to_string(),
            msg_type: MessageType::AgentResponse,
        }
    }

    /// Get context text for transformer input (for a given conversation).
    /// Returns None if conversation doesn't exist.
    pub fn get_context(&self, conversation_id: &str) -> Option<String> {
        self.get_conversation(conversation_id).map(|c| c.context_as_text())
    }

    /// Get conversation by ID.
    pub fn get_conversation(&self, id: &str) -> Option<&Conversation> {
        self.conversations.iter().find(|c| c.id == id)
    }

    /// List all active conversations (insertion order).
    pub fn list_conversations(&self) -> Vec<ConversationSummary> {
        self.conversations
            .iter()
            .map(|c| ConversationSummary {
                id: c.id.clone(),
                name: c.name.clone(),
                participants: c.participants.clone(),
                message_count: c.message_count(),
                last_activity: c.last_activity.clone(),
            })
            .collect()
    }

    /// Get chat history for a conversation (last `limit` messages, oldest
    /// first). Returns an empty Vec for an unknown conversation.
    pub fn get_history(&self, conversation_id: &str, limit: usize) -> Vec<&ChatMessage> {
        self.get_conversation(conversation_id)
            .map(|conv| {
                let start = conv.messages.len().saturating_sub(limit);
                conv.messages[start..].iter().collect()
            })
            .unwrap_or_default()
    }

    /// Total conversations currently tracked.
    pub fn conversation_count(&self) -> usize {
        self.conversations.len()
    }

    /// Total in-memory messages across all conversations.
    pub fn total_messages(&self) -> usize {
        self.conversations.iter().map(|c| c.message_count()).sum()
    }

    // ---- Block FF: LMDB Persistence ----

    /// Serialize all messages in a conversation as LMDB-ready key-value pairs.
    /// Returns Vec<(key, serialized_bytes)> for direct LMDB insertion; messages
    /// that fail to serialize are skipped.
    /// Caller (mcp.rs handler) performs the actual LMDB write.
    pub fn persist_conversation(&self, conversation_id: &str) -> Option<Vec<(String, Vec<u8>)>> {
        let conv = self.get_conversation(conversation_id)?;
        let pairs: Vec<(String, Vec<u8>)> = conv
            .messages
            .iter()
            .filter_map(|msg| {
                let key = message_key(conversation_id, &msg.id);
                serialize_message(msg).ok().map(|bytes| (key, bytes))
            })
            .collect();
        Some(pairs)
    }

    /// Load messages from serialized LMDB data into a conversation.
    /// Creates the conversation if it doesn't exist; undecodable entries are
    /// skipped; senders are added to the participant list.
    /// Returns the number of messages successfully loaded.
    pub fn load_conversation_messages(
        &mut self,
        conversation_id: &str,
        name: &str,
        serialized_messages: &[(&str, Vec<u8>)],
    ) -> Result<usize, String> {
        // Single pass: locate or create, then take a mutable handle by index
        // (previously an `any` scan, a push, then a second `find` with an
        // unreachable "not found after creation" error path).
        let idx = match self.conversations.iter().position(|c| c.id == conversation_id) {
            Some(i) => i,
            None => {
                let timestamp = chrono::Utc::now().to_rfc3339();
                let mut conv = Conversation::new(
                    conversation_id.to_string(),
                    name.to_string(),
                    Vec::new(),
                    timestamp,
                );
                conv.context_window = self.default_context_window;
                self.conversations.push(conv);
                self.conversations.len() - 1
            }
        };
        let conv = &mut self.conversations[idx];
        let mut loaded = 0;
        for (_key, data) in serialized_messages {
            if let Ok(msg) = deserialize_message(data) {
                if !conv.participants.contains(&msg.from) {
                    conv.participants.push(msg.from.clone());
                }
                conv.add_message(msg);
                loaded += 1;
            }
        }
        Ok(loaded)
    }
}
/// Summary of a conversation (returned by ChatEngine::list_conversations)
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConversationSummary {
    /// Conversation/room ID
    pub id: String,
    /// Display name
    pub name: String,
    /// Participant short hashes
    pub participants: Vec<String>,
    /// In-memory message count (LMDB may hold more)
    pub message_count: usize,
    /// Last activity timestamp (RFC3339)
    pub last_activity: String,
}
// ============================================================================
// LMDB PERSISTENCE
// ============================================================================
/// Serialize a chat message to JSON bytes for LMDB storage.
/// Returns a human-readable error string on failure.
pub fn serialize_message(msg: &ChatMessage) -> Result<Vec<u8>, String> {
    match serde_json::to_vec(msg) {
        Ok(bytes) => Ok(bytes),
        Err(e) => Err(format!("Serialize error: {}", e)),
    }
}
/// Deserialize a chat message from its LMDB JSON bytes.
/// Returns a human-readable error string on failure.
pub fn deserialize_message(data: &[u8]) -> Result<ChatMessage, String> {
    match serde_json::from_slice(data) {
        Ok(msg) => Ok(msg),
        Err(err) => Err(format!("Deserialize error: {}", err)),
    }
}
/// LMDB key for a single chat message.
/// Format: "chat:conv:{conversation_id}:msg:{message_id}"
pub fn message_key(conversation_id: &str, message_id: &str) -> String {
    ["chat:conv:", conversation_id, ":msg:", message_id].concat()
}
/// LMDB key for conversation metadata.
/// Format: "chat:conv:{conversation_id}:meta"
pub fn conversation_key(conversation_id: &str) -> String {
    let mut key = String::from("chat:conv:");
    key.push_str(conversation_id);
    key.push_str(":meta");
    key
}
/// LMDB key prefix shared by every message in a conversation
/// (prefix-scan range for loading a conversation's history).
pub fn conversation_prefix(conversation_id: &str) -> String {
    let mut prefix = String::from("chat:conv:");
    prefix.push_str(conversation_id);
    prefix.push_str(":msg:");
    prefix
}
/// LMDB key for chat engine metadata (conversation list).
/// Returns a &'static str rather than String because the key is fixed.
pub fn chat_meta_key() -> &'static str {
    "chat:meta"
}
// ============================================================================
// MESH TRANSPORT HELPERS
// ============================================================================
/// Serialize a chat message for mesh transport (StreamType::ChatText = 0x02).
/// This is the payload placed inside a framing::Frame — the same JSON
/// encoding used for LMDB storage.
pub fn message_to_bytes(msg: &ChatMessage) -> Result<Vec<u8>, String> {
    match serde_json::to_vec(msg) {
        Ok(payload) => Ok(payload),
        Err(err) => Err(format!("Serialize error: {}", err)),
    }
}
/// Deserialize a chat message received from mesh transport.
pub fn message_from_bytes(data: &[u8]) -> Result<ChatMessage, String> {
    match serde_json::from_slice(data) {
        Ok(msg) => Ok(msg),
        Err(err) => Err(format!("Deserialize error: {}", err)),
    }
}
// ============================================================================
// MESH STREAM HANDLER
// ============================================================================
/// Handle an incoming ChatText mesh frame.
/// Parses the ChatMessage from the frame payload, logs receipt,
/// and returns an acknowledgment frame. Zero silent drops: both the success
/// path and the parse-error path answer with a frame.
///
/// A `transformer` of None, or `chat_enabled == false`, means acknowledge-only
/// (no FLINT auto-response is generated).
///
/// Called from: mesh.rs stream_router() for StreamType::ChatText (0x02)
pub fn handle_mesh_chat(
    frame: &crate::framing::Frame,
    peer_key: &str,
    transformer: &Option<std::sync::Arc<std::sync::RwLock<crate::transformer_tools::TransformerState>>>,
) -> Option<crate::framing::Frame> {
    match message_from_bytes(&frame.payload) {
        Ok(msg) => {
            // Log receipt; message text preview is truncated to 50 chars.
            eprintln!("[SPF-CHAT] Received from {}: {} (conv: {})",
                &peer_key[..8.min(peer_key.len())],
                msg.text.chars().take(50).collect::<String>(),
                msg.conversation_id);
            // Block QQ: Check chat_enabled + rate limit before FLINT auto-response
            let should_respond = {
                let can_respond = transformer.as_ref().map_or(false, |t| {
                    let state = t.read().unwrap();
                    state.chat_enabled
                });
                if can_respond {
                    // Rate limit: max 1 FLINT response per 5 seconds per peer.
                    // A poisoned mutex is recovered via into_inner(): the map
                    // holds only timestamps, so stale data is harmless.
                    let mut limiter = FLINT_RATE_LIMIT.lock().unwrap_or_else(|e| e.into_inner());
                    // Lazy init: the map is created on first use (see FLINT_RATE_LIMIT).
                    let limits = limiter.get_or_insert_with(HashMap::new);
                    let now = std::time::Instant::now();
                    let peer_id = peer_key.to_string();
                    if let Some(last) = limits.get(&peer_id) {
                        if now.duration_since(*last).as_secs() < 5 {
                            // Within the 5-second window: suppress the response
                            // (the acknowledgment frame below is still sent).
                            eprintln!("[{}] Rate limited for peer {}", FLINT_NAME,
                                &peer_key[..8.min(peer_key.len())]);
                            false
                        } else {
                            limits.insert(peer_id, now);
                            true
                        }
                    } else {
                        limits.insert(peer_id, now);
                        true
                    }
                } else {
                    false
                }
            };
            // Generate FLINT response with context injection + quality scoring (Block FF)
            // Block QQ: Only when chat_enabled=true AND rate limit allows
            let response_text = if should_respond {
                let flint_result = generate_flint_response(
                    &msg.text,
                    None, // Conversation context injected when ChatEngine wired to mesh
                    transformer,
                    true, // Use brain for context enrichment
                );
                flint_result.and_then(|r| {
                    match r.quality {
                        ResponseQuality::Accept { score } => {
                            eprintln!("[{}] Generated response (quality: {:.2}, ctx: {} tok, gen: {} tok)",
                                FLINT_NAME, score, r.context_tokens_used, r.generation_tokens);
                            Some(format!("[{}] {}", FLINT_NAME, r.text)) // Block QQ: prefix
                        }
                        ResponseQuality::Reject { reason, score } => {
                            // Rejected output is dropped; the ack still goes out
                            // with "generated": false.
                            eprintln!("[{}] Response rejected (score: {:.2}): {}",
                                FLINT_NAME, score, reason);
                            None
                        }
                    }
                })
            } else {
                None
            };
            // Acknowledgment frame — always sent; any generated response is
            // piggybacked in the "response"/"generated" fields.
            let response = serde_json::json!({
                "type": "chat_response",
                "message_id": msg.id,
                "conversation_id": msg.conversation_id,
                "from": peer_key,
                "status": "received",
                "response": response_text.as_deref().unwrap_or(""),
                "generated": response_text.is_some(),
            });
            Some(crate::framing::Frame::chat(&response.to_string()))
        }
        Err(e) => {
            // Parse failure is reported back to the sender — zero silent drops.
            eprintln!("[SPF-CHAT] Failed to parse chat message from {}: {}", &peer_key[..8.min(peer_key.len())], e);
            let err = serde_json::json!({
                "type": "chat_error",
                "error": format!("Parse error: {}", e),
                "from": peer_key,
            });
            Some(crate::framing::Frame::chat(&err.to_string()))
        }
    }
}
// ============================================================================
// CALL SIGNALING — Voice call establishment over mesh (Block GG)
// ============================================================================
/// Voice call signal — sent via mesh to establish/teardown voice calls.
/// Transported as a ChatMessage with msg_type = System whose text field
/// holds the JSON-encoded signal (see to_chat_message / from_chat_message).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum CallSignal {
    /// Initiate a voice call (caller → receiver)
    Ring { caller: String },
    /// Accept an incoming call (receiver → caller)
    Accept { callee: String },
    /// Reject an incoming call (receiver → caller), with a reason string
    Reject { callee: String, reason: String },
    /// End an active call (either party), with a reason string
    Hangup { from: String, reason: String },
}
impl CallSignal {
    /// Encode the signal as JSON for mesh transport.
    /// Yields an empty string if serialization fails.
    pub fn to_json(&self) -> String {
        serde_json::to_string(self).unwrap_or_else(|_| String::new())
    }

    /// Decode a signal from its JSON representation.
    pub fn from_json(s: &str) -> Result<Self, String> {
        match serde_json::from_str(s) {
            Ok(signal) => Ok(signal),
            Err(err) => Err(format!("CallSignal parse: {}", err)),
        }
    }

    /// Wrap this signal in a System ChatMessage so it can ride the
    /// normal chat transport.
    pub fn to_chat_message(&self, from: &str, to: &str, conversation_id: &str) -> ChatMessage {
        ChatMessage {
            id: format!("call_{}", chrono::Utc::now().timestamp_millis()),
            from: from.to_string(),
            to: to.to_string(),
            text: self.to_json(),
            timestamp: chrono::Utc::now().to_rfc3339(),
            conversation_id: conversation_id.to_string(),
            msg_type: MessageType::System,
        }
    }

    /// Extract a call signal from a ChatMessage, if it carries one.
    /// Only System messages are candidates; anything else yields None.
    pub fn from_chat_message(msg: &ChatMessage) -> Option<Self> {
        match msg.msg_type {
            MessageType::System => Self::from_json(&msg.text).ok(),
            _ => None,
        }
    }
}
// ============================================================================
// PEER PRESENCE — Online/offline tracking for mesh peers (Block GG)
// ============================================================================
/// Peer presence status
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum PeerStatus {
    /// Reachable and free — the only state where is_available() is true
    Online,
    /// Reachable but occupied
    Busy,
    /// Currently in a voice call (Block GG call signaling)
    InCall,
    /// Not reachable
    Offline,
}
/// Tracked presence for a mesh peer
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PeerPresence {
    /// Full peer public-key hash
    pub peer_key: String,
    /// First 8 chars of peer_key, for display/logging
    pub short_key: String,
    /// Current availability state
    pub status: PeerStatus,
    /// RFC3339 timestamp of the last observed activity
    pub last_seen: String,
    /// Optional human-readable display name (None until learned)
    pub name: Option<String>,
}
impl PeerPresence {
    /// Begin tracking a newly-seen peer; it starts as Online with
    /// last_seen set to now.
    pub fn new(peer_key: &str) -> Self {
        let cut = peer_key.len().min(8);
        Self {
            peer_key: peer_key.to_string(),
            short_key: peer_key[..cut].to_string(),
            status: PeerStatus::Online,
            last_seen: chrono::Utc::now().to_rfc3339(),
            name: None,
        }
    }

    /// Refresh the last-seen timestamp to now.
    pub fn update_seen(&mut self) {
        self.last_seen = chrono::Utc::now().to_rfc3339();
    }

    /// Change the status; any status change also counts as activity.
    pub fn set_status(&mut self, status: PeerStatus) {
        self.status = status;
        self.update_seen();
    }

    /// A peer is available only when Online (not Busy/InCall/Offline).
    pub fn is_available(&self) -> bool {
        self.status == PeerStatus::Online
    }

    /// Render the presence record as a JSON value for MCP/UI consumers.
    pub fn to_json_value(&self) -> serde_json::Value {
        serde_json::json!({
            "peer_key": self.peer_key,
            "short_key": self.short_key,
            "status": format!("{:?}", self.status),
            "last_seen": self.last_seen,
            "name": self.name,
        })
    }
}
// ============================================================================
// FLINT — Enhanced Chat Generation (Block FF)
// Focused Learning Intelligence for Network Threats
// Copyright 2026 Joseph Stone - All Rights Reserved
// ============================================================================
/// FLINT identity — used as the log tag and response prefix in handle_mesh_chat
pub const FLINT_NAME: &str = "FLINT";
/// FLINT semantic version
pub const FLINT_VERSION: &str = "0.1.0";
/// Quality assessment for a generated response.
/// Scores lie in [0.0, 1.0] as produced by score_response
/// (length and diversity terms are each capped at 1.0).
#[derive(Debug, Clone, PartialEq)]
pub enum ResponseQuality {
    /// Good quality — send to user
    Accept { score: f32 },
    /// Poor quality — discard, use fallback
    Reject { reason: String, score: f32 },
}
/// A FLINT-generated response with metadata
#[derive(Debug, Clone)]
pub struct FlintResponse {
    /// Generated text
    pub text: String,
    /// Quality assessment (Accept/Reject with score — see score_response)
    pub quality: ResponseQuality,
    /// Tokens consumed by context (conversation + brain), measured by
    /// re-encoding the prompt with the tokenizer
    pub context_tokens_used: usize,
    /// Tokens generated in response
    pub generation_tokens: usize,
}
/// Build a structured prompt with conversation context and optional brain knowledge.
///
/// Format:
/// <context> [brain knowledge] </context>
/// [conversation history]
/// <user> latest_message <assistant>
///
/// Context is trimmed to max_context_chars to prevent prompt overflow.
/// Fix: brain-context truncation is now UTF-8 safe — the cut index is floored
/// to a char boundary. The previous `&brain[..max_context_chars / 2]` slice
/// panicked when the budget landed inside a multi-byte character.
pub fn build_prompt_with_context(
    user_message: &str,
    conversation_context: Option<&str>,
    brain_context: Option<&str>,
    max_context_chars: usize,
) -> String {
    let mut prompt = String::new();
    // Brain context first (knowledge base), capped at half the budget.
    if let Some(brain) = brain_context {
        let budget = max_context_chars / 2;
        let trimmed = if brain.len() > budget {
            // Floor the cut to a char boundary so the slice cannot panic.
            let mut end = budget;
            while end > 0 && !brain.is_char_boundary(end) {
                end -= 1;
            }
            &brain[..end]
        } else {
            brain
        };
        prompt.push_str("<context> ");
        prompt.push_str(trimmed);
        prompt.push_str(" </context>\n");
    }
    // Conversation history (recent messages), capped at the other half.
    if let Some(conv) = conversation_context {
        let summarized = summarize_long_context(conv, max_context_chars / 2);
        if !summarized.is_empty() {
            prompt.push_str(&summarized);
            prompt.push('\n');
        }
    }
    // Current user message + generation trigger token.
    prompt.push_str(&format!("<user> {} <assistant>", user_message));
    prompt
}
/// Score response quality. Returns Accept/Reject with reasoning.
///
/// Checks, in order:
/// 1. Minimum length (>= 3 chars after trimming)
/// 2. Repetition detection (any trigram repeated 3+ times = reject)
/// 3. Combined length + diversity score (reject below 0.2)
pub fn score_response(response: &str) -> ResponseQuality {
    let text = response.trim();
    // Guard: reject empty or near-empty output.
    if text.len() < 3 {
        return ResponseQuality::Reject {
            reason: "Response too short".into(),
            score: 0.0,
        };
    }
    let words: Vec<&str> = text.split_whitespace().collect();
    // Guard: reject degenerate generation loops — only meaningful with at
    // least 9 words (enough for a trigram to repeat 3 times).
    if words.len() >= 9 {
        let mut seen: HashMap<String, usize> = HashMap::new();
        let mut worst = 0usize;
        for window in words.windows(3) {
            let key = window.join(" ").to_lowercase();
            let count = seen.entry(key).or_insert(0);
            *count += 1;
            worst = worst.max(*count);
        }
        if worst >= 3 {
            return ResponseQuality::Reject {
                reason: format!("Excessive repetition (trigram repeated {}x)", worst),
                score: 0.1,
            };
        }
    }
    // Score = 40% length (saturating at 200 chars) + 60% lexical diversity
    // (unique words / total words).
    let length_score = (text.len() as f32 / 200.0).min(1.0);
    let distinct: HashSet<&str> = words.iter().copied().collect();
    let diversity = if words.is_empty() {
        0.0
    } else {
        distinct.len() as f32 / words.len() as f32
    };
    let score = (length_score * 0.4 + diversity * 0.6).min(1.0);
    if score < 0.2 {
        ResponseQuality::Reject {
            reason: "Low quality score".into(),
            score,
        }
    } else {
        ResponseQuality::Accept { score }
    }
}
/// Summarize conversation context if it exceeds max_chars.
///
/// Strategy: keep first 2 messages (topic establishment) + separator +
/// as many recent messages as fit within budget.
///
/// Fix: the hard-truncation path (<= 4 lines) is now UTF-8 safe — the cut
/// index is floored to a char boundary. The previous `context[..max_chars]`
/// slice panicked when max_chars landed inside a multi-byte character.
pub fn summarize_long_context(context: &str, max_chars: usize) -> String {
    // Fits as-is: nothing to do.
    if context.len() <= max_chars {
        return context.to_string();
    }
    let lines: Vec<&str> = context.lines().collect();
    if lines.len() <= 4 {
        // Too few lines to summarize structurally — hard-truncate, flooring
        // the cut to a char boundary so the byte slice cannot panic.
        let mut end = max_chars;
        while end > 0 && !context.is_char_boundary(end) {
            end -= 1;
        }
        return context[..end].to_string();
    }
    // Keep first 2 lines (topic) + the most recent lines that fit.
    let header: String = lines[..2].join("\n");
    let separator = "\n[...]\n";
    let remaining = max_chars.saturating_sub(header.len() + separator.len());
    let mut tail_lines = Vec::new();
    let mut tail_len = 0;
    for line in lines.iter().rev() {
        // +1 accounts for the newline join() will insert between lines.
        if tail_len + line.len() + 1 > remaining {
            break;
        }
        tail_lines.push(*line);
        tail_len += line.len() + 1;
    }
    // Collected newest-first; restore chronological order.
    tail_lines.reverse();
    format!("{}{}{}", header, separator, tail_lines.join("\n"))
}
/// Search brain for context relevant to a query.
/// Runs the brain CLI synchronously. Returns None if brain unavailable.
// Copyright 2026 Joseph Stone — All Rights Reserved
pub fn search_brain_for_context(query: &str, _max_results: usize) -> Option<String> {
    let result = crate::brain_local::brain_context(query, "default", 2000);
    // These sentinel prefixes from brain_context mean "no usable context".
    let unavailable = result.starts_with("Brain not initialized")
        || result.starts_with("No context");
    if unavailable {
        None
    } else {
        Some(result)
    }
}
/// Generate a complete FLINT response with context injection and quality scoring.
///
/// Pipeline:
/// 1. Load tokenizer
/// 2. Search brain for relevant context (optional)
/// 3. Build structured prompt (brain + conversation + user message)
/// 4. Generate via transformer (both writer and researcher roles)
/// 5. Score response quality (accept/reject)
///
/// Both writer and researcher roles can generate. Writer uses lower temperature
/// for more focused responses. Researcher uses higher for conversational flow.
///
/// Returns None if the transformer is absent, its lock is poisoned, the
/// tokenizer fails to load, or generation itself errors.
pub fn generate_flint_response(
    user_message: &str,
    conversation_context: Option<&str>,
    transformer: &Option<std::sync::Arc<std::sync::RwLock<crate::transformer_tools::TransformerState>>>,
    use_brain: bool,
) -> Option<FlintResponse> {
    let t = transformer.as_ref()?;
    // read().ok()? — a poisoned lock quietly yields None rather than panicking.
    let state = t.read().ok()?;
    // Load tokenizer
    let tokenizer_path = crate::paths::spf_root().join("LIVE/MODELS/tokenizer.json");
    let tokenizer = crate::tokenizer::Tokenizer::load(
        &tokenizer_path.to_string_lossy()
    ).ok()?;
    // Brain context injection (optional)
    let brain_context = if use_brain {
        search_brain_for_context(user_message, 3)
    } else {
        None
    };
    // Build structured prompt (2000-char context budget, split between
    // brain and conversation halves — see build_prompt_with_context)
    let prompt = build_prompt_with_context(
        user_message,
        conversation_context,
        brain_context.as_deref(),
        2000,
    );
    // Token budget: leave room for generation within model capacity
    // (512-token window; generation capped at 128 tokens).
    let context_tokens = tokenizer.encode(&prompt).len();
    let max_gen_tokens = 128.min(512_usize.saturating_sub(context_tokens));
    // Temperature by role — writer more focused, researcher more creative
    let temperature = match state.role.as_str() {
        "writer" => 0.5,
        _ => 0.7,
    };
    // Generate — use training_step as seed for variety across sessions
    let result = state.model.generate_text(
        &prompt,
        max_gen_tokens,
        temperature,
        state.training_step,
        &tokenizer,
    ).ok()?;
    // Quality scoring
    let quality = score_response(&result);
    let gen_tokens = tokenizer.encode(&result).len();
    Some(FlintResponse {
        text: result,
        quality,
        context_tokens_used: context_tokens,
        generation_tokens: gen_tokens,
    })
}
// ============================================================================
// TESTS
// ============================================================================
#[cfg(test)]
mod tests {
use super::*;
fn test_timestamp() -> String {
"2026-02-28T12:00:00Z".to_string()
}
fn make_msg(from: &str, to: &str, text: &str, conv_id: &str) -> ChatMessage {
ChatMessage {
id: format!("msg_{}", text.len()),
from: from.to_string(),
to: to.to_string(),
text: text.to_string(),
timestamp: test_timestamp(),
conversation_id: conv_id.to_string(),
msg_type: MessageType::UserText,
}
}
// --- Conversation tests ---
#[test]
fn test_conversation_add_message() {
let mut conv = Conversation::new(
"conv1".into(), "Test".into(), vec!["alice".into(), "bob".into()],
test_timestamp(),
);
conv.add_message(make_msg("alice", "bob", "hello", "conv1"));
conv.add_message(make_msg("bob", "alice", "hi there", "conv1"));
assert_eq!(conv.message_count(), 2);
assert_eq!(conv.messages[0].text, "hello");
assert_eq!(conv.messages[1].text, "hi there");
}
#[test]
fn test_conversation_context_window() {
let mut conv = Conversation::new(
"conv1".into(), "Test".into(), vec!["a".into()], test_timestamp(),
);
conv.context_window = 3;
for i in 0..10 {
conv.add_message(make_msg("user", "agent", &format!("msg {}", i), "conv1"));
}
let ctx = conv.context_messages();
assert_eq!(ctx.len(), 3);
assert_eq!(ctx[0].text, "msg 7");
assert_eq!(ctx[2].text, "msg 9");
}
#[test]
fn test_conversation_context_as_text() {
let mut conv = Conversation::new(
"c1".into(), "Test".into(), vec!["user".into()], test_timestamp(),
);
conv.context_window = 2;
conv.add_message(make_msg("user", "agent", "what is SPF?", "c1"));
conv.add_message(make_msg("agent", "user", "SPF is Smart Gateway", "c1"));
let text = conv.context_as_text();
assert!(text.contains("user: what is SPF?"));
assert!(text.contains("agent: SPF is Smart Gateway"));
}
#[test]
fn test_conversation_memory_trim() {
let mut conv = Conversation::new(
"c1".into(), "Test".into(), vec!["a".into()], test_timestamp(),
);
conv.max_memory_messages = 5;
for i in 0..20 {
conv.add_message(make_msg("user", "agent", &format!("msg {}", i), "c1"));
}
assert_eq!(conv.message_count(), 5);
assert_eq!(conv.messages[0].text, "msg 15"); // oldest retained
}
#[test]
fn test_conversation_participants() {
let conv = Conversation::new(
"c1".into(), "Test".into(),
vec!["alice".into(), "bob".into()], test_timestamp(),
);
assert!(conv.has_participant("alice"));
assert!(conv.has_participant("bob"));
assert!(!conv.has_participant("charlie"));
}
// --- Chat Engine tests ---
#[test]
fn test_engine_receive_creates_conversation() {
let mut engine = ChatEngine::new("local_agent".into());
assert_eq!(engine.conversation_count(), 0);
engine.receive_message(make_msg("alice", "local_agent", "hello", "conv1"));
assert_eq!(engine.conversation_count(), 1);
}
#[test]
fn test_engine_receive_routes_to_existing() {
let mut engine = ChatEngine::new("agent".into());
engine.receive_message(make_msg("alice", "agent", "hello", "room1"));
engine.receive_message(make_msg("bob", "agent", "hi", "room1"));
engine.receive_message(make_msg("alice", "agent", "how are you", "room1"));
assert_eq!(engine.conversation_count(), 1);
let conv = engine.get_conversation("room1").unwrap();
assert_eq!(conv.message_count(), 3);
}
#[test]
fn test_engine_multiple_conversations() {
let mut engine = ChatEngine::new("agent".into());
engine.receive_message(make_msg("alice", "agent", "hello", "conv1"));
engine.receive_message(make_msg("bob", "agent", "hi", "conv2"));
assert_eq!(engine.conversation_count(), 2);
assert_eq!(engine.total_messages(), 2);
}
#[test]
fn test_engine_create_message() {
let engine = ChatEngine::new("531d83fa".into());
let msg = engine.create_message("alice", "Hello!", "conv1", "2026-02-28T12:00:00Z");
assert_eq!(msg.from, "531d83fa");
assert_eq!(msg.to, "alice");
assert_eq!(msg.text, "Hello!");
assert_eq!(msg.conversation_id, "conv1");
assert_eq!(msg.msg_type, MessageType::AgentResponse);
}
#[test]
fn test_engine_get_context() {
let mut engine = ChatEngine::new("agent".into());
engine.receive_message(make_msg("user", "agent", "tell me about SPF", "c1"));
let ctx = engine.get_context("c1").unwrap();
assert!(ctx.contains("tell me about SPF"));
assert!(engine.get_context("nonexistent").is_none());
}
#[test]
fn test_engine_get_history() {
let mut engine = ChatEngine::new("agent".into());
for i in 0..10 {
engine.receive_message(make_msg("user", "agent", &format!("msg {}", i), "c1"));
}
let history = engine.get_history("c1", 3);
assert_eq!(history.len(), 3);
assert_eq!(history[0].text, "msg 7");
}
#[test]
fn test_engine_list_conversations() {
let mut engine = ChatEngine::new("agent".into());
engine.receive_message(make_msg("alice", "agent", "hi", "room1"));
engine.receive_message(make_msg("bob", "agent", "hey", "room2"));
let list = engine.list_conversations();
assert_eq!(list.len(), 2);
assert_eq!(list[0].id, "room1");
assert_eq!(list[1].id, "room2");
}
// --- Serialization tests ---
#[test]
fn test_message_serialize_roundtrip() {
let msg = make_msg("alice", "bob", "hello world", "conv1");
let bytes = serialize_message(&msg).unwrap();
let loaded = deserialize_message(&bytes).unwrap();
assert_eq!(loaded.from, "alice");
assert_eq!(loaded.text, "hello world");
assert_eq!(loaded.conversation_id, "conv1");
}
#[test]
fn test_mesh_transport_roundtrip() {
let msg = make_msg("peer_a", "peer_b", "mesh message", "mesh_conv");
let bytes = message_to_bytes(&msg).unwrap();
let loaded = message_from_bytes(&bytes).unwrap();
assert_eq!(loaded.from, "peer_a");
assert_eq!(loaded.text, "mesh message");
}
#[test]
fn test_lmdb_keys() {
assert_eq!(message_key("conv1", "msg42"), "chat:conv:conv1:msg:msg42");
assert_eq!(conversation_key("conv1"), "chat:conv:conv1:meta");
assert_eq!(conversation_prefix("conv1"), "chat:conv:conv1:msg:");
assert_eq!(chat_meta_key(), "chat:meta");
}
#[test]
fn test_message_types() {
let user_msg = make_msg("user", "agent", "hello", "c1");
assert_eq!(user_msg.msg_type, MessageType::UserText);
let engine = ChatEngine::new("agent".into());
let agent_msg = engine.create_message("user", "hi", "c1", "t");
assert_eq!(agent_msg.msg_type, MessageType::AgentResponse);
}
// --- Block GG: CallSignal tests ---
#[test]
fn test_call_signal_roundtrip() {
let signal = CallSignal::Ring { caller: "alice".into() };
let json = signal.to_json();
let parsed = CallSignal::from_json(&json).unwrap();
assert_eq!(parsed, CallSignal::Ring { caller: "alice".into() });
}
#[test]
fn test_call_signal_all_variants() {
let signals = vec![
CallSignal::Ring { caller: "a".into() },
CallSignal::Accept { callee: "b".into() },
CallSignal::Reject { callee: "c".into(), reason: "busy".into() },
CallSignal::Hangup { from: "d".into(), reason: "done".into() },
];
for signal in &signals {
let json = signal.to_json();
assert!(!json.is_empty());
let parsed = CallSignal::from_json(&json).unwrap();
assert_eq!(&parsed, signal);
}
}
#[test]
fn test_call_signal_to_chat_message() {
let signal = CallSignal::Ring { caller: "alice".into() };
let msg = signal.to_chat_message("alice", "bob", "call_1");
assert_eq!(msg.from, "alice");
assert_eq!(msg.to, "bob");
assert_eq!(msg.conversation_id, "call_1");
assert_eq!(msg.msg_type, MessageType::System);
assert!(msg.text.contains("Ring"));
}
#[test]
fn test_call_signal_from_chat_message() {
    // A signal embedded in a chat message can be recovered intact.
    let carrier = CallSignal::Accept { callee: "bob".into() }
        .to_chat_message("bob", "alice", "call_1");
    let recovered = CallSignal::from_chat_message(&carrier).unwrap();
    assert_eq!(recovered, CallSignal::Accept { callee: "bob".into() });
}
#[test]
fn test_call_signal_from_non_system_message() {
    // Ordinary UserText messages must not parse as call signals.
    let plain = make_msg("user", "agent", "hello", "c1");
    assert!(CallSignal::from_chat_message(&plain).is_none());
}
#[test]
fn test_call_signal_parse_error() {
    // Malformed JSON yields an Err rather than panicking.
    assert!(CallSignal::from_json("not valid json").is_err());
}
// --- Block GG: PeerPresence tests ---
#[test]
fn test_peer_presence_new() {
    // A fresh presence derives an 8-char short key from the full key,
    // starts Online/available, and carries no display name yet.
    let fresh = PeerPresence::new("abcdef1234567890");
    assert_eq!(fresh.short_key, "abcdef12");
    assert_eq!(fresh.status, PeerStatus::Online);
    assert!(fresh.is_available());
    assert!(fresh.name.is_none());
}
#[test]
fn test_peer_presence_status_changes() {
    // Only Online counts as available; Busy and InCall both do not,
    // and returning to Online restores availability.
    let mut peer = PeerPresence::new("1234567890abcdef");
    assert!(peer.is_available());

    peer.set_status(PeerStatus::Busy);
    assert_eq!(peer.status, PeerStatus::Busy);
    assert!(!peer.is_available());

    peer.set_status(PeerStatus::InCall);
    assert_eq!(peer.status, PeerStatus::InCall);
    assert!(!peer.is_available());

    peer.set_status(PeerStatus::Online);
    assert!(peer.is_available());
}
#[test]
fn test_peer_presence_to_json() {
    // JSON export exposes the truncated 8-char key and a spelled-out status.
    let exported = PeerPresence::new("deadbeef12345678").to_json_value();
    assert_eq!(exported["short_key"], "deadbeef");
    assert_eq!(exported["status"], "Online");
}
#[test]
fn test_peer_status_all_variants() {
    // serde round trip must be lossless for every status variant.
    let all = [
        PeerStatus::Online,
        PeerStatus::Busy,
        PeerStatus::InCall,
        PeerStatus::Offline,
    ];
    for original in &all {
        let encoded = serde_json::to_string(original).unwrap();
        let decoded: PeerStatus = serde_json::from_str(&encoded).unwrap();
        assert_eq!(&decoded, original);
    }
}
// --- Block FF: FLINT Enhanced Generation tests ---
#[test]
fn test_flint_identity() {
    // Agent name is a fixed constant; version just has to be non-empty.
    assert_eq!(FLINT_NAME, "FLINT");
    assert!(!FLINT_VERSION.is_empty());
}
#[test]
fn test_build_prompt_basic() {
    // With no history and no brain context, the prompt is just one tagged turn.
    let built = build_prompt_with_context("hello", None, None, 2000);
    assert_eq!(built, "<user> hello <assistant>");
}
#[test]
fn test_build_prompt_with_conversation() {
    // Prior turns are included, and the prompt still ends with the new
    // user turn awaiting the assistant's completion.
    let history = "user: what is SPF?\nagent: SPF is Smart Gateway";
    let built = build_prompt_with_context("tell me more", Some(history), None, 2000);
    assert!(built.contains("what is SPF?"));
    assert!(built.ends_with("<user> tell me more <assistant>"));
}
#[test]
fn test_build_prompt_with_brain() {
    // Brain knowledge is wrapped in a <context> section ahead of the user turn.
    let knowledge = "SPF is a Rust MCP gateway with Ed25519 identity";
    let built = build_prompt_with_context("what is SPF?", None, Some(knowledge), 2000);
    for needle in ["<context>", "Ed25519", "<user> what is SPF? <assistant>"] {
        assert!(built.contains(needle));
    }
}
#[test]
fn test_build_prompt_with_both() {
    // Conversation history and brain context can coexist in one prompt.
    let history = "user: hello\nagent: hi";
    let knowledge = "relevant knowledge";
    let built = build_prompt_with_context("question", Some(history), Some(knowledge), 2000);
    for needle in ["<context>", "relevant knowledge", "hello"] {
        assert!(built.contains(needle));
    }
    assert!(built.ends_with("<user> question <assistant>"));
}
#[test]
fn test_score_response_accept() {
    // A substantive, varied sentence scores above the acceptance floor.
    let good = "SPF is a secure gateway system that routes tool calls through complexity enforcement";
    let ResponseQuality::Accept { score } = score_response(good) else {
        panic!("Expected accept");
    };
    assert!(score > 0.2);
}
#[test]
fn test_score_response_reject_empty() {
    // An empty string is rejected as too short.
    let ResponseQuality::Reject { reason, .. } = score_response("") else {
        panic!("Expected reject");
    };
    assert!(reason.contains("too short"));
}
#[test]
fn test_score_response_reject_short() {
    // A two-character reply is also rejected as too short.
    let ResponseQuality::Reject { reason, .. } = score_response("hi") else {
        panic!("Expected reject");
    };
    assert!(reason.contains("too short"));
}
#[test]
fn test_score_response_reject_repetition() {
    // Degenerate single-token loops are rejected for repetition.
    let looped = "the the the the the the the the the the the the";
    let ResponseQuality::Reject { reason, .. } = score_response(looped) else {
        panic!("Expected reject for repetition");
    };
    assert!(reason.contains("repetition"));
}
#[test]
fn test_summarize_short_context() {
    // Context already under the budget is returned untouched.
    let brief = "user: hello\nagent: hi";
    assert_eq!(summarize_long_context(brief, 1000), brief);
}
#[test]
fn test_summarize_long_context_truncates() {
    // Over-budget context is shrunk: the earliest line survives and the
    // elided middle is marked with "[...]"; length stays near the budget.
    let long: String = (0..100)
        .map(|i| format!("user: message number {} with some content\n", i))
        .collect();
    let summarized = summarize_long_context(&long, 200);
    assert!(summarized.len() <= 250); // Allow line boundary overflow
    assert!(summarized.contains("message number 0")); // First preserved
    assert!(summarized.contains("[...]")); // Separator present
}
#[test]
fn test_flint_response_struct() {
    // Plain field round trip on the generation-response struct.
    let resp = FlintResponse {
        text: "Hello world".into(),
        quality: ResponseQuality::Accept { score: 0.8 },
        context_tokens_used: 50,
        generation_tokens: 10,
    };
    assert_eq!(resp.text, "Hello world");
    assert_eq!(resp.context_tokens_used, 50);
    assert_eq!(resp.generation_tokens, 10);
}
#[test]
fn test_persist_conversation_returns_pairs() {
    // A two-message conversation persists to two (key, value) pairs,
    // keyed under the conversation's LMDB message prefix.
    let mut engine = ChatEngine::new("agent".into());
    for (from, to, text) in [("user", "agent", "hello"), ("agent", "user", "hi there")] {
        engine.receive_message(make_msg(from, to, text, "c1"));
    }
    let pairs = engine.persist_conversation("c1").unwrap();
    assert_eq!(pairs.len(), 2);
    assert!(pairs[0].0.starts_with("chat:conv:c1:msg:"));
}
#[test]
fn test_persist_nonexistent_conversation() {
    // Unknown conversation IDs yield None rather than an empty pair list.
    let engine = ChatEngine::new("agent".into());
    assert!(engine.persist_conversation("nonexistent").is_none());
}
}