| """ | |
| Test LangGraph Workflow Integration | |
| """ | |
| import asyncio | |
| from src.llm.langchain_ollama_client import get_langchain_client | |
| from src.workflow.langgraph_workflow import create_workflow | |
| from src.workflow.langgraph_state import ScenarioType | |


def main():
    print("Testing LangGraph Integration...")
    print()

    # Initialize client
    client = get_langchain_client(default_complexity='standard', enable_monitoring=False)
    print("✓ LangChain client created")

    # Create workflow
    workflow = create_workflow(llm_client=client)
    print("✓ Workflow created")
    print()

    print("Available models:")
    for complexity, info in client.list_models().items():
        print(f"  {complexity}: {info['model']} ({info['size_gb']}GB) - {info['description']}")
    print()

    print("✓ All components ready!")
| if __name__ == "__main__": | |
| main() | |