from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
from typing import List
import logging
from openai import OpenAI

from core.database import get_db
from models import db_models, schemas
from api.auth import get_current_user
from services.rag_service import rag_service
from core.config import settings

router = APIRouter(prefix="/api/chat", tags=["AI Conversation"])
logger = logging.getLogger(__name__)


@router.get("/history", response_model=List[schemas.ChatMessageResponse])
async def get_chat_history(
    current_user: db_models.User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """
    Retrieves the full AI conversation history for the current user.
    """
    messages = db.query(db_models.ChatMessage).filter(
        db_models.ChatMessage.user_id == current_user.id
    ).order_by(db_models.ChatMessage.created_at.asc()).all()
    return messages


@router.delete("/history")
async def clear_chat_history(
    current_user: db_models.User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """
    Wipes the conversation history clean (Fresh Start).
    """
    db.query(db_models.ChatMessage).filter(
        db_models.ChatMessage.user_id == current_user.id
    ).delete(synchronize_session=False)
    db.commit()
    return {"message": "All AI conversation history has been cleared."}


@router.post("/query", response_model=schemas.ChatMessageResponse)
async def ask_ai(
    message_in: schemas.ChatMessageCreate,
    current_user: db_models.User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """
    Unified AI endpoint:
    - Use this for general chat.
    - Use this for PDF/document-specific questions (by providing rag_doc_id).

    It automatically manages conversation history and RAG context retrieval.
    """
    try:
        openai_client = OpenAI(api_key=settings.OPENAI_API_KEY)
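
        # Load the ten most recent messages (fetched newest-first, then
        # reversed back into chronological order for the prompt).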
        history = db.query(db_models.ChatMessage).filter(
            db_models.ChatMessage.user_id == current_user.id
        ).order_by(db_models.ChatMessage.id.desc()).limit(10).all()
        history.reverse()
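
        # Persist the incoming user message before calling the model.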
        user_msg = db_models.ChatMessage(
            user_id=current_user.id,
            role="user",
            content=message_in.query,
            rag_doc_id=message_in.rag_doc_id
        )
        db.add(user_msg)
        db.commit()
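
        # If a document is referenced, pull the top-matching chunks from
        # the user's indexed document to ground the answer.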
| context = "" |
| doc_filename = "" |
| if message_in.rag_doc_id: |
| rag_doc = db.query(db_models.RAGDocument).filter( |
| db_models.RAGDocument.id == message_in.rag_doc_id, |
| db_models.RAGDocument.user_id == current_user.id |
| ).first() |
| if rag_doc: |
| doc_filename = rag_doc.filename |
| results = rag_service.search_document( |
| query=message_in.query, |
| doc_id=rag_doc.azure_doc_id, |
| user_id=current_user.id, |
| top_k=5 |
| ) |
| context = "\n\n".join([r["content"] for r in results]) |
|
|
| |
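
        # Assemble the prompt, starting with the system instructions.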
        llm_messages = [
            {
                "role": "system",
                "content": (
                    "You are a helpful AI assistant on the CreatorStudio platform. "
                    "Use the provided conversation history and document context to answer the user. "
                    "If the user refers to 'last message' or 'previous context', look at the history provided below."
                )
            }
        ]
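
        # Replay recent history so the model can resolve references to
        # earlier turns.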
        for msg in history:
            llm_messages.append({"role": msg.role, "content": msg.content})
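
        # Inject the retrieved document context, if any.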
        if context:
            llm_messages.append({
                "role": "system",
                "content": f"REFERENTIAL KNOWLEDGE FROM DOCUMENT '{doc_filename}':\n\n{context}"
            })
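
        # Finally, append the current question.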
        llm_messages.append({"role": "user", "content": message_in.query})
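
        # Generate the reply.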
        response = openai_client.chat.completions.create(
            model="gpt-4o-mini",
            messages=llm_messages,
            temperature=0.7
        )
        ai_response_text = response.choices[0].message.content
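
        # Persist the assistant's reply so it becomes part of the history.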
        assistant_msg = db_models.ChatMessage(
            user_id=current_user.id,
            role="assistant",
            content=ai_response_text,
            rag_doc_id=message_in.rag_doc_id
        )
        db.add(assistant_msg)
        db.commit()
        db.refresh(assistant_msg)

        return assistant_msg

    except Exception as e:
        # Roll back any uncommitted session state, log the full traceback,
        # and surface a 500 to the client.
        db.rollback()
        logger.exception(f"Unified AI Query failed: {e}")
        raise HTTPException(status_code=500, detail=f"AI Error: {str(e)}")