from fastapi import FastAPI, HTTPException
import logging
import os
from typing import Optional

import requests
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)  # module-level logger shared by all endpoints

app = FastAPI()

# Enable CORS
# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is
# disallowed by the CORS spec (browsers reject wildcard origins when
# credentials are sent); consider listing the concrete frontend origin(s)
# instead — TODO confirm intended deployment.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Allow all origins, or replace with your frontend URL
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Read OpenRouter API key from environment variable (HF Secret)
OPENROUTER_API_KEY = os.environ.get("OPENROUTER_API_KEY")
# Fail fast at import time: every endpoint depends on this key.
if not OPENROUTER_API_KEY:
    raise RuntimeError("OPENROUTER_API_KEY environment variable not set!")
# Request schema shared by /explain and /classify.
class ExplainRequest(BaseModel):
    """Payload for classification and explanation requests.

    Attributes:
        message: The text to classify/explain. Required.
        label: Classification label ("Phishing"/"Safe"); only /explain uses it.
        model_id: OpenRouter model identifier; callers may override the default.
    """
    message: str
    # FIX: the field is optional, so annotate it as Optional[str] — the
    # original `label: str = None` pairs a non-optional type with a None
    # default, which static checkers and Pydantic v2 strict mode reject.
    label: Optional[str] = None  # Optional, for explanations
    model_id: str = "openai/gpt-oss-120b:free"  # Default model, can be overridden
@app.get("/")
def root():
    """Service banner: report health and enumerate the available endpoints."""
    endpoint_index = {
        "/explain": "POST - Generate explanation for classification",
        "/classify": "POST - Classify text as Phishing or Safe",
        "/health": "GET - Health check",
    }
    return {
        "status": "healthy",
        "service": "Anti-Phishing Explainer",
        "endpoints": endpoint_index,
    }
@app.get("/health")
def health():
    """Liveness probe; also reports whether the OpenRouter key is configured."""
    key_present = bool(OPENROUTER_API_KEY)
    payload = {"status": "healthy", "service": "explainer"}
    payload["openrouter_configured"] = key_present
    return payload
@app.post("/explain")
def explain(req: ExplainRequest):
    """Generate a human-readable explanation for why a message was classified as Phishing or Safe.

    Args:
        req: ExplainRequest carrying the message, its classification label,
            and an optional model override.

    Returns:
        dict with a single "reply" key holding the bullet-point explanation.

    Raises:
        HTTPException 400: message or label is missing/blank.
        HTTPException 500: OpenRouter network error, API error, or any
            unexpected failure.
    """
    user_message = req.message.strip()
    label = req.label.strip() if req.label else None
    if not user_message or not label:
        raise HTTPException(status_code=400, detail="Missing message or label")

    # System prompt enforces bullet-point output, language adaptation, and
    # no preamble/closing remarks.
    system_prompt = (
        f"You are a robot that identifies phishing and safe messages. "
        f"The message was classified as '{label}'. "
        "Explain why this decision was made and point out any words or patterns that led to it. "
        "No greetings, introductions, or closing remarks. "
        "Don't restate the message or its classification. "
        "Output only the explanation as bullet points. "
        "Limit each bullet to 1–2 sentences. "
        "Limit the number of bullets to 3-4. "
        f"Message:\n\n{user_message}\n\n"
        "Respond using the same language as the message."
    )
    url = "https://openrouter.ai/api/v1/chat/completions"
    headers = {
        "Authorization": f"Bearer {OPENROUTER_API_KEY}",
        "Content-Type": "application/json"
    }
    payload = {
        "model": req.model_id,
        "messages": [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": user_message}
        ]
    }
    try:
        logger.info("Calling OpenRouter /explain with model: %s", req.model_id)
        response = requests.post(url, headers=headers, json=payload, timeout=20)
        response.raise_for_status()
        result = response.json()
        # OpenRouter can return an error object in the body even with HTTP 200.
        if "error" in result:
            error_detail = result.get("error", {}).get("message", str(result.get("error")))
            logger.error("OpenRouter returned error in /explain: %s", error_detail)
            logger.error("Full response: %s", result)
            raise HTTPException(status_code=500, detail=f"OpenRouter error: {error_detail}")
        reply = result.get("choices", [{}])[0].get("message", {}).get("content", "").strip()
        if not reply:
            logger.error("OpenRouter returned empty response in /explain. Full result: %s", result)
            reply = "[No explanation returned]"
        logger.info("Explanation generated successfully")
        return {"reply": reply}
    except HTTPException:
        # BUGFIX: the deliberate HTTPException raised above was previously
        # swallowed by the broad `except Exception` below and re-wrapped as
        # "Unexpected error", losing the OpenRouter error detail. Re-raise it.
        raise
    except requests.RequestException as e:
        logger.error("OpenRouter network error in /explain: %s", e)
        raise HTTPException(status_code=500, detail=f"Error contacting OpenRouter: {e}")
    except Exception as e:
        logger.error("Unexpected error in /explain: %s", e)
        raise HTTPException(status_code=500, detail=f"Unexpected error: {e}")
@app.post("/classify")
def classify(req: ExplainRequest):
    """Classify text as Phishing or Safe via OpenRouter.

    Args:
        req: ExplainRequest; only `message` and (optionally) `model_id` are used.

    Returns:
        dict with the raw model reply (expected to be a JSON string), the
        upstream HTTP status code, and the model that was used.

    Raises:
        HTTPException 400: message is missing/blank.
        HTTPException 500: OpenRouter network error, API error, empty reply,
            or any unexpected failure.
    """
    user_message = req.message.strip()
    if not user_message:
        raise HTTPException(status_code=400, detail="Missing message")
    # Use provided model_id or fall back to the classification default.
    model_id = req.model_id or "arcee-ai/trinity-large-preview:free"
    system_prompt = (
        'You are a phishing detector. Classify the text as "Phishing" or "Safe". '
        'Respond ONLY with valid JSON: {"label": "Phishing"|"Safe", "confidence": <0-100 float>}. '
        'No other text.'
    )
    url = "https://openrouter.ai/api/v1/chat/completions"
    headers = {
        "Authorization": f"Bearer {OPENROUTER_API_KEY}",
        "Content-Type": "application/json"
    }
    payload = {
        "model": model_id,  # Use the passed model_id
        "messages": [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": user_message}
        ],
        "temperature": 0.0,  # deterministic classification
        "max_tokens": 2000  # Increased for models with extended reasoning/thinking
    }
    try:
        logger.info("Calling OpenRouter /classify with model: %s", model_id)
        response = requests.post(url, headers=headers, json=payload, timeout=20)
        response.raise_for_status()
        result = response.json()
        # OpenRouter can return an error object in the body even with HTTP 200.
        if "error" in result:
            error_detail = result.get("error", {}).get("message", str(result.get("error")))
            logger.error("OpenRouter returned error in /classify: %s", error_detail)
            logger.error("Full response: %s", result)
            raise HTTPException(status_code=500, detail=f"OpenRouter error: {error_detail}")
        reply = result.get("choices", [{}])[0].get("message", {}).get("content", "").strip()
        if not reply:
            logger.warning("Empty content in response. Finish reason: %s",
                           result.get("choices", [{}])[0].get("finish_reason"))
            # Some models emit only "reasoning" with no content when truncated;
            # log it for debugging before failing the request.
            reasoning = result.get("choices", [{}])[0].get("message", {}).get("reasoning", "")
            if reasoning:
                logger.warning("Model has reasoning but no content. This may indicate truncation.")
            logger.error("OpenRouter returned empty response in /classify. Full result: %s", result)
            raise HTTPException(status_code=500, detail="No response from OpenRouter")
        logger.info("Classification successful with model %s: %s", model_id, reply)
        return {"reply": reply, "status": response.status_code, "model": model_id}
    except HTTPException:
        # BUGFIX: deliberate HTTPExceptions raised above were previously
        # caught by the broad `except Exception` below and re-wrapped as
        # "Unexpected error", losing their specific detail. Re-raise them.
        raise
    except requests.RequestException as e:
        logger.error("OpenRouter network error in /classify: %s", e)
        raise HTTPException(status_code=500, detail=f"Error contacting OpenRouter: {e}")
    except Exception as e:
        logger.error("Unexpected error in /classify: %s", e)
        raise HTTPException(status_code=500, detail=f"Unexpected error: {e}")
# Optional: Run with uvicorn
if __name__ == "__main__":
    import uvicorn
    # Bind on all interfaces; port 7860 is the Hugging Face Spaces default.
    uvicorn.run(app, host="0.0.0.0", port=7860)