| import requests | |
# Endpoint for the LLM HTTP API used by query_llm() below.
# NOTE(review): value is empty here — presumably redacted or meant to be
# filled in via config. For a local Ollama server the generate endpoint is
# typically "http://localhost:11434/api/generate" — TODO confirm before use.
OLLAMA_URL = ""
def query_llm(prompt: str, model: str = "") -> str:
    """Send *prompt* to the Ollama-style generate endpoint and return the reply text.

    Args:
        prompt: The text prompt forwarded to the model.
        model: Model identifier sent in the request payload. Defaults to ""
            (the server decides / may reject — kept for backward compatibility).

    Returns:
        The stripped ``"response"`` field of the JSON reply, ``""`` if the
        field is absent, or a ``"[Error] ..."`` string describing the failure.
        Callers rely on this error-string convention, so no exception escapes
        for network/HTTP/JSON problems.
    """
    payload = {
        "model": model,
        "prompt": prompt,
        # Ask for a single complete JSON object rather than a streamed reply.
        "stream": False,
    }
    try:
        response = requests.post(OLLAMA_URL, json=payload, timeout=60)
        # Raise requests.HTTPError (a RequestException) on 4xx/5xx statuses.
        response.raise_for_status()
        data = response.json()
    except requests.RequestException as e:
        # Connection errors, timeouts, and HTTP error statuses.
        # Narrowed from bare `except Exception` so genuine programming
        # errors (NameError, TypeError, ...) are no longer swallowed.
        return f"[Error] {e}"
    except ValueError as e:
        # response.json() raises ValueError (json.JSONDecodeError) on a
        # non-JSON body, e.g. an HTML error page from a proxy.
        return f"[Error] {e}"
    return data.get("response", "").strip()