# Source: Gen-HVAC (Hugging Face), upload "Upload 4 files", revision 0575976 (verified), 434 bytes.
import requests
# Base URL of the Ollama-compatible generate endpoint (e.g. "http://localhost:11434/api/generate").
# NOTE(review): left empty here — must be filled in before query_llm() can succeed.
OLLAMA_URL = ""
def query_llm(prompt: str, model: str = "") -> str:
    """Send *prompt* to the Ollama-compatible endpoint and return the reply text.

    Parameters
    ----------
    prompt : str
        The prompt forwarded verbatim to the model.
    model : str
        Model name understood by the server (default "" lets the server decide).

    Returns
    -------
    str
        The model's response text, whitespace-stripped, or a string of the
        form ``"[Error] ..."`` when the request or JSON decoding fails.
    """
    payload = {
        "model": model,
        "prompt": prompt,
        # Ask for one complete reply instead of a streamed token sequence.
        "stream": False,
    }
    try:
        response = requests.post(OLLAMA_URL, json=payload, timeout=60)
        response.raise_for_status()
        data = response.json()
    except (requests.RequestException, ValueError) as e:
        # RequestException covers connection errors, timeouts, and HTTP 4xx/5xx
        # (raised by raise_for_status); ValueError covers a non-JSON body from
        # response.json(). Anything else (e.g. a NameError from a misconfigured
        # OLLAMA_URL) now surfaces instead of being silently swallowed.
        return f"[Error] {e}"
    return data.get("response", "").strip()