# gemini_client.py — minimal Gemini chat client for FlakyTestSleuthOpenEnvRL.
import os
import httpx
# Gemini API key; the placeholder fallback will be rejected by the API —
# set GEMINI_API_KEY in the environment for real use.
API_KEY = os.environ.get("GEMINI_API_KEY", "your-api-key-here")
# Model to query; override via the MODEL_NAME environment variable.
MODEL_NAME = os.environ.get("MODEL_NAME", "gemini-2.0-flash")
# Root of the Google Generative Language REST API (v1beta).
BASE_URL = "https://generativelanguage.googleapis.com/v1beta"
def chat(messages: list[dict], **kwargs) -> str:
    """Send an OpenAI-style message list to the Gemini generateContent API.

    Args:
        messages: Dicts with "role" ("user", "system", or anything else,
            treated as the assistant/"model" role) and "content" text.
        **kwargs: Optional generation settings — temperature (default 1.0),
            max_tokens (default 2048), top_p (default 0.95), top_k (default 40).

    Returns:
        The text of the first candidate in the model's response.

    Raises:
        RuntimeError: If the HTTP request fails, or the response contains
            no usable candidate (e.g. the prompt was safety-blocked).
    """
    headers = {"Content-Type": "application/json"}

    # Gemini only accepts "user"/"model" roles inside `contents`; system
    # messages must go in the separate `systemInstruction` field instead of
    # being silently mapped to the "model" role.
    contents = []
    system_parts = []
    for msg in messages:
        if msg["role"] == "system":
            system_parts.append({"text": msg["content"]})
            continue
        role = "user" if msg["role"] == "user" else "model"
        contents.append({
            "role": role,
            "parts": [{"text": msg["content"]}]
        })

    payload = {
        "contents": contents,
        "generationConfig": {
            "temperature": kwargs.get("temperature", 1.0),
            "maxOutputTokens": kwargs.get("max_tokens", 2048),
            "topP": kwargs.get("top_p", 0.95),
            "topK": kwargs.get("top_k", 40)
        }
    }
    if system_parts:
        payload["systemInstruction"] = {"parts": system_parts}

    url = f"{BASE_URL}/models/{MODEL_NAME}:generateContent?key={API_KEY}"
    response = httpx.post(
        url,
        json=payload,
        headers=headers,
        timeout=60.0
    )
    if response.status_code != 200:
        # RuntimeError is a subclass of Exception, so callers catching the
        # original bare Exception still work.
        raise RuntimeError(f"Error: {response.status_code} - {response.text}")

    result = response.json()
    try:
        return result["candidates"][0]["content"]["parts"][0]["text"]
    except (KeyError, IndexError) as err:
        # The API can return 200 with no candidates (e.g. safety block);
        # surface that clearly instead of an opaque KeyError.
        raise RuntimeError(f"No text candidate in response: {result}") from err
if __name__ == "__main__":
response = chat([
{"role": "user", "content": "Hello! What model are you?"}
])
print(response)