# NOTE: the following two lines are Hugging Face Spaces UI residue captured in
# the paste ("Spaces: Running") -- they are not part of the program.
| """ | |
| Groq AI Studio Pro - Glassmorphism Edition with Auto-Fallback | |
| Now with password protection to secure your API key. | |
| """ | |
| import os | |
| import json | |
| import base64 | |
| import requests | |
| import gradio as gr | |
| from datetime import datetime | |
| from typing import Optional, List, Dict, Any, Tuple | |
# ==================== CONFIGURATION ====================
# All secrets are read from the environment so nothing sensitive is committed.
GROQ_API_KEY = os.getenv("GROQ_API_KEY", "")        # host's Groq key (unlocked by password)
GITHUB_TOKEN = os.getenv("GITHUB_TOKEN", "")        # personal access token for exports
GITHUB_REPO = os.getenv("GITHUB_REPO", "")          # e.g. "owner/repo" -- used in /repos/{repo}/contents calls
ACCESS_PASSWORD = os.getenv("ACCESS_PASSWORD", "")  # Secret password gating the host API key
# Fallback chain - if selected model fails, try these in order
MODEL_FALLBACK_CHAIN = [
    "llama-3.3-70b-versatile",
    "meta-llama/llama-4-scout-17b-16e-instruct",
    "meta-llama/llama-4-maverick-17b-128e-instruct",
    "llama-3.1-8b-instant",
    "openai/gpt-oss-120b",
    "openai/gpt-oss-20b",
    "qwen/qwen3-32b",
    "mixtral-8x7b-32768",
]
# Models shown in the dropdown before (or instead of) a live fetch succeeds.
DEFAULT_MODELS = [
    "llama-3.3-70b-versatile",
    "meta-llama/llama-4-scout-17b-16e-instruct",
    "meta-llama/llama-4-maverick-17b-128e-instruct",
    "llama-3.1-8b-instant",
    "openai/gpt-oss-120b",
    "openai/gpt-oss-20b",
    "deepseek-r1-distill-llama-70b",
    "qwen/qwen3-32b",
    "gemma2-9b-it",
    "mixtral-8x7b-32768",
]
| # ==================== HELPER: CONVERT HISTORY ==================== | |
def to_chatbot_messages(history: List[List[str]]) -> List[Dict[str, str]]:
    """Convert [[user, assistant], ...] pairs into role/content message dicts.

    Empty entries and the transient "β³ Thinking..." placeholder are dropped,
    so the result is safe to show in the chatbot or send to the API.
    """
    converted: List[Dict[str, str]] = []
    for human_text, bot_text in history:
        if human_text:
            converted.append({"role": "user", "content": human_text})
        if bot_text and bot_text != "β³ Thinking...":
            converted.append({"role": "assistant", "content": bot_text})
    return converted
| # ==================== GROQ API CLIENT ==================== | |
class GroqClient:
    """Thin wrapper around Groq's OpenAI-compatible REST API."""

    def __init__(self, api_key: str):
        # The key is baked into the default headers; create one client per key.
        self.api_key = api_key
        self.base_url = "https://api.groq.com/openai/v1"
        self.headers = {
            "Authorization": f"Bearer {api_key}",
            "Content-Type": "application/json"
        }

    def fetch_models(self) -> List[str]:
        """Return the ids of models available to this key; [] on any HTTP or network failure."""
        url = f"{self.base_url}/models"
        try:
            response = requests.get(url, headers=self.headers, timeout=10)
            if response.status_code == 200:
                data = response.json()
                return [m["id"] for m in data.get("data", [])]
        except Exception as e:
            print(f"Error fetching models: {e}")
        return []

    def chat_completion(self, messages, model, temperature=0.7, max_tokens=4096,
                        stream=False, retry_with_fallback=True):
        """POST /chat/completions, optionally walking MODEL_FALLBACK_CHAIN.

        Returns a (response_json, model_actually_used) tuple.  When every
        candidate fails, the first element is a dict with an "error" key.
        """
        url = f"{self.base_url}/chat/completions"
        # Candidate list: the requested model first, then the remainder of the
        # fallback chain (or the whole chain if the model isn't part of it).
        models_to_try = [model]
        if retry_with_fallback and model in MODEL_FALLBACK_CHAIN:
            idx = MODEL_FALLBACK_CHAIN.index(model)
            models_to_try.extend(MODEL_FALLBACK_CHAIN[idx+1:])
        elif retry_with_fallback:
            models_to_try.extend(MODEL_FALLBACK_CHAIN)
        last_error = None
        for try_model in models_to_try:
            payload = {
                "model": try_model,
                "messages": messages,
                "temperature": temperature,
                "max_completion_tokens": max_tokens,
                "stream": stream
            }
            try:
                response = requests.post(url, headers=self.headers, json=payload, timeout=60)
                if response.status_code == 200:
                    return response.json(), try_model
                error_data = response.json() if response.text else {}
                error_msg = error_data.get("error", {}).get("message", "")
                # Only "model missing" errors advance the fallback loop; any
                # other failure (auth, rate limit, bad request) returns at once.
                if "model_not_found" in error_msg or "does not exist" in error_msg:
                    print(f"Model {try_model} not available, trying fallback...")
                    last_error = f"{try_model}: {error_msg}"
                    continue
                return {"error": error_msg or f"HTTP {response.status_code}", "detail": response.text}, try_model
            except requests.exceptions.RequestException as e:
                last_error = str(e)
                continue
        return {"error": f"All models failed. Last error: {last_error}"}, model
| # ==================== GITHUB INTEGRATION ==================== | |
class GitHubIntegration:
    """Small GitHub REST v3 helper for committing files and creating gists."""

    def __init__(self, token: str, repo: str):
        self.token = token
        self.repo = repo  # "owner/repo"; may be empty when only gists are used
        self.base_url = "https://api.github.com"
        self.headers = {
            "Authorization": f"token {token}",
            "Accept": "application/vnd.github.v3+json"
        }

    def commit_file(self, content, filename, path="groq-studio-sessions", message=None):
        """Create or update ``path/<timestamp>_<filename>`` on the main branch.

        Returns {"status": "success", "url": ...} on success, otherwise
        {"status": "error", "message": ...}.
        """
        if not self.token or not self.repo:
            return {"status": "error", "message": "GitHub credentials not configured"}
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        # BUG FIX: `filename` was previously ignored -- every commit was
        # literally written as "<timestamp>_(unknown)".
        full_filename = f"{timestamp}_{filename}"
        file_path = f"{path}/{full_filename}"
        content_b64 = base64.b64encode(content.encode()).decode()
        check_url = f"{self.base_url}/repos/{self.repo}/contents/{file_path}"
        try:
            # The contents API requires the existing blob SHA when updating.
            check_resp = requests.get(check_url, headers=self.headers)
            sha = check_resp.json().get("sha") if check_resp.status_code == 200 else None
            data = {
                "message": message or f"Update from Groq Studio - {timestamp}",
                "content": content_b64,
                "branch": "main"
            }
            if sha:
                data["sha"] = sha
            resp = requests.put(check_url, headers=self.headers, json=data)
            resp.raise_for_status()
            result = resp.json()
            return {"status": "success", "url": result["content"]["html_url"]}
        except Exception as e:
            return {"status": "error", "message": str(e)}

    def create_gist(self, content, description="Groq Studio Export", public=False):
        """Create a gist (private unless ``public=True``) holding the session markdown."""
        if not self.token:
            return {"status": "error", "message": "GitHub token not configured"}
        url = f"{self.base_url}/gists"
        data = {
            "description": description,
            "public": public,
            "files": {"groq_session.md": {"content": content}}
        }
        try:
            resp = requests.post(url, headers=self.headers, json=data)
            resp.raise_for_status()
            result = resp.json()
            return {"status": "success", "url": result["html_url"]}
        except Exception as e:
            return {"status": "error", "message": str(e)}
| # ==================== UI FUNCTIONS ==================== | |
def refresh_models(api_key: str) -> gr.update:
    """Re-populate the model dropdown from the live Groq model list.

    Falls back to DEFAULT_MODELS when no key is supplied or the fetch fails;
    otherwise floats known-good model families to the top of the list.
    """
    default_choice = gr.update(choices=DEFAULT_MODELS, value=DEFAULT_MODELS[0])
    if not api_key:
        return default_choice
    available = GroqClient(api_key).fetch_models()
    if not available:
        return default_choice
    preferred = ["llama-3.3-70b-versatile", "meta-llama/llama-4", "gpt-oss", "qwen", "llama-3.1"]
    ranked = []
    for needle in preferred:
        ranked.extend([m for m in available if needle in m and m not in ranked])
    ranked.extend([m for m in available if m not in ranked])
    return gr.update(choices=ranked, value=ranked[0])
def check_password(password: str) -> bool:
    """Return True only when a host password is configured AND matches exactly.

    The previous expression ``ACCESS_PASSWORD and password == ACCESS_PASSWORD``
    returned the empty string (falsy, but not ``False``) when ACCESS_PASSWORD
    was unset, violating the annotated return type; ``bool()`` fixes that while
    preserving truthiness for every caller.
    """
    return bool(ACCESS_PASSWORD) and password == ACCESS_PASSWORD
def process_chat(
    message: str,
    history: List[List[str]],
    model: str,
    temperature: float,
    max_tokens: int,
    system_prompt: str,
    access_password: str,  # new: password input
    manual_api_key: str,  # new: manual API key input
    auto_fallback: bool
) -> tuple:
    """Generator handling one chat turn.

    Yields (chatbot_messages, cleared_input, status) frames: first an interim
    frame while waiting on the API, then the final frame.  The host API key is
    used only when the access password matches; otherwise the caller-supplied
    key is required.  NOTE: mutates ``history`` in place, which is how the
    gr.State list persists across turns.
    """
    # Determine which API key to use
    if check_password(access_password):
        # Correct password: use host's key
        api_key = GROQ_API_KEY
        if not api_key:
            yield to_chatbot_messages(history + [[message, "β Host API key not configured."]]), "", "β No host key"
            return
    else:
        # No or wrong password: user must provide their own key
        api_key = manual_api_key
        if not api_key:
            yield to_chatbot_messages(history + [[message, "β Please enter your own Groq API Key or provide the correct access password."]]), "", "β No API Key"
            return
    client = GroqClient(api_key)
    # Build messages for API (system prompt first, then prior turns, then the new message)
    api_messages = []
    if system_prompt:
        api_messages.append({"role": "system", "content": system_prompt})
    for human, assistant in history:
        api_messages.append({"role": "user", "content": human})
        if assistant:
            api_messages.append({"role": "assistant", "content": assistant})
    api_messages.append({"role": "user", "content": message})
    # Show typing indicator
    # NOTE(review): to_chatbot_messages filters out the "β³ Thinking..."
    # placeholder, so this interim frame shows only the pending user message
    # (no assistant bubble) -- confirm that is the intended UX.
    temp_history = history + [[message, "β³ Thinking..."]]
    yield to_chatbot_messages(temp_history), "", f"π Using {model}..."
    response, model_used = client.chat_completion(
        messages=api_messages,
        model=model,
        temperature=temperature,
        max_tokens=max_tokens,
        retry_with_fallback=auto_fallback
    )
    if "error" in response:
        error_msg = f"β Error: {response['error']}"
        if "detail" in response:
            error_msg += f"\n\nDetails: {response['detail']}"
        history.append([message, error_msg])
        status = f"β Error with {model_used}"
    else:
        try:
            content = response["choices"][0]["message"]["content"]
            usage = response.get("usage", {})
            # Footer appended to the reply: model used (flagging fallback) and token count.
            fallback_notice = ""
            if model_used != model:
                fallback_notice = f" (β οΈ Fallback from {model})"
            info = f"\n\n---\n*Model: {model_used}{fallback_notice} | Tokens: {usage.get('total_tokens', 'N/A')}*"
            history.append([message, content + info])
            status = f"β Success with {model_used}" + (" (fallback)" if model_used != model else "")
        except Exception as e:
            # Defensive: API returned 200 but not the expected shape.
            history.append([message, f"β Error parsing response: {str(e)}"])
            status = f"β Parse Error"
    yield to_chatbot_messages(history), "", status
def export_to_github(history, filename, commit_msg, github_token, github_repo):
    """Commit the formatted conversation to a repo; returns a status markdown string."""
    token = github_token or GITHUB_TOKEN
    repo = github_repo or GITHUB_REPO
    if not (token and repo):
        return "β Error: GitHub token and repo not configured."
    outcome = GitHubIntegration(token, repo).commit_file(
        format_conversation(history),
        filename or "session.md",
        message=commit_msg,
    )
    if outcome["status"] == "success":
        return f"β Committed! [View]({outcome['url']})"
    return f"β {outcome['message']}"
def create_gist_export(history, description, github_token):
    """Export the formatted conversation as a gist; returns a status markdown string."""
    token = github_token or GITHUB_TOKEN
    if not token:
        return "β Error: GitHub token not configured."
    outcome = GitHubIntegration(token, "").create_gist(
        format_conversation(history), description
    )
    if outcome["status"] == "success":
        return f"β Gist created! [View]({outcome['url']})"
    return f"β {outcome['message']}"
def format_conversation(history):
    """Render the chat history as a Markdown transcript, skipping unfinished turns."""
    parts = [
        "# Groq Studio Session\n\n",
        f"**Date:** {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n\n## Conversation\n\n",
    ]
    for turn, (human, assistant) in enumerate(history, 1):
        # Turns with no reply (or only the placeholder) are not exported.
        if not assistant or assistant == "β³ Thinking...":
            continue
        parts.append(
            f"### Turn {turn}\n\n**User:**\n{human}\n\n**Assistant:**\n{assistant}\n\n---\n\n"
        )
    return "".join(parts)
def clear_chat():
    """Reset the chatbot display, the history state, and the status bar."""
    return [], [], "π Ready"
def debug_request(message, history, model, temperature, max_tokens, system_prompt, api_key):
    """Return a markdown-wrapped cURL command mirroring what would be sent to Groq.

    The API key is truncated to its first 10 characters so the output is safe
    to share when troubleshooting.
    """
    key = api_key or GROQ_API_KEY
    convo = []
    if system_prompt:
        convo.append({"role": "system", "content": system_prompt})
    for human, assistant in history:
        convo.append({"role": "user", "content": human})
        if assistant:
            convo.append({"role": "assistant", "content": assistant})
    convo.append({"role": "user", "content": message})
    payload = {
        "model": model,
        "messages": convo,
        "temperature": temperature,
        "max_completion_tokens": max_tokens
    }
    curl_cmd = (
        "curl -X POST https://api.groq.com/openai/v1/chat/completions \\\n"
        f'  -H "Authorization: Bearer {key[:10]}..." \\\n'
        '  -H "Content-Type: application/json" \\\n'
        f"  -d '{json.dumps(payload)}'"
    )
    return f"## cURL\n```bash\n{curl_cmd}\n```"
| # ==================== CUSTOM CSS ==================== | |
# Glassmorphism theme: animated gradient background, frosted "glass" panels,
# styled chat bubbles, and responsive tweaks.  Injected into the Gradio app
# as custom CSS.
CUSTOM_CSS = """
/* Your existing glassmorphism CSS β unchanged */
@import url('https://fonts.googleapis.com/css2?family=Inter:wght@300;400;500;600;700&display=swap');
:root {
    --glass-bg: rgba(20, 20, 30, 0.6);
    --glass-border: rgba(255, 255, 255, 0.1);
    --accent-primary: #ff6b6b;
    --accent-secondary: #4ecdc4;
    --text-primary: #ffffff;
    --gradient-1: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
}
body {
    font-family: 'Inter', sans-serif !important;
    background: linear-gradient(-45deg, #0f0c29, #302b63, #24243e, #1a1a2e) !important;
    background-size: 400% 400% !important;
    animation: gradientBG 15s ease infinite !important;
    min-height: 100vh;
}
@keyframes gradientBG {
    0% { background-position: 0% 50%; }
    50% { background-position: 100% 50%; }
    100% { background-position: 0% 50%; }
}
.glass-panel {
    background: var(--glass-bg) !important;
    backdrop-filter: blur(20px) !important;
    border: 1px solid var(--glass-border) !important;
    border-radius: 24px !important;
    box-shadow: 0 8px 32px 0 rgba(0, 0, 0, 0.37) !important;
}
.chatbot {
    background: rgba(0, 0, 0, 0.3) !important;
    border-radius: 20px !important;
    border: 1px solid var(--glass-border) !important;
    height: 600px !important;
}
.chatbot .message {
    background: var(--glass-bg) !important;
    border: 1px solid var(--glass-border) !important;
    border-radius: 18px !important;
    margin: 8px 0 !important;
    padding: 16px !important;
    backdrop-filter: blur(10px) !important;
}
.chatbot .message.user {
    background: linear-gradient(135deg, rgba(102, 126, 234, 0.3), rgba(118, 75, 162, 0.3)) !important;
    border-left: 4px solid #667eea !important;
}
.chatbot .message.bot {
    background: linear-gradient(135deg, rgba(78, 205, 196, 0.2), rgba(68, 160, 141, 0.2)) !important;
    border-left: 4px solid #4ecdc4 !important;
}
input, textarea, select {
    background: rgba(0, 0, 0, 0.4) !important;
    border: 1px solid var(--glass-border) !important;
    border-radius: 12px !important;
    color: var(--text-primary) !important;
    padding: 12px 16px !important;
}
button {
    background: var(--gradient-1) !important;
    border: none !important;
    border-radius: 12px !important;
    color: white !important;
    font-weight: 600 !important;
    padding: 12px 24px !important;
    transition: all 0.3s ease !important;
}
button:hover {
    transform: translateY(-2px) !important;
    box-shadow: 0 10px 30px rgba(102, 126, 234, 0.4) !important;
}
.status-bar {
    background: rgba(0, 0, 0, 0.5) !important;
    border-radius: 8px !important;
    padding: 8px 16px !important;
    font-size: 12px !important;
    color: #4ecdc4 !important;
    border: 1px solid var(--glass-border) !important;
}
@media (max-width: 768px) {
    .glass-panel { border-radius: 16px !important; margin: 8px !important; }
    .chatbot { height: 400px !important; }
    button { padding: 10px 16px !important; font-size: 14px !important; }
}
"""
| # ==================== GRADIO INTERFACE ==================== | |
def create_interface():
    """Assemble and return the Gradio Blocks app.

    Layout: chat column (left) + settings column (right), plus two hidden rows
    acting as debug/export "modals".  All event wiring lives here.
    """
    # BUG FIX: custom CSS must be passed to gr.Blocks at construction time --
    # Blocks.launch() has no `css` parameter, so the theme was never applied.
    with gr.Blocks(title="Groq AI Studio Pro", css=CUSTOM_CSS) as demo:
        gr.HTML("""
        <div class="glass-panel" style="text-align: center; padding: 20px; margin-bottom: 20px;">
            <h1 style="background: linear-gradient(135deg, #fff 0%, #a8edea 100%); -webkit-background-clip: text; -webkit-text-fill-color: transparent; font-weight: 700; margin: 0;">
                β‘ Groq AI Studio Pro
            </h1>
            <p style="color: #b0b0b0; margin-top: 8px;">
                <span style="color: #4ecdc4;">β</span> π Password Protected | Auto-Fallback | GitHub Integration
            </p>
        </div>
        """)
        # Conversation history as [[user, assistant], ...]; mutated in place by process_chat.
        state_history = gr.State([])
        with gr.Row():
            # ---- Left column: chat surface ----
            with gr.Column(scale=3):
                with gr.Group(elem_classes="glass-panel"):
                    # BUG FIX: process_chat yields role/content dicts (via
                    # to_chatbot_messages), so the chatbot must use the
                    # "messages" format rather than the legacy tuples format.
                    chatbot = gr.Chatbot(label="Conversation", elem_classes="chatbot",
                                         type="messages")
                    with gr.Row():
                        msg_input = gr.Textbox(placeholder="Type your message...", scale=4, show_label=False, lines=1)
                        send_btn = gr.Button("Send β€", scale=1, variant="primary")
                    with gr.Row():
                        clear_btn = gr.Button("ποΈ Clear", variant="secondary")
                        debug_btn = gr.Button("π§ Debug", variant="secondary")
                        export_btn = gr.Button("π€ Export", variant="secondary")
                    status_text = gr.Textbox(value="π Ready", interactive=False, elem_classes="status-bar")
            # ---- Right column: access, model, and generation settings ----
            with gr.Column(scale=1):
                with gr.Group(elem_classes="glass-panel"):
                    gr.Markdown("### π Access & API Keys")
                    # Password field: unlocks the host's GROQ_API_KEY when correct.
                    access_password = gr.Textbox(
                        label="Access Password",
                        placeholder="Enter password to use host API key",
                        type="password"
                    )
                    # Manual API key (only needed if password wrong or missing)
                    manual_api_key = gr.Textbox(
                        label="Your Groq API Key (if no password)",
                        placeholder="gsk_...",
                        type="password"
                    )
                    gr.Markdown("### βοΈ Configuration")
                    with gr.Accordion("π GitHub Settings", open=False):
                        github_token_input = gr.Textbox(label="GitHub Token", type="password", value=GITHUB_TOKEN)
                        github_repo_input = gr.Textbox(label="GitHub Repo", value=GITHUB_REPO)
                    with gr.Row():
                        model_dropdown = gr.Dropdown(choices=DEFAULT_MODELS, value=DEFAULT_MODELS[0], label="Model")
                        refresh_btn = gr.Button("π", size="sm")
                    auto_fallback = gr.Checkbox(label="Auto-Fallback", value=True)
                    system_prompt = gr.Textbox(label="System Prompt", lines=3,
                                               value="You are a helpful AI assistant powered by Groq.")
                    with gr.Row():
                        temperature = gr.Slider(0, 2, value=0.7, label="Temperature")
                        max_tokens = gr.Slider(100, 8192, value=4096, label="Max Tokens")
        # Debug & Export modals (hidden rows toggled by the buttons below)
        with gr.Row(visible=False) as debug_row:
            with gr.Column():
                debug_output = gr.Code(label="Debug Request", language="python")
                close_debug = gr.Button("Close")
        with gr.Row(visible=False) as export_row:
            with gr.Column():
                gr.Markdown("### Export to GitHub")
                export_filename = gr.Textbox(label="Filename", value="session.md")
                export_message = gr.Textbox(label="Commit Message", value="Update from Groq Studio")
                with gr.Row():
                    commit_btn = gr.Button("π₯ Commit to Repo", variant="primary")
                    gist_btn = gr.Button("π Create Gist")
                export_result = gr.Markdown()
                close_export = gr.Button("Close")
        # ---- Event wiring ----
        # Model list refresh uses the manual key (the host key is never exposed client-side).
        refresh_btn.click(refresh_models, [manual_api_key], [model_dropdown])
        # Enter key and Send button share the same handler and I/O lists.
        send_event = msg_input.submit(
            process_chat,
            [msg_input, state_history, model_dropdown, temperature, max_tokens, system_prompt,
             access_password, manual_api_key, auto_fallback],
            [chatbot, msg_input, status_text]
        )
        send_btn.click(
            process_chat,
            [msg_input, state_history, model_dropdown, temperature, max_tokens, system_prompt,
             access_password, manual_api_key, auto_fallback],
            [chatbot, msg_input, status_text]
        )
        clear_btn.click(clear_chat, outputs=[chatbot, state_history, status_text])
        # Build the debug cURL first, then reveal the modal row.
        debug_btn.click(
            debug_request,
            [msg_input, state_history, model_dropdown, temperature, max_tokens, system_prompt, manual_api_key],
            [debug_output]
        ).then(lambda: gr.update(visible=True), outputs=[debug_row])
        close_debug.click(lambda: gr.update(visible=False), outputs=[debug_row])
        export_btn.click(lambda: gr.update(visible=True), outputs=[export_row])
        close_export.click(lambda: gr.update(visible=False), outputs=[export_row])
        commit_btn.click(
            export_to_github,
            [state_history, export_filename, export_message, github_token_input, github_repo_input],
            [export_result]
        )
        gist_btn.click(
            create_gist_export,
            [state_history, export_message, github_token_input],
            [export_result]
        )
        gr.HTML("""
        <div style="text-align: center; padding: 20px; color: #666; font-size: 12px;">
            <p>π Password protection enabled. Use the correct password to access the host API key.</p>
        </div>
        """)
    return demo
if __name__ == "__main__":
    demo = create_interface()
    # BUG FIX: Blocks.launch() has no `css` keyword -- passing css here raised
    # a TypeError before the server could start.  Custom CSS belongs to
    # gr.Blocks(css=...) at construction time.
    demo.launch(server_name="0.0.0.0", server_port=7860)