import os

import gradio as gr
from groq import Groq

# Groq API key, read from the environment (None when unset).
groq_api_key = os.environ.get("GROQ_API_KEY")
def chat_with_groq(message, model="llama-3.3-70b-versatile"):
    """Send a single user message to a Groq chat model and return the reply text.

    Args:
        message: The user's prompt text.
        model: Groq model identifier; defaults to Llama 3.3 70B Versatile.

    Returns:
        The assistant's reply content as a string.

    Raises:
        ValueError: If the GROQ_API_KEY environment variable is not set.
    """
    # Fail fast with a clear message instead of an opaque auth error from the API.
    if not groq_api_key:
        raise ValueError("GROQ_API_KEY environment variable is not set.")
    client = Groq(api_key=groq_api_key)
    chat_completion = client.chat.completions.create(
        messages=[
            {
                "role": "user",
                "content": message,
            }
        ],
        model=model,
    )
    # The API returns a list of choices; take the first (and only) one.
    return chat_completion.choices[0].message.content
# Minimal Gradio UI: one input textbox, one output textbox, wired to chat_with_groq.
iface = gr.Interface(
    fn=chat_with_groq,
    inputs="textbox",
    outputs="textbox",
    title="Groq Chat with Llama 3.3 DDS",
)

if __name__ == "__main__":
    # Start the web server only when run as a script, not on import.
    iface.launch()