import os
import re
from urllib.parse import quote_plus

import streamlit as st
from langchain_community.utilities import SQLDatabase
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI
|
|
|
|
|
|
|
# Hugging Face credentials: the HF token doubles as the API key for the
# OpenAI-compatible inference endpoint used by ChatOpenAI below.
HF_TOKEN = os.environ.get("HF_TOKEN", "")

os.environ["OPENAI_API_KEY"] = HF_TOKEN

# Fixed: the Hugging Face domain is huggingface.co, not huggingface.com —
# the original URL pointed at a host that does not exist.
HF_BASE_URL = "https://api-inference.huggingface.co/v1"
|
|
|
|
|
def connectDatabase(username, port, host, password, database):
    """Connect to PostgreSQL and stash the handle in Streamlit session state.

    On success ``st.session_state.db`` holds a ``SQLDatabase``; on failure it
    is reset to ``None`` and the error is surfaced in the UI. Returns None.
    """
    try:
        # Fixed: URL-quote the credentials so passwords/usernames containing
        # URI-significant characters ('@', ':', '/', '%') don't break the DSN.
        db_uri = (
            f"postgresql://{quote_plus(username)}:{quote_plus(password)}"
            f"@{host}:{port}/{database}"
        )
        st.session_state.db = SQLDatabase.from_uri(db_uri)
        # Fixed: restored the mojibake'd checkmark and the string literal that
        # had been split across two lines by the encoding corruption.
        st.success("✅ Database connected successfully.")
    except Exception as e:
        st.session_state.db = None
        st.error(f"❌ Failed to connect to database:\n{e}")
|
|
|
|
|
def runQuery(query):
    """Execute *query* against the connected database.

    Returns the raw result string from ``SQLDatabase.run``, or a
    human-readable error message (callers display it as-is) when no
    database is connected or execution fails.
    """
    if "db" in st.session_state and st.session_state.db:
        try:
            return st.session_state.db.run(query)
        except Exception as e:
            # Fixed: restored the mojibake'd ❌ marker in the error strings.
            return f"❌ Query execution failed: {e}"
    return "❌ Please connect to database first."
|
|
|
|
|
def getDatabaseSchema():
    """Return the CREATE TABLE info for the connected database.

    Returns an error-message string when no database is connected or
    introspection fails — callers embed the return value directly into
    LLM prompts, so this never raises.
    """
    if "db" in st.session_state and st.session_state.db:
        try:
            return st.session_state.db.get_table_info()
        except Exception as e:
            # Fixed: restored the mojibake'd ❌ marker in the error strings.
            return f"❌ Failed to fetch schema: {e}"
    return "❌ Please connect to database first."
|
|
|
|
|
def getQueryFromLLM(question):
    """Ask the LLM to translate *question* into a SQL query for the schema.

    Returns the bare SQL string on success; on model failure shows the error
    in the UI and returns the sentinel 'Gagal membuat query SQL.'.
    """
    template = """
Below is the schema of a PostgreSQL database. Read the schema carefully and answer the user's question using a valid SQL query. Be careful with table and column names (case-sensitive). Only provide the SQL query, and nothing else.

Schema:
{schema}

Sekarang giliran Anda:
Question: {question}
SQL query:
"""
    try:
        # Truncate the schema to keep the prompt within the model's context.
        schema = getDatabaseSchema()[:3000]
        prompt = ChatPromptTemplate.from_template(template)
        chain = prompt | llm
        response = chain.invoke({"question": question, "schema": schema})
        # Strip chain-of-thought tags that reasoning models (e.g. Qwen3) emit.
        query_only = re.sub(r"<think>.*?</think>", "", response.content, flags=re.DOTALL).strip()
        # Fixed: also strip markdown code fences (```sql ... ```) that chat
        # models frequently wrap SQL in — a raw fence would fail execution.
        query_only = re.sub(r"^```(?:sql)?\s*", "", query_only)
        query_only = re.sub(r"\s*```$", "", query_only).strip()
        return query_only
    except Exception as e:
        # Fixed: restored the mojibake'd ❌ marker.
        st.error(f"❌ Model gagal merespons: {e}")
        return "Gagal membuat query SQL."
|
|
|
|
|
def getResponseForQueryResult(question, query, result):
    """Turn the question, generated SQL, and raw result into a natural-language answer.

    Answers in Indonesian when the question is Indonesian (per the prompt).
    Returns a sentinel failure string instead of raising, consistent with
    getQueryFromLLM, so a model outage doesn't crash the Streamlit rerun.
    """
    template = """
Berdasarkan pertanyaan, SQL query, dan hasilnya, berikan jawaban dalam bahasa alami. Gunakan bahasa Indonesia jika pertanyaannya dalam bahasa Indonesia.

Schema:
{schema}

Pertanyaan: {question}
SQL query: {query}
Hasil: {result}
Jawaban:
"""
    try:
        prompt2 = ChatPromptTemplate.from_template(template)
        chain2 = prompt2 | llm
        response = chain2.invoke({
            "question": question,
            # Fixed: truncate the schema like getQueryFromLLM does — the
            # original sent the full schema and could blow the context window.
            "schema": getDatabaseSchema()[:3000],
            "query": query,
            "result": result,
        })
        return response.content.strip()
    except Exception as e:
        # Fixed: added error handling, consistent with getQueryFromLLM —
        # previously any API failure propagated and crashed the rerun.
        st.error(f"❌ Model gagal merespons: {e}")
        return "Gagal membuat jawaban."
|
|
|
|
|
# Fixed: the page icon and title emoji were mojibake ("π§ ") from a bad
# encoding round-trip; restored the intended 🧠.
st.set_page_config(page_title="Chat with PostgreSQL DB", page_icon="🧠", layout="centered")
st.title("🧠 Chat with your Database")

# Conversation history must live in session state to survive Streamlit reruns.
if "chat" not in st.session_state:
    st.session_state.chat = []
|
|
|
|
|
with st.sidebar:
    # Fixed: restored the mojibake'd 🔌 emoji in the section header.
    st.subheader("🔌 Koneksi Database")
    host = st.text_input("Host", value="aws-0-ap-southeast-1.pooler.supabase.com")
    port = st.text_input("Port", value="6543")
    username = st.text_input("Username", value="postgres")
    # SECURITY NOTE(review): a default password baked into source is unsafe;
    # kept for backward compatibility but should come from st.secrets / env.
    password = st.text_input("Password", type="password", value="password")
    database = st.text_input("Database", value="postgres")
    if st.button("Connect"):
        # Careful: connectDatabase's parameter order is
        # (username, port, host, password, database).
        connectDatabase(username, port, host, password, database)
|
|
|
|
|
# Chat model served via the Hugging Face OpenAI-compatible endpoint.
# temperature=0 keeps SQL generation as deterministic as possible.
# NOTE(review): constructed at module level, so it is re-created on every
# Streamlit rerun — consider wrapping in @st.cache_resource.
llm = ChatOpenAI(
    model="qwen/qwen3-1.7b",
    base_url=HF_BASE_URL,
    api_key=HF_TOKEN,
    temperature=0
)
|
|
|
|
|
# Chat input returns None until the user submits a message on this rerun.
question = st.chat_input("Tanyakan sesuatu tentang database Anda...")

if question:
    if "db" not in st.session_state or st.session_state.db is None:
        # Fixed: restored the mojibake'd ❌ marker.
        st.error("❌ Please connect to database first.")
    else:
        st.session_state.chat.append({"role": "user", "content": question})

        # Pipeline: question -> SQL -> raw result -> natural-language answer.
        # NOTE(review): if SQL generation fails, its sentinel message is still
        # passed to runQuery, whose own error string then feeds the answer step.
        # Fixed: restored the mojibake'd 💡/🧾/📊 emoji in the UI strings.
        with st.spinner("💡 Sedang berpikir..."):
            query = getQueryFromLLM(question)
            result = runQuery(query)
            response = getResponseForQueryResult(question, query, result)

        st.session_state.chat.append({"role": "assistant", "content": response})

        # Debug panes render above the history loop below due to code order.
        with st.expander("🧾 Generated SQL Query"):
            st.code(query, language="sql")

        with st.expander("📊 Raw Result"):
            st.write(result)
|
|
|
|
|
# Replay the stored conversation (user and assistant turns) in order.
for message in st.session_state.chat:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])
|
|
|