import os
import sqlite3

import streamlit as st
from g4f.client import Client

# Must be the first Streamlit call in the script
st.set_page_config(page_title="DarkGPT", page_icon="🤖", initial_sidebar_state="expanded")

# --- Groq configuration (via environment variable) ---
GROQ_API_KEY = os.getenv("GROQ_API_KEY")  # Set in .env or the environment
if not GROQ_API_KEY:
    st.error("GROQ_API_KEY not found! Add it to the environment or a .env file.")
    st.stop()
conn = sqlite3.connect('chat_history.db', check_same_thread=False)
c = conn.cursor()
c.execute('''CREATE TABLE IF NOT EXISTS chat_history
             (conversation_id INTEGER, role TEXT, content TEXT)''')
conn.commit()
def local_css(file_name):
    try:
        with open(file_name) as f:
            st.markdown(f"<style>{f.read()}</style>", unsafe_allow_html=True)
    except FileNotFoundError:
        st.warning("style.css not found. Skipping custom CSS.")

local_css("style.css")
# --- Model list (via g4f + Groq) ---
models = {
    "🤖 Qwen 2.5 Coder (32B)": "qwen2.5-coder-32b",
    "🚀 LLaMA 3.1 70B": "llama-3.1-70b-versatile",
    "⚡ LLaMA 3.1 8B": "llama-3.1-8b-instant",
    "🧠 Qwen 2.5 72B": "qwen2.5-72b-instruct",
    "⚙️ Mixtral 8x7B": "mixtral-8x7b-32768",
}

client = Client(
    provider="groq",
    api_key=GROQ_API_KEY
)
def main():
    # Initialise session state
    if "chat_history" not in st.session_state:
        st.session_state.chat_history = []
    if "conversation_id" not in st.session_state:
        st.session_state.conversation_id = 1

    # Header + model selector
    col1, col2, col3 = st.columns([1, 1, 1])
    with col1:
        st.header("DarkGPT")
    with col3:
        selected_model_name = st.selectbox("Select model", list(models.keys()), index=0)
        selected_model = models[selected_model_name]

    # Sidebar: new chat
    if st.sidebar.button("✨ New Chat"):
        st.session_state.chat_history = []
        st.session_state.conversation_id += 1
        st.rerun()
    # Sidebar: load a saved conversation
    st.sidebar.write("### Chat History")
    c.execute("SELECT DISTINCT conversation_id FROM chat_history ORDER BY conversation_id DESC")
    for (conv_id,) in c.fetchall():
        # Stored roles are 'user'/'assistant', so preview each conversation by its first assistant reply
        c.execute(
            "SELECT content FROM chat_history WHERE conversation_id = ? AND role = 'assistant' LIMIT 1",
            (conv_id,),
        )
        first_msg = c.fetchone()
        if first_msg:
            preview = " ".join(first_msg[0].split()[:6]) + "..."
            if st.sidebar.button(preview, key=f"load_{conv_id}"):
                load_conversation(conv_id)

    # Sidebar: clear all history
    if st.sidebar.button("🗑️ Clear All History"):
        c.execute("DELETE FROM chat_history")
        conn.commit()
        st.session_state.chat_history = []
        st.rerun()
    st.markdown("---")

    # Render the conversation so far (before handling new input, so messages stay in order)
    for msg in st.session_state.chat_history:
        with st.chat_message(msg["role"]):
            st.markdown(msg["content"])

    # Chat input
    if user_input := st.chat_input("Type a message..."):
        with st.chat_message("user"):
            st.markdown(user_input)
        # Store the user message
        st.session_state.chat_history.append({"role": "user", "content": user_input})

        # Generate the response
        with st.chat_message("assistant"):
            with st.spinner("Thinking..."):
                try:
                    response = client.chat.completions.create(
                        model=selected_model,
                        messages=[{"role": m["role"], "content": m["content"]} for m in st.session_state.chat_history],
                        temperature=0.7,
                        max_tokens=2048
                    )
                    bot_response = response.choices[0].message.content
                    st.markdown(bot_response)
                    # Store the assistant response
                    st.session_state.chat_history.append({"role": "assistant", "content": bot_response})
                    # Persist only the latest user/assistant pair
                    for msg in st.session_state.chat_history[-2:]:
                        c.execute(
                            "INSERT INTO chat_history VALUES (?, ?, ?)",
                            (st.session_state.conversation_id, msg["role"], msg["content"])
                        )
                    conn.commit()
                except Exception as e:
                    st.error(f"Error: {str(e)}")
def load_conversation(conv_id):
    c.execute("SELECT role, content FROM chat_history WHERE conversation_id = ? ORDER BY rowid", (conv_id,))
    # Unpack into names that don't shadow the cursor `c`
    st.session_state.chat_history = [{"role": role, "content": content} for role, content in c.fetchall()]
    st.session_state.conversation_id = conv_id
    st.rerun()
if __name__ == "__main__":
    main()
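
# --- Running locally: a minimal sketch (the filename "app.py" is an assumption) ---
# The imports above imply two third-party dependencies: streamlit and g4f.
# Export the Groq key before launching, e.g.:
#     export GROQ_API_KEY=your_key_here
#     streamlit run app.py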