import os
import json
import sqlite3
from datetime import datetime

import streamlit as st
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_chroma import Chroma
from langchain_groq import ChatGroq
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationalRetrievalChain

from vectorize_documents import embeddings
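
# Load the Groq API key from config.json (kept next to this script) and export
# it via the environment so ChatGroq can pick it up.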
working_dir = os.path.dirname(os.path.abspath(__file__))
with open(f"{working_dir}/config.json") as config_file:
    config_data = json.load(config_file)
GROQ_API_KEY = config_data["GROQ_API_KEY"]
os.environ["GROQ_API_KEY"] = GROQ_API_KEY


# Set up the SQLite database (check_same_thread=False lets the connection
# created here be reused across Streamlit's rerun threads)
def setup_db():
    conn = sqlite3.connect("chat_history.db", check_same_thread=False)
    cursor = conn.cursor()
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS chat_histories (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            username TEXT,
            timestamp TEXT,
            day TEXT,
            user_message TEXT,
            assistant_response TEXT
        )
    """)
    conn.commit()
    return conn  # Return the connection so it can be stored in session state


# Function to save chat history to SQLite
def save_chat_history(conn, username, timestamp, day, user_message, assistant_response):
    cursor = conn.cursor()
    cursor.execute("""
        INSERT INTO chat_histories (username, timestamp, day, user_message, assistant_response)
        VALUES (?, ?, ?, ?, ?)
    """, (username, timestamp, day, user_message, assistant_response))
    conn.commit()
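

# Note: vectorize_documents.py is not shown here. It is assumed to expose the
# `embeddings` object imported at the top of this file and to have already
# built the "House_vectordb" Chroma index, e.g. (roughly) via
# Chroma.from_documents(docs, embeddings, persist_directory="House_vectordb").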


# Function to set up vectorstore for embeddings
def setup_vectorstore():
    embeddings = HuggingFaceEmbeddings()
    vectorstore = Chroma(persist_directory="House_vectordb", embedding_function=embeddings)
    return vectorstore


# Function to set up the chatbot chain
def chat_chain(vectorstore):
    llm = ChatGroq(model="llama-3.3-70b-versatile", temperature=0)
    retriever = vectorstore.as_retriever()
    # Buffer memory holds the running conversation; the chain reads it from
    # "chat_history" and stores the model reply under "answer"
    memory = ConversationBufferMemory(
        output_key="answer",
        memory_key="chat_history",
        return_messages=True
    )
    # The chain condenses follow-up questions, retrieves relevant chunks from
    # the vector store, and "stuffs" them into the prompt for the LLM
    chain = ConversationalRetrievalChain.from_llm(
        llm=llm,
        retriever=retriever,
        chain_type="stuff",
        memory=memory,
        verbose=True,
        return_source_documents=True
    )
    return chain


# Streamlit UI setup
st.set_page_config(page_title="House.Ai", page_icon="🤖", layout="centered")
st.title("🤖 House.Ai")
st.subheader("Ask our AI your general questions and queries")

# Step 1: Initialize the connection and check if the user is already logged in
if "conn" not in st.session_state:
    st.session_state.conn = setup_db()

if "username" not in st.session_state:
    username = st.text_input("Enter your name to proceed:")
    if username:
        with st.spinner("Loading chatbot interface... Please wait."):
            st.session_state.username = username
            st.session_state.chat_history = []  # Initialize empty chat history in memory
            st.session_state.vectorstore = setup_vectorstore()
            st.session_state.conversational_chain = chat_chain(st.session_state.vectorstore)
            st.success(f"Welcome, {username}! The chatbot interface is ready.")
else:
    username = st.session_state.username

# Step 2: Initialize components if not already set
if "conversational_chain" not in st.session_state:
    st.session_state.vectorstore = setup_vectorstore()
    st.session_state.conversational_chain = chat_chain(st.session_state.vectorstore)

# Step 3: Display the chat history in the UI
if "username" in st.session_state:
    st.subheader(f"Hello {username}, start your query below!")

    # Display chat history (messages exchanged between user and assistant)
    if st.session_state.chat_history:
        for message in st.session_state.chat_history:
            if message["role"] == "user":
                with st.chat_message("user"):
                    st.markdown(message["content"])
            elif message["role"] == "assistant":
                with st.chat_message("assistant"):
                    st.markdown(message["content"])
    # Input field for the user to type their message
    user_input = st.chat_input("Ask AI...")

    if user_input:
        with st.spinner("Processing your query... Please wait."):
            # Save user input to chat history in memory
            st.session_state.chat_history.append({"role": "user", "content": user_input})

            # Display the user's message in the chat UI
            with st.chat_message("user"):
                st.markdown(user_input)

            # Get the assistant's response from the chain
            with st.chat_message("assistant"):
                response = st.session_state.conversational_chain.invoke({"question": user_input})
                assistant_response = response["answer"]
                st.markdown(assistant_response)

            # Save the assistant's response to chat history in memory
            st.session_state.chat_history.append({"role": "assistant", "content": assistant_response})

            # Persist the exchange to the SQLite database
            timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            day = datetime.now().strftime("%A")  # Day of the week (e.g., Monday)
            save_chat_history(st.session_state.conn, username, timestamp, day, user_input, assistant_response)
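
# To run locally (assuming this file is saved as app.py, the usual Streamlit
# entry point): streamlit run app.py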