Chat_Bot_using_LangChain / pages /1_Interactive Chat Bot.py
AjayKr09's picture
Update pages/1_Interactive Chat Bot.py
392151a verified
raw
history blame
1.92 kB
import streamlit as st
import os
from langchain_core.prompts import ChatPromptTemplate
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_core.prompts import MessagesPlaceholder
from langchain.memory import ConversationBufferWindowMemory
from operator import itemgetter
from langchain_core.runnables import RunnableLambda, RunnablePassthrough
# Configure the Google Generative AI API key.
# SECURITY FIX: a live API key was previously hard-coded here. Credentials
# must never be committed to source — read the key from the environment,
# falling back to Streamlit secrets (st.secrets behaves like a mapping).
if 'GOOGLE_API_KEY' not in os.environ:
    os.environ['GOOGLE_API_KEY'] = st.secrets.get('GOOGLE_API_KEY', '')
if not os.environ['GOOGLE_API_KEY']:
    st.error('GOOGLE_API_KEY is not set. Define it as an environment variable or in Streamlit secrets.')
    st.stop()
# Initialize the Gemini chat model: deterministic output (temperature=0),
# capped response length, and system messages folded into the human turn
# (Gemini Pro does not accept a separate system role).
model_gemini = ChatGoogleGenerativeAI(model='gemini-pro', temperature=0, max_output_tokens=500, convert_system_message_to_human=True)
# Chat prompt layout: fixed system instruction, then the rolling
# conversation history, then the user's newest message.
prompt = ChatPromptTemplate.from_messages([
    ('system', 'you are a good assistant.'),
    MessagesPlaceholder(variable_name='history'),
    ('human', '{input}'),
])
# Conversation memory lives in st.session_state so it survives Streamlit
# reruns; keep only the most recent k=10 exchanges as message objects.
if 'memory' not in st.session_state:
    st.session_state.memory = ConversationBufferWindowMemory(k=10, return_messages=True)

# Runnable that pulls the stored messages out of memory and extracts the
# 'history' list expected by the prompt's MessagesPlaceholder.
_history_loader = RunnableLambda(st.session_state.memory.load_memory_variables) | itemgetter("history")

# Full pipeline: attach history to the input dict, render the prompt,
# then call the Gemini model.
chain = RunnablePassthrough.assign(history=_history_loader) | prompt | model_gemini
# Streamlit UI: page title, user input box, and submit handling.
st.title("Interactive Chatbot")
# Initialize session state for user input
if 'user_input' not in st.session_state:
    st.session_state.user_input = ""
# Input from user
user_input = st.text_area("User: ", st.session_state.user_input, height=100)
if st.button("Submit"):
    # ROBUSTNESS FIX: previously an empty/whitespace-only submission was
    # sent to the model and saved into memory. Skip the call entirely
    # unless there is real content.
    if user_input.strip():
        response = chain.invoke({"input": user_input})
        st.write(f"Assistant: {response.content}")
        # Persist this exchange so it is injected as 'history' next turn.
        st.session_state.memory.save_context({"input": user_input}, {"output": response.content})
        st.session_state.user_input = ""  # Clear the input box (takes effect on the next rerun)
    else:
        st.warning("Please enter a message before submitting.")
# Optional debug view: dump the raw contents of the conversation memory.
if st.checkbox("Show Chat History"):
    st.write(st.session_state.memory.load_memory_variables({}))