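"""Streamlit Q&A app over "Alice's Adventures in Wonderland".

The app downloads the book from Project Gutenberg, indexes it in a FAISS vector
store with OpenAI embeddings, and answers questions through a conversational
LangChain agent with buffer memory.

A minimal way to try it locally (assuming this file is saved as app.py):
    pip install streamlit langchain openai faiss-cpu tiktoken beautifulsoup4 requests
    streamlit run app.py
"""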
import streamlit as st
import os
import requests
from bs4 import BeautifulSoup
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import FAISS
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.memory import ConversationBufferMemory
from langchain.chat_models import ChatOpenAI
from langchain.agents import initialize_agent, AgentType
from langchain.tools import Tool
from langchain.chains import RetrievalQA
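# NOTE: these import paths assume a pre-0.1.x LangChain release; on newer releases
# the chat model, embeddings, and FAISS wrappers live in the langchain_openai and
# langchain_community packages, so the imports above may need updating.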
# Page configuration
st.set_page_config(page_title="Book Q&A System", page_icon="π")
# App title
st.title("Alice in Wonderland Q&A System")
# Sidebar for API key
with st.sidebar:
st.title("Settings")
# API Key input with clear instructions
st.markdown("### OpenAI API Key")
st.markdown("**Enter your OpenAI API key to use this application**")
openai_api_key = st.text_input("OpenAI API Key", type="password",
help="Get your API key from https://platform.openai.com/account/api-keys")
# Test button for API key validation
if st.button("Validate API Key"):
if not openai_api_key:
st.error("Please enter an API key")
else:
try:
# Set the API key in environment variables
os.environ["OPENAI_API_KEY"] = openai_api_key
# Initialize a basic model to test
llm = ChatOpenAI(
model_name="gpt-3.5-turbo",
temperature=0
)
# Test with a simple prompt
result = llm.predict("Say 'API key is valid' if you can read this.")
if "API key is valid" in result:
st.success("API key is valid!")
else:
st.error("API key validation failed.")
except Exception as e:
st.error(f"API key error: {str(e)}")
# Reset button
if st.button("Reset Chat"):
st.session_state.messages = []
st.session_state.agent = None
st.session_state.book_processed = False
st.experimental_rerun()
# Initialize session state
if "messages" not in st.session_state:
st.session_state.messages = []
if "book_processed" not in st.session_state:
st.session_state.book_processed = False
# Display chat history
for message in st.session_state.messages:
with st.chat_message(message["role"]):
st.markdown(message["content"])
# Alice in Wonderland URL
book_url = "https://www.gutenberg.org/files/11/11-0.txt"
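# NOTE: this is the plain-text (UTF-8) edition of Project Gutenberg eBook #11, so the
# BeautifulSoup pass in process_book() is mainly a precaution in case HTML is served.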
# Function to fetch and process the book
@st.cache_resource
def process_book(book_url, api_key):
    """Download the book, split it into chunks, and index it in a FAISS vector store."""
    # Download the book
    response = requests.get(book_url)
    if response.status_code != 200:
        # Signal failure to the caller instead of returning a vector store
        return None

    # Parse the text
    text = response.text

    # Clean the text (simple cleaning)
    soup = BeautifulSoup(text, 'html.parser')
    cleaned_text = soup.get_text()

    # Split the text into chunks
    text_splitter = RecursiveCharacterTextSplitter(
        chunk_size=1000,
        chunk_overlap=200,
        length_function=len
    )
    chunks = text_splitter.split_text(cleaned_text)

    # Create embeddings using OpenAI (use the api_key argument, not the global)
    embeddings = OpenAIEmbeddings(openai_api_key=api_key)

    # Create vector store
    vector_store = FAISS.from_texts(chunks, embeddings)
    return vector_store
# Function to create a LangChain Agent with tools
def create_langchain_agent(vector_store):
    # Set API key for the session
    os.environ["OPENAI_API_KEY"] = openai_api_key

    llm = ChatOpenAI(
        model_name="gpt-3.5-turbo",
        temperature=0.7
    )

    # Create a retrieval tool
    retrieval_qa = RetrievalQA.from_chain_type(
        llm=llm,
        chain_type="stuff",
        retriever=vector_store.as_retriever(search_kwargs={"k": 3})
    )
    tools = [
        Tool(
            name="BookQATool",
            func=retrieval_qa.run,
            description="Useful for answering questions about Alice in Wonderland. Input should be a question about the book."
        )
    ]

    # Create memory
    memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)

    # Initialize agent
    agent = initialize_agent(
        tools,
        llm,
        agent=AgentType.CHAT_CONVERSATIONAL_REACT_DESCRIPTION,
        verbose=True,
        memory=memory,
        handle_parsing_errors=True
    )
    return agent
# Main content area
if not openai_api_key:
    # Display prominent instructions if no API key
    st.warning("Please enter your OpenAI API key in the sidebar to use this application.")
else:
    # Process the book when needed
    if not st.session_state.book_processed:
        with st.spinner("Processing Alice in Wonderland... This may take a minute."):
            vector_store = process_book(book_url, openai_api_key)
            if vector_store is None:
                st.error("Error downloading the book. Please try again later.")
                st.stop()
            st.session_state.agent = create_langchain_agent(vector_store)
            st.session_state.book_processed = True
            st.success("Alice in Wonderland processed successfully!")

    # Chat input - only show if API key is provided
    if prompt := st.chat_input("Ask a question about Alice in Wonderland"):
        # Add user message to chat
        st.session_state.messages.append({"role": "user", "content": prompt})

        # Display user message
        with st.chat_message("user"):
            st.markdown(prompt)

        # Generate and display assistant response
        with st.chat_message("assistant"):
            with st.spinner("Thinking..."):
                try:
                    response = st.session_state.agent.run(input=prompt)
                    st.markdown(response)
                except Exception as e:
                    st.error(f"Error: {str(e)}")
                    response = "I encountered an error processing your question. Please try a different question or reset the chat."
                    st.markdown(response)

        # Add assistant response to chat
        st.session_state.messages.append({"role": "assistant", "content": response})