# app.py — Law Assist: a Streamlit chatbot answering legal questions from a PDF
# (removed: GitHub page-scrape artifacts — navigation chrome and gutter line
# numbers that were captured along with the source and are not part of it)
import streamlit as st
import os
from transformers import GPT2TokenizerFast
from langchain.document_loaders import PyPDFLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import FAISS
from langchain.chains.question_answering import load_qa_chain
from langchain.llms import OpenAI
from langchain.chains import ConversationalRetrievalChain
from st_files_connection import FilesConnection
# Retrieve the OpenAI API key from Streamlit secrets and export it via the
# environment so LangChain components that read the env var also find it.
openai_api_key = st.secrets["OPENAI_API_KEY"]
os.environ["OPENAI_API_KEY"] = openai_api_key

st.title("Law Assist")
st.write("Ask me legal questions, and I'll provide answers!")
st.write("Created by: Vidhan Mehta, Sumith Sigtia, Shabiul Hasnain Siddiqui, Swathi")

# Path to the merged legal-reference PDF that backs the retriever.
pdf_path = "merge.pdf"


@st.cache_resource
def _build_qa_chain(path):
    """Load the PDF, embed its pages, and build the conversational QA chain.

    Cached with ``st.cache_resource`` so the expensive steps — PDF parsing,
    (paid) OpenAI embedding calls, and FAISS index construction — run once
    per server process instead of on every Streamlit rerun, which the
    original code did on each user interaction.
    """
    loader = PyPDFLoader(path)
    pages = loader.load_and_split()
    # Each page is small enough to serve directly as a retrieval chunk.
    embeddings = OpenAIEmbeddings(api_key=openai_api_key)
    db = FAISS.from_documents(pages, embeddings)
    # NOTE(review): the original also built an unused load_qa_chain(...)
    # "stuff" chain; only this ConversationalRetrievalChain is ever queried,
    # so the dead chain was dropped.
    return ConversationalRetrievalChain.from_llm(OpenAI(temperature=0.1), db.as_retriever())


qa = _build_qa_chain(pdf_path)

# Conversation history as (speaker, text) pairs consumed by the UI below.
# NOTE(review): a module-level list is re-created on every Streamlit rerun,
# so multi-turn memory does not actually persist; st.session_state would be
# required for that. Kept as a plain list to preserve existing behavior.
chat_history = []
def generate_response(user_input):
    """Run one conversational-retrieval turn and record it in the history.

    Args:
        user_input: The question typed by the user.

    Returns:
        The model's answer string. As a side effect, appends the
        ("You:", question) and ("Bot:", answer) pairs to the module-level
        ``chat_history`` list that the UI renders.
    """
    result = qa({"question": user_input, "chat_history": chat_history})
    answer = result['answer']
    chat_history.extend([("You:", user_input), ("Bot:", answer)])
    return answer
# --- Chat UI: read a question, answer it, and render the transcript --------
user_input = st.text_input("You:", value="")
if user_input:
    if user_input.lower() == 'exit':
        # Sentinel word ends the session with a goodbye message.
        st.write("Thank you for using the LAW Assist chatbot!")
    else:
        bot_response = generate_response(user_input)
        # Render the full conversation so far, speaker label then text.
        for speaker, text in chat_history:
            st.text(speaker)
            st.write(text)
    # NOTE(review): the original ended with `user_input = ""` intending to
    # clear the input box, but reassigning the local variable has no effect
    # on the widget (clearing requires a widget key + st.session_state).
    # That ineffective dead statement was removed.