"""Streamlit app: chat with a PDF using a local Llama 3 model served by Ollama.

The PDF is ingested into a Chroma vector store (in a per-session temp
directory) via embedchain, and questions are answered with RAG over it.
Requires an Ollama server listening on http://localhost:11434.
"""

# Import necessary libraries
import os
import tempfile

import streamlit as st
from embedchain import App


def embedchain_bot(db_path):
    """Create an embedchain App wired to a local Ollama llama3 model.

    Args:
        db_path: Directory where the Chroma vector database is persisted.

    Returns:
        A configured ``embedchain.App`` using llama3:instruct for both the
        LLM and the embedder, talking to Ollama at localhost:11434.
    """
    return App.from_config(
        config={
            "llm": {
                "provider": "ollama",
                "config": {
                    "model": "llama3:instruct",
                    "max_tokens": 250,
                    "temperature": 0.5,
                    "stream": True,
                    "base_url": 'http://localhost:11434',
                },
            },
            "vectordb": {"provider": "chroma", "config": {"dir": db_path}},
            "embedder": {
                "provider": "ollama",
                "config": {
                    "model": "llama3:instruct",
                    "base_url": 'http://localhost:11434',
                },
            },
        }
    )


st.title("Chat with PDF")
# Fixed typo: "wiht" -> "with" in the user-facing caption.
st.caption("This app allows you to chat with a PDF using Llama3 running locally with Ollama!")

# Fresh temp directory per session so the vector DB starts empty.
db_path = tempfile.mkdtemp()
# Create an instance of the embedchain App
app = embedchain_bot(db_path)

# Upload a PDF file
pdf_file = st.file_uploader("Upload a PDF file", type="pdf")

# If a PDF file is uploaded, add it to the knowledge base
if pdf_file:
    # Persist the upload to disk because embedchain ingests from a file path.
    with tempfile.NamedTemporaryFile(delete=False, suffix=".pdf") as f:
        f.write(pdf_file.getvalue())
    # Ingest AFTER the context closes the handle: re-opening a still-open
    # delete=False temp file by path can fail on Windows.
    app.add(f.name, data_type="pdf_file")
    os.remove(f.name)
    st.success(f"Added {pdf_file.name} to knowledge base!")

# Ask a question about the PDF
prompt = st.text_input("Ask a question about the PDF")
# Display the answer
if prompt:
    answer = app.chat(prompt)
    st.write(answer)