Mirror of https://github.com/Shubhamsaboo/awesome-llm-apps.git, synced 2026-03-08 23:13:56 -05:00.

Commit: add docstrings.
Modified files:
- rag_tutorials/autonomous_rag/autorag.py
- rag_tutorials/llama3.1_local_rag/llama3.1_local_rag.py
- rag_tutorials/local_hybrid_search_rag/local_main.py
This commit is contained in:
@@ -19,6 +19,19 @@ DB_URL = "postgresql+psycopg://ai:ai@localhost:5532/ai"
|
||||
# Function to set up the Assistant, utilizing caching for resource efficiency
|
||||
@st.cache_resource
|
||||
def setup_assistant(api_key: str) -> Agent:
|
||||
"""Initializes and returns an AI Assistant agent with caching for efficiency.
|
||||
|
||||
This function sets up an AI Assistant agent using the OpenAI GPT-4o-mini model
|
||||
and configures it with a knowledge base, storage, and web search tools. The
|
||||
assistant is designed to first search its knowledge base before querying the
|
||||
internet, providing clear and concise answers.
|
||||
|
||||
Args:
|
||||
api_key (str): The API key required to access the OpenAI services.
|
||||
|
||||
Returns:
|
||||
Agent: An initialized Assistant agent configured with a language model,
|
||||
knowledge base, storage, and additional tools for enhanced functionality."""
|
||||
llm = OpenAIChat(id="gpt-4o-mini", api_key=api_key)
|
||||
# Set up the Assistant with storage, knowledge base, and tools
|
||||
return Agent(
|
||||
@@ -47,6 +60,16 @@ def setup_assistant(api_key: str) -> Agent:
|
||||
|
||||
# Function to add a PDF document to the knowledge base
|
||||
def add_document(agent: Agent, file: BytesIO):
|
||||
"""Add a PDF document to the agent's knowledge base.
|
||||
|
||||
This function reads a PDF document from a file-like object and adds its contents to the specified agent's knowledge base. If the document is successfully read, the contents are loaded into the knowledge base with the option to upsert existing data.
|
||||
|
||||
Args:
|
||||
agent (Agent): The agent whose knowledge base will be updated.
|
||||
file (BytesIO): A file-like object containing the PDF document to be added.
|
||||
|
||||
Returns:
|
||||
None: The function does not return a value but provides feedback on whether the operation was successful."""
|
||||
reader = PDFReader()
|
||||
docs = reader.read(file)
|
||||
if docs:
|
||||
@@ -57,10 +80,35 @@ def add_document(agent: Agent, file: BytesIO):
|
||||
|
||||
# Function to query the Assistant and return a response
|
||||
def query_assistant(agent: Agent, question: str) -> str:
    """Query the Assistant and return its full response as one string.

    The agent streams its answer as an iterable of text deltas; this
    function concatenates them into a single string.

    Args:
        agent (Agent): The Assistant agent used to process the query.
        question (str): The question to be asked to the Assistant.

    Returns:
        str: The complete response generated by the Assistant for the
            given question.
    """
    # str.join consumes the delta stream directly; building an
    # intermediate list (the original `[delta for delta in ...]`)
    # is redundant.
    return "".join(agent.run(question))
|
||||
|
||||
# Main function to handle Streamlit app layout and interactions
|
||||
def main():
|
||||
"""Main function to handle the layout and interactions for the Streamlit app.
|
||||
|
||||
This function sets up the Streamlit app configuration, handles user inputs such
|
||||
as OpenAI API key, PDF uploads, and user questions, and interacts with an
|
||||
autonomous retrieval-augmented generation (RAG) assistant based on GPT-4o.
|
||||
|
||||
The app allows users to upload PDF documents to enhance the knowledge base and
|
||||
submit questions to receive generated responses.
|
||||
|
||||
Side Effects:
|
||||
- Configures Streamlit page and title.
|
||||
- Prompts users to input an OpenAI API key and a question.
|
||||
- Allows users to upload PDF documents.
|
||||
- Displays responses generated by querying an assistant.
|
||||
|
||||
Raises:
|
||||
StreamlitWarning: If the OpenAI API key is not provided."""
|
||||
st.set_page_config(page_title="AutoRAG", layout="wide")
|
||||
st.title("🤖 Auto-RAG: Autonomous RAG with GPT-4o")
|
||||
|
||||
|
||||
@@ -28,6 +28,17 @@ if webpage_url:
|
||||
|
||||
# 3. Call Ollama Llama3 model
|
||||
def ollama_llm(question, context):
    """Generate an answer to *question* with the Ollama Llama3 model.

    The question and its supporting context are merged into a single
    prompt and sent to the module-level ``ollama`` model instance.

    Args:
        question (str): The question to be answered by the model.
        context (str): The context or additional information related to
            the question.

    Returns:
        str: The model's response, stripped of leading and trailing
            whitespace.
    """
    prompt = f"Question: {question}\n\nContext: {context}"
    return ollama.invoke([("human", prompt)]).content.strip()
|
||||
@@ -36,9 +47,24 @@ if webpage_url:
|
||||
retriever = vectorstore.as_retriever()
|
||||
|
||||
def combine_docs(docs):
    """Merge the page contents of several documents into one string.

    Args:
        docs (list): Document objects, each exposing a ``page_content``
            attribute.

    Returns:
        str: The concatenated ``page_content`` of every document,
            separated by blank lines (two newline characters).
    """
    separator = "\n\n"
    contents = [entry.page_content for entry in docs]
    return separator.join(contents)
|
||||
|
||||
def rag_chain(question):
    """Answer a question via retrieval-augmented generation.

    Relevant documents are fetched from the module-level ``retriever``,
    merged into a single context string, and passed together with the
    question to the Ollama Llama3 model.

    Args:
        question (str): The question or query that needs to be answered.

    Returns:
        str: The language-model response grounded in the retrieved and
            formatted documents.
    """
    documents = retriever.invoke(question)
    return ollama_llm(question, combine_docs(documents))
|
||||
|
||||
@@ -20,6 +20,23 @@ Instead, you MUST treat the context as if its contents are entirely part of your
|
||||
""".strip()
|
||||
|
||||
def initialize_config(settings: Dict[str, Any]) -> RAGLiteConfig:
|
||||
"""Initializes and returns a RAGLiteConfig object based on provided settings.
|
||||
|
||||
This function constructs a RAGLiteConfig object using the database URL,
|
||||
language model path, and embedder path specified in the `settings` dictionary.
|
||||
The configuration includes default options for embedder normalization and
|
||||
chunk size. A reranker is also initialized with a predefined model.
|
||||
|
||||
Args:
|
||||
settings (Dict[str, Any]): A dictionary containing configuration
|
||||
parameters. Expected keys are 'DBUrl', 'LLMPath', and 'EmbedderPath'.
|
||||
|
||||
Returns:
|
||||
RAGLiteConfig: An initialized configuration object for RAGLite.
|
||||
|
||||
Raises:
|
||||
ValueError: If there is an error in the configuration process, such as
|
||||
missing keys or invalid values in the settings dictionary."""
|
||||
try:
|
||||
return RAGLiteConfig(
|
||||
db_url=settings["DBUrl"],
|
||||
@@ -33,6 +50,17 @@ def initialize_config(settings: Dict[str, Any]) -> RAGLiteConfig:
|
||||
raise ValueError(f"Configuration error: {e}")
|
||||
|
||||
def process_document(file_path: str) -> bool:
|
||||
"""Processes a document by inserting it into a system with a given configuration.
|
||||
|
||||
This function attempts to insert a document specified by the file path into
|
||||
a system using a predefined configuration stored in the session state. It
|
||||
logs an error if the operation fails.
|
||||
|
||||
Args:
|
||||
file_path (str): The path to the document file that needs to be processed.
|
||||
|
||||
Returns:
|
||||
bool: True if the document is successfully processed; False if an error occurs."""
|
||||
try:
|
||||
if not st.session_state.get('my_config'):
|
||||
raise ValueError("Configuration not initialized")
|
||||
@@ -43,6 +71,18 @@ def process_document(file_path: str) -> bool:
|
||||
return False
|
||||
|
||||
def perform_search(query: str) -> List[dict]:
|
||||
"""Conducts a hybrid search and returns reranked results.
|
||||
|
||||
This function performs a hybrid search using the provided query and
|
||||
attempts to retrieve and rerank relevant chunks. It returns a list of
|
||||
reranked search results.
|
||||
|
||||
Args:
|
||||
query (str): The search query string.
|
||||
|
||||
Returns:
|
||||
List[dict]: A list of dictionaries containing reranked search results.
|
||||
Returns an empty list if no results are found or if an error occurs."""
|
||||
try:
|
||||
chunk_ids, scores = hybrid_search(query, num_results=10, config=st.session_state.my_config)
|
||||
if not chunk_ids:
|
||||
|
||||
Reference in New Issue
Block a user