diff --git a/local_llama3.1_tool_use/README.md b/local_llama3.1_tool_use/README.md
new file mode 100644
index 0000000..19ef874
--- /dev/null
+++ b/local_llama3.1_tool_use/README.md
@@ -0,0 +1,46 @@
+## 🦙 Local Llama3 Tool Use
+This Streamlit app demonstrates function calling with the local Llama3 model using Ollama. It allows users to interact with an AI assistant that can access specific tools based on user selection.
+
+### Features
+- Utilizes local Llama3 model via Ollama as LLM
+- Integrates YFinance for stock data retrieval and SerpAPI for web search capabilities
+- Dynamic tool selection through a user-friendly sidebar
+- Real-time chat interface with the AI assistant
+
+### How to get Started?
+
+1. Clone the GitHub repository
+
+```bash
+git clone https://github.com/Shubhamsaboo/awesome-llm-apps.git
+cd awesome-llm-apps/local_llama3.1_tool_use
+```
+2. Install the required dependencies:
+
+```bash
+pip install -r requirements.txt
+```
+
+3. Get your SerpAPI API Key
+
+- Set up your SerpAPI API key: Export your SerpAPI API key as an environment variable.
+```bash
+export SERPAPI_API_KEY=your_api_key_here
+```
+
+4. Run the Streamlit App
+```bash
+streamlit run llama3_tool_use.py
+```
+
+## How it Works?
+
+1. **Tool Selection:** Users can select which tools (YFinance and/or SerpAPI) they want the assistant to use via checkboxes in the sidebar.
+
+2. **Assistant Initialization:** The app initializes or updates the assistant based on the selected tools.
+
+3. **Chat Interface:** Users can ask questions through a chat input, and the assistant responds using the enabled tools.
+
+4. **Real-time Response:** The assistant's response is displayed in real-time, with a typing indicator.
+
+5. **Tool Usage Display:** The app shows which tools are currently enabled in the sidebar.
\ No newline at end of file diff --git a/local_llama3.1_tool_use/llama3_tool_use.py b/local_llama3.1_tool_use/llama3_tool_use.py new file mode 100644 index 0000000..f42872c --- /dev/null +++ b/local_llama3.1_tool_use/llama3_tool_use.py @@ -0,0 +1,90 @@ +import streamlit as st +import os +from phi.assistant import Assistant +from phi.llm.ollama import Ollama +from phi.tools.yfinance import YFinanceTools +from phi.tools.serpapi_tools import SerpApiTools + +st.set_page_config(page_title="Llama-3 Tool Use", page_icon="🦙") + +# Ensure SERPAPI_API_KEY is set +if 'SERPAPI_API_KEY' not in os.environ: + st.error("Please set the SERPAPI_API_KEY environment variable.") + st.stop() + +def get_assistant(tools): + return Assistant( + name="llama3_assistant", + llm=Ollama(model="llama3"), + tools=tools, + description="You are a helpful assistant that can access specific tools based on user selection.", + show_tool_calls=True, + debug_mode=True, + # This setting adds the current datetime to the instructions + add_datetime_to_instructions=True, + + ) + +st.title("🦙 Local Llama-3 Tool Use") +st.markdown(""" +This app demonstrates function calling with the local Llama3 model using Ollama. +Select tools in the sidebar and ask relevant questions! 
+""") + +# Sidebar for tool selection +st.sidebar.title("Tool Selection") +use_yfinance = st.sidebar.checkbox("YFinance (Stock Data)", value=True) +use_serpapi = st.sidebar.checkbox("SerpAPI (Web Search)", value=True) + +# Initialize or update the assistant based on selected tools +tools = [] +if use_yfinance: + tools.append(YFinanceTools(stock_price=True, company_info=True)) +if use_serpapi: + tools.append(SerpApiTools()) + +if "assistant" not in st.session_state or st.session_state.get("tools") != tools: + st.session_state.assistant = get_assistant(tools) + st.session_state.tools = tools + st.session_state.messages = [] # Reset messages when tools change + +# Display current tool status +st.sidebar.markdown("### Current Tools:") +st.sidebar.markdown(f"- YFinance: {'Enabled' if use_yfinance else 'Disabled'}") +st.sidebar.markdown(f"- SerpAPI: {'Enabled' if use_serpapi else 'Disabled'}") + +# Chat interface +for message in st.session_state.get("messages", []): + with st.chat_message(message["role"]): + st.markdown(message["content"]) + +if prompt := st.chat_input("Ask a question based on the enabled tools"): + st.session_state.messages.append({"role": "user", "content": prompt}) + with st.chat_message("user"): + st.markdown(prompt) + + with st.chat_message("assistant"): + response_container = st.empty() + response = "" + for chunk in st.session_state.assistant.run(prompt): + response += chunk + response_container.write(response + "▌") + response_container.write(response) + st.session_state.messages.append({"role": "assistant", "content": response}) + +# Sidebar instructions +st.sidebar.markdown(""" +### How to use: +1. Select the tools you want to use in the sidebar +2. Ask questions related to the enabled tools +3. The assistant will use only the selected tools to answe +### Note: +Make sure you have set the SERPAPI_API_KEY environment variable to use the SerpAPI tool. 
+""") + +st.sidebar.markdown(""" +### Sample questions: +- YFinance: "What's the current price of AAPL?" +- SerpAPI: "What are the latest developments in AI?" +- Both: "Compare TSLA stock price with recent news about Tesla's performance" +""") diff --git a/local_llama3.1_tool_use/requirements.txt b/local_llama3.1_tool_use/requirements.txt new file mode 100644 index 0000000..f3bea12 --- /dev/null +++ b/local_llama3.1_tool_use/requirements.txt @@ -0,0 +1,3 @@ +streamlit +ollama +phidata \ No newline at end of file