From 1d560e773791127ccd1ac685984fc16b6fd22c0b Mon Sep 17 00:00:00 2001
From: zachary62
Date: Sun, 23 Mar 2025 12:07:37 -0400
Subject: [PATCH] add demo notebook

---
 cookbook/pocketflow-agent/demo.ipynb | 300 +++++++++++++++++++++++++++
 1 file changed, 300 insertions(+)
 create mode 100644 cookbook/pocketflow-agent/demo.ipynb

diff --git a/cookbook/pocketflow-agent/demo.ipynb b/cookbook/pocketflow-agent/demo.ipynb
new file mode 100644
index 0000000..d6feebe
--- /dev/null
+++ b/cookbook/pocketflow-agent/demo.ipynb
@@ -0,0 +1,300 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "vscode": {
+     "languageId": "plaintext"
+    }
+   },
+   "outputs": [],
+   "source": [
+    "! pip install \"pocketflow>=0.0.1\"\n",
+    "! pip install \"aiohttp>=3.8.0\"\n",
+    "! pip install \"openai>=1.0.0\"\n",
+    "! pip install \"duckduckgo-search>=7.5.2\""
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "vscode": {
+     "languageId": "plaintext"
+    }
+   },
+   "outputs": [],
+   "source": [
+    "# utils.py\n",
+    "from openai import OpenAI\n",
+    "import os\n",
+    "from duckduckgo_search import DDGS\n",
+    "\n",
+    "def call_llm(prompt):\n",
+    "    client = OpenAI(api_key=os.environ.get(\"OPENAI_API_KEY\", \"your-api-key\"))\n",
+    "    r = client.chat.completions.create(\n",
+    "        model=\"gpt-4o\",\n",
+    "        messages=[{\"role\": \"user\", \"content\": prompt}]\n",
+    "    )\n",
+    "    return r.choices[0].message.content\n",
+    "\n",
+    "def search_web(query):\n",
+    "    results = DDGS().text(query, max_results=5)\n",
+    "    # Convert results to a string\n",
+    "    results_str = \"\\n\\n\".join([f\"Title: {r['title']}\\nURL: {r['href']}\\nSnippet: {r['body']}\" for r in results])\n",
+    "    return results_str\n",
+    "\n",
+    "print(\"## Testing call_llm\")\n",
+    "prompt = \"In a few words, what is the meaning of life?\"\n",
+    "print(f\"## Prompt: {prompt}\")\n",
+    "response = call_llm(prompt)\n",
+    "print(f\"## Response: {response}\")\n",
+    "\n",
+    "print(\"## Testing search_web\")\n",
+    "query = \"Who won the Nobel Prize in Physics 2024?\"\n",
+    "print(f\"## Query: {query}\")\n",
+    "results = search_web(query)\n",
+    "print(f\"## Results: {results}\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "vscode": {
+     "languageId": "plaintext"
+    }
+   },
+   "outputs": [],
+   "source": [
+    "# nodes.py\n",
+    "from pocketflow import Node\n",
+    "from utils import call_llm, search_web\n",
+    "import yaml\n",
+    "\n",
+    "class DecideAction(Node):\n",
+    "    def prep(self, shared):\n",
+    "        \"\"\"Prepare the context and question for the decision-making process.\"\"\"\n",
+    "        # Get the current context (default to \"No previous search\" if none exists)\n",
+    "        context = shared.get(\"context\", \"No previous search\")\n",
+    "        # Get the question from the shared store\n",
+    "        question = shared[\"question\"]\n",
+    "        # Return both for the exec step\n",
+    "        return question, context\n",
+    "\n",
+    "    def exec(self, inputs):\n",
+    "        \"\"\"Call the LLM to decide whether to search or answer.\"\"\"\n",
+    "        question, context = inputs\n",
+    "\n",
+    "        print(f\"šŸ¤” Agent deciding what to do next...\")\n",
+    "\n",
+    "        # Create a prompt to help the LLM decide what to do next with proper yaml formatting\n",
+    "        prompt = f\"\"\"\n",
+    "### CONTEXT\n",
+    "You are a research assistant that can search the web.\n",
+    "Question: {question}\n",
+    "Previous Research: {context}\n",
+    "\n",
+    "### ACTION SPACE\n",
+    "[1] search\n",
+    "    Description: Look up more information on the web\n",
+    "    Parameters:\n",
+    "        - query (str): What to search for\n",
+    "\n",
+    "[2] answer\n",
+    "    Description: Answer the question with current knowledge\n",
+    "    Parameters:\n",
+    "        - answer (str): Final answer to the question\n",
+    "\n",
+    "## NEXT ACTION\n",
+    "Decide the next action based on the context and available actions.\n",
+    "Return your response in this format:\n",
+    "\n",
+    "```yaml\n",
+    "thinking: |\n",
+    "    <your step-by-step reasoning process>\n",
+    "action: search OR answer\n",
+    "reason: <why you chose this action>\n",
+    "answer: <your answer, if action is answer>\n",
+    "search_query: <specific search query, if action is search>\n",
+    "```\n",
+    "IMPORTANT: Make sure to:\n",
+    "1. Use proper indentation (4 spaces) for all multi-line fields\n",
+    "2. Use the | character for multi-line text fields\n",
+    "3. Keep single-line fields without the | character\n",
+    "\"\"\"\n",
+    "\n",
+    "        # Call the LLM to make a decision\n",
+    "        response = call_llm(prompt)\n",
+    "\n",
+    "        # Parse the response to get the decision\n",
+    "        yaml_str = response.split(\"```yaml\")[1].split(\"```\")[0].strip()\n",
+    "        decision = yaml.safe_load(yaml_str)\n",
+    "\n",
+    "        return decision\n",
+    "\n",
+    "    def post(self, shared, prep_res, exec_res):\n",
+    "        \"\"\"Save the decision and determine the next step in the flow.\"\"\"\n",
+    "        # If LLM decided to search, save the search query\n",
+    "        if exec_res[\"action\"] == \"search\":\n",
+    "            shared[\"search_query\"] = exec_res[\"search_query\"]\n",
+    "            print(f\"šŸ” Agent decided to search for: {exec_res['search_query']}\")\n",
+    "        else:\n",
+    "            shared[\"context\"] = exec_res[\"answer\"] #save the context if LLM gives the answer without searching.\n",
+    "            print(f\"šŸ’” Agent decided to answer the question\")\n",
+    "\n",
+    "        # Return the action to determine the next node in the flow\n",
+    "        return exec_res[\"action\"]\n",
+    "\n",
+    "class SearchWeb(Node):\n",
+    "    def prep(self, shared):\n",
+    "        \"\"\"Get the search query from the shared store.\"\"\"\n",
+    "        return shared[\"search_query\"]\n",
+    "\n",
+    "    def exec(self, search_query):\n",
+    "        \"\"\"Search the web for the given query.\"\"\"\n",
+    "        # Call the search utility function\n",
+    "        print(f\"🌐 Searching the web for: {search_query}\")\n",
+    "        results = search_web(search_query)\n",
+    "        return results\n",
+    "\n",
+    "    def post(self, shared, prep_res, exec_res):\n",
+    "        \"\"\"Save the search results and go back to the decision node.\"\"\"\n",
+    "        # Add the search results to the context in the shared store\n",
+    "        previous = shared.get(\"context\", \"\")\n",
+    "        shared[\"context\"] = previous + \"\\n\\nSEARCH: \" + shared[\"search_query\"] + \"\\nRESULTS: \" + exec_res\n",
+    "\n",
+    "        print(f\"šŸ“š Found information, analyzing results...\")\n",
+    "\n",
+    "        # Always go back to the decision node after searching\n",
+    "        return \"decide\"\n",
+    "\n",
+    "class AnswerQuestion(Node):\n",
+    "    def prep(self, shared):\n",
+    "        \"\"\"Get the question and context for answering.\"\"\"\n",
+    "        return shared[\"question\"], shared.get(\"context\", \"\")\n",
+    "\n",
+    "    def exec(self, inputs):\n",
+    "        \"\"\"Call the LLM to generate a final answer.\"\"\"\n",
+    "        question, context = inputs\n",
+    "\n",
+    "        print(f\"āœļø Crafting final answer...\")\n",
+    "\n",
+    "        # Create a prompt for the LLM to answer the question\n",
+    "        prompt = f\"\"\"\n",
+    "### CONTEXT\n",
+    "Based on the following information, answer the question.\n",
+    "Question: {question}\n",
+    "Research: {context}\n",
+    "\n",
+    "## YOUR ANSWER:\n",
+    "Provide a comprehensive answer using the research results.\n",
+    "\"\"\"\n",
+    "        # Call the LLM to generate an answer\n",
+    "        answer = call_llm(prompt)\n",
+    "        return answer\n",
+    "\n",
+    "    def post(self, shared, prep_res, exec_res):\n",
+    "        \"\"\"Save the final answer and complete the flow.\"\"\"\n",
+    "        # Save the answer in the shared store\n",
+    "        shared[\"answer\"] = exec_res\n",
+    "\n",
+    "        print(f\"āœ… Answer generated successfully\")\n",
+    "\n",
+    "        # We're done - no need to continue the flow\n",
+    "        return \"done\""
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "vscode": {
+     "languageId": "plaintext"
+    }
+   },
+   "outputs": [],
+   "source": [
+    "# flow.py\n",
+    "from pocketflow import Flow\n",
+    "\n",
+    "def create_agent_flow():\n",
+    "    \"\"\"\n",
+    "    Create and connect the nodes to form a complete agent flow.\n",
+    "\n",
+    "    The flow works like this:\n",
+    "    1. DecideAction node decides whether to search or answer\n",
+    "    2. If search, go to SearchWeb node\n",
+    "    3. If answer, go to AnswerQuestion node\n",
+    "    4. After SearchWeb completes, go back to DecideAction\n",
+    "\n",
+    "    Returns:\n",
+    "        Flow: A complete research agent flow\n",
+    "    \"\"\"\n",
+    "    # Create instances of each node\n",
+    "    decide = DecideAction()\n",
+    "    search = SearchWeb()\n",
+    "    answer = AnswerQuestion()\n",
+    "\n",
+    "    # Connect the nodes\n",
+    "    # If DecideAction returns \"search\", go to SearchWeb\n",
+    "    decide - \"search\" >> search\n",
+    "\n",
+    "    # If DecideAction returns \"answer\", go to AnswerQuestion\n",
+    "    decide - \"answer\" >> answer\n",
+    "\n",
+    "    # After SearchWeb completes and returns \"decide\", go back to DecideAction\n",
+    "    search - \"decide\" >> decide\n",
+    "\n",
+    "    # Create and return the flow, starting with the DecideAction node\n",
+    "    return Flow(start=decide)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "vscode": {
+     "languageId": "plaintext"
+    }
+   },
+   "outputs": [],
+   "source": [
+    "# main.py\n",
+    "import sys\n",
+    "\n",
+    "def main():\n",
+    "    \"\"\"Simple function to process a question.\"\"\"\n",
+    "    # Default question\n",
+    "    default_question = \"Who won the Nobel Prize in Physics 2024?\"\n",
+    "\n",
+    "    # Get question from command line if provided with --\n",
+    "    question = default_question\n",
+    "    for arg in sys.argv[1:]:\n",
+    "        if arg.startswith(\"--\"):\n",
+    "            question = arg[2:]\n",
+    "            break\n",
+    "\n",
+    "    # Create the agent flow\n",
+    "    agent_flow = create_agent_flow()\n",
+    "\n",
+    "    # Process the question\n",
+    "    shared = {\"question\": question}\n",
+    "    print(f\"šŸ¤” Processing question: {question}\")\n",
+    "    agent_flow.run(shared)\n",
+    "    print(\"\\nšŸŽÆ Final Answer:\")\n",
+    "    print(shared.get(\"answer\", \"No answer found\"))\n",
+    "\n",
+    "main()\n"
+   ]
+  }
+ ],
+ "metadata": {
+  "language_info": {
+   "name": "python"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}