From 194a97a31d4e5835821b507197c840f31b287cb6 Mon Sep 17 00:00:00 2001
From: zachary62
Date: Fri, 27 Dec 2024 17:42:28 +0000
Subject: [PATCH] demo

---
 cookbook/demo.ipynb | 79 +++++++++++++++++++++++++++------------------
 1 file changed, 47 insertions(+), 32 deletions(-)

diff --git a/cookbook/demo.ipynb b/cookbook/demo.ipynb
index a5a6f3c..db067a5 100644
--- a/cookbook/demo.ipynb
+++ b/cookbook/demo.ipynb
@@ -2,28 +2,9 @@
  "cells": [
   {
    "cell_type": "code",
-   "execution_count": 2,
+   "execution_count": null,
    "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "No relevant file found: the question has no relevant file because while some files discuss startups, none specifically address how to find or generate startup ideas\n",
-      "No question asked\n"
-     ]
-    },
-    {
-     "data": {
-      "text/plain": [
-       "'default'"
-      ]
-     },
-     "execution_count": 2,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
+   "outputs": [],
    "source": [
     "# Example App for text summarization & QA using minillmflow\n",
     "from minillmflow import Node, BatchNode, Flow, BatchFlow, AsyncNode, AsyncFlow, BatchAsyncFlow\n",
@@ -37,8 +18,15 @@
     "        model=\"gpt-4\",\n",
     "        messages=[{\"role\": \"user\", \"content\": prompt}]\n",
     "    )\n",
-    "    return r.choices[0].message.content\n",
-    "\n",
+    "    return r.choices[0].message.content"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
     "# 2) Create a shared store (dict) for Node/Flow data exchange.\n",
     "# This can be replaced with a DB or other storage.\n",
     "# Design the structure / schema based on the app requirements.\n",
@@ -60,8 +48,15 @@
     "\n",
     "load_data = LoadData()\n",
     "# Run the data-loading node once\n",
-    "load_data.run(shared)\n",
-    "\n",
+    "load_data.run(shared)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
     "# 4) Create a Node that summarizes a single file using the LLM.\n",
     "class SummarizeFile(Node):\n",
     "    def prep(self, shared):\n",
@@ -81,8 +76,15 @@
     "# For testing, we set params directly on the node.\n",
     "# In real usage, you'd set them in a Flow or BatchFlow.\n",
     "summarize_file.set_params({\"filename\":\"addiction.txt\"})\n",
-    "summarize_file.run(shared)\n",
-    "\n",
+    "summarize_file.run(shared)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
     "# 5) If data is large, we can apply a map-reduce pattern:\n",
     "# - MapSummaries(BatchNode) => chunk the file and summarize each chunk\n",
     "# - ReduceSummaries(Node) => combine those chunk-level summaries\n",
@@ -135,8 +137,15 @@
     "file_summary_flow = Flow(start=map_summaries)\n",
     "# When a flow params is set, it will recursively set its params to all nodes in the flow\n",
     "file_summary_flow.set_params({\"filename\":\"before.txt\"})\n",
-    "file_summary_flow.run(shared)\n",
-    "\n",
+    "file_summary_flow.run(shared)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
     "# 7) Summarize all files using a BatchFlow that reruns file_summary_flow for each file\n",
     "class SummarizeAllFiles(BatchFlow):\n",
     "    def prep(self, shared):\n",
@@ -147,9 +156,15 @@
     "        return [{\"filename\":filename} for filename in shared['data']]\n",
     "\n",
     "summarize_all_files = SummarizeAllFiles(start=file_summary_flow)\n",
-    "summarize_all_files.run(shared)\n",
-    "\n",
-    "\n",
+    "summarize_all_files.run(shared)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
     "# 8) QA Agent: Find the most relevant file based on summary with actions\n",
     "# if no question is asked:\n",
     "# (a) end: terminate the flow \n",