From 558808f6dbab816d7845bbb7cf02f7b8afebdda0 Mon Sep 17 00:00:00 2001
From: zachary62
Date: Sat, 29 Mar 2025 11:22:06 -0400
Subject: [PATCH] add mcp example

---
 cookbook/pocketflow-mcp/README.md        | 60 +++++++++++++++++++++++
 cookbook/pocketflow-mcp/main.py          | 55 ++++++++++++++++++++++
 cookbook/pocketflow-mcp/requirements.txt |  3 ++
 cookbook/pocketflow-mcp/simple_client.py | 44 ++++++++++++++++++
 cookbook/pocketflow-mcp/simple_server.py | 14 ++++++
 cookbook/pocketflow-mcp/utils.py         | 21 +++++++++
 6 files changed, 197 insertions(+)
 create mode 100644 cookbook/pocketflow-mcp/README.md
 create mode 100644 cookbook/pocketflow-mcp/main.py
 create mode 100644 cookbook/pocketflow-mcp/requirements.txt
 create mode 100644 cookbook/pocketflow-mcp/simple_client.py
 create mode 100644 cookbook/pocketflow-mcp/simple_server.py
 create mode 100644 cookbook/pocketflow-mcp/utils.py

diff --git a/cookbook/pocketflow-mcp/README.md b/cookbook/pocketflow-mcp/README.md
new file mode 100644
index 0000000..05d51e2
--- /dev/null
+++ b/cookbook/pocketflow-mcp/README.md
@@ -0,0 +1,60 @@
+# PocketFlow MCP Example
+
+A basic chat application built with PocketFlow and OpenAI's GPT-4o model, plus a minimal Model Context Protocol (MCP) server and client.
+
+## Features
+
+- Conversational chat interface in the terminal
+- Maintains full conversation history for context
+- Simple implementation demonstrating PocketFlow's node and flow concepts
+- Minimal MCP server exposing an `add` tool, with a client that discovers and calls it
+
+## Run the Chat
+
+1. Make sure your OpenAI API key is set:
+   ```bash
+   export OPENAI_API_KEY="your-api-key-here"
+   ```
+   Alternatively, you can edit the `utils.py` file to include your API key directly.
+
+2. Install requirements and run the application:
+   ```bash
+   pip install -r requirements.txt
+   python main.py
+   ```
+
+## Run the MCP Example
+
+Start the server:
+
+```bash
+python simple_server.py
+```
+
+Run the client, which launches the server over stdio, lists its tools, and calls the `add` tool:
+
+```bash
+python simple_client.py
+```
+
+## How It Works
+
+```mermaid
+flowchart LR
+    chat[ChatNode] -->|continue| chat
+```
+
+The chat application uses a single `ChatNode` with a self-loop that:
+- Takes user input in the `prep` method
+- Sends the complete conversation history to GPT-4o
+- Adds the response to the conversation history
+- Loops back to continue the chat until the user types 'exit'
+
+The MCP example is independent of the chat flow: `simple_client.py` starts `simple_server.py` as a subprocess over stdio, lists the tools it exposes, and calls the `add` tool.
+
+## Files
+
+- [`main.py`](./main.py): Implementation of the ChatNode and chat flow
+- [`utils.py`](./utils.py): Simple wrapper for calling the OpenAI API
+- [`simple_server.py`](./simple_server.py): Minimal FastMCP server exposing an `add` tool
+- [`simple_client.py`](./simple_client.py): MCP client that lists the server's tools and calls `add`
diff --git a/cookbook/pocketflow-mcp/main.py b/cookbook/pocketflow-mcp/main.py
new file mode 100644
index 0000000..126c106
--- /dev/null
+++ b/cookbook/pocketflow-mcp/main.py
@@ -0,0 +1,55 @@
+from pocketflow import Node, Flow
+from utils import call_llm
+
+class ChatNode(Node):
+    def prep(self, shared):
+        # Initialize messages if this is the first run
+        if "messages" not in shared:
+            shared["messages"] = []
Type 'exit' to end the conversation.") + + # Get user input + user_input = input("\nYou: ") + + # Check if user wants to exit + if user_input.lower() == 'exit': + return None + + # Add user message to history + shared["messages"].append({"role": "user", "content": user_input}) + + # Return all messages for the LLM + return shared["messages"] + + def exec(self, messages): + if messages is None: + return None + + # Call LLM with the entire conversation history + response = call_llm(messages) + return response + + def post(self, shared, prep_res, exec_res): + if prep_res is None or exec_res is None: + print("\nGoodbye!") + return None # End the conversation + + # Print the assistant's response + print(f"\nAssistant: {exec_res}") + + # Add assistant message to history + shared["messages"].append({"role": "assistant", "content": exec_res}) + + # Loop back to continue the conversation + return "continue" + +# Create the flow with self-loop +chat_node = ChatNode() +chat_node - "continue" >> chat_node # Loop back to continue conversation + +flow = Flow(start=chat_node) + +# Start the chat +if __name__ == "__main__": + shared = {} + flow.run(shared) diff --git a/cookbook/pocketflow-mcp/requirements.txt b/cookbook/pocketflow-mcp/requirements.txt new file mode 100644 index 0000000..e9d3ae9 --- /dev/null +++ b/cookbook/pocketflow-mcp/requirements.txt @@ -0,0 +1,3 @@ +pocketflow>=0.0.1 +openai>=1.0.0 +fastmcp \ No newline at end of file diff --git a/cookbook/pocketflow-mcp/simple_client.py b/cookbook/pocketflow-mcp/simple_client.py new file mode 100644 index 0000000..76174e8 --- /dev/null +++ b/cookbook/pocketflow-mcp/simple_client.py @@ -0,0 +1,44 @@ +import asyncio +from mcp import ClientSession, StdioServerParameters +from mcp.client.stdio import stdio_client + +async def main(): + # Set up connection to your server + server_params = StdioServerParameters( + command="python", + args=["simple_server.py"] + ) + + async with stdio_client(server_params) as (read, write): + async with ClientSession(read, write) as session: + # Initialize the connection + await session.initialize() + + # List available tools + tools_response = await session.list_tools() + + # Extract tools information + tools = tools_response.tools + + # Parse each tool + for tool in tools: + print("\nTool Information:") + print(f" Name: {tool.name}") + print(f" Description: {tool.description}") + print(f" Required Parameters: {tool.inputSchema.get('required', [])}") + + # Parse parameter information + properties = tool.inputSchema.get('properties', {}) + print(" Parameters:") + for param_name, param_info in properties.items(): + param_type = param_info.get('type', 'unknown') + param_title = param_info.get('title', param_name) + print(f" - {param_name} ({param_type}): {param_title}") + + # Call the add tool + result = await session.call_tool("add", {"a": 5, "b": 3}) + result_value = result.content[0].text + print(f"5 + 3 = {result_value}") + +if __name__ == "__main__": + asyncio.run(main()) \ No newline at end of file diff --git a/cookbook/pocketflow-mcp/simple_server.py b/cookbook/pocketflow-mcp/simple_server.py new file mode 100644 index 0000000..166e34d --- /dev/null +++ b/cookbook/pocketflow-mcp/simple_server.py @@ -0,0 +1,14 @@ +from fastmcp import FastMCP + +# Create a named server +mcp = FastMCP("Addition Server") + +# Define an addition tool +@mcp.tool() +def add(a: int, b: int) -> int: + """Add two numbers together""" + return a + b + +# Start the server +if __name__ == "__main__": + mcp.run() \ No newline at end of file diff 
diff --git a/cookbook/pocketflow-mcp/utils.py b/cookbook/pocketflow-mcp/utils.py
new file mode 100644
index 0000000..ffced04
--- /dev/null
+++ b/cookbook/pocketflow-mcp/utils.py
@@ -0,0 +1,21 @@
+from openai import OpenAI
+import os
+
+def call_llm(messages):
+    client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY", "your-api-key"))
+
+    response = client.chat.completions.create(
+        model="gpt-4o",
+        messages=messages,
+        temperature=0.7
+    )
+
+    return response.choices[0].message.content
+
+if __name__ == "__main__":
+    # Test the LLM call
+    messages = [{"role": "user", "content": "In a few words, what's the meaning of life?"}]
+    response = call_llm(messages)
+    print(f"Prompt: {messages[0]['content']}")
+    print(f"Response: {response}")
+
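
In the patch above, `main.py` demonstrates the PocketFlow chat flow while `simple_client.py` talks to the MCP server directly; the two halves are not wired together. Below is a minimal sketch, not part of the patch, of how the MCP `add` tool could be called from inside a PocketFlow node. It reuses only APIs that already appear in this patch (`pocketflow.Node`/`Flow`, `mcp.ClientSession`, `StdioServerParameters`, `stdio_client`, `session.call_tool`); the `AddNode` class and `call_mcp_tool` helper are illustrative names introduced here, not part of the example.

```python
# Illustrative sketch: wrapping the MCP "add" tool in a PocketFlow node.
# Assumes the same pocketflow and mcp APIs used in main.py and simple_client.py;
# AddNode and call_mcp_tool are hypothetical names, not part of the patch above.
import asyncio

from pocketflow import Node, Flow
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

async def call_mcp_tool(tool_name, arguments):
    # Launch simple_server.py over stdio and call one of its tools
    server_params = StdioServerParameters(command="python", args=["simple_server.py"])
    async with stdio_client(server_params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            result = await session.call_tool(tool_name, arguments)
            return result.content[0].text

class AddNode(Node):
    def prep(self, shared):
        # Read the operands from the shared store
        return shared["a"], shared["b"]

    def exec(self, inputs):
        a, b = inputs
        # Delegate the computation to the MCP server's "add" tool
        return asyncio.run(call_mcp_tool("add", {"a": a, "b": b}))

    def post(self, shared, prep_res, exec_res):
        # Store and display the result
        shared["sum"] = exec_res
        print(f"{prep_res[0]} + {prep_res[1]} = {exec_res}")

if __name__ == "__main__":
    shared = {"a": 5, "b": 3}
    Flow(start=AddNode()).run(shared)
```

Saved next to `simple_server.py` and run with Python after installing the requirements, this should print `5 + 3 = 8`, matching the direct client call in `simple_client.py`.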