add mcp example

commit 558808f6db
parent eb4eed4c9d
README.md
@@ -0,0 +1,59 @@
# Simple PocketFlow Chat

A basic chat application using PocketFlow with OpenAI's GPT-4o model.

## Features

- Conversational chat interface in the terminal
- Maintains full conversation history for context
- Simple implementation demonstrating PocketFlow's node and flow concepts

## Run It

1. Make sure your OpenAI API key is set:

```bash
export OPENAI_API_KEY="your-api-key-here"
```

Alternatively, you can edit `utils.py` to include your API key directly.
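
For reference, the key lookup in `utils.py` (included later in this commit) falls back to a placeholder string, so the app runs either way:

```python
import os
from openai import OpenAI

# From utils.py: use the environment variable if set, else a placeholder
client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY", "your-api-key"))
```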

2. Install the requirements and run the application:

```bash
pip install -r requirements.txt
python main.py
```

## Run the MCP Example

Start the server:

```bash
python simple_server.py
```

Then run the client:

```bash
python simple_client.py
```
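
The client connects to the server over stdio, lists its tools, and calls the `add` tool. A self-contained sketch, condensed from `simple_client.py` below:

```python
import asyncio
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

async def demo():
    # Launch the server as a subprocess speaking MCP over stdio
    params = StdioServerParameters(command="python", args=["simple_server.py"])
    async with stdio_client(params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()  # MCP handshake
            result = await session.call_tool("add", {"a": 5, "b": 3})
            print(result.content[0].text)  # prints 8

asyncio.run(demo())
```

Since `add(5, 3)` returns 8, the full client's final line should print `5 + 3 = 8`.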

## How It Works

```mermaid
flowchart LR
    chat[ChatNode] -->|continue| chat
```

The chat application uses a single `ChatNode` with a self-loop that:

- Takes user input in the `prep` method
- Sends the complete conversation history to GPT-4o
- Adds responses to the conversation history
- Loops back to continue the chat until the user types 'exit'
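
In code, that self-loop is a single transition on the node: when `post` returns the action string `"continue"`, the flow re-enters the same node. Condensed from `main.py` below:

```python
from pocketflow import Flow
from main import ChatNode  # ChatNode is defined in main.py below

chat_node = ChatNode()
chat_node - "continue" >> chat_node  # the "continue" action routes back to the same node
flow = Flow(start=chat_node)
flow.run({})  # shared store starts empty; ChatNode fills in "messages"
```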

## Files

- [`main.py`](./main.py): Implementation of the ChatNode and chat flow
- [`utils.py`](./utils.py): Simple wrapper for calling the OpenAI API
- [`simple_server.py`](./simple_server.py): Minimal FastMCP server exposing an `add` tool
- [`simple_client.py`](./simple_client.py): MCP client that lists the server's tools and calls `add`

main.py
@@ -0,0 +1,55 @@
from pocketflow import Node, Flow
from utils import call_llm

class ChatNode(Node):
    def prep(self, shared):
        # Initialize messages if this is the first run
        if "messages" not in shared:
            shared["messages"] = []
            print("Welcome to the chat! Type 'exit' to end the conversation.")

        # Get user input
        user_input = input("\nYou: ")

        # Check if user wants to exit
        if user_input.lower() == 'exit':
            return None

        # Add user message to history
        shared["messages"].append({"role": "user", "content": user_input})

        # Return all messages for the LLM
        return shared["messages"]

    def exec(self, messages):
        if messages is None:
            return None

        # Call LLM with the entire conversation history
        response = call_llm(messages)
        return response

    def post(self, shared, prep_res, exec_res):
        if prep_res is None or exec_res is None:
            print("\nGoodbye!")
            return None  # End the conversation

        # Print the assistant's response
        print(f"\nAssistant: {exec_res}")

        # Add assistant message to history
        shared["messages"].append({"role": "assistant", "content": exec_res})

        # Loop back to continue the conversation
        return "continue"

# Create the flow with self-loop
chat_node = ChatNode()
chat_node - "continue" >> chat_node  # Loop back to continue conversation

flow = Flow(start=chat_node)

# Start the chat
if __name__ == "__main__":
    shared = {}
    flow.run(shared)

requirements.txt
@@ -0,0 +1,3 @@
pocketflow>=0.0.1
openai>=1.0.0
fastmcp

simple_client.py
@@ -0,0 +1,44 @@
import asyncio
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

async def main():
    # Set up connection to your server
    server_params = StdioServerParameters(
        command="python",
        args=["simple_server.py"]
    )

    async with stdio_client(server_params) as (read, write):
        async with ClientSession(read, write) as session:
            # Initialize the connection
            await session.initialize()

            # List available tools
            tools_response = await session.list_tools()

            # Extract tools information
            tools = tools_response.tools

            # Parse each tool
            for tool in tools:
                print("\nTool Information:")
                print(f"  Name: {tool.name}")
                print(f"  Description: {tool.description}")
                print(f"  Required Parameters: {tool.inputSchema.get('required', [])}")

                # Parse parameter information
                properties = tool.inputSchema.get('properties', {})
                print("  Parameters:")
                for param_name, param_info in properties.items():
                    param_type = param_info.get('type', 'unknown')
                    param_title = param_info.get('title', param_name)
                    print(f"    - {param_name} ({param_type}): {param_title}")

            # Call the add tool
            result = await session.call_tool("add", {"a": 5, "b": 3})
            result_value = result.content[0].text
            print(f"5 + 3 = {result_value}")

if __name__ == "__main__":
    asyncio.run(main())

simple_server.py
@@ -0,0 +1,14 @@
from fastmcp import FastMCP

# Create a named server
mcp = FastMCP("Addition Server")

# Define an addition tool
@mcp.tool()
def add(a: int, b: int) -> int:
    """Add two numbers together"""
    return a + b

# Start the server
if __name__ == "__main__":
    mcp.run()

utils.py
@@ -0,0 +1,21 @@
from openai import OpenAI
import os

def call_llm(messages):
    client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY", "your-api-key"))

    response = client.chat.completions.create(
        model="gpt-4o",
        messages=messages,
        temperature=0.7
    )

    return response.choices[0].message.content

if __name__ == "__main__":
    # Test the LLM call
    messages = [{"role": "user", "content": "In a few words, what's the meaning of life?"}]
    response = call_llm(messages)
    print(f"Prompt: {messages[0]['content']}")
    print(f"Response: {response}")