#!/usr/bin/env python3
"""
LangGraph MCP Client
This script demonstrates using LangGraph with the MultiServerMCPClient adapter to connect to an
MCP-compatible server and query information using a Bedrock-hosted Claude model.
The script accepts command line arguments for:
- Server host and port
- Model ID to use
- User message to process
Usage:
python langgraph_mcp_client.py --host hostname --port port --model model_id --message "your question"
Example:
python langgraph_mcp_sse_client.py --host ec2-44-192-72-20.compute-1.amazonaws.com --port 8000 \
--model anthropic.claude-3-haiku-20240307-v1:0 --message "my bedrock usage in last 7 days?"
"""
import asyncio
import argparse
from typing import Dict, List, Any, Optional
from langchain_mcp_adapters.client import MultiServerMCPClient
from langgraph.prebuilt import create_react_agent
from langchain_aws import ChatBedrock
def parse_arguments() -> argparse.Namespace:
    """Define and evaluate the CLI options for the LangGraph MCP client.

    Returns:
        argparse.Namespace: The parsed command line arguments (server
        connection details, Bedrock model id, and the user message).
    """
    arg_parser = argparse.ArgumentParser(description='LangGraph MCP Client')

    # --- MCP server connection options ---
    arg_parser.add_argument('--host', default='ec2-44-192-72-20.compute-1.amazonaws.com',
                            type=str,
                            help='Hostname of the MCP server')
    arg_parser.add_argument('--port', default=8000,
                            type=int,
                            help='Port of the MCP server')
    arg_parser.add_argument('--server-name', default='aws_cost_explorer',
                            type=str,
                            help='Server name identifier in the configuration')
    arg_parser.add_argument('--aws-account-id', default="",
                            type=str,
                            help='AWS account id to use for retrieving cost information, if not specified then the account in which the MCP server is running is used.')

    # --- Bedrock model selection ---
    arg_parser.add_argument('--model', default='us.anthropic.claude-3-5-haiku-20241022-v1:0',
                            type=str,
                            help='Model ID to use with Bedrock')

    # --- User message (may contain a "{}" placeholder for the account id) ---
    arg_parser.add_argument('--message', default='my bedrock usage in last 7 days? My account id is {}',
                            type=str,
                            help='Message to send to the agent')

    return arg_parser.parse_args()
async def main():
    """
    Entry point coroutine for the LangGraph MCP client.

    Steps:
        1. Parse the command line arguments.
        2. Build the Bedrock chat model and connect the MCP SSE client.
        3. Fetch the server-provided system prompt and tool list.
        4. Run a LangGraph ReAct agent on the user's message.
        5. Print the agent's final answer.
    """
    args = parse_arguments()
    # Substitute the (possibly empty) account id into the message template.
    args.message = args.message.format(args.aws_account_id)

    # Port 443 implies TLS; any other port uses plain HTTP.
    scheme_suffix = 's' if args.port == 443 else ''
    server_url = f"http{scheme_suffix}://{args.host}:{args.port}/sse"

    # Echo the effective configuration before connecting.
    print(f"Connecting to MCP server: {server_url}")
    print(f"Using model: {args.model}")
    print(f"Message: {args.message}")

    # Bedrock-hosted Claude model that will drive the agent.
    model = ChatBedrock(model_id=args.model, region_name='us-east-1')

    try:
        # Open an SSE transport to the single configured MCP server.
        async with MultiServerMCPClient(
            {
                args.server_name: {
                    "url": server_url,
                    "transport": "sse",
                }
            }
        ) as client:
            print("Connected to MCP server successfully")

            # Fetch the server-defined system prompt for the agent.
            prompt = await client.get_prompt(
                args.server_name,
                "system_prompt_for_agent",
                dict(aws_account_id=args.aws_account_id),
            )
            print(f"Available prompt: {prompt}")
            system_prompt = prompt[0].content

            # Discover the tools the server exposes and list their names.
            tools = client.get_tools()
            print(f"Available tools: {[t.name for t in tools]}")

            # ReAct-style agent wired to the Bedrock model and MCP tools.
            agent = create_react_agent(model, tools)

            # System prompt first, then the user's request.
            conversation = [
                {"role": "system", "content": system_prompt},
                {"role": "user", "content": args.message},
            ]

            print("\nInvoking agent...\n" + "-" * 40)
            response = await agent.ainvoke({"messages": conversation})
            print("\nResponse:" + "\n" + "-" * 40)

            # The agent's answer is the last entry of the returned message list.
            if response and "messages" in response and response["messages"]:
                final = response["messages"][-1]
                if isinstance(final, dict) and "content" in final:
                    # Plain-dict message: print its content field.
                    print(final["content"])
                else:
                    # Message object (e.g. AIMessage): use its .content attribute.
                    print(str(final.content))
            else:
                print("No valid response received")
    except Exception as e:
        # Top-level boundary for this script: report failure with a traceback.
        print(f"Error: {str(e)}")
        import traceback
        print(traceback.format_exc())
# Script entry point: run the async main() coroutine on a fresh event loop.
if __name__ == "__main__":
    asyncio.run(main())