
Commit 7baab6a

implement openmanus client based on mcp

1 parent 792dc66 · commit 7baab6a

5 files changed: +232 −196 lines changed

openmanus_server/README.md (+4 −2)
@@ -111,10 +111,12 @@ After clicking on the hammer icon, you should see tools listed:
 
 ### 💻 2. Testing with simple Client Example
 
-Check out `openmanus_client_example.py` to test the openmanus server using the MCP client.
+Check out `openmanus_client.py` to test the openmanus server using the MCP client.
 
+#### Demo display
+https://github.com/user-attachments/assets/aeacd93d-9bec-46d1-831b-20e898c7507b
 ```
-uv run openmanus_server/openmanus_client_example.py openmanus_server/openmanus_server.py
+python openmanus_server/openmanus_client.py
 ```
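Note: `main()` in the new client (below) also accepts an optional server-script path as its first CLI argument, so the documented command can be pointed at a non-default server location; the path here is a placeholder:

```
python openmanus_server/openmanus_client.py path/to/openmanus_server.py
```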

openmanus_server/mcp_requirements.txt (+1)
@@ -1,3 +1,4 @@
 # Core dependencies
 mcp
 httpx>=0.27.0
+tomli>=2.0.0
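The new `tomli` pin backs the client's `load_config()`, which parses `config/config.toml`. A minimal sketch of the layout the client appears to expect, inferred from the keys it accesses (`llm.api_key`, `llm.base_url`, `llm.model`, `server.default_script`); all values are placeholders, not the project's actual defaults:

```
[llm]
api_key = ""        # empty string falls back to the OPENAI_API_KEY env var
base_url = "https://api.openai.com/v1"   # placeholder endpoint
model = "gpt-4o"    # placeholder model name

[server]
default_script = "./openmanus_server/openmanus_server.py"
```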

openmanus_server/openmanus_client.py (+224, new file)
```
import ast
import asyncio
import json
import os
import sys
from contextlib import AsyncExitStack
from pathlib import Path
from typing import Optional

import tomli
from colorama import Fore, init
from dotenv import load_dotenv
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client
from openai import AsyncOpenAI


# Initialize colorama so ANSI colors reset automatically after each print
init(autoreset=True)


# Load config
def load_config():
    config_path = Path(__file__).parent.parent / "config" / "config.toml"
    try:
        with open(config_path, "rb") as f:
            return tomli.load(f)
    except FileNotFoundError:
        print(f"Error: config.toml not found at {config_path}")
        sys.exit(1)
    except tomli.TOMLDecodeError as e:
        print(f"Error: Invalid TOML in config.toml: {e}")
        sys.exit(1)


# Load environment variables (as a fallback for values missing from config.toml)
load_dotenv()


class OpenManusClient:
    def __init__(self):
        # Load configuration
        self.config = load_config()

        # Initialize session and client objects
        self.session: Optional[ClientSession] = None
        self.exit_stack = AsyncExitStack()

        # Initialize AsyncOpenAI client with config, falling back to the environment
        api_key = self.config["llm"]["api_key"] or os.getenv("OPENAI_API_KEY")
        if not api_key:
            raise ValueError(
                "OpenAI API key not found in config.toml or environment variables"
            )

        self.openai_client = AsyncOpenAI(
            api_key=api_key, base_url=self.config["llm"]["base_url"]
        )

    async def connect_to_server(self, server_script_path: Optional[str] = None):
        """Connect to the openmanus MCP server"""
        # Use the provided path or the default from config
        script_path = server_script_path or self.config["server"]["default_script"]

        server_params = StdioServerParameters(
            command="python", args=[script_path], env=None
        )

        stdio_transport = await self.exit_stack.enter_async_context(
            stdio_client(server_params)
        )
        self.stdio, self.write = stdio_transport
        self.session = await self.exit_stack.enter_async_context(
            ClientSession(self.stdio, self.write)
        )

        await self.session.initialize()

        # List available tools
        response = await self.session.list_tools()
        tools = response.tools
        print("\nConnected to server with tools:", [tool.name for tool in tools])

    async def chat_loop(self):
        """Run an interactive chat loop for testing tools"""
        print(Fore.CYAN + "\n🚀 OpenManus MCP Client Started!")
        print(Fore.GREEN + "Type your queries or 'quit' to exit.")
        print(
            Fore.YELLOW
            + "Example query: 'What is the recent news about the stock market?'\n"
        )

        while True:
            try:
                query = input(Fore.BLUE + "🔍 Query: ").strip()

                if query.lower() == "quit":
                    print(Fore.RED + "👋 Exiting... Goodbye!")
                    break

                response = await self.process_query(query)
                print(Fore.MAGENTA + "\n💬 Response: " + response)

            except Exception as e:
                print(Fore.RED + f"\n❌ Error: {str(e)}")

    async def cleanup(self):
        """Clean up resources"""
        await self.exit_stack.aclose()
        await self.openai_client.close()  # Close the OpenAI client

    async def process_query(self, query: str) -> str:
        """Process a query using the LLM and available tools"""
        # Add a system message to set the context for the model
        messages = [
            {
                "role": "system",
                "content": "You are a general-purpose AI assistant called OpenManus. You can help users complete a wide range of tasks, providing detailed information and assistance as needed. Please include emojis in your responses to make them more engaging.",
            },
            {"role": "user", "content": query},
        ]

        # Advertise the MCP server's tools to the LLM in OpenAI function-call format
        response = await self.session.list_tools()
        available_tools = [
            {
                "type": "function",
                "function": {
                    "name": tool.name,
                    "description": tool.description,
                    "parameters": tool.inputSchema,
                },
            }
            for tool in response.tools
        ]

        # Initial LLM API call
        response = await self.openai_client.chat.completions.create(
            model=self.config["llm"]["model"],
            messages=messages,
            tools=available_tools,
            tool_choice="auto",
        )

        # Process the response and handle tool calls
        final_text = []

        while True:
            message = response.choices[0].message

            # Add the assistant's message to the conversation
            messages.append(
                {
                    "role": "assistant",
                    "content": message.content if message.content else None,
                    "tool_calls": message.tool_calls
                    if hasattr(message, "tool_calls")
                    else None,
                }
            )

            # If there are no tool calls, we're done
            if not hasattr(message, "tool_calls") or not message.tool_calls:
                if message.content:
                    final_text.append(message.content)
                break

            # Handle tool calls
            for tool_call in message.tool_calls:
                tool_name = tool_call.function.name
                tool_args = tool_call.function.arguments

                # The API returns arguments as a JSON string; parse it, falling
                # back to a Python-literal parse for non-strict payloads
                if isinstance(tool_args, str):
                    try:
                        tool_args = json.loads(tool_args)
                    except json.JSONDecodeError:
                        try:
                            tool_args = ast.literal_eval(tool_args)
                        except (ValueError, SyntaxError) as e:
                            print(f"Error converting tool_args to dict: {e}")
                            tool_args = {}

                # Ensure tool_args is a dictionary
                if not isinstance(tool_args, dict):
                    tool_args = {}

                # Execute the tool call via the MCP session
                print(f"Calling tool {tool_name} with args: {tool_args}")
                result = await self.session.call_tool(tool_name, tool_args)
                final_text.append(f"[Calling tool {tool_name}]")
                # final_text.append(f"Result: {result.content}")

                # Feed the tool result back into the conversation
                messages.append(
                    {
                        "role": "tool",
                        "tool_call_id": tool_call.id,
                        "content": str(result.content),
                    }
                )

            # Get the next response from the LLM
            response = await self.openai_client.chat.completions.create(
                model=self.config["llm"]["model"],
                messages=messages,
                tools=available_tools,
                tool_choice="auto",
            )

        return "\n".join(final_text)


async def main():
    # Allow an optional server-script path on the command line
    if len(sys.argv) > 1:
        server_script = sys.argv[1]
    else:
        server_script = "./openmanus_server/openmanus_server.py"

    client = OpenManusClient()
    try:
        await client.connect_to_server(server_script)
        await client.chat_loop()
    finally:
        await client.cleanup()


if __name__ == "__main__":
    asyncio.run(main())
```
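For programmatic use outside the interactive `chat_loop`, the class's own methods compose into a one-shot query. A minimal sketch, assuming the repo root is on `PYTHONPATH` so the module import resolves; the server path and query are placeholders:

```
import asyncio

from openmanus_server.openmanus_client import OpenManusClient


async def one_shot(query: str) -> str:
    # Drive the client without the interactive loop: connect, ask once, clean up.
    client = OpenManusClient()
    try:
        await client.connect_to_server("./openmanus_server/openmanus_server.py")
        return await client.process_query(query)
    finally:
        await client.cleanup()


if __name__ == "__main__":
    print(asyncio.run(one_shot("List the tools you can call.")))
```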
