Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
43 changes: 43 additions & 0 deletions examples/clients/homebrewupload-client/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
## Why this

This is an example of markitdown in "homebrew" style.

## Features

An MCP server runs on my laptop and an agent runs on my mobile phone.
This client passes files from the mobile phone to the laptop.

## Out of scope

- Trust and security, no need as a "homebrew" for individual usage.
- Persistent storage, no need as a "homebrew" for individual usage.
- stdio transport, since files are shared among different devices over the network.

## Prerequisites

See `examples/servers/homebrewupload`

## Installation, Usage and Example

```bash
# Navigate to the client directory
cd examples/clients/homebrewupload-client

# You need to make a pdf file as test.pdf
# examples/clients/homebrewupload-client/test.pdf

## default: tested with DeepSeek as the LLM provider
export ANTHROPIC_BASE_URL=https://api.deepseek.com/anthropic
export ANTHROPIC_API_KEY=<Your_API_KEY>

# Start the MCP client
uv run python main.py
```

## Token consumption discussion

In the author's local test, a PDF containing just `hello world` was used.
When sending the file as a data-style URI (`data...base64...`), it consumes about 30k tokens after base64 encoding.
This example sends a `file:...path...` URI instead, so the conversion happens outside the LLM context,
consuming only the tokens for the file path and the `hello world` content.
172 changes: 172 additions & 0 deletions examples/clients/homebrewupload-client/main.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,172 @@
import asyncio
import base64
import logging
from contextlib import AsyncExitStack

import requests
from anthropic import Anthropic
from dotenv import load_dotenv

from mcp.client.session import ClientSession
from mcp.client.sse import sse_client

logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s", level=logging.INFO)

load_dotenv() # load environment variables from .env


class MCPClient:
def __init__(self):
# Initialize session and client objects
self.session: ClientSession | None = None
self.exit_stack = AsyncExitStack()
self.anthropic = Anthropic()
self._streams_context = None
self._session_context = None

async def connect_to_server(self):
"""Connect to the translation MCP server running on localhost:3001"""
try:
# Store the context managers so they stay alive
self._streams_context = sse_client(url="http://localhost:3001/sse")
streams = await self.exit_stack.enter_async_context(self._streams_context)

self._session_context = ClientSession(*streams)
self.session = await self.exit_stack.enter_async_context(self._session_context)

# Initialize
await self.session.initialize()

# List available tools to verify connection
print("Initialized SSE client...")
print("Listing tools...")
response = await self.session.list_tools()
tools = response.tools
print("\nConnected to server with tools:", [tool.name for tool in tools])

return True
except Exception as e:
logging.error(f"Failed to connect to server: {e}")
await self.close()
return False

async def process_chat(
self,
file_path: str | None = None,
) -> str:
"""Porcess a chat"""
messages = []
user_content = """please help make file into markdown format, file path file:///tmp/test.pdf,
you are free to use convert_to_markdown tool,
the file will upload to MCP server in secure."""

try:
with open(file_path, "rb") as f:
file_content = base64.b64encode(f.read()).decode("utf-8")
# 发送请求
response = requests.post(
"http://localhost:3001/upload",
json={"filename": "test.pdf", "file_content_base64": file_content},
)
except Exception as e:
logging.info(f"file handle error: {str(e)}")
return f"file handle error: {str(e)}"
messages.append({"role": "user", "content": user_content})
response = await self.session.list_tools()
available_tools = [
{
"name": tool.name,
"description": tool.description,
"input_schema": tool.inputSchema,
}
for tool in response.tools
]
response = self.anthropic.messages.create(
model="deepseek-chat",
max_tokens=1000,
messages=messages,
tools=available_tools,
)
final_text = []
for content in response.content:
if content.type == "text":
final_text.append(content.text)
elif content.type == "tool_use":
tool_name = content.name
tool_args = "file:///tmp/test.pdf" # content.input

# 执行工具调用
try:
final_text.append(f"[invoke tool {tool_name}]")
result = await self.session.call_tool(tool_name, arguments={"uri": tool_args})
logging.info(result)

# 将工具结果添加到消息中
messages.append(
{
"role": "assistant",
"content": [{"type": "tool_use", **content.dict()}],
}
)

messages.append(
{
"role": "user",
"content": [
{
"type": "tool_result",
"tool_use_id": content.id,
"content": result.content,
}
],
}
)

# 获取Claude的下一步响应
next_response = self.anthropic.messages.create(
model="deepseek-chat",
max_tokens=1000,
messages=messages,
)

# 添加最终响应
for next_content in next_response.content:
if next_content.type == "text":
final_text.extend(
next_content.text
for next_content in next_response.content
if next_content.type == "text"
)

except Exception as e:
final_text.append(f"tool invoke {tool_name} error: {str(e)}")

return "\n".join(final_text)

async def close(self):
"""Properly close all connections"""
await self.exit_stack.aclose()


async def main():
client = MCPClient()
try:
logging.info("Connecting to server...")
success = await client.connect_to_server()
if success:
# Keep the connection alive for a while to test
await asyncio.sleep(2)
result = await client.process_chat("./test.pdf")
logging.info(result)
else:
logging.error("Failed to connect to server")
except Exception as e:
logging.error(f"Error in main: {e}")
finally:
logging.info("Closing client...")
await client.close()
logging.info("Client closed successfully")


if __name__ == "__main__":
asyncio.run(main())
14 changes: 14 additions & 0 deletions examples/clients/homebrewupload-client/pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
[project]
name = "homebrewupload-client"
version = "0.1.0"
description = "A homebrew style mcp client with file upload feature"
readme = "README.md"
requires-python = ">=3.10"
license = { text = "MIT" }
dependencies = [
    "anthropic>=0.72.1",
    "mcp",
    # main.py imports these directly, so they must be declared here.
    "python-dotenv",
    "requests",
]

[tool.uv.sources]
mcp = { workspace = true }
30 changes: 30 additions & 0 deletions examples/servers/homebrewupload/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
## Why this

This is an example of markitdown in "homebrew" style.

## Features

An MCP server runs on my laptop and an agent runs on my mobile phone.
Files are passed from the mobile phone to the laptop through this server.

## Out of scope

- Trust and security, no need as a "homebrew" for individual usage.
- Persistent storage, no need as a "homebrew" for individual usage.
- stdio transport, since files are shared among different devices over the network.

## Installation, Usage and Example

```bash
# Navigate to the server directory
cd examples/servers/homebrewupload

# Start MCP server
uv run python main.py
```

move to `examples/clients/homebrewupload-client`

## Further consideration

If we run this as a container (e.g. on Kubernetes), a service mesh or similar infrastructure can handle security concerns such as authentication and authorization.
Loading