Skip to content

Commit 3f48a1c

Browse files
authored
Merge pull request #177 from modelcontextprotocol/jadamson/fetch-url-serialize-fix
Fix deserialization of URL
Merge commit 3f48a1c (2 parents: 2578d6f and 45dfd82)

File tree

1 file changed

+6
-6
lines changed

1 file changed

+6
-6
lines changed

src/fetch/src/mcp_server_fetch/server.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@ def extract_content_from_html(html: str) -> str:
4444
return content
4545

4646

47-
def get_robots_txt_url(url: AnyUrl | str) -> str:
47+
def get_robots_txt_url(url: str) -> str:
4848
"""Get the robots.txt URL for a given website URL.
4949
5050
Args:
@@ -54,15 +54,15 @@ def get_robots_txt_url(url: AnyUrl | str) -> str:
5454
URL of the robots.txt file
5555
"""
5656
# Parse the URL into components
57-
parsed = urlparse(str(url))
57+
parsed = urlparse(url)
5858

5959
# Reconstruct the base URL with just scheme, netloc, and /robots.txt path
6060
robots_url = urlunparse((parsed.scheme, parsed.netloc, "/robots.txt", "", "", ""))
6161

6262
return robots_url
6363

6464

65-
async def check_may_autonomously_fetch_url(url: AnyUrl | str, user_agent: str) -> None:
65+
async def check_may_autonomously_fetch_url(url: str, user_agent: str) -> None:
6666
"""
6767
Check if the URL can be fetched by the user agent according to the robots.txt file.
6868
Raises a McpError if not.
@@ -106,7 +106,7 @@ async def check_may_autonomously_fetch_url(url: AnyUrl | str, user_agent: str) -
106106

107107

108108
async def fetch_url(
109-
url: AnyUrl | str, user_agent: str, force_raw: bool = False
109+
url: str, user_agent: str, force_raw: bool = False
110110
) -> Tuple[str, str]:
111111
"""
112112
Fetch the URL and return the content in a form ready for the LLM, as well as a prefix string with status information.
@@ -116,7 +116,7 @@ async def fetch_url(
116116
async with AsyncClient() as client:
117117
try:
118118
response = await client.get(
119-
str(url),
119+
url,
120120
follow_redirects=True,
121121
headers={"User-Agent": user_agent},
122122
timeout=30,
@@ -221,7 +221,7 @@ async def call_tool(name, arguments: dict) -> list[TextContent]:
221221
except ValueError as e:
222222
raise McpError(INVALID_PARAMS, str(e))
223223

224-
url = args.url
224+
url = str(args.url)
225225
if not url:
226226
raise McpError(INVALID_PARAMS, "URL is required")
227227

0 commit comments

Comments
 (0)