8
8
"""
9
9
10
10
import os
11
- import asyncio
12
- from typing import Any , Dict , Optional
11
+ from typing import Any , Dict
13
12
13
+ import httpx
14
14
from mcp .server .fastmcp import FastMCP
15
- from scrapegraph_py import AsyncClient
16
15
17
16
18
class ScapeGraphClient:
    """Client for interacting with the ScapeGraph API.

    All endpoint methods share a single ``httpx.Client`` instance; call
    :meth:`close` when finished, or use the instance as a context manager
    so the underlying connection pool is released automatically.
    """

    # Base endpoint of the hosted ScapeGraph API (v1).
    BASE_URL = "https://api.scrapegraphai.com/v1"

    def __init__(self, api_key: str):
        """
        Initialize the ScapeGraph API client.

        Args:
            api_key: API key for ScapeGraph API
        """
        self.api_key = api_key
        # The service authenticates via a custom header, not Bearer auth.
        self.headers = {
            "SGAI-APIKEY": api_key,
            "Content-Type": "application/json"
        }
        # Generous timeout: scraping endpoints can take a while to answer.
        self.client = httpx.Client(timeout=60.0)

    def _post(self, endpoint: str, data: Dict[str, Any]) -> Dict[str, Any]:
        """POST ``data`` as JSON to ``endpoint`` and decode the response.

        Shared request path for all public endpoint methods, so the
        error-handling convention lives in exactly one place.

        Args:
            endpoint: Path segment appended to ``BASE_URL`` (e.g. ``"markdownify"``).
            data: JSON-serializable request payload.

        Returns:
            The parsed JSON body of a successful (HTTP 200) response.

        Raises:
            Exception: If the API responds with a non-200 status code.
        """
        url = f"{self.BASE_URL}/{endpoint}"

        response = self.client.post(url, headers=self.headers, json=data)

        if response.status_code != 200:
            error_msg = f"Error {response.status_code}: {response.text}"
            raise Exception(error_msg)

        return response.json()

    def markdownify(self, website_url: str) -> Dict[str, Any]:
        """
        Convert a webpage into clean, formatted markdown.

        Args:
            website_url: URL of the webpage to convert

        Returns:
            Dictionary containing the markdown result
        """
        return self._post("markdownify", {"website_url": website_url})

    def smartscraper(self, user_prompt: str, website_url: str) -> Dict[str, Any]:
        """
        Extract structured data from a webpage using AI.

        Args:
            user_prompt: Instructions for what data to extract
            website_url: URL of the webpage to scrape

        Returns:
            Dictionary containing the extracted data
        """
        return self._post(
            "smartscraper",
            {
                "user_prompt": user_prompt,
                "website_url": website_url
            }
        )

    def searchscraper(self, user_prompt: str) -> Dict[str, Any]:
        """
        Perform AI-powered web searches with structured results.

        Args:
            user_prompt: Search query or instructions

        Returns:
            Dictionary containing search results and reference URLs
        """
        return self._post("searchscraper", {"user_prompt": user_prompt})

    def close(self) -> None:
        """Close the HTTP client."""
        self.client.close()

    def __enter__(self) -> "ScapeGraphClient":
        """Support ``with ScapeGraphClient(...) as client:`` usage."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
        """Release the underlying HTTP connection pool on exit."""
        self.close()
83
110
84
# Module-level MCP server instance that the tool decorators attach to.
mcp = FastMCP("ScapeGraph API MCP Server")

# Default API key (will be overridden in main or by direct assignment)
default_api_key = os.environ.get("SGAI_API_KEY")

# Only build a client when a key is present; tools report a friendly
# error instead of crashing when it is absent.
if default_api_key:
    scrapegraph_client = ScapeGraphClient(default_api_key)
else:
    scrapegraph_client = None
90
118
91
119
92
# Add tool for markdownify
@mcp.tool()
def markdownify(website_url: str) -> Dict[str, Any]:
    """
    Convert a webpage into clean, formatted markdown.

    Args:
        website_url: URL of the webpage to convert

    Returns:
        Dictionary containing the markdown result
    """
    client = scrapegraph_client
    if client is None:
        return {"error": "ScapeGraph client not initialized. Please provide an API key."}

    try:
        result = client.markdownify(website_url)
    except Exception as exc:
        # Surface API failures to the MCP caller as a structured error.
        return {"error": str(exc)}
    return result
111
139
112
140
113
# Add tool for smartscraper
@mcp.tool()
def smartscraper(
    user_prompt: str,
    website_url: str
) -> Dict[str, Any]:
    """
    Extract structured data from a webpage using AI.

    Args:
        user_prompt: Instructions for what data to extract
        website_url: URL of the webpage to scrape

    Returns:
        Dictionary containing the extracted data
    """
    client = scrapegraph_client
    if client is None:
        return {"error": "ScapeGraph client not initialized. Please provide an API key."}

    try:
        result = client.smartscraper(user_prompt, website_url)
    except Exception as exc:
        # Surface API failures to the MCP caller as a structured error.
        return {"error": str(exc)}
    return result
136
164
137
165
138
# Add tool for searchscraper
@mcp.tool()
def searchscraper(
    user_prompt: str
) -> Dict[str, Any]:
    """
    Perform AI-powered web searches with structured results.

    Args:
        user_prompt: Search query or instructions

    Returns:
        Dictionary containing search results and reference URLs
    """
    client = scrapegraph_client
    if client is None:
        return {"error": "ScapeGraph client not initialized. Please provide an API key."}

    try:
        result = client.searchscraper(user_prompt)
    except Exception as exc:
        # Surface API failures to the MCP caller as a structured error.
        return {"error": str(exc)}
    return result
159
187
160
188
161
- async def cleanup () -> None :
162
- """Clean up resources when the server is shutting down."""
163
- if scrapegraph_wrapper is not None :
164
- await scrapegraph_wrapper .close ()
165
-
166
-
167
189
def main () -> None :
168
190
"""Run the ScapeGraph MCP server."""
169
191
print ("Starting ScapeGraph MCP server!" )
0 commit comments