Skip to content

Commit a157c32

Browse files
committed
fix: removed sdk
1 parent 361f4a1 commit a157c32

File tree

3 files changed

+77
-55
lines changed

3 files changed

+77
-55
lines changed

README.md

+4-4
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
1-
# ScapeGraph MCP Server
1+
# ScrapeGraph MCP Server
22

33
[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
44
[![Python 3.10](https://img.shields.io/badge/python-3.10-blue.svg)](https://www.python.org/downloads/release/python-3100/)
5-
[![smithery badge](https://smithery.ai/badge/@scrapegraphai/scrapegraph-mcp)](https://smithery.ai/server/@scrapegraphai/scrapegraph-mcp)
5+
[![smithery badge](https://smithery.ai/badge/@ScrapeGraphAI/scrapegraph-mcp)](https://smithery.ai/server/@ScrapeGraphAI/scrapegraph-mcp)
66
A [Model Context Protocol](https://modelcontextprotocol.io/introduction) (MCP) server that provides access to the [ScrapeGraph AI](https://scrapegraphai.com) API. It allows language models to use AI-powered web scraping capabilities.
77

88
## Available Tools
@@ -22,10 +22,10 @@ You'll need a ScapeGraph API key to use this server. You can obtain one by:
2222

2323
### Installing via Smithery
2424

25-
To install Oura API Integration Server for Claude Desktop automatically via [Smithery](https://smithery.ai/server/@scrapegraphai/scrapegraph-mcp):
25+
To install ScrapeGraph API Integration Server for Claude Desktop automatically via [Smithery](https://smithery.ai/server/@ScrapeGraphAI/scrapegraph-mcp):
2626

2727
```bash
28-
npx -y @smithery/cli install @scrapegraphai/scrapegraph-mcp --client claude
28+
npx -y @smithery/cli install @ScrapeGraphAI/scrapegraph-mcp --client claude
2929
```
3030

3131
### Claude for Desktop

pyproject.toml

+1-1
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ authors = [
1010
requires-python = ">=3.10"
1111
dependencies = [
1212
"mcp[cli]>=1.3.0",
13-
"scrapegraph-py>=1.12.0",
13+
"httpx>=0.24.0",
1414
]
1515
classifiers = [
1616
"Development Status :: 4 - Beta",

src/scrapegraph_mcp/server.py

+72-50
Original file line numberDiff line numberDiff line change
@@ -8,26 +8,32 @@
88
"""
99

1010
import os
11-
import asyncio
12-
from typing import Any, Dict, Optional
11+
from typing import Any, Dict
1312

13+
import httpx
1414
from mcp.server.fastmcp import FastMCP
15-
from scrapegraph_py import AsyncClient
1615

1716

18-
class ScapeGraphAsyncClient:
19-
"""Async wrapper for the ScapeGraph Python SDK."""
17+
class ScapeGraphClient:
18+
"""Client for interacting with the ScapeGraph API."""
19+
20+
BASE_URL = "https://api.scrapegraphai.com/v1"
2021

2122
def __init__(self, api_key: str):
2223
"""
23-
Initialize the ScapeGraph async client.
24+
Initialize the ScapeGraph API client.
2425
2526
Args:
2627
api_key: API key for ScapeGraph API
2728
"""
28-
self.client = AsyncClient(api_key=api_key)
29-
30-
async def markdownify(self, website_url: str) -> Dict[str, Any]:
29+
self.api_key = api_key
30+
self.headers = {
31+
"SGAI-APIKEY": api_key,
32+
"Content-Type": "application/json"
33+
}
34+
self.client = httpx.Client(timeout=60.0)
35+
36+
def markdownify(self, website_url: str) -> Dict[str, Any]:
3137
"""
3238
Convert a webpage into clean, formatted markdown.
3339
@@ -37,13 +43,20 @@ async def markdownify(self, website_url: str) -> Dict[str, Any]:
3743
Returns:
3844
Dictionary containing the markdown result
3945
"""
40-
return await self.client.markdownify(website_url=website_url)
46+
url = f"{self.BASE_URL}/markdownify"
47+
data = {
48+
"website_url": website_url
49+
}
50+
51+
response = self.client.post(url, headers=self.headers, json=data)
52+
53+
if response.status_code != 200:
54+
error_msg = f"Error {response.status_code}: {response.text}"
55+
raise Exception(error_msg)
4156

42-
async def smartscraper(
43-
self,
44-
user_prompt: str,
45-
website_url: str
46-
) -> Dict[str, Any]:
57+
return response.json()
58+
59+
def smartscraper(self, user_prompt: str, website_url: str) -> Dict[str, Any]:
4760
"""
4861
Extract structured data from a webpage using AI.
4962
@@ -54,15 +67,21 @@ async def smartscraper(
5467
Returns:
5568
Dictionary containing the extracted data
5669
"""
57-
return await self.client.smartscraper(
58-
user_prompt=user_prompt,
59-
website_url=website_url
60-
)
61-
62-
async def searchscraper(
63-
self,
64-
user_prompt: str
65-
) -> Dict[str, Any]:
70+
url = f"{self.BASE_URL}/smartscraper"
71+
data = {
72+
"user_prompt": user_prompt,
73+
"website_url": website_url
74+
}
75+
76+
response = self.client.post(url, headers=self.headers, json=data)
77+
78+
if response.status_code != 200:
79+
error_msg = f"Error {response.status_code}: {response.text}"
80+
raise Exception(error_msg)
81+
82+
return response.json()
83+
84+
def searchscraper(self, user_prompt: str) -> Dict[str, Any]:
6685
"""
6786
Perform AI-powered web searches with structured results.
6887
@@ -72,26 +91,35 @@ async def searchscraper(
7291
Returns:
7392
Dictionary containing search results and reference URLs
7493
"""
75-
return await self.client.searchscraper(
76-
user_prompt=user_prompt
77-
)
94+
url = f"{self.BASE_URL}/searchscraper"
95+
data = {
96+
"user_prompt": user_prompt
97+
}
98+
99+
response = self.client.post(url, headers=self.headers, json=data)
100+
101+
if response.status_code != 200:
102+
error_msg = f"Error {response.status_code}: {response.text}"
103+
raise Exception(error_msg)
78104

79-
async def close(self) -> None:
80-
"""Close the client to free up resources."""
81-
await self.client.close()
105+
return response.json()
82106

107+
def close(self) -> None:
108+
"""Close the HTTP client."""
109+
self.client.close()
83110

84-
# Create MCP server and AsyncScapeGraphWrapper at module level
111+
112+
# Create MCP server
85113
mcp = FastMCP("ScapeGraph API MCP Server")
86114

87115
# Default API key (will be overridden in main or by direct assignment)
88116
default_api_key = os.environ.get("SGAI_API_KEY")
89-
scrapegraph_wrapper = ScapeGraphAsyncClient(default_api_key) if default_api_key else None
117+
scrapegraph_client = ScapeGraphClient(default_api_key) if default_api_key else None
90118

91119

92-
# Add tools for markdownify
120+
# Add tool for markdownify
93121
@mcp.tool()
94-
async def markdownify(website_url: str) -> Dict[str, Any]:
122+
def markdownify(website_url: str) -> Dict[str, Any]:
95123
"""
96124
Convert a webpage into clean, formatted markdown.
97125
@@ -101,18 +129,18 @@ async def markdownify(website_url: str) -> Dict[str, Any]:
101129
Returns:
102130
Dictionary containing the markdown result
103131
"""
104-
if scrapegraph_wrapper is None:
132+
if scrapegraph_client is None:
105133
return {"error": "ScapeGraph client not initialized. Please provide an API key."}
106134

107135
try:
108-
return await scrapegraph_wrapper.markdownify(website_url)
136+
return scrapegraph_client.markdownify(website_url)
109137
except Exception as e:
110138
return {"error": str(e)}
111139

112140

113-
# Add tools for smartscraper
141+
# Add tool for smartscraper
114142
@mcp.tool()
115-
async def smartscraper(
143+
def smartscraper(
116144
user_prompt: str,
117145
website_url: str
118146
) -> Dict[str, Any]:
@@ -126,18 +154,18 @@ async def smartscraper(
126154
Returns:
127155
Dictionary containing the extracted data
128156
"""
129-
if scrapegraph_wrapper is None:
157+
if scrapegraph_client is None:
130158
return {"error": "ScapeGraph client not initialized. Please provide an API key."}
131159

132160
try:
133-
return await scrapegraph_wrapper.smartscraper(user_prompt, website_url)
161+
return scrapegraph_client.smartscraper(user_prompt, website_url)
134162
except Exception as e:
135163
return {"error": str(e)}
136164

137165

138-
# Add tools for searchscraper
166+
# Add tool for searchscraper
139167
@mcp.tool()
140-
async def searchscraper(
168+
def searchscraper(
141169
user_prompt: str
142170
) -> Dict[str, Any]:
143171
"""
@@ -149,21 +177,15 @@ async def searchscraper(
149177
Returns:
150178
Dictionary containing search results and reference URLs
151179
"""
152-
if scrapegraph_wrapper is None:
180+
if scrapegraph_client is None:
153181
return {"error": "ScapeGraph client not initialized. Please provide an API key."}
154182

155183
try:
156-
return await scrapegraph_wrapper.searchscraper(user_prompt)
184+
return scrapegraph_client.searchscraper(user_prompt)
157185
except Exception as e:
158186
return {"error": str(e)}
159187

160188

161-
async def cleanup() -> None:
162-
"""Clean up resources when the server is shutting down."""
163-
if scrapegraph_wrapper is not None:
164-
await scrapegraph_wrapper.close()
165-
166-
167189
def main() -> None:
168190
"""Run the ScapeGraph MCP server."""
169191
print("Starting ScapeGraph MCP server!")

0 commit comments

Comments
 (0)