Skip to content

Commit c67126d

Browse files
committed
fix: revert log back
1 parent 5938a1d commit c67126d

File tree

1 file changed

+28
-121
lines changed

1 file changed

+28
-121
lines changed

src/scrapegraph_mcp/server.py

+28-121
Original file line number | Diff line number | Diff line change
@@ -8,26 +8,12 @@
88
"""
99

1010
import os
11-
import sys
12-
import logging
13-
import traceback
1411
from typing import Any, Dict
1512

1613
import httpx
1714
from mcp.server.fastmcp import FastMCP
1815

1916

20-
# Configure logging
21-
logging.basicConfig(
22-
level=logging.INFO,
23-
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
24-
handlers=[
25-
logging.StreamHandler(sys.stderr)
26-
]
27-
)
28-
logger = logging.getLogger("scrapegraph_mcp")
29-
30-
3117
class ScapeGraphClient:
3218
"""Client for interacting with the ScapeGraph API."""
3319

@@ -40,14 +26,12 @@ def __init__(self, api_key: str):
4026
Args:
4127
api_key: API key for ScapeGraph API
4228
"""
43-
logger.info("Initializing ScapeGraphClient")
4429
self.api_key = api_key
4530
self.headers = {
4631
"SGAI-APIKEY": api_key,
4732
"Content-Type": "application/json"
4833
}
4934
self.client = httpx.Client(timeout=60.0)
50-
logger.info("ScapeGraphClient initialized successfully")
5135

5236
def markdownify(self, website_url: str) -> Dict[str, Any]:
5337
"""
@@ -59,26 +43,18 @@ def markdownify(self, website_url: str) -> Dict[str, Any]:
5943
Returns:
6044
Dictionary containing the markdown result
6145
"""
62-
logger.info(f"Calling markdownify for URL: {website_url}")
6346
url = f"{self.BASE_URL}/markdownify"
6447
data = {
6548
"website_url": website_url
6649
}
6750

68-
try:
69-
logger.debug(f"Making POST request to {url}")
70-
response = self.client.post(url, headers=self.headers, json=data)
71-
72-
if response.status_code != 200:
73-
error_msg = f"Error {response.status_code}: {response.text}"
74-
logger.error(f"API request failed: {error_msg}")
75-
raise Exception(error_msg)
76-
77-
logger.info("markdownify request successful")
78-
return response.json()
79-
except Exception as e:
80-
logger.error(f"Exception in markdownify: {str(e)}")
81-
raise
51+
response = self.client.post(url, headers=self.headers, json=data)
52+
53+
if response.status_code != 200:
54+
error_msg = f"Error {response.status_code}: {response.text}"
55+
raise Exception(error_msg)
56+
57+
return response.json()
8258

8359
def smartscraper(self, user_prompt: str, website_url: str) -> Dict[str, Any]:
8460
"""
@@ -91,27 +67,19 @@ def smartscraper(self, user_prompt: str, website_url: str) -> Dict[str, Any]:
9167
Returns:
9268
Dictionary containing the extracted data
9369
"""
94-
logger.info(f"Calling smartscraper for URL: {website_url} with prompt: {user_prompt}")
9570
url = f"{self.BASE_URL}/smartscraper"
9671
data = {
9772
"user_prompt": user_prompt,
9873
"website_url": website_url
9974
}
10075

101-
try:
102-
logger.debug(f"Making POST request to {url}")
103-
response = self.client.post(url, headers=self.headers, json=data)
104-
105-
if response.status_code != 200:
106-
error_msg = f"Error {response.status_code}: {response.text}"
107-
logger.error(f"API request failed: {error_msg}")
108-
raise Exception(error_msg)
109-
110-
logger.info("smartscraper request successful")
111-
return response.json()
112-
except Exception as e:
113-
logger.error(f"Exception in smartscraper: {str(e)}")
114-
raise
76+
response = self.client.post(url, headers=self.headers, json=data)
77+
78+
if response.status_code != 200:
79+
error_msg = f"Error {response.status_code}: {response.text}"
80+
raise Exception(error_msg)
81+
82+
return response.json()
11583

11684
def searchscraper(self, user_prompt: str) -> Dict[str, Any]:
11785
"""
@@ -123,57 +91,30 @@ def searchscraper(self, user_prompt: str) -> Dict[str, Any]:
12391
Returns:
12492
Dictionary containing search results and reference URLs
12593
"""
126-
logger.info(f"Calling searchscraper with prompt: {user_prompt}")
12794
url = f"{self.BASE_URL}/searchscraper"
12895
data = {
12996
"user_prompt": user_prompt
13097
}
13198

132-
try:
133-
logger.debug(f"Making POST request to {url}")
134-
response = self.client.post(url, headers=self.headers, json=data)
135-
136-
if response.status_code != 200:
137-
error_msg = f"Error {response.status_code}: {response.text}"
138-
logger.error(f"API request failed: {error_msg}")
139-
raise Exception(error_msg)
140-
141-
logger.info("searchscraper request successful")
142-
return response.json()
143-
except Exception as e:
144-
logger.error(f"Exception in searchscraper: {str(e)}")
145-
raise
99+
response = self.client.post(url, headers=self.headers, json=data)
100+
101+
if response.status_code != 200:
102+
error_msg = f"Error {response.status_code}: {response.text}"
103+
raise Exception(error_msg)
104+
105+
return response.json()
146106

147107
def close(self) -> None:
148108
"""Close the HTTP client."""
149-
logger.info("Closing ScapeGraphClient")
150109
self.client.close()
151-
logger.info("ScapeGraphClient closed")
152-
153110

154-
# Log environment information
155-
logger.info(f"Python version: {sys.version}")
156-
logger.info(f"Current working directory: {os.getcwd()}")
157-
logger.info(f"PATH environment variable: {os.environ.get('PATH', 'Not set')}")
158111

159112
# Create MCP server
160-
logger.info("Creating MCP server")
161113
mcp = FastMCP("ScapeGraph API MCP Server")
162-
logger.info("MCP server created")
163114

164115
# Default API key (will be overridden in main or by direct assignment)
165116
default_api_key = os.environ.get("SGAI_API_KEY")
166-
logger.info(f"SGAI_API_KEY environment variable is {'set' if default_api_key else 'not set'}")
167-
168-
scrapegraph_client = None
169-
if default_api_key:
170-
try:
171-
logger.info("Initializing ScapeGraphClient with default API key")
172-
scrapegraph_client = ScapeGraphClient(default_api_key)
173-
logger.info("ScapeGraphClient initialized successfully")
174-
except Exception as e:
175-
logger.error(f"Failed to initialize ScapeGraphClient: {str(e)}")
176-
logger.error(traceback.format_exc())
117+
scrapegraph_client = ScapeGraphClient(default_api_key) if default_api_key else None
177118

178119

179120
# Add tool for markdownify
@@ -188,19 +129,12 @@ def markdownify(website_url: str) -> Dict[str, Any]:
188129
Returns:
189130
Dictionary containing the markdown result
190131
"""
191-
logger.info(f"Tool markdownify called with URL: {website_url}")
192-
193132
if scrapegraph_client is None:
194-
logger.warning("ScapeGraph client not initialized")
195133
return {"error": "ScapeGraph client not initialized. Please provide an API key."}
196134

197135
try:
198-
result = scrapegraph_client.markdownify(website_url)
199-
logger.info("markdownify tool call successful")
200-
return result
136+
return scrapegraph_client.markdownify(website_url)
201137
except Exception as e:
202-
logger.error(f"Error in markdownify tool: {str(e)}")
203-
logger.error(traceback.format_exc())
204138
return {"error": str(e)}
205139

206140

@@ -220,19 +154,12 @@ def smartscraper(
220154
Returns:
221155
Dictionary containing the extracted data
222156
"""
223-
logger.info(f"Tool smartscraper called with URL: {website_url} and prompt: {user_prompt}")
224-
225157
if scrapegraph_client is None:
226-
logger.warning("ScapeGraph client not initialized")
227158
return {"error": "ScapeGraph client not initialized. Please provide an API key."}
228159

229160
try:
230-
result = scrapegraph_client.smartscraper(user_prompt, website_url)
231-
logger.info("smartscraper tool call successful")
232-
return result
161+
return scrapegraph_client.smartscraper(user_prompt, website_url)
233162
except Exception as e:
234-
logger.error(f"Error in smartscraper tool: {str(e)}")
235-
logger.error(traceback.format_exc())
236163
return {"error": str(e)}
237164

238165

@@ -250,40 +177,20 @@ def searchscraper(
250177
Returns:
251178
Dictionary containing search results and reference URLs
252179
"""
253-
logger.info(f"Tool searchscraper called with prompt: {user_prompt}")
254-
255180
if scrapegraph_client is None:
256-
logger.warning("ScapeGraph client not initialized")
257181
return {"error": "ScapeGraph client not initialized. Please provide an API key."}
258182

259183
try:
260-
result = scrapegraph_client.searchscraper(user_prompt)
261-
logger.info("searchscraper tool call successful")
262-
return result
184+
return scrapegraph_client.searchscraper(user_prompt)
263185
except Exception as e:
264-
logger.error(f"Error in searchscraper tool: {str(e)}")
265-
logger.error(traceback.format_exc())
266186
return {"error": str(e)}
267187

268188

269189
def main() -> None:
270190
"""Run the ScapeGraph MCP server."""
271-
try:
272-
logger.info("Starting ScapeGraph MCP server!")
273-
print("Starting ScapeGraph MCP server!", file=sys.stderr)
274-
275-
# Log system information
276-
logger.info(f"Python executable: {sys.executable}")
277-
logger.info(f"Arguments: {sys.argv}")
278-
279-
# Run the server
280-
logger.info("Running MCP server with stdio transport")
281-
mcp.run(transport="stdio")
282-
except Exception as e:
283-
logger.critical(f"Fatal error in main: {str(e)}")
284-
logger.critical(traceback.format_exc())
285-
print(f"Fatal error: {str(e)}", file=sys.stderr)
286-
sys.exit(1)
191+
print("Starting ScapeGraph MCP server!")
192+
# Run the server
193+
mcp.run(transport="stdio")
287194

288195

289196
if __name__ == "__main__":

0 commit comments

Comments (0)