forked from Sharan-Kumar-R/Custom-MCP-Server
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmain.py
More file actions
185 lines (168 loc) · 6.19 KB
/
main.py
File metadata and controls
185 lines (168 loc) · 6.19 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
import json
import os
import sys
from typing import Any

import httpx
from dotenv import load_dotenv
from mcp.server.fastmcp import FastMCP
load_dotenv()
# Load API keys from .env
RAPIDAPI_KEY = os.getenv("RAPIDAPI_KEY")
SERPER_API_KEY = os.getenv("SERPER_API_KEY")
# API hosts
LINKEDIN_HOST = "fresh-linkedin-profile-data.p.rapidapi.com"
FACEBOOK_HOST = "facebook-scraper3.p.rapidapi.com"
INSTAGRAM_HOST = "instagram-scraper-stable-api.p.rapidapi.com"
LINKEDIN_API_BASE = f"https://{LINKEDIN_HOST}"
FACEBOOK_API_BASE = f"https://{FACEBOOK_HOST}"
INSTAGRAM_API_BASE = f"https://{INSTAGRAM_HOST}"
SERPER_API_BASE = "https://google.serper.dev"
# Check required keys
if not RAPIDAPI_KEY:
raise ValueError("RAPIDAPI_KEY is not set in the environment variables")
if not SERPER_API_KEY:
raise ValueError("SERPER_API_KEY is not set in the environment variables")
# Initialize MCP
mcp = FastMCP("social_web_scraper")
# ---- LINKEDIN PERSONAL PROFILE TOOL ----
async def fetch_personal_profile(linkedin_url: str) -> dict[str, Any] | None:
params = {"linkedin_url": linkedin_url}
headers = {
"x-rapidapi-key": RAPIDAPI_KEY,
"x-rapidapi-host": LINKEDIN_HOST
}
async with httpx.AsyncClient() as client:
try:
response = await client.get(
f"{LINKEDIN_API_BASE}/get-linkedin-profile",
headers=headers,
params=params,
timeout=30.0
)
response.raise_for_status()
return response.json()
except Exception as e:
print(f"Error fetching LinkedIn personal profile: {e}")
return None
@mcp.tool()
async def get_personal_profile(linkedin_url: str) -> str:
"""Fetch full LinkedIn personal profile data for a given URL."""
data = await fetch_personal_profile(linkedin_url)
if not data:
return "Unable to fetch LinkedIn personal profile data."
return json.dumps(data, indent=2)
# ---- LINKEDIN COMPANY PROFILE TOOL ----
async def fetch_company_profile(linkedin_url: str) -> dict[str, Any] | None:
params = {"linkedin_url": linkedin_url}
headers = {
"x-rapidapi-key": RAPIDAPI_KEY,
"x-rapidapi-host": LINKEDIN_HOST
}
async with httpx.AsyncClient() as client:
try:
response = await client.get(
f"{LINKEDIN_API_BASE}/get-company-by-linkedinurl",
headers=headers,
params=params,
timeout=30.0
)
response.raise_for_status()
return response.json()
except Exception as e:
print(f"Error fetching LinkedIn company profile: {e}")
return None
@mcp.tool()
async def get_company_profile(linkedin_url: str) -> str:
"""Fetch full LinkedIn company page data for a given URL."""
data = await fetch_company_profile(linkedin_url)
if not data:
return "Unable to fetch LinkedIn company profile data."
return json.dumps(data, indent=2)
# ---- FACEBOOK PROFILE TOOL ----
async def fetch_facebook_profile(profile_url: str) -> dict[str, Any] | None:
params = {"url": profile_url}
headers = {
"x-rapidapi-key": RAPIDAPI_KEY,
"x-rapidapi-host": FACEBOOK_HOST
}
async with httpx.AsyncClient() as client:
try:
response = await client.get(
f"{FACEBOOK_API_BASE}/profile/details_url",
headers=headers,
params=params,
timeout=30.0
)
response.raise_for_status()
return response.json()
except Exception as e:
print(f"Error fetching Facebook profile: {e}")
return None
@mcp.tool()
async def get_facebook_profile(profile_url: str) -> str:
"""Fetch Facebook profile data for a given public URL."""
data = await fetch_facebook_profile(profile_url)
if not data:
return "Unable to fetch Facebook profile data."
return json.dumps(data, indent=2)
# ---- INSTAGRAM PROFILE TOOL ----
async def fetch_instagram_profile(instagram_url_or_username: str) -> dict[str, Any] | None:
params = {"username_or_url": instagram_url_or_username}
headers = {
"x-rapidapi-key": RAPIDAPI_KEY,
"x-rapidapi-host": INSTAGRAM_HOST
}
async with httpx.AsyncClient() as client:
try:
response = await client.get(
f"{INSTAGRAM_API_BASE}/ig_get_fb_profile_hover.php",
headers=headers,
params=params,
timeout=30.0
)
response.raise_for_status()
return response.json()
except Exception as e:
print(f"Error fetching Instagram profile: {e}")
return None
@mcp.tool()
async def get_instagram_profile(instagram_url_or_username: str) -> str:
"""Fetch Instagram profile data for a given public username or URL."""
data = await fetch_instagram_profile(instagram_url_or_username)
if not data:
return "Unable to fetch Instagram profile data."
return json.dumps(data, indent=2)
# ---- WEBSITE SCRAPER TOOL (Google Serper) ----
async def fetch_google_search(query: str, gl: str = "in", num: int = 10, page: int = 1) -> dict[str, Any] | None:
payload = {
"q": query,
"gl": gl,
"num": num,
"page": page
}
headers = {
"X-API-KEY": SERPER_API_KEY,
"Content-Type": "application/json"
}
async with httpx.AsyncClient() as client:
try:
response = await client.post(
f"{SERPER_API_BASE}/search",
headers=headers,
json=payload,
timeout=30.0
)
response.raise_for_status()
return response.json()
except Exception as e:
print(f"Error fetching Google search data: {e}")
return None
@mcp.tool()
async def scrape_website(query: str, gl: str = "in", num: int = 10, page: int = 1) -> str:
"""Fetch search results for a given query using Google Serper API."""
data = await fetch_google_search(query, gl, num, page)
if not data:
return "Unable to fetch Google search data."
return json.dumps(data, indent=2)
# ---- RUN SERVER ----
if __name__ == "__main__":
mcp.run(transport="stdio")