Alibrown committed on
Commit
94743db
·
verified ·
1 Parent(s): 5f7b741

Update app/mcp.py

Browse files
Files changed (1) hide show
  1. app/mcp.py +306 -220
app/mcp.py CHANGED
@@ -1,268 +1,354 @@
 
1
  # app/mcp.py
2
  # Universal MCP Hub (Sandboxed) - based on PyFundaments Architecture
3
  # Copyright 2026 - Volkan Kücükbudak
4
  # Apache License V. 2 + ESOL 1.1
5
  # Repo: https://github.com/VolkanSah/Universal-MCP-Hub-sandboxed
6
- #
7
  # ARCHITECTURE NOTE:
8
- # This file lives exclusively in /app/ and is ONLY started by main.py (the "Guardian").
9
- # It has NO direct access to API keys, environment variables, or fundament services.
10
- # Everything is injected by the Guardian via the `fundaments` dictionary.
11
- # Direct execution is blocked by design.
12
  #
13
  # TOOL REGISTRATION PRINCIPLE:
14
- # Tools are only registered if their required API key/service is present.
15
- # No key = no tool = no crash. The server always starts, just with fewer tools.
 
 
16
 
17
  import asyncio
18
  import logging
19
  import os
20
- from typing import Dict, Any, Optional
21
 
22
- logger = logging.getLogger('mcp_hub')
23
 
 
24
 
25
- async def start_mcp(fundaments: Dict[str, Any]):
26
- """
27
- The main entry point for the MCP Hub logic.
28
- All fundament services are validated and provided by main.py.
29
 
30
- Args:
31
- fundaments: Dictionary containing initialized services from main.py.
32
- Services are already validated and ready to use.
 
 
 
33
  """
34
  logger.info("MCP Hub starting...")
35
 
36
- # Services are already validated and initialized by main.py
37
- config_service = fundaments["config"]
38
- db_service = fundaments["db"] # Can be None if not needed
39
- encryption_service = fundaments["encryption"] # Can be None if not needed
40
- access_control_service = fundaments["access_control"] # Can be None if not needed
41
- user_handler_service = fundaments["user_handler"] # Can be None if not needed
42
- security_service = fundaments["security"] # Can be None if not needed
43
 
44
  try:
45
  from mcp.server.fastmcp import FastMCP
46
  except ImportError:
47
- logger.critical("FastMCP is not installed. Run: pip install fastmcp")
48
  raise
49
 
50
  mcp = FastMCP(
51
- name="PyFundaments MCP Hub",
52
  instructions=(
53
- "Universal MCP Hub built on PyFundaments. "
54
- "Available tools depend on configured API keys and active services. "
55
  "Use list_active_tools to see what is currently available."
56
  )
57
  )
58
 
59
- # --- LLM Tools (register if API key is present) ---
60
-
61
- if config_service.has("ANTHROPIC_API_KEY"):
62
- import httpx
63
- _key = config_service.get("ANTHROPIC_API_KEY")
64
-
65
- @mcp.tool()
66
- async def anthropic_complete(prompt: str, model: str = "claude-haiku-4-5-20251001", max_tokens: int = 1024) -> str:
67
- """Send a prompt to Anthropic Claude. Models: claude-haiku-4-5-20251001, claude-sonnet-4-6, claude-opus-4-6"""
68
- async with httpx.AsyncClient() as client:
69
- r = await client.post(
70
- "https://api.anthropic.com/v1/messages",
71
- headers={"x-api-key": _key, "anthropic-version": "2023-06-01", "content-type": "application/json"},
72
- json={"model": model, "max_tokens": max_tokens, "messages": [{"role": "user", "content": prompt}]},
73
- timeout=60.0
74
- )
75
- r.raise_for_status()
76
- return r.json()["content"][0]["text"]
77
- logger.info("Tool registered: anthropic_complete")
78
-
79
- if config_service.has("GEMINI_API_KEY"):
80
- import httpx
81
- _key = config_service.get("GEMINI_API_KEY")
82
-
83
- @mcp.tool()
84
- async def gemini_complete(prompt: str, model: str = "gemini-2.0-flash", max_tokens: int = 1024) -> str:
85
- """Send a prompt to Google Gemini. Models: gemini-2.0-flash, gemini-1.5-pro, gemini-1.5-flash"""
86
- async with httpx.AsyncClient() as client:
87
- r = await client.post(
88
- f"https://generativelanguage.googleapis.com/v1beta/models/{model}:generateContent",
89
- params={"key": _key},
90
- json={"contents": [{"parts": [{"text": prompt}]}], "generationConfig": {"maxOutputTokens": max_tokens}},
91
- timeout=60.0
92
- )
93
- r.raise_for_status()
94
- return r.json()["candidates"][0]["content"]["parts"][0]["text"]
95
- logger.info("Tool registered: gemini_complete")
96
-
97
- if config_service.has("OPENROUTER_API_KEY"):
98
- import httpx
99
- _key = config_service.get("OPENROUTER_API_KEY")
100
- _referer = config_service.get("APP_URL", "https://huggingface.co")
101
-
102
- @mcp.tool()
103
- async def openrouter_complete(prompt: str, model: str = "mistralai/mistral-7b-instruct", max_tokens: int = 1024) -> str:
104
- """Send a prompt via OpenRouter (100+ models). Examples: openai/gpt-4o, meta-llama/llama-3-8b-instruct"""
105
- async with httpx.AsyncClient() as client:
106
- r = await client.post(
107
- "https://openrouter.ai/api/v1/chat/completions",
108
- headers={"Authorization": f"Bearer {_key}", "HTTP-Referer": _referer, "content-type": "application/json"},
109
- json={"model": model, "max_tokens": max_tokens, "messages": [{"role": "user", "content": prompt}]},
110
- timeout=60.0
111
- )
112
- r.raise_for_status()
113
- return r.json()["choices"][0]["message"]["content"]
114
- logger.info("Tool registered: openrouter_complete")
115
-
116
- if config_service.has("HF_TOKEN"):
117
- import httpx
118
- _key = config_service.get("HF_TOKEN")
119
-
120
- @mcp.tool()
121
- async def hf_inference(prompt: str, model: str = "mistralai/Mistral-7B-Instruct-v0.3", max_tokens: int = 512) -> str:
122
- """Send a prompt to HuggingFace Inference API. Browse models: https://huggingface.co/models?inference=warm"""
123
- async with httpx.AsyncClient() as client:
124
- r = await client.post(
125
- f"https://api-inference.huggingface.co/models/{model}/v1/chat/completions",
126
- headers={"Authorization": f"Bearer {_key}", "content-type": "application/json"},
127
- json={"model": model, "max_tokens": max_tokens, "messages": [{"role": "user", "content": prompt}]},
128
- timeout=120.0
129
- )
130
- r.raise_for_status()
131
- return r.json()["choices"][0]["message"]["content"]
132
- logger.info("Tool registered: hf_inference")
133
-
134
- # --- Search Tools (register if API key is present) ---
135
-
136
- if config_service.has("BRAVE_API_KEY"):
137
- import httpx
138
- _key = config_service.get("BRAVE_API_KEY")
139
-
140
- @mcp.tool()
141
- async def brave_search(query: str, count: int = 5) -> str:
142
- """Search the web via Brave Search API (independent index, privacy-focused)."""
143
- async with httpx.AsyncClient() as client:
144
- r = await client.get(
145
- "https://api.search.brave.com/res/v1/web/search",
146
- headers={"Accept": "application/json", "X-Subscription-Token": _key},
147
- params={"q": query, "count": min(count, 20)},
148
- timeout=30.0
149
- )
150
- r.raise_for_status()
151
- results = r.json().get("web", {}).get("results", [])
152
- if not results:
153
- return "No results found."
154
- return "\n\n".join([
155
- f"{i}. {res.get('title', '')}\n {res.get('url', '')}\n {res.get('description', '')}"
156
- for i, res in enumerate(results, 1)
157
- ])
158
- logger.info("Tool registered: brave_search")
159
-
160
- if config_service.has("TAVILY_API_KEY"):
161
- import httpx
162
- _key = config_service.get("TAVILY_API_KEY")
163
-
164
- @mcp.tool()
165
- async def tavily_search(query: str, max_results: int = 5) -> str:
166
- """AI-optimized web search via Tavily. Returns synthesized answer + sources."""
167
- async with httpx.AsyncClient() as client:
168
- r = await client.post(
169
- "https://api.tavily.com/search",
170
- json={"api_key": _key, "query": query, "max_results": max_results, "include_answer": True},
171
- timeout=30.0
172
- )
173
- r.raise_for_status()
174
- data = r.json()
175
- parts = []
176
- if data.get("answer"):
177
- parts.append(f"Summary: {data['answer']}")
178
- for res in data.get("results", []):
179
- parts.append(f"- {res['title']}\n {res['url']}\n {res.get('content', '')[:200]}...")
180
- return "\n\n".join(parts)
181
- logger.info("Tool registered: tavily_search")
182
-
183
- # --- DB Tools (register only if DB is initialized) ---
184
-
185
- if db_service is not None:
186
- from fundaments.postgresql import execute_secured_query
187
-
188
- @mcp.tool()
189
- async def db_query(sql: str) -> str:
190
- """Execute a read-only SELECT query. All write operations are blocked."""
191
- if not sql.strip().upper().startswith("SELECT"):
192
- return "Error: Only SELECT statements are permitted."
193
- try:
194
- result = await execute_secured_query(sql, fetch_method='fetch')
195
- if not result:
196
- return "No results."
197
- return str([dict(row) for row in result])
198
- except Exception as e:
199
- logger.error(f"DB query error: {e}")
200
- return f"Database error: {str(e)}"
201
- logger.info("Tool registered: db_query")
202
 
203
- else:
204
- logger.info("No database available - DB tools skipped.")
 
 
 
 
 
 
205
 
206
  # --- System Tools (always registered) ---
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
207
 
208
  @mcp.tool()
209
  def list_active_tools() -> Dict[str, Any]:
210
- """Show active services and configured integrations (key names only, never values)."""
 
 
 
211
  return {
212
- "fundaments_status": {k: v is not None for k, v in fundaments.items()},
213
- "configured_integrations": [
214
- key for key in [
215
- "ANTHROPIC_API_KEY", "GEMINI_API_KEY", "OPENROUTER_API_KEY",
216
- "HF_TOKEN", "BRAVE_API_KEY", "TAVILY_API_KEY", "DATABASE_URL"
217
- ] if config_service.has(key)
 
 
 
218
  ],
219
- "transport": os.getenv("MCP_TRANSPORT", "stdio"),
220
- "app_mode": os.getenv("APP_MODE", "mcp")
221
  }
222
  logger.info("Tool registered: list_active_tools")
223
 
224
  @mcp.tool()
225
  def health_check() -> Dict[str, str]:
226
- """Health check endpoint for HuggingFace Spaces and monitoring."""
227
- return {"status": "ok", "service": "PyFundaments MCP Hub"}
228
  logger.info("Tool registered: health_check")
229
 
230
- # --- Encryption available ---
231
- if encryption_service:
232
- logger.info("Encryption service active - available for future tools.")
233
-
234
- # --- Auth/Security available ---
235
- if user_handler_service and security_service:
236
- logger.info("Auth services active - available for future tools.")
237
-
238
- # --- Start transport ---
239
- transport = os.getenv("MCP_TRANSPORT", "stdio").lower()
240
- if transport == "sse":
241
- host = os.getenv("HOST", "0.0.0.0")
242
- port = int(os.getenv("PORT", "7860"))
243
- logger.info(f"MCP Hub starting via SSE on {host}:{port}")
244
- mcp.run(transport="sse", host=host, port=port)
245
- else:
246
- logger.info("MCP Hub starting via stdio (local mode)")
247
- await mcp.run_stdio_async() # ← direkt awaiten, kein neuer Loop!
248
-
249
- logger.info("MCP Hub shut down.")
250
-
251
 
252
- # ============================================================
253
- # Direct execution guard - mirrors example.app.py exactly
254
- # ============================================================
255
  if __name__ == '__main__':
256
- print("WARNING: Running mcp.py directly. Fundament modules might not be correctly initialized.")
257
- print("Please run 'python main.py' instead for proper initialization.")
258
-
259
- test_fundaments = {
260
- "config": None,
261
- "db": None,
262
- "encryption": None,
263
- "access_control": None,
264
- "user_handler": None,
265
- "security": None
266
- }
267
-
268
- asyncio.run(start_mcp(test_fundaments))
 
1
+ # =============================================================================
2
  # app/mcp.py
3
  # Universal MCP Hub (Sandboxed) - based on PyFundaments Architecture
4
  # Copyright 2026 - Volkan Kücükbudak
5
  # Apache License V. 2 + ESOL 1.1
6
  # Repo: https://github.com/VolkanSah/Universal-MCP-Hub-sandboxed
7
+ # =============================================================================
8
  # ARCHITECTURE NOTE:
9
+ # This file lives exclusively in app/ and is ONLY started by app/app.py.
10
+ # NO direct access to fundaments/*, .env, or Guardian (main.py).
11
+ # All config comes from app/.pyfun via app/config.py.
 
12
  #
13
  # TOOL REGISTRATION PRINCIPLE:
14
+ # Tools are only registered if their required ENV key exists.
15
+ # No key = no tool = no crash. Server always starts, just with fewer tools.
16
+ # ENV key NAMES come from app/.pyfun — values are never touched here.
17
+ # =============================================================================
18
 
19
  import asyncio
20
  import logging
21
  import os
22
+ from typing import Dict, Any
23
 
24
+ from . import config as app_config # reads app/.pyfun — only config source for app/*
25
 
26
+ logger = logging.getLogger('mcp')
27
 
 
 
 
 
28
 
29
async def start_mcp() -> None:
    """
    Main entry point for the MCP Hub.

    Called by app/app.py in its own thread/event loop.
    Reads all config from app/.pyfun via app/config.py.
    NO fundaments passed in — sandboxed.

    Raises:
        ImportError: if the `mcp` package (FastMCP) is not installed.
    """
    logger.info("MCP Hub starting...")

    # --- Load transport config from app/.pyfun [HUB]; ENV vars take precedence ---
    hub_cfg = app_config.get_hub()
    transport = os.getenv("MCP_TRANSPORT", hub_cfg.get("HUB_TRANSPORT", "stdio")).lower()
    host = os.getenv("HOST", hub_cfg.get("HUB_HOST", "0.0.0.0"))
    port = int(os.getenv("PORT", hub_cfg.get("HUB_PORT", "7860")))

    # Lazy import so a missing dependency fails with a clear log line.
    try:
        from mcp.server.fastmcp import FastMCP
    except ImportError:
        logger.critical("FastMCP not installed. Run: pip install mcp")
        raise

    mcp = FastMCP(
        name=hub_cfg.get("HUB_NAME", "Universal MCP Hub"),
        instructions=(
            f"{hub_cfg.get('HUB_DESCRIPTION', 'Universal MCP Hub on PyFundaments')} "
            "Use list_active_tools to see what is currently available."
        )
    )

    # =========================================================================
    # Tool Registration — MINIMAL BUILD
    # Tools register only if their ENV key exists (value never read here!).
    # Key NAMES come from app/.pyfun [LLM_PROVIDERS] / [SEARCH_PROVIDERS].
    # =========================================================================

    # --- LLM Tools ---
    _register_llm_tools(mcp)

    # --- Search Tools ---
    _register_search_tools(mcp)

    # --- DB Tools --- (disabled until db_sync is ready)
    # _register_db_tools(mcp)

    # --- System Tools (always registered) ---
    _register_system_tools(mcp)

    # =========================================================================
    # Start transport
    # =========================================================================
    if transport == "sse":
        # BUG FIX: the official MCP SDK's FastMCP.run_sse_async() takes no
        # host/port arguments — passing them raises TypeError. Host and port
        # are configured through mcp.settings instead.
        mcp.settings.host = host
        mcp.settings.port = port
        logger.info(f"MCP Hub starting via SSE on {host}:{port}")
        await mcp.run_sse_async()
    else:
        logger.info("MCP Hub starting via stdio (local mode)")
        await mcp.run_stdio_async()

    logger.info("MCP Hub shut down.")
87
+
88
+
89
+ # =============================================================================
90
+ # Tool registration helpers
91
+ # =============================================================================
92
+
93
def _register_llm_tools(mcp) -> None:
    """Register LLM tools based on active providers in app/.pyfun + ENV key check.

    BUG FIX: the previous inline version reassigned shared loop-scope
    variables (_key, _base_url, _def_model, ...) that the async tool closures
    read at CALL time — so with two or more active providers, every earlier
    tool silently used the LAST provider's credentials (late-binding closure
    bug). Each provider's tool is now built by its own helper function, which
    binds that provider's key/config via parameters.
    """
    # Dispatch table: provider name -> registration helper.
    handlers = {
        "anthropic": _add_anthropic_tool,
        "gemini": _add_gemini_tool,
        "openrouter": _add_openrouter_tool,
        "huggingface": _add_hf_tool,
    }

    for name, cfg in app_config.get_active_llm_providers().items():
        env_key = cfg.get("env_key", "")
        key = os.getenv(env_key) if env_key else None
        if not key:
            logger.info(f"LLM provider '{name}' skipped — ENV key '{env_key}' not set.")
            continue
        handler = handlers.get(name)
        if handler is None:
            logger.info(f"LLM provider '{name}' has no tool handler yet — skipped.")
            continue
        handler(mcp, cfg, key)


def _add_anthropic_tool(mcp, cfg: Dict[str, Any], key: str) -> None:
    """Register anthropic_complete; key/config are bound per provider (no late binding)."""
    import httpx
    api_ver = cfg.get("api_version_header", "2023-06-01")
    base_url = cfg.get("base_url", "https://api.anthropic.com/v1")
    def_model = cfg.get("default_model", "claude-haiku-4-5-20251001")

    @mcp.tool()
    async def anthropic_complete(
        prompt: str,
        model: str = def_model,
        max_tokens: int = 1024
    ) -> str:
        """Send a prompt to Anthropic Claude."""
        async with httpx.AsyncClient() as client:
            r = await client.post(
                f"{base_url}/messages",
                headers={
                    "x-api-key": key,
                    "anthropic-version": api_ver,
                    "content-type": "application/json"
                },
                json={
                    "model": model,
                    "max_tokens": max_tokens,
                    "messages": [{"role": "user", "content": prompt}]
                },
                timeout=60.0
            )
            r.raise_for_status()
            return r.json()["content"][0]["text"]

    logger.info(f"Tool registered: anthropic_complete (model: {def_model})")


def _add_gemini_tool(mcp, cfg: Dict[str, Any], key: str) -> None:
    """Register gemini_complete; key/config are bound per provider (no late binding)."""
    import httpx
    base_url = cfg.get("base_url", "https://generativelanguage.googleapis.com/v1beta")
    def_model = cfg.get("default_model", "gemini-2.0-flash")

    @mcp.tool()
    async def gemini_complete(
        prompt: str,
        model: str = def_model,
        max_tokens: int = 1024
    ) -> str:
        """Send a prompt to Google Gemini."""
        async with httpx.AsyncClient() as client:
            r = await client.post(
                f"{base_url}/models/{model}:generateContent",
                params={"key": key},
                json={
                    "contents": [{"parts": [{"text": prompt}]}],
                    "generationConfig": {"maxOutputTokens": max_tokens}
                },
                timeout=60.0
            )
            r.raise_for_status()
            return r.json()["candidates"][0]["content"]["parts"][0]["text"]

    logger.info(f"Tool registered: gemini_complete (model: {def_model})")


def _add_openrouter_tool(mcp, cfg: Dict[str, Any], key: str) -> None:
    """Register openrouter_complete; key/config are bound per provider (no late binding)."""
    import httpx
    base_url = cfg.get("base_url", "https://openrouter.ai/api/v1")
    def_model = cfg.get("default_model", "mistralai/mistral-7b-instruct")
    referer = os.getenv("APP_URL", "https://huggingface.co")

    @mcp.tool()
    async def openrouter_complete(
        prompt: str,
        model: str = def_model,
        max_tokens: int = 1024
    ) -> str:
        """Send a prompt via OpenRouter (100+ models)."""
        async with httpx.AsyncClient() as client:
            r = await client.post(
                f"{base_url}/chat/completions",
                headers={
                    "Authorization": f"Bearer {key}",
                    "HTTP-Referer": referer,
                    "content-type": "application/json"
                },
                json={
                    "model": model,
                    "max_tokens": max_tokens,
                    "messages": [{"role": "user", "content": prompt}]
                },
                timeout=60.0
            )
            r.raise_for_status()
            return r.json()["choices"][0]["message"]["content"]

    logger.info(f"Tool registered: openrouter_complete (model: {def_model})")


def _add_hf_tool(mcp, cfg: Dict[str, Any], key: str) -> None:
    """Register hf_inference; key/config are bound per provider (no late binding)."""
    import httpx
    base_url = cfg.get("base_url", "https://api-inference.huggingface.co/models")
    def_model = cfg.get("default_model", "mistralai/Mistral-7B-Instruct-v0.3")

    @mcp.tool()
    async def hf_inference(
        prompt: str,
        model: str = def_model,
        max_tokens: int = 512
    ) -> str:
        """Send a prompt to HuggingFace Inference API."""
        async with httpx.AsyncClient() as client:
            r = await client.post(
                f"{base_url}/{model}/v1/chat/completions",
                headers={
                    "Authorization": f"Bearer {key}",
                    "content-type": "application/json"
                },
                json={
                    "model": model,
                    "max_tokens": max_tokens,
                    "messages": [{"role": "user", "content": prompt}]
                },
                timeout=120.0
            )
            r.raise_for_status()
            return r.json()["choices"][0]["message"]["content"]

    logger.info(f"Tool registered: hf_inference (model: {def_model})")
237
+
238
+
239
def _register_search_tools(mcp) -> None:
    """Register search tools based on active providers in app/.pyfun + ENV key check.

    BUG FIX: the previous inline version reassigned shared loop-scope
    variables (_key, _base_url, ...) that the async tool closures read at
    CALL time — so with both providers active, brave_search would use
    Tavily's key/config (late-binding closure bug). Each provider's tool is
    now built by its own helper, which binds key/config via parameters.
    """
    # Dispatch table: provider name -> registration helper.
    handlers = {
        "brave": _add_brave_tool,
        "tavily": _add_tavily_tool,
    }

    for name, cfg in app_config.get_active_search_providers().items():
        env_key = cfg.get("env_key", "")
        key = os.getenv(env_key) if env_key else None
        if not key:
            logger.info(f"Search provider '{name}' skipped — ENV key '{env_key}' not set.")
            continue
        handler = handlers.get(name)
        if handler is None:
            logger.info(f"Search provider '{name}' has no tool handler yet — skipped.")
            continue
        handler(mcp, cfg, key)


def _add_brave_tool(mcp, cfg: Dict[str, Any], key: str) -> None:
    """Register brave_search; key/config are bound per provider (no late binding)."""
    import httpx
    base_url = cfg.get("base_url", "https://api.search.brave.com/res/v1/web/search")
    def_results = int(cfg.get("default_results", "5"))
    max_results = int(cfg.get("max_results", "20"))

    @mcp.tool()
    async def brave_search(query: str, count: int = def_results) -> str:
        """Search the web via Brave Search API."""
        async with httpx.AsyncClient() as client:
            r = await client.get(
                base_url,
                headers={
                    "Accept": "application/json",
                    "X-Subscription-Token": key
                },
                # Cap the requested count at the provider's configured maximum.
                params={"q": query, "count": min(count, max_results)},
                timeout=30.0
            )
            r.raise_for_status()
            results = r.json().get("web", {}).get("results", [])
            if not results:
                return "No results found."
            return "\n\n".join([
                f"{i}. {res.get('title', '')}\n {res.get('url', '')}\n {res.get('description', '')}"
                for i, res in enumerate(results, 1)
            ])

    logger.info("Tool registered: brave_search")


def _add_tavily_tool(mcp, cfg: Dict[str, Any], key: str) -> None:
    """Register tavily_search; key/config are bound per provider (no late binding)."""
    import httpx
    base_url = cfg.get("base_url", "https://api.tavily.com/search")
    def_results = int(cfg.get("default_results", "5"))
    incl_answer = cfg.get("include_answer", "true").lower() == "true"

    @mcp.tool()
    async def tavily_search(query: str, max_results: int = def_results) -> str:
        """AI-optimized web search via Tavily."""
        async with httpx.AsyncClient() as client:
            r = await client.post(
                base_url,
                json={
                    "api_key": key,
                    "query": query,
                    "max_results": max_results,
                    "include_answer": incl_answer
                },
                timeout=30.0
            )
            r.raise_for_status()
            data = r.json()
            parts = []
            # Tavily optionally returns a synthesized answer before the hits.
            if data.get("answer"):
                parts.append(f"Summary: {data['answer']}")
            for res in data.get("results", []):
                parts.append(
                    f"- {res['title']}\n {res['url']}\n {res.get('content', '')[:200]}..."
                )
            return "\n\n".join(parts)

    logger.info("Tool registered: tavily_search")
318
+
319
+
320
def _register_system_tools(mcp) -> None:
    """System tools — always registered, no ENV key required."""

    @mcp.tool()
    def list_active_tools() -> Dict[str, Any]:
        """Show active providers and configured integrations (key names only, never values)."""
        hub = app_config.get_hub()

        def _ready(providers: Dict[str, Any]) -> list:
            # A provider counts as active only when its ENV key is actually set.
            return [
                provider for provider, pcfg in providers.items()
                if os.getenv(pcfg.get("env_key", ""))
            ]

        return {
            "hub": hub.get("HUB_NAME", "Universal MCP Hub"),
            "version": hub.get("HUB_VERSION", ""),
            "active_llm_providers": _ready(app_config.get_active_llm_providers()),
            "active_search_providers": _ready(app_config.get_active_search_providers()),
        }
    logger.info("Tool registered: list_active_tools")

    @mcp.tool()
    def health_check() -> Dict[str, str]:
        """Health check for monitoring and HuggingFace Spaces."""
        return {"status": "ok", "service": "Universal MCP Hub"}
    logger.info("Tool registered: health_check")
348
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
349
 
350
# =============================================================================
# Direct execution guard
# =============================================================================
if __name__ == '__main__':
    # Intentionally does nothing useful: this module is sandboxed and must be
    # bootstrapped by the Guardian (main.py), which performs initialization.
    print("WARNING: Run via main.py, not directly.")