From 6d27621b92c725ffb6bf8554022245dab2240662 Mon Sep 17 00:00:00 2001 From: Mihai Criveti Date: Sat, 27 Sep 2025 10:17:50 +0100 Subject: [PATCH 01/35] mTLS support Signed-off-by: Mihai Criveti --- .env.example | 9 + README.md | 6 + charts/mcp-stack/values.yaml | 6 + docs/docs/architecture/plugin-spec/.pages | 2 +- .../plugin-spec/01-architecture.md | 1 - .../plugin-spec/02-core-components.md | 3 +- .../plugin-spec/03-plugin-types.md | 3 +- .../plugin-spec/04-hook-architecture.md | 3 +- .../plugin-spec/05-hook-system.md | 1 - .../plugin-spec/06-gateway-hooks.md | 1 - .../plugin-spec/07-security-hooks.md | 1 - .../plugin-spec/08-external-plugins.md | 16 + .../architecture/plugin-spec/09-security.md | 1 - .../plugin-spec/10-error-handling.md | 1 - .../plugin-spec/11-performance.md | 1 - .../plugin-spec/12-development.md | 1 - .../architecture/plugin-spec/13-testing.md | 1 - .../architecture/plugin-spec/14-conclusion.md | 1 - .../plugin-framework-specification.md | 1 - docs/docs/architecture/plugins.md | 9 + docs/docs/index.md | 6 + docs/docs/manage/mtls.md | 943 ++++++++++++++++++ llms/plugins-llms.md | 11 +- .../plugins/framework/external/mcp/client.py | 51 +- mcpgateway/plugins/framework/models.py | 92 ++ plugins/config.yaml | 4 + plugins/external/config.yaml | 5 + .../plugins/framework/test_models_tls.py | 114 +++ 28 files changed, 1271 insertions(+), 23 deletions(-) create mode 100644 docs/docs/manage/mtls.md create mode 100644 tests/unit/mcpgateway/plugins/framework/test_models_tls.py diff --git a/.env.example b/.env.example index f17cddd4c..a980aa5c7 100644 --- a/.env.example +++ b/.env.example @@ -603,6 +603,15 @@ PLUGINS_ENABLED=false # Default: plugins/config.yaml PLUGIN_CONFIG_FILE=plugins/config.yaml +# Optional defaults for mTLS when connecting to external MCP plugins (STREAMABLEHTTP transport) +# Provide file paths inside the container. Plugin-specific TLS blocks override these defaults. 
+# PLUGINS_MTLS_CA_BUNDLE=/app/certs/plugins/ca.crt +# PLUGINS_MTLS_CLIENT_CERT=/app/certs/plugins/gateway-client.pem +# PLUGINS_MTLS_CLIENT_KEY=/app/certs/plugins/gateway-client.key +# PLUGINS_MTLS_CLIENT_KEY_PASSWORD= +# PLUGINS_MTLS_VERIFY=true +# PLUGINS_MTLS_CHECK_HOSTNAME=true + ##################################### # Well-Known URI Configuration ##################################### diff --git a/README.md b/README.md index 753aa2a0e..c35ded0a4 100644 --- a/README.md +++ b/README.md @@ -1581,6 +1581,12 @@ MCP Gateway uses Alembic for database migrations. Common commands: | ------------------------------ | ------------------------------------------------ | --------------------- | ------- | | `PLUGINS_ENABLED` | Enable the plugin framework | `false` | bool | | `PLUGIN_CONFIG_FILE` | Path to main plugin configuration file | `plugins/config.yaml` | string | +| `PLUGINS_MTLS_CA_BUNDLE` | (Optional) default CA bundle for external plugin mTLS | _(empty)_ | string | +| `PLUGINS_MTLS_CLIENT_CERT` | (Optional) gateway client certificate for plugin mTLS | _(empty)_ | string | +| `PLUGINS_MTLS_CLIENT_KEY` | (Optional) gateway client key for plugin mTLS | _(empty)_ | string | +| `PLUGINS_MTLS_CLIENT_KEY_PASSWORD` | (Optional) password for plugin client key | _(empty)_ | string | +| `PLUGINS_MTLS_VERIFY` | (Optional) verify remote plugin certificates (`true`/`false`) | `true` | bool | +| `PLUGINS_MTLS_CHECK_HOSTNAME` | (Optional) enforce hostname verification for plugins | `true` | bool | | `PLUGINS_CLI_COMPLETION` | Enable auto-completion for plugins CLI | `false` | bool | | `PLUGINS_CLI_MARKUP_MODE` | Set markup mode for plugins CLI | (none) | `rich`, `markdown`, `disabled` | diff --git a/charts/mcp-stack/values.yaml b/charts/mcp-stack/values.yaml index 5d7078677..4c14863f4 100644 --- a/charts/mcp-stack/values.yaml +++ b/charts/mcp-stack/values.yaml @@ -267,6 +267,12 @@ mcpContextForge: # ─ Plugin Configuration ─ PLUGINS_ENABLED: "false" # enable the plugin framework 
PLUGIN_CONFIG_FILE: "plugins/config.yaml" # path to main plugin configuration file + PLUGINS_MTLS_CA_BUNDLE: "" # default CA bundle for external plugins (optional) + PLUGINS_MTLS_CLIENT_CERT: "" # gateway client certificate for plugin mTLS + PLUGINS_MTLS_CLIENT_KEY: "" # gateway client key for plugin mTLS (optional) + PLUGINS_MTLS_CLIENT_KEY_PASSWORD: "" # password for the plugin client key (optional) + PLUGINS_MTLS_VERIFY: "true" # verify remote plugin certificates + PLUGINS_MTLS_CHECK_HOSTNAME: "true" # enforce hostname verification when verifying certs PLUGINS_CLI_COMPLETION: "false" # enable auto-completion for plugins CLI PLUGINS_CLI_MARKUP_MODE: "" # set markup mode for plugins CLI diff --git a/docs/docs/architecture/plugin-spec/.pages b/docs/docs/architecture/plugin-spec/.pages index 9e8f3584a..0fb74117a 100644 --- a/docs/docs/architecture/plugin-spec/.pages +++ b/docs/docs/architecture/plugin-spec/.pages @@ -14,4 +14,4 @@ nav: - Performance: 11-performance.md - Development: 12-development.md - Testing: 13-testing.md - - Conclusion: 14-conclusion.md \ No newline at end of file + - Conclusion: 14-conclusion.md diff --git a/docs/docs/architecture/plugin-spec/01-architecture.md b/docs/docs/architecture/plugin-spec/01-architecture.md index b5f0b3266..11cce4ad3 100644 --- a/docs/docs/architecture/plugin-spec/01-architecture.md +++ b/docs/docs/architecture/plugin-spec/01-architecture.md @@ -75,4 +75,3 @@ mcpgateway/plugins/framework/ - Communicate via MCP protocol over various transports - 10-100ms latency depending on service and network - Examples: LlamaGuard, OpenAI Moderation, custom AI services - diff --git a/docs/docs/architecture/plugin-spec/02-core-components.md b/docs/docs/architecture/plugin-spec/02-core-components.md index 536bd3c66..39b28b3ed 100644 --- a/docs/docs/architecture/plugin-spec/02-core-components.md +++ b/docs/docs/architecture/plugin-spec/02-core-components.md @@ -3,7 +3,7 @@ ### 3.1 Plugin Base Class -The base plugin class, of which 
developers subclass and implement the hooks that are important for their plugins. Hook points are functions that appear interpose on existing MCP and agent-based functionality. +The base plugin class, of which developers subclass and implement the hooks that are important for their plugins. Hook points are functions that appear interpose on existing MCP and agent-based functionality. ```python class Plugin: @@ -122,4 +122,3 @@ class PluginInstanceRegistry: async def shutdown(self) -> None: """Shutdown all registered plugins""" ``` - diff --git a/docs/docs/architecture/plugin-spec/03-plugin-types.md b/docs/docs/architecture/plugin-spec/03-plugin-types.md index 8b58a598a..9616bcf6b 100644 --- a/docs/docs/architecture/plugin-spec/03-plugin-types.md +++ b/docs/docs/architecture/plugin-spec/03-plugin-types.md @@ -37,7 +37,7 @@ The configuration system supports both **native plugins** (running in-process) a ### 4.2 Plugin Configuration Schema -Below is an example of a plugin configuration file. A plugin configuration file can configure one or more plugins in a prioritized list as below. Each individual plugin is an instance of the of a plugin class that subclasses the base `Plugin` object and implements a set of hooks as listed in the configuration. +Below is an example of a plugin configuration file. A plugin configuration file can configure one or more plugins in a prioritized list as below. Each individual plugin is an instance of the of a plugin class that subclasses the base `Plugin` object and implements a set of hooks as listed in the configuration. ```yaml # plugins/config.yaml @@ -408,4 +408,3 @@ The manifest enables development tools to provide: - Follow established tag conventions within your organization The plugin manifest system provides a foundation for plugin ecosystem management, enabling better development workflows, automated tooling, and enhanced discoverability while maintaining consistency across plugin implementations. 
- diff --git a/docs/docs/architecture/plugin-spec/04-hook-architecture.md b/docs/docs/architecture/plugin-spec/04-hook-architecture.md index 2ccdffe23..09b3327bf 100644 --- a/docs/docs/architecture/plugin-spec/04-hook-architecture.md +++ b/docs/docs/architecture/plugin-spec/04-hook-architecture.md @@ -186,7 +186,7 @@ return PluginResult( **Processing Model**: -Plugin processing uses short circuiting to abort evaluation in the case of a violation and `continue_processing=False`. If the plugin needs to record side effects, such as the bookkeeping, these plugins should be executed first with the highest priority. +Plugin processing uses short circuiting to abort evaluation in the case of a violation and `continue_processing=False`. If the plugin needs to record side effects, such as the bookkeeping, these plugins should be executed first with the highest priority. ### 5.2 HTTP Header Hook Integration Example @@ -477,4 +477,3 @@ async def process_elicitation_response(self, response: ElicitationResponse) -> b return True ``` - diff --git a/docs/docs/architecture/plugin-spec/05-hook-system.md b/docs/docs/architecture/plugin-spec/05-hook-system.md index 07913e25c..cbadba56d 100644 --- a/docs/docs/architecture/plugin-spec/05-hook-system.md +++ b/docs/docs/architecture/plugin-spec/05-hook-system.md @@ -184,4 +184,3 @@ This document covers administrative operation hooks: - Gateway Federation Hooks - Peer gateway management *(Future)* - A2A Agent Hooks - Agent-to-Agent integration management *(Future)* - Entity Lifecycle Hooks - Tool, resource, and prompt registration *(Future)* - diff --git a/docs/docs/architecture/plugin-spec/06-gateway-hooks.md b/docs/docs/architecture/plugin-spec/06-gateway-hooks.md index 604ac5c9a..51411c86f 100644 --- a/docs/docs/architecture/plugin-spec/06-gateway-hooks.md +++ b/docs/docs/architecture/plugin-spec/06-gateway-hooks.md @@ -1667,4 +1667,3 @@ The gateway administrative hooks are organized into the following categories: - Implement proper 
timeout handling for elicitations - Cache frequently accessed data (permissions, quotas) - Use background tasks for non-critical operations - diff --git a/docs/docs/architecture/plugin-spec/07-security-hooks.md b/docs/docs/architecture/plugin-spec/07-security-hooks.md index bc6d65475..cd789c9d3 100644 --- a/docs/docs/architecture/plugin-spec/07-security-hooks.md +++ b/docs/docs/architecture/plugin-spec/07-security-hooks.md @@ -758,4 +758,3 @@ async def resource_post_fetch(self, payload: ResourcePostFetchPayload, context: - Resource post-fetch may take longer due to content processing - Plugin execution is sequential within priority bands - Failed plugins don't affect other plugins (isolation) - diff --git a/docs/docs/architecture/plugin-spec/08-external-plugins.md b/docs/docs/architecture/plugin-spec/08-external-plugins.md index 6150b999d..3c318d667 100644 --- a/docs/docs/architecture/plugin-spec/08-external-plugins.md +++ b/docs/docs/architecture/plugin-spec/08-external-plugins.md @@ -112,6 +112,11 @@ plugins: mcp: proto: "STREAMABLEHTTP" url: "http://localhost:8000/mcp" + # Optional TLS block when the remote server requires mTLS + # tls: + # ca_bundle: /app/certs/plugins/ca.crt + # client_cert: /app/certs/plugins/gateway-client.pem + # client_key: /app/certs/plugins/gateway-client.key ``` ### 7.2 MCP Protocol Integration @@ -178,10 +183,21 @@ plugins: mcp: proto: "STREAMABLEHTTP" # Transport protocol url: "http://openai-plugin:3000/mcp" # Server URL + # Optional mutual TLS configuration + # tls: + # ca_bundle: /app/certs/plugins/ca.crt + # client_cert: /app/certs/plugins/gateway-client.pem + # verify: true # Optional authentication auth: type: "bearer" token: "${OPENAI_API_KEY}" + +If you prefer centralised defaults, set the environment variables +`PLUGINS_MTLS_CA_BUNDLE`, `PLUGINS_MTLS_CLIENT_CERT`, and related +settings. 
These values apply whenever a plugin omits its own `tls` +section, allowing a single gateway-wide certificate bundle to be reused +across multiple external plugins. ``` ### 7.5 MCP Transport Types diff --git a/docs/docs/architecture/plugin-spec/09-security.md b/docs/docs/architecture/plugin-spec/09-security.md index eca593942..937678659 100644 --- a/docs/docs/architecture/plugin-spec/09-security.md +++ b/docs/docs/architecture/plugin-spec/09-security.md @@ -65,4 +65,3 @@ except Exception as e: raise PluginError(f"Plugin error: {plugin.name}") # Continue with next plugin in permissive mode ``` - diff --git a/docs/docs/architecture/plugin-spec/10-error-handling.md b/docs/docs/architecture/plugin-spec/10-error-handling.md index de053265f..fa301066c 100644 --- a/docs/docs/architecture/plugin-spec/10-error-handling.md +++ b/docs/docs/architecture/plugin-spec/10-error-handling.md @@ -414,4 +414,3 @@ async def execute(self, plugins: list[PluginRef], ...) -> tuple[PluginResult[T], raise PluginError(f"Plugin error: {plugin.name}") # Continue with next plugin ``` - diff --git a/docs/docs/architecture/plugin-spec/11-performance.md b/docs/docs/architecture/plugin-spec/11-performance.md index 0e502dc07..34486714e 100644 --- a/docs/docs/architecture/plugin-spec/11-performance.md +++ b/docs/docs/architecture/plugin-spec/11-performance.md @@ -14,4 +14,3 @@ - **Context management**: Handle 10,000+ concurrent request contexts - **Memory usage**: Base framework overhead <5MB - **Plugin loading**: Initialize plugins in <10 seconds - diff --git a/docs/docs/architecture/plugin-spec/12-development.md b/docs/docs/architecture/plugin-spec/12-development.md index 3dd281706..8179d5ed0 100644 --- a/docs/docs/architecture/plugin-spec/12-development.md +++ b/docs/docs/architecture/plugin-spec/12-development.md @@ -286,4 +286,3 @@ class TestMyPlugin: - Include execution metrics - Provide health check endpoints - Support debugging modes - diff --git 
a/docs/docs/architecture/plugin-spec/13-testing.md b/docs/docs/architecture/plugin-spec/13-testing.md index a7dede5bc..4fbe55f89 100644 --- a/docs/docs/architecture/plugin-spec/13-testing.md +++ b/docs/docs/architecture/plugin-spec/13-testing.md @@ -22,4 +22,3 @@ The plugin framework provides comprehensive testing support across multiple leve - Validate external plugin communication - Performance and load testing - Security validation - diff --git a/docs/docs/architecture/plugin-spec/14-conclusion.md b/docs/docs/architecture/plugin-spec/14-conclusion.md index 8f4caccf3..e8dd58b8f 100644 --- a/docs/docs/architecture/plugin-spec/14-conclusion.md +++ b/docs/docs/architecture/plugin-spec/14-conclusion.md @@ -36,4 +36,3 @@ This specification defines a comprehensive, production-ready plugin framework fo This specification serves as the definitive guide for developing, deploying, and operating plugins within the MCP Context Forge ecosystem, ensuring consistency, security, and performance across all plugin implementations. 
**Document Version**: 1.0 - diff --git a/docs/docs/architecture/plugin-spec/plugin-framework-specification.md b/docs/docs/architecture/plugin-spec/plugin-framework-specification.md index deb2cf1e5..0dc10f525 100644 --- a/docs/docs/architecture/plugin-spec/plugin-framework-specification.md +++ b/docs/docs/architecture/plugin-spec/plugin-framework-specification.md @@ -57,4 +57,3 @@ This specification covers: - **Plugin Manager**: Core service managing plugin lifecycle and execution - **Plugin Context**: Request-scoped state shared between plugins - **Plugin Configuration**: YAML-based plugin setup and parameters - diff --git a/docs/docs/architecture/plugins.md b/docs/docs/architecture/plugins.md index c874fc313..b4d63668b 100644 --- a/docs/docs/architecture/plugins.md +++ b/docs/docs/architecture/plugins.md @@ -839,6 +839,9 @@ plugins: mcp: proto: "STREAMABLEHTTP" url: "http://nodejs-plugin:3000/mcp" + # tls: + # ca_bundle: /app/certs/plugins/ca.crt + # client_cert: /app/certs/plugins/gateway-client.pem # Go plugin - name: "HighPerformanceFilter" @@ -853,6 +856,12 @@ plugins: mcp: proto: "STREAMABLEHTTP" url: "http://rust-plugin:8080/mcp" + # tls: + # verify: true + +Gateway-wide defaults for these TLS options can be supplied via the +`PLUGINS_MTLS_*` environment variables when you want every external +plugin to share the same client certificate and CA bundle. ``` ## Remote Plugin MCP Server Integration diff --git a/docs/docs/index.md b/docs/docs/index.md index 379b4c88d..3bc72c995 100644 --- a/docs/docs/index.md +++ b/docs/docs/index.md @@ -1435,6 +1435,12 @@ MCP Gateway uses Alembic for database migrations. 
Common commands: | ------------------------------ | ------------------------------------------------ | --------------------- | ------- | | `PLUGINS_ENABLED` | Enable the plugin framework | `false` | bool | | `PLUGIN_CONFIG_FILE` | Path to main plugin configuration file | `plugins/config.yaml` | string | +| `PLUGINS_MTLS_CA_BUNDLE` | (Optional) default CA bundle for external plugin mTLS | _(empty)_ | string | +| `PLUGINS_MTLS_CLIENT_CERT` | (Optional) gateway client certificate for plugin mTLS | _(empty)_ | string | +| `PLUGINS_MTLS_CLIENT_KEY` | (Optional) gateway client key for plugin mTLS | _(empty)_ | string | +| `PLUGINS_MTLS_CLIENT_KEY_PASSWORD` | (Optional) password for plugin client key | _(empty)_ | string | +| `PLUGINS_MTLS_VERIFY` | (Optional) verify remote plugin certificates (`true`/`false`) | `true` | bool | +| `PLUGINS_MTLS_CHECK_HOSTNAME` | (Optional) enforce hostname verification for plugins | `true` | bool | | `PLUGINS_CLI_COMPLETION` | Enable auto-completion for plugins CLI | `false` | bool | | `PLUGINS_CLI_MARKUP_MODE` | Set markup mode for plugins CLI | (none) | `rich`, `markdown`, `disabled` | diff --git a/docs/docs/manage/mtls.md b/docs/docs/manage/mtls.md new file mode 100644 index 000000000..e02ed047f --- /dev/null +++ b/docs/docs/manage/mtls.md @@ -0,0 +1,943 @@ +# mTLS (Mutual TLS) Configuration + +Configure mutual TLS authentication for MCP Gateway to enable certificate-based client authentication and enhanced security. + +## Overview + +Mutual TLS (mTLS) provides bidirectional authentication between clients and servers using X.509 certificates. While native mTLS support is in development ([#568](https://github.com/IBM/mcp-context-forge/issues/568)), MCP Gateway can leverage reverse proxies for production-ready mTLS today. 
+ +## Current Status + +- **Native mTLS**: 🚧 In Progress - tracked in [#568](https://github.com/IBM/mcp-context-forge/issues/568) +- **Proxy-based mTLS**: βœ… Available - using Nginx, Caddy, or other reverse proxies +- **Container Support**: βœ… Ready - lightweight containers support proxy deployment + +## Architecture + +```mermaid +sequenceDiagram + participant Client + participant Proxy as Reverse Proxy
(Nginx/Caddy) + participant Gateway as MCP Gateway + participant MCP as MCP Server + + Client->>Proxy: TLS Handshake
+ Client Certificate + Proxy->>Proxy: Verify Client Cert + Proxy->>Gateway: HTTP + X-SSL Headers + Gateway->>Gateway: Extract User from Headers + Gateway->>MCP: Forward Request + MCP-->>Gateway: Response + Gateway-->>Proxy: Response + Proxy-->>Client: TLS Response +``` + +## Quick Start + +### Option 1: Docker Compose with Nginx mTLS + +1. **Generate certificates** (for testing): + +```bash +# Create certificates directory +mkdir -p certs/mtls + +# Generate CA certificate +openssl req -x509 -newkey rsa:4096 -days 365 -nodes \ + -keyout certs/mtls/ca.key -out certs/mtls/ca.crt \ + -subj "/C=US/ST=State/L=City/O=MCP-CA/CN=MCP Root CA" + +# Generate server certificate +openssl req -newkey rsa:4096 -nodes \ + -keyout certs/mtls/server.key -out certs/mtls/server.csr \ + -subj "/CN=gateway.local" + +openssl x509 -req -in certs/mtls/server.csr \ + -CA certs/mtls/ca.crt -CAkey certs/mtls/ca.key \ + -CAcreateserial -out certs/mtls/server.crt -days 365 + +# Generate client certificate +openssl req -newkey rsa:4096 -nodes \ + -keyout certs/mtls/client.key -out certs/mtls/client.csr \ + -subj "/CN=admin@example.com" + +openssl x509 -req -in certs/mtls/client.csr \ + -CA certs/mtls/ca.crt -CAkey certs/mtls/ca.key \ + -CAcreateserial -out certs/mtls/client.crt -days 365 + +# Create client bundle for testing +cat certs/mtls/client.crt certs/mtls/client.key > certs/mtls/client.pem +``` + +2. 
**Create Nginx configuration** (`nginx-mtls.conf`): + +```nginx +events { + worker_connections 1024; +} + +http { + upstream mcp_gateway { + server gateway:4444; + } + + server { + listen 443 ssl; + server_name gateway.local; + + # Server certificates + ssl_certificate /etc/nginx/certs/server.crt; + ssl_certificate_key /etc/nginx/certs/server.key; + + # mTLS client verification + ssl_client_certificate /etc/nginx/certs/ca.crt; + ssl_verify_client on; + ssl_verify_depth 2; + + # Strong TLS settings + ssl_protocols TLSv1.2 TLSv1.3; + ssl_ciphers HIGH:!aNULL:!MD5; + ssl_prefer_server_ciphers on; + + location / { + proxy_pass http://mcp_gateway; + proxy_http_version 1.1; + + # Pass client certificate info to MCP Gateway + proxy_set_header X-SSL-Client-Cert $ssl_client_escaped_cert; + proxy_set_header X-SSL-Client-S-DN $ssl_client_s_dn; + proxy_set_header X-SSL-Client-S-DN-CN $ssl_client_s_dn_cn; + proxy_set_header X-SSL-Client-Verify $ssl_client_verify; + proxy_set_header X-Authenticated-User $ssl_client_s_dn_cn; + + # Standard proxy headers + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + + # WebSocket support + location /ws { + proxy_pass http://mcp_gateway; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + proxy_set_header X-SSL-Client-S-DN-CN $ssl_client_s_dn_cn; + proxy_set_header X-Authenticated-User $ssl_client_s_dn_cn; + } + + # SSE support + location ~ ^/servers/.*/sse$ { + proxy_pass http://mcp_gateway; + proxy_http_version 1.1; + proxy_set_header X-SSL-Client-S-DN-CN $ssl_client_s_dn_cn; + proxy_set_header X-Authenticated-User $ssl_client_s_dn_cn; + proxy_set_header Connection ""; + proxy_buffering off; + proxy_cache off; + } + } +} +``` + +3. 
**Create Docker Compose file** (`docker-compose-mtls.yml`): + +```yaml +version: '3.8' + +services: + nginx-mtls: + image: nginx:alpine + ports: + - "443:443" + volumes: + - ./nginx-mtls.conf:/etc/nginx/nginx.conf:ro + - ./certs/mtls:/etc/nginx/certs:ro + networks: + - mcpnet + depends_on: + - gateway + + gateway: + image: ghcr.io/ibm/mcp-context-forge:latest + environment: + - HOST=0.0.0.0 + - PORT=4444 + - DATABASE_URL=sqlite:////app/data/mcp.db + + # Disable JWT auth and trust proxy headers + - MCP_CLIENT_AUTH_ENABLED=false + - TRUST_PROXY_AUTH=true + - PROXY_USER_HEADER=X-SSL-Client-S-DN-CN + + # Keep admin UI protected + - AUTH_REQUIRED=true + - BASIC_AUTH_USER=admin + - BASIC_AUTH_PASSWORD=changeme + + # Enable admin features + - MCPGATEWAY_UI_ENABLED=true + - MCPGATEWAY_ADMIN_API_ENABLED=true + networks: + - mcpnet + volumes: + - ./data:/app/data # persists SQLite database at /app/data/mcp.db + +networks: + mcpnet: + driver: bridge +``` +> πŸ’Ύ Run `mkdir -p data` before `docker-compose up` so the SQLite database survives restarts. + + +4. **Test the connection**: + +```bash +# Start the services +docker-compose -f docker-compose-mtls.yml up -d + +# Test with client certificate +curl --cert certs/mtls/client.pem \ + --cacert certs/mtls/ca.crt \ + https://localhost/health + +# Test without certificate (should fail) +curl https://localhost/health +# Error: SSL certificate problem +``` + +### Option 2: Caddy with mTLS + +1. 
**Create Caddyfile** (`Caddyfile.mtls`): + +```caddyfile +{ + # Global options + debug +} + +gateway.local { + # Enable mTLS + tls { + client_auth { + mode require_and_verify + trusted_ca_cert_file /etc/caddy/certs/ca.crt + } + } + + # Reverse proxy to MCP Gateway + reverse_proxy gateway:4444 { + # Pass certificate info as headers + header_up X-SSL-Client-Cert {http.request.tls.client.certificate_pem_escaped} + header_up X-SSL-Client-S-DN {http.request.tls.client.subject} + header_up X-SSL-Client-S-DN-CN {http.request.tls.client.subject_cn} + header_up X-Authenticated-User {http.request.tls.client.subject_cn} + + # WebSocket support + @websocket { + header Connection *Upgrade* + header Upgrade websocket + } + transport http { + versions 1.1 + } + } +} +``` + +2. **Docker Compose with Caddy**: + +```yaml +version: '3.8' + +services: + caddy-mtls: + image: caddy:alpine + ports: + - "443:443" + volumes: + - ./Caddyfile.mtls:/etc/caddy/Caddyfile:ro + - ./certs/mtls:/etc/caddy/certs:ro + - caddy_data:/data + - caddy_config:/config + networks: + - mcpnet + depends_on: + - gateway + + gateway: + # Same configuration as Nginx example + image: ghcr.io/ibm/mcp-context-forge:latest + environment: + - MCP_CLIENT_AUTH_ENABLED=false + - TRUST_PROXY_AUTH=true + - PROXY_USER_HEADER=X-SSL-Client-S-DN-CN + # ... rest of config ... 
+ networks: + - mcpnet + +volumes: + caddy_data: + caddy_config: + +networks: + mcpnet: + driver: bridge +``` + +## Production Configuration + +### Enterprise PKI Integration + +For production deployments, integrate with your enterprise PKI: + +```nginx +# nginx.conf - Enterprise PKI +server { + listen 443 ssl; + + # Server certificates from enterprise CA + ssl_certificate /etc/pki/tls/certs/gateway.crt; + ssl_certificate_key /etc/pki/tls/private/gateway.key; + + # Client CA chain + ssl_client_certificate /etc/pki/tls/certs/enterprise-ca-chain.crt; + ssl_verify_client on; + ssl_verify_depth 3; + + # CRL verification + ssl_crl /etc/pki/tls/crl/enterprise.crl; + + # OCSP stapling + ssl_stapling on; + ssl_stapling_verify on; + ssl_trusted_certificate /etc/pki/tls/certs/enterprise-ca-chain.crt; + + location / { + proxy_pass http://mcp-gateway:4444; + + # Extract user from certificate DN + if ($ssl_client_s_dn ~ /CN=([^\/]+)/) { + set $cert_cn $1; + } + proxy_set_header X-Authenticated-User $cert_cn; + + # Extract organization + if ($ssl_client_s_dn ~ /O=([^\/]+)/) { + set $cert_org $1; + } + proxy_set_header X-User-Organization $cert_org; + } +} +``` + +### Kubernetes Deployment Options + +### Option 1: Helm Chart with TLS Ingress + +The MCP Gateway Helm chart (`charts/mcp-stack`) includes built-in TLS support via Ingress: + +```bash +# Install with TLS enabled +helm install mcp-gateway ./charts/mcp-stack \ + --set mcpContextForge.ingress.enabled=true \ + --set mcpContextForge.ingress.host=gateway.example.com \ + --set mcpContextForge.ingress.tls.enabled=true \ + --set mcpContextForge.ingress.tls.secretName=gateway-tls \ + --set mcpContextForge.ingress.annotations."cert-manager\.io/cluster-issuer"=letsencrypt-prod \ + --set mcpContextForge.ingress.annotations."nginx.ingress.kubernetes.io/auth-tls-secret"=mcp-system/gateway-client-ca \ + --set mcpContextForge.ingress.annotations."nginx.ingress.kubernetes.io/auth-tls-verify-client"=on \ + --set 
mcpContextForge.ingress.annotations."nginx.ingress.kubernetes.io/auth-tls-verify-depth"="2" \ + --set mcpContextForge.ingress.annotations."nginx.ingress.kubernetes.io/auth-tls-pass-certificate-to-upstream"="true" +``` + + +> ℹ️ The configuration snippet that forwards the client CN is easier to maintain in `values.yaml`; the one-liner above focuses on core flags. + +Or configure via `values.yaml`: + +```yaml +# charts/mcp-stack/values.yaml excerpt +mcpContextForge: + ingress: + enabled: true + className: nginx + host: gateway.example.com + annotations: + cert-manager.io/cluster-issuer: letsencrypt-prod + nginx.ingress.kubernetes.io/auth-tls-secret: mcp-system/gateway-client-ca + nginx.ingress.kubernetes.io/auth-tls-verify-client: "on" + nginx.ingress.kubernetes.io/auth-tls-verify-depth: "2" + nginx.ingress.kubernetes.io/auth-tls-pass-certificate-to-upstream: "true" + nginx.ingress.kubernetes.io/configuration-snippet: | + proxy_set_header X-SSL-Client-S-DN $ssl_client_s_dn; + proxy_set_header X-SSL-Client-S-DN-CN $ssl_client_s_dn_cn; + proxy_set_header X-Authenticated-User $ssl_client_s_dn_cn; + tls: + enabled: true + secretName: gateway-tls # cert-manager will generate this + + secret: + MCP_CLIENT_AUTH_ENABLED: "false" + TRUST_PROXY_AUTH: "true" + PROXY_USER_HEADER: X-SSL-Client-S-DN-CN +``` + +Create the `gateway-client-ca` secret in the same namespace as the release so the Ingress controller can validate client certificates. 
For example: + +```bash +kubectl create secret generic gateway-client-ca \ + --from-file=ca.crt=certs/mtls/ca.crt \ + --namespace mcp-system +``` + +### Option 2: Kubernetes with Istio mTLS + +Deploy MCP Gateway with automatic mTLS in Istio service mesh: + +```yaml +# gateway-deployment.yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + name: mcp-gateway + namespace: mcp-system +spec: + template: + metadata: + labels: + app: mcp-gateway + annotations: + sidecar.istio.io/inject: "true" + spec: + containers: + - name: mcp-gateway + image: ghcr.io/ibm/mcp-context-forge:latest + env: + - name: MCP_CLIENT_AUTH_ENABLED + value: "false" + - name: TRUST_PROXY_AUTH + value: "true" + - name: PROXY_USER_HEADER + value: "X-SSL-Client-S-DN-CN" +--- +# peer-authentication.yaml +apiVersion: security.istio.io/v1beta1 +kind: PeerAuthentication +metadata: + name: mcp-gateway-mtls + namespace: mcp-system +spec: + selector: + matchLabels: + app: mcp-gateway + mtls: + mode: STRICT +``` + +Istio does not add `X-SSL-Client-S-DN-CN` automatically. 
Use an `EnvoyFilter` to extract the client certificate common name and forward it as the header referenced by `PROXY_USER_HEADER`: + +```yaml +# envoy-filter-client-cn.yaml +apiVersion: networking.istio.io/v1alpha3 +kind: EnvoyFilter +metadata: + name: append-client-cn-header + namespace: mcp-system +spec: + workloadSelector: + labels: + app: mcp-gateway + configPatches: + - applyTo: HTTP_FILTER + match: + context: SIDECAR_INBOUND + listener: + portNumber: 4444 + filterChain: + filter: + name: envoy.filters.network.http_connection_manager + patch: + operation: INSERT_BEFORE + value: + name: envoy.filters.http.lua + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.http.lua.v3.Lua + inlineCode: | + function envoy_on_request(handle) + local ssl = handle:streamInfo():downstreamSslConnection() + if ssl ~= nil and ssl:peerCertificatePresented() then + local subject = ssl:subjectPeerCertificate() + if subject ~= nil then + local cn = subject:match("CN=([^,/]+)") + if cn ~= nil then + handle:headers():replace("X-SSL-Client-S-DN-CN", cn) + end + end + end + end + function envoy_on_response(handle) + end +``` + +The filter runs in the sidecar and ensures the gateway receives the client's common name rather than the full certificate payload. 
+ +### HAProxy with mTLS + +```haproxy +# haproxy.cfg +global + ssl-default-bind-options ssl-min-ver TLSv1.2 + tune.ssl.default-dh-param 2048 + +frontend mcp_gateway_mtls + bind *:443 ssl crt /etc/haproxy/certs/server.pem ca-file /etc/haproxy/certs/ca.crt verify required + + # Extract certificate information + http-request set-header X-SSL-Client-Cert %[ssl_c_der,base64] + http-request set-header X-SSL-Client-S-DN %[ssl_c_s_dn] + http-request set-header X-SSL-Client-S-DN-CN %[ssl_c_s_dn(CN)] + http-request set-header X-Authenticated-User %[ssl_c_s_dn(CN)] + + default_backend mcp_gateway_backend + +backend mcp_gateway_backend + server gateway gateway:4444 check +``` + +## Certificate Management + +### Certificate Generation Scripts + +Create a script for certificate management (`generate-certs.sh`): + +```bash +#!/bin/bash +set -e + +CERT_DIR="${CERT_DIR:-./certs/mtls}" +CA_DAYS="${CA_DAYS:-3650}" +CERT_DAYS="${CERT_DAYS:-365}" +KEY_SIZE="${KEY_SIZE:-4096}" + +mkdir -p "$CERT_DIR" + +# Generate CA if it doesn't exist +if [ ! -f "$CERT_DIR/ca.crt" ]; then + echo "Generating CA certificate..." + openssl req -x509 -newkey rsa:$KEY_SIZE -days $CA_DAYS -nodes \ + -keyout "$CERT_DIR/ca.key" -out "$CERT_DIR/ca.crt" \ + -subj "/C=US/ST=State/L=City/O=Organization/CN=MCP CA" + echo "CA certificate generated." +fi + +# Function to generate certificates +generate_cert() { + local name=$1 + local cn=$2 + + if [ -f "$CERT_DIR/${name}.crt" ]; then + echo "Certificate for $name already exists, skipping..." + return + fi + + echo "Generating certificate for $name (CN=$cn)..." 
+ + # Generate private key and CSR + openssl req -newkey rsa:$KEY_SIZE -nodes \ + -keyout "$CERT_DIR/${name}.key" -out "$CERT_DIR/${name}.csr" \ + -subj "/CN=$cn" + + # Sign with CA + openssl x509 -req -in "$CERT_DIR/${name}.csr" \ + -CA "$CERT_DIR/ca.crt" -CAkey "$CERT_DIR/ca.key" \ + -CAcreateserial -out "$CERT_DIR/${name}.crt" -days $CERT_DAYS \ + -extfile <(echo "subjectAltName=DNS:$cn") + + # Create bundle + cat "$CERT_DIR/${name}.crt" "$CERT_DIR/${name}.key" > "$CERT_DIR/${name}.pem" + + # Clean up CSR + rm "$CERT_DIR/${name}.csr" + + echo "Certificate for $name generated." +} + +# Generate server certificate +generate_cert "server" "gateway.local" + +# Generate client certificates +generate_cert "admin" "admin@example.com" +generate_cert "user1" "user1@example.com" +generate_cert "service-account" "mcp-service@example.com" + +echo "All certificates generated in $CERT_DIR" +``` + +### Certificate Rotation + +Implement automatic certificate rotation: + +```yaml +# kubernetes CronJob for cert rotation +apiVersion: batch/v1 +kind: CronJob +metadata: + name: cert-rotation + namespace: mcp-system +spec: + schedule: "0 2 * * *" # Daily at 2 AM + jobTemplate: + spec: + template: + spec: + serviceAccountName: cert-rotation + containers: + - name: cert-rotator + image: bitnami/kubectl:1.30 + command: + - /bin/sh + - -c + - | + set -euo pipefail + SECRET_NAME=${CERT_SECRET:-gateway-tls} + CERT_NAME=${CERT_NAME:-gateway-tls-cert} + NAMESPACE=${TARGET_NAMESPACE:-mcp-system} + TLS_CERT=$(kubectl get secret "$SECRET_NAME" -n "$NAMESPACE" -o jsonpath='{.data.tls\.crt}') + if [ -z "$TLS_CERT" ]; then + echo "TLS secret $SECRET_NAME missing or empty" + exit 1 + fi + echo "$TLS_CERT" | base64 -d > /tmp/current.crt + if openssl x509 -checkend 604800 -noout -in /tmp/current.crt; then + echo "Certificate valid for more than 7 days" + else + echo "Certificate expiring soon, requesting renewal" + kubectl cert-manager renew "$CERT_NAME" -n "$NAMESPACE" || echo "Install the 
kubectl-cert_manager plugin inside the job image to enable automatic renewal" + fi + env: + - name: CERT_SECRET + value: gateway-tls + - name: CERT_NAME + value: gateway-tls-cert + - name: TARGET_NAMESPACE + value: mcp-system + volumeMounts: + - name: tmp + mountPath: /tmp + restartPolicy: OnFailure + volumes: + - name: tmp + emptyDir: {} +``` + +Create a `ServiceAccount`, `Role`, and `RoleBinding` that grant `get` access to the TLS secret and `update` access to the related `Certificate` resource so the job can request renewals. + + +> πŸ”§ Install the [`kubectl-cert_manager` plugin](https://cert-manager.io/docs/reference/kubectl-plugin/) or swap the command for `cmctl renew` if you prefer Jetstack's CLI image, and ensure your job image bundles both `kubectl` and `openssl`. + +## mTLS for External MCP Plugins + +External plugins that use the `STREAMABLEHTTP` transport now support mutual TLS directly from the gateway. This is optionalβ€”if you skip the configuration below, the gateway continues to call plugins exactly as before. Enabling mTLS lets you restrict remote plugin servers so they only accept connections from gateways presenting a trusted client certificate. + +### 1. Issue Certificates for the Remote Plugin + +Reuse the same CA you generated earlier or provision a dedicated one. 
Create a **server** certificate for the remote plugin endpoint and a **client** certificate for the MCP Gateway: + +```bash +# Server cert for the remote plugin (served by your reverse proxy/mcp server) +openssl req -newkey rsa:4096 -nodes \ + -keyout certs/plugins/remote.key -out certs/plugins/remote.csr \ + -subj "/CN=plugins.internal.example.com" + +openssl x509 -req -in certs/plugins/remote.csr \ + -CA certs/mtls/ca.crt -CAkey certs/mtls/ca.key \ + -CAcreateserial -out certs/plugins/remote.crt -days 365 \ + -extfile <(echo "subjectAltName=DNS:plugins.internal.example.com") + +# Client cert for the gateway +openssl req -newkey rsa:4096 -nodes \ + -keyout certs/plugins/gateway-client.key -out certs/plugins/gateway-client.csr \ + -subj "/CN=mcpgateway" + +openssl x509 -req -in certs/plugins/gateway-client.csr \ + -CA certs/mtls/ca.crt -CAkey certs/mtls/ca.key \ + -CAcreateserial -out certs/plugins/gateway-client.crt -days 365 + +cat certs/plugins/gateway-client.crt certs/plugins/gateway-client.key > certs/plugins/gateway-client.pem +``` + +### 2. Protect the Remote Plugin with mTLS + +Front the remote MCP plugin with a reverse proxy (Nginx, Caddy, Envoy, etc.) that enforces client certificate verification using the CA above. Example Nginx snippet: + +```nginx +server { + listen 9443 ssl; + server_name plugins.internal.example.com; + + ssl_certificate /etc/ssl/private/remote.crt; + ssl_certificate_key /etc/ssl/private/remote.key; + ssl_client_certificate /etc/ssl/private/ca.crt; + ssl_verify_client on; + + location /mcp { + proxy_pass http://plugin-runtime:8000/mcp; + proxy_set_header Host $host; + proxy_set_header X-Forwarded-Proto https; + } +} +``` + +### 3. 
Mount Certificates into the Gateway + +Expose the CA bundle and gateway client certificate to the gateway container: + +```yaml +# docker-compose override + gateway: + volumes: + - ./certs/plugins:/app/certs/plugins:ro + +# Kubernetes deployment (snippet) +volumeMounts: + - name: plugin-mtls + mountPath: /app/certs/plugins + readOnly: true +volumes: + - name: plugin-mtls + secret: + secretName: gateway-plugin-mtls +``` + +### 4. Configure the Plugin Entry + +Use the new `mcp.tls` block in `plugins/config.yaml` (or the Admin UI) to point the gateway at the certificates. Example external plugin definition: + +```yaml +plugins: + - name: "LlamaGuardSafety" + kind: "external" + hooks: ["prompt_pre_fetch", "tool_pre_invoke"] + mode: "enforce" + priority: 20 + mcp: + proto: STREAMABLEHTTP + url: https://plugins.internal.example.com:9443/mcp + tls: + ca_bundle: /app/certs/plugins/ca.crt + client_cert: /app/certs/plugins/gateway-client.pem + client_key: /app/certs/plugins/gateway-client.key # optional if PEM already bundles key + verify: true + check_hostname: true + + config: + policy: strict +``` + +**Key behavior** +- `verify` controls whether the gateway validates the remote server certificate. Leave `true` in production; set `false` only for local debugging. +- `ca_bundle` may point to a custom CA chain; omit it if the remote certificate chains to a system-trusted CA. +- `client_cert` must reference the gateway certificate. Provide `client_key` only when the key is stored separately. +- `check_hostname` defaults to `true`. Set it to `false` for scenarios where the certificate CN does not match the URL (not recommended outside testing). + +Restart the gateway after updating the config so the external plugin client reloads with the TLS settings. Watch the logs for `Connected to plugin MCP (http) server` to confirm a successful handshake; TLS errors will surface as plugin initialization failures. 
+ +> πŸ’‘ **Tip:** You can set gateway-wide defaults via `PLUGINS_MTLS_CA_BUNDLE`, +> `PLUGINS_MTLS_CLIENT_CERT`, `PLUGINS_MTLS_CLIENT_KEY`, and the other +> `PLUGINS_MTLS_*` environment variables. Any plugin without an explicit +> `tls` block will inherit these values automatically. + + +## Security Best Practices + +### 1. Certificate Validation + +```nginx +# Strict certificate validation +ssl_verify_client on; +ssl_verify_depth 2; + +# Check certificate validity +ssl_session_cache shared:SSL:10m; +ssl_session_timeout 10m; + +# Enable OCSP stapling +ssl_stapling on; +ssl_stapling_verify on; +resolver 8.8.8.8 8.8.4.4 valid=300s; +resolver_timeout 5s; +``` + +### 2. Certificate Pinning + +```python +# MCP Gateway plugin for cert pinning +class CertificatePinningPlugin: + def __init__(self): + self.pinned_certs = { + "admin@example.com": "sha256:HASH...", + "service@example.com": "sha256:HASH..." + } + + async def on_request(self, request): + cert_header = request.headers.get("X-SSL-Client-Cert") + if cert_header: + cert_hash = self.calculate_hash(cert_header) + user = request.headers.get("X-Authenticated-User") + + if user in self.pinned_certs: + if self.pinned_certs[user] != cert_hash: + raise SecurityException("Certificate pin mismatch") +``` + +### 3. Audit Logging + +Configure comprehensive audit logging for mTLS connections: + +```nginx +# nginx.conf - Audit logging +log_format mtls_audit '$remote_addr - $ssl_client_s_dn [$time_local] ' + '"$request" $status $body_bytes_sent ' + '"$http_user_agent" cert_verify:$ssl_client_verify'; + +access_log /var/log/nginx/mtls-audit.log mtls_audit; +``` + +### 4. 
Rate Limiting by Certificate + +```nginx +# Rate limit by certificate CN +limit_req_zone $ssl_client_s_dn_cn zone=cert_limit:10m rate=10r/s; + +location / { + limit_req zone=cert_limit burst=20 nodelay; + proxy_pass http://mcp-gateway; +} +``` + +## Monitoring & Troubleshooting + +### Health Checks + +```bash +# Check mTLS connectivity +openssl s_client -connect gateway.local:443 \ + -cert certs/mtls/client.crt \ + -key certs/mtls/client.key \ + -CAfile certs/mtls/ca.crt \ + -showcerts + +# Verify certificate +openssl x509 -in certs/mtls/client.crt -text -noout + +# Test with curl +curl -v --cert certs/mtls/client.pem \ + --cacert certs/mtls/ca.crt \ + https://gateway.local/health +``` + +### Common Issues + +| Issue | Cause | Solution | +|-------|-------|----------| +| `SSL certificate verify error` | Missing/invalid client cert | Ensure client cert is valid and signed by CA | +| `400 No required SSL certificate` | mTLS not configured | Check `ssl_verify_client on` in proxy | +| `X-Authenticated-User missing` | Header not passed | Verify proxy_set_header configuration | +| `Connection refused` | Service not running | Check docker-compose logs | +| `Certificate expired` | Cert past validity | Regenerate certificates | + +### Debug Logging + +Enable debug logging in your reverse proxy: + +```nginx +# nginx.conf +error_log /var/log/nginx/error.log debug; + +# Log SSL handshake details +ssl_session_cache shared:SSL:10m; +ssl_session_timeout 10m; +``` + +## Migration Path + +### From JWT to mTLS + +1. **Phase 1**: Deploy proxy with mTLS alongside existing JWT auth +2. **Phase 2**: Run dual-mode (both JWT and mTLS accepted) +3. **Phase 3**: Migrate all clients to certificates +4. 
**Phase 4**: Disable JWT, enforce mTLS only + +```yaml +# Dual-mode configuration +environment: + # Accept both methods during migration + - MCP_CLIENT_AUTH_ENABLED=true # Keep JWT active + - TRUST_PROXY_AUTH=true # Also trust proxy + - PROXY_USER_HEADER=X-SSL-Client-S-DN-CN +``` + +## Helm Chart Configuration + +The MCP Gateway Helm chart in `charts/mcp-stack/` provides extensive configuration options for TLS and security: + +### Key Security Settings in values.yaml + +```yaml +mcpContextForge: + # JWT Configuration - supports both HMAC and RSA + secret: + JWT_ALGORITHM: HS256 # or RS256 for asymmetric + JWT_SECRET_KEY: my-test-key # for HMAC algorithms + # For RSA/ECDSA, mount keys and set: + # JWT_PUBLIC_KEY_PATH: /app/certs/jwt/public.pem + # JWT_PRIVATE_KEY_PATH: /app/certs/jwt/private.pem + + # Security Headers (enabled by default) + config: + SECURITY_HEADERS_ENABLED: "true" + X_FRAME_OPTIONS: DENY + HSTS_ENABLED: "true" + HSTS_MAX_AGE: "31536000" + SECURE_COOKIES: "true" + + # Ingress with TLS + ingress: + enabled: true + tls: + enabled: true + secretName: gateway-tls +``` + +### Deploying with Helm and mTLS + +```bash +# Create namespace +kubectl create namespace mcp-gateway + +# Install with custom TLS settings +helm install mcp-gateway ./charts/mcp-stack \ + --namespace mcp-gateway \ + --set mcpContextForge.ingress.tls.enabled=true \ + --set mcpContextForge.secret.JWT_ALGORITHM=RS256 \ + --values custom-values.yaml +``` + +## Future Native mTLS Support + +When native mTLS support lands ([#568](https://github.com/IBM/mcp-context-forge/issues/568)), expect: + +- Direct TLS termination in MCP Gateway +- Certificate-based authorization policies +- Integration with enterprise PKI systems +- Built-in certificate validation and revocation checking +- Automatic certificate rotation +- Per-service certificate management + +## Related Documentation + +- [Proxy Authentication](./proxy.md) - Configuring proxy-based authentication +- [Security 
Features](../architecture/security-features.md) - Overall security architecture +- [Deployment Guide](../deployment/index.md) - Production deployment options +- [Authentication Overview](./securing.md) - All authentication methods diff --git a/llms/plugins-llms.md b/llms/plugins-llms.md index 2a1543180..c2a16c353 100644 --- a/llms/plugins-llms.md +++ b/llms/plugins-llms.md @@ -116,9 +116,12 @@ Plugins: How They Work in MCP Context Forge - name: "MyFilter" kind: "external" priority: 10 - mcp: - proto: STREAMABLEHTTP - url: http://localhost:8000/mcp + mcp: + proto: STREAMABLEHTTP + url: http://localhost:8000/mcp + # tls: + # ca_bundle: /app/certs/plugins/ca.crt + # client_cert: /app/certs/plugins/gateway-client.pem ``` - STDIO alternative: ```yaml @@ -129,7 +132,7 @@ Plugins: How They Work in MCP Context Forge proto: STDIO script: path/to/server.py ``` -- Enable framework in gateway: `.env` must set `PLUGINS_ENABLED=true` and optionally `PLUGIN_CONFIG_FILE=plugins/config.yaml`. +- Enable framework in gateway: `.env` must set `PLUGINS_ENABLED=true` and optionally `PLUGIN_CONFIG_FILE=plugins/config.yaml`. To reuse a gateway-wide mTLS client certificate for multiple external plugins, set `PLUGINS_MTLS_CA_BUNDLE`, `PLUGINS_MTLS_CLIENT_CERT`, and related `PLUGINS_MTLS_*` variables. Individual plugin `tls` blocks override these defaults. 
**Built‑in Plugins (Examples)** - `ArgumentNormalizer` (`plugins/argument_normalizer/argument_normalizer.py`) diff --git a/mcpgateway/plugins/framework/external/mcp/client.py b/mcpgateway/plugins/framework/external/mcp/client.py index 7facb160e..6b3d6a810 100644 --- a/mcpgateway/plugins/framework/external/mcp/client.py +++ b/mcpgateway/plugins/framework/external/mcp/client.py @@ -14,9 +14,11 @@ import json import logging import os +import ssl from typing import Any, Optional, Type, TypeVar # Third-Party +import httpx from mcp import ClientSession, StdioServerParameters from mcp.client.stdio import stdio_client from mcp.client.streamable_http import streamablehttp_client @@ -28,6 +30,7 @@ from mcpgateway.plugins.framework.errors import convert_exception_to_error, PluginError from mcpgateway.plugins.framework.models import ( HookType, + MCPTransportTLSConfig, PluginConfig, PluginContext, PluginErrorModel, @@ -144,8 +147,54 @@ async def __connect_to_http_server(self, uri: str) -> None: PluginError: if there is an external connection error. 
""" + plugin_tls = self._config.mcp.tls if self._config and self._config.mcp else None + tls_config = plugin_tls or MCPTransportTLSConfig.from_env() + + def _tls_httpx_client_factory( + headers: Optional[dict[str, str]] = None, + timeout: Optional[httpx.Timeout] = None, + auth: Optional[httpx.Auth] = None, + ) -> httpx.AsyncClient: + """Build an httpx client with TLS configuration for external MCP servers.""" + + kwargs: dict[str, Any] = {"follow_redirects": True} + if headers: + kwargs["headers"] = headers + kwargs["timeout"] = timeout or httpx.Timeout(30.0) + if auth is not None: + kwargs["auth"] = auth + + if not tls_config: + return httpx.AsyncClient(**kwargs) + + try: + ssl_context = ssl.create_default_context() + if not tls_config.verify: + ssl_context.check_hostname = False + ssl_context.verify_mode = ssl.CERT_NONE + else: + if tls_config.ca_bundle: + ssl_context.load_verify_locations(cafile=tls_config.ca_bundle) + if not tls_config.check_hostname: + ssl_context.check_hostname = False + + if tls_config.client_cert: + ssl_context.load_cert_chain( + certfile=tls_config.client_cert, + keyfile=tls_config.client_key, + password=tls_config.client_key_password, + ) + + kwargs["verify"] = ssl_context + except Exception as exc: # pylint: disable=broad-except + raise PluginError(error=PluginErrorModel(message=f"Failed configuring TLS for external plugin: {exc}", plugin_name=self.name)) from exc + + return httpx.AsyncClient(**kwargs) + try: - http_transport = await self._exit_stack.enter_async_context(streamablehttp_client(uri)) + client_factory = _tls_httpx_client_factory if tls_config else None + streamable_client = streamablehttp_client(uri, httpx_client_factory=client_factory) if client_factory else streamablehttp_client(uri) + http_transport = await self._exit_stack.enter_async_context(streamable_client) self._http, self._write, _ = http_transport self._session = await self._exit_stack.enter_async_context(ClientSession(self._http, self._write)) diff --git 
a/mcpgateway/plugins/framework/models.py b/mcpgateway/plugins/framework/models.py index 85950b1ce..344827965 100644 --- a/mcpgateway/plugins/framework/models.py +++ b/mcpgateway/plugins/framework/models.py @@ -11,6 +11,7 @@ # Standard from enum import Enum +import os from pathlib import Path from typing import Any, Generic, Optional, Self, TypeVar @@ -246,6 +247,88 @@ class AppliedTo(BaseModel): resources: Optional[list[ResourceTemplate]] = None +class MCPTransportTLSConfig(BaseModel): + """TLS configuration for HTTP-based MCP transports. + + Attributes: + verify (bool): Whether to verify the remote certificate chain. + ca_bundle (Optional[str]): Path to a CA bundle file used for verification. + client_cert (Optional[str]): Path to the PEM-encoded client certificate (can include key). + client_key (Optional[str]): Path to the PEM-encoded client private key when stored separately. + client_key_password (Optional[str]): Optional password for the private key file. + check_hostname (bool): Enable hostname verification (default: True). 
+ """ + + verify: bool = Field(default=True, description="Verify the upstream server certificate") + ca_bundle: Optional[str] = Field(default=None, description="Path to CA bundle for upstream verification") + client_cert: Optional[str] = Field(default=None, description="Path to PEM client certificate or bundle") + client_key: Optional[str] = Field(default=None, description="Path to PEM client private key (if separate)") + client_key_password: Optional[str] = Field(default=None, description="Password for the client key, when encrypted") + check_hostname: bool = Field(default=True, description="Enable hostname verification when verify is true") + + @field_validator("ca_bundle", "client_cert", "client_key", mode=AFTER) + @classmethod + def validate_path(cls, value: Optional[str]) -> Optional[str]: + """Expand and validate file paths supplied in TLS configuration.""" + + if not value: + return value + expanded = Path(value).expanduser() + if not expanded.is_file(): + raise ValueError(f"TLS file path does not exist: {value}") + return str(expanded) + + @model_validator(mode=AFTER) + def validate_client_cert(self) -> Self: # pylint: disable=bad-classmethod-argument + """Ensure TLS client certificate options are consistent.""" + + if self.client_key and not self.client_cert: + raise ValueError("client_key requires client_cert to be specified") + return self + + @staticmethod + def _parse_bool(value: Optional[str]) -> Optional[bool]: + """Convert a string environment value to boolean.""" + + if value is None: + return None + normalized = value.strip().lower() + if normalized in {"1", "true", "yes", "on"}: + return True + if normalized in {"0", "false", "no", "off"}: + return False + raise ValueError(f"Invalid boolean value: {value}") + + @classmethod + def from_env(cls) -> Optional["MCPTransportTLSConfig"]: + """Construct a TLS configuration from environment defaults.""" + + env = os.environ + data: dict[str, Any] = {} + + if env.get("PLUGINS_MTLS_CA_BUNDLE"): + 
data["ca_bundle"] = env["PLUGINS_MTLS_CA_BUNDLE"] + if env.get("PLUGINS_MTLS_CLIENT_CERT"): + data["client_cert"] = env["PLUGINS_MTLS_CLIENT_CERT"] + if env.get("PLUGINS_MTLS_CLIENT_KEY"): + data["client_key"] = env["PLUGINS_MTLS_CLIENT_KEY"] + if env.get("PLUGINS_MTLS_CLIENT_KEY_PASSWORD") is not None: + data["client_key_password"] = env["PLUGINS_MTLS_CLIENT_KEY_PASSWORD"] + + verify_val = cls._parse_bool(env.get("PLUGINS_MTLS_VERIFY")) + if verify_val is not None: + data["verify"] = verify_val + + check_hostname_val = cls._parse_bool(env.get("PLUGINS_MTLS_CHECK_HOSTNAME")) + if check_hostname_val is not None: + data["check_hostname"] = check_hostname_val + + if not data: + return None + + return cls(**data) + + class MCPConfig(BaseModel): """An MCP configuration for external MCP plugin objects. @@ -258,6 +341,7 @@ class MCPConfig(BaseModel): proto: TransportType url: Optional[str] = None script: Optional[str] = None + tls: Optional[MCPTransportTLSConfig] = None @field_validator(URL, mode=AFTER) @classmethod @@ -302,6 +386,14 @@ def validate_script(cls, script: str | None) -> str | None: raise ValueError(f"MCP server script {script} must have a .py or .sh suffix.") return script + @model_validator(mode=AFTER) + def validate_tls_usage(self) -> Self: # pylint: disable=bad-classmethod-argument + """Ensure TLS configuration is only used with HTTP-based transports.""" + + if self.tls and self.proto not in (TransportType.SSE, TransportType.STREAMABLEHTTP): + raise ValueError("TLS configuration is only valid for HTTP/SSE transports") + return self + class PluginConfig(BaseModel): """A plugin configuration. 
diff --git a/plugins/config.yaml b/plugins/config.yaml index e1a0ecb36..975825bd2 100644 --- a/plugins/config.yaml +++ b/plugins/config.yaml @@ -469,6 +469,10 @@ plugins: # mcp: # proto: STREAMABLEHTTP # url: http://127.0.0.1:8000/mcp + # # tls: + # # ca_bundle: /app/certs/plugins/ca.crt + # # client_cert: /app/certs/plugins/gateway-client.pem + # # verify: true # Circuit Breaker - trip on high error rates or consecutive failures - name: "CircuitBreaker" diff --git a/plugins/external/config.yaml b/plugins/external/config.yaml index d9632f3a9..f4fa1a4fb 100644 --- a/plugins/external/config.yaml +++ b/plugins/external/config.yaml @@ -6,6 +6,9 @@ plugins: mcp: proto: STREAMABLEHTTP url: http://127.0.0.1:3000/mcp + # tls: + # ca_bundle: /app/certs/plugins/ca.crt + # client_cert: /app/certs/plugins/gateway-client.pem - name: "OPAPluginFilter" kind: "external" @@ -13,6 +16,8 @@ plugins: mcp: proto: STREAMABLEHTTP url: http://127.0.0.1:8000/mcp + # tls: + # verify: true # Plugin directories to scan plugin_dirs: diff --git a/tests/unit/mcpgateway/plugins/framework/test_models_tls.py b/tests/unit/mcpgateway/plugins/framework/test_models_tls.py new file mode 100644 index 000000000..d61f8693a --- /dev/null +++ b/tests/unit/mcpgateway/plugins/framework/test_models_tls.py @@ -0,0 +1,114 @@ +"""Tests for TLS configuration on external MCP plugins.""" + +# Standard +from pathlib import Path + +# Third-Party +import pytest + +# First-Party +from mcpgateway.plugins.framework.models import MCPTransportTLSConfig, PluginConfig + + +def _write_pem(path: Path) -> str: + path.write_text("-----BEGIN 
CERTIFICATE-----\nMIIBszCCAVmgAwIBAgIJALICEFAKE000MA0GCSqGSIb3DQEBCwUAMBQxEjAQBgNV\nBAMMCXRlc3QtY2EwHhcNMjUwMTAxMDAwMDAwWhcNMjYwMTAxMDAwMDAwWjAUMRIw\nEAYDVQQDDAl0ZXN0LWNsaTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB\nALzM8FSo48ByKC16ecEsPpRghr7kDDLOZWisS+8mHb4RLzdrg5e8tRgFuBlbslUT\n8VE+j54v+J2mOv5u18CVeq4xjp1IqP/PpeL9Z8sY2XohGKVCUj8lMiMM6trXwPh3\n4nDXwG8hxhTZWOeAZv93FqMgBANpUAOC0yM5Ar+uSoC2Tbf3juDEnHiVNWdP6hJg\n38zrla9Yh+SPYj9m6z6wG6jZc37SaJnKI/v4ycq31wkK7S226gRA7i72H+eEt1Kp\nI5rkJ+6kkfgeJc8FvbB6c88T9EycneEW7Pm2Xp6gJdxeN1g2jeDJPnWc5Cj9VPYU\nCJPwy6DnKSmGA4MZij19+cUCAwEAAaNQME4wHQYDVR0OBBYEFL0CyJXw5CtP6Ls9\nVgn8BxwysA2fMB8GA1UdIwQYMBaAFL0CyJXw5CtP6Ls9Vgn8BxwysA2fMAwGA1Ud\nEwQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAIgUjACmJS4cGL7yp0T1vpuZi856\nG7k18Om8Ze9fJbVI1MBBxDWS5F9bNOn5z1ytgCMs9VXg7QibQPXlqprcM2aYJWaV\ndHZ92ohqzJ0EB1G2r8x5Fkw3O0mEWcJvl10FgUVHVGzi552MZGFMZ7DAMA4EAq/u\nsOUgWup8uLSyvvl7dao3rJ8k+YkBWkDu6eCKwQn3nNKFB5Bg9P6IKkmDdLhYodl/\nW1q/qmHZapCp8XDsrmS8skWsmcFJFU6f4VDOwdJaNiMgRGQpWlwO4dRw9xvyhsHc\nsOf0HWNvw60sX6Zav8HC0FzDGhGJkpyyU10BzpQLVEf5AEE7MkK5eeqi2+0=\n-----END CERTIFICATE-----\n", encoding="utf-8") + return str(path) + + +@pytest.mark.parametrize( + "verify", + [True, False], +) +def test_plugin_config_supports_tls_block(tmp_path, verify): + ca_path = Path(tmp_path) / "ca.crt" + client_bundle = Path(tmp_path) / "client.pem" + _write_pem(ca_path) + _write_pem(client_bundle) + + config = PluginConfig( + name="ExternalTLSPlugin", + kind="external", + hooks=["prompt_pre_fetch"], + mcp={ + "proto": "STREAMABLEHTTP", + "url": "https://plugins.internal.example.com/mcp", + "tls": { + "ca_bundle": str(ca_path), + "client_cert": str(client_bundle), + "verify": verify, + }, + }, + ) + + assert config.mcp is not None + assert config.mcp.tls is not None + assert config.mcp.tls.client_cert == str(client_bundle) + assert config.mcp.tls.verify == verify + + +def test_plugin_config_tls_missing_cert_raises(tmp_path): + ca_path = Path(tmp_path) / "ca.crt" + _write_pem(ca_path) + + 
with pytest.raises(ValueError): + PluginConfig( + name="ExternalTLSPlugin", + kind="external", + hooks=["prompt_pre_fetch"], + mcp={ + "proto": "STREAMABLEHTTP", + "url": "https://plugins.internal.example.com/mcp", + "tls": { + "client_key": str(ca_path), + }, + }, + ) + + +def test_plugin_config_tls_missing_file(tmp_path): + missing_path = Path(tmp_path) / "missing.crt" + + with pytest.raises(ValueError): + PluginConfig( + name="ExternalTLSPlugin", + kind="external", + hooks=["prompt_pre_fetch"], + mcp={ + "proto": "STREAMABLEHTTP", + "url": "https://plugins.internal.example.com/mcp", + "tls": { + "ca_bundle": str(missing_path), + }, + }, + ) + + +def test_tls_config_from_env_defaults(monkeypatch, tmp_path): + ca_path = Path(tmp_path) / "ca.crt" + client_cert = Path(tmp_path) / "client.pem" + _write_pem(ca_path) + _write_pem(client_cert) + + monkeypatch.setenv("PLUGINS_MTLS_CA_BUNDLE", str(ca_path)) + monkeypatch.setenv("PLUGINS_MTLS_CLIENT_CERT", str(client_cert)) + monkeypatch.setenv("PLUGINS_MTLS_VERIFY", "true") + monkeypatch.setenv("PLUGINS_MTLS_CHECK_HOSTNAME", "true") + + tls_config = MCPTransportTLSConfig.from_env() + + assert tls_config is not None + assert tls_config.ca_bundle == str(ca_path) + assert tls_config.client_cert == str(client_cert) + assert tls_config.verify is True + assert tls_config.check_hostname is True + + +def test_tls_config_from_env_returns_none(monkeypatch): + monkeypatch.delenv("PLUGINS_MTLS_CA_BUNDLE", raising=False) + monkeypatch.delenv("PLUGINS_MTLS_CLIENT_CERT", raising=False) + monkeypatch.delenv("PLUGINS_MTLS_CLIENT_KEY", raising=False) + monkeypatch.delenv("PLUGINS_MTLS_CLIENT_KEY_PASSWORD", raising=False) + monkeypatch.delenv("PLUGINS_MTLS_VERIFY", raising=False) + monkeypatch.delenv("PLUGINS_MTLS_CHECK_HOSTNAME", raising=False) + + assert MCPTransportTLSConfig.from_env() is None From 80df5469d805868ca69ac68ef888d6c958547cd5 Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Thu, 2 Oct 2025 10:47:09 -0600 Subject: [PATCH 
02/35] feat: added mTLS support to plugin mcp servers. Signed-off-by: Teryl Taylor --- Makefile | 162 ++++++++- .../framework/external/mcp/server/runtime.py | 45 ++- .../external/mcp/server/runtime_mtls.py | 330 ++++++++++++++++++ plugin_templates/external/.env.template | 34 ++ 4 files changed, 553 insertions(+), 18 deletions(-) create mode 100644 mcpgateway/plugins/framework/external/mcp/server/runtime_mtls.py diff --git a/Makefile b/Makefile index a4318afb2..8c124d1c1 100644 --- a/Makefile +++ b/Makefile @@ -135,14 +135,18 @@ update: @/bin/bash -c "source $(VENV_DIR)/bin/activate && python3 -m uv pip install -U .[dev]" # help: check-env - Verify all required env vars in .env are present -.PHONY: check-env +.PHONY: check-env check-env-dev + +# Validate .env in production mode check-env: - @echo "πŸ”Ž Checking .env against .env.example..." - @missing=0; \ - for key in $$(grep -Ev '^\s*#|^\s*$$' .env.example | cut -d= -f1); do \ - grep -q "^$$key=" .env || { echo "❌ Missing: $$key"; missing=1; }; \ - done; \ - if [ $$missing -eq 0 ]; then echo "βœ… All environment variables are present."; fi + @echo "πŸ”Ž Validating .env against .env.example using Python (prod)..." + @python -m mcpgateway.scripts.validate_env .env.example + +# Validate .env in development mode (warnings do not fail) +check-env-dev: + @echo "πŸ”Ž Validating .env (dev, warnings do not fail)..." 
+ @python -c "import sys; from mcpgateway.scripts import validate_env as ve; sys.exit(ve.main(env_file='.env', exit_on_warnings=False))" + # ============================================================================= @@ -154,11 +158,17 @@ check-env: # help: certs-jwt - Generate JWT RSA keys in ./certs/jwt/ (idempotent) # help: certs-jwt-ecdsa - Generate JWT ECDSA keys in ./certs/jwt/ (idempotent) # help: certs-all - Generate both TLS certs and JWT keys (combo target) +# help: certs-mcp-ca - Generate MCP CA for plugin mTLS (./certs/mcp/ca/) +# help: certs-mcp-gateway - Generate gateway client certificate (./certs/mcp/gateway/) +# help: certs-mcp-plugin - Generate plugin server certificate (requires PLUGIN_NAME=name) +# help: certs-mcp-all - Generate complete MCP mTLS infrastructure (reads plugins from config.yaml) +# help: certs-mcp-check - Check expiry dates of MCP certificates # help: serve-ssl - Run Gunicorn behind HTTPS on :4444 (uses ./certs) # help: dev - Run fast-reload dev server (uvicorn) # help: run - Execute helper script ./run.sh -.PHONY: serve serve-ssl dev run certs certs-jwt certs-jwt-ecdsa certs-all +.PHONY: serve serve-ssl dev run certs certs-jwt certs-jwt-ecdsa certs-all \ + certs-mcp-ca certs-mcp-gateway certs-mcp-plugin certs-mcp-all certs-mcp-check ## --- Primary servers --------------------------------------------------------- serve: @@ -221,6 +231,142 @@ certs-all: certs certs-jwt ## Generate both TLS certificates and JWT RSA k @echo "πŸ“ JWT: ./certs/jwt/{private,public}.pem" @echo "πŸ’‘ Use JWT_ALGORITHM=RS256 with JWT_PUBLIC_KEY_PATH=certs/jwt/public.pem" +## --- MCP Plugin mTLS Certificate Management ---------------------------------- +# Default validity period for MCP certificates (in days) +MCP_CERT_DAYS ?= 825 + +# Plugin configuration file for automatic certificate generation +MCP_PLUGIN_CONFIG ?= plugins/external/config.yaml + +certs-mcp-ca: ## Generate CA for MCP plugin mTLS + @if [ -f certs/mcp/ca/ca.key ] && [ -f 
certs/mcp/ca/ca.crt ]; then \ + echo "πŸ” Existing MCP CA found in ./certs/mcp/ca - skipping generation."; \ + echo "⚠️ To regenerate, delete ./certs/mcp/ca and run again."; \ + else \ + echo "πŸ” Generating MCP Certificate Authority ($(MCP_CERT_DAYS) days validity)..."; \ + mkdir -p certs/mcp/ca; \ + openssl genrsa -out certs/mcp/ca/ca.key 4096; \ + openssl req -new -x509 -key certs/mcp/ca/ca.key -out certs/mcp/ca/ca.crt \ + -days $(MCP_CERT_DAYS) \ + -subj "/CN=MCP-Gateway-CA/O=MCPGateway/OU=Plugins"; \ + echo "01" > certs/mcp/ca/ca.srl; \ + echo "βœ… MCP CA created: ./certs/mcp/ca/ca.{key,crt}"; \ + fi + @chmod 600 certs/mcp/ca/ca.key + @chmod 644 certs/mcp/ca/ca.crt + @echo "πŸ”’ Permissions set: ca.key (600), ca.crt (644)" + +certs-mcp-gateway: certs-mcp-ca ## Generate gateway client certificate + @if [ -f certs/mcp/gateway/client.key ] && [ -f certs/mcp/gateway/client.crt ]; then \ + echo "πŸ” Existing gateway client certificate found - skipping generation."; \ + else \ + echo "πŸ” Generating gateway client certificate ($(MCP_CERT_DAYS) days)..."; \ + mkdir -p certs/mcp/gateway; \ + openssl genrsa -out certs/mcp/gateway/client.key 2048; \ + openssl req -new -key certs/mcp/gateway/client.key \ + -out certs/mcp/gateway/client.csr \ + -subj "/CN=mcp-gateway-client/O=MCPGateway/OU=Gateway"; \ + openssl x509 -req -in certs/mcp/gateway/client.csr \ + -CA certs/mcp/ca/ca.crt -CAkey certs/mcp/ca/ca.key \ + -CAcreateserial -out certs/mcp/gateway/client.crt \ + -days $(MCP_CERT_DAYS) -sha256; \ + rm certs/mcp/gateway/client.csr; \ + cp certs/mcp/ca/ca.crt certs/mcp/gateway/ca.crt; \ + echo "βœ… Gateway client certificate created: ./certs/mcp/gateway/"; \ + fi + @chmod 600 certs/mcp/gateway/client.key + @chmod 644 certs/mcp/gateway/client.crt certs/mcp/gateway/ca.crt + @echo "πŸ”’ Permissions set: client.key (600), client.crt (644), ca.crt (644)" + +certs-mcp-plugin: certs-mcp-ca ## Generate plugin server certificate (PLUGIN_NAME=name) + @if [ -z "$(PLUGIN_NAME)" ]; 
then \ + echo "❌ ERROR: PLUGIN_NAME not set"; \ + echo "πŸ’‘ Usage: make certs-mcp-plugin PLUGIN_NAME=my-plugin"; \ + exit 1; \ + fi + @if [ -f certs/mcp/plugins/$(PLUGIN_NAME)/server.key ] && \ + [ -f certs/mcp/plugins/$(PLUGIN_NAME)/server.crt ]; then \ + echo "πŸ” Existing certificate for plugin '$(PLUGIN_NAME)' found - skipping."; \ + else \ + echo "πŸ” Generating server certificate for plugin '$(PLUGIN_NAME)' ($(MCP_CERT_DAYS) days)..."; \ + mkdir -p certs/mcp/plugins/$(PLUGIN_NAME); \ + openssl genrsa -out certs/mcp/plugins/$(PLUGIN_NAME)/server.key 2048; \ + openssl req -new -key certs/mcp/plugins/$(PLUGIN_NAME)/server.key \ + -out certs/mcp/plugins/$(PLUGIN_NAME)/server.csr \ + -subj "/CN=mcp-plugin-$(PLUGIN_NAME)/O=MCPGateway/OU=Plugins"; \ + openssl x509 -req -in certs/mcp/plugins/$(PLUGIN_NAME)/server.csr \ + -CA certs/mcp/ca/ca.crt -CAkey certs/mcp/ca/ca.key \ + -CAcreateserial -out certs/mcp/plugins/$(PLUGIN_NAME)/server.crt \ + -days $(MCP_CERT_DAYS) -sha256 \ + -extfile <(printf "subjectAltName=DNS:$(PLUGIN_NAME),DNS:mcp-plugin-$(PLUGIN_NAME),DNS:localhost"); \ + rm certs/mcp/plugins/$(PLUGIN_NAME)/server.csr; \ + cp certs/mcp/ca/ca.crt certs/mcp/plugins/$(PLUGIN_NAME)/ca.crt; \ + echo "βœ… Plugin '$(PLUGIN_NAME)' certificate created: ./certs/mcp/plugins/$(PLUGIN_NAME)/"; \ + fi + @chmod 600 certs/mcp/plugins/$(PLUGIN_NAME)/server.key + @chmod 644 certs/mcp/plugins/$(PLUGIN_NAME)/server.crt certs/mcp/plugins/$(PLUGIN_NAME)/ca.crt + @echo "πŸ”’ Permissions set: server.key (600), server.crt (644), ca.crt (644)" + +certs-mcp-all: certs-mcp-ca certs-mcp-gateway ## Generate complete mTLS infrastructure + @echo "πŸ” Generating certificates for plugins..." 
+ @# Read plugin names from config file if it exists + @if [ -f "$(MCP_PLUGIN_CONFIG)" ]; then \ + echo "πŸ“‹ Reading plugin names from $(MCP_PLUGIN_CONFIG)"; \ + python3 -c "import yaml; \ + config = yaml.safe_load(open('$(MCP_PLUGIN_CONFIG)')); \ + plugins = [p['name'] for p in config.get('plugins', []) if p.get('kind') == 'external']; \ + print('\n'.join(plugins))" 2>/dev/null | while read plugin_name; do \ + if [ -n "$$plugin_name" ]; then \ + echo " Generating for: $$plugin_name"; \ + $(MAKE) certs-mcp-plugin PLUGIN_NAME="$$plugin_name"; \ + fi; \ + done || echo "⚠️ PyYAML not installed or config parse failed, generating example plugins..."; \ + fi + @# Fallback to example plugins if no config or parsing failed + @if [ ! -f "$(MCP_PLUGIN_CONFIG)" ] || ! python3 -c "import yaml" 2>/dev/null; then \ + echo "πŸ” Generating certificates for example plugins..."; \ + $(MAKE) certs-mcp-plugin PLUGIN_NAME=example-plugin-a; \ + $(MAKE) certs-mcp-plugin PLUGIN_NAME=example-plugin-b; \ + fi + @echo "" + @echo "🎯 MCP mTLS infrastructure generated successfully!" + @echo "πŸ“ Structure:" + @echo " certs/mcp/ca/ - Certificate Authority" + @echo " certs/mcp/gateway/ - Gateway client certificate" + @echo " certs/mcp/plugins/*/ - Plugin server certificates" + @echo "" + @echo "πŸ’‘ Generate additional plugin certificates with:" + @echo " make certs-mcp-plugin PLUGIN_NAME=your-plugin-name" + @echo "" + @echo "πŸ’‘ Certificate validity: $(MCP_CERT_DAYS) days" + @echo " To change: make certs-mcp-all MCP_CERT_DAYS=365" + +certs-mcp-check: ## Check expiry dates of MCP certificates + @echo "πŸ” Checking MCP certificate expiry dates..." 
+ @echo "" + @if [ -f certs/mcp/ca/ca.crt ]; then \ + echo "πŸ“‹ CA Certificate:"; \ + openssl x509 -in certs/mcp/ca/ca.crt -noout -enddate | sed 's/notAfter=/ Expires: /'; \ + echo ""; \ + fi + @if [ -f certs/mcp/gateway/client.crt ]; then \ + echo "πŸ“‹ Gateway Client Certificate:"; \ + openssl x509 -in certs/mcp/gateway/client.crt -noout -enddate | sed 's/notAfter=/ Expires: /'; \ + echo ""; \ + fi + @if [ -d certs/mcp/plugins ]; then \ + echo "πŸ“‹ Plugin Certificates:"; \ + for plugin_dir in certs/mcp/plugins/*; do \ + if [ -f "$$plugin_dir/server.crt" ]; then \ + plugin_name=$$(basename "$$plugin_dir"); \ + expiry=$$(openssl x509 -in "$$plugin_dir/server.crt" -noout -enddate | sed 's/notAfter=//'); \ + echo " $$plugin_name: $$expiry"; \ + fi; \ + done; \ + echo ""; \ + fi + @echo "πŸ’‘ To regenerate expired certificates, delete the cert directory and run make certs-mcp-all" + ## --- House-keeping ----------------------------------------------------------- # help: clean - Remove caches, build artefacts, virtualenv, docs, certs, coverage, SBOM, database files, etc. .PHONY: clean diff --git a/mcpgateway/plugins/framework/external/mcp/server/runtime.py b/mcpgateway/plugins/framework/external/mcp/server/runtime.py index b7bd9664f..a35a90556 100644 --- a/mcpgateway/plugins/framework/external/mcp/server/runtime.py +++ b/mcpgateway/plugins/framework/external/mcp/server/runtime.py @@ -258,19 +258,44 @@ def resource_post_fetch_func(plugin: Plugin, payload: ResourcePostFetchPayload, async def run(): # pragma: no cover """Run the external plugin SERVER. + Automatically detects mTLS configuration from environment variables and + delegates to runtime_mtls.py if mTLS is enabled. 
+ + Environment Variables (mTLS detection): + MCP_SSL_ENABLED: Enable SSL/TLS (true/false) + MCP_SSL_KEYFILE: Path to server private key + MCP_SSL_CERTFILE: Path to server certificate + MCP_SSL_CA_CERTS: Path to CA bundle (optional) + MCP_SSL_CERT_REQS: Certificate verification mode (0=NONE, 1=OPTIONAL, 2=REQUIRED) + Raises: Exception: if unnable to run the plugin SERVER. """ - global SERVER # pylint: disable=global-statement - SERVER = ExternalPluginServer() - if await SERVER.initialize(): - try: - await main_async() - except Exception: - logger.exception("Caught error while executing plugin server") - raise - finally: - await SERVER.shutdown() + import os + + # Check if mTLS is enabled via environment variables + ssl_enabled = os.getenv("MCP_SSL_ENABLED", "").lower() == "true" + + if ssl_enabled: + # Delegate to mTLS runtime + logger.info("mTLS enabled - delegating to runtime_mtls") + from mcpgateway.plugins.framework.external.mcp.server.runtime_mtls import run_with_mtls + + # Set the global SERVER for runtime_mtls to use + global SERVER # pylint: disable=global-statement + await run_with_mtls() + else: + # Standard runtime without mTLS + global SERVER # pylint: disable=global-statement + SERVER = ExternalPluginServer() + if await SERVER.initialize(): + try: + await main_async() + except Exception: + logger.exception("Caught error while executing plugin server") + raise + finally: + await SERVER.shutdown() if __name__ == "__main__": # pragma: no cover diff --git a/mcpgateway/plugins/framework/external/mcp/server/runtime_mtls.py b/mcpgateway/plugins/framework/external/mcp/server/runtime_mtls.py new file mode 100644 index 000000000..0daed9092 --- /dev/null +++ b/mcpgateway/plugins/framework/external/mcp/server/runtime_mtls.py @@ -0,0 +1,330 @@ +# -*- coding: utf-8 -*- +"""Location: ./mcpgateway/plugins/framework/external/mcp/server/runtime_mtls.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Runtime MCP server for external plugins 
with mTLS support. + +This module provides a custom wrapper around chuk_mcp_runtime that enables +mutual TLS (mTLS) authentication by directly configuring uvicorn.Config +instead of using the default main_async() entry point. +""" + +# Standard +import asyncio +import contextlib +import logging +import os +import ssl +from typing import Any, AsyncIterator, Dict, Optional + +# Third-Party +import uvicorn +from dotenv import load_dotenv +from mcp.server import Server +from mcp.server.sse import SseServerTransport +from starlette.applications import Starlette +from starlette.middleware import Middleware +from starlette.requests import Request +from starlette.responses import PlainTextResponse, Response +from starlette.routing import Mount, Route + +# chuk_mcp_runtime imports +from chuk_mcp_runtime.common.mcp_tool_decorator import ( + TOOLS_REGISTRY, + initialize_tool_registry, +) +from chuk_mcp_runtime.server.config_loader import find_project_root, load_config +from chuk_mcp_runtime.server.logging_config import configure_logging, get_logger +from chuk_mcp_runtime.server.server import MCPServer, AuthMiddleware +from chuk_mcp_runtime.server.server_registry import ServerRegistry +from chuk_mcp_runtime.session.native_session_management import create_mcp_session_manager +from chuk_mcp_runtime.tools import register_artifacts_tools, register_session_tools + +# First-Party +from mcpgateway.plugins.framework import ExternalPluginServer +from mcpgateway.plugins.framework.external.mcp.server.runtime import ( + get_plugin_config, + get_plugin_configs, + prompt_post_fetch, + prompt_pre_fetch, + resource_post_fetch, + resource_pre_fetch, + tool_post_invoke, + tool_pre_invoke, +) + +load_dotenv() + +logger = get_logger(__name__) + +SERVER: Optional[ExternalPluginServer] = None + + +async def run_with_mtls( + config_paths: Optional[list[str]] = None, + default_config: Optional[Dict[str, Any]] = None, +) -> None: + """Run the MCP runtime with mTLS support. 
+ + This function provides direct control over uvicorn configuration to enable + mutual TLS authentication. SSL/TLS settings are read from environment variables + or the configuration file. + + Environment Variables: + MCP_SSL_ENABLED: Enable SSL/TLS (true/false) + MCP_SSL_KEYFILE: Path to server private key + MCP_SSL_CERTFILE: Path to server certificate + MCP_SSL_KEYFILE_PASSWORD: Optional password for encrypted key + MCP_SSL_CA_CERTS: Path to CA bundle for verifying client certificates + MCP_SSL_CERT_REQS: Certificate verification mode: + - 0 (CERT_NONE): No client certificate required (default TLS) + - 1 (CERT_OPTIONAL): Client certificate optional + - 2 (CERT_REQUIRED): Client certificate required (mTLS) + + Args: + config_paths: Optional list of configuration file paths. + default_config: Optional default configuration dictionary. + + Raises: + RuntimeError: If SSL is enabled but required certificate files are missing. + Exception: If server initialization or startup fails. + """ + global SERVER # pylint: disable=global-statement + + # 1) Configuration and logging setup + cfg = load_config(config_paths, default_config) + configure_logging(cfg) + project_root = find_project_root() + logger.debug("Project root resolved to %s", project_root) + + # 2) Initialize external plugin server + SERVER = ExternalPluginServer() + if not await SERVER.initialize(): + raise RuntimeError("Failed to initialize external plugin server") + + # 3) Native session management initialization + session_manager = create_mcp_session_manager(cfg) + logger.info("Native session manager initialized for sandbox: %s", session_manager.sandbox_id) + + # 4) Optional component bootstrap + if not os.getenv("NO_BOOTSTRAP"): + await ServerRegistry(project_root, cfg).load_server_components() + + # 5) Tool registry initialization + await initialize_tool_registry() + + # 6) Artifact management tools + await register_artifacts_tools(cfg) + logger.debug("Artifact tools registration completed") + + # 7) 
Session management tools + session_cfg = cfg.copy() + session_cfg.setdefault("session_tools", {})["session_manager"] = session_manager + await register_session_tools(session_cfg) + logger.debug("Session tools registration completed") + + # 8) Create MCP server instance + mcp_server = MCPServer(cfg, tools_registry=TOOLS_REGISTRY) + logger.debug("Local MCP server '%s' starting with native sessions", getattr(mcp_server, "server_name", "local")) + + # 9) Get server configuration + server_mode = cfg.get("server", {}).get("type", "sse") + if server_mode != "sse": + raise ValueError(f"Only 'sse' server mode is supported with mTLS wrapper, got: {server_mode}") + + sse_config = cfg.get("sse", {}) + host = sse_config.get("host", "0.0.0.0") # nosec B104 - Intentional binding for server as default + port = sse_config.get("port", 8000) + sse_path = sse_config.get("sse_path", "/sse") + msg_path = sse_config.get("message_path", "/messages/") + health_path = sse_config.get("health_path", "/health") + + # 10) Setup SSL/TLS configuration + ssl_enabled = os.getenv("MCP_SSL_ENABLED", "false").lower() == "true" + ssl_keyfile = os.getenv("MCP_SSL_KEYFILE") + ssl_certfile = os.getenv("MCP_SSL_CERTFILE") + ssl_keyfile_password = os.getenv("MCP_SSL_KEYFILE_PASSWORD") + ssl_ca_certs = os.getenv("MCP_SSL_CA_CERTS") + ssl_cert_reqs_str = os.getenv("MCP_SSL_CERT_REQS", "0") + + try: + ssl_cert_reqs = int(ssl_cert_reqs_str) + except ValueError: + logger.warning("Invalid MCP_SSL_CERT_REQS value '%s', defaulting to 0 (CERT_NONE)", ssl_cert_reqs_str) + ssl_cert_reqs = ssl.CERT_NONE + + # Validate SSL configuration + if ssl_enabled: + if not ssl_keyfile or not ssl_certfile: + raise RuntimeError("MCP_SSL_ENABLED=true requires MCP_SSL_KEYFILE and MCP_SSL_CERTFILE to be set") + + if ssl_cert_reqs in (ssl.CERT_REQUIRED, ssl.CERT_OPTIONAL) and not ssl_ca_certs: + raise RuntimeError(f"MCP_SSL_CERT_REQS={ssl_cert_reqs} requires MCP_SSL_CA_CERTS to be set for client certificate verification") + + 
cert_reqs_name = {0: "CERT_NONE", 1: "CERT_OPTIONAL", 2: "CERT_REQUIRED"}.get(ssl_cert_reqs, "UNKNOWN") + logger.info("SSL/TLS enabled: cert_reqs=%s (%d), keyfile=%s, certfile=%s, ca_certs=%s", cert_reqs_name, ssl_cert_reqs, ssl_keyfile, ssl_certfile, ssl_ca_certs or "None") + + if ssl_cert_reqs == ssl.CERT_REQUIRED: + logger.info("mTLS ENABLED - Client certificates are REQUIRED") + elif ssl_cert_reqs == ssl.CERT_OPTIONAL: + logger.info("mTLS OPTIONAL - Client certificates are optional") + else: + logger.info("Standard TLS - No client certificate verification") + + # 11) Create MCP server with SSE transport + server = Server(mcp_server.server_name) + + # Register list_tools handler (from MCPServer) + @server.list_tools() + async def list_tools(): + """List available tools.""" + tools = [] + for tool_name, func in TOOLS_REGISTRY.items(): + if hasattr(func, "_mcp_tool"): + tool_obj = func._mcp_tool + if hasattr(tool_obj, "name") and hasattr(tool_obj, "description"): + tools.append(tool_obj) + return tools + + # Register call_tool handler (delegates to MCPServer's implementation) + @server.call_tool() + async def call_tool(name: str, arguments: Dict[str, Any]): + """Execute a tool.""" + # Reuse the MCPServer's call_tool logic by creating a temporary instance + # and invoking its internal handler + from mcp.types import TextContent + + try: + # Use the existing mcp_server instance + handler = mcp_server._MCPServer__call_tool if hasattr(mcp_server, "_MCPServer__call_tool") else None + + # Since we can't easily access the private handler, we'll replicate the logic + # by importing and calling the tool directly + resolved = name if name in TOOLS_REGISTRY else name + if resolved not in TOOLS_REGISTRY: + raise ValueError(f"Tool not found: {name}") + + func = TOOLS_REGISTRY[resolved] + result = await func(**arguments) + + if isinstance(result, str): + return [TextContent(type="text", text=result)] + elif isinstance(result, dict): + import json + + return 
[TextContent(type="text", text=json.dumps(result, indent=2))] + else: + return result + except Exception as e: + logger.error("Error in call_tool for '%s': %s", name, e) + return [TextContent(type="text", text=f"Tool execution error: {str(e)}")] + + # 12) Create SSE transport + transport = SseServerTransport(msg_path) + opts = server.create_initialization_options() + + async def handle_sse(request: Request): + """Handle SSE connections.""" + async with transport.connect_sse(request.scope, request.receive, request._send) as streams: + await server.run(streams[0], streams[1], opts) + return Response() + + async def health(request: Request): + """Health check endpoint.""" + return PlainTextResponse("OK") + + # 13) Create Starlette application + app = Starlette( + routes=[ + Route(sse_path, handle_sse, methods=["GET"]), + Mount(msg_path, app=transport.handle_post_message), + Route(health_path, health, methods=["GET"]), + ], + middleware=[ + Middleware( + AuthMiddleware, + auth=cfg.get("server", {}).get("auth"), + health_path=health_path, + ) + ], + ) + + # 14) Configure uvicorn with SSL/TLS settings + uvicorn_config_kwargs = { + "app": app, + "host": host, + "port": port, + "log_level": "info", + } + + if ssl_enabled: + uvicorn_config_kwargs.update( + { + "ssl_keyfile": ssl_keyfile, + "ssl_certfile": ssl_certfile, + "ssl_keyfile_password": ssl_keyfile_password, + "ssl_cert_reqs": ssl_cert_reqs, + "ssl_ca_certs": ssl_ca_certs, + } + ) + + uvicorn_cfg = uvicorn.Config(**uvicorn_config_kwargs) + + # 15) Start server + protocol = "https" if ssl_enabled else "http" + logger.info("Starting MCP (SSE) on %s://%s:%s with mTLS support", protocol, host, port) + + try: + await uvicorn.Server(uvicorn_cfg).serve() + finally: + if SERVER: + await SERVER.shutdown() + + +async def main_async(default_config: Optional[Dict[str, Any]] = None) -> None: + """Async entry point for mTLS runtime. + + Args: + default_config: Optional default configuration dictionary. 
+ """ + try: + import sys + + # Parse command line arguments for config file + argv = sys.argv[1:] + cfg_path = ( + os.getenv("CHUK_MCP_CONFIG_PATH") + or (argv[argv.index("-c") + 1] if "-c" in argv else None) + or (argv[argv.index("--config") + 1] if "--config" in argv else None) + or (argv[0] if argv else None) + ) + + await run_with_mtls(config_paths=[cfg_path] if cfg_path else None, default_config=default_config) + except Exception as exc: + logger.error("Error starting CHUK MCP server with mTLS: %s", exc, exc_info=True) + import sys + + sys.exit(1) + + +def main(default_config: Optional[Dict[str, Any]] = None) -> None: + """Main entry point for mTLS runtime. + + Args: + default_config: Optional default configuration dictionary. + """ + try: + asyncio.run(main_async(default_config)) + except KeyboardInterrupt: + logger.warning("Received Ctrl-C β†’ shutting down") + except Exception as exc: + logger.error("Uncaught exception: %s", exc, exc_info=True) + import sys + + sys.exit(1) + + +if __name__ == "__main__": # pragma: no cover + main() \ No newline at end of file diff --git a/plugin_templates/external/.env.template b/plugin_templates/external/.env.template index 6d9faf358..0715139d9 100644 --- a/plugin_templates/external/.env.template +++ b/plugin_templates/external/.env.template @@ -21,3 +21,37 @@ PLUGINS_CONFIG=./resources/plugins/config.yaml # Configuration path for chuck mcp runtime CHUK_MCP_CONFIG_PATH=./resources/runtime/config.yaml + +##################################### +# MCP External Plugin Server - mTLS Configuration +##################################### + +# Enable SSL/TLS for external plugin MCP server +# Options: true, false (default) +# When true: Enables HTTPS and optionally mTLS for the plugin MCP server +MCP_SSL_ENABLED=false + +# SSL/TLS Certificate Files +# Path to server private key (required when MCP_SSL_ENABLED=true) +# Generate with: openssl genrsa -out certs/mcp/server.key 2048 +# MCP_SSL_KEYFILE=certs/mcp/server.key + +# Path to 
server certificate (required when MCP_SSL_ENABLED=true) +# Generate with: openssl req -new -x509 -key certs/mcp/server.key -out certs/mcp/server.crt -days 365 +# MCP_SSL_CERTFILE=certs/mcp/server.crt + +# Optional password for encrypted private key +# MCP_SSL_KEYFILE_PASSWORD= + +# mTLS (Mutual TLS) Configuration +# Client certificate verification mode: +# 0 (CERT_NONE): No client certificate required - standard TLS (default) +# 1 (CERT_OPTIONAL): Client certificate optional - validate if provided +# 2 (CERT_REQUIRED): Client certificate required - full mTLS +# Default: 0 (standard TLS without client verification) +MCP_SSL_CERT_REQS=0 + +# CA certificate bundle for verifying client certificates +# Required when MCP_SSL_CERT_REQS=1 or MCP_SSL_CERT_REQS=2 +# Can be a single CA file or a bundle containing multiple CAs +# MCP_SSL_CA_CERTS=certs/mcp/ca.crt From 196568d66aae2a27d46cd59019a47a60bcbb94ac Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Thu, 2 Oct 2025 16:40:01 -0600 Subject: [PATCH 03/35] fix: added streamable http support to runtime_mtls.py Signed-off-by: Teryl Taylor --- .../external/mcp/server/runtime_mtls.py | 644 ++++++++++++------ 1 file changed, 448 insertions(+), 196 deletions(-) diff --git a/mcpgateway/plugins/framework/external/mcp/server/runtime_mtls.py b/mcpgateway/plugins/framework/external/mcp/server/runtime_mtls.py index 0daed9092..3633cf1d9 100644 --- a/mcpgateway/plugins/framework/external/mcp/server/runtime_mtls.py +++ b/mcpgateway/plugins/framework/external/mcp/server/runtime_mtls.py @@ -6,9 +6,9 @@ Runtime MCP server for external plugins with mTLS support. -This module provides a custom wrapper around chuk_mcp_runtime that enables -mutual TLS (mTLS) authentication by directly configuring uvicorn.Config -instead of using the default main_async() entry point. +This module extends MCPServer from chuk_mcp_runtime to add SSL/TLS support +by reading UVICORN_SSL_* environment variables and injecting them into the +uvicorn.Config creation. 
""" # Standard @@ -22,20 +22,18 @@ # Third-Party import uvicorn from dotenv import load_dotenv -from mcp.server import Server -from mcp.server.sse import SseServerTransport +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager from starlette.applications import Starlette from starlette.middleware import Middleware from starlette.requests import Request from starlette.responses import PlainTextResponse, Response from starlette.routing import Mount, Route +from starlette.types import Scope, Receive, Send # chuk_mcp_runtime imports -from chuk_mcp_runtime.common.mcp_tool_decorator import ( - TOOLS_REGISTRY, - initialize_tool_registry, -) +from chuk_mcp_runtime.common.mcp_tool_decorator import initialize_tool_registry from chuk_mcp_runtime.server.config_loader import find_project_root, load_config +from chuk_mcp_runtime.server.event_store import InMemoryEventStore from chuk_mcp_runtime.server.logging_config import configure_logging, get_logger from chuk_mcp_runtime.server.server import MCPServer, AuthMiddleware from chuk_mcp_runtime.server.server_registry import ServerRegistry @@ -44,16 +42,6 @@ # First-Party from mcpgateway.plugins.framework import ExternalPluginServer -from mcpgateway.plugins.framework.external.mcp.server.runtime import ( - get_plugin_config, - get_plugin_configs, - prompt_post_fetch, - prompt_pre_fetch, - resource_post_fetch, - resource_pre_fetch, - tool_post_invoke, - tool_pre_invoke, -) load_dotenv() @@ -62,222 +50,488 @@ SERVER: Optional[ExternalPluginServer] = None -async def run_with_mtls( - config_paths: Optional[list[str]] = None, - default_config: Optional[Dict[str, Any]] = None, -) -> None: - """Run the MCP runtime with mTLS support. +class SSLCapableMCPServer(MCPServer): + """MCPServer with SSL/TLS support via UVICORN_SSL_* environment variables. - This function provides direct control over uvicorn configuration to enable - mutual TLS authentication. 
SSL/TLS settings are read from environment variables - or the configuration file. + This class extends the standard MCPServer to support SSL/TLS configuration + through environment variables, enabling mTLS for external plugin servers. Environment Variables: - MCP_SSL_ENABLED: Enable SSL/TLS (true/false) - MCP_SSL_KEYFILE: Path to server private key - MCP_SSL_CERTFILE: Path to server certificate - MCP_SSL_KEYFILE_PASSWORD: Optional password for encrypted key - MCP_SSL_CA_CERTS: Path to CA bundle for verifying client certificates - MCP_SSL_CERT_REQS: Certificate verification mode: + UVICORN_SSL_KEYFILE: Path to server private key + UVICORN_SSL_CERTFILE: Path to server certificate + UVICORN_SSL_KEYFILE_PASSWORD: Optional password for encrypted key + UVICORN_SSL_CA_CERTS: Path to CA bundle for verifying client certificates + UVICORN_SSL_CERT_REQS: Certificate verification mode: - 0 (CERT_NONE): No client certificate required (default TLS) - 1 (CERT_OPTIONAL): Client certificate optional - 2 (CERT_REQUIRED): Client certificate required (mTLS) + """ + + def _get_ssl_config(self) -> Dict[str, Any]: + """Read SSL configuration from UVICORN_SSL_* environment variables. 
+ + Returns: + Dictionary of SSL configuration parameters for uvicorn.Config + + Raises: + RuntimeError: If SSL is enabled but required files are missing + """ + ssl_config = {} + + ssl_keyfile = os.getenv("UVICORN_SSL_KEYFILE") + ssl_certfile = os.getenv("UVICORN_SSL_CERTFILE") + + if not ssl_keyfile and not ssl_certfile: + return ssl_config + + # SSL is enabled + if not ssl_keyfile or not ssl_certfile: + raise RuntimeError( + "Both UVICORN_SSL_KEYFILE and UVICORN_SSL_CERTFILE must be set " + "when enabling SSL/TLS" + ) + + ssl_config["ssl_keyfile"] = ssl_keyfile + ssl_config["ssl_certfile"] = ssl_certfile + + # Optional password for encrypted key + if os.getenv("UVICORN_SSL_KEYFILE_PASSWORD"): + ssl_config["ssl_keyfile_password"] = os.getenv("UVICORN_SSL_KEYFILE_PASSWORD") + + # Client certificate verification + ssl_cert_reqs_str = os.getenv("UVICORN_SSL_CERT_REQS", "0") + try: + ssl_cert_reqs = int(ssl_cert_reqs_str) + except ValueError: + logger.warning( + "Invalid UVICORN_SSL_CERT_REQS value '%s', defaulting to 0 (CERT_NONE)", + ssl_cert_reqs_str + ) + ssl_cert_reqs = ssl.CERT_NONE + + ssl_config["ssl_cert_reqs"] = ssl_cert_reqs + + # CA certificates for client verification + ssl_ca_certs = os.getenv("UVICORN_SSL_CA_CERTS") + if ssl_cert_reqs in (ssl.CERT_REQUIRED, ssl.CERT_OPTIONAL): + if not ssl_ca_certs: + raise RuntimeError( + f"UVICORN_SSL_CERT_REQS={ssl_cert_reqs} requires UVICORN_SSL_CA_CERTS " + "to be set for client certificate verification" + ) + + if ssl_ca_certs: + ssl_config["ssl_ca_certs"] = ssl_ca_certs + + # Log SSL configuration + cert_reqs_name = { + 0: "CERT_NONE", + 1: "CERT_OPTIONAL", + 2: "CERT_REQUIRED" + }.get(ssl_cert_reqs, "UNKNOWN") + + logger.info( + "SSL/TLS enabled: cert_reqs=%s (%d), keyfile=%s, certfile=%s, ca_certs=%s", + cert_reqs_name, + ssl_cert_reqs, + ssl_keyfile, + ssl_certfile, + ssl_ca_certs or "None" + ) + + if ssl_cert_reqs == ssl.CERT_REQUIRED: + logger.info("mTLS ENABLED - Client certificates are REQUIRED") + elif 
ssl_cert_reqs == ssl.CERT_OPTIONAL: + logger.info("mTLS OPTIONAL - Client certificates are optional") + else: + logger.info("Standard TLS - No client certificate verification") + + return ssl_config + + async def serve(self, custom_handlers: Optional[Dict[str, Any]] = None) -> None: + """Boot the MCP server with SSL/TLS support and serve forever. + + This overrides the parent's serve() method to inject SSL configuration + into uvicorn.Config creation. All other functionality (tool registration, + session management, etc.) is inherited from the parent MCPServer. + + Args: + custom_handlers: Optional custom handlers (e.g., proxy text handler) + """ + # Get SSL configuration from environment variables + ssl_config = self._get_ssl_config() + is_ssl_enabled = bool(ssl_config) + + # Reuse parent's setup for everything except uvicorn Config + await self._setup_artifact_store() + + if not self.tools_registry: + self.tools_registry = await self._import_tools_registry() + + await initialize_tool_registry() + from chuk_mcp_runtime.common.tool_naming import update_naming_maps + update_naming_maps() + + # Create server and register handlers (uses parent's implementation) + from mcp.server import Server + from mcp.server.sse import SseServerTransport + from mcp.types import TextContent, ImageContent, EmbeddedResource + import json + from inspect import isasyncgen + + server = Server(self.server_name) + + # Register list_tools (from parent's implementation) + @server.list_tools() + async def list_tools(): + """List available tools with robust error handling.""" + try: + self.logger.info( + "list_tools called - %d tools total", len(self.tools_registry) + ) + + tools = [] + for tool_name, func in self.tools_registry.items(): + try: + if hasattr(func, "_mcp_tool"): + tool_obj = func._mcp_tool + if hasattr(tool_obj, "name") and hasattr(tool_obj, "description"): + tools.append(tool_obj) + self.logger.debug("Added tool to list: %s", tool_obj.name) + else: + self.logger.warning( + "Tool 
%s has invalid _mcp_tool object: %s", + tool_name, tool_obj + ) + else: + self.logger.warning( + "Tool %s missing _mcp_tool attribute", tool_name + ) + except Exception as e: + self.logger.error("Error processing tool %s: %s", tool_name, e) + continue + + self.logger.info("Returning %d valid tools", len(tools)) + return tools + except Exception as e: + self.logger.error("Error in list_tools: %s", e) + return [] + + # Register call_tool (from parent's implementation) + @server.call_tool() + async def call_tool(name: str, arguments: Dict[str, Any]): + """Execute a tool with native session management.""" + try: + from chuk_mcp_runtime.server.server import parse_tool_arguments + from chuk_mcp_runtime.common.tool_naming import resolve_tool_name + from chuk_mcp_runtime.session.native_session_management import SessionContext + + # Fix concatenated JSON in arguments + original_args = arguments + if arguments: + if isinstance(arguments, (str, dict)): + arguments = parse_tool_arguments(arguments) + if arguments != original_args: + self.logger.info( + "Fixed concatenated JSON arguments for '%s': %s -> %s", + name, original_args, arguments + ) + + self.logger.debug( + "call_tool called with name='%s', arguments=%s", name, arguments + ) + + # Tool name resolution + resolved = name if name in self.tools_registry else resolve_tool_name(name) + if resolved not in self.tools_registry: + matches = [ + k for k in self.tools_registry + if k.endswith(f"_{name}") or k.endswith(f".{name}") + ] + if len(matches) == 1: + resolved = matches[0] + + if resolved not in self.tools_registry: + raise ValueError(f"Tool not found: {name}") + + func = self.tools_registry[resolved] + self.logger.debug("Resolved tool '%s' to function: %s", name, func) + + # Native session injection + arguments = await self._inject_session_context(resolved, arguments) + + # Execute within session context + async with SessionContext( + self.session_manager, + session_id=arguments.get("session_id"), + auto_create=True + 
) as session_id: + self.logger.debug( + "Executing tool '%s' in session %s", resolved, session_id + ) + result = await self._execute_tool_with_timeout(func, resolved, arguments) + self.logger.debug( + "Tool execution completed, result type: %s", type(result) + ) + + # Handle streaming results + if isasyncgen(result): + self.logger.debug( + "Tool returned async generator, collecting chunks for '%s'", + resolved + ) + collected_chunks = [] + chunk_count = 0 + + try: + async for part in result: + chunk_count += 1 + self.logger.debug( + "Collecting streaming chunk %d for '%s'", + chunk_count, resolved + ) + + if isinstance(part, (TextContent, ImageContent, EmbeddedResource)): + collected_chunks.append(part) + elif isinstance(part, str): + collected_chunks.append(TextContent(type="text", text=part)) + elif isinstance(part, dict) and "delta" in part: + collected_chunks.append( + TextContent(type="text", text=part["delta"]) + ) + else: + collected_chunks.append( + TextContent(type="text", text=str(part)) + ) + except Exception as e: + self.logger.error( + "Error collecting streaming chunks for '%s': %s", + resolved, e + ) + return [ + TextContent( + type="text", + text=f"Streaming error: {str(e)}" + ) + ] + + self.logger.info( + "Collected %d streaming chunks for '%s'", + len(collected_chunks), resolved + ) + result = collected_chunks + + # Wrap result if needed + if not isinstance(result, dict) or "session_id" not in result: + if isinstance(result, dict) and not ( + "content" in result and "isError" in result + ): + result = { + "session_id": session_id, + "content": result, + "isError": False + } + elif isinstance(result, str): + result = { + "session_id": session_id, + "content": result, + "isError": False + } + + # Format response + if isinstance(result, list) and all( + isinstance(r, (TextContent, ImageContent, EmbeddedResource)) + for r in result + ): + return result + elif isinstance(result, str): + return [TextContent(type="text", text=result)] + else: + return 
[TextContent(type="text", text=json.dumps(result, indent=2))] + + except Exception as e: + self.logger.error("Error in call_tool for '%s': %s", name, e) + return [TextContent(type="text", text=f"Tool execution error: {str(e)}")] + + # Transport setup with SSL support + opts = server.create_initialization_options() + mode = self.config.get("server", {}).get("type", "stdio") + + if mode == "stdio": + self.logger.info( + "Starting MCP (stdio) - global timeout %.1fs", self.tool_timeout + ) + from mcp.server.stdio import stdio_server + async with stdio_server() as (r, w): + await server.run(r, w, opts) + + elif mode == "sse": + cfg = self.config.get("sse", {}) + host, port = cfg.get("host", "0.0.0.0"), cfg.get("port", 8000) # nosec B104 + sse_path, msg_path, health_path = ( + cfg.get("sse_path", "/sse"), + cfg.get("message_path", "/messages/"), + cfg.get("health_path", "/health") + ) + transport = SseServerTransport(msg_path) + + async def _handle_sse(request: Request): + async with transport.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await server.run(streams[0], streams[1], opts) + return Response() + + async def health(request: Request): + return PlainTextResponse("OK") + + app = Starlette( + routes=[ + Route(sse_path, _handle_sse, methods=["GET"]), + Mount(msg_path, app=transport.handle_post_message), + Route(health_path, health, methods=["GET"]) + ], + middleware=[ + Middleware( + AuthMiddleware, + auth=self.config.get("server", {}).get("auth"), + health_path=health_path + ) + ] + ) + + protocol = "https" if is_ssl_enabled else "http" + self.logger.info( + "Starting MCP (SSE) on %s://%s:%s - global timeout %.1fs", + protocol, host, port, self.tool_timeout + ) + + # Create uvicorn Config with SSL support + config_kwargs = {"app": app, "host": host, "port": port, "log_level": "info"} + config_kwargs.update(ssl_config) + + await uvicorn.Server(uvicorn.Config(**config_kwargs)).serve() + + elif mode == "streamable-http": + 
self.logger.info("Starting MCP server over streamable-http") + + streamhttp_config = self.config.get("streamable-http", {}) + host = streamhttp_config.get("host", "127.0.0.1") + port = streamhttp_config.get("port", 3000) + mcp_path = streamhttp_config.get("mcp_path", "/mcp") + json_response = streamhttp_config.get("json_response", True) + stateless = streamhttp_config.get("stateless", True) + + event_store = None if stateless else InMemoryEventStore() + + session_manager = StreamableHTTPSessionManager( + app=server, + event_store=event_store, + stateless=stateless, + json_response=json_response + ) + + async def handle_streamable_http(scope: Scope, receive: Receive, send: Send): + await session_manager.handle_request(scope, receive, send) + + async def health(request: Request): + return PlainTextResponse("OK") + + @contextlib.asynccontextmanager + async def lifespan(app: Starlette) -> AsyncIterator[None]: + async with session_manager.run(): + self.logger.info("Application started with StreamableHTTP session manager!") + try: + yield + finally: + self.logger.info("Application shutting down...") + + app = Starlette( + debug=True, + routes=[ + Mount(mcp_path, handle_streamable_http), + Route("/health", health, methods=["GET"]) + ], + middleware=[ + Middleware( + AuthMiddleware, + auth=self.config.get("server", {}).get("auth") + ) + ], + lifespan=lifespan + ) + + protocol = "https" if is_ssl_enabled else "http" + self.logger.info( + "Starting MCP (StreamableHTTP) on %s://%s:%s - global timeout %.1fs", + protocol, host, port, self.tool_timeout + ) + + # Create uvicorn Config with SSL support + config_kwargs = {"app": app, "host": host, "port": port, "log_level": "info"} + config_kwargs.update(ssl_config) + + await uvicorn.Server(uvicorn.Config(**config_kwargs)).serve() + + else: + raise ValueError(f"Unsupported server mode: {mode}") + + +async def run_with_mtls( + config_paths: Optional[list[str]] = None, + default_config: Optional[Dict[str, Any]] = None, +) -> None: + 
"""Run the MCP runtime with mTLS support. Args: config_paths: Optional list of configuration file paths. default_config: Optional default configuration dictionary. Raises: - RuntimeError: If SSL is enabled but required certificate files are missing. - Exception: If server initialization or startup fails. + RuntimeError: If server initialization fails. """ global SERVER # pylint: disable=global-statement - # 1) Configuration and logging setup + # Configuration and logging setup cfg = load_config(config_paths, default_config) configure_logging(cfg) project_root = find_project_root() logger.debug("Project root resolved to %s", project_root) - # 2) Initialize external plugin server + # Initialize external plugin server SERVER = ExternalPluginServer() if not await SERVER.initialize(): raise RuntimeError("Failed to initialize external plugin server") - # 3) Native session management initialization + # Native session management initialization session_manager = create_mcp_session_manager(cfg) logger.info("Native session manager initialized for sandbox: %s", session_manager.sandbox_id) - # 4) Optional component bootstrap + # Optional component bootstrap if not os.getenv("NO_BOOTSTRAP"): await ServerRegistry(project_root, cfg).load_server_components() - # 5) Tool registry initialization + # Tool registry initialization await initialize_tool_registry() - # 6) Artifact management tools + # Artifact management tools await register_artifacts_tools(cfg) logger.debug("Artifact tools registration completed") - # 7) Session management tools + # Session management tools session_cfg = cfg.copy() session_cfg.setdefault("session_tools", {})["session_manager"] = session_manager await register_session_tools(session_cfg) logger.debug("Session tools registration completed") - # 8) Create MCP server instance - mcp_server = MCPServer(cfg, tools_registry=TOOLS_REGISTRY) - logger.debug("Local MCP server '%s' starting with native sessions", getattr(mcp_server, "server_name", "local")) - - # 9) 
Get server configuration - server_mode = cfg.get("server", {}).get("type", "sse") - if server_mode != "sse": - raise ValueError(f"Only 'sse' server mode is supported with mTLS wrapper, got: {server_mode}") - - sse_config = cfg.get("sse", {}) - host = sse_config.get("host", "0.0.0.0") # nosec B104 - Intentional binding for server as default - port = sse_config.get("port", 8000) - sse_path = sse_config.get("sse_path", "/sse") - msg_path = sse_config.get("message_path", "/messages/") - health_path = sse_config.get("health_path", "/health") - - # 10) Setup SSL/TLS configuration - ssl_enabled = os.getenv("MCP_SSL_ENABLED", "false").lower() == "true" - ssl_keyfile = os.getenv("MCP_SSL_KEYFILE") - ssl_certfile = os.getenv("MCP_SSL_CERTFILE") - ssl_keyfile_password = os.getenv("MCP_SSL_KEYFILE_PASSWORD") - ssl_ca_certs = os.getenv("MCP_SSL_CA_CERTS") - ssl_cert_reqs_str = os.getenv("MCP_SSL_CERT_REQS", "0") - - try: - ssl_cert_reqs = int(ssl_cert_reqs_str) - except ValueError: - logger.warning("Invalid MCP_SSL_CERT_REQS value '%s', defaulting to 0 (CERT_NONE)", ssl_cert_reqs_str) - ssl_cert_reqs = ssl.CERT_NONE - - # Validate SSL configuration - if ssl_enabled: - if not ssl_keyfile or not ssl_certfile: - raise RuntimeError("MCP_SSL_ENABLED=true requires MCP_SSL_KEYFILE and MCP_SSL_CERTFILE to be set") - - if ssl_cert_reqs in (ssl.CERT_REQUIRED, ssl.CERT_OPTIONAL) and not ssl_ca_certs: - raise RuntimeError(f"MCP_SSL_CERT_REQS={ssl_cert_reqs} requires MCP_SSL_CA_CERTS to be set for client certificate verification") - - cert_reqs_name = {0: "CERT_NONE", 1: "CERT_OPTIONAL", 2: "CERT_REQUIRED"}.get(ssl_cert_reqs, "UNKNOWN") - logger.info("SSL/TLS enabled: cert_reqs=%s (%d), keyfile=%s, certfile=%s, ca_certs=%s", cert_reqs_name, ssl_cert_reqs, ssl_keyfile, ssl_certfile, ssl_ca_certs or "None") - - if ssl_cert_reqs == ssl.CERT_REQUIRED: - logger.info("mTLS ENABLED - Client certificates are REQUIRED") - elif ssl_cert_reqs == ssl.CERT_OPTIONAL: - logger.info("mTLS OPTIONAL - Client 
certificates are optional") - else: - logger.info("Standard TLS - No client certificate verification") - - # 11) Create MCP server with SSE transport - server = Server(mcp_server.server_name) - - # Register list_tools handler (from MCPServer) - @server.list_tools() - async def list_tools(): - """List available tools.""" - tools = [] - for tool_name, func in TOOLS_REGISTRY.items(): - if hasattr(func, "_mcp_tool"): - tool_obj = func._mcp_tool - if hasattr(tool_obj, "name") and hasattr(tool_obj, "description"): - tools.append(tool_obj) - return tools - - # Register call_tool handler (delegates to MCPServer's implementation) - @server.call_tool() - async def call_tool(name: str, arguments: Dict[str, Any]): - """Execute a tool.""" - # Reuse the MCPServer's call_tool logic by creating a temporary instance - # and invoking its internal handler - from mcp.types import TextContent - - try: - # Use the existing mcp_server instance - handler = mcp_server._MCPServer__call_tool if hasattr(mcp_server, "_MCPServer__call_tool") else None - - # Since we can't easily access the private handler, we'll replicate the logic - # by importing and calling the tool directly - resolved = name if name in TOOLS_REGISTRY else name - if resolved not in TOOLS_REGISTRY: - raise ValueError(f"Tool not found: {name}") - - func = TOOLS_REGISTRY[resolved] - result = await func(**arguments) - - if isinstance(result, str): - return [TextContent(type="text", text=result)] - elif isinstance(result, dict): - import json - - return [TextContent(type="text", text=json.dumps(result, indent=2))] - else: - return result - except Exception as e: - logger.error("Error in call_tool for '%s': %s", name, e) - return [TextContent(type="text", text=f"Tool execution error: {str(e)}")] - - # 12) Create SSE transport - transport = SseServerTransport(msg_path) - opts = server.create_initialization_options() - - async def handle_sse(request: Request): - """Handle SSE connections.""" - async with 
transport.connect_sse(request.scope, request.receive, request._send) as streams: - await server.run(streams[0], streams[1], opts) - return Response() - - async def health(request: Request): - """Health check endpoint.""" - return PlainTextResponse("OK") - - # 13) Create Starlette application - app = Starlette( - routes=[ - Route(sse_path, handle_sse, methods=["GET"]), - Mount(msg_path, app=transport.handle_post_message), - Route(health_path, health, methods=["GET"]), - ], - middleware=[ - Middleware( - AuthMiddleware, - auth=cfg.get("server", {}).get("auth"), - health_path=health_path, - ) - ], - ) - - # 14) Configure uvicorn with SSL/TLS settings - uvicorn_config_kwargs = { - "app": app, - "host": host, - "port": port, - "log_level": "info", - } - - if ssl_enabled: - uvicorn_config_kwargs.update( - { - "ssl_keyfile": ssl_keyfile, - "ssl_certfile": ssl_certfile, - "ssl_keyfile_password": ssl_keyfile_password, - "ssl_cert_reqs": ssl_cert_reqs, - "ssl_ca_certs": ssl_ca_certs, - } - ) - - uvicorn_cfg = uvicorn.Config(**uvicorn_config_kwargs) - - # 15) Start server - protocol = "https" if ssl_enabled else "http" - logger.info("Starting MCP (SSE) on %s://%s:%s with mTLS support", protocol, host, port) + # Create SSL-capable MCP server instance + from chuk_mcp_runtime.common.mcp_tool_decorator import TOOLS_REGISTRY + mcp_server = SSLCapableMCPServer(cfg, tools_registry=TOOLS_REGISTRY) + logger.debug("SSL-capable MCP server '%s' starting", getattr(mcp_server, "server_name", "local")) + # Serve forever try: - await uvicorn.Server(uvicorn_cfg).serve() + await mcp_server.serve() finally: if SERVER: await SERVER.shutdown() @@ -305,7 +559,6 @@ async def main_async(default_config: Optional[Dict[str, Any]] = None) -> None: except Exception as exc: logger.error("Error starting CHUK MCP server with mTLS: %s", exc, exc_info=True) import sys - sys.exit(1) @@ -322,9 +575,8 @@ def main(default_config: Optional[Dict[str, Any]] = None) -> None: except Exception as exc: 
logger.error("Uncaught exception: %s", exc, exc_info=True) import sys - sys.exit(1) if __name__ == "__main__": # pragma: no cover - main() \ No newline at end of file + main() From 66be2412188e184327e6b85f721e7c74b35619fa Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Fri, 3 Oct 2025 16:40:18 -0600 Subject: [PATCH 04/35] fix: updated plugin server runtime.py to support mTLS. removed chuck-mcp-runtime Signed-off-by: Teryl Taylor --- mcpgateway/plugins/framework/constants.py | 5 + .../plugins/framework/external/mcp/client.py | 17 +- .../framework/external/mcp/server/runtime.py | 431 ++++++------- .../external/mcp/server/runtime_mtls.py | 582 ------------------ .../framework/external/mcp/server/server.py | 11 +- mcpgateway/plugins/framework/manager.py | 11 +- mcpgateway/plugins/framework/models.py | 183 ++++-- 7 files changed, 370 insertions(+), 870 deletions(-) delete mode 100644 mcpgateway/plugins/framework/external/mcp/server/runtime_mtls.py diff --git a/mcpgateway/plugins/framework/constants.py b/mcpgateway/plugins/framework/constants.py index 7b446624f..155679c57 100644 --- a/mcpgateway/plugins/framework/constants.py +++ b/mcpgateway/plugins/framework/constants.py @@ -32,3 +32,8 @@ TOOL_METADATA = "tool" GATEWAY_METADATA = "gateway" + +# MCP Plugin Server Runtime constants +MCP_SERVER_NAME = "MCP Plugin Server" +MCP_SERVER_INSTRUCTIONS = "External plugin server for MCP Gateway" +GET_PLUGIN_CONFIGS = "get_plugin_configs" diff --git a/mcpgateway/plugins/framework/external/mcp/client.py b/mcpgateway/plugins/framework/external/mcp/client.py index 3f43a4d87..8d354646d 100644 --- a/mcpgateway/plugins/framework/external/mcp/client.py +++ b/mcpgateway/plugins/framework/external/mcp/client.py @@ -30,7 +30,7 @@ from mcpgateway.plugins.framework.errors import convert_exception_to_error, PluginError from mcpgateway.plugins.framework.models import ( HookType, - MCPTransportTLSConfig, + MCPClientTLSConfig, PluginConfig, PluginContext, PluginErrorModel, @@ -147,7 +147,7 @@ 
async def __connect_to_http_server(self, uri: str) -> None: PluginError: if there is an external connection error after all retries. """ plugin_tls = self._config.mcp.tls if self._config and self._config.mcp else None - tls_config = plugin_tls or MCPTransportTLSConfig.from_env() + tls_config = plugin_tls or MCPClientTLSConfig.from_env() def _tls_httpx_client_factory( headers: Optional[dict[str, str]] = None, @@ -177,11 +177,11 @@ def _tls_httpx_client_factory( if not tls_config.check_hostname: ssl_context.check_hostname = False - if tls_config.client_cert: + if tls_config.certfile: ssl_context.load_cert_chain( - certfile=tls_config.client_cert, - keyfile=tls_config.client_key, - password=tls_config.client_key_password, + certfile=tls_config.certfile, + keyfile=tls_config.keyfile, + password=tls_config.keyfile_password, ) kwargs["verify"] = ssl_context @@ -189,11 +189,12 @@ def _tls_httpx_client_factory( raise PluginError(error=PluginErrorModel(message=f"Failed configuring TLS for external plugin: {exc}", plugin_name=self.name)) from exc return httpx.AsyncClient(**kwargs) + max_retries = 3 base_delay = 1.0 for attempt in range(max_retries): - + try: client_factory = _tls_httpx_client_factory if tls_config else None async with AsyncExitStack() as temp_stack: @@ -206,7 +207,7 @@ def _tls_httpx_client_factory( response = await session.list_tools() tools = response.tools logger.info("Successfully connected to plugin MCP server with tools: %s", " ".join([tool.name for tool in tools])) - + client_factory = _tls_httpx_client_factory if tls_config else None streamable_client = streamablehttp_client(uri, httpx_client_factory=client_factory) if client_factory else streamablehttp_client(uri) http_transport = await self._exit_stack.enter_async_context(streamable_client) diff --git a/mcpgateway/plugins/framework/external/mcp/server/runtime.py b/mcpgateway/plugins/framework/external/mcp/server/runtime.py index a35a90556..32a28cdd5 100644 --- 
a/mcpgateway/plugins/framework/external/mcp/server/runtime.py +++ b/mcpgateway/plugins/framework/external/mcp/server/runtime.py @@ -1,10 +1,18 @@ +#!/usr/bin/env python3 # -*- coding: utf-8 -*- """Location: ./mcpgateway/plugins/framework/external/mcp/server/runtime.py Copyright 2025 SPDX-License-Identifier: Apache-2.0 -Authors: Fred Araujo +Authors: Fred Araujo, Teryl Taylor -Runtime MCP server for external plugins. +MCP Plugin Runtime using FastMCP with SSL/TLS support. + +This runtime does the following: +- Uses FastMCP from the MCP Python SDK +- Supports both mTLS and non-mTLS configurations +- Reads configuration from PLUGINS_SERVER_* environment variables or uses configurations + the plugin config.yaml +- Implements all plugin hook tools (get_plugin_configs, tool_pre_invoke, etc.) """ # Standard @@ -13,8 +21,8 @@ from typing import Any, Dict # Third-Party -from chuk_mcp_runtime.common.mcp_tool_decorator import mcp_tool -from chuk_mcp_runtime.entry import main_async +from mcp.server.fastmcp import FastMCP +import uvicorn # First-Party from mcpgateway.plugins.framework import ( @@ -34,270 +42,229 @@ ToolPreInvokePayload, ToolPreInvokeResult, ) +from mcpgateway.plugins.framework.constants import ( + GET_PLUGIN_CONFIG, + GET_PLUGIN_CONFIGS, + MCP_SERVER_INSTRUCTIONS, + MCP_SERVER_NAME, +) +from mcpgateway.plugins.framework.models import HookType, MCPServerConfig logger = logging.getLogger(__name__) -SERVER = None - - -@mcp_tool(name="get_plugin_configs", description="Get the plugin configurations installed on the server") -async def get_plugin_configs() -> list[dict]: - """Return a list of plugin configurations for plugins currently installed on the MCP SERVER. - - Returns: - A list of plugin configurations. 
- """ - return await SERVER.get_plugin_configs() - - -@mcp_tool(name="get_plugin_config", description="Get the plugin configuration installed on the server given a plugin name") -async def get_plugin_config(name: str) -> dict: - """Return a plugin configuration give a plugin name. - - Args: - name: The name of the plugin of which to return the plugin configuration. - - Returns: - A list of plugin configurations. - """ - return await SERVER.get_plugin_config(name) - - -@mcp_tool(name="prompt_pre_fetch", description="Execute prompt prefetch hook for a plugin") -async def prompt_pre_fetch(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: - """Invoke the prompt pre fetch hook for a particular plugin. - - Args: - plugin_name: The name of the plugin to execute. - payload: The prompt name and arguments to be analyzed. - context: The contextual and state information required for the execution of the hook. - - Raises: - ValueError: If unable to retrieve a plugin. - - Returns: - The transformed or filtered response from the plugin hook. - """ - - def prompt_pre_fetch_func(plugin: Plugin, payload: PromptPrehookPayload, context: PluginContext) -> PromptPrehookResult: - """Wrapper function for hook. - - Args: - plugin: The plugin instance. - payload: The tool name and arguments to be analyzed. - context: the contextual and state information required for the execution of the hook. - - Returns: - The transformed or filtered response from the plugin hook. - """ - return plugin.prompt_pre_fetch(payload, context) - - return await SERVER.invoke_hook(PromptPrehookPayload, prompt_pre_fetch_func, plugin_name, payload, context) - - -@mcp_tool(name="prompt_post_fetch", description="Execute prompt postfetch hook for a plugin") -async def prompt_post_fetch(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: - """Call plugin's prompt post-fetch hook. - - Args: - plugin_name: The name of the plugin to execute. 
- payload: The prompt payload to be analyzed. - context: Contextual information about the hook call. - - Raises: - ValueError: if unable to retrieve a plugin. - - Returns: - The result of the plugin execution. +SERVER: ExternalPluginServer = None + + +class SSLCapableFastMCP(FastMCP): + """FastMCP server with SSL/TLS support using MCPServerConfig.""" + + def __init__(self, server_config: MCPServerConfig, *args, **kwargs): + # Load server config from environment + + self.server_config = server_config + # Override FastMCP settings with our server config + if "host" not in kwargs: + kwargs["host"] = self.server_config.host + if "port" not in kwargs: + kwargs["port"] = self.server_config.port + + super().__init__(*args, **kwargs) + + def _get_ssl_config(self) -> dict: + """Build SSL configuration for uvicorn from MCPServerConfig.""" + ssl_config = {} + + if self.server_config.tls: + tls = self.server_config.tls + if tls.keyfile and tls.certfile: + ssl_config["ssl_keyfile"] = tls.keyfile + ssl_config["ssl_certfile"] = tls.certfile + + if tls.ca_bundle: + ssl_config["ssl_ca_certs"] = tls.ca_bundle + + ssl_config["ssl_cert_reqs"] = tls.ssl_cert_reqs + + if tls.keyfile_password: + ssl_config["ssl_keyfile_password"] = tls.keyfile_password + + logger.info("SSL/TLS enabled (mTLS)") + logger.info(f" Key: {ssl_config['ssl_keyfile']}") + logger.info(f" Cert: {ssl_config['ssl_certfile']}") + if "ssl_ca_certs" in ssl_config: + logger.info(f" CA: {ssl_config['ssl_ca_certs']}") + logger.info(f" Client cert required: {ssl_config['ssl_cert_reqs'] == 2}") + else: + logger.warning("TLS config present but keyfile/certfile not configured") + else: + logger.info("SSL/TLS not enabled") + + return ssl_config + + async def run_streamable_http_async(self) -> None: + """Run the server using StreamableHTTP transport with optional SSL/TLS.""" + starlette_app = self.streamable_http_app() + + # Build uvicorn config with optional SSL + ssl_config = self._get_ssl_config() + config_kwargs = { + "app": 
starlette_app, + "host": self.settings.host, + "port": self.settings.port, + "log_level": self.settings.log_level.lower(), + } + config_kwargs.update(ssl_config) + + logger.info(f"Starting plugin server on {self.settings.host}:{self.settings.port}") + config = uvicorn.Config(**config_kwargs) + server = uvicorn.Server(config) + await server.serve() + + +async def run(): + """Run the external plugin server with FastMCP. + + Reads configuration from PLUGINS_SERVER_* environment variables: + - PLUGINS_SERVER_HOST: Server host (default: 0.0.0.0) + - PLUGINS_SERVER_PORT: Server port (default: 8000) + - PLUGINS_SERVER_SSL_ENABLED: Enable SSL/TLS (true/false) + - PLUGINS_SERVER_SSL_KEYFILE: Path to server private key + - PLUGINS_SERVER_SSL_CERTFILE: Path to server certificate + - PLUGINS_SERVER_SSL_CA_CERTS: Path to CA bundle for client verification + - PLUGINS_SERVER_SSL_CERT_REQS: Client cert requirement (0=NONE, 1=OPTIONAL, 2=REQUIRED) """ + global SERVER - def prompt_post_fetch_func(plugin: Plugin, payload: PromptPosthookPayload, context: PluginContext) -> PromptPosthookResult: - """Wrapper function for hook. + # Initialize plugin server + SERVER = ExternalPluginServer() - Args: - plugin: The plugin instance. - payload: The tool name and arguments to be analyzed. - context: the contextual and state information required for the execution of the hook. + if not await SERVER.initialize(): + logger.error("Failed to initialize plugin server") + return - Returns: - The transformed or filtered response from the plugin hook. 
- """ - return plugin.prompt_post_fetch(payload, context) + try: + # Create FastMCP server with SSL support + mcp = SSLCapableFastMCP( + server_config=SERVER.get_server_config(), + name=MCP_SERVER_NAME, + instructions=MCP_SERVER_INSTRUCTIONS, + ) - return await SERVER.invoke_hook(PromptPosthookPayload, prompt_post_fetch_func, plugin_name, payload, context) + # Register plugin hook tools + @mcp.tool(name=GET_PLUGIN_CONFIGS) + async def get_plugin_configs() -> list[dict]: + """Get the plugin configurations installed on the server.""" + return await SERVER.get_plugin_configs() + @mcp.tool(name=GET_PLUGIN_CONFIG) + async def get_plugin_config(name: str) -> dict: + """Get the plugin configuration for a specific plugin. -@mcp_tool(name="tool_pre_invoke", description="Execute tool pre-invoke hook for a plugin") -async def tool_pre_invoke(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: - """Invoke the tool pre-invoke hook for a particular plugin. + Args: + name: The name of the plugin + """ + return await SERVER.get_plugin_config(name) - Args: - plugin_name: The name of the plugin to execute. - payload: The tool name and arguments to be analyzed. - context: The contextual and state information required for the execution of the hook. + @mcp.tool(name=HookType.PROMPT_PRE_FETCH.value) + async def prompt_pre_fetch(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: + """Execute prompt prefetch hook for a plugin. - Raises: - ValueError: If unable to retrieve a plugin. + Args: + plugin_name: The name of the plugin to execute + payload: The prompt name and arguments to be analyzed + context: Contextual information required for execution + """ - Returns: - The transformed or filtered response from the plugin hook. 
- """ + def prompt_pre_fetch_func(plugin: Plugin, payload: PromptPrehookPayload, context: PluginContext) -> PromptPrehookResult: + return plugin.prompt_pre_fetch(payload, context) - def tool_pre_invoke_func(plugin: Plugin, payload: ToolPreInvokePayload, context: PluginContext) -> ToolPreInvokeResult: - """Wrapper function for hook. + return await SERVER.invoke_hook(PromptPrehookPayload, prompt_pre_fetch_func, plugin_name, payload, context) - Args: - plugin: The plugin instance. - payload: The tool name and arguments to be analyzed. - context: the contextual and state information required for the execution of the hook. + @mcp.tool(name=HookType.PROMPT_POST_FETCH.value) + async def prompt_post_fetch(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: + """Execute prompt postfetch hook for a plugin. - Returns: - The transformed or filtered response from the plugin hook. - """ - return plugin.tool_pre_invoke(payload, context) + Args: + plugin_name: The name of the plugin to execute + payload: The prompt payload to be analyzed + context: Contextual information + """ - return await SERVER.invoke_hook(ToolPreInvokePayload, tool_pre_invoke_func, plugin_name, payload, context) + def prompt_post_fetch_func(plugin: Plugin, payload: PromptPosthookPayload, context: PluginContext) -> PromptPosthookResult: + return plugin.prompt_post_fetch(payload, context) + return await SERVER.invoke_hook(PromptPosthookPayload, prompt_post_fetch_func, plugin_name, payload, context) -@mcp_tool(name="tool_post_invoke", description="Execute tool post-invoke hook for a plugin") -async def tool_post_invoke(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: - """Invoke the tool post-invoke hook for a particular plugin. + @mcp.tool(name=HookType.TOOL_PRE_INVOKE.value) + async def tool_pre_invoke(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: + """Execute tool pre-invoke hook for a plugin. 
- Args: - plugin_name: The name of the plugin to execute. - payload: The tool name and arguments to be analyzed. - context: the contextual and state information required for the execution of the hook. + Args: + plugin_name: The name of the plugin to execute + payload: The tool name and arguments to be analyzed + context: Contextual information + """ - Raises: - ValueError: If unable to retrieve a plugin. + def tool_pre_invoke_func(plugin: Plugin, payload: ToolPreInvokePayload, context: PluginContext) -> ToolPreInvokeResult: + return plugin.tool_pre_invoke(payload, context) - Returns: - The transformed or filtered response from the plugin hook. - """ - - def tool_post_invoke_func(plugin: Plugin, payload: ToolPostInvokePayload, context: PluginContext) -> ToolPostInvokeResult: - """Wrapper function for hook. + return await SERVER.invoke_hook(ToolPreInvokePayload, tool_pre_invoke_func, plugin_name, payload, context) - Args: - plugin: The plugin instance. - payload: The tool name and arguments to be analyzed. - context: the contextual and state information required for the execution of the hook. + @mcp.tool(name=HookType.TOOL_POST_INVOKE.value) + async def tool_post_invoke(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: + """Execute tool post-invoke hook for a plugin. - Returns: - The transformed or filtered response from the plugin hook. 
- """ - return plugin.tool_post_invoke(payload, context) + Args: + plugin_name: The name of the plugin to execute + payload: The tool result to be analyzed + context: Contextual information + """ - return await SERVER.invoke_hook(ToolPostInvokePayload, tool_post_invoke_func, plugin_name, payload, context) + def tool_post_invoke_func(plugin: Plugin, payload: ToolPostInvokePayload, context: PluginContext) -> ToolPostInvokeResult: + return plugin.tool_post_invoke(payload, context) + return await SERVER.invoke_hook(ToolPostInvokePayload, tool_post_invoke_func, plugin_name, payload, context) -@mcp_tool(name="resource_pre_fetch", description="Execute resource prefetch hook for a plugin") -async def resource_pre_fetch(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: - """Invoke the resource pre fetch hook for a particular plugin. + @mcp.tool(name=HookType.RESOURCE_PRE_FETCH.value) + async def resource_pre_fetch(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: + """Execute resource prefetch hook for a plugin. - Args: - plugin_name: The name of the plugin to execute. - payload: The resource name and arguments to be analyzed. - context: The contextual and state information required for the execution of the hook. + Args: + plugin_name: The name of the plugin to execute + payload: The resource name and arguments to be analyzed + context: Contextual information + """ - Raises: - ValueError: If unable to retrieve a plugin. + def resource_pre_fetch_func(plugin: Plugin, payload: ResourcePreFetchPayload, context: PluginContext) -> ResourcePreFetchResult: + return plugin.resource_pre_fetch(payload, context) - Returns: - The transformed or filtered response from the plugin hook. 
- """ + return await SERVER.invoke_hook(ResourcePreFetchPayload, resource_pre_fetch_func, plugin_name, payload, context) - def resource_pre_fetch_func(plugin: Plugin, payload: ResourcePreFetchPayload, context: PluginContext) -> ResourcePreFetchResult: # pragma: no cover - """Wrapper function for hook. + @mcp.tool(name=HookType.RESOURCE_POST_FETCH.value) + async def resource_post_fetch(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: + """Execute resource postfetch hook for a plugin. - Args: - plugin: The plugin instance. - payload: The tool name and arguments to be analyzed. - context: the contextual and state information required for the execution of the hook. + Args: + plugin_name: The name of the plugin to execute + payload: The resource payload to be analyzed + context: Contextual information + """ - Returns: - The transformed or filtered response from the plugin hook. - """ - return plugin.resource_pre_fetch(payload, context) + def resource_post_fetch_func(plugin: Plugin, payload: ResourcePostFetchPayload, context: PluginContext) -> ResourcePostFetchResult: + return plugin.resource_post_fetch(payload, context) - return await SERVER.invoke_hook(ResourcePreFetchPayload, resource_pre_fetch_func, plugin_name, payload, context) + return await SERVER.invoke_hook(ResourcePostFetchPayload, resource_post_fetch_func, plugin_name, payload, context) + # Run with streamable-http transport + logger.info("Starting MCP plugin server with FastMCP") + await mcp.run_streamable_http_async() -@mcp_tool(name="resource_post_fetch", description="Execute resource postfetch hook for a plugin") -async def resource_post_fetch(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: - """Call plugin's resource post-fetch hook. + except Exception: + logger.exception("Caught error while executing plugin server") + raise + finally: + await SERVER.shutdown() - Args: - plugin_name: The name of the plugin to execute. 
- payload: The resource payload to be analyzed. - context: Contextual information about the hook call. - Raises: - ValueError: if unable to retrieve a plugin. - - Returns: - The result of the plugin execution. - """ - - def resource_post_fetch_func(plugin: Plugin, payload: ResourcePostFetchPayload, context: PluginContext) -> ResourcePostFetchResult: # pragma: no cover - """Wrapper function for hook. - - Args: - plugin: The plugin instance. - payload: The tool name and arguments to be analyzed. - context: the contextual and state information required for the execution of the hook. - - Returns: - The transformed or filtered response from the plugin hook. - """ - return plugin.resource_post_fetch(payload, context) - - return await SERVER.invoke_hook(ResourcePostFetchPayload, resource_post_fetch_func, plugin_name, payload, context) - - -async def run(): # pragma: no cover - """Run the external plugin SERVER. - - Automatically detects mTLS configuration from environment variables and - delegates to runtime_mtls.py if mTLS is enabled. - - Environment Variables (mTLS detection): - MCP_SSL_ENABLED: Enable SSL/TLS (true/false) - MCP_SSL_KEYFILE: Path to server private key - MCP_SSL_CERTFILE: Path to server certificate - MCP_SSL_CA_CERTS: Path to CA bundle (optional) - MCP_SSL_CERT_REQS: Certificate verification mode (0=NONE, 1=OPTIONAL, 2=REQUIRED) - - Raises: - Exception: if unnable to run the plugin SERVER. 
- """ - import os - - # Check if mTLS is enabled via environment variables - ssl_enabled = os.getenv("MCP_SSL_ENABLED", "").lower() == "true" - - if ssl_enabled: - # Delegate to mTLS runtime - logger.info("mTLS enabled - delegating to runtime_mtls") - from mcpgateway.plugins.framework.external.mcp.server.runtime_mtls import run_with_mtls - - # Set the global SERVER for runtime_mtls to use - global SERVER # pylint: disable=global-statement - await run_with_mtls() - else: - # Standard runtime without mTLS - global SERVER # pylint: disable=global-statement - SERVER = ExternalPluginServer() - if await SERVER.initialize(): - try: - await main_async() - except Exception: - logger.exception("Caught error while executing plugin server") - raise - finally: - await SERVER.shutdown() - - -if __name__ == "__main__": # pragma: no cover - # launch +if __name__ == "__main__": asyncio.run(run()) diff --git a/mcpgateway/plugins/framework/external/mcp/server/runtime_mtls.py b/mcpgateway/plugins/framework/external/mcp/server/runtime_mtls.py deleted file mode 100644 index 3633cf1d9..000000000 --- a/mcpgateway/plugins/framework/external/mcp/server/runtime_mtls.py +++ /dev/null @@ -1,582 +0,0 @@ -# -*- coding: utf-8 -*- -"""Location: ./mcpgateway/plugins/framework/external/mcp/server/runtime_mtls.py -Copyright 2025 -SPDX-License-Identifier: Apache-2.0 -Authors: Teryl Taylor - -Runtime MCP server for external plugins with mTLS support. - -This module extends MCPServer from chuk_mcp_runtime to add SSL/TLS support -by reading UVICORN_SSL_* environment variables and injecting them into the -uvicorn.Config creation. 
-""" - -# Standard -import asyncio -import contextlib -import logging -import os -import ssl -from typing import Any, AsyncIterator, Dict, Optional - -# Third-Party -import uvicorn -from dotenv import load_dotenv -from mcp.server.streamable_http_manager import StreamableHTTPSessionManager -from starlette.applications import Starlette -from starlette.middleware import Middleware -from starlette.requests import Request -from starlette.responses import PlainTextResponse, Response -from starlette.routing import Mount, Route -from starlette.types import Scope, Receive, Send - -# chuk_mcp_runtime imports -from chuk_mcp_runtime.common.mcp_tool_decorator import initialize_tool_registry -from chuk_mcp_runtime.server.config_loader import find_project_root, load_config -from chuk_mcp_runtime.server.event_store import InMemoryEventStore -from chuk_mcp_runtime.server.logging_config import configure_logging, get_logger -from chuk_mcp_runtime.server.server import MCPServer, AuthMiddleware -from chuk_mcp_runtime.server.server_registry import ServerRegistry -from chuk_mcp_runtime.session.native_session_management import create_mcp_session_manager -from chuk_mcp_runtime.tools import register_artifacts_tools, register_session_tools - -# First-Party -from mcpgateway.plugins.framework import ExternalPluginServer - -load_dotenv() - -logger = get_logger(__name__) - -SERVER: Optional[ExternalPluginServer] = None - - -class SSLCapableMCPServer(MCPServer): - """MCPServer with SSL/TLS support via UVICORN_SSL_* environment variables. - - This class extends the standard MCPServer to support SSL/TLS configuration - through environment variables, enabling mTLS for external plugin servers. 
- - Environment Variables: - UVICORN_SSL_KEYFILE: Path to server private key - UVICORN_SSL_CERTFILE: Path to server certificate - UVICORN_SSL_KEYFILE_PASSWORD: Optional password for encrypted key - UVICORN_SSL_CA_CERTS: Path to CA bundle for verifying client certificates - UVICORN_SSL_CERT_REQS: Certificate verification mode: - - 0 (CERT_NONE): No client certificate required (default TLS) - - 1 (CERT_OPTIONAL): Client certificate optional - - 2 (CERT_REQUIRED): Client certificate required (mTLS) - """ - - def _get_ssl_config(self) -> Dict[str, Any]: - """Read SSL configuration from UVICORN_SSL_* environment variables. - - Returns: - Dictionary of SSL configuration parameters for uvicorn.Config - - Raises: - RuntimeError: If SSL is enabled but required files are missing - """ - ssl_config = {} - - ssl_keyfile = os.getenv("UVICORN_SSL_KEYFILE") - ssl_certfile = os.getenv("UVICORN_SSL_CERTFILE") - - if not ssl_keyfile and not ssl_certfile: - return ssl_config - - # SSL is enabled - if not ssl_keyfile or not ssl_certfile: - raise RuntimeError( - "Both UVICORN_SSL_KEYFILE and UVICORN_SSL_CERTFILE must be set " - "when enabling SSL/TLS" - ) - - ssl_config["ssl_keyfile"] = ssl_keyfile - ssl_config["ssl_certfile"] = ssl_certfile - - # Optional password for encrypted key - if os.getenv("UVICORN_SSL_KEYFILE_PASSWORD"): - ssl_config["ssl_keyfile_password"] = os.getenv("UVICORN_SSL_KEYFILE_PASSWORD") - - # Client certificate verification - ssl_cert_reqs_str = os.getenv("UVICORN_SSL_CERT_REQS", "0") - try: - ssl_cert_reqs = int(ssl_cert_reqs_str) - except ValueError: - logger.warning( - "Invalid UVICORN_SSL_CERT_REQS value '%s', defaulting to 0 (CERT_NONE)", - ssl_cert_reqs_str - ) - ssl_cert_reqs = ssl.CERT_NONE - - ssl_config["ssl_cert_reqs"] = ssl_cert_reqs - - # CA certificates for client verification - ssl_ca_certs = os.getenv("UVICORN_SSL_CA_CERTS") - if ssl_cert_reqs in (ssl.CERT_REQUIRED, ssl.CERT_OPTIONAL): - if not ssl_ca_certs: - raise RuntimeError( - 
f"UVICORN_SSL_CERT_REQS={ssl_cert_reqs} requires UVICORN_SSL_CA_CERTS " - "to be set for client certificate verification" - ) - - if ssl_ca_certs: - ssl_config["ssl_ca_certs"] = ssl_ca_certs - - # Log SSL configuration - cert_reqs_name = { - 0: "CERT_NONE", - 1: "CERT_OPTIONAL", - 2: "CERT_REQUIRED" - }.get(ssl_cert_reqs, "UNKNOWN") - - logger.info( - "SSL/TLS enabled: cert_reqs=%s (%d), keyfile=%s, certfile=%s, ca_certs=%s", - cert_reqs_name, - ssl_cert_reqs, - ssl_keyfile, - ssl_certfile, - ssl_ca_certs or "None" - ) - - if ssl_cert_reqs == ssl.CERT_REQUIRED: - logger.info("mTLS ENABLED - Client certificates are REQUIRED") - elif ssl_cert_reqs == ssl.CERT_OPTIONAL: - logger.info("mTLS OPTIONAL - Client certificates are optional") - else: - logger.info("Standard TLS - No client certificate verification") - - return ssl_config - - async def serve(self, custom_handlers: Optional[Dict[str, Any]] = None) -> None: - """Boot the MCP server with SSL/TLS support and serve forever. - - This overrides the parent's serve() method to inject SSL configuration - into uvicorn.Config creation. All other functionality (tool registration, - session management, etc.) is inherited from the parent MCPServer. 
- - Args: - custom_handlers: Optional custom handlers (e.g., proxy text handler) - """ - # Get SSL configuration from environment variables - ssl_config = self._get_ssl_config() - is_ssl_enabled = bool(ssl_config) - - # Reuse parent's setup for everything except uvicorn Config - await self._setup_artifact_store() - - if not self.tools_registry: - self.tools_registry = await self._import_tools_registry() - - await initialize_tool_registry() - from chuk_mcp_runtime.common.tool_naming import update_naming_maps - update_naming_maps() - - # Create server and register handlers (uses parent's implementation) - from mcp.server import Server - from mcp.server.sse import SseServerTransport - from mcp.types import TextContent, ImageContent, EmbeddedResource - import json - from inspect import isasyncgen - - server = Server(self.server_name) - - # Register list_tools (from parent's implementation) - @server.list_tools() - async def list_tools(): - """List available tools with robust error handling.""" - try: - self.logger.info( - "list_tools called - %d tools total", len(self.tools_registry) - ) - - tools = [] - for tool_name, func in self.tools_registry.items(): - try: - if hasattr(func, "_mcp_tool"): - tool_obj = func._mcp_tool - if hasattr(tool_obj, "name") and hasattr(tool_obj, "description"): - tools.append(tool_obj) - self.logger.debug("Added tool to list: %s", tool_obj.name) - else: - self.logger.warning( - "Tool %s has invalid _mcp_tool object: %s", - tool_name, tool_obj - ) - else: - self.logger.warning( - "Tool %s missing _mcp_tool attribute", tool_name - ) - except Exception as e: - self.logger.error("Error processing tool %s: %s", tool_name, e) - continue - - self.logger.info("Returning %d valid tools", len(tools)) - return tools - except Exception as e: - self.logger.error("Error in list_tools: %s", e) - return [] - - # Register call_tool (from parent's implementation) - @server.call_tool() - async def call_tool(name: str, arguments: Dict[str, Any]): - """Execute 
a tool with native session management.""" - try: - from chuk_mcp_runtime.server.server import parse_tool_arguments - from chuk_mcp_runtime.common.tool_naming import resolve_tool_name - from chuk_mcp_runtime.session.native_session_management import SessionContext - - # Fix concatenated JSON in arguments - original_args = arguments - if arguments: - if isinstance(arguments, (str, dict)): - arguments = parse_tool_arguments(arguments) - if arguments != original_args: - self.logger.info( - "Fixed concatenated JSON arguments for '%s': %s -> %s", - name, original_args, arguments - ) - - self.logger.debug( - "call_tool called with name='%s', arguments=%s", name, arguments - ) - - # Tool name resolution - resolved = name if name in self.tools_registry else resolve_tool_name(name) - if resolved not in self.tools_registry: - matches = [ - k for k in self.tools_registry - if k.endswith(f"_{name}") or k.endswith(f".{name}") - ] - if len(matches) == 1: - resolved = matches[0] - - if resolved not in self.tools_registry: - raise ValueError(f"Tool not found: {name}") - - func = self.tools_registry[resolved] - self.logger.debug("Resolved tool '%s' to function: %s", name, func) - - # Native session injection - arguments = await self._inject_session_context(resolved, arguments) - - # Execute within session context - async with SessionContext( - self.session_manager, - session_id=arguments.get("session_id"), - auto_create=True - ) as session_id: - self.logger.debug( - "Executing tool '%s' in session %s", resolved, session_id - ) - result = await self._execute_tool_with_timeout(func, resolved, arguments) - self.logger.debug( - "Tool execution completed, result type: %s", type(result) - ) - - # Handle streaming results - if isasyncgen(result): - self.logger.debug( - "Tool returned async generator, collecting chunks for '%s'", - resolved - ) - collected_chunks = [] - chunk_count = 0 - - try: - async for part in result: - chunk_count += 1 - self.logger.debug( - "Collecting streaming chunk 
%d for '%s'", - chunk_count, resolved - ) - - if isinstance(part, (TextContent, ImageContent, EmbeddedResource)): - collected_chunks.append(part) - elif isinstance(part, str): - collected_chunks.append(TextContent(type="text", text=part)) - elif isinstance(part, dict) and "delta" in part: - collected_chunks.append( - TextContent(type="text", text=part["delta"]) - ) - else: - collected_chunks.append( - TextContent(type="text", text=str(part)) - ) - except Exception as e: - self.logger.error( - "Error collecting streaming chunks for '%s': %s", - resolved, e - ) - return [ - TextContent( - type="text", - text=f"Streaming error: {str(e)}" - ) - ] - - self.logger.info( - "Collected %d streaming chunks for '%s'", - len(collected_chunks), resolved - ) - result = collected_chunks - - # Wrap result if needed - if not isinstance(result, dict) or "session_id" not in result: - if isinstance(result, dict) and not ( - "content" in result and "isError" in result - ): - result = { - "session_id": session_id, - "content": result, - "isError": False - } - elif isinstance(result, str): - result = { - "session_id": session_id, - "content": result, - "isError": False - } - - # Format response - if isinstance(result, list) and all( - isinstance(r, (TextContent, ImageContent, EmbeddedResource)) - for r in result - ): - return result - elif isinstance(result, str): - return [TextContent(type="text", text=result)] - else: - return [TextContent(type="text", text=json.dumps(result, indent=2))] - - except Exception as e: - self.logger.error("Error in call_tool for '%s': %s", name, e) - return [TextContent(type="text", text=f"Tool execution error: {str(e)}")] - - # Transport setup with SSL support - opts = server.create_initialization_options() - mode = self.config.get("server", {}).get("type", "stdio") - - if mode == "stdio": - self.logger.info( - "Starting MCP (stdio) - global timeout %.1fs", self.tool_timeout - ) - from mcp.server.stdio import stdio_server - async with stdio_server() as (r, 
w): - await server.run(r, w, opts) - - elif mode == "sse": - cfg = self.config.get("sse", {}) - host, port = cfg.get("host", "0.0.0.0"), cfg.get("port", 8000) # nosec B104 - sse_path, msg_path, health_path = ( - cfg.get("sse_path", "/sse"), - cfg.get("message_path", "/messages/"), - cfg.get("health_path", "/health") - ) - transport = SseServerTransport(msg_path) - - async def _handle_sse(request: Request): - async with transport.connect_sse( - request.scope, request.receive, request._send - ) as streams: - await server.run(streams[0], streams[1], opts) - return Response() - - async def health(request: Request): - return PlainTextResponse("OK") - - app = Starlette( - routes=[ - Route(sse_path, _handle_sse, methods=["GET"]), - Mount(msg_path, app=transport.handle_post_message), - Route(health_path, health, methods=["GET"]) - ], - middleware=[ - Middleware( - AuthMiddleware, - auth=self.config.get("server", {}).get("auth"), - health_path=health_path - ) - ] - ) - - protocol = "https" if is_ssl_enabled else "http" - self.logger.info( - "Starting MCP (SSE) on %s://%s:%s - global timeout %.1fs", - protocol, host, port, self.tool_timeout - ) - - # Create uvicorn Config with SSL support - config_kwargs = {"app": app, "host": host, "port": port, "log_level": "info"} - config_kwargs.update(ssl_config) - - await uvicorn.Server(uvicorn.Config(**config_kwargs)).serve() - - elif mode == "streamable-http": - self.logger.info("Starting MCP server over streamable-http") - - streamhttp_config = self.config.get("streamable-http", {}) - host = streamhttp_config.get("host", "127.0.0.1") - port = streamhttp_config.get("port", 3000) - mcp_path = streamhttp_config.get("mcp_path", "/mcp") - json_response = streamhttp_config.get("json_response", True) - stateless = streamhttp_config.get("stateless", True) - - event_store = None if stateless else InMemoryEventStore() - - session_manager = StreamableHTTPSessionManager( - app=server, - event_store=event_store, - stateless=stateless, - 
json_response=json_response - ) - - async def handle_streamable_http(scope: Scope, receive: Receive, send: Send): - await session_manager.handle_request(scope, receive, send) - - async def health(request: Request): - return PlainTextResponse("OK") - - @contextlib.asynccontextmanager - async def lifespan(app: Starlette) -> AsyncIterator[None]: - async with session_manager.run(): - self.logger.info("Application started with StreamableHTTP session manager!") - try: - yield - finally: - self.logger.info("Application shutting down...") - - app = Starlette( - debug=True, - routes=[ - Mount(mcp_path, handle_streamable_http), - Route("/health", health, methods=["GET"]) - ], - middleware=[ - Middleware( - AuthMiddleware, - auth=self.config.get("server", {}).get("auth") - ) - ], - lifespan=lifespan - ) - - protocol = "https" if is_ssl_enabled else "http" - self.logger.info( - "Starting MCP (StreamableHTTP) on %s://%s:%s - global timeout %.1fs", - protocol, host, port, self.tool_timeout - ) - - # Create uvicorn Config with SSL support - config_kwargs = {"app": app, "host": host, "port": port, "log_level": "info"} - config_kwargs.update(ssl_config) - - await uvicorn.Server(uvicorn.Config(**config_kwargs)).serve() - - else: - raise ValueError(f"Unsupported server mode: {mode}") - - -async def run_with_mtls( - config_paths: Optional[list[str]] = None, - default_config: Optional[Dict[str, Any]] = None, -) -> None: - """Run the MCP runtime with mTLS support. - - Args: - config_paths: Optional list of configuration file paths. - default_config: Optional default configuration dictionary. - - Raises: - RuntimeError: If server initialization fails. 
- """ - global SERVER # pylint: disable=global-statement - - # Configuration and logging setup - cfg = load_config(config_paths, default_config) - configure_logging(cfg) - project_root = find_project_root() - logger.debug("Project root resolved to %s", project_root) - - # Initialize external plugin server - SERVER = ExternalPluginServer() - if not await SERVER.initialize(): - raise RuntimeError("Failed to initialize external plugin server") - - # Native session management initialization - session_manager = create_mcp_session_manager(cfg) - logger.info("Native session manager initialized for sandbox: %s", session_manager.sandbox_id) - - # Optional component bootstrap - if not os.getenv("NO_BOOTSTRAP"): - await ServerRegistry(project_root, cfg).load_server_components() - - # Tool registry initialization - await initialize_tool_registry() - - # Artifact management tools - await register_artifacts_tools(cfg) - logger.debug("Artifact tools registration completed") - - # Session management tools - session_cfg = cfg.copy() - session_cfg.setdefault("session_tools", {})["session_manager"] = session_manager - await register_session_tools(session_cfg) - logger.debug("Session tools registration completed") - - # Create SSL-capable MCP server instance - from chuk_mcp_runtime.common.mcp_tool_decorator import TOOLS_REGISTRY - mcp_server = SSLCapableMCPServer(cfg, tools_registry=TOOLS_REGISTRY) - logger.debug("SSL-capable MCP server '%s' starting", getattr(mcp_server, "server_name", "local")) - - # Serve forever - try: - await mcp_server.serve() - finally: - if SERVER: - await SERVER.shutdown() - - -async def main_async(default_config: Optional[Dict[str, Any]] = None) -> None: - """Async entry point for mTLS runtime. - - Args: - default_config: Optional default configuration dictionary. 
- """ - try: - import sys - - # Parse command line arguments for config file - argv = sys.argv[1:] - cfg_path = ( - os.getenv("CHUK_MCP_CONFIG_PATH") - or (argv[argv.index("-c") + 1] if "-c" in argv else None) - or (argv[argv.index("--config") + 1] if "--config" in argv else None) - or (argv[0] if argv else None) - ) - - await run_with_mtls(config_paths=[cfg_path] if cfg_path else None, default_config=default_config) - except Exception as exc: - logger.error("Error starting CHUK MCP server with mTLS: %s", exc, exc_info=True) - import sys - sys.exit(1) - - -def main(default_config: Optional[Dict[str, Any]] = None) -> None: - """Main entry point for mTLS runtime. - - Args: - default_config: Optional default configuration dictionary. - """ - try: - asyncio.run(main_async(default_config)) - except KeyboardInterrupt: - logger.warning("Received Ctrl-C β†’ shutting down") - except Exception as exc: - logger.error("Uncaught exception: %s", exc, exc_info=True) - import sys - sys.exit(1) - - -if __name__ == "__main__": # pragma: no cover - main() diff --git a/mcpgateway/plugins/framework/external/mcp/server/server.py b/mcpgateway/plugins/framework/external/mcp/server/server.py index c2d340e42..3772d03c1 100644 --- a/mcpgateway/plugins/framework/external/mcp/server/server.py +++ b/mcpgateway/plugins/framework/external/mcp/server/server.py @@ -26,6 +26,7 @@ from mcpgateway.plugins.framework.loader.config import ConfigLoader from mcpgateway.plugins.framework.manager import DEFAULT_PLUGIN_TIMEOUT, PluginManager from mcpgateway.plugins.framework.models import ( + MCPServerConfig, PluginContext, PluginErrorModel, PluginResult, @@ -165,6 +166,14 @@ async def initialize(self) -> bool: return self._plugin_manager.initialized async def shutdown(self) -> None: - """Shutdow the plugin server.""" + """Shutdown the plugin server.""" if self._plugin_manager.initialized: await self._plugin_manager.shutdown() + + def get_server_config(self) -> MCPServerConfig: + """Return the configuration 
for the plugin server. + + Returns: + A server configuration including host, port, and TLS information. + """ + return self._config.server_settings or MCPServerConfig.from_env() or MCPServerConfig() diff --git a/mcpgateway/plugins/framework/manager.py b/mcpgateway/plugins/framework/manager.py index 374d727c4..20005ab50 100644 --- a/mcpgateway/plugins/framework/manager.py +++ b/mcpgateway/plugins/framework/manager.py @@ -614,13 +614,7 @@ async def initialize(self) -> None: for plugin_config in plugins: try: # For disabled plugins, create a stub plugin without full instantiation - if plugin_config.mode == PluginMode.DISABLED: - # Create a minimal stub plugin for display purposes only - stub_plugin = Plugin(plugin_config) - self._registry.register(stub_plugin) - loaded_count += 1 - logger.info(f"Registered disabled plugin: {plugin_config.name} (display only, not instantiated)") - else: + if plugin_config.mode != PluginMode.DISABLED: # Fully instantiate enabled plugins plugin = await self._loader.load_and_instantiate_plugin(plugin_config) if plugin: @@ -629,6 +623,9 @@ async def initialize(self) -> None: logger.info(f"Loaded plugin: {plugin_config.name} (mode: {plugin_config.mode})") else: raise ValueError(f"Unable to instantiate plugin: {plugin_config.name}") + else: + logger.info(f"Plugin: {plugin_config.name} is disabled. Ignoring.") + except Exception as e: # Clean error message without stack trace spam logger.error(f"Failed to load plugin '{plugin_config.name}': {str(e)}") diff --git a/mcpgateway/plugins/framework/models.py b/mcpgateway/plugins/framework/models.py index 344827965..0d45e9db6 100644 --- a/mcpgateway/plugins/framework/models.py +++ b/mcpgateway/plugins/framework/models.py @@ -247,26 +247,22 @@ class AppliedTo(BaseModel): resources: Optional[list[ResourceTemplate]] = None -class MCPTransportTLSConfig(BaseModel): - """TLS configuration for HTTP-based MCP transports. 
+class MCPTransportTLSConfigBase(BaseModel): + """Base TLS configuration with common fields for both client and server. Attributes: - verify (bool): Whether to verify the remote certificate chain. - ca_bundle (Optional[str]): Path to a CA bundle file used for verification. - client_cert (Optional[str]): Path to the PEM-encoded client certificate (can include key). - client_key (Optional[str]): Path to the PEM-encoded client private key when stored separately. - client_key_password (Optional[str]): Optional password for the private key file. - check_hostname (bool): Enable hostname verification (default: True). + certfile (Optional[str]): Path to the PEM-encoded certificate file. + keyfile (Optional[str]): Path to the PEM-encoded private key file. + ca_bundle (Optional[str]): Path to a CA bundle file for verification. + keyfile_password (Optional[str]): Optional password for encrypted private key. """ - verify: bool = Field(default=True, description="Verify the upstream server certificate") - ca_bundle: Optional[str] = Field(default=None, description="Path to CA bundle for upstream verification") - client_cert: Optional[str] = Field(default=None, description="Path to PEM client certificate or bundle") - client_key: Optional[str] = Field(default=None, description="Path to PEM client private key (if separate)") - client_key_password: Optional[str] = Field(default=None, description="Password for the client key, when encrypted") - check_hostname: bool = Field(default=True, description="Enable hostname verification when verify is true") + certfile: Optional[str] = Field(default=None, description="Path to PEM certificate file") + keyfile: Optional[str] = Field(default=None, description="Path to PEM private key file") + ca_bundle: Optional[str] = Field(default=None, description="Path to CA bundle for verification") + keyfile_password: Optional[str] = Field(default=None, description="Password for encrypted private key") - @field_validator("ca_bundle", "client_cert", 
"client_key", mode=AFTER) + @field_validator("ca_bundle", "certfile", "keyfile", mode=AFTER) @classmethod def validate_path(cls, value: Optional[str]) -> Optional[str]: """Expand and validate file paths supplied in TLS configuration.""" @@ -279,11 +275,11 @@ def validate_path(cls, value: Optional[str]) -> Optional[str]: return str(expanded) @model_validator(mode=AFTER) - def validate_client_cert(self) -> Self: # pylint: disable=bad-classmethod-argument - """Ensure TLS client certificate options are consistent.""" + def validate_cert_key(self) -> Self: # pylint: disable=bad-classmethod-argument + """Ensure certificate and key options are consistent.""" - if self.client_key and not self.client_cert: - raise ValueError("client_key requires client_cert to be specified") + if self.keyfile and not self.certfile: + raise ValueError("keyfile requires certfile to be specified") return self @staticmethod @@ -299,27 +295,39 @@ def _parse_bool(value: Optional[str]) -> Optional[bool]: return False raise ValueError(f"Invalid boolean value: {value}") + +class MCPClientTLSConfig(MCPTransportTLSConfigBase): + """Client-side TLS configuration (gateway connecting to plugin). + + Attributes: + verify (bool): Whether to verify the remote server certificate. + check_hostname (bool): Enable hostname verification when verify is true. 
+ """ + + verify: bool = Field(default=True, description="Verify the upstream server certificate") + check_hostname: bool = Field(default=True, description="Enable hostname verification") + @classmethod - def from_env(cls) -> Optional["MCPTransportTLSConfig"]: - """Construct a TLS configuration from environment defaults.""" + def from_env(cls) -> Optional["MCPClientTLSConfig"]: + """Construct client TLS configuration from PLUGINS_CLIENT_* environment variables.""" env = os.environ data: dict[str, Any] = {} - if env.get("PLUGINS_MTLS_CA_BUNDLE"): - data["ca_bundle"] = env["PLUGINS_MTLS_CA_BUNDLE"] - if env.get("PLUGINS_MTLS_CLIENT_CERT"): - data["client_cert"] = env["PLUGINS_MTLS_CLIENT_CERT"] - if env.get("PLUGINS_MTLS_CLIENT_KEY"): - data["client_key"] = env["PLUGINS_MTLS_CLIENT_KEY"] - if env.get("PLUGINS_MTLS_CLIENT_KEY_PASSWORD") is not None: - data["client_key_password"] = env["PLUGINS_MTLS_CLIENT_KEY_PASSWORD"] + if env.get("PLUGINS_CLIENT_MTLS_CERTFILE"): + data["certfile"] = env["PLUGINS_CLIENT_MTLS_CERTFILE"] + if env.get("PLUGINS_CLIENT_MTLS_KEYFILE"): + data["keyfile"] = env["PLUGINS_CLIENT_MTLS_KEYFILE"] + if env.get("PLUGINS_CLIENT_MTLS_CA_BUNDLE"): + data["ca_bundle"] = env["PLUGINS_CLIENT_MTLS_CA_BUNDLE"] + if env.get("PLUGINS_CLIENT_MTLS_KEYFILE_PASSWORD") is not None: + data["keyfile_password"] = env["PLUGINS_CLIENT_MTLS_KEYFILE_PASSWORD"] - verify_val = cls._parse_bool(env.get("PLUGINS_MTLS_VERIFY")) + verify_val = cls._parse_bool(env.get("PLUGINS_CLIENT_MTLS_VERIFY")) if verify_val is not None: data["verify"] = verify_val - check_hostname_val = cls._parse_bool(env.get("PLUGINS_MTLS_CHECK_HOSTNAME")) + check_hostname_val = cls._parse_bool(env.get("PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME")) if check_hostname_val is not None: data["check_hostname"] = check_hostname_val @@ -329,19 +337,112 @@ def from_env(cls) -> Optional["MCPTransportTLSConfig"]: return cls(**data) -class MCPConfig(BaseModel): - """An MCP configuration for external MCP plugin objects. 
+class MCPServerTLSConfig(MCPTransportTLSConfigBase): + """Server-side TLS configuration (plugin accepting gateway connections). + + Attributes: + ssl_cert_reqs (int): Client certificate requirement (0=NONE, 1=OPTIONAL, 2=REQUIRED). + """ + + ssl_cert_reqs: int = Field(default=2, description="Client certificate requirement (0=NONE, 1=OPTIONAL, 2=REQUIRED)") + + @classmethod + def from_env(cls) -> Optional["MCPServerTLSConfig"]: + """Construct server TLS configuration from PLUGINS_SERVER_SSL_* environment variables.""" + + env = os.environ + data: dict[str, Any] = {} + + if env.get("PLUGINS_SERVER_SSL_KEYFILE"): + data["keyfile"] = env["PLUGINS_SERVER_SSL_KEYFILE"] + if env.get("PLUGINS_SERVER_SSL_CERTFILE"): + data["certfile"] = env["PLUGINS_SERVER_SSL_CERTFILE"] + if env.get("PLUGINS_SERVER_SSL_CA_CERTS"): + data["ca_bundle"] = env["PLUGINS_SERVER_SSL_CA_CERTS"] + if env.get("PLUGINS_SERVER_SSL_KEYFILE_PASSWORD") is not None: + data["keyfile_password"] = env["PLUGINS_SERVER_SSL_KEYFILE_PASSWORD"] + + if env.get("PLUGINS_SERVER_SSL_CERT_REQS"): + try: + data["ssl_cert_reqs"] = int(env["PLUGINS_SERVER_SSL_CERT_REQS"]) + except ValueError: + raise ValueError(f"Invalid PLUGINS_SERVER_SSL_CERT_REQS: {env['PLUGINS_SERVER_SSL_CERT_REQS']}") + + if not data: + return None + + return cls(**data) + + +class MCPServerConfig(BaseModel): + """Server-side MCP configuration (plugin running as server). + + Attributes: + host (str): Server host to bind to. + port (int): Server port to bind to. + tls (Optional[MCPServerTLSConfig]): Server-side TLS configuration. 
+ """ + + host: str = Field(default="0.0.0.0", description="Server host to bind to") + port: int = Field(default=8000, description="Server port to bind to") + tls: Optional[MCPServerTLSConfig] = Field(default=None, description="Server-side TLS configuration") + + @staticmethod + def _parse_bool(value: Optional[str]) -> Optional[bool]: + """Convert a string environment value to boolean.""" + + if value is None: + return None + normalized = value.strip().lower() + if normalized in {"1", "true", "yes", "on"}: + return True + if normalized in {"0", "false", "no", "off"}: + return False + raise ValueError(f"Invalid boolean value: {value}") + + @classmethod + def from_env(cls) -> Optional["MCPServerConfig"]: + """Construct server configuration from PLUGINS_SERVER_* environment variables.""" + + env = os.environ + data: dict[str, Any] = {} + + if env.get("PLUGINS_SERVER_HOST"): + data["host"] = env["PLUGINS_SERVER_HOST"] + if env.get("PLUGINS_SERVER_PORT"): + try: + data["port"] = int(env["PLUGINS_SERVER_PORT"]) + except ValueError: + raise ValueError(f"Invalid PLUGINS_SERVER_PORT: {env['PLUGINS_SERVER_PORT']}") + + # Check if SSL/TLS is enabled + ssl_enabled = cls._parse_bool(env.get("PLUGINS_SERVER_SSL_ENABLED")) + if ssl_enabled: + # Load TLS configuration + tls_config = MCPServerTLSConfig.from_env() + if tls_config: + data["tls"] = tls_config + + if not data: + return None + + return cls(**data) + + +class MCPClientConfig(BaseModel): + """Client-side MCP configuration (gateway connecting to external plugin). Attributes: - type (TransportType): The MCP transport type. Can be SSE, STDIO, or STREAMABLEHTTP + proto (TransportType): The MCP transport type. Can be SSE, STDIO, or STREAMABLEHTTP url (Optional[str]): An MCP URL. Only valid when MCP transport type is SSE or STREAMABLEHTTP. script (Optional[str]): The path and name to the STDIO script that runs the plugin server. Only valid for STDIO type. 
+ tls (Optional[MCPClientTLSConfig]): Client-side TLS configuration for mTLS. """ proto: TransportType url: Optional[str] = None script: Optional[str] = None - tls: Optional[MCPTransportTLSConfig] = None + tls: Optional[MCPClientTLSConfig] = None @field_validator(URL, mode=AFTER) @classmethod @@ -412,7 +513,7 @@ class PluginConfig(BaseModel): conditions (Optional[list[PluginCondition]]): the conditions on which the plugin is run. applied_to (Optional[list[AppliedTo]]): the tools, fields, that the plugin is applied to. config (dict[str, Any]): the plugin specific configurations. - mcp (Optional[MCPConfig]): MCP configuration for external plugin when kind is "external". + mcp (Optional[MCPClientConfig]): Client-side MCP configuration (gateway connecting to plugin). """ name: str @@ -428,7 +529,7 @@ class PluginConfig(BaseModel): conditions: Optional[list[PluginCondition]] = None # When to apply applied_to: Optional[AppliedTo] = None # Fields to apply to. config: Optional[dict[str, Any]] = None - mcp: Optional[MCPConfig] = None + mcp: Optional[MCPClientConfig] = None @model_validator(mode=AFTER) def check_url_or_script_filled(self) -> Self: # pylint: disable=bad-classmethod-argument @@ -590,14 +691,16 @@ class Config(BaseModel): """Configurations for plugins. Attributes: - plugins: the list of plugins to enable. - plugin_dirs: The directories in which to look for plugins. - plugin_settings: global settings for plugins. + plugins (Optional[list[PluginConfig]]): the list of plugins to enable. + plugin_dirs (list[str]): The directories in which to look for plugins. + plugin_settings (PluginSettings): global settings for plugins. + server_settings (Optional[MCPServerConfig]): Server-side MCP configuration (when plugins run as server). 
""" plugins: Optional[list[PluginConfig]] = [] plugin_dirs: list[str] = [] plugin_settings: PluginSettings + server_settings: Optional[MCPServerConfig] = None class PromptPrehookPayload(BaseModel): From a5358d93b110773921b9268d786ad8baf0ed0e79 Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Fri, 3 Oct 2025 16:44:23 -0600 Subject: [PATCH 05/35] fix: switched chuk-mcp-runtime with mcp python sdk to support mTLS. Signed-off-by: Teryl Taylor --- plugin_templates/external/pyproject.toml.jinja | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugin_templates/external/pyproject.toml.jinja b/plugin_templates/external/pyproject.toml.jinja index 6eb6fa286..8bd7aff25 100644 --- a/plugin_templates/external/pyproject.toml.jinja +++ b/plugin_templates/external/pyproject.toml.jinja @@ -44,7 +44,7 @@ authors = [ ] dependencies = [ - "chuk-mcp-runtime>=0.6.5", + "mcp>=1.16.0", "mcp-contextforge-gateway", ] From 6d9bf905be12dc165735a795c963121dddf56311 Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Fri, 3 Oct 2025 16:53:32 -0600 Subject: [PATCH 06/35] fix: updated llmguard and opa plugins to install the mcp official sdk. 
Signed-off-by: Teryl Taylor --- plugins/external/llmguard/pyproject.toml | 2 +- plugins/external/opa/pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/external/llmguard/pyproject.toml b/plugins/external/llmguard/pyproject.toml index 878530d7a..c53d93e7c 100644 --- a/plugins/external/llmguard/pyproject.toml +++ b/plugins/external/llmguard/pyproject.toml @@ -44,7 +44,7 @@ authors = [ ] dependencies = [ - "chuk-mcp-runtime>=0.6.5", + "mcp>=1.16.0", "mcp-contextforge-gateway", "llm-guard", ] diff --git a/plugins/external/opa/pyproject.toml b/plugins/external/opa/pyproject.toml index 2e789fcad..b9f55b131 100644 --- a/plugins/external/opa/pyproject.toml +++ b/plugins/external/opa/pyproject.toml @@ -44,7 +44,7 @@ authors = [ ] dependencies = [ - "chuk-mcp-runtime>=0.6.5", + "mcp>=1.16.0", "mcp-contextforge-gateway", ] From ba4e215217e1fa91ea0b78934edb3d5ca189f363 Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Mon, 6 Oct 2025 13:24:17 -0600 Subject: [PATCH 07/35] feat: added health check to plugin server runtimes. 
Signed-off-by: Teryl Taylor --- .../plugins/framework/external/mcp/server/runtime.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/mcpgateway/plugins/framework/external/mcp/server/runtime.py b/mcpgateway/plugins/framework/external/mcp/server/runtime.py index 32a28cdd5..7a6529ffe 100644 --- a/mcpgateway/plugins/framework/external/mcp/server/runtime.py +++ b/mcpgateway/plugins/framework/external/mcp/server/runtime.py @@ -105,6 +105,18 @@ async def run_streamable_http_async(self) -> None: """Run the server using StreamableHTTP transport with optional SSL/TLS.""" starlette_app = self.streamable_http_app() + # Add health check endpoint + from starlette.requests import Request + from starlette.responses import JSONResponse + from starlette.routing import Route + + async def health_check(request: Request): + """Health check endpoint for container orchestration.""" + return JSONResponse({"status": "healthy"}) + + # Add the health route to the Starlette app + starlette_app.routes.append(Route("/health", health_check, methods=["GET"])) + # Build uvicorn config with optional SSL ssl_config = self._get_ssl_config() config_kwargs = { From 214490d8b08067215ca616dfee1cfc35fc32e9f2 Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Mon, 6 Oct 2025 14:31:04 -0600 Subject: [PATCH 08/35] fix: added health check for mtls plugin server Signed-off-by: Teryl Taylor --- .../framework/external/mcp/server/runtime.py | 43 ++++++++++++++++++- 1 file changed, 41 insertions(+), 2 deletions(-) diff --git a/mcpgateway/plugins/framework/external/mcp/server/runtime.py b/mcpgateway/plugins/framework/external/mcp/server/runtime.py index 7a6529ffe..a2e45679e 100644 --- a/mcpgateway/plugins/framework/external/mcp/server/runtime.py +++ b/mcpgateway/plugins/framework/external/mcp/server/runtime.py @@ -101,11 +101,38 @@ def _get_ssl_config(self) -> dict: return ssl_config + async def _start_health_check_server(self, health_port: int) -> None: + """Start a simple HTTP-only health check 
server on a separate port. + + This allows health checks to work even when the main server uses HTTPS/mTLS. + """ + from starlette.applications import Starlette + from starlette.requests import Request + from starlette.responses import JSONResponse + from starlette.routing import Route + + async def health_check(request: Request): + """Health check endpoint for container orchestration.""" + return JSONResponse({"status": "healthy"}) + + # Create a minimal Starlette app with only the health endpoint + health_app = Starlette(routes=[Route("/health", health_check, methods=["GET"])]) + + logger.info(f"Starting HTTP health check server on {self.settings.host}:{health_port}") + config = uvicorn.Config( + app=health_app, + host=self.settings.host, + port=health_port, + log_level="warning", # Reduce noise from health checks + ) + server = uvicorn.Server(config) + await server.serve() + async def run_streamable_http_async(self) -> None: """Run the server using StreamableHTTP transport with optional SSL/TLS.""" starlette_app = self.streamable_http_app() - # Add health check endpoint + # Add health check endpoint to main app from starlette.requests import Request from starlette.responses import JSONResponse from starlette.routing import Route @@ -130,7 +157,19 @@ async def health_check(request: Request): logger.info(f"Starting plugin server on {self.settings.host}:{self.settings.port}") config = uvicorn.Config(**config_kwargs) server = uvicorn.Server(config) - await server.serve() + + # If SSL is enabled, start a separate HTTP health check server + if ssl_config: + health_port = self.settings.port + 1000 # Use port+1000 for health checks + logger.info(f"SSL enabled - starting separate HTTP health check on port {health_port}") + # Run both servers concurrently + await asyncio.gather( + server.serve(), + self._start_health_check_server(health_port) + ) + else: + # Just run the main server (health check is already on it) + await server.serve() async def run(): From 
36b2b138803c2a2c7e1853ed4b6b4311deec2f8c Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Wed, 8 Oct 2025 20:14:28 -0600 Subject: [PATCH 09/35] fix: removed chuk-mcp-runtime, replaced with official mcp library. Signed-off-by: Teryl Taylor --- .../framework/external/mcp/server/runtime.py | 24 ++++++++++++++----- pyproject.toml | 9 ------- 2 files changed, 18 insertions(+), 15 deletions(-) diff --git a/mcpgateway/plugins/framework/external/mcp/server/runtime.py b/mcpgateway/plugins/framework/external/mcp/server/runtime.py index a2e45679e..7054ea00a 100644 --- a/mcpgateway/plugins/framework/external/mcp/server/runtime.py +++ b/mcpgateway/plugins/framework/external/mcp/server/runtime.py @@ -59,6 +59,11 @@ class SSLCapableFastMCP(FastMCP): """FastMCP server with SSL/TLS support using MCPServerConfig.""" def __init__(self, server_config: MCPServerConfig, *args, **kwargs): + """Initialize an SSL capable Fast MCP server. + + Args: + server_config: the MCP server configuration including mTLS information. + """ # Load server config from environment self.server_config = server_config @@ -106,13 +111,18 @@ async def _start_health_check_server(self, health_port: int) -> None: This allows health checks to work even when the main server uses HTTPS/mTLS. """ + # Third-Party from starlette.applications import Starlette from starlette.requests import Request from starlette.responses import JSONResponse from starlette.routing import Route async def health_check(request: Request): - """Health check endpoint for container orchestration.""" + """Health check endpoint for container orchestration. + + Args: + request: the http request from which the health check occurs. 
+ """ return JSONResponse({"status": "healthy"}) # Create a minimal Starlette app with only the health endpoint @@ -133,12 +143,17 @@ async def run_streamable_http_async(self) -> None: starlette_app = self.streamable_http_app() # Add health check endpoint to main app + # Third-Party from starlette.requests import Request from starlette.responses import JSONResponse from starlette.routing import Route async def health_check(request: Request): - """Health check endpoint for container orchestration.""" + """Health check endpoint for container orchestration. + + Args: + request: the http request from which the health check occurs. + """ return JSONResponse({"status": "healthy"}) # Add the health route to the Starlette app @@ -163,10 +178,7 @@ async def health_check(request: Request): health_port = self.settings.port + 1000 # Use port+1000 for health checks logger.info(f"SSL enabled - starting separate HTTP health check on port {health_port}") # Run both servers concurrently - await asyncio.gather( - server.serve(), - self._start_health_check_server(health_port) - ) + await asyncio.gather(server.serve(), self._start_health_check_server(health_port)) else: # Just run the main server (health check is already on it) await server.serve() diff --git a/pyproject.toml b/pyproject.toml index ad78beb54..b45e23bad 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -147,14 +147,6 @@ asyncpg = [ "asyncpg>=0.30.0", ] -# Chuck/Chuk MCP Runtime (optional) - External plugin server runtime -# Provides MCP tool decorators, plugin hooks, and multi-transport server support -# Used by: mcpgateway/plugins/framework/external/mcp/server/runtime.py -# Required only if you plan to create external MCP plugin servers -chuck = [ - "chuk-mcp-runtime>=0.6.5", -] - # Optional dependency groups (development) dev = [ "aiohttp>=3.12.15", @@ -164,7 +156,6 @@ dev = [ "black>=25.1.0", "bump2version>=1.0.1", "check-manifest>=0.50", - "chuk-mcp-runtime>=0.6.5", "code2flow>=2.5.1", "cookiecutter>=2.6.0", 
"coverage>=7.10.6", From fa6905540bee10abfd999ed27ca60f22f4d546fe Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Wed, 8 Oct 2025 21:03:50 -0600 Subject: [PATCH 10/35] fix: runtime tests. Signed-off-by: Teryl Taylor --- .../framework/external/mcp/server/runtime.py | 221 +++++++++--------- .../plugins/framework/test_models_tls.py | 6 +- 2 files changed, 120 insertions(+), 107 deletions(-) diff --git a/mcpgateway/plugins/framework/external/mcp/server/runtime.py b/mcpgateway/plugins/framework/external/mcp/server/runtime.py index 7054ea00a..f5b070edd 100644 --- a/mcpgateway/plugins/framework/external/mcp/server/runtime.py +++ b/mcpgateway/plugins/framework/external/mcp/server/runtime.py @@ -55,6 +55,113 @@ SERVER: ExternalPluginServer = None +# Module-level tool functions (extracted for testability) + + +async def get_plugin_configs() -> list[dict]: + """Get the plugin configurations installed on the server.""" + return await SERVER.get_plugin_configs() + + +async def get_plugin_config(name: str) -> dict: + """Get the plugin configuration for a specific plugin. + + Args: + name: The name of the plugin + """ + return await SERVER.get_plugin_config(name) + + +async def prompt_pre_fetch(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: + """Execute prompt prefetch hook for a plugin. + + Args: + plugin_name: The name of the plugin to execute + payload: The prompt name and arguments to be analyzed + context: Contextual information required for execution + """ + + def prompt_pre_fetch_func(plugin: Plugin, payload: PromptPrehookPayload, context: PluginContext) -> PromptPrehookResult: + return plugin.prompt_pre_fetch(payload, context) + + return await SERVER.invoke_hook(PromptPrehookPayload, prompt_pre_fetch_func, plugin_name, payload, context) + + +async def prompt_post_fetch(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: + """Execute prompt postfetch hook for a plugin. 
+ + Args: + plugin_name: The name of the plugin to execute + payload: The prompt payload to be analyzed + context: Contextual information + """ + + def prompt_post_fetch_func(plugin: Plugin, payload: PromptPosthookPayload, context: PluginContext) -> PromptPosthookResult: + return plugin.prompt_post_fetch(payload, context) + + return await SERVER.invoke_hook(PromptPosthookPayload, prompt_post_fetch_func, plugin_name, payload, context) + + +async def tool_pre_invoke(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: + """Execute tool pre-invoke hook for a plugin. + + Args: + plugin_name: The name of the plugin to execute + payload: The tool name and arguments to be analyzed + context: Contextual information + """ + + def tool_pre_invoke_func(plugin: Plugin, payload: ToolPreInvokePayload, context: PluginContext) -> ToolPreInvokeResult: + return plugin.tool_pre_invoke(payload, context) + + return await SERVER.invoke_hook(ToolPreInvokePayload, tool_pre_invoke_func, plugin_name, payload, context) + + +async def tool_post_invoke(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: + """Execute tool post-invoke hook for a plugin. + + Args: + plugin_name: The name of the plugin to execute + payload: The tool result to be analyzed + context: Contextual information + """ + + def tool_post_invoke_func(plugin: Plugin, payload: ToolPostInvokePayload, context: PluginContext) -> ToolPostInvokeResult: + return plugin.tool_post_invoke(payload, context) + + return await SERVER.invoke_hook(ToolPostInvokePayload, tool_post_invoke_func, plugin_name, payload, context) + + +async def resource_pre_fetch(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: + """Execute resource prefetch hook for a plugin. 
+ + Args: + plugin_name: The name of the plugin to execute + payload: The resource name and arguments to be analyzed + context: Contextual information + """ + + def resource_pre_fetch_func(plugin: Plugin, payload: ResourcePreFetchPayload, context: PluginContext) -> ResourcePreFetchResult: + return plugin.resource_pre_fetch(payload, context) + + return await SERVER.invoke_hook(ResourcePreFetchPayload, resource_pre_fetch_func, plugin_name, payload, context) + + +async def resource_post_fetch(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: + """Execute resource postfetch hook for a plugin. + + Args: + plugin_name: The name of the plugin to execute + payload: The resource payload to be analyzed + context: Contextual information + """ + + def resource_post_fetch_func(plugin: Plugin, payload: ResourcePostFetchPayload, context: PluginContext) -> ResourcePostFetchResult: + return plugin.resource_post_fetch(payload, context) + + return await SERVER.invoke_hook(ResourcePostFetchPayload, resource_post_fetch_func, plugin_name, payload, context) + + class SSLCapableFastMCP(FastMCP): """FastMCP server with SSL/TLS support using MCPServerConfig.""" @@ -213,110 +320,16 @@ async def run(): instructions=MCP_SERVER_INSTRUCTIONS, ) - # Register plugin hook tools - @mcp.tool(name=GET_PLUGIN_CONFIGS) - async def get_plugin_configs() -> list[dict]: - """Get the plugin configurations installed on the server.""" - return await SERVER.get_plugin_configs() - - @mcp.tool(name=GET_PLUGIN_CONFIG) - async def get_plugin_config(name: str) -> dict: - """Get the plugin configuration for a specific plugin. - - Args: - name: The name of the plugin - """ - return await SERVER.get_plugin_config(name) - - @mcp.tool(name=HookType.PROMPT_PRE_FETCH.value) - async def prompt_pre_fetch(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: - """Execute prompt prefetch hook for a plugin. 
- - Args: - plugin_name: The name of the plugin to execute - payload: The prompt name and arguments to be analyzed - context: Contextual information required for execution - """ - - def prompt_pre_fetch_func(plugin: Plugin, payload: PromptPrehookPayload, context: PluginContext) -> PromptPrehookResult: - return plugin.prompt_pre_fetch(payload, context) - - return await SERVER.invoke_hook(PromptPrehookPayload, prompt_pre_fetch_func, plugin_name, payload, context) - - @mcp.tool(name=HookType.PROMPT_POST_FETCH.value) - async def prompt_post_fetch(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: - """Execute prompt postfetch hook for a plugin. - - Args: - plugin_name: The name of the plugin to execute - payload: The prompt payload to be analyzed - context: Contextual information - """ - - def prompt_post_fetch_func(plugin: Plugin, payload: PromptPosthookPayload, context: PluginContext) -> PromptPosthookResult: - return plugin.prompt_post_fetch(payload, context) - - return await SERVER.invoke_hook(PromptPosthookPayload, prompt_post_fetch_func, plugin_name, payload, context) - - @mcp.tool(name=HookType.TOOL_PRE_INVOKE.value) - async def tool_pre_invoke(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: - """Execute tool pre-invoke hook for a plugin. - - Args: - plugin_name: The name of the plugin to execute - payload: The tool name and arguments to be analyzed - context: Contextual information - """ - - def tool_pre_invoke_func(plugin: Plugin, payload: ToolPreInvokePayload, context: PluginContext) -> ToolPreInvokeResult: - return plugin.tool_pre_invoke(payload, context) - - return await SERVER.invoke_hook(ToolPreInvokePayload, tool_pre_invoke_func, plugin_name, payload, context) - - @mcp.tool(name=HookType.TOOL_POST_INVOKE.value) - async def tool_post_invoke(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: - """Execute tool post-invoke hook for a plugin. 
- - Args: - plugin_name: The name of the plugin to execute - payload: The tool result to be analyzed - context: Contextual information - """ - - def tool_post_invoke_func(plugin: Plugin, payload: ToolPostInvokePayload, context: PluginContext) -> ToolPostInvokeResult: - return plugin.tool_post_invoke(payload, context) - - return await SERVER.invoke_hook(ToolPostInvokePayload, tool_post_invoke_func, plugin_name, payload, context) - - @mcp.tool(name=HookType.RESOURCE_PRE_FETCH.value) - async def resource_pre_fetch(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: - """Execute resource prefetch hook for a plugin. - - Args: - plugin_name: The name of the plugin to execute - payload: The resource name and arguments to be analyzed - context: Contextual information - """ - - def resource_pre_fetch_func(plugin: Plugin, payload: ResourcePreFetchPayload, context: PluginContext) -> ResourcePreFetchResult: - return plugin.resource_pre_fetch(payload, context) - - return await SERVER.invoke_hook(ResourcePreFetchPayload, resource_pre_fetch_func, plugin_name, payload, context) - - @mcp.tool(name=HookType.RESOURCE_POST_FETCH.value) - async def resource_post_fetch(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: - """Execute resource postfetch hook for a plugin. 
- - Args: - plugin_name: The name of the plugin to execute - payload: The resource payload to be analyzed - context: Contextual information - """ - - def resource_post_fetch_func(plugin: Plugin, payload: ResourcePostFetchPayload, context: PluginContext) -> ResourcePostFetchResult: - return plugin.resource_post_fetch(payload, context) - - return await SERVER.invoke_hook(ResourcePostFetchPayload, resource_post_fetch_func, plugin_name, payload, context) + # Register module-level tool functions with FastMCP + # These are defined at module level for testability + mcp.tool(name=GET_PLUGIN_CONFIGS)(get_plugin_configs) + mcp.tool(name=GET_PLUGIN_CONFIG)(get_plugin_config) + mcp.tool(name=HookType.PROMPT_PRE_FETCH.value)(prompt_pre_fetch) + mcp.tool(name=HookType.PROMPT_POST_FETCH.value)(prompt_post_fetch) + mcp.tool(name=HookType.TOOL_PRE_INVOKE.value)(tool_pre_invoke) + mcp.tool(name=HookType.TOOL_POST_INVOKE.value)(tool_post_invoke) + mcp.tool(name=HookType.RESOURCE_PRE_FETCH.value)(resource_pre_fetch) + mcp.tool(name=HookType.RESOURCE_POST_FETCH.value)(resource_post_fetch) # Run with streamable-http transport logger.info("Starting MCP plugin server with FastMCP") diff --git a/tests/unit/mcpgateway/plugins/framework/test_models_tls.py b/tests/unit/mcpgateway/plugins/framework/test_models_tls.py index d61f8693a..492ec7049 100644 --- a/tests/unit/mcpgateway/plugins/framework/test_models_tls.py +++ b/tests/unit/mcpgateway/plugins/framework/test_models_tls.py @@ -7,7 +7,7 @@ import pytest # First-Party -from mcpgateway.plugins.framework.models import MCPTransportTLSConfig, PluginConfig +from mcpgateway.plugins.framework.models import MCPClientTLSConfig, PluginConfig def _write_pem(path: Path) -> str: @@ -94,7 +94,7 @@ def test_tls_config_from_env_defaults(monkeypatch, tmp_path): monkeypatch.setenv("PLUGINS_MTLS_VERIFY", "true") monkeypatch.setenv("PLUGINS_MTLS_CHECK_HOSTNAME", "true") - tls_config = MCPTransportTLSConfig.from_env() + tls_config = 
MCPClientTLSConfig.from_env() assert tls_config is not None assert tls_config.ca_bundle == str(ca_path) @@ -111,4 +111,4 @@ def test_tls_config_from_env_returns_none(monkeypatch): monkeypatch.delenv("PLUGINS_MTLS_VERIFY", raising=False) monkeypatch.delenv("PLUGINS_MTLS_CHECK_HOSTNAME", raising=False) - assert MCPTransportTLSConfig.from_env() is None + assert MCPClientTLSConfig.from_env() is None From 1f4e01e95e0818713c4371a92a19fd2e0d864375 Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Wed, 8 Oct 2025 21:22:36 -0600 Subject: [PATCH 11/35] feat: initial revision of configurable plugin builds. Signed-off-by: Teryl Taylor --- mcpgateway/tools/builder/__init__.py | 8 + mcpgateway/tools/builder/cli.py | 244 +++++++ mcpgateway/tools/builder/common.py | 683 ++++++++++++++++++ mcpgateway/tools/builder/dagger_deploy.py | 470 ++++++++++++ mcpgateway/tools/builder/factory.py | 113 +++ mcpgateway/tools/builder/pipeline.py | 294 ++++++++ mcpgateway/tools/builder/python_deploy.py | 440 +++++++++++ mcpgateway/tools/builder/schema.py | 156 ++++ .../templates/compose/docker-compose.yaml.j2 | 197 +++++ .../templates/kubernetes/cert-secrets.yaml.j2 | 38 + .../templates/kubernetes/deployment.yaml.j2 | 233 ++++++ .../templates/kubernetes/postgres.yaml.j2 | 120 +++ .../templates/kubernetes/redis.yaml.j2 | 76 ++ .../builder/templates/plugins-config.yaml.j2 | 49 ++ mcpgateway/tools/cli.py | 56 ++ pyproject.toml | 3 +- 16 files changed, 3179 insertions(+), 1 deletion(-) create mode 100644 mcpgateway/tools/builder/__init__.py create mode 100644 mcpgateway/tools/builder/cli.py create mode 100644 mcpgateway/tools/builder/common.py create mode 100644 mcpgateway/tools/builder/dagger_deploy.py create mode 100644 mcpgateway/tools/builder/factory.py create mode 100644 mcpgateway/tools/builder/pipeline.py create mode 100644 mcpgateway/tools/builder/python_deploy.py create mode 100644 mcpgateway/tools/builder/schema.py create mode 100644 
mcpgateway/tools/builder/templates/compose/docker-compose.yaml.j2 create mode 100644 mcpgateway/tools/builder/templates/kubernetes/cert-secrets.yaml.j2 create mode 100644 mcpgateway/tools/builder/templates/kubernetes/deployment.yaml.j2 create mode 100644 mcpgateway/tools/builder/templates/kubernetes/postgres.yaml.j2 create mode 100644 mcpgateway/tools/builder/templates/kubernetes/redis.yaml.j2 create mode 100644 mcpgateway/tools/builder/templates/plugins-config.yaml.j2 create mode 100644 mcpgateway/tools/cli.py diff --git a/mcpgateway/tools/builder/__init__.py b/mcpgateway/tools/builder/__init__.py new file mode 100644 index 000000000..36a56d0e3 --- /dev/null +++ b/mcpgateway/tools/builder/__init__.py @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- +"""Location: ./mcpgateway/tools/builder/__init__.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Builder Package. +""" \ No newline at end of file diff --git a/mcpgateway/tools/builder/cli.py b/mcpgateway/tools/builder/cli.py new file mode 100644 index 000000000..10a126007 --- /dev/null +++ b/mcpgateway/tools/builder/cli.py @@ -0,0 +1,244 @@ +""" +Location: ./mcpgateway/tools/builder/cli.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +MCP Stack Deployment Tool - Hybrid Dagger/Python Implementation + +This script can run in three modes: +1. Local with Dagger (optimal performance) +2. Local without Dagger (plain Python fallback) +3. 
Inside builder container (all tools included) + +Usage: + # Local execution (auto-detects Dagger) + cforge deploy deploy.yaml + + # Force plain Python mode + cforge --no-dagger deploy deploy.yaml + + # Inside container + docker run -v $PWD:/workspace mcpgateway/mcp-builder:latest deploy deploy.yaml + +Features: + - Validates deploy.yaml configuration + - Builds plugin containers from git repos + - Generates mTLS certificates + - Deploys to Kubernetes or Docker Compose + - Integrates with CI/CD vault secrets +""" + +# Standard +import asyncio +import os +from pathlib import Path +import sys +from typing import Optional + +# Third-Party +from rich.console import Console +from rich.panel import Panel +import typer +from typing_extensions import Annotated + +# First-Party +from mcpgateway.tools.builder.factory import DeployFactory + +app = typer.Typer( + help="Command line tools for deploying the gateway and plugins via a config file.", +) + +console = Console() + +deployer = None + +IN_CONTAINER = os.path.exists("/.dockerenv") or os.environ.get("CONTAINER") == "true" +BUILDER_DIR = Path(__file__).parent / "builder" +IMPL_MODE = "plain" + + +@app.callback() +def cli( + ctx: typer.Context, + no_dagger: Annotated[bool, typer.Option("--no-dagger", help="Force plain Python mode (skip Dagger)")] = False, + verbose: Annotated[bool, typer.Option("--verbose", "-v", help="Verbose output")] = False, +): + """MCP Stack deployment tool + + Deploys MCP Gateway + external plugins from a single YAML configuration. 
+ """ + ctx.ensure_object(dict) + ctx.obj["verbose"] = verbose + ctx.obj["no_dagger"] = no_dagger + + if ctx.invoked_subcommand != "version": + # Show execution mode + mode = "python" if no_dagger else "dagger" + ctx.obj["deployer"], ctx.obj["mode"] = DeployFactory.create_deployer(mode, verbose) + mode_color = "green" if ctx.obj["mode"] == "dagger" else "yellow" + env_text = "container" if IN_CONTAINER else "local" + + if verbose: + console.print(Panel(f"[bold]Mode:[/bold] [{mode_color}]{ctx.obj['mode']}[/{mode_color}]\n" f"[bold]Environment:[/bold] {env_text}\n", title="MCP Deploy", border_style=mode_color)) + + +@app.command() +def validate(ctx: typer.Context, config_file: Annotated[Path, typer.Argument(help="The deployment configuration file.")]): + """Validate mcp-stack.yaml configuration""" + impl = ctx.obj["deployer"] + + try: + impl.validate(config_file) + console.print("[green]βœ“ Configuration valid[/green]") + except Exception as e: + console.print(f"[red]βœ— Validation failed: {e}[/red]") + sys.exit(1) + + +@app.command() +def build( + ctx: typer.Context, + config_file: Annotated[Path, typer.Argument(help="The deployment configuration file")], + plugins_only: Annotated[bool, typer.Option("--plugins-only", help="Only build plugin containers")] = False, + plugin: Annotated[Optional[list[str]], typer.Option("--plugin", "-p", help="Build specific plugin(s)")] = None, + no_cache: Annotated[bool, typer.Option("--no-cache", help="Disable build cache")] = False, + copy_env_templates: Annotated[bool, typer.Option("--copy-env-templates", help="Copy .env.template files from plugin repos")] = True, +): + """Build plugin containers""" + impl = ctx.obj["deployer"] + + try: + if IMPL_MODE == "dagger": + # Use asyncio for Dagger implementation + asyncio.run(impl.build(config_file, plugins_only=plugins_only, specific_plugins=list(plugin) if plugin else None, no_cache=no_cache, copy_env_templates=copy_env_templates)) + else: + # Plain Python implementation is synchronous 
+            impl.build(config_file, plugins_only=plugins_only, specific_plugins=list(plugin) if plugin else None, no_cache=no_cache, copy_env_templates=copy_env_templates)
+        console.print("[green]βœ“ Build complete[/green]")
+
+        if copy_env_templates:
+            console.print("[yellow]⚠ IMPORTANT: Review .env files in deploy/env/ before deploying![/yellow]")
+            console.print("[yellow]  Update any required configuration values.[/yellow]")
+    except Exception as e:
+        console.print(f"[red]βœ— Build failed: {e}[/red]")
+        sys.exit(1)
+
+
+@app.command()
+def certs(ctx: typer.Context, config_file: Annotated[Path, typer.Argument(help="The deployment configuration file")]):
+    """Generate mTLS certificates"""
+    impl = ctx.obj["deployer"]
+
+    try:
+        asyncio.run(impl.generate_certificates(config_file))
+        console.print("[green]βœ“ Certificates generated[/green]")
+    except Exception as e:
+        console.print(f"[red]βœ— Certificate generation failed: {e}[/red]")
+        sys.exit(1)
+
+
+@app.command()
+def deploy(
+    ctx: typer.Context,
+    config_file: Annotated[Path, typer.Argument(help="The deployment configuration file")],
+    output_dir: Annotated[Path, typer.Option("--output-dir", "-o", help="Output directory for generated deployment manifests")],
+    dry_run: Annotated[bool, typer.Option("--dry-run", help="Generate manifests without deploying")] = False,
+    skip_build: Annotated[bool, typer.Option("--skip-build", help="Skip building containers")] = False,
+    skip_certs: Annotated[bool, typer.Option("--skip-certs", help="Skip certificate generation")] = False,
+):
+    """Deploy MCP stack"""
+    impl = ctx.obj["deployer"]
+
+    try:
+        asyncio.run(impl.deploy(config_file, dry_run=dry_run, skip_build=skip_build, skip_certs=skip_certs, output_dir=output_dir))
+        if dry_run:
+            console.print("[yellow]βœ“ Dry-run complete (no changes made)[/yellow]")
+        else:
+            console.print("[green]βœ“ Deployment complete[/green]")
+    except Exception as e:
+        console.print(f"[red]βœ— Deployment failed: {e}[/red]")
+        sys.exit(1)
+
+
+@app.command()
+def verify(
ctx: typer.Context, + config_file: Annotated[Path, typer.Argument(help="The deployment configuration file")], + wait: Annotated[bool, typer.Option("--wait", help="Wait for deployment to be ready")] = True, + timeout: Annotated[int, typer.Option("--timeout", help="Wait timeout in seconds")] = 300, +): + """Verify deployment health""" + impl = ctx.obj["deployer"] + + try: + asyncio.run(impl.verify(config_file, wait=wait, timeout=timeout)) + console.print("[green]βœ“ Deployment healthy[/green]") + except Exception as e: + console.print(f"[red]βœ— Verification failed: {e}[/red]") + sys.exit(1) + + +@app.command() +def destroy( + ctx: typer.Context, + config_file: Annotated[Path, typer.Argument(help="The deployment configuration file")], + force: Annotated[bool, typer.Option("--force", help="Force destruction without confirmation")] = False, +): + """Destroy deployed MCP stack""" + impl = ctx.obj["deployer"] + + if not force: + if not typer.confirm("Are you sure you want to destroy the deployment?"): + console.print("[yellow]Aborted[/yellow]") + return + + try: + asyncio.run(impl.destroy(config_file)) + console.print("[green]βœ“ Deployment destroyed[/green]") + except Exception as e: + console.print(f"[red]βœ— Destruction failed: {e}[/red]") + sys.exit(1) + + +@app.command() +def version(): + """Show version information""" + console.print( + Panel(f"[bold]MCP Deploy[/bold]\n" f"Version: 1.0.0\n" f"Mode: {IMPL_MODE}\n" f"Environment: {'container' if IN_CONTAINER else 'local'}\n", title="Version Info", border_style="blue") + ) + + +@app.command() +def generate( + ctx: typer.Context, + config_file: Annotated[Path, typer.Argument(help="The deployment configuration file")], + output: Annotated[Optional[Path], typer.Option("--output", "-o", help="Output directory for manifests")] = None, +): + """Generate deployment manifests (k8s or compose)""" + impl = ctx.obj["deployer"] + + try: + manifests_dir = impl.generate_manifests(config_file, output_dir=output) + 
console.print(f"[green]βœ“ Manifests generated: {manifests_dir}[/green]") + except Exception as e: + console.print(f"[red]βœ— Manifest generation failed: {e}[/red]") + sys.exit(1) + + +def main(): + """Main entry point""" + try: + app(obj={}) + except KeyboardInterrupt: + console.print("\n[yellow]Interrupted by user[/yellow]") + sys.exit(130) + except Exception as e: + console.print(f"[red]Fatal error: {e}[/red]") + if os.environ.get("MCP_DEBUG"): + raise + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/mcpgateway/tools/builder/common.py b/mcpgateway/tools/builder/common.py new file mode 100644 index 000000000..880d407dd --- /dev/null +++ b/mcpgateway/tools/builder/common.py @@ -0,0 +1,683 @@ +"""Location: ./mcpgateway/tools/builder/common.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Common utilities shared between Dagger and plain Python implementations. + +This module contains shared functionality to avoid code duplication between +the Dagger-based (dagger_module.py) and plain Python (plain_deploy.py) +implementations of the MCP Stack deployment system. 
+ +Shared functions: +- load_config: Load and parse YAML configuration file +- generate_plugin_config: Generate plugins-config.yaml for gateway from mcp-stack.yaml +- generate_kubernetes_manifests: Generate Kubernetes deployment manifests +- generate_compose_manifests: Generate Docker Compose manifest +- copy_env_template: Copy .env.template from plugin repo to env.d/ directory +- get_docker_compose_command: Detect available docker compose command +- run_compose: Run docker compose with error handling +- deploy_compose: Deploy using docker compose up -d +- verify_compose: Verify deployment with docker compose ps +- destroy_compose: Destroy deployment with docker compose down -v +- deploy_kubernetes: Deploy to Kubernetes using kubectl +- verify_kubernetes: Verify Kubernetes deployment health +- destroy_kubernetes: Destroy Kubernetes deployment with kubectl delete +""" + +# Standard +import base64 +import os +from pathlib import Path +import shutil +import subprocess +from typing import Any, Dict, List + +# Third-Party +from jinja2 import Environment, FileSystemLoader +from rich.console import Console +import yaml + +console = Console() + + +def get_deploy_dir() -> Path: + """Get deployment directory from environment variable or default. + + Checks MCP_DEPLOY_DIR environment variable, defaults to './deploy'. + + Returns: + Path to deployment directory + """ + deploy_dir = os.environ.get("MCP_DEPLOY_DIR", "./deploy") + return Path(deploy_dir) + + +def load_config(config_file: str) -> Dict[str, Any]: + """Load and parse YAML configuration file. 
+ + Args: + config_file: Path to mcp-stack.yaml configuration file + + Returns: + Parsed configuration dictionary + + Raises: + FileNotFoundError: If configuration file doesn't exist + """ + config_path = Path(config_file) + if not config_path.exists(): + raise FileNotFoundError(f"Configuration file not found: {config_file}") + + with open(config_path, encoding="utf-8") as f: + return yaml.safe_load(f) + + +def generate_plugin_config(config: Dict[str, Any], output_dir: Path, verbose: bool = False) -> Path: + """Generate plugin config.yaml for gateway from mcp-stack.yaml. + + This function is shared between Dagger and plain Python implementations + to avoid code duplication. + + Args: + config: Parsed mcp-stack.yaml configuration + output_dir: Output directory for generated config + verbose: Print verbose output + + Returns: + Path to generated plugins-config.yaml file + """ + + deployment_type = config["deployment"]["type"] + plugins = config.get("plugins", []) + + # Load template + template_dir = Path(__file__).parent / "templates" + if not template_dir.exists(): + raise FileNotFoundError(f"Template directory not found: {template_dir}") + + env = Environment(loader=FileSystemLoader(str(template_dir))) + template = env.get_template("plugins-config.yaml.j2") + + # Prepare plugin data with computed URLs + plugin_data = [] + for plugin in plugins: + plugin_name = plugin["name"] + port = plugin.get("port", 8000) + + # Determine URL based on deployment type + if deployment_type == "compose": + # Use container hostname (lowercase) + hostname = plugin_name.lower() + # Use HTTPS if mTLS is enabled + protocol = "https" if plugin.get("mtls_enabled", True) else "http" + url = f"{protocol}://{hostname}:{port}/mcp" + else: # kubernetes + # Use Kubernetes service DNS + namespace = config["deployment"].get("namespace", "mcp-gateway") + service_name = f"mcp-plugin-{plugin_name.lower()}" + protocol = "https" if plugin.get("mtls_enabled", True) else "http" + url = 
f"{protocol}://{service_name}.{namespace}.svc:{port}/mcp" + + # Build plugin entry with computed URL + plugin_entry = { + "name": plugin_name, + "port": port, + "url": url, + } + + # Merge plugin_overrides (client-side config only, excludes 'config') + # Allowed client-side fields that plugin manager uses + if "plugin_overrides" in plugin: + overrides = plugin["plugin_overrides"] + allowed_fields = ["priority", "mode", "description", "version", "author", "hooks", "tags", "conditions"] + for field in allowed_fields: + if field in overrides: + plugin_entry[field] = overrides[field] + + plugin_data.append(plugin_entry) + + # Render template + rendered = template.render(plugins=plugin_data) + + # Write config file + config_path = output_dir / "plugins-config.yaml" + config_path.write_text(rendered) + + if verbose: + print(f"βœ“ Plugin config generated: {config_path}") + + return config_path + + +def generate_kubernetes_manifests(config: Dict[str, Any], output_dir: Path, verbose: bool = False) -> None: + """Generate Kubernetes manifests from configuration. 
+ + Args: + config: Parsed mcp-stack.yaml configuration + output_dir: Output directory for manifests + verbose: Print verbose output + """ + + # Load templates + template_dir = Path(__file__).parent / "templates" / "kubernetes" + if not template_dir.exists(): + raise FileNotFoundError(f"Template directory not found: {template_dir}") + + # Auto-detect and assign env files if not specified + _auto_detect_env_files(config, output_dir, verbose=verbose) + + env = Environment(loader=FileSystemLoader(str(template_dir))) + + # Generate namespace + namespace = config["deployment"].get("namespace", "mcp-gateway") + + # Generate mTLS certificate secrets if enabled + gateway_mtls = config.get("gateway", {}).get("mtls_enabled", True) + if gateway_mtls: + + cert_secrets_template = env.get_template("cert-secrets.yaml.j2") + + # Prepare certificate data + cert_data = {"namespace": namespace, "gateway_name": "mcpgateway", "plugins": []} + + # Read and encode CA certificate + ca_cert_path = Path("certs/mcp/ca/ca.crt") + if ca_cert_path.exists(): + cert_data["ca_cert_b64"] = base64.b64encode(ca_cert_path.read_bytes()).decode("utf-8") + else: + if verbose: + print(f"[yellow]Warning: CA certificate not found at {ca_cert_path}[/yellow]") + + # Read and encode gateway certificates + gateway_cert_path = Path("certs/mcp/gateway/client.crt") + gateway_key_path = Path("certs/mcp/gateway/client.key") + if gateway_cert_path.exists() and gateway_key_path.exists(): + cert_data["gateway_cert_b64"] = base64.b64encode(gateway_cert_path.read_bytes()).decode("utf-8") + cert_data["gateway_key_b64"] = base64.b64encode(gateway_key_path.read_bytes()).decode("utf-8") + else: + if verbose: + print("[yellow]Warning: Gateway certificates not found[/yellow]") + + # Read and encode plugin certificates + for plugin in config.get("plugins", []): + if plugin.get("mtls_enabled", True): + plugin_name = plugin["name"] + plugin_cert_path = Path(f"certs/mcp/plugins/{plugin_name}/server.crt") + plugin_key_path = 
Path(f"certs/mcp/plugins/{plugin_name}/server.key") + + if plugin_cert_path.exists() and plugin_key_path.exists(): + cert_data["plugins"].append( + { + "name": f"mcp-plugin-{plugin_name.lower()}", + "cert_b64": base64.b64encode(plugin_cert_path.read_bytes()).decode("utf-8"), + "key_b64": base64.b64encode(plugin_key_path.read_bytes()).decode("utf-8"), + } + ) + else: + if verbose: + print(f"[yellow]Warning: Plugin {plugin_name} certificates not found[/yellow]") + + # Generate certificate secrets manifest + if "ca_cert_b64" in cert_data: + cert_secrets_manifest = cert_secrets_template.render(**cert_data) + (output_dir / "cert-secrets.yaml").write_text(cert_secrets_manifest) + if verbose: + print(" βœ“ mTLS certificate secrets manifest generated") + + # Generate infrastructure manifests (postgres, redis) if enabled + infrastructure = config.get("infrastructure", {}) + + # PostgreSQL + postgres_config = infrastructure.get("postgres", {}) + if postgres_config.get("enabled", True): + postgres_template = env.get_template("postgres.yaml.j2") + postgres_manifest = postgres_template.render( + namespace=namespace, + image=postgres_config.get("image", "postgres:17"), + database=postgres_config.get("database", "mcp"), + user=postgres_config.get("user", "postgres"), + password=postgres_config.get("password", "mysecretpassword"), + storage_size=postgres_config.get("storage_size", "10Gi"), + storage_class=postgres_config.get("storage_class"), + ) + (output_dir / "postgres-deployment.yaml").write_text(postgres_manifest) + if verbose: + print(" βœ“ PostgreSQL deployment manifest generated") + + # Redis + redis_config = infrastructure.get("redis", {}) + if redis_config.get("enabled", True): + redis_template = env.get_template("redis.yaml.j2") + redis_manifest = redis_template.render(namespace=namespace, image=redis_config.get("image", "redis:latest")) + (output_dir / "redis-deployment.yaml").write_text(redis_manifest) + if verbose: + print(" βœ“ Redis deployment manifest generated") 
+ + # Generate gateway deployment + gateway_template = env.get_template("deployment.yaml.j2") + gateway_config = config["gateway"].copy() + gateway_config["name"] = "mcpgateway" + gateway_config["namespace"] = namespace + + # Add DATABASE_URL and REDIS_URL to gateway environment if infrastructure is enabled + if "env_vars" not in gateway_config: + gateway_config["env_vars"] = {} + + # Add init containers to wait for infrastructure services + init_containers = [] + + if postgres_config.get("enabled", True): + db_user = postgres_config.get("user", "postgres") + db_password = postgres_config.get("password", "mysecretpassword") + db_name = postgres_config.get("database", "mcp") + gateway_config["env_vars"]["DATABASE_URL"] = f"postgresql://{db_user}:{db_password}@postgres:5432/{db_name}" + + # Add init container to wait for PostgreSQL + init_containers.append({"name": "wait-for-postgres", "image": "busybox:1.36", "command": ["sh", "-c", "until nc -z postgres 5432; do echo waiting for postgres; sleep 2; done"]}) + + if redis_config.get("enabled", True): + gateway_config["env_vars"]["REDIS_URL"] = "redis://redis:6379/0" + + # Add init container to wait for Redis + init_containers.append({"name": "wait-for-redis", "image": "busybox:1.36", "command": ["sh", "-c", "until nc -z redis 6379; do echo waiting for redis; sleep 2; done"]}) + + if init_containers: + gateway_config["init_containers"] = init_containers + + gateway_manifest = gateway_template.render(**gateway_config) + (output_dir / "gateway-deployment.yaml").write_text(gateway_manifest) + + # Generate plugin deployments + for plugin in config.get("plugins", []): + plugin_config = plugin.copy() + plugin_config["name"] = f"mcp-plugin-{plugin['name'].lower()}" + plugin_config["namespace"] = namespace + plugin_manifest = gateway_template.render(**plugin_config) + (output_dir / f"plugin-{plugin['name'].lower()}-deployment.yaml").write_text(plugin_manifest) + + if verbose: + print(f"βœ“ Kubernetes manifests generated in 
{output_dir}") + + +def generate_compose_manifests(config: Dict[str, Any], output_dir: Path, verbose: bool = False) -> None: + """Generate Docker Compose manifest from configuration. + + Args: + config: Parsed mcp-stack.yaml configuration + output_dir: Output directory for manifests + verbose: Print verbose output + """ + + # Load templates + template_dir = Path(__file__).parent / "templates" / "compose" + if not template_dir.exists(): + raise FileNotFoundError(f"Template directory not found: {template_dir}") + + # Auto-detect and assign env files if not specified + _auto_detect_env_files(config, output_dir, verbose=verbose) + + # Auto-assign host_ports if expose_port is true but host_port not specified + plugins = config.get("plugins", []) + next_host_port = 8000 + for plugin in plugins: + # Set default port if not specified + if "port" not in plugin: + plugin["port"] = 8000 + + # Auto-assign host_port if expose_port is true + if plugin.get("expose_port", False) and "host_port" not in plugin: + plugin["host_port"] = next_host_port + next_host_port += 1 + + # Compute relative certificate paths (from output_dir to project root certs/) + # Certificates are at: ./certs/mcp/... + # Output dir is at: ./deploy/manifests/ + # So relative path is: ../../certs/mcp/... 
+ certs_base = Path.cwd() / "certs" + certs_rel_base = os.path.relpath(certs_base, output_dir) + + # Add computed cert paths to context for template + cert_paths = { + "certs_base": certs_rel_base, + "gateway_cert_dir": os.path.join(certs_rel_base, "mcp/gateway"), + "ca_cert_file": os.path.join(certs_rel_base, "mcp/ca/ca.crt"), + "plugins_cert_base": os.path.join(certs_rel_base, "mcp/plugins"), + } + + env = Environment(loader=FileSystemLoader(str(template_dir))) + + # Generate compose file + compose_template = env.get_template("docker-compose.yaml.j2") + compose_manifest = compose_template.render(**config, cert_paths=cert_paths) + (output_dir / "docker-compose.yaml").write_text(compose_manifest) + + if verbose: + print(f"βœ“ Compose manifest generated in {output_dir}") + + +def _auto_detect_env_files(config: Dict[str, Any], output_dir: Path, verbose: bool = False) -> None: + """Auto-detect and assign env files if not explicitly specified. + + If env_file is not specified in the config, check if {deploy_dir}/env/.env.{name} + exists and use it. Warn the user when auto-detection is used. 
+ + Args: + config: Parsed mcp-stack.yaml configuration (modified in-place) + output_dir: Output directory where manifests will be generated (for relative paths) + verbose: Print verbose output + """ + deploy_dir = get_deploy_dir() + env_dir = deploy_dir / "env" + + # Check gateway + gateway = config.get("gateway", {}) + if "env_file" not in gateway or not gateway["env_file"]: + gateway_env = env_dir / ".env.gateway" + if gateway_env.exists(): + # Make path relative to output_dir (where docker-compose.yaml will be) + relative_path = os.path.relpath(gateway_env, output_dir) + gateway["env_file"] = relative_path + print(f"⚠ Auto-detected env file: {gateway_env}") + if verbose: + print(" (Gateway env_file not specified in config)") + + # Check plugins + plugins = config.get("plugins", []) + for plugin in plugins: + plugin_name = plugin["name"] + if "env_file" not in plugin or not plugin["env_file"]: + plugin_env = env_dir / f".env.{plugin_name}" + if plugin_env.exists(): + # Make path relative to output_dir (where docker-compose.yaml will be) + relative_path = os.path.relpath(plugin_env, output_dir) + plugin["env_file"] = relative_path + print(f"⚠ Auto-detected env file: {plugin_env}") + if verbose: + print(f" (Plugin {plugin_name} env_file not specified in config)") + + +def copy_env_template(plugin_name: str, plugin_build_dir: Path, verbose: bool = False) -> None: + """Copy .env.template from plugin repo to {deploy_dir}/env/ directory. + + Uses MCP_DEPLOY_DIR environment variable if set, defaults to './deploy'. + This function is shared between Dagger and plain Python implementations. 
+ + Args: + plugin_name: Name of the plugin + plugin_build_dir: Path to plugin build directory (contains .env.template) + verbose: Print verbose output + """ + # Create {deploy_dir}/env directory if it doesn't exist + deploy_dir = get_deploy_dir() + env_dir = deploy_dir / "env" + env_dir.mkdir(parents=True, exist_ok=True) + + # Look for .env.template in plugin build directory + template_file = plugin_build_dir / ".env.template" + if not template_file.exists(): + if verbose: + print(f"No .env.template found in {plugin_name}") + return + + # Target file path + target_file = env_dir / f".env.{plugin_name}" + + # Only copy if target doesn't exist (don't overwrite user edits) + if target_file.exists(): + if verbose: + print(f"⚠ {target_file} already exists, skipping") + return + + # Copy template + shutil.copy2(template_file, target_file) + if verbose: + print(f"βœ“ Copied .env.template -> {target_file}") + + +# Docker Compose Utilities + + +def get_docker_compose_command() -> List[str]: + """Detect and return available docker compose command. + + Tries to detect docker compose plugin first, then falls back to + standalone docker-compose command. + + Returns: + Command to use: ["docker", "compose"] or ["docker-compose"] + + Raises: + RuntimeError: If neither command is available + """ + # Try docker compose (new plugin) first + if shutil.which("docker"): + try: + subprocess.run(["docker", "compose", "version"], capture_output=True, check=True) + return ["docker", "compose"] + except (subprocess.CalledProcessError, FileNotFoundError): + pass + + # Fall back to standalone docker-compose + if shutil.which("docker-compose"): + return ["docker-compose"] + + raise RuntimeError("Docker Compose not found. Install docker compose plugin or docker-compose.") + + +def run_compose(compose_file: Path, args: List[str], verbose: bool = False, check: bool = True) -> subprocess.CompletedProcess: + """Run docker compose command with given arguments. 
+ + Args: + compose_file: Path to docker-compose.yaml + args: Arguments to pass to compose (e.g., ["up", "-d"]) + verbose: Print verbose output + check: Raise exception on non-zero exit code + + Returns: + CompletedProcess instance + + Raises: + FileNotFoundError: If compose_file doesn't exist + RuntimeError: If docker compose command fails (when check=True) + """ + if not compose_file.exists(): + raise FileNotFoundError(f"Compose file not found: {compose_file}") + + compose_cmd = get_docker_compose_command() + full_cmd = compose_cmd + ["-f", str(compose_file)] + args + + if verbose: + console.print(f"[dim]Running: {' '.join(full_cmd)}[/dim]") + + try: + result = subprocess.run(full_cmd, capture_output=True, text=True, check=check) + return result + except subprocess.CalledProcessError as e: + console.print("\n[red bold]Docker Compose command failed:[/red bold]") + if e.stdout: + console.print(f"[yellow]Output:[/yellow]\n{e.stdout}") + if e.stderr: + console.print(f"[red]Error:[/red]\n{e.stderr}") + raise RuntimeError(f"Docker Compose failed with exit code {e.returncode}") from e + + +def deploy_compose(compose_file: Path, verbose: bool = False) -> None: + """Deploy using docker compose up -d. + + Args: + compose_file: Path to docker-compose.yaml + verbose: Print verbose output + + Raises: + RuntimeError: If deployment fails + """ + result = run_compose(compose_file, ["up", "-d"], verbose=verbose) + if result.stdout and verbose: + console.print(result.stdout) + console.print("[green]βœ“ Deployed with Docker Compose[/green]") + + +def verify_compose(compose_file: Path, verbose: bool = False) -> str: + """Verify Docker Compose deployment with ps command. 
+ + Args: + compose_file: Path to docker-compose.yaml + verbose: Print verbose output + + Returns: + Output from docker compose ps command + """ + result = run_compose(compose_file, ["ps"], verbose=verbose, check=False) + return result.stdout + + +def destroy_compose(compose_file: Path, verbose: bool = False) -> None: + """Destroy Docker Compose deployment with down -v. + + Args: + compose_file: Path to docker-compose.yaml + verbose: Print verbose output + + Raises: + RuntimeError: If destruction fails + """ + if not compose_file.exists(): + console.print(f"[yellow]Compose file not found: {compose_file}[/yellow]") + console.print("[yellow]Nothing to destroy[/yellow]") + return + + result = run_compose(compose_file, ["down", "-v"], verbose=verbose) + if result.stdout and verbose: + console.print(result.stdout) + console.print("[green]βœ“ Destroyed Docker Compose deployment[/green]") + + +# Kubernetes kubectl utilities + + +def deploy_kubernetes(manifests_dir: Path, verbose: bool = False) -> None: + """Deploy to Kubernetes using kubectl. + + Applies manifests in correct order: + 1. Deployments (creates namespaces) + 2. Certificate secrets + 3. Infrastructure (PostgreSQL, Redis) + + Excludes plugins-config.yaml (not a Kubernetes resource). + + Args: + manifests_dir: Path to directory containing Kubernetes manifests + verbose: Print verbose output + + Raises: + RuntimeError: If kubectl not found or deployment fails + """ + if not shutil.which("kubectl"): + raise RuntimeError("kubectl not found. Cannot deploy to Kubernetes.") + + # Get all manifest files, excluding plugins-config.yaml + all_manifests = sorted(manifests_dir.glob("*.yaml")) + all_manifests = [m for m in all_manifests if m.name != "plugins-config.yaml"] + + # Apply in order to handle dependencies + cert_secrets = manifests_dir / "cert-secrets.yaml" + postgres_deploy = manifests_dir / "postgres-deployment.yaml" + redis_deploy = manifests_dir / "redis-deployment.yaml" + + # 1. 
Apply all deployments first (creates namespaces) + deployment_files = [m for m in all_manifests if m.name.endswith("-deployment.yaml") and m != cert_secrets and m != postgres_deploy and m != redis_deploy] + + # Apply deployment files + for manifest in deployment_files: + result = subprocess.run(["kubectl", "apply", "-f", str(manifest)], capture_output=True, text=True, check=False) + if result.stdout and verbose: + console.print(result.stdout) + if result.returncode != 0: + raise RuntimeError(f"kubectl apply failed: {result.stderr}") + + # 2. Apply certificate secrets (now namespace exists) + if cert_secrets.exists(): + result = subprocess.run(["kubectl", "apply", "-f", str(cert_secrets)], capture_output=True, text=True, check=False) + if result.stdout and verbose: + console.print(result.stdout) + if result.returncode != 0: + raise RuntimeError(f"kubectl apply failed: {result.stderr}") + + # 3. Apply infrastructure + for infra_file in [postgres_deploy, redis_deploy]: + if infra_file.exists(): + result = subprocess.run(["kubectl", "apply", "-f", str(infra_file)], capture_output=True, text=True, check=False) + if result.stdout and verbose: + console.print(result.stdout) + if result.returncode != 0: + raise RuntimeError(f"kubectl apply failed: {result.stderr}") + + console.print("[green]βœ“ Deployed to Kubernetes[/green]") + + +def verify_kubernetes(namespace: str, wait: bool = False, timeout: int = 300, verbose: bool = False) -> str: + """Verify Kubernetes deployment health. + + Args: + namespace: Kubernetes namespace to check + wait: Wait for pods to be ready + timeout: Wait timeout in seconds + verbose: Print verbose output + + Returns: + String output from kubectl get pods + + Raises: + RuntimeError: If kubectl not found or verification fails + """ + if not shutil.which("kubectl"): + raise RuntimeError("kubectl not found. 
Cannot verify Kubernetes deployment.") + + # Get pod status + result = subprocess.run(["kubectl", "get", "pods", "-n", namespace], capture_output=True, text=True, check=False) + output = result.stdout if result.stdout else "" + if result.returncode != 0: + raise RuntimeError(f"kubectl get pods failed: {result.stderr}") + + # Wait for pods if requested + if wait: + result = subprocess.run(["kubectl", "wait", "--for=condition=Ready", "pod", "--all", "-n", namespace, f"--timeout={timeout}s"], capture_output=True, text=True, check=False) + if result.stdout and verbose: + console.print(result.stdout) + if result.returncode != 0: + raise RuntimeError(f"kubectl wait failed: {result.stderr}") + + return output + + +def destroy_kubernetes(manifests_dir: Path, verbose: bool = False) -> None: + """Destroy Kubernetes deployment. + + Args: + manifests_dir: Path to directory containing Kubernetes manifests + verbose: Print verbose output + + Raises: + RuntimeError: If kubectl not found or destruction fails + """ + if not shutil.which("kubectl"): + raise RuntimeError("kubectl not found. 
Cannot destroy Kubernetes deployment.") + + if not manifests_dir.exists(): + console.print(f"[yellow]Manifests directory not found: {manifests_dir}[/yellow]") + console.print("[yellow]Nothing to destroy[/yellow]") + return + + # Delete all manifests except plugins-config.yaml + all_manifests = sorted(manifests_dir.glob("*.yaml")) + all_manifests = [m for m in all_manifests if m.name != "plugins-config.yaml"] + + for manifest in all_manifests: + result = subprocess.run(["kubectl", "delete", "-f", str(manifest), "--ignore-not-found=true"], capture_output=True, text=True, check=False) + if result.stdout and verbose: + console.print(result.stdout) + if result.returncode != 0 and "NotFound" not in result.stderr: + console.print(f"[yellow]Warning: {result.stderr}[/yellow]") + + console.print("[green]βœ“ Destroyed Kubernetes deployment[/green]") diff --git a/mcpgateway/tools/builder/dagger_deploy.py b/mcpgateway/tools/builder/dagger_deploy.py new file mode 100644 index 000000000..41a282644 --- /dev/null +++ b/mcpgateway/tools/builder/dagger_deploy.py @@ -0,0 +1,470 @@ +"""Location: ./mcpgateway/tools/builder/dagger_deploy.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Dagger-based MCP Stack Deployment Module + +This module provides optimized build and deployment using Dagger. 
+ +Features: +- Automatic caching and parallelization +- Content-addressable storage +- Efficient multi-stage builds +- Built-in layer caching +""" + +# Standard +from pathlib import Path +from typing import Any, Dict, List, Optional + +# Third-Party +import dagger +from dagger import dag +from rich.console import Console +from rich.progress import Progress, SpinnerColumn, TextColumn + +# First-Party +from mcpgateway.tools.builder.common import ( + deploy_compose, + deploy_kubernetes, + destroy_compose, + destroy_kubernetes, + generate_compose_manifests, + generate_kubernetes_manifests, + generate_plugin_config, + get_deploy_dir, + load_config, + verify_compose, + verify_kubernetes, +) +from mcpgateway.tools.builder.pipeline import CICDModule + +console = Console() + + +class MCPStackDagger(CICDModule): + """Dagger-based implementation of MCP Stack deployment.""" + + def __init__(self, verbose: bool = False): + super().__init__(verbose) + + async def build(self, config_file: str, plugins_only: bool = False, specific_plugins: Optional[List[str]] = None, no_cache: bool = False, copy_env_templates: bool = False) -> None: + """Build gateway and plugin containers using Dagger. 
+ + Args: + config_file: Path to mcp-stack.yaml + plugins_only: Only build plugins, skip gateway + specific_plugins: List of specific plugin names to build + no_cache: Disable Dagger cache + copy_env_templates: Copy .env.template files from cloned repos + """ + config = load_config(config_file) + + async with dagger.connection(dagger.Config(workdir=str(Path.cwd()))): + # Build gateway (unless plugins_only=True) + if not plugins_only: + gateway = config.get("gateway", {}) + if gateway.get("repo"): + with Progress(SpinnerColumn(), TextColumn("[progress.description]{task.description}"), console=self.console) as progress: + task = progress.add_task("Building gateway...", total=None) + try: + await self._build_component_with_dagger(gateway, "gateway", no_cache=no_cache) + progress.update(task, completed=1, description="[green]βœ“ Built gateway[/green]") + except Exception as e: + progress.update(task, completed=1, description="[red]βœ— Failed gateway[/red]") + # Print full error after progress bar closes + self.console.print("\n[red bold]Gateway build failed:[/red bold]") + self.console.print(f"[red]{type(e).__name__}: {str(e)}[/red]") + if self.verbose: + # Standard + import traceback + + self.console.print(f"[dim]{traceback.format_exc()}[/dim]") + raise + elif self.verbose: + self.console.print("[dim]Skipping gateway build (using pre-built image)[/dim]") + + # Build plugins + plugins = config.get("plugins", []) + + if specific_plugins: + plugins = [p for p in plugins if p["name"] in specific_plugins] + + if not plugins: + self.console.print("[yellow]No plugins to build[/yellow]") + return + + with Progress(SpinnerColumn(), TextColumn("[progress.description]{task.description}"), console=self.console) as progress: + + for plugin in plugins: + plugin_name = plugin["name"] + + # Skip if pre-built image specified + if "image" in plugin and "repo" not in plugin: + task = progress.add_task(f"Skipping {plugin_name} (using pre-built image)", total=1) + progress.update(task, 
completed=1) + continue + + task = progress.add_task(f"Building {plugin_name}...", total=None) + + try: + await self._build_component_with_dagger(plugin, plugin_name, no_cache=no_cache, copy_env_templates=copy_env_templates) + progress.update(task, completed=1, description=f"[green]βœ“ Built {plugin_name}[/green]") + except Exception as e: + progress.update(task, completed=1, description=f"[red]βœ— Failed {plugin_name}[/red]") + # Print full error after progress bar closes + self.console.print(f"\n[red bold]Plugin '{plugin_name}' build failed:[/red bold]") + self.console.print(f"[red]{type(e).__name__}: {str(e)}[/red]") + if self.verbose: + # Standard + import traceback + + self.console.print(f"[dim]{traceback.format_exc()}[/dim]") + raise + + async def generate_certificates(self, config_file: str) -> None: + """Generate mTLS certificates for plugins. + + Args: + config_file: Path to mcp-stack.yaml + """ + config = load_config(config_file) + + if self.verbose: + self.console.print("[blue]Generating mTLS certificates...[/blue]") + + # Use Dagger container to run certificate generation + async with dagger.connection(dagger.Config(workdir=str(Path.cwd()))): + # Mount current directory + source = dag.host().directory(".") + try: + # Use Alpine with openssl + container = ( + dag.container() + .from_("alpine:latest") + .with_exec(["apk", "add", "--no-cache", "openssl", "python3", "py3-pip", "make", "bash"]) + .with_mounted_directory("/workspace", source) + .with_workdir("/workspace") + # .with_exec(["python3", "-m", "venv", ".venv"]) + # .with_exec(["sh", "-c", "source .venv/bin/activate && pip install pyyaml"]) + # .with_exec(["pip", "install", "pyyaml"]) + ) + + # Generate CA + container = container.with_exec(["sh", "-c", "make certs-mcp-ca MCP_CERT_DAYS=825"]) + + # Generate gateway cert + container = container.with_exec(["sh", "-c", "make certs-mcp-gateway MCP_CERT_DAYS=825"]) + + # Generate plugin certificates + plugins = config.get("plugins", []) + for plugin in 
plugins: + plugin_name = plugin["name"] + container = container.with_exec(["sh", "-c", f"make certs-mcp-plugin PLUGIN_NAME={plugin_name} MCP_CERT_DAYS=825"]) + + # Export certificates back to host + output = container.directory("/workspace/certs") + await output.export("./certs") + except dagger.ExecError as e: + self.console.print(f"Dagger Exec Error: {e.message}") + self.console.print(f"Exit Code: {e.exit_code}") + self.console.print(f"Stderr: {e.stderr}") + raise + except dagger.QueryError as e: + self.console.print(f"Dagger Query Error: {e.errors}") + self.console.print(f"Debug Query: {e.debug_query()}") + raise + except Exception as e: + self.console.print(f"An unexpected error occurred: {e}") + raise + + if self.verbose: + self.console.print("[green]βœ“ Certificates generated[/green]") + + async def deploy(self, config_file: str, dry_run: bool = False, skip_build: bool = False, skip_certs: bool = False, output_dir: Optional[str] = None) -> None: + """Deploy MCP stack. + + Args: + config_file: Path to mcp-stack.yaml + dry_run: Generate manifests without deploying + skip_build: Skip building containers + skip_certs: Skip certificate generation + output_dir: Output directory for manifests (default: ./deploy) + """ + config = load_config(config_file) + + # Build containers + if not skip_build: + await self.build(config_file) + + # Generate certificates (only if mTLS is enabled) + gateway_mtls = config.get("gateway", {}).get("mtls_enabled", True) + plugin_mtls = any(p.get("mtls_enabled", True) for p in config.get("plugins", [])) + mtls_needed = gateway_mtls or plugin_mtls + + if not skip_certs and mtls_needed: + await self.generate_certificates(config_file) + elif not skip_certs and not mtls_needed: + if self.verbose: + self.console.print("[dim]Skipping certificate generation (mTLS disabled)[/dim]") + + # Generate manifests + manifests_dir = self.generate_manifests(config_file, output_dir=output_dir) + + if dry_run: + self.console.print(f"[yellow]Dry-run: 
Manifests generated in {manifests_dir}[/yellow]") + return + + # Apply deployment + deployment_type = config["deployment"]["type"] + + async with dagger.connection(dagger.Config(workdir=str(Path.cwd()))): + try: + if deployment_type == "kubernetes": + await self._deploy_kubernetes(manifests_dir) + elif deployment_type == "compose": + await self._deploy_compose(manifests_dir) + else: + raise ValueError(f"Unsupported deployment type: {deployment_type}") + except dagger.ExecError as e: + self.console.print(f"Dagger Exec Error: {e.message}") + self.console.print(f"Exit Code: {e.exit_code}") + self.console.print(f"Stderr: {e.stderr}") + raise + except dagger.QueryError as e: + self.console.print(f"Dagger Query Error: {e.errors}") + self.console.print(f"Debug Query: {e.debug_query()}") + raise + except Exception as e: + # Extract detailed error from Dagger exception + error_msg = str(e) + self.console.print("\n[red bold]Deployment failed:[/red bold]") + self.console.print(f"[red]{error_msg}[/red]") + + # Check if it's a compose-specific error and try to provide more context + if "compose" in error_msg.lower() and self.verbose: + self.console.print("\n[yellow]Hint:[/yellow] Check the generated docker-compose.yaml:") + self.console.print(f"[dim] {manifests_dir}/docker-compose.yaml[/dim]") + self.console.print("[yellow]Try running manually:[/yellow]") + self.console.print(f"[dim] cd {manifests_dir} && docker compose up[/dim]") + + raise + + async def verify(self, config_file: str, wait: bool = False, timeout: int = 300) -> None: + """Verify deployment health. 
+ + Args: + config_file: Path to mcp-stack.yaml + wait: Wait for deployment to be ready + timeout: Wait timeout in seconds + """ + config = load_config(config_file) + deployment_type = config["deployment"]["type"] + + if self.verbose: + self.console.print("[blue]Verifying deployment...[/blue]") + + async with dagger.connection(dagger.Config(workdir=str(Path.cwd()))): + if deployment_type == "kubernetes": + await self._verify_kubernetes(config, wait=wait, timeout=timeout) + elif deployment_type == "compose": + await self._verify_compose(config, wait=wait, timeout=timeout) + + async def destroy(self, config_file: str) -> None: + """Destroy deployed MCP stack. + + Args: + config_file: Path to mcp-stack.yaml + """ + config = load_config(config_file) + deployment_type = config["deployment"]["type"] + + if self.verbose: + self.console.print("[blue]Destroying deployment...[/blue]") + + async with dagger.connection(dagger.Config(workdir=str(Path.cwd()))): + if deployment_type == "kubernetes": + await self._destroy_kubernetes(config) + elif deployment_type == "compose": + await self._destroy_compose(config) + + def generate_manifests(self, config_file: str, output_dir: Optional[str] = None) -> Path: + """Generate deployment manifests. 
+ + Args: + config_file: Path to mcp-stack.yaml + output_dir: Output directory for manifests + + Returns: + Path to generated manifests directory + """ + config = load_config(config_file) + deployment_type = config["deployment"]["type"] + + if output_dir is None: + deploy_dir = get_deploy_dir() + # Separate subdirectories for kubernetes and compose + manifests_path = deploy_dir / "manifests" / deployment_type + else: + manifests_path = Path(output_dir) + + manifests_path.mkdir(parents=True, exist_ok=True) + + # Store output dir for later use + self._last_output_dir = manifests_path + + # Generate plugin config.yaml for gateway (shared function) + generate_plugin_config(config, manifests_path, verbose=self.verbose) + + if deployment_type == "kubernetes": + generate_kubernetes_manifests(config, manifests_path, verbose=self.verbose) + elif deployment_type == "compose": + generate_compose_manifests(config, manifests_path, verbose=self.verbose) + else: + raise ValueError(f"Unsupported deployment type: {deployment_type}") + + return manifests_path + + # Private helper methods + + async def _build_component_with_dagger(self, component: Dict[str, Any], component_name: str, no_cache: bool = False, copy_env_templates: bool = False) -> None: + """Build a component (gateway or plugin) container using Dagger. 
+ + Args: + component: Component configuration dict + component_name: Name of the component (gateway or plugin name) + no_cache: Disable cache + copy_env_templates: Copy .env.template from repo if it exists + """ + repo = component.get("repo") + + if not repo: + raise ValueError(f"Component '{component_name}' has no 'repo' field") + + # Clone repository to local directory for env template access + git_ref = component.get("ref", "main") + clone_dir = Path(f"./build/{component_name}") + + # For Dagger, we still need local clone if copying env templates + if copy_env_templates: + # Standard + import subprocess + + # Third-Party + from common import copy_env_template as copy_template + + clone_dir.mkdir(parents=True, exist_ok=True) + + if (clone_dir / ".git").exists(): + subprocess.run(["git", "fetch", "origin", git_ref], cwd=clone_dir, check=True, capture_output=True) + subprocess.run(["git", "checkout", "-B", git_ref, f"origin/{git_ref}"], cwd=clone_dir, check=True, capture_output=True) + else: + subprocess.run(["git", "clone", "--branch", git_ref, "--depth", "1", repo, str(clone_dir)], check=True, capture_output=True) + + # Determine build context + build_context = component.get("context", ".") + build_dir = clone_dir / build_context + + # Copy env template using shared function + copy_template(component_name, build_dir, verbose=self.verbose) + + # Use Dagger for the actual build + source = dag.git(repo).branch(git_ref).tree() + + # If component has context subdirectory, navigate to it + build_context = component.get("context", ".") + if build_context != ".": + source = source.directory(build_context) + + # Detect Containerfile/Dockerfile + containerfile = component.get("containerfile", "Containerfile") + + # Build container - determine image tag + if "image" in component: + # Use explicitly specified image name + image_tag = component["image"] + else: + # Generate default image name based on component type + image_tag = f"mcpgateway-{component_name.lower()}:latest" 
+ + # Build with optional target stage for multi-stage builds + build_kwargs = {"dockerfile": containerfile} + if "target" in component: + build_kwargs["target"] = component["target"] + + # Use docker_build on the directory + container = source.docker_build(**build_kwargs) + + # Export image to Docker daemon (always export, Dagger handles caching) + # Workaround for dagger-io 0.19.0 bug: export_image returns None instead of Void + # The export actually works, but beartype complains about the return type + try: + await container.export_image(image_tag) + except Exception as e: + # Ignore beartype validation error - the export actually succeeds + if "BeartypeCallHintReturnViolation" not in str(type(e)): + raise + + if self.verbose: + self.console.print(f"[green]βœ“ Built {component_name} -> {image_tag}[/green]") + + async def _deploy_kubernetes(self, manifests_dir: Path) -> None: + """Deploy to Kubernetes using kubectl. + + Uses shared deploy_kubernetes() from common.py to avoid code duplication. + """ + deploy_kubernetes(manifests_dir, verbose=self.verbose) + + async def _deploy_compose(self, manifests_dir: Path) -> None: + """Deploy using Docker Compose. + + Uses shared deploy_compose() from common.py to avoid code duplication. + """ + compose_file = manifests_dir / "docker-compose.yaml" + deploy_compose(compose_file, verbose=self.verbose) + + async def _verify_kubernetes(self, config: Dict[str, Any], wait: bool = False, timeout: int = 300) -> None: + """Verify Kubernetes deployment health. + + Uses shared verify_kubernetes() from common.py to avoid code duplication. + """ + namespace = config["deployment"].get("namespace", "mcp-gateway") + output = verify_kubernetes(namespace, wait=wait, timeout=timeout, verbose=self.verbose) + self.console.print(output) + + async def _verify_compose(self, config: Dict[str, Any], wait: bool = False, timeout: int = 300) -> None: + """Verify Docker Compose deployment health. 
+ + Uses shared verify_compose() from common.py to avoid code duplication. + """ + _ = config, wait, timeout # Reserved for future use + # Use the same manifests directory as generate_manifests + deploy_dir = get_deploy_dir() + output_dir = getattr(self, "_last_output_dir", deploy_dir / "manifests" / "compose") + compose_file = output_dir / "docker-compose.yaml" + output = verify_compose(compose_file, verbose=self.verbose) + self.console.print(output) + + async def _destroy_kubernetes(self, config: Dict[str, Any]) -> None: + """Destroy Kubernetes deployment. + + Uses shared destroy_kubernetes() from common.py to avoid code duplication. + """ + _ = config # Reserved for future use (namespace, labels, etc.) + # Use the same manifests directory as generate_manifests + deploy_dir = get_deploy_dir() + manifests_dir = getattr(self, "_last_output_dir", deploy_dir / "manifests" / "kubernetes") + destroy_kubernetes(manifests_dir, verbose=self.verbose) + + async def _destroy_compose(self, config: Dict[str, Any]) -> None: + """Destroy Docker Compose deployment. + + Uses shared destroy_compose() from common.py to avoid code duplication. + """ + _ = config # Reserved for future use (project name, networks, etc.) + # Use the same manifests directory as generate_manifests + deploy_dir = get_deploy_dir() + output_dir = getattr(self, "_last_output_dir", deploy_dir / "manifests" / "compose") + compose_file = output_dir / "docker-compose.yaml" + destroy_compose(compose_file, verbose=self.verbose) diff --git a/mcpgateway/tools/builder/factory.py b/mcpgateway/tools/builder/factory.py new file mode 100644 index 000000000..340f70b49 --- /dev/null +++ b/mcpgateway/tools/builder/factory.py @@ -0,0 +1,113 @@ +"""Location: ./mcpgateway/tools/builder/factory.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Factory for creating MCP Stack deployment implementations. 
+ +This module provides a factory pattern for creating the appropriate deployment +implementation (Dagger or Plain Python) based on availability and user preference. + +The factory handles graceful fallback from Dagger to Python if dependencies are +unavailable, ensuring the deployment system works in various environments. + +Example: + >>> deployer, mode = DeployFactory.create_deployer("dagger", verbose=True) + >>> deployer.validate("mcp-stack.yaml") + βœ“ Configuration valid +""" + +# Standard +from enum import Enum + +# Third-Party +from rich.console import Console + +# First-Party +from mcpgateway.tools.builder.pipeline import CICDModule + + +class CICDTypes(str, Enum): + """Deployment implementation types. + + Attributes: + DAGGER: Dagger-based implementation (optimal performance) + PYTHON: Plain Python implementation (fallback, no dependencies) + """ + + DAGGER = "dagger" + PYTHON = "python" + + +console = Console() + + +class DeployFactory: + """Factory for creating MCP Stack deployment implementations. + + This factory implements the Strategy pattern, allowing dynamic selection + between Dagger and Python implementations based on availability. + """ + + @staticmethod + def create_deployer(deployer: str, verbose: bool = False) -> tuple[CICDModule, CICDTypes]: + """Create a deployment implementation instance. + + Attempts to load the requested deployer type with automatic fallback + to Python implementation if dependencies are missing. 
+ + Args: + deployer: Deployment type to create ("dagger" or "python") + verbose: Enable verbose logging during creation + + Returns: + tuple: (deployer_instance, actual_type) + - deployer_instance: Instance of MCPStackDagger or MCPStackPython + - actual_type: CICDTypes enum indicating which implementation was loaded + + Raises: + RuntimeError: If no implementation can be loaded (critical failure) + + Example: + >>> # Try to load Dagger, fall back to Python if unavailable + >>> deployer, mode = DeployFactory.create_deployer("dagger", verbose=True) + >>> if mode == CICDTypes.DAGGER: + ... print("Using optimized Dagger implementation") + ... else: + ... print("Using fallback Python implementation") + """ + # Attempt to load Dagger implementation first if requested + if deployer == "dagger": + try: + # First-Party + from mcpgateway.tools.builder.dagger_deploy import MCPStackDagger + + if verbose: + console.print("[green]βœ“ Dagger module loaded[/green]") + + return (MCPStackDagger(verbose), CICDTypes.DAGGER) + + except ImportError as e: + # Dagger dependencies not available, fall back to Python + console.print(f"[yellow]⚠ Dagger import failed: {e}[/yellow]") + console.print("[yellow]β†’ Falling back to plain Python implementation[/yellow]") + + # Load plain Python implementation (fallback or explicitly requested) + try: + # First-Party + from mcpgateway.tools.builder.python_deploy import MCPStackPython + + if verbose and deployer != "dagger": + console.print("[blue]Using plain Python implementation[/blue]") + + return (MCPStackPython(verbose), CICDTypes.PYTHON) + + except ImportError as e: + # Critical failure - neither implementation can be loaded + console.print("[red]βœ— ERROR: Cannot import deployment modules[/red]") + console.print(f"[red] Details: {e}[/red]") + console.print("[yellow] Make sure you're running from the project root[/yellow]") + console.print("[yellow] and PYTHONPATH is set correctly[/yellow]") + + # This should never be reached if PYTHONPATH is 
set correctly + raise RuntimeError(f"Unable to load deployer of type '{deployer}'. ") diff --git a/mcpgateway/tools/builder/pipeline.py b/mcpgateway/tools/builder/pipeline.py new file mode 100644 index 000000000..22e70aed9 --- /dev/null +++ b/mcpgateway/tools/builder/pipeline.py @@ -0,0 +1,294 @@ +"""Location: ./mcpgateway/tools/builder/pipeline.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Abstract base class for MCP Stack deployment implementations. + +This module defines the CICDModule interface that all deployment implementations +must implement. It provides a common API for building, deploying, and managing +MCP Gateway stacks with external plugin servers. + +The base class implements shared functionality (validation) while requiring +subclasses to implement deployment-specific logic (build, deploy, etc.). + +Design Pattern: + Strategy Pattern - Different implementations (Dagger vs Python) can be + swapped transparently via the DeployFactory. + +Example: + >>> from mcpgateway.tools.builder.factory import DeployFactory + >>> deployer, mode = DeployFactory.create_deployer("dagger", verbose=True) + >>> deployer.validate("mcp-stack.yaml") + βœ“ Configuration valid + >>> await deployer.build("mcp-stack.yaml") + βœ“ Built OPAPluginFilter +""" + +# Standard +from abc import ABC, abstractmethod +from pathlib import Path +from typing import Optional + +# Third-Party +from pydantic import ValidationError +from rich.console import Console +import yaml + +# First-Party +from mcpgateway.tools.builder.schema import MCPStackConfig + +# Shared console instance for consistent output formatting +console = Console() + + +class CICDModule(ABC): + """Abstract base class for MCP Stack deployment implementations. + + This class defines the interface that all deployment implementations must + implement. It provides common initialization and validation logic while + deferring implementation-specific details to subclasses. 
+ + Attributes: + verbose (bool): Enable verbose output during operations + console (Console): Rich console for formatted output + + Implementations: + - MCPStackDagger: High-performance implementation using Dagger SDK + - MCPStackPython: Fallback implementation using plain Python + Docker/Podman + + Example: + >>> class MyDeployer(CICDModule): + ... async def build(self, config_file: str, **kwargs) -> None: + ... # Implementation-specific build logic + ... pass + """ + + def __init__(self, verbose: bool = False): + """Initialize the deployment module. + + Args: + verbose: Enable verbose output during all operations + """ + self.verbose = verbose + self.console = console + + def validate(self, config_file: str) -> None: + """Validate mcp-stack.yaml configuration using Pydantic schemas. + + This method provides comprehensive validation of the MCP stack configuration + using Pydantic models defined in schema.py. It validates: + - Required sections (deployment, gateway, plugins) + - Deployment type (kubernetes or compose) + - Gateway image specification + - Plugin configurations (name, repo/image, etc.) 
+ - Custom business rules (unique names, valid combinations) + + Args: + config_file: Path to mcp-stack.yaml configuration file + + Raises: + ValueError: If configuration is invalid, with formatted error details + ValidationError: If Pydantic schema validation fails + FileNotFoundError: If config_file does not exist + + Example: + >>> deployer.validate("mcp-stack-local.yaml") + βœ“ Configuration valid + + >>> deployer.validate("invalid.yaml") + ValueError: Configuration validation failed: + β€’ plugins -> 0 -> name: Field required + β€’ gateway -> image: Field required + """ + if self.verbose: + self.console.print(f"[blue]Validating {config_file}...[/blue]") + + # Load YAML configuration + with open(config_file, "r") as f: + config_dict = yaml.safe_load(f) + + # Validate using Pydantic schema + try: + # Local + + MCPStackConfig(**config_dict) + except ValidationError as e: + # Format validation errors for better readability + error_msg = "Configuration validation failed:\n" + for error in e.errors(): + # Join the error location path (e.g., plugins -> 0 -> name) + loc = " -> ".join(str(x) for x in error["loc"]) + error_msg += f" β€’ {loc}: {error['msg']}\n" + raise ValueError(error_msg) from e + + if self.verbose: + self.console.print("[green]βœ“ Configuration valid[/green]") + + @abstractmethod + async def build(self, config_file: str, plugins_only: bool = False, specific_plugins: Optional[list[str]] = None, no_cache: bool = False, copy_env_templates: bool = False) -> None: + """Build container images for plugins and/or gateway. + + Subclasses must implement this to build Docker/Podman images from + Git repositories or use pre-built images. 
+ + Args: + config_file: Path to mcp-stack.yaml + plugins_only: Only build plugins, skip gateway + specific_plugins: List of specific plugin names to build (optional) + no_cache: Disable build cache for fresh builds + copy_env_templates: Copy .env.template files from cloned repos + + Raises: + RuntimeError: If build fails + ValueError: If plugin configuration is invalid + + Example: + >>> await deployer.build("mcp-stack.yaml", plugins_only=True) + βœ“ Built OPAPluginFilter + βœ“ Built LLMGuardPlugin + """ + pass + + @abstractmethod + async def generate_certificates(self, config_file: str) -> None: + """Generate mTLS certificates for gateway and plugins. + + Creates a certificate authority (CA) and issues certificates for: + - Gateway (client certificates for connecting to plugins) + - Each plugin (server certificates for accepting connections) + + Certificates are stored in the paths defined in the config's + certificates section (default: ./certs/mcp/). + + Args: + config_file: Path to mcp-stack.yaml + + Raises: + RuntimeError: If certificate generation fails + FileNotFoundError: If required tools (openssl) are not available + + Example: + >>> await deployer.generate_certificates("mcp-stack.yaml") + βœ“ Certificates generated + """ + pass + + @abstractmethod + async def deploy(self, config_file: str, dry_run: bool = False, skip_build: bool = False, skip_certs: bool = False) -> None: + """Deploy the MCP stack to Kubernetes or Docker Compose. + + This is the main deployment method that orchestrates: + 1. Building containers (unless skip_build=True) + 2. Generating mTLS certificates (unless skip_certs=True or mTLS disabled) + 3. Generating manifests (Kubernetes YAML or docker-compose.yaml) + 4. 
Applying the deployment (unless dry_run=True) + + Args: + config_file: Path to mcp-stack.yaml + dry_run: Generate manifests without actually deploying + skip_build: Skip building containers (use existing images) + skip_certs: Skip certificate generation (use existing certs) + + Raises: + RuntimeError: If deployment fails at any stage + ValueError: If configuration is invalid + + Example: + >>> # Full deployment + >>> await deployer.deploy("mcp-stack.yaml") + βœ“ Build complete + βœ“ Certificates generated + βœ“ Deployment complete + + >>> # Dry run (generate manifests only) + >>> await deployer.deploy("mcp-stack.yaml", dry_run=True) + βœ“ Dry-run complete (no changes made) + """ + pass + + @abstractmethod + async def verify(self, config_file: str, wait: bool = False, timeout: int = 300) -> None: + """Verify deployment health and readiness. + + Checks that all deployed services are healthy and ready: + - Kubernetes: Checks pod status, optionally waits for Ready + - Docker Compose: Checks container status + + Args: + config_file: Path to mcp-stack.yaml + wait: Wait for deployment to become ready + timeout: Maximum time to wait in seconds (default: 300) + + Raises: + RuntimeError: If verification fails or timeout is reached + TimeoutError: If wait=True and deployment doesn't become ready + + Example: + >>> # Quick health check + >>> await deployer.verify("mcp-stack.yaml") + NAME READY STATUS RESTARTS AGE + mcpgateway-xxx 1/1 Running 0 2m + mcp-plugin-opa-xxx 1/1 Running 0 2m + + >>> # Wait for ready state + >>> await deployer.verify("mcp-stack.yaml", wait=True, timeout=600) + βœ“ Deployment healthy + """ + pass + + @abstractmethod + async def destroy(self, config_file: str) -> None: + """Destroy the deployed MCP stack. + + Removes all deployed resources: + - Kubernetes: Deletes all resources in the namespace + - Docker Compose: Stops and removes containers, networks, volumes + + WARNING: This is destructive and cannot be undone! 
+ + Args: + config_file: Path to mcp-stack.yaml + + Raises: + RuntimeError: If destruction fails + + Example: + >>> await deployer.destroy("mcp-stack.yaml") + βœ“ Deployment destroyed + """ + pass + + @abstractmethod + def generate_manifests(self, config_file: str, output_dir: Optional[str] = None) -> Path: + """Generate deployment manifests (Kubernetes YAML or docker-compose.yaml). + + Creates deployment manifests based on configuration: + - Kubernetes: Generates Deployment, Service, ConfigMap, Secret YAML files + - Docker Compose: Generates docker-compose.yaml with all services + + Also generates: + - plugins-config.yaml: Plugin manager configuration for gateway + - Environment files: .env files for each service + + Args: + config_file: Path to mcp-stack.yaml + output_dir: Output directory for manifests (default: ./deploy/manifests) + + Returns: + Path: Directory containing generated manifests + + Raises: + ValueError: If configuration is invalid + OSError: If output directory cannot be created + + Example: + >>> manifests_path = deployer.generate_manifests("mcp-stack.yaml") + >>> print(f"Manifests generated in: {manifests_path}") + Manifests generated in: /path/to/deploy/manifests + + >>> # Custom output directory + >>> deployer.generate_manifests("mcp-stack.yaml", output_dir="./my-manifests") + βœ“ Manifests generated: ./my-manifests + """ + pass diff --git a/mcpgateway/tools/builder/python_deploy.py b/mcpgateway/tools/builder/python_deploy.py new file mode 100644 index 000000000..78663e4dc --- /dev/null +++ b/mcpgateway/tools/builder/python_deploy.py @@ -0,0 +1,440 @@ +"""Location: ./mcpgateway/tools/builder/python_deploy.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Plain Python MCP Stack Deployment Module + +This module provides deployment functionality using only standard Python +and system commands (docker/podman, kubectl, docker-compose). + +This is the fallback implementation when Dagger is not available. 
+""" + +# Standard +from pathlib import Path +import shutil +import subprocess +from typing import Any, Dict, List, Optional + +# Third-Party +from rich.console import Console +from rich.progress import Progress, SpinnerColumn, TextColumn + +# First-Party +from mcpgateway.tools.builder.common import ( + deploy_compose, + deploy_kubernetes, + destroy_compose, + destroy_kubernetes, + generate_compose_manifests, + generate_kubernetes_manifests, + generate_plugin_config, + get_deploy_dir, + load_config, + verify_compose, + verify_kubernetes, +) +from mcpgateway.tools.builder.pipeline import CICDModule + +console = Console() + + +class MCPStackPython(CICDModule): + """Plain Python implementation of MCP Stack deployment.""" + + def __init__(self, verbose: bool = False): + super().__init__(verbose) + + # Detect container runtime (docker or podman) + self.container_runtime = self._detect_container_runtime() + + async def build(self, config_file: str, plugins_only: bool = False, specific_plugins: Optional[List[str]] = None, no_cache: bool = False, copy_env_templates: bool = False) -> None: + """Build gateway and plugin containers using docker/podman. 
+ + Args: + config_file: Path to mcp-stack.yaml + plugins_only: Only build plugins, skip gateway + specific_plugins: List of specific plugin names to build + no_cache: Disable build cache + copy_env_templates: Copy .env.template files from cloned repos + """ + config = load_config(config_file) + + # Build gateway (unless plugins_only=True) + if not plugins_only: + gateway = config.get("gateway", {}) + if gateway.get("repo"): + with Progress(SpinnerColumn(), TextColumn("[progress.description]{task.description}"), console=self.console) as progress: + task = progress.add_task("Building gateway...", total=None) + try: + self._build_component(gateway, "gateway", no_cache=no_cache) + progress.update(task, completed=1, description="[green]βœ“ Built gateway[/green]") + except Exception as e: + progress.update(task, completed=1, description="[red]βœ— Failed gateway[/red]") + # Print full error after progress bar closes + self.console.print("\n[red bold]Gateway build failed:[/red bold]") + self.console.print(f"[red]{type(e).__name__}: {str(e)}[/red]") + if self.verbose: + # Standard + import traceback + + self.console.print(f"[dim]{traceback.format_exc()}[/dim]") + raise + elif self.verbose: + self.console.print("[dim]Skipping gateway build (using pre-built image)[/dim]") + + # Build plugins + plugins = config.get("plugins", []) + + if specific_plugins: + plugins = [p for p in plugins if p["name"] in specific_plugins] + + if not plugins: + self.console.print("[yellow]No plugins to build[/yellow]") + return + + with Progress(SpinnerColumn(), TextColumn("[progress.description]{task.description}"), console=self.console) as progress: + + for plugin in plugins: + plugin_name = plugin["name"] + + # Skip if pre-built image specified + if "image" in plugin and "repo" not in plugin: + task = progress.add_task(f"Skipping {plugin_name} (using pre-built image)", total=1) + progress.update(task, completed=1) + continue + + task = progress.add_task(f"Building {plugin_name}...", 
total=None) + + try: + self._build_component(plugin, plugin_name, no_cache=no_cache, copy_env_templates=copy_env_templates) + progress.update(task, completed=1, description=f"[green]βœ“ Built {plugin_name}[/green]") + except Exception as e: + progress.update(task, completed=1, description=f"[red]βœ— Failed {plugin_name}[/red]") + # Print full error after progress bar closes + self.console.print(f"\n[red bold]Plugin '{plugin_name}' build failed:[/red bold]") + self.console.print(f"[red]{type(e).__name__}: {str(e)}[/red]") + if self.verbose: + # Standard + import traceback + + self.console.print(f"[dim]{traceback.format_exc()}[/dim]") + raise + + async def generate_certificates(self, config_file: str) -> None: + """Generate mTLS certificates for plugins. + + Args: + config_file: Path to mcp-stack.yaml + """ + config = load_config(config_file) + + if self.verbose: + self.console.print("[blue]Generating mTLS certificates...[/blue]") + + # Check if make is available + if not shutil.which("make"): + raise RuntimeError("'make' command not found. Cannot generate certificates.") + + # Generate CA + self._run_command(["make", "certs-mcp-ca", "MCP_CERT_DAYS=825"]) + + # Generate gateway cert + self._run_command(["make", "certs-mcp-gateway", "MCP_CERT_DAYS=825"]) + + # Generate plugin certificates + plugins = config.get("plugins", []) + for plugin in plugins: + plugin_name = plugin["name"] + self._run_command(["make", "certs-mcp-plugin", f"PLUGIN_NAME={plugin_name}", "MCP_CERT_DAYS=825"]) + + if self.verbose: + self.console.print("[green]βœ“ Certificates generated[/green]") + + async def deploy(self, config_file: str, dry_run: bool = False, skip_build: bool = False, skip_certs: bool = False, output_dir: Optional[str] = None) -> None: + """Deploy MCP stack. 
+ + Args: + config_file: Path to mcp-stack.yaml + dry_run: Generate manifests without deploying + skip_build: Skip building containers + skip_certs: Skip certificate generation + output_dir: Output directory for manifests (default: ./deploy) + """ + config = load_config(config_file) + + # Build containers + if not skip_build: + await self.build(config_file) + + # Generate certificates (only if mTLS is enabled) + gateway_mtls = config.get("gateway", {}).get("mtls_enabled", True) + plugin_mtls = any(p.get("mtls_enabled", True) for p in config.get("plugins", [])) + mtls_needed = gateway_mtls or plugin_mtls + + if not skip_certs and mtls_needed: + await self.generate_certificates(config_file) + elif not skip_certs and not mtls_needed: + if self.verbose: + self.console.print("[dim]Skipping certificate generation (mTLS disabled)[/dim]") + + # Generate manifests + manifests_dir = self.generate_manifests(config_file, output_dir=output_dir) + + if dry_run: + self.console.print(f"[yellow]Dry-run: Manifests generated in {manifests_dir}[/yellow]") + return + + # Apply deployment + deployment_type = config["deployment"]["type"] + + if deployment_type == "kubernetes": + self._deploy_kubernetes(manifests_dir) + elif deployment_type == "compose": + self._deploy_compose(manifests_dir) + else: + raise ValueError(f"Unsupported deployment type: {deployment_type}") + + async def verify(self, config_file: str, wait: bool = False, timeout: int = 300) -> None: + """Verify deployment health. 
+ + Args: + config_file: Path to mcp-stack.yaml + wait: Wait for deployment to be ready + timeout: Wait timeout in seconds + """ + config = load_config(config_file) + deployment_type = config["deployment"]["type"] + + if self.verbose: + self.console.print("[blue]Verifying deployment...[/blue]") + + if deployment_type == "kubernetes": + self._verify_kubernetes(config, wait=wait, timeout=timeout) + elif deployment_type == "compose": + self._verify_compose(config, wait=wait, timeout=timeout) + + async def destroy(self, config_file: str) -> None: + """Destroy deployed MCP stack. + + Args: + config_file: Path to mcp-stack.yaml + """ + config = load_config(config_file) + deployment_type = config["deployment"]["type"] + + if self.verbose: + self.console.print("[blue]Destroying deployment...[/blue]") + + if deployment_type == "kubernetes": + self._destroy_kubernetes(config) + elif deployment_type == "compose": + self._destroy_compose(config) + + def generate_manifests(self, config_file: str, output_dir: Optional[str] = None) -> Path: + """Generate deployment manifests. 
+ + Args: + config_file: Path to mcp-stack.yaml + output_dir: Output directory for manifests + + Returns: + Path to generated manifests directory + """ + config = load_config(config_file) + deployment_type = config["deployment"]["type"] + + if output_dir is None: + deploy_dir = get_deploy_dir() + # Separate subdirectories for kubernetes and compose + output_dir = deploy_dir / "manifests" / deployment_type + else: + output_dir = Path(output_dir) + + output_dir.mkdir(parents=True, exist_ok=True) + + # Store output dir for later use + self._last_output_dir = output_dir + + # Generate plugin config.yaml for gateway (shared function) + generate_plugin_config(config, output_dir, verbose=self.verbose) + + if deployment_type == "kubernetes": + generate_kubernetes_manifests(config, output_dir, verbose=self.verbose) + elif deployment_type == "compose": + generate_compose_manifests(config, output_dir, verbose=self.verbose) + else: + raise ValueError(f"Unsupported deployment type: {deployment_type}") + + return output_dir + + # Private helper methods + + def _detect_container_runtime(self) -> str: + """Detect available container runtime (docker or podman).""" + if shutil.which("docker"): + return "docker" + elif shutil.which("podman"): + return "podman" + else: + raise RuntimeError("No container runtime found. Install docker or podman.") + + def _run_command(self, cmd: List[str], cwd: Optional[Path] = None, capture_output: bool = False) -> subprocess.CompletedProcess: + """Run a shell command. 
+ + Args: + cmd: Command and arguments + cwd: Working directory + capture_output: Capture stdout/stderr + + Returns: + CompletedProcess instance + + Raises: + subprocess.CalledProcessError: If command fails + """ + if self.verbose: + self.console.print(f"[dim]Running: {' '.join(cmd)}[/dim]") + + result = subprocess.run(cmd, cwd=cwd, capture_output=capture_output, text=True, check=True) + + return result + + def _build_component(self, component: Dict[str, Any], component_name: str, no_cache: bool = False, copy_env_templates: bool = False) -> None: + """Build a component (gateway or plugin) container using docker/podman. + + Args: + component: Component configuration dict + component_name: Name of the component (gateway or plugin name) + no_cache: Disable cache + copy_env_templates: Copy .env.template from repo if it exists + """ + repo = component.get("repo") + + if not repo: + raise ValueError(f"Component '{component_name}' has no 'repo' field") + + # Clone repository + git_ref = component.get("ref", "main") + clone_dir = Path(f"./build/{component_name}") + clone_dir.mkdir(parents=True, exist_ok=True) + + # Clone or update repo + if (clone_dir / ".git").exists(): + if self.verbose: + self.console.print(f"[dim]Updating {component_name} repository...[/dim]") + self._run_command(["git", "fetch", "origin", git_ref], cwd=clone_dir) + self._run_command(["git", "checkout", "-B", git_ref, f"origin/{git_ref}"], cwd=clone_dir) + else: + if self.verbose: + self.console.print(f"[dim]Cloning {component_name} repository...[/dim]") + self._run_command(["git", "clone", "--branch", git_ref, "--depth", "1", repo, str(clone_dir)]) + + # Determine build context (subdirectory within repo) + build_context = component.get("context", ".") + build_dir = clone_dir / build_context + + if not build_dir.exists(): + raise FileNotFoundError(f"Build context not found: {build_dir}") + + # Detect Containerfile/Dockerfile + containerfile = component.get("containerfile", "Containerfile") + 
containerfile_path = build_dir / containerfile + + if not containerfile_path.exists(): + containerfile = "Dockerfile" + containerfile_path = build_dir / containerfile + if not containerfile_path.exists(): + raise FileNotFoundError(f"No Containerfile or Dockerfile found in {build_dir}") + + # Build container - determine image tag + if "image" in component: + # Use explicitly specified image name + image_tag = component["image"] + else: + # Generate default image name based on component type + image_tag = f"mcpgateway-{component_name.lower()}:latest" + + build_cmd = [self.container_runtime, "build", "-f", containerfile, "-t", image_tag] + + if no_cache: + build_cmd.append("--no-cache") + + # Add target stage if specified (for multi-stage builds) + if "target" in component: + build_cmd.extend(["--target", component["target"]]) + + build_cmd.append(".") + + self._run_command(build_cmd, cwd=build_dir) + + # Copy .env.template if requested and exists + if copy_env_templates: + # Third-Party + from common import copy_env_template as copy_template + + copy_template(component_name, build_dir, verbose=self.verbose) + + if self.verbose: + self.console.print(f"[green]βœ“ Built {component_name} -> {image_tag}[/green]") + + def _deploy_kubernetes(self, manifests_dir: Path) -> None: + """Deploy to Kubernetes using kubectl. + + Uses shared deploy_kubernetes() from common.py to avoid code duplication. + """ + deploy_kubernetes(manifests_dir, verbose=self.verbose) + + def _deploy_compose(self, manifests_dir: Path) -> None: + """Deploy using Docker Compose. + + Uses shared deploy_compose() from common.py to avoid code duplication. + """ + compose_file = manifests_dir / "docker-compose.yaml" + deploy_compose(compose_file, verbose=self.verbose) + + def _verify_kubernetes(self, config: Dict[str, Any], wait: bool = False, timeout: int = 300) -> None: + """Verify Kubernetes deployment health. + + Uses shared verify_kubernetes() from common.py to avoid code duplication. 
+ """ + namespace = config["deployment"].get("namespace", "mcp-gateway") + output = verify_kubernetes(namespace, wait=wait, timeout=timeout, verbose=self.verbose) + self.console.print(output) + + def _verify_compose(self, config: Dict[str, Any], wait: bool = False, timeout: int = 300) -> None: + """Verify Docker Compose deployment health. + + Uses shared verify_compose() from common.py to avoid code duplication. + """ + _ = config, wait, timeout # Reserved for future use + # Use the same manifests directory as generate_manifests + deploy_dir = get_deploy_dir() + output_dir = getattr(self, "_last_output_dir", deploy_dir / "manifests" / "compose") + compose_file = output_dir / "docker-compose.yaml" + output = verify_compose(compose_file, verbose=self.verbose) + self.console.print(output) + + def _destroy_kubernetes(self, config: Dict[str, Any]) -> None: + """Destroy Kubernetes deployment. + + Uses shared destroy_kubernetes() from common.py to avoid code duplication. + """ + _ = config # Reserved for future use (namespace, labels, etc.) + # Use the same manifests directory as generate_manifests + deploy_dir = get_deploy_dir() + manifests_dir = getattr(self, "_last_output_dir", deploy_dir / "manifests" / "kubernetes") + destroy_kubernetes(manifests_dir, verbose=self.verbose) + + def _destroy_compose(self, config: Dict[str, Any]) -> None: + """Destroy Docker Compose deployment. + + Uses shared destroy_compose() from common.py to avoid code duplication. + """ + _ = config # Reserved for future use (project name, networks, etc.) 
+ # Use the same manifests directory as generate_manifests + deploy_dir = get_deploy_dir() + output_dir = getattr(self, "_last_output_dir", deploy_dir / "manifests" / "compose") + compose_file = output_dir / "docker-compose.yaml" + destroy_compose(compose_file, verbose=self.verbose) diff --git a/mcpgateway/tools/builder/schema.py b/mcpgateway/tools/builder/schema.py new file mode 100644 index 000000000..2892b8f5b --- /dev/null +++ b/mcpgateway/tools/builder/schema.py @@ -0,0 +1,156 @@ +"""Location: ./mcpgateway/tools/builder/schema.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Pydantic schemas for MCP Stack configuration validation""" + +# Standard +from typing import Any, Dict, List, Literal, Optional + +# Third-Party +from pydantic import BaseModel, Field, field_validator + + +class DeploymentConfig(BaseModel): + """Deployment configuration""" + + type: Literal["kubernetes", "compose"] = Field(..., description="Deployment type") + project_name: Optional[str] = Field(None, description="Project name for compose") + namespace: Optional[str] = Field(None, description="Namespace for Kubernetes") + + +class BuildableConfig(BaseModel): + """Base class for components that can be built from source or use pre-built images. + + This base class provides common configuration for both gateway and plugins, + supporting two build modes: + 1. Pre-built image: Specify only 'image' field + 2. 
Build from source: Specify 'repo' and optionally 'ref', 'context', 'containerfile', 'target' + + Attributes: + image: Pre-built Docker image name (e.g., "mcpgateway/mcpgateway:latest") + repo: Git repository URL to build from + ref: Git branch/tag/commit to checkout (default: "main") + context: Build context subdirectory within repo (default: ".") + containerfile: Path to Containerfile/Dockerfile (default: "Containerfile") + target: Target stage for multi-stage builds (optional) + host_port: Host port mapping for direct access (optional) + env_vars: Environment variables for container + mtls_enabled: Enable mutual TLS authentication (default: True) + """ + + # Build configuration + image: Optional[str] = Field(None, description="Pre-built Docker image") + repo: Optional[str] = Field(None, description="Git repository URL") + ref: Optional[str] = Field("main", description="Git branch/tag/commit") + context: Optional[str] = Field(".", description="Build context subdirectory") + containerfile: Optional[str] = Field("Containerfile", description="Containerfile path") + target: Optional[str] = Field(None, description="Multi-stage build target") + + # Runtime configuration + host_port: Optional[int] = Field(None, description="Host port mapping") + env_vars: Optional[Dict[str, Any]] = Field(default_factory=dict, description="Environment variables") + mtls_enabled: Optional[bool] = Field(True, description="Enable mTLS") + + def model_post_init(self, __context: Any) -> None: + """Validate that either image or repo is specified""" + if not self.image and not self.repo: + component_type = self.__class__.__name__.replace("Config", "") + raise ValueError(f"{component_type} must specify either 'image' or 'repo'") + + +class GatewayConfig(BuildableConfig): + """Gateway configuration. + + Extends BuildableConfig to support either pre-built gateway images or + building the gateway from source repository. 
+ + Attributes: + port: Gateway internal port (default: 4444) + """ + + port: Optional[int] = Field(4444, description="Gateway port") + + +class PluginConfig(BuildableConfig): + """Plugin configuration. + + Extends BuildableConfig to support plugin-specific configuration while + inheriting common build and runtime capabilities. + + Attributes: + name: Unique plugin identifier + port: Plugin internal port (default: 8000) + expose_port: Whether to expose plugin port on host (default: False) + plugin_overrides: Plugin-specific override configuration + """ + + name: str = Field(..., description="Plugin name") + port: Optional[int] = Field(8000, description="Plugin port") + expose_port: Optional[bool] = Field(False, description="Expose port on host") + plugin_overrides: Optional[Dict[str, Any]] = Field(default_factory=dict, description="Plugin overrides") + + @field_validator("name") + @classmethod + def validate_name(cls, v: str) -> str: + """Validate plugin name is non-empty""" + if not v or not v.strip(): + raise ValueError("Plugin name cannot be empty") + return v + + +class CertificatesConfig(BaseModel): + """Certificate configuration""" + + validity_days: Optional[int] = Field(825, description="Certificate validity in days") + auto_generate: Optional[bool] = Field(True, description="Auto-generate certificates") + ca_path: Optional[str] = Field("./certs/mcp/ca", description="CA certificate path") + gateway_path: Optional[str] = Field("./certs/mcp/gateway", description="Gateway cert path") + plugins_path: Optional[str] = Field("./certs/mcp/plugins", description="Plugins cert path") + + +class PostgresConfig(BaseModel): + """PostgreSQL database configuration""" + + enabled: Optional[bool] = Field(True, description="Enable PostgreSQL deployment") + image: Optional[str] = Field("postgres:17", description="PostgreSQL image") + database: Optional[str] = Field("mcp", description="Database name") + user: Optional[str] = Field("postgres", description="Database user") + 
password: Optional[str] = Field("mysecretpassword", description="Database password") + storage_size: Optional[str] = Field("10Gi", description="Persistent volume size (Kubernetes only)") + storage_class: Optional[str] = Field(None, description="Storage class name (Kubernetes only)") + + +class RedisConfig(BaseModel): + """Redis cache configuration""" + + enabled: Optional[bool] = Field(True, description="Enable Redis deployment") + image: Optional[str] = Field("redis:latest", description="Redis image") + + +class InfrastructureConfig(BaseModel): + """Infrastructure services configuration""" + + postgres: Optional[PostgresConfig] = Field(default_factory=PostgresConfig) + redis: Optional[RedisConfig] = Field(default_factory=RedisConfig) + + +class MCPStackConfig(BaseModel): + """Complete MCP Stack configuration""" + + deployment: DeploymentConfig + gateway: GatewayConfig + plugins: List[PluginConfig] = Field(default_factory=list) + certificates: Optional[CertificatesConfig] = Field(default_factory=CertificatesConfig) + infrastructure: Optional[InfrastructureConfig] = Field(default_factory=InfrastructureConfig) + + @field_validator("plugins") + @classmethod + def validate_plugin_names_unique(cls, v: List[PluginConfig]) -> List[PluginConfig]: + """Ensure plugin names are unique""" + names = [p.name for p in v] + if len(names) != len(set(names)): + duplicates = [name for name in names if names.count(name) > 1] + raise ValueError(f"Duplicate plugin names found: {duplicates}") + return v diff --git a/mcpgateway/tools/builder/templates/compose/docker-compose.yaml.j2 b/mcpgateway/tools/builder/templates/compose/docker-compose.yaml.j2 new file mode 100644 index 000000000..7d763aa30 --- /dev/null +++ b/mcpgateway/tools/builder/templates/compose/docker-compose.yaml.j2 @@ -0,0 +1,197 @@ +# Location: ./mcpgateway/tools/builder/templates/compose/docker-compose.yaml.j2 +# Copyright 2025 +# SPDX-License-Identifier: Apache-2.0 +# Authors: Teryl Taylor +# Docker Compose manifest for 
MCP Stack +# Generated from mcp-stack.yaml + +version: '3.8' + +networks: + mcp-network: + driver: bridge + +volumes: + gateway-data: + driver: local + pgdata: + driver: local +{% for plugin in plugins %} + {{ plugin.name | lower }}-data: + driver: local +{% endfor %} + +services: + # MCP Gateway + mcpgateway: + image: {{ gateway.image }} + container_name: mcpgateway + hostname: mcpgateway + + {% if gateway.env_file is defined %} + env_file: + - {{ gateway.env_file }} + {% endif %} + + environment: + {% if gateway.env_vars is defined and gateway.env_vars %} + # User-defined environment variables + {% for key, value in gateway.env_vars.items() %} + - {{ key }}={{ value }} + {% endfor %} + {% endif %} + # Database configuration + - DATABASE_URL=postgresql://postgres:$${POSTGRES_PASSWORD:-mysecretpassword}@postgres:5432/mcp + - REDIS_URL=redis://redis:6379/0 + {% if gateway.mtls_enabled | default(true) %} + # mTLS client configuration (gateway connects to external plugins) + - PLUGINS_CLIENT_MTLS_CA_BUNDLE=/app/certs/mcp/ca/ca.crt + - PLUGINS_CLIENT_MTLS_CERTFILE=/app/certs/mcp/gateway/client.crt + - PLUGINS_CLIENT_MTLS_KEYFILE=/app/certs/mcp/gateway/client.key + - PLUGINS_CLIENT_MTLS_VERIFY={{ gateway.mtls_verify | default('true') }} + - PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME={{ gateway.mtls_check_hostname | default('false') }} + {% endif %} + + ports: + - "{{ gateway.host_port | default(4444) }}:{{ gateway.port | default(4444) }}" + + volumes: + - gateway-data:/app/data + {% if gateway.mtls_enabled | default(true) %} + - {{ cert_paths.gateway_cert_dir }}:/app/certs/mcp/gateway:ro + - {{ cert_paths.ca_cert_file }}:/app/certs/mcp/ca/ca.crt:ro + {% endif %} + # Auto-generated plugin configuration + - ./plugins-config.yaml:/app/config/plugins.yaml:ro + + networks: + - mcp-network + + restart: unless-stopped + + healthcheck: + test: ["CMD", "python3", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:{{ gateway.port | default(4444) }}/health').read()"] + 
interval: 30s + timeout: 10s + retries: 3 + start_period: 40s + + depends_on: + postgres: + condition: service_healthy + redis: + condition: service_started +{% for plugin in plugins %} {{ plugin.name | lower }}: + condition: service_started +{% endfor %} + +{% for plugin in plugins %} + # Plugin: {{ plugin.name }} + {{ plugin.name | lower }}: + image: {{ plugin.image | default('mcpgateway-' + plugin.name | lower + ':latest') }} + container_name: mcp-plugin-{{ plugin.name | lower }} + hostname: {{ plugin.name | lower }} + + {% if plugin.env_file is defined %} + env_file: + - {{ plugin.env_file }} + {% endif %} + + environment: + {% if plugin.env_vars is defined and plugin.env_vars %} + # User-defined environment variables + {% for key, value in plugin.env_vars.items() %} + - {{ key }}={{ value }} + {% endfor %} + {% endif %} + {% if plugin.mtls_enabled | default(true) %} + # mTLS server configuration (plugin accepts gateway connections) + - PLUGINS_SERVER_HOST=0.0.0.0 + - PLUGINS_SERVER_PORT={{ plugin.port | default(8000) }} + - PLUGINS_SERVER_SSL_ENABLED=true + - PLUGINS_SERVER_SSL_KEYFILE=/app/certs/mcp/server.key + - PLUGINS_SERVER_SSL_CERTFILE=/app/certs/mcp/server.crt + - PLUGINS_SERVER_SSL_CA_CERTS=/app/certs/mcp/ca.crt + - PLUGINS_SERVER_SSL_CERT_REQS=2 # CERT_REQUIRED - enforce client certificates + {% endif %} + + {% if plugin.expose_port | default(false) %} + ports: + - "{{ plugin.host_port }}:{{ plugin.port | default(8000) }}" + {% endif %} + + volumes: + - {{ plugin.name | lower }}-data:/app/data + {% if plugin.mtls_enabled | default(true) %} + - {{ cert_paths.plugins_cert_base }}/{{ plugin.name }}:/app/certs/mcp:ro + {% endif %} + + networks: + - mcp-network + + restart: unless-stopped + + healthcheck: + {% if plugin.mtls_enabled | default(true) %} + # When mTLS is enabled, health check uses separate HTTP server on port+1000 + test: ["CMD", "python3", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:{{ (plugin.port | 
default(8000)) + 1000 }}/health').read()"] + {% else %} + # When mTLS is disabled, health check uses main server + test: ["CMD", "python3", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:{{ plugin.port | default(8000) }}/health').read()"] + {% endif %} + interval: 30s + timeout: 10s + retries: 3 + start_period: 40s + + {% if plugin.depends_on is defined %} + depends_on: + {% for dep in plugin.depends_on %} + - {{ dep }} + {% endfor %} + {% endif %} + +{% endfor %} + # PostgreSQL Database + postgres: + image: postgres:17 + container_name: mcp-postgres + hostname: postgres + + environment: + - POSTGRES_USER=postgres + - POSTGRES_PASSWORD=$${POSTGRES_PASSWORD:-mysecretpassword} + - POSTGRES_DB=mcp + + ports: + - "5432:5432" + + volumes: + - pgdata:/var/lib/postgresql/data + + networks: + - mcp-network + + restart: unless-stopped + + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 30s + timeout: 5s + retries: 5 + start_period: 20s + + # Redis Cache + redis: + image: redis:latest + container_name: mcp-redis + hostname: redis + + ports: + - "6379:6379" + + networks: + - mcp-network + + restart: unless-stopped + diff --git a/mcpgateway/tools/builder/templates/kubernetes/cert-secrets.yaml.j2 b/mcpgateway/tools/builder/templates/kubernetes/cert-secrets.yaml.j2 new file mode 100644 index 000000000..c5a026434 --- /dev/null +++ b/mcpgateway/tools/builder/templates/kubernetes/cert-secrets.yaml.j2 @@ -0,0 +1,38 @@ +# Location: ./mcpgateway/tools/builder/templates/kubernetes/cert-secrets.yaml.j2 +# Copyright 2025 +# SPDX-License-Identifier: Apache-2.0 +# Authors: Teryl Taylor +# mTLS Certificate Secrets +# CA Certificate (shared by all components) +apiVersion: v1 +kind: Secret +metadata: + name: mcp-ca-cert + namespace: {{ namespace }} +type: Opaque +data: + ca.crt: {{ ca_cert_b64 }} +--- +# Gateway Client Certificate +apiVersion: v1 +kind: Secret +metadata: + name: mcp-{{ gateway_name }}-server-cert + namespace: {{ namespace }} 
+type: kubernetes.io/tls +data: + tls.crt: {{ gateway_cert_b64 }} + tls.key: {{ gateway_key_b64 }} +{% for plugin in plugins %} +--- +# Plugin {{ plugin.name }} Server Certificate +apiVersion: v1 +kind: Secret +metadata: + name: mcp-{{ plugin.name }}-server-cert + namespace: {{ namespace }} +type: kubernetes.io/tls +data: + tls.crt: {{ plugin.cert_b64 }} + tls.key: {{ plugin.key_b64 }} +{% endfor %} diff --git a/mcpgateway/tools/builder/templates/kubernetes/deployment.yaml.j2 b/mcpgateway/tools/builder/templates/kubernetes/deployment.yaml.j2 new file mode 100644 index 000000000..bf2510478 --- /dev/null +++ b/mcpgateway/tools/builder/templates/kubernetes/deployment.yaml.j2 @@ -0,0 +1,233 @@ +# Location: ./mcpgateway/tools/builder/templates/kubernetes/deployment.yaml.j2 +# Copyright 2025 +# SPDX-License-Identifier: Apache-2.0 +# Authors: Teryl Taylor +# Kubernetes Deployment for {{ name }} +apiVersion: v1 +kind: Namespace +metadata: + name: {{ namespace }} +--- +apiVersion: v1 +kind: Secret +metadata: + name: {{ name }}-env + namespace: {{ namespace }} +type: Opaque +stringData: +{% if env_vars is defined and env_vars %} + # Environment variables + # NOTE: In production, these should come from CI/CD vault secrets +{% for key, value in env_vars.items() %} + {{ key }}: "{{ value }}" +{% endfor %} +{% endif %} +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ name }} + namespace: {{ namespace }} + labels: + app: {{ name }} + component: {% if 'gateway' in name %}gateway{% else %}plugin{% endif %} +spec: + replicas: {{ replicas | default(1) }} + selector: + matchLabels: + app: {{ name }} + template: + metadata: + labels: + app: {{ name }} + component: {% if 'gateway' in name %}gateway{% else %}plugin{% endif %} + spec: + {% if image_pull_secret is defined %} + imagePullSecrets: + - name: {{ image_pull_secret }} + {% endif %} + + {% if init_containers is defined %} + initContainers: + {% for init_container in init_containers %} + - name: {{ 
init_container.name }} + image: {{ init_container.image }} + command: {{ init_container.command | tojson }} + {% endfor %} + {% endif %} + + containers: + - name: {{ name }} + image: {{ image }} + imagePullPolicy: {{ image_pull_policy | default('IfNotPresent') }} + + ports: + - name: http + containerPort: {{ port | default(8000) }} + protocol: TCP + {% if mtls_enabled | default(true) and 'gateway' not in name %} + - name: health + containerPort: 9000 + protocol: TCP + {% endif %} + + env: + {% if mtls_enabled | default(true) %} + {% if 'gateway' in name %} + # mTLS client configuration (gateway connects to plugins) + - name: PLUGINS_CLIENT_MTLS_CA_BUNDLE + value: "/app/certs/ca/ca.crt" + - name: PLUGINS_CLIENT_MTLS_CERTFILE + value: "/app/certs/mcp/tls.crt" + - name: PLUGINS_CLIENT_MTLS_KEYFILE + value: "/app/certs/mcp/tls.key" + - name: PLUGINS_CLIENT_MTLS_VERIFY + value: "{{ mtls_verify | default('true') }}" + - name: PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME + value: "{{ mtls_check_hostname | default('false') }}" + {% else %} + # mTLS server configuration (plugin accepts gateway connections) + - name: PLUGINS_SERVER_HOST + value: "0.0.0.0" + - name: PLUGINS_SERVER_PORT + value: "{{ port | default(8000) }}" + - name: PLUGINS_SERVER_SSL_ENABLED + value: "true" + - name: PLUGINS_SERVER_SSL_KEYFILE + value: "/app/certs/mcp/tls.key" + - name: PLUGINS_SERVER_SSL_CERTFILE + value: "/app/certs/mcp/tls.crt" + - name: PLUGINS_SERVER_SSL_CA_CERTS + value: "/app/certs/ca/ca.crt" + - name: PLUGINS_SERVER_SSL_CERT_REQS + value: "2" # CERT_REQUIRED + {% endif %} + {% endif %} + + envFrom: + - secretRef: + name: {{ name }}-env + + {% if health_check | default(true) %} + livenessProbe: + httpGet: + path: /health + {% if mtls_enabled | default(true) and 'gateway' not in name %} + # Plugin with mTLS: use separate health check server on port 9000 + port: health + scheme: HTTP + {% else %} + # Gateway or non-mTLS: health check on main HTTP port + port: http + scheme: HTTP + {% endif %} + 
initialDelaySeconds: 30 + periodSeconds: 10 + timeoutSeconds: 5 + failureThreshold: 3 + + readinessProbe: + httpGet: + path: /health + {% if mtls_enabled | default(true) and 'gateway' not in name %} + # Plugin with mTLS: use separate health check server on port 9000 + port: health + scheme: HTTP + {% else %} + # Gateway or non-mTLS: health check on main HTTP port + port: http + scheme: HTTP + {% endif %} + initialDelaySeconds: 10 + periodSeconds: 5 + timeoutSeconds: 3 + failureThreshold: 3 + {% endif %} + + resources: + requests: + memory: "{{ memory_request | default('256Mi') }}" + cpu: "{{ cpu_request | default('100m') }}" + limits: + memory: "{{ memory_limit | default('512Mi') }}" + cpu: "{{ cpu_limit | default('500m') }}" + + volumeMounts: + {% if mtls_enabled | default(true) %} + - name: server-cert + mountPath: /app/certs/mcp + readOnly: true + - name: ca-cert + mountPath: /app/certs/ca + readOnly: true + {% endif %} + + {% if volume_mounts is defined %} + {% for mount in volume_mounts %} + - name: {{ mount.name }} + mountPath: {{ mount.path }} + {% if mount.readonly | default(false) %} + readOnly: true + {% endif %} + {% endfor %} + {% endif %} + + securityContext: + runAsNonRoot: true + runAsUser: 1001 + allowPrivilegeEscalation: false + capabilities: + drop: + - ALL + readOnlyRootFilesystem: false + + volumes: + {% if mtls_enabled | default(true) %} + - name: server-cert + secret: + secretName: mcp-{{ name }}-server-cert + defaultMode: 0444 + - name: ca-cert + secret: + secretName: mcp-ca-cert + defaultMode: 0444 + {% endif %} + + {% if volumes is defined %} + {% for volume in volumes %} + - name: {{ volume.name }} + {% if volume.type == 'secret' %} + secret: + secretName: {{ volume.secret_name }} + {% if volume.default_mode is defined %} + defaultMode: {{ volume.default_mode }} + {% endif %} + {% elif volume.type == 'configmap' %} + configMap: + name: {{ volume.configmap_name }} + {% elif volume.type == 'persistentVolumeClaim' %} + persistentVolumeClaim: 
+ claimName: {{ volume.claim_name }} + {% endif %} + {% endfor %} + {% endif %} +--- +apiVersion: v1 +kind: Service +metadata: + name: {{ name }} + namespace: {{ namespace }} + labels: + app: {{ name }} +spec: + type: {{ service_type | default('ClusterIP') }} + ports: + - name: http + port: {{ service_port | default(8000) }} + targetPort: http + protocol: TCP + {% if service_type == 'NodePort' and node_port is defined %} + nodePort: {{ node_port }} + {% endif %} + selector: + app: {{ name }} diff --git a/mcpgateway/tools/builder/templates/kubernetes/postgres.yaml.j2 b/mcpgateway/tools/builder/templates/kubernetes/postgres.yaml.j2 new file mode 100644 index 000000000..a41afa061 --- /dev/null +++ b/mcpgateway/tools/builder/templates/kubernetes/postgres.yaml.j2 @@ -0,0 +1,120 @@ +# Location: ./mcpgateway/tools/builder/templates/kubernetes/postgres.yaml.j2 +# Copyright 2025 +# SPDX-License-Identifier: Apache-2.0 +# Authors: Teryl Taylor +# PostgreSQL Database for MCP Gateway +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: postgres-pvc + namespace: {{ namespace }} +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: {{ storage_size }} + {% if storage_class %} + storageClassName: {{ storage_class }} + {% endif %} +--- +apiVersion: v1 +kind: Secret +metadata: + name: postgres-secret + namespace: {{ namespace }} +type: Opaque +stringData: + POSTGRES_USER: {{ user }} + POSTGRES_PASSWORD: {{ password }} + POSTGRES_DB: {{ database }} +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: postgres + namespace: {{ namespace }} + labels: + app: postgres + component: database +spec: + replicas: 1 + selector: + matchLabels: + app: postgres + template: + metadata: + labels: + app: postgres + component: database + spec: + containers: + - name: postgres + image: {{ image }} + imagePullPolicy: IfNotPresent + + ports: + - name: postgres + containerPort: 5432 + protocol: TCP + + envFrom: + - secretRef: + name: postgres-secret + + 
volumeMounts: + - name: postgres-data + mountPath: /var/lib/postgresql/data + subPath: postgres + + livenessProbe: + exec: + command: + - /bin/sh + - -c + - pg_isready -U {{ user }} + initialDelaySeconds: 30 + periodSeconds: 10 + timeoutSeconds: 5 + failureThreshold: 3 + + readinessProbe: + exec: + command: + - /bin/sh + - -c + - pg_isready -U {{ user }} + initialDelaySeconds: 10 + periodSeconds: 5 + timeoutSeconds: 3 + failureThreshold: 3 + + resources: + requests: + memory: "256Mi" + cpu: "100m" + limits: + memory: "512Mi" + cpu: "500m" + + volumes: + - name: postgres-data + persistentVolumeClaim: + claimName: postgres-pvc +--- +apiVersion: v1 +kind: Service +metadata: + name: postgres + namespace: {{ namespace }} + labels: + app: postgres +spec: + type: ClusterIP + ports: + - name: postgres + port: 5432 + targetPort: postgres + protocol: TCP + selector: + app: postgres diff --git a/mcpgateway/tools/builder/templates/kubernetes/redis.yaml.j2 b/mcpgateway/tools/builder/templates/kubernetes/redis.yaml.j2 new file mode 100644 index 000000000..340e2c71a --- /dev/null +++ b/mcpgateway/tools/builder/templates/kubernetes/redis.yaml.j2 @@ -0,0 +1,76 @@ +# Location: ./mcpgateway/tools/builder/templates/kubernetes/redis.yaml.j2 +# Copyright 2025 +# SPDX-License-Identifier: Apache-2.0 +# Authors: Teryl Taylor +# Redis Cache for MCP Gateway +apiVersion: apps/v1 +kind: Deployment +metadata: + name: redis + namespace: {{ namespace }} + labels: + app: redis + component: cache +spec: + replicas: 1 + selector: + matchLabels: + app: redis + template: + metadata: + labels: + app: redis + component: cache + spec: + containers: + - name: redis + image: {{ image }} + imagePullPolicy: IfNotPresent + + ports: + - name: redis + containerPort: 6379 + protocol: TCP + + livenessProbe: + tcpSocket: + port: redis + initialDelaySeconds: 30 + periodSeconds: 10 + timeoutSeconds: 5 + failureThreshold: 3 + + readinessProbe: + exec: + command: + - redis-cli + - ping + initialDelaySeconds: 10 + 
periodSeconds: 5 + timeoutSeconds: 3 + failureThreshold: 3 + + resources: + requests: + memory: "128Mi" + cpu: "50m" + limits: + memory: "256Mi" + cpu: "200m" +--- +apiVersion: v1 +kind: Service +metadata: + name: redis + namespace: {{ namespace }} + labels: + app: redis +spec: + type: ClusterIP + ports: + - name: redis + port: 6379 + targetPort: redis + protocol: TCP + selector: + app: redis diff --git a/mcpgateway/tools/builder/templates/plugins-config.yaml.j2 b/mcpgateway/tools/builder/templates/plugins-config.yaml.j2 new file mode 100644 index 000000000..a8221873a --- /dev/null +++ b/mcpgateway/tools/builder/templates/plugins-config.yaml.j2 @@ -0,0 +1,49 @@ +# Location: ./mcpgateway/tools/builder/templates/compose/plugins-config.yaml.j2 +# Copyright 2025 +# SPDX-License-Identifier: Apache-2.0 +# Authors: Teryl Taylor +# Plugin configuration for MCP Gateway +# Auto-generated from mcp-stack.yaml + +# Global plugin settings +plugin_settings: + parallel_execution_within_band: true + plugin_timeout: 120 + fail_on_plugin_error: false + enable_plugin_api: true + plugin_health_check_interval: 60 + +# External plugin connections +plugins: +{% for plugin in plugins -%} +- name: {{ plugin.name }} + kind: external +{%- if plugin.description %} + description: "{{ plugin.description }}" +{%- endif %} +{%- if plugin.version %} + version: "{{ plugin.version }}" +{%- endif %} +{%- if plugin.author %} + author: "{{ plugin.author }}" +{%- endif %} +{%- if plugin.hooks %} + hooks: {{ plugin.hooks }} +{%- endif %} +{%- if plugin.tags %} + tags: {{ plugin.tags }} +{%- endif %} +{%- if plugin.mode %} + mode: "{{ plugin.mode }}" +{%- endif %} +{%- if plugin.priority %} + priority: {{ plugin.priority }} +{%- endif %} +{%- if plugin.conditions %} + conditions: {{ plugin.conditions }} +{%- endif %} + mcp: + proto: STREAMABLEHTTP + url: {{ plugin.url }} + +{% endfor %} diff --git a/mcpgateway/tools/cli.py b/mcpgateway/tools/cli.py new file mode 100644 index 000000000..3fda994c1 --- 
/dev/null
+++ b/mcpgateway/tools/cli.py
@@ -0,0 +1,56 @@
+"""Location: ./mcpgateway/tools/cli.py
+Copyright 2025
+SPDX-License-Identifier: Apache-2.0
+Authors: Teryl Taylor
+
+cforge CLI ─ command line tools for building and deploying the
+MCP Gateway and its plugins.
+
+This module is exposed as a **console-script** via:
+
+ [project.scripts]
+ cforge = "mcpgateway.tools.cli:main"
+
+so that a user can simply type `cforge ...` to use the CLI.
+
+Features
+─────────
+* plugin:
+ - bootstrap: Creates a new plugin project from template
+ - install: Installs plugins into a Python environment
+ - package: Builds an MCP server to serve plugins as tools
+* gateway:
+ - Validates deploy.yaml configuration
+ - Builds plugin containers from git repos
+ - Generates mTLS certificates
+ - Deploys to Kubernetes or Docker Compose
+ - Integrates with CI/CD vault secrets
+
+
+Typical usage
+─────────────
+```console
+$ cforge --help
+```
+"""
+
+# Third-Party
+import typer
+
+# First-Party
+import mcpgateway.plugins.tools.cli as plugins
+import mcpgateway.tools.builder.cli as builder
+
+app = typer.Typer(help="Command line tools for building, deploying, and interacting with the ContextForge MCP Gateway")
+
+app.add_typer(plugins.app, name="plugin", help="Manage the plugin lifecycle")
+app.add_typer(builder.app, name="gateway", help="Manage the building and deployment of the gateway")
+
+
+def main() -> None: # noqa: D401 - imperative mood is fine here
+ """Entry point for the *cforge* console script."""
+ app(obj={})
+
+
+if __name__ == "__main__":
+ main()
diff --git a/pyproject.toml b/pyproject.toml
index f21d5100c..7377fbd65 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -12,7 +12,7 @@ build-backend = "setuptools.build_meta"
 # ----------------------------------------------------------------
 [project]
 name = "mcp-contextforge-gateway"
-version = "0.8.0"
+version = "0.7.0"
 description = "A production-grade MCP Gateway & Proxy built with FastAPI. 
Supports multi-server registration, virtual server composition, authentication, retry logic, observability, protocol translation, and a unified federated tool catalog." keywords = ["MCP","API","gateway","proxy","tools", "agents","agentic ai","model context protocol","multi-agent","fastapi", @@ -250,6 +250,7 @@ Changelog = "https://github.com/IBM/mcp-context-forge/blob/main/CHANGELOG.md" [project.scripts] mcpgateway = "mcpgateway.cli:main" mcpplugins = "mcpgateway.plugins.tools.cli:main" +cforge = "mcpgateway.tools.cli:main" # -------------------------------------------------------------------- # πŸ”§ setuptools-specific configuration From c9d5ea66917464577d9ac5c66a41e18942003424 Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Thu, 9 Oct 2025 10:05:53 -0600 Subject: [PATCH 12/35] docs: added mtls plugin documentation. Signed-off-by: Teryl Taylor --- docs/docs/using/plugins/.pages | 1 + docs/docs/using/plugins/mtls.md | 500 ++++++++++++++++++++++++++++++++ 2 files changed, 501 insertions(+) create mode 100644 docs/docs/using/plugins/mtls.md diff --git a/docs/docs/using/plugins/.pages b/docs/docs/using/plugins/.pages index e655c3e3d..a1003a7ac 100644 --- a/docs/docs/using/plugins/.pages +++ b/docs/docs/using/plugins/.pages @@ -2,3 +2,4 @@ nav: - index.md - lifecycle.md - plugins.md + - mtls.md diff --git a/docs/docs/using/plugins/mtls.md b/docs/docs/using/plugins/mtls.md new file mode 100644 index 000000000..83a532998 --- /dev/null +++ b/docs/docs/using/plugins/mtls.md @@ -0,0 +1,500 @@ +# External Plugin mTLS Setup Guide + +This guide covers how to set up mutual TLS (mTLS) authentication between the MCP Gateway and external plugin servers. 
+ +## Port Configuration + +**Standard port convention:** +- **Port 8000**: Main plugin service (HTTP or HTTPS/mTLS) +- **Port 9000**: Health check endpoint (automatically starts on port+1000 when mTLS is enabled) + +When mTLS is enabled, the plugin runtime automatically starts a separate HTTP-only health check server on port 9000 (configurable via `port + 1000` formula). This allows health checks without requiring mTLS client certificates. + +## Certificate Generation + +The MCP Gateway includes Makefile targets to manage the complete certificate infrastructure for plugin mTLS. + +### Quick Start + +```bash +# Generate complete mTLS infrastructure (recommended) +make certs-mcp-all + +# This automatically: +# 1. Creates a Certificate Authority (CA) +# 2. Generates gateway client certificate +# 3. Reads plugins/external/config.yaml and generates server certificates for all external plugins +``` + +**Certificate validity**: Default is **825 days** (~2.25 years) + +**Output structure**: +``` +certs/mcp/ +β”œβ”€β”€ ca/ # Certificate Authority +β”‚ β”œβ”€β”€ ca.key # CA private key (protect!) +β”‚ └── ca.crt # CA certificate +β”œβ”€β”€ gateway/ # Gateway client certificates +β”‚ β”œβ”€β”€ client.key # Client private key +β”‚ β”œβ”€β”€ client.crt # Client certificate +β”‚ └── ca.crt # Copy of CA cert +└── plugins/ # Plugin server certificates + └── PluginName/ + β”œβ”€β”€ server.key # Server private key + β”œβ”€β”€ server.crt # Server certificate + └── ca.crt # Copy of CA cert +``` + +### Makefile Targets + +#### `make certs-mcp-all` + +Generate complete mTLS infrastructure. This is the **recommended** command for setting up mTLS. + +**What it does**: +1. Calls `certs-mcp-ca` to create the CA (if not exists) +2. Calls `certs-mcp-gateway` to create gateway client certificate (if not exists) +3. 
Reads `plugins/external/config.yaml` and generates certificates for all plugins with `kind: external` + +**Usage**: +```bash +# Use default config file (plugins/external/config.yaml) +make certs-mcp-all + +# Use custom config file +make certs-mcp-all MCP_PLUGIN_CONFIG=path/to/custom-config.yaml + +# Custom certificate validity (in days) +make certs-mcp-all MCP_CERT_DAYS=365 + +# Combine both options +make certs-mcp-all MCP_PLUGIN_CONFIG=config.yaml MCP_CERT_DAYS=730 +``` + +**Config file format** (`plugins/external/config.yaml`): +```yaml +plugins: + - name: "MyPlugin" # Certificate will be created for this plugin + kind: "external" # Must be "external" + mcp: + proto: STREAMABLEHTTP + url: http://127.0.0.1:8000/mcp + + - name: "AnotherPlugin" + kind: "external" + mcp: + proto: STREAMABLEHTTP + url: http://127.0.0.1:8001/mcp +``` + +**Fallback behavior**: If the config file doesn't exist or PyYAML is not installed, example certificates are generated for `example-plugin-a` and `example-plugin-b`. + +#### `make certs-mcp-ca` + +Generate the Certificate Authority (CA) for plugin mTLS. This is typically called automatically by other targets. + +**What it does**: +- Creates `certs/mcp/ca/ca.key` (4096-bit RSA private key) +- Creates `certs/mcp/ca/ca.crt` (CA certificate) +- Sets file permissions: `600` for `.key`, `644` for `.crt` + +**Usage**: +```bash +# Generate CA (one-time setup) +make certs-mcp-ca + +# Custom validity +make certs-mcp-ca MCP_CERT_DAYS=1825 +``` + +**Safety**: Won't overwrite existing CA. To regenerate, delete `certs/mcp/ca/` first. + +**⚠️ Warning**: The CA private key (`ca.key`) is critical. Protect it carefully! + +#### `make certs-mcp-gateway` + +Generate the gateway client certificate used by the MCP Gateway to authenticate to plugin servers. 
+
+**What it does**:
+- Depends on `certs-mcp-ca` (creates CA if needed)
+- Creates `certs/mcp/gateway/client.key` (2048-bit RSA private key)
+- Creates `certs/mcp/gateway/client.crt` (client certificate signed by CA)
+- Copies `ca.crt` to `certs/mcp/gateway/`
+
+**Usage**:
+```bash
+# Generate gateway client certificate
+make certs-mcp-gateway
+
+# Custom validity
+make certs-mcp-gateway MCP_CERT_DAYS=365
+```
+
+**Safety**: Won't overwrite existing certificate.
+
+#### `make certs-mcp-plugin`
+
+Generate a server certificate for a specific plugin.
+
+**What it does**:
+- Depends on `certs-mcp-ca` (creates CA if needed)
+- Creates `certs/mcp/plugins/<PLUGIN_NAME>/server.key`
+- Creates `certs/mcp/plugins/<PLUGIN_NAME>/server.crt` with Subject Alternative Names (SANs):
+  - `DNS:<PLUGIN_NAME>`
+  - `DNS:mcp-plugin-<PLUGIN_NAME>`
+  - `DNS:localhost`
+- Copies `ca.crt` to plugin directory
+
+**Usage**:
+```bash
+# Generate certificate for specific plugin
+make certs-mcp-plugin PLUGIN_NAME=MyCustomPlugin
+
+# Custom validity
+make certs-mcp-plugin PLUGIN_NAME=MyPlugin MCP_CERT_DAYS=365
+```
+
+**Required**: `PLUGIN_NAME` parameter must be provided.
+
+**Use case**: Add a new plugin after running `certs-mcp-all`, or generate certificates manually.
+
+#### `make certs-mcp-check`
+
+Check expiry dates of all MCP certificates.
+
+**What it does**:
+- Displays expiry dates for CA, gateway client, and all plugin certificates
+- Shows remaining validity period
+
+**Usage**:
+```bash
+make certs-mcp-check
+```
+
+**Output example**:
+```
+πŸ” Checking MCP certificate expiry dates... 
+ +πŸ“‹ CA Certificate: + Expires: Jan 15 10:30:45 2027 GMT + +πŸ“‹ Gateway Client Certificate: + Expires: Jan 15 10:31:22 2027 GMT + +πŸ“‹ Plugin Certificates: + MyPlugin: Jan 15 10:32:10 2027 GMT + AnotherPlugin: Jan 15 10:32:45 2027 GMT +``` + +### Certificate Properties + +All certificates generated include: +- **Algorithm**: RSA with SHA-256 +- **CA Key Size**: 4096 bits +- **Client/Server Key Size**: 2048 bits +- **Default Validity**: 825 days +- **Subject Alternative Names** (plugins): DNS entries for plugin name and localhost + +### Important Notes + +1. **All `ca.crt` files are identical** - They are copies of the root CA certificate distributed to each location for convenience + +2. **Safety features** - Commands won't overwrite existing certificates. To regenerate, delete the target directory first + +3. **File permissions** - Automatically set to secure values: + - Private keys (`.key`): `600` (owner read/write only) + - Certificates (`.crt`): `644` (world-readable) + +4. **Configuration variables**: + - `MCP_CERT_DAYS`: Certificate validity in days (default: 825) + - `MCP_PLUGIN_CONFIG`: Path to plugin config file (default: `plugins/external/config.yaml`) + +## Configuration Options + +You can configure mTLS using either YAML files or environment variables. + +### Option 1: YAML Configuration + +#### Server Configuration (Plugin) + +In your plugin config file (e.g., `plugins/test.yaml`): + +```yaml +plugins: + - name: "ReplaceBadWordsPlugin" + kind: "plugins.regex_filter.search_replace.SearchReplacePlugin" + # ... plugin config ... + +server_settings: + host: "127.0.0.1" + port: 8000 + tls: + certfile: certs/mcp/plugins/ReplaceBadWordsPlugin/server.crt + keyfile: certs/mcp/plugins/ReplaceBadWordsPlugin/server.key + ca_bundle: certs/mcp/plugins/ReplaceBadWordsPlugin/ca.crt + ssl_cert_reqs: 2 # 2 = CERT_REQUIRED (enforce client certificates) +``` + +Start the server (for testing): +```bash +PYTHONPATH=. 
PLUGINS_CONFIG_PATH="plugins/test.yaml" \ + python3 mcpgateway/plugins/framework/external/mcp/server/runtime.py +``` + +#### Client Configuration (Gateway) + +In your gateway plugin config file (e.g., `plugins/external/config-client.yaml`): + +```yaml +plugins: + - name: "ReplaceBadWordsPlugin" + kind: "external" + mcp: + proto: STREAMABLEHTTP + url: https://127.0.0.1:8000/mcp + tls: + certfile: certs/mcp/gateway/client.crt + keyfile: certs/mcp/gateway/client.key + ca_bundle: certs/mcp/gateway/ca.crt + verify: true + check_hostname: false +``` + +### Option 2: Environment Variables + +#### Server Environment Variables + +```bash +# Server configuration +export PLUGINS_SERVER_HOST="127.0.0.1" +export PLUGINS_SERVER_PORT="8000" +export PLUGINS_SERVER_SSL_ENABLED="true" + +# TLS/mTLS configuration +export PLUGINS_SERVER_SSL_KEYFILE="certs/mcp/plugins/ReplaceBadWordsPlugin/server.key" +export PLUGINS_SERVER_SSL_CERTFILE="certs/mcp/plugins/ReplaceBadWordsPlugin/server.crt" +export PLUGINS_SERVER_SSL_CA_CERTS="certs/mcp/plugins/ReplaceBadWordsPlugin/ca.crt" +export PLUGINS_SERVER_SSL_CERT_REQS="2" # 2 = CERT_REQUIRED +``` + +Start the server (YAML without `server_settings` section for testing): +```bash +PYTHONPATH=. PLUGINS_CONFIG_PATH="plugins/test.yaml" \ + python3 mcpgateway/plugins/framework/external/mcp/server/runtime.py +``` + +#### Client Environment Variables + +```bash +export PLUGINS_CLIENT_MTLS_CERTFILE="certs/mcp/gateway/client.crt" +export PLUGINS_CLIENT_MTLS_KEYFILE="certs/mcp/gateway/client.key" +export PLUGINS_CLIENT_MTLS_CA_BUNDLE="certs/mcp/gateway/ca.crt" +export PLUGINS_CLIENT_MTLS_VERIFY="true" +export PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME="false" +``` + +Run your gateway code (YAML without `tls` section in `mcp` config). 
+ +## Environment Variable Reference + +### Server Variables (Plugin) + +| Variable | Description | Example | +|----------|-------------|---------| +| `PLUGINS_SERVER_HOST` | Server bind address | `127.0.0.1` | +| `PLUGINS_SERVER_PORT` | Server bind port | `8000` | +| `PLUGINS_SERVER_SSL_ENABLED` | Enable SSL/TLS | `true` | +| `PLUGINS_SERVER_SSL_KEYFILE` | Path to server private key | `certs/.../server.key` | +| `PLUGINS_SERVER_SSL_CERTFILE` | Path to server certificate | `certs/.../server.crt` | +| `PLUGINS_SERVER_SSL_CA_CERTS` | Path to CA bundle | `certs/.../ca.crt` | +| `PLUGINS_SERVER_SSL_CERT_REQS` | Client cert requirement (0-2) | `2` | +| `PLUGINS_SERVER_SSL_KEYFILE_PASSWORD` | Password for encrypted key | `password` | + +**`ssl_cert_reqs` values:** +- `0` = `CERT_NONE` - No client certificate required +- `1` = `CERT_OPTIONAL` - Client certificate requested but not required +- `2` = `CERT_REQUIRED` - Client certificate required (mTLS) + +### Client Variables (Gateway) + +| Variable | Description | Example | +|----------|-------------|---------| +| `PLUGINS_CLIENT_MTLS_CERTFILE` | Path to client certificate | `certs/.../client.crt` | +| `PLUGINS_CLIENT_MTLS_KEYFILE` | Path to client private key | `certs/.../client.key` | +| `PLUGINS_CLIENT_MTLS_CA_BUNDLE` | Path to CA bundle | `certs/.../ca.crt` | +| `PLUGINS_CLIENT_MTLS_VERIFY` | Verify server certificate | `true` | +| `PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME` | Verify server hostname | `false` | +| `PLUGINS_CLIENT_MTLS_KEYFILE_PASSWORD` | Password for encrypted key | `password` | + +## Testing mTLS + +### Test without TLS + +```bash +# Server +PYTHONPATH=. 
PLUGINS_CONFIG_PATH="plugins/test.yaml" \ + PLUGINS_SERVER_HOST="127.0.0.1" \ + PLUGINS_SERVER_PORT="8000" \ + PLUGINS_SERVER_SSL_ENABLED="false" \ + python3 mcpgateway/plugins/framework/external/mcp/server/runtime.py & + +# Client config should use: url: http://127.0.0.1:8000/mcp +``` + +### Test with mTLS (YAML) + +```bash +# Server (config has server_settings.tls section) +PYTHONPATH=. PLUGINS_CONFIG_PATH="plugins/test.mtls.yaml" \ + python3 mcpgateway/plugins/framework/external/mcp/server/runtime.py & + +# Client (config has mcp.tls section) +python3 your_client.py +``` + +### Test with mTLS (Environment Variables) + +```bash +# Server (config has no server_settings section) +# Note: When mTLS is enabled, a health check server automatically starts on port 9000 (port+1000) +PYTHONPATH=. \ + PLUGINS_CONFIG_PATH="plugins/test.yaml" \ + PLUGINS_SERVER_HOST="127.0.0.1" \ + PLUGINS_SERVER_PORT="8000" \ + PLUGINS_SERVER_SSL_ENABLED="true" \ + PLUGINS_SERVER_SSL_KEYFILE="certs/mcp/plugins/ReplaceBadWordsPlugin/server.key" \ + PLUGINS_SERVER_SSL_CERTFILE="certs/mcp/plugins/ReplaceBadWordsPlugin/server.crt" \ + PLUGINS_SERVER_SSL_CA_CERTS="certs/mcp/plugins/ReplaceBadWordsPlugin/ca.crt" \ + PLUGINS_SERVER_SSL_CERT_REQS="2" \ + python3 mcpgateway/plugins/framework/external/mcp/server/runtime.py & + +# Client (config has no mcp.tls section) +PLUGINS_CLIENT_MTLS_CERTFILE="certs/mcp/gateway/client.crt" \ + PLUGINS_CLIENT_MTLS_KEYFILE="certs/mcp/gateway/client.key" \ + PLUGINS_CLIENT_MTLS_CA_BUNDLE="certs/mcp/gateway/ca.crt" \ + PLUGINS_CLIENT_MTLS_VERIFY="true" \ + PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME="false" \ + python3 your_client.py +``` + +## How mTLS Works + +1. **Certificate Authority (CA)**: A single root CA (`ca.crt`) signs both client and server certificates +2. **Server Certificate**: Plugin server presents its certificate (`server.crt`) to clients +3. **Client Certificate**: Gateway presents its certificate (`client.crt`) to the plugin server +4. 
**Mutual Verification**: Both parties verify each other's certificates against the CA bundle +5. **Secure Channel**: After mutual authentication, all communication is encrypted + +## Configuration Priority + +Environment variables take precedence over YAML configuration: +- If `PLUGINS_SERVER_SSL_ENABLED=true`, env vars override `server_settings.tls` +- If client env vars are set, they override `mcp.tls` in YAML + +## Hostname Verification (`check_hostname`) + +### Overview +`check_hostname` is a **client-side only** setting that verifies the server's certificate matches the hostname/IP you're connecting to. + +### How It Works +The client checks if the URL hostname matches entries in the server certificate's: +- **Common Name (CN)**: `CN=mcp-plugin-ReplaceBadWordsPlugin` +- **Subject Alternative Names (SANs)**: DNS names or IP addresses + +### Checking Certificate SANs +```bash +# View DNS and IP SANs in server certificate +openssl x509 -in certs/mcp/plugins/ReplaceBadWordsPlugin/server.crt -text -noout | grep -A 5 "Subject Alternative Name" + +# Example output: +# X509v3 Subject Alternative Name: +# DNS:ReplaceBadWordsPlugin, DNS:mcp-plugin-ReplaceBadWordsPlugin, DNS:localhost +``` + +### Configuration Examples + +#### Option 1: Use `localhost` with `check_hostname: true` +```yaml +# Client config +mcp: + url: https://localhost:8000/mcp + tls: + check_hostname: true # Works because "localhost" is in DNS SANs +``` + +Or with environment variables: +```bash +export PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME="true" +# Connect to: https://localhost:8000/mcp +``` + +#### Option 2: Use IP address with `check_hostname: false` +```yaml +# Client config +mcp: + url: https://127.0.0.1:8000/mcp + tls: + check_hostname: false # Required because 127.0.0.1 is not in SANs +``` + +Or with environment variables: +```bash +export PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME="false" +# Connect to: https://127.0.0.1:8000/mcp +``` + +#### Option 3: Add IP SANs to certificate (Advanced) +If you need 
`check_hostname: true` with IP addresses, regenerate certificates with IP SANs: + +```bash +# Modify Makefile to add IP SANs when generating certificates +# Add to server.ext or openssl command: +# subjectAltName = DNS:localhost, DNS:plugin-name, IP:127.0.0.1, IP:0.0.0.0 +``` + +### Server-Side Hostname Verification +There is **no** `check_hostname` setting on the server side. The server only: +1. Verifies the client certificate is signed by the trusted CA +2. Checks if `ssl_cert_reqs=2` (CERT_REQUIRED) to enforce client certificates + +### Testing Hostname Verification + +#### Test 1: Valid hostname (should succeed) +```bash +# Server bound to 0.0.0.0 (accepts all interfaces) +PLUGINS_SERVER_HOST="0.0.0.0" ... + +# Client connecting to localhost with hostname check +export PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME="true" +# URL: https://localhost:8000/mcp +# Result: βœ… Success (localhost is in DNS SANs) +``` + +#### Test 2: IP address with hostname check (should fail) +```bash +# Client connecting to IP with hostname check +export PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME="true" +# URL: https://127.0.0.1:8000/mcp +# Result: ❌ Fails with "IP address mismatch, certificate is not valid for '127.0.0.1'" +``` + +## Troubleshooting + +### Connection Refused +- Ensure server is running: `lsof -i :8000` +- Check server logs for startup errors +- Verify server is bound to correct interface (0.0.0.0 for all, 127.0.0.1 for localhost only) +- Note: When mTLS is enabled, a health check server also runs on port 9000 (port+1000) + +### Certificate Verification Failed +- Verify CA bundle matches on both sides: `md5 certs/**/ca.crt` +- Check certificate paths are correct +- Ensure certificates haven't expired: `openssl x509 -in cert.crt -noout -dates` + +### Hostname Verification Failed +Error: `certificate verify failed: IP address mismatch` or `Hostname mismatch` + +**Solutions:** +1. **Use hostname from SANs**: Connect to `https://localhost:8000` instead of `https://127.0.0.1:8000` +2. 
**Disable hostname check**: Set `check_hostname: false` or `PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME="false"` +3. **Add IP to SANs**: Regenerate certificates with IP SANs included \ No newline at end of file From 000194405b37705887b1769a9f8142d47d70d65f Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Thu, 9 Oct 2025 11:32:55 -0600 Subject: [PATCH 13/35] fix: linting issues. Signed-off-by: Teryl Taylor --- .../plugins/framework/external/mcp/client.py | 16 ++- .../framework/external/mcp/server/runtime.py | 107 +++++++++++++++++- mcpgateway/plugins/framework/models.py | 78 +++++++++++-- 3 files changed, 189 insertions(+), 12 deletions(-) diff --git a/mcpgateway/plugins/framework/external/mcp/client.py b/mcpgateway/plugins/framework/external/mcp/client.py index 8d354646d..009227777 100644 --- a/mcpgateway/plugins/framework/external/mcp/client.py +++ b/mcpgateway/plugins/framework/external/mcp/client.py @@ -154,7 +154,19 @@ def _tls_httpx_client_factory( timeout: Optional[httpx.Timeout] = None, auth: Optional[httpx.Auth] = None, ) -> httpx.AsyncClient: - """Build an httpx client with TLS configuration for external MCP servers.""" + """Build an httpx client with TLS configuration for external MCP servers. + + Args: + headers: Optional HTTP headers to include in requests. + timeout: Optional timeout configuration for HTTP requests. + auth: Optional authentication handler for HTTP requests. + + Returns: + Configured httpx AsyncClient with TLS settings applied. + + Raises: + PluginError: If TLS configuration fails. 
+ """ kwargs: dict[str, Any] = {"follow_redirects": True} if headers: @@ -170,7 +182,7 @@ def _tls_httpx_client_factory( ssl_context = ssl.create_default_context() if not tls_config.verify: ssl_context.check_hostname = False - ssl_context.verify_mode = ssl.CERT_NONE + ssl_context.verify_mode = ssl.CERT_NONE # noqa: DUO122 else: if tls_config.ca_bundle: ssl_context.load_verify_locations(cafile=tls_config.ca_bundle) diff --git a/mcpgateway/plugins/framework/external/mcp/server/runtime.py b/mcpgateway/plugins/framework/external/mcp/server/runtime.py index f5b070edd..f7fec637e 100644 --- a/mcpgateway/plugins/framework/external/mcp/server/runtime.py +++ b/mcpgateway/plugins/framework/external/mcp/server/runtime.py @@ -59,7 +59,11 @@ async def get_plugin_configs() -> list[dict]: - """Get the plugin configurations installed on the server.""" + """Get the plugin configurations installed on the server. + + Returns: + List of plugin configuration dictionaries. + """ return await SERVER.get_plugin_configs() @@ -68,6 +72,9 @@ async def get_plugin_config(name: str) -> dict: Args: name: The name of the plugin + + Returns: + Plugin configuration dictionary. """ return await SERVER.get_plugin_config(name) @@ -79,9 +86,22 @@ async def prompt_pre_fetch(plugin_name: str, payload: Dict[str, Any], context: D plugin_name: The name of the plugin to execute payload: The prompt name and arguments to be analyzed context: Contextual information required for execution + + Returns: + Result dictionary from the prompt prefetch hook. """ def prompt_pre_fetch_func(plugin: Plugin, payload: PromptPrehookPayload, context: PluginContext) -> PromptPrehookResult: + """Wrapper function to invoke prompt prefetch on a plugin instance. + + Args: + plugin: The plugin instance to execute. + payload: The prompt prehook payload. + context: The plugin context. + + Returns: + Result from the plugin's prompt_pre_fetch method. 
+ """ return plugin.prompt_pre_fetch(payload, context) return await SERVER.invoke_hook(PromptPrehookPayload, prompt_pre_fetch_func, plugin_name, payload, context) @@ -94,9 +114,22 @@ async def prompt_post_fetch(plugin_name: str, payload: Dict[str, Any], context: plugin_name: The name of the plugin to execute payload: The prompt payload to be analyzed context: Contextual information + + Returns: + Result dictionary from the prompt postfetch hook. """ def prompt_post_fetch_func(plugin: Plugin, payload: PromptPosthookPayload, context: PluginContext) -> PromptPosthookResult: + """Wrapper function to invoke prompt postfetch on a plugin instance. + + Args: + plugin: The plugin instance to execute. + payload: The prompt posthook payload. + context: The plugin context. + + Returns: + Result from the plugin's prompt_post_fetch method. + """ return plugin.prompt_post_fetch(payload, context) return await SERVER.invoke_hook(PromptPosthookPayload, prompt_post_fetch_func, plugin_name, payload, context) @@ -109,9 +142,22 @@ async def tool_pre_invoke(plugin_name: str, payload: Dict[str, Any], context: Di plugin_name: The name of the plugin to execute payload: The tool name and arguments to be analyzed context: Contextual information + + Returns: + Result dictionary from the tool pre-invoke hook. """ def tool_pre_invoke_func(plugin: Plugin, payload: ToolPreInvokePayload, context: PluginContext) -> ToolPreInvokeResult: + """Wrapper function to invoke tool pre-invoke on a plugin instance. + + Args: + plugin: The plugin instance to execute. + payload: The tool pre-invoke payload. + context: The plugin context. + + Returns: + Result from the plugin's tool_pre_invoke method. 
+ """ return plugin.tool_pre_invoke(payload, context) return await SERVER.invoke_hook(ToolPreInvokePayload, tool_pre_invoke_func, plugin_name, payload, context) @@ -124,9 +170,22 @@ async def tool_post_invoke(plugin_name: str, payload: Dict[str, Any], context: D plugin_name: The name of the plugin to execute payload: The tool result to be analyzed context: Contextual information + + Returns: + Result dictionary from the tool post-invoke hook. """ def tool_post_invoke_func(plugin: Plugin, payload: ToolPostInvokePayload, context: PluginContext) -> ToolPostInvokeResult: + """Wrapper function to invoke tool post-invoke on a plugin instance. + + Args: + plugin: The plugin instance to execute. + payload: The tool post-invoke payload. + context: The plugin context. + + Returns: + Result from the plugin's tool_post_invoke method. + """ return plugin.tool_post_invoke(payload, context) return await SERVER.invoke_hook(ToolPostInvokePayload, tool_post_invoke_func, plugin_name, payload, context) @@ -139,9 +198,22 @@ async def resource_pre_fetch(plugin_name: str, payload: Dict[str, Any], context: plugin_name: The name of the plugin to execute payload: The resource name and arguments to be analyzed context: Contextual information + + Returns: + Result dictionary from the resource prefetch hook. """ def resource_pre_fetch_func(plugin: Plugin, payload: ResourcePreFetchPayload, context: PluginContext) -> ResourcePreFetchResult: + """Wrapper function to invoke resource prefetch on a plugin instance. + + Args: + plugin: The plugin instance to execute. + payload: The resource prefetch payload. + context: The plugin context. + + Returns: + Result from the plugin's resource_pre_fetch method. 
+ """ return plugin.resource_pre_fetch(payload, context) return await SERVER.invoke_hook(ResourcePreFetchPayload, resource_pre_fetch_func, plugin_name, payload, context) @@ -154,9 +226,22 @@ async def resource_post_fetch(plugin_name: str, payload: Dict[str, Any], context plugin_name: The name of the plugin to execute payload: The resource payload to be analyzed context: Contextual information + + Returns: + Result dictionary from the resource postfetch hook. """ def resource_post_fetch_func(plugin: Plugin, payload: ResourcePostFetchPayload, context: PluginContext) -> ResourcePostFetchResult: + """Wrapper function to invoke resource postfetch on a plugin instance. + + Args: + plugin: The plugin instance to execute. + payload: The resource postfetch payload. + context: The plugin context. + + Returns: + Result from the plugin's resource_post_fetch method. + """ return plugin.resource_post_fetch(payload, context) return await SERVER.invoke_hook(ResourcePostFetchPayload, resource_post_fetch_func, plugin_name, payload, context) @@ -170,6 +255,8 @@ def __init__(self, server_config: MCPServerConfig, *args, **kwargs): Args: server_config: the MCP server configuration including mTLS information. + *args: Additional positional arguments passed to FastMCP. + **kwargs: Additional keyword arguments passed to FastMCP. """ # Load server config from environment @@ -183,7 +270,11 @@ def __init__(self, server_config: MCPServerConfig, *args, **kwargs): super().__init__(*args, **kwargs) def _get_ssl_config(self) -> dict: - """Build SSL configuration for uvicorn from MCPServerConfig.""" + """Build SSL configuration for uvicorn from MCPServerConfig. + + Returns: + Dictionary of SSL configuration parameters for uvicorn. + """ ssl_config = {} if self.server_config.tls: @@ -217,6 +308,9 @@ async def _start_health_check_server(self, health_port: int) -> None: """Start a simple HTTP-only health check server on a separate port. 
This allows health checks to work even when the main server uses HTTPS/mTLS. + + Args: + health_port: Port number for the health check server. """ # Third-Party from starlette.applications import Starlette @@ -229,6 +323,9 @@ async def health_check(request: Request): Args: request: the http request from which the health check occurs. + + Returns: + JSON response with health status. """ return JSONResponse({"status": "healthy"}) @@ -260,6 +357,9 @@ async def health_check(request: Request): Args: request: the http request from which the health check occurs. + + Returns: + JSON response with health status. """ return JSONResponse({"status": "healthy"}) @@ -302,6 +402,9 @@ async def run(): - PLUGINS_SERVER_SSL_CERTFILE: Path to server certificate - PLUGINS_SERVER_SSL_CA_CERTS: Path to CA bundle for client verification - PLUGINS_SERVER_SSL_CERT_REQS: Client cert requirement (0=NONE, 1=OPTIONAL, 2=REQUIRED) + + Raises: + Exception: If plugin server initialization or execution fails. """ global SERVER diff --git a/mcpgateway/plugins/framework/models.py b/mcpgateway/plugins/framework/models.py index 0d45e9db6..7906bee20 100644 --- a/mcpgateway/plugins/framework/models.py +++ b/mcpgateway/plugins/framework/models.py @@ -265,7 +265,17 @@ class MCPTransportTLSConfigBase(BaseModel): @field_validator("ca_bundle", "certfile", "keyfile", mode=AFTER) @classmethod def validate_path(cls, value: Optional[str]) -> Optional[str]: - """Expand and validate file paths supplied in TLS configuration.""" + """Expand and validate file paths supplied in TLS configuration. + + Args: + value: File path to validate. + + Returns: + Expanded file path or None if not provided. + + Raises: + ValueError: If file path does not exist. 
+ """ if not value: return value @@ -276,7 +286,14 @@ def validate_path(cls, value: Optional[str]) -> Optional[str]: @model_validator(mode=AFTER) def validate_cert_key(self) -> Self: # pylint: disable=bad-classmethod-argument - """Ensure certificate and key options are consistent.""" + """Ensure certificate and key options are consistent. + + Returns: + Self after validation. + + Raises: + ValueError: If keyfile is specified without certfile. + """ if self.keyfile and not self.certfile: raise ValueError("keyfile requires certfile to be specified") @@ -284,7 +301,17 @@ def validate_cert_key(self) -> Self: # pylint: disable=bad-classmethod-argument @staticmethod def _parse_bool(value: Optional[str]) -> Optional[bool]: - """Convert a string environment value to boolean.""" + """Convert a string environment value to boolean. + + Args: + value: String value to parse as boolean. + + Returns: + Boolean value or None if value is None. + + Raises: + ValueError: If value is not a valid boolean string. + """ if value is None: return None @@ -309,7 +336,11 @@ class MCPClientTLSConfig(MCPTransportTLSConfigBase): @classmethod def from_env(cls) -> Optional["MCPClientTLSConfig"]: - """Construct client TLS configuration from PLUGINS_CLIENT_* environment variables.""" + """Construct client TLS configuration from PLUGINS_CLIENT_* environment variables. + + Returns: + MCPClientTLSConfig instance or None if no environment variables are set. + """ env = os.environ data: dict[str, Any] = {} @@ -348,7 +379,14 @@ class MCPServerTLSConfig(MCPTransportTLSConfigBase): @classmethod def from_env(cls) -> Optional["MCPServerTLSConfig"]: - """Construct server TLS configuration from PLUGINS_SERVER_SSL_* environment variables.""" + """Construct server TLS configuration from PLUGINS_SERVER_SSL_* environment variables. + + Returns: + MCPServerTLSConfig instance or None if no environment variables are set. + + Raises: + ValueError: If PLUGINS_SERVER_SSL_CERT_REQS is not a valid integer. 
+ """ env = os.environ data: dict[str, Any] = {} @@ -389,7 +427,17 @@ class MCPServerConfig(BaseModel): @staticmethod def _parse_bool(value: Optional[str]) -> Optional[bool]: - """Convert a string environment value to boolean.""" + """Convert a string environment value to boolean. + + Args: + value: String value to parse as boolean. + + Returns: + Boolean value or None if value is None. + + Raises: + ValueError: If value is not a valid boolean string. + """ if value is None: return None @@ -402,7 +450,14 @@ def _parse_bool(value: Optional[str]) -> Optional[bool]: @classmethod def from_env(cls) -> Optional["MCPServerConfig"]: - """Construct server configuration from PLUGINS_SERVER_* environment variables.""" + """Construct server configuration from PLUGINS_SERVER_* environment variables. + + Returns: + MCPServerConfig instance or None if no environment variables are set. + + Raises: + ValueError: If PLUGINS_SERVER_PORT is not a valid integer. + """ env = os.environ data: dict[str, Any] = {} @@ -489,7 +544,14 @@ def validate_script(cls, script: str | None) -> str | None: @model_validator(mode=AFTER) def validate_tls_usage(self) -> Self: # pylint: disable=bad-classmethod-argument - """Ensure TLS configuration is only used with HTTP-based transports.""" + """Ensure TLS configuration is only used with HTTP-based transports. + + Returns: + Self after validation. + + Raises: + ValueError: If TLS configuration is used with non-HTTP transports. + """ if self.tls and self.proto not in (TransportType.SSE, TransportType.STREAMABLEHTTP): raise ValueError("TLS configuration is only valid for HTTP/SSE transports") From 5793e5f3cd09f32a7497fbe3a161371f5fcd59f2 Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Fri, 10 Oct 2025 13:38:08 -0600 Subject: [PATCH 14/35] fix: install templates with cli, fix error messages. 
Signed-off-by: Teryl Taylor --- mcpgateway/tools/builder/__init__.py | 2 +- mcpgateway/tools/builder/cli.py | 11 +++-------- mcpgateway/tools/builder/dagger_deploy.py | 4 +--- mcpgateway/tools/builder/factory.py | 5 ++--- mcpgateway/tools/builder/python_deploy.py | 4 +--- pyproject.toml | 3 +++ 6 files changed, 11 insertions(+), 18 deletions(-) diff --git a/mcpgateway/tools/builder/__init__.py b/mcpgateway/tools/builder/__init__.py index 36a56d0e3..ec309d8bd 100644 --- a/mcpgateway/tools/builder/__init__.py +++ b/mcpgateway/tools/builder/__init__.py @@ -5,4 +5,4 @@ Authors: Teryl Taylor Builder Package. -""" \ No newline at end of file +""" diff --git a/mcpgateway/tools/builder/cli.py b/mcpgateway/tools/builder/cli.py index 10a126007..b96809e7f 100644 --- a/mcpgateway/tools/builder/cli.py +++ b/mcpgateway/tools/builder/cli.py @@ -105,16 +105,11 @@ def build( no_cache: Annotated[bool, typer.Option("--no-cache", help="Disable build cache")] = False, copy_env_templates: Annotated[bool, typer.Option("--copy-env-templates", help="Copy .env.template files from plugin repos")] = True, ): - """Build plugin containers""" + """Build containers""" impl = ctx.obj["deployer"] try: - if IMPL_MODE == "dagger": - # Use asyncio for Dagger implementation - asyncio.run(impl.build(config_file, plugins_only=plugins_only, specific_plugins=list(plugin) if plugin else None, no_cache=no_cache, copy_env_templates=copy_env_templates)) - else: - # Plain Python implementation is synchronous - impl.build(config_file, plugins_only=plugins_only, specific_plugins=list(plugin) if plugin else None, no_cache=no_cache, copy_env_templates=copy_env_templates) + asyncio.run(impl.build(config_file, plugins_only=plugins_only, specific_plugins=list(plugin) if plugin else None, no_cache=no_cache, copy_env_templates=copy_env_templates)) console.print("[green]βœ“ Build complete[/green]") if copy_env_templates: @@ -142,7 +137,7 @@ def certs(ctx: typer.Context, config_file: Annotated[Path, typer.Argument(help=" 
def deploy( ctx: typer.Context, config_file: Annotated[Path, typer.Argument(help="The deployment configuration file")], - output_dir: Annotated[Path, typer.Option("--output-dir", "-o", help="The deployment configuration file")], + output_dir: Annotated[Optional[Path], typer.Option("--output-dir", "-o", help="Output directory for generated manifests")] = None, dry_run: Annotated[bool, typer.Option("--dry-run", help="Generate manifests without deploying")] = False, skip_build: Annotated[bool, typer.Option("--skip-build", help="Skip building containers")] = False, skip_certs: Annotated[bool, typer.Option("--skip-certs", help="Skip certificate generation")] = False, diff --git a/mcpgateway/tools/builder/dagger_deploy.py b/mcpgateway/tools/builder/dagger_deploy.py index 41a282644..8cb9e6d76 100644 --- a/mcpgateway/tools/builder/dagger_deploy.py +++ b/mcpgateway/tools/builder/dagger_deploy.py @@ -38,6 +38,7 @@ verify_compose, verify_kubernetes, ) +from mcpgateway.tools.builder.common import copy_env_template as copy_template from mcpgateway.tools.builder.pipeline import CICDModule console = Console() @@ -351,9 +352,6 @@ async def _build_component_with_dagger(self, component: Dict[str, Any], componen # Standard import subprocess - # Third-Party - from common import copy_env_template as copy_template - clone_dir.mkdir(parents=True, exist_ok=True) if (clone_dir / ".git").exists(): diff --git a/mcpgateway/tools/builder/factory.py b/mcpgateway/tools/builder/factory.py index 340f70b49..cf3a43d48 100644 --- a/mcpgateway/tools/builder/factory.py +++ b/mcpgateway/tools/builder/factory.py @@ -87,10 +87,9 @@ def create_deployer(deployer: str, verbose: bool = False) -> tuple[CICDModule, C return (MCPStackDagger(verbose), CICDTypes.DAGGER) - except ImportError as e: + except ImportError: # Dagger dependencies not available, fall back to Python - console.print(f"[yellow]⚠ Dagger import failed: {e}[/yellow]") - console.print("[yellow]β†’ Falling back to plain Python implementation[/yellow]")
+ console.print("[yellow]⚠ Dagger not installed. Using plain python.[/yellow]") # Load plain Python implementation (fallback or explicitly requested) try: diff --git a/mcpgateway/tools/builder/python_deploy.py b/mcpgateway/tools/builder/python_deploy.py index 78663e4dc..5a23f05c9 100644 --- a/mcpgateway/tools/builder/python_deploy.py +++ b/mcpgateway/tools/builder/python_deploy.py @@ -35,6 +35,7 @@ verify_compose, verify_kubernetes, ) +from mcpgateway.tools.builder.common import copy_env_template as copy_template from mcpgateway.tools.builder.pipeline import CICDModule console = Console() @@ -371,9 +372,6 @@ def _build_component(self, component: Dict[str, Any], component_name: str, no_ca # Copy .env.template if requested and exists if copy_env_templates: - # Third-Party - from common import copy_env_template as copy_template - copy_template(component_name, build_dir, verbose=self.verbose) if self.verbose: diff --git a/pyproject.toml b/pyproject.toml index 7377fbd65..8b21f343a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -269,6 +269,9 @@ exclude = ["tests*"] # - templates -> Jinja2 templates shipped at runtime [tool.setuptools.package-data] mcpgateway = [ + "tools/builder/templates/*.yaml.j2", + "tools/builder/templates/compose/*.yaml.j2", + "tools/builder/templates/kubernetes/*.yaml.j2", "py.typed", "static/*.css", "static/*.js", From 15a1af187f84bc80252d84bd3a75c7d8b0ec2634 Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Fri, 10 Oct 2025 13:46:05 -0600 Subject: [PATCH 15/35] fix: mtls and stdio test cases. 
Signed-off-by: Teryl Taylor --- .../framework/external/mcp/server/runtime.py | 100 ++++++++++++------ .../external/mcp/server/test_runtime.py | 2 + .../external/mcp/test_client_stdio.py | 7 +- .../framework/test_manager_extended.py | 6 -- .../plugins/framework/test_models_tls.py | 16 +-- 5 files changed, 83 insertions(+), 48 deletions(-) diff --git a/mcpgateway/plugins/framework/external/mcp/server/runtime.py b/mcpgateway/plugins/framework/external/mcp/server/runtime.py index f7fec637e..34441d202 100644 --- a/mcpgateway/plugins/framework/external/mcp/server/runtime.py +++ b/mcpgateway/plugins/framework/external/mcp/server/runtime.py @@ -18,6 +18,8 @@ # Standard import asyncio import logging +import os +import sys from typing import Any, Dict # Third-Party @@ -62,7 +64,7 @@ async def get_plugin_configs() -> list[dict]: """Get the plugin configurations installed on the server. Returns: - List of plugin configuration dictionaries. + JSON string containing list of plugin configuration dictionaries. """ return await SERVER.get_plugin_configs() @@ -74,7 +76,7 @@ async def get_plugin_config(name: str) -> dict: name: The name of the plugin Returns: - Plugin configuration dictionary. + JSON string containing plugin configuration dictionary. """ return await SERVER.get_plugin_config(name) @@ -394,14 +396,18 @@ async def health_check(request: Request): async def run(): """Run the external plugin server with FastMCP. + Supports both stdio and HTTP transports. Auto-detects transport based on stdin + (if stdin is not a TTY, uses stdio mode), or you can explicitly set PLUGINS_TRANSPORT. 
+ Reads configuration from PLUGINS_SERVER_* environment variables: - - PLUGINS_SERVER_HOST: Server host (default: 0.0.0.0) - - PLUGINS_SERVER_PORT: Server port (default: 8000) - - PLUGINS_SERVER_SSL_ENABLED: Enable SSL/TLS (true/false) - - PLUGINS_SERVER_SSL_KEYFILE: Path to server private key - - PLUGINS_SERVER_SSL_CERTFILE: Path to server certificate - - PLUGINS_SERVER_SSL_CA_CERTS: Path to CA bundle for client verification - - PLUGINS_SERVER_SSL_CERT_REQS: Client cert requirement (0=NONE, 1=OPTIONAL, 2=REQUIRED) + - PLUGINS_TRANSPORT: Transport type - 'stdio' or 'http' (default: auto-detect) + - PLUGINS_SERVER_HOST: Server host (default: 0.0.0.0) - HTTP mode only + - PLUGINS_SERVER_PORT: Server port (default: 8000) - HTTP mode only + - PLUGINS_SERVER_SSL_ENABLED: Enable SSL/TLS (true/false) - HTTP mode only + - PLUGINS_SERVER_SSL_KEYFILE: Path to server private key - HTTP mode only + - PLUGINS_SERVER_SSL_CERTFILE: Path to server certificate - HTTP mode only + - PLUGINS_SERVER_SSL_CA_CERTS: Path to CA bundle for client verification - HTTP mode only + - PLUGINS_SERVER_SSL_CERT_REQS: Client cert requirement (0=NONE, 1=OPTIONAL, 2=REQUIRED) - HTTP mode only Raises: Exception: If plugin server initialization or execution fails. 
@@ -415,28 +421,62 @@ async def run(): logger.error("Failed to initialize plugin server") return - try: - # Create FastMCP server with SSL support - mcp = SSLCapableFastMCP( - server_config=SERVER.get_server_config(), - name=MCP_SERVER_NAME, - instructions=MCP_SERVER_INSTRUCTIONS, - ) + # Determine transport type from environment variable or auto-detect + # Auto-detect: if stdin is not a TTY (i.e., it's being piped), use stdio mode + transport = os.environ.get("PLUGINS_TRANSPORT", None) + if transport is None: + # Auto-detect based on stdin + if not sys.stdin.isatty(): + transport = "stdio" + logger.info("Auto-detected stdio transport (stdin is not a TTY)") + else: + transport = "http" + else: + transport = transport.lower() - # Register module-level tool functions with FastMCP - # These are defined at module level for testability - mcp.tool(name=GET_PLUGIN_CONFIGS)(get_plugin_configs) - mcp.tool(name=GET_PLUGIN_CONFIG)(get_plugin_config) - mcp.tool(name=HookType.PROMPT_PRE_FETCH.value)(prompt_pre_fetch) - mcp.tool(name=HookType.PROMPT_POST_FETCH.value)(prompt_post_fetch) - mcp.tool(name=HookType.TOOL_PRE_INVOKE.value)(tool_pre_invoke) - mcp.tool(name=HookType.TOOL_POST_INVOKE.value)(tool_post_invoke) - mcp.tool(name=HookType.RESOURCE_PRE_FETCH.value)(resource_pre_fetch) - mcp.tool(name=HookType.RESOURCE_POST_FETCH.value)(resource_post_fetch) - - # Run with streamable-http transport - logger.info("Starting MCP plugin server with FastMCP") - await mcp.run_streamable_http_async() + try: + if transport == "stdio": + # Create basic FastMCP server for stdio (no SSL support needed for stdio) + mcp = FastMCP( + name=MCP_SERVER_NAME, + instructions=MCP_SERVER_INSTRUCTIONS, + ) + + # Register module-level tool functions with FastMCP + mcp.tool(name=GET_PLUGIN_CONFIGS)(get_plugin_configs) + mcp.tool(name=GET_PLUGIN_CONFIG)(get_plugin_config) + mcp.tool(name=HookType.PROMPT_PRE_FETCH.value)(prompt_pre_fetch) + mcp.tool(name=HookType.PROMPT_POST_FETCH.value)(prompt_post_fetch) 
+ mcp.tool(name=HookType.TOOL_PRE_INVOKE.value)(tool_pre_invoke) + mcp.tool(name=HookType.TOOL_POST_INVOKE.value)(tool_post_invoke) + mcp.tool(name=HookType.RESOURCE_PRE_FETCH.value)(resource_pre_fetch) + mcp.tool(name=HookType.RESOURCE_POST_FETCH.value)(resource_post_fetch) + + # Run with stdio transport + logger.info("Starting MCP plugin server with FastMCP (stdio transport)") + await mcp.run_stdio_async() + + else: # http or streamablehttp + # Create FastMCP server with SSL support + mcp = SSLCapableFastMCP( + server_config=SERVER.get_server_config(), + name=MCP_SERVER_NAME, + instructions=MCP_SERVER_INSTRUCTIONS, + ) + + # Register module-level tool functions with FastMCP + mcp.tool(name=GET_PLUGIN_CONFIGS)(get_plugin_configs) + mcp.tool(name=GET_PLUGIN_CONFIG)(get_plugin_config) + mcp.tool(name=HookType.PROMPT_PRE_FETCH.value)(prompt_pre_fetch) + mcp.tool(name=HookType.PROMPT_POST_FETCH.value)(prompt_post_fetch) + mcp.tool(name=HookType.TOOL_PRE_INVOKE.value)(tool_pre_invoke) + mcp.tool(name=HookType.TOOL_POST_INVOKE.value)(tool_post_invoke) + mcp.tool(name=HookType.RESOURCE_PRE_FETCH.value)(resource_pre_fetch) + mcp.tool(name=HookType.RESOURCE_POST_FETCH.value)(resource_post_fetch) + + # Run with streamable-http transport + logger.info("Starting MCP plugin server with FastMCP (HTTP transport)") + await mcp.run_streamable_http_async() except Exception: logger.exception("Caught error while executing plugin server") diff --git a/tests/unit/mcpgateway/plugins/framework/external/mcp/server/test_runtime.py b/tests/unit/mcpgateway/plugins/framework/external/mcp/server/test_runtime.py index e70565ddd..21e5e7ee3 100644 --- a/tests/unit/mcpgateway/plugins/framework/external/mcp/server/test_runtime.py +++ b/tests/unit/mcpgateway/plugins/framework/external/mcp/server/test_runtime.py @@ -9,6 +9,7 @@ # Standard import asyncio +import json # Third-Party import pytest @@ -42,6 +43,7 @@ def server(): async def test_get_plugin_configs(monkeypatch, server): 
monkeypatch.setattr(runtime, "SERVER", server) configs = await runtime.get_plugin_configs() + #configs = json.loads(confstr) assert len(configs) > 0 diff --git a/tests/unit/mcpgateway/plugins/framework/external/mcp/test_client_stdio.py b/tests/unit/mcpgateway/plugins/framework/external/mcp/test_client_stdio.py index 130ba510a..058553e4b 100644 --- a/tests/unit/mcpgateway/plugins/framework/external/mcp/test_client_stdio.py +++ b/tests/unit/mcpgateway/plugins/framework/external/mcp/test_client_stdio.py @@ -135,10 +135,9 @@ async def test_client_get_plugin_configs(): all_configs = [] configs = await session.call_tool("get_plugin_configs", {}) for content in configs.content: - confs = json.loads(content.text) - for c in confs: - plugconfig = PluginConfig.model_validate(c) - all_configs.append(plugconfig) + conf = json.loads(content.text) + plugconfig = PluginConfig.model_validate(conf) + all_configs.append(plugconfig) await exit_stack.aclose() assert all_configs[0].name == "SynonymsPlugin" assert all_configs[0].kind == "plugins.regex_filter.search_replace.SearchReplacePlugin" diff --git a/tests/unit/mcpgateway/plugins/framework/test_manager_extended.py b/tests/unit/mcpgateway/plugins/framework/test_manager_extended.py index 3bca0ca66..018c4fdcd 100644 --- a/tests/unit/mcpgateway/plugins/framework/test_manager_extended.py +++ b/tests/unit/mcpgateway/plugins/framework/test_manager_extended.py @@ -547,7 +547,6 @@ async def test_manager_initialization_edge_cases(): # Test plugin instantiation failure (covers lines 495-501) # First-Party - from mcpgateway.plugins.framework.loader.plugin import PluginLoader from mcpgateway.plugins.framework.models import PluginConfig, PluginMode, PluginSettings manager2 = PluginManager() @@ -592,11 +591,6 @@ async def test_manager_initialization_edge_cases(): plugin_settings=PluginSettings() ) - with patch('mcpgateway.plugins.framework.manager.logger') as mock_logger: - await manager3.initialize() - # Disabled plugins are now registered as 
stubs (info log), not skipped during load - mock_logger.info.assert_any_call("Registered disabled plugin: DisabledPlugin (display only, not instantiated)") - await manager3.shutdown() await manager2.shutdown() diff --git a/tests/unit/mcpgateway/plugins/framework/test_models_tls.py b/tests/unit/mcpgateway/plugins/framework/test_models_tls.py index 492ec7049..d01ba9336 100644 --- a/tests/unit/mcpgateway/plugins/framework/test_models_tls.py +++ b/tests/unit/mcpgateway/plugins/framework/test_models_tls.py @@ -34,7 +34,7 @@ def test_plugin_config_supports_tls_block(tmp_path, verify): "url": "https://plugins.internal.example.com/mcp", "tls": { "ca_bundle": str(ca_path), - "client_cert": str(client_bundle), + "certfile": str(client_bundle), "verify": verify, }, }, @@ -42,7 +42,7 @@ def test_plugin_config_supports_tls_block(tmp_path, verify): assert config.mcp is not None assert config.mcp.tls is not None - assert config.mcp.tls.client_cert == str(client_bundle) + assert config.mcp.tls.certfile == str(client_bundle) assert config.mcp.tls.verify == verify @@ -59,7 +59,7 @@ def test_plugin_config_tls_missing_cert_raises(tmp_path): "proto": "STREAMABLEHTTP", "url": "https://plugins.internal.example.com/mcp", "tls": { - "client_key": str(ca_path), + "keyfile": str(ca_path), }, }, ) @@ -89,16 +89,16 @@ def test_tls_config_from_env_defaults(monkeypatch, tmp_path): _write_pem(ca_path) _write_pem(client_cert) - monkeypatch.setenv("PLUGINS_MTLS_CA_BUNDLE", str(ca_path)) - monkeypatch.setenv("PLUGINS_MTLS_CLIENT_CERT", str(client_cert)) - monkeypatch.setenv("PLUGINS_MTLS_VERIFY", "true") - monkeypatch.setenv("PLUGINS_MTLS_CHECK_HOSTNAME", "true") + monkeypatch.setenv("PLUGINS_CLIENT_MTLS_CA_BUNDLE", str(ca_path)) + monkeypatch.setenv("PLUGINS_CLIENT_MTLS_CERTFILE", str(client_cert)) + monkeypatch.setenv("PLUGINS_CLIENT_MTLS_VERIFY", "true") + monkeypatch.setenv("PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME", "true") tls_config = MCPClientTLSConfig.from_env() assert tls_config is not None 
assert tls_config.ca_bundle == str(ca_path) - assert tls_config.client_cert == str(client_cert) + assert tls_config.certfile == str(client_cert) assert tls_config.verify is True assert tls_config.check_hostname is True From 4046e792dbcf2d76aadfd381aa880d0993922bfd Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Fri, 10 Oct 2025 13:48:00 -0600 Subject: [PATCH 16/35] fix: remove commented code. Signed-off-by: Teryl Taylor --- .../plugins/framework/external/mcp/server/test_runtime.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/unit/mcpgateway/plugins/framework/external/mcp/server/test_runtime.py b/tests/unit/mcpgateway/plugins/framework/external/mcp/server/test_runtime.py index 21e5e7ee3..427844cb6 100644 --- a/tests/unit/mcpgateway/plugins/framework/external/mcp/server/test_runtime.py +++ b/tests/unit/mcpgateway/plugins/framework/external/mcp/server/test_runtime.py @@ -43,7 +43,6 @@ def server(): async def test_get_plugin_configs(monkeypatch, server): monkeypatch.setattr(runtime, "SERVER", server) configs = await runtime.get_plugin_configs() - #configs = json.loads(confstr) assert len(configs) > 0 From 422395eac80affa6bf3eedd4e897138a17eda645 Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Fri, 10 Oct 2025 15:20:06 -0600 Subject: [PATCH 17/35] docs: and examples Signed-off-by: Teryl Taylor --- docs/docs/deployment/.pages | 1 + docs/docs/deployment/cforge-gateway.md | 1512 +++++++++++++++++ .../deploy-compose.mtls.yaml | 99 ++ .../deployment-configs/deploy-compose.yaml | 96 ++ .../deployment-configs/deploy-k8s.mtls.yaml | 84 + examples/deployment-configs/deploy-k8s.yaml | 82 + 6 files changed, 1874 insertions(+) create mode 100644 docs/docs/deployment/cforge-gateway.md create mode 100644 examples/deployment-configs/deploy-compose.mtls.yaml create mode 100644 examples/deployment-configs/deploy-compose.yaml create mode 100644 examples/deployment-configs/deploy-k8s.mtls.yaml create mode 100644 examples/deployment-configs/deploy-k8s.yaml diff --git 
a/docs/docs/deployment/.pages b/docs/docs/deployment/.pages index f7e568e00..2e093a6ad 100644 --- a/docs/docs/deployment/.pages +++ b/docs/docs/deployment/.pages @@ -14,3 +14,4 @@ nav: - azure.md - fly-io.md - proxy-auth.md + - cforge-gateway.md diff --git a/docs/docs/deployment/cforge-gateway.md b/docs/docs/deployment/cforge-gateway.md new file mode 100644 index 000000000..a90f45177 --- /dev/null +++ b/docs/docs/deployment/cforge-gateway.md @@ -0,0 +1,1512 @@ +# cforge gateway - Deployment Tool + +## Overview + +The `cforge gateway` command is a powerful deployment tool for MCP Gateway and its external plugins. It provides a unified, declarative way to build, configure, and deploy the complete MCP stack from a single YAML configuration file. + +### Why We Created It + +Before `cforge gateway`, deploying MCP Gateway with external plugins required: + +- **Manual container builds** for each plugin from different repositories +- **Complex mTLS certificate generation** and distribution +- **Hand-crafted Kubernetes manifests** or Docker Compose files +- **Environment variable management** across multiple services +- **Coordination** between gateway configuration and plugin deployments + +`cforge gateway` solves these challenges by: + +βœ… **Automating the entire deployment pipeline** from source to running services +βœ… **Managing mTLS certificates** automatically with proper distribution +βœ… **Generating deployment manifests** (Kubernetes or Docker Compose) from a single source +βœ… **Supporting multiple build modes** (Dagger for performance, plain Python for portability) +βœ… **Validating configurations** before deployment +βœ… **Integrating with CI/CD** workflows and secret management + +--- + +## Features + +### Build System + +- **Dual-mode execution**: Dagger (optimal performance) or plain Python (fallback) +- **Git-based plugin builds**: Clone and build plugins from any Git repository +- **Pre-built image support**: Use existing Docker images +- **Multi-stage 
build support**: Build specific stages from Dockerfiles +- **Build caching**: Intelligent caching to speed up rebuilds + +### Deployment Targets + +- **Kubernetes**: Full manifest generation with ConfigMaps, Secrets, Services, Deployments +- **Docker Compose**: Complete stack with networking and volume management +- **Local development**: Quick testing with exposed ports +- **Production-ready**: Resource limits, health checks, and best practices + +### Security + +- **Automatic mTLS**: Generate and distribute certificates for gateway ↔ plugin communication +- **Certificate rotation**: Configurable validity periods +- **Secret management**: Integration with environment files and CI/CD vaults +- **Network isolation**: Proper service-to-service communication + +### Workflow Automation + +- **Validation**: Pre-flight checks before deployment +- **Build**: Build containers from source or pull pre-built images +- **Certificate generation**: Create mTLS cert hierarchy +- **Deployment**: Apply manifests to target environment +- **Verification**: Health check deployed services +- **Destruction**: Clean teardown + +--- + +## Future Directions + +The `cforge gateway` tool is actively evolving to support broader MCP ecosystem workflows. Planned enhancements include: + +### MCP Server Lifecycle Management + +Currently, `cforge gateway` focuses on deploying external plugins. 
Future versions will support the complete lifecycle of MCP servers: + +- **Build & Deploy MCP Servers**: Build MCP servers from Git repositories, similar to current plugin support +- **Automatic Registration**: Deploy MCP servers and automatically register them with the gateway as peers +- **Plugin Attachment**: Attach and configure plugins for registered MCP servers, enabling policy enforcement and filtering at the server level +- **Configuration Generation**: Generate MCP server configurations from templates +- **Multi-Server Deployments**: Deploy multiple MCP servers as a coordinated fleet + +This will enable declarative deployment of complete MCP ecosystems from a single configuration file: + +```yaml +# Future concept +mcp_servers: + - name: GitHubMCPServer + repo: https://github.com/org/mcp-server-github.git + auto_register: true # Auto-register as gateway peer + expose_tools: ["*"] # Expose all tools through gateway + expose_resources: ["repos"] # Expose specific resources + + # Attach plugins to this MCP server + plugins: + - OPAPluginFilter # Apply OPA policies to this server + - PIIFilterPlugin # Filter PII from responses +``` + +### Live MCP Server Discovery + +Automatic discovery and registration of running MCP servers: + +- **mDNS/Zeroconf Discovery**: Automatically discover MCP servers on the local network +- **Service Mesh Integration**: Integrate with Kubernetes service discovery +- **Dynamic Registration**: Register servers at runtime without redeployment +- **Health-Based Registration**: Automatically register/deregister based on health checks + +### Container Security Policies + +Attach security policies to built containers for enhanced compliance and governance: + +- **OPA Policy Bundles**: Include Open Policy Agent (OPA) policies with container builds +- **SBOM Generation**: Automatically generate Software Bill of Materials (SBOM) for built images +- **Vulnerability Scanning**: Integrate Trivy/Grype scans into build pipeline +- **Policy 
Enforcement**: Define and enforce security policies (allowed packages, CVE thresholds, etc.) +- **Signing & Attestation**: Sign built images with Cosign/Sigstore +- **Runtime Security**: Define AppArmor/SELinux profiles for deployed containers + +Example future configuration: + +```yaml +# Future concept +security: + policies: + enabled: true + opa_bundle: ./policies/container-security.rego + sbom: true + vulnerability_scan: + enabled: true + fail_on: critical + allowlist: ["CVE-2024-1234"] + signing: + enabled: true + keyless: true # Sigstore keyless signing +``` + +These enhancements will make `cforge gateway` a comprehensive tool for building, securing, deploying, and managing the entire MCP infrastructure stack. + +--- + +## Quick Start + +### Installation + +The `cforge` CLI is installed with the MCP Gateway package: + +```bash +pip install -e . +``` + +Verify installation: + +```bash +cforge --help +cforge gateway --help +``` + +### Basic Workflow + +```bash +# 1. Validate your configuration +cforge gateway validate examples/deployment-configs/deploy-compose.yaml + +# 2. Build containers (if building from source) +cforge gateway build examples/deployment-configs/deploy-compose.yaml + +# 3. Generate mTLS certificates (if needed) +cforge gateway certs examples/deployment-configs/deploy-compose.yaml + +# 4. Deploy the stack +cforge gateway deploy examples/deployment-configs/deploy-compose.yaml + +# 5. Verify deployment health +cforge gateway verify examples/deployment-configs/deploy-compose.yaml + +# 6. (Optional) Tear down +cforge gateway destroy examples/deployment-configs/deploy-compose.yaml +``` + +--- + +## Commands + +### `cforge gateway validate` + +Validates the deployment configuration file without making any changes. 
+ +```bash +cforge gateway validate CONFIG_FILE +``` + +**Example:** +```bash +cforge gateway validate deploy.yaml +``` + +**Output:** +- βœ… Configuration syntax validation +- βœ… Plugin name uniqueness check +- βœ… Required field verification +- βœ… Build configuration validation (image XOR repo) + +--- + +### `cforge gateway build` + +Builds container images for gateway and/or plugins from source repositories. + +```bash +cforge gateway build CONFIG_FILE [OPTIONS] +``` + +**Options:** + +| Option | Description | Default | +|--------|-------------|---------| +| `--plugins-only` | Only build plugin containers, skip gateway | `false` | +| `--plugin NAME`, `-p NAME` | Build specific plugin(s) only (can specify multiple) | All plugins | +| `--no-cache` | Disable Docker build cache | `false` | +| `--copy-env-templates` | Copy `.env.template` files from plugin repos | `true` | + +**Examples:** +```bash +# Build everything +cforge gateway build deploy.yaml + +# Build only plugins +cforge gateway build deploy.yaml --plugins-only + +# Build specific plugin +cforge gateway build deploy.yaml --plugin OPAPluginFilter + +# Build multiple plugins with no cache +cforge gateway build deploy.yaml --plugin OPAPluginFilter --plugin LLMGuardPlugin --no-cache +``` + +**What it does:** +1. Clones Git repositories (if `repo` specified) +2. Checks out specified branch/tag/commit (`ref`) +3. Builds Docker images from `containerfile` in `context` directory +4. Tags images appropriately for deployment +5. Copies `.env.template` files to `deploy/env/` for customization + +--- + +### `cforge gateway certs` + +Generates mTLS certificate hierarchy for secure gateway ↔ plugin communication.
+ +```bash +cforge gateway certs CONFIG_FILE +``` + +**Example:** +```bash +cforge gateway certs deploy.yaml +``` + +**What it generates:** +``` +certs/mcp/ +β”œβ”€β”€ ca/ +β”‚ β”œβ”€β”€ ca.crt # Root CA certificate +β”‚ └── ca.key # Root CA private key +β”œβ”€β”€ gateway/ +β”‚ β”œβ”€β”€ client.crt # Gateway client certificate +β”‚ β”œβ”€β”€ client.key # Gateway client private key +β”‚ └── ca.crt # CA cert (for verification) +└── plugins/ + β”œβ”€β”€ PluginName1/ + β”‚ β”œβ”€β”€ server.crt # Plugin server certificate + β”‚ β”œβ”€β”€ server.key # Plugin server private key + β”‚ └── ca.crt # CA cert (for verification) + └── PluginName2/ + β”œβ”€β”€ server.crt + β”œβ”€β”€ server.key + └── ca.crt +``` + +**Certificate Properties:** +- Validity: Configurable (default: 825 days) +- CN for gateway: `mcp-gateway` +- CN for plugins: `mcp-plugin-{PluginName}` +- SANs: `{PluginName}, mcp-plugin-{PluginName}, localhost` + +--- + +### `cforge gateway deploy` + +Deploys the complete MCP stack to the target environment. + +```bash +cforge gateway deploy CONFIG_FILE [OPTIONS] +``` + +**Options:** + +| Option | Description | Default | +|--------|-------------|---------| +| `--output-dir DIR`, `-o DIR` | Custom output directory for manifests | `deploy/` | +| `--dry-run` | Generate manifests without deploying | `false` | +| `--skip-build` | Skip container build step | `false` | +| `--skip-certs` | Skip certificate generation | `false` | + +**Examples:** +```bash +# Full deployment +cforge gateway deploy deploy.yaml + +# Dry-run (generate manifests only) +cforge gateway deploy deploy.yaml --dry-run + +# Deploy with existing images and certs +cforge gateway deploy deploy.yaml --skip-build --skip-certs + +# Custom output directory +cforge gateway deploy deploy.yaml --output-dir ./my-deployment +``` + +**Deployment Process:** +1. **Validate** configuration +2. **Build** containers (unless `--skip-build`) +3. **Generate certificates** (unless `--skip-certs` or already exist) +4.
**Generate manifests** (Kubernetes or Docker Compose) +5. **Apply** to target environment: + - **Kubernetes**: `kubectl apply -f` + - **Docker Compose**: `docker-compose up -d` + +**Generated Files:** +``` +deploy/ +β”œβ”€β”€ env/ # Environment files +β”‚ β”œβ”€β”€ .env.gateway +β”‚ β”œβ”€β”€ .env.PluginName1 +β”‚ └── .env.PluginName2 +β”œβ”€β”€ manifests/ # Kubernetes OR +β”‚ β”œβ”€β”€ namespace.yaml +β”‚ β”œβ”€β”€ configmaps.yaml +β”‚ β”œβ”€β”€ secrets.yaml +β”‚ β”œβ”€β”€ gateway-deployment.yaml +β”‚ β”œβ”€β”€ gateway-service.yaml +β”‚ β”œβ”€β”€ plugin-deployments.yaml +β”‚ └── plugin-services.yaml +└── docker-compose.yaml # Docker Compose +``` + +--- + +### `cforge gateway verify` + +Verifies that the deployed stack is healthy and running. + +```bash +cforge gateway verify [OPTIONS] +``` + +**Options:** + +| Option | Description | Default | +|--------|-------------|---------| +| `--wait` | Wait for deployment to be ready | `true` | +| `--timeout SECONDS` | Wait timeout in seconds | `300` | + +**Examples:** +```bash +# Verify deployment (wait up to 5 minutes) +cforge gateway verify deploy.yaml + +# Quick check without waiting +cforge gateway verify deploy.yaml --no-wait + +# Custom timeout +cforge gateway verify deploy.yaml --timeout 600 +``` + +**Checks:** +- Container/pod readiness +- Health endpoint responses +- Service connectivity +- mTLS handshake (if enabled) + +--- + +### `cforge gateway destroy` + +Tears down the deployed MCP stack. 
+ +```bash +cforge gateway destroy [OPTIONS] +``` + +**Options:** + +| Option | Description | Default | +|--------|-------------|---------| +| `--force` | Skip confirmation prompt | `false` | + +**Examples:** +```bash +# Destroy with confirmation +cforge gateway destroy deploy.yaml + +# Force destroy without prompt +cforge gateway destroy deploy.yaml --force +``` + +**What it removes:** +- **Kubernetes**: Deletes all resources in namespace +- **Docker Compose**: Stops and removes containers, networks, volumes + +⚠️ **Note:** This does NOT delete generated certificates or build artifacts. To clean those: +```bash +rm -rf certs/ deploy/ +``` + +--- + +### `cforge gateway generate` + +Generates deployment manifests without deploying them. + +```bash +cforge gateway generate [OPTIONS] +``` + +**Options:** + +| Option | Description | Default | +|--------|-------------|---------| +| `--output DIR`, `-o DIR` | Output directory for manifests | `deploy/` | + +**Examples:** +```bash +# Generate manifests +cforge gateway generate deploy.yaml + +# Custom output directory +cforge gateway generate deploy.yaml --output ./manifests +``` + +**Use cases:** +- GitOps workflows (commit generated manifests) +- Manual review before deployment +- Integration with external deployment tools +- CI/CD pipeline artifact generation + +--- + +### `cforge gateway version` + +Shows version and runtime information. 
+ +```bash +cforge gateway version +``` + +**Output:** +``` +β”Œβ”€ Version Info ─────────────────┐ +β”‚ MCP Deploy β”‚ +β”‚ Version: 1.0.0 β”‚ +β”‚ Mode: dagger β”‚ +β”‚ Environment: local β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ +``` + +--- + +## Global Options + +These options apply to all commands: + +| Option | Description | Default | +|--------|-------------|---------| +| `--no-dagger` | Force plain Python mode (skip Dagger) | Auto-detect | +| `--verbose`, `-v` | Verbose output | `false` | + +**Examples:** +```bash +# Use plain Python instead of Dagger +cforge gateway --no-dagger deploy deploy.yaml + +# Verbose mode +cforge gateway -v build deploy.yaml + +# Combine options +cforge gateway --no-dagger -v deploy deploy.yaml +``` + +--- + +## Configuration Reference + +### Deployment Configuration + +Top-level deployment settings: + +```yaml +deployment: + type: kubernetes | compose # Required: Deployment target + project_name: my-project # Docker Compose only + namespace: mcp-gateway # Kubernetes only +``` + +| Field | Type | Required | Description | Default | +|-------|------|----------|-------------|---------| +| `type` | string | βœ… | Deployment type: `kubernetes` or `compose` | - | +| `project_name` | string | ❌ | Docker Compose project name | - | +| `namespace` | string | ❌ | Kubernetes namespace | - | + +--- + +### Gateway Configuration + +Gateway server settings: + +```yaml +gateway: + # Build Configuration (choose ONE) + image: mcpgateway/mcpgateway:latest # Pre-built image + # OR + repo: https://github.com/org/repo.git # Build from source + ref: main # Git branch/tag/commit + context: . 
# Build context directory + containerfile: Containerfile # Dockerfile path + target: production # Multi-stage build target + + # Runtime Configuration + port: 4444 # Internal port + host_port: 4444 # Host port mapping (compose only) + + # mTLS Client Configuration (gateway β†’ plugins) + mtls_enabled: true # Enable mTLS + mtls_verify: true # Verify server certs + mtls_check_hostname: false # Verify hostname + + # Environment Variables + env_vars: + LOG_LEVEL: INFO + MCPGATEWAY_UI_ENABLED: "true" + AUTH_REQUIRED: "true" + # ... (see full reference below) + + # Kubernetes-specific + replicas: 2 # Number of replicas + service_type: ClusterIP # Service type + service_port: 4444 # Service port + memory_request: 256Mi # Memory request + memory_limit: 512Mi # Memory limit + cpu_request: 100m # CPU request + cpu_limit: 500m # CPU limit + image_pull_policy: IfNotPresent # Image pull policy +``` + +**Build Configuration Fields:** + +| Field | Type | Required | Description | Default | +|-------|------|----------|-------------|---------| +| `image` | string | ❌* | Pre-built Docker image | - | +| `repo` | string | ❌* | Git repository URL | - | +| `ref` | string | ❌ | Git branch/tag/commit | `main` | +| `context` | string | ❌ | Build context subdirectory | `.` | +| `containerfile` | string | ❌ | Containerfile/Dockerfile path | `Containerfile` | +| `target` | string | ❌ | Multi-stage build target | - | + +\* **Either `image` OR `repo` must be specified** + +**Runtime Configuration Fields:** + +| Field | Type | Required | Description | Default | +|-------|------|----------|-------------|---------| +| `port` | integer | ❌ | Internal container port | `4444` | +| `host_port` | integer | ❌ | Host port mapping (compose only) | - | +| `env_vars` | object | ❌ | Environment variables | `{}` | +| `mtls_enabled` | boolean | ❌ | Enable mTLS client | `true` | +| `mtls_verify` | boolean | ❌ | Verify server certificates | `true` | +| `mtls_check_hostname` | boolean | ❌ | Verify hostname in cert 
| `false` | + +**Kubernetes-specific Fields:** + +| Field | Type | Required | Description | Default | +|-------|------|----------|-------------|---------| +| `replicas` | integer | ❌ | Number of pod replicas | `1` | +| `service_type` | string | ❌ | Service type (ClusterIP, NodePort, LoadBalancer) | `ClusterIP` | +| `service_port` | integer | ❌ | Service port | `4444` | +| `memory_request` | string | ❌ | Memory request | `256Mi` | +| `memory_limit` | string | ❌ | Memory limit | `512Mi` | +| `cpu_request` | string | ❌ | CPU request | `100m` | +| `cpu_limit` | string | ❌ | CPU limit | `500m` | +| `image_pull_policy` | string | ❌ | Image pull policy | `IfNotPresent` | + +--- + +### Plugin Configuration + +External plugin settings (array of plugin objects): + +```yaml +plugins: + - name: MyPlugin # Required: Unique plugin name + + # Build Configuration (choose ONE) + image: myorg/myplugin:latest # Pre-built image + # OR + repo: https://github.com/org/repo.git # Build from source + ref: main + context: plugins/myplugin + containerfile: Containerfile + target: builder + + # Runtime Configuration + port: 8000 # Internal port + expose_port: true # Expose on host (compose only) + + # mTLS Server Configuration (plugin server) + mtls_enabled: true # Enable mTLS server + + # Environment Variables + env_vars: + LOG_LEVEL: DEBUG + CUSTOM_SETTING: value + + # Plugin Manager Overrides (client-side) + plugin_overrides: + priority: 10 + mode: enforce + description: "My custom plugin" + tags: ["security", "filter"] + + # Kubernetes-specific + replicas: 1 + service_type: ClusterIP + service_port: 8000 + memory_request: 128Mi + memory_limit: 256Mi + cpu_request: 50m + cpu_limit: 200m + image_pull_policy: IfNotPresent +``` + +**Required Fields:** + +| Field | Type | Description | +|-------|------|-------------| +| `name` | string | Unique plugin identifier (used for cert CN, service names, etc.) 
| + +**Build Configuration:** Same as Gateway (see above) + +**Runtime Configuration:** + +| Field | Type | Required | Description | Default | +|-------|------|----------|-------------|---------| +| `port` | integer | ❌ | Internal container port | `8000` | +| `expose_port` | boolean | ❌ | Expose port on host (compose only) | `false` | +| `env_vars` | object | ❌ | Environment variables | `{}` | +| `mtls_enabled` | boolean | ❌ | Enable mTLS server | `true` | +| `plugin_overrides` | object | ❌ | Plugin manager config overrides | `{}` | + +**Plugin Overrides:** + +| Field | Type | Description | Default | +|-------|------|-------------|---------| +| `priority` | integer | Plugin execution priority (lower = earlier) | - | +| `mode` | string | `enforce`, `monitor`, or `dry-run` | - | +| `description` | string | Plugin description | - | +| `tags` | array | Plugin tags for categorization | - | +| `hooks` | array | Enabled hooks: `prompt_pre_fetch`, `tool_pre_invoke`, etc. | All hooks | + +**Kubernetes-specific:** Same as Gateway (see above) + +--- + +### Certificate Configuration + +mTLS certificate generation settings: + +```yaml +certificates: + validity_days: 825 # Certificate validity period + auto_generate: true # Auto-generate if missing + ca_path: ./certs/mcp/ca # CA certificate directory + gateway_path: ./certs/mcp/gateway # Gateway cert directory + plugins_path: ./certs/mcp/plugins # Plugins cert directory +``` + +| Field | Type | Required | Description | Default | +|-------|------|----------|-------------|---------| +| `validity_days` | integer | ❌ | Certificate validity in days | `825` | +| `auto_generate` | boolean | ❌ | Auto-generate certificates if missing | `true` | +| `ca_path` | string | ❌ | CA certificate directory | `./certs/mcp/ca` | +| `gateway_path` | string | ❌ | Gateway client cert directory | `./certs/mcp/gateway` | +| `plugins_path` | string | ❌ | Plugin server certs base directory | `./certs/mcp/plugins` | + +--- + +### Infrastructure Services + 
+PostgreSQL and Redis are **automatically deployed** with the MCP Gateway stack using hardcoded defaults: + +**PostgreSQL (always deployed):** +- Image: `postgres:17` +- Database: `mcp` +- User: `postgres` +- Password: `mysecretpassword` (override with `POSTGRES_PASSWORD` env var) +- Port: `5432` +- Kubernetes: Uses 10Gi PVC + +**Redis (always deployed):** +- Image: `redis:latest` +- Port: `6379` + +**Connection strings (auto-configured):** +```bash +DATABASE_URL=postgresql://postgres:${POSTGRES_PASSWORD}@postgres:5432/mcp +REDIS_URL=redis://redis:6379/0 +``` + +These services are included in all deployments and cannot currently be disabled or customized via the deployment YAML. To customize PostgreSQL password: + +```bash +# Set before deploying +export POSTGRES_PASSWORD=your-secure-password +cforge gateway deploy deploy.yaml +``` + +--- + +## Example Configurations + +### Example 1: Docker Compose (No mTLS) + +**File:** `examples/deployment-configs/deploy-compose.yaml` + +Simple local deployment for development and testing: + +```yaml +deployment: + type: compose + project_name: mcp-stack-test + +gateway: + image: mcpgateway/mcpgateway:latest + port: 4444 + host_port: 4444 + + env_vars: + LOG_LEVEL: DEBUG + MCPGATEWAY_UI_ENABLED: "true" + AUTH_REQUIRED: "false" + + mtls_enabled: false + +plugins: + - name: OPAPluginFilter + repo: https://github.com/terylt/mcp-context-forge.git + ref: feat/use_mtls_plugins + context: plugins/external/opa + + expose_port: true + mtls_enabled: false + + plugin_overrides: + priority: 10 + mode: "enforce" + +certificates: + auto_generate: true +``` + +**Use case:** Quick local testing without security overhead + +**Deploy:** +```bash +cforge gateway deploy examples/deployment-configs/deploy-compose.yaml +``` + +**Access:** +- Gateway: http://localhost:4444 +- Admin UI: http://localhost:4444/admin +- Plugin (exposed): http://localhost:8000 + +--- + +### Example 2: Docker Compose (With mTLS) + +**File:** 
`examples/deployment-configs/deploy-compose.mtls.yaml` + +Secure local deployment with mutual TLS: + +```yaml +deployment: + type: compose + project_name: mcp-stack-test + +gateway: + image: mcpgateway/mcpgateway:latest + port: 4444 + host_port: 4444 + + mtls_enabled: true # ← Enable mTLS client + mtls_verify: true + mtls_check_hostname: false # Don't verify hostname for localhost + +plugins: + - name: OPAPluginFilter + repo: https://github.com/terylt/mcp-context-forge.git + ref: feat/use_mtls_plugins + context: plugins/external/opa + + mtls_enabled: true # ← Enable mTLS server + + plugin_overrides: + priority: 10 + mode: "enforce" + +certificates: + validity_days: 825 + auto_generate: true # Auto-generate mTLS certs +``` + +**Use case:** Local testing with production-like security + +**Deploy:** +```bash +# Certificates are auto-generated during deploy +cforge gateway deploy examples/deployment-configs/deploy-compose.mtls.yaml +``` + +**How mTLS works:** +1. `cforge gateway certs` generates CA + gateway client cert + plugin server certs +2. Gateway connects to plugins using client certificate +3. Plugins verify gateway's client certificate against CA +4. 
All communication is encrypted and mutually authenticated + +--- + +### Example 3: Kubernetes (Pre-built Images) + +**File:** `examples/deployment-configs/deploy-k8s.yaml` + +Production-ready Kubernetes deployment using pre-built images: + +```yaml +deployment: + type: kubernetes + namespace: mcp-gateway-prod + +gateway: + image: mcpgateway/mcpgateway:latest + image_pull_policy: IfNotPresent + + replicas: 2 # High availability + service_type: LoadBalancer + service_port: 4444 + + memory_request: 256Mi + memory_limit: 512Mi + cpu_request: 100m + cpu_limit: 500m + + mtls_enabled: true + +plugins: + - name: OPAPluginFilter + image: mcpgateway-opapluginfilter:latest + image_pull_policy: IfNotPresent + + replicas: 2 + service_type: ClusterIP + + memory_request: 128Mi + memory_limit: 256Mi + cpu_request: 50m + cpu_limit: 200m + + mtls_enabled: true + + plugin_overrides: + priority: 10 + mode: "enforce" + +infrastructure: + postgres: + enabled: true + storage_size: 20Gi + storage_class: fast-ssd + redis: + enabled: true + +certificates: + auto_generate: true +``` + +**Use case:** Production deployment with HA and resource limits + +**Deploy:** +```bash +# Deploy to Kubernetes +cforge gateway deploy examples/deployment-configs/deploy-k8s.yaml + +# Verify +kubectl get all -n mcp-gateway-prod + +# Check logs +kubectl logs -n mcp-gateway-prod -l app=mcp-gateway +``` + +--- + +### Example 4: Kubernetes (Build from Source) + +Building plugins from Git repositories in Kubernetes: + +```yaml +deployment: + type: kubernetes + namespace: mcp-gateway-dev + +gateway: + image: mcpgateway/mcpgateway:latest + +plugins: + - name: OPAPluginFilter + # Build from source + repo: https://github.com/terylt/mcp-context-forge.git + ref: feat/use_mtls_plugins + context: plugins/external/opa + containerfile: Containerfile + + # Push to registry (configure with env vars) + # See DOCKER_REGISTRY in deploy process + + replicas: 1 + mtls_enabled: true + +certificates: + auto_generate: true +``` + 
+**Deploy:** +```bash +# Build locally and push to registry +export DOCKER_REGISTRY=myregistry.io/myorg +cforge gateway build deploy-k8s-build.yaml + +# Deploy to Kubernetes +cforge gateway deploy deploy-k8s-build.yaml --skip-build +``` + +--- + +## mTLS Configuration Guide + +### Understanding mTLS in MCP Gateway + +**mTLS (Mutual TLS)** provides: +- **Encryption**: All gateway ↔ plugin traffic is encrypted +- **Authentication**: Both parties prove their identity +- **Authorization**: Only trusted certificates can communicate + +### Certificate Hierarchy + +``` +CA (Root Certificate Authority) +β”œβ”€β”€ Gateway Client Certificate +β”‚ └── Used by gateway to connect to plugins +└── Plugin Server Certificates (one per plugin) + └── Used by plugins to authenticate gateway +``` + +### Enabling mTLS + +**In your configuration:** + +```yaml +gateway: + mtls_enabled: true # Enable mTLS client + mtls_verify: true # Verify server certificates + mtls_check_hostname: false # Skip hostname verification (for localhost/IPs) + +plugins: + - name: MyPlugin + mtls_enabled: true # Enable mTLS server +``` + +### Certificate Generation + +**Automatic (recommended):** +```yaml +certificates: + auto_generate: true # Auto-generate during deploy + validity_days: 825 # ~2.3 years +``` + +**Manual:** +```bash +# Generate certificates explicitly +cforge gateway certs deploy.yaml + +# Certificates are created in: +# - certs/mcp/ca/ (CA) +# - certs/mcp/gateway/ (gateway client cert) +# - certs/mcp/plugins/*/ (plugin server certs) +``` + +### Environment Variables + +The deployment tool automatically sets these environment variables: + +**Gateway (client):** +```bash +PLUGINS_CLIENT_MTLS_CERTFILE=/certs/gateway/client.crt +PLUGINS_CLIENT_MTLS_KEYFILE=/certs/gateway/client.key +PLUGINS_CLIENT_MTLS_CA_BUNDLE=/certs/gateway/ca.crt +PLUGINS_CLIENT_MTLS_VERIFY=true +PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME=false +``` + +**Plugin (server):** +```bash +PLUGINS_SERVER_SSL_CERTFILE=/certs/server.crt 
+PLUGINS_SERVER_SSL_KEYFILE=/certs/server.key +PLUGINS_SERVER_SSL_CA_CERTS=/certs/ca.crt +PLUGINS_SERVER_SSL_CERT_REQS=2 # CERT_REQUIRED +``` + +### Troubleshooting mTLS + +**Problem: Certificate verification fails** + +Check certificate validity: +```bash +openssl x509 -in certs/mcp/gateway/client.crt -noout -dates +openssl x509 -in certs/mcp/plugins/MyPlugin/server.crt -noout -dates +``` + +**Problem: Hostname mismatch errors** + +Solution: Set `mtls_check_hostname: false` in gateway config, or use service DNS names + +**Problem: Connection refused** + +- Verify plugin has `mtls_enabled: true` +- Check plugin logs for certificate errors +- Ensure certificates are mounted correctly + +**Problem: Expired certificates** + +Regenerate: +```bash +rm -rf certs/ +cforge gateway certs deploy.yaml +``` + +Then redeploy to distribute new certificates. + +--- + +## Deployment Modes + +### Dagger Mode (Recommended) + +**What is Dagger?** +Dagger is a programmable CI/CD engine that runs pipelines in containers. It provides: +- **Reproducible builds**: Same results everywhere +- **Parallel execution**: Faster builds +- **Intelligent caching**: Only rebuild what changed +- **Cross-platform**: Works on any system with Docker + +**When to use:** +- βœ… Local development (fastest builds) +- βœ… CI/CD pipelines (GitHub Actions, GitLab CI, etc.) +- βœ… Team environments (consistent results) + +**Requirements:** +- Docker or compatible container runtime +- Dagger CLI (auto-installed with pip package) +- Internet connection (for first run) + +**Enable:** +```bash +# Auto-detected by default +cforge gateway deploy deploy.yaml + +# Or explicitly +export USE_DAGGER=true +cforge gateway deploy deploy.yaml +``` + +**Performance benefits:** +- 2-3x faster builds with caching +- Parallel plugin builds +- Efficient layer reuse + +--- + +### Plain Python Mode (Fallback) + +**What is it?** +Pure Python implementation using standard tools (`docker`, `kubectl`, `git`, etc.) 
+ +**When to use:** +- βœ… Environments without Dagger support +- βœ… Air-gapped networks +- βœ… Simple deployments +- βœ… Debugging/troubleshooting + +**Requirements:** +- Python 3.11+ +- Docker CLI +- `kubectl` (for Kubernetes deployments) +- `git` (for building from source) + +**Enable:** +```bash +# Force plain Python mode +cforge gateway --no-dagger deploy deploy.yaml +``` + +**Limitations:** +- Sequential builds (slower) +- Less sophisticated caching +- No parallel execution + +--- + +## CI/CD Integration + +### GitHub Actions + +```yaml +name: Deploy MCP Gateway + +on: + push: + branches: [main] + +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Install cforge + run: pip install -e . + + - name: Validate configuration + run: cforge gateway validate deploy/deploy-prod.yaml + + - name: Build containers + run: cforge gateway build deploy/deploy-prod.yaml + env: + DOCKER_REGISTRY: ${{ secrets.DOCKER_REGISTRY }} + + - name: Generate certificates + run: cforge gateway certs deploy/deploy-prod.yaml + + - name: Deploy to Kubernetes + run: cforge gateway deploy deploy/deploy-prod.yaml --skip-build + env: + KUBECONFIG: ${{ secrets.KUBECONFIG }} + + - name: Verify deployment + run: cforge gateway verify deploy/deploy-prod.yaml +``` + +--- + +### GitLab CI + +```yaml +stages: + - validate + - build + - deploy + +variables: + CONFIG_FILE: deploy/deploy-prod.yaml + +validate: + stage: validate + script: + - pip install -e . + - cforge gateway validate $CONFIG_FILE + +build: + stage: build + script: + - pip install -e . + - cforge gateway build $CONFIG_FILE + artifacts: + paths: + - deploy/ + +deploy: + stage: deploy + script: + - pip install -e . 
+ - cforge gateway deploy $CONFIG_FILE --skip-build + environment: + name: production + only: + - main +``` + +--- + +## Best Practices + +### Configuration Management + +βœ… **DO:** +- Version control your `deploy.yaml` +- Use Git tags/branches for plugin versions (`ref: v1.2.3`) +- Separate configs for dev/staging/prod +- Document custom `env_vars` in comments + +❌ **DON'T:** +- Hardcode secrets in YAML (use environment files) +- Use `ref: main` in production (pin versions) +- Commit generated certificates to Git + +### Environment Variables + +βœ… **DO:** +```bash +# Review and customize .env files after build +cforge gateway build deploy.yaml +# Edit deploy/env/.env.gateway +# Edit deploy/env/.env.PluginName +cforge gateway deploy deploy.yaml --skip-build +``` + +❌ **DON'T:** +```bash +# Deploy without reviewing environment +cforge gateway deploy deploy.yaml # May use default/insecure values +``` + +### Certificate Management + +βœ… **DO:** +- Let `cforge` auto-generate certificates +- Rotate certificates before expiry +- Use separate CAs for dev/staging/prod +- Backup CA private key securely + +❌ **DON'T:** +- Share certificates between environments +- Commit CA private key to Git +- Use expired certificates + +### Resource Limits + +βœ… **DO:** +```yaml +gateway: + memory_request: 256Mi + memory_limit: 512Mi # 2x request for burst capacity + cpu_request: 100m + cpu_limit: 500m # Allow bursting +``` + +❌ **DON'T:** +```yaml +gateway: + # Missing resource limits = unbounded usage + # OR + memory_limit: 256Mi # Too tight, may OOM +``` + +### High Availability + +βœ… **DO:** +```yaml +gateway: + replicas: 2 # Multiple replicas + service_type: LoadBalancer + +plugins: + - name: CriticalPlugin + replicas: 2 # HA for critical plugins +``` + +❌ **DON'T:** +```yaml +gateway: + replicas: 1 # Single point of failure in production +``` + +--- + +## Troubleshooting + +### Build Issues + +**Problem: Git clone fails** +``` +Error: Failed to clone repository +``` + 
+**Solution:**
+- Check `repo` URL is correct
+- Verify Git credentials/SSH keys
+- Ensure network connectivity
+- For private repos, configure Git auth
+
+---
+
+**Problem: Docker build fails**
+```
+Error: Build failed for plugin MyPlugin
+```
+
+**Solution:**
+1. Check `context` and `containerfile` paths
+2. Verify Containerfile syntax
+3. Review plugin repository structure
+4. Try building manually:
+   ```bash
+   git clone <repo-url>
+   cd <context-path>
+   docker build -f <containerfile> .
+   ```
+
+---
+
+### Deployment Issues
+
+**Problem: Pod/container fails to start**
+```
+Error: CrashLoopBackOff
+```
+
+**Solution:**
+1. Check logs:
+   ```bash
+   # Kubernetes
+   kubectl logs <pod-name> -n <namespace>
+
+   # Docker Compose
+   docker-compose -f deploy/docker-compose.yaml logs
+   ```
+2. Verify environment variables in `deploy/env/`
+3. Check resource limits (may be too low)
+4. Verify image was built/pulled correctly
+
+---
+
+**Problem: mTLS connection fails**
+```
+Error: SSL certificate verification failed
+```
+
+**Solution:**
+1. Regenerate certificates:
+   ```bash
+   rm -rf certs/
+   cforge gateway certs deploy.yaml
+   ```
+2. Redeploy to distribute new certs:
+   ```bash
+   cforge gateway deploy deploy.yaml --skip-build --skip-certs
+   ```
+3. Check certificate expiry:
+   ```bash
+   openssl x509 -in certs/mcp/gateway/client.crt -noout -dates
+   ```
+
+---
+
+### Verification Issues
+
+**Problem: Deployment verification timeout**
+```
+Error: Verification failed: timeout waiting for deployment
+```
+
+**Solution:**
+1. Increase timeout:
+   ```bash
+   cforge gateway verify deploy.yaml --timeout 600
+   ```
+2. Check pod/container status manually
+3. Review resource availability (CPU/memory)
+4. Check for image pull errors
+
+---
+
+## FAQ
+
+**Q: Can I use pre-built images instead of building from source?**
+
+A: Yes!
Just specify `image` instead of `repo`: +```yaml +plugins: + - name: MyPlugin + image: myorg/myplugin:v1.0.0 +``` + +--- + +**Q: How do I update a plugin to a new version?** + +A: Update the `ref` and redeploy: +```yaml +plugins: + - name: MyPlugin + repo: https://github.com/org/repo.git + ref: v2.0.0 # ← Update version +``` + +Then: +```bash +cforge gateway build deploy.yaml --plugin MyPlugin --no-cache +cforge gateway deploy deploy.yaml --skip-certs +``` + +--- + +**Q: Can I deploy only the gateway without plugins?** + +A: Yes, just omit the `plugins` section or use an empty array: +```yaml +plugins: [] +``` + +--- + +**Q: How do I add custom environment variables?** + +A: Two ways: + +**1. In YAML (committed to Git):** +```yaml +gateway: + env_vars: + CUSTOM_VAR: value +``` + +**2. In .env file (not committed):** +```bash +# deploy/env/.env.gateway +CUSTOM_VAR=value +``` + +--- + +**Q: Can I use cforge in a CI/CD pipeline?** + +A: Absolutely! See [CI/CD Integration](#cicd-integration) section above. 
+
+---
+
+**Q: How do I switch between Dagger and plain Python modes?**
+
+A:
+```bash
+# Force plain Python
+cforge gateway --no-dagger deploy deploy.yaml
+
+# Use Dagger (default if available)
+cforge gateway deploy deploy.yaml
+```
+
+---
+
+**Q: Where are the generated manifests stored?**
+
+A: Default: `deploy/` directory
+- `deploy/docker-compose.yaml` (Compose mode)
+- `deploy/manifests/` (Kubernetes mode)
+
+Custom location:
+```bash
+cforge gateway deploy deploy.yaml --output-dir ./my-deploy
+```
+
+---
+
+**Q: How do I access the gateway after deployment?**
+
+A:
+- **Docker Compose**: `http://localhost:<host_port>` (default: 4444)
+- **Kubernetes LoadBalancer**: Get external IP:
+  ```bash
+  kubectl get svc -n mcp-gateway
+  ```
+- **Kubernetes ClusterIP**: Port-forward:
+  ```bash
+  kubectl port-forward -n <namespace> svc/mcp-gateway 4444:4444
+  ```
+
+---
+
+## Additional Resources
+
+- **Main Documentation**: [ContextForge Documentation](/)
+- **Plugin Development**: [Plugin Framework Guide](/plugins/framework)
+- **mTLS Setup**: [mTLS Configuration Guide](/using/plugins/mtls)
+- **Example Configs**: [`examples/deployment-configs/`](https://github.com/terylt/mcp-context-forge/tree/main/examples/deployment-configs)
+- **Source Code**: [`mcpgateway/tools/builder/`](https://github.com/terylt/mcp-context-forge/tree/main/mcpgateway/tools/builder)
+
+---
+
+## Getting Help
+
+If you encounter issues:
+
+1. **Check logs**: Review detailed error messages
+2. **Validate config**: Run `cforge gateway validate deploy.yaml`
+3. **Dry-run**: Test with `cforge gateway deploy deploy.yaml --dry-run`
+4. **Verbose mode**: Use `cforge gateway -v <command>` for detailed output
+5. **Debug mode**: Set `export MCP_DEBUG=1` for stack traces
+6.
**GitHub Issues**: [Report bugs and request features](https://github.com/terylt/mcp-context-forge/issues) + +--- diff --git a/examples/deployment-configs/deploy-compose.mtls.yaml b/examples/deployment-configs/deploy-compose.mtls.yaml new file mode 100644 index 000000000..a33fa1757 --- /dev/null +++ b/examples/deployment-configs/deploy-compose.mtls.yaml @@ -0,0 +1,99 @@ +# MCP Stack - Local Docker Compose Test Configuration +# This config deploys MCP Gateway + external plugins locally with mTLS + +deployment: + type: compose + project_name: mcp-stack-test + +# MCP Gateway configuration +gateway: + # Use local gateway image (build first with: make container-build) + image: mcpgateway/mcpgateway:latest + + port: 4444 + host_port: 4444 # Expose on localhost:4444 + + # Environment configuration + # env_file will auto-detect deploy/env/.env.gateway if not specified + env_vars: + LOG_LEVEL: DEBUG + HOST: 0.0.0.0 + PORT: 4444 + + # Enable features + MCPGATEWAY_UI_ENABLED: "true" + MCPGATEWAY_ADMIN_API_ENABLED: "true" + MCPGATEWAY_A2A_ENABLED: "true" + + # Auth + AUTH_REQUIRED: "false" # Disabled for easy testing + + # Federation + MCPGATEWAY_ENABLE_FEDERATION: "false" + + # mTLS client configuration (gateway connects to plugins) + mtls_enabled: true + mtls_verify: true # Verify server certificates (default: true) + mtls_check_hostname: false # Don't verify hostname (default: false for compose) + + # Note: plugins-config.yaml is auto-generated from the plugins section below + # No need to specify config_file anymore! + +# External plugins +plugins: + # OPA Plugin Filter + - name: OPAPluginFilter + + # Build from GitHub repository + repo: https://github.com/terylt/mcp-context-forge.git + ref: feat/use_mtls_plugins + context: plugins/external/opa + containerfile: Containerfile + + # Defaults: port=8000, host_port auto-assigned (8000, 8001, ...) 
+ expose_port: true # Expose for testing + + # env_file will auto-detect deploy/env/.env.OPAPluginFilter if not specified + env_vars: + LOG_LEVEL: DEBUG + + # OPA-specific settings + OPA_POLICY_PATH: /app/policies + + # mTLS server configuration + mtls_enabled: true + + # Plugin manager overrides (client-side configuration) + plugin_overrides: + priority: 10 + mode: "enforce" + description: "OPA policy enforcement for tool and resource filtering" + tags: ["security", "policy", "opa"] + + # LLMGuard Plugin (content filtering) + #- name: LLMGuardPlugin + + # Build from GitHub repository + # repo: https://github.com/terylt/mcp-context-forge.git + # ref: feat/use_mtls_plugins + # context: plugins/external/llmguard + # containerfile: Containerfile + # target: builder # Build only the 'builder' stage (multi-stage build) + + # Defaults: port=8000, host_port auto-assigned (8000, 8001, ...) + # port: 8001 + # expose_port: true + + # env_file will auto-detect deploy/env/.env.LLMGuardPlugin if not specified + # env_vars: + # LOG_LEVEL: DEBUG + + # mtls_enabled: true + +# mTLS Certificate configuration +certificates: + validity_days: 825 + auto_generate: true + ca_path: ./certs/mcp/ca + gateway_path: ./certs/mcp/gateway + plugins_path: ./certs/mcp/plugins diff --git a/examples/deployment-configs/deploy-compose.yaml b/examples/deployment-configs/deploy-compose.yaml new file mode 100644 index 000000000..d1c3c89fa --- /dev/null +++ b/examples/deployment-configs/deploy-compose.yaml @@ -0,0 +1,96 @@ +# MCP Stack - Local Docker Compose Test Configuration +# This config deploys MCP Gateway + external plugins locally with mTLS + +deployment: + type: compose + project_name: mcp-stack-test + +# MCP Gateway configuration +gateway: + # Use local gateway image (build first with: make container-build) + image: mcpgateway/mcpgateway:latest + + port: 4444 + host_port: 4444 # Expose on localhost:4444 + + # Environment configuration + # env_file will auto-detect deploy/env/.env.gateway if not 
specified + env_vars: + LOG_LEVEL: DEBUG + HOST: 0.0.0.0 + PORT: 4444 + # Enable features + MCPGATEWAY_UI_ENABLED: "true" + MCPGATEWAY_ADMIN_API_ENABLED: "true" + MCPGATEWAY_A2A_ENABLED: "true" + + # Auth + AUTH_REQUIRED: "false" # Disabled for easy testing + + # Federation + MCPGATEWAY_ENABLE_FEDERATION: "false" + + # mTLS client configuration (gateway connects to plugins) + mtls_enabled: false + + # Note: plugins-config.yaml is auto-generated from the plugins section below + # No need to specify config_file anymore! + +# External plugins +plugins: + # OPA Plugin Filter + - name: OPAPluginFilter + + # Build from GitHub repository + repo: https://github.com/terylt/mcp-context-forge.git + ref: feat/use_mtls_plugins + context: plugins/external/opa + containerfile: Containerfile + + # Defaults: port=8000, host_port auto-assigned (8000, 8001, ...) + expose_port: true # Expose for testing + + # env_file will auto-detect deploy/env/.env.OPAPluginFilter if not specified + env_vars: + LOG_LEVEL: DEBUG + + # OPA-specific settings + OPA_POLICY_PATH: /app/policies + + # mTLS server configuration + mtls_enabled: false + + # Plugin manager overrides (client-side configuration) + plugin_overrides: + priority: 10 + mode: "enforce" + description: "OPA policy enforcement for tool and resource filtering" + tags: ["security", "policy", "opa"] + + # LLMGuard Plugin (content filtering) + #- name: LLMGuardPlugin + + # # Build from GitHub repository + # repo: https://github.com/terylt/mcp-context-forge.git + # ref: feat/use_mtls_plugins + # context: plugins/external/llmguard + # containerfile: Containerfile + # target: builder # Build only the 'builder' stage (multi-stage build) + + # Defaults: port=8000, host_port auto-assigned (8000, 8001, ...) 
+ # port: 8001 + # expose_port: true + + # env_file will auto-detect deploy/env/.env.LLMGuardPlugin if not specified + # env_vars: + # LOG_LEVEL: DEBUG + + # mtls_enabled: false + +# mTLS Certificate configuration +certificates: + validity_days: 825 + auto_generate: true + ca_path: ./certs/mcp/ca + gateway_path: ./certs/mcp/gateway + plugins_path: ./certs/mcp/plugins diff --git a/examples/deployment-configs/deploy-k8s.mtls.yaml b/examples/deployment-configs/deploy-k8s.mtls.yaml new file mode 100644 index 000000000..32e653406 --- /dev/null +++ b/examples/deployment-configs/deploy-k8s.mtls.yaml @@ -0,0 +1,84 @@ +# MCP Stack - Kubernetes Test Configuration +# Simple test config using pre-built images + +deployment: + type: kubernetes + namespace: mcp-gateway-test + +# MCP Gateway configuration +gateway: + # Use pre-built gateway image + image: mcpgateway/mcpgateway:latest + image_pull_policy: IfNotPresent + + port: 4444 + + # Service configuration + service_type: ClusterIP + service_port: 4444 + + # Resource limits + replicas: 1 + memory_request: 256Mi + memory_limit: 512Mi + cpu_request: 100m + cpu_limit: 500m + + # Environment configuration + env_vars: + LOG_LEVEL: DEBUG + HOST: 0.0.0.0 + PORT: 4444 + MCPGATEWAY_UI_ENABLED: "true" + MCPGATEWAY_ADMIN_API_ENABLED: "true" + MCPGATEWAY_A2A_ENABLED: "true" + AUTH_REQUIRED: "false" + MCPGATEWAY_ENABLE_FEDERATION: "false" + + # mTLS client configuration (gateway connects to plugins) + mtls_enabled: true + mtls_verify: true # Verify server certificates (default: true) + mtls_check_hostname: false # Don't verify hostname (default: false for compose) + +# External plugins +plugins: + # OPA Plugin Filter + - name: OPAPluginFilter + + # Use pre-built image for faster testing + image: mcpgateway-opapluginfilter:latest + image_pull_policy: IfNotPresent + + port: 8000 + + # Service configuration + service_type: ClusterIP + service_port: 8000 + + # Resource limits + replicas: 1 + memory_request: 128Mi + memory_limit: 256Mi + 
cpu_request: 50m + cpu_limit: 200m + + env_vars: + LOG_LEVEL: DEBUG + OPA_POLICY_PATH: /app/policies + + mtls_enabled: true + + # Plugin manager overrides + plugin_overrides: + priority: 10 + mode: "enforce" + description: "OPA policy enforcement" + tags: ["security", "policy", "opa"] + +# mTLS Certificate configuration +certificates: + validity_days: 825 + auto_generate: true + ca_path: ./certs/mcp/ca + gateway_path: ./certs/mcp/gateway + plugins_path: ./certs/mcp/plugins diff --git a/examples/deployment-configs/deploy-k8s.yaml b/examples/deployment-configs/deploy-k8s.yaml new file mode 100644 index 000000000..518e61bd4 --- /dev/null +++ b/examples/deployment-configs/deploy-k8s.yaml @@ -0,0 +1,82 @@ +# MCP Stack - Kubernetes Test Configuration +# Simple test config using pre-built images + +deployment: + type: kubernetes + namespace: mcp-gateway-test + +# MCP Gateway configuration +gateway: + # Use pre-built gateway image + image: mcpgateway/mcpgateway:latest + image_pull_policy: IfNotPresent + + port: 4444 + + # Service configuration + service_type: ClusterIP + service_port: 4444 + + # Resource limits + replicas: 1 + memory_request: 256Mi + memory_limit: 512Mi + cpu_request: 100m + cpu_limit: 500m + + # Environment configuration + env_vars: + LOG_LEVEL: DEBUG + HOST: 0.0.0.0 + PORT: 4444 + MCPGATEWAY_UI_ENABLED: "true" + MCPGATEWAY_ADMIN_API_ENABLED: "true" + MCPGATEWAY_A2A_ENABLED: "true" + AUTH_REQUIRED: "false" + MCPGATEWAY_ENABLE_FEDERATION: "false" + + # mTLS disabled for simplicity + mtls_enabled: false + +# External plugins +plugins: + # OPA Plugin Filter + - name: OPAPluginFilter + + # Use pre-built image for faster testing + image: mcpgateway-opapluginfilter:latest + image_pull_policy: IfNotPresent + + port: 8000 + + # Service configuration + service_type: ClusterIP + service_port: 8000 + + # Resource limits + replicas: 1 + memory_request: 128Mi + memory_limit: 256Mi + cpu_request: 50m + cpu_limit: 200m + + env_vars: + LOG_LEVEL: DEBUG + OPA_POLICY_PATH: 
/app/policies + + mtls_enabled: false + + # Plugin manager overrides + plugin_overrides: + priority: 10 + mode: "enforce" + description: "OPA policy enforcement" + tags: ["security", "policy", "opa"] + +# mTLS Certificate configuration +certificates: + validity_days: 825 + auto_generate: true + ca_path: ./certs/mcp/ca + gateway_path: ./certs/mcp/gateway + plugins_path: ./certs/mcp/plugins From 50e9da532eaecca22a9bf2262423ad9ce41b0592 Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Fri, 10 Oct 2025 16:21:21 -0600 Subject: [PATCH 18/35] fix: docstring issues Signed-off-by: Teryl Taylor --- mcpgateway/tools/builder/cli.py | 72 ++++++++++++++++++++--- mcpgateway/tools/builder/common.py | 9 +++ mcpgateway/tools/builder/dagger_deploy.py | 41 +++++++++++++ mcpgateway/tools/builder/python_deploy.py | 47 ++++++++++++++- mcpgateway/tools/builder/schema.py | 30 +++++++++- 5 files changed, 187 insertions(+), 12 deletions(-) diff --git a/mcpgateway/tools/builder/cli.py b/mcpgateway/tools/builder/cli.py index b96809e7f..fa389b1f8 100644 --- a/mcpgateway/tools/builder/cli.py +++ b/mcpgateway/tools/builder/cli.py @@ -67,6 +67,11 @@ def cli( """MCP Stack deployment tool Deploys MCP Gateway + external plugins from a single YAML configuration. 
+ + Args: + ctx: Typer context object + no_dagger: Force plain Python mode instead of Dagger + verbose: Enable verbose output """ ctx.ensure_object(dict) ctx.obj["verbose"] = verbose @@ -85,7 +90,12 @@ def cli( @app.command() def validate(ctx: typer.Context, config_file: Annotated[Path, typer.Argument(help="The deployment configuration file.")]): - """Validate mcp-stack.yaml configuration""" + """Validate mcp-stack.yaml configuration + + Args: + ctx: Typer context object + config_file: Path to the deployment configuration file + """ impl = ctx.obj["deployer"] try: @@ -105,7 +115,16 @@ def build( no_cache: Annotated[bool, typer.Option("--no-cache", help="Disable build cache")] = False, copy_env_templates: Annotated[bool, typer.Option("--copy-env-templates", help="Copy .env.template files from plugin repos")] = True, ): - """Build containers""" + """Build containers + + Args: + ctx: Typer context object + config_file: Path to the deployment configuration file + plugins_only: Only build plugin containers, skip gateway + plugin: List of specific plugin names to build + no_cache: Disable build cache + copy_env_templates: Copy .env.template files from plugin repos + """ impl = ctx.obj["deployer"] try: @@ -122,7 +141,12 @@ def build( @app.command() def certs(ctx: typer.Context, config_file: Annotated[Path, typer.Argument(help="The deployment configuration file")]): - """Generate mTLS certificates""" + """Generate mTLS certificates + + Args: + ctx: Typer context object + config_file: Path to the deployment configuration file + """ impl = ctx.obj["deployer"] try: @@ -142,7 +166,16 @@ def deploy( skip_build: Annotated[bool, typer.Option("--skip-build", help="Skip building containers")] = False, skip_certs: Annotated[bool, typer.Option("--skip-certs", help="Skip certificate generation")] = False, ): - """Deploy MCP stack""" + """Deploy MCP stack + + Args: + ctx: Typer context object + config_file: Path to the deployment configuration file + output_dir: Custom output directory 
for manifests + dry_run: Generate manifests without deploying + skip_build: Skip building containers + skip_certs: Skip certificate generation + """ impl = ctx.obj["deployer"] try: @@ -163,7 +196,14 @@ def verify( wait: Annotated[bool, typer.Option("--wait", help="Wait for deployment to be ready")] = True, timeout: Annotated[int, typer.Option("--timeout", help="Wait timeout in seconds")] = 300, ): - """Verify deployment health""" + """Verify deployment health + + Args: + ctx: Typer context object + config_file: Path to the deployment configuration file + wait: Wait for deployment to be ready + timeout: Wait timeout in seconds + """ impl = ctx.obj["deployer"] try: @@ -180,7 +220,13 @@ def destroy( config_file: Annotated[Path, typer.Argument(help="The deployment configuration file")], force: Annotated[bool, typer.Option("--force", help="Force destruction without confirmation")] = False, ): - """Destroy deployed MCP stack""" + """Destroy deployed MCP stack + + Args: + ctx: Typer context object + config_file: Path to the deployment configuration file + force: Force destruction without confirmation + """ impl = ctx.obj["deployer"] if not force: @@ -210,7 +256,13 @@ def generate( config_file: Annotated[Path, typer.Argument(help="The deployment configuration file")], output: Annotated[Optional[Path], typer.Option("--output", "-o", help="Output directory for manifests")] = None, ): - """Generate deployment manifests (k8s or compose)""" + """Generate deployment manifests (k8s or compose) + + Args: + ctx: Typer context object + config_file: Path to the deployment configuration file + output: Output directory for manifests + """ impl = ctx.obj["deployer"] try: @@ -222,7 +274,11 @@ def generate( def main(): - """Main entry point""" + """Main entry point + + Raises: + Exception: Any unhandled exception from subcommands (re-raised in debug mode) + """ try: app(obj={}) except KeyboardInterrupt: diff --git a/mcpgateway/tools/builder/common.py b/mcpgateway/tools/builder/common.py 
index 880d407dd..94f723d3f 100644 --- a/mcpgateway/tools/builder/common.py +++ b/mcpgateway/tools/builder/common.py @@ -86,6 +86,9 @@ def generate_plugin_config(config: Dict[str, Any], output_dir: Path, verbose: bo Returns: Path to generated plugins-config.yaml file + + Raises: + FileNotFoundError: If template directory not found """ deployment_type = config["deployment"]["type"] @@ -157,6 +160,9 @@ def generate_kubernetes_manifests(config: Dict[str, Any], output_dir: Path, verb config: Parsed mcp-stack.yaml configuration output_dir: Output directory for manifests verbose: Print verbose output + + Raises: + FileNotFoundError: If template directory not found """ # Load templates @@ -307,6 +313,9 @@ def generate_compose_manifests(config: Dict[str, Any], output_dir: Path, verbose config: Parsed mcp-stack.yaml configuration output_dir: Output directory for manifests verbose: Print verbose output + + Raises: + FileNotFoundError: If template directory not found """ # Load templates diff --git a/mcpgateway/tools/builder/dagger_deploy.py b/mcpgateway/tools/builder/dagger_deploy.py index 8cb9e6d76..7fae282ba 100644 --- a/mcpgateway/tools/builder/dagger_deploy.py +++ b/mcpgateway/tools/builder/dagger_deploy.py @@ -59,6 +59,9 @@ async def build(self, config_file: str, plugins_only: bool = False, specific_plu specific_plugins: List of specific plugin names to build no_cache: Disable Dagger cache copy_env_templates: Copy .env.template files from cloned repos + + Raises: + Exception: If build fails for any component """ config = load_config(config_file) @@ -129,6 +132,10 @@ async def generate_certificates(self, config_file: str) -> None: Args: config_file: Path to mcp-stack.yaml + + Raises: + dagger.ExecError: If certificate generation command fails + dagger.QueryError: If Dagger query fails """ config = load_config(config_file) @@ -192,6 +199,11 @@ async def deploy(self, config_file: str, dry_run: bool = False, skip_build: bool skip_build: Skip building containers skip_certs: 
Skip certificate generation output_dir: Output directory for manifests (default: ./deploy) + + Raises: + ValueError: If unsupported deployment type specified + dagger.ExecError: If deployment command fails + dagger.QueryError: If Dagger query fails """ config = load_config(config_file) @@ -299,6 +311,9 @@ def generate_manifests(self, config_file: str, output_dir: Optional[str] = None) Returns: Path to generated manifests directory + + Raises: + ValueError: If unsupported deployment type specified """ config = load_config(config_file) deployment_type = config["deployment"]["type"] @@ -337,6 +352,10 @@ async def _build_component_with_dagger(self, component: Dict[str, Any], componen component_name: Name of the component (gateway or plugin name) no_cache: Disable cache copy_env_templates: Copy .env.template from repo if it exists + + Raises: + ValueError: If component has no repo field + Exception: If build or export fails """ repo = component.get("repo") @@ -411,6 +430,9 @@ async def _deploy_kubernetes(self, manifests_dir: Path) -> None: """Deploy to Kubernetes using kubectl. Uses shared deploy_kubernetes() from common.py to avoid code duplication. + + Args: + manifests_dir: Path to directory containing Kubernetes manifests """ deploy_kubernetes(manifests_dir, verbose=self.verbose) @@ -418,6 +440,9 @@ async def _deploy_compose(self, manifests_dir: Path) -> None: """Deploy using Docker Compose. Uses shared deploy_compose() from common.py to avoid code duplication. + + Args: + manifests_dir: Path to directory containing compose manifest """ compose_file = manifests_dir / "docker-compose.yaml" deploy_compose(compose_file, verbose=self.verbose) @@ -426,6 +451,11 @@ async def _verify_kubernetes(self, config: Dict[str, Any], wait: bool = False, t """Verify Kubernetes deployment health. Uses shared verify_kubernetes() from common.py to avoid code duplication. 
+ + Args: + config: Parsed configuration dict + wait: Wait for pods to be ready + timeout: Wait timeout in seconds """ namespace = config["deployment"].get("namespace", "mcp-gateway") output = verify_kubernetes(namespace, wait=wait, timeout=timeout, verbose=self.verbose) @@ -435,6 +465,11 @@ async def _verify_compose(self, config: Dict[str, Any], wait: bool = False, time """Verify Docker Compose deployment health. Uses shared verify_compose() from common.py to avoid code duplication. + + Args: + config: Parsed configuration dict + wait: Wait for containers to be ready + timeout: Wait timeout in seconds """ _ = config, wait, timeout # Reserved for future use # Use the same manifests directory as generate_manifests @@ -448,6 +483,9 @@ async def _destroy_kubernetes(self, config: Dict[str, Any]) -> None: """Destroy Kubernetes deployment. Uses shared destroy_kubernetes() from common.py to avoid code duplication. + + Args: + config: Parsed configuration dict """ _ = config # Reserved for future use (namespace, labels, etc.) # Use the same manifests directory as generate_manifests @@ -459,6 +497,9 @@ async def _destroy_compose(self, config: Dict[str, Any]) -> None: """Destroy Docker Compose deployment. Uses shared destroy_compose() from common.py to avoid code duplication. + + Args: + config: Parsed configuration dict """ _ = config # Reserved for future use (project name, networks, etc.) 
# Use the same manifests directory as generate_manifests diff --git a/mcpgateway/tools/builder/python_deploy.py b/mcpgateway/tools/builder/python_deploy.py index 5a23f05c9..42369bce7 100644 --- a/mcpgateway/tools/builder/python_deploy.py +++ b/mcpgateway/tools/builder/python_deploy.py @@ -59,6 +59,9 @@ async def build(self, config_file: str, plugins_only: bool = False, specific_plu specific_plugins: List of specific plugin names to build no_cache: Disable build cache copy_env_templates: Copy .env.template files from cloned repos + + Raises: + Exception: If build fails for any component """ config = load_config(config_file) @@ -128,6 +131,9 @@ async def generate_certificates(self, config_file: str) -> None: Args: config_file: Path to mcp-stack.yaml + + Raises: + RuntimeError: If make command not found """ config = load_config(config_file) @@ -162,6 +168,9 @@ async def deploy(self, config_file: str, dry_run: bool = False, skip_build: bool skip_build: Skip building containers skip_certs: Skip certificate generation output_dir: Output directory for manifests (default: ./deploy) + + Raises: + ValueError: If unsupported deployment type specified """ config = load_config(config_file) @@ -242,6 +251,9 @@ def generate_manifests(self, config_file: str, output_dir: Optional[str] = None) Returns: Path to generated manifests directory + + Raises: + ValueError: If unsupported deployment type specified """ config = load_config(config_file) deployment_type = config["deployment"]["type"] @@ -273,7 +285,14 @@ def generate_manifests(self, config_file: str, output_dir: Optional[str] = None) # Private helper methods def _detect_container_runtime(self) -> str: - """Detect available container runtime (docker or podman).""" + """Detect available container runtime (docker or podman). 
+ + Returns: + Name of available runtime "docker" or "podman" + + Raises: + RuntimeError: If no container runtime found + """ if shutil.which("docker"): return "docker" elif shutil.which("podman"): @@ -310,6 +329,10 @@ def _build_component(self, component: Dict[str, Any], component_name: str, no_ca component_name: Name of the component (gateway or plugin name) no_cache: Disable cache copy_env_templates: Copy .env.template from repo if it exists + + Raises: + ValueError: If component has no repo field + FileNotFoundError: If build context or containerfile not found """ repo = component.get("repo") @@ -381,6 +404,9 @@ def _deploy_kubernetes(self, manifests_dir: Path) -> None: """Deploy to Kubernetes using kubectl. Uses shared deploy_kubernetes() from common.py to avoid code duplication. + + Args: + manifests_dir: Path to directory containing Kubernetes manifests """ deploy_kubernetes(manifests_dir, verbose=self.verbose) @@ -388,6 +414,9 @@ def _deploy_compose(self, manifests_dir: Path) -> None: """Deploy using Docker Compose. Uses shared deploy_compose() from common.py to avoid code duplication. + + Args: + manifests_dir: Path to directory containing compose manifest """ compose_file = manifests_dir / "docker-compose.yaml" deploy_compose(compose_file, verbose=self.verbose) @@ -396,6 +425,11 @@ def _verify_kubernetes(self, config: Dict[str, Any], wait: bool = False, timeout """Verify Kubernetes deployment health. Uses shared verify_kubernetes() from common.py to avoid code duplication. + + Args: + config: Parsed configuration dict + wait: Wait for pods to be ready + timeout: Wait timeout in seconds """ namespace = config["deployment"].get("namespace", "mcp-gateway") output = verify_kubernetes(namespace, wait=wait, timeout=timeout, verbose=self.verbose) @@ -405,6 +439,11 @@ def _verify_compose(self, config: Dict[str, Any], wait: bool = False, timeout: i """Verify Docker Compose deployment health. Uses shared verify_compose() from common.py to avoid code duplication. 
+ + Args: + config: Parsed configuration dict + wait: Wait for containers to be ready + timeout: Wait timeout in seconds """ _ = config, wait, timeout # Reserved for future use # Use the same manifests directory as generate_manifests @@ -418,6 +457,9 @@ def _destroy_kubernetes(self, config: Dict[str, Any]) -> None: """Destroy Kubernetes deployment. Uses shared destroy_kubernetes() from common.py to avoid code duplication. + + Args: + config: Parsed configuration dict """ _ = config # Reserved for future use (namespace, labels, etc.) # Use the same manifests directory as generate_manifests @@ -429,6 +471,9 @@ def _destroy_compose(self, config: Dict[str, Any]) -> None: """Destroy Docker Compose deployment. Uses shared destroy_compose() from common.py to avoid code duplication. + + Args: + config: Parsed configuration dict """ _ = config # Reserved for future use (project name, networks, etc.) # Use the same manifests directory as generate_manifests diff --git a/mcpgateway/tools/builder/schema.py b/mcpgateway/tools/builder/schema.py index 2892b8f5b..43f77d77a 100644 --- a/mcpgateway/tools/builder/schema.py +++ b/mcpgateway/tools/builder/schema.py @@ -54,7 +54,11 @@ class BuildableConfig(BaseModel): mtls_enabled: Optional[bool] = Field(True, description="Enable mTLS") def model_post_init(self, __context: Any) -> None: - """Validate that either image or repo is specified""" + """Validate that either image or repo is specified + + Raises: + ValueError: If neither image nor repo is specified + """ if not self.image and not self.repo: component_type = self.__class__.__name__.replace("Config", "") raise ValueError(f"{component_type} must specify either 'image' or 'repo'") @@ -94,7 +98,17 @@ class PluginConfig(BuildableConfig): @field_validator("name") @classmethod def validate_name(cls, v: str) -> str: - """Validate plugin name is non-empty""" + """Validate plugin name is non-empty + + Args: + v: Plugin name value to validate + + Returns: + Validated plugin name + + Raises: 
+ ValueError: If plugin name is empty or whitespace only + """ if not v or not v.strip(): raise ValueError("Plugin name cannot be empty") return v @@ -148,7 +162,17 @@ class MCPStackConfig(BaseModel): @field_validator("plugins") @classmethod def validate_plugin_names_unique(cls, v: List[PluginConfig]) -> List[PluginConfig]: - """Ensure plugin names are unique""" + """Ensure plugin names are unique + + Args: + v: List of plugin configurations to validate + + Returns: + Validated list of plugin configurations + + Raises: + ValueError: If duplicate plugin names are found + """ names = [p.name for p in v] if len(names) != len(set(names)): duplicates = [name for name in names if names.count(name) > 1] From 92e564a1121e24bd46c61d48d78e78f3ef111675 Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Fri, 10 Oct 2025 16:43:15 -0600 Subject: [PATCH 19/35] tests: added unit tests and more commenting. Signed-off-by: Teryl Taylor --- mcpgateway/tools/builder/dagger_deploy.py | 5 + mcpgateway/tools/builder/python_deploy.py | 5 + tests/unit/mcpgateway/tools/__init__.py | 6 + .../unit/mcpgateway/tools/builder/__init__.py | 6 + .../mcpgateway/tools/builder/test_schema.py | 330 ++++++++++++++++++ 5 files changed, 352 insertions(+) create mode 100644 tests/unit/mcpgateway/tools/__init__.py create mode 100644 tests/unit/mcpgateway/tools/builder/__init__.py create mode 100644 tests/unit/mcpgateway/tools/builder/test_schema.py diff --git a/mcpgateway/tools/builder/dagger_deploy.py b/mcpgateway/tools/builder/dagger_deploy.py index 7fae282ba..836e5e3a0 100644 --- a/mcpgateway/tools/builder/dagger_deploy.py +++ b/mcpgateway/tools/builder/dagger_deploy.py @@ -48,6 +48,11 @@ class MCPStackDagger(CICDModule): """Dagger-based implementation of MCP Stack deployment.""" def __init__(self, verbose: bool = False): + """Initialize MCPStackDagger instance. 
+ + Args: + verbose: Enable verbose output + """ super().__init__(verbose) async def build(self, config_file: str, plugins_only: bool = False, specific_plugins: Optional[List[str]] = None, no_cache: bool = False, copy_env_templates: bool = False) -> None: diff --git a/mcpgateway/tools/builder/python_deploy.py b/mcpgateway/tools/builder/python_deploy.py index 42369bce7..78d8a9292 100644 --- a/mcpgateway/tools/builder/python_deploy.py +++ b/mcpgateway/tools/builder/python_deploy.py @@ -45,6 +45,11 @@ class MCPStackPython(CICDModule): """Plain Python implementation of MCP Stack deployment.""" def __init__(self, verbose: bool = False): + """Initialize MCPStackPython instance. + + Args: + verbose: Enable verbose output + """ super().__init__(verbose) # Detect container runtime (docker or podman) diff --git a/tests/unit/mcpgateway/tools/__init__.py b/tests/unit/mcpgateway/tools/__init__.py new file mode 100644 index 000000000..eee1aa024 --- /dev/null +++ b/tests/unit/mcpgateway/tools/__init__.py @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +"""Location: ./tests/unit/mcpgateway/tools/__init__.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor +""" diff --git a/tests/unit/mcpgateway/tools/builder/__init__.py b/tests/unit/mcpgateway/tools/builder/__init__.py new file mode 100644 index 000000000..e63d648ed --- /dev/null +++ b/tests/unit/mcpgateway/tools/builder/__init__.py @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +"""Location: ./tests/unit/mcpgateway/tools/builder/__init__.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor +""" diff --git a/tests/unit/mcpgateway/tools/builder/test_schema.py b/tests/unit/mcpgateway/tools/builder/test_schema.py new file mode 100644 index 000000000..86a66e3bc --- /dev/null +++ b/tests/unit/mcpgateway/tools/builder/test_schema.py @@ -0,0 +1,330 @@ +# -*- coding: utf-8 -*- +"""Location: ./tests/unit/mcpgateway/tools/builder/test_schema.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 
+Authors: Teryl Taylor + +Unit tests for builder schema validation (Pydantic models). +""" + +# Third-Party +import pytest +from pydantic import ValidationError + +# First-Party +from mcpgateway.tools.builder.schema import ( + BuildableConfig, + CertificatesConfig, + DeploymentConfig, + GatewayConfig, + InfrastructureConfig, + MCPStackConfig, + PluginConfig, + PostgresConfig, + RedisConfig, +) + + +class TestDeploymentConfig: + """Test DeploymentConfig validation.""" + + def test_valid_kubernetes_deployment(self): + """Test valid Kubernetes deployment configuration.""" + config = DeploymentConfig(type="kubernetes", namespace="test-ns") + assert config.type == "kubernetes" + assert config.namespace == "test-ns" + assert config.project_name is None + + def test_valid_compose_deployment(self): + """Test valid Docker Compose deployment configuration.""" + config = DeploymentConfig(type="compose", project_name="test-project") + assert config.type == "compose" + assert config.project_name == "test-project" + assert config.namespace is None + + def test_invalid_deployment_type(self): + """Test invalid deployment type.""" + with pytest.raises(ValidationError): + DeploymentConfig(type="invalid") + + +class TestGatewayConfig: + """Test GatewayConfig validation.""" + + def test_gateway_with_image(self): + """Test gateway config with pre-built image.""" + config = GatewayConfig(image="mcpgateway:latest", port=4444) + assert config.image == "mcpgateway:latest" + assert config.port == 4444 + assert config.repo is None + + def test_gateway_with_repo(self): + """Test gateway config with repository build.""" + config = GatewayConfig( + repo="https://github.com/org/repo.git", + ref="main", + context=".", + port=4444 + ) + assert config.repo == "https://github.com/org/repo.git" + assert config.ref == "main" + assert config.image is None + + def test_gateway_without_image_or_repo(self): + """Test that gateway requires either image or repo.""" + with pytest.raises(ValueError, 
match="must specify either 'image' or 'repo'"): + GatewayConfig(port=4444) + + def test_gateway_defaults(self): + """Test gateway default values.""" + config = GatewayConfig(image="test:latest") + assert config.port == 4444 + assert config.mtls_enabled is True + assert config.ref == "main" + assert config.context == "." + assert config.containerfile == "Containerfile" + + +class TestPluginConfig: + """Test PluginConfig validation.""" + + def test_plugin_with_image(self): + """Test plugin config with pre-built image.""" + config = PluginConfig(name="TestPlugin", image="test:latest") + assert config.name == "TestPlugin" + assert config.image == "test:latest" + assert config.repo is None + + def test_plugin_with_repo(self): + """Test plugin config with repository build.""" + config = PluginConfig( + name="TestPlugin", + repo="https://github.com/org/plugin.git", + ref="v1.0.0", + context="plugins/test" + ) + assert config.name == "TestPlugin" + assert config.repo == "https://github.com/org/plugin.git" + assert config.ref == "v1.0.0" + assert config.context == "plugins/test" + + def test_plugin_without_name(self): + """Test that plugin requires name.""" + with pytest.raises(ValidationError): + PluginConfig(image="test:latest") + + def test_plugin_empty_name(self): + """Test that plugin name cannot be empty.""" + with pytest.raises(ValidationError, match="Plugin name cannot be empty"): + PluginConfig(name="", image="test:latest") + + def test_plugin_whitespace_name(self): + """Test that plugin name cannot be whitespace only.""" + with pytest.raises(ValidationError, match="Plugin name cannot be empty"): + PluginConfig(name=" ", image="test:latest") + + def test_plugin_defaults(self): + """Test plugin default values.""" + config = PluginConfig(name="TestPlugin", image="test:latest") + assert config.port == 8000 + assert config.expose_port is False + assert config.mtls_enabled is True + assert config.plugin_overrides == {} + + def test_plugin_overrides(self): + """Test 
plugin with overrides.""" + config = PluginConfig( + name="TestPlugin", + image="test:latest", + plugin_overrides={ + "priority": 10, + "mode": "enforce", + "tags": ["security", "filter"] + } + ) + assert config.plugin_overrides["priority"] == 10 + assert config.plugin_overrides["mode"] == "enforce" + assert config.plugin_overrides["tags"] == ["security", "filter"] + + +class TestCertificatesConfig: + """Test CertificatesConfig validation.""" + + def test_certificates_defaults(self): + """Test certificates default values.""" + config = CertificatesConfig() + assert config.validity_days == 825 + assert config.auto_generate is True + assert config.ca_path == "./certs/mcp/ca" + assert config.gateway_path == "./certs/mcp/gateway" + assert config.plugins_path == "./certs/mcp/plugins" + + def test_certificates_custom_values(self): + """Test certificates with custom values.""" + config = CertificatesConfig( + validity_days=365, + auto_generate=False, + ca_path="/custom/ca", + gateway_path="/custom/gateway", + plugins_path="/custom/plugins" + ) + assert config.validity_days == 365 + assert config.auto_generate is False + assert config.ca_path == "/custom/ca" + + +class TestInfrastructureConfig: + """Test InfrastructureConfig validation.""" + + def test_postgres_defaults(self): + """Test PostgreSQL default configuration.""" + config = PostgresConfig() + assert config.enabled is True + assert config.image == "postgres:17" + assert config.database == "mcp" + assert config.user == "postgres" + assert config.password == "mysecretpassword" + assert config.storage_size == "10Gi" + + def test_postgres_custom(self): + """Test PostgreSQL custom configuration.""" + config = PostgresConfig( + enabled=True, + image="postgres:16", + database="customdb", + user="customuser", + password="custompass", + storage_size="20Gi", + storage_class="fast-ssd" + ) + assert config.image == "postgres:16" + assert config.database == "customdb" + assert config.storage_class == "fast-ssd" + + def 
test_redis_defaults(self): + """Test Redis default configuration.""" + config = RedisConfig() + assert config.enabled is True + assert config.image == "redis:latest" + + def test_infrastructure_defaults(self): + """Test infrastructure with default values.""" + config = InfrastructureConfig() + assert config.postgres.enabled is True + assert config.redis.enabled is True + + +class TestMCPStackConfig: + """Test complete MCPStackConfig validation.""" + + def test_minimal_config(self): + """Test minimal valid configuration.""" + config = MCPStackConfig( + deployment=DeploymentConfig(type="compose", project_name="test"), + gateway=GatewayConfig(image="mcpgateway:latest") + ) + assert config.deployment.type == "compose" + assert config.gateway.image == "mcpgateway:latest" + assert config.plugins == [] + + def test_full_config(self): + """Test full configuration with all options.""" + config = MCPStackConfig( + deployment=DeploymentConfig(type="kubernetes", namespace="prod"), + gateway=GatewayConfig( + image="mcpgateway:latest", + port=4444, + mtls_enabled=True + ), + plugins=[ + PluginConfig(name="Plugin1", image="plugin1:latest"), + PluginConfig(name="Plugin2", image="plugin2:latest") + ], + certificates=CertificatesConfig(validity_days=365), + infrastructure=InfrastructureConfig() + ) + assert config.deployment.namespace == "prod" + assert len(config.plugins) == 2 + assert config.certificates.validity_days == 365 + + def test_duplicate_plugin_names(self): + """Test that duplicate plugin names are rejected.""" + with pytest.raises(ValidationError, match="Duplicate plugin names found"): + MCPStackConfig( + deployment=DeploymentConfig(type="compose"), + gateway=GatewayConfig(image="test:latest"), + plugins=[ + PluginConfig(name="DuplicatePlugin", image="plugin1:latest"), + PluginConfig(name="DuplicatePlugin", image="plugin2:latest") + ] + ) + + def test_unique_plugin_names(self): + """Test that unique plugin names are accepted.""" + config = MCPStackConfig( + 
deployment=DeploymentConfig(type="compose"), + gateway=GatewayConfig(image="test:latest"), + plugins=[ + PluginConfig(name="Plugin1", image="plugin1:latest"), + PluginConfig(name="Plugin2", image="plugin2:latest"), + PluginConfig(name="Plugin3", image="plugin3:latest") + ] + ) + assert len(config.plugins) == 3 + assert [p.name for p in config.plugins] == ["Plugin1", "Plugin2", "Plugin3"] + + def test_config_with_repo_builds(self): + """Test configuration with repository builds.""" + config = MCPStackConfig( + deployment=DeploymentConfig(type="compose"), + gateway=GatewayConfig( + repo="https://github.com/org/gateway.git", + ref="v2.0.0" + ), + plugins=[ + PluginConfig( + name="BuiltPlugin", + repo="https://github.com/org/plugin.git", + ref="main", + context="plugins/src" + ) + ] + ) + assert config.gateway.repo is not None + assert config.gateway.ref == "v2.0.0" + assert config.plugins[0].repo is not None + assert config.plugins[0].context == "plugins/src" + + +class TestBuildableConfig: + """Test BuildableConfig base class validation.""" + + def test_mtls_defaults(self): + """Test mTLS default settings.""" + config = GatewayConfig(image="test:latest") + assert config.mtls_enabled is True + + def test_mtls_disabled(self): + """Test mTLS can be disabled.""" + config = GatewayConfig(image="test:latest", mtls_enabled=False) + assert config.mtls_enabled is False + + def test_env_vars(self): + """Test environment variables.""" + config = GatewayConfig( + image="test:latest", + env_vars={"LOG_LEVEL": "DEBUG", "PORT": "4444"} + ) + assert config.env_vars["LOG_LEVEL"] == "DEBUG" + assert config.env_vars["PORT"] == "4444" + + def test_multi_stage_build(self): + """Test multi-stage build target.""" + config = PluginConfig( + name="TestPlugin", + repo="https://github.com/org/plugin.git", + containerfile="Dockerfile", + target="production" + ) + assert config.containerfile == "Dockerfile" + assert config.target == "production" From 0ef7d5f2a5e9eb1aff873d5bb600c2637e7202c6 Mon 
Sep 17 00:00:00 2001 From: Teryl Taylor Date: Mon, 13 Oct 2025 20:54:56 -0600 Subject: [PATCH 20/35] tests: add tests. Fix doc tests. Signed-off-by: Teryl Taylor --- mcpgateway/tools/builder/dagger_deploy.py | 18 +- mcpgateway/tools/builder/factory.py | 9 +- mcpgateway/tools/builder/pipeline.py | 83 +- .../templates/kubernetes/deployment.yaml.j2 | 2 +- .../unit/mcpgateway/tools/builder/test_cli.py | 490 +++++++++ .../mcpgateway/tools/builder/test_common.py | 989 ++++++++++++++++++ .../tools/builder/test_dagger_deploy.py | 451 ++++++++ .../tools/builder/test_python_deploy.py | 468 +++++++++ 8 files changed, 2461 insertions(+), 49 deletions(-) create mode 100644 tests/unit/mcpgateway/tools/builder/test_cli.py create mode 100644 tests/unit/mcpgateway/tools/builder/test_common.py create mode 100644 tests/unit/mcpgateway/tools/builder/test_dagger_deploy.py create mode 100644 tests/unit/mcpgateway/tools/builder/test_python_deploy.py diff --git a/mcpgateway/tools/builder/dagger_deploy.py b/mcpgateway/tools/builder/dagger_deploy.py index 836e5e3a0..6133d8a15 100644 --- a/mcpgateway/tools/builder/dagger_deploy.py +++ b/mcpgateway/tools/builder/dagger_deploy.py @@ -18,9 +18,18 @@ from pathlib import Path from typing import Any, Dict, List, Optional +try: + # Third-Party + import dagger + from dagger import dag + + DAGGER_AVAILABLE = True +except ImportError: + DAGGER_AVAILABLE = False + dagger = None # type: ignore + dag = None # type: ignore + # Third-Party -import dagger -from dagger import dag from rich.console import Console from rich.progress import Progress, SpinnerColumn, TextColumn @@ -52,7 +61,12 @@ def __init__(self, verbose: bool = False): Args: verbose: Enable verbose output + + Raises: + ImportError: If dagger is not installed """ + if not DAGGER_AVAILABLE: + raise ImportError("Dagger is not installed. 
Install with: pip install dagger-io\n" "Alternatively, use the plain Python deployer with --deployer=python") super().__init__(verbose) async def build(self, config_file: str, plugins_only: bool = False, specific_plugins: Optional[List[str]] = None, no_cache: bool = False, copy_env_templates: bool = False) -> None: diff --git a/mcpgateway/tools/builder/factory.py b/mcpgateway/tools/builder/factory.py index cf3a43d48..a814ecda7 100644 --- a/mcpgateway/tools/builder/factory.py +++ b/mcpgateway/tools/builder/factory.py @@ -12,9 +12,9 @@ unavailable, ensuring the deployment system works in various environments. Example: - >>> deployer, mode = DeployFactory.create_deployer("dagger", verbose=True) - >>> deployer.validate("mcp-stack.yaml") - βœ“ Configuration valid + >>> deployer, mode = DeployFactory.create_deployer("dagger", verbose=False) + >>> # Validate configuration (output varies by config) + >>> # deployer.validate("mcp-stack.yaml") """ # Standard @@ -70,11 +70,12 @@ def create_deployer(deployer: str, verbose: bool = False) -> tuple[CICDModule, C Example: >>> # Try to load Dagger, fall back to Python if unavailable - >>> deployer, mode = DeployFactory.create_deployer("dagger", verbose=True) + >>> deployer, mode = DeployFactory.create_deployer("dagger", verbose=False) >>> if mode == CICDTypes.DAGGER: ... print("Using optimized Dagger implementation") ... else: ... 
print("Using fallback Python implementation") + Using optimized Dagger implementation """ # Attempt to load Dagger implementation first if requested if deployer == "dagger": diff --git a/mcpgateway/tools/builder/pipeline.py b/mcpgateway/tools/builder/pipeline.py index 22e70aed9..42febd774 100644 --- a/mcpgateway/tools/builder/pipeline.py +++ b/mcpgateway/tools/builder/pipeline.py @@ -18,11 +18,10 @@ Example: >>> from mcpgateway.tools.builder.factory import DeployFactory - >>> deployer, mode = DeployFactory.create_deployer("dagger", verbose=True) - >>> deployer.validate("mcp-stack.yaml") - βœ“ Configuration valid - >>> await deployer.build("mcp-stack.yaml") - βœ“ Built OPAPluginFilter + >>> deployer, mode = DeployFactory.create_deployer("dagger", verbose=False) + >>> # Validate configuration (output varies by config) + >>> # deployer.validate("mcp-stack.yaml") + >>> # Async methods must be called with await (see method examples below) """ # Standard @@ -93,13 +92,13 @@ def validate(self, config_file: str) -> None: FileNotFoundError: If config_file does not exist Example: - >>> deployer.validate("mcp-stack-local.yaml") - βœ“ Configuration valid + # deployer.validate("mcp-stack-local.yaml") + # βœ“ Configuration valid - >>> deployer.validate("invalid.yaml") - ValueError: Configuration validation failed: - β€’ plugins -> 0 -> name: Field required - β€’ gateway -> image: Field required + # deployer.validate("invalid.yaml") + # ValueError: Configuration validation failed: + # β€’ plugins -> 0 -> name: Field required + # β€’ gateway -> image: Field required """ if self.verbose: self.console.print(f"[blue]Validating {config_file}...[/blue]") @@ -144,9 +143,9 @@ async def build(self, config_file: str, plugins_only: bool = False, specific_plu ValueError: If plugin configuration is invalid Example: - >>> await deployer.build("mcp-stack.yaml", plugins_only=True) - βœ“ Built OPAPluginFilter - βœ“ Built LLMGuardPlugin + # await deployer.build("mcp-stack.yaml", plugins_only=True) 
+ # βœ“ Built OPAPluginFilter + # βœ“ Built LLMGuardPlugin """ pass @@ -169,8 +168,8 @@ async def generate_certificates(self, config_file: str) -> None: FileNotFoundError: If required tools (openssl) are not available Example: - >>> await deployer.generate_certificates("mcp-stack.yaml") - βœ“ Certificates generated + # await deployer.generate_certificates("mcp-stack.yaml") + # βœ“ Certificates generated """ pass @@ -195,15 +194,15 @@ async def deploy(self, config_file: str, dry_run: bool = False, skip_build: bool ValueError: If configuration is invalid Example: - >>> # Full deployment - >>> await deployer.deploy("mcp-stack.yaml") - βœ“ Build complete - βœ“ Certificates generated - βœ“ Deployment complete - - >>> # Dry run (generate manifests only) - >>> await deployer.deploy("mcp-stack.yaml", dry_run=True) - βœ“ Dry-run complete (no changes made) + # Full deployment + # await deployer.deploy("mcp-stack.yaml") + # βœ“ Build complete + # βœ“ Certificates generated + # βœ“ Deployment complete + + # Dry run (generate manifests only) + # await deployer.deploy("mcp-stack.yaml", dry_run=True) + # βœ“ Dry-run complete (no changes made) """ pass @@ -225,15 +224,15 @@ async def verify(self, config_file: str, wait: bool = False, timeout: int = 300) TimeoutError: If wait=True and deployment doesn't become ready Example: - >>> # Quick health check - >>> await deployer.verify("mcp-stack.yaml") - NAME READY STATUS RESTARTS AGE - mcpgateway-xxx 1/1 Running 0 2m - mcp-plugin-opa-xxx 1/1 Running 0 2m - - >>> # Wait for ready state - >>> await deployer.verify("mcp-stack.yaml", wait=True, timeout=600) - βœ“ Deployment healthy + # Quick health check + # await deployer.verify("mcp-stack.yaml") + # NAME READY STATUS RESTARTS AGE + # mcpgateway-xxx 1/1 Running 0 2m + # mcp-plugin-opa-xxx 1/1 Running 0 2m + + # Wait for ready state + # await deployer.verify("mcp-stack.yaml", wait=True, timeout=600) + # βœ“ Deployment healthy """ pass @@ -254,8 +253,8 @@ async def destroy(self, config_file: 
str) -> None: RuntimeError: If destruction fails Example: - >>> await deployer.destroy("mcp-stack.yaml") - βœ“ Deployment destroyed + # await deployer.destroy("mcp-stack.yaml") + # βœ“ Deployment destroyed """ pass @@ -283,12 +282,12 @@ def generate_manifests(self, config_file: str, output_dir: Optional[str] = None) OSError: If output directory cannot be created Example: - >>> manifests_path = deployer.generate_manifests("mcp-stack.yaml") - >>> print(f"Manifests generated in: {manifests_path}") - Manifests generated in: /path/to/deploy/manifests + # manifests_path = deployer.generate_manifests("mcp-stack.yaml") + # print(f"Manifests generated in: {manifests_path}") + # Manifests generated in: /path/to/deploy/manifests - >>> # Custom output directory - >>> deployer.generate_manifests("mcp-stack.yaml", output_dir="./my-manifests") - βœ“ Manifests generated: ./my-manifests + # Custom output directory + # deployer.generate_manifests("mcp-stack.yaml", output_dir="./my-manifests") + # βœ“ Manifests generated: ./my-manifests """ pass diff --git a/mcpgateway/tools/builder/templates/kubernetes/deployment.yaml.j2 b/mcpgateway/tools/builder/templates/kubernetes/deployment.yaml.j2 index bf2510478..905edd576 100644 --- a/mcpgateway/tools/builder/templates/kubernetes/deployment.yaml.j2 +++ b/mcpgateway/tools/builder/templates/kubernetes/deployment.yaml.j2 @@ -223,7 +223,7 @@ spec: type: {{ service_type | default('ClusterIP') }} ports: - name: http - port: {{ service_port | default(8000) }} + port: {{ port | default(8000) }} targetPort: http protocol: TCP {% if service_type == 'NodePort' and node_port is defined %} diff --git a/tests/unit/mcpgateway/tools/builder/test_cli.py b/tests/unit/mcpgateway/tools/builder/test_cli.py new file mode 100644 index 000000000..541d3a99d --- /dev/null +++ b/tests/unit/mcpgateway/tools/builder/test_cli.py @@ -0,0 +1,490 @@ +# -*- coding: utf-8 -*- +"""Location: ./tests/unit/mcpgateway/tools/builder/test_cli.py +Copyright 2025 
+SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Unit tests for builder CLI commands. +""" + +# Standard +import os +from pathlib import Path +from unittest.mock import AsyncMock, MagicMock, Mock, patch + +# Third-Party +import pytest +import typer +from typer.testing import CliRunner + +# First-Party +from mcpgateway.tools.builder.cli import app, main + + +@pytest.fixture +def runner(): + """Create CLI test runner.""" + return CliRunner() + + +@pytest.fixture +def mock_deployer(): + """Create mock deployer instance.""" + deployer = MagicMock() + deployer.validate = MagicMock() + deployer.build = AsyncMock() + deployer.generate_certificates = AsyncMock() + deployer.deploy = AsyncMock() + deployer.verify = AsyncMock() + deployer.destroy = AsyncMock() + deployer.generate_manifests = MagicMock(return_value=Path("/tmp/manifests")) + return deployer + + +class TestCLICallback: + """Test CLI callback initialization.""" + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_cli_callback_default(self, mock_factory, runner): + """Test CLI callback with default options.""" + mock_deployer = MagicMock() + mock_factory.return_value = (mock_deployer, "dagger") + + result = runner.invoke(app, ["--help"]) + assert result.exit_code == 0 + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_cli_callback_verbose(self, mock_factory, runner): + """Test CLI callback with verbose flag.""" + mock_deployer = MagicMock() + mock_factory.return_value = (mock_deployer, "dagger") + + result = runner.invoke(app, ["--verbose", "--help"]) + assert result.exit_code == 0 + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_cli_callback_no_dagger(self, mock_factory, runner, tmp_path): + """Test CLI callback with --no-dagger flag.""" + mock_deployer = MagicMock() + mock_factory.return_value = (mock_deployer, "python") + + config_file = tmp_path / "test-config.yaml" + 
config_file.write_text("deployment:\n type: compose\n") + + # Use a command that actually invokes the callback (not --help) + result = runner.invoke(app, ["--no-dagger", "version"]) + assert result.exit_code == 0 + + +class TestValidateCommand: + """Test validate command.""" + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_validate_success(self, mock_factory, runner, tmp_path, mock_deployer): + """Test successful configuration validation.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + mock_deployer.validate.return_value = None + + result = runner.invoke(app, ["validate", str(config_file)]) + assert result.exit_code == 0 + assert "Configuration valid" in result.stdout + mock_deployer.validate.assert_called_once() + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_validate_failure(self, mock_factory, runner, tmp_path, mock_deployer): + """Test validation failure.""" + config_file = tmp_path / "invalid-config.yaml" + config_file.write_text("invalid: yaml\n") + + mock_factory.return_value = (mock_deployer, "python") + mock_deployer.validate.side_effect = ValueError("Invalid configuration") + + result = runner.invoke(app, ["validate", str(config_file)]) + assert result.exit_code == 1 + assert "Validation failed" in result.stdout + + +class TestBuildCommand: + """Test build command.""" + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_build_success(self, mock_factory, runner, tmp_path, mock_deployer): + """Test successful build.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("gateway:\n image: test:latest\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["build", str(config_file)]) + assert result.exit_code == 0 + assert "Build complete" in result.stdout + 
mock_deployer.build.assert_called_once() + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_build_plugins_only(self, mock_factory, runner, tmp_path, mock_deployer): + """Test building only plugins.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("plugins:\n - name: TestPlugin\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["build", str(config_file), "--plugins-only"]) + assert result.exit_code == 0 + # Verify plugins_only flag was passed + call_kwargs = mock_deployer.build.call_args[1] + assert call_kwargs["plugins_only"] is True + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_build_specific_plugins(self, mock_factory, runner, tmp_path, mock_deployer): + """Test building specific plugins.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("plugins:\n - name: Plugin1\n - name: Plugin2\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke( + app, ["build", str(config_file), "--plugin", "Plugin1", "--plugin", "Plugin2"] + ) + assert result.exit_code == 0 + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_build_no_cache(self, mock_factory, runner, tmp_path, mock_deployer): + """Test building with --no-cache flag.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("gateway:\n image: test:latest\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["build", str(config_file), "--no-cache"]) + assert result.exit_code == 0 + call_kwargs = mock_deployer.build.call_args[1] + assert call_kwargs["no_cache"] is True + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_build_failure(self, mock_factory, runner, tmp_path, mock_deployer): + """Test build failure.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("gateway:\n image: 
test:latest\n") + + mock_factory.return_value = (mock_deployer, "python") + mock_deployer.build.side_effect = RuntimeError("Build failed") + + result = runner.invoke(app, ["build", str(config_file)]) + assert result.exit_code == 1 + assert "Build failed" in result.stdout + + +class TestCertsCommand: + """Test certs command.""" + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_certs_success(self, mock_factory, runner, tmp_path, mock_deployer): + """Test successful certificate generation.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("plugins:\n - name: TestPlugin\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["certs", str(config_file)]) + assert result.exit_code == 0 + assert "Certificates generated" in result.stdout + mock_deployer.generate_certificates.assert_called_once() + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_certs_failure(self, mock_factory, runner, tmp_path, mock_deployer): + """Test certificate generation failure.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("plugins:\n - name: TestPlugin\n") + + mock_factory.return_value = (mock_deployer, "python") + mock_deployer.generate_certificates.side_effect = RuntimeError("Cert generation failed") + + result = runner.invoke(app, ["certs", str(config_file)]) + assert result.exit_code == 1 + assert "Certificate generation failed" in result.stdout + + +class TestDeployCommand: + """Test deploy command.""" + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_deploy_success(self, mock_factory, runner, tmp_path, mock_deployer): + """Test successful deployment.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["deploy", str(config_file)]) + assert result.exit_code == 0 + 
assert "Deployment complete" in result.stdout + mock_deployer.deploy.assert_called_once() + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_deploy_dry_run(self, mock_factory, runner, tmp_path, mock_deployer): + """Test dry-run deployment.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["deploy", str(config_file), "--dry-run"]) + assert result.exit_code == 0 + assert "Dry-run complete" in result.stdout + call_kwargs = mock_deployer.deploy.call_args[1] + assert call_kwargs["dry_run"] is True + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_deploy_skip_build(self, mock_factory, runner, tmp_path, mock_deployer): + """Test deployment with --skip-build.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["deploy", str(config_file), "--skip-build"]) + assert result.exit_code == 0 + call_kwargs = mock_deployer.deploy.call_args[1] + assert call_kwargs["skip_build"] is True + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_deploy_skip_certs(self, mock_factory, runner, tmp_path, mock_deployer): + """Test deployment with --skip-certs.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["deploy", str(config_file), "--skip-certs"]) + assert result.exit_code == 0 + call_kwargs = mock_deployer.deploy.call_args[1] + assert call_kwargs["skip_certs"] is True + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_deploy_custom_output_dir(self, mock_factory, runner, tmp_path, mock_deployer): + """Test deployment with custom 
output directory.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + output_dir = tmp_path / "custom-output" + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["deploy", str(config_file), "--output-dir", str(output_dir)]) + assert result.exit_code == 0 + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_deploy_failure(self, mock_factory, runner, tmp_path, mock_deployer): + """Test deployment failure.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + mock_deployer.deploy.side_effect = RuntimeError("Deployment failed") + + result = runner.invoke(app, ["deploy", str(config_file)]) + assert result.exit_code == 1 + assert "Deployment failed" in result.stdout + + +class TestVerifyCommand: + """Test verify command.""" + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_verify_success(self, mock_factory, runner, tmp_path, mock_deployer): + """Test successful deployment verification.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["verify", str(config_file)]) + assert result.exit_code == 0 + assert "Deployment healthy" in result.stdout + mock_deployer.verify.assert_called_once() + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_verify_with_wait(self, mock_factory, runner, tmp_path, mock_deployer): + """Test verification with default wait behavior (wait=True by default).""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + + # Default wait is True, so just run verify without any flags + result = runner.invoke(app, 
["verify", str(config_file)]) + assert result.exit_code == 0 + call_kwargs = mock_deployer.verify.call_args[1] + assert call_kwargs["wait"] is True + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_verify_with_timeout(self, mock_factory, runner, tmp_path, mock_deployer): + """Test verification with custom timeout.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["verify", str(config_file), "--timeout", "600"]) + assert result.exit_code == 0 + call_kwargs = mock_deployer.verify.call_args[1] + assert call_kwargs["timeout"] == 600 + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_verify_failure(self, mock_factory, runner, tmp_path, mock_deployer): + """Test verification failure.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + mock_deployer.verify.side_effect = RuntimeError("Verification failed") + + result = runner.invoke(app, ["verify", str(config_file)]) + assert result.exit_code == 1 + assert "Verification failed" in result.stdout + + +class TestDestroyCommand: + """Test destroy command.""" + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_destroy_with_force(self, mock_factory, runner, tmp_path, mock_deployer): + """Test destroy with --force flag.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["destroy", str(config_file), "--force"]) + assert result.exit_code == 0 + assert "Deployment destroyed" in result.stdout + mock_deployer.destroy.assert_called_once() + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def 
test_destroy_with_confirmation(self, mock_factory, runner, tmp_path, mock_deployer): + """Test destroy with user confirmation.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + + # Simulate user confirming "yes" + result = runner.invoke(app, ["destroy", str(config_file)], input="y\n") + assert result.exit_code == 0 + assert "Deployment destroyed" in result.stdout + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_destroy_abort(self, mock_factory, runner, tmp_path, mock_deployer): + """Test aborting destroy command.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + + # Simulate user declining "no" + result = runner.invoke(app, ["destroy", str(config_file)], input="n\n") + assert "Aborted" in result.stdout + mock_deployer.destroy.assert_not_called() + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_destroy_failure(self, mock_factory, runner, tmp_path, mock_deployer): + """Test destroy failure.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + mock_deployer.destroy.side_effect = RuntimeError("Destruction failed") + + result = runner.invoke(app, ["destroy", str(config_file), "--force"]) + assert result.exit_code == 1 + assert "Destruction failed" in result.stdout + + +class TestGenerateCommand: + """Test generate command.""" + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_generate_success(self, mock_factory, runner, tmp_path, mock_deployer): + """Test successful manifest generation.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = 
(mock_deployer, "python") + + result = runner.invoke(app, ["generate", str(config_file)]) + assert result.exit_code == 0 + assert "Manifests generated" in result.stdout + mock_deployer.generate_manifests.assert_called_once() + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_generate_with_output_dir(self, mock_factory, runner, tmp_path, mock_deployer): + """Test manifest generation with custom output directory.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + output_dir = tmp_path / "custom-manifests" + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["generate", str(config_file), "--output", str(output_dir)]) + assert result.exit_code == 0 + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_generate_failure(self, mock_factory, runner, tmp_path, mock_deployer): + """Test manifest generation failure.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + mock_deployer.generate_manifests.side_effect = ValueError("Generation failed") + + result = runner.invoke(app, ["generate", str(config_file)]) + assert result.exit_code == 1 + assert "Manifest generation failed" in result.stdout + + +class TestVersionCommand: + """Test version command.""" + + def test_version(self, runner): + """Test version command.""" + result = runner.invoke(app, ["version"]) + assert result.exit_code == 0 + assert "MCP Deploy" in result.stdout + assert "Version" in result.stdout + + +class TestMainFunction: + """Test main entry point.""" + + @patch("mcpgateway.tools.builder.cli.app") + def test_main_success(self, mock_app): + """Test successful main execution.""" + mock_app.return_value = None + main() + mock_app.assert_called_once() + + @patch("mcpgateway.tools.builder.cli.app") + def test_main_keyboard_interrupt(self, 
mock_app): + """Test main with keyboard interrupt.""" + mock_app.side_effect = KeyboardInterrupt() + with pytest.raises(SystemExit) as exc_info: + main() + assert exc_info.value.code == 130 + + @patch("mcpgateway.tools.builder.cli.app") + def test_main_exception_no_debug(self, mock_app): + """Test main with exception (no debug mode).""" + mock_app.side_effect = RuntimeError("Test error") + with pytest.raises(SystemExit) as exc_info: + main() + assert exc_info.value.code == 1 + + @patch("mcpgateway.tools.builder.cli.app") + @patch.dict(os.environ, {"MCP_DEBUG": "1"}) + def test_main_exception_debug_mode(self, mock_app): + """Test main with exception (debug mode enabled).""" + mock_app.side_effect = RuntimeError("Test error") + with pytest.raises(RuntimeError, match="Test error"): + main() diff --git a/tests/unit/mcpgateway/tools/builder/test_common.py b/tests/unit/mcpgateway/tools/builder/test_common.py new file mode 100644 index 000000000..d1d4a7f69 --- /dev/null +++ b/tests/unit/mcpgateway/tools/builder/test_common.py @@ -0,0 +1,989 @@ +# -*- coding: utf-8 -*- +"""Location: ./tests/unit/mcpgateway/tools/builder/test_common.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Unit tests for builder common utilities. 
+""" + +# Standard +import os +from pathlib import Path +import shutil +import subprocess +from unittest.mock import MagicMock, Mock, patch, mock_open + +# Third-Party +import pytest +import yaml + +# First-Party +from mcpgateway.tools.builder.common import ( + copy_env_template, + deploy_compose, + deploy_kubernetes, + destroy_compose, + destroy_kubernetes, + generate_compose_manifests, + generate_kubernetes_manifests, + generate_plugin_config, + get_deploy_dir, + get_docker_compose_command, + load_config, + run_compose, + verify_compose, + verify_kubernetes, +) + + +class TestGetDeployDir: + """Test get_deploy_dir function.""" + + def test_default_deploy_dir(self): + """Test default deploy directory.""" + with patch.dict(os.environ, {}, clear=True): + result = get_deploy_dir() + assert result == Path("./deploy") + + def test_custom_deploy_dir(self): + """Test custom deploy directory from environment variable.""" + with patch.dict(os.environ, {"MCP_DEPLOY_DIR": "/custom/deploy"}): + result = get_deploy_dir() + assert result == Path("/custom/deploy") + + +class TestLoadConfig: + """Test load_config function.""" + + def test_load_valid_config(self, tmp_path): + """Test loading valid YAML configuration.""" + config_file = tmp_path / "mcp-stack.yaml" + config_data = { + "deployment": {"type": "compose", "project_name": "test"}, + "gateway": {"image": "mcpgateway:latest"}, + "plugins": [], + } + config_file.write_text(yaml.dump(config_data)) + + result = load_config(str(config_file)) + assert result["deployment"]["type"] == "compose" + assert result["gateway"]["image"] == "mcpgateway:latest" + + def test_load_nonexistent_config(self): + """Test loading non-existent configuration file.""" + with pytest.raises(FileNotFoundError, match="Configuration file not found"): + load_config("/nonexistent/config.yaml") + + +class TestGeneratePluginConfig: + """Test generate_plugin_config function.""" + + @patch("mcpgateway.tools.builder.common.Environment") + def 
test_generate_plugin_config_compose(self, mock_env_class, tmp_path): + """Test generating plugin config for Docker Compose deployment.""" + # Setup mock template + mock_template = MagicMock() + mock_template.render.return_value = "plugins:\n - name: TestPlugin\n" + mock_env = MagicMock() + mock_env.get_template.return_value = mock_template + mock_env_class.return_value = mock_env + + # Create fake template directory + template_dir = tmp_path / "templates" + template_dir.mkdir() + + config = { + "deployment": {"type": "compose"}, + "plugins": [ + {"name": "TestPlugin", "port": 8000, "mtls_enabled": True} + ], + } + + with patch("mcpgateway.tools.builder.common.Path") as mock_path: + mock_path.return_value.__truediv__.return_value = template_dir + output_dir = tmp_path / "output" + output_dir.mkdir() + + result = generate_plugin_config(config, output_dir) + + # Verify template was called + mock_env.get_template.assert_called_once_with("plugins-config.yaml.j2") + assert result == output_dir / "plugins-config.yaml" + + @patch("mcpgateway.tools.builder.common.Environment") + def test_generate_plugin_config_kubernetes(self, mock_env_class, tmp_path): + """Test generating plugin config for Kubernetes deployment.""" + # Setup mock template + mock_template = MagicMock() + mock_template.render.return_value = "plugins:\n - name: TestPlugin\n" + mock_env = MagicMock() + mock_env.get_template.return_value = mock_template + mock_env_class.return_value = mock_env + + # Create fake template directory + template_dir = tmp_path / "templates" + template_dir.mkdir() + + config = { + "deployment": {"type": "kubernetes", "namespace": "test-ns"}, + "plugins": [ + {"name": "TestPlugin", "port": 8000, "mtls_enabled": False} + ], + } + + with patch("mcpgateway.tools.builder.common.Path") as mock_path: + mock_path.return_value.__truediv__.return_value = template_dir + output_dir = tmp_path / "output" + output_dir.mkdir() + + result = generate_plugin_config(config, output_dir) + + # Verify 
template was called + assert mock_env.get_template.called + assert result == output_dir / "plugins-config.yaml" + + @patch("mcpgateway.tools.builder.common.Environment") + def test_generate_plugin_config_with_overrides(self, mock_env_class, tmp_path): + """Test generating plugin config with plugin_overrides.""" + # Setup mock template + mock_template = MagicMock() + mock_template.render.return_value = "plugins:\n - name: TestPlugin\n" + mock_env = MagicMock() + mock_env.get_template.return_value = mock_template + mock_env_class.return_value = mock_env + + # Create fake template directory + template_dir = tmp_path / "templates" + template_dir.mkdir() + + config = { + "deployment": {"type": "compose"}, + "plugins": [ + { + "name": "TestPlugin", + "port": 8000, + "plugin_overrides": { + "priority": 10, + "mode": "enforce", + "tags": ["security"], + }, + } + ], + } + + with patch("mcpgateway.tools.builder.common.Path") as mock_path: + mock_path.return_value.__truediv__.return_value = template_dir + output_dir = tmp_path / "output" + output_dir.mkdir() + + result = generate_plugin_config(config, output_dir) + assert result == output_dir / "plugins-config.yaml" + + +class TestCopyEnvTemplate: + """Test copy_env_template function.""" + + def test_copy_env_template_success(self, tmp_path): + """Test successful copying of .env.template.""" + # Create plugin build dir with .env.template + plugin_dir = tmp_path / "plugin" + plugin_dir.mkdir() + template_file = plugin_dir / ".env.template" + template_file.write_text("TEST_VAR=value\n") + + # Setup deploy dir + deploy_dir = tmp_path / "deploy" + + with patch("mcpgateway.tools.builder.common.get_deploy_dir", return_value=deploy_dir): + copy_env_template("TestPlugin", plugin_dir) + + target_file = deploy_dir / "env" / ".env.TestPlugin" + assert target_file.exists() + assert target_file.read_text() == "TEST_VAR=value\n" + + def test_copy_env_template_no_template(self, tmp_path): + """Test when .env.template doesn't exist.""" + 
plugin_dir = tmp_path / "plugin" + plugin_dir.mkdir() + + deploy_dir = tmp_path / "deploy" + + with patch("mcpgateway.tools.builder.common.get_deploy_dir", return_value=deploy_dir): + # Should not raise error, just skip + copy_env_template("TestPlugin", plugin_dir, verbose=True) + + def test_copy_env_template_target_exists(self, tmp_path): + """Test when target file already exists.""" + # Create plugin build dir with .env.template + plugin_dir = tmp_path / "plugin" + plugin_dir.mkdir() + template_file = plugin_dir / ".env.template" + template_file.write_text("NEW_VAR=newvalue\n") + + # Setup deploy dir with existing target + deploy_dir = tmp_path / "deploy" + deploy_dir.mkdir() + env_dir = deploy_dir / "env" + env_dir.mkdir() + target_file = env_dir / ".env.TestPlugin" + target_file.write_text("OLD_VAR=oldvalue\n") + + with patch("mcpgateway.tools.builder.common.get_deploy_dir", return_value=deploy_dir): + copy_env_template("TestPlugin", plugin_dir) + + # Should not overwrite + assert target_file.read_text() == "OLD_VAR=oldvalue\n" + + +class TestGetDockerComposeCommand: + """Test get_docker_compose_command function.""" + + @patch("mcpgateway.tools.builder.common.shutil.which") + @patch("mcpgateway.tools.builder.common.subprocess.run") + def test_docker_compose_plugin(self, mock_run, mock_which): + """Test detecting docker compose plugin.""" + mock_which.return_value = "/usr/bin/docker" + mock_run.return_value = Mock(returncode=0) + + result = get_docker_compose_command() + assert result == ["docker", "compose"] + + @patch("mcpgateway.tools.builder.common.shutil.which") + @patch("mcpgateway.tools.builder.common.subprocess.run") + def test_docker_compose_standalone(self, mock_run, mock_which): + """Test detecting standalone docker-compose.""" + + def which_side_effect(cmd): + if cmd == "docker": + return "/usr/bin/docker" + elif cmd == "docker-compose": + return "/usr/bin/docker-compose" + return None + + mock_which.side_effect = which_side_effect + 
mock_run.side_effect = subprocess.CalledProcessError(1, "cmd") + + result = get_docker_compose_command() + assert result == ["docker-compose"] + + @patch("mcpgateway.tools.builder.common.shutil.which") + def test_docker_compose_not_found(self, mock_which): + """Test when docker compose is not available.""" + mock_which.return_value = None + + with pytest.raises(RuntimeError, match="Docker Compose not found"): + get_docker_compose_command() + + +class TestRunCompose: + """Test run_compose function.""" + + @patch("mcpgateway.tools.builder.common.get_docker_compose_command") + @patch("mcpgateway.tools.builder.common.subprocess.run") + def test_run_compose_success(self, mock_run, mock_get_cmd, tmp_path): + """Test successful compose command execution.""" + compose_file = tmp_path / "docker-compose.yaml" + compose_file.write_text("services:\n test: {}\n") + + mock_get_cmd.return_value = ["docker", "compose"] + mock_run.return_value = Mock(returncode=0, stdout="Success", stderr="") + + result = run_compose(compose_file, ["ps"]) + assert result.returncode == 0 + mock_run.assert_called_once() + + @patch("mcpgateway.tools.builder.common.get_docker_compose_command") + def test_run_compose_file_not_found(self, mock_get_cmd, tmp_path): + """Test run_compose with non-existent file.""" + compose_file = tmp_path / "nonexistent.yaml" + mock_get_cmd.return_value = ["docker", "compose"] + + with pytest.raises(FileNotFoundError, match="Compose file not found"): + run_compose(compose_file, ["ps"]) + + @patch("mcpgateway.tools.builder.common.get_docker_compose_command") + @patch("mcpgateway.tools.builder.common.subprocess.run") + def test_run_compose_command_failure(self, mock_run, mock_get_cmd, tmp_path): + """Test run_compose command failure.""" + compose_file = tmp_path / "docker-compose.yaml" + compose_file.write_text("services:\n test: {}\n") + + mock_get_cmd.return_value = ["docker", "compose"] + mock_run.side_effect = subprocess.CalledProcessError( + 1, "cmd", output="", 
stderr="Error" + ) + + with pytest.raises(RuntimeError, match="Docker Compose failed"): + run_compose(compose_file, ["up", "-d"]) + + +class TestDeployCompose: + """Test deploy_compose function.""" + + @patch("mcpgateway.tools.builder.common.run_compose") + def test_deploy_compose_success(self, mock_run, tmp_path): + """Test successful Docker Compose deployment.""" + compose_file = tmp_path / "docker-compose.yaml" + mock_run.return_value = Mock(stdout="Deployed", stderr="") + + deploy_compose(compose_file) + mock_run.assert_called_once_with(compose_file, ["up", "-d"], verbose=False) + + +class TestVerifyCompose: + """Test verify_compose function.""" + + @patch("mcpgateway.tools.builder.common.run_compose") + def test_verify_compose(self, mock_run, tmp_path): + """Test verifying Docker Compose deployment.""" + compose_file = tmp_path / "docker-compose.yaml" + mock_run.return_value = Mock(stdout="test-service running", stderr="") + + result = verify_compose(compose_file) + assert "test-service running" in result + mock_run.assert_called_once_with(compose_file, ["ps"], verbose=False, check=False) + + +class TestDestroyCompose: + """Test destroy_compose function.""" + + @patch("mcpgateway.tools.builder.common.run_compose") + def test_destroy_compose_success(self, mock_run, tmp_path): + """Test successful Docker Compose destruction.""" + compose_file = tmp_path / "docker-compose.yaml" + compose_file.write_text("services:\n test: {}\n") + mock_run.return_value = Mock(stdout="Removed", stderr="") + + destroy_compose(compose_file) + mock_run.assert_called_once_with(compose_file, ["down", "-v"], verbose=False) + + def test_destroy_compose_file_not_found(self, tmp_path): + """Test destroying with non-existent compose file.""" + compose_file = tmp_path / "nonexistent.yaml" + + # Should not raise error, just print warning + destroy_compose(compose_file) + + +class TestDeployKubernetes: + """Test deploy_kubernetes function.""" + + 
@patch("mcpgateway.tools.builder.common.shutil.which") + @patch("mcpgateway.tools.builder.common.subprocess.run") + def test_deploy_kubernetes_success(self, mock_run, mock_which, tmp_path): + """Test successful Kubernetes deployment.""" + mock_which.return_value = "/usr/bin/kubectl" + mock_run.return_value = Mock(returncode=0, stdout="created", stderr="") + + manifests_dir = tmp_path / "manifests" + manifests_dir.mkdir() + (manifests_dir / "gateway-deployment.yaml").write_text("apiVersion: v1\n") + (manifests_dir / "plugins-config.yaml").write_text("plugins: []\n") + + deploy_kubernetes(manifests_dir) + assert mock_run.called + + @patch("mcpgateway.tools.builder.common.shutil.which") + def test_deploy_kubernetes_kubectl_not_found(self, mock_which, tmp_path): + """Test deployment when kubectl is not available.""" + mock_which.return_value = None + manifests_dir = tmp_path / "manifests" + + with pytest.raises(RuntimeError, match="kubectl not found"): + deploy_kubernetes(manifests_dir) + + @patch("mcpgateway.tools.builder.common.shutil.which") + @patch("mcpgateway.tools.builder.common.subprocess.run") + def test_deploy_kubernetes_with_certs(self, mock_run, mock_which, tmp_path): + """Test Kubernetes deployment with certificate secrets.""" + mock_which.return_value = "/usr/bin/kubectl" + mock_run.return_value = Mock(returncode=0, stdout="created", stderr="") + + manifests_dir = tmp_path / "manifests" + manifests_dir.mkdir() + (manifests_dir / "gateway-deployment.yaml").write_text("apiVersion: v1\n") + (manifests_dir / "cert-secrets.yaml").write_text("apiVersion: v1\n") + + deploy_kubernetes(manifests_dir) + assert mock_run.called + + +class TestVerifyKubernetes: + """Test verify_kubernetes function.""" + + @patch("mcpgateway.tools.builder.common.shutil.which") + @patch("mcpgateway.tools.builder.common.subprocess.run") + def test_verify_kubernetes_success(self, mock_run, mock_which): + """Test successful Kubernetes verification.""" + mock_which.return_value = 
"/usr/bin/kubectl" + mock_run.return_value = Mock( + returncode=0, stdout="pod-1 Running\npod-2 Running", stderr="" + ) + + result = verify_kubernetes("test-ns") + assert "Running" in result + mock_run.assert_called_once() + + @patch("mcpgateway.tools.builder.common.shutil.which") + def test_verify_kubernetes_kubectl_not_found(self, mock_which): + """Test verification when kubectl is not available.""" + mock_which.return_value = None + + with pytest.raises(RuntimeError, match="kubectl not found"): + verify_kubernetes("test-ns") + + @patch("mcpgateway.tools.builder.common.shutil.which") + @patch("mcpgateway.tools.builder.common.subprocess.run") + def test_verify_kubernetes_with_wait(self, mock_run, mock_which): + """Test Kubernetes verification with wait.""" + mock_which.return_value = "/usr/bin/kubectl" + mock_run.return_value = Mock(returncode=0, stdout="Ready", stderr="") + + result = verify_kubernetes("test-ns", wait=True, timeout=60) + assert mock_run.call_count >= 1 + + +class TestDestroyKubernetes: + """Test destroy_kubernetes function.""" + + @patch("mcpgateway.tools.builder.common.shutil.which") + @patch("mcpgateway.tools.builder.common.subprocess.run") + def test_destroy_kubernetes_success(self, mock_run, mock_which, tmp_path): + """Test successful Kubernetes destruction.""" + mock_which.return_value = "/usr/bin/kubectl" + mock_run.return_value = Mock(returncode=0, stdout="deleted", stderr="") + + manifests_dir = tmp_path / "manifests" + manifests_dir.mkdir() + (manifests_dir / "gateway-deployment.yaml").write_text("apiVersion: v1\n") + (manifests_dir / "plugins-config.yaml").write_text("plugins: []\n") + + destroy_kubernetes(manifests_dir) + assert mock_run.called + + @patch("mcpgateway.tools.builder.common.shutil.which") + def test_destroy_kubernetes_kubectl_not_found(self, mock_which, tmp_path): + """Test destruction when kubectl is not available.""" + mock_which.return_value = None + manifests_dir = tmp_path / "manifests" + + with 
pytest.raises(RuntimeError, match="kubectl not found"): + destroy_kubernetes(manifests_dir) + + def test_destroy_kubernetes_dir_not_found(self, tmp_path): + """Test destroying with non-existent manifests directory.""" + manifests_dir = tmp_path / "nonexistent" + + with patch("mcpgateway.tools.builder.common.shutil.which", return_value="/usr/bin/kubectl"): + # Should not raise error, just print warning + destroy_kubernetes(manifests_dir) + + +class TestGenerateKubernetesManifests: + """Test generate_kubernetes_manifests function with real template rendering.""" + + def test_generate_manifests_gateway_only(self, tmp_path): + """Test generating Kubernetes manifests for gateway only.""" + output_dir = tmp_path / "manifests" + output_dir.mkdir() + + config = { + "deployment": {"type": "kubernetes", "namespace": "test-ns"}, + "gateway": { + "image": "mcpgateway:latest", + "port": 4444, + "mtls_enabled": False, + }, + "plugins": [], + } + + generate_kubernetes_manifests(config, output_dir) + + # Verify gateway deployment was created + gateway_file = output_dir / "gateway-deployment.yaml" + assert gateway_file.exists() + + # Parse and validate YAML + with open(gateway_file) as f: + docs = list(yaml.safe_load_all(f)) + + # Should have Deployment and Service + assert len(docs) >= 2 + + # Validate Deployment + deployment = next((d for d in docs if d.get("kind") == "Deployment"), None) + assert deployment is not None + assert deployment["metadata"]["name"] == "mcpgateway" + assert deployment["metadata"]["namespace"] == "test-ns" + assert deployment["spec"]["template"]["spec"]["containers"][0]["image"] == "mcpgateway:latest" + + # Validate Service + service = next((d for d in docs if d.get("kind") == "Service"), None) + assert service is not None + assert service["metadata"]["name"] == "mcpgateway" + assert service["spec"]["ports"][0]["port"] == 4444 + + def test_generate_manifests_with_plugins(self, tmp_path): + """Test generating Kubernetes manifests with plugins.""" + 
output_dir = tmp_path / "manifests" + output_dir.mkdir() + + config = { + "deployment": {"type": "kubernetes", "namespace": "mcp-test"}, + "gateway": { + "image": "mcpgateway:latest", + "port": 4444, + "mtls_enabled": False, + }, + "plugins": [ + { + "name": "TestPlugin", + "image": "test-plugin:v1", + "port": 8000, + "mtls_enabled": False, + }, + { + "name": "AnotherPlugin", + "image": "another-plugin:v2", + "port": 8001, + "mtls_enabled": False, + }, + ], + } + + generate_kubernetes_manifests(config, output_dir) + + # Verify plugin deployments were created + plugin1_file = output_dir / "plugin-testplugin-deployment.yaml" + plugin2_file = output_dir / "plugin-anotherplugin-deployment.yaml" + + assert plugin1_file.exists() + assert plugin2_file.exists() + + # Parse and validate first plugin + with open(plugin1_file) as f: + docs = list(yaml.safe_load_all(f)) + + deployment = next((d for d in docs if d.get("kind") == "Deployment"), None) + assert deployment is not None + assert deployment["metadata"]["name"] == "mcp-plugin-testplugin" + assert deployment["metadata"]["namespace"] == "mcp-test" + assert deployment["spec"]["template"]["spec"]["containers"][0]["image"] == "test-plugin:v1" + + def test_generate_manifests_with_mtls(self, tmp_path): + """Test generating Kubernetes manifests with mTLS enabled.""" + # Change to tmp_path to ensure we have a valid working directory + original_dir = None + try: + original_dir = os.getcwd() + except (FileNotFoundError, OSError): + pass # Current directory doesn't exist + + os.chdir(tmp_path) + + try: + output_dir = tmp_path / "manifests" + output_dir.mkdir() + + # Create fake certificate files in the actual location where the code looks + certs_dir = Path("certs/mcp") + ca_dir = certs_dir / "ca" + gateway_dir = certs_dir / "gateway" + plugin_dir = certs_dir / "plugins" / "SecurePlugin" + + ca_dir.mkdir(parents=True, exist_ok=True) + gateway_dir.mkdir(parents=True, exist_ok=True) + plugin_dir.mkdir(parents=True, exist_ok=True) + 
+ (ca_dir / "ca.crt").write_bytes(b"fake-ca-cert") + (gateway_dir / "client.crt").write_bytes(b"fake-gateway-cert") + (gateway_dir / "client.key").write_bytes(b"fake-gateway-key") + (plugin_dir / "server.crt").write_bytes(b"fake-plugin-cert") + (plugin_dir / "server.key").write_bytes(b"fake-plugin-key") + + config = { + "deployment": {"type": "kubernetes", "namespace": "secure-ns"}, + "gateway": { + "image": "mcpgateway:latest", + "port": 4444, + "mtls_enabled": True, + }, + "plugins": [ + { + "name": "SecurePlugin", + "image": "secure-plugin:v1", + "port": 8000, + "mtls_enabled": True, + } + ], + } + + generate_kubernetes_manifests(config, output_dir) + finally: + # Clean up created certificate files + if Path("certs").exists(): + shutil.rmtree("certs") + + # Restore original directory if it exists + if original_dir and Path(original_dir).exists(): + os.chdir(original_dir) + + # Verify certificate secrets were created + cert_secrets_file = output_dir / "cert-secrets.yaml" + assert cert_secrets_file.exists() + + # Parse and validate secrets + with open(cert_secrets_file) as f: + docs = list(yaml.safe_load_all(f)) + + # Should have secrets for CA, gateway, and plugin + secrets = [d for d in docs if d.get("kind") == "Secret"] + assert len(secrets) >= 2 # At least gateway and plugin secrets + + def test_generate_manifests_with_infrastructure(self, tmp_path): + """Test generating Kubernetes manifests with PostgreSQL and Redis.""" + output_dir = tmp_path / "manifests" + output_dir.mkdir() + + config = { + "deployment": {"type": "kubernetes", "namespace": "infra-ns"}, + "gateway": { + "image": "mcpgateway:latest", + "port": 4444, + "mtls_enabled": False, + }, + "plugins": [], + "infrastructure": { + "postgres": { + "enabled": True, + "image": "postgres:17", + "database": "testdb", + "user": "testuser", + "password": "testpass", + }, + "redis": { + "enabled": True, + "image": "redis:alpine", + }, + }, + } + + generate_kubernetes_manifests(config, output_dir) + + # Verify 
infrastructure manifests were created + postgres_file = output_dir / "postgres-deployment.yaml" + redis_file = output_dir / "redis-deployment.yaml" + + assert postgres_file.exists() + assert redis_file.exists() + + # Parse and validate PostgreSQL + with open(postgres_file) as f: + docs = list(yaml.safe_load_all(f)) + + postgres_deployment = next((d for d in docs if d.get("kind") == "Deployment"), None) + assert postgres_deployment is not None + assert postgres_deployment["metadata"]["name"] == "postgres" + assert postgres_deployment["spec"]["template"]["spec"]["containers"][0]["image"] == "postgres:17" + + # Parse and validate Redis + with open(redis_file) as f: + docs = list(yaml.safe_load_all(f)) + + redis_deployment = next((d for d in docs if d.get("kind") == "Deployment"), None) + assert redis_deployment is not None + assert redis_deployment["metadata"]["name"] == "redis" + + # Verify gateway has database environment variables in Secret + gateway_file = output_dir / "gateway-deployment.yaml" + with open(gateway_file) as f: + docs = list(yaml.safe_load_all(f)) + + # Find the Secret containing environment variables + secret = next((d for d in docs if d.get("kind") == "Secret" and d["metadata"]["name"] == "mcpgateway-env"), None) + assert secret is not None + assert "stringData" in secret + + string_data = secret["stringData"] + + # Check DATABASE_URL is set + assert "DATABASE_URL" in string_data + assert "postgresql://" in string_data["DATABASE_URL"] + assert "testuser:testpass" in string_data["DATABASE_URL"] + + # Check REDIS_URL is set + assert "REDIS_URL" in string_data + assert "redis://redis:6379" in string_data["REDIS_URL"] + + # Verify deployment references the Secret via envFrom + gateway_deployment = next((d for d in docs if d.get("kind") == "Deployment"), None) + assert gateway_deployment is not None + env_from = gateway_deployment["spec"]["template"]["spec"]["containers"][0]["envFrom"] + assert any(ref.get("secretRef", {}).get("name") == 
"mcpgateway-env" for ref in env_from) + + +class TestGenerateComposeManifests: + """Test generate_compose_manifests function with real template rendering.""" + + def test_generate_compose_gateway_only(self, tmp_path): + """Test generating Docker Compose manifest for gateway only.""" + output_dir = tmp_path / "manifests" + output_dir.mkdir() + + config = { + "deployment": {"type": "compose", "project_name": "test-mcp"}, + "gateway": { + "image": "mcpgateway:latest", + "port": 4444, + "host_port": 4444, + "mtls_enabled": False, + }, + "plugins": [], + } + + with patch("mcpgateway.tools.builder.common.Path.cwd", return_value=tmp_path): + generate_compose_manifests(config, output_dir) + + # Verify compose file was created + compose_file = output_dir / "docker-compose.yaml" + assert compose_file.exists() + + # Parse and validate + with open(compose_file) as f: + compose_data = yaml.safe_load(f) + + assert "services" in compose_data + assert "mcpgateway" in compose_data["services"] + + gateway = compose_data["services"]["mcpgateway"] + assert gateway["image"] == "mcpgateway:latest" + assert gateway["ports"] == ["4444:4444"] + + def test_generate_compose_with_plugins(self, tmp_path): + """Test generating Docker Compose manifest with plugins.""" + output_dir = tmp_path / "manifests" + output_dir.mkdir() + + config = { + "deployment": {"type": "compose", "project_name": "mcp-stack"}, + "gateway": { + "image": "mcpgateway:latest", + "port": 4444, + "host_port": 4444, + "mtls_enabled": False, + }, + "plugins": [ + { + "name": "Plugin1", + "image": "plugin1:v1", + "port": 8000, + "expose_port": True, + "host_port": 8000, + "mtls_enabled": False, + }, + { + "name": "Plugin2", + "image": "plugin2:v1", + "port": 8001, + "expose_port": False, + "mtls_enabled": False, + }, + ], + } + + with patch("mcpgateway.tools.builder.common.Path.cwd", return_value=tmp_path): + generate_compose_manifests(config, output_dir) + + # Parse and validate + compose_file = output_dir / 
"docker-compose.yaml" + with open(compose_file) as f: + compose_data = yaml.safe_load(f) + + # Verify plugins are in services + assert "plugin1" in compose_data["services"] + assert "plugin2" in compose_data["services"] + + plugin1 = compose_data["services"]["plugin1"] + assert plugin1["image"] == "plugin1:v1" + assert "8000:8000" in plugin1["ports"] # Exposed + + plugin2 = compose_data["services"]["plugin2"] + assert plugin2["image"] == "plugin2:v1" + # Plugin2 should not have host port mapping since expose_port is False + + def test_generate_compose_with_mtls(self, tmp_path): + """Test generating Docker Compose manifest with mTLS certificates.""" + output_dir = tmp_path / "manifests" + output_dir.mkdir() + + # Create fake certificate structure + certs_dir = tmp_path / "certs" / "mcp" + ca_dir = certs_dir / "ca" + gateway_dir = certs_dir / "gateway" + plugin_dir = certs_dir / "plugins" / "SecurePlugin" + + ca_dir.mkdir(parents=True) + gateway_dir.mkdir(parents=True) + plugin_dir.mkdir(parents=True) + + (ca_dir / "ca.crt").write_text("fake-ca") + (gateway_dir / "client.crt").write_text("fake-cert") + (gateway_dir / "client.key").write_text("fake-key") + (plugin_dir / "server.crt").write_text("fake-plugin-cert") + (plugin_dir / "server.key").write_text("fake-plugin-key") + + config = { + "deployment": {"type": "compose"}, + "gateway": { + "image": "mcpgateway:latest", + "port": 4444, + "host_port": 4444, + "mtls_enabled": True, + }, + "plugins": [ + { + "name": "SecurePlugin", + "image": "secure:v1", + "port": 8000, + "mtls_enabled": True, + } + ], + } + + with patch("mcpgateway.tools.builder.common.Path.cwd", return_value=tmp_path): + generate_compose_manifests(config, output_dir) + + # Parse and validate + compose_file = output_dir / "docker-compose.yaml" + with open(compose_file) as f: + compose_data = yaml.safe_load(f) + + # Verify gateway has certificate volumes + gateway = compose_data["services"]["mcpgateway"] + assert "volumes" in gateway + # Should have 
volume mounts for certificates + volumes = gateway["volumes"] + assert any("certs" in str(v) or "ca.crt" in str(v) for v in volumes) + + # Verify plugin has certificate volumes + plugin = compose_data["services"]["secureplugin"] + assert "volumes" in plugin + + def test_generate_compose_with_env_files(self, tmp_path): + """Test generating Docker Compose manifest with environment files.""" + output_dir = tmp_path / "manifests" + output_dir.mkdir() + + # Create env files + deploy_dir = tmp_path / "deploy" + env_dir = deploy_dir / "env" + env_dir.mkdir(parents=True) + (env_dir / ".env.gateway").write_text("GATEWAY_VAR=value1\n") + (env_dir / ".env.TestPlugin").write_text("PLUGIN_VAR=value2\n") + + config = { + "deployment": {"type": "compose"}, + "gateway": { + "image": "mcpgateway:latest", + "port": 4444, + "mtls_enabled": False, + }, + "plugins": [ + { + "name": "TestPlugin", + "image": "test:v1", + "port": 8000, + "mtls_enabled": False, + } + ], + } + + with patch("mcpgateway.tools.builder.common.get_deploy_dir", return_value=deploy_dir): + with patch("mcpgateway.tools.builder.common.Path.cwd", return_value=tmp_path): + generate_compose_manifests(config, output_dir) + + # Parse and validate + compose_file = output_dir / "docker-compose.yaml" + with open(compose_file) as f: + compose_data = yaml.safe_load(f) + + # Verify env_file is set + gateway = compose_data["services"]["mcpgateway"] + assert "env_file" in gateway + + plugin = compose_data["services"]["testplugin"] + assert "env_file" in plugin + + def test_generate_compose_with_infrastructure(self, tmp_path): + """Test generating Docker Compose manifest with PostgreSQL and Redis. + + Note: Currently the template uses hardcoded infrastructure images/config. + Infrastructure customization is not yet implemented for Docker Compose. 
+ """ + output_dir = tmp_path / "manifests" + output_dir.mkdir() + + config = { + "deployment": {"type": "compose"}, + "gateway": { + "image": "mcpgateway:latest", + "port": 4444, + "mtls_enabled": False, + }, + "plugins": [], + "infrastructure": { + "postgres": { + "enabled": True, + "image": "postgres:17", + "database": "mcpdb", + "user": "mcpuser", + "password": "secret123", + }, + "redis": { + "enabled": True, + "image": "redis:7-alpine", + }, + }, + } + + with patch("mcpgateway.tools.builder.common.Path.cwd", return_value=tmp_path): + generate_compose_manifests(config, output_dir) + + # Parse and validate + compose_file = output_dir / "docker-compose.yaml" + with open(compose_file) as f: + compose_data = yaml.safe_load(f) + + # Verify PostgreSQL service exists + # Note: Template uses hardcoded "postgres:17" and "mcp" database + assert "postgres" in compose_data["services"] + postgres = compose_data["services"]["postgres"] + assert postgres["image"] == "postgres:17" # Hardcoded in template + assert "environment" in postgres + + # Verify database name is "mcp" (hardcoded default, not "mcpdb" from config) + env = postgres["environment"] + if isinstance(env, list): + assert any("POSTGRES_DB=mcp" in str(e) for e in env) + else: + assert env["POSTGRES_DB"] == "mcp" + + # Verify Redis service exists + # Note: Template uses hardcoded "redis:latest" + assert "redis" in compose_data["services"] + redis = compose_data["services"]["redis"] + assert redis["image"] == "redis:latest" # Hardcoded in template + + # Verify gateway has database environment variables + gateway = compose_data["services"]["mcpgateway"] + assert "environment" in gateway + env = gateway["environment"] + + # Should have DATABASE_URL with default values + if isinstance(env, list): + db_url = next((e for e in env if "DATABASE_URL" in str(e)), None) + else: + db_url = env.get("DATABASE_URL") + assert db_url is not None + assert "postgresql://" in str(db_url) diff --git 
a/tests/unit/mcpgateway/tools/builder/test_dagger_deploy.py b/tests/unit/mcpgateway/tools/builder/test_dagger_deploy.py new file mode 100644 index 000000000..bc0f8ee87 --- /dev/null +++ b/tests/unit/mcpgateway/tools/builder/test_dagger_deploy.py @@ -0,0 +1,451 @@ +# -*- coding: utf-8 -*- +"""Location: ./tests/unit/mcpgateway/tools/builder/test_dagger_deploy.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Unit tests for Dagger-based MCP Stack deployment. + +These tests are skipped if Dagger is not installed. +""" + +# Standard +from pathlib import Path +from unittest.mock import AsyncMock, MagicMock, Mock, patch + +# Third-Party +import pytest + +# Check if dagger is available +try: + import dagger + + DAGGER_AVAILABLE = True +except ImportError: + DAGGER_AVAILABLE = False + +# Skip all tests in this module if Dagger is not available +pytestmark = pytest.mark.skipif(not DAGGER_AVAILABLE, reason="Dagger not installed") + +# Conditional import to avoid errors when Dagger is not installed +if DAGGER_AVAILABLE: + # First-Party + from mcpgateway.tools.builder.dagger_deploy import MCPStackDagger +else: + # Create a dummy class to avoid NameError in decorators + MCPStackDagger = type("MCPStackDagger", (), {}) + + +@pytest.fixture +def mock_dagger_connection(tmp_path): + """Fixture to mock Dagger connection and dag.""" + with patch("mcpgateway.tools.builder.dagger_deploy.dagger.connection") as mock_conn: + with patch("mcpgateway.tools.builder.dagger_deploy.dag") as mock_dag: + with patch("mcpgateway.tools.builder.dagger_deploy.Path.cwd") as mock_cwd: + # Mock Path.cwd() to return a valid temporary directory + mock_cwd.return_value = tmp_path + + # Mock the async context manager + mock_conn_ctx = AsyncMock() + mock_conn.return_value = mock_conn_ctx + mock_conn_ctx.__aenter__.return_value = None + mock_conn_ctx.__aexit__.return_value = None + + # Setup dag mocks (use regular Mock for synchronous Dagger API) + mock_git = Mock() + mock_tree = 
Mock() + mock_container = Mock() + mock_container.export_image = AsyncMock() # Only export_image is async + mock_host = Mock() + mock_dir = Mock() + mock_dir.export = AsyncMock() # export is async + + # Set up the method chain for git operations + mock_dag.git.return_value = mock_git + mock_git.branch.return_value = mock_git + mock_git.tree.return_value = mock_tree + mock_tree.docker_build.return_value = mock_container + + # Set up container operations + mock_dag.container.return_value = mock_container + mock_container.from_.return_value = mock_container + mock_container.with_exec.return_value = mock_container + mock_container.with_mounted_directory.return_value = mock_container + mock_container.with_workdir.return_value = mock_container + mock_container.directory.return_value = mock_dir + + # Set up host operations + mock_dag.host.return_value = mock_host + mock_host.directory.return_value = mock_dir + + yield {"connection": mock_conn, "dag": mock_dag, "container": mock_container} + + +class TestMCPStackDaggerInit: + """Test MCPStackDagger initialization.""" + + def test_init_default(self): + """Test default initialization.""" + stack = MCPStackDagger() + assert stack.verbose is False + + def test_init_verbose(self): + """Test initialization with verbose flag.""" + stack = MCPStackDagger(verbose=True) + assert stack.verbose is True + + +class TestMCPStackDaggerBuild: + """Test MCPStackDagger build method.""" + + @patch("mcpgateway.tools.builder.dagger_deploy.get_deploy_dir") + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @pytest.mark.asyncio + async def test_build_gateway_only(self, mock_load, mock_get_deploy, mock_dagger_connection, tmp_path): + """Test building gateway container with Dagger.""" + mock_load.return_value = { + "gateway": {"repo": "https://github.com/test/gateway.git", "ref": "main"}, + "plugins": [], + } + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackDagger() + await stack.build("test-config.yaml") + + 
mock_load.assert_called_once_with("test-config.yaml") + + @patch("mcpgateway.tools.builder.dagger_deploy.get_deploy_dir") + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @pytest.mark.asyncio + async def test_build_plugins_only(self, mock_load, mock_get_deploy, mock_dagger_connection, tmp_path): + """Test building only plugins.""" + mock_load.return_value = { + "gateway": {"repo": "https://github.com/test/gateway.git"}, + "plugins": [ + {"name": "Plugin1", "repo": "https://github.com/test/plugin1.git"} + ], + } + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackDagger() + await stack.build("test-config.yaml", plugins_only=True) + + mock_load.assert_called_once() + + @patch("mcpgateway.tools.builder.dagger_deploy.get_deploy_dir") + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @pytest.mark.asyncio + async def test_build_specific_plugins(self, mock_load, mock_get_deploy, mock_dagger_connection, tmp_path): + """Test building specific plugins only.""" + mock_load.return_value = { + "gateway": {"image": "mcpgateway:latest"}, + "plugins": [ + {"name": "Plugin1", "repo": "https://github.com/test/plugin1.git"}, + {"name": "Plugin2", "repo": "https://github.com/test/plugin2.git"}, + ], + } + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackDagger() + await stack.build("test-config.yaml", specific_plugins=["Plugin1"]) + + mock_load.assert_called_once() + + @patch("mcpgateway.tools.builder.dagger_deploy.get_deploy_dir") + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @pytest.mark.asyncio + async def test_build_no_plugins(self, mock_load, mock_get_deploy, mock_dagger_connection, tmp_path): + """Test building when no plugins are defined.""" + mock_load.return_value = { + "gateway": {"image": "mcpgateway:latest"}, + "plugins": [], + } + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackDagger() + # Should not raise error + await stack.build("test-config.yaml", 
plugins_only=True) + + +class TestMCPStackDaggerGenerateCertificates: + """Test MCPStackDagger generate_certificates method.""" + + @patch("mcpgateway.tools.builder.dagger_deploy.get_deploy_dir") + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @pytest.mark.asyncio + async def test_generate_certificates(self, mock_load, mock_get_deploy, mock_dagger_connection, tmp_path): + """Test certificate generation with Dagger.""" + mock_load.return_value = { + "plugins": [ + {"name": "Plugin1"}, + {"name": "Plugin2"}, + ] + } + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackDagger() + await stack.generate_certificates("test-config.yaml") + + mock_load.assert_called_once() + + +class TestMCPStackDaggerDeploy: + """Test MCPStackDagger deploy method.""" + + @patch("mcpgateway.tools.builder.dagger_deploy.get_deploy_dir") + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @patch.object(MCPStackDagger, "build") + @patch.object(MCPStackDagger, "generate_certificates") + @patch.object(MCPStackDagger, "generate_manifests") + @patch.object(MCPStackDagger, "_deploy_compose") + @pytest.mark.asyncio + async def test_deploy_compose_full( + self, mock_deploy, mock_gen_manifests, mock_certs, mock_build, mock_load, mock_get_deploy, mock_dagger_connection, tmp_path + ): + """Test full Docker Compose deployment with Dagger.""" + mock_load.return_value = { + "deployment": {"type": "compose", "project_name": "test"}, + "gateway": {"repo": "https://github.com/test/gateway.git", "mtls_enabled": True}, + "plugins": [], + } + mock_gen_manifests.return_value = Path("/tmp/manifests") + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackDagger() + await stack.deploy("test-config.yaml") + + mock_build.assert_called_once() + mock_certs.assert_called_once() + mock_gen_manifests.assert_called_once() + mock_deploy.assert_called_once() + + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @patch.object(MCPStackDagger, 
"generate_manifests") + @pytest.mark.asyncio + async def test_deploy_dry_run(self, mock_gen_manifests, mock_load, mock_dagger_connection, tmp_path): + """Test dry-run deployment with Dagger.""" + mock_load.return_value = { + "deployment": {"type": "compose"}, + "gateway": {"image": "mcpgateway:latest"}, + "plugins": [], + } + mock_gen_manifests.return_value = Path("/tmp/manifests") + + stack = MCPStackDagger() + await stack.deploy("test-config.yaml", dry_run=True, skip_build=True, skip_certs=True) + + mock_gen_manifests.assert_called_once() + + @patch("mcpgateway.tools.builder.dagger_deploy.get_deploy_dir") + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @patch.object(MCPStackDagger, "generate_manifests") + @patch.object(MCPStackDagger, "_deploy_kubernetes") + @pytest.mark.asyncio + async def test_deploy_kubernetes(self, mock_deploy, mock_gen_manifests, mock_load, mock_get_deploy, mock_dagger_connection, tmp_path): + """Test Kubernetes deployment with Dagger.""" + mock_load.return_value = { + "deployment": {"type": "kubernetes", "namespace": "test-ns"}, + "gateway": {"image": "mcpgateway:latest", "mtls_enabled": False}, + "plugins": [], + } + mock_gen_manifests.return_value = Path("/tmp/manifests") + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackDagger() + await stack.deploy("test-config.yaml", skip_build=True, skip_certs=True) + + mock_deploy.assert_called_once() + + +class TestMCPStackDaggerVerify: + """Test MCPStackDagger verify method.""" + + @patch("mcpgateway.tools.builder.dagger_deploy.get_deploy_dir") + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @patch.object(MCPStackDagger, "_verify_kubernetes") + @pytest.mark.asyncio + async def test_verify_kubernetes(self, mock_verify_kubernetes, mock_load, mock_get_deploy, mock_dagger_connection, tmp_path): + """Test Kubernetes deployment verification with Dagger.""" + mock_load.return_value = { + "deployment": {"type": "kubernetes", "namespace": "test-ns"} + 
} + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackDagger() + await stack.verify("test-config.yaml") + + mock_verify_kubernetes.assert_called_once() + + @patch("mcpgateway.tools.builder.dagger_deploy.get_deploy_dir") + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @patch.object(MCPStackDagger, "_verify_compose") + @pytest.mark.asyncio + async def test_verify_compose(self, mock_verify_compose, mock_load, mock_get_deploy, mock_dagger_connection, tmp_path): + """Test Docker Compose deployment verification with Dagger.""" + mock_load.return_value = {"deployment": {"type": "compose"}} + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackDagger() + await stack.verify("test-config.yaml") + + mock_verify_compose.assert_called_once() + + +class TestMCPStackDaggerDestroy: + """Test MCPStackDagger destroy method.""" + + @patch("mcpgateway.tools.builder.dagger_deploy.get_deploy_dir") + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @patch.object(MCPStackDagger, "_destroy_kubernetes") + @pytest.mark.asyncio + async def test_destroy_kubernetes(self, mock_destroy_kubernetes, mock_load, mock_get_deploy, mock_dagger_connection, tmp_path): + """Test Kubernetes deployment destruction with Dagger.""" + mock_load.return_value = {"deployment": {"type": "kubernetes"}} + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackDagger() + await stack.destroy("test-config.yaml") + + mock_destroy_kubernetes.assert_called_once() + + @patch("mcpgateway.tools.builder.dagger_deploy.get_deploy_dir") + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @patch.object(MCPStackDagger, "_destroy_compose") + @pytest.mark.asyncio + async def test_destroy_compose(self, mock_destroy_compose, mock_load, mock_get_deploy, mock_dagger_connection, tmp_path): + """Test Docker Compose deployment destruction with Dagger.""" + mock_load.return_value = {"deployment": {"type": "compose"}} + 
mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackDagger() + await stack.destroy("test-config.yaml") + + mock_destroy_compose.assert_called_once() + + +class TestMCPStackDaggerGenerateManifests: + """Test MCPStackDagger generate_manifests method.""" + + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @patch("mcpgateway.tools.builder.dagger_deploy.generate_plugin_config") + @patch("mcpgateway.tools.builder.dagger_deploy.generate_kubernetes_manifests") + def test_generate_manifests_kubernetes( + self, mock_k8s_gen, mock_plugin_gen, mock_load, tmp_path + ): + """Test generating Kubernetes manifests with Dagger.""" + mock_load.return_value = { + "deployment": {"type": "kubernetes", "namespace": "test-ns"}, + "gateway": {"image": "mcpgateway:latest"}, + "plugins": [], + } + + stack = MCPStackDagger() + result = stack.generate_manifests("test-config.yaml", output_dir=str(tmp_path)) + + mock_plugin_gen.assert_called_once() + mock_k8s_gen.assert_called_once() + assert result == tmp_path + + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @patch("mcpgateway.tools.builder.dagger_deploy.generate_plugin_config") + @patch("mcpgateway.tools.builder.dagger_deploy.generate_compose_manifests") + def test_generate_manifests_compose( + self, mock_compose_gen, mock_plugin_gen, mock_load, tmp_path + ): + """Test generating Docker Compose manifests with Dagger.""" + mock_load.return_value = { + "deployment": {"type": "compose"}, + "gateway": {"image": "mcpgateway:latest"}, + "plugins": [], + } + + stack = MCPStackDagger() + result = stack.generate_manifests("test-config.yaml", output_dir=str(tmp_path)) + + mock_plugin_gen.assert_called_once() + mock_compose_gen.assert_called_once() + assert result == tmp_path + + @patch("mcpgateway.tools.builder.dagger_deploy.get_deploy_dir") + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + def test_generate_manifests_invalid_type(self, mock_load, mock_get_deploy, tmp_path): + """Test 
generating manifests with invalid deployment type.""" + mock_load.return_value = { + "deployment": {"type": "invalid"}, + "gateway": {"image": "mcpgateway:latest"}, + } + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackDagger() + with pytest.raises(ValueError, match="Unsupported deployment type"): + stack.generate_manifests("test-config.yaml") + + +class TestMCPStackDaggerBuildComponent: + """Test MCPStackDagger _build_component_with_dagger method.""" + + @pytest.mark.asyncio + async def test_build_component_basic(self, mock_dagger_connection, tmp_path): + """Test basic component build with Dagger.""" + component = { + "repo": "https://github.com/test/component.git", + "ref": "main", + "context": ".", + "containerfile": "Containerfile", + "image": "test-component:latest", + } + + stack = MCPStackDagger() + await stack._build_component_with_dagger(component, "test-component") + + # Verify Dagger operations were called (using mocks from fixture) + mock_dag = mock_dagger_connection["dag"] + mock_dag.git.assert_called_once() + + # Get the mock git object + mock_git = mock_dag.git.return_value + mock_git.branch.assert_called_with("main") + + # Get the mock tree object + mock_tree = mock_git.tree.return_value + mock_tree.docker_build.assert_called_once() + + @pytest.mark.asyncio + async def test_build_component_with_target(self, mock_dagger_connection, tmp_path): + """Test component build with multi-stage target.""" + component = { + "repo": "https://github.com/test/component.git", + "ref": "main", + "context": ".", + "image": "test:latest", + "target": "production", + } + + stack = MCPStackDagger() + await stack._build_component_with_dagger(component, "test") + + # Verify docker_build was called with target parameter + mock_dag = mock_dagger_connection["dag"] + mock_git = mock_dag.git.return_value + mock_tree = mock_git.tree.return_value + call_args = mock_tree.docker_build.call_args + assert "target" in call_args[1] or call_args[0] + + 
@pytest.mark.asyncio + async def test_build_component_with_env_vars(self, mock_dagger_connection, tmp_path): + """Test component build with environment variables.""" + component = { + "repo": "https://github.com/test/component.git", + "ref": "main", + "image": "test:latest", + "env_vars": {"BUILD_ENV": "production", "VERSION": "1.0"}, + } + + stack = MCPStackDagger() + await stack._build_component_with_dagger(component, "test") + + # Verify docker_build was called + mock_dag = mock_dagger_connection["dag"] + mock_git = mock_dag.git.return_value + mock_tree = mock_git.tree.return_value + mock_tree.docker_build.assert_called_once() diff --git a/tests/unit/mcpgateway/tools/builder/test_python_deploy.py b/tests/unit/mcpgateway/tools/builder/test_python_deploy.py new file mode 100644 index 000000000..3e8981268 --- /dev/null +++ b/tests/unit/mcpgateway/tools/builder/test_python_deploy.py @@ -0,0 +1,468 @@ +# -*- coding: utf-8 -*- +"""Location: ./tests/unit/mcpgateway/tools/builder/test_python_deploy.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Unit tests for plain Python MCP Stack deployment. 
+""" + +# Standard +from pathlib import Path +import subprocess +from unittest.mock import MagicMock, Mock, patch, call + +# Third-Party +import pytest + +# First-Party +from mcpgateway.tools.builder.python_deploy import MCPStackPython + + +class TestMCPStackPython: + """Test MCPStackPython deployment class.""" + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + def test_init_with_docker(self, mock_which): + """Test initialization with Docker runtime.""" + mock_which.return_value = "/usr/bin/docker" + stack = MCPStackPython() + assert stack.container_runtime == "docker" + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + def test_init_with_podman(self, mock_which): + """Test initialization with Podman runtime.""" + + def which_side_effect(cmd): + if cmd == "docker": + return None + elif cmd == "podman": + return "/usr/bin/podman" + return None + + mock_which.side_effect = which_side_effect + stack = MCPStackPython() + assert stack.container_runtime == "podman" + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + def test_init_no_runtime(self, mock_which): + """Test initialization when no container runtime available.""" + mock_which.return_value = None + with pytest.raises(RuntimeError, match="No container runtime found"): + MCPStackPython() + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch.object(MCPStackPython, "_build_component") + @pytest.mark.asyncio + async def test_build_gateway(self, mock_build, mock_load, mock_which): + """Test building gateway container.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = { + "gateway": {"repo": "https://github.com/test/gateway.git", "ref": "main"}, + "plugins": [], + } + + stack = MCPStackPython() + await stack.build("test-config.yaml") + + mock_build.assert_called_once() + assert mock_build.call_args[0][1] == "gateway" + + 
@patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @pytest.mark.asyncio + async def test_build_plugins_only(self, mock_load, mock_which): + """Test building only plugins.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = { + "gateway": {"repo": "https://github.com/test/gateway.git"}, + "plugins": [ + {"name": "Plugin1", "repo": "https://github.com/test/plugin1.git"} + ], + } + + stack = MCPStackPython() + with patch.object(stack, "_build_component") as mock_build: + await stack.build("test-config.yaml", plugins_only=True) + + # Gateway should not be built + calls = [call_args[0][1] for call_args in mock_build.call_args_list] + assert "gateway" not in calls + assert "Plugin1" in calls + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch.object(MCPStackPython, "_build_component") + @pytest.mark.asyncio + async def test_build_specific_plugins(self, mock_build, mock_load, mock_which): + """Test building specific plugins only.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = { + "gateway": {"image": "mcpgateway:latest"}, + "plugins": [ + {"name": "Plugin1", "repo": "https://github.com/test/plugin1.git"}, + {"name": "Plugin2", "repo": "https://github.com/test/plugin2.git"}, + {"name": "Plugin3", "repo": "https://github.com/test/plugin3.git"}, + ], + } + + stack = MCPStackPython() + await stack.build("test-config.yaml", specific_plugins=["Plugin1", "Plugin3"]) + + # Should only build Plugin1 and Plugin3 + calls = [call_args[0][1] for call_args in mock_build.call_args_list] + assert "Plugin1" in calls + assert "Plugin3" in calls + assert "Plugin2" not in calls + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @pytest.mark.asyncio + async def test_build_no_plugins(self, mock_load, 
mock_which): + """Test building when no plugins are defined.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = { + "gateway": {"image": "mcpgateway:latest"}, + "plugins": [], + } + + stack = MCPStackPython() + # Should not raise error + await stack.build("test-config.yaml", plugins_only=True) + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch("mcpgateway.tools.builder.python_deploy.shutil.which", return_value="/usr/bin/make") + @patch.object(MCPStackPython, "_run_command") + @pytest.mark.asyncio + async def test_generate_certificates(self, mock_run, mock_make, mock_load, mock_which_runtime): + """Test certificate generation.""" + mock_which_runtime.return_value = "/usr/bin/docker" + mock_load.return_value = { + "plugins": [ + {"name": "Plugin1"}, + {"name": "Plugin2"}, + ] + } + + stack = MCPStackPython() + await stack.generate_certificates("test-config.yaml") + + # Should call make commands for CA, gateway, and each plugin + assert mock_run.call_count == 4 # CA + gateway + 2 plugins + + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @pytest.mark.asyncio + async def test_generate_certificates_make_not_found(self, mock_load): + """Test certificate generation when make is not available.""" + mock_load.return_value = {"plugins": []} + + # Patch shutil.which to return docker for __init__, then None for make check + with patch("mcpgateway.tools.builder.python_deploy.shutil.which") as mock_which: + # First call returns docker (for __init__), subsequent calls return None (for make check) + mock_which.side_effect = ["/usr/bin/docker", None] + + stack = MCPStackPython(verbose=True) + + with pytest.raises(RuntimeError, match="'make' command not found"): + await stack.generate_certificates("test-config.yaml") + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + 
@patch.object(MCPStackPython, "build") + @patch.object(MCPStackPython, "generate_certificates") + @patch.object(MCPStackPython, "generate_manifests") + @patch.object(MCPStackPython, "_deploy_compose") + @pytest.mark.asyncio + async def test_deploy_compose( + self, mock_deploy, mock_gen_manifests, mock_certs, mock_build, mock_load, mock_which + ): + """Test full compose deployment.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = { + "deployment": {"type": "compose", "project_name": "test"}, + "gateway": {"image": "mcpgateway:latest", "mtls_enabled": True}, + "plugins": [], + } + mock_gen_manifests.return_value = Path("/tmp/manifests") + + stack = MCPStackPython() + await stack.deploy("test-config.yaml") + + mock_build.assert_called_once() + mock_certs.assert_called_once() + mock_gen_manifests.assert_called_once() + mock_deploy.assert_called_once() + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch.object(MCPStackPython, "build") + @patch.object(MCPStackPython, "generate_manifests") + @pytest.mark.asyncio + async def test_deploy_dry_run(self, mock_gen_manifests, mock_build, mock_load, mock_which): + """Test dry-run deployment.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = { + "deployment": {"type": "compose"}, + "gateway": {"image": "mcpgateway:latest"}, + "plugins": [], + } + mock_gen_manifests.return_value = Path("/tmp/manifests") + + stack = MCPStackPython() + await stack.deploy("test-config.yaml", dry_run=True, skip_build=True, skip_certs=True) + + mock_gen_manifests.assert_called_once() + # Should not call actual deployment + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch.object(MCPStackPython, "generate_manifests") + @pytest.mark.asyncio + async def test_deploy_skip_certs_mtls_disabled(self, mock_gen_manifests, mock_load, 
mock_which): + """Test deployment with mTLS disabled.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = { + "deployment": {"type": "compose"}, + "gateway": {"image": "mcpgateway:latest", "mtls_enabled": False}, + "plugins": [], + } + mock_gen_manifests.return_value = Path("/tmp/manifests") + + stack = MCPStackPython() + with patch.object(stack, "generate_certificates") as mock_certs: + await stack.deploy("test-config.yaml", dry_run=True, skip_build=True) + + # Certificates should not be generated + mock_certs.assert_not_called() + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch.object(MCPStackPython, "_verify_kubernetes") + @pytest.mark.asyncio + async def test_verify_kubernetes(self, mock_verify, mock_load, mock_which): + """Test Kubernetes deployment verification.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = { + "deployment": {"type": "kubernetes", "namespace": "test-ns"} + } + + stack = MCPStackPython() + await stack.verify("test-config.yaml") + + mock_verify.assert_called_once() + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch.object(MCPStackPython, "_verify_compose") + @pytest.mark.asyncio + async def test_verify_compose(self, mock_verify, mock_load, mock_which): + """Test Docker Compose deployment verification.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = {"deployment": {"type": "compose"}} + + stack = MCPStackPython() + await stack.verify("test-config.yaml") + + mock_verify.assert_called_once() + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch.object(MCPStackPython, "_destroy_kubernetes") + @pytest.mark.asyncio + async def test_destroy_kubernetes(self, mock_destroy, mock_load, mock_which): + """Test Kubernetes 
deployment destruction.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = {"deployment": {"type": "kubernetes"}} + + stack = MCPStackPython() + await stack.destroy("test-config.yaml") + + mock_destroy.assert_called_once() + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch.object(MCPStackPython, "_destroy_compose") + @pytest.mark.asyncio + async def test_destroy_compose(self, mock_destroy, mock_load, mock_which): + """Test Docker Compose deployment destruction.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = {"deployment": {"type": "compose"}} + + stack = MCPStackPython() + await stack.destroy("test-config.yaml") + + mock_destroy.assert_called_once() + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch("mcpgateway.tools.builder.python_deploy.generate_plugin_config") + @patch("mcpgateway.tools.builder.python_deploy.generate_kubernetes_manifests") + def test_generate_manifests_kubernetes( + self, mock_k8s_gen, mock_plugin_gen, mock_load, mock_which, tmp_path + ): + """Test generating Kubernetes manifests.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = { + "deployment": {"type": "kubernetes", "namespace": "test-ns"}, + "gateway": {"image": "mcpgateway:latest"}, + "plugins": [], + } + + stack = MCPStackPython() + result = stack.generate_manifests("test-config.yaml", output_dir=str(tmp_path)) + + mock_plugin_gen.assert_called_once() + mock_k8s_gen.assert_called_once() + assert result == tmp_path + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch("mcpgateway.tools.builder.python_deploy.generate_plugin_config") + @patch("mcpgateway.tools.builder.python_deploy.generate_compose_manifests") + def test_generate_manifests_compose( + self, 
mock_compose_gen, mock_plugin_gen, mock_load, mock_which, tmp_path + ): + """Test generating Docker Compose manifests.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = { + "deployment": {"type": "compose"}, + "gateway": {"image": "mcpgateway:latest"}, + "plugins": [], + } + + stack = MCPStackPython() + result = stack.generate_manifests("test-config.yaml", output_dir=str(tmp_path)) + + mock_plugin_gen.assert_called_once() + mock_compose_gen.assert_called_once() + assert result == tmp_path + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch("mcpgateway.tools.builder.python_deploy.get_deploy_dir") + def test_generate_manifests_invalid_type(self, mock_get_deploy, mock_load, mock_which, tmp_path): + """Test generating manifests with invalid deployment type.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = { + "deployment": {"type": "invalid"}, + "gateway": {"image": "mcpgateway:latest"}, + } + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackPython() + with pytest.raises(ValueError, match="Unsupported deployment type"): + stack.generate_manifests("test-config.yaml") + + +class TestBuildComponent: + """Test _build_component method.""" + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch.object(MCPStackPython, "_run_command") + def test_build_component_clone_new(self, mock_run, mock_which, tmp_path): + """Test building component with new git clone.""" + mock_which.return_value = "/usr/bin/docker" + component = { + "repo": "https://github.com/test/component.git", + "ref": "main", + "context": ".", + "image": "test-component:latest", + } + + # Create Containerfile in expected location + build_dir = tmp_path / "build" / "test-component" + build_dir.mkdir(parents=True) + (build_dir / "Containerfile").write_text("FROM alpine\n") + + stack = MCPStackPython() + + with 
patch("mcpgateway.tools.builder.python_deploy.Path") as mock_path_class: + mock_path_class.return_value = tmp_path / "build" / "test-component" + # Mock the path checks + with patch.object(Path, "exists", return_value=True): + with patch.object(Path, "__truediv__", return_value=build_dir / "Containerfile"): + stack._build_component(component, "test-component") + + # Verify git clone was called + clone_calls = [c for c in mock_run.call_args_list if "git" in str(c) and "clone" in str(c)] + assert len(clone_calls) > 0 + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + def test_build_component_no_repo(self, mock_which): + """Test building component without repo field.""" + mock_which.return_value = "/usr/bin/docker" + component = {"image": "test:latest"} + + stack = MCPStackPython() + with pytest.raises(ValueError, match="has no 'repo' field"): + stack._build_component(component, "test-component") + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch.object(MCPStackPython, "_run_command") + def test_build_component_with_target(self, mock_run, mock_which, tmp_path): + """Test building component with multi-stage target.""" + mock_which.return_value = "/usr/bin/docker" + component = { + "repo": "https://github.com/test/component.git", + "ref": "main", + "image": "test:latest", + "target": "production", + } + + build_dir = tmp_path / "build" / "test" + build_dir.mkdir(parents=True) + (build_dir / "Containerfile").write_text("FROM alpine\n") + + stack = MCPStackPython() + + with patch("mcpgateway.tools.builder.python_deploy.Path") as mock_path_class: + mock_path_class.return_value = build_dir + with patch.object(Path, "exists", return_value=True): + with patch.object(Path, "__truediv__", return_value=build_dir / "Containerfile"): + stack._build_component(component, "test") + + # Verify --target was included in build command + build_calls = [c for c in mock_run.call_args_list if "docker" in str(c) and "build" in str(c)] + assert 
len(build_calls) > 0 + + +class TestRunCommand: + """Test _run_command method.""" + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.subprocess.run") + def test_run_command_success(self, mock_run, mock_which): + """Test successful command execution.""" + mock_which.return_value = "/usr/bin/docker" + mock_run.return_value = Mock(returncode=0, stdout="Success", stderr="") + + stack = MCPStackPython() + result = stack._run_command(["echo", "test"]) + + assert result.returncode == 0 + mock_run.assert_called_once() + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.subprocess.run") + def test_run_command_failure(self, mock_run, mock_which): + """Test command execution failure.""" + mock_which.return_value = "/usr/bin/docker" + mock_run.side_effect = subprocess.CalledProcessError(1, "cmd") + + stack = MCPStackPython() + with pytest.raises(subprocess.CalledProcessError): + stack._run_command(["false"]) + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.subprocess.run") + def test_run_command_with_cwd(self, mock_run, mock_which, tmp_path): + """Test command execution with working directory.""" + mock_which.return_value = "/usr/bin/docker" + mock_run.return_value = Mock(returncode=0) + + stack = MCPStackPython() + stack._run_command(["ls"], cwd=tmp_path) + + assert mock_run.call_args[1]["cwd"] == tmp_path From 5d3d8c6e0aa21ac7d975c9af645decd55612ab9f Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Tue, 14 Oct 2025 10:29:33 -0600 Subject: [PATCH 21/35] fix: change to make python the default. 
Signed-off-by: Teryl Taylor --- docs/docs/deployment/cforge-gateway.md | 98 ++++++++++--------- mcpgateway/tools/builder/cli.py | 25 ++--- mcpgateway/tools/builder/factory.py | 6 +- .../test_translate_stdio_endpoint.py | 2 +- .../unit/mcpgateway/tools/builder/test_cli.py | 35 +++++-- 5 files changed, 99 insertions(+), 67 deletions(-) diff --git a/docs/docs/deployment/cforge-gateway.md b/docs/docs/deployment/cforge-gateway.md index a90f45177..88cdd3e86 100644 --- a/docs/docs/deployment/cforge-gateway.md +++ b/docs/docs/deployment/cforge-gateway.md @@ -467,19 +467,22 @@ These options apply to all commands: | Option | Description | Default | |--------|-------------|---------| -| `--no-dagger` | Force plain Python mode (skip Dagger) | Auto-detect | +| `--dagger` | Enable Dagger mode (auto-downloads CLI if needed) | `false` (uses plain Python) | | `--verbose`, `-v` | Verbose output | `false` | **Examples:** ```bash -# Use plain Python instead of Dagger -cforge gateway --no-dagger deploy deploy.yaml +# Use plain Python mode (default) +cforge gateway deploy deploy.yaml + +# Enable Dagger mode for optimized builds +cforge gateway --dagger deploy deploy.yaml # Verbose mode cforge gateway -v build deploy.yaml # Combine options -cforge gateway --no-dagger -v deploy deploy.yaml +cforge gateway --dagger -v deploy deploy.yaml ``` --- @@ -1053,7 +1056,38 @@ Then redeploy to distribute new certificates. ## Deployment Modes -### Dagger Mode (Recommended) +### Plain Python Mode (Default) + +**What is it?** +Pure Python implementation using standard tools (`docker`, `kubectl`, `git`, etc.). This is the **default mode** to avoid automatic downloads. 
+ +**When to use:** +- βœ… Default choice (no surprises) +- βœ… Environments without Dagger support +- βœ… Air-gapped networks +- βœ… Simple deployments +- βœ… Debugging/troubleshooting + +**Requirements:** +- Python 3.11+ +- Docker CLI +- `kubectl` (for Kubernetes deployments) +- `git` (for building from source) + +**Usage:** +```bash +# Plain Python mode (default, no flag needed) +cforge gateway deploy deploy.yaml +``` + +**Characteristics:** +- Sequential builds +- Standard caching +- No external dependencies beyond Docker/kubectl + +--- + +### Dagger Mode (Opt-in) **What is Dagger?** Dagger is a programmable CI/CD engine that runs pipelines in containers. It provides: @@ -1066,20 +1100,20 @@ Dagger is a programmable CI/CD engine that runs pipelines in containers. It prov - βœ… Local development (fastest builds) - βœ… CI/CD pipelines (GitHub Actions, GitLab CI, etc.) - βœ… Team environments (consistent results) +- βœ… When you want optimized build performance **Requirements:** - Docker or compatible container runtime -- Dagger CLI (auto-installed with pip package) -- Internet connection (for first run) +- `dagger-io` Python package (optional, installed separately) +- **Note**: First use will auto-download the Dagger CLI (~100MB) **Enable:** ```bash -# Auto-detected by default -cforge gateway deploy deploy.yaml +# Install dagger-io package first +pip install dagger-io -# Or explicitly -export USE_DAGGER=true -cforge gateway deploy deploy.yaml +# Use Dagger mode (opt-in with --dagger flag) +cforge gateway --dagger deploy deploy.yaml ``` **Performance benefits:** @@ -1087,35 +1121,7 @@ cforge gateway deploy deploy.yaml - Parallel plugin builds - Efficient layer reuse ---- - -### Plain Python Mode (Fallback) - -**What is it?** -Pure Python implementation using standard tools (`docker`, `kubectl`, `git`, etc.) 
- -**When to use:** -- βœ… Environments without Dagger support -- βœ… Air-gapped networks -- βœ… Simple deployments -- βœ… Debugging/troubleshooting - -**Requirements:** -- Python 3.11+ -- Docker CLI -- `kubectl` (for Kubernetes deployments) -- `git` (for building from source) - -**Enable:** -```bash -# Force plain Python mode -cforge gateway --no-dagger deploy deploy.yaml -``` - -**Limitations:** -- Sequential builds (slower) -- Less sophisticated caching -- No parallel execution +**Important**: Using `--dagger` will automatically download the Dagger CLI binary on first use if not already present. Use plain Python mode if you want to avoid automatic downloads --- @@ -1451,13 +1457,15 @@ A: Absolutely! See [CI/CD Integration](#cicd-integration) section above. A: ```bash -# Force plain Python -cforge gateway --no-dagger deploy deploy.yaml - -# Use Dagger (default if available) +# Plain Python mode (default) cforge gateway deploy deploy.yaml + +# Dagger mode (opt-in, requires dagger-io package) +cforge gateway --dagger deploy deploy.yaml ``` +**Note**: Dagger mode requires installing the `dagger-io` package and will auto-download the Dagger CLI (~100MB) on first use + --- **Q: Where are the generated manifests stored?** diff --git a/mcpgateway/tools/builder/cli.py b/mcpgateway/tools/builder/cli.py index fa389b1f8..19d38f029 100644 --- a/mcpgateway/tools/builder/cli.py +++ b/mcpgateway/tools/builder/cli.py @@ -6,17 +6,16 @@ MCP Stack Deployment Tool - Hybrid Dagger/Python Implementation -This script can run in three modes: -1. Local with Dagger (optimal performance) -2. Local without Dagger (plain Python fallback) -3. Inside builder container (all tools included) +This script can run in two modes: +1. Plain Python mode (default) - No external dependencies +2. 
Dagger mode (opt-in) - Requires dagger-io package, auto-downloads CLI Usage: - # Local execution (auto-detects Dagger) + # Local execution (plain Python mode) cforge deploy deploy.yaml - # Force plain Python mode - cforge --no-dagger deploy deploy.yaml + # Use Dagger mode for optimization (requires dagger-io, auto-downloads CLI) + cforge --dagger deploy deploy.yaml # Inside container docker run -v $PWD:/workspace mcpgateway/mcp-builder:latest deploy deploy.yaml @@ -61,25 +60,27 @@ @app.callback() def cli( ctx: typer.Context, - no_dagger: Annotated[bool, typer.Option("--no-dagger", help="Force plain Python mode (skip Dagger)")] = False, + dagger: Annotated[bool, typer.Option("--dagger", help="Use Dagger mode (requires dagger-io package)")] = False, verbose: Annotated[bool, typer.Option("--verbose", "-v", help="Verbose output")] = False, ): """MCP Stack deployment tool Deploys MCP Gateway + external plugins from a single YAML configuration. + By default, uses plain Python mode. Use --dagger to enable Dagger optimization. 
+ Args: ctx: Typer context object - no_dagger: Force plain Python mode instead of Dagger + dagger: Enable Dagger mode (requires dagger-io package and auto-downloads CLI) verbose: Enable verbose output """ ctx.ensure_object(dict) ctx.obj["verbose"] = verbose - ctx.obj["no_dagger"] = no_dagger + ctx.obj["dagger"] = dagger if ctx.invoked_subcommand != "version": - # Show execution mode - mode = "python" if no_dagger else "dagger" + # Show execution mode - default to Python, opt-in to Dagger + mode = "dagger" if dagger else "python" ctx.obj["deployer"], ctx.obj["mode"] = DeployFactory.create_deployer(mode, verbose) mode_color = "green" if ctx.obj["mode"] == "dagger" else "yellow" env_text = "container" if IN_CONTAINER else "local" diff --git a/mcpgateway/tools/builder/factory.py b/mcpgateway/tools/builder/factory.py index a814ecda7..23f3ee875 100644 --- a/mcpgateway/tools/builder/factory.py +++ b/mcpgateway/tools/builder/factory.py @@ -81,7 +81,11 @@ def create_deployer(deployer: str, verbose: bool = False) -> tuple[CICDModule, C if deployer == "dagger": try: # First-Party - from mcpgateway.tools.builder.dagger_deploy import MCPStackDagger + from mcpgateway.tools.builder.dagger_deploy import DAGGER_AVAILABLE, MCPStackDagger + + # Check if dagger is actually available (not just the module) + if not DAGGER_AVAILABLE: + raise ImportError("Dagger SDK not installed") if verbose: console.print("[green]βœ“ Dagger module loaded[/green]") diff --git a/tests/unit/mcpgateway/test_translate_stdio_endpoint.py b/tests/unit/mcpgateway/test_translate_stdio_endpoint.py index 8d0d161ac..ff391e052 100644 --- a/tests/unit/mcpgateway/test_translate_stdio_endpoint.py +++ b/tests/unit/mcpgateway/test_translate_stdio_endpoint.py @@ -286,7 +286,7 @@ async def test_empty_env_vars(self, echo_script): await endpoint.send("hello world\n") # Wait for response - await asyncio.sleep(0.1) + await asyncio.sleep(0.5) # Check that process was started assert endpoint._proc is not None diff --git 
a/tests/unit/mcpgateway/tools/builder/test_cli.py b/tests/unit/mcpgateway/tools/builder/test_cli.py index 541d3a99d..5328f03c3 100644 --- a/tests/unit/mcpgateway/tools/builder/test_cli.py +++ b/tests/unit/mcpgateway/tools/builder/test_cli.py @@ -46,34 +46,53 @@ class TestCLICallback: @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") def test_cli_callback_default(self, mock_factory, runner): - """Test CLI callback with default options.""" + """Test CLI callback with default options (Python mode by default).""" mock_deployer = MagicMock() - mock_factory.return_value = (mock_deployer, "dagger") + mock_factory.return_value = (mock_deployer, "python") result = runner.invoke(app, ["--help"]) assert result.exit_code == 0 @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") def test_cli_callback_verbose(self, mock_factory, runner): - """Test CLI callback with verbose flag.""" + """Test CLI callback with verbose flag (Python mode by default).""" mock_deployer = MagicMock() - mock_factory.return_value = (mock_deployer, "dagger") + mock_factory.return_value = (mock_deployer, "python") result = runner.invoke(app, ["--verbose", "--help"]) assert result.exit_code == 0 @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") - def test_cli_callback_no_dagger(self, mock_factory, runner, tmp_path): - """Test CLI callback with --no-dagger flag.""" + def test_cli_callback_with_dagger(self, mock_factory, runner, tmp_path): + """Test CLI callback with --dagger flag (opt-in).""" + mock_deployer = MagicMock() + mock_deployer.validate = MagicMock() + mock_factory.return_value = (mock_deployer, "dagger") + + config_file = tmp_path / "test-config.yaml" + config_file.write_text("deployment:\n type: compose\n") + + # Use validate command which invokes the callback + result = runner.invoke(app, ["--dagger", "validate", str(config_file)]) + assert result.exit_code == 0 + # Verify dagger mode was requested + 
mock_factory.assert_called_once_with("dagger", False) + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_cli_callback_default_python(self, mock_factory, runner, tmp_path): + """Test CLI callback defaults to Python mode.""" mock_deployer = MagicMock() + mock_deployer.validate = MagicMock() mock_factory.return_value = (mock_deployer, "python") config_file = tmp_path / "test-config.yaml" config_file.write_text("deployment:\n type: compose\n") - # Use a command that actually invokes the callback (not --help) - result = runner.invoke(app, ["--no-dagger", "version"]) + # Use validate command without --dagger flag to test default + result = runner.invoke(app, ["validate", str(config_file)]) assert result.exit_code == 0 + # Verify python mode was requested (default) + mock_factory.assert_called_once_with("python", False) class TestValidateCommand: From ba0888f5fc220720bd862ca4996c614bd699c517 Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Tue, 14 Oct 2025 10:48:46 -0600 Subject: [PATCH 22/35] fix: bandit issue. 
Signed-off-by: Teryl Taylor --- mcpgateway/tools/builder/common.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mcpgateway/tools/builder/common.py b/mcpgateway/tools/builder/common.py index 94f723d3f..797b64524 100644 --- a/mcpgateway/tools/builder/common.py +++ b/mcpgateway/tools/builder/common.py @@ -99,7 +99,7 @@ def generate_plugin_config(config: Dict[str, Any], output_dir: Path, verbose: bo if not template_dir.exists(): raise FileNotFoundError(f"Template directory not found: {template_dir}") - env = Environment(loader=FileSystemLoader(str(template_dir))) + env = Environment(loader=FileSystemLoader(str(template_dir)), autoescape=True) # nosec B701 template = env.get_template("plugins-config.yaml.j2") # Prepare plugin data with computed URLs @@ -173,7 +173,7 @@ def generate_kubernetes_manifests(config: Dict[str, Any], output_dir: Path, verb # Auto-detect and assign env files if not specified _auto_detect_env_files(config, output_dir, verbose=verbose) - env = Environment(loader=FileSystemLoader(str(template_dir))) + env = Environment(loader=FileSystemLoader(str(template_dir)), autoescape=True) # nosec B701 # Generate namespace namespace = config["deployment"].get("namespace", "mcp-gateway") @@ -354,7 +354,7 @@ def generate_compose_manifests(config: Dict[str, Any], output_dir: Path, verbose "plugins_cert_base": os.path.join(certs_rel_base, "mcp/plugins"), } - env = Environment(loader=FileSystemLoader(str(template_dir))) + env = Environment(loader=FileSystemLoader(str(template_dir)), autoescape=True) # nosec B701 # Generate compose file compose_template = env.get_template("docker-compose.yaml.j2") From 90c531ee75c8109c32240fdfd9eba787cc9572e5 Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Tue, 14 Oct 2025 11:04:29 -0600 Subject: [PATCH 23/35] fix: updated key length to 4096 Signed-off-by: Teryl Taylor --- Makefile | 4 ++-- docs/docs/using/plugins/mtls.md | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Makefile 
b/Makefile index 698681651..8baf53861 100644 --- a/Makefile +++ b/Makefile @@ -262,7 +262,7 @@ certs-mcp-gateway: certs-mcp-ca ## Generate gateway client certificate else \ echo "πŸ” Generating gateway client certificate ($(MCP_CERT_DAYS) days)..."; \ mkdir -p certs/mcp/gateway; \ - openssl genrsa -out certs/mcp/gateway/client.key 2048; \ + openssl genrsa -out certs/mcp/gateway/client.key 4096; \ openssl req -new -key certs/mcp/gateway/client.key \ -out certs/mcp/gateway/client.csr \ -subj "/CN=mcp-gateway-client/O=MCPGateway/OU=Gateway"; \ @@ -290,7 +290,7 @@ certs-mcp-plugin: certs-mcp-ca ## Generate plugin server certificate (PLUGIN_N else \ echo "πŸ” Generating server certificate for plugin '$(PLUGIN_NAME)' ($(MCP_CERT_DAYS) days)..."; \ mkdir -p certs/mcp/plugins/$(PLUGIN_NAME); \ - openssl genrsa -out certs/mcp/plugins/$(PLUGIN_NAME)/server.key 2048; \ + openssl genrsa -out certs/mcp/plugins/$(PLUGIN_NAME)/server.key 4096; \ openssl req -new -key certs/mcp/plugins/$(PLUGIN_NAME)/server.key \ -out certs/mcp/plugins/$(PLUGIN_NAME)/server.csr \ -subj "/CN=mcp-plugin-$(PLUGIN_NAME)/O=MCPGateway/OU=Plugins"; \ diff --git a/docs/docs/using/plugins/mtls.md b/docs/docs/using/plugins/mtls.md index 83a532998..63d80fe37 100644 --- a/docs/docs/using/plugins/mtls.md +++ b/docs/docs/using/plugins/mtls.md @@ -117,7 +117,7 @@ Generate the gateway client certificate used by the MCP Gateway to authenticate **What it does**: - Depends on `certs-mcp-ca` (creates CA if needed) -- Creates `certs/mcp/gateway/client.key` (2048-bit RSA private key) +- Creates `certs/mcp/gateway/client.key` (4096-bit RSA private key) - Creates `certs/mcp/gateway/client.crt` (client certificate signed by CA) - Copies `ca.crt` to `certs/mcp/gateway/` @@ -191,7 +191,7 @@ make certs-mcp-check All certificates generated include: - **Algorithm**: RSA with SHA-256 - **CA Key Size**: 4096 bits -- **Client/Server Key Size**: 2048 bits +- **Client/Server Key Size**: 4096 bits - **Default Validity**: 825 days 
- **Subject Alternative Names** (plugins): DNS entries for plugin name and localhost From 0ed34fe645ac37db6dbac7a94a4ee3125d89b739 Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Tue, 14 Oct 2025 14:00:31 -0600 Subject: [PATCH 24/35] fix: utility function for verifying certificates. Signed-off-by: Teryl Taylor --- .../framework/external/mcp/tls_utils.py | 128 ++++++++++++++++++ 1 file changed, 128 insertions(+) create mode 100644 mcpgateway/plugins/framework/external/mcp/tls_utils.py diff --git a/mcpgateway/plugins/framework/external/mcp/tls_utils.py b/mcpgateway/plugins/framework/external/mcp/tls_utils.py new file mode 100644 index 000000000..017ae523b --- /dev/null +++ b/mcpgateway/plugins/framework/external/mcp/tls_utils.py @@ -0,0 +1,128 @@ +# -*- coding: utf-8 -*- +"""Location: ./mcpgateway/plugins/framework/external/mcp/tls_utils.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +TLS/SSL utility functions for external MCP plugin connections. + +This module provides utilities for creating and configuring SSL contexts for +secure communication with external MCP plugin servers. It implements the +certificate validation logic that is tested in test_client_certificate_validation.py. +""" + +# Standard +import logging +import ssl + +# First-Party +from mcpgateway.plugins.framework.errors import PluginError +from mcpgateway.plugins.framework.models import MCPClientTLSConfig, PluginErrorModel + +logger = logging.getLogger(__name__) + + +def create_ssl_context(tls_config: MCPClientTLSConfig, plugin_name: str) -> ssl.SSLContext: + """Create and configure an SSL context for external plugin connections. + + This function implements the SSL/TLS security configuration for connecting to + external MCP plugin servers. It supports both standard TLS and mutual TLS (mTLS) + authentication. + + Security Features Implemented (per Python ssl docs and OpenSSL): + + 1. 
**Invalid Certificate Rejection**: ssl.create_default_context() with CERT_REQUIRED + automatically validates certificate signatures and chains via OpenSSL. + + 2. **Expired Certificate Handling**: OpenSSL automatically checks notBefore and + notAfter fields per RFC 5280 Section 6. Expired or not-yet-valid certificates + are rejected during the handshake. + + 3. **Certificate Chain Validation**: Full chain validation up to a trusted CA. + Each certificate in the chain is verified for validity period, signature, etc. + + 4. **Hostname Verification**: When check_hostname is enabled, the certificate's + Subject Alternative Name (SAN) or Common Name (CN) must match the hostname. + + 5. **MITM Prevention**: Via mutual authentication when client certificates are + provided (mTLS mode). + + Args: + tls_config: TLS configuration containing CA bundle, client certs, and verification settings + plugin_name: Name of the plugin (for error messages) + + Returns: + Configured SSLContext ready for use with httpx or other SSL connections + + Raises: + PluginError: If SSL context configuration fails + + Example: + >>> tls_config = MCPClientTLSConfig( + ... ca_bundle="/path/to/ca.crt", + ... certfile="/path/to/client.crt", + ... keyfile="/path/to/client.key", + ... verify=True, + ... check_hostname=True + ... ) + >>> ssl_context = create_ssl_context(tls_config, "MyPlugin") + >>> # Use ssl_context with httpx or other SSL connections + """ + try: + # Create SSL context with secure defaults + # Per Python docs: "The settings are chosen by the ssl module, and usually + # represent a higher security level than when calling the SSLContext + # constructor directly." 
+ # This sets verify_mode to CERT_REQUIRED by default, which enables: + # - Certificate signature validation + # - Certificate chain validation up to trusted CA + # - Automatic expiration checking (notBefore/notAfter per RFC 5280) + ssl_context = ssl.create_default_context() + + if not tls_config.verify: + # Disable certificate verification (not recommended for production) + logger.warning(f"Certificate verification disabled for plugin '{plugin_name}'. " "This is not recommended for production use.") + ssl_context.check_hostname = False + ssl_context.verify_mode = ssl.CERT_NONE # noqa: DUO122 + else: + # Enable strict certificate verification (production mode) + # Load CA certificate bundle for server certificate validation + if tls_config.ca_bundle: + # This CA bundle will be used to validate the server's certificate + # OpenSSL will check: + # - Certificate is signed by a trusted CA in this bundle + # - Certificate hasn't expired (notAfter > now) + # - Certificate is already valid (notBefore < now) + # - Certificate chain is complete and valid + ssl_context.load_verify_locations(cafile=tls_config.ca_bundle) + + # Hostname verification + # When enabled, certificate's SAN or CN must match the server hostname + if not tls_config.check_hostname: + logger.warning(f"Hostname verification disabled for plugin '{plugin_name}'. 
" "This increases risk of MITM attacks.") + ssl_context.check_hostname = False + + # Load client certificate for mTLS (mutual authentication) + # If provided, the client will authenticate itself to the server + if tls_config.certfile: + ssl_context.load_cert_chain( + certfile=tls_config.certfile, + keyfile=tls_config.keyfile, + password=tls_config.keyfile_password, + ) + logger.debug(f"mTLS enabled for plugin '{plugin_name}' with client certificate: {tls_config.certfile}") + + # Log security configuration + logger.debug( + f"SSL context created for plugin '{plugin_name}': " + f"verify_mode={ssl_context.verify_mode}, " + f"check_hostname={ssl_context.check_hostname}, " + f"minimum_version={ssl_context.minimum_version}" + ) + + return ssl_context + + except Exception as exc: + error_msg = f"Failed to configure SSL context for plugin '{plugin_name}': {exc}" + logger.error(error_msg) + raise PluginError(error=PluginErrorModel(message=error_msg, plugin_name=plugin_name)) from exc From b4b55cc7e21ed0098fe56abf2ca5a49f29cead8c Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Tue, 14 Oct 2025 20:24:27 -0600 Subject: [PATCH 25/35] fix: added utility class for ssl certificate verification. 
Signed-off-by: Teryl Taylor --- .../plugins/framework/external/mcp/client.py | 26 ++++--------------- 1 file changed, 5 insertions(+), 21 deletions(-) diff --git a/mcpgateway/plugins/framework/external/mcp/client.py b/mcpgateway/plugins/framework/external/mcp/client.py index 009227777..d0666eaa2 100644 --- a/mcpgateway/plugins/framework/external/mcp/client.py +++ b/mcpgateway/plugins/framework/external/mcp/client.py @@ -28,6 +28,7 @@ from mcpgateway.plugins.framework.base import Plugin from mcpgateway.plugins.framework.constants import CONTEXT, ERROR, GET_PLUGIN_CONFIG, IGNORE_CONFIG_EXTERNAL, NAME, PAYLOAD, PLUGIN_NAME, PYTHON, PYTHON_SUFFIX, RESULT from mcpgateway.plugins.framework.errors import convert_exception_to_error, PluginError +from mcpgateway.plugins.framework.external.mcp.tls_utils import create_ssl_context from mcpgateway.plugins.framework.models import ( HookType, MCPClientTLSConfig, @@ -178,27 +179,10 @@ def _tls_httpx_client_factory( if not tls_config: return httpx.AsyncClient(**kwargs) - try: - ssl_context = ssl.create_default_context() - if not tls_config.verify: - ssl_context.check_hostname = False - ssl_context.verify_mode = ssl.CERT_NONE # noqa: DUO122 - else: - if tls_config.ca_bundle: - ssl_context.load_verify_locations(cafile=tls_config.ca_bundle) - if not tls_config.check_hostname: - ssl_context.check_hostname = False - - if tls_config.certfile: - ssl_context.load_cert_chain( - certfile=tls_config.certfile, - keyfile=tls_config.keyfile, - password=tls_config.keyfile_password, - ) - - kwargs["verify"] = ssl_context - except Exception as exc: # pylint: disable=broad-except - raise PluginError(error=PluginErrorModel(message=f"Failed configuring TLS for external plugin: {exc}", plugin_name=self.name)) from exc + # Create SSL context using the utility function + # This implements certificate validation per test_client_certificate_validation.py + ssl_context = create_ssl_context(tls_config, self.name) + kwargs["verify"] = ssl_context return 
httpx.AsyncClient(**kwargs) From 054a5996372496c3c931813b65b9015c700f6148 Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Tue, 14 Oct 2025 20:25:10 -0600 Subject: [PATCH 26/35] test: added certificate validation tests. Signed-off-by: Teryl Taylor --- .../mcp/test_client_certificate_validation.py | 454 ++++++++++++++++++ 1 file changed, 454 insertions(+) create mode 100644 tests/unit/mcpgateway/plugins/framework/external/mcp/test_client_certificate_validation.py diff --git a/tests/unit/mcpgateway/plugins/framework/external/mcp/test_client_certificate_validation.py b/tests/unit/mcpgateway/plugins/framework/external/mcp/test_client_certificate_validation.py new file mode 100644 index 000000000..41d85c8cc --- /dev/null +++ b/tests/unit/mcpgateway/plugins/framework/external/mcp/test_client_certificate_validation.py @@ -0,0 +1,454 @@ +# -*- coding: utf-8 -*- +"""Location: ./tests/unit/mcpgateway/plugins/framework/external/mcp/test_client_certificate_validation.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Tests for TLS/mTLS certificate validation in external plugin client. +""" + +# Standard +import datetime +import ssl +from pathlib import Path +from unittest.mock import Mock, patch + +# Third-Party +from cryptography import x509 +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import rsa +from cryptography.x509.oid import ExtensionOID, NameOID +import httpx +import pytest + +# First-Party +from mcpgateway.plugins.framework.external.mcp.tls_utils import create_ssl_context +from mcpgateway.plugins.framework.models import MCPClientTLSConfig + + +def generate_self_signed_cert(tmp_path: Path, common_name: str = "localhost", expired: bool = False) -> tuple[Path, Path]: + """Generate a self-signed certificate for testing. 
+ + Args: + tmp_path: Temporary directory path + common_name: Common name for the certificate + expired: If True, create an already-expired certificate + + Returns: + Tuple of (cert_path, key_path) + """ + # Generate private key + private_key = rsa.generate_private_key(public_exponent=65537, key_size=4096, backend=default_backend()) + + # Certificate validity period + if expired: + # Create an expired certificate (valid from 2 years ago to 1 year ago) + not_valid_before = datetime.datetime.now(tz=datetime.timezone.utc) - datetime.timedelta(days=730) + not_valid_after = datetime.datetime.now(tz=datetime.timezone.utc) - datetime.timedelta(days=365) + else: + # Create a valid certificate (valid from now for 365 days) + not_valid_before = datetime.datetime.now(tz=datetime.timezone.utc) + not_valid_after = datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta(days=365) + + # Create certificate + subject = issuer = x509.Name( + [ + x509.NameAttribute(NameOID.COUNTRY_NAME, "US"), + x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, "California"), + x509.NameAttribute(NameOID.LOCALITY_NAME, "San Francisco"), + x509.NameAttribute(NameOID.ORGANIZATION_NAME, "Test Org"), + x509.NameAttribute(NameOID.COMMON_NAME, common_name), + ] + ) + + cert = ( + x509.CertificateBuilder() + .subject_name(subject) + .issuer_name(issuer) + .public_key(private_key.public_key()) + .serial_number(x509.random_serial_number()) + .not_valid_before(not_valid_before) + .not_valid_after(not_valid_after) + .add_extension( + x509.SubjectAlternativeName([x509.DNSName(common_name)]), + critical=False, + ) + .sign(private_key, hashes.SHA256(), default_backend()) + ) + + # Write certificate + cert_path = tmp_path / f"{common_name}_cert.pem" + cert_path.write_bytes(cert.public_bytes(serialization.Encoding.PEM)) + + # Write private key + key_path = tmp_path / f"{common_name}_key.pem" + key_path.write_bytes( + private_key.private_bytes( + encoding=serialization.Encoding.PEM, + 
format=serialization.PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=serialization.NoEncryption(), + ) + ) + + return cert_path, key_path + + +def generate_ca_and_signed_cert(tmp_path: Path, common_name: str = "localhost") -> tuple[Path, Path, Path]: + """Generate a CA certificate and a certificate signed by that CA. + + Args: + tmp_path: Temporary directory path + common_name: Common name for the server certificate + + Returns: + Tuple of (ca_cert_path, server_cert_path, server_key_path) + """ + # Generate CA private key + ca_key = rsa.generate_private_key(public_exponent=65537, key_size=4096, backend=default_backend()) + + # Create CA certificate + ca_subject = x509.Name( + [ + x509.NameAttribute(NameOID.COUNTRY_NAME, "US"), + x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, "California"), + x509.NameAttribute(NameOID.LOCALITY_NAME, "San Francisco"), + x509.NameAttribute(NameOID.ORGANIZATION_NAME, "Test CA"), + x509.NameAttribute(NameOID.COMMON_NAME, "Test CA"), + ] + ) + + ca_cert = ( + x509.CertificateBuilder() + .subject_name(ca_subject) + .issuer_name(ca_subject) + .public_key(ca_key.public_key()) + .serial_number(x509.random_serial_number()) + .not_valid_before(datetime.datetime.now(tz=datetime.timezone.utc)) + .not_valid_after(datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta(days=3650)) + .add_extension( + x509.BasicConstraints(ca=True, path_length=None), + critical=True, + ) + .sign(ca_key, hashes.SHA256(), default_backend()) + ) + + # Generate server private key + server_key = rsa.generate_private_key(public_exponent=65537, key_size=4096, backend=default_backend()) + + # Create server certificate signed by CA + server_subject = x509.Name( + [ + x509.NameAttribute(NameOID.COUNTRY_NAME, "US"), + x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, "California"), + x509.NameAttribute(NameOID.LOCALITY_NAME, "San Francisco"), + x509.NameAttribute(NameOID.ORGANIZATION_NAME, "Test Server"), + x509.NameAttribute(NameOID.COMMON_NAME, 
common_name), + ] + ) + + server_cert = ( + x509.CertificateBuilder() + .subject_name(server_subject) + .issuer_name(ca_subject) + .public_key(server_key.public_key()) + .serial_number(x509.random_serial_number()) + .not_valid_before(datetime.datetime.now(tz=datetime.timezone.utc)) + .not_valid_after(datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta(days=365)) + .add_extension( + x509.SubjectAlternativeName([x509.DNSName(common_name)]), + critical=False, + ) + .sign(ca_key, hashes.SHA256(), default_backend()) + ) + + # Write CA certificate + ca_cert_path = tmp_path / "ca_cert.pem" + ca_cert_path.write_bytes(ca_cert.public_bytes(serialization.Encoding.PEM)) + + # Write server certificate + server_cert_path = tmp_path / f"{common_name}_cert.pem" + server_cert_path.write_bytes(server_cert.public_bytes(serialization.Encoding.PEM)) + + # Write server private key + server_key_path = tmp_path / f"{common_name}_key.pem" + server_key_path.write_bytes( + server_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=serialization.NoEncryption(), + ) + ) + + return ca_cert_path, server_cert_path, server_key_path + + +def test_ssl_context_configured_for_certificate_validation(tmp_path): + """Test that create_ssl_context() configures SSL context for certificate validation. + + This validates that the SSL context is configured with CERT_REQUIRED mode, + which will reject invalid certificates (like self-signed certs) during + TLS handshake. + + This test validates the actual production code path used in client.py. + Note: This tests configuration, not actual rejection. See + test_ssl_context_rejects_invalid_certificate for rejection behavior. 
+ """ + # Generate self-signed certificate (not signed by a trusted CA) + cert_path, _key_path = generate_self_signed_cert(tmp_path, common_name="untrusted.example.com") + + # Create TLS config pointing to self-signed cert as CA + # This simulates a server presenting a self-signed certificate + tls_config = MCPClientTLSConfig(ca_bundle=str(cert_path), certfile=None, keyfile=None, verify=True, check_hostname=True) + + # Create SSL context using the production utility function + # This is the same function used in client.py for external plugin connections + ssl_context = create_ssl_context(tls_config, "TestPlugin") + + # Verify the context has strict validation enabled + assert ssl_context.verify_mode == ssl.CERT_REQUIRED + assert ssl_context.check_hostname is True + + # Note: We can't easily test the actual connection failure without spinning up + # a real HTTPS server, but we can verify the SSL context is configured correctly + # to reject invalid certificates + + +def test_ssl_context_rejects_invalid_certificate(): + """Test that SSL context with CERT_REQUIRED will reject invalid certificates. + + This test demonstrates the rejection behavior by showing that: + 1. An SSL context created with verify=True has CERT_REQUIRED mode + 2. CERT_REQUIRED mode means OpenSSL will reject invalid certificates during handshake + 3. The rejection is simulated since we can't easily spin up a real HTTPS server + + Per Python SSL docs: "If CERT_REQUIRED is used, the client or server must provide + a valid and trusted certificate. A connection attempt will raise an SSLError if + the certificate validation fails." + + This validates the actual rejection behavior mechanism. 
+ """ + import tempfile + + # Create a valid self-signed CA certificate for testing + with tempfile.TemporaryDirectory() as tmpdir: + ca_cert_path, _ca_key_path = generate_self_signed_cert(Path(tmpdir), common_name="TestCA") + + # Create TLS config with strict verification + tls_config = MCPClientTLSConfig(ca_bundle=str(ca_cert_path), certfile=None, keyfile=None, verify=True, check_hostname=True) + + # Create SSL context - this will succeed (configuration step) + ssl_context = create_ssl_context(tls_config, "TestPlugin") + + # Verify the context requires certificate validation + assert ssl_context.verify_mode == ssl.CERT_REQUIRED, "Should require certificate verification" + assert ssl_context.check_hostname is True, "Should verify hostname" + + # The key point: When this SSL context is used in a real connection: + # - If server presents a certificate NOT signed by our test CA -> SSLError + # - If server presents an expired certificate -> SSLError + # - If server presents a certificate with wrong hostname -> SSLError + # - If server doesn't present a certificate -> SSLError + # + # This is guaranteed by the CERT_REQUIRED setting and documented in: + # - Python SSL docs: https://docs.python.org/3/library/ssl.html#ssl.CERT_REQUIRED + # - OpenSSL verify docs: https://docs.openssl.org/3.1/man1/openssl-verification-options/ + # - RFC 5280 Section 6: Certificate path validation + + # To demonstrate, we can show that attempting to verify a different certificate + # would fail. 
Here's what the SSL context will do during handshake: + with patch("ssl.SSLContext.wrap_socket") as mock_wrap: + # Simulate what happens when OpenSSL rejects the certificate + mock_wrap.side_effect = ssl.SSLError("[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed") + + # This is what would happen if we tried to connect to a server + # with an invalid certificate: + with pytest.raises(ssl.SSLError, match="CERTIFICATE_VERIFY_FAILED"): + ssl_context.wrap_socket(Mock(), server_hostname="example.com") + + +def test_ssl_context_accepts_valid_ca_signed_certificate(tmp_path): + """Test that create_ssl_context() accepts certificates signed by a trusted CA. + + This validates that certificate chain validation works correctly when + a proper CA certificate is provided. + + This test validates the actual production code path used in client.py. + """ + # Generate CA and a certificate signed by that CA + ca_cert_path, server_cert_path, server_key_path = generate_ca_and_signed_cert(tmp_path, common_name="valid.example.com") + + # Create TLS config with the CA certificate + tls_config = MCPClientTLSConfig(ca_bundle=str(ca_cert_path), certfile=str(server_cert_path), keyfile=str(server_key_path), verify=True, check_hostname=True) + + # Create SSL context using the production utility function + ssl_context = create_ssl_context(tls_config, "TestPlugin") + + # Verify the context is configured for strict validation + assert ssl_context.verify_mode == ssl.CERT_REQUIRED + assert ssl_context.check_hostname is True + + # Verify we can load the certificate successfully + # In a real scenario, this would successfully connect to a server + # presenting a certificate signed by our CA + + +def test_expired_certificate_detection(tmp_path): + """Test that expired certificates can be detected. + + Per OpenSSL docs and RFC 5280: Certificate validity period (notBefore/notAfter) + is automatically checked during validation. 
This test verifies we can + generate expired certificates that would fail validation. + + This test validates the actual production code path used in client.py. + """ + # Generate an already-expired certificate + cert_path, _key_path = generate_self_signed_cert(tmp_path, common_name="expired.example.com", expired=True) + + # Load the certificate and verify it's expired + with open(cert_path, "rb") as f: + cert_data = f.read() + cert = x509.load_pem_x509_certificate(cert_data, default_backend()) + + # Verify the certificate is expired + now = datetime.datetime.now(tz=datetime.timezone.utc) + assert cert.not_valid_after_utc < now, "Certificate should be expired" + assert cert.not_valid_before_utc < now, "Certificate notBefore should be in the past" + + # Create TLS config with the expired certificate + tls_config = MCPClientTLSConfig(ca_bundle=str(cert_path), certfile=None, keyfile=None, verify=True, check_hostname=False) + + # Create SSL context using the production utility function + ssl_context = create_ssl_context(tls_config, "TestPlugin") + + # Verify the context has verification enabled + assert ssl_context.verify_mode == ssl.CERT_REQUIRED + + # We've verified the certificate is expired - in actual usage, + # create_ssl_context() with CERT_REQUIRED would automatically + # reject this during the TLS handshake + + +def test_certificate_validity_period_future(tmp_path): + """Test detection of certificates that are not yet valid (notBefore in future). + + Per OpenSSL docs: Certificates with notBefore date after current time + are rejected with "certificate is not yet valid" error. 
+ """ + # Generate private key + private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048, backend=default_backend()) + + # Create certificate with notBefore in the future + not_valid_before = datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta(days=30) + not_valid_after = datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta(days=395) + + subject = issuer = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "future.example.com")]) + + cert = ( + x509.CertificateBuilder() + .subject_name(subject) + .issuer_name(issuer) + .public_key(private_key.public_key()) + .serial_number(x509.random_serial_number()) + .not_valid_before(not_valid_before) + .not_valid_after(not_valid_after) + .sign(private_key, hashes.SHA256(), default_backend()) + ) + + # Write certificate + cert_path = tmp_path / "future_cert.pem" + cert_path.write_bytes(cert.public_bytes(serialization.Encoding.PEM)) + + # Verify the certificate is not yet valid + now = datetime.datetime.now(tz=datetime.timezone.utc) + assert cert.not_valid_before_utc > now, "Certificate should not yet be valid" + + # In actual usage, ssl.create_default_context() would reject this certificate + # during validation with "certificate is not yet valid" + + +def test_ssl_context_configuration_for_mtls(tmp_path): + """Test that SSL context is properly configured for mTLS. + + This test verifies that the SSL context configuration matches the + security requirements for mutual TLS authentication. + + This test validates the actual production code path used in client.py. 
+ """ + # Generate CA and certificates + ca_cert_path, client_cert_path, client_key_path = generate_ca_and_signed_cert(tmp_path, common_name="client.example.com") + + # Create TLS config for mTLS + tls_config = MCPClientTLSConfig(ca_bundle=str(ca_cert_path), certfile=str(client_cert_path), keyfile=str(client_key_path), verify=True, check_hostname=True) + + # Create SSL context using the production utility function + ssl_context = create_ssl_context(tls_config, "TestPlugin") + + # Verify security settings + assert ssl_context.verify_mode == ssl.CERT_REQUIRED, "Should require certificate verification" + assert ssl_context.check_hostname is True, "Should verify hostname by default" + + # Verify protocol restrictions (no SSLv2, SSLv3) + # create_ssl_context() automatically disables weak protocols + assert ssl_context.minimum_version >= ssl.TLSVersion.TLSv1_2, "Should use TLS 1.2 or higher" + + +def test_ssl_context_with_verification_disabled(tmp_path): + """Test SSL context when certificate verification is explicitly disabled. + + When verify=False, the SSL context should allow connections without + certificate validation. This is useful for testing but not recommended + for production. + + This test validates the actual production code path used in client.py. 
+ """ + # Generate self-signed certificate + cert_path, _key_path = generate_self_signed_cert(tmp_path, common_name="novalidate.example.com") + + # Create TLS config with verification disabled + tls_config = MCPClientTLSConfig(ca_bundle=str(cert_path), certfile=None, keyfile=None, verify=False, check_hostname=False) + + # Create SSL context using the production utility function + ssl_context = create_ssl_context(tls_config, "TestPlugin") + + # Verify security is disabled as configured + assert ssl_context.verify_mode == ssl.CERT_NONE, "Verification should be disabled" + assert ssl_context.check_hostname is False, "Hostname checking should be disabled" + + +def test_certificate_with_wrong_hostname_would_fail(tmp_path): + """Test that hostname verification would reject certificates with wrong hostname. + + Per Python ssl docs: When check_hostname is enabled, the certificate's + Subject Alternative Name (SAN) or Common Name (CN) must match the hostname. + + This test validates the actual production code path used in client.py. 
+ """ + # Generate certificate for one hostname + cert_path, _key_path = generate_self_signed_cert(tmp_path, common_name="correct.example.com") + + # Load the certificate + with open(cert_path, "rb") as f: + cert_data = f.read() + cert = x509.load_pem_x509_certificate(cert_data, default_backend()) + + # Verify the certificate has the correct hostname in SAN + san_extension = cert.extensions.get_extension_for_oid(ExtensionOID.SUBJECT_ALTERNATIVE_NAME) + san_names = san_extension.value.get_values_for_type(x509.DNSName) + + assert "correct.example.com" in san_names, "Certificate should have correct.example.com in SAN" + assert "wrong.example.com" not in san_names, "Certificate should not have wrong.example.com in SAN" + + # Create TLS config with hostname checking enabled + tls_config = MCPClientTLSConfig(ca_bundle=str(cert_path), certfile=None, keyfile=None, verify=True, check_hostname=True) + + # Create SSL context using the production utility function + ssl_context = create_ssl_context(tls_config, "TestPlugin") + + # Verify hostname checking is enabled + assert ssl_context.check_hostname is True, "Hostname checking should be enabled" + assert ssl_context.verify_mode == ssl.CERT_REQUIRED, "Certificate verification should be required" + + # In actual usage, connecting to "wrong.example.com" with this certificate + # would fail with: ssl.CertificateError: hostname 'wrong.example.com' + # doesn't match 'correct.example.com' From 94be45e5fb55d1ca0a4002ab470f905695d027db Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Wed, 15 Oct 2025 09:04:57 -0600 Subject: [PATCH 27/35] feat: added support for cert-manager in k8s. 
Signed-off-by: Teryl Taylor --- docs/docs/deployment/cforge-gateway.md | 167 +++++++++++++++++- mcpgateway/tools/builder/common.py | 130 +++++++++----- mcpgateway/tools/builder/dagger_deploy.py | 31 +++- mcpgateway/tools/builder/python_deploy.py | 29 ++- mcpgateway/tools/builder/schema.py | 33 +++- .../cert-manager-certificates.yaml.j2 | 62 +++++++ 6 files changed, 386 insertions(+), 66 deletions(-) create mode 100644 mcpgateway/tools/builder/templates/kubernetes/cert-manager-certificates.yaml.j2 diff --git a/docs/docs/deployment/cforge-gateway.md b/docs/docs/deployment/cforge-gateway.md index 88cdd3e86..88e0323aa 100644 --- a/docs/docs/deployment/cforge-gateway.md +++ b/docs/docs/deployment/cforge-gateway.md @@ -674,20 +674,71 @@ mTLS certificate generation settings: ```yaml certificates: + # Local certificate generation (default) validity_days: 825 # Certificate validity period auto_generate: true # Auto-generate if missing ca_path: ./certs/mcp/ca # CA certificate directory gateway_path: ./certs/mcp/gateway # Gateway cert directory plugins_path: ./certs/mcp/plugins # Plugins cert directory + + # OR use cert-manager (Kubernetes only) + use_cert_manager: true # Use cert-manager for certificates + cert_manager_issuer: mcp-ca-issuer # Issuer/ClusterIssuer name + cert_manager_kind: Issuer # Issuer or ClusterIssuer ``` | Field | Type | Required | Description | Default | |-------|------|----------|-------------|---------| | `validity_days` | integer | ❌ | Certificate validity in days | `825` | -| `auto_generate` | boolean | ❌ | Auto-generate certificates if missing | `true` | -| `ca_path` | string | ❌ | CA certificate directory | `./certs/mcp/ca` | -| `gateway_path` | string | ❌ | Gateway client cert directory | `./certs/mcp/gateway` | -| `plugins_path` | string | ❌ | Plugin server certs base directory | `./certs/mcp/plugins` | +| `auto_generate` | boolean | ❌ | Auto-generate certificates locally if missing | `true` | +| `ca_path` | string | ❌ | CA certificate directory 
(local mode) | `./certs/mcp/ca` | +| `gateway_path` | string | ❌ | Gateway client cert directory (local mode) | `./certs/mcp/gateway` | +| `plugins_path` | string | ❌ | Plugin server certs base directory (local mode) | `./certs/mcp/plugins` | +| `use_cert_manager` | boolean | ❌ | Use cert-manager for certificate management (Kubernetes only) | `false` | +| `cert_manager_issuer` | string | ❌ | cert-manager Issuer/ClusterIssuer name | `mcp-ca-issuer` | +| `cert_manager_kind` | string | ❌ | cert-manager issuer kind: `Issuer` or `ClusterIssuer` | `Issuer` | + +#### cert-manager Integration (Kubernetes Only) + +[cert-manager](https://cert-manager.io) is a Kubernetes-native certificate management controller that automates certificate issuance and renewal. + +**Benefits:** +- βœ… **Automatic Renewal**: Certificates renewed before expiry (default: at 2/3 of lifetime) +- βœ… **Native Kubernetes**: Certificates defined as Kubernetes Custom Resources +- βœ… **Simplified Operations**: No manual certificate generation or rotation +- βœ… **GitOps Friendly**: Certificate definitions version-controlled + +**Prerequisites:** +1. Install cert-manager in your cluster: + ```bash + kubectl apply -f https://github.com/cert-manager/cert-manager/releases/download/v1.13.0/cert-manager.yaml + ``` + +2. 
Create namespace and CA Issuer (one-time setup): + ```bash + # Create namespace first + kubectl create namespace mcp-gateway-test + + # Apply CA Issuer + kubectl apply -f examples/deployment-configs/cert-manager-issuer-example.yaml + ``` + +**Configuration:** +```yaml +certificates: + use_cert_manager: true + cert_manager_issuer: mcp-ca-issuer + cert_manager_kind: Issuer + validity_days: 825 +``` + +When `use_cert_manager: true`: +- Local certificate generation is skipped +- cert-manager Certificate CRDs are generated for gateway and plugins +- cert-manager automatically creates Kubernetes TLS secrets +- Certificates are auto-renewed before expiry + +**Important**: The cert-manager Issuer and CA certificate are long-lived infrastructure. When you destroy your MCP deployment, the Issuer remains (by design) for reuse across deployments. --- @@ -947,6 +998,114 @@ cforge gateway deploy deploy-k8s-build.yaml --skip-build --- +### Example 5: Kubernetes with cert-manager + +**File:** `examples/deployment-configs/deploy-k8s-cert-manager.yaml` + +Production deployment using cert-manager for automated certificate management: + +```yaml +deployment: + type: kubernetes + namespace: mcp-gateway-test + +gateway: + image: mcpgateway/mcpgateway:latest + image_pull_policy: IfNotPresent + + port: 4444 + service_type: ClusterIP + service_port: 4444 + + replicas: 1 + memory_request: 256Mi + memory_limit: 512Mi + cpu_request: 100m + cpu_limit: 500m + + env_vars: + LOG_LEVEL: DEBUG + MCPGATEWAY_UI_ENABLED: "true" + + mtls_enabled: true + mtls_verify: true + mtls_check_hostname: false + +plugins: + - name: OPAPluginFilter + image: mcpgateway-opapluginfilter:latest + image_pull_policy: IfNotPresent + + port: 8000 + service_type: ClusterIP + + replicas: 1 + memory_request: 128Mi + memory_limit: 256Mi + + mtls_enabled: true + + plugin_overrides: + priority: 10 + mode: "enforce" + +# cert-manager configuration +certificates: + # Use cert-manager for automatic certificate management + 
use_cert_manager: true + + # Reference the Issuer created in prerequisites + cert_manager_issuer: mcp-ca-issuer + cert_manager_kind: Issuer + + # Certificate validity (auto-renewed at 2/3 of lifetime) + validity_days: 825 + + # Local paths not used when use_cert_manager=true + auto_generate: false +``` + +**Prerequisites:** + +1. Install cert-manager: + ```bash + kubectl apply -f https://github.com/cert-manager/cert-manager/releases/download/v1.13.0/cert-manager.yaml + ``` + +2. Create namespace and CA Issuer (one-time setup): + ```bash + # Create namespace first + kubectl create namespace mcp-gateway-test + + # Apply CA Issuer + kubectl apply -f examples/deployment-configs/cert-manager-issuer-example.yaml + ``` + +**Deploy:** +```bash +# Deploy (no need to generate certificates manually) +cforge gateway deploy examples/deployment-configs/deploy-k8s-cert-manager.yaml + +# Verify cert-manager created certificates +kubectl get certificates -n mcp-gateway-test +kubectl get secrets -n mcp-gateway-test | grep mcp- +``` + +**How it works:** +1. `cforge gateway deploy` skips local certificate generation +2. Generates cert-manager Certificate CRDs for gateway and plugins +3. Applies Certificate CRDs to Kubernetes +4. cert-manager automatically creates TLS secrets +5. Pods use the secrets created by cert-manager +6. 
cert-manager auto-renews certificates before expiry + +**Certificate lifecycle:** +- **Creation**: cert-manager generates certificates when CRDs are applied +- **Renewal**: Automatic renewal at 2/3 of lifetime (550 days for 825-day cert) +- **Deletion**: Certificates deleted when stack is destroyed, Issuer remains + +--- + ## mTLS Configuration Guide ### Understanding mTLS in MCP Gateway diff --git a/mcpgateway/tools/builder/common.py b/mcpgateway/tools/builder/common.py index 797b64524..879eb0d0a 100644 --- a/mcpgateway/tools/builder/common.py +++ b/mcpgateway/tools/builder/common.py @@ -178,58 +178,94 @@ def generate_kubernetes_manifests(config: Dict[str, Any], output_dir: Path, verb # Generate namespace namespace = config["deployment"].get("namespace", "mcp-gateway") - # Generate mTLS certificate secrets if enabled + # Generate mTLS certificate resources if enabled gateway_mtls = config.get("gateway", {}).get("mtls_enabled", True) + cert_config = config.get("certificates", {}) + use_cert_manager = cert_config.get("use_cert_manager", False) + if gateway_mtls: + if use_cert_manager: + # Generate cert-manager Certificate CRDs + cert_manager_template = env.get_template("cert-manager-certificates.yaml.j2") + + # Calculate duration and renewBefore in hours + validity_days = cert_config.get("validity_days", 825) + duration_hours = validity_days * 24 + # Renew at 2/3 of lifetime (cert-manager default) + renew_before_hours = int(duration_hours * 2 / 3) + + # Prepare certificate data + cert_data = { + "namespace": namespace, + "gateway_name": "mcpgateway", + "issuer_name": cert_config.get("cert_manager_issuer", "mcp-ca-issuer"), + "issuer_kind": cert_config.get("cert_manager_kind", "Issuer"), + "duration": duration_hours, + "renew_before": renew_before_hours, + "plugins": [], + } + + # Add plugins with mTLS enabled + for plugin in config.get("plugins", []): + if plugin.get("mtls_enabled", True): + cert_data["plugins"].append({"name": 
f"mcp-plugin-{plugin['name'].lower()}"}) + + # Generate cert-manager certificates manifest + cert_manager_manifest = cert_manager_template.render(**cert_data) + (output_dir / "cert-manager-certificates.yaml").write_text(cert_manager_manifest) + if verbose: + print(" βœ“ cert-manager Certificate CRDs manifest generated") - cert_secrets_template = env.get_template("cert-secrets.yaml.j2") + else: + # Generate traditional certificate secrets (backward compatibility) + cert_secrets_template = env.get_template("cert-secrets.yaml.j2") - # Prepare certificate data - cert_data = {"namespace": namespace, "gateway_name": "mcpgateway", "plugins": []} + # Prepare certificate data + cert_data = {"namespace": namespace, "gateway_name": "mcpgateway", "plugins": []} - # Read and encode CA certificate - ca_cert_path = Path("certs/mcp/ca/ca.crt") - if ca_cert_path.exists(): - cert_data["ca_cert_b64"] = base64.b64encode(ca_cert_path.read_bytes()).decode("utf-8") - else: - if verbose: - print(f"[yellow]Warning: CA certificate not found at {ca_cert_path}[/yellow]") - - # Read and encode gateway certificates - gateway_cert_path = Path("certs/mcp/gateway/client.crt") - gateway_key_path = Path("certs/mcp/gateway/client.key") - if gateway_cert_path.exists() and gateway_key_path.exists(): - cert_data["gateway_cert_b64"] = base64.b64encode(gateway_cert_path.read_bytes()).decode("utf-8") - cert_data["gateway_key_b64"] = base64.b64encode(gateway_key_path.read_bytes()).decode("utf-8") - else: - if verbose: - print("[yellow]Warning: Gateway certificates not found[/yellow]") - - # Read and encode plugin certificates - for plugin in config.get("plugins", []): - if plugin.get("mtls_enabled", True): - plugin_name = plugin["name"] - plugin_cert_path = Path(f"certs/mcp/plugins/{plugin_name}/server.crt") - plugin_key_path = Path(f"certs/mcp/plugins/{plugin_name}/server.key") - - if plugin_cert_path.exists() and plugin_key_path.exists(): - cert_data["plugins"].append( - { - "name": 
f"mcp-plugin-{plugin_name.lower()}", - "cert_b64": base64.b64encode(plugin_cert_path.read_bytes()).decode("utf-8"), - "key_b64": base64.b64encode(plugin_key_path.read_bytes()).decode("utf-8"), - } - ) - else: - if verbose: - print(f"[yellow]Warning: Plugin {plugin_name} certificates not found[/yellow]") - - # Generate certificate secrets manifest - if "ca_cert_b64" in cert_data: - cert_secrets_manifest = cert_secrets_template.render(**cert_data) - (output_dir / "cert-secrets.yaml").write_text(cert_secrets_manifest) - if verbose: - print(" βœ“ mTLS certificate secrets manifest generated") + # Read and encode CA certificate + ca_cert_path = Path("certs/mcp/ca/ca.crt") + if ca_cert_path.exists(): + cert_data["ca_cert_b64"] = base64.b64encode(ca_cert_path.read_bytes()).decode("utf-8") + else: + if verbose: + print(f"[yellow]Warning: CA certificate not found at {ca_cert_path}[/yellow]") + + # Read and encode gateway certificates + gateway_cert_path = Path("certs/mcp/gateway/client.crt") + gateway_key_path = Path("certs/mcp/gateway/client.key") + if gateway_cert_path.exists() and gateway_key_path.exists(): + cert_data["gateway_cert_b64"] = base64.b64encode(gateway_cert_path.read_bytes()).decode("utf-8") + cert_data["gateway_key_b64"] = base64.b64encode(gateway_key_path.read_bytes()).decode("utf-8") + else: + if verbose: + print("[yellow]Warning: Gateway certificates not found[/yellow]") + + # Read and encode plugin certificates + for plugin in config.get("plugins", []): + if plugin.get("mtls_enabled", True): + plugin_name = plugin["name"] + plugin_cert_path = Path(f"certs/mcp/plugins/{plugin_name}/server.crt") + plugin_key_path = Path(f"certs/mcp/plugins/{plugin_name}/server.key") + + if plugin_cert_path.exists() and plugin_key_path.exists(): + cert_data["plugins"].append( + { + "name": f"mcp-plugin-{plugin_name.lower()}", + "cert_b64": base64.b64encode(plugin_cert_path.read_bytes()).decode("utf-8"), + "key_b64": 
base64.b64encode(plugin_key_path.read_bytes()).decode("utf-8"), + } + ) + else: + if verbose: + print(f"[yellow]Warning: Plugin {plugin_name} certificates not found[/yellow]") + + # Generate certificate secrets manifest + if "ca_cert_b64" in cert_data: + cert_secrets_manifest = cert_secrets_template.render(**cert_data) + (output_dir / "cert-secrets.yaml").write_text(cert_secrets_manifest) + if verbose: + print(" βœ“ mTLS certificate secrets manifest generated") # Generate infrastructure manifests (postgres, redis) if enabled infrastructure = config.get("infrastructure", {}) diff --git a/mcpgateway/tools/builder/dagger_deploy.py b/mcpgateway/tools/builder/dagger_deploy.py index 6133d8a15..c3c71ab95 100644 --- a/mcpgateway/tools/builder/dagger_deploy.py +++ b/mcpgateway/tools/builder/dagger_deploy.py @@ -149,17 +149,34 @@ async def build(self, config_file: str, plugins_only: bool = False, specific_plu async def generate_certificates(self, config_file: str) -> None: """Generate mTLS certificates for plugins. + Supports two modes: + 1. Local generation (use_cert_manager=false): Uses Dagger to generate certificates locally + 2. 
cert-manager (use_cert_manager=true): Skips local generation, cert-manager will create certificates + Args: config_file: Path to mcp-stack.yaml Raises: - dagger.ExecError: If certificate generation command fails - dagger.QueryError: If Dagger query fails + dagger.ExecError: If certificate generation command fails (when using local generation) + dagger.QueryError: If Dagger query fails (when using local generation) """ config = load_config(config_file) + # Check if using cert-manager + cert_config = config.get("certificates", {}) + use_cert_manager = cert_config.get("use_cert_manager", False) + validity_days = cert_config.get("validity_days", 825) + + if use_cert_manager: + # Skip local generation - cert-manager will handle certificate creation + if self.verbose: + self.console.print("[blue]Using cert-manager for certificate management[/blue]") + self.console.print("[dim]Skipping local certificate generation (cert-manager will create certificates)[/dim]") + return + + # Local certificate generation (backward compatibility) if self.verbose: - self.console.print("[blue]Generating mTLS certificates...[/blue]") + self.console.print("[blue]Generating mTLS certificates locally...[/blue]") # Use Dagger container to run certificate generation async with dagger.connection(dagger.Config(workdir=str(Path.cwd()))): @@ -179,16 +196,16 @@ async def generate_certificates(self, config_file: str) -> None: ) # Generate CA - container = container.with_exec(["sh", "-c", "make certs-mcp-ca MCP_CERT_DAYS=825"]) + container = container.with_exec(["sh", "-c", f"make certs-mcp-ca MCP_CERT_DAYS={validity_days}"]) # Generate gateway cert - container = container.with_exec(["sh", "-c", "make certs-mcp-gateway MCP_CERT_DAYS=825"]) + container = container.with_exec(["sh", "-c", f"make certs-mcp-gateway MCP_CERT_DAYS={validity_days}"]) # Generate plugin certificates plugins = config.get("plugins", []) for plugin in plugins: plugin_name = plugin["name"] - container = container.with_exec(["sh", 
"-c", f"make certs-mcp-plugin PLUGIN_NAME={plugin_name} MCP_CERT_DAYS=825"]) + container = container.with_exec(["sh", "-c", f"make certs-mcp-plugin PLUGIN_NAME={plugin_name} MCP_CERT_DAYS={validity_days}"]) # Export certificates back to host output = container.directory("/workspace/certs") @@ -207,7 +224,7 @@ async def generate_certificates(self, config_file: str) -> None: raise if self.verbose: - self.console.print("[green]βœ“ Certificates generated[/green]") + self.console.print("[green]βœ“ Certificates generated locally[/green]") async def deploy(self, config_file: str, dry_run: bool = False, skip_build: bool = False, skip_certs: bool = False, output_dir: Optional[str] = None) -> None: """Deploy MCP stack. diff --git a/mcpgateway/tools/builder/python_deploy.py b/mcpgateway/tools/builder/python_deploy.py index 78d8a9292..bfb3ed79b 100644 --- a/mcpgateway/tools/builder/python_deploy.py +++ b/mcpgateway/tools/builder/python_deploy.py @@ -134,35 +134,52 @@ async def build(self, config_file: str, plugins_only: bool = False, specific_plu async def generate_certificates(self, config_file: str) -> None: """Generate mTLS certificates for plugins. + Supports two modes: + 1. Local generation (use_cert_manager=false): Uses Makefile to generate certificates locally + 2. 
cert-manager (use_cert_manager=true): Skips local generation, cert-manager will create certificates + Args: config_file: Path to mcp-stack.yaml Raises: - RuntimeError: If make command not found + RuntimeError: If make command not found (when using local generation) """ config = load_config(config_file) + # Check if using cert-manager + cert_config = config.get("certificates", {}) + use_cert_manager = cert_config.get("use_cert_manager", False) + validity_days = cert_config.get("validity_days", 825) + + if use_cert_manager: + # Skip local generation - cert-manager will handle certificate creation + if self.verbose: + self.console.print("[blue]Using cert-manager for certificate management[/blue]") + self.console.print("[dim]Skipping local certificate generation (cert-manager will create certificates)[/dim]") + return + + # Local certificate generation (backward compatibility) if self.verbose: - self.console.print("[blue]Generating mTLS certificates...[/blue]") + self.console.print("[blue]Generating mTLS certificates locally...[/blue]") # Check if make is available if not shutil.which("make"): raise RuntimeError("'make' command not found. 
Cannot generate certificates.") # Generate CA - self._run_command(["make", "certs-mcp-ca", "MCP_CERT_DAYS=825"]) + self._run_command(["make", "certs-mcp-ca", f"MCP_CERT_DAYS={validity_days}"]) # Generate gateway cert - self._run_command(["make", "certs-mcp-gateway", "MCP_CERT_DAYS=825"]) + self._run_command(["make", "certs-mcp-gateway", f"MCP_CERT_DAYS={validity_days}"]) # Generate plugin certificates plugins = config.get("plugins", []) for plugin in plugins: plugin_name = plugin["name"] - self._run_command(["make", "certs-mcp-plugin", f"PLUGIN_NAME={plugin_name}", "MCP_CERT_DAYS=825"]) + self._run_command(["make", "certs-mcp-plugin", f"PLUGIN_NAME={plugin_name}", f"MCP_CERT_DAYS={validity_days}"]) if self.verbose: - self.console.print("[green]βœ“ Certificates generated[/green]") + self.console.print("[green]βœ“ Certificates generated locally[/green]") async def deploy(self, config_file: str, dry_run: bool = False, skip_build: bool = False, skip_certs: bool = False, output_dir: Optional[str] = None) -> None: """Deploy MCP stack. diff --git a/mcpgateway/tools/builder/schema.py b/mcpgateway/tools/builder/schema.py index 43f77d77a..4d60adaac 100644 --- a/mcpgateway/tools/builder/schema.py +++ b/mcpgateway/tools/builder/schema.py @@ -115,10 +115,39 @@ def validate_name(cls, v: str) -> str: class CertificatesConfig(BaseModel): - """Certificate configuration""" + """Certificate configuration. + + Supports two modes: + 1. Local certificate generation (use_cert_manager=false, default): + - Certificates generated locally using OpenSSL (via Makefile) + - Deployed to Kubernetes as secrets via kubectl + - Manual rotation required before expiry + + 2. 
cert-manager integration (use_cert_manager=true, Kubernetes only): + - Certificates managed by cert-manager controller + - Automatic renewal before expiry (default: at 2/3 of lifetime) + - Native Kubernetes Certificate resources + - Requires cert-manager to be installed in cluster + + Attributes: + validity_days: Certificate validity period in days (default: 825 β‰ˆ 2.25 years) + auto_generate: Auto-generate certificates locally (default: True) + use_cert_manager: Use cert-manager for certificate management (default: False, Kubernetes only) + cert_manager_issuer: Name of cert-manager Issuer/ClusterIssuer (default: "mcp-ca-issuer") + cert_manager_kind: Type of issuer - Issuer or ClusterIssuer (default: "Issuer") + ca_path: Path to CA certificates for local generation (default: "./certs/mcp/ca") + gateway_path: Path to gateway certificates for local generation (default: "./certs/mcp/gateway") + plugins_path: Path to plugin certificates for local generation (default: "./certs/mcp/plugins") + """ validity_days: Optional[int] = Field(825, description="Certificate validity in days") - auto_generate: Optional[bool] = Field(True, description="Auto-generate certificates") + auto_generate: Optional[bool] = Field(True, description="Auto-generate certificates locally") + + # cert-manager integration (Kubernetes only) + use_cert_manager: Optional[bool] = Field(False, description="Use cert-manager for certificate management (Kubernetes only)") + cert_manager_issuer: Optional[str] = Field("mcp-ca-issuer", description="cert-manager Issuer/ClusterIssuer name") + cert_manager_kind: Optional[Literal["Issuer", "ClusterIssuer"]] = Field("Issuer", description="cert-manager issuer kind") + ca_path: Optional[str] = Field("./certs/mcp/ca", description="CA certificate path") gateway_path: Optional[str] = Field("./certs/mcp/gateway", description="Gateway cert path") plugins_path: Optional[str] = Field("./certs/mcp/plugins", description="Plugins cert path") diff --git 
a/mcpgateway/tools/builder/templates/kubernetes/cert-manager-certificates.yaml.j2 b/mcpgateway/tools/builder/templates/kubernetes/cert-manager-certificates.yaml.j2 new file mode 100644 index 000000000..e11963573 --- /dev/null +++ b/mcpgateway/tools/builder/templates/kubernetes/cert-manager-certificates.yaml.j2 @@ -0,0 +1,62 @@ +# Location: ./mcpgateway/tools/builder/templates/kubernetes/cert-manager-certificates.yaml.j2 +# Copyright 2025 +# SPDX-License-Identifier: Apache-2.0 +# Authors: Teryl Taylor +# cert-manager Certificate Resources +# Gateway Certificate +apiVersion: cert-manager.io/v1 +kind: Certificate +metadata: + name: mcp-{{ gateway_name }}-cert + namespace: {{ namespace }} +spec: + secretName: mcp-{{ gateway_name }}-server-cert + duration: {{ duration }}h + renewBefore: {{ renew_before }}h + isCA: false + privateKey: + algorithm: RSA + size: 2048 + usages: + - digital signature + - key encipherment + - server auth + - client auth + dnsNames: + - {{ gateway_name }} + - {{ gateway_name }}.{{ namespace }} + - {{ gateway_name }}.{{ namespace }}.svc + - {{ gateway_name }}.{{ namespace }}.svc.cluster.local + issuerRef: + name: {{ issuer_name }} + kind: {{ issuer_kind }} +{% for plugin in plugins %} +--- +# Plugin {{ plugin.name }} Certificate +apiVersion: cert-manager.io/v1 +kind: Certificate +metadata: + name: mcp-{{ plugin.name }}-cert + namespace: {{ namespace }} +spec: + secretName: mcp-{{ plugin.name }}-server-cert + duration: {{ duration }}h + renewBefore: {{ renew_before }}h + isCA: false + privateKey: + algorithm: RSA + size: 2048 + usages: + - digital signature + - key encipherment + - server auth + - client auth + dnsNames: + - {{ plugin.name }} + - {{ plugin.name }}.{{ namespace }} + - {{ plugin.name }}.{{ namespace }}.svc + - {{ plugin.name }}.{{ namespace }}.svc.cluster.local + issuerRef: + name: {{ issuer_name }} + kind: {{ issuer_kind }} +{% endfor %} From eae3a3bcce4bbb06b03267dd0dc3bef53aa3d236 Mon Sep 17 00:00:00 2001 From: Teryl Taylor 
Date: Wed, 15 Oct 2025 09:20:28 -0600 Subject: [PATCH 28/35] tests: skipped tls doctest. Signed-off-by: Teryl Taylor --- mcpgateway/plugins/framework/external/mcp/tls_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mcpgateway/plugins/framework/external/mcp/tls_utils.py b/mcpgateway/plugins/framework/external/mcp/tls_utils.py index 017ae523b..8e6fe195a 100644 --- a/mcpgateway/plugins/framework/external/mcp/tls_utils.py +++ b/mcpgateway/plugins/framework/external/mcp/tls_utils.py @@ -58,14 +58,14 @@ def create_ssl_context(tls_config: MCPClientTLSConfig, plugin_name: str) -> ssl. PluginError: If SSL context configuration fails Example: - >>> tls_config = MCPClientTLSConfig( + >>> tls_config = MCPClientTLSConfig( # doctest: +SKIP ... ca_bundle="/path/to/ca.crt", ... certfile="/path/to/client.crt", ... keyfile="/path/to/client.key", ... verify=True, ... check_hostname=True ... ) - >>> ssl_context = create_ssl_context(tls_config, "MyPlugin") + >>> ssl_context = create_ssl_context(tls_config, "MyPlugin") # doctest: +SKIP >>> # Use ssl_context with httpx or other SSL connections """ try: From b1c279c2247862203d2aa6f234809ad9f19f38bc Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Wed, 15 Oct 2025 09:30:45 -0600 Subject: [PATCH 29/35] test: fix doctests. Signed-off-by: Teryl Taylor --- mcpgateway/tools/builder/factory.py | 4 +++- mcpgateway/tools/builder/pipeline.py | 1 + 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/mcpgateway/tools/builder/factory.py b/mcpgateway/tools/builder/factory.py index 23f3ee875..8c58ac5b1 100644 --- a/mcpgateway/tools/builder/factory.py +++ b/mcpgateway/tools/builder/factory.py @@ -13,6 +13,7 @@ Example: >>> deployer, mode = DeployFactory.create_deployer("dagger", verbose=False) + ⚠ Dagger not installed. Using plain python. 
>>> # Validate configuration (output varies by config) >>> # deployer.validate("mcp-stack.yaml") """ @@ -71,11 +72,12 @@ def create_deployer(deployer: str, verbose: bool = False) -> tuple[CICDModule, C Example: >>> # Try to load Dagger, fall back to Python if unavailable >>> deployer, mode = DeployFactory.create_deployer("dagger", verbose=False) + ⚠ Dagger not installed. Using plain python. >>> if mode == CICDTypes.DAGGER: ... print("Using optimized Dagger implementation") ... else: ... print("Using fallback Python implementation") - Using optimized Dagger implementation + Using fallback Python implementation """ # Attempt to load Dagger implementation first if requested if deployer == "dagger": diff --git a/mcpgateway/tools/builder/pipeline.py b/mcpgateway/tools/builder/pipeline.py index 42febd774..db53384ad 100644 --- a/mcpgateway/tools/builder/pipeline.py +++ b/mcpgateway/tools/builder/pipeline.py @@ -19,6 +19,7 @@ Example: >>> from mcpgateway.tools.builder.factory import DeployFactory >>> deployer, mode = DeployFactory.create_deployer("dagger", verbose=False) + ⚠ Dagger not installed. Using plain python. >>> # Validate configuration (output varies by config) >>> # deployer.validate("mcp-stack.yaml") >>> # Async methods must be called with await (see method examples below) From 88d2f27e7b00e6c26e4ebc6f3cac04e4ecd63692 Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Wed, 15 Oct 2025 10:13:05 -0600 Subject: [PATCH 30/35] fix: added example cert-manager issuer file. 
Signed-off-by: Teryl Taylor --- .../cert-manager-issuer-example.yaml | 58 +++++++++++++++++++ 1 file changed, 58 insertions(+) create mode 100644 examples/deployment-configs/cert-manager-issuer-example.yaml diff --git a/examples/deployment-configs/cert-manager-issuer-example.yaml b/examples/deployment-configs/cert-manager-issuer-example.yaml new file mode 100644 index 000000000..5b96aae91 --- /dev/null +++ b/examples/deployment-configs/cert-manager-issuer-example.yaml @@ -0,0 +1,58 @@ +# cert-manager CA Issuer Setup (APPLY ONCE) +# This example shows how to set up a self-signed CA using cert-manager +# for issuing mTLS certificates to the MCP Gateway and plugins. +# +# Prerequisites: +# - cert-manager must be installed in your cluster +# Install: kubectl apply -f https://github.com/cert-manager/cert-manager/releases/download/v1.13.0/cert-manager.yaml +# +# Usage: +# 1. Create namespace: kubectl create namespace mcp-gateway-test +# 2. Apply this file ONCE: kubectl apply -f cert-manager-issuer-example.yaml +# 3. Deploy stack with use_cert_manager: true in mcp-stack.yaml +# +# NOTE: This creates long-lived infrastructure (CA + Issuer). +# Do NOT delete this when tearing down your MCP stack deployment. +# The CA certificate will be reused across deployments. 
+# +--- +# Self-signed Issuer (used to create the CA certificate) +apiVersion: cert-manager.io/v1 +kind: Issuer +metadata: + name: mcp-selfsigned-issuer + namespace: mcp-gateway-test +spec: + selfSigned: {} + +--- +# CA Certificate (root of trust for all mTLS certificates) +apiVersion: cert-manager.io/v1 +kind: Certificate +metadata: + name: mcp-ca-certificate + namespace: mcp-gateway-test +spec: + isCA: true + commonName: mcp-ca + secretName: mcp-ca-secret + duration: 19800h # 825 days (β‰ˆ 2.25 years) + renewBefore: 13200h # Renew at 2/3 of lifetime + privateKey: + algorithm: RSA + size: 4096 + issuerRef: + name: mcp-selfsigned-issuer + kind: Issuer + +--- +# CA Issuer (used to sign gateway and plugin certificates) +# This is what your mcp-stack.yaml references via cert_manager_issuer +apiVersion: cert-manager.io/v1 +kind: Issuer +metadata: + name: mcp-ca-issuer + namespace: mcp-gateway-test +spec: + ca: + secretName: mcp-ca-secret From e083fb00eb42e925ddc33b1cd58895a8fe72cd8f Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Wed, 15 Oct 2025 10:52:42 -0600 Subject: [PATCH 31/35] docs: updated mtls documentation to point to plugins mtls documentation. Signed-off-by: Teryl Taylor --- docs/docs/manage/mtls.md | 143 ++++++++++++++++++--------------------- 1 file changed, 64 insertions(+), 79 deletions(-) diff --git a/docs/docs/manage/mtls.md b/docs/docs/manage/mtls.md index e02ed047f..f6330a177 100644 --- a/docs/docs/manage/mtls.md +++ b/docs/docs/manage/mtls.md @@ -638,115 +638,100 @@ Create a `ServiceAccount`, `Role`, and `RoleBinding` that grant `get` access to ## mTLS for External MCP Plugins -External plugins that use the `STREAMABLEHTTP` transport now support mutual TLS directly from the gateway. This is optionalβ€”if you skip the configuration below, the gateway continues to call plugins exactly as before. Enabling mTLS lets you restrict remote plugin servers so they only accept connections from gateways presenting a trusted client certificate. 
+External plugins that use the `STREAMABLEHTTP` transport support mutual TLS authentication between the gateway and plugin servers. This is optionalβ€”if not configured, the gateway continues to call plugins over standard HTTP/HTTPS. Enabling mTLS lets you restrict remote plugin servers to only accept connections from gateways presenting a trusted client certificate. -### 1. Issue Certificates for the Remote Plugin +### Setup Options -Reuse the same CA you generated earlier or provision a dedicated one. Create a **server** certificate for the remote plugin endpoint and a **client** certificate for the MCP Gateway: +Choose the approach that best fits your deployment: -```bash -# Server cert for the remote plugin (served by your reverse proxy/mcp server) -openssl req -newkey rsa:4096 -nodes \ - -keyout certs/plugins/remote.key -out certs/plugins/remote.csr \ - -subj "/CN=plugins.internal.example.com" +#### **Automated Deployment (Recommended for Kubernetes)** -openssl x509 -req -in certs/plugins/remote.csr \ - -CA certs/mtls/ca.crt -CAkey certs/mtls/ca.key \ - -CAcreateserial -out certs/plugins/remote.crt -days 365 \ - -extfile <(echo "subjectAltName=DNS:plugins.internal.example.com") +For production Kubernetes deployments, use the `cforge gateway` tool with cert-manager integration for automated certificate lifecycle management: -# Client cert for the gateway -openssl req -newkey rsa:4096 -nodes \ - -keyout certs/plugins/gateway-client.key -out certs/plugins/gateway-client.csr \ - -subj "/CN=mcpgateway" +- **See**: [cforge gateway Deployment Guide](../deployment/cforge-gateway.md) +- **Features**: Automated certificate generation, renewal, and distribution +- **Best for**: Kubernetes production deployments, GitOps workflows -openssl x509 -req -in certs/plugins/gateway-client.csr \ - -CA certs/mtls/ca.crt -CAkey certs/mtls/ca.key \ - -CAcreateserial -out certs/plugins/gateway-client.crt -days 365 - -cat certs/plugins/gateway-client.crt 
certs/plugins/gateway-client.key > certs/plugins/gateway-client.pem -``` +Example deployment with cert-manager: +```yaml +# mcp-stack.yaml +deployment: + type: kubernetes + namespace: mcp-gateway-prod -### 2. Protect the Remote Plugin with mTLS +gateway: + image: mcpgateway/mcpgateway:latest + mtls_enabled: true -Front the remote MCP plugin with a reverse proxy (Nginx, Caddy, Envoy, etc.) that enforces client certificate verification using the CA above. Example Nginx snippet: +plugins: + - name: OPAPluginFilter + image: mcpgateway-opapluginfilter:latest + mtls_enabled: true + +certificates: + use_cert_manager: true + cert_manager_issuer: mcp-ca-issuer + cert_manager_kind: Issuer +``` -```nginx -server { - listen 9443 ssl; - server_name plugins.internal.example.com; - - ssl_certificate /etc/ssl/private/remote.crt; - ssl_certificate_key /etc/ssl/private/remote.key; - ssl_client_certificate /etc/ssl/private/ca.crt; - ssl_verify_client on; - - location /mcp { - proxy_pass http://plugin-runtime:8000/mcp; - proxy_set_header Host $host; - proxy_set_header X-Forwarded-Proto https; - } -} +Deploy: +```bash +cforge gateway deploy mcp-stack.yaml ``` -### 3. 
Mount Certificates into the Gateway +#### **Manual Setup (Local Development & Testing)** -Expose the CA bundle and gateway client certificate to the gateway container: +For local development, Docker Compose, or manual certificate management: -```yaml -# docker-compose override - gateway: - volumes: - - ./certs/plugins:/app/certs/plugins:ro +- **See**: [External Plugin mTLS Setup Guide](../using/plugins/mtls.md) +- **Features**: `make` targets for certificate generation, manual configuration +- **Best for**: Local development, Docker Compose deployments, custom setups -# Kubernetes deployment (snippet) -volumeMounts: - - name: plugin-mtls - mountPath: /app/certs/plugins - readOnly: true -volumes: - - name: plugin-mtls - secret: - secretName: gateway-plugin-mtls +Quick start: +```bash +# Generate complete mTLS infrastructure +make certs-mcp-all + +# Configure plugin connection +export PLUGINS_CLIENT_MTLS_CERTFILE="certs/mcp/gateway/client.crt" +export PLUGINS_CLIENT_MTLS_KEYFILE="certs/mcp/gateway/client.key" +export PLUGINS_CLIENT_MTLS_CA_BUNDLE="certs/mcp/gateway/ca.crt" ``` -### 4. Configure the Plugin Entry +### Configuration Reference -Use the new `mcp.tls` block in `plugins/config.yaml` (or the Admin UI) to point the gateway at the certificates. 
Example external plugin definition: +Both approaches support the same configuration format for plugin connections: +**YAML Configuration** (`plugins/config.yaml`): ```yaml plugins: - - name: "LlamaGuardSafety" + - name: "MyExternalPlugin" kind: "external" - hooks: ["prompt_pre_fetch", "tool_pre_invoke"] - mode: "enforce" - priority: 20 mcp: proto: STREAMABLEHTTP - url: https://plugins.internal.example.com:9443/mcp + url: https://plugin-server:8000/mcp tls: ca_bundle: /app/certs/plugins/ca.crt - client_cert: /app/certs/plugins/gateway-client.pem - client_key: /app/certs/plugins/gateway-client.key # optional if PEM already bundles key + certfile: /app/certs/plugins/gateway-client.crt + keyfile: /app/certs/plugins/gateway-client.key verify: true check_hostname: true - - config: - policy: strict ``` -**Key behavior** -- `verify` controls whether the gateway validates the remote server certificate. Leave `true` in production; set `false` only for local debugging. -- `ca_bundle` may point to a custom CA chain; omit it if the remote certificate chains to a system-trusted CA. -- `client_cert` must reference the gateway certificate. Provide `client_key` only when the key is stored separately. -- `check_hostname` defaults to `true`. Set it to `false` for scenarios where the certificate CN does not match the URL (not recommended outside testing). - -Restart the gateway after updating the config so the external plugin client reloads with the TLS settings. Watch the logs for `Connected to plugin MCP (http) server` to confirm a successful handshake; TLS errors will surface as plugin initialization failures. 
+**Environment Variables** (gateway-wide defaults): +```bash +PLUGINS_CLIENT_MTLS_CA_BUNDLE=/app/certs/ca.crt +PLUGINS_CLIENT_MTLS_CERTFILE=/app/certs/client.crt +PLUGINS_CLIENT_MTLS_KEYFILE=/app/certs/client.key +PLUGINS_CLIENT_MTLS_VERIFY=true +PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME=false +``` -> πŸ’‘ **Tip:** You can set gateway-wide defaults via `PLUGINS_MTLS_CA_BUNDLE`, -> `PLUGINS_MTLS_CLIENT_CERT`, `PLUGINS_MTLS_CLIENT_KEY`, and the other -> `PLUGINS_MTLS_*` environment variables. Any plugin without an explicit -> `tls` block will inherit these values automatically. +**Key Options:** +- `verify`: Validate server certificate (default: `true`, recommended for production) +- `ca_bundle`: CA certificate for server validation (omit to use system CA) +- `certfile`/`keyfile`: Client certificate and key for mTLS authentication +- `check_hostname`: Verify hostname matches certificate (default: `true`) ## Security Best Practices From 754a9c4ad902e6e02b562f3169a45088258f2355 Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Wed, 15 Oct 2025 11:46:16 -0600 Subject: [PATCH 32/35] fix: forgot to add deploy-k8s-cert-manager.yaml Signed-off-by: Teryl Taylor --- .../deploy-k8s-cert-manager.yaml | 100 ++++++++++++++++++ 1 file changed, 100 insertions(+) create mode 100644 examples/deployment-configs/deploy-k8s-cert-manager.yaml diff --git a/examples/deployment-configs/deploy-k8s-cert-manager.yaml b/examples/deployment-configs/deploy-k8s-cert-manager.yaml new file mode 100644 index 000000000..d59c7bc57 --- /dev/null +++ b/examples/deployment-configs/deploy-k8s-cert-manager.yaml @@ -0,0 +1,100 @@ +# MCP Stack - Kubernetes Configuration with cert-manager +# This config uses cert-manager for automatic certificate management +# +# Prerequisites: +# 1. Install cert-manager in your cluster +# 2. Apply cert-manager-issuer-example.yaml to create the CA Issuer +# 3. 
Deploy this config + +deployment: + type: kubernetes + namespace: mcp-gateway-test + +# MCP Gateway configuration +gateway: + # Use pre-built gateway image + image: mcpgateway/mcpgateway:latest + image_pull_policy: IfNotPresent + + port: 4444 + + # Service configuration + service_type: ClusterIP + service_port: 4444 + + # Resource limits + replicas: 1 + memory_request: 256Mi + memory_limit: 512Mi + cpu_request: 100m + cpu_limit: 500m + + # Environment configuration + env_vars: + LOG_LEVEL: DEBUG + HOST: 0.0.0.0 + PORT: 4444 + MCPGATEWAY_UI_ENABLED: "true" + MCPGATEWAY_ADMIN_API_ENABLED: "true" + MCPGATEWAY_A2A_ENABLED: "true" + AUTH_REQUIRED: "false" + MCPGATEWAY_ENABLE_FEDERATION: "false" + + # mTLS client configuration (gateway connects to plugins) + mtls_enabled: true + mtls_verify: true + mtls_check_hostname: false + +# External plugins +plugins: + # OPA Plugin Filter + - name: OPAPluginFilter + + # Use pre-built image for faster testing + image: mcpgateway-opapluginfilter:latest + image_pull_policy: IfNotPresent + + port: 8000 + + # Service configuration + service_type: ClusterIP + service_port: 8000 + + # Resource limits + replicas: 1 + memory_request: 128Mi + memory_limit: 256Mi + cpu_request: 50m + cpu_limit: 200m + + env_vars: + LOG_LEVEL: DEBUG + OPA_POLICY_PATH: /app/policies + + mtls_enabled: true + + # Plugin manager overrides + plugin_overrides: + priority: 10 + mode: "enforce" + description: "OPA policy enforcement" + tags: ["security", "policy", "opa"] + +# cert-manager Certificate configuration +certificates: + # Use cert-manager for automatic certificate management + use_cert_manager: true + + # cert-manager issuer reference (must exist in namespace) + cert_manager_issuer: mcp-ca-issuer + cert_manager_kind: Issuer # or ClusterIssuer + + # Certificate validity (cert-manager will auto-renew at 2/3 of lifetime) + validity_days: 825 # β‰ˆ 2.25 years + + # Local paths not used when use_cert_manager=true + # (included for backward compatibility if 
switching back) + auto_generate: false + ca_path: ./certs/mcp/ca + gateway_path: ./certs/mcp/gateway + plugins_path: ./certs/mcp/plugins From 79263405205ec5a0baa7af10c2f3dd04cc6b8191 Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Thu, 16 Oct 2025 10:59:15 -0600 Subject: [PATCH 33/35] feat: add registry pushing support. clean up pydantics. Signed-off-by: Teryl Taylor --- docs/docs/deployment/cforge-gateway.md | 384 ++++++++++++++++++ .../deploy-openshift-local-registry.yaml | 141 +++++++ .../deploy-openshift-local.yaml | 131 ++++++ mcpgateway/tools/builder/common.py | 259 ++++++++---- mcpgateway/tools/builder/dagger_deploy.py | 88 ++-- mcpgateway/tools/builder/python_deploy.py | 86 ++-- mcpgateway/tools/builder/schema.py | 40 +- .../templates/kubernetes/cert-secrets.yaml.j2 | 2 +- .../templates/kubernetes/deployment.yaml.j2 | 6 +- .../templates/kubernetes/postgres.yaml.j2 | 5 + 10 files changed, 975 insertions(+), 167 deletions(-) create mode 100644 examples/deployment-configs/deploy-openshift-local-registry.yaml create mode 100644 examples/deployment-configs/deploy-openshift-local.yaml diff --git a/docs/docs/deployment/cforge-gateway.md b/docs/docs/deployment/cforge-gateway.md index 88e0323aa..c93b3ed6e 100644 --- a/docs/docs/deployment/cforge-gateway.md +++ b/docs/docs/deployment/cforge-gateway.md @@ -532,6 +532,14 @@ gateway: mtls_verify: true # Verify server certs mtls_check_hostname: false # Verify hostname + # Container Registry Configuration (optional) + registry: + enabled: true # Enable registry push + url: registry.example.com # Registry URL + namespace: myproject # Registry namespace/org + push: true # Push after build + image_pull_policy: IfNotPresent # Kubernetes imagePullPolicy + # Environment Variables env_vars: LOG_LEVEL: INFO @@ -573,6 +581,19 @@ gateway: | `mtls_enabled` | boolean | ❌ | Enable mTLS client | `true` | | `mtls_verify` | boolean | ❌ | Verify server certificates | `true` | | `mtls_check_hostname` | boolean | ❌ | Verify hostname in 
cert | `false` | +| `registry` | object | ❌ | Container registry configuration | - | + +**Container Registry Configuration Fields:** + +| Field | Type | Required | Description | Default | +|-------|------|----------|-------------|---------| +| `enabled` | boolean | ❌ | Enable registry integration | `false` | +| `url` | string | ❌* | Registry URL (e.g., `docker.io`, `quay.io`, OpenShift registry) | - | +| `namespace` | string | ❌* | Registry namespace/organization/project | - | +| `push` | boolean | ❌ | Push image to registry after build | `true` | +| `image_pull_policy` | string | ❌ | Kubernetes imagePullPolicy (`Always`, `IfNotPresent`, `Never`) | `IfNotPresent` | + +\* Required when `enabled: true` **Kubernetes-specific Fields:** @@ -613,6 +634,14 @@ plugins: # mTLS Server Configuration (plugin server) mtls_enabled: true # Enable mTLS server + # Container Registry Configuration (optional) + registry: + enabled: true # Enable registry push + url: registry.example.com # Registry URL + namespace: myproject # Registry namespace/org + push: true # Push after build + image_pull_policy: IfNotPresent # Kubernetes imagePullPolicy + # Environment Variables env_vars: LOG_LEVEL: DEBUG @@ -652,6 +681,7 @@ plugins: | `expose_port` | boolean | ❌ | Expose port on host (compose only) | `false` | | `env_vars` | object | ❌ | Environment variables | `{}` | | `mtls_enabled` | boolean | ❌ | Enable mTLS server | `true` | +| `registry` | object | ❌ | Container registry configuration (same fields as gateway) | - | | `plugin_overrides` | object | ❌ | Plugin manager config overrides | `{}` | **Plugin Overrides:** @@ -1213,6 +1243,360 @@ Then redeploy to distribute new certificates. --- +## Container Registry Integration + +### Overview + +The container registry feature allows you to build images locally and automatically push them to container registries (Docker Hub, Quay.io, OpenShift internal registry, private registries, etc.). 
This is essential for: + +βœ… **Kubernetes/OpenShift deployments** - Avoid ImagePullBackOff errors +βœ… **Team collaboration** - Share images across developers and environments +βœ… **CI/CD pipelines** - Build once, deploy everywhere +βœ… **Production deployments** - Use trusted registry sources + +### How It Works + +1. **Build**: Images are built locally using docker/podman +2. **Tag**: Images are automatically tagged with the registry path +3. **Push**: Images are pushed to the registry (if `push: true`) +4. **Deploy**: Kubernetes manifests reference the registry images + +### Configuration + +Add a `registry` section to your gateway and/or plugin configurations: + +```yaml +gateway: + repo: https://github.com/yourorg/yourrepo.git + + # Container registry configuration + registry: + enabled: true # Enable registry integration + url: registry.example.com # Registry URL + namespace: myproject # Registry namespace/org/project + push: true # Push after build (default: true) + image_pull_policy: IfNotPresent # Kubernetes imagePullPolicy +``` + +**Configuration Fields:** + +| Field | Required | Description | Example | +|-------|----------|-------------|---------| +| `enabled` | Yes | Enable registry push | `true` | +| `url` | Yes* | Registry URL | `docker.io`, `quay.io`, `registry.mycompany.com` | +| `namespace` | Yes* | Registry namespace/organization/project | `myusername`, `myorg`, `mcp-gateway-test` | +| `push` | No | Push image after build | `true` (default) | +| `image_pull_policy` | No | Kubernetes imagePullPolicy | `IfNotPresent` (default) | + +\* Required when `enabled: true` + +### Common Registry Examples + +#### Docker Hub + +```yaml +registry: + enabled: true + url: docker.io + namespace: myusername + push: true + image_pull_policy: IfNotPresent +``` + +**Authentication:** +```bash +docker login +``` + +#### Quay.io + +```yaml +registry: + enabled: true + url: quay.io + namespace: myorganization + push: true + image_pull_policy: IfNotPresent +``` + 
+**Authentication:** +```bash +podman login quay.io +``` + +#### OpenShift Internal Registry + +```yaml +registry: + enabled: true + url: default-route-openshift-image-registry.apps-crc.testing + namespace: mcp-gateway-test + push: true + image_pull_policy: Always +``` + +**Authentication:** +```bash +# OpenShift Local (CRC) +podman login $(oc registry info) -u $(oc whoami) -p $(oc whoami -t) + +# OpenShift on cloud +oc registry login +``` + +#### Private Registry + +```yaml +registry: + enabled: true + url: registry.mycompany.com + namespace: devteam + push: true + image_pull_policy: IfNotPresent +``` + +**Authentication:** +```bash +podman login registry.mycompany.com -u myusername +``` + +### Image Naming + +When registry is enabled, images are automatically tagged with the full registry path: + +**Local tag (without registry):** +``` +mcpgateway-gateway:latest +mcpgateway-opapluginfilter:latest +``` + +**Registry tag (with registry enabled):** +``` +registry.example.com/myproject/mcpgateway-gateway:latest +registry.example.com/myproject/mcpgateway-opapluginfilter:latest +``` + +### Image Pull Policies + +Choose the appropriate policy for your use case: + +| Policy | Description | Best For | +|--------|-------------|----------| +| `Always` | Pull image every time pod starts | Development, testing latest changes | +| `IfNotPresent` | Pull only if image doesn't exist locally | Production, stable releases | +| `Never` | Never pull, only use local images | Air-gapped environments | + +### Workflow Example + +#### OpenShift Local Deployment + +```bash +# 1. Authenticate to OpenShift registry +podman login $(oc registry info) -u $(oc whoami) -p $(oc whoami -t) + +# 2. 
Build and push images +cforge gateway deploy examples/deployment-configs/deploy-openshift-local-registry.yaml + +# The tool will: +# - Build images locally +# - Tag with registry paths +# - Push to OpenShift internal registry +# - Generate manifests with registry image references +# - Deploy to cluster + +# 3. Verify images were pushed +oc get imagestreams -n mcp-gateway-test + +# Output: +# NAME IMAGE REPOSITORY +# mcpgateway-gateway default-route-.../mcp-gateway-test/mcpgateway-gateway +# mcpgateway-opapluginfilter default-route-.../mcp-gateway-test/mcpgateway-opapluginfilter +``` + +#### CI/CD Pipeline Example + +```bash +# In your CI/CD pipeline: + +# 1. Authenticate to registry +echo "$REGISTRY_PASSWORD" | docker login $REGISTRY_URL -u $REGISTRY_USER --password-stdin + +# 2. Build and push +cforge gateway build deploy-prod.yaml + +# 3. Images are automatically pushed to registry + +# 4. Deploy to Kubernetes (manifests already reference registry images) +cforge gateway deploy deploy-prod.yaml --skip-build --skip-certs +``` + +### Per-Component Configuration + +Each component (gateway and plugins) can have different registry settings: + +```yaml +gateway: + repo: https://github.com/myorg/gateway.git + registry: + enabled: true + url: quay.io + namespace: myorg + push: true + +plugins: + - name: MyPlugin + repo: https://github.com/myorg/plugin.git + registry: + enabled: true + url: docker.io # Different registry + namespace: myusername # Different namespace + push: true + + - name: InternalPlugin + repo: https://github.com/myorg/internal-plugin.git + # No registry - use local image only + registry: + enabled: false +``` + +This allows you to: +- Push gateway to one registry, plugins to another +- Skip registry push for some components +- Use different namespaces per component +- Mix local and registry images + +### Tag-Only Mode + +To tag images without pushing (useful for testing): + +```yaml +registry: + enabled: true + url: registry.example.com + namespace: 
myproject + push: false # Tag but don't push +``` + +**Use cases:** +- Test registry configuration before pushing +- Generate manifests with registry paths for GitOps +- Manual push workflow + +### Troubleshooting + +#### Authentication Errors + +**Error:** `Failed to push to registry: unauthorized` + +**Solution:** Authenticate to the registry before building: +```bash +# Docker Hub +docker login + +# Quay.io +podman login quay.io + +# Private registry +podman login registry.mycompany.com -u myusername + +# OpenShift +podman login $(oc registry info) -u $(oc whoami) -p $(oc whoami -t) +``` + +#### ImagePullBackOff in Kubernetes + +**Error:** Pods show `ImagePullBackOff` status + +**Possible causes:** +1. Image doesn't exist in registry (push failed) +2. Registry authentication not configured in Kubernetes +3. Network connectivity issues +4. Wrong image path/tag + +**Solutions:** + +**1. Verify image exists:** +```bash +# OpenShift +oc get imagestreams -n mcp-gateway-test + +# Docker Hub/Quay +podman search your-registry.com/namespace/image-name +``` + +**2. Configure Kubernetes pull secrets:** +```bash +# Create docker-registry secret +kubectl create secret docker-registry regcred \ + --docker-server=registry.example.com \ + --docker-username=myusername \ + --docker-password=mypassword \ + --docker-email=myemail@example.com \ + -n mcp-gateway-test + +# Update deployment to use secret (manual step, or add to template) +``` + +**3. For OpenShift, grant pull permissions:** +```bash +# Allow default service account to pull from namespace +oc policy add-role-to-user system:image-puller \ + system:serviceaccount:mcp-gateway-test:default \ + -n mcp-gateway-test +``` + +#### Push Failed: Too Large + +**Error:** `image push failed: blob upload exceeds max size` + +**Solution:** Some registries have size limits. Options: +1. Use multi-stage builds to reduce image size +2. Switch to a registry with larger limits +3. 
Split into smaller images + +#### Registry URL Format + +**Correct formats:** +```yaml +url: docker.io # Docker Hub +url: quay.io # Quay.io +url: gcr.io # Google Container Registry +url: registry.mycompany.com # Private registry +url: default-route-openshift-image-registry.apps-crc.testing # OpenShift +``` + +**Incorrect formats:** +```yaml +url: https://docker.io # No protocol +url: docker.io/myusername # No namespace in URL +url: registry:5000 # Include port in URL, not namespace +``` + +### Best Practices + +βœ… **DO:** +- Authenticate to registry before building +- Use specific version tags in production (not `:latest`) +- Test registry configuration with `push: false` first +- Use `image_pull_policy: Always` for development +- Use `image_pull_policy: IfNotPresent` for production +- Organize images by namespace/project + +❌ **DON'T:** +- Commit registry credentials to Git +- Use `latest` tag in production +- Mix local and registry images without testing +- Skip authentication step +- Use `push: true` for testing without verifying first + +### Example Configurations + +Full examples available in: +- `examples/deployment-configs/deploy-openshift-local.yaml` - Registry config commented +- `examples/deployment-configs/deploy-openshift-local-registry.yaml` - Full registry setup + +--- + ## Deployment Modes ### Plain Python Mode (Default) diff --git a/examples/deployment-configs/deploy-openshift-local-registry.yaml b/examples/deployment-configs/deploy-openshift-local-registry.yaml new file mode 100644 index 000000000..9f9c3d22d --- /dev/null +++ b/examples/deployment-configs/deploy-openshift-local-registry.yaml @@ -0,0 +1,141 @@ +# MCP Stack - OpenShift Local with Registry Push +# Build from source and push to OpenShift internal registry +# +# This example demonstrates how to build images locally and push them to +# OpenShift's internal registry. 
This is useful for: +# - Testing images in a production-like environment +# - Avoiding ImagePullBackOff errors when deploying to OpenShift +# - Sharing images across multiple namespaces +# +# Prerequisites: +# 1. Install cert-manager in your cluster +# 2. Apply cert-manager-issuer-example.yaml to create the CA Issuer +# 3. Authenticate to OpenShift internal registry: +# podman login $(oc registry info) -u $(oc whoami) -p $(oc whoami -t) +# 4. Deploy this config + +deployment: + type: kubernetes + namespace: mcp-gateway-test + +# MCP Gateway configuration +gateway: + # Build gateway from current repository + repo: https://github.com/terylt/mcp-context-forge.git + ref: feat/configurable_plugin_deployment + context: . + containerfile: Containerfile + image: mcpgateway-gateway:latest + + port: 4444 + + # Service configuration + service_type: ClusterIP + service_port: 4444 + + # Resource limits + replicas: 1 + memory_request: 256Mi + memory_limit: 512Mi + cpu_request: 100m + cpu_limit: 500m + + # Environment configuration + env_vars: + LOG_LEVEL: DEBUG + HOST: 0.0.0.0 + PORT: 4444 + MCPGATEWAY_UI_ENABLED: "true" + MCPGATEWAY_ADMIN_API_ENABLED: "true" + MCPGATEWAY_A2A_ENABLED: "true" + AUTH_REQUIRED: "false" + MCPGATEWAY_ENABLE_FEDERATION: "false" + + # mTLS client configuration (gateway connects to plugins) + mtls_enabled: true + mtls_verify: true + mtls_check_hostname: false + + # Container registry configuration + # Build locally, then tag and push to OpenShift internal registry + registry: + enabled: true + # OpenShift internal registry URL (get with: oc registry info) + url: default-route-openshift-image-registry.apps-crc.testing + # Namespace where images will be pushed (must have push permissions) + namespace: mcp-gateway-test + # Push image after build + push: true + # imagePullPolicy for Kubernetes pods + image_pull_policy: Always + +# External plugins +plugins: + # OPA Plugin Filter - build from source and push to registry + - name: OPAPluginFilter + + # Build 
from repository + repo: https://github.com/terylt/mcp-context-forge.git + ref: feat/configurable_plugin_deployment + context: plugins/external/opa + containerfile: Containerfile + image: mcpgateway-opapluginfilter:latest + + port: 8000 + + # Service configuration + service_type: ClusterIP + service_port: 8000 + + # Resource limits + replicas: 1 + memory_request: 128Mi + memory_limit: 256Mi + cpu_request: 50m + cpu_limit: 200m + + env_vars: + LOG_LEVEL: DEBUG + OPA_POLICY_PATH: /app/policies + + mtls_enabled: true + + # Container registry configuration + # Push plugin image to same registry as gateway + registry: + enabled: true + url: default-route-openshift-image-registry.apps-crc.testing + namespace: mcp-gateway-test + push: true + image_pull_policy: Always + + # Plugin manager overrides + plugin_overrides: + priority: 10 + mode: "enforce" + description: "OPA policy enforcement" + tags: ["security", "policy", "opa"] + +# Infrastructure services +infrastructure: + postgres: + enabled: true + image: quay.io/sclorg/postgresql-15-c9s:latest + user: mcpuser # Use non-'postgres' username for Red Hat images + database: mcp + password: mysecretpassword + +# cert-manager Certificate configuration +certificates: + # Use cert-manager for automatic certificate management + use_cert_manager: true + + # cert-manager issuer reference (must exist in namespace) + cert_manager_issuer: mcp-ca-issuer + cert_manager_kind: Issuer # or ClusterIssuer + + # Certificate validity (cert-manager will auto-renew at 2/3 of lifetime) + validity_days: 825 # β‰ˆ 2.25 years + + # Local paths not used when use_cert_manager=true + auto_generate: false diff --git a/examples/deployment-configs/deploy-openshift-local.yaml b/examples/deployment-configs/deploy-openshift-local.yaml new file mode 100644 index 000000000..8256478ba --- /dev/null +++ b/examples/deployment-configs/deploy-openshift-local.yaml @@ -0,0 +1,131 @@ +# MCP Stack - OpenShift Local Configuration with cert-manager +# Build from source 
for local development +# +# Prerequisites: +# 1. Install cert-manager in your cluster +# 2. Apply cert-manager-issuer-example.yaml to create the CA Issuer +# 3. (Optional) Authenticate to OpenShift internal registry: +# podman login $(oc registry info) -u $(oc whoami) -p $(oc whoami -t) +# 4. Deploy this config + +deployment: + type: kubernetes + namespace: mcp-gateway-test + +# MCP Gateway configuration +gateway: + # Build gateway from current repository + repo: https://github.com/terylt/mcp-context-forge.git + ref: feat/configurable_plugin_deployment + context: . + containerfile: Containerfile + image: mcpgateway-gateway:latest + + port: 4444 + + # Service configuration + service_type: ClusterIP + service_port: 4444 + + # Resource limits + replicas: 1 + memory_request: 256Mi + memory_limit: 512Mi + cpu_request: 100m + cpu_limit: 500m + + # Environment configuration + env_vars: + LOG_LEVEL: DEBUG + HOST: 0.0.0.0 + PORT: 4444 + MCPGATEWAY_UI_ENABLED: "true" + MCPGATEWAY_ADMIN_API_ENABLED: "true" + MCPGATEWAY_A2A_ENABLED: "true" + AUTH_REQUIRED: "false" + MCPGATEWAY_ENABLE_FEDERATION: "false" + + # mTLS client configuration (gateway connects to plugins) + mtls_enabled: true + mtls_verify: true + mtls_check_hostname: false + + # Container registry configuration (optional) + # Uncomment to push images to OpenShift internal registry + # registry: + # enabled: true + # url: default-route-openshift-image-registry.apps-crc.testing + # namespace: mcp-gateway-test + # push: true + # image_pull_policy: Always + +# External plugins +plugins: + # OPA Plugin Filter - build from source + - name: OPAPluginFilter + + # Build from repository + repo: https://github.com/terylt/mcp-context-forge.git + ref: feat/configurable_plugin_deployment + context: plugins/external/opa + containerfile: Containerfile + image: mcpgateway-opapluginfilter:latest + + port: 8000 + + # Service configuration + service_type: ClusterIP + service_port: 8000 + + # Resource limits + replicas: 1 + 
memory_request: 128Mi + memory_limit: 256Mi + cpu_request: 50m + cpu_limit: 200m + + env_vars: + LOG_LEVEL: DEBUG + OPA_POLICY_PATH: /app/policies + + mtls_enabled: true + + # Container registry configuration (optional) + # Uncomment to push images to OpenShift internal registry + # registry: + # enabled: true + # url: default-route-openshift-image-registry.apps-crc.testing + # namespace: mcp-gateway-test + # push: true + # image_pull_policy: Always + + # Plugin manager overrides + plugin_overrides: + priority: 10 + mode: "enforce" + description: "OPA policy enforcement" + tags: ["security", "policy", "opa"] + +# Infrastructure services +infrastructure: + postgres: + enabled: true + image: quay.io/sclorg/postgresql-15-c9s:latest + user: mcpuser # Use non-'postgres' username for Red Hat images + database: mcp + password: mysecretpassword + +# cert-manager Certificate configuration +certificates: + # Use cert-manager for automatic certificate management + use_cert_manager: true + + # cert-manager issuer reference (must exist in namespace) + cert_manager_issuer: mcp-ca-issuer + cert_manager_kind: Issuer # or ClusterIssuer + + # Certificate validity (cert-manager will auto-renew at 2/3 of lifetime) + validity_days: 825 # β‰ˆ 2.25 years + + # Local paths not used when use_cert_manager=true + auto_generate: false diff --git a/mcpgateway/tools/builder/common.py b/mcpgateway/tools/builder/common.py index 879eb0d0a..2a293b361 100644 --- a/mcpgateway/tools/builder/common.py +++ b/mcpgateway/tools/builder/common.py @@ -15,6 +15,7 @@ - generate_kubernetes_manifests: Generate Kubernetes deployment manifests - generate_compose_manifests: Generate Docker Compose manifest - copy_env_template: Copy .env.template from plugin repo to env.d/ directory +- handle_registry_operations: Tag and push images to container registry - get_docker_compose_command: Detect available docker compose command - run_compose: Run docker compose with error handling - deploy_compose: Deploy using docker 
compose up -d @@ -38,6 +39,9 @@ from rich.console import Console import yaml +# First-Party +from mcpgateway.tools.builder.schema import MCPStackConfig + console = Console() @@ -53,34 +57,38 @@ def get_deploy_dir() -> Path: return Path(deploy_dir) -def load_config(config_file: str) -> Dict[str, Any]: - """Load and parse YAML configuration file. +def load_config(config_file: str) -> MCPStackConfig: + """Load and parse YAML configuration file into validated Pydantic model. Args: config_file: Path to mcp-stack.yaml configuration file Returns: - Parsed configuration dictionary + Validated MCPStackConfig Pydantic model Raises: FileNotFoundError: If configuration file doesn't exist + ValidationError: If configuration validation fails """ config_path = Path(config_file) if not config_path.exists(): raise FileNotFoundError(f"Configuration file not found: {config_file}") with open(config_path, encoding="utf-8") as f: - return yaml.safe_load(f) + config_dict = yaml.safe_load(f) + + # Validate and return Pydantic model + return MCPStackConfig.model_validate(config_dict) -def generate_plugin_config(config: Dict[str, Any], output_dir: Path, verbose: bool = False) -> Path: +def generate_plugin_config(config: MCPStackConfig, output_dir: Path, verbose: bool = False) -> Path: """Generate plugin config.yaml for gateway from mcp-stack.yaml. This function is shared between Dagger and plain Python implementations to avoid code duplication. 
Args: - config: Parsed mcp-stack.yaml configuration + config: Validated MCPStackConfig Pydantic model output_dir: Output directory for generated config verbose: Print verbose output @@ -91,8 +99,8 @@ def generate_plugin_config(config: Dict[str, Any], output_dir: Path, verbose: bo FileNotFoundError: If template directory not found """ - deployment_type = config["deployment"]["type"] - plugins = config.get("plugins", []) + deployment_type = config.deployment.type + plugins = config.plugins # Load template template_dir = Path(__file__).parent / "templates" @@ -105,21 +113,21 @@ def generate_plugin_config(config: Dict[str, Any], output_dir: Path, verbose: bo # Prepare plugin data with computed URLs plugin_data = [] for plugin in plugins: - plugin_name = plugin["name"] - port = plugin.get("port", 8000) + plugin_name = plugin.name + port = plugin.port or 8000 # Determine URL based on deployment type if deployment_type == "compose": # Use container hostname (lowercase) hostname = plugin_name.lower() # Use HTTPS if mTLS is enabled - protocol = "https" if plugin.get("mtls_enabled", True) else "http" + protocol = "https" if plugin.mtls_enabled else "http" url = f"{protocol}://{hostname}:{port}/mcp" else: # kubernetes # Use Kubernetes service DNS - namespace = config["deployment"].get("namespace", "mcp-gateway") + namespace = config.deployment.namespace or "mcp-gateway" service_name = f"mcp-plugin-{plugin_name.lower()}" - protocol = "https" if plugin.get("mtls_enabled", True) else "http" + protocol = "https" if plugin.mtls_enabled else "http" url = f"{protocol}://{service_name}.{namespace}.svc:{port}/mcp" # Build plugin entry with computed URL @@ -131,8 +139,8 @@ def generate_plugin_config(config: Dict[str, Any], output_dir: Path, verbose: bo # Merge plugin_overrides (client-side config only, excludes 'config') # Allowed client-side fields that plugin manager uses - if "plugin_overrides" in plugin: - overrides = plugin["plugin_overrides"] + if plugin.plugin_overrides: + 
overrides = plugin.plugin_overrides allowed_fields = ["priority", "mode", "description", "version", "author", "hooks", "tags", "conditions"] for field in allowed_fields: if field in overrides: @@ -153,11 +161,11 @@ def generate_plugin_config(config: Dict[str, Any], output_dir: Path, verbose: bo return config_path -def generate_kubernetes_manifests(config: Dict[str, Any], output_dir: Path, verbose: bool = False) -> None: +def generate_kubernetes_manifests(config: MCPStackConfig, output_dir: Path, verbose: bool = False) -> None: """Generate Kubernetes manifests from configuration. Args: - config: Parsed mcp-stack.yaml configuration + config: Validated MCPStackConfig Pydantic model output_dir: Output directory for manifests verbose: Print verbose output @@ -176,12 +184,12 @@ def generate_kubernetes_manifests(config: Dict[str, Any], output_dir: Path, verb env = Environment(loader=FileSystemLoader(str(template_dir)), autoescape=True) # nosec B701 # Generate namespace - namespace = config["deployment"].get("namespace", "mcp-gateway") + namespace = config.deployment.namespace or "mcp-gateway" # Generate mTLS certificate resources if enabled - gateway_mtls = config.get("gateway", {}).get("mtls_enabled", True) - cert_config = config.get("certificates", {}) - use_cert_manager = cert_config.get("use_cert_manager", False) + gateway_mtls = config.gateway.mtls_enabled if config.gateway.mtls_enabled is not None else True + cert_config = config.certificates + use_cert_manager = cert_config.use_cert_manager if cert_config else False if gateway_mtls: if use_cert_manager: @@ -189,7 +197,7 @@ def generate_kubernetes_manifests(config: Dict[str, Any], output_dir: Path, verb cert_manager_template = env.get_template("cert-manager-certificates.yaml.j2") # Calculate duration and renewBefore in hours - validity_days = cert_config.get("validity_days", 825) + validity_days = cert_config.validity_days or 825 duration_hours = validity_days * 24 # Renew at 2/3 of lifetime (cert-manager default) 
renew_before_hours = int(duration_hours * 2 / 3) @@ -198,17 +206,17 @@ def generate_kubernetes_manifests(config: Dict[str, Any], output_dir: Path, verb cert_data = { "namespace": namespace, "gateway_name": "mcpgateway", - "issuer_name": cert_config.get("cert_manager_issuer", "mcp-ca-issuer"), - "issuer_kind": cert_config.get("cert_manager_kind", "Issuer"), + "issuer_name": cert_config.cert_manager_issuer or "mcp-ca-issuer", + "issuer_kind": cert_config.cert_manager_kind or "Issuer", "duration": duration_hours, "renew_before": renew_before_hours, "plugins": [], } # Add plugins with mTLS enabled - for plugin in config.get("plugins", []): - if plugin.get("mtls_enabled", True): - cert_data["plugins"].append({"name": f"mcp-plugin-{plugin['name'].lower()}"}) + for plugin in config.plugins: + if plugin.mtls_enabled if plugin.mtls_enabled is not None else True: + cert_data["plugins"].append({"name": f"mcp-plugin-{plugin.name.lower()}"}) # Generate cert-manager certificates manifest cert_manager_manifest = cert_manager_template.render(**cert_data) @@ -242,9 +250,9 @@ def generate_kubernetes_manifests(config: Dict[str, Any], output_dir: Path, verb print("[yellow]Warning: Gateway certificates not found[/yellow]") # Read and encode plugin certificates - for plugin in config.get("plugins", []): - if plugin.get("mtls_enabled", True): - plugin_name = plugin["name"] + for plugin in config.plugins: + if plugin.mtls_enabled if plugin.mtls_enabled is not None else True: + plugin_name = plugin.name plugin_cert_path = Path(f"certs/mcp/plugins/{plugin_name}/server.crt") plugin_key_path = Path(f"certs/mcp/plugins/{plugin_name}/server.key") @@ -268,85 +276,88 @@ def generate_kubernetes_manifests(config: Dict[str, Any], output_dir: Path, verb print(" βœ“ mTLS certificate secrets manifest generated") # Generate infrastructure manifests (postgres, redis) if enabled - infrastructure = config.get("infrastructure", {}) + infrastructure = config.infrastructure # PostgreSQL - postgres_config = 
infrastructure.get("postgres", {}) - if postgres_config.get("enabled", True): + if infrastructure and infrastructure.postgres and infrastructure.postgres.enabled: + postgres_config = infrastructure.postgres postgres_template = env.get_template("postgres.yaml.j2") postgres_manifest = postgres_template.render( namespace=namespace, - image=postgres_config.get("image", "postgres:17"), - database=postgres_config.get("database", "mcp"), - user=postgres_config.get("user", "postgres"), - password=postgres_config.get("password", "mysecretpassword"), - storage_size=postgres_config.get("storage_size", "10Gi"), - storage_class=postgres_config.get("storage_class"), + image=postgres_config.image or "quay.io/sclorg/postgresql-15-c9s:latest", + database=postgres_config.database or "mcp", + user=postgres_config.user or "postgres", + password=postgres_config.password or "mysecretpassword", + storage_size=postgres_config.storage_size or "10Gi", + storage_class=postgres_config.storage_class, ) (output_dir / "postgres-deployment.yaml").write_text(postgres_manifest) if verbose: print(" βœ“ PostgreSQL deployment manifest generated") # Redis - redis_config = infrastructure.get("redis", {}) - if redis_config.get("enabled", True): + if infrastructure and infrastructure.redis and infrastructure.redis.enabled: + redis_config = infrastructure.redis redis_template = env.get_template("redis.yaml.j2") - redis_manifest = redis_template.render(namespace=namespace, image=redis_config.get("image", "redis:latest")) + redis_manifest = redis_template.render(namespace=namespace, image=redis_config.image or "redis:latest") (output_dir / "redis-deployment.yaml").write_text(redis_manifest) if verbose: print(" βœ“ Redis deployment manifest generated") # Generate gateway deployment gateway_template = env.get_template("deployment.yaml.j2") - gateway_config = config["gateway"].copy() - gateway_config["name"] = "mcpgateway" - gateway_config["namespace"] = namespace + # Convert Pydantic model to dict for template 
rendering + gateway_dict = config.gateway.model_dump(exclude_none=True) + gateway_dict["name"] = "mcpgateway" + gateway_dict["namespace"] = namespace # Add DATABASE_URL and REDIS_URL to gateway environment if infrastructure is enabled - if "env_vars" not in gateway_config: - gateway_config["env_vars"] = {} + if "env_vars" not in gateway_dict: + gateway_dict["env_vars"] = {} # Add init containers to wait for infrastructure services init_containers = [] - if postgres_config.get("enabled", True): - db_user = postgres_config.get("user", "postgres") - db_password = postgres_config.get("password", "mysecretpassword") - db_name = postgres_config.get("database", "mcp") - gateway_config["env_vars"]["DATABASE_URL"] = f"postgresql://{db_user}:{db_password}@postgres:5432/{db_name}" + if infrastructure and infrastructure.postgres and infrastructure.postgres.enabled: + postgres = infrastructure.postgres + db_user = postgres.user or "postgres" + db_password = postgres.password or "mysecretpassword" + db_name = postgres.database or "mcp" + gateway_dict["env_vars"]["DATABASE_URL"] = f"postgresql://{db_user}:{db_password}@postgres:5432/{db_name}" # Add init container to wait for PostgreSQL init_containers.append({"name": "wait-for-postgres", "image": "busybox:1.36", "command": ["sh", "-c", "until nc -z postgres 5432; do echo waiting for postgres; sleep 2; done"]}) - if redis_config.get("enabled", True): - gateway_config["env_vars"]["REDIS_URL"] = "redis://redis:6379/0" + if infrastructure and infrastructure.redis and infrastructure.redis.enabled: + gateway_dict["env_vars"]["REDIS_URL"] = "redis://redis:6379/0" # Add init container to wait for Redis init_containers.append({"name": "wait-for-redis", "image": "busybox:1.36", "command": ["sh", "-c", "until nc -z redis 6379; do echo waiting for redis; sleep 2; done"]}) if init_containers: - gateway_config["init_containers"] = init_containers + gateway_dict["init_containers"] = init_containers - gateway_manifest = 
gateway_template.render(**gateway_config) + gateway_manifest = gateway_template.render(**gateway_dict) (output_dir / "gateway-deployment.yaml").write_text(gateway_manifest) # Generate plugin deployments - for plugin in config.get("plugins", []): - plugin_config = plugin.copy() - plugin_config["name"] = f"mcp-plugin-{plugin['name'].lower()}" - plugin_config["namespace"] = namespace - plugin_manifest = gateway_template.render(**plugin_config) - (output_dir / f"plugin-{plugin['name'].lower()}-deployment.yaml").write_text(plugin_manifest) + for plugin in config.plugins: + # Convert Pydantic model to dict for template rendering + plugin_dict = plugin.model_dump(exclude_none=True) + plugin_dict["name"] = f"mcp-plugin-{plugin.name.lower()}" + plugin_dict["namespace"] = namespace + plugin_manifest = gateway_template.render(**plugin_dict) + (output_dir / f"plugin-{plugin.name.lower()}-deployment.yaml").write_text(plugin_manifest) if verbose: print(f"βœ“ Kubernetes manifests generated in {output_dir}") -def generate_compose_manifests(config: Dict[str, Any], output_dir: Path, verbose: bool = False) -> None: +def generate_compose_manifests(config: MCPStackConfig, output_dir: Path, verbose: bool = False) -> None: """Generate Docker Compose manifest from configuration. 
Args: - config: Parsed mcp-stack.yaml configuration + config: Validated MCPStackConfig Pydantic model output_dir: Output directory for manifests verbose: Print verbose output @@ -363,16 +374,13 @@ def generate_compose_manifests(config: Dict[str, Any], output_dir: Path, verbose _auto_detect_env_files(config, output_dir, verbose=verbose) # Auto-assign host_ports if expose_port is true but host_port not specified - plugins = config.get("plugins", []) next_host_port = 8000 - for plugin in plugins: - # Set default port if not specified - if "port" not in plugin: - plugin["port"] = 8000 + for plugin in config.plugins: + # Port defaults are handled by Pydantic defaults in schema # Auto-assign host_port if expose_port is true - if plugin.get("expose_port", False) and "host_port" not in plugin: - plugin["host_port"] = next_host_port + if plugin.expose_port and not plugin.host_port: + plugin.host_port = next_host_port # type: ignore next_host_port += 1 # Compute relative certificate paths (from output_dir to project root certs/) @@ -394,49 +402,50 @@ def generate_compose_manifests(config: Dict[str, Any], output_dir: Path, verbose # Generate compose file compose_template = env.get_template("docker-compose.yaml.j2") - compose_manifest = compose_template.render(**config, cert_paths=cert_paths) + # Convert Pydantic model to dict for template rendering + config_dict = config.model_dump(exclude_none=True) + compose_manifest = compose_template.render(**config_dict, cert_paths=cert_paths) (output_dir / "docker-compose.yaml").write_text(compose_manifest) if verbose: print(f"βœ“ Compose manifest generated in {output_dir}") -def _auto_detect_env_files(config: Dict[str, Any], output_dir: Path, verbose: bool = False) -> None: +def _auto_detect_env_files(config: MCPStackConfig, output_dir: Path, verbose: bool = False) -> None: """Auto-detect and assign env files if not explicitly specified. 
If env_file is not specified in the config, check if {deploy_dir}/env/.env.{name} exists and use it. Warn the user when auto-detection is used. Args: - config: Parsed mcp-stack.yaml configuration (modified in-place) + config: MCPStackConfig Pydantic model (modified in-place via attribute assignment) output_dir: Output directory where manifests will be generated (for relative paths) verbose: Print verbose output """ deploy_dir = get_deploy_dir() env_dir = deploy_dir / "env" - # Check gateway - gateway = config.get("gateway", {}) - if "env_file" not in gateway or not gateway["env_file"]: + # Check gateway - since we need to modify the model, we access env_file directly + # Note: Pydantic models allow attribute assignment after creation + if not hasattr(config.gateway, 'env_file') or not config.gateway.env_file: gateway_env = env_dir / ".env.gateway" if gateway_env.exists(): # Make path relative to output_dir (where docker-compose.yaml will be) relative_path = os.path.relpath(gateway_env, output_dir) - gateway["env_file"] = relative_path + config.gateway.env_file = relative_path # type: ignore print(f"⚠ Auto-detected env file: {gateway_env}") if verbose: print(" (Gateway env_file not specified in config)") # Check plugins - plugins = config.get("plugins", []) - for plugin in plugins: - plugin_name = plugin["name"] - if "env_file" not in plugin or not plugin["env_file"]: + for plugin in config.plugins: + plugin_name = plugin.name + if not hasattr(plugin, 'env_file') or not plugin.env_file: plugin_env = env_dir / f".env.{plugin_name}" if plugin_env.exists(): # Make path relative to output_dir (where docker-compose.yaml will be) relative_path = os.path.relpath(plugin_env, output_dir) - plugin["env_file"] = relative_path + plugin.env_file = relative_path # type: ignore print(f"⚠ Auto-detected env file: {plugin_env}") if verbose: print(f" (Plugin {plugin_name} env_file not specified in config)") @@ -480,6 +489,78 @@ def copy_env_template(plugin_name: str, plugin_build_dir: 
Path, verbose: bool = print(f"βœ“ Copied .env.template -> {target_file}") +def handle_registry_operations(component, component_name: str, image_tag: str, container_runtime: str, verbose: bool = False) -> str: + """Handle registry tagging and pushing for a built component. + + This function is shared between Dagger and plain Python implementations. + It tags the locally built image with the registry path and optionally pushes it. + + Args: + component: BuildableConfig component (GatewayConfig or PluginConfig) + component_name: Name of the component (gateway or plugin name) + image_tag: Current local image tag + container_runtime: Container runtime to use ("docker" or "podman") + verbose: Print verbose output + + Returns: + Final image tag (registry path if registry enabled, otherwise original tag) + + Raises: + ValueError: If registry enabled but missing required configuration + subprocess.CalledProcessError: If tag or push command fails + """ + from mcpgateway.tools.builder.schema import BuildableConfig + + # Type check for better error messages + if not isinstance(component, BuildableConfig): + raise TypeError(f"Component must be a BuildableConfig instance, got {type(component)}") + + # Check if registry is enabled + if not component.registry or not component.registry.enabled: + return image_tag + + registry_config = component.registry + + # Validate registry configuration + if not registry_config.url or not registry_config.namespace: + raise ValueError(f"Registry enabled for {component_name} but missing 'url' or 'namespace' configuration") + + # Construct registry image path + # Format: {registry_url}/{namespace}/{image_name}:{tag} + base_image_name = image_tag.split(":")[0].split("/")[-1] # Extract base name (e.g., "mcpgateway-gateway") + image_version = image_tag.split(":")[-1] if ":" in image_tag else "latest" # Extract tag + registry_image = f"{registry_config.url}/{registry_config.namespace}/{base_image_name}:{image_version}" + + # Tag image for registry + 
if verbose: + console.print(f"[dim]Tagging {image_tag} as {registry_image}[/dim]") + tag_cmd = [container_runtime, "tag", image_tag, registry_image] + result = subprocess.run(tag_cmd, capture_output=True, text=True, check=True) + if result.stdout and verbose: + console.print(result.stdout) + + # Push to registry if enabled + if registry_config.push: + if verbose: + console.print(f"[blue]Pushing {registry_image} to registry...[/blue]") + push_cmd = [container_runtime, "push", registry_image] + try: + result = subprocess.run(push_cmd, capture_output=True, text=True, check=True) + if result.stdout and verbose: + console.print(result.stdout) + console.print(f"[green]βœ“ Pushed to registry: {registry_image}[/green]") + except subprocess.CalledProcessError as e: + console.print(f"[red]βœ— Failed to push to registry: {e}[/red]") + console.print("[yellow]Tip: Authenticate to the registry first:[/yellow]") + console.print(f" {container_runtime} login {registry_config.url}") + raise + + # Update component image reference to use registry path for manifests + component.image = registry_image + + return registry_image + + # Docker Compose Utilities @@ -641,8 +722,16 @@ def deploy_kubernetes(manifests_dir: Path, verbose: bool = False) -> None: if result.returncode != 0: raise RuntimeError(f"kubectl apply failed: {result.stderr}") - # 2. Apply certificate secrets (now namespace exists) - if cert_secrets.exists(): + # 2. 
Apply certificate resources (now namespace exists) + # Check for both cert-secrets.yaml (local mode) and cert-manager-certificates.yaml (cert-manager mode) + cert_manager_certs = manifests_dir / "cert-manager-certificates.yaml" + if cert_manager_certs.exists(): + result = subprocess.run(["kubectl", "apply", "-f", str(cert_manager_certs)], capture_output=True, text=True, check=False) + if result.stdout and verbose: + console.print(result.stdout) + if result.returncode != 0: + raise RuntimeError(f"kubectl apply failed: {result.stderr}") + elif cert_secrets.exists(): result = subprocess.run(["kubectl", "apply", "-f", str(cert_secrets)], capture_output=True, text=True, check=False) if result.stdout and verbose: console.print(result.stdout) diff --git a/mcpgateway/tools/builder/dagger_deploy.py b/mcpgateway/tools/builder/dagger_deploy.py index c3c71ab95..d7f7b248a 100644 --- a/mcpgateway/tools/builder/dagger_deploy.py +++ b/mcpgateway/tools/builder/dagger_deploy.py @@ -16,7 +16,7 @@ # Standard from pathlib import Path -from typing import Any, Dict, List, Optional +from typing import List, Optional try: # Third-Party @@ -43,12 +43,14 @@ generate_kubernetes_manifests, generate_plugin_config, get_deploy_dir, + handle_registry_operations, load_config, verify_compose, verify_kubernetes, ) from mcpgateway.tools.builder.common import copy_env_template as copy_template from mcpgateway.tools.builder.pipeline import CICDModule +from mcpgateway.tools.builder.schema import BuildableConfig, MCPStackConfig console = Console() @@ -87,8 +89,8 @@ async def build(self, config_file: str, plugins_only: bool = False, specific_plu async with dagger.connection(dagger.Config(workdir=str(Path.cwd()))): # Build gateway (unless plugins_only=True) if not plugins_only: - gateway = config.get("gateway", {}) - if gateway.get("repo"): + gateway = config.gateway + if gateway.repo: with Progress(SpinnerColumn(), TextColumn("[progress.description]{task.description}"), console=self.console) as progress: 
task = progress.add_task("Building gateway...", total=None) try: @@ -109,10 +111,10 @@ async def build(self, config_file: str, plugins_only: bool = False, specific_plu self.console.print("[dim]Skipping gateway build (using pre-built image)[/dim]") # Build plugins - plugins = config.get("plugins", []) + plugins = config.plugins if specific_plugins: - plugins = [p for p in plugins if p["name"] in specific_plugins] + plugins = [p for p in plugins if p.name in specific_plugins] if not plugins: self.console.print("[yellow]No plugins to build[/yellow]") @@ -121,10 +123,10 @@ async def build(self, config_file: str, plugins_only: bool = False, specific_plu with Progress(SpinnerColumn(), TextColumn("[progress.description]{task.description}"), console=self.console) as progress: for plugin in plugins: - plugin_name = plugin["name"] + plugin_name = plugin.name # Skip if pre-built image specified - if "image" in plugin and "repo" not in plugin: + if plugin.image and not plugin.repo: task = progress.add_task(f"Skipping {plugin_name} (using pre-built image)", total=1) progress.update(task, completed=1) continue @@ -163,9 +165,9 @@ async def generate_certificates(self, config_file: str) -> None: config = load_config(config_file) # Check if using cert-manager - cert_config = config.get("certificates", {}) - use_cert_manager = cert_config.get("use_cert_manager", False) - validity_days = cert_config.get("validity_days", 825) + cert_config = config.certificates + use_cert_manager = cert_config.use_cert_manager if cert_config else False + validity_days = cert_config.validity_days if cert_config else 825 if use_cert_manager: # Skip local generation - cert-manager will handle certificate creation @@ -202,9 +204,9 @@ async def generate_certificates(self, config_file: str) -> None: container = container.with_exec(["sh", "-c", f"make certs-mcp-gateway MCP_CERT_DAYS={validity_days}"]) # Generate plugin certificates - plugins = config.get("plugins", []) + plugins = config.plugins for plugin 
in plugins: - plugin_name = plugin["name"] + plugin_name = plugin.name container = container.with_exec(["sh", "-c", f"make certs-mcp-plugin PLUGIN_NAME={plugin_name} MCP_CERT_DAYS={validity_days}"]) # Export certificates back to host @@ -248,8 +250,8 @@ async def deploy(self, config_file: str, dry_run: bool = False, skip_build: bool await self.build(config_file) # Generate certificates (only if mTLS is enabled) - gateway_mtls = config.get("gateway", {}).get("mtls_enabled", True) - plugin_mtls = any(p.get("mtls_enabled", True) for p in config.get("plugins", [])) + gateway_mtls = config.gateway.mtls_enabled if config.gateway.mtls_enabled is not None else True + plugin_mtls = any((p.mtls_enabled if p.mtls_enabled is not None else True) for p in config.plugins) mtls_needed = gateway_mtls or plugin_mtls if not skip_certs and mtls_needed: @@ -266,7 +268,7 @@ async def deploy(self, config_file: str, dry_run: bool = False, skip_build: bool return # Apply deployment - deployment_type = config["deployment"]["type"] + deployment_type = config.deployment.type async with dagger.connection(dagger.Config(workdir=str(Path.cwd()))): try: @@ -309,7 +311,7 @@ async def verify(self, config_file: str, wait: bool = False, timeout: int = 300) timeout: Wait timeout in seconds """ config = load_config(config_file) - deployment_type = config["deployment"]["type"] + deployment_type = config.deployment.type if self.verbose: self.console.print("[blue]Verifying deployment...[/blue]") @@ -327,7 +329,7 @@ async def destroy(self, config_file: str) -> None: config_file: Path to mcp-stack.yaml """ config = load_config(config_file) - deployment_type = config["deployment"]["type"] + deployment_type = config.deployment.type if self.verbose: self.console.print("[blue]Destroying deployment...[/blue]") @@ -352,7 +354,7 @@ def generate_manifests(self, config_file: str, output_dir: Optional[str] = None) ValueError: If unsupported deployment type specified """ config = load_config(config_file) - 
deployment_type = config["deployment"]["type"] + deployment_type = config.deployment.type if output_dir is None: deploy_dir = get_deploy_dir() @@ -380,11 +382,11 @@ def generate_manifests(self, config_file: str, output_dir: Optional[str] = None) # Private helper methods - async def _build_component_with_dagger(self, component: Dict[str, Any], component_name: str, no_cache: bool = False, copy_env_templates: bool = False) -> None: + async def _build_component_with_dagger(self, component: BuildableConfig, component_name: str, no_cache: bool = False, copy_env_templates: bool = False) -> None: """Build a component (gateway or plugin) container using Dagger. Args: - component: Component configuration dict + component: Component configuration (GatewayConfig or PluginConfig) component_name: Name of the component (gateway or plugin name) no_cache: Disable cache copy_env_templates: Copy .env.template from repo if it exists @@ -393,13 +395,13 @@ async def _build_component_with_dagger(self, component: Dict[str, Any], componen ValueError: If component has no repo field Exception: If build or export fails """ - repo = component.get("repo") + repo = component.repo if not repo: raise ValueError(f"Component '{component_name}' has no 'repo' field") # Clone repository to local directory for env template access - git_ref = component.get("ref", "main") + git_ref = component.ref or "main" clone_dir = Path(f"./build/{component_name}") # For Dagger, we still need local clone if copying env templates @@ -411,12 +413,12 @@ async def _build_component_with_dagger(self, component: Dict[str, Any], componen if (clone_dir / ".git").exists(): subprocess.run(["git", "fetch", "origin", git_ref], cwd=clone_dir, check=True, capture_output=True) - subprocess.run(["git", "checkout", "-B", git_ref, f"origin/{git_ref}"], cwd=clone_dir, check=True, capture_output=True) + subprocess.run(["git", "checkout", git_ref], cwd=clone_dir, check=True, capture_output=True) else: subprocess.run(["git", "clone", 
"--branch", git_ref, "--depth", "1", repo, str(clone_dir)], check=True, capture_output=True) # Determine build context - build_context = component.get("context", ".") + build_context = component.context or "." build_dir = clone_dir / build_context # Copy env template using shared function @@ -426,25 +428,25 @@ async def _build_component_with_dagger(self, component: Dict[str, Any], componen source = dag.git(repo).branch(git_ref).tree() # If component has context subdirectory, navigate to it - build_context = component.get("context", ".") + build_context = component.context or "." if build_context != ".": source = source.directory(build_context) # Detect Containerfile/Dockerfile - containerfile = component.get("containerfile", "Containerfile") + containerfile = component.containerfile or "Containerfile" # Build container - determine image tag - if "image" in component: + if component.image: # Use explicitly specified image name - image_tag = component["image"] + image_tag = component.image else: # Generate default image name based on component type image_tag = f"mcpgateway-{component_name.lower()}:latest" # Build with optional target stage for multi-stage builds build_kwargs = {"dockerfile": containerfile} - if "target" in component: - build_kwargs["target"] = component["target"] + if component.target: + build_kwargs["target"] = component.target # Use docker_build on the directory container = source.docker_build(**build_kwargs) @@ -459,6 +461,14 @@ async def _build_component_with_dagger(self, component: Dict[str, Any], componen if "BeartypeCallHintReturnViolation" not in str(type(e)): raise + # Handle registry operations (tag and push if enabled) + # Note: Dagger exports to local docker/podman, so we need to detect which runtime to use + # Standard + import shutil + + container_runtime = "docker" if shutil.which("docker") else "podman" + image_tag = handle_registry_operations(component, component_name, image_tag, container_runtime, verbose=self.verbose) + if 
self.verbose: self.console.print(f"[green]βœ“ Built {component_name} -> {image_tag}[/green]") @@ -483,27 +493,27 @@ async def _deploy_compose(self, manifests_dir: Path) -> None: compose_file = manifests_dir / "docker-compose.yaml" deploy_compose(compose_file, verbose=self.verbose) - async def _verify_kubernetes(self, config: Dict[str, Any], wait: bool = False, timeout: int = 300) -> None: + async def _verify_kubernetes(self, config: MCPStackConfig, wait: bool = False, timeout: int = 300) -> None: """Verify Kubernetes deployment health. Uses shared verify_kubernetes() from common.py to avoid code duplication. Args: - config: Parsed configuration dict + config: Parsed configuration Pydantic model wait: Wait for pods to be ready timeout: Wait timeout in seconds """ - namespace = config["deployment"].get("namespace", "mcp-gateway") + namespace = config.deployment.namespace or "mcp-gateway" output = verify_kubernetes(namespace, wait=wait, timeout=timeout, verbose=self.verbose) self.console.print(output) - async def _verify_compose(self, config: Dict[str, Any], wait: bool = False, timeout: int = 300) -> None: + async def _verify_compose(self, config: MCPStackConfig, wait: bool = False, timeout: int = 300) -> None: """Verify Docker Compose deployment health. Uses shared verify_compose() from common.py to avoid code duplication. Args: - config: Parsed configuration dict + config: Parsed configuration Pydantic model wait: Wait for containers to be ready timeout: Wait timeout in seconds """ @@ -515,13 +525,13 @@ async def _verify_compose(self, config: Dict[str, Any], wait: bool = False, time output = verify_compose(compose_file, verbose=self.verbose) self.console.print(output) - async def _destroy_kubernetes(self, config: Dict[str, Any]) -> None: + async def _destroy_kubernetes(self, config: MCPStackConfig) -> None: """Destroy Kubernetes deployment. Uses shared destroy_kubernetes() from common.py to avoid code duplication. 
Args: - config: Parsed configuration dict + config: Parsed configuration Pydantic model """ _ = config # Reserved for future use (namespace, labels, etc.) # Use the same manifests directory as generate_manifests @@ -529,13 +539,13 @@ async def _destroy_kubernetes(self, config: Dict[str, Any]) -> None: manifests_dir = getattr(self, "_last_output_dir", deploy_dir / "manifests" / "kubernetes") destroy_kubernetes(manifests_dir, verbose=self.verbose) - async def _destroy_compose(self, config: Dict[str, Any]) -> None: + async def _destroy_compose(self, config: MCPStackConfig) -> None: """Destroy Docker Compose deployment. Uses shared destroy_compose() from common.py to avoid code duplication. Args: - config: Parsed configuration dict + config: Parsed configuration Pydantic model """ _ = config # Reserved for future use (project name, networks, etc.) # Use the same manifests directory as generate_manifests diff --git a/mcpgateway/tools/builder/python_deploy.py b/mcpgateway/tools/builder/python_deploy.py index bfb3ed79b..c5da5fec6 100644 --- a/mcpgateway/tools/builder/python_deploy.py +++ b/mcpgateway/tools/builder/python_deploy.py @@ -15,7 +15,7 @@ from pathlib import Path import shutil import subprocess -from typing import Any, Dict, List, Optional +from typing import List, Optional # Third-Party from rich.console import Console @@ -31,12 +31,14 @@ generate_kubernetes_manifests, generate_plugin_config, get_deploy_dir, + handle_registry_operations, load_config, verify_compose, verify_kubernetes, ) from mcpgateway.tools.builder.common import copy_env_template as copy_template from mcpgateway.tools.builder.pipeline import CICDModule +from mcpgateway.tools.builder.schema import BuildableConfig, MCPStackConfig console = Console() @@ -72,8 +74,8 @@ async def build(self, config_file: str, plugins_only: bool = False, specific_plu # Build gateway (unless plugins_only=True) if not plugins_only: - gateway = config.get("gateway", {}) - if gateway.get("repo"): + gateway = 
config.gateway + if gateway.repo: with Progress(SpinnerColumn(), TextColumn("[progress.description]{task.description}"), console=self.console) as progress: task = progress.add_task("Building gateway...", total=None) try: @@ -94,10 +96,10 @@ async def build(self, config_file: str, plugins_only: bool = False, specific_plu self.console.print("[dim]Skipping gateway build (using pre-built image)[/dim]") # Build plugins - plugins = config.get("plugins", []) + plugins = config.plugins if specific_plugins: - plugins = [p for p in plugins if p["name"] in specific_plugins] + plugins = [p for p in plugins if p.name in specific_plugins] if not plugins: self.console.print("[yellow]No plugins to build[/yellow]") @@ -106,10 +108,10 @@ async def build(self, config_file: str, plugins_only: bool = False, specific_plu with Progress(SpinnerColumn(), TextColumn("[progress.description]{task.description}"), console=self.console) as progress: for plugin in plugins: - plugin_name = plugin["name"] + plugin_name = plugin.name # Skip if pre-built image specified - if "image" in plugin and "repo" not in plugin: + if plugin.image and not plugin.repo: task = progress.add_task(f"Skipping {plugin_name} (using pre-built image)", total=1) progress.update(task, completed=1) continue @@ -147,9 +149,9 @@ async def generate_certificates(self, config_file: str) -> None: config = load_config(config_file) # Check if using cert-manager - cert_config = config.get("certificates", {}) - use_cert_manager = cert_config.get("use_cert_manager", False) - validity_days = cert_config.get("validity_days", 825) + cert_config = config.certificates + use_cert_manager = cert_config.use_cert_manager if cert_config else False + validity_days = cert_config.validity_days if cert_config else 825 if use_cert_manager: # Skip local generation - cert-manager will handle certificate creation @@ -173,9 +175,9 @@ async def generate_certificates(self, config_file: str) -> None: self._run_command(["make", "certs-mcp-gateway", 
f"MCP_CERT_DAYS={validity_days}"]) # Generate plugin certificates - plugins = config.get("plugins", []) + plugins = config.plugins for plugin in plugins: - plugin_name = plugin["name"] + plugin_name = plugin.name self._run_command(["make", "certs-mcp-plugin", f"PLUGIN_NAME={plugin_name}", f"MCP_CERT_DAYS={validity_days}"]) if self.verbose: @@ -201,8 +203,8 @@ async def deploy(self, config_file: str, dry_run: bool = False, skip_build: bool await self.build(config_file) # Generate certificates (only if mTLS is enabled) - gateway_mtls = config.get("gateway", {}).get("mtls_enabled", True) - plugin_mtls = any(p.get("mtls_enabled", True) for p in config.get("plugins", [])) + gateway_mtls = config.gateway.mtls_enabled if config.gateway.mtls_enabled is not None else True + plugin_mtls = any((p.mtls_enabled if p.mtls_enabled is not None else True) for p in config.plugins) mtls_needed = gateway_mtls or plugin_mtls if not skip_certs and mtls_needed: @@ -219,7 +221,7 @@ async def deploy(self, config_file: str, dry_run: bool = False, skip_build: bool return # Apply deployment - deployment_type = config["deployment"]["type"] + deployment_type = config.deployment.type if deployment_type == "kubernetes": self._deploy_kubernetes(manifests_dir) @@ -237,7 +239,7 @@ async def verify(self, config_file: str, wait: bool = False, timeout: int = 300) timeout: Wait timeout in seconds """ config = load_config(config_file) - deployment_type = config["deployment"]["type"] + deployment_type = config.deployment.type if self.verbose: self.console.print("[blue]Verifying deployment...[/blue]") @@ -254,7 +256,7 @@ async def destroy(self, config_file: str) -> None: config_file: Path to mcp-stack.yaml """ config = load_config(config_file) - deployment_type = config["deployment"]["type"] + deployment_type = config.deployment.type if self.verbose: self.console.print("[blue]Destroying deployment...[/blue]") @@ -278,7 +280,7 @@ def generate_manifests(self, config_file: str, output_dir: Optional[str] = 
None) ValueError: If unsupported deployment type specified """ config = load_config(config_file) - deployment_type = config["deployment"]["type"] + deployment_type = config.deployment.type if output_dir is None: deploy_dir = get_deploy_dir() @@ -343,11 +345,11 @@ def _run_command(self, cmd: List[str], cwd: Optional[Path] = None, capture_outpu return result - def _build_component(self, component: Dict[str, Any], component_name: str, no_cache: bool = False, copy_env_templates: bool = False) -> None: + def _build_component(self, component: BuildableConfig, component_name: str, no_cache: bool = False, copy_env_templates: bool = False) -> None: """Build a component (gateway or plugin) container using docker/podman. Args: - component: Component configuration dict + component: Component configuration (GatewayConfig or PluginConfig) component_name: Name of the component (gateway or plugin name) no_cache: Disable cache copy_env_templates: Copy .env.template from repo if it exists @@ -356,13 +358,13 @@ def _build_component(self, component: Dict[str, Any], component_name: str, no_ca ValueError: If component has no repo field FileNotFoundError: If build context or containerfile not found """ - repo = component.get("repo") + repo = component.repo if not repo: raise ValueError(f"Component '{component_name}' has no 'repo' field") # Clone repository - git_ref = component.get("ref", "main") + git_ref = component.ref or "main" clone_dir = Path(f"./build/{component_name}") clone_dir.mkdir(parents=True, exist_ok=True) @@ -371,21 +373,21 @@ def _build_component(self, component: Dict[str, Any], component_name: str, no_ca if self.verbose: self.console.print(f"[dim]Updating {component_name} repository...[/dim]") self._run_command(["git", "fetch", "origin", git_ref], cwd=clone_dir) - self._run_command(["git", "checkout", "-B", git_ref, f"origin/{git_ref}"], cwd=clone_dir) + self._run_command(["git", "checkout", git_ref], cwd=clone_dir) else: if self.verbose: 
self.console.print(f"[dim]Cloning {component_name} repository...[/dim]") self._run_command(["git", "clone", "--branch", git_ref, "--depth", "1", repo, str(clone_dir)]) # Determine build context (subdirectory within repo) - build_context = component.get("context", ".") + build_context = component.context or "." build_dir = clone_dir / build_context if not build_dir.exists(): raise FileNotFoundError(f"Build context not found: {build_dir}") # Detect Containerfile/Dockerfile - containerfile = component.get("containerfile", "Containerfile") + containerfile = component.containerfile or "Containerfile" containerfile_path = build_dir / containerfile if not containerfile_path.exists(): @@ -395,9 +397,9 @@ def _build_component(self, component: Dict[str, Any], component_name: str, no_ca raise FileNotFoundError(f"No Containerfile or Dockerfile found in {build_dir}") # Build container - determine image tag - if "image" in component: + if component.image: # Use explicitly specified image name - image_tag = component["image"] + image_tag = component.image else: # Generate default image name based on component type image_tag = f"mcpgateway-{component_name.lower()}:latest" @@ -408,13 +410,21 @@ def _build_component(self, component: Dict[str, Any], component_name: str, no_ca build_cmd.append("--no-cache") # Add target stage if specified (for multi-stage builds) - if "target" in component: - build_cmd.extend(["--target", component["target"]]) + if component.target: + build_cmd.extend(["--target", component.target]) + + # For Docker, add --load to ensure image is loaded into daemon + # (needed for buildx/docker-container driver) + if self.container_runtime == "docker": + build_cmd.append("--load") build_cmd.append(".") self._run_command(build_cmd, cwd=build_dir) + # Handle registry operations (tag and push if enabled) + image_tag = handle_registry_operations(component, component_name, image_tag, self.container_runtime, verbose=self.verbose) + # Copy .env.template if requested and 
exists if copy_env_templates: copy_template(component_name, build_dir, verbose=self.verbose) @@ -443,27 +453,27 @@ def _deploy_compose(self, manifests_dir: Path) -> None: compose_file = manifests_dir / "docker-compose.yaml" deploy_compose(compose_file, verbose=self.verbose) - def _verify_kubernetes(self, config: Dict[str, Any], wait: bool = False, timeout: int = 300) -> None: + def _verify_kubernetes(self, config: MCPStackConfig, wait: bool = False, timeout: int = 300) -> None: """Verify Kubernetes deployment health. Uses shared verify_kubernetes() from common.py to avoid code duplication. Args: - config: Parsed configuration dict + config: Parsed configuration Pydantic model wait: Wait for pods to be ready timeout: Wait timeout in seconds """ - namespace = config["deployment"].get("namespace", "mcp-gateway") + namespace = config.deployment.namespace or "mcp-gateway" output = verify_kubernetes(namespace, wait=wait, timeout=timeout, verbose=self.verbose) self.console.print(output) - def _verify_compose(self, config: Dict[str, Any], wait: bool = False, timeout: int = 300) -> None: + def _verify_compose(self, config: MCPStackConfig, wait: bool = False, timeout: int = 300) -> None: """Verify Docker Compose deployment health. Uses shared verify_compose() from common.py to avoid code duplication. Args: - config: Parsed configuration dict + config: Parsed configuration Pydantic model wait: Wait for containers to be ready timeout: Wait timeout in seconds """ @@ -475,13 +485,13 @@ def _verify_compose(self, config: Dict[str, Any], wait: bool = False, timeout: i output = verify_compose(compose_file, verbose=self.verbose) self.console.print(output) - def _destroy_kubernetes(self, config: Dict[str, Any]) -> None: + def _destroy_kubernetes(self, config: MCPStackConfig) -> None: """Destroy Kubernetes deployment. Uses shared destroy_kubernetes() from common.py to avoid code duplication. 
Args: - config: Parsed configuration dict + config: Parsed configuration Pydantic model """ _ = config # Reserved for future use (namespace, labels, etc.) # Use the same manifests directory as generate_manifests @@ -489,13 +499,13 @@ def _destroy_kubernetes(self, config: Dict[str, Any]) -> None: manifests_dir = getattr(self, "_last_output_dir", deploy_dir / "manifests" / "kubernetes") destroy_kubernetes(manifests_dir, verbose=self.verbose) - def _destroy_compose(self, config: Dict[str, Any]) -> None: + def _destroy_compose(self, config: MCPStackConfig) -> None: """Destroy Docker Compose deployment. Uses shared destroy_compose() from common.py to avoid code duplication. Args: - config: Parsed configuration dict + config: Parsed configuration Pydantic model """ _ = config # Reserved for future use (project name, networks, etc.) # Use the same manifests directory as generate_manifests diff --git a/mcpgateway/tools/builder/schema.py b/mcpgateway/tools/builder/schema.py index 4d60adaac..401daf1f5 100644 --- a/mcpgateway/tools/builder/schema.py +++ b/mcpgateway/tools/builder/schema.py @@ -9,7 +9,7 @@ from typing import Any, Dict, List, Literal, Optional # Third-Party -from pydantic import BaseModel, Field, field_validator +from pydantic import BaseModel, ConfigDict, Field, field_validator class DeploymentConfig(BaseModel): @@ -20,6 +20,34 @@ class DeploymentConfig(BaseModel): namespace: Optional[str] = Field(None, description="Namespace for Kubernetes") +class RegistryConfig(BaseModel): + """Container registry configuration. + + Optional configuration for pushing built images to a container registry. + When enabled, images will be tagged with the full registry path and optionally pushed. 
+ + Authentication: + Users must authenticate to the registry before running the build: + - Docker Hub: `docker login` + - Quay.io: `podman login quay.io` + - OpenShift internal: `podman login $(oc registry info) -u $(oc whoami) -p $(oc whoami -t)` + - Private registry: `podman login your-registry.com -u username` + + Attributes: + enabled: Enable registry integration (default: False) + url: Registry URL (e.g., "docker.io", "quay.io", "default-route-openshift-image-registry.apps-crc.testing") + namespace: Registry namespace/organization/project (e.g., "myorg", "mcp-gateway-test") + push: Push image after build (default: True) + image_pull_policy: Kubernetes imagePullPolicy (default: "IfNotPresent") + """ + + enabled: bool = Field(False, description="Enable registry push") + url: Optional[str] = Field(None, description="Registry URL (e.g., docker.io, quay.io, or internal registry)") + namespace: Optional[str] = Field(None, description="Registry namespace/organization/project") + push: bool = Field(True, description="Push image after build") + image_pull_policy: Optional[str] = Field("IfNotPresent", description="Kubernetes imagePullPolicy (IfNotPresent, Always, Never)") + + class BuildableConfig(BaseModel): """Base class for components that can be built from source or use pre-built images. 
@@ -37,9 +65,13 @@ class BuildableConfig(BaseModel): target: Target stage for multi-stage builds (optional) host_port: Host port mapping for direct access (optional) env_vars: Environment variables for container + env_file: Path to environment file (.env) mtls_enabled: Enable mutual TLS authentication (default: True) """ + # Allow attribute assignment after model creation (needed for auto-detection of env_file) + model_config = ConfigDict(validate_assignment=True) + # Build configuration image: Optional[str] = Field(None, description="Pre-built Docker image") repo: Optional[str] = Field(None, description="Git repository URL") @@ -51,8 +83,12 @@ class BuildableConfig(BaseModel): # Runtime configuration host_port: Optional[int] = Field(None, description="Host port mapping") env_vars: Optional[Dict[str, Any]] = Field(default_factory=dict, description="Environment variables") + env_file: Optional[str] = Field(None, description="Path to environment file (.env)") mtls_enabled: Optional[bool] = Field(True, description="Enable mTLS") + # Registry configuration + registry: Optional[RegistryConfig] = Field(None, description="Container registry configuration") + def model_post_init(self, __context: Any) -> None: """Validate that either image or repo is specified @@ -157,7 +193,7 @@ class PostgresConfig(BaseModel): """PostgreSQL database configuration""" enabled: Optional[bool] = Field(True, description="Enable PostgreSQL deployment") - image: Optional[str] = Field("postgres:17", description="PostgreSQL image") + image: Optional[str] = Field("quay.io/sclorg/postgresql-15-c9s:latest", description="PostgreSQL image (default is OpenShift-compatible)") database: Optional[str] = Field("mcp", description="Database name") user: Optional[str] = Field("postgres", description="Database user") password: Optional[str] = Field("mysecretpassword", description="Database password") diff --git a/mcpgateway/tools/builder/templates/kubernetes/cert-secrets.yaml.j2 
b/mcpgateway/tools/builder/templates/kubernetes/cert-secrets.yaml.j2 index c5a026434..67e5a1e87 100644 --- a/mcpgateway/tools/builder/templates/kubernetes/cert-secrets.yaml.j2 +++ b/mcpgateway/tools/builder/templates/kubernetes/cert-secrets.yaml.j2 @@ -7,7 +7,7 @@ apiVersion: v1 kind: Secret metadata: - name: mcp-ca-cert + name: mcp-ca-secret namespace: {{ namespace }} type: Opaque data: diff --git a/mcpgateway/tools/builder/templates/kubernetes/deployment.yaml.j2 b/mcpgateway/tools/builder/templates/kubernetes/deployment.yaml.j2 index 905edd576..2946135aa 100644 --- a/mcpgateway/tools/builder/templates/kubernetes/deployment.yaml.j2 +++ b/mcpgateway/tools/builder/templates/kubernetes/deployment.yaml.j2 @@ -174,7 +174,9 @@ spec: securityContext: runAsNonRoot: true - runAsUser: 1001 + {% if run_as_user is defined %} + runAsUser: {{ run_as_user }} + {% endif %} allowPrivilegeEscalation: false capabilities: drop: @@ -189,7 +191,7 @@ spec: defaultMode: 0444 - name: ca-cert secret: - secretName: mcp-ca-cert + secretName: mcp-ca-secret defaultMode: 0444 {% endif %} diff --git a/mcpgateway/tools/builder/templates/kubernetes/postgres.yaml.j2 b/mcpgateway/tools/builder/templates/kubernetes/postgres.yaml.j2 index a41afa061..de58a288e 100644 --- a/mcpgateway/tools/builder/templates/kubernetes/postgres.yaml.j2 +++ b/mcpgateway/tools/builder/templates/kubernetes/postgres.yaml.j2 @@ -25,9 +25,14 @@ metadata: namespace: {{ namespace }} type: Opaque stringData: + # Official PostgreSQL image variables POSTGRES_USER: {{ user }} POSTGRES_PASSWORD: {{ password }} POSTGRES_DB: {{ database }} + # Red Hat/SCL PostgreSQL image variables (OpenShift-compatible) + POSTGRESQL_USER: {{ user }} + POSTGRESQL_PASSWORD: {{ password }} + POSTGRESQL_DATABASE: {{ database }} --- apiVersion: apps/v1 kind: Deployment From 3cf32c385f96d63c8263d9588cf225b738ef58cd Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Fri, 17 Oct 2025 12:21:38 -0600 Subject: [PATCH 34/35] fix: fixes to support Openshift, and 
support enabling plugins in k8s. Signed-off-by: Teryl Taylor --- docs/docs/deployment/cforge-gateway.md | 44 ++++++++ .../deploy-openshift-local-registry.yaml | 3 +- mcpgateway/tools/builder/common.py | 101 +++++++++++++++--- mcpgateway/tools/builder/dagger_deploy.py | 3 +- mcpgateway/tools/builder/python_deploy.py | 58 ++++++---- mcpgateway/tools/builder/schema.py | 1 + .../templates/compose/docker-compose.yaml.j2 | 1 + .../templates/kubernetes/deployment.yaml.j2 | 25 +++-- .../kubernetes/plugins-configmap.yaml.j2 | 13 +++ 9 files changed, 205 insertions(+), 44 deletions(-) create mode 100644 mcpgateway/tools/builder/templates/kubernetes/plugins-configmap.yaml.j2 diff --git a/docs/docs/deployment/cforge-gateway.md b/docs/docs/deployment/cforge-gateway.md index c93b3ed6e..5d2806601 100644 --- a/docs/docs/deployment/cforge-gateway.md +++ b/docs/docs/deployment/cforge-gateway.md @@ -498,6 +498,7 @@ deployment: type: kubernetes | compose # Required: Deployment target project_name: my-project # Docker Compose only namespace: mcp-gateway # Kubernetes only + container_engine: podman | docker # Container runtime (auto-detected if not specified) ``` | Field | Type | Required | Description | Default | @@ -505,6 +506,7 @@ deployment: | `type` | string | βœ… | Deployment type: `kubernetes` or `compose` | - | | `project_name` | string | ❌ | Docker Compose project name | - | | `namespace` | string | ❌ | Kubernetes namespace | - | +| `container_engine` | string | ❌ | Container runtime: `docker` or `podman` | Auto-detected | --- @@ -1554,6 +1556,48 @@ oc policy add-role-to-user system:image-puller \ 2. Switch to a registry with larger limits 3. Split into smaller images +#### Podman Trying HTTP Instead of HTTPS (OpenShift/CRC) + +**Error:** `pinging container registry ...: Get "http://...: dial tcp 127.0.0.1:80: connection refused` + +**Cause:** Podman doesn't know the registry uses HTTPS and defaults to HTTP on port 80. 
+ +**Solution:** Configure podman to use HTTPS for the registry: + +```bash +# SSH into podman machine and configure registries.conf +podman machine ssh -- "sudo bash -c ' +if ! grep -q \"default-route-openshift-image-registry.apps-crc.testing\" /etc/containers/registries.conf 2>/dev/null; then + echo \"\" >> /etc/containers/registries.conf + echo \"[[registry]]\" >> /etc/containers/registries.conf + echo \"location = \\\"default-route-openshift-image-registry.apps-crc.testing\\\"\" >> /etc/containers/registries.conf + echo \"insecure = true\" >> /etc/containers/registries.conf + echo \"Registry configuration added\" +else + echo \"Registry already configured\" +fi +'" + +# Restart podman machine +podman machine restart + +# Wait for restart +sleep 10 + +# Verify you can now push +podman push default-route-openshift-image-registry.apps-crc.testing/namespace/image:tag +``` + +**Alternative solution:** Use the internal registry service name instead of the route: + +```yaml +registry: + url: image-registry.openshift-image-registry.svc:5000 + namespace: mcp-gateway-test +``` + +This bypasses the external route and connects directly to the internal service (HTTPS on port 5000). 
+ #### Registry URL Format **Correct formats:** diff --git a/examples/deployment-configs/deploy-openshift-local-registry.yaml b/examples/deployment-configs/deploy-openshift-local-registry.yaml index 9f9c3d22d..d1d6a064c 100644 --- a/examples/deployment-configs/deploy-openshift-local-registry.yaml +++ b/examples/deployment-configs/deploy-openshift-local-registry.yaml @@ -17,6 +17,7 @@ deployment: type: kubernetes namespace: mcp-gateway-test + container_engine: podman # MCP Gateway configuration gateway: @@ -76,7 +77,7 @@ plugins: # Build from repository repo: https://github.com/terylt/mcp-context-forge.git - ref: feat/configurable_plugin_deployment + ref: feat/use_mtls_plugins context: plugins/external/opa containerfile: Containerfile image: mcpgateway-opapluginfilter:latest diff --git a/mcpgateway/tools/builder/common.py b/mcpgateway/tools/builder/common.py index 2a293b361..5b7080491 100644 --- a/mcpgateway/tools/builder/common.py +++ b/mcpgateway/tools/builder/common.py @@ -32,7 +32,7 @@ from pathlib import Path import shutil import subprocess -from typing import Any, Dict, List +from typing import List # Third-Party from jinja2 import Environment, FileSystemLoader @@ -107,7 +107,8 @@ def generate_plugin_config(config: MCPStackConfig, output_dir: Path, verbose: bo if not template_dir.exists(): raise FileNotFoundError(f"Template directory not found: {template_dir}") - env = Environment(loader=FileSystemLoader(str(template_dir)), autoescape=True) # nosec B701 + # YAML files should not use HTML autoescape + env = Environment(loader=FileSystemLoader(str(template_dir)), autoescape=False) # nosec B701 template = env.get_template("plugins-config.yaml.j2") # Prepare plugin data with computed URLs @@ -304,17 +305,44 @@ def generate_kubernetes_manifests(config: MCPStackConfig, output_dir: Path, verb if verbose: print(" βœ“ Redis deployment manifest generated") + # Generate plugins ConfigMap if plugins are configured + if config.plugins and len(config.plugins) > 0: + 
configmap_template = env.get_template("plugins-configmap.yaml.j2") + # Read the generated plugins-config.yaml file + plugins_config_path = output_dir / "plugins-config.yaml" + if plugins_config_path.exists(): + plugins_config_content = plugins_config_path.read_text() + configmap_manifest = configmap_template.render(namespace=namespace, plugins_config=plugins_config_content) + (output_dir / "plugins-configmap.yaml").write_text(configmap_manifest) + if verbose: + print(" βœ“ Plugins ConfigMap manifest generated") + # Generate gateway deployment gateway_template = env.get_template("deployment.yaml.j2") # Convert Pydantic model to dict for template rendering gateway_dict = config.gateway.model_dump(exclude_none=True) gateway_dict["name"] = "mcpgateway" gateway_dict["namespace"] = namespace + gateway_dict["has_plugins"] = config.plugins and len(config.plugins) > 0 + + # Update image to use full registry path if registry is enabled + if config.gateway.registry and config.gateway.registry.enabled: + base_image_name = config.gateway.image.split(":")[0].split("/")[-1] + image_version = config.gateway.image.split(":")[-1] if ":" in config.gateway.image else "latest" + gateway_dict["image"] = f"{config.gateway.registry.url}/{config.gateway.registry.namespace}/{base_image_name}:{image_version}" + # Set imagePullPolicy from registry config + if config.gateway.registry.image_pull_policy: + gateway_dict["image_pull_policy"] = config.gateway.registry.image_pull_policy # Add DATABASE_URL and REDIS_URL to gateway environment if infrastructure is enabled if "env_vars" not in gateway_dict: gateway_dict["env_vars"] = {} + # Enable plugins if any are configured + if config.plugins and len(config.plugins) > 0: + gateway_dict["env_vars"]["PLUGINS_ENABLED"] = "true" + gateway_dict["env_vars"]["PLUGIN_CONFIG_FILE"] = "/app/config/plugins.yaml" + # Add init containers to wait for infrastructure services init_containers = [] @@ -334,6 +362,20 @@ def generate_kubernetes_manifests(config: 
MCPStackConfig, output_dir: Path, verb # Add init container to wait for Redis init_containers.append({"name": "wait-for-redis", "image": "busybox:1.36", "command": ["sh", "-c", "until nc -z redis 6379; do echo waiting for redis; sleep 2; done"]}) + # Add init containers to wait for plugins to be ready + if config.plugins and len(config.plugins) > 0: + for plugin in config.plugins: + plugin_service_name = f"mcp-plugin-{plugin.name.lower()}" + plugin_port = plugin.port or 8000 + # Wait for plugin service to be available + init_containers.append( + { + "name": f"wait-for-{plugin.name.lower()}", + "image": "busybox:1.36", + "command": ["sh", "-c", f"until nc -z {plugin_service_name} {plugin_port}; do echo waiting for {plugin_service_name}; sleep 2; done"], + } + ) + if init_containers: gateway_dict["init_containers"] = init_containers @@ -346,6 +388,16 @@ def generate_kubernetes_manifests(config: MCPStackConfig, output_dir: Path, verb plugin_dict = plugin.model_dump(exclude_none=True) plugin_dict["name"] = f"mcp-plugin-{plugin.name.lower()}" plugin_dict["namespace"] = namespace + + # Update image to use full registry path if registry is enabled + if plugin.registry and plugin.registry.enabled: + base_image_name = plugin.image.split(":")[0].split("/")[-1] + image_version = plugin.image.split(":")[-1] if ":" in plugin.image else "latest" + plugin_dict["image"] = f"{plugin.registry.url}/{plugin.registry.namespace}/{base_image_name}:{image_version}" + # Set imagePullPolicy from registry config + if plugin.registry.image_pull_policy: + plugin_dict["image_pull_policy"] = plugin.registry.image_pull_policy + plugin_manifest = gateway_template.render(**plugin_dict) (output_dir / f"plugin-{plugin.name.lower()}-deployment.yaml").write_text(plugin_manifest) @@ -427,7 +479,7 @@ def _auto_detect_env_files(config: MCPStackConfig, output_dir: Path, verbose: bo # Check gateway - since we need to modify the model, we access env_file directly # Note: Pydantic models allow attribute 
assignment after creation - if not hasattr(config.gateway, 'env_file') or not config.gateway.env_file: + if not hasattr(config.gateway, "env_file") or not config.gateway.env_file: gateway_env = env_dir / ".env.gateway" if gateway_env.exists(): # Make path relative to output_dir (where docker-compose.yaml will be) @@ -440,7 +492,7 @@ def _auto_detect_env_files(config: MCPStackConfig, output_dir: Path, verbose: bo # Check plugins for plugin in config.plugins: plugin_name = plugin.name - if not hasattr(plugin, 'env_file') or not plugin.env_file: + if not hasattr(plugin, "env_file") or not plugin.env_file: plugin_env = env_dir / f".env.{plugin_name}" if plugin_env.exists(): # Make path relative to output_dir (where docker-compose.yaml will be) @@ -509,6 +561,7 @@ def handle_registry_operations(component, component_name: str, image_tag: str, c ValueError: If registry enabled but missing required configuration subprocess.CalledProcessError: If tag or push command fails """ + # First-Party from mcpgateway.tools.builder.schema import BuildableConfig # Type check for better error messages @@ -543,7 +596,17 @@ def handle_registry_operations(component, component_name: str, image_tag: str, c if registry_config.push: if verbose: console.print(f"[blue]Pushing {registry_image} to registry...[/blue]") - push_cmd = [container_runtime, "push", registry_image] + + # Build push command with TLS options + push_cmd = [container_runtime, "push"] + + # For podman, add --tls-verify=false for registries with self-signed certs + # This is common for OpenShift internal registries and local development + if container_runtime == "podman": + push_cmd.append("--tls-verify=false") + + push_cmd.append(registry_image) + try: result = subprocess.run(push_cmd, capture_output=True, text=True, check=True) if result.stdout and verbose: @@ -551,6 +614,8 @@ def handle_registry_operations(component, component_name: str, image_tag: str, c console.print(f"[green]βœ“ Pushed to registry: 
{registry_image}[/green]") except subprocess.CalledProcessError as e: console.print(f"[red]βœ— Failed to push to registry: {e}[/red]") + if e.stderr: + console.print(f"[red]Error output: {e.stderr}[/red]") console.print("[yellow]Tip: Authenticate to the registry first:[/yellow]") console.print(f" {container_runtime} login {registry_config.url}") raise @@ -687,8 +752,9 @@ def deploy_kubernetes(manifests_dir: Path, verbose: bool = False) -> None: Applies manifests in correct order: 1. Deployments (creates namespaces) - 2. Certificate secrets - 3. Infrastructure (PostgreSQL, Redis) + 2. Certificate resources (secrets or cert-manager CRDs) + 3. ConfigMaps (plugins configuration) + 4. Infrastructure (PostgreSQL, Redis) Excludes plugins-config.yaml (not a Kubernetes resource). @@ -702,19 +768,21 @@ def deploy_kubernetes(manifests_dir: Path, verbose: bool = False) -> None: if not shutil.which("kubectl"): raise RuntimeError("kubectl not found. Cannot deploy to Kubernetes.") - # Get all manifest files, excluding plugins-config.yaml + # Get all manifest files, excluding plugins-config.yaml (not a Kubernetes resource) all_manifests = sorted(manifests_dir.glob("*.yaml")) all_manifests = [m for m in all_manifests if m.name != "plugins-config.yaml"] - # Apply in order to handle dependencies + # Identify different types of manifests cert_secrets = manifests_dir / "cert-secrets.yaml" + cert_manager_certs = manifests_dir / "cert-manager-certificates.yaml" postgres_deploy = manifests_dir / "postgres-deployment.yaml" redis_deploy = manifests_dir / "redis-deployment.yaml" + plugins_configmap = manifests_dir / "plugins-configmap.yaml" # 1. 
Apply all deployments first (creates namespaces) - deployment_files = [m for m in all_manifests if m.name.endswith("-deployment.yaml") and m != cert_secrets and m != postgres_deploy and m != redis_deploy] + deployment_files = [m for m in all_manifests if m.name.endswith("-deployment.yaml") and m not in [cert_secrets, postgres_deploy, redis_deploy]] - # Apply deployment files + # Apply deployment files (this creates the namespace) for manifest in deployment_files: result = subprocess.run(["kubectl", "apply", "-f", str(manifest)], capture_output=True, text=True, check=False) if result.stdout and verbose: @@ -724,7 +792,6 @@ def deploy_kubernetes(manifests_dir: Path, verbose: bool = False) -> None: # 2. Apply certificate resources (now namespace exists) # Check for both cert-secrets.yaml (local mode) and cert-manager-certificates.yaml (cert-manager mode) - cert_manager_certs = manifests_dir / "cert-manager-certificates.yaml" if cert_manager_certs.exists(): result = subprocess.run(["kubectl", "apply", "-f", str(cert_manager_certs)], capture_output=True, text=True, check=False) if result.stdout and verbose: @@ -738,7 +805,15 @@ def deploy_kubernetes(manifests_dir: Path, verbose: bool = False) -> None: if result.returncode != 0: raise RuntimeError(f"kubectl apply failed: {result.stderr}") - # 3. Apply infrastructure + # 3. Apply ConfigMaps (needed by deployments) + if plugins_configmap.exists(): + result = subprocess.run(["kubectl", "apply", "-f", str(plugins_configmap)], capture_output=True, text=True, check=False) + if result.stdout and verbose: + console.print(result.stdout) + if result.returncode != 0: + raise RuntimeError(f"kubectl apply failed: {result.stderr}") + + # 4. 
Apply infrastructure for infra_file in [postgres_deploy, redis_deploy]: if infra_file.exists(): result = subprocess.run(["kubectl", "apply", "-f", str(infra_file)], capture_output=True, text=True, check=False) diff --git a/mcpgateway/tools/builder/dagger_deploy.py b/mcpgateway/tools/builder/dagger_deploy.py index d7f7b248a..d5ece26d5 100644 --- a/mcpgateway/tools/builder/dagger_deploy.py +++ b/mcpgateway/tools/builder/dagger_deploy.py @@ -413,7 +413,8 @@ async def _build_component_with_dagger(self, component: BuildableConfig, compone if (clone_dir / ".git").exists(): subprocess.run(["git", "fetch", "origin", git_ref], cwd=clone_dir, check=True, capture_output=True) - subprocess.run(["git", "checkout", git_ref], cwd=clone_dir, check=True, capture_output=True) + # Checkout what we just fetched (FETCH_HEAD) + subprocess.run(["git", "checkout", "FETCH_HEAD"], cwd=clone_dir, check=True, capture_output=True) else: subprocess.run(["git", "clone", "--branch", git_ref, "--depth", "1", repo, str(clone_dir)], check=True, capture_output=True) diff --git a/mcpgateway/tools/builder/python_deploy.py b/mcpgateway/tools/builder/python_deploy.py index c5da5fec6..3af5eb17e 100644 --- a/mcpgateway/tools/builder/python_deploy.py +++ b/mcpgateway/tools/builder/python_deploy.py @@ -46,17 +46,6 @@ class MCPStackPython(CICDModule): """Plain Python implementation of MCP Stack deployment.""" - def __init__(self, verbose: bool = False): - """Initialize MCPStackPython instance. - - Args: - verbose: Enable verbose output - """ - super().__init__(verbose) - - # Detect container runtime (docker or podman) - self.container_runtime = self._detect_container_runtime() - async def build(self, config_file: str, plugins_only: bool = False, specific_plugins: Optional[List[str]] = None, no_cache: bool = False, copy_env_templates: bool = False) -> None: """Build gateway and plugin containers using docker/podman. 
@@ -79,7 +68,7 @@ async def build(self, config_file: str, plugins_only: bool = False, specific_plu with Progress(SpinnerColumn(), TextColumn("[progress.description]{task.description}"), console=self.console) as progress: task = progress.add_task("Building gateway...", total=None) try: - self._build_component(gateway, "gateway", no_cache=no_cache) + self._build_component(gateway, config, "gateway", no_cache=no_cache) progress.update(task, completed=1, description="[green]βœ“ Built gateway[/green]") except Exception as e: progress.update(task, completed=1, description="[red]βœ— Failed gateway[/red]") @@ -119,7 +108,7 @@ async def build(self, config_file: str, plugins_only: bool = False, specific_plu task = progress.add_task(f"Building {plugin_name}...", total=None) try: - self._build_component(plugin, plugin_name, no_cache=no_cache, copy_env_templates=copy_env_templates) + self._build_component(plugin, config, plugin_name, no_cache=no_cache, copy_env_templates=copy_env_templates) progress.update(task, completed=1, description=f"[green]βœ“ Built {plugin_name}[/green]") except Exception as e: progress.update(task, completed=1, description=f"[red]βœ— Failed {plugin_name}[/red]") @@ -308,21 +297,40 @@ def generate_manifests(self, config_file: str, output_dir: Optional[str] = None) # Private helper methods - def _detect_container_runtime(self) -> str: - """Detect available container runtime (docker or podman). + def _detect_container_engine(self, config: MCPStackConfig) -> str: + """Detect available container engine (docker or podman). + + Supports both engine names ("docker", "podman") and full paths ("/opt/podman/bin/podman"). 
Returns: - Name of available runtime "docker" or "podman" + Name or full path to available engine Raises: - RuntimeError: If no container runtime found + RuntimeError: If no container engine found """ + if config.deployment.container_engine: + engine = config.deployment.container_engine + + # Check if it's a full path + if "/" in engine: + if Path(engine).exists() and Path(engine).is_file(): + return engine + else: + raise RuntimeError(f"Specified container engine path does not exist: {engine}") + + # Otherwise treat as command name and check PATH + if shutil.which(engine): + return engine + else: + raise RuntimeError(f"Unable to find specified container engine: {engine}") + + # Auto-detect if shutil.which("docker"): return "docker" elif shutil.which("podman"): return "podman" else: - raise RuntimeError("No container runtime found. Install docker or podman.") + raise RuntimeError("No container engine found. Install docker or podman.") def _run_command(self, cmd: List[str], cwd: Optional[Path] = None, capture_output: bool = False) -> subprocess.CompletedProcess: """Run a shell command. @@ -345,11 +353,12 @@ def _run_command(self, cmd: List[str], cwd: Optional[Path] = None, capture_outpu return result - def _build_component(self, component: BuildableConfig, component_name: str, no_cache: bool = False, copy_env_templates: bool = False) -> None: + def _build_component(self, component: BuildableConfig, config: MCPStackConfig, component_name: str, no_cache: bool = False, copy_env_templates: bool = False) -> None: """Build a component (gateway or plugin) container using docker/podman. 
Args: component: Component configuration (GatewayConfig or PluginConfig) + config: Overall stack configuration component_name: Name of the component (gateway or plugin name) no_cache: Disable cache copy_env_templates: Copy .env.template from repo if it exists @@ -360,6 +369,8 @@ def _build_component(self, component: BuildableConfig, component_name: str, no_c """ repo = component.repo + container_engine = self._detect_container_engine(config) + if not repo: raise ValueError(f"Component '{component_name}' has no 'repo' field") @@ -373,7 +384,8 @@ def _build_component(self, component: BuildableConfig, component_name: str, no_c if self.verbose: self.console.print(f"[dim]Updating {component_name} repository...[/dim]") self._run_command(["git", "fetch", "origin", git_ref], cwd=clone_dir) - self._run_command(["git", "checkout", git_ref], cwd=clone_dir) + # Checkout what we just fetched (FETCH_HEAD) + self._run_command(["git", "checkout", "FETCH_HEAD"], cwd=clone_dir) else: if self.verbose: self.console.print(f"[dim]Cloning {component_name} repository...[/dim]") @@ -404,7 +416,7 @@ def _build_component(self, component: BuildableConfig, component_name: str, no_c # Generate default image name based on component type image_tag = f"mcpgateway-{component_name.lower()}:latest" - build_cmd = [self.container_runtime, "build", "-f", containerfile, "-t", image_tag] + build_cmd = [container_engine, "build", "-f", containerfile, "-t", image_tag] if no_cache: build_cmd.append("--no-cache") @@ -415,7 +427,7 @@ def _build_component(self, component: BuildableConfig, component_name: str, no_c # For Docker, add --load to ensure image is loaded into daemon # (needed for buildx/docker-container driver) - if self.container_runtime == "docker": + if container_engine == "docker": build_cmd.append("--load") build_cmd.append(".") @@ -423,7 +435,7 @@ def _build_component(self, component: BuildableConfig, component_name: str, no_c self._run_command(build_cmd, cwd=build_dir) # Handle registry 
operations (tag and push if enabled) - image_tag = handle_registry_operations(component, component_name, image_tag, self.container_runtime, verbose=self.verbose) + image_tag = handle_registry_operations(component, component_name, image_tag, container_engine, verbose=self.verbose) # Copy .env.template if requested and exists if copy_env_templates: diff --git a/mcpgateway/tools/builder/schema.py b/mcpgateway/tools/builder/schema.py index 401daf1f5..a15991ebb 100644 --- a/mcpgateway/tools/builder/schema.py +++ b/mcpgateway/tools/builder/schema.py @@ -16,6 +16,7 @@ class DeploymentConfig(BaseModel): """Deployment configuration""" type: Literal["kubernetes", "compose"] = Field(..., description="Deployment type") + container_engine: Optional[str] = Field(default=None, description="Container engine: 'podman', 'docker', or full path (e.g., '/opt/podman/bin/podman')") project_name: Optional[str] = Field(None, description="Project name for compose") namespace: Optional[str] = Field(None, description="Namespace for Kubernetes") diff --git a/mcpgateway/tools/builder/templates/compose/docker-compose.yaml.j2 b/mcpgateway/tools/builder/templates/compose/docker-compose.yaml.j2 index 7d763aa30..aaf2fc04e 100644 --- a/mcpgateway/tools/builder/templates/compose/docker-compose.yaml.j2 +++ b/mcpgateway/tools/builder/templates/compose/docker-compose.yaml.j2 @@ -106,6 +106,7 @@ services: {% endif %} {% if plugin.mtls_enabled | default(true) %} # mTLS server configuration (plugin accepts gateway connections) + - PLUGINS_TRANSPORT=http - PLUGINS_SERVER_HOST=0.0.0.0 - PLUGINS_SERVER_PORT={{ plugin.port | default(8000) }} - PLUGINS_SERVER_SSL_ENABLED=true diff --git a/mcpgateway/tools/builder/templates/kubernetes/deployment.yaml.j2 b/mcpgateway/tools/builder/templates/kubernetes/deployment.yaml.j2 index 2946135aa..843bb5fd4 100644 --- a/mcpgateway/tools/builder/templates/kubernetes/deployment.yaml.j2 +++ b/mcpgateway/tools/builder/templates/kubernetes/deployment.yaml.j2 @@ -30,7 +30,7 @@ 
metadata: namespace: {{ namespace }} labels: app: {{ name }} - component: {% if 'gateway' in name %}gateway{% else %}plugin{% endif %} + component: {% if name == 'mcpgateway' %}gateway{% else %}plugin{% endif %} spec: replicas: {{ replicas | default(1) }} selector: @@ -40,7 +40,7 @@ spec: metadata: labels: app: {{ name }} - component: {% if 'gateway' in name %}gateway{% else %}plugin{% endif %} + component: {% if name == 'mcpgateway' %}gateway{% else %}plugin{% endif %} spec: {% if image_pull_secret is defined %} imagePullSecrets: @@ -65,7 +65,7 @@ spec: - name: http containerPort: {{ port | default(8000) }} protocol: TCP - {% if mtls_enabled | default(true) and 'gateway' not in name %} + {% if mtls_enabled | default(true) and name != 'mcpgateway' %} - name: health containerPort: 9000 protocol: TCP @@ -73,7 +73,7 @@ spec: env: {% if mtls_enabled | default(true) %} - {% if 'gateway' in name %} + {% if name == 'mcpgateway' %} # mTLS client configuration (gateway connects to plugins) - name: PLUGINS_CLIENT_MTLS_CA_BUNDLE value: "/app/certs/ca/ca.crt" @@ -87,6 +87,8 @@ spec: value: "{{ mtls_check_hostname | default('false') }}" {% else %} # mTLS server configuration (plugin accepts gateway connections) + - name: PLUGINS_TRANSPORT + value: "http" - name: PLUGINS_SERVER_HOST value: "0.0.0.0" - name: PLUGINS_SERVER_PORT @@ -112,7 +114,7 @@ spec: livenessProbe: httpGet: path: /health - {% if mtls_enabled | default(true) and 'gateway' not in name %} + {% if mtls_enabled | default(true) and name != 'mcpgateway' %} # Plugin with mTLS: use separate health check server on port 9000 port: health scheme: HTTP @@ -129,7 +131,7 @@ spec: readinessProbe: httpGet: path: /health - {% if mtls_enabled | default(true) and 'gateway' not in name %} + {% if mtls_enabled | default(true) and name != 'mcpgateway' %} # Plugin with mTLS: use separate health check server on port 9000 port: health scheme: HTTP @@ -161,6 +163,11 @@ spec: mountPath: /app/certs/ca readOnly: true {% endif %} + {% if 
name == 'mcpgateway' and has_plugins | default(false) %} + - name: plugins-config + mountPath: /app/config + readOnly: true + {% endif %} {% if volume_mounts is defined %} {% for mount in volume_mounts %} @@ -194,6 +201,12 @@ spec: secretName: mcp-ca-secret defaultMode: 0444 {% endif %} + {% if name == 'mcpgateway' and has_plugins | default(false) %} + - name: plugins-config + configMap: + name: plugins-config + defaultMode: 0444 + {% endif %} {% if volumes is defined %} {% for volume in volumes %} diff --git a/mcpgateway/tools/builder/templates/kubernetes/plugins-configmap.yaml.j2 b/mcpgateway/tools/builder/templates/kubernetes/plugins-configmap.yaml.j2 new file mode 100644 index 000000000..d517d8459 --- /dev/null +++ b/mcpgateway/tools/builder/templates/kubernetes/plugins-configmap.yaml.j2 @@ -0,0 +1,13 @@ +# Location: ./mcpgateway/tools/builder/templates/kubernetes/plugins-configmap.yaml.j2 +# Copyright 2025 +# SPDX-License-Identifier: Apache-2.0 +# Authors: Teryl Taylor +# ConfigMap for plugins configuration +apiVersion: v1 +kind: ConfigMap +metadata: + name: plugins-config + namespace: {{ namespace }} +data: + plugins.yaml: | +{{ plugins_config | safe | indent(4, first=True) }} From 68bd4bdf5235c2c174010e46be7159fc24938459 Mon Sep 17 00:00:00 2001 From: Teryl Taylor Date: Fri, 17 Oct 2025 14:42:02 -0600 Subject: [PATCH 35/35] feat: added openshift route file for installing route to mcpgateway admin site. 
Signed-off-by: Teryl Taylor --- docs/docs/deployment/cforge-gateway.md | 48 +++++++++++++++++++ .../deploy-openshift-local-registry.yaml | 4 ++ mcpgateway/tools/builder/common.py | 43 +++++++++++++++++ mcpgateway/tools/builder/schema.py | 18 +++++++ 4 files changed, 113 insertions(+) diff --git a/docs/docs/deployment/cforge-gateway.md b/docs/docs/deployment/cforge-gateway.md index 5d2806601..042a0d2ab 100644 --- a/docs/docs/deployment/cforge-gateway.md +++ b/docs/docs/deployment/cforge-gateway.md @@ -499,6 +499,12 @@ deployment: project_name: my-project # Docker Compose only namespace: mcp-gateway # Kubernetes only container_engine: podman | docker # Container runtime (auto-detected if not specified) + + # OpenShift-specific configuration (optional) + openshift: + create_routes: true # Create OpenShift Route resources + domain: apps-crc.testing # OpenShift apps domain (auto-detected if omitted) + tls_termination: edge # TLS termination mode: edge, passthrough, or reencrypt ``` | Field | Type | Required | Description | Default | @@ -507,6 +513,48 @@ deployment: | `project_name` | string | ❌ | Docker Compose project name | - | | `namespace` | string | ❌ | Kubernetes namespace | - | | `container_engine` | string | ❌ | Container runtime: `docker` or `podman` | Auto-detected | +| `openshift` | object | ❌ | OpenShift-specific configuration (see below) | - | + +#### OpenShift Configuration + +OpenShift Routes provide native external access to services, with built-in TLS termination and integration with OpenShift's router/HAProxy infrastructure. 
+ +| Field | Type | Required | Description | Default | +|-------|------|----------|-------------|---------| +| `create_routes` | boolean | ❌ | Create OpenShift Route resources for external access | `false` | +| `domain` | string | ❌ | OpenShift apps domain for route hostnames | Auto-detected from cluster | +| `tls_termination` | string | ❌ | TLS termination mode: `edge`, `passthrough`, or `reencrypt` | `edge` | + +**Example:** +```yaml +deployment: + type: kubernetes + namespace: mcp-gateway-test + openshift: + create_routes: true + domain: apps-crc.testing + tls_termination: edge +``` + +When `create_routes: true`, the tool generates an OpenShift Route for the gateway: +- **Host**: `mcpgateway-admin-{namespace}.{domain}` +- **Path**: `/` +- **TLS**: Edge termination (default) +- **Target**: Gateway service on HTTP port + +**Access the gateway:** +```bash +# OpenShift Local (CRC) example +https://mcpgateway-admin-mcp-gateway-test.apps-crc.testing +``` + +**Domain auto-detection:** +If `domain` is not specified, the tool attempts to auto-detect the OpenShift apps domain from the cluster: +```bash +kubectl get ingresses.config.openshift.io cluster -o jsonpath='{.spec.domain}' +``` + +If auto-detection fails, it defaults to `apps-crc.testing` (OpenShift Local). 
--- diff --git a/examples/deployment-configs/deploy-openshift-local-registry.yaml b/examples/deployment-configs/deploy-openshift-local-registry.yaml index d1d6a064c..3a18713a1 100644 --- a/examples/deployment-configs/deploy-openshift-local-registry.yaml +++ b/examples/deployment-configs/deploy-openshift-local-registry.yaml @@ -18,6 +18,10 @@ deployment: type: kubernetes namespace: mcp-gateway-test container_engine: podman + openshift: + create_routes: true + domain: apps-crc.testing # Optional, auto-detected if omitted + tls_termination: edge # MCP Gateway configuration gateway: diff --git a/mcpgateway/tools/builder/common.py b/mcpgateway/tools/builder/common.py index 5b7080491..9601479cf 100644 --- a/mcpgateway/tools/builder/common.py +++ b/mcpgateway/tools/builder/common.py @@ -382,6 +382,37 @@ def generate_kubernetes_manifests(config: MCPStackConfig, output_dir: Path, verb gateway_manifest = gateway_template.render(**gateway_dict) (output_dir / "gateway-deployment.yaml").write_text(gateway_manifest) + # Generate OpenShift Route if configured + if config.deployment.openshift and config.deployment.openshift.create_routes: + route_template = env.get_template("route.yaml.j2") + openshift_config = config.deployment.openshift + + # Auto-detect OpenShift apps domain if not specified + openshift_domain = openshift_config.domain + if not openshift_domain: + try: + # Try to get domain from OpenShift cluster info + result = subprocess.run(["kubectl", "get", "ingresses.config.openshift.io", "cluster", "-o", "jsonpath={.spec.domain}"], capture_output=True, text=True, check=False) + if result.returncode == 0 and result.stdout.strip(): + openshift_domain = result.stdout.strip() + if verbose: + console.print(f"[dim]Auto-detected OpenShift domain: {openshift_domain}[/dim]") + else: + # Fallback to common OpenShift Local domain + openshift_domain = "apps-crc.testing" + if verbose: + console.print(f"[yellow]Could not auto-detect OpenShift domain, using default: 
{openshift_domain}[/yellow]") + except Exception: + # Fallback to common OpenShift Local domain + openshift_domain = "apps-crc.testing" + if verbose: + console.print(f"[yellow]Could not auto-detect OpenShift domain, using default: {openshift_domain}[/yellow]") + + route_manifest = route_template.render(namespace=namespace, openshift_domain=openshift_domain, tls_termination=openshift_config.tls_termination) + (output_dir / "gateway-route.yaml").write_text(route_manifest) + if verbose: + print(" βœ“ OpenShift Route manifest generated") + # Generate plugin deployments for plugin in config.plugins: # Convert Pydantic model to dict for template rendering @@ -755,6 +786,7 @@ def deploy_kubernetes(manifests_dir: Path, verbose: bool = False) -> None: 2. Certificate resources (secrets or cert-manager CRDs) 3. ConfigMaps (plugins configuration) 4. Infrastructure (PostgreSQL, Redis) + 5. OpenShift Routes (if configured) Excludes plugins-config.yaml (not a Kubernetes resource). @@ -822,6 +854,17 @@ def deploy_kubernetes(manifests_dir: Path, verbose: bool = False) -> None: if result.returncode != 0: raise RuntimeError(f"kubectl apply failed: {result.stderr}") + # 5. 
Apply OpenShift Routes (if configured) + gateway_route = manifests_dir / "gateway-route.yaml" + if gateway_route.exists(): + result = subprocess.run(["kubectl", "apply", "-f", str(gateway_route)], capture_output=True, text=True, check=False) + if result.stdout and verbose: + console.print(result.stdout) + if result.returncode != 0: + # Don't fail on Route errors (may not be on OpenShift) + if verbose: + console.print(f"[yellow]Warning: Could not apply Route (may not be on OpenShift): {result.stderr}[/yellow]") + console.print("[green]βœ“ Deployed to Kubernetes[/green]") diff --git a/mcpgateway/tools/builder/schema.py b/mcpgateway/tools/builder/schema.py index a15991ebb..3f3220049 100644 --- a/mcpgateway/tools/builder/schema.py +++ b/mcpgateway/tools/builder/schema.py @@ -12,6 +12,23 @@ from pydantic import BaseModel, ConfigDict, Field, field_validator +class OpenShiftConfig(BaseModel): + """OpenShift-specific configuration. + + Routes are OpenShift's native way of exposing services externally (predates Kubernetes Ingress). + They provide built-in TLS termination and are integrated with OpenShift's router/HAProxy infrastructure. 
+ + Attributes: + create_routes: Create OpenShift Route resources for external access (default: False) + domain: OpenShift apps domain for route hostnames (default: auto-detected from cluster) + tls_termination: TLS termination mode - edge, passthrough, or reencrypt (default: edge) + """ + + create_routes: bool = Field(False, description="Create OpenShift Route resources") + domain: Optional[str] = Field(None, description="OpenShift apps domain (e.g., apps-crc.testing)") + tls_termination: Literal["edge", "passthrough", "reencrypt"] = Field("edge", description="TLS termination mode") + + class DeploymentConfig(BaseModel): """Deployment configuration""" @@ -19,6 +36,7 @@ class DeploymentConfig(BaseModel): container_engine: Optional[str] = Field(default=None, description="Container engine: 'podman', 'docker', or full path (e.g., '/opt/podman/bin/podman')") project_name: Optional[str] = Field(None, description="Project name for compose") namespace: Optional[str] = Field(None, description="Namespace for Kubernetes") + openshift: Optional[OpenShiftConfig] = Field(None, description="OpenShift-specific configuration") class RegistryConfig(BaseModel):